Merge pull request #13373 from id/0701-sync-release-57

sync release-57
This commit is contained in:
Ivan Dyachkov 2024-07-01 16:02:29 +02:00 committed by GitHub
commit 532f04da9d
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
186 changed files with 3302 additions and 962 deletions

View File

@ -18,7 +18,7 @@ services:
- /tmp/emqx-ci/emqx-shared-secret:/var/lib/secret
kdc:
hostname: kdc.emqx.net
image: ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-ubuntu22.04
image: ghcr.io/emqx/emqx-builder/5.3-9:1.15.7-26.2.5-3-ubuntu22.04
container_name: kdc.emqx.net
expose:
- 88 # kdc

View File

@ -3,7 +3,7 @@ version: '3.9'
services:
erlang:
container_name: erlang
image: ${DOCKER_CT_RUNNER_IMAGE:-ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-ubuntu22.04}
image: ${DOCKER_CT_RUNNER_IMAGE:-ghcr.io/emqx/emqx-builder/5.3-9:1.15.7-26.2.5-3-ubuntu22.04}
env_file:
- credentials.env
- conf.env

View File

@ -1,24 +1,8 @@
name: 'Prepare jmeter'
inputs:
version-emqx:
required: true
type: string
runs:
using: composite
steps:
- uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4
with:
name: emqx-docker
path: /tmp
- name: load docker image
shell: bash
env:
PKG_VSN: ${{ inputs.version-emqx }}
run: |
EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g')
echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
with:
repository: emqx/emqx-fvt

View File

@ -11,23 +11,42 @@ on:
ref:
required: false
defaults:
run:
shell: bash
env:
IS_CI: "yes"
jobs:
init:
runs-on: ubuntu-22.04
outputs:
BUILDER_VSN: ${{ steps.env.outputs.BUILDER_VSN }}
OTP_VSN: ${{ steps.env.outputs.OTP_VSN }}
ELIXIR_VSN: ${{ steps.env.outputs.ELIXIR_VSN }}
BUILDER: ${{ steps.env.outputs.BUILDER }}
steps:
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
with:
ref: ${{ github.event.inputs.ref }}
- name: Set up environment
id: env
run: |
source ./env.sh
echo "BUILDER_VSN=$EMQX_BUILDER_VSN" | tee -a "$GITHUB_OUTPUT"
echo "OTP_VSN=$OTP_VSN" | tee -a "$GITHUB_OUTPUT"
echo "ELIXIR_VSN=$ELIXIR_VSN" | tee -a "$GITHUB_OUTPUT"
echo "BUILDER=$EMQX_BUILDER" | tee -a "$GITHUB_OUTPUT"
sanity-checks:
runs-on: ubuntu-22.04
container: "ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-ubuntu22.04"
needs: init
container: ${{ needs.init.outputs.BUILDER }}
outputs:
ct-matrix: ${{ steps.matrix.outputs.ct-matrix }}
ct-host: ${{ steps.matrix.outputs.ct-host }}
ct-docker: ${{ steps.matrix.outputs.ct-docker }}
version-emqx: ${{ steps.matrix.outputs.version-emqx }}
version-emqx-enterprise: ${{ steps.matrix.outputs.version-emqx-enterprise }}
builder: "ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-ubuntu22.04"
builder_vsn: "5.3-8"
otp_vsn: "26.2.5-2"
elixir_vsn: "1.15.7"
permissions:
contents: read
@ -92,35 +111,20 @@ jobs:
- name: Generate CT Matrix
id: matrix
run: |
APPS="$(./scripts/find-apps.sh --ci)"
MATRIX="$(echo "${APPS}" | jq -c '
[
(.[] | select(.profile == "emqx") | . + {
builder: "5.3-8",
otp: "26.2.5-2",
elixir: "1.15.7"
}),
(.[] | select(.profile == "emqx-enterprise") | . + {
builder: "5.3-8",
otp: ["26.2.5-2"][],
elixir: "1.15.7"
})
]
')"
MATRIX="$(./scripts/find-apps.sh --ci)"
echo "${MATRIX}" | jq
CT_MATRIX="$(echo "${MATRIX}" | jq -c 'map({profile, builder, otp, elixir}) | unique')"
CT_MATRIX="$(echo "${MATRIX}" | jq -c 'map({profile}) | unique')"
CT_HOST="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "host"))')"
CT_DOCKER="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "docker"))')"
echo "ct-matrix=${CT_MATRIX}" | tee -a $GITHUB_OUTPUT
echo "ct-host=${CT_HOST}" | tee -a $GITHUB_OUTPUT
echo "ct-docker=${CT_DOCKER}" | tee -a $GITHUB_OUTPUT
echo "version-emqx=$(./pkg-vsn.sh emqx)" | tee -a $GITHUB_OUTPUT
echo "version-emqx-enterprise=$(./pkg-vsn.sh emqx-enterprise)" | tee -a $GITHUB_OUTPUT
compile:
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral-xl","linux","x64"]') }}
container: ${{ needs.sanity-checks.outputs.builder }}
container: ${{ needs.init.outputs.BUILDER }}
needs:
- init
- sanity-checks
strategy:
matrix:
@ -156,53 +160,47 @@ jobs:
run_emqx_app_tests:
needs:
- init
- sanity-checks
- compile
uses: ./.github/workflows/run_emqx_app_tests.yaml
with:
builder: ${{ needs.sanity-checks.outputs.builder }}
builder: ${{ needs.init.outputs.BUILDER }}
before_ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || github.event.before }}
after_ref: ${{ github.sha }}
run_test_cases:
needs:
- init
- sanity-checks
- compile
uses: ./.github/workflows/run_test_cases.yaml
with:
builder: ${{ needs.sanity-checks.outputs.builder }}
builder: ${{ needs.init.outputs.BUILDER }}
ct-matrix: ${{ needs.sanity-checks.outputs.ct-matrix }}
ct-host: ${{ needs.sanity-checks.outputs.ct-host }}
ct-docker: ${{ needs.sanity-checks.outputs.ct-docker }}
static_checks:
needs:
- init
- sanity-checks
- compile
uses: ./.github/workflows/static_checks.yaml
with:
builder: ${{ needs.sanity-checks.outputs.builder }}
builder: ${{ needs.init.outputs.BUILDER }}
ct-matrix: ${{ needs.sanity-checks.outputs.ct-matrix }}
build_slim_packages:
needs:
- sanity-checks
uses: ./.github/workflows/build_slim_packages.yaml
with:
builder: ${{ needs.sanity-checks.outputs.builder }}
builder_vsn: ${{ needs.sanity-checks.outputs.builder_vsn }}
otp_vsn: ${{ needs.sanity-checks.outputs.otp_vsn }}
elixir_vsn: ${{ needs.sanity-checks.outputs.elixir_vsn }}
build_docker_for_test:
needs:
- init
- sanity-checks
uses: ./.github/workflows/build_docker_for_test.yaml
with:
otp_vsn: ${{ needs.sanity-checks.outputs.otp_vsn }}
elixir_vsn: ${{ needs.sanity-checks.outputs.elixir_vsn }}
version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }}
version-emqx-enterprise: ${{ needs.sanity-checks.outputs.version-emqx-enterprise }}
spellcheck:
needs:
@ -212,41 +210,35 @@ jobs:
run_conf_tests:
needs:
- init
- sanity-checks
- compile
uses: ./.github/workflows/run_conf_tests.yaml
with:
builder: ${{ needs.sanity-checks.outputs.builder }}
builder: ${{ needs.init.outputs.BUILDER }}
check_deps_integrity:
needs:
- init
- sanity-checks
uses: ./.github/workflows/check_deps_integrity.yaml
with:
builder: ${{ needs.sanity-checks.outputs.builder }}
builder: ${{ needs.init.outputs.BUILDER }}
run_jmeter_tests:
needs:
- sanity-checks
- build_docker_for_test
uses: ./.github/workflows/run_jmeter_tests.yaml
with:
version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }}
run_docker_tests:
needs:
- sanity-checks
- build_docker_for_test
uses: ./.github/workflows/run_docker_tests.yaml
with:
version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }}
version-emqx-enterprise: ${{ needs.sanity-checks.outputs.version-emqx-enterprise }}
run_helm_tests:
needs:
- sanity-checks
- build_docker_for_test
uses: ./.github/workflows/run_helm_tests.yaml
with:
version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }}
version-emqx-enterprise: ${{ needs.sanity-checks.outputs.version-emqx-enterprise }}

View File

@ -8,7 +8,6 @@ on:
push:
tags:
- 'v*'
- 'e*'
branches:
- 'master'
- 'release-5[0-9]'
@ -18,13 +17,42 @@ on:
ref:
required: false
defaults:
run:
shell: bash
env:
IS_CI: 'yes'
jobs:
init:
runs-on: ubuntu-22.04
outputs:
BUILDER_VSN: ${{ steps.env.outputs.BUILDER_VSN }}
OTP_VSN: ${{ steps.env.outputs.OTP_VSN }}
ELIXIR_VSN: ${{ steps.env.outputs.ELIXIR_VSN }}
BUILDER: ${{ steps.env.outputs.BUILDER }}
BUILD_FROM: ${{ steps.env.outputs.BUILD_FROM }}
      RUN_FROM: ${{ steps.env.outputs.RUN_FROM }}
steps:
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
with:
ref: ${{ github.event.inputs.ref }}
- name: Set up environment
id: env
run: |
source env.sh
echo "BUILDER_VSN=$EMQX_BUILDER_VSN" >> "$GITHUB_OUTPUT"
echo "OTP_VSN=$OTP_VSN" >> "$GITHUB_OUTPUT"
echo "ELIXIR_VSN=$ELIXIR_VSN" >> "$GITHUB_OUTPUT"
echo "BUILDER=$EMQX_BUILDER" >> "$GITHUB_OUTPUT"
echo "BUILD_FROM=$EMQX_DOCKER_BUILD_FROM" >> "$GITHUB_OUTPUT"
echo "RUN_FROM=$EMQX_DOCKER_RUN_FROM" >> "$GITHUB_OUTPUT"
prepare:
runs-on: ubuntu-22.04
container: 'ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-ubuntu22.04'
needs: init
container: ${{ needs.init.outputs.BUILDER }}
outputs:
profile: ${{ steps.parse-git-ref.outputs.profile }}
release: ${{ steps.parse-git-ref.outputs.release }}
@ -32,10 +60,6 @@ jobs:
ct-matrix: ${{ steps.matrix.outputs.ct-matrix }}
ct-host: ${{ steps.matrix.outputs.ct-host }}
ct-docker: ${{ steps.matrix.outputs.ct-docker }}
builder: 'ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-ubuntu22.04'
builder_vsn: '5.3-8'
otp_vsn: '26.2.5-2'
elixir_vsn: '1.15.7'
permissions:
contents: read
@ -62,23 +86,9 @@ jobs:
- name: Build matrix
id: matrix
run: |
APPS="$(./scripts/find-apps.sh --ci)"
MATRIX="$(echo "${APPS}" | jq -c '
[
(.[] | select(.profile == "emqx") | . + {
builder: "5.3-8",
otp: "26.2.5-2",
elixir: "1.15.7"
}),
(.[] | select(.profile == "emqx-enterprise") | . + {
builder: "5.3-8",
otp: ["26.2.5-2"][],
elixir: "1.15.7"
})
]
')"
MATRIX="$(./scripts/find-apps.sh --ci)"
echo "${MATRIX}" | jq
CT_MATRIX="$(echo "${MATRIX}" | jq -c 'map({profile, builder, otp, elixir}) | unique')"
CT_MATRIX="$(echo "${MATRIX}" | jq -c 'map({profile}) | unique')"
CT_HOST="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "host"))')"
CT_DOCKER="$(echo "${MATRIX}" | jq -c 'map(select(.runner == "docker"))')"
echo "ct-matrix=${CT_MATRIX}" | tee -a $GITHUB_OUTPUT
@ -88,46 +98,44 @@ jobs:
build_packages:
if: needs.prepare.outputs.release == 'true'
needs:
- init
- prepare
uses: ./.github/workflows/build_packages.yaml
with:
profile: ${{ needs.prepare.outputs.profile }}
publish: true
otp_vsn: ${{ needs.prepare.outputs.otp_vsn }}
elixir_vsn: ${{ needs.prepare.outputs.elixir_vsn }}
builder_vsn: ${{ needs.prepare.outputs.builder_vsn }}
otp_vsn: ${{ needs.init.outputs.OTP_VSN }}
elixir_vsn: ${{ needs.init.outputs.ELIXIR_VSN }}
builder_vsn: ${{ needs.init.outputs.BUILDER_VSN }}
secrets: inherit
build_and_push_docker_images:
if: needs.prepare.outputs.release == 'true'
needs:
- init
- prepare
uses: ./.github/workflows/build_and_push_docker_images.yaml
with:
profile: ${{ needs.prepare.outputs.profile }}
publish: true
latest: ${{ needs.prepare.outputs.latest }}
otp_vsn: ${{ needs.prepare.outputs.otp_vsn }}
elixir_vsn: ${{ needs.prepare.outputs.elixir_vsn }}
builder_vsn: ${{ needs.prepare.outputs.builder_vsn }}
build_from: ${{ needs.init.outputs.BUILD_FROM }}
run_from: ${{ needs.init.outputs.RUN_FROM }}
secrets: inherit
build_slim_packages:
if: needs.prepare.outputs.release != 'true'
needs:
- init
- prepare
uses: ./.github/workflows/build_slim_packages.yaml
with:
builder: ${{ needs.prepare.outputs.builder }}
builder_vsn: ${{ needs.prepare.outputs.builder_vsn }}
otp_vsn: ${{ needs.prepare.outputs.otp_vsn }}
elixir_vsn: ${{ needs.prepare.outputs.elixir_vsn }}
compile:
if: needs.prepare.outputs.release != 'true'
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
container: ${{ needs.prepare.outputs.builder }}
container: ${{ needs.init.outputs.BUILDER }}
needs:
- init
- prepare
strategy:
matrix:
@ -163,22 +171,23 @@ jobs:
run_emqx_app_tests:
needs:
- prepare
- init
- compile
uses: ./.github/workflows/run_emqx_app_tests.yaml
with:
builder: ${{ needs.prepare.outputs.builder }}
builder: ${{ needs.init.outputs.BUILDER }}
before_ref: ${{ github.event.before }}
after_ref: ${{ github.sha }}
run_test_cases:
if: needs.prepare.outputs.release != 'true'
needs:
- init
- prepare
- compile
uses: ./.github/workflows/run_test_cases.yaml
with:
builder: ${{ needs.prepare.outputs.builder }}
builder: ${{ needs.init.outputs.BUILDER }}
ct-matrix: ${{ needs.prepare.outputs.ct-matrix }}
ct-host: ${{ needs.prepare.outputs.ct-host }}
ct-docker: ${{ needs.prepare.outputs.ct-docker }}
@ -186,18 +195,20 @@ jobs:
run_conf_tests:
if: needs.prepare.outputs.release != 'true'
needs:
- init
- prepare
- compile
uses: ./.github/workflows/run_conf_tests.yaml
with:
builder: ${{ needs.prepare.outputs.builder }}
builder: ${{ needs.init.outputs.BUILDER }}
static_checks:
if: needs.prepare.outputs.release != 'true'
needs:
- init
- prepare
- compile
uses: ./.github/workflows/static_checks.yaml
with:
builder: ${{ needs.prepare.outputs.builder }}
builder: ${{ needs.init.outputs.BUILDER }}
ct-matrix: ${{ needs.prepare.outputs.ct-matrix }}

View File

@ -16,13 +16,10 @@ on:
publish:
required: true
type: boolean
otp_vsn:
build_from:
required: true
type: string
elixir_vsn:
required: true
type: string
builder_vsn:
run_from:
required: true
type: string
secrets:
@ -50,18 +47,12 @@ on:
required: false
type: boolean
default: false
otp_vsn:
build_from:
required: false
type: string
default: '26.2.5-2'
elixir_vsn:
required: false
type: string
default: '1.15.7'
builder_vsn:
required: false
type: string
default: '5.3-8'
default: ghcr.io/emqx/emqx-builder/5.3-9:1.15.7-26.2.5-3-debian12
run_from:
default: public.ecr.aws/debian/debian:stable-20240612-slim
permissions:
contents: read
@ -69,7 +60,7 @@ permissions:
jobs:
build:
runs-on: ${{ github.repository_owner == 'emqx' && fromJSON(format('["self-hosted","ephemeral","linux","{0}"]', matrix.arch)) || 'ubuntu-22.04' }}
container: "ghcr.io/emqx/emqx-builder/${{ inputs.builder_vsn }}:${{ inputs.elixir_vsn }}-${{ inputs.otp_vsn }}-debian12"
container: ${{ inputs.build_from }}
outputs:
PKG_VSN: ${{ steps.build.outputs.PKG_VSN }}
@ -164,13 +155,9 @@ jobs:
DOCKER_LATEST: ${{ inputs.latest }}
DOCKER_PUSH: false
DOCKER_BUILD_NOCACHE: true
DOCKER_LOAD: true
EMQX_RUNNER: 'public.ecr.aws/debian/debian:stable-20240612-slim'
EMQX_DOCKERFILE: 'deploy/docker/Dockerfile'
BUILD_FROM: ${{ inputs.build_from }}
RUN_FROM: ${{ inputs.run_from }}
PKG_VSN: ${{ needs.build.outputs.PKG_VSN }}
EMQX_BUILDER_VERSION: ${{ inputs.builder_vsn }}
OTP_VSN: ${{ inputs.otp_vsn }}
ELIXIR_VSN: ${{ inputs.elixir_vsn }}
EMQX_SOURCE_TYPE: tgz
run: |
./build ${PROFILE} docker
@ -184,7 +171,7 @@ jobs:
timeout-minutes: 1
run: |
for tag in $(cat .emqx_docker_image_tags); do
CID=$(docker run -d -P $tag)
CID=$(docker run -d -p 18083:18083 $tag)
HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' $CID)
./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT
docker rm -f $CID
@ -214,12 +201,9 @@ jobs:
DOCKER_BUILD_NOCACHE: false
DOCKER_PLATFORMS: linux/amd64,linux/arm64
DOCKER_LOAD: false
EMQX_RUNNER: 'public.ecr.aws/debian/debian:stable-20240612-slim'
EMQX_DOCKERFILE: 'deploy/docker/Dockerfile'
BUILD_FROM: ${{ inputs.build_from }}
RUN_FROM: ${{ inputs.run_from }}
PKG_VSN: ${{ needs.build.outputs.PKG_VSN }}
EMQX_BUILDER_VERSION: ${{ inputs.builder_vsn }}
OTP_VSN: ${{ inputs.otp_vsn }}
ELIXIR_VSN: ${{ inputs.elixir_vsn }}
EMQX_SOURCE_TYPE: tgz
run: |
./build ${PROFILE} docker

View File

@ -6,19 +6,6 @@ concurrency:
on:
workflow_call:
inputs:
otp_vsn:
required: true
type: string
elixir_vsn:
required: true
type: string
version-emqx:
required: true
type: string
version-emqx-enterprise:
required: true
type: string
permissions:
contents: read
@ -28,9 +15,6 @@ jobs:
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
env:
EMQX_NAME: ${{ matrix.profile }}
PKG_VSN: ${{ matrix.profile == 'emqx-enterprise' && inputs.version-emqx-enterprise || inputs.version-emqx }}
OTP_VSN: ${{ inputs.otp_vsn }}
ELIXIR_VSN: ${{ inputs.elixir_vsn }}
strategy:
fail-fast: false
@ -43,6 +27,12 @@ jobs:
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
- name: Set up environment
id: env
run: |
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh "$EMQX_NAME")
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- name: build and export to Docker
id: build
run: |
@ -52,9 +42,13 @@ jobs:
run: |
CID=$(docker run -d --rm -P $_EMQX_DOCKER_IMAGE_TAG)
HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' $CID)
./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT
./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT || {
docker logs $CID
exit 1
}
docker stop $CID
- name: export docker image
if: always()
run: |
docker save $_EMQX_DOCKER_IMAGE_TAG | gzip > $EMQX_NAME-docker-$PKG_VSN.tar.gz
- uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3

View File

@ -55,7 +55,7 @@ on:
otp_vsn:
required: false
type: string
default: '26.2.5-2'
default: '26.2.5-3'
elixir_vsn:
required: false
type: string
@ -63,7 +63,7 @@ on:
builder_vsn:
required: false
type: string
default: '5.3-8'
default: '5.3-9'
permissions:
contents: read

View File

@ -16,19 +16,22 @@ jobs:
linux:
if: github.repository_owner == 'emqx'
runs-on: ${{ endsWith(github.repository, '/emqx') && 'ubuntu-22.04' || fromJSON('["self-hosted","ephemeral","linux","x64"]') }}
container:
image: "ghcr.io/emqx/emqx-builder/${{ matrix.profile[2] }}-${{ matrix.os }}"
strategy:
fail-fast: false
matrix:
profile:
- ['emqx', 'master', '5.3-8:1.15.7-26.2.5-2']
- ['emqx', 'release-57', '5.3-8:1.15.7-26.2.5-2']
- ['emqx', 'master']
- ['emqx', 'release-57']
os:
- ubuntu22.04
- amzn2023
env:
PROFILE: ${{ matrix.profile[0] }}
OS: ${{ matrix.os }}
BUILDER_SYSTEM: force_docker
defaults:
run:
shell: bash
@ -38,33 +41,18 @@ jobs:
with:
ref: ${{ matrix.profile[1] }}
fetch-depth: 0
- name: fix workdir
- name: Set up environment
id: env
run: |
set -eu
git config --global --add safe.directory "$GITHUB_WORKSPACE"
# Align path for CMake caches
if [ ! "$PWD" = "/emqx" ]; then
ln -s $PWD /emqx
cd /emqx
fi
echo "pwd is $PWD"
- name: build emqx packages
env:
PROFILE: ${{ matrix.profile[0] }}
ACLOCAL_PATH: "/usr/share/aclocal:/usr/local/share/aclocal"
source env.sh
BUILDER="ghcr.io/emqx/emqx-builder/${EMQX_BUILDER_VSN}:${ELIXIR_VSN}-${OTP_VSN}-${OS}"
echo "BUILDER=$BUILDER" >> "$GITHUB_ENV"
- name: build tgz
run: |
set -eu
make "${PROFILE}-tgz"
make "${PROFILE}-pkg"
- name: test emqx packages
env:
PROFILE: ${{ matrix.profile[0] }}
./scripts/buildx.sh --profile "$PROFILE" --pkgtype tgz --builder "$BUILDER"
- name: build pkg
run: |
set -eu
./scripts/pkg-tests.sh "${PROFILE}-tgz"
./scripts/pkg-tests.sh "${PROFILE}-pkg"
./scripts/buildx.sh --profile "$PROFILE" --pkgtype pkg --builder "$BUILDER"
- uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3
if: success()
with:
@ -91,20 +79,23 @@ jobs:
- emqx
branch:
- master
otp:
- 26.2.5-2
os:
- macos-12-arm64
- macos-14-arm64
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
with:
ref: ${{ matrix.branch }}
fetch-depth: 0
- name: Set up environment
id: env
run: |
source env.sh
echo "OTP_VSN=$OTP_VSN" >> "$GITHUB_OUTPUT"
- uses: ./.github/actions/package-macos
with:
profile: ${{ matrix.profile }}
otp: ${{ matrix.otp }}
otp: ${{ steps.env.outputs.OTP_VSN }}
os: ${{ matrix.os }}
apple_id_password: ${{ secrets.APPLE_ID_PASSWORD }}
apple_developer_identity: ${{ secrets.APPLE_DEVELOPER_IDENTITY }}

View File

@ -6,97 +6,50 @@ concurrency:
on:
workflow_call:
inputs:
builder:
required: true
type: string
builder_vsn:
required: true
type: string
otp_vsn:
required: true
type: string
elixir_vsn:
required: true
type: string
workflow_dispatch:
inputs:
ref:
required: false
builder:
required: false
type: string
default: 'ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-ubuntu22.04'
builder_vsn:
required: false
type: string
default: '5.3-8'
otp_vsn:
required: false
type: string
default: '26.2.5-2'
elixir_vsn:
required: false
type: string
default: '1.15.7'
permissions:
contents: read
jobs:
linux:
runs-on: ${{ github.repository_owner == 'emqx' && fromJSON(format('["self-hosted","ephemeral","linux","{0}"]', matrix.profile[4])) || 'ubuntu-22.04' }}
runs-on: ${{ github.repository_owner == 'emqx' && fromJSON(format('["self-hosted","ephemeral","linux","{0}"]', matrix.profile[2])) || 'ubuntu-22.04' }}
env:
EMQX_NAME: ${{ matrix.profile[0] }}
PROFILE: ${{ matrix.profile[0] }}
ELIXIR: ${{ matrix.profile[1] == 'elixir' && 'yes' || 'no' }}
ARCH: ${{ matrix.profile[2] == 'x64' && 'amd64' || 'arm64' }}
BUILDER_SYSTEM: force_docker
strategy:
fail-fast: false
matrix:
profile:
- ["emqx", "26.2.5-2", "ubuntu22.04", "elixir", "x64"]
- ["emqx", "26.2.5-2", "ubuntu22.04", "elixir", "arm64"]
- ["emqx-enterprise", "26.2.5-2", "ubuntu22.04", "erlang", "x64"]
container: "ghcr.io/emqx/emqx-builder/${{ inputs.builder_vsn }}:${{ inputs.elixir_vsn }}-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}"
- ["emqx", "elixir", "x64"]
- ["emqx", "elixir", "arm64"]
- ["emqx-enterprise", "erlang", "x64"]
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
with:
fetch-depth: 0
- name: Work around https://github.com/actions/checkout/issues/766
- name: build tgz
run: |
git config --global --add safe.directory "$GITHUB_WORKSPACE"
echo "CODE_PATH=$GITHUB_WORKSPACE" >> $GITHUB_ENV
- name: build and test tgz package
if: matrix.profile[3] == 'erlang'
./scripts/buildx.sh --profile $PROFILE --pkgtype tgz --elixir $ELIXIR --arch $ARCH
- name: build pkg
run: |
make ${EMQX_NAME}-tgz
./scripts/pkg-tests.sh ${EMQX_NAME}-tgz
- name: build and test deb/rpm packages
if: matrix.profile[3] == 'erlang'
run: |
make ${EMQX_NAME}-pkg
./scripts/pkg-tests.sh ${EMQX_NAME}-pkg
- name: build and test tgz package (Elixir)
if: matrix.profile[3] == 'elixir'
run: |
make ${EMQX_NAME}-elixir-tgz
./scripts/pkg-tests.sh ${EMQX_NAME}-elixir-tgz
- name: build and test deb/rpm packages (Elixir)
if: matrix.profile[3] == 'elixir'
run: |
make ${EMQX_NAME}-elixir-pkg
./scripts/pkg-tests.sh ${EMQX_NAME}-elixir-pkg
./scripts/buildx.sh --profile $PROFILE --pkgtype pkg --elixir $ELIXIR --arch $ARCH
- uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3
with:
name: "${{ matrix.profile[0] }}-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}-${{ matrix.profile[3] }}-${{ matrix.profile[4] }}"
name: "${{ matrix.profile[0] }}-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}"
path: _packages/${{ matrix.profile[0] }}/*
retention-days: 7
compression-level: 0
- uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3
with:
name: "${{ matrix.profile[0] }}-schema-dump-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}-${{ matrix.profile[3] }}-${{ matrix.profile[4] }}"
name: "${{ matrix.profile[0] }}-schema-dump-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}"
path: |
scripts/spellcheck
_build/docgen/${{ matrix.profile[0] }}/schema-en.json
@ -108,10 +61,8 @@ jobs:
matrix:
profile:
- emqx
otp:
- ${{ inputs.otp_vsn }}
os:
- macos-14
- macos-14-arm64
runs-on: ${{ matrix.os }}
env:
@ -119,10 +70,15 @@ jobs:
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
- name: Set up environment
id: env
run: |
source env.sh
echo "OTP_VSN=$OTP_VSN" >> "$GITHUB_OUTPUT"
- uses: ./.github/actions/package-macos
with:
profile: ${{ matrix.profile }}
otp: ${{ matrix.otp }}
otp: ${{ steps.env.outputs.OTP_VSN }}
os: ${{ matrix.os }}
apple_id_password: ${{ secrets.APPLE_ID_PASSWORD }}
apple_developer_identity: ${{ secrets.APPLE_DEVELOPER_IDENTITY }}

View File

@ -17,8 +17,6 @@ jobs:
permissions:
actions: read
security-events: write
container:
image: ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-ubuntu22.04
strategy:
fail-fast: false
@ -36,11 +34,6 @@ jobs:
with:
ref: ${{ matrix.branch }}
- name: Ensure git safe dir
run: |
git config --global --add safe.directory "$GITHUB_WORKSPACE"
make ensure-rebar3
- name: Initialize CodeQL
uses: github/codeql-action/init@7e187e1c529d80bac7b87a16e7a792427f65cf02 # v2.15.5
with:
@ -51,14 +44,7 @@ jobs:
env:
PROFILE: emqx-enterprise
run: |
make emqx-enterprise-compile
- name: Fetch deps
if: matrix.language == 'python'
env:
PROFILE: emqx-enterprise
run: |
make deps-emqx-enterprise
./scripts/buildx.sh --profile emqx-enterprise --pkgtype rel
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@7e187e1c529d80bac7b87a16e7a792427f65cf02 # v2.15.5

View File

@ -26,7 +26,7 @@ jobs:
prepare:
runs-on: ubuntu-latest
if: github.repository_owner == 'emqx'
container: ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-ubuntu20.04
container: ghcr.io/emqx/emqx-builder/5.3-9:1.15.7-26.2.5-3-ubuntu20.04
outputs:
BENCH_ID: ${{ steps.prepare.outputs.BENCH_ID }}
PACKAGE_FILE: ${{ steps.package_file.outputs.PACKAGE_FILE }}

View File

@ -6,13 +6,6 @@ concurrency:
on:
workflow_call:
inputs:
version-emqx:
required: true
type: string
version-emqx-enterprise:
required: true
type: string
permissions:
contents: read
@ -32,11 +25,16 @@ jobs:
env:
EMQX_NAME: ${{ matrix.profile[0] }}
PKG_VSN: ${{ matrix.profile[0] == 'emqx-enterprise' && inputs.version-emqx-enterprise || inputs.version-emqx }}
EMQX_IMAGE_OLD_VERSION_TAG: ${{ matrix.profile[1] }}
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
- name: Set up environment
id: env
run: |
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh "$EMQX_NAME")
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7
with:
name: ${{ env.EMQX_NAME }}-docker
@ -52,9 +50,11 @@ jobs:
docker compose up --abort-on-container-exit --exit-code-from selenium
- name: test two nodes cluster with proto_dist=inet_tls in docker
run: |
./scripts/test/start-two-nodes-in-docker.sh -P $_EMQX_DOCKER_IMAGE_TAG $EMQX_IMAGE_OLD_VERSION_TAG
## -d 1 means only put node 1 (latest version) behind haproxy
./scripts/test/start-two-nodes-in-docker.sh -d 1 -P $_EMQX_DOCKER_IMAGE_TAG $EMQX_IMAGE_OLD_VERSION_TAG
HTTP_PORT=$(docker inspect --format='{{(index (index .NetworkSettings.Ports "18083/tcp") 0).HostPort}}' haproxy)
./scripts/test/emqx-smoke-test.sh localhost $HTTP_PORT
          ## -c means 'cleanup'
./scripts/test/start-two-nodes-in-docker.sh -c
- name: cleanup
if: always()
@ -69,7 +69,6 @@ jobs:
shell: bash
env:
EMQX_NAME: ${{ matrix.profile }}
PKG_VSN: ${{ matrix.profile == 'emqx-enterprise' && inputs.version-emqx-enterprise || inputs.version-emqx }}
_EMQX_TEST_DB_BACKEND: ${{ matrix.cluster_db_backend }}
strategy:
@ -84,6 +83,12 @@ jobs:
- rlog
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
- name: Set up environment
id: env
run: |
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh "$EMQX_NAME")
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7
with:
name: ${{ env.EMQX_NAME }}-docker

View File

@ -6,13 +6,6 @@ concurrency:
on:
workflow_call:
inputs:
version-emqx:
required: true
type: string
version-emqx-enterprise:
required: true
type: string
permissions:
contents: read
@ -25,7 +18,6 @@ jobs:
shell: bash
env:
EMQX_NAME: ${{ matrix.profile }}
EMQX_TAG: ${{ matrix.profile == 'emqx-enterprise' && inputs.version-emqx-enterprise || inputs.version-emqx }}
REPOSITORY: "emqx/${{ matrix.profile }}"
strategy:
@ -45,6 +37,13 @@ jobs:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
with:
path: source
- name: Set up environment
id: env
run: |
cd source
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh "$EMQX_NAME")
echo "EMQX_TAG=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7
with:
name: "${{ env.EMQX_NAME }}-docker"

View File

@ -2,10 +2,6 @@ name: JMeter integration tests
on:
workflow_call:
inputs:
version-emqx:
required: true
type: string
permissions:
contents: read
@ -56,9 +52,22 @@ jobs:
needs: jmeter_artifact
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
- uses: ./.github/actions/prepare-jmeter
- name: Set up environment
id: env
run: |
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh emqx)
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4
with:
version-emqx: ${{ inputs.version-emqx }}
name: emqx-docker
path: /tmp
- name: load docker image
shell: bash
run: |
EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g')
echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV
- uses: ./.github/actions/prepare-jmeter
- name: docker compose up
timeout-minutes: 5
run: |
@ -112,9 +121,22 @@ jobs:
needs: jmeter_artifact
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
- uses: ./.github/actions/prepare-jmeter
- name: Set up environment
id: env
run: |
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh emqx)
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4
with:
version-emqx: ${{ inputs.version-emqx }}
name: emqx-docker
path: /tmp
- name: load docker image
shell: bash
run: |
EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g')
echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV
- uses: ./.github/actions/prepare-jmeter
- name: docker compose up
timeout-minutes: 5
env:
@ -176,9 +198,22 @@ jobs:
needs: jmeter_artifact
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
- uses: ./.github/actions/prepare-jmeter
- name: Set up environment
id: env
run: |
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh emqx)
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4
with:
version-emqx: ${{ inputs.version-emqx }}
name: emqx-docker
path: /tmp
- name: load docker image
shell: bash
run: |
EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g')
echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV
- uses: ./.github/actions/prepare-jmeter
- name: docker compose up
timeout-minutes: 5
env:
@ -232,9 +267,22 @@ jobs:
needs: jmeter_artifact
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
- uses: ./.github/actions/prepare-jmeter
- name: Set up environment
id: env
run: |
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh emqx)
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4
with:
version-emqx: ${{ inputs.version-emqx }}
name: emqx-docker
path: /tmp
- name: load docker image
shell: bash
run: |
EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g')
echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV
- uses: ./.github/actions/prepare-jmeter
- name: docker compose up
timeout-minutes: 5
run: |
@ -285,9 +333,22 @@ jobs:
needs: jmeter_artifact
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
- uses: ./.github/actions/prepare-jmeter
- name: Set up environment
id: env
run: |
source env.sh
PKG_VSN=$(docker run --rm -v $(pwd):$(pwd) -w $(pwd) -u $(id -u) "$EMQX_BUILDER" ./pkg-vsn.sh emqx)
echo "PKG_VSN=$PKG_VSN" >> "$GITHUB_ENV"
- uses: actions/download-artifact@c850b930e6ba138125429b7e5c93fc707a7f8427 # v4.1.4
with:
version-emqx: ${{ inputs.version-emqx }}
name: emqx-docker
path: /tmp
- name: load docker image
shell: bash
run: |
EMQX_DOCKER_IMAGE_TAG=$(docker load < /tmp/emqx-docker-${PKG_VSN}.tar.gz | sed 's/Loaded image: //g')
echo "_EMQX_DOCKER_IMAGE_TAG=$EMQX_DOCKER_IMAGE_TAG" >> $GITHUB_ENV
- uses: ./.github/actions/prepare-jmeter
- name: docker compose up
timeout-minutes: 5
run: |

View File

@ -35,12 +35,12 @@ jobs:
defaults:
run:
shell: bash
container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04"
container: ${{ inputs.builder }}
env:
PROFILE: ${{ matrix.profile }}
ENABLE_COVER_COMPILE: 1
CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-${{ matrix.otp }}
CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}
permissions:
contents: read
@ -100,7 +100,7 @@ jobs:
# produces $PROFILE-<app-name>-<otp-vsn>-sg<suitegroup>.coverdata
- name: run common tests
env:
DOCKER_CT_RUNNER_IMAGE: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04"
DOCKER_CT_RUNNER_IMAGE: ${{ inputs.builder }}
MONGO_TAG: "5"
MYSQL_TAG: "8"
PGSQL_TAG: "13"
@ -111,7 +111,7 @@ jobs:
MINIO_TAG: "RELEASE.2023-03-20T20-16-18Z"
SUITEGROUP: ${{ matrix.suitegroup }}
ENABLE_COVER_COMPILE: 1
CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-${{ matrix.otp }}-sg${{ matrix.suitegroup }}
CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-sg${{ matrix.suitegroup }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: ./scripts/ct/run.sh --ci --app ${{ matrix.app }} --keep-up
@ -136,7 +136,7 @@ jobs:
- uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3
if: failure()
with:
name: logs-${{ matrix.profile }}-${{ matrix.prefix }}-${{ matrix.otp }}-sg${{ matrix.suitegroup }}
name: logs-${{ matrix.profile }}-${{ matrix.prefix }}-sg${{ matrix.suitegroup }}
path: logs.tar.gz
compression-level: 0
retention-days: 7
@ -149,7 +149,7 @@ jobs:
matrix:
include: ${{ fromJson(inputs.ct-host) }}
container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04"
container: ${{ inputs.builder }}
defaults:
run:
shell: bash
@ -161,7 +161,7 @@ jobs:
PROFILE: ${{ matrix.profile }}
SUITEGROUP: ${{ matrix.suitegroup }}
ENABLE_COVER_COMPILE: 1
CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-${{ matrix.otp }}-sg${{ matrix.suitegroup }}
CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-sg${{ matrix.suitegroup }}
steps:
- uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7
@ -196,7 +196,7 @@ jobs:
- uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3
if: failure()
with:
name: logs-${{ matrix.profile }}-${{ matrix.prefix }}-${{ matrix.otp }}-sg${{ matrix.suitegroup }}
name: logs-${{ matrix.profile }}-${{ matrix.prefix }}-sg${{ matrix.suitegroup }}
path: logs.tar.gz
compression-level: 0
retention-days: 7

View File

@ -28,7 +28,7 @@ jobs:
fail-fast: false
matrix:
include: ${{ fromJson(inputs.ct-matrix) }}
container: "ghcr.io/emqx/emqx-builder/${{ matrix.builder }}:${{ matrix.elixir }}-${{ matrix.otp }}-ubuntu22.04"
container: "${{ inputs.builder }}"
steps:
- uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7
with:
@ -39,10 +39,10 @@ jobs:
git config --global --add safe.directory "$GITHUB_WORKSPACE"
- uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2
with:
path: "emqx_dialyzer_${{ matrix.otp }}_plt"
key: rebar3-dialyzer-plt-${{ matrix.profile }}-${{ matrix.otp }}-${{ hashFiles('rebar.*', 'apps/*/rebar.*') }}
path: "emqx_dialyzer_${{ matrix.profile }}_plt"
key: rebar3-dialyzer-plt-${{ matrix.profile }}-${{ hashFiles('rebar.*', 'apps/*/rebar.*') }}
restore-keys: |
rebar3-dialyzer-plt-${{ matrix.profile }}-${{ matrix.otp }}-
rebar3-dialyzer-plt-${{ matrix.profile }}-
- run: cat .env | tee -a $GITHUB_ENV
- name: run static checks
run: make static_checks

View File

@ -1,2 +1,2 @@
erlang 26.2.5-2
erlang 26.2.5-3
elixir 1.15.7-otp-26

View File

@ -6,22 +6,15 @@ endif
REBAR = $(CURDIR)/rebar3
BUILD = $(CURDIR)/build
SCRIPTS = $(CURDIR)/scripts
export EMQX_RELUP ?= true
export EMQX_DEFAULT_BUILDER = ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-debian12
export EMQX_DEFAULT_RUNNER = public.ecr.aws/debian/debian:stable-20240612-slim
export EMQX_REL_FORM ?= tgz
export QUICER_DOWNLOAD_FROM_RELEASE = 1
ifeq ($(OS),Windows_NT)
export REBAR_COLOR=none
FIND=/usr/bin/find
else
FIND=find
endif
include env.sh
# Dashboard version
# from https://github.com/emqx/emqx-dashboard5
export EMQX_DASHBOARD_VERSION ?= v1.9.1-beta.1
export EMQX_EE_DASHBOARD_VERSION ?= e1.7.1-beta.1
export EMQX_DASHBOARD_VERSION ?= v1.9.1
export EMQX_EE_DASHBOARD_VERSION ?= e1.7.1
export EMQX_RELUP ?= true
export EMQX_REL_FORM ?= tgz
-include default-profile.mk
PROFILE ?= emqx
@ -196,8 +189,8 @@ $(PROFILES:%=clean-%):
@if [ -d _build/$(@:clean-%=%) ]; then \
rm -f rebar.lock; \
rm -rf _build/$(@:clean-%=%)/rel; \
$(FIND) _build/$(@:clean-%=%) -name '*.beam' -o -name '*.so' -o -name '*.app' -o -name '*.appup' -o -name '*.o' -o -name '*.d' -type f | xargs rm -f; \
$(FIND) _build/$(@:clean-%=%) -type l -delete; \
find _build/$(@:clean-%=%) -name '*.beam' -o -name '*.so' -o -name '*.app' -o -name '*.appup' -o -name '*.o' -o -name '*.d' -type f | xargs rm -f; \
find _build/$(@:clean-%=%) -type l -delete; \
fi
.PHONY: clean-all
@ -317,7 +310,7 @@ $(foreach tt,$(ALL_ELIXIR_TGZS),$(eval $(call gen-elixir-tgz-target,$(tt))))
.PHONY: fmt
fmt: $(REBAR)
@$(FIND) . \( -name '*.app.src' -o \
@find . \( -name '*.app.src' -o \
-name '*.erl' -o \
-name '*.hrl' -o \
-name 'rebar.config' -o \

View File

@ -32,7 +32,7 @@
%% `apps/emqx/src/bpapi/README.md'
%% Opensource edition
-define(EMQX_RELEASE_CE, "5.7.1-alpha.1").
-define(EMQX_RELEASE_CE, "5.7.1").
%% Enterprise edition
-define(EMQX_RELEASE_EE, "5.7.1-alpha.1").
-define(EMQX_RELEASE_EE, "5.7.1").

View File

@ -29,7 +29,7 @@
{gproc, {git, "https://github.com/emqx/gproc", {tag, "0.9.0.1"}}},
{cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.9.2"}}},
{esockd, {git, "https://github.com/emqx/esockd", {tag, "5.11.2"}}},
{ekka, {git, "https://github.com/emqx/ekka", {tag, "0.19.4"}}},
{ekka, {git, "https://github.com/emqx/ekka", {tag, "0.19.5"}}},
{gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "3.3.1"}}},
{hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.42.2"}}},
{emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.3"}}},

View File

@ -2,7 +2,7 @@
{application, emqx, [
{id, "emqx"},
{description, "EMQX Core"},
{vsn, "5.3.1"},
{vsn, "5.3.3"},
{modules, []},
{registered, []},
{applications, [

View File

@ -545,8 +545,10 @@ handle_in(
{error, ReasonCode} ->
handle_out(disconnect, ReasonCode, Channel)
end;
handle_in(?PACKET(?PINGREQ), Channel) ->
{ok, ?PACKET(?PINGRESP), Channel};
handle_in(?PACKET(?PINGREQ), Channel = #channel{keepalive = Keepalive}) ->
{ok, NKeepalive} = emqx_keepalive:check(Keepalive),
NChannel = Channel#channel{keepalive = NKeepalive},
{ok, ?PACKET(?PINGRESP), reset_timer(keepalive, NChannel)};
handle_in(
?DISCONNECT_PACKET(ReasonCode, Properties),
Channel = #channel{conninfo = ConnInfo}
@ -1230,11 +1232,12 @@ handle_call(
{keepalive, Interval},
Channel = #channel{
keepalive = KeepAlive,
conninfo = ConnInfo
conninfo = ConnInfo,
clientinfo = #{zone := Zone}
}
) ->
ClientId = info(clientid, Channel),
NKeepalive = emqx_keepalive:update(timer:seconds(Interval), KeepAlive),
NKeepalive = emqx_keepalive:update(Zone, Interval, KeepAlive),
NConnInfo = maps:put(keepalive, Interval, ConnInfo),
NChannel = Channel#channel{keepalive = NKeepalive, conninfo = NConnInfo},
SockInfo = maps:get(sockinfo, emqx_cm:get_chan_info(ClientId), #{}),
@ -1337,22 +1340,22 @@ die_if_test_compiled() ->
| {shutdown, Reason :: term(), channel()}.
handle_timeout(
_TRef,
{keepalive, _StatVal},
keepalive,
Channel = #channel{keepalive = undefined}
) ->
{ok, Channel};
handle_timeout(
_TRef,
{keepalive, _StatVal},
keepalive,
Channel = #channel{conn_state = disconnected}
) ->
{ok, Channel};
handle_timeout(
_TRef,
{keepalive, StatVal},
keepalive,
Channel = #channel{keepalive = Keepalive}
) ->
case emqx_keepalive:check(StatVal, Keepalive) of
case emqx_keepalive:check(Keepalive) of
{ok, NKeepalive} ->
NChannel = Channel#channel{keepalive = NKeepalive},
{ok, reset_timer(keepalive, NChannel)};
@ -1463,10 +1466,16 @@ reset_timer(Name, Time, Channel) ->
ensure_timer(Name, Time, clean_timer(Name, Channel)).
clean_timer(Name, Channel = #channel{timers = Timers}) ->
Channel#channel{timers = maps:remove(Name, Timers)}.
case maps:take(Name, Timers) of
error ->
Channel;
{TRef, NTimers} ->
ok = emqx_utils:cancel_timer(TRef),
Channel#channel{timers = NTimers}
end.
interval(keepalive, #channel{keepalive = KeepAlive}) ->
emqx_keepalive:info(interval, KeepAlive);
emqx_keepalive:info(check_interval, KeepAlive);
interval(retry_delivery, #channel{session = Session}) ->
emqx_session:info(retry_interval, Session);
interval(expire_awaiting_rel, #channel{session = Session}) ->
@ -2324,9 +2333,7 @@ ensure_keepalive_timer(0, Channel) ->
ensure_keepalive_timer(disabled, Channel) ->
Channel;
ensure_keepalive_timer(Interval, Channel = #channel{clientinfo = #{zone := Zone}}) ->
Multiplier = get_mqtt_conf(Zone, keepalive_multiplier),
RecvCnt = emqx_pd:get_counter(recv_pkt),
Keepalive = emqx_keepalive:init(RecvCnt, round(timer:seconds(Interval) * Multiplier)),
Keepalive = emqx_keepalive:init(Zone, Interval),
ensure_timer(keepalive, Channel#channel{keepalive = Keepalive}).
clear_keepalive(Channel = #channel{timers = Timers}) ->

View File

@ -727,9 +727,7 @@ handle_timeout(
disconnected ->
{ok, State};
_ ->
%% recv_pkt: valid MQTT message
RecvCnt = emqx_pd:get_counter(recv_pkt),
handle_timeout(TRef, {keepalive, RecvCnt}, State)
with_channel(handle_timeout, [TRef, keepalive], State)
end;
handle_timeout(TRef, Msg, State) ->
with_channel(handle_timeout, [TRef, Msg], State).

View File

@ -285,17 +285,24 @@ parse_connect(FrameBin, StrictMode) ->
end,
parse_connect2(ProtoName, Rest, StrictMode).
% Note: return malformed if reserved flag is not 0.
parse_connect2(
ProtoName,
<<BridgeTag:4, ProtoVer:4, UsernameFlag:1, PasswordFlag:1, WillRetain:1, WillQoS:2, WillFlag:1,
CleanStart:1, Reserved:1, KeepAlive:16/big, Rest2/binary>>,
<<BridgeTag:4, ProtoVer:4, UsernameFlagB:1, PasswordFlagB:1, WillRetainB:1, WillQoS:2,
WillFlagB:1, CleanStart:1, Reserved:1, KeepAlive:16/big, Rest2/binary>>,
StrictMode
) ->
case Reserved of
0 -> ok;
1 -> ?PARSE_ERR(reserved_connect_flag)
end,
_ = validate_connect_reserved(Reserved),
_ = validate_connect_will(
WillFlag = bool(WillFlagB),
WillRetain = bool(WillRetainB),
WillQoS
),
_ = validate_connect_password_flag(
StrictMode,
ProtoVer,
UsernameFlag = bool(UsernameFlagB),
PasswordFlag = bool(PasswordFlagB)
),
{Properties, Rest3} = parse_properties(Rest2, ProtoVer, StrictMode),
{ClientId, Rest4} = parse_utf8_string_with_cause(Rest3, StrictMode, invalid_clientid),
ConnPacket = #mqtt_packet_connect{
@ -305,9 +312,9 @@ parse_connect2(
%% Invented by mosquitto, named 'try_private': https://mosquitto.org/man/mosquitto-conf-5.html
is_bridge = (BridgeTag =:= 8),
clean_start = bool(CleanStart),
will_flag = bool(WillFlag),
will_flag = WillFlag,
will_qos = WillQoS,
will_retain = bool(WillRetain),
will_retain = WillRetain,
keepalive = KeepAlive,
properties = Properties,
clientid = ClientId
@ -318,14 +325,14 @@ parse_connect2(
fun(Bin) ->
parse_utf8_string_with_cause(Bin, StrictMode, invalid_username)
end,
bool(UsernameFlag)
UsernameFlag
),
{Password, Rest7} = parse_optional(
Rest6,
fun(Bin) ->
parse_utf8_string_with_cause(Bin, StrictMode, invalid_password)
end,
bool(PasswordFlag)
PasswordFlag
),
case Rest7 of
<<>> ->
@ -1150,6 +1157,32 @@ validate_subqos([3 | _]) -> ?PARSE_ERR(bad_subqos);
validate_subqos([_ | T]) -> validate_subqos(T);
validate_subqos([]) -> ok.
%% MQTT-v3.1.1-[MQTT-3.1.2-3], MQTT-v5.0-[MQTT-3.1.2-3]
%% The reserved bit (bit 0) of the CONNECT flags byte must be 0; a value of
%% 1 aborts parsing via ?PARSE_ERR (throws {frame_parse_error, _}).
validate_connect_reserved(0) -> ok;
validate_connect_reserved(1) -> ?PARSE_ERR(reserved_connect_flag).
%% validate_connect_will(WillFlag, WillRetain, WillQoS) — see the call site in
%% parse_connect2/3. Throws {frame_parse_error, _} on a spec violation;
%% clause order matters: the Will-Flag=false/QoS>0 case is checked first.
%% MQTT-v3.1.1-[MQTT-3.1.2-13], MQTT-v5.0-[MQTT-3.1.2-11]:
%% if Will Flag is unset, Will QoS must be 0.
validate_connect_will(false, _, WillQos) when WillQos > 0 -> ?PARSE_ERR(invalid_will_qos);
%% MQTT-v3.1.1-[MQTT-3.1.2-14], MQTT-v5.0-[MQTT-3.1.2-12]:
%% Will QoS 3 is never valid.
validate_connect_will(true, _, WillQoS) when WillQoS > 2 -> ?PARSE_ERR(invalid_will_qos);
%% MQTT-v3.1.1-[MQTT-3.1.2-15], MQTT-v5.0-[MQTT-3.1.2-13]:
%% if Will Flag is unset, Will Retain must be unset.
validate_connect_will(false, WillRetain, _) when WillRetain -> ?PARSE_ERR(invalid_will_retain);
validate_connect_will(_, _, _) -> ok.
%% validate_connect_password_flag(StrictMode, ProtoVer, UsernameFlag, PasswordFlag).
%% Only strict mode performs any checking; throws {frame_parse_error, _} on
%% violation, otherwise returns a value the caller discards.
%% MQTT-v3.1: username flag and password flag are not strongly related, so
%% there is nothing to validate.
%% https://public.dhe.ibm.com/software/dw/webservices/ws-mqtt/mqtt-v3r1.html#connect
validate_connect_password_flag(true, ?MQTT_PROTO_V3, _, _) ->
    ok;
validate_connect_password_flag(true, ?MQTT_PROTO_V4, HasUsername, HasPassword) ->
    %% MQTT-v3.1.1-[MQTT-3.1.2-22]: a password without a username is invalid.
    %% BUG-FOR-BUG compatible, only check when `strict-mode`.
    %% Parenthesized to make the andalso/orelse precedence explicit: error out
    %% only when the username flag is unset AND the password flag is set.
    HasUsername orelse (HasPassword andalso ?PARSE_ERR(invalid_password_flag));
validate_connect_password_flag(true, ?MQTT_PROTO_V5, _, _) ->
    ok;
validate_connect_password_flag(_, _, _, _) ->
    ok.
%% Map a 1-bit CONNECT flag value to a boolean. Any other input is a
%% deliberate function_clause crash (callers pass 1-bit fields from the
%% binary pattern match, so only 0 and 1 can occur).
bool(Bit) when Bit =:= 0 -> false;
bool(Bit) when Bit =:= 1 -> true.

View File

@ -19,10 +19,12 @@
-export([
init/1,
init/2,
init/3,
info/1,
info/2,
check/1,
check/2,
update/2
update/3
]).
-elvis([{elvis_style, no_if_expression, disable}]).
@ -30,8 +32,12 @@
-export_type([keepalive/0]).
-record(keepalive, {
interval :: pos_integer(),
statval :: non_neg_integer()
check_interval :: pos_integer(),
%% the received packets since last keepalive check
statval :: non_neg_integer(),
%% The number of idle intervals allowed before disconnecting the client.
idle_milliseconds = 0 :: non_neg_integer(),
max_idle_millisecond :: pos_integer()
}).
-opaque keepalive() :: #keepalive{}.
@ -39,7 +45,11 @@
%% @doc Init keepalive.
-spec init(Interval :: non_neg_integer()) -> keepalive().
init(Interval) -> init(0, Interval).
init(Interval) -> init(default, 0, Interval).
init(Zone, Interval) ->
RecvCnt = emqx_pd:get_counter(recv_pkt),
init(Zone, RecvCnt, Interval).
%% from mqtt-v3.1.1 specific
%% A Keep Alive value of zero (0) has the effect of turning off the keep alive mechanism.
@ -53,42 +63,88 @@ init(Interval) -> init(0, Interval).
%% typically this is a few minutes.
%% The maximum value is (65535s) 18 hours 12 minutes and 15 seconds.
%% @doc Init keepalive.
-spec init(StatVal :: non_neg_integer(), Interval :: non_neg_integer()) -> keepalive() | undefined.
init(StatVal, Interval) when Interval > 0 andalso Interval =< ?MAX_INTERVAL ->
#keepalive{interval = Interval, statval = StatVal};
init(_, 0) ->
-spec init(
Zone :: atom(),
StatVal :: non_neg_integer(),
Second :: non_neg_integer()
) -> keepalive() | undefined.
init(Zone, StatVal, Second) when Second > 0 andalso Second =< ?MAX_INTERVAL ->
#{keepalive_multiplier := Mul, keepalive_check_interval := CheckInterval} =
emqx_config:get_zone_conf(Zone, [mqtt]),
MilliSeconds = timer:seconds(Second),
Interval = emqx_utils:clamp(CheckInterval, 1000, max(MilliSeconds div 2, 1000)),
MaxIdleMs = ceil(MilliSeconds * Mul),
#keepalive{
check_interval = Interval,
statval = StatVal,
idle_milliseconds = 0,
max_idle_millisecond = MaxIdleMs
};
init(_Zone, _, 0) ->
undefined;
init(StatVal, Interval) when Interval > ?MAX_INTERVAL -> init(StatVal, ?MAX_INTERVAL).
init(Zone, StatVal, Interval) when Interval > ?MAX_INTERVAL -> init(Zone, StatVal, ?MAX_INTERVAL).
%% @doc Get Info of the keepalive.
-spec info(keepalive()) -> emqx_types:infos().
info(#keepalive{
interval = Interval,
statval = StatVal
check_interval = Interval,
statval = StatVal,
idle_milliseconds = IdleIntervals,
max_idle_millisecond = MaxMs
}) ->
#{
interval => Interval,
statval => StatVal
check_interval => Interval,
statval => StatVal,
idle_milliseconds => IdleIntervals,
max_idle_millisecond => MaxMs
}.
-spec info(interval | statval, keepalive()) ->
-spec info(check_interval | statval | idle_milliseconds, keepalive()) ->
non_neg_integer().
info(interval, #keepalive{interval = Interval}) ->
info(check_interval, #keepalive{check_interval = Interval}) ->
Interval;
info(statval, #keepalive{statval = StatVal}) ->
StatVal;
info(interval, undefined) ->
info(idle_milliseconds, #keepalive{idle_milliseconds = Val}) ->
Val;
info(check_interval, undefined) ->
0.
%% @doc Timer-driven check entry point. For an active keepalive, sample the
%% per-process received-packet counter and delegate to check/2; `undefined'
%% (keepalive disabled) passes straight through as {ok, undefined}.
check(Keepalive = #keepalive{}) ->
    RecvCnt = emqx_pd:get_counter(recv_pkt),
    check(RecvCnt, Keepalive);
check(Keepalive) ->
    {ok, Keepalive}.
%% @doc Check keepalive.
-spec check(non_neg_integer(), keepalive()) ->
{ok, keepalive()} | {error, timeout}.
check(Val, #keepalive{statval = Val}) -> {error, timeout};
check(Val, KeepAlive) -> {ok, KeepAlive#keepalive{statval = Val}}.
%% Clause 1: no new packets since the last check (statval unchanged) AND the
%% accumulated idle time would reach the allowed maximum after this interval
%% -> the client timed out.
check(
    NewVal,
    #keepalive{
        statval = NewVal,
        idle_milliseconds = IdleAcc,
        check_interval = Interval,
        max_idle_millisecond = Max
    }
) when IdleAcc + Interval >= Max ->
    {error, timeout};
%% Clause 2: still idle (statval unchanged) but under the limit — accumulate
%% another check interval of idle time.
check(
    NewVal,
    #keepalive{
        statval = NewVal,
        idle_milliseconds = IdleAcc,
        check_interval = Interval
    } = KeepAlive
) ->
    {ok, KeepAlive#keepalive{statval = NewVal, idle_milliseconds = IdleAcc + Interval}};
%% Clause 3: packets were received since the last check — record the new
%% counter value and reset the idle accumulator.
check(NewVal, #keepalive{} = KeepAlive) ->
    {ok, KeepAlive#keepalive{statval = NewVal, idle_milliseconds = 0}}.
%% @doc Update keepalive.
%% The statval of the previous keepalive will be used,
%% and normal checks will begin from the next cycle.
-spec update(non_neg_integer(), keepalive() | undefined) -> keepalive() | undefined.
update(Interval, undefined) -> init(0, Interval);
update(Interval, #keepalive{statval = StatVal}) -> init(StatVal, Interval).
%% Rebuild the keepalive state for a (possibly new) interval, preserving the
%% previous statval so normal idle checks resume from the next cycle.
-spec update(atom(), non_neg_integer(), keepalive() | undefined) -> keepalive() | undefined.
update(Zone, Interval, undefined) -> init(Zone, 0, Interval);
update(Zone, Interval, #keepalive{statval = StatVal}) -> init(Zone, StatVal, Interval).

View File

@ -3487,6 +3487,7 @@ mqtt_general() ->
)},
{"max_clientid_len",
sc(
%% MQTT-v3.1.1-[MQTT-3.1.3-5], MQTT-v5.0-[MQTT-3.1.3-5]
range(23, 65535),
#{
default => 65535,
@ -3608,9 +3609,17 @@ mqtt_general() ->
desc => ?DESC(mqtt_keepalive_multiplier)
}
)},
{"keepalive_check_interval",
sc(
timeout_duration(),
#{
default => <<"30s">>,
desc => ?DESC(mqtt_keepalive_check_interval)
}
)},
{"retry_interval",
sc(
hoconsc:union([infinity, duration()]),
hoconsc:union([infinity, timeout_duration()]),
#{
default => infinity,
desc => ?DESC(mqtt_retry_interval)

View File

@ -555,8 +555,7 @@ handle_info(Info, State) ->
handle_timeout(TRef, idle_timeout, State = #state{idle_timer = TRef}) ->
shutdown(idle_timeout, State);
handle_timeout(TRef, keepalive, State) when is_reference(TRef) ->
RecvOct = emqx_pd:get_counter(recv_oct),
handle_timeout(TRef, {keepalive, RecvOct}, State);
with_channel(handle_timeout, [TRef, keepalive], State);
handle_timeout(
TRef,
emit_stats,

View File

@ -428,6 +428,7 @@ zone_global_defaults() ->
ignore_loop_deliver => false,
keepalive_backoff => 0.75,
keepalive_multiplier => 1.5,
keepalive_check_interval => 30000,
max_awaiting_rel => 100,
max_clientid_len => 65535,
max_inflight => 32,

View File

@ -64,7 +64,10 @@ groups() ->
t_malformed_connect_header,
t_malformed_connect_data,
t_reserved_connect_flag,
t_invalid_clientid
t_invalid_clientid,
t_undefined_password,
t_invalid_will_retain,
t_invalid_will_qos
]},
{connack, [parallel], [
t_serialize_parse_connack,
@ -703,9 +706,15 @@ t_invalid_clientid(_) ->
).
%% for regression: `password` must be `undefined`
%% BUG-FOR-BUG compatible
t_undefined_password(_) ->
Payload = <<16, 19, 0, 4, 77, 81, 84, 84, 4, 130, 0, 60, 0, 2, 97, 49, 0, 3, 97, 97, 97>>,
{ok, Packet, <<>>, {none, _}} = emqx_frame:parse(Payload),
%% Username Flag = true
%% Password Flag = false
%% Clean Session = true
ConnectFlags = <<2#1000:4, 2#0010:4>>,
ConnBin =
<<16, 17, 0, 4, 77, 81, 84, 84, 4, ConnectFlags/binary, 0, 60, 0, 2, 97, 49, 0, 1, 97>>,
{ok, Packet, <<>>, {none, _}} = emqx_frame:parse(ConnBin),
Password = undefined,
?assertEqual(
#mqtt_packet{
@ -729,7 +738,7 @@ t_undefined_password(_) ->
will_props = #{},
will_topic = undefined,
will_payload = undefined,
username = <<"aaa">>,
username = <<"a">>,
password = Password
},
payload = undefined
@ -738,6 +747,75 @@ t_undefined_password(_) ->
),
ok.
%% A CONNECT with password flag set but username flag unset violates
%% MQTT-v3.1.1-[MQTT-3.1.2-22]. Non-strict parsing accepts it (BUG-FOR-BUG
%% compatibility); strict mode must throw invalid_password_flag.
t_invalid_password_flag(_) ->
    %% Username Flag = false
    %% Password Flag = true
    %% Clean Session = true
    ConnectFlags = <<2#0100:4, 2#0010:4>>,
    ConnectBin =
        <<16, 17, 0, 4, 77, 81, 84, 84, 4, ConnectFlags/binary, 0, 60, 0, 2, 97, 49, 0, 1, 97>>,
    %% Lenient (default) mode: parse succeeds.
    ?assertMatch(
        {ok, _, _, _},
        emqx_frame:parse(ConnectBin)
    ),
    %% Strict mode: same bytes are rejected.
    StrictModeParseState = emqx_frame:initial_parse_state(#{strict_mode => true}),
    ?assertException(
        throw,
        {frame_parse_error, invalid_password_flag},
        emqx_frame:parse(ConnectBin, StrictModeParseState)
    ).
%% A CONNECT (MQTT v5, protocol byte 5) whose flags set Will Retain while
%% the Will Flag is unset must be rejected with invalid_will_retain
%% (MQTT-v3.1.1-[MQTT-3.1.2-15], MQTT-v5.0-[MQTT-3.1.2-13]).
t_invalid_will_retain(_) ->
    ConnectFlags = <<2#01100000>>,
    ConnectBin =
        <<16, 51, 0, 4, 77, 81, 84, 84, 5, ConnectFlags/binary, 174, 157, 24, 38, 0, 14, 98, 55,
            122, 51, 83, 73, 89, 50, 54, 79, 77, 73, 65, 86, 0, 5, 66, 117, 53, 57, 66, 0, 6, 84,
            54, 75, 78, 112, 57, 0, 6, 68, 103, 55, 87, 87, 87>>,
    ?assertException(
        throw,
        {frame_parse_error, invalid_will_retain},
        emqx_frame:parse(ConnectBin)
    ),
    ok.
%% Every CONNECT flags combination with an invalid Will QoS must be rejected
%% with invalid_will_qos; a Will QoS of 0 with the Will Flag unset is valid.
%% Flag byte layout (see parse_connect2/3):
%% <<UsernameFlag:1, PasswordFlag:1, WillRetain:1, WillQoS:2, WillFlag:1,
%%   CleanStart:1, Reserved:1>>.
t_invalid_will_qos(_) ->
    MkConnect = fun(ConnectFlags) ->
        <<16, 51, 0, 4, 77, 81, 84, 84, 5, ConnectFlags/binary, 174, 157, 24, 38, 0, 14, 98, 55,
            122, 51, 83, 73, 89, 50, 54, 79, 77, 73, 65, 86, 0, 5, 66, 117, 53, 57, 66, 0, 6, 84,
            54, 75, 78, 112, 57, 0, 6, 68, 103, 55, 87, 87, 87>>
    end,
    %% Will QoS = 0: parses fine.
    ?assertMatch(
        {ok, _, _, _},
        emqx_frame:parse(MkConnect(<<2#010:3, 2#00:2, 2#000:3>>))
    ),
    %% All remaining combinations carry an invalid Will QoS.
    InvalidFlagCombos = [
        <<2#010:3, 2#01:2, 2#000:3>>,
        <<2#010:3, 2#10:2, 2#000:3>>,
        <<2#010:3, 2#11:2, 2#000:3>>,
        <<2#011:3, 2#11:2, 2#000:3>>
    ],
    lists:foreach(
        fun(Flags) ->
            ?assertException(
                throw,
                {frame_parse_error, invalid_will_qos},
                emqx_frame:parse(MkConnect(Flags))
            )
        end,
        InvalidFlagCombos
    ),
    ok.
parse_serialize(Packet) ->
parse_serialize(Packet, #{strict_mode => true}).

View File

@ -19,22 +19,180 @@
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("emqx/include/emqx.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
all() -> emqx_common_test_helpers:all(?MODULE).
init_per_suite(Config) ->
Apps = emqx_cth_suite:start(
[
{emqx,
"listeners {"
"tcp.default.bind = 1883,"
"ssl.default = marked_for_deletion,"
"quic.default = marked_for_deletion,"
"ws.default = marked_for_deletion,"
"wss.default = marked_for_deletion"
"}"}
],
#{work_dir => emqx_cth_suite:work_dir(Config)}
),
[{apps, Apps} | Config].
end_per_suite(Config) ->
emqx_cth_suite:stop(?config(apps, Config)).
%% Client keepalive = 10s, multiplier = 1.5 => max idle 15000 ms. The
%% configured 30s check interval is clamped to keepalive/2 = 5000 ms, so the
%% channel must shut down on the 3rd idle check (3 * 5000 >= 15000).
t_check_keepalive_default_timeout(_) ->
    emqx_config:put_zone_conf(default, [mqtt, keepalive_multiplier], 1.5),
    emqx_config:put_zone_conf(default, [mqtt, keepalive_check_interval], 30000),
    erlang:process_flag(trap_exit, true),
    ClientID = <<"default">>,
    KeepaliveSec = 10,
    {ok, C} = emqtt:start_link([
        {keepalive, KeepaliveSec},
        {clientid, binary_to_list(ClientID)}
    ]),
    {ok, _} = emqtt:connect(C),
    emqtt:pause(C),
    [ChannelPid] = emqx_cm:lookup_channels(ClientID),
    erlang:link(ChannelPid),
    CheckInterval = emqx_utils:clamp(keepalive_check_interval(), 1000, 5000),
    ?assertMatch(5000, CheckInterval),
    %% Timeline (packet counter stays at 1 because the client is paused):
    %% connect T0(packet = 1, idle_milliseconds = 0)
    %% check1  T1(packet = 1, idle_milliseconds = 1 * CheckInterval = 5000)
    %% check2  T2(packet = 1, idle_milliseconds = 2 * CheckInterval = 10000)
    %% check3  T3(packet = 1, idle_milliseconds = 3 * CheckInterval = 15000) -> timeout
    Timeout = CheckInterval * 3,
    %% Connected but not sending any packet: no timeout before T3...
    ?assertMatch(
        no_keepalive_timeout_received,
        receive_msg_in_time(ChannelPid, C, Timeout - 200),
        Timeout - 200
    ),
    %% ...and the keepalive shutdown arrives shortly after.
    ?assertMatch(ok, receive_msg_in_time(ChannelPid, C, 1200)).
%% Same as the default-timeout case but with a 2s check interval (within the
%% clamp range, so used as-is). An extra SUBSCRIBE packet resets the idle
%% accumulator once before the client goes silent.
t_check_keepalive_other_timeout(_) ->
    emqx_config:put_zone_conf(default, [mqtt, keepalive_multiplier], 1.5),
    emqx_config:put_zone_conf(default, [mqtt, keepalive_check_interval], 2000),
    erlang:process_flag(trap_exit, true),
    ClientID = <<"other">>,
    KeepaliveSec = 10,
    {ok, C} = emqtt:start_link([
        {keepalive, KeepaliveSec},
        {clientid, binary_to_list(ClientID)}
    ]),
    {ok, _} = emqtt:connect(C),
    emqtt:pause(C),
    {ok, _, [0]} = emqtt:subscribe(C, <<"mytopic">>, []),
    [ChannelPid] = emqx_cm:lookup_channels(ClientID),
    erlang:link(ChannelPid),
    CheckInterval = emqx_utils:clamp(keepalive_check_interval(), 1000, 5000),
    ?assertMatch(2000, CheckInterval),
    %% Timeline (max idle = 10s * 1.5 = 15000 ms):
    %% connect   T0(packet = 1, idle_milliseconds = 0)
    %% subscribe T1(packet = 2, idle_milliseconds = 0)
    %% check1    T2(packet = 2, idle_milliseconds = 1 * CheckInterval = 2000)
    %% check2    T3(packet = 2, idle_milliseconds = 2 * CheckInterval = 4000)
    %% check3    T4(packet = 2, idle_milliseconds = 3 * CheckInterval = 6000)
    %% check4    T5(packet = 2, idle_milliseconds = 4 * CheckInterval = 8000)
    %% check5    T6(packet = 2, idle_milliseconds = 5 * CheckInterval = 10000)
    %% check6    T7(packet = 2, idle_milliseconds = 6 * CheckInterval = 12000)
    %% check7    T8(packet = 2, idle_milliseconds = 7 * CheckInterval = 14000)
    %% check8    T9(packet = 2, idle_milliseconds = 8 * CheckInterval = 16000) > 15000 timeout
    Timeout = CheckInterval * 9,
    ?assertMatch(
        no_keepalive_timeout_received,
        receive_msg_in_time(ChannelPid, C, Timeout - 200),
        Timeout - 200
    ),
    ?assertMatch(ok, receive_msg_in_time(ChannelPid, C, 1200), Timeout).
%% A PINGREQ must reset the keepalive timer. Configured check interval is
%% 100s, clamped down to keepalive/2 = 5000 ms; max idle = 10s * 1.5 = 15000 ms,
%% so after the ping the shutdown is expected on the 3rd idle check.
t_check_keepalive_ping_reset_timer(_) ->
    emqx_config:put_zone_conf(default, [mqtt, keepalive_multiplier], 1.5),
    emqx_config:put_zone_conf(default, [mqtt, keepalive_check_interval], 100000),
    erlang:process_flag(trap_exit, true),
    ClientID = <<"ping_reset">>,
    KeepaliveSec = 10,
    {ok, C} = emqtt:start_link([
        {keepalive, KeepaliveSec},
        {clientid, binary_to_list(ClientID)}
    ]),
    {ok, _} = emqtt:connect(C),
    emqtt:pause(C),
    ct:sleep(1000),
    emqtt:resume(C),
    pong = emqtt:ping(C),
    emqtt:pause(C),
    [ChannelPid] = emqx_cm:lookup_channels(ClientID),
    erlang:link(ChannelPid),
    CheckInterval = emqx_utils:clamp(keepalive_check_interval(), 1000, 5000),
    ?assertMatch(5000, CheckInterval),
    %% Timeline:
    %% connect T0(packet = 1, idle_milliseconds = 0)
    %% sleep 1000ms
    %% ping      (packet = 2, idle_milliseconds = 0) -- restarts the timer
    %% check1  T1(packet = 2, idle_milliseconds = 1 * CheckInterval = 5000)
    %% check2  T2(packet = 2, idle_milliseconds = 2 * CheckInterval = 10000)
    %% check3  T3(packet = 2, idle_milliseconds = 3 * CheckInterval = 15000) -> timeout
    Timeout = CheckInterval * 3,
    ?assertMatch(
        no_keepalive_timeout_received,
        receive_msg_in_time(ChannelPid, C, Timeout - 200),
        Timeout - 200
    ),
    ?assertMatch(ok, receive_msg_in_time(ChannelPid, C, 1200)).
t_check(_) ->
emqx_config:put_zone_conf(default, [mqtt, keepalive_multiplier], 1.5),
emqx_config:put_zone_conf(default, [mqtt, keepalive_check_interval], 30000),
Keepalive = emqx_keepalive:init(60),
?assertEqual(60, emqx_keepalive:info(interval, Keepalive)),
?assertEqual(30000, emqx_keepalive:info(check_interval, Keepalive)),
?assertEqual(0, emqx_keepalive:info(statval, Keepalive)),
Info = emqx_keepalive:info(Keepalive),
?assertEqual(
#{
interval => 60,
statval => 0
check_interval => 30000,
statval => 0,
idle_milliseconds => 0,
%% 60 * 1.5 * 1000
max_idle_millisecond => 90000
},
Info
),
{ok, Keepalive1} = emqx_keepalive:check(1, Keepalive),
?assertEqual(1, emqx_keepalive:info(statval, Keepalive1)),
?assertEqual({error, timeout}, emqx_keepalive:check(1, Keepalive1)).
{ok, Keepalive2} = emqx_keepalive:check(1, Keepalive1),
?assertEqual(1, emqx_keepalive:info(statval, Keepalive2)),
{ok, Keepalive3} = emqx_keepalive:check(1, Keepalive2),
?assertEqual(1, emqx_keepalive:info(statval, Keepalive3)),
?assertEqual({error, timeout}, emqx_keepalive:check(1, Keepalive3)),
Keepalive4 = emqx_keepalive:init(90),
?assertEqual(30000, emqx_keepalive:info(check_interval, Keepalive4)),
Keepalive5 = emqx_keepalive:init(1),
?assertEqual(1000, emqx_keepalive:info(check_interval, Keepalive5)),
ok.
%% Read the currently configured keepalive multiplier for the default zone.
keepalive_multiplier() ->
    emqx_config:get_zone_conf(default, [mqtt, keepalive_multiplier]).

%% Read the currently configured keepalive check interval (ms) for the
%% default zone.
keepalive_check_interval() ->
    emqx_config:get_zone_conf(default, [mqtt, keepalive_check_interval]).
%% Wait up to Timeout ms for the linked channel process to exit with
%% keepalive_timeout. On timeout of the outer receive, return the atom
%% no_keepalive_timeout_received (asserted by callers). When the channel
%% does die, the linked emqtt client C must follow with tcp_closed within
%% 500 ms, otherwise throw.
receive_msg_in_time(ChannelPid, C, Timeout) ->
    receive
        {'EXIT', ChannelPid, {shutdown, keepalive_timeout}} ->
            receive
                {'EXIT', C, {shutdown, tcp_closed}} ->
                    ok
            after 500 ->
                throw(no_tcp_closed_from_mqtt_client)
            end
    after Timeout ->
        no_keepalive_timeout_received
    end.

View File

@ -1,7 +1,7 @@
%% -*- mode: erlang -*-
{application, emqx_auth, [
{description, "EMQX Authentication and authorization"},
{vsn, "0.3.1"},
{vsn, "0.3.3"},
{modules, []},
{registered, [emqx_auth_sup]},
{applications, [

View File

@ -408,7 +408,7 @@ init_metrics(Source) ->
emqx_metrics_worker:create_metrics(
authz_metrics,
TypeName,
[total, allow, deny, nomatch],
[total, allow, deny, nomatch, ignore],
[total]
)
end.
@ -510,8 +510,8 @@ do_authorize(
}),
do_authorize(Client, PubSub, Topic, Tail);
ignore ->
?TRACE("AUTHZ", "authorization_ignore", #{
authorize_type => Type,
emqx_metrics_worker:inc(authz_metrics, Type, ignore),
?TRACE("AUTHZ", "authorization_module_ignore", #{
module => Module,
username => Username,
topic => Topic,

View File

@ -10,7 +10,12 @@
make_tls_verify_fun/2
]).
-export([default_root_fun/1]).
-include_lib("public_key/include/public_key.hrl").
-define(unknown_ca, unknown_ca).
%% @doc Build a root fun for verify TLS partial_chain.
%% The `InputChain' is composed by OTP SSL with local cert store
%% AND the cert (chain if any) from the client.
@ -109,3 +114,8 @@ ext_key_opts(Str) ->
end,
Usages
).
%% @doc Default root fun installed when `partial_chain' is 'false'.
%% Always answers `unknown_ca', i.e. this fun designates no trusted CA —
%% presumably so the TLS layer proceeds with its regular cert-store path
%% validation (see OTP `ssl' docs for the `partial_chain' option).
-spec default_root_fun(_) -> ?unknown_ca.
default_root_fun(_) ->
    ?unknown_ca.

View File

@ -13,10 +13,12 @@
-include_lib("emqx/include/logger.hrl").
-define(CONST_MOD_V1, emqx_auth_ext_tls_const_v1).
%% @doc enable TLS partial_chain validation if set.
%% @doc enable TLS partial_chain validation
-spec opt_partial_chain(SslOpts :: map()) -> NewSslOpts :: map().
opt_partial_chain(#{partial_chain := false} = SslOpts) ->
maps:remove(partial_chain, SslOpts);
%% For config update scenario, we must set it to override
%% the 'existing' partial_chain in the listener
SslOpts#{partial_chain := fun ?CONST_MOD_V1:default_root_fun/1};
opt_partial_chain(#{partial_chain := true} = SslOpts) ->
SslOpts#{partial_chain := rootfun_trusted_ca_from_cacertfile(1, SslOpts)};
opt_partial_chain(#{partial_chain := cacert_from_cacertfile} = SslOpts) ->

View File

@ -24,7 +24,7 @@
"\n"
" listeners.ssl.auth_ext.bind = 28883\n"
" listeners.ssl.auth_ext.enable = true\n"
" listeners.ssl.auth_ext.ssl_options.partial_chain = true\n"
" listeners.ssl.auth_ext.ssl_options.partial_chain = false\n"
" listeners.ssl.auth_ext.ssl_options.verify = verify_peer\n"
" listeners.ssl.auth_ext.ssl_options.verify_peer_ext_key_usage = \"clientAuth\"\n"
" "
@ -62,5 +62,6 @@ t_conf_check_default(_Config) ->
t_conf_check_auth_ext(_Config) ->
Opts = esockd:get_options({'ssl:auth_ext', 28883}),
SSLOpts = proplists:get_value(ssl_options, Opts),
%% Even when partial_chain is set to `false`
?assertMatch(Fun when is_function(Fun), proplists:get_value(partial_chain, SSLOpts)),
?assertMatch({Fun, _} when is_function(Fun), proplists:get_value(verify_fun, SSLOpts)).

View File

@ -1,7 +1,7 @@
%% -*- mode: erlang -*-
{application, emqx_auth_http, [
{description, "EMQX External HTTP API Authentication and Authorization"},
{vsn, "0.2.2"},
{vsn, "0.2.3"},
{registered, []},
{mod, {emqx_auth_http_app, []}},
{applications, [

View File

@ -529,6 +529,68 @@ t_bad_response_content_type(_Config) ->
end
).
%% Checks that we bump the correct metrics when we receive an error response
t_bad_response(_Config) ->
ok = setup_handler_and_config(
fun(Req0, State) ->
?assertEqual(
<<"/authz/users/">>,
cowboy_req:path(Req0)
),
{ok, _PostVars, Req1} = cowboy_req:read_urlencoded_body(Req0),
Req = cowboy_req:reply(
400,
#{<<"content-type">> => <<"application/json">>},
"{\"error\":true}",
Req1
),
{ok, Req, State}
end,
#{
<<"method">> => <<"post">>,
<<"body">> => #{
<<"username">> => <<"${username}">>
},
<<"headers">> => #{}
}
),
ClientInfo = #{
clientid => <<"client id">>,
username => <<"user name">>,
peerhost => {127, 0, 0, 1},
protocol => <<"MQTT">>,
mountpoint => <<"MOUNTPOINT">>,
zone => default,
listener => {tcp, default},
cn => ?PH_CERT_CN_NAME,
dn => ?PH_CERT_SUBJECT
},
?assertEqual(
deny,
emqx_access_control:authorize(ClientInfo, ?AUTHZ_PUBLISH, <<"t">>)
),
?assertMatch(
#{
counters := #{
total := 1,
ignore := 1,
nomatch := 0,
allow := 0,
deny := 0
},
'authorization.superuser' := 0,
'authorization.matched.allow' := 0,
'authorization.matched.deny' := 0,
'authorization.nomatch' := 1
},
get_metrics()
),
ok.
t_no_value_for_placeholder(_Config) ->
ok = setup_handler_and_config(
fun(Req0, State) ->
@ -729,3 +791,18 @@ start_apps(Apps) ->
stop_apps(Apps) ->
lists:foreach(fun application:stop/1, Apps).
%% Collect the HTTP authorizer's own metrics together with the global
%% authorization counters into one flat map for assertions.
get_metrics() ->
    GlobalCounters = [
        'authorization.superuser',
        'authorization.matched.allow',
        'authorization.matched.deny',
        'authorization.nomatch'
    ],
    Globals = maps:from_list([{Name, emqx_metrics:val(Name)} || Name <- GlobalCounters]),
    maps:merge(emqx_metrics_worker:get_metrics(authz_metrics, http), Globals).

View File

@ -1,7 +1,7 @@
%% -*- mode: erlang -*-
{application, emqx_auth_jwt, [
{description, "EMQX JWT Authentication and Authorization"},
{vsn, "0.3.1"},
{vsn, "0.3.2"},
{registered, []},
{mod, {emqx_auth_jwt_app, []}},
{applications, [

View File

@ -116,7 +116,7 @@ create(
user_id_type := Type,
password_hash_algorithm := Algorithm,
user_group := UserGroup
}
} = Config
) ->
ok = emqx_authn_password_hashing:init(Algorithm),
State = #{
@ -124,6 +124,7 @@ create(
user_id_type => Type,
password_hash_algorithm => Algorithm
},
ok = boostrap_user_from_file(Config, State),
{ok, State}.
update(Config, _State) ->
@ -338,8 +339,24 @@ run_fuzzy_filter(
%%------------------------------------------------------------------------------
insert_user(UserGroup, UserID, PasswordHash, Salt, IsSuperuser) ->
UserInfoRecord = user_info_record(UserGroup, UserID, PasswordHash, Salt, IsSuperuser),
insert_user(UserInfoRecord).
UserInfoRecord =
#user_info{user_id = DBUserID} =
user_info_record(UserGroup, UserID, PasswordHash, Salt, IsSuperuser),
case mnesia:read(?TAB, DBUserID, write) of
[] ->
insert_user(UserInfoRecord);
[UserInfoRecord] ->
ok;
[_] ->
?SLOG(warning, #{
msg => "bootstrap_authentication_overridden_in_the_built_in_database",
user_id => UserID,
group_id => UserGroup,
suggestion =>
"If you have made changes in other way, remove the user_id from the bootstrap file."
}),
insert_user(UserInfoRecord)
end.
insert_user(#user_info{} = UserInfoRecord) ->
mnesia:write(?TAB, UserInfoRecord, write).
@ -531,3 +548,25 @@ find_password_hash(_, _, _) ->
is_superuser(#{<<"is_superuser">> := <<"true">>}) -> true;
is_superuser(#{<<"is_superuser">> := true}) -> true;
is_superuser(_) -> false.
%% Bootstrap the built-in database from the configured credentials file,
%% importing its users into mnesia.  Called once from create/2; a missing or
%% unreadable file is logged but never prevents authenticator creation.
%%
%% NOTE(review): the schema declares the fields as `bootstrap_file' /
%% `bootstrap_type', but this code used to read only the misspelled keys
%% `boostrap_file' / `boostrap_type', so a schema-checked config could never
%% trigger the bootstrap.  Both spellings are accepted now, keeping backward
%% compatibility with callers that pass the misspelled keys directly.
boostrap_user_from_file(Config, State) ->
    case config_get(bootstrap_file, boostrap_file, Config, <<>>) of
        <<>> ->
            %% Empty binary is the schema default: no bootstrap configured.
            ok;
        FileName0 ->
            %% `plain' matches the schema default for bootstrap_type.
            Type = config_get(bootstrap_type, boostrap_type, Config, plain),
            %% Allow environment-variable references in the file name.
            FileName = emqx_schema:naive_env_interpolation(FileName0),
            case file:read_file(FileName) of
                {ok, FileData} ->
                    %% If there is a key conflict, the record from the
                    %% bootstrap file overrides the one in the database.
                    _ = import_users({Type, FileName, FileData}, State),
                    ok;
                {error, Reason} ->
                    %% Best effort: log and continue.
                    ?SLOG(warning, #{
                        msg => "boostrap_authn_built_in_database_failed",
                        boostrap_file => FileName,
                        boostrap_type => Type,
                        reason => emqx_utils:explain_posix(Reason)
                    })
            end
    end.

%% Fetch `Key' from `Config', falling back to the legacy misspelled
%% `LegacyKey', then to `Default'.
config_get(Key, LegacyKey, Config, Default) ->
    maps:get(Key, Config, maps:get(LegacyKey, Config, Default)).

View File

@ -46,7 +46,7 @@ select_union_member(_Kind, _Value) ->
fields(builtin_db) ->
[
{password_hash_algorithm, fun emqx_authn_password_hashing:type_rw/1}
] ++ common_fields();
] ++ common_fields() ++ bootstrap_fields();
fields(builtin_db_api) ->
[
{password_hash_algorithm, fun emqx_authn_password_hashing:type_rw_api/1}
@ -69,3 +69,24 @@ common_fields() ->
{backend, emqx_authn_schema:backend(?AUTHN_BACKEND)},
{user_id_type, fun user_id_type/1}
] ++ emqx_authn_schema:common_fields().
%% Schema fields controlling bootstrap of the built-in database from a file.
bootstrap_fields() ->
    [
        {bootstrap_file,
            ?HOCON(
                binary(),
                #{
                    desc => ?DESC(bootstrap_file),
                    required => false,
                    %% Empty binary means "no bootstrap file configured".
                    default => <<>>
                }
            )},
        {bootstrap_type,
            ?HOCON(
                ?ENUM([hash, plain]), #{
                    desc => ?DESC(bootstrap_type),
                    required => false,
                    %% `plain': the file holds plaintext passwords;
                    %% `hash': the file holds pre-hashed passwords.
                    default => <<"plain">>
                }
            )}
    ].

View File

@ -54,7 +54,74 @@ t_create(_) ->
{ok, _} = emqx_authn_mnesia:create(?AUTHN_ID, Config0),
Config1 = Config0#{password_hash_algorithm => #{name => sha256}},
{ok, _} = emqx_authn_mnesia:create(?AUTHN_ID, Config1).
{ok, _} = emqx_authn_mnesia:create(?AUTHN_ID, Config1),
ok.
%% Exercises bootstrap from sample files in every supported combination of
%% file credential format (hash/plain x JSON/CSV) and configured
%% password-hash algorithm.
t_bootstrap_file(_) ->
    Config = config(),
    %% hash to hash
    HashConfig = Config#{password_hash_algorithm => #{name => sha256, salt_position => suffix}},
    ?assertMatch(
        [
            {user_info, {_, <<"myuser1">>}, _, _, true},
            {user_info, {_, <<"myuser2">>}, _, _, false}
        ],
        test_bootstrap_file(HashConfig, hash, <<"user-credentials.json">>)
    ),
    ?assertMatch(
        [
            {user_info, {_, <<"myuser3">>}, _, _, true},
            {user_info, {_, <<"myuser4">>}, _, _, false}
        ],
        test_bootstrap_file(HashConfig, hash, <<"user-credentials.csv">>)
    ),
    %% plain to plain
    PlainConfig = Config#{
        password_hash_algorithm =>
            #{name => plain, salt_position => disable}
    },
    %% With the `plain' algorithm the stored password equals the file's.
    ?assertMatch(
        [
            {user_info, {_, <<"myuser1">>}, <<"password1">>, _, true},
            {user_info, {_, <<"myuser2">>}, <<"password2">>, _, false}
        ],
        test_bootstrap_file(PlainConfig, plain, <<"user-credentials-plain.json">>)
    ),
    ?assertMatch(
        [
            {user_info, {_, <<"myuser3">>}, <<"password3">>, _, true},
            {user_info, {_, <<"myuser4">>}, <<"password4">>, _, false}
        ],
        test_bootstrap_file(PlainConfig, plain, <<"user-credentials-plain.csv">>)
    ),
    %% plain to hash
    ?assertMatch(
        [
            {user_info, {_, <<"myuser1">>}, _, _, true},
            {user_info, {_, <<"myuser2">>}, _, _, false}
        ],
        test_bootstrap_file(HashConfig, plain, <<"user-credentials-plain.json">>)
    ),
    ?assertMatch(
        [
            {user_info, {_, <<"myuser3">>}, _, _, true},
            {user_info, {_, <<"myuser4">>}, _, _, false}
        ],
        test_bootstrap_file(HashConfig, plain, <<"user-credentials-plain.csv">>)
    ),
    ok.
%% Create an authenticator configured to bootstrap from `File' (with the
%% given bootstrap `Type'), return the user records it produced, then tear
%% everything down so each call starts from an empty table.
test_bootstrap_file(BaseConfig, Type, File) ->
    {Type, Filename, _FileData} = sample_filename_and_data(Type, File),
    BootstrapConfig = BaseConfig#{
        boostrap_file => Filename,
        boostrap_type => Type
    },
    {ok, State} = emqx_authn_mnesia:create(?AUTHN_ID, BootstrapConfig),
    BootstrappedUsers = ets:tab2list(emqx_authn_mnesia),
    ok = emqx_authn_mnesia:destroy(State),
    %% destroy/1 must leave the table empty for the next invocation.
    ?assertMatch([], ets:tab2list(emqx_authn_mnesia)),
    BootstrappedUsers.
t_update(_) ->
Config0 = config(),

View File

@ -1,7 +1,7 @@
%% -*- mode: erlang -*-
{application, emqx_bridge, [
{description, "EMQX bridges"},
{vsn, "0.2.1"},
{vsn, "0.2.3"},
{registered, [emqx_bridge_sup]},
{mod, {emqx_bridge_app, []}},
{applications, [

View File

@ -288,6 +288,14 @@ request(Method, Path, Params) ->
Error
end.
%% Reduce an HTTP API call result to just `{StatusCode, Body}', whether the
%% call was reported as a success or as an error.
simplify_result({ok, {{_, Status, _}, _Headers, Body}}) ->
    {Status, Body};
simplify_result({error, {{_, Status, _}, _Headers, Body}}) ->
    {Status, Body}.
list_bridges_api() ->
Params = [],
Path = emqx_mgmt_api_test_util:api_path(["actions"]),
@ -321,7 +329,7 @@ get_bridge_api(BridgeKind, BridgeType, BridgeName) ->
Path = emqx_mgmt_api_test_util:api_path([Root, BridgeId]),
ct:pal("get bridge ~p (via http)", [{BridgeKind, BridgeType, BridgeName}]),
Res = request(get, Path, Params),
ct:pal("get bridge ~p result: ~p", [{BridgeKind, BridgeType, BridgeName}, Res]),
ct:pal("get bridge ~p result:\n ~p", [{BridgeKind, BridgeType, BridgeName}, Res]),
Res.
create_bridge_api(Config) ->
@ -349,6 +357,26 @@ create_kind_api(Config, Overrides) ->
ct:pal("bridge create (~s, http) result:\n ~p", [Kind, Res]),
Res.
%% Enable a bridge/action/source of the given kind via the management HTTP API.
enable_kind_api(Kind, ConnectorType, ConnectorName) ->
    do_enable_disable_kind_api(Kind, ConnectorType, ConnectorName, enable).

%% Disable a bridge/action/source of the given kind via the management HTTP API.
disable_kind_api(Kind, ConnectorType, ConnectorName) ->
    do_enable_disable_kind_api(Kind, ConnectorType, ConnectorName, disable).

%% PUT .../<id>/enable/<true|false> and return `{StatusCode, Body}'.
do_enable_disable_kind_api(Kind, Type, Name, Op) ->
    {OpPath, OpStr} = op_path_and_label(Op),
    BridgeId = emqx_bridge_resource:bridge_id(Type, Name),
    Path = emqx_mgmt_api_test_util:api_path([api_path_root(Kind), BridgeId, "enable", OpPath]),
    ct:pal(OpStr ++ " ~s ~s (http)", [Kind, BridgeId]),
    Response = request(put, Path, []),
    ct:pal(OpStr ++ " ~s ~s (http) result:\n ~p", [Kind, BridgeId, Response]),
    simplify_result(Response).

%% Map the operation atom onto its URL path segment and its log label.
op_path_and_label(enable) -> {"true", "enable"};
op_path_and_label(disable) -> {"false", "disable"}.
create_connector_api(Config) ->
create_connector_api(Config, _Overrides = #{}).
@ -453,6 +481,15 @@ update_bridge_api(Config, Overrides) ->
ct:pal("update bridge (~s, http) result:\n ~p", [Kind, Res]),
Res.
%% Delete a bridge/action/source of the given kind through the management
%% HTTP API; returns `{StatusCode, Body}'.
delete_kind_api(Kind, Type, Name) ->
    Id = emqx_bridge_resource:bridge_id(Type, Name),
    Url = emqx_mgmt_api_test_util:api_path([api_path_root(Kind), Id]),
    ct:pal("deleting bridge (~s, http)", [Kind]),
    Response = request(delete, Url, []),
    ct:pal("delete bridge (~s, http) result:\n ~p", [Kind, Response]),
    simplify_result(Response).
op_bridge_api(Op, BridgeType, BridgeName) ->
op_bridge_api(_Kind = action, Op, BridgeType, BridgeName).
@ -1054,6 +1091,7 @@ t_on_get_status(Config, Opts) ->
ProxyHost = ?config(proxy_host, Config),
ProxyName = ?config(proxy_name, Config),
FailureStatus = maps:get(failure_status, Opts, disconnected),
NormalStatus = maps:get(normal_status, Opts, connected),
?assertMatch({ok, _}, create_bridge_api(Config)),
ResourceId = resource_id(Config),
%% Since the connection process is async, we give it some time to
@ -1061,7 +1099,7 @@ t_on_get_status(Config, Opts) ->
?retry(
_Sleep = 1_000,
_Attempts = 20,
?assertEqual({ok, connected}, emqx_resource_manager:health_check(ResourceId))
?assertEqual({ok, NormalStatus}, emqx_resource_manager:health_check(ResourceId))
),
case ProxyHost of
undefined ->
@ -1080,7 +1118,7 @@ t_on_get_status(Config, Opts) ->
?retry(
_Sleep = 1_000,
_Attempts = 20,
?assertEqual({ok, connected}, emqx_resource_manager:health_check(ResourceId))
?assertEqual({ok, NormalStatus}, emqx_resource_manager:health_check(ResourceId))
)
end,
ok.

View File

@ -2,7 +2,7 @@
{erl_opts, [debug_info]}.
{deps, [
{wolff, {git, "https://github.com/kafka4beam/wolff.git", {tag, "1.10.5"}}},
{wolff, {git, "https://github.com/kafka4beam/wolff.git", {tag, "2.0.0"}}},
{kafka_protocol, {git, "https://github.com/kafka4beam/kafka_protocol.git", {tag, "4.1.5"}}},
{brod_gssapi, {git, "https://github.com/kafka4beam/brod_gssapi.git", {tag, "v0.1.1"}}},
{brod, {git, "https://github.com/kafka4beam/brod.git", {tag, "3.18.0"}}},

View File

@ -40,6 +40,8 @@ init_per_suite(Config) ->
emqx,
emqx_management,
emqx_resource,
%% Just for test helpers
brod,
emqx_bridge_azure_event_hub,
emqx_bridge,
emqx_rule_engine,
@ -93,6 +95,9 @@ common_init_per_testcase(TestCase, Config) ->
{connector_type, ?CONNECTOR_TYPE},
{connector_name, Name},
{connector_config, ConnectorConfig},
{action_type, ?BRIDGE_TYPE},
{action_name, Name},
{action_config, BridgeConfig},
{bridge_type, ?BRIDGE_TYPE},
{bridge_name, Name},
{bridge_config, BridgeConfig}
@ -100,18 +105,13 @@ common_init_per_testcase(TestCase, Config) ->
].
end_per_testcase(_Testcase, Config) ->
case proplists:get_bool(skip_does_not_apply, Config) of
true ->
ok;
false ->
ProxyHost = ?config(proxy_host, Config),
ProxyPort = ?config(proxy_port, Config),
emqx_common_test_helpers:reset_proxy(ProxyHost, ProxyPort),
emqx_bridge_v2_testlib:delete_all_bridges_and_connectors(),
emqx_common_test_helpers:call_janitor(60_000),
ok = snabbkaffe:stop(),
ok
end.
ok.
%%------------------------------------------------------------------------------
%% Helper fns
@ -172,7 +172,7 @@ bridge_config(Name, ConnectorId, KafkaTopic) ->
#{
<<"enable">> => true,
<<"connector">> => ConnectorId,
<<"kafka">> =>
<<"parameters">> =>
#{
<<"buffer">> =>
#{
@ -322,7 +322,7 @@ t_same_name_azure_kafka_bridges(Config) ->
),
%% then create a Kafka bridge with same name and delete it after creation
ConfigKafka0 = lists:keyreplace(bridge_type, 1, Config, {bridge_type, ?KAFKA_BRIDGE_TYPE}),
ConfigKafka0 = lists:keyreplace(action_type, 1, Config, {action_type, ?KAFKA_BRIDGE_TYPE}),
ConfigKafka = lists:keyreplace(
connector_type, 1, ConfigKafka0, {connector_type, ?KAFKA_BRIDGE_TYPE}
),
@ -374,3 +374,20 @@ t_http_api_get(Config) ->
emqx_bridge_testlib:list_bridges_api()
),
ok.
%% Delegates to the shared Kafka producer test: actions sharing one Kafka
%% topic must not disturb each other when one is disabled or deleted.
t_multiple_actions_sharing_topic(Config) ->
    ActionConfig0 = ?config(action_config, Config),
    %% Force sync query mode so the shared test can assert on send results.
    ActionConfig =
        emqx_utils_maps:deep_merge(
            ActionConfig0,
            #{<<"parameters">> => #{<<"query_mode">> => <<"sync">>}}
        ),
    ok = emqx_bridge_v2_kafka_producer_SUITE:t_multiple_actions_sharing_topic(
        [
            {type, ?BRIDGE_TYPE_BIN},
            {connector_name, ?config(connector_name, Config)},
            {connector_config, ?config(connector_config, Config)},
            {action_config, ActionConfig}
        ]
    ),
    ok.

View File

@ -1,6 +1,6 @@
{application, emqx_bridge_cassandra, [
{description, "EMQX Enterprise Cassandra Bridge"},
{vsn, "0.3.0"},
{vsn, "0.3.1"},
{registered, []},
{applications, [
kernel,

View File

@ -1,6 +1,6 @@
{application, emqx_bridge_clickhouse, [
{description, "EMQX Enterprise ClickHouse Bridge"},
{vsn, "0.4.0"},
{vsn, "0.4.1"},
{registered, []},
{applications, [
kernel,

View File

@ -2,7 +2,7 @@
{erl_opts, [debug_info]}.
{deps, [
{wolff, {git, "https://github.com/kafka4beam/wolff.git", {tag, "1.10.5"}}},
{wolff, {git, "https://github.com/kafka4beam/wolff.git", {tag, "2.0.0"}}},
{kafka_protocol, {git, "https://github.com/kafka4beam/kafka_protocol.git", {tag, "4.1.5"}}},
{brod_gssapi, {git, "https://github.com/kafka4beam/brod_gssapi.git", {tag, "v0.1.1"}}},
{brod, {git, "https://github.com/kafka4beam/brod.git", {tag, "3.18.0"}}},

View File

@ -40,6 +40,8 @@ init_per_suite(Config) ->
emqx,
emqx_management,
emqx_resource,
%% Just for test helpers
brod,
emqx_bridge_confluent,
emqx_bridge,
emqx_rule_engine,
@ -93,6 +95,9 @@ common_init_per_testcase(TestCase, Config) ->
{connector_type, ?CONNECTOR_TYPE},
{connector_name, Name},
{connector_config, ConnectorConfig},
{action_type, ?ACTION_TYPE},
{action_name, Name},
{action_config, BridgeConfig},
{bridge_type, ?ACTION_TYPE},
{bridge_name, Name},
{bridge_config, BridgeConfig}
@ -306,7 +311,7 @@ t_same_name_confluent_kafka_bridges(Config) ->
),
%% then create a Kafka bridge with same name and delete it after creation
ConfigKafka0 = lists:keyreplace(bridge_type, 1, Config, {bridge_type, ?KAFKA_BRIDGE_TYPE}),
ConfigKafka0 = lists:keyreplace(action_type, 1, Config, {action_type, ?KAFKA_BRIDGE_TYPE}),
ConfigKafka = lists:keyreplace(
connector_type, 1, ConfigKafka0, {connector_type, ?KAFKA_BRIDGE_TYPE}
),
@ -378,3 +383,20 @@ t_list_v1_bridges(Config) ->
[]
),
ok.
%% Delegates to the shared Kafka producer test: actions sharing one Kafka
%% topic must not disturb each other when one is disabled or deleted.
t_multiple_actions_sharing_topic(Config) ->
    ActionConfig0 = ?config(action_config, Config),
    %% Force sync query mode so the shared test can assert on send results.
    ActionConfig =
        emqx_utils_maps:deep_merge(
            ActionConfig0,
            #{<<"parameters">> => #{<<"query_mode">> => <<"sync">>}}
        ),
    ok = emqx_bridge_v2_kafka_producer_SUITE:t_multiple_actions_sharing_topic(
        [
            {type, ?ACTION_TYPE_BIN},
            {connector_name, ?config(connector_name, Config)},
            {connector_config, ?config(connector_config, Config)},
            {action_config, ActionConfig}
        ]
    ),
    ok.

View File

@ -1,6 +1,6 @@
{application, emqx_bridge_dynamo, [
{description, "EMQX Enterprise Dynamo Bridge"},
{vsn, "0.2.1"},
{vsn, "0.2.2"},
{registered, []},
{applications, [
kernel,

View File

@ -1,7 +1,7 @@
%% -*- mode: erlang -*-
{application, emqx_bridge_es, [
{description, "EMQX Enterprise Elastic Search Bridge"},
{vsn, "0.1.2"},
{vsn, "0.1.3"},
{modules, [
emqx_bridge_es,
emqx_bridge_es_connector

View File

@ -1,6 +1,6 @@
{application, emqx_bridge_gcp_pubsub, [
{description, "EMQX Enterprise GCP Pub/Sub Bridge"},
{vsn, "0.3.0"},
{vsn, "0.3.1"},
{registered, []},
{applications, [
kernel,

View File

@ -1448,7 +1448,10 @@ t_connection_down_before_starting(Config) ->
),
{ok, _} = create_bridge(Config),
{ok, _} = snabbkaffe:receive_events(SRef0),
?assertMatch({ok, connecting}, health_check(Config)),
?assertMatch(
{ok, Status} when Status =:= connecting orelse Status =:= disconnected,
health_check(Config)
),
emqx_common_test_helpers:heal_failure(down, ProxyName, ProxyHost, ProxyPort),
?retry(

View File

@ -1,6 +1,6 @@
{application, emqx_bridge_greptimedb, [
{description, "EMQX GreptimeDB Bridge"},
{vsn, "0.2.1"},
{vsn, "0.2.2"},
{registered, []},
{applications, [
kernel,

View File

@ -363,7 +363,7 @@ do_start_client(
{error, Reason}
end.
grpc_config() ->
%% Options handed to the gRPC client pool (renamed from grpc_config/0 to
%% match the `grpc_opts' key expected by the client).
grpc_opts() ->
    #{
        %% NOTE(review): presumably makes channel startup synchronous so
        %% connection errors surface immediately -- confirm with client docs.
        sync_start => true,
        connect_timeout => ?CONNECT_TIMEOUT
    }.
@ -382,7 +382,7 @@ client_config(
{pool, InstId},
{pool_type, random},
{auto_reconnect, ?AUTO_RECONNECT_S},
{gprc_options, grpc_config()}
{grpc_opts, grpc_opts()}
] ++ protocol_config(Config).
protocol_config(

View File

@ -1,6 +1,6 @@
{application, emqx_bridge_hstreamdb, [
{description, "EMQX Enterprise HStreamDB Bridge"},
{vsn, "0.2.0"},
{vsn, "0.2.1"},
{registered, []},
{applications, [
kernel,

View File

@ -1,6 +1,6 @@
{application, emqx_bridge_http, [
{description, "EMQX HTTP Bridge and Connector Application"},
{vsn, "0.3.1"},
{vsn, "0.3.3"},
{registered, []},
{applications, [kernel, stdlib, emqx_resource, ehttpc]},
{env, [

View File

@ -1,6 +1,6 @@
{application, emqx_bridge_influxdb, [
{description, "EMQX Enterprise InfluxDB Bridge"},
{vsn, "0.2.2"},
{vsn, "0.2.3"},
{registered, []},
{applications, [
kernel,

View File

@ -1,7 +1,7 @@
%% -*- mode: erlang -*-
{application, emqx_bridge_iotdb, [
{description, "EMQX Enterprise Apache IoTDB Bridge"},
{vsn, "0.2.1"},
{vsn, "0.2.2"},
{modules, [
emqx_bridge_iotdb,
emqx_bridge_iotdb_connector

View File

@ -2,7 +2,7 @@
{erl_opts, [debug_info]}.
{deps, [
{wolff, {git, "https://github.com/kafka4beam/wolff.git", {tag, "1.10.5"}}},
{wolff, {git, "https://github.com/kafka4beam/wolff.git", {tag, "2.0.0"}}},
{kafka_protocol, {git, "https://github.com/kafka4beam/kafka_protocol.git", {tag, "4.1.5"}}},
{brod_gssapi, {git, "https://github.com/kafka4beam/brod_gssapi.git", {tag, "v0.1.1"}}},
{brod, {git, "https://github.com/kafka4beam/brod.git", {tag, "3.18.0"}}},

View File

@ -1,7 +1,7 @@
%% -*- mode: erlang -*-
{application, emqx_bridge_kafka, [
{description, "EMQX Enterprise Kafka Bridge"},
{vsn, "0.3.1"},
{vsn, "0.3.3"},
{registered, [emqx_bridge_kafka_consumer_sup]},
{applications, [
kernel,

View File

@ -327,6 +327,12 @@ on_query(
}),
do_send_msg(sync, KafkaMessage, Producers, SyncTimeout)
catch
error:{invalid_partition_count, Count, _Partitioner} ->
?tp("kafka_producer_invalid_partition_count", #{
action_id => MessageTag,
query_mode => sync
}),
{error, {unrecoverable_error, {invalid_partition_count, Count}}};
throw:{bad_kafka_header, _} = Error ->
?tp(
emqx_bridge_kafka_impl_producer_sync_query_failed,
@ -387,8 +393,12 @@ on_query_async(
}),
do_send_msg(async, KafkaMessage, Producers, AsyncReplyFn)
catch
error:{invalid_partition_count, _Count, _Partitioner} ->
{error, invalid_partition_count};
error:{invalid_partition_count, Count, _Partitioner} ->
?tp("kafka_producer_invalid_partition_count", #{
action_id => MessageTag,
query_mode => async
}),
{error, {unrecoverable_error, {invalid_partition_count, Count}}};
throw:{bad_kafka_header, _} = Error ->
?tp(
emqx_bridge_kafka_impl_producer_async_query_failed,
@ -711,6 +721,7 @@ producers_config(BridgeType, BridgeName, Input, IsDryRun, BridgeV2Id) ->
max_batch_bytes => MaxBatchBytes,
max_send_ahead => MaxInflight - 1,
compression => Compression,
alias => BridgeV2Id,
telemetry_meta_data => #{bridge_id => BridgeV2Id},
max_partitions => MaxPartitions
}.

View File

@ -142,6 +142,9 @@ check_send_message_with_bridge(BridgeName) ->
check_kafka_message_payload(Offset, Payload).
%% Convenience wrapper: send through an action of the default type `?TYPE'.
send_message(ActionName) ->
    send_message(?TYPE, ActionName).
send_message(Type, ActionName) ->
%% ######################################
%% Create Kafka message
%% ######################################
@ -157,8 +160,8 @@ send_message(ActionName) ->
%% ######################################
%% Send message
%% ######################################
emqx_bridge_v2:send_message(?TYPE, ActionName, Msg, #{}),
#{offset => Offset, payload => Payload}.
Res = emqx_bridge_v2:send_message(Type, ActionName, Msg, #{}),
#{offset => Offset, payload => Payload, result => Res}.
resolve_kafka_offset() ->
KafkaTopic = emqx_bridge_kafka_impl_producer_SUITE:test_topic_one_partition(),
@ -285,6 +288,21 @@ action_api_spec_props_for_get() ->
emqx_bridge_v2_testlib:actions_api_spec_schemas(),
Props.
%% Assert via the HTTP API that the bridge -- and every node's entry -- is in
%% `Status'.  `Line' is threaded in so failures point at the caller's line.
assert_status_api(Line, Type, Name, Status) ->
    ?assertMatch(
        {ok,
            {{_, 200, _}, _, #{
                <<"status">> := Status,
                <<"node_status">> := [#{<<"status">> := Status}]
            }}},
        emqx_bridge_v2_testlib:get_bridge_api(Type, Name),
        #{line => Line, name => Name, expected_status => Status}
    ).
%% Call assert_status_api/4 with the caller's line number captured.
-define(assertStatusAPI(TYPE, NAME, STATUS), assert_status_api(?LINE, TYPE, NAME, STATUS)).

%% Fetch the rule-engine metrics map for the given rule id.
get_rule_metrics(RuleId) ->
    emqx_metrics_worker:get_metrics(rule_metrics, RuleId).
%%------------------------------------------------------------------------------
%% Testcases
%%------------------------------------------------------------------------------
@ -702,3 +720,204 @@ t_connector_health_check_topic(_Config) ->
[]
),
ok.
%% Checks that, if Kafka raises `invalid_partition_count' error, we bump the corresponding
%% failure rule action metric.
%% Exercises both the sync and the async send paths.
t_invalid_partition_count_metrics(Config) ->
    Type = proplists:get_value(type, Config, ?TYPE),
    ConnectorName = proplists:get_value(connector_name, Config, <<"c">>),
    ConnectorConfig = proplists:get_value(connector_config, Config, connector_config()),
    ActionConfig1 = proplists:get_value(action_config, Config, action_config(ConnectorName)),
    ?check_trace(
        #{timetrap => 10_000},
        begin
            ConnectorParams = [
                {connector_config, ConnectorConfig},
                {connector_name, ConnectorName},
                {connector_type, Type}
            ],
            ActionName = <<"a">>,
            ActionParams = [
                {action_config, ActionConfig1},
                {action_name, ActionName},
                {action_type, Type}
            ],
            {ok, {{_, 201, _}, _, #{}}} =
                emqx_bridge_v2_testlib:create_connector_api(ConnectorParams),
            {ok, {{_, 201, _}, _, #{}}} =
                emqx_bridge_v2_testlib:create_action_api(ActionParams),
            RuleTopic = <<"t/a">>,
            {ok, #{<<"id">> := RuleId}} =
                emqx_bridge_v2_testlib:create_rule_and_action_http(Type, RuleTopic, [
                    {bridge_name, ActionName}
                ]),
            {ok, C} = emqtt:start_link([]),
            {ok, _} = emqtt:connect(C),
            %%--------------------------------------------
            ?tp(notice, "sync", #{}),
            %%--------------------------------------------
            %% Artificially force sync query to be used; otherwise, it's only used when the
            %% resource is blocked and retrying.
            ok = meck:new(emqx_bridge_kafka_impl_producer, [passthrough, no_history]),
            on_exit(fun() -> catch meck:unload() end),
            ok = meck:expect(emqx_bridge_kafka_impl_producer, query_mode, 1, simple_sync),
            %% Simulate `invalid_partition_count'
            emqx_common_test_helpers:with_mock(
                wolff,
                send_sync,
                fun(_Producers, _Msgs, _Timeout) ->
                    error({invalid_partition_count, 0, partitioner})
                end,
                fun() ->
                    {{ok, _}, {ok, _}} =
                        ?wait_async_action(
                            emqtt:publish(C, RuleTopic, <<"hi">>, 2),
                            #{
                                ?snk_kind := "kafka_producer_invalid_partition_count",
                                query_mode := sync
                            }
                        ),
                    %% The sync failure must be counted as a failed action.
                    ?assertMatch(
                        #{
                            counters := #{
                                'actions.total' := 1,
                                'actions.failed' := 1
                            }
                        },
                        get_rule_metrics(RuleId)
                    ),
                    ok
                end
            ),
            %%--------------------------------------------
            %% Same thing, but async call
            ?tp(notice, "async", #{}),
            %%--------------------------------------------
            %% Restore the real query_mode so the async path is exercised.
            ok = meck:expect(
                emqx_bridge_kafka_impl_producer,
                query_mode,
                fun(Conf) -> meck:passthrough([Conf]) end
            ),
            ok = emqx_bridge_v2:remove(actions, Type, ActionName),
            {ok, {{_, 201, _}, _, #{}}} =
                emqx_bridge_v2_testlib:create_action_api(
                    ActionParams,
                    #{<<"parameters">> => #{<<"query_mode">> => <<"async">>}}
                ),
            %% Simulate `invalid_partition_count'
            emqx_common_test_helpers:with_mock(
                wolff,
                send,
                fun(_Producers, _Msgs, _Timeout) ->
                    error({invalid_partition_count, 0, partitioner})
                end,
                fun() ->
                    {{ok, _}, {ok, _}} =
                        ?wait_async_action(
                            emqtt:publish(C, RuleTopic, <<"hi">>, 2),
                            #{?snk_kind := "rule_engine_applied_all_rules"}
                        ),
                    %% Counters accumulate across the sync + async runs.
                    ?assertMatch(
                        #{
                            counters := #{
                                'actions.total' := 2,
                                'actions.failed' := 2
                            }
                        },
                        get_rule_metrics(RuleId)
                    ),
                    ok
                end
            ),
            ok
        end,
        fun(Trace) ->
            %% Both paths must emit the dedicated trace point, in order.
            ?assertMatch(
                [#{query_mode := sync}, #{query_mode := async} | _],
                ?of_kind("kafka_producer_invalid_partition_count", Trace)
            ),
            ok
        end
    ),
    ok.
%% Tests that deleting/disabling an action that share the same Kafka topic with other
%% actions do not disturb the latter.
t_multiple_actions_sharing_topic(Config) ->
    Type = proplists:get_value(type, Config, ?TYPE),
    ConnectorName = proplists:get_value(connector_name, Config, <<"c">>),
    ConnectorConfig = proplists:get_value(connector_config, Config, connector_config()),
    ActionConfig = proplists:get_value(action_config, Config, action_config(ConnectorName)),
    ?check_trace(
        begin
            ConnectorParams = [
                {connector_config, ConnectorConfig},
                {connector_name, ConnectorName},
                {connector_type, Type}
            ],
            %% Two actions on the same connector (and hence the same topic).
            ActionName1 = <<"a1">>,
            ActionParams1 = [
                {action_config, ActionConfig},
                {action_name, ActionName1},
                {action_type, Type}
            ],
            ActionName2 = <<"a2">>,
            ActionParams2 = [
                {action_config, ActionConfig},
                {action_name, ActionName2},
                {action_type, Type}
            ],
            {ok, {{_, 201, _}, _, #{}}} =
                emqx_bridge_v2_testlib:create_connector_api(ConnectorParams),
            {ok, {{_, 201, _}, _, #{}}} =
                emqx_bridge_v2_testlib:create_action_api(ActionParams1),
            {ok, {{_, 201, _}, _, #{}}} =
                emqx_bridge_v2_testlib:create_action_api(ActionParams2),
            RuleTopic = <<"t/a2">>,
            {ok, _} = emqx_bridge_v2_testlib:create_rule_and_action_http(Type, RuleTopic, Config),
            ?assertStatusAPI(Type, ActionName1, <<"connected">>),
            ?assertStatusAPI(Type, ActionName2, <<"connected">>),
            %% Disabling a1 shouldn't disturb a2.
            ?assertMatch(
                {204, _}, emqx_bridge_v2_testlib:disable_kind_api(action, Type, ActionName1)
            ),
            ?assertStatusAPI(Type, ActionName1, <<"disconnected">>),
            ?assertStatusAPI(Type, ActionName2, <<"connected">>),
            ?assertMatch(#{result := ok}, send_message(Type, ActionName2)),
            ?assertStatusAPI(Type, ActionName2, <<"connected">>),
            %% Re-enabling a1 must bring it back without affecting a2.
            ?assertMatch(
                {204, _},
                emqx_bridge_v2_testlib:enable_kind_api(action, Type, ActionName1)
            ),
            ?assertStatusAPI(Type, ActionName1, <<"connected">>),
            ?assertStatusAPI(Type, ActionName2, <<"connected">>),
            ?assertMatch(#{result := ok}, send_message(Type, ActionName2)),
            %% Deleting also shouldn't disrupt a2.
            ?assertMatch(
                {204, _},
                emqx_bridge_v2_testlib:delete_kind_api(action, Type, ActionName1)
            ),
            ?assertStatusAPI(Type, ActionName2, <<"connected">>),
            ?assertMatch(#{result := ok}, send_message(Type, ActionName2)),
            ok
        end,
        fun(Trace) ->
            %% No send may ever hit the invalid-partition-count error path.
            ?assertEqual([], ?of_kind("kafka_producer_invalid_partition_count", Trace)),
            ok
        end
    ),
    ok.

View File

@ -1,6 +1,6 @@
{application, emqx_bridge_kinesis, [
{description, "EMQX Enterprise Amazon Kinesis Bridge"},
{vsn, "0.2.0"},
{vsn, "0.2.1"},
{registered, []},
{applications, [
kernel,

View File

@ -1,6 +1,6 @@
{application, emqx_bridge_mongodb, [
{description, "EMQX Enterprise MongoDB Bridge"},
{vsn, "0.3.1"},
{vsn, "0.3.2"},
{registered, []},
{applications, [
kernel,

View File

@ -1,7 +1,7 @@
%% -*- mode: erlang -*-
{application, emqx_bridge_mqtt, [
{description, "EMQX MQTT Broker Bridge"},
{vsn, "0.2.1"},
{vsn, "0.2.2"},
{registered, []},
{applications, [
kernel,

View File

@ -1,6 +1,6 @@
{application, emqx_bridge_mysql, [
{description, "EMQX Enterprise MySQL Bridge"},
{vsn, "0.1.6"},
{vsn, "0.1.7"},
{registered, []},
{applications, [
kernel,

View File

@ -1,6 +1,6 @@
{application, emqx_bridge_opents, [
{description, "EMQX Enterprise OpenTSDB Bridge"},
{vsn, "0.2.0"},
{vsn, "0.2.1"},
{registered, []},
{applications, [
kernel,

View File

@ -1,6 +1,6 @@
{application, emqx_bridge_oracle, [
{description, "EMQX Enterprise Oracle Database Bridge"},
{vsn, "0.2.0"},
{vsn, "0.2.1"},
{registered, []},
{applications, [
kernel,

View File

@ -1,6 +1,6 @@
{application, emqx_bridge_pgsql, [
{description, "EMQX Enterprise PostgreSQL Bridge"},
{vsn, "0.1.7"},
{vsn, "0.1.8"},
{registered, []},
{applications, [
kernel,

View File

@ -1,6 +1,6 @@
{application, emqx_bridge_pulsar, [
{description, "EMQX Pulsar Bridge"},
{vsn, "0.2.1"},
{vsn, "0.2.2"},
{registered, []},
{applications, [
kernel,

View File

@ -127,10 +127,6 @@ init_per_testcase(TestCase, Config) ->
common_init_per_testcase(TestCase, Config).
end_per_testcase(_Testcase, Config) ->
case proplists:get_bool(skip_does_not_apply, Config) of
true ->
ok;
false ->
ok = emqx_config:delete_override_conf_files(),
ProxyHost = ?config(proxy_host, Config),
ProxyPort = ?config(proxy_port, Config),
@ -142,8 +138,7 @@ end_per_testcase(_Testcase, Config) ->
emqx_common_test_helpers:call_janitor(60_000),
ok = snabbkaffe:stop(),
flush_consumed(),
ok
end.
ok.
common_init_per_testcase(TestCase, Config0) ->
ct:timetrap(timer:seconds(60)),
@ -160,6 +155,10 @@ common_init_per_testcase(TestCase, Config0) ->
ok = snabbkaffe:start_trace(),
Config.
%%------------------------------------------------------------------------------
%% Helper fns
%%------------------------------------------------------------------------------
create_connector(Name, Config) ->
Connector = pulsar_connector(Config),
{ok, _} = emqx_connector:create(?TYPE, Name, Connector).
@ -174,69 +173,6 @@ create_action(Name, Config) ->
delete_action(Name) ->
ok = emqx_bridge_v2:remove(actions, ?TYPE, Name).
%%------------------------------------------------------------------------------
%% Testcases
%%------------------------------------------------------------------------------
t_action_probe(Config) ->
Name = atom_to_binary(?FUNCTION_NAME),
Action = pulsar_action(Config),
{ok, Res0} = emqx_bridge_v2_testlib:probe_bridge_api(action, ?TYPE, Name, Action),
?assertMatch({{_, 204, _}, _, _}, Res0),
ok.
t_action(Config) ->
Name = atom_to_binary(?FUNCTION_NAME),
create_action(Name, Config),
Actions = emqx_bridge_v2:list(actions),
Any = fun(#{name := BName}) -> BName =:= Name end,
?assert(lists:any(Any, Actions), Actions),
Topic = <<"lkadfdaction">>,
{ok, #{id := RuleId}} = emqx_rule_engine:create_rule(
#{
sql => <<"select * from \"", Topic/binary, "\"">>,
id => atom_to_binary(?FUNCTION_NAME),
actions => [<<"pulsar:", Name/binary>>],
description => <<"bridge_v2 send msg to pulsar action">>
}
),
on_exit(fun() -> emqx_rule_engine:delete_rule(RuleId) end),
MQTTClientID = <<"pulsar_mqtt_clientid">>,
{ok, C1} = emqtt:start_link([{clean_start, true}, {clientid, MQTTClientID}]),
{ok, _} = emqtt:connect(C1),
ReqPayload = payload(),
ReqPayloadBin = emqx_utils_json:encode(ReqPayload),
{ok, _} = emqtt:publish(C1, Topic, #{}, ReqPayloadBin, [{qos, 1}, {retain, false}]),
[#{<<"clientid">> := ClientID, <<"payload">> := RespPayload}] = receive_consumed(5000),
?assertEqual(MQTTClientID, ClientID),
?assertEqual(ReqPayload, emqx_utils_json:decode(RespPayload)),
ok = emqtt:disconnect(C1),
InstanceId = instance_id(actions, Name),
?retry(
100,
20,
?assertMatch(
#{
counters := #{
dropped := 0,
success := 1,
matched := 1,
failed := 0,
received := 0
}
},
emqx_resource:get_metrics(InstanceId)
)
),
ok = delete_action(Name),
ActionsAfterDelete = emqx_bridge_v2:list(actions),
?assertNot(lists:any(Any, ActionsAfterDelete), ActionsAfterDelete),
ok.
%%------------------------------------------------------------------------------
%% Helper fns
%%------------------------------------------------------------------------------
pulsar_connector(Config) ->
PulsarHost = ?config(pulsar_host, Config),
PulsarPort = ?config(pulsar_port, Config),
@ -455,3 +391,158 @@ maybe_skip_without_ci() ->
_ ->
{skip, no_pulsar}
end.
%% Assert via the management API that both the bridge's aggregated status
%% and every per-node status equal `Status`. `Line` is the caller's ?LINE,
%% included in the failure comment for better reports (see the
%% ?assertStatusAPI macro below, which supplies it).
assert_status_api(Line, Type, Name, Status) ->
    ?assertMatch(
        {ok,
            {{_, 200, _}, _, #{
                <<"status">> := Status,
                <<"node_status">> := [#{<<"status">> := Status}]
            }}},
        emqx_bridge_v2_testlib:get_bridge_api(Type, Name),
        #{line => Line, name => Name, expected_status => Status}
    ).
-define(assertStatusAPI(TYPE, NAME, STATUS), assert_status_api(?LINE, TYPE, NAME, STATUS)).
%%------------------------------------------------------------------------------
%% Testcases
%%------------------------------------------------------------------------------
%% Probing (dry-running) a pulsar action config through the HTTP API
%% must succeed with an empty 204 response.
t_action_probe(Config) ->
    Name = atom_to_binary(?FUNCTION_NAME),
    Action = pulsar_action(Config),
    {ok, Res0} = emqx_bridge_v2_testlib:probe_bridge_api(action, ?TYPE, Name, Action),
    ?assertMatch({{_, 204, _}, _, _}, Res0),
    ok.
%% End-to-end test of a pulsar action: create it, attach it to a rule,
%% publish over MQTT, verify the message arrives (via the test consumer)
%% and the action metrics look right, then delete the action and confirm
%% it is gone from the action list.
t_action(Config) ->
    Name = atom_to_binary(?FUNCTION_NAME),
    create_action(Name, Config),
    Actions = emqx_bridge_v2:list(actions),
    Any = fun(#{name := BName}) -> BName =:= Name end,
    ?assert(lists:any(Any, Actions), Actions),
    Topic = <<"lkadfdaction">>,
    {ok, #{id := RuleId}} = emqx_rule_engine:create_rule(
        #{
            sql => <<"select * from \"", Topic/binary, "\"">>,
            id => atom_to_binary(?FUNCTION_NAME),
            actions => [<<"pulsar:", Name/binary>>],
            description => <<"bridge_v2 send msg to pulsar action">>
        }
    ),
    on_exit(fun() -> emqx_rule_engine:delete_rule(RuleId) end),
    MQTTClientID = <<"pulsar_mqtt_clientid">>,
    {ok, C1} = emqtt:start_link([{clean_start, true}, {clientid, MQTTClientID}]),
    {ok, _} = emqtt:connect(C1),
    ReqPayload = payload(),
    ReqPayloadBin = emqx_utils_json:encode(ReqPayload),
    {ok, _} = emqtt:publish(C1, Topic, #{}, ReqPayloadBin, [{qos, 1}, {retain, false}]),
    %% receive_consumed/1 yields what the test's pulsar consumer received.
    [#{<<"clientid">> := ClientID, <<"payload">> := RespPayload}] = receive_consumed(5000),
    ?assertEqual(MQTTClientID, ClientID),
    ?assertEqual(ReqPayload, emqx_utils_json:decode(RespPayload)),
    ok = emqtt:disconnect(C1),
    InstanceId = instance_id(actions, Name),
    %% Metrics are updated asynchronously; poll via ?retry until they settle.
    ?retry(
        100,
        20,
        ?assertMatch(
            #{
                counters := #{
                    dropped := 0,
                    success := 1,
                    matched := 1,
                    failed := 0,
                    received := 0
                }
            },
            emqx_resource:get_metrics(InstanceId)
        )
    ),
    ok = delete_action(Name),
    ActionsAfterDelete = emqx_bridge_v2:list(actions),
    ?assertNot(lists:any(Any, ActionsAfterDelete), ActionsAfterDelete),
    ok.
%% Tests that deleting/disabling an action that share the same Pulsar topic with other
%% actions do not disturb the latter.
t_multiple_actions_sharing_topic(Config) ->
    Type = ?TYPE,
    ConnectorName = <<"c">>,
    ConnectorConfig = pulsar_connector(Config),
    ActionConfig = pulsar_action(Config),
    ?check_trace(
        begin
            ConnectorParams = [
                {connector_config, ConnectorConfig},
                {connector_name, ConnectorName},
                {connector_type, Type}
            ],
            ActionName1 = <<"a1">>,
            ActionParams1 = [
                {action_config, ActionConfig},
                {action_name, ActionName1},
                {action_type, Type}
            ],
            ActionName2 = <<"a2">>,
            ActionParams2 = [
                {action_config, ActionConfig},
                {action_name, ActionName2},
                {action_type, Type}
            ],
            %% One connector, two actions (a1, a2) with identical configs,
            %% hence sharing the same Pulsar topic.
            {ok, {{_, 201, _}, _, #{}}} =
                emqx_bridge_v2_testlib:create_connector_api(ConnectorParams),
            {ok, {{_, 201, _}, _, #{}}} =
                emqx_bridge_v2_testlib:create_action_api(ActionParams1),
            {ok, {{_, 201, _}, _, #{}}} =
                emqx_bridge_v2_testlib:create_action_api(ActionParams2),
            ?assertStatusAPI(Type, ActionName1, <<"connected">>),
            ?assertStatusAPI(Type, ActionName2, <<"connected">>),
            %% Only a2 is wired to a rule; a1 merely shares the topic.
            RuleTopic = <<"t/a2">>,
            {ok, _} = emqx_bridge_v2_testlib:create_rule_and_action_http(Type, RuleTopic, [
                {bridge_name, ActionName2}
            ]),
            {ok, C} = emqtt:start_link([]),
            {ok, _} = emqtt:connect(C),
            SendMessage = fun() ->
                ReqPayload = payload(),
                ReqPayloadBin = emqx_utils_json:encode(ReqPayload),
                {ok, _} = emqtt:publish(C, RuleTopic, #{}, ReqPayloadBin, [
                    {qos, 1}, {retain, false}
                ]),
                ok
            end,
            %% Disabling a1 shouldn't disturb a2.
            ?assertMatch(
                {204, _}, emqx_bridge_v2_testlib:disable_kind_api(action, Type, ActionName1)
            ),
            ?assertStatusAPI(Type, ActionName1, <<"disconnected">>),
            ?assertStatusAPI(Type, ActionName2, <<"connected">>),
            ?assertMatch(ok, SendMessage()),
            ?assertStatusAPI(Type, ActionName2, <<"connected">>),
            %% Re-enabling a1 must restore it without affecting a2 either.
            ?assertMatch(
                {204, _},
                emqx_bridge_v2_testlib:enable_kind_api(action, Type, ActionName1)
            ),
            ?assertStatusAPI(Type, ActionName1, <<"connected">>),
            ?assertStatusAPI(Type, ActionName2, <<"connected">>),
            ?assertMatch(ok, SendMessage()),
            %% Deleting also shouldn't disrupt a2.
            ?assertMatch(
                {204, _},
                emqx_bridge_v2_testlib:delete_kind_api(action, Type, ActionName1)
            ),
            ?assertStatusAPI(Type, ActionName2, <<"connected">>),
            ?assertMatch(ok, SendMessage()),
            ok
        end,
        []
    ),
    ok.

View File

@ -1,6 +1,6 @@
{application, emqx_bridge_rabbitmq, [
{description, "EMQX Enterprise RabbitMQ Bridge"},
{vsn, "0.2.0"},
{vsn, "0.2.1"},
{registered, []},
{mod, {emqx_bridge_rabbitmq_app, []}},
{applications, [

View File

@ -1,6 +1,6 @@
{application, emqx_bridge_redis, [
{description, "EMQX Enterprise Redis Bridge"},
{vsn, "0.1.7"},
{vsn, "0.1.8"},
{registered, []},
{applications, [
kernel,

View File

@ -19,6 +19,7 @@
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
-define(BRIDGE_TYPE, redis).
-define(BRIDGE_TYPE_BIN, <<"redis">>).
@ -46,6 +47,7 @@ matrix_testcases() ->
t_start_stop,
t_create_via_http,
t_on_get_status,
t_on_get_status_no_username_pass,
t_sync_query,
t_map_to_redis_hset_args
].
@ -325,6 +327,43 @@ t_on_get_status(Config) when is_list(Config) ->
emqx_bridge_v2_testlib:t_on_get_status(Config, #{failure_status => connecting}),
ok.
%% With blank username/password the Redis connector must report
%% `disconnected` (never connect). For single/sentinel deployments the
%% auth-required error is also expected in the trace; clustered Redis
%% does not emit that event.
t_on_get_status_no_username_pass(matrix) ->
    {on_get_status, [
        [single, tcp],
        [cluster, tcp],
        [sentinel, tcp]
    ]};
t_on_get_status_no_username_pass(Config0) when is_list(Config0) ->
    BaseConnConf = ?config(connector_config, Config0),
    %% Blank out both credential fields in the connector config.
    BlankedConf = lists:foldl(
        fun(Field, Acc) ->
            emqx_utils_maps:deep_put([<<"parameters">>, Field], Acc, <<"">>)
        end,
        BaseConnConf,
        [<<"password">>, <<"username">>]
    ),
    Config = [
        {connector_config, BlankedConf}
        | proplists:delete(connector_config, Config0)
    ],
    ?check_trace(
        emqx_bridge_v2_testlib:t_on_get_status(
            Config,
            #{
                failure_status => disconnected,
                normal_status => disconnected
            }
        ),
        fun(ok, Trace) ->
            case ?config(redis_type, Config) of
                cluster ->
                    ok;
                Type when Type =:= single; Type =:= sentinel ->
                    ?assertMatch([_ | _], ?of_kind(emqx_redis_auth_required_error, Trace))
            end
        end
    ),
    ok.
t_sync_query(matrix) ->
{sync_query, [
[single, tcp],

View File

@ -1,6 +1,6 @@
{application, emqx_bridge_rocketmq, [
{description, "EMQX Enterprise RocketMQ Bridge"},
{vsn, "0.2.1"},
{vsn, "0.2.2"},
{registered, []},
{applications, [kernel, stdlib, emqx_resource, rocketmq]},
{env, [

View File

@ -1,6 +1,6 @@
{application, emqx_bridge_s3, [
{description, "EMQX Enterprise S3 Bridge"},
{vsn, "0.1.2"},
{vsn, "0.1.5"},
{registered, []},
{applications, [
kernel,

View File

@ -146,29 +146,22 @@ on_stop(InstId, _State = #{pool_name := PoolName}) ->
on_get_status(_InstId, State = #{client_config := Config}) ->
case emqx_s3_client:aws_config(Config) of
{error, Reason} ->
{?status_disconnected, State, Reason};
{?status_disconnected, State, map_error_details(Reason)};
AWSConfig ->
try erlcloud_s3:list_buckets(AWSConfig) of
Props when is_list(Props) ->
?status_connected
catch
error:{aws_error, {http_error, _Code, _, Reason}} ->
{?status_disconnected, State, Reason};
error:{aws_error, {socket_error, Reason}} ->
{?status_disconnected, State, Reason}
error:Error ->
{?status_disconnected, State, map_error_details(Error)}
end
end.
-spec on_add_channel(_InstanceId :: resource_id(), state(), channel_id(), channel_config()) ->
{ok, state()} | {error, _Reason}.
on_add_channel(_InstId, State = #{channels := Channels}, ChannelId, Config) ->
try
ChannelState = start_channel(State, Config),
{ok, State#{channels => Channels#{ChannelId => ChannelState}}}
catch
throw:Reason ->
{error, Reason}
end.
{ok, State#{channels => Channels#{ChannelId => ChannelState}}}.
-spec on_remove_channel(_InstanceId :: resource_id(), state(), channel_id()) ->
{ok, state()}.
@ -217,7 +210,8 @@ start_channel(State, #{
max_records := MaxRecords
},
container := Container,
bucket := Bucket
bucket := Bucket,
key := Key
}
}) ->
AggregId = {Type, Name},
@ -226,7 +220,7 @@ start_channel(State, #{
max_records => MaxRecords,
work_dir => work_dir(Type, Name)
},
Template = ensure_ok(emqx_bridge_s3_upload:mk_key_template(Parameters)),
Template = emqx_bridge_s3_upload:mk_key_template(Key),
DeliveryOpts = #{
bucket => Bucket,
key => Template,
@ -253,11 +247,6 @@ start_channel(State, #{
on_stop => fun() -> ?AGGREG_SUP:delete_child(AggregId) end
}.
ensure_ok({ok, V}) ->
V;
ensure_ok({error, Reason}) ->
throw(Reason).
upload_options(Parameters) ->
#{acl => maps:get(acl, Parameters, undefined)}.
@ -285,7 +274,7 @@ channel_status(#{mode := aggregated, aggreg_id := AggregId, bucket := Bucket}, S
check_bucket_accessible(Bucket, #{client_config := Config}) ->
case emqx_s3_client:aws_config(Config) of
{error, Reason} ->
throw({unhealthy_target, Reason});
throw({unhealthy_target, map_error_details(Reason)});
AWSConfig ->
try erlcloud_s3:list_objects(Bucket, [{max_keys, 1}], AWSConfig) of
Props when is_list(Props) ->
@ -293,8 +282,8 @@ check_bucket_accessible(Bucket, #{client_config := Config}) ->
catch
error:{aws_error, {http_error, 404, _, _Reason}} ->
throw({unhealthy_target, "Bucket does not exist"});
error:{aws_error, {socket_error, Reason}} ->
throw({unhealthy_target, emqx_utils:format(Reason)})
error:Error ->
throw({unhealthy_target, map_error_details(Error)})
end
end.
@ -304,8 +293,7 @@ check_aggreg_upload_errors(AggregId) ->
%% TODO
%% This approach means that, for example, 3 upload failures will cause
%% the channel to be marked as unhealthy for 3 consecutive health checks.
ErrorMessage = emqx_utils:format(Error),
throw({unhealthy_target, ErrorMessage});
throw({unhealthy_target, map_error_details(Error)});
[] ->
ok
end.
@ -384,16 +372,38 @@ run_aggregated_upload(InstId, ChannelID, Records, #{aggreg_id := AggregId}) ->
?tp(s3_bridge_aggreg_push_ok, #{instance_id => InstId, name => AggregId}),
ok;
{error, Reason} ->
{error, {unrecoverable_error, Reason}}
{error, {unrecoverable_error, emqx_utils:explain_posix(Reason)}}
end.
map_error({socket_error, _} = Reason) ->
{recoverable_error, Reason};
map_error(Reason = {aws_error, Status, _, _Body}) when Status >= 500 ->
map_error(Error) ->
{map_error_class(Error), map_error_details(Error)}.
map_error_class({s3_error, _, _}) ->
unrecoverable_error;
map_error_class({aws_error, Error}) ->
map_error_class(Error);
map_error_class({socket_error, _}) ->
recoverable_error;
map_error_class({http_error, Status, _, _}) when Status >= 500 ->
%% https://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.html#ErrorCodeList
{recoverable_error, Reason};
map_error(Reason) ->
{unrecoverable_error, Reason}.
recoverable_error;
map_error_class(_Error) ->
unrecoverable_error.
%% Render a connector error term into a human-readable description.
%% Recursively unwraps `{aws_error, _}` / `{failed_to_obtain_credentials, _}`
%% / `{upload_failed, _}` wrappers; terms with no known shape pass through
%% unchanged.
map_error_details({s3_error, Code, Message}) ->
    emqx_utils:format("S3 error: ~s ~s", [Code, Message]);
map_error_details({aws_error, Error}) ->
    map_error_details(Error);
map_error_details({socket_error, Reason}) ->
    emqx_utils:format("Socket error: ~s", [emqx_utils:readable_error_msg(Reason)]);
map_error_details({http_error, _, _, _} = Error) ->
    %% HTTP-level failures carry an AWS XML error document in the body.
    emqx_utils:format("AWS error: ~s", [map_aws_error_details(Error)]);
map_error_details({failed_to_obtain_credentials, Error}) ->
    emqx_utils:format("Unable to obtain AWS credentials: ~s", [map_error_details(Error)]);
map_error_details({upload_failed, Error}) ->
    map_error_details(Error);
map_error_details(Error) ->
    Error.
render_bucket(Template, Data) ->
case emqx_template:render(Template, {emqx_jsonish, Data}) of
@ -416,6 +426,32 @@ render_content(Template, Data) ->
iolist_to_string(IOList) ->
unicode:characters_to_list(IOList).
%%
-include_lib("xmerl/include/xmerl.hrl").
%% Extract a "Code: Message" summary from an AWS XML error response body.
%% Falls back to the raw body when it is not parseable XML or not an
%% <Error> document.
-spec map_aws_error_details(_AWSError) ->
    unicode:chardata().
map_aws_error_details({http_error, _Status, _, Body}) ->
    %% `quiet` suppresses xmerl's console error output; genuine parse
    %% failures exit, which we catch and degrade to the raw body.
    try xmerl_scan:string(unicode:characters_to_list(Body), [{quiet, true}]) of
        {Error = #xmlElement{name = 'Error'}, _} ->
            map_aws_error_details(Error);
        _ ->
            Body
    catch
        exit:_ ->
            Body
    end;
map_aws_error_details(#xmlElement{content = Content}) ->
    Code = extract_xml_text(lists:keyfind('Code', #xmlElement.name, Content)),
    Message = extract_xml_text(lists:keyfind('Message', #xmlElement.name, Content)),
    [Code, $:, $\s | Message].
%% Collect the character-data fragments of an XML element into an iolist.
%% `false` stands for a lists:keyfind/3 miss upstream and yields [].
extract_xml_text(false) ->
    [];
extract_xml_text(#xmlElement{content = Children}) ->
    lists:filtermap(
        fun
            (#xmlText{value = Fragment}) -> {true, Fragment};
            (_) -> false
        end,
        Children
    ).
%% `emqx_connector_aggreg_delivery` APIs
-spec init_transfer_state(buffer(), map()) -> emqx_s3_upload:t().

View File

@ -29,7 +29,10 @@
]).
%% Internal exports
-export([convert_actions/2]).
-export([
convert_actions/2,
validate_key_template/1
]).
-define(DEFAULT_AGGREG_BATCH_SIZE, 100).
-define(DEFAULT_AGGREG_BATCH_TIME, <<"10ms">>).
@ -137,7 +140,10 @@ fields(s3_aggregated_upload_parameters) ->
)}
],
emqx_resource_schema:override(emqx_s3_schema:fields(s3_upload), [
{key, #{desc => ?DESC(s3_aggregated_upload_key)}}
{key, #{
desc => ?DESC(s3_aggregated_upload_key),
validator => fun ?MODULE:validate_key_template/1
}}
]),
emqx_s3_schema:fields(s3_uploader)
]);
@ -246,23 +252,13 @@ convert_action(Conf = #{<<"parameters">> := Params, <<"resource_opts">> := Resou
Conf#{<<"resource_opts">> := NResourceOpts}
end.
%% Interpreting options
-spec mk_key_template(_Parameters :: map()) ->
{ok, emqx_template:str()} | {error, _Reason}.
mk_key_template(#{key := Key}) ->
Template = emqx_template:parse(Key),
validate_key_template(Conf) ->
Template = emqx_template:parse(Conf),
case validate_bindings(emqx_template:placeholders(Template)) of
UsedBindings when is_list(UsedBindings) ->
SuffixTemplate = mk_suffix_template(UsedBindings),
case emqx_template:is_const(SuffixTemplate) of
true ->
{ok, Template};
false ->
{ok, Template ++ SuffixTemplate}
end;
Error = {error, _} ->
Error
Bindings when is_list(Bindings) ->
ok;
{error, {disallowed_placeholders, Disallowed}} ->
{error, emqx_utils:format("Template placeholders are disallowed: ~p", [Disallowed])}
end.
validate_bindings(Bindings) ->
@ -276,7 +272,22 @@ validate_bindings(Bindings) ->
[] ->
Bindings;
Disallowed ->
{error, {invalid_key_template, {disallowed_placeholders, Disallowed}}}
{error, {disallowed_placeholders, Disallowed}}
end.
%% Interpreting options
%% Parse the configured S3 object-key template. When the computed suffix
%% template is non-constant it is appended to the parsed key template —
%% presumably so generated object keys stay unique when the user's key
%% omits distinguishing placeholders (NOTE(review): confirm intent against
%% mk_suffix_template/1, defined below).
-spec mk_key_template(unicode:chardata()) ->
    emqx_template:str().
mk_key_template(Key) ->
    Template = emqx_template:parse(Key),
    UsedBindings = emqx_template:placeholders(Template),
    SuffixTemplate = mk_suffix_template(UsedBindings),
    case emqx_template:is_const(SuffixTemplate) of
        true ->
            Template;
        false ->
            Template ++ SuffixTemplate
    end.
mk_suffix_template(UsedBindings) ->

View File

@ -134,6 +134,22 @@ action_config(Name, ConnectorId) ->
t_start_stop(Config) ->
emqx_bridge_v2_testlib:t_start_stop(Config, s3_bridge_stopped).
%% Creating an S3 connector without static credentials must still return
%% 201, but the credential-resolution failure has to be surfaced in the
%% connector's `status_reason`.
t_create_unavailable_credentials(Config) ->
    Name = ?config(connector_name, Config),
    Type = ?config(connector_type, Config),
    %% Strip the static credentials from the base connector config.
    ConnConf = maps:without(
        [<<"access_key_id">>, <<"secret_access_key">>],
        ?config(connector_config, Config)
    ),
    CreateRes = emqx_bridge_v2_testlib:create_connector_api(Name, Type, ConnConf),
    ?assertMatch(
        {ok,
            {{_HTTP, 201, _}, _, #{
                <<"status_reason">> :=
                    <<"Unable to obtain AWS credentials:", _/bytes>>
            }}},
        CreateRes
    ).
t_ignore_batch_opts(Config) ->
{ok, {_Status, _, Bridge}} = emqx_bridge_v2_testlib:create_bridge_api(Config),
?assertMatch(
@ -159,6 +175,13 @@ t_start_broken_update_restart(Config) ->
_Attempts = 20,
?assertEqual({ok, disconnected}, emqx_resource_manager:health_check(ConnectorId))
),
?assertMatch(
{ok,
{{_HTTP, 200, _}, _, #{
<<"status_reason">> := <<"AWS error: SignatureDoesNotMatch:", _/bytes>>
}}},
emqx_bridge_v2_testlib:get_connector_api(Type, Name)
),
?assertMatch(
{ok, {{_HTTP, 200, _}, _, _}},
emqx_bridge_v2_testlib:update_connector_api(Name, Type, ConnectorConf)

View File

@ -177,6 +177,27 @@ t_create_invalid_config(Config) ->
)
).
%% Creating a bridge whose key template uses disallowed placeholders must
%% be rejected with a BAD_REQUEST validation error pointing at
%% root.parameters.key.
t_create_invalid_config_key_template(Config) ->
    ?assertMatch(
        {error,
            {_Status, _, #{
                <<"code">> := <<"BAD_REQUEST">>,
                <<"message">> := #{
                    <<"kind">> := <<"validation_error">>,
                    <<"reason">> := <<"Template placeholders are disallowed:", _/bytes>>,
                    <<"path">> := <<"root.parameters.key">>
                }
            }}},
        emqx_bridge_v2_testlib:create_bridge_api(
            Config,
            %% ${foo} / ${bar.rfc3339} are not in the allowed binding set.
            _Overrides = #{
                <<"parameters">> => #{
                    <<"key">> => <<"${action}/${foo}:${bar.rfc3339}">>
                }
            }
        )
    ).
t_update_invalid_config(Config) ->
?assertMatch({ok, _Bridge}, emqx_bridge_v2_testlib:create_bridge(Config)),
?assertMatch(

View File

@ -1,6 +1,6 @@
{application, emqx_bridge_sqlserver, [
{description, "EMQX Enterprise SQL Server Bridge"},
{vsn, "0.2.1"},
{vsn, "0.2.2"},
{registered, []},
{applications, [kernel, stdlib, emqx_resource, odbc]},
{env, [

View File

@ -1,6 +1,6 @@
{application, emqx_bridge_syskeeper, [
{description, "EMQX Enterprise Data bridge for Syskeeper"},
{vsn, "0.1.2"},
{vsn, "0.1.3"},
{registered, []},
{applications, [
kernel,

View File

@ -1,6 +1,6 @@
{application, emqx_bridge_tdengine, [
{description, "EMQX Enterprise TDEngine Bridge"},
{vsn, "0.2.0"},
{vsn, "0.2.1"},
{registered, []},
{applications, [
kernel,

View File

@ -1,6 +1,6 @@
{application, emqx_conf, [
{description, "EMQX configuration management"},
{vsn, "0.2.1"},
{vsn, "0.2.3"},
{registered, []},
{mod, {emqx_conf_app, []}},
{applications, [kernel, stdlib]},

View File

@ -163,8 +163,13 @@ dump_schema(Dir, SchemaModule) ->
),
emqx_dashboard:save_dispatch_eterm(SchemaModule).
load(emqx_enterprise_schema, emqx_telemetry) -> ignore;
load(_, Lib) -> ok = application:load(Lib).
%% Load an application for schema dumping, except the enterprise-schema /
%% telemetry combination which is intentionally skipped.
%% `already_loaded` counts as success so repeated invocations are idempotent.
load(emqx_enterprise_schema, emqx_telemetry) ->
    ignore;
load(_SchemaModule, App) ->
    ok = normalize_load_result(application:load(App)).

%% Collapse application:load/1 results: a previous load is as good as a
%% fresh one; any other error crashes the caller (intentionally).
normalize_load_result(ok) -> ok;
normalize_load_result({error, {already_loaded, _}}) -> ok.
%% for scripts/spellcheck.
gen_schema_json(Dir, SchemaModule, Lang) ->

View File

@ -74,13 +74,14 @@ end_per_testcase(_Config) ->
t_base_test(_Config) ->
?assertEqual(emqx_cluster_rpc:status(), {atomic, []}),
Pid = self(),
MFA = {M, F, A} = {?MODULE, echo, [Pid, test]},
Msg = ?FUNCTION_NAME,
MFA = {M, F, A} = {?MODULE, echo, [Pid, Msg]},
{ok, TnxId, ok} = multicall(M, F, A),
{atomic, Query} = emqx_cluster_rpc:query(TnxId),
?assertEqual(MFA, maps:get(mfa, Query)),
?assertEqual(node(), maps:get(initiator, Query)),
?assert(maps:is_key(created_at, Query)),
?assertEqual(ok, receive_msg(3, test)),
?assertEqual(ok, receive_msg(3, Msg)),
?assertEqual({ok, 2, ok}, multicall(M, F, A)),
{atomic, Status} = emqx_cluster_rpc:status(),
case length(Status) =:= 3 of
@ -118,9 +119,10 @@ t_commit_ok_but_apply_fail_on_other_node(_Config) ->
emqx_cluster_rpc:reset(),
{atomic, []} = emqx_cluster_rpc:status(),
Pid = self(),
{BaseM, BaseF, BaseA} = {?MODULE, echo, [Pid, test]},
Msg = ?FUNCTION_NAME,
{BaseM, BaseF, BaseA} = {?MODULE, echo, [Pid, Msg]},
{ok, _TnxId, ok} = multicall(BaseM, BaseF, BaseA),
?assertEqual(ok, receive_msg(3, test)),
?assertEqual(ok, receive_msg(3, Msg)),
{M, F, A} = {?MODULE, failed_on_node, [erlang:whereis(?NODE1)]},
{ok, _, ok} = multicall(M, F, A, 1, 1000),
@ -154,9 +156,10 @@ t_commit_ok_but_apply_fail_on_other_node(_Config) ->
t_commit_concurrency(_Config) ->
{atomic, []} = emqx_cluster_rpc:status(),
Pid = self(),
{BaseM, BaseF, BaseA} = {?MODULE, echo, [Pid, test]},
{ok, _TnxId, ok} = multicall(BaseM, BaseF, BaseA),
?assertEqual(ok, receive_msg(3, test)),
Msg = ?FUNCTION_NAME,
{BaseM, BaseF, BaseA} = {?MODULE, echo, [Pid, Msg]},
?assertEqual({ok, 1, ok}, multicall(BaseM, BaseF, BaseA)),
?assertEqual(ok, receive_msg(3, Msg)),
%% call concurrently without stale tnx_id error
Workers = lists:seq(1, 256),
@ -231,23 +234,24 @@ t_commit_ok_apply_fail_on_other_node_then_recover(_Config) ->
{atomic, [_Status | L]} = emqx_cluster_rpc:status(),
?assertEqual([], L),
ets:insert(test, {other_mfa_result, ok}),
{ok, 2, ok} = multicall(io, format, ["test"], 1, 1000),
{ok, 2, ok} = multicall(io, format, ["format:~p~n", [?FUNCTION_NAME]], 1, 1000),
ct:sleep(1000),
{atomic, NewStatus} = emqx_cluster_rpc:status(),
?assertEqual(3, length(NewStatus)),
Pid = self(),
MFAEcho = {M1, F1, A1} = {?MODULE, echo, [Pid, test]},
Msg = ?FUNCTION_NAME,
MFAEcho = {M1, F1, A1} = {?MODULE, echo, [Pid, Msg]},
{ok, TnxId, ok} = multicall(M1, F1, A1),
{atomic, Query} = emqx_cluster_rpc:query(TnxId),
?assertEqual(MFAEcho, maps:get(mfa, Query)),
?assertEqual(node(), maps:get(initiator, Query)),
?assert(maps:is_key(created_at, Query)),
?assertEqual(ok, receive_msg(3, test)),
?assertEqual(ok, receive_msg(3, Msg)),
ok.
t_del_stale_mfa(_Config) ->
{atomic, []} = emqx_cluster_rpc:status(),
MFA = {M, F, A} = {io, format, ["test"]},
MFA = {M, F, A} = {io, format, ["format:~p~n", [?FUNCTION_NAME]]},
Keys = lists:seq(1, 50),
Keys2 = lists:seq(51, 150),
Ids =
@ -288,7 +292,7 @@ t_del_stale_mfa(_Config) ->
t_skip_failed_commit(_Config) ->
{atomic, []} = emqx_cluster_rpc:status(),
{ok, 1, ok} = multicall(io, format, ["test~n"], all, 1000),
{ok, 1, ok} = multicall(io, format, ["format:~p~n", [?FUNCTION_NAME]], all, 1000),
ct:sleep(180),
{atomic, List1} = emqx_cluster_rpc:status(),
Node = node(),
@ -308,7 +312,7 @@ t_skip_failed_commit(_Config) ->
t_fast_forward_commit(_Config) ->
{atomic, []} = emqx_cluster_rpc:status(),
{ok, 1, ok} = multicall(io, format, ["test~n"], all, 1000),
{ok, 1, ok} = multicall(io, format, ["format:~p~n", [?FUNCTION_NAME]], all, 1000),
ct:sleep(180),
{atomic, List1} = emqx_cluster_rpc:status(),
Node = node(),
@ -356,7 +360,11 @@ tnx_ids(Status) ->
start() ->
{ok, _Pid2} = emqx_cluster_rpc:start_link({node(), ?NODE2}, ?NODE2, 500),
{ok, _Pid3} = emqx_cluster_rpc:start_link({node(), ?NODE3}, ?NODE3, 500),
ok = emqx_cluster_rpc:wait_for_cluster_rpc(),
ok = emqx_cluster_rpc:reset(),
%% Ensure all processes are idle status.
ok = gen_server:call(?NODE2, test),
ok = gen_server:call(?NODE3, test),
ok.
stop() ->
@ -366,6 +374,7 @@ stop() ->
undefined ->
ok;
P ->
erlang:unregister(N),
erlang:unlink(P),
erlang:exit(P, kill)
end
@ -379,8 +388,9 @@ receive_msg(Count, Msg) when Count > 0 ->
receive
Msg ->
receive_msg(Count - 1, Msg)
after 1000 ->
timeout
after 1300 ->
Msg = iolist_to_binary(io_lib:format("There's still ~w messages to be received", [Count])),
{Msg, flush_msg([])}
end.
echo(Pid, Msg) ->
@ -425,3 +435,11 @@ multicall(M, F, A, N, T) ->
multicall(M, F, A) ->
multicall(M, F, A, all, timer:minutes(2)).
%% Drain the caller's mailbox, accumulating every pending message
%% (most recent first). Stops once the mailbox stays empty for 10ms.
flush_msg(Acc) ->
    receive
        AnyMsg ->
            flush_msg([AnyMsg | Acc])
    after 10 ->
        Acc
    end.

View File

@ -1,7 +1,7 @@
%% -*- mode: erlang -*-
{application, emqx_connector, [
{description, "EMQX Data Integration Connectors"},
{vsn, "0.3.1"},
{vsn, "0.3.3"},
{registered, []},
{mod, {emqx_connector_app, []}},
{applications, [

View File

@ -1,6 +1,6 @@
{application, emqx_connector_aggregator, [
{description, "EMQX Enterprise Connector Data Aggregator"},
{vsn, "0.1.1"},
{vsn, "0.1.2"},
{registered, []},
{applications, [
kernel,

View File

@ -2,7 +2,7 @@
{application, emqx_dashboard, [
{description, "EMQX Web Dashboard"},
% strict semver, bump manually!
{vsn, "5.1.1"},
{vsn, "5.1.3"},
{modules, []},
{registered, [emqx_dashboard_sup]},
{applications, [

View File

@ -4,5 +4,6 @@
{deps, [
{emqx_ldap, {path, "../../apps/emqx_ldap"}},
{emqx_dashboard, {path, "../../apps/emqx_dashboard"}},
{esaml, {git, "https://github.com/emqx/esaml", {tag, "v1.1.3"}}}
{esaml, {git, "https://github.com/emqx/esaml", {tag, "v1.1.3"}}},
{oidcc, {git, "https://github.com/emqx/oidcc.git", {tag, "v3.2.0-1"}}}
]}.

View File

@ -7,7 +7,8 @@
stdlib,
emqx_dashboard,
emqx_ldap,
esaml
esaml,
oidcc
]},
{mod, {emqx_dashboard_sso_app, []}},
{env, []},

View File

@ -92,7 +92,8 @@ provider(Backend) ->
backends() ->
#{
ldap => emqx_dashboard_sso_ldap,
saml => emqx_dashboard_sso_saml
saml => emqx_dashboard_sso_saml,
oidc => emqx_dashboard_sso_oidc
}.
format(Args) ->

View File

@ -33,7 +33,7 @@
backend/2
]).
-export([sso_parameters/1, login_meta/3]).
-export([sso_parameters/1, login_meta/4]).
-define(REDIRECT, 'REDIRECT').
-define(BAD_USERNAME_OR_PWD, 'BAD_USERNAME_OR_PWD').
@ -168,7 +168,7 @@ login(post, #{bindings := #{backend := Backend}, body := Body} = Request) ->
request => emqx_utils:redact(Request)
}),
Username = maps:get(<<"username">>, Body),
{200, login_meta(Username, Role, Token)};
{200, login_meta(Username, Role, Token, Backend)};
{redirect, Redirect} ->
?SLOG(info, #{
msg => "dashboard_sso_login_redirect",
@ -286,11 +286,12 @@ to_redacted_json(Data) ->
end
).
login_meta(Username, Role, Token) ->
login_meta(Username, Role, Token, Backend) ->
#{
username => Username,
role => Role,
token => Token,
version => iolist_to_binary(proplists:get_value(version, emqx_sys:info())),
license => #{edition => emqx_release:edition()}
license => #{edition => emqx_release:edition()},
backend => Backend
}.

View File

@ -17,6 +17,7 @@
handle_call/3,
handle_cast/2,
handle_info/2,
handle_continue/2,
terminate/2,
code_change/3,
format_status/2
@ -106,7 +107,14 @@ get_backend_status(Backend, _) ->
end.
update(Backend, Config) ->
update_config(Backend, {?FUNCTION_NAME, Backend, Config}).
UpdateConf =
case emqx:get_raw_config(?MOD_KEY_PATH(Backend), #{}) of
RawConf when is_map(RawConf) ->
emqx_utils:deobfuscate(Config, RawConf);
null ->
Config
end,
update_config(Backend, {?FUNCTION_NAME, Backend, UpdateConf}).
delete(Backend) ->
update_config(Backend, {?FUNCTION_NAME, Backend}).
@ -154,8 +162,7 @@ init([]) ->
{read_concurrency, true}
]
),
start_backend_services(),
{ok, #{}}.
{ok, #{}, {continue, start_backend_services}}.
handle_call(_Request, _From, State) ->
Reply = ok,
@ -167,6 +174,12 @@ handle_cast(_Request, State) ->
handle_info(_Info, State) ->
{noreply, State}.
handle_continue(start_backend_services, State) ->
start_backend_services(),
{noreply, State};
handle_continue(_Info, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
remove_handler(),
ok.

View File

@ -0,0 +1,294 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2023-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%%--------------------------------------------------------------------
-module(emqx_dashboard_sso_oidc).
-include_lib("emqx_dashboard/include/emqx_dashboard.hrl").
-include_lib("emqx/include/logger.hrl").
-include_lib("hocon/include/hoconsc.hrl").
-behaviour(emqx_dashboard_sso).
-export([
namespace/0,
fields/1,
desc/1
]).
-export([
hocon_ref/0,
login_ref/0,
login/2,
create/1,
update/2,
destroy/1,
convert_certs/2
]).
-define(PROVIDER_SVR_NAME, ?MODULE).
-define(RESPHEADERS, #{
<<"cache-control">> => <<"no-cache">>,
<<"pragma">> => <<"no-cache">>,
<<"content-type">> => <<"text/plain">>
}).
-define(REDIRECT_BODY, <<"Redirecting...">>).
-define(PKCE_VERIFIER_LEN, 60).
%%------------------------------------------------------------------------------
%% Hocon Schema
%%------------------------------------------------------------------------------
namespace() ->
"sso".
hocon_ref() ->
hoconsc:ref(?MODULE, oidc).
login_ref() ->
hoconsc:ref(?MODULE, login).
fields(oidc) ->
emqx_dashboard_sso_schema:common_backend_schema([oidc]) ++
[
{issuer,
?HOCON(
binary(),
#{desc => ?DESC(issuer), required => true}
)},
{clientid,
?HOCON(
binary(),
#{desc => ?DESC(clientid), required => true}
)},
{secret,
emqx_schema_secret:mk(
maps:merge(#{desc => ?DESC(secret), required => true}, #{})
)},
{scopes,
?HOCON(
?ARRAY(binary()),
#{desc => ?DESC(scopes), default => [<<"openid">>]}
)},
{name_var,
?HOCON(
binary(),
#{desc => ?DESC(name_var), default => <<"${sub}">>}
)},
{dashboard_addr,
?HOCON(binary(), #{
desc => ?DESC(dashboard_addr),
default => <<"http://127.0.0.1:18083">>
})},
{session_expiry,
?HOCON(emqx_schema:timeout_duration_s(), #{
desc => ?DESC(session_expiry),
default => <<"30s">>
})},
{require_pkce,
?HOCON(boolean(), #{
desc => ?DESC(require_pkce),
default => false
})},
{preferred_auth_methods,
?HOCON(
?ARRAY(
?ENUM([
private_key_jwt,
client_secret_jwt,
client_secret_post,
client_secret_basic,
none
])
),
#{
desc => ?DESC(preferred_auth_methods),
default => [
client_secret_post,
client_secret_basic,
none
]
}
)},
{provider,
?HOCON(?ENUM([okta, generic]), #{
mapping => "oidcc.provider",
desc => ?DESC(provider),
default => generic
})},
{fallback_methods,
?HOCON(?ARRAY(binary()), #{
mapping => "oidcc.fallback_methods",
desc => ?DESC(fallback_methods),
default => [<<"RS256">>]
})},
{client_jwks,
%% TODO: add url JWKS
?HOCON(?UNION([none, ?R_REF(client_file_jwks)]), #{
desc => ?DESC(client_jwks),
default => none
})}
];
fields(client_file_jwks) ->
[
{type,
?HOCON(?ENUM([file]), #{
desc => ?DESC(client_file_jwks_type),
required => true
})},
{file,
?HOCON(binary(), #{
desc => ?DESC(client_file_jwks_file),
required => true
})}
];
fields(login) ->
[
emqx_dashboard_sso_schema:backend_schema([oidc])
].
desc(oidc) ->
"OIDC";
desc(client_file_jwks) ->
?DESC(client_file_jwks);
desc(_) ->
undefined.
%%------------------------------------------------------------------------------
%% APIs
%%------------------------------------------------------------------------------
%% Start the OIDC provider session and build this backend's state map.
%% Returns {ok, State} or propagates the session-start error.
create(#{name_var := NameVar} = Config) ->
    case
        emqx_dashboard_sso_oidc_session:start(
            ?PROVIDER_SVR_NAME,
            Config
        )
    of
        {error, _} = Error ->
            Error;
        _ ->
            %% Note: the oidcc maintains an ETS with the same name of the provider gen_server,
            %% we should use this name in each API calls not the PID,
            %% or it would backoff to sync calls to the gen_server
            ClientJwks = init_client_jwks(Config),
            {ok, #{
                name => ?PROVIDER_SVR_NAME,
                config => Config,
                client_jwks => ClientJwks,
                %% pre-compiled template used to derive the dashboard
                %% username from the OIDC token claims
                name_tokens => emqx_placeholder:preproc_tmpl(NameVar)
            }}
    end.
%% Reconfigure by full restart: tear the old provider down, then create a
%% fresh one from the new config.
update(Config, State) ->
    destroy(State),
    create(Config).
%% Stop the provider session and remove any client JWKS file saved on disk.
destroy(State) ->
    emqx_dashboard_sso_oidc_session:stop(),
    try_delete_jwks_file(State).
-dialyzer({nowarn_function, login/2}).
%% Begin an OIDC login: allocate a session (nonce, optionally a PKCE
%% verifier), then answer with a 302 redirect to the OP's authorization
%% endpoint, or {error, Reason} if the redirect URL cannot be built.
login(
    _Req,
    #{
        client_jwks := ClientJwks,
        config := #{
            clientid := ClientId,
            secret := Secret,
            scopes := Scopes,
            require_pkce := RequirePKCE,
            preferred_auth_methods := AuthMethods
        }
    } = Cfg
) ->
    Nonce = emqx_dashboard_sso_oidc_session:random_bin(),
    Opts = maybe_require_pkce(RequirePKCE, #{
        scopes => Scopes,
        nonce => Nonce,
        redirect_uri => emqx_dashboard_sso_oidc_api:make_callback_url(Cfg)
    }),
    %% The opaque OAuth `state` parameter doubles as the session key; the
    %% session keeps what is needed to validate the callback later.
    Data = maps:with([nonce, require_pkce, pkce_verifier], Opts),
    State = emqx_dashboard_sso_oidc_session:new(Data),
    case
        oidcc:create_redirect_url(
            ?PROVIDER_SVR_NAME,
            ClientId,
            emqx_secret:unwrap(Secret),
            Opts#{
                state => State,
                client_jwks => ClientJwks,
                preferred_auth_methods => AuthMethods
            }
        )
    of
        {ok, [Base, Delimiter, Params]} ->
            RedirectUri = <<Base/binary, Delimiter/binary, Params/binary>>,
            Redirect = {302, ?RESPHEADERS#{<<"location">> => RedirectUri}, ?REDIRECT_BODY},
            {redirect, Redirect};
        {error, _Reason} = Error ->
            Error
    end.
%% Persist an inline file-type client JWKS to disk under `Dir`, replacing
%% the raw content with the saved file path in the returned config.
%% Configs without a file-type JWKS pass through untouched. Throws on
%% write failure (after logging the reason).
convert_certs(
    Dir,
    #{
        <<"client_jwks">> := #{
            <<"type">> := file,
            <<"file">> := Content
        } = Jwks
    } = Conf
) ->
    case save_jwks_file(Dir, Content) of
        {ok, Path} ->
            Conf#{<<"client_jwks">> := Jwks#{<<"file">> := Path}};
        {error, Reason} ->
            ?SLOG(error, #{msg => "failed_to_save_client_jwks", reason => Reason}),
            throw("Failed to save client jwks")
    end;
convert_certs(_Dir, Conf) ->
    Conf.
%%------------------------------------------------------------------------------
%% Internal functions
%%------------------------------------------------------------------------------
%% Persist the client JWKS content as "client_jwks" under the TLS PEM
%% directory derived from `Dir`. Returns {ok, Path} on success and
%% {error, DetailsMap} when the directory or file cannot be created.
save_jwks_file(Dir, Content) ->
    Path = filename:join([emqx_tls_lib:pem_dir(Dir), "client_jwks"]),
    case filelib:ensure_dir(Path) of
        {error, Reason} ->
            {error, #{failed_to_create_dir_for => Path, reason => Reason}};
        ok ->
            write_jwks_file(Path, Content)
    end.

%% Write the JWKS content, tagging failures with the target path.
write_jwks_file(Path, Content) ->
    case file:write_file(Path, Content) of
        ok ->
            {ok, Path};
        {error, Reason} ->
            {error, #{failed_to_write_file => Reason, file_path => Path}}
    end.
%% Best-effort removal of an on-disk client JWKS file. States without a
%% file-based JWKS are a no-op; deletion errors are deliberately ignored.
try_delete_jwks_file(#{config := #{client_jwks := #{type := file, file := Path}}}) ->
    _ = file:delete(Path),
    ok;
try_delete_jwks_file(_State) ->
    ok.
%% When PKCE is required, add the flag plus a freshly generated code
%% verifier (?PKCE_VERIFIER_LEN characters) to the redirect options;
%% otherwise return the options unchanged.
maybe_require_pkce(false, Opts) ->
    Opts;
maybe_require_pkce(true, Opts) ->
    Opts#{
        require_pkce => true,
        pkce_verifier => emqx_dashboard_sso_oidc_session:random_bin(?PKCE_VERIFIER_LEN)
    }.
%% Load the client JWKS from disk when a file-type JWKS is configured.
%% Any load error is degraded to `none` (i.e. no client JWKS) instead of
%% failing backend creation.
init_client_jwks(#{client_jwks := #{type := file, file := File}}) ->
    case jose_jwk:from_file(File) of
        {error, _} ->
            none;
        Jwks ->
            Jwks
    end;
init_client_jwks(_) ->
    none.

View File

@ -0,0 +1,214 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2023-2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%%--------------------------------------------------------------------
-module(emqx_dashboard_sso_oidc_api).
-behaviour(minirest_api).
-include_lib("hocon/include/hoconsc.hrl").
-include_lib("emqx/include/logger.hrl").
-include_lib("emqx_dashboard/include/emqx_dashboard.hrl").
-import(hoconsc, [
mk/2,
array/1,
enum/1,
ref/1
]).
-import(emqx_dashboard_sso_api, [login_meta/3]).
-export([
api_spec/0,
paths/0,
schema/1,
namespace/0
]).
-export([code_callback/2, make_callback_url/1]).
-define(BAD_USERNAME_OR_PWD, 'BAD_USERNAME_OR_PWD').
-define(BACKEND_NOT_FOUND, 'BACKEND_NOT_FOUND').
-define(RESPHEADERS, #{
<<"cache-control">> => <<"no-cache">>,
<<"pragma">> => <<"no-cache">>,
<<"content-type">> => <<"text/plain">>
}).
-define(REDIRECT_BODY, <<"Redirecting...">>).
-define(TAGS, <<"Dashboard Single Sign-On">>).
-define(BACKEND, oidc).
-define(BASE_PATH, "/api/v5").
-define(CALLBACK_PATH, "/sso/oidc/callback").
%% Swagger namespace for the dashboard SSO API group.
namespace() -> "dashboard_sso".
%% OpenAPI spec for this module; schema checking and body translation
%% are disabled for this endpoint.
api_spec() ->
    emqx_dashboard_swagger:spec(?MODULE, #{check_schema => false, translate_body => false}).
%% Only one path is served: the OIDC authorization-code callback.
paths() ->
    [
        ?CALLBACK_PATH
    ].
%% Handles Authorization Code callback from the OP.
%% Minirest schema for GET /sso/oidc/callback: 200 with the dashboard
%% login fields (token/version/license), 401 on login failure, 404 when
%% the OIDC backend is not found. `security => []' means no security
%% scheme is applied to this path.
schema("/sso/oidc/callback") ->
    #{
        'operationId' => code_callback,
        get => #{
            tags => [?TAGS],
            desc => ?DESC(code_callback),
            responses => #{
                200 => emqx_dashboard_api:fields([token, version, license]),
                401 => response_schema(401),
                404 => response_schema(404)
            },
            security => []
        }
    }.
%%--------------------------------------------------------------------
%% API
%%--------------------------------------------------------------------
%% GET handler for the OIDC authorization-code callback. Validates the
%% callback query string against a stored session and either
%% 302-redirects the browser into the dashboard, answers 404 when the
%% OIDC backend is not configured, or 401 on any other failure.
code_callback(get, #{query_string := QS}) ->
    case ensure_sso_state(QS) of
        {ok, Target} ->
            ?SLOG(info, #{
                msg => "dashboard_sso_login_successful"
            }),
            %% send the browser to the dashboard carrying the login metadata
            {302, ?RESPHEADERS#{<<"location">> => Target}, ?REDIRECT_BODY};
        {error, invalid_backend} ->
            {404, #{code => ?BACKEND_NOT_FOUND, message => <<"Backend not found">>}};
        {error, Reason} ->
            ?SLOG(info, #{
                msg => "dashboard_sso_login_failed",
                %% redact potentially sensitive details before logging
                reason => emqx_utils:redact(Reason)
            }),
            {401, #{code => ?BAD_USERNAME_OR_PWD, message => reason_to_message(Reason)}}
    end.
%%--------------------------------------------------------------------
%% internal
%%--------------------------------------------------------------------
%% Error-response schemas for the callback endpoint: 401 for failed
%% logins, 404 for a missing/disabled OIDC backend.
response_schema(401) ->
    emqx_dashboard_swagger:error_codes([?BAD_USERNAME_OR_PWD], ?DESC(login_failed401));
response_schema(404) ->
    emqx_dashboard_swagger:error_codes([?BACKEND_NOT_FOUND], ?DESC(backend_not_found)).
%% Render an error reason as a binary for the HTTP response body.
%% Binary reasons pass through untouched; any other term is
%% pretty-printed with `~p'.
reason_to_message(Reason) when is_binary(Reason) ->
    Reason;
reason_to_message(Reason) ->
    Formatted = io_lib:format("~p", [Reason]),
    iolist_to_binary(Formatted).
%% Resolve the runtime state of the OIDC backend and validate the
%% callback against it; `{error, invalid_backend}' (rendered as 404)
%% when no OIDC backend state is registered.
ensure_sso_state(QS) ->
    case emqx_dashboard_sso_manager:lookup_state(?BACKEND) of
        undefined ->
            {error, invalid_backend};
        BackendCfg ->
            ensure_oidc_state(QS, BackendCfg)
    end.
%% Validate the `state' parameter from the callback against the stored
%% session. Sessions are single-use: a matching entry is deleted before
%% the token exchange so the same callback cannot be replayed.
%% A query string without `state' (callers cannot be trusted to supply
%% it) maps to `{error, session_not_exists}' instead of crashing with
%% `function_clause', so the caller renders a 401 rather than a 500.
ensure_oidc_state(#{<<"state">> := State} = QS, Cfg) ->
    case emqx_dashboard_sso_oidc_session:lookup(State) of
        {ok, Data} ->
            emqx_dashboard_sso_oidc_session:delete(State),
            retrieve_token(QS, Cfg, Data);
        _ ->
            {error, session_not_exists}
    end;
ensure_oidc_state(_QS, _Cfg) ->
    {error, session_not_exists}.
%% Exchange the authorization `code' from the callback query for a
%% token at the OP's token endpoint, then fetch the user info.
%% A query string without `code' (it comes from an untrusted HTTP
%% request) yields `{error, missing_authorization_code}' — reported as
%% 401 upstream — instead of a `function_clause' crash. The backend
%% config shape is still asserted with a match, so a malformed internal
%% `Cfg' crashes just as before.
retrieve_token(#{<<"code">> := Code}, Cfg, Data) ->
    #{
        name := Name,
        client_jwks := ClientJwks,
        config := #{
            clientid := ClientId,
            secret := Secret,
            preferred_auth_methods := AuthMethods
        }
    } = Cfg,
    case
        oidcc:retrieve_token(
            Code,
            Name,
            ClientId,
            emqx_secret:unwrap(Secret),
            Data#{
                redirect_uri => make_callback_url(Cfg),
                client_jwks => ClientJwks,
                preferred_auth_methods => AuthMethods
            }
        )
    of
        {ok, Token} ->
            retrieve_userinfo(Token, Cfg);
        {error, _Reason} = Error ->
            Error
    end;
retrieve_token(_QS, _Cfg, _Data) ->
    {error, missing_authorization_code}.
%% Fetch the user info for `Token' from the OP, resolve the dashboard
%% username by rendering the configured `name_tokens' template against
%% the returned claims, then ensure a dashboard user exists for it.
retrieve_userinfo(
    Token,
    #{
        name := Name,
        client_jwks := ClientJwks,
        config := #{clientid := ClientId, secret := Secret},
        name_tokens := NameTks
    } = Cfg
) ->
    case
        oidcc:retrieve_userinfo(
            Token,
            Name,
            ClientId,
            emqx_secret:unwrap(Secret),
            #{client_jwks => ClientJwks}
        )
    of
        {ok, UserInfo} ->
            %% NOTE(review): logs the full user-info claims at debug level;
            %% these may include PII — confirm this is acceptable.
            ?SLOG(debug, #{
                msg => "sso_oidc_login_user_info",
                user_info => UserInfo
            }),
            Username = emqx_placeholder:proc_tmpl(NameTks, UserInfo),
            ensure_user_exists(Cfg, Username);
        {error, _Reason} = Error ->
            Error
    end.
-dialyzer({nowarn_function, ensure_user_exists/2}).
%% Look up (or lazily create) the dashboard user for `Username' and
%% sign a dashboard token for it. Empty and literal `"undefined"'
%% usernames are rejected — presumably these mean the name template did
%% not resolve against the user info.
ensure_user_exists(_Cfg, <<>>) ->
    {error, <<"Username can not be empty">>};
ensure_user_exists(_Cfg, <<"undefined">>) ->
    {error, <<"Username can not be undefined">>};
ensure_user_exists(Cfg, Username) ->
    case emqx_dashboard_admin:lookup_user(?BACKEND, Username) of
        [User] ->
            case emqx_dashboard_token:sign(User, <<>>) of
                {ok, Role, Token} ->
                    {ok, login_redirect_target(Cfg, Username, Role, Token)};
                Error ->
                    Error
            end;
        [] ->
            %% first login: create the user with the viewer role, then
            %% recurse to take the lookup path above.
            %% assumes add_sso_user/4 makes the user visible to the next
            %% lookup_user/2 call; otherwise this could loop — TODO confirm
            case emqx_dashboard_admin:add_sso_user(?BACKEND, Username, ?ROLE_VIEWER, <<>>) of
                {ok, _} ->
                    ensure_user_exists(Cfg, Username);
                Error ->
                    Error
            end
    end.
%% Absolute redirect URI for the OIDC callback endpoint, derived from
%% the configured dashboard address. Built as an iolist (`Addr' is a
%% binary, the path macros are string literals) and flattened once,
%% avoiding a binary -> list -> binary round-trip.
make_callback_url(#{config := #{dashboard_addr := Addr}}) ->
    iolist_to_binary([Addr, ?BASE_PATH, ?CALLBACK_PATH]).
%% Build the dashboard URL the browser is redirected to after a
%% successful login; the login metadata travels base64-encoded in the
%% `login_meta' query parameter.
login_redirect_target(#{config := #{dashboard_addr := Addr}}, Username, Role, Token) ->
    Meta = emqx_dashboard_sso_api:login_meta(Username, Role, Token, oidc),
    Encoded = base64:encode(emqx_utils_json:encode(Meta)),
    <<Addr/binary, "/?login_meta=", Encoded/binary>>.

View File

@ -0,0 +1,157 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved.
%%--------------------------------------------------------------------
-module(emqx_dashboard_sso_oidc_session).
-behaviour(gen_server).
-include_lib("emqx/include/logger.hrl").
-include_lib("stdlib/include/ms_transform.hrl").
%% API
-export([start_link/1, start/2, stop/0]).
%% gen_server callbacks
-export([
init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3,
format_status/2
]).
-export([new/1, delete/1, lookup/1, random_bin/0, random_bin/1]).
-define(TAB, ?MODULE).
-record(?TAB, {
state :: binary(),
created_at :: non_neg_integer(),
data :: map()
}).
-define(DEFAULT_RANDOM_LEN, 32).
-define(NOW, erlang:system_time(millisecond)).
%%------------------------------------------------------------------------------
%% API
%%------------------------------------------------------------------------------
%% Start the session gen_server registered under the module name;
%% `Cfg' is passed through to init/1 unchanged.
start_link(Cfg) ->
    gen_server:start_link({local, ?MODULE}, ?MODULE, Cfg, []).
%% Start the OIDC session services: the oidcc provider configuration
%% worker for `Issuer' (registered locally as `Name') and the session
%% table owner process with the expiry converted to milliseconds.
%% Returns the provider worker's `{error, _}' as-is; if the session
%% child fails to start, the already-started provider worker is stopped
%% so no orphan child is left behind.
start(Name, #{issuer := Issuer, session_expiry := SessionExpiry0}) ->
    case
        emqx_dashboard_sso_sup:start_child(
            oidcc_provider_configuration_worker,
            [
                #{
                    issuer => Issuer,
                    name => {local, Name}
                }
            ]
        )
    of
        {error, _} = Error ->
            Error;
        _ ->
            SessionExpiry = timer:seconds(SessionExpiry0),
            case emqx_dashboard_sso_sup:start_child(?MODULE, [SessionExpiry]) of
                {error, _} = Error ->
                    %% don't leak the provider worker when the session
                    %% process cannot be started
                    _ = emqx_dashboard_sso_sup:stop_child(
                        oidcc_provider_configuration_worker
                    ),
                    Error;
                Ok ->
                    Ok
            end
    end.
%% Stop both children started by start/2; each result is ignored so
%% stopping is safe even when the children are not running.
stop() ->
    _ = emqx_dashboard_sso_sup:stop_child(oidcc_provider_configuration_worker),
    _ = emqx_dashboard_sso_sup:stop_child(?MODULE),
    ok.
%% Store `Data' under a fresh, unique `state' token with a creation
%% timestamp, and return the token (to be embedded in the redirect URL).
new(Data) ->
    State = new_state(),
    Rec = #?TAB{
        state = State,
        created_at = ?NOW,
        data = Data
    },
    true = ets:insert(?TAB, Rec),
    State.
%% Remove the session entry stored under `State' (no-op when absent).
delete(State) ->
    ets:delete(?TAB, State).
%% Fetch the session data stored under `State'; `undefined' on a miss.
lookup(State) ->
    case ets:lookup(?TAB, State) of
        [#?TAB{data = Data} | _] ->
            {ok, Data};
        [] ->
            undefined
    end.
%% Random binary of the default length (?DEFAULT_RANDOM_LEN).
random_bin() ->
    random_bin(?DEFAULT_RANDOM_LEN).
%% Random binary of length `Len' (delegates to emqx_utils:gen_id/1);
%% used for session states and PKCE verifiers.
random_bin(Len) ->
    emqx_utils_conv:bin(emqx_utils:gen_id(Len)).
%%------------------------------------------------------------------------------
%% gen_server callbacks
%%------------------------------------------------------------------------------
%% Create the public named session ETS table owned by this process and
%% arm the first expiry sweep. `SessionExpiry' (milliseconds) is kept
%% in the server state and used as both entry TTL and sweep interval.
init(SessionExpiry) ->
    process_flag(trap_exit, true),
    emqx_utils_ets:new(
        ?TAB,
        [
            ordered_set,
            public,
            named_table,
            %% key on the `state' field of the record
            {keypos, #?TAB.state},
            {read_concurrency, true}
        ]
    ),
    State = #{session_expiry => SessionExpiry},
    tick_session_expiry(State),
    {ok, State}.
%% No synchronous requests are defined; reply `ok' to any call.
handle_call(_Request, _From, State) ->
    {reply, ok, State}.
%% No casts are defined; ignore anything received.
handle_cast(_Request, State) ->
    {noreply, State}.
%% Periodic sweep: delete every session whose age has reached
%% `SessionExpiry' milliseconds, then re-arm the timer.
%% Any other message is ignored (drains the mailbox).
handle_info(tick_session_expiry, #{session_expiry := SessionExpiry} = State) ->
    Now = ?NOW,
    %% match spec selects (and deletes) entries where the body is true
    Spec = ets:fun2ms(fun(#?TAB{created_at = CreatedAt}) ->
        Now - CreatedAt >= SessionExpiry
    end),
    _ = ets:select_delete(?TAB, Spec),
    tick_session_expiry(State),
    {noreply, State};
handle_info(_Info, State) ->
    {noreply, State}.
%% Nothing to clean up explicitly: the ETS table dies with this process.
terminate(_Reason, _State) ->
    ok.
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
format_status(_Opt, Status) ->
    Status.
%%------------------------------------------------------------------------------
%% Internal functions
%%------------------------------------------------------------------------------
%% Generate a random state token not already present in the session
%% table, retrying on the (unlikely) collision.
new_state() ->
    Candidate = random_bin(),
    case ets:member(?TAB, Candidate) of
        false ->
            Candidate;
        true ->
            new_state()
    end.
%% Arm a one-shot timer that triggers the next expiry sweep after one
%% full expiry interval.
tick_session_expiry(#{session_expiry := SessionExpiry}) ->
    erlang:send_after(SessionExpiry, self(), tick_session_expiry).

View File

@ -273,7 +273,7 @@ is_msie(Headers) ->
not (binary:match(UA, <<"MSIE">>) =:= nomatch).
login_redirect_target(DashboardAddr, Username, Role, Token) ->
LoginMeta = emqx_dashboard_sso_api:login_meta(Username, Role, Token),
LoginMeta = emqx_dashboard_sso_api:login_meta(Username, Role, Token, saml),
<<DashboardAddr/binary, "/?login_meta=", (base64_login_meta(LoginMeta))/binary>>.
base64_login_meta(LoginMeta) ->

Some files were not shown because too many files have changed in this diff Show More