Merge branch 'main-v4.4' into sync-from-v4.3

This commit is contained in:
tigercl 2021-12-09 18:49:38 +08:00 committed by GitHub
commit 2ed5e01054
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
119 changed files with 5395 additions and 1418 deletions

View File

@ -1,4 +1,4 @@
ARG BUILD_FROM=emqx/build-env:erl23.2.7.2-emqx-3-ubuntu20.04 ARG BUILD_FROM=ghcr.io/emqx/emqx-builder/4.4-2:23.3.4.9-3-ubuntu20.04
FROM ${BUILD_FROM} FROM ${BUILD_FROM}
ARG EMQX_NAME=emqx ARG EMQX_NAME=emqx

View File

@ -3,7 +3,7 @@ version: '3.9'
services: services:
erlang: erlang:
container_name: erlang container_name: erlang
image: emqx/build-env:erl23.2.7.2-emqx-3-ubuntu20.04 image: ghcr.io/emqx/emqx-builder/4.4-2:23.3.4.9-3-ubuntu20.04
env_file: env_file:
- conf.env - conf.env
environment: environment:

View File

@ -15,6 +15,8 @@ PROFILE="$1"
VSN="$2" VSN="$2"
OLD_VSN="$3" OLD_VSN="$3"
PACKAGE_PATH="$4" PACKAGE_PATH="$4"
FROM_OTP_VSN="${5:-23.3.4.9-3}"
TO_OTP_VSN="${6:-23.3.4.9-3}"
TEMPDIR=$(mktemp -d) TEMPDIR=$(mktemp -d)
trap '{ rm -rf -- "$TEMPDIR"; }' EXIT trap '{ rm -rf -- "$TEMPDIR"; }' EXIT
@ -37,4 +39,6 @@ exec docker run \
--var ONE_MORE_EMQX_PATH="/relup_test/one_more_emqx" \ --var ONE_MORE_EMQX_PATH="/relup_test/one_more_emqx" \
--var VSN="$VSN" \ --var VSN="$VSN" \
--var OLD_VSN="$OLD_VSN" \ --var OLD_VSN="$OLD_VSN" \
--var FROM_OTP_VSN="$FROM_OTP_VSN" \
--var TO_OTP_VSN="$TO_OTP_VSN" \
relup.lux relup.lux

View File

@ -3,6 +3,8 @@
[config var=ONE_MORE_EMQX_PATH] [config var=ONE_MORE_EMQX_PATH]
[config var=VSN] [config var=VSN]
[config var=OLD_VSN] [config var=OLD_VSN]
[config var=FROM_OTP_VSN]
[config var=TO_OTP_VSN]
[config shell_cmd=/bin/bash] [config shell_cmd=/bin/bash]
[config timeout=600000] [config timeout=600000]
@ -19,7 +21,7 @@
[shell emqx] [shell emqx]
!cd $PACKAGE_PATH !cd $PACKAGE_PATH
!unzip -q -o $PROFILE-ubuntu20.04-$(echo $OLD_VSN | sed -r 's/[v|e]//g')-amd64.zip !unzip -q -o $PROFILE-$(echo $OLD_VSN | sed -r 's/[v|e]//g')-otp${FROM_OTP_VSN}-ubuntu20.04-amd64.zip
?SH-PROMPT ?SH-PROMPT
!cd emqx !cd emqx
@ -80,7 +82,7 @@
!echo "" > log/emqx.log.1 !echo "" > log/emqx.log.1
?SH-PROMPT ?SH-PROMPT
!cp -f ../$PROFILE-ubuntu20.04-$VSN-amd64.zip releases/ !cp -f ../$PROFILE-$VSN-otp${TO_OTP_VSN}-ubuntu20.04-amd64.zip releases/
!./bin/emqx install $VSN !./bin/emqx install $VSN
?Made release permanent: "$VSN" ?Made release permanent: "$VSN"
@ -105,7 +107,7 @@
!echo "" > log/emqx.log.1 !echo "" > log/emqx.log.1
?SH-PROMPT ?SH-PROMPT
!cp -f ../$PROFILE-ubuntu20.04-$VSN-amd64.zip releases/ !cp -f ../$PROFILE-$VSN-otp${TO_OTP_VSN}-ubuntu20.04-amd64.zip releases/
!./bin/emqx install $VSN !./bin/emqx install $VSN
?Made release permanent: "$VSN" ?Made release permanent: "$VSN"

View File

@ -1,5 +1,9 @@
name: Cross build packages name: Cross build packages
concurrency:
group: build-${{ github.event_name }}-${{ github.ref }}
cancel-in-progress: true
on: on:
schedule: schedule:
- cron: '0 */6 * * *' - cron: '0 */6 * * *'
@ -11,11 +15,12 @@ on:
jobs: jobs:
prepare: prepare:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
container: emqx/build-env:erl23.2.7.2-emqx-3-ubuntu20.04 # prepare source with any OTP version, no need for a matrix
container: ghcr.io/emqx/emqx-builder/4.4-2:23.3.4.9-3-ubuntu20.04
outputs: outputs:
profiles: ${{ steps.set_profile.outputs.profiles}} profiles: ${{ steps.set_profile.outputs.profiles }}
old_vsns: ${{ steps.set_profile.outputs.old_vsns}} old_vsns: ${{ steps.set_profile.outputs.old_vsns }}
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
@ -25,8 +30,8 @@ jobs:
- name: set profile - name: set profile
id: set_profile id: set_profile
shell: bash shell: bash
working-directory: source
run: | run: |
cd source
vsn="$(./pkg-vsn.sh)" vsn="$(./pkg-vsn.sh)"
pre_vsn="$(echo $vsn | grep -oE '^[0-9]+.[0-9]')" pre_vsn="$(echo $vsn | grep -oE '^[0-9]+.[0-9]')"
if make emqx-ee --dry-run > /dev/null 2>&1; then if make emqx-ee --dry-run > /dev/null 2>&1; then
@ -43,7 +48,7 @@ jobs:
run: | run: |
make -C source deps-all make -C source deps-all
zip -ryq source.zip source/* source/.[^.]* zip -ryq source.zip source/* source/.[^.]*
- name: get_all_deps - name: get_all_deps_ee
if: endsWith(github.repository, 'enterprise') if: endsWith(github.repository, 'enterprise')
run: | run: |
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
@ -63,8 +68,11 @@ jobs:
if: endsWith(github.repository, 'emqx') if: endsWith(github.repository, 'emqx')
strategy: strategy:
fail-fast: false
matrix: matrix:
profile: ${{fromJSON(needs.prepare.outputs.profiles)}} profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
otp:
- 23.2
exclude: exclude:
- profile: emqx-edge - profile: emqx-edge
@ -76,26 +84,27 @@ jobs:
- name: unzip source code - name: unzip source code
run: Expand-Archive -Path source.zip -DestinationPath ./ run: Expand-Archive -Path source.zip -DestinationPath ./
- uses: ilammy/msvc-dev-cmd@v1 - uses: ilammy/msvc-dev-cmd@v1
- uses: gleam-lang/setup-erlang@v1.1.0 - uses: gleam-lang/setup-erlang@v1.1.2
id: install_erlang id: install_erlang
## gleam-lang/setup-erlang does not yet support the installation of otp24 on windows
with: with:
otp-version: 23.2 otp-version: ${{ matrix.otp }}
- name: build - name: build
env: env:
PYTHON: python PYTHON: python
DIAGNOSTIC: 1 DIAGNOSTIC: 1
working-directory: source
run: | run: |
$env:PATH = "${{ steps.install_erlang.outputs.erlpath }}\bin;$env:PATH" $env:PATH = "${{ steps.install_erlang.outputs.erlpath }}\bin;$env:PATH"
$version = $( "${{ github.ref }}" -replace "^(.*)/(.*)/" ) $version = $( "${{ github.ref }}" -replace "^(.*)/(.*)/" )
if ($version -match "^v[0-9]+\.[0-9]+(\.[0-9]+)?") { if ($version -match "^v[0-9]+\.[0-9]+(\.[0-9]+)?") {
$regex = "[0-9]+\.[0-9]+(-alpha|-beta|-rc)?\.[0-9]+" $regex = "[0-9]+\.[0-9]+(-alpha|-beta|-rc)?\.[0-9]+"
$pkg_name = "${{ matrix.profile }}-windows-$([regex]::matches($version, $regex).value).zip" $pkg_name = "${{ matrix.profile }}-$([regex]::matches($version, $regex).value)-otp${{ matrix.otp }}-windows-amd64.zip"
} }
else { else {
$pkg_name = "${{ matrix.profile }}-windows-$($version -replace '/').zip" $pkg_name = "${{ matrix.profile }}-$($version -replace '/')-otp${{ matrix.otp }}-windows-amd64.zip"
} }
cd source
## We do not build/release bcrypt for windows package ## We do not build/release bcrypt for windows package
Remove-Item -Recurse -Force -Path _build/default/lib/bcrypt/ Remove-Item -Recurse -Force -Path _build/default/lib/bcrypt/
if (Test-Path rebar.lock) { if (Test-Path rebar.lock) {
@ -112,8 +121,8 @@ jobs:
Get-FileHash -Path "_packages/${{ matrix.profile }}/$pkg_name" | Format-List | grep 'Hash' | awk '{print $3}' > _packages/${{ matrix.profile }}/$pkg_name.sha256 Get-FileHash -Path "_packages/${{ matrix.profile }}/$pkg_name" | Format-List | grep 'Hash' | awk '{print $3}' > _packages/${{ matrix.profile }}/$pkg_name.sha256
- name: run emqx - name: run emqx
timeout-minutes: 1 timeout-minutes: 1
working-directory: source
run: | run: |
cd source
./_build/${{ matrix.profile }}/rel/emqx/bin/emqx start ./_build/${{ matrix.profile }}/rel/emqx/bin/emqx start
Start-Sleep -s 5 Start-Sleep -s 5
./_build/${{ matrix.profile }}/rel/emqx/bin/emqx stop ./_build/${{ matrix.profile }}/rel/emqx/bin/emqx stop
@ -126,18 +135,19 @@ jobs:
path: source/_packages/${{ matrix.profile }}/. path: source/_packages/${{ matrix.profile }}/.
mac: mac:
runs-on: macos-10.15
needs: prepare needs: prepare
strategy: strategy:
fail-fast: false
matrix: matrix:
profile: ${{fromJSON(needs.prepare.outputs.profiles)}} profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
erl_otp: erl_otp:
- 23.2.7.2-emqx-3 - 23.3.4.9-3
macos:
- macos-11
- macos-10.15
exclude: exclude:
- profile: emqx-edge - profile: emqx-edge
runs-on: ${{ matrix.macos }}
steps: steps:
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v2
with: with:
@ -155,7 +165,7 @@ jobs:
id: cache id: cache
with: with:
path: ~/.kerl path: ~/.kerl
key: erl${{ matrix.erl_otp }}-macos10.15 key: erl${{ matrix.erl_otp }}-${{ matrix.macos }}
- name: build erlang - name: build erlang
if: steps.cache.outputs.cache-hit != 'true' if: steps.cache.outputs.cache-hit != 'true'
timeout-minutes: 60 timeout-minutes: 60
@ -167,18 +177,19 @@ jobs:
kerl build ${{ matrix.erl_otp }} kerl build ${{ matrix.erl_otp }}
kerl install ${{ matrix.erl_otp }} $HOME/.kerl/${{ matrix.erl_otp }} kerl install ${{ matrix.erl_otp }} $HOME/.kerl/${{ matrix.erl_otp }}
- name: build - name: build
working-directory: source
run: | run: |
. $HOME/.kerl/${{ matrix.erl_otp }}/activate . $HOME/.kerl/${{ matrix.erl_otp }}/activate
cd source
make ensure-rebar3 make ensure-rebar3
sudo cp rebar3 /usr/local/bin/rebar3 sudo cp rebar3 /usr/local/bin/rebar3
rm -rf _build/${{ matrix.profile }}/lib rm -rf _build/${{ matrix.profile }}/lib
make ${{ matrix.profile }}-zip make ${{ matrix.profile }}-zip
- name: test - name: test
working-directory: source
run: | run: |
cd source set -x
pkg_name=$(basename _packages/${{ matrix.profile }}/${{ matrix.profile }}-*.zip) pkg_name=$(find _packages/${{ matrix.profile }} -mindepth 1 -maxdepth 1 -iname \*.zip)
unzip -q _packages/${{ matrix.profile }}/$pkg_name unzip -q $pkg_name
gsed -i '/emqx_telemetry/d' ./emqx/data/loaded_plugins gsed -i '/emqx_telemetry/d' ./emqx/data/loaded_plugins
./emqx/bin/emqx start || cat emqx/log/erlang.log.1 ./emqx/bin/emqx start || cat emqx/log/erlang.log.1
ready='no' ready='no'
@ -197,11 +208,11 @@ jobs:
./emqx/bin/emqx_ctl status ./emqx/bin/emqx_ctl status
./emqx/bin/emqx stop ./emqx/bin/emqx stop
rm -rf emqx rm -rf emqx
openssl dgst -sha256 ./_packages/${{ matrix.profile }}/$pkg_name | awk '{print $2}' > ./_packages/${{ matrix.profile }}/$pkg_name.sha256 openssl dgst -sha256 $pkg_name | awk '{print $2}' > $pkg_name.sha256
- uses: actions/upload-artifact@v1 - uses: actions/upload-artifact@v1
if: startsWith(github.ref, 'refs/tags/') if: startsWith(github.ref, 'refs/tags/')
with: with:
name: ${{ matrix.profile }} name: ${{ matrix.profile }}-${{ matrix.otp }}
path: source/_packages/${{ matrix.profile }}/. path: source/_packages/${{ matrix.profile }}/.
linux: linux:
@ -210,8 +221,11 @@ jobs:
needs: prepare needs: prepare
strategy: strategy:
fail-fast: false
matrix: matrix:
profile: ${{fromJSON(needs.prepare.outputs.profiles)}} profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
otp:
- 23.3.4.9-3
arch: arch:
- amd64 - amd64
- arm64 - arm64
@ -248,15 +262,11 @@ jobs:
shell: bash shell: bash
steps: steps:
- name: prepare docker - uses: docker/setup-buildx-action@v1
run: | - uses: docker/setup-qemu-action@v1
mkdir -p $HOME/.docker with:
echo '{ "experimental": "enabled" }' | tee $HOME/.docker/config.json image: tonistiigi/binfmt:latest
echo '{ "experimental": true, "storage-driver": "overlay2", "max-concurrent-downloads": 50, "max-concurrent-uploads": 50}' | sudo tee /etc/docker/daemon.json platforms: all
sudo systemctl restart docker
docker info
docker buildx create --use --name mybuild
docker run --rm --privileged tonistiigi/binfmt --install all
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v2
with: with:
name: source name: source
@ -265,10 +275,12 @@ jobs:
run: unzip -q source.zip run: unzip -q source.zip
- name: downloads old emqx zip packages - name: downloads old emqx zip packages
env: env:
OTP_VSN: ${{ matrix.otp }}
PROFILE: ${{ matrix.profile }} PROFILE: ${{ matrix.profile }}
ARCH: ${{ matrix.arch }} ARCH: ${{ matrix.arch }}
SYSTEM: ${{ matrix.os }} SYSTEM: ${{ matrix.os }}
OLD_VSNS: ${{ needs.prepare.outputs.old_vsns }} OLD_VSNS: ${{ needs.prepare.outputs.old_vsns }}
working-directory: source
run: | run: |
set -e -x -u set -e -x -u
broker=$PROFILE broker=$PROFILE
@ -279,76 +291,61 @@ jobs:
export ARCH="arm" export ARCH="arm"
fi fi
mkdir -p source/_upgrade_base mkdir -p _upgrade_base
cd source/_upgrade_base cd _upgrade_base
old_vsns=($(echo $OLD_VSNS | tr ' ' ' ')) old_vsns=($(echo $OLD_VSNS | tr ' ' ' '))
for tag in ${old_vsns[@]}; do for tag in ${old_vsns[@]}; do
if [ ! -z "$(echo $(curl -I -m 10 -o /dev/null -s -w %{http_code} https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip) | grep -oE "^[23]+")" ];then package_name="${PROFILE}-${tag#[e|v]}-otp${OTP_VSN}-${SYSTEM}-${ARCH}"
wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip if [ ! -z "$(echo $(curl -I -m 10 -o /dev/null -s -w %{http_code} https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$package_name.zip) | grep -oE "^[23]+")" ]; then
wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip.sha256 wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$package_name.zip
echo "$(cat $PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip.sha256) $PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip" | sha256sum -c || exit 1 wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$package_name.zip.sha256
echo "$(cat $package_name.zip.sha256) $package_name.zip" | sha256sum -c || exit 1
fi fi
done done
- name: build emqx packages - name: build emqx packages
env: env:
ERL_OTP: erl23.2.7.2-emqx-3 OTP: ${{ matrix.otp }}
PROFILE: ${{ matrix.profile }} PROFILE: ${{ matrix.profile }}
ARCH: ${{ matrix.arch }} ARCH: ${{ matrix.arch }}
SYSTEM: ${{ matrix.os }} SYSTEM: ${{ matrix.os }}
working-directory: source
run: | run: |
set -e -u docker run -i --rm \
cd source -v $(pwd):/emqx \
docker buildx build --no-cache \ --workdir /emqx \
--platform=linux/$ARCH \ --platform linux/$ARCH \
-t cross_build_emqx_for_$SYSTEM \ ghcr.io/emqx/emqx-builder/4.4-2:$OTP-$SYSTEM \
-f .ci/build_packages/Dockerfile \ bash -euc "make $PROFILE-zip || cat rebar3.crashdump; \
--build-arg BUILD_FROM=emqx/build-env:$ERL_OTP-$SYSTEM \ make $PROFILE-pkg || cat rebar3.crashdump; \
--build-arg EMQX_NAME=$PROFILE \ EMQX_NAME=$PROFILE && .ci/build_packages/tests.sh"
--output type=tar,dest=/tmp/cross-build-$PROFILE-for-$SYSTEM.tar .
mkdir -p /tmp/packages/$PROFILE
tar -xvf /tmp/cross-build-$PROFILE-for-$SYSTEM.tar --wildcards emqx/_packages/$PROFILE/*
mv emqx/_packages/$PROFILE/* /tmp/packages/$PROFILE/
rm -rf /tmp/cross-build-$PROFILE-for-$SYSTEM.tar
docker rm -f $(docker ps -a -q)
docker volume prune -f
- name: create sha256 - name: create sha256
working-directory: source
env: env:
PROFILE: ${{ matrix.profile}} PROFILE: ${{ matrix.profile}}
run: | run: |
if [ -d /tmp/packages/$PROFILE ]; then if [ -d _packages/$PROFILE ]; then
cd /tmp/packages/$PROFILE cd _packages/$PROFILE
for var in $(ls emqx-* ); do for var in $(ls emqx-* ); do
bash -c "echo $(sha256sum $var | awk '{print $1}') > $var.sha256" sudo bash -c "echo $(sha256sum $var | awk '{print $1}') > $var.sha256"
done done
cd - cd -
fi fi
- uses: actions/upload-artifact@v1 - uses: actions/upload-artifact@v1
if: startsWith(github.ref, 'refs/tags/') if: startsWith(github.ref, 'refs/tags/')
with: with:
name: ${{ matrix.profile }} name: ${{ matrix.profile }}-${{ matrix.otp }}
path: /tmp/packages/${{ matrix.profile }}/. path: source/_packages/${{ matrix.profile }}/.
docker: docker:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
needs: prepare needs: prepare
strategy: strategy:
fail-fast: false
matrix: matrix:
profile: ${{fromJSON(needs.prepare.outputs.profiles)}} profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
arch: otp:
- [amd64, x86_64] - 23.3.4.9-3
- [arm64v8, aarch64]
- [arm32v7, arm]
- [i386, i386]
- [s390x, s390x]
exclude:
- profile: emqx-ee
arch: [i386, i386]
- profile: emqx-ee
arch: [s390x, s390x]
steps: steps:
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v2
@ -357,22 +354,57 @@ jobs:
path: . path: .
- name: unzip source code - name: unzip source code
run: unzip -q source.zip run: unzip -q source.zip
- name: build emqx docker image - uses: docker/setup-buildx-action@v1
env: - uses: docker/setup-qemu-action@v1
PROFILE: ${{ matrix.profile }}
ARCH: ${{ matrix.arch[0] }}
QEMU_ARCH: ${{ matrix.arch[1] }}
run: |
sudo docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
cd source
sudo TARGET=emqx/$PROFILE ARCH=$ARCH QEMU_ARCH=$QEMU_ARCH make docker
cd _packages/$PROFILE && for var in $(ls ${PROFILE}-docker-* ); do sudo bash -c "echo $(sha256sum $var | awk '{print $1}') > $var.sha256"; done && cd -
- uses: actions/upload-artifact@v1
if: startsWith(github.ref, 'refs/tags/')
with: with:
name: ${{ matrix.profile }} image: tonistiigi/binfmt:latest
path: source/_packages/${{ matrix.profile }}/. platforms: all
- uses: docker/metadata-action@v3
id: meta
with:
images: ${{ github.repository_owner }}/${{ matrix.profile }}
flavor: |
latest=${{ !github.event.release.prerelease }}
tags: |
type=ref,event=branch
type=ref,event=pr
type=ref,event=tag
type=semver,pattern={{version}}
labels:
org.opencontainers.image.otp.version=${{ matrix.otp }}
- uses: docker/login-action@v1
if: github.event_name == 'release'
with:
username: ${{ secrets.DOCKER_HUB_USER }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- uses: docker/build-push-action@v2
with:
push: ${{ github.event_name == 'release' && !github.event.release.prerelease }}
pull: true
no-cache: true
platforms: linux/amd64,linux/arm64
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
build-args: |
BUILD_FROM=ghcr.io/emqx/emqx-builder/4.4-2:${{ matrix.otp }}-alpine3.14
RUN_FROM=alpine:3.14
EMQX_NAME=${{ matrix.profile }}
file: source/deploy/docker/Dockerfile
context: source
- uses: aws-actions/configure-aws-credentials@v1
if: github.event_name == 'release' && !github.event.release.prerelease && matrix.profile == 'emqx'
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ secrets.AWS_DEFAULT_REGION }}
- name: Push image to aws ecr
if: github.event_name == 'release' && !github.event.release.prerelease && matrix.profile == 'emqx'
run: |
version=${GITHUB_REF##*/}
docker pull emqx/emqx:${version#v}
docker tag emqx/emqx:${version#v} public.ecr.aws/emqx/emqx:${version#v}
aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin public.ecr.aws
docker push public.ecr.aws/emqx/emqx:${version#v}
delete-artifact: delete-artifact:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
@ -392,6 +424,8 @@ jobs:
strategy: strategy:
matrix: matrix:
profile: ${{fromJSON(needs.prepare.outputs.profiles)}} profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
otp:
- 23.3.4.9-3
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
@ -402,7 +436,7 @@ jobs:
echo 'EOF' >> $GITHUB_ENV echo 'EOF' >> $GITHUB_ENV
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v2
with: with:
name: ${{ matrix.profile }} name: ${{ matrix.profile }}-${{ matrix.otp }}
path: ./_packages/${{ matrix.profile }} path: ./_packages/${{ matrix.profile }}
- name: install dos2unix - name: install dos2unix
run: sudo apt-get update && sudo apt install -y dos2unix run: sudo apt-get update && sudo apt install -y dos2unix
@ -469,22 +503,22 @@ jobs:
docker tag emqx/emqx:${version#v} public.ecr.aws/emqx/emqx:${version#v} docker tag emqx/emqx:${version#v} public.ecr.aws/emqx/emqx:${version#v}
docker push public.ecr.aws/emqx/emqx:${version#v} docker push public.ecr.aws/emqx/emqx:${version#v}
- name: update repo.emqx.io - name: update repo.emqx.io
if: github.event_name == 'release' && endsWith(github.repository, 'enterprise') && matrix.profile == 'emqx-ee' if: github.event_name == 'release' && matrix.profile == 'emqx-ee'
run: | run: |
curl --silent --show-error \ curl --silent --show-error \
-H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \ -H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \
-H "Accept: application/vnd.github.v3+json" \ -H "Accept: application/vnd.github.v3+json" \
-X POST \ -X POST \
-d "{\"ref\":\"v1.0.3\",\"inputs\":{\"version\": \"${{ env.version }}\", \"emqx_ee\": \"true\"}}" \ -d "{\"ref\":\"v1.0.4\",\"inputs\":{\"version\": \"${{ env.version }}\", \"emqx_ee\": \"true\"}}" \
"https://api.github.com/repos/emqx/emqx-ci-helper/actions/workflows/update_emqx_repos.yaml/dispatches" "https://api.github.com/repos/emqx/emqx-ci-helper/actions/workflows/update_emqx_repos.yaml/dispatches"
- name: update repo.emqx.io - name: update repo.emqx.io
if: github.event_name == 'release' && endsWith(github.repository, 'emqx') && matrix.profile == 'emqx' if: github.event_name == 'release' && matrix.profile == 'emqx'
run: | run: |
curl --silent --show-error \ curl --silent --show-error \
-H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \ -H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \
-H "Accept: application/vnd.github.v3+json" \ -H "Accept: application/vnd.github.v3+json" \
-X POST \ -X POST \
-d "{\"ref\":\"v1.0.3\",\"inputs\":{\"version\": \"${{ env.version }}\", \"emqx_ce\": \"true\"}}" \ -d "{\"ref\":\"v1.0.4\",\"inputs\":{\"version\": \"${{ env.version }}\", \"emqx_ce\": \"true\"}}" \
"https://api.github.com/repos/emqx/emqx-ci-helper/actions/workflows/update_emqx_repos.yaml/dispatches" "https://api.github.com/repos/emqx/emqx-ci-helper/actions/workflows/update_emqx_repos.yaml/dispatches"
- name: update homebrew packages - name: update homebrew packages
if: github.event_name == 'release' && endsWith(github.repository, 'emqx') && matrix.profile == 'emqx' if: github.event_name == 'release' && endsWith(github.repository, 'emqx') && matrix.profile == 'emqx'
@ -494,7 +528,7 @@ jobs:
-H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \ -H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \
-H "Accept: application/vnd.github.v3+json" \ -H "Accept: application/vnd.github.v3+json" \
-X POST \ -X POST \
-d "{\"ref\":\"v1.0.3\",\"inputs\":{\"version\": \"${{ env.version }}\"}}" \ -d "{\"ref\":\"v1.0.4\",\"inputs\":{\"version\": \"${{ env.version }}\"}}" \
"https://api.github.com/repos/emqx/emqx-ci-helper/actions/workflows/update_emqx_homebrew.yaml/dispatches" "https://api.github.com/repos/emqx/emqx-ci-helper/actions/workflows/update_emqx_homebrew.yaml/dispatches"
fi fi
- uses: geekyeggo/delete-artifact@v1 - uses: geekyeggo/delete-artifact@v1

View File

@ -1,5 +1,10 @@
name: Build slim packages name: Build slim packages
concurrency:
group: slim-${{ github.event_name }}-${{ github.ref }}
cancel-in-progress: true
on: on:
push: push:
tags: tags:
@ -13,14 +18,15 @@ jobs:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
strategy: strategy:
fail-fast: false
matrix: matrix:
erl_otp: erl_otp:
- erl23.2.7.2-emqx-3 - 23.3.4.9-3
os: os:
- ubuntu20.04 - ubuntu20.04
- centos7 - centos7
container: emqx/build-env:${{ matrix.erl_otp }}-${{ matrix.os }} container: ghcr.io/emqx/emqx-builder/4.4-2:${{ matrix.erl_otp }}-${{ matrix.os }}
steps: steps:
- uses: actions/checkout@v1 - uses: actions/checkout@v1
@ -43,7 +49,7 @@ jobs:
with: with:
name: rebar3.crashdump name: rebar3.crashdump
path: ./rebar3.crashdump path: ./rebar3.crashdump
- name: pakcages test - name: packages test
run: | run: |
export CODE_PATH=$GITHUB_WORKSPACE export CODE_PATH=$GITHUB_WORKSPACE
.ci/build_packages/tests.sh .ci/build_packages/tests.sh
@ -53,12 +59,17 @@ jobs:
path: _packages/**/*.zip path: _packages/**/*.zip
mac: mac:
runs-on: macos-10.15
strategy: strategy:
fail-fast: false
matrix: matrix:
erl_otp: erl_otp:
- 23.2.7.2-emqx-3 - 23.3.4.9-3
macos:
- macos-11
- macos-10.15
runs-on: ${{ matrix.macos }}
steps: steps:
- uses: actions/checkout@v1 - uses: actions/checkout@v1
@ -82,7 +93,7 @@ jobs:
id: cache id: cache
with: with:
path: ~/.kerl path: ~/.kerl
key: erl${{ matrix.erl_otp }}-macos10.15 key: otp-${{ matrix.erl_otp }}-${{ matrix.macos }}
- name: build erlang - name: build erlang
if: steps.cache.outputs.cache-hit != 'true' if: steps.cache.outputs.cache-hit != 'true'
timeout-minutes: 60 timeout-minutes: 60
@ -106,8 +117,8 @@ jobs:
path: ./rebar3.crashdump path: ./rebar3.crashdump
- name: test - name: test
run: | run: |
pkg_name=$(basename _packages/${EMQX_NAME}/emqx-*.zip) pkg_name=$(find _packages/${EMQX_NAME} -mindepth 1 -maxdepth 1 -iname \*.zip)
unzip -q _packages/${EMQX_NAME}/$pkg_name unzip -q $pkg_name
gsed -i '/emqx_telemetry/d' ./emqx/data/loaded_plugins gsed -i '/emqx_telemetry/d' ./emqx/data/loaded_plugins
./emqx/bin/emqx start || cat emqx/log/erlang.log.1 ./emqx/bin/emqx start || cat emqx/log/erlang.log.1
ready='no' ready='no'

View File

@ -5,7 +5,7 @@ on: [pull_request]
jobs: jobs:
check_deps_integrity: check_deps_integrity:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
container: emqx/build-env:erl23.2.7.2-emqx-3-ubuntu20.04 container: ghcr.io/emqx/emqx-builder/4.4-2:23.3.4.9-3-ubuntu20.04
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2

View File

@ -5,7 +5,7 @@ on: workflow_dispatch
jobs: jobs:
test: test:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
container: emqx/build-env:erl23.2.7.2-emqx-3-ubuntu20.04 container: ghcr.io/emqx/emqx-builder/4.4-2:23.3.4.9-3-ubuntu20.04
strategy: strategy:
fail-fast: true fail-fast: true
env: env:

View File

@ -12,12 +12,12 @@ jobs:
build: build:
runs-on: ubuntu-latest runs-on: ubuntu-latest
outputs: outputs:
imgname: ${{ steps.build_docker.outputs.imgname}} imgname: ${{ steps.prepare.outputs.imgname}}
version: ${{ steps.build_docker.outputs.version}} version: ${{ steps.prepare.outputs.version}}
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- name: build docker - name: prepare
id: build_docker id: prepare
run: | run: |
if [ -f EMQX_ENTERPRISE ]; then if [ -f EMQX_ENTERPRISE ]; then
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
@ -25,18 +25,22 @@ jobs:
echo "${{ secrets.CI_GIT_TOKEN }}" >> scripts/git-token echo "${{ secrets.CI_GIT_TOKEN }}" >> scripts/git-token
make deps-emqx-ee make deps-emqx-ee
make clean make clean
fi
make docker
echo "::set-output name=version::$(./pkg-vsn.sh)"
if [ -f EMQX_ENTERPRISE ]; then
echo "::set-output name=imgname::emqx-ee" echo "::set-output name=imgname::emqx-ee"
echo "::set-output name=version::$(./pkg-vsn.sh)"
else else
echo "::set-output name=imgname::emqx" echo "::set-output name=imgname::emqx"
echo "::set-output name=version::$(./pkg-vsn.sh)"
fi fi
- name: build docker image
env:
OTP_VSN: 23.3.4.9-3
run: |
make ${{ steps.prepare.outputs.imgname }}-docker
docker save emqx/${{ steps.prepare.outputs.imgname }}:${{ steps.prepare.outputs.version }} -o image.tar.gz
- uses: actions/upload-artifact@v2 - uses: actions/upload-artifact@v2
with: with:
name: emqx-docker-image-zip name: image
path: _packages/${{ steps.build_docker.outputs.imgname }}/${{ steps.build_docker.outputs.imgname }}-docker-${{ steps.build_docker.outputs.version }}.zip path: image.tar.gz
webhook: webhook:
runs-on: ubuntu-latest runs-on: ubuntu-latest
@ -52,15 +56,11 @@ jobs:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v2
with: with:
name: emqx-docker-image-zip name: image
path: /tmp path: /tmp
- name: load docker image - name: load docker image
env:
imgname: ${{ needs.build.outputs.imgname}}
version: ${{ needs.build.outputs.version }}
run: | run: |
unzip -q /tmp/${imgname}-docker-${version}.zip -d /tmp docker load < /tmp/image.tar.gz
docker load < /tmp/${imgname}-docker-${version}
- name: docker compose up - name: docker compose up
timeout-minutes: 5 timeout-minutes: 5
env: env:
@ -152,15 +152,11 @@ jobs:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v2
with: with:
name: emqx-docker-image-zip name: image
path: /tmp path: /tmp
- name: load docker image - name: load docker image
env:
imgname: ${{ needs.build.outputs.imgname }}
version: ${{ needs.build.outputs.version }}
run: | run: |
unzip -q /tmp/${imgname}-docker-${version}.zip -d /tmp docker load < /tmp/image.tar.gz
docker load < /tmp/${imgname}-docker-${version}
- name: docker compose up - name: docker compose up
timeout-minutes: 5 timeout-minutes: 5
env: env:
@ -259,15 +255,11 @@ jobs:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v2
with: with:
name: emqx-docker-image-zip name: image
path: /tmp path: /tmp
- name: load docker image - name: load docker image
env:
imgname: ${{ needs.build.outputs.imgname }}
version: ${{ needs.build.outputs.version }}
run: | run: |
unzip -q /tmp/${imgname}-docker-${version}.zip -d /tmp docker load < /tmp/image.tar.gz
docker load < /tmp/${imgname}-docker-${version}
- name: docker compose up - name: docker compose up
timeout-minutes: 5 timeout-minutes: 5
env: env:
@ -355,15 +347,11 @@ jobs:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v2
with: with:
name: emqx-docker-image-zip name: image
path: /tmp path: /tmp
- name: load docker image - name: load docker image
env:
imgname: ${{ needs.build.outputs.imgname }}
version: ${{ needs.build.outputs.version }}
run: | run: |
unzip -q /tmp/${imgname}-docker-${version}.zip -d /tmp docker load < /tmp/image.tar.gz
docker load < /tmp/${imgname}-docker-${version}
- name: docker compose up - name: docker compose up
timeout-minutes: 5 timeout-minutes: 5
env: env:

View File

@ -13,10 +13,6 @@ jobs:
steps: steps:
- uses: actions/checkout@v1 - uses: actions/checkout@v1
- uses: gleam-lang/setup-erlang@v1.1.2
id: install_erlang
with:
otp-version: 23.2
- name: prepare - name: prepare
run: | run: |
if make emqx-ee --dry-run > /dev/null 2>&1; then if make emqx-ee --dry-run > /dev/null 2>&1; then
@ -24,14 +20,19 @@ jobs:
git config --global credential.helper store git config --global credential.helper store
echo "${{ secrets.CI_GIT_TOKEN }}" >> scripts/git-token echo "${{ secrets.CI_GIT_TOKEN }}" >> scripts/git-token
make deps-emqx-ee make deps-emqx-ee
make clean
echo "TARGET=emqx/emqx-ee" >> $GITHUB_ENV echo "TARGET=emqx/emqx-ee" >> $GITHUB_ENV
echo "PROFILE=emqx-ee" >> $GITHUB_ENV
echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV
else else
echo "TARGET=emqx/emqx" >> $GITHUB_ENV echo "TARGET=emqx/emqx" >> $GITHUB_ENV
echo "PROFILE=emqx" >> $GITHUB_ENV
echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV
fi fi
- name: make emqx image - name: make emqx image
run: make docker env:
OTP_VSN: 23.3.4.9-3
run: make ${PROFILE}-docker
- name: run emqx - name: run emqx
timeout-minutes: 5 timeout-minutes: 5
run: | run: |
@ -64,13 +65,15 @@ jobs:
helm_test: helm_test:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
strategy:
fail-fast: false
matrix:
discovery:
- k8s
- dns
steps: steps:
- uses: actions/checkout@v1 - uses: actions/checkout@v1
- uses: gleam-lang/setup-erlang@v1.1.2
id: install_erlang
with:
otp-version: 23.2
- name: prepare - name: prepare
run: | run: |
if make emqx-ee --dry-run > /dev/null 2>&1; then if make emqx-ee --dry-run > /dev/null 2>&1; then
@ -78,12 +81,19 @@ jobs:
git config --global credential.helper store git config --global credential.helper store
echo "${{ secrets.CI_GIT_TOKEN }}" >> scripts/git-token echo "${{ secrets.CI_GIT_TOKEN }}" >> scripts/git-token
make deps-emqx-ee make deps-emqx-ee
make clean
echo "TARGET=emqx/emqx-ee" >> $GITHUB_ENV echo "TARGET=emqx/emqx-ee" >> $GITHUB_ENV
echo "PROFILE=emqx-ee" >> $GITHUB_ENV
echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV
else else
echo "TARGET=emqx/emqx" >> $GITHUB_ENV echo "TARGET=emqx/emqx" >> $GITHUB_ENV
echo "PROFILE=emqx" >> $GITHUB_ENV
echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV
fi fi
- name: make emqx image - name: make emqx image
run: make docker env:
OTP_VSN: 23.3.4.9-3
run: make ${PROFILE}-docker
- name: install k3s - name: install k3s
env: env:
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml" KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
@ -100,18 +110,18 @@ jobs:
sudo chmod 700 get_helm.sh sudo chmod 700 get_helm.sh
sudo ./get_helm.sh sudo ./get_helm.sh
helm version helm version
- name: run emqx on chart - name: setup emqx chart
env:
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
timeout-minutes: 5
run: | run: |
version=$(./pkg-vsn.sh) sudo docker save ${TARGET}:${EMQX_TAG} -o emqx.tar.gz
sudo docker save ${TARGET}:$version -o emqx.tar.gz
sudo k3s ctr image import emqx.tar.gz sudo k3s ctr image import emqx.tar.gz
sed -i -r "s/^appVersion: .*$/appVersion: \"${version}\"/g" deploy/charts/emqx/Chart.yaml sed -i -r "s/^appVersion: .*$/appVersion: \"${EMQX_TAG}\"/g" deploy/charts/emqx/Chart.yaml
sed -i '/emqx_telemetry/d' deploy/charts/emqx/values.yaml sed -i '/emqx_telemetry/d' deploy/charts/emqx/values.yaml
- name: run emqx on chart
if: matrix.discovery == 'k8s'
env:
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
run: |
helm install emqx \ helm install emqx \
--set image.repository=${TARGET} \ --set image.repository=${TARGET} \
--set image.pullPolicy=Never \ --set image.pullPolicy=Never \
@ -121,7 +131,29 @@ jobs:
--set emqxConfig.EMQX_MQTT__MAX_TOPIC_ALIAS=10 \ --set emqxConfig.EMQX_MQTT__MAX_TOPIC_ALIAS=10 \
deploy/charts/emqx \ deploy/charts/emqx \
--debug --debug
- name: run emqx on chart
if: matrix.discovery == 'dns'
env:
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
run: |
helm install emqx \
--set emqxConfig.EMQX_CLUSTER__DISCOVERY="dns" \
--set emqxConfig.EMQX_CLUSTER__DNS__NAME="emqx-headless.default.svc.cluster.local" \
--set emqxConfig.EMQX_CLUSTER__DNS__APP="emqx" \
--set emqxConfig.EMQX_CLUSTER__DNS__TYPE="srv" \
--set image.repository=${TARGET} \
--set image.pullPolicy=Never \
--set emqxAclConfig="" \
--set image.pullPolicy=Never \
--set emqxConfig.EMQX_ZONE__EXTERNAL__RETRY_INTERVAL=2s \
--set emqxConfig.EMQX_MQTT__MAX_TOPIC_ALIAS=10 \
deploy/charts/emqx \
--debug
- name: waiting emqx started
env:
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
timeout-minutes: 5
run: |
while [ "$(kubectl get StatefulSet -l app.kubernetes.io/name=emqx -o jsonpath='{.items[0].status.replicas}')" \ while [ "$(kubectl get StatefulSet -l app.kubernetes.io/name=emqx -o jsonpath='{.items[0].status.replicas}')" \
!= "$(kubectl get StatefulSet -l app.kubernetes.io/name=emqx -o jsonpath='{.items[0].status.readyReplicas}')" ]; do != "$(kubectl get StatefulSet -l app.kubernetes.io/name=emqx -o jsonpath='{.items[0].status.readyReplicas}')" ]; do
echo "=============================="; echo "==============================";
@ -130,6 +162,18 @@ jobs:
echo "waiting emqx started"; echo "waiting emqx started";
sleep 10; sleep 10;
done done
- name: Check ${{ matrix.kind[0]}} cluster
env:
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
timeout-minutes: 10
run: |
while
nodes="$(kubectl exec -i emqx-0 -- curl --silent --basic -u admin:public -X GET http://localhost:8081/api/v4/brokers | jq '.data|length')";
[ "$nodes" != "3" ];
do
echo "waiting emqx cluster scale"
sleep 1
done
- name: get emqx-0 pods log - name: get emqx-0 pods log
if: failure() if: failure()
env: env:
@ -180,7 +224,7 @@ jobs:
relup_test_plan: relup_test_plan:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
container: emqx/build-env:erl23.2.7.2-emqx-3-ubuntu20.04 container: ghcr.io/emqx/emqx-builder/4.4-2:23.3.4.9-3-ubuntu20.04
outputs: outputs:
profile: ${{ steps.profile-and-versions.outputs.profile }} profile: ${{ steps.profile-and-versions.outputs.profile }}
vsn: ${{ steps.profile-and-versions.outputs.vsn }} vsn: ${{ steps.profile-and-versions.outputs.vsn }}
@ -229,7 +273,7 @@ jobs:
relup_test_build: relup_test_build:
needs: relup_test_plan needs: relup_test_plan
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
container: emqx/build-env:erl23.2.7.2-emqx-3-ubuntu20.04 container: ghcr.io/emqx/emqx-builder/4.4-2:23.3.4.9-3-ubuntu20.04
defaults: defaults:
run: run:
shell: bash shell: bash
@ -273,7 +317,7 @@ jobs:
- relup_test_plan - relup_test_plan
- relup_test_build - relup_test_build
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
container: emqx/relup-test-env:erl23.2.7.2-emqx-3-ubuntu20.04 container: emqx/relup-test-env:erl23.2.7.2-emqx-2-ubuntu20.04
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
@ -316,6 +360,8 @@ jobs:
--var ONE_MORE_EMQX_PATH=$(pwd)/one_more_emqx \ --var ONE_MORE_EMQX_PATH=$(pwd)/one_more_emqx \
--var VSN="$VSN" \ --var VSN="$VSN" \
--var OLD_VSN="$OLD_VSN" \ --var OLD_VSN="$OLD_VSN" \
--var FROM_OTP_VSN="23.3.4.9-3" \
--var TO_OTP_VSN="23.3.4.9-3" \
emqx_built/.ci/fvt_tests/relup.lux emqx_built/.ci/fvt_tests/relup.lux
- uses: actions/upload-artifact@v2 - uses: actions/upload-artifact@v2
name: Save debug data name: Save debug data

View File

@ -10,7 +10,7 @@ on:
jobs: jobs:
run_static_analysis: run_static_analysis:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
container: emqx/build-env:erl23.2.7.2-emqx-3-ubuntu20.04 container: ghcr.io/emqx/emqx-builder/4.4-2:23.3.4.9-3-ubuntu20.04
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
@ -27,7 +27,7 @@ jobs:
run_proper_test: run_proper_test:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
container: emqx/build-env:erl23.2.7.2-emqx-3-ubuntu20.04 container: ghcr.io/emqx/emqx-builder/4.4-2:23.3.4.9-3-ubuntu20.04
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2

View File

@ -1 +1 @@
erlang 23.2.7.2-emqx-3 erlang 23.3.4.9-3

View File

@ -3,9 +3,14 @@ REBAR_VERSION = 3.14.3-emqx-8
REBAR = $(CURDIR)/rebar3 REBAR = $(CURDIR)/rebar3
BUILD = $(CURDIR)/build BUILD = $(CURDIR)/build
SCRIPTS = $(CURDIR)/scripts SCRIPTS = $(CURDIR)/scripts
export EMQX_DEFAULT_BUILDER = ghcr.io/emqx/emqx-builder/4.4-2:23.3.4.9-3-alpine3.14
export EMQX_DEFAULT_RUNNER = alpine:3.14
export OTP_VSN ?= $(shell $(CURDIR)/scripts/get-otp-vsn.sh)
export PKG_VSN ?= $(shell $(CURDIR)/pkg-vsn.sh) export PKG_VSN ?= $(shell $(CURDIR)/pkg-vsn.sh)
export EMQX_DESC ?= EMQ X export EMQX_DESC ?= EMQ X
export EMQX_CE_DASHBOARD_VERSION ?= v4.3.3 export EMQX_CE_DASHBOARD_VERSION ?= v4.3.3
export DOCKERFILE := deploy/docker/Dockerfile
export DOCKERFILE_TESTING := deploy/docker/Dockerfile.testing
ifeq ($(OS),Windows_NT) ifeq ($(OS),Windows_NT)
export REBAR_COLOR=none export REBAR_COLOR=none
endif endif
@ -93,6 +98,7 @@ $(PROFILES:%=clean-%):
.PHONY: clean-all .PHONY: clean-all
clean-all: clean-all:
@rm -f rebar.lock
@rm -rf _build @rm -rf _build
.PHONY: deps-all .PHONY: deps-all
@ -148,11 +154,30 @@ $1: $1-rel
endef endef
$(foreach pt,$(PKG_PROFILES),$(eval $(call gen-pkg-target,$(pt)))) $(foreach pt,$(PKG_PROFILES),$(eval $(call gen-pkg-target,$(pt))))
## docker target is to create docker instructions
.PHONY: $(REL_PROFILES:%=%-docker)
define gen-docker-target
$1-docker: $(COMMON_DEPS)
@$(BUILD) $1 docker
endef
ALL_ZIPS = $(REL_PROFILES)
$(foreach zt,$(ALL_ZIPS),$(eval $(call gen-docker-target,$(zt))))
## emqx-docker-testing
## emqx-ee-docker-testing
## is to directly copy a unzipped zip-package to a
## base image such as ubuntu20.04. Mostly for testing
.PHONY: $(REL_PROFILES:%=%-docker-testing)
define gen-docker-target-testing
$1-docker-testing: $(COMMON_DEPS)
@$(BUILD) $1 docker-testing
endef
ALL_ZIPS = $(REL_PROFILES)
$(foreach zt,$(ALL_ZIPS),$(eval $(call gen-docker-target-testing,$(zt))))
.PHONY: run .PHONY: run
run: $(PROFILE) quickrun run: $(PROFILE) quickrun
.PHONY: quickrun .PHONY: quickrun
quickrun: quickrun:
./_build/$(PROFILE)/rel/emqx/bin/emqx console ./_build/$(PROFILE)/rel/emqx/bin/emqx console
include docker.mk

View File

@ -7,6 +7,12 @@
## Value: single | unknown | sharded | rs ## Value: single | unknown | sharded | rs
auth.mongo.type = single auth.mongo.type = single
## Whether to use SRV and TXT records.
##
## Value: true | false
## Default: false
auth.mongo.srv_record = false
## The set name if type is rs. ## The set name if type is rs.
## ##
## Value: String ## Value: String
@ -37,7 +43,6 @@ auth.mongo.pool = 8
## MongoDB AuthSource ## MongoDB AuthSource
## ##
## Value: String ## Value: String
## Default: mqtt
## auth.mongo.auth_source = admin ## auth.mongo.auth_source = admin
## MongoDB database ## MongoDB database

View File

@ -6,8 +6,12 @@
{datatype, {enum, [single, unknown, sharded, rs]}} {datatype, {enum, [single, unknown, sharded, rs]}}
]}. ]}.
{mapping, "auth.mongo.srv_record", "emqx_auth_mongo.server", [
{default, false},
{datatype, {enum, [true, false]}}
]}.
{mapping, "auth.mongo.rs_set_name", "emqx_auth_mongo.server", [ {mapping, "auth.mongo.rs_set_name", "emqx_auth_mongo.server", [
{default, "mqtt"},
{datatype, string} {datatype, string}
]}. ]}.
@ -41,7 +45,6 @@
]}. ]}.
{mapping, "auth.mongo.auth_source", "emqx_auth_mongo.server", [ {mapping, "auth.mongo.auth_source", "emqx_auth_mongo.server", [
{default, "mqtt"},
{datatype, string} {datatype, string}
]}. ]}.
@ -101,9 +104,9 @@
]}. ]}.
{translation, "emqx_auth_mongo.server", fun(Conf) -> {translation, "emqx_auth_mongo.server", fun(Conf) ->
H = cuttlefish:conf_get("auth.mongo.server", Conf), SrvRecord = cuttlefish:conf_get("auth.mongo.srv_record", Conf, false),
Hosts = string:tokens(H, ","), Server = cuttlefish:conf_get("auth.mongo.server", Conf),
Type0 = cuttlefish:conf_get("auth.mongo.type", Conf), Type = cuttlefish:conf_get("auth.mongo.type", Conf),
Pool = cuttlefish:conf_get("auth.mongo.pool", Conf), Pool = cuttlefish:conf_get("auth.mongo.pool", Conf),
%% FIXME: compatible with 4.0-4.2 version format, plan to delete in 5.0 %% FIXME: compatible with 4.0-4.2 version format, plan to delete in 5.0
Login = cuttlefish:conf_get("auth.mongo.username", Conf, Login = cuttlefish:conf_get("auth.mongo.username", Conf,
@ -111,7 +114,10 @@
), ),
Passwd = cuttlefish:conf_get("auth.mongo.password", Conf), Passwd = cuttlefish:conf_get("auth.mongo.password", Conf),
DB = cuttlefish:conf_get("auth.mongo.database", Conf), DB = cuttlefish:conf_get("auth.mongo.database", Conf),
AuthSrc = cuttlefish:conf_get("auth.mongo.auth_source", Conf), AuthSource = case cuttlefish:conf_get("auth.mongo.auth_source", Conf, undefined) of
undefined -> [];
AuthSource0 -> [{auth_source, list_to_binary(AuthSource0)}]
end,
R = cuttlefish:conf_get("auth.mongo.w_mode", Conf), R = cuttlefish:conf_get("auth.mongo.w_mode", Conf),
W = cuttlefish:conf_get("auth.mongo.r_mode", Conf), W = cuttlefish:conf_get("auth.mongo.r_mode", Conf),
Login0 = case Login =:= [] of Login0 = case Login =:= [] of
@ -156,8 +162,8 @@
false -> [] false -> []
end, end,
WorkerOptions = [{database, list_to_binary(DB)}, {auth_source, list_to_binary(AuthSrc)}] WorkerOptions = [{database, list_to_binary(DB)}]
++ Login0 ++ Passwd0 ++ W0 ++ R0 ++ Ssl, ++ Login0 ++ Passwd0 ++ W0 ++ R0 ++ Ssl ++ AuthSource,
Vars = cuttlefish_variable:fuzzy_matches(["auth", "mongo", "topology", "$name"], Conf), Vars = cuttlefish_variable:fuzzy_matches(["auth", "mongo", "topology", "$name"], Conf),
Options = lists:map(fun({_, Name}) -> Options = lists:map(fun({_, Name}) ->
@ -174,16 +180,17 @@
{list_to_atom(Name2), cuttlefish:conf_get("auth.mongo.topology."++Name, Conf)} {list_to_atom(Name2), cuttlefish:conf_get("auth.mongo.topology."++Name, Conf)}
end, Vars), end, Vars),
Type = case Type0 =:= rs of ReplicaSet = case cuttlefish:conf_get("auth.mongo.rs_set_name", Conf, undefined) of
true -> {Type0, list_to_binary(cuttlefish:conf_get("auth.mongo.rs_set_name", Conf))}; undefined -> [];
false -> Type0 ReplicaSet0 -> [{rs_set_name, list_to_binary(ReplicaSet0)}]
end, end,
[{type, Type}, [{srv_record, SrvRecord},
{hosts, Hosts}, {type, Type},
{server, Server},
{options, Options}, {options, Options},
{worker_options, WorkerOptions}, {worker_options, WorkerOptions},
{auto_reconnect, 1}, {auto_reconnect, 1},
{pool_size, Pool}] {pool_size, Pool}] ++ ReplicaSet
end}. end}.
%% The mongodb operation timeout is specified by the value of `cursor_timeout` from application config, %% The mongodb operation timeout is specified by the value of `cursor_timeout` from application config,

View File

@ -1,6 +1,6 @@
{application, emqx_auth_mongo, {application, emqx_auth_mongo,
[{description, "EMQ X Authentication/ACL with MongoDB"}, [{description, "EMQ X Authentication/ACL with MongoDB"},
{vsn, "4.3.0"}, % strict semver, bump manually! {vsn, "4.4.0"}, % strict semver, bump manually!
{modules, []}, {modules, []},
{registered, [emqx_auth_mongo_sup]}, {registered, [emqx_auth_mongo_sup]},
{applications, [kernel,stdlib,mongodb,ecpool]}, {applications, [kernel,stdlib,mongodb,ecpool]},

View File

@ -28,7 +28,96 @@ start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []). supervisor:start_link({local, ?MODULE}, ?MODULE, []).
init([]) -> init([]) ->
{ok, PoolEnv} = application:get_env(?APP, server), {ok, Opts} = application:get_env(?APP, server),
PoolSpec = ecpool:pool_spec(?APP, ?APP, ?APP, PoolEnv), NOpts = may_parse_srv_and_txt_records(Opts),
PoolSpec = ecpool:pool_spec(?APP, ?APP, ?APP, NOpts),
{ok, {{one_for_all, 10, 100}, [PoolSpec]}}. {ok, {{one_for_all, 10, 100}, [PoolSpec]}}.
may_parse_srv_and_txt_records(Opts) when is_list(Opts) ->
maps:to_list(may_parse_srv_and_txt_records(maps:from_list(Opts)));
may_parse_srv_and_txt_records(#{type := Type,
srv_record := false,
server := Server} = Opts) ->
Hosts = to_hosts(Server),
case Type =:= rs of
true ->
case maps:get(rs_set_name, Opts, undefined) of
undefined ->
error({missing_parameter, rs_set_name});
ReplicaSet ->
Opts#{type => {rs, ReplicaSet},
hosts => Hosts}
end;
false ->
Opts#{hosts => Hosts}
end;
may_parse_srv_and_txt_records(#{type := Type,
srv_record := true,
server := Server,
worker_options := WorkerOptions} = Opts) ->
Hosts = parse_srv_records(Server),
Opts0 = parse_txt_records(Type, Server),
NWorkerOptions = maps:to_list(maps:merge(maps:from_list(WorkerOptions), maps:with([auth_source], Opts0))),
NOpts = Opts#{hosts => Hosts, worker_options => NWorkerOptions},
case Type =:= rs of
true ->
case maps:get(rs_set_name, Opts0, maps:get(rs_set_name, NOpts, undefined)) of
undefined ->
error({missing_parameter, rs_set_name});
ReplicaSet ->
NOpts#{type => {Type, ReplicaSet}}
end;
false ->
NOpts
end.
to_hosts(Server) ->
[string:trim(H) || H <- string:tokens(Server, ",")].
parse_srv_records(Server) ->
case inet_res:lookup("_mongodb._tcp." ++ Server, in, srv) of
[] ->
error(service_not_found);
Services ->
[Host ++ ":" ++ integer_to_list(Port) || {_, _, Port, Host} <- Services]
end.
parse_txt_records(Type, Server) ->
case inet_res:lookup(Server, in, txt) of
[] ->
#{};
[[QueryString]] ->
case uri_string:dissect_query(QueryString) of
{error, _, _} ->
error({invalid_txt_record, invalid_query_string});
Options ->
Fields = case Type of
rs -> ["authSource", "replicaSet"];
_ -> ["authSource"]
end,
take_and_convert(Fields, Options)
end;
_ ->
error({invalid_txt_record, multiple_records})
end.
take_and_convert(Fields, Options) ->
take_and_convert(Fields, Options, #{}).
take_and_convert([], [_ | _], _Acc) ->
error({invalid_txt_record, invalid_option});
take_and_convert([], [], Acc) ->
Acc;
take_and_convert([Field | More], Options, Acc) ->
case lists:keytake(Field, 1, Options) of
{value, {"authSource", V}, NOptions} ->
take_and_convert(More, NOptions, Acc#{auth_source => list_to_binary(V)});
{value, {"replicaSet", V}, NOptions} ->
take_and_convert(More, NOptions, Acc#{rs_set_name => list_to_binary(V)});
{value, _, _} ->
error({invalid_txt_record, invalid_option});
false ->
take_and_convert(More, Options, Acc)
end.

View File

@ -24,6 +24,11 @@
## Value: false | Duration ## Value: false | Duration
#exhook.auto_reconnect = 60s #exhook.auto_reconnect = 60s
## The process pool size for gRPC client
##
## Default: Equals cpu cores
## Value: Integer
#exhook.pool_size = 16
##-------------------------------------------------------------------- ##--------------------------------------------------------------------
## The Hook callback servers ## The Hook callback servers

View File

@ -26,6 +26,10 @@
end end
end}. end}.
{mapping, "exhook.pool_size", "emqx_exhook.pool_size", [
{datatype, integer}
]}.
{mapping, "exhook.server.$name.url", "emqx_exhook.servers", [ {mapping, "exhook.server.$name.url", "emqx_exhook.servers", [
{datatype, string} {datatype, string}
]}. ]}.

View File

@ -358,6 +358,31 @@ message Message {
bytes payload = 6; bytes payload = 6;
uint64 timestamp = 7; uint64 timestamp = 7;
// The key of header can be:
// - username:
// * Readonly
// * The username of sender client
// * Value type: utf8 string
// - protocol:
// * Readonly
// * The protocol name of sender client
// * Value type: string enum with "mqtt", "mqtt-sn", ...
// - peerhost:
// * Readonly
// * The peerhost of sender client
// * Value type: ip address string
// - allow_publish:
// * Writable
// * Whether to allow the message to be published by emqx
// * Value type: string enum with "true", "false", default is "true"
//
// Notes: All header may be missing, which means that the message does not
// carry these headers. We can guarantee that clients coming from MQTT,
// MQTT-SN, CoAP, LwM2M and other natively supported protocol clients will
// carry these headers, but there is no guarantee that messages published
// by other means will do, e.g. messages published by HTTP-API
map<string, string> headers = 8;
} }
message Property { message Property {

View File

@ -5,7 +5,7 @@
]}. ]}.
{deps, {deps,
[{grpc, {git, "https://github.com/emqx/grpc-erl", {tag, "0.6.3"}}} [{grpc, {git, "https://github.com/emqx/grpc-erl", {tag, "0.6.4"}}}
]}. ]}.
{grpc, {grpc,

View File

@ -1,6 +1,6 @@
{application, emqx_exhook, {application, emqx_exhook,
[{description, "EMQ X Extension for Hook"}, [{description, "EMQ X Extension for Hook"},
{vsn, "4.3.4"}, {vsn, "4.4.0"},
{modules, []}, {modules, []},
{registered, []}, {registered, []},
{mod, {emqx_exhook_app, []}}, {mod, {emqx_exhook_app, []}},

View File

@ -1,15 +1,7 @@
%% -*-: erlang -*- %% -*-: erlang -*-
{VSN, {VSN,
[ [{<<".*">>, []}
{<<"4.3.[0-3]">>, [
{restart_application, emqx_exhook}
]},
{<<".*">>, []}
], ],
[ [{<<".*">>, []}
{<<"4.3.[0-3]">>, [
{restart_application, emqx_exhook}
]},
{<<".*">>, []}
] ]
}. }.

View File

@ -50,6 +50,7 @@
%% Utils %% Utils
-export([ message/1 -export([ message/1
, headers/1
, stringfy/1 , stringfy/1
, merge_responsed_bool/2 , merge_responsed_bool/2
, merge_responsed_message/2 , merge_responsed_message/2
@ -62,6 +63,8 @@
, call_fold/3 , call_fold/3
]). ]).
-elvis([{elvis_style, god_modules, disable}]).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Clients %% Clients
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -258,17 +261,58 @@ clientinfo(ClientInfo =
cn => maybe(maps:get(cn, ClientInfo, undefined)), cn => maybe(maps:get(cn, ClientInfo, undefined)),
dn => maybe(maps:get(dn, ClientInfo, undefined))}. dn => maybe(maps:get(dn, ClientInfo, undefined))}.
message(#message{id = Id, qos = Qos, from = From, topic = Topic, payload = Payload, timestamp = Ts}) -> message(#message{id = Id, qos = Qos, from = From, topic = Topic,
payload = Payload, timestamp = Ts, headers = Headers}) ->
#{node => stringfy(node()), #{node => stringfy(node()),
id => emqx_guid:to_hexstr(Id), id => emqx_guid:to_hexstr(Id),
qos => Qos, qos => Qos,
from => stringfy(From), from => stringfy(From),
topic => Topic, topic => Topic,
payload => Payload, payload => Payload,
timestamp => Ts}. timestamp => Ts,
headers => headers(Headers)
}.
assign_to_message(#{qos := Qos, topic := Topic, payload := Payload}, Message) -> headers(Headers) ->
Message#message{qos = Qos, topic = Topic, payload = Payload}. Ls = [username, protocol, peerhost, allow_publish],
maps:fold(
fun
(_, undefined, Acc) ->
Acc; %% Ignore undefined value
(K, V, Acc) ->
case lists:member(K, Ls) of
true ->
Acc#{atom_to_binary(K) => bin(K, V)};
_ ->
Acc
end
end, #{}, Headers).
bin(K, V) when K == username;
K == protocol;
K == allow_publish ->
bin(V);
bin(peerhost, V) ->
bin(inet:ntoa(V)).
bin(V) when is_binary(V) -> V;
bin(V) when is_atom(V) -> atom_to_binary(V);
bin(V) when is_list(V) -> iolist_to_binary(V).
assign_to_message(InMessage = #{qos := Qos, topic := Topic,
payload := Payload}, Message) ->
NMsg = Message#message{qos = Qos, topic = Topic, payload = Payload},
enrich_header(maps:get(headers, InMessage, #{}), NMsg).
enrich_header(Headers, Message) ->
case maps:get(<<"allow_publish">>, Headers, undefined) of
<<"false">> ->
emqx_message:set_header(allow_publish, false, Message);
<<"true">> ->
emqx_message:set_header(allow_publish, true, Message);
_ ->
Message
end.
topicfilters(Tfs) when is_list(Tfs) -> topicfilters(Tfs) when is_list(Tfs) ->
[#{name => Topic, qos => Qos} || {Topic, #{qos := Qos}} <- Tfs]. [#{name => Topic, qos => Qos} || {Topic, #{qos := Qos}} <- Tfs].
@ -299,11 +343,7 @@ merge_responsed_bool(_Req, #{type := 'IGNORE'}) ->
ignore; ignore;
merge_responsed_bool(Req, #{type := Type, value := {bool_result, NewBool}}) merge_responsed_bool(Req, #{type := Type, value := {bool_result, NewBool}})
when is_boolean(NewBool) -> when is_boolean(NewBool) ->
NReq = Req#{result => NewBool}, {ret(Type), Req#{result => NewBool}};
case Type of
'CONTINUE' -> {ok, NReq};
'STOP_AND_RETURN' -> {stop, NReq}
end;
merge_responsed_bool(_Req, Resp) -> merge_responsed_bool(_Req, Resp) ->
?LOG(warning, "Unknown responsed value ~0p to merge to callback chain", [Resp]), ?LOG(warning, "Unknown responsed value ~0p to merge to callback chain", [Resp]),
ignore. ignore.
@ -311,11 +351,10 @@ merge_responsed_bool(_Req, Resp) ->
merge_responsed_message(_Req, #{type := 'IGNORE'}) -> merge_responsed_message(_Req, #{type := 'IGNORE'}) ->
ignore; ignore;
merge_responsed_message(Req, #{type := Type, value := {message, NMessage}}) -> merge_responsed_message(Req, #{type := Type, value := {message, NMessage}}) ->
NReq = Req#{message => NMessage}, {ret(Type), Req#{message => NMessage}};
case Type of
'CONTINUE' -> {ok, NReq};
'STOP_AND_RETURN' -> {stop, NReq}
end;
merge_responsed_message(_Req, Resp) -> merge_responsed_message(_Req, Resp) ->
?LOG(warning, "Unknown responsed value ~0p to merge to callback chain", [Resp]), ?LOG(warning, "Unknown responsed value ~0p to merge to callback chain", [Resp]),
ignore. ignore.
ret('CONTINUE') -> ok;
ret('STOP_AND_RETURN') -> stop.

View File

@ -36,6 +36,8 @@
, server/1 , server/1
, put_request_failed_action/1 , put_request_failed_action/1
, get_request_failed_action/0 , get_request_failed_action/0
, put_pool_size/1
, get_pool_size/0
]). ]).
%% gen_server callbacks %% gen_server callbacks
@ -84,11 +86,11 @@
start_link(Servers, AutoReconnect, ReqOpts) -> start_link(Servers, AutoReconnect, ReqOpts) ->
gen_server:start_link(?MODULE, [Servers, AutoReconnect, ReqOpts], []). gen_server:start_link(?MODULE, [Servers, AutoReconnect, ReqOpts], []).
-spec enable(pid(), atom()|string()) -> ok | {error, term()}. -spec enable(pid(), atom() | string()) -> ok | {error, term()}.
enable(Pid, Name) -> enable(Pid, Name) ->
call(Pid, {load, Name}). call(Pid, {load, Name}).
-spec disable(pid(), atom()|string()) -> ok | {error, term()}. -spec disable(pid(), atom() | string()) -> ok | {error, term()}.
disable(Pid, Name) -> disable(Pid, Name) ->
call(Pid, {unload, Name}). call(Pid, {unload, Name}).
@ -117,6 +119,9 @@ init([Servers, AutoReconnect, ReqOpts0]) ->
put_request_failed_action( put_request_failed_action(
maps:get(request_failed_action, ReqOpts0, deny) maps:get(request_failed_action, ReqOpts0, deny)
), ),
put_pool_size(
maps:get(pool_size, ReqOpts0, erlang:system_info(schedulers))
),
%% Load the hook servers %% Load the hook servers
ReqOpts = maps:without([request_failed_action], ReqOpts0), ReqOpts = maps:without([request_failed_action], ReqOpts0),
@ -136,7 +141,7 @@ load_all_servers(Servers, ReqOpts) ->
load_all_servers(Servers, ReqOpts, #{}, #{}). load_all_servers(Servers, ReqOpts, #{}, #{}).
load_all_servers([], _Request, Waiting, Running) -> load_all_servers([], _Request, Waiting, Running) ->
{Waiting, Running}; {Waiting, Running};
load_all_servers([{Name, Options}|More], ReqOpts, Waiting, Running) -> load_all_servers([{Name, Options} | More], ReqOpts, Waiting, Running) ->
{NWaiting, NRunning} = {NWaiting, NRunning} =
case emqx_exhook_server:load(Name, Options, ReqOpts) of case emqx_exhook_server:load(Name, Options, ReqOpts) of
{ok, ServerState} -> {ok, ServerState} ->
@ -286,6 +291,14 @@ put_request_failed_action(Val) ->
get_request_failed_action() -> get_request_failed_action() ->
persistent_term:get({?APP, request_failed_action}). persistent_term:get({?APP, request_failed_action}).
put_pool_size(Val) ->
persistent_term:put({?APP, pool_size}, Val).
get_pool_size() ->
%% Avoid the scenario that the parameter is not set after
%% the hot upgrade completed.
persistent_term:get({?APP, pool_size}, erlang:system_info(schedulers)).
save(Name, ServerState) -> save(Name, ServerState) ->
Saved = persistent_term:get(?APP, []), Saved = persistent_term:get(?APP, []),
persistent_term:put(?APP, lists:reverse([Name | Saved])), persistent_term:put(?APP, lists:reverse([Name | Saved])),

View File

@ -77,6 +77,8 @@
-dialyzer({nowarn_function, [inc_metrics/2]}). -dialyzer({nowarn_function, [inc_metrics/2]}).
-elvis([{elvis_style, dont_repeat_yourself, disable}]).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Load/Unload APIs %% Load/Unload APIs
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -125,13 +127,18 @@ channel_opts(Opts) ->
SvrAddr = format_http_uri(Scheme, Host, Port), SvrAddr = format_http_uri(Scheme, Host, Port),
ClientOpts = case Scheme of ClientOpts = case Scheme of
https -> https ->
SslOpts = lists:keydelete(ssl, 1, proplists:get_value(ssl_options, Opts, [])), SslOpts = lists:keydelete(
ssl,
1,
proplists:get_value(ssl_options, Opts, [])
),
#{gun_opts => #{gun_opts =>
#{transport => ssl, #{transport => ssl,
transport_opts => SslOpts}}; transport_opts => SslOpts}};
_ -> #{} _ -> #{}
end, end,
{SvrAddr, ClientOpts}. NClientOpts = ClientOpts#{pool_size => emqx_exhook_mngr:get_pool_size()},
{SvrAddr, NClientOpts}.
format_http_uri(Scheme, Host0, Port) -> format_http_uri(Scheme, Host0, Port) ->
Host = case is_tuple(Host0) of Host = case is_tuple(Host0) of
@ -174,16 +181,18 @@ resovle_hookspec(HookSpecs) when is_list(HookSpecs) ->
case maps:get(name, HookSpec, undefined) of case maps:get(name, HookSpec, undefined) of
undefined -> Acc; undefined -> Acc;
Name0 -> Name0 ->
Name = try binary_to_existing_atom(Name0, utf8) catch T:R:_ -> {T,R} end, Name = try
case lists:member(Name, AvailableHooks) of binary_to_existing_atom(Name0, utf8)
true -> catch T:R -> {T,R}
case lists:member(Name, MessageHooks) of end,
true -> case {lists:member(Name, AvailableHooks),
Acc#{Name => #{topics => maps:get(topics, HookSpec, [])}}; lists:member(Name, MessageHooks)} of
_ -> {false, _} ->
Acc#{Name => #{}} error({unknown_hookpoint, Name});
end; {true, false} ->
_ -> error({unknown_hookpoint, Name}) Acc#{Name => #{}};
{true, true} ->
Acc#{Name => #{topics => maps:get(topics, HookSpec, [])}}
end end
end end
end, #{}, HookSpecs). end, #{}, HookSpecs).
@ -255,7 +264,7 @@ call(Hookpoint, Req, #server{name = ChannName, options = ReqOpts,
%% @private %% @private
inc_metrics(IncFun, Name) when is_function(IncFun) -> inc_metrics(IncFun, Name) when is_function(IncFun) ->
%% BACKW: e4.2.0-e4.2.2 %% BACKW: e4.2.0-e4.2.2
{env, [Prefix|_]} = erlang:fun_info(IncFun, env), {env, [Prefix | _]} = erlang:fun_info(IncFun, env),
inc_metrics(Prefix, Name); inc_metrics(Prefix, Name);
inc_metrics(Prefix, Name) when is_list(Prefix) -> inc_metrics(Prefix, Name) when is_list(Prefix) ->
emqx_metrics:inc(list_to_atom(Prefix ++ atom_to_list(Name))). emqx_metrics:inc(list_to_atom(Prefix ++ atom_to_list(Name))).
@ -271,8 +280,8 @@ do_call(ChannName, Fun, Req, ReqOpts) ->
Options = ReqOpts#{channel => ChannName}, Options = ReqOpts#{channel => ChannName},
?LOG(debug, "Call ~0p:~0p(~0p, ~0p)", [?PB_CLIENT_MOD, Fun, Req, Options]), ?LOG(debug, "Call ~0p:~0p(~0p, ~0p)", [?PB_CLIENT_MOD, Fun, Req, Options]),
case catch apply(?PB_CLIENT_MOD, Fun, [Req, Options]) of case catch apply(?PB_CLIENT_MOD, Fun, [Req, Options]) of
{ok, Resp, _Metadata} -> {ok, Resp, Metadata} ->
?LOG(debug, "Response {ok, ~0p, ~0p}", [Resp, _Metadata]), ?LOG(debug, "Response {ok, ~0p, ~0p}", [Resp, Metadata]),
{ok, Resp}; {ok, Resp};
{error, {Code, Msg}, _Metadata} -> {error, {Code, Msg}, _Metadata} ->
?LOG(error, "CALL ~0p:~0p(~0p, ~0p) response errcode: ~0p, errmsg: ~0p", ?LOG(error, "CALL ~0p:~0p(~0p, ~0p) response errcode: ~0p, errmsg: ~0p",

View File

@ -54,7 +54,8 @@ auto_reconnect() ->
request_options() -> request_options() ->
#{timeout => env(request_timeout, 5000), #{timeout => env(request_timeout, 5000),
request_failed_action => env(request_failed_action, deny) request_failed_action => env(request_failed_action, deny),
pool_size => env(pool_size, erlang:system_info(schedulers))
}. }.
env(Key, Def) -> env(Key, Def) ->
@ -67,7 +68,7 @@ env(Key, Def) ->
-spec start_grpc_client_channel( -spec start_grpc_client_channel(
string(), string(),
uri_string:uri_string(), uri_string:uri_string(),
grpc_client:options()) -> {ok, pid()} | {error, term()}. grpc_client_sup:options()) -> {ok, pid()} | {error, term()}.
start_grpc_client_channel(Name, SvrAddr, Options) -> start_grpc_client_channel(Name, SvrAddr, Options) ->
grpc_client_sup:create_channel_pool(Name, SvrAddr, Options). grpc_client_sup:create_channel_pool(Name, SvrAddr, Options).

View File

@ -299,21 +299,31 @@ on_message_publish(#{message := #{from := From} = Msg} = Req, Md) ->
%% some cases for testing %% some cases for testing
case From of case From of
<<"baduser">> -> <<"baduser">> ->
NMsg = Msg#{qos => 0, NMsg = deny(Msg#{qos => 0,
topic => <<"">>, topic => <<"">>,
payload => <<"">> payload => <<"">>
}, }),
{ok, #{type => 'STOP_AND_RETURN', {ok, #{type => 'STOP_AND_RETURN',
value => {message, NMsg}}, Md}; value => {message, NMsg}}, Md};
<<"gooduser">> -> <<"gooduser">> ->
NMsg = Msg#{topic => From, NMsg = allow(Msg#{topic => From,
payload => From}, payload => From}),
{ok, #{type => 'STOP_AND_RETURN', {ok, #{type => 'STOP_AND_RETURN',
value => {message, NMsg}}, Md}; value => {message, NMsg}}, Md};
_ -> _ ->
{ok, #{type => 'IGNORE'}, Md} {ok, #{type => 'IGNORE'}, Md}
end. end.
deny(Msg) ->
NHeader = maps:put(<<"allow_publish">>, <<"false">>,
maps:get(headers, Msg, #{})),
maps:put(headers, NHeader, Msg).
allow(Msg) ->
NHeader = maps:put(<<"allow_publish">>, <<"true">>,
maps:get(headers, Msg, #{})),
maps:put(headers, NHeader, Msg).
-spec on_message_delivered(emqx_exhook_pb:message_delivered_request(), grpc:metadata()) -spec on_message_delivered(emqx_exhook_pb:message_delivered_request(), grpc:metadata())
-> {ok, emqx_exhook_pb:empty_success(), grpc:metadata()} -> {ok, emqx_exhook_pb:empty_success(), grpc:metadata()}
| {error, grpc_cowboy_h:error_response()}. | {error, grpc_cowboy_h:error_response()}.

View File

@ -299,19 +299,24 @@ prop_message_publish() ->
_ -> _ ->
ExpectedOutMsg = case emqx_message:from(Msg) of ExpectedOutMsg = case emqx_message:from(Msg) of
<<"baduser">> -> <<"baduser">> ->
MsgMap = emqx_message:to_map(Msg), MsgMap = #{headers := Headers}
= emqx_message:to_map(Msg),
emqx_message:from_map( emqx_message:from_map(
MsgMap#{qos => 0, MsgMap#{qos => 0,
topic => <<"">>, topic => <<"">>,
payload => <<"">> payload => <<"">>,
headers => maps:put(allow_publish, false, Headers)
}); });
<<"gooduser">> = From -> <<"gooduser">> = From ->
MsgMap = emqx_message:to_map(Msg), MsgMap = #{headers := Headers}
= emqx_message:to_map(Msg),
emqx_message:from_map( emqx_message:from_map(
MsgMap#{topic => From, MsgMap#{topic => From,
payload => From payload => From,
headers => maps:put(allow_publish, true, Headers)
}); });
_ -> Msg _ ->
Msg
end, end,
?assertEqual(ExpectedOutMsg, OutMsg), ?assertEqual(ExpectedOutMsg, OutMsg),
@ -464,7 +469,9 @@ from_message(Msg) ->
from => stringfy(emqx_message:from(Msg)), from => stringfy(emqx_message:from(Msg)),
topic => emqx_message:topic(Msg), topic => emqx_message:topic(Msg),
payload => emqx_message:payload(Msg), payload => emqx_message:payload(Msg),
timestamp => emqx_message:timestamp(Msg) timestamp => emqx_message:timestamp(Msg),
headers => emqx_exhook_handler:headers(
emqx_message:get_headers(Msg))
}. }.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------

View File

@ -13,7 +13,7 @@
]}. ]}.
{deps, {deps,
[{grpc, {git, "https://github.com/emqx/grpc-erl", {tag, "0.6.3"}}} [{grpc, {git, "https://github.com/emqx/grpc-erl", {tag, "0.6.4"}}}
]}. ]}.
{grpc, {grpc,

View File

@ -32,4 +32,4 @@
-define(ERROR14, 114). %% OldPassword error -define(ERROR14, 114). %% OldPassword error
-define(ERROR15, 115). %% bad topic -define(ERROR15, 115). %% bad topic
-define(VERSIONS, ["4.0", "4.1", "4.2", "4.3"]). -define(VERSIONS, ["4.0", "4.1", "4.2", "4.3", "4.4"]).

View File

@ -1,6 +1,6 @@
{application, emqx_management, {application, emqx_management,
[{description, "EMQ X Management API and CLI"}, [{description, "EMQ X Management API and CLI"},
{vsn, "4.3.9"}, % strict semver, bump manually! {vsn, "4.4.0"}, % strict semver, bump manually!
{modules, []}, {modules, []},
{registered, [emqx_management_sup]}, {registered, [emqx_management_sup]},
{applications, [kernel,stdlib,minirest]}, {applications, [kernel,stdlib,minirest]},

View File

@ -22,6 +22,9 @@
-include_lib("emqx/include/emqx.hrl"). -include_lib("emqx/include/emqx.hrl").
-include_lib("emqx/include/emqx_mqtt.hrl"). -include_lib("emqx/include/emqx_mqtt.hrl").
-elvis([{elvis_style, invalid_dynamic_call, #{ignore => [emqx_mgmt]}}]).
-elvis([{elvis_style, god_modules, #{ignore => [emqx_mgmt]}}]).
%% Nodes and Brokers API %% Nodes and Brokers API
-export([ list_nodes/0 -export([ list_nodes/0
, lookup_node/1 , lookup_node/1
@ -49,6 +52,7 @@
, clean_acl_cache_all/1 , clean_acl_cache_all/1
, set_ratelimit_policy/2 , set_ratelimit_policy/2
, set_quota_policy/2 , set_quota_policy/2
, set_keepalive/2
]). ]).
%% Internal funcs %% Internal funcs
@ -143,9 +147,8 @@ node_info(Node) when Node =:= node() ->
memory_used => proplists:get_value(used, Memory), memory_used => proplists:get_value(used, Memory),
process_available => erlang:system_info(process_limit), process_available => erlang:system_info(process_limit),
process_used => erlang:system_info(process_count), process_used => erlang:system_info(process_count),
max_fds => max_fds => proplists:get_value(max_fds,
proplists:get_value( max_fds lists:usort(lists:flatten(erlang:system_info(check_io)))),
, lists:usort(lists:flatten(erlang:system_info(check_io)))),
connections => ets:info(emqx_channel, size), connections => ets:info(emqx_channel, size),
node_status => 'Running', node_status => 'Running',
uptime => iolist_to_binary(proplists:get_value(uptime, BrokerInfo)), uptime => iolist_to_binary(proplists:get_value(uptime, BrokerInfo)),
@ -227,7 +230,7 @@ lookup_client(Node, {username, Username}, FormatFun) ->
kickout_client(ClientId) -> kickout_client(ClientId) ->
Results = [kickout_client(Node, ClientId) || Node <- ekka_mnesia:running_nodes()], Results = [kickout_client(Node, ClientId) || Node <- ekka_mnesia:running_nodes()],
check_every_ok(Results). has_any_ok(Results).
kickout_client(Node, ClientId) when Node =:= node() -> kickout_client(Node, ClientId) when Node =:= node() ->
emqx_cm:kick_session(ClientId); emqx_cm:kick_session(ClientId);
@ -240,7 +243,7 @@ list_acl_cache(ClientId) ->
clean_acl_cache(ClientId) -> clean_acl_cache(ClientId) ->
Results = [clean_acl_cache(Node, ClientId) || Node <- ekka_mnesia:running_nodes()], Results = [clean_acl_cache(Node, ClientId) || Node <- ekka_mnesia:running_nodes()],
check_every_ok(Results). has_any_ok(Results).
clean_acl_cache(Node, ClientId) when Node =:= node() -> clean_acl_cache(Node, ClientId) when Node =:= node() ->
case emqx_cm:lookup_channels(ClientId) of case emqx_cm:lookup_channels(ClientId) of
@ -272,6 +275,11 @@ set_ratelimit_policy(ClientId, Policy) ->
set_quota_policy(ClientId, Policy) -> set_quota_policy(ClientId, Policy) ->
call_client(ClientId, {quota, Policy}). call_client(ClientId, {quota, Policy}).
set_keepalive(ClientId, Interval)when Interval >= 0 andalso Interval =< 65535 ->
call_client(ClientId, {keepalive, Interval});
set_keepalive(_ClientId, _Interval) ->
{error, ?ERROR2, <<"mqtt3.1.1 specification: keepalive must between 0~65535">>}.
%% @private %% @private
call_client(ClientId, Req) -> call_client(ClientId, Req) ->
Results = [call_client(Node, ClientId, Req) || Node <- ekka_mnesia:running_nodes()], Results = [call_client(Node, ClientId, Req) || Node <- ekka_mnesia:running_nodes()],
@ -315,6 +323,7 @@ list_subscriptions_via_topic(Topic, FormatFun) ->
lists:append([list_subscriptions_via_topic(Node, Topic, FormatFun) lists:append([list_subscriptions_via_topic(Node, Topic, FormatFun)
|| Node <- ekka_mnesia:running_nodes()]). || Node <- ekka_mnesia:running_nodes()]).
list_subscriptions_via_topic(Node, Topic, {M,F}) when Node =:= node() -> list_subscriptions_via_topic(Node, Topic, {M,F}) when Node =:= node() ->
MatchSpec = [{{{'_', '$1'}, '_'}, [{'=:=','$1', Topic}], ['$_']}], MatchSpec = [{{{'_', '$1'}, '_'}, [{'=:=','$1', Topic}], ['$_']}],
erlang:apply(M, F, [ets:select(emqx_suboption, MatchSpec)]); erlang:apply(M, F, [ets:select(emqx_suboption, MatchSpec)]);
@ -436,8 +445,8 @@ list_listeners(Node) when Node =:= node() ->
Http = lists:map(fun({Protocol, Opts}) -> Http = lists:map(fun({Protocol, Opts}) ->
#{protocol => Protocol, #{protocol => Protocol,
listen_on => proplists:get_value(port, Opts), listen_on => proplists:get_value(port, Opts),
acceptors => maps:get( num_acceptors acceptors => maps:get(num_acceptors,
, proplists:get_value(transport_options, Opts, #{}), 0), proplists:get_value(transport_options, Opts, #{}), 0),
max_conns => proplists:get_value(max_connections, Opts), max_conns => proplists:get_value(max_connections, Opts),
current_conns => proplists:get_value(all_connections, Opts), current_conns => proplists:get_value(all_connections, Opts),
shutdown_count => []} shutdown_count => []}
@ -486,10 +495,8 @@ add_duration_field([], _Now, Acc) ->
Acc; Acc;
add_duration_field([Alarm = #{activated := true, activate_at := ActivateAt} | Rest], Now, Acc) -> add_duration_field([Alarm = #{activated := true, activate_at := ActivateAt} | Rest], Now, Acc) ->
add_duration_field(Rest, Now, [Alarm#{duration => Now - ActivateAt} | Acc]); add_duration_field(Rest, Now, [Alarm#{duration => Now - ActivateAt} | Acc]);
add_duration_field([Alarm = #{ activated := false add_duration_field([Alarm = #{activated := false,
, activate_at := ActivateAt activate_at := ActivateAt, deactivate_at := DeactivateAt} | Rest], Now, Acc) ->
, deactivate_at := DeactivateAt}
| Rest], Now, Acc) ->
add_duration_field(Rest, Now, [Alarm#{duration => DeactivateAt - ActivateAt} | Acc]). add_duration_field(Rest, Now, [Alarm#{duration => DeactivateAt - ActivateAt} | Acc]).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -570,13 +577,13 @@ check_row_limit([Tab | Tables], Limit) ->
false -> check_row_limit(Tables, Limit) false -> check_row_limit(Tables, Limit)
end. end.
check_every_ok(Results) ->
case lists:any(fun(Item) -> Item =:= ok end, Results) of
true -> ok;
false -> lists:last(Results)
end.
max_row_limit() -> max_row_limit() ->
application:get_env(?APP, max_row_limit, ?MAX_ROW_LIMIT). application:get_env(?APP, max_row_limit, ?MAX_ROW_LIMIT).
table_size(Tab) -> ets:info(Tab, size). table_size(Tab) -> ets:info(Tab, size).
has_any_ok(Results) ->
case lists:any(fun(Item) -> Item =:= ok end, Results) of
true -> ok;
false -> lists:last(Results)
end.

View File

@ -49,17 +49,46 @@ paginate(Tables, Params, RowFun) ->
query_handle(Table) when is_atom(Table) -> query_handle(Table) when is_atom(Table) ->
qlc:q([R|| R <- ets:table(Table)]); qlc:q([R|| R <- ets:table(Table)]);
query_handle({Table, Opts}) when is_atom(Table) ->
qlc:q([R|| R <- ets:table(Table, Opts)]);
query_handle([Table]) when is_atom(Table) -> query_handle([Table]) when is_atom(Table) ->
qlc:q([R|| R <- ets:table(Table)]); qlc:q([R|| R <- ets:table(Table)]);
query_handle([{Table, Opts}]) when is_atom(Table) ->
qlc:q([R|| R <- ets:table(Table, Opts)]);
query_handle(Tables) -> query_handle(Tables) ->
qlc:append([qlc:q([E || E <- ets:table(T)]) || T <- Tables]). Fold = fun({Table, Opts}, Acc) ->
Handle = qlc:q([R|| R <- ets:table(Table, Opts)]),
[Handle | Acc];
(Table, Acc) ->
Handle = qlc:q([R|| R <- ets:table(Table)]),
[Handle | Acc]
end,
Handles = lists:foldl(Fold, [], Tables),
qlc:append(lists:reverse(Handles)).
count(Table) when is_atom(Table) -> count(Table) when is_atom(Table) ->
ets:info(Table, size); ets:info(Table, size);
count({Table, _Opts}) when is_atom(Table) ->
ets:info(Table, size);
count([Table]) when is_atom(Table) -> count([Table]) when is_atom(Table) ->
ets:info(Table, size); ets:info(Table, size);
count([{Table, _Opts}]) when is_atom(Table) ->
ets:info(Table, size);
count(Tables) -> count(Tables) ->
lists:sum([count(T) || T <- Tables]). Fold = fun({Table, _Opts}, Acc) ->
count(Table) ++ Acc;
(Table, Acc) ->
count(Table) ++ Acc
end,
lists:foldl(Fold, 0, Tables).
count(Table, Nodes) -> count(Table, Nodes) ->
lists:sum([rpc_call(Node, ets, info, [Table, size], 5000) || Node <- Nodes]). lists:sum([rpc_call(Node, ets, info, [Table, size], 5000) || Node <- Nodes]).

View File

@ -117,6 +117,12 @@
func => clean_quota, func => clean_quota,
descr => "Clear the quota policy"}). descr => "Clear the quota policy"}).
-rest_api(#{name => set_keepalive,
method => 'PUT',
path => "/clients/:bin:clientid/keepalive",
func => set_keepalive,
descr => "Set the client keepalive"}).
-import(emqx_mgmt_util, [ ntoa/1 -import(emqx_mgmt_util, [ ntoa/1
, strftime/1 , strftime/1
]). ]).
@ -130,23 +136,24 @@
, set_quota_policy/2 , set_quota_policy/2
, clean_ratelimit/2 , clean_ratelimit/2
, clean_quota/2 , clean_quota/2
, set_keepalive/2
]). ]).
-export([ query/3 -export([ query/3
, format_channel_info/1 , format_channel_info/1
]). ]).
-define(query_fun, {?MODULE, query}). -define(QUERY_FUN, {?MODULE, query}).
-define(format_fun, {?MODULE, format_channel_info}). -define(FORMAT_FUN, {?MODULE, format_channel_info}).
list(Bindings, Params) when map_size(Bindings) == 0 -> list(Bindings, Params) when map_size(Bindings) == 0 ->
fence(fun() -> fence(fun() ->
emqx_mgmt_api:cluster_query(Params, ?CLIENT_QS_SCHEMA, ?query_fun) emqx_mgmt_api:cluster_query(Params, ?CLIENT_QS_SCHEMA, ?QUERY_FUN)
end); end);
list(#{node := Node}, Params) when Node =:= node() -> list(#{node := Node}, Params) when Node =:= node() ->
fence(fun() -> fence(fun() ->
emqx_mgmt_api:node_query(Node, Params, ?CLIENT_QS_SCHEMA, ?query_fun) emqx_mgmt_api:node_query(Node, Params, ?CLIENT_QS_SCHEMA, ?QUERY_FUN)
end); end);
list(Bindings = #{node := Node}, Params) -> list(Bindings = #{node := Node}, Params) ->
@ -169,16 +176,20 @@ fence(Func) ->
end. end.
lookup(#{node := Node, clientid := ClientId}, _Params) -> lookup(#{node := Node, clientid := ClientId}, _Params) ->
minirest:return({ok, emqx_mgmt:lookup_client(Node, {clientid, emqx_mgmt_util:urldecode(ClientId)}, ?format_fun)}); minirest:return({ok, emqx_mgmt:lookup_client(Node,
{clientid, emqx_mgmt_util:urldecode(ClientId)}, ?FORMAT_FUN)});
lookup(#{clientid := ClientId}, _Params) -> lookup(#{clientid := ClientId}, _Params) ->
minirest:return({ok, emqx_mgmt:lookup_client({clientid, emqx_mgmt_util:urldecode(ClientId)}, ?format_fun)}); minirest:return({ok, emqx_mgmt:lookup_client(
{clientid, emqx_mgmt_util:urldecode(ClientId)}, ?FORMAT_FUN)});
lookup(#{node := Node, username := Username}, _Params) -> lookup(#{node := Node, username := Username}, _Params) ->
minirest:return({ok, emqx_mgmt:lookup_client(Node, {username, emqx_mgmt_util:urldecode(Username)}, ?format_fun)}); minirest:return({ok, emqx_mgmt:lookup_client(Node,
{username, emqx_mgmt_util:urldecode(Username)}, ?FORMAT_FUN)});
lookup(#{username := Username}, _Params) -> lookup(#{username := Username}, _Params) ->
minirest:return({ok, emqx_mgmt:lookup_client({username, emqx_mgmt_util:urldecode(Username)}, ?format_fun)}). minirest:return({ok, emqx_mgmt:lookup_client({username,
emqx_mgmt_util:urldecode(Username)}, ?FORMAT_FUN)}).
kickout(#{clientid := ClientId}, _Params) -> kickout(#{clientid := ClientId}, _Params) ->
case emqx_mgmt:kickout_client(emqx_mgmt_util:urldecode(ClientId)) of case emqx_mgmt:kickout_client(emqx_mgmt_util:urldecode(ClientId)) of
@ -204,7 +215,7 @@ list_acl_cache(#{clientid := ClientId}, _Params) ->
set_ratelimit_policy(#{clientid := ClientId}, Params) -> set_ratelimit_policy(#{clientid := ClientId}, Params) ->
P = [{conn_bytes_in, proplists:get_value(<<"conn_bytes_in">>, Params)}, P = [{conn_bytes_in, proplists:get_value(<<"conn_bytes_in">>, Params)},
{conn_messages_in, proplists:get_value(<<"conn_messages_in">>, Params)}], {conn_messages_in, proplists:get_value(<<"conn_messages_in">>, Params)}],
case [{K, parse_ratelimit_str(V)} || {K, V} <- P, V =/= undefined] of case filter_ratelimit_params(P) of
[] -> minirest:return(); [] -> minirest:return();
Policy -> Policy ->
case emqx_mgmt:set_ratelimit_policy(emqx_mgmt_util:urldecode(ClientId), Policy) of case emqx_mgmt:set_ratelimit_policy(emqx_mgmt_util:urldecode(ClientId), Policy) of
@ -223,7 +234,7 @@ clean_ratelimit(#{clientid := ClientId}, _Params) ->
set_quota_policy(#{clientid := ClientId}, Params) -> set_quota_policy(#{clientid := ClientId}, Params) ->
P = [{conn_messages_routing, proplists:get_value(<<"conn_messages_routing">>, Params)}], P = [{conn_messages_routing, proplists:get_value(<<"conn_messages_routing">>, Params)}],
case [{K, parse_ratelimit_str(V)} || {K, V} <- P, V =/= undefined] of case filter_ratelimit_params(P) of
[] -> minirest:return(); [] -> minirest:return();
Policy -> Policy ->
case emqx_mgmt:set_quota_policy(emqx_mgmt_util:urldecode(ClientId), Policy) of case emqx_mgmt:set_quota_policy(emqx_mgmt_util:urldecode(ClientId), Policy) of
@ -233,6 +244,7 @@ set_quota_policy(#{clientid := ClientId}, Params) ->
end end
end. end.
clean_quota(#{clientid := ClientId}, _Params) -> clean_quota(#{clientid := ClientId}, _Params) ->
case emqx_mgmt:set_quota_policy(emqx_mgmt_util:urldecode(ClientId), []) of case emqx_mgmt:set_quota_policy(emqx_mgmt_util:urldecode(ClientId), []) of
ok -> minirest:return(); ok -> minirest:return();
@ -240,6 +252,20 @@ clean_quota(#{clientid := ClientId}, _Params) ->
{error, Reason} -> minirest:return({error, ?ERROR1, Reason}) {error, Reason} -> minirest:return({error, ?ERROR1, Reason})
end. end.
set_keepalive(#{clientid := ClientId}, Params) ->
case proplists:get_value(<<"interval">>, Params) of
undefined ->
minirest:return({error, ?ERROR7, params_not_found});
Interval0 ->
Interval = binary_to_integer(Interval0),
case emqx_mgmt:set_keepalive(emqx_mgmt_util:urldecode(ClientId), Interval) of
ok -> minirest:return();
{error, not_found} -> minirest:return({error, ?ERROR12, not_found});
{error, Code, Reason} -> minirest:return({error, Code, Reason});
{error, Reason} -> minirest:return({error, ?ERROR1, Reason})
end
end.
%% @private %% @private
%% S = 100,1s %% S = 100,1s
%% | 100KB, 1m %% | 100KB, 1m
@ -266,7 +292,7 @@ format_channel_info({_Key, Info, Stats0}) ->
ConnInfo = maps:get(conninfo, Info, #{}), ConnInfo = maps:get(conninfo, Info, #{}),
Session = case maps:get(session, Info, #{}) of Session = case maps:get(session, Info, #{}) of
undefined -> #{}; undefined -> #{};
_Sess -> _Sess Sess -> Sess
end, end,
SessCreated = maps:get(created_at, Session, maps:get(connected_at, ConnInfo)), SessCreated = maps:get(created_at, Session, maps:get(connected_at, ConnInfo)),
Connected = case maps:get(conn_state, Info, connected) of Connected = case maps:get(conn_state, Info, connected) of
@ -306,7 +332,8 @@ format(Data) when is_map(Data)->
created_at => iolist_to_binary(strftime(CreatedAt div 1000))}, created_at => iolist_to_binary(strftime(CreatedAt div 1000))},
case maps:get(disconnected_at, Data, undefined) of case maps:get(disconnected_at, Data, undefined) of
undefined -> #{}; undefined -> #{};
DisconnectedAt -> #{disconnected_at => iolist_to_binary(strftime(DisconnectedAt div 1000))} DisconnectedAt -> #{disconnected_at =>
iolist_to_binary(strftime(DisconnectedAt div 1000))}
end). end).
format_acl_cache({{PubSub, Topic}, {AclResult, Timestamp}}) -> format_acl_cache({{PubSub, Topic}, {AclResult, Timestamp}}) ->
@ -326,7 +353,8 @@ query({Qs, []}, Start, Limit) ->
query({Qs, Fuzzy}, Start, Limit) -> query({Qs, Fuzzy}, Start, Limit) ->
Ms = qs2ms(Qs), Ms = qs2ms(Qs),
MatchFun = match_fun(Ms, Fuzzy), MatchFun = match_fun(Ms, Fuzzy),
emqx_mgmt_api:traverse_table(emqx_channel_info, MatchFun, Start, Limit, fun format_channel_info/1). emqx_mgmt_api:traverse_table(emqx_channel_info, MatchFun,
Start, Limit, fun format_channel_info/1).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Match funcs %% Match funcs
@ -352,7 +380,7 @@ escape(B) when is_binary(B) ->
run_fuzzy_match(_, []) -> run_fuzzy_match(_, []) ->
true; true;
run_fuzzy_match(E = {_, #{clientinfo := ClientInfo}, _}, [{Key, _, RE}|Fuzzy]) -> run_fuzzy_match(E = {_, #{clientinfo := ClientInfo}, _}, [{Key, _, RE} | Fuzzy]) ->
Val = case maps:get(Key, ClientInfo, "") of Val = case maps:get(Key, ClientInfo, "") of
undefined -> ""; undefined -> "";
V -> V V -> V
@ -406,6 +434,9 @@ ms(connected_at, X) ->
ms(created_at, X) -> ms(created_at, X) ->
#{session => #{created_at => X}}. #{session => #{created_at => X}}.
filter_ratelimit_params(P) ->
[{K, parse_ratelimit_str(V)} || {K, V} <- P, V =/= undefined].
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% EUnits %% EUnits
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------

View File

@ -71,8 +71,8 @@ subscribe(_Bindings, Params) ->
publish(_Bindings, Params) -> publish(_Bindings, Params) ->
logger:debug("API publish Params:~p", [Params]), logger:debug("API publish Params:~p", [Params]),
{ClientId, Topic, Qos, Retain, Payload} = parse_publish_params(Params), {ClientId, Topic, Qos, Retain, Payload, UserProps} = parse_publish_params(Params),
case do_publish(ClientId, Topic, Qos, Retain, Payload) of case do_publish(ClientId, Topic, Qos, Retain, Payload, UserProps) of
{ok, MsgIds} -> {ok, MsgIds} ->
case proplists:get_value(<<"return">>, Params, undefined) of case proplists:get_value(<<"return">>, Params, undefined) of
undefined -> minirest:return(ok); undefined -> minirest:return(ok);
@ -114,7 +114,8 @@ loop_subscribe([Params | ParamsN], Acc) ->
{_, Code0, _Reason} -> Code0 {_, Code0, _Reason} -> Code0
end, end,
Result = #{clientid => ClientId, Result = #{clientid => ClientId,
topic => resp_topic(proplists:get_value(<<"topic">>, Params), proplists:get_value(<<"topics">>, Params, <<"">>)), topic => resp_topic(proplists:get_value(<<"topic">>, Params),
proplists:get_value(<<"topics">>, Params, <<"">>)),
code => Code}, code => Code},
loop_subscribe(ParamsN, [Result | Acc]). loop_subscribe(ParamsN, [Result | Acc]).
@ -123,12 +124,13 @@ loop_publish(Params) ->
loop_publish([], Result) -> loop_publish([], Result) ->
lists:reverse(Result); lists:reverse(Result);
loop_publish([Params | ParamsN], Acc) -> loop_publish([Params | ParamsN], Acc) ->
{ClientId, Topic, Qos, Retain, Payload} = parse_publish_params(Params), {ClientId, Topic, Qos, Retain, Payload, UserProps} = parse_publish_params(Params),
Code = case do_publish(ClientId, Topic, Qos, Retain, Payload) of Code = case do_publish(ClientId, Topic, Qos, Retain, Payload, UserProps) of
{ok, _} -> 0; {ok, _} -> 0;
{_, Code0, _} -> Code0 {_, Code0, _} -> Code0
end, end,
Result = #{topic => resp_topic(proplists:get_value(<<"topic">>, Params), proplists:get_value(<<"topics">>, Params, <<"">>)), Result = #{topic => resp_topic(proplists:get_value(<<"topic">>, Params),
proplists:get_value(<<"topics">>, Params, <<"">>)),
code => Code}, code => Code},
loop_publish(ParamsN, [Result | Acc]). loop_publish(ParamsN, [Result | Acc]).
@ -143,7 +145,8 @@ loop_unsubscribe([Params | ParamsN], Acc) ->
{_, Code0, _} -> Code0 {_, Code0, _} -> Code0
end, end,
Result = #{clientid => ClientId, Result = #{clientid => ClientId,
topic => resp_topic(proplists:get_value(<<"topic">>, Params), proplists:get_value(<<"topics">>, Params, <<"">>)), topic => resp_topic(proplists:get_value(<<"topic">>, Params),
proplists:get_value(<<"topics">>, Params, <<"">>)),
code => Code}, code => Code},
loop_unsubscribe(ParamsN, [Result | Acc]). loop_unsubscribe(ParamsN, [Result | Acc]).
@ -158,14 +161,17 @@ do_subscribe(ClientId, Topics, QoS) ->
_ -> ok _ -> ok
end. end.
do_publish(ClientId, _Topics, _Qos, _Retain, _Payload) when not (is_binary(ClientId) or (ClientId =:= undefined)) -> do_publish(ClientId, _Topics, _Qos, _Retain, _Payload, _UserProps)
when not (is_binary(ClientId) or (ClientId =:= undefined)) ->
{ok, ?ERROR8, <<"bad clientid: must be string">>}; {ok, ?ERROR8, <<"bad clientid: must be string">>};
do_publish(_ClientId, [], _Qos, _Retain, _Payload) -> do_publish(_ClientId, [], _Qos, _Retain, _Payload, _UserProps) ->
{ok, ?ERROR15, bad_topic}; {ok, ?ERROR15, bad_topic};
do_publish(ClientId, Topics, Qos, Retain, Payload) -> do_publish(ClientId, Topics, Qos, Retain, Payload, UserProps) ->
MsgIds = lists:map(fun(Topic) -> MsgIds = lists:map(fun(Topic) ->
Msg = emqx_message:make(ClientId, Qos, Topic, Payload), Msg = emqx_message:make(ClientId, Qos, Topic, Payload),
_ = emqx_mgmt:publish(Msg#message{flags = #{retain => Retain}}), UserProps1 = #{'User-Property' => UserProps},
_ = emqx_mgmt:publish(Msg#message{flags = #{retain => Retain},
headers = #{properties => UserProps1}}),
emqx_guid:to_hexstr(Msg#message.id) emqx_guid:to_hexstr(Msg#message.id)
end, Topics), end, Topics),
{ok, MsgIds}. {ok, MsgIds}.
@ -185,19 +191,22 @@ do_unsubscribe(ClientId, Topic) ->
parse_subscribe_params(Params) -> parse_subscribe_params(Params) ->
ClientId = proplists:get_value(<<"clientid">>, Params), ClientId = proplists:get_value(<<"clientid">>, Params),
Topics = topics(filter, proplists:get_value(<<"topic">>, Params), proplists:get_value(<<"topics">>, Params, <<"">>)), Topics = topics(filter, proplists:get_value(<<"topic">>, Params),
proplists:get_value(<<"topics">>, Params, <<"">>)),
QoS = proplists:get_value(<<"qos">>, Params, 0), QoS = proplists:get_value(<<"qos">>, Params, 0),
{ClientId, Topics, QoS}. {ClientId, Topics, QoS}.
parse_publish_params(Params) -> parse_publish_params(Params) ->
Topics = topics(name, proplists:get_value(<<"topic">>, Params), proplists:get_value(<<"topics">>, Params, <<"">>)), Topics = topics(name, proplists:get_value(<<"topic">>, Params),
proplists:get_value(<<"topics">>, Params, <<"">>)),
ClientId = proplists:get_value(<<"clientid">>, Params), ClientId = proplists:get_value(<<"clientid">>, Params),
Payload = decode_payload(proplists:get_value(<<"payload">>, Params, <<>>), Payload = decode_payload(proplists:get_value(<<"payload">>, Params, <<>>),
proplists:get_value(<<"encoding">>, Params, <<"plain">>)), proplists:get_value(<<"encoding">>, Params, <<"plain">>)),
Qos = proplists:get_value(<<"qos">>, Params, 0), Qos = proplists:get_value(<<"qos">>, Params, 0),
Retain = proplists:get_value(<<"retain">>, Params, false), Retain = proplists:get_value(<<"retain">>, Params, false),
Payload1 = maybe_maps_to_binary(Payload), Payload1 = maybe_maps_to_binary(Payload),
{ClientId, Topics, Qos, Retain, Payload1}. UserProps = check_user_props(proplists:get_value(<<"user_properties">>, Params, [])),
{ClientId, Topics, Qos, Retain, Payload1, UserProps}.
parse_unsubscribe_params(Params) -> parse_unsubscribe_params(Params) ->
ClientId = proplists:get_value(<<"clientid">>, Params), ClientId = proplists:get_value(<<"clientid">>, Params),
@ -251,3 +260,8 @@ maybe_maps_to_binary(Payload) ->
_C : _E : S -> _C : _E : S ->
error({encode_payload_fail, S}) error({encode_payload_fail, S})
end. end.
check_user_props(UserProps) when is_list(UserProps) ->
UserProps;
check_user_props(UserProps) ->
error({user_properties_type_error, UserProps}).

View File

@ -21,7 +21,9 @@
-include("emqx_mgmt.hrl"). -include("emqx_mgmt.hrl").
-define(PRINT_CMD(Cmd, Descr), io:format("~-48s# ~s~n", [Cmd, Descr])). -elvis([{elvis_style, invalid_dynamic_call, disable}]).
-define(PRINT_CMD(Cmd, Desc), io:format("~-48s# ~s~n", [Cmd, Desc])).
-export([load/0]). -export([load/0]).
@ -36,6 +38,7 @@
, vm/1 , vm/1
, mnesia/1 , mnesia/1
, trace/1 , trace/1
, traces/1
, log/1 , log/1
, mgmt/1 , mgmt/1
, data/1 , data/1
@ -74,11 +77,8 @@ mgmt(["insert", AppId, Name]) ->
mgmt(["lookup", AppId]) -> mgmt(["lookup", AppId]) ->
case emqx_mgmt_auth:lookup_app(list_to_binary(AppId)) of case emqx_mgmt_auth:lookup_app(list_to_binary(AppId)) of
{AppId1, AppSecret, Name, Desc, Status, Expired} -> undefined -> emqx_ctl:print("Not Found.~n");
emqx_ctl:print("app_id: ~s~nsecret: ~s~nname: ~s~ndesc: ~s~nstatus: ~s~nexpired: ~p~n", App -> print_app_info(App)
[AppId1, AppSecret, Name, Desc, Status, Expired]);
undefined ->
emqx_ctl:print("Not Found.~n")
end; end;
mgmt(["update", AppId, Status]) -> mgmt(["update", AppId, Status]) ->
@ -99,10 +99,7 @@ mgmt(["delete", AppId]) ->
end; end;
mgmt(["list"]) -> mgmt(["list"]) ->
lists:foreach(fun({AppId, AppSecret, Name, Desc, Status, Expired}) -> lists:foreach(fun print_app_info/1, emqx_mgmt_auth:list_apps());
emqx_ctl:print("app_id: ~s, secret: ~s, name: ~s, desc: ~s, status: ~s, expired: ~p~n",
[AppId, AppSecret, Name, Desc, Status, Expired])
end, emqx_mgmt_auth:list_apps());
mgmt(_) -> mgmt(_) ->
emqx_ctl:usage([{"mgmt list", "List Applications"}, emqx_ctl:usage([{"mgmt list", "List Applications"},
@ -128,10 +125,12 @@ broker([]) ->
[emqx_ctl:print("~-10s: ~s~n", [Fun, emqx_sys:Fun()]) || Fun <- Funs]; [emqx_ctl:print("~-10s: ~s~n", [Fun, emqx_sys:Fun()]) || Fun <- Funs];
broker(["stats"]) -> broker(["stats"]) ->
[emqx_ctl:print("~-30s: ~w~n", [Stat, Val]) || {Stat, Val} <- lists:sort(emqx_stats:getstats())]; [emqx_ctl:print("~-30s: ~w~n", [Stat, Val]) ||
{Stat, Val} <- lists:sort(emqx_stats:getstats())];
broker(["metrics"]) -> broker(["metrics"]) ->
[emqx_ctl:print("~-30s: ~w~n", [Metric, Val]) || {Metric, Val} <- lists:sort(emqx_metrics:all())]; [emqx_ctl:print("~-30s: ~w~n", [Metric, Val]) ||
{Metric, Val} <- lists:sort(emqx_metrics:all())];
broker(_) -> broker(_) ->
emqx_ctl:usage([{"broker", "Show broker version, uptime and description"}, emqx_ctl:usage([{"broker", "Show broker version, uptime and description"},
@ -258,8 +257,10 @@ subscriptions(["del", ClientId, Topic]) ->
subscriptions(_) -> subscriptions(_) ->
emqx_ctl:usage([{"subscriptions list", "List all subscriptions"}, emqx_ctl:usage([{"subscriptions list", "List all subscriptions"},
{"subscriptions show <ClientId>", "Show subscriptions of a client"}, {"subscriptions show <ClientId>", "Show subscriptions of a client"},
{"subscriptions add <ClientId> <Topic> <QoS>", "Add a static subscription manually"}, {"subscriptions add <ClientId> <Topic> <QoS>",
{"subscriptions del <ClientId> <Topic>", "Delete a static subscription manually"}]). "Add a static subscription manually"},
{"subscriptions del <ClientId> <Topic>",
"Delete a static subscription manually"}]).
if_valid_qos(QoS, Fun) -> if_valid_qos(QoS, Fun) ->
try list_to_integer(QoS) of try list_to_integer(QoS) of
@ -328,14 +329,20 @@ vm(["memory"]) ->
[emqx_ctl:print("memory/~-17s: ~w~n", [Cat, Val]) || {Cat, Val} <- erlang:memory()]; [emqx_ctl:print("memory/~-17s: ~w~n", [Cat, Val]) || {Cat, Val} <- erlang:memory()];
vm(["process"]) -> vm(["process"]) ->
[emqx_ctl:print("process/~-16s: ~w~n", [Name, erlang:system_info(Key)]) || {Name, Key} <- [{limit, process_limit}, {count, process_count}]]; [emqx_ctl:print("process/~-16s: ~w~n",
[Name, erlang:system_info(Key)]) ||
{Name, Key} <- [{limit, process_limit}, {count, process_count}]];
vm(["io"]) -> vm(["io"]) ->
IoInfo = lists:usort(lists:flatten(erlang:system_info(check_io))), IoInfo = lists:usort(lists:flatten(erlang:system_info(check_io))),
[emqx_ctl:print("io/~-21s: ~w~n", [Key, proplists:get_value(Key, IoInfo)]) || Key <- [max_fds, active_fds]]; [emqx_ctl:print("io/~-21s: ~w~n",
[Key, proplists:get_value(Key, IoInfo)]) ||
Key <- [max_fds, active_fds]];
vm(["ports"]) -> vm(["ports"]) ->
[emqx_ctl:print("ports/~-16s: ~w~n", [Name, erlang:system_info(Key)]) || {Name, Key} <- [{count, port_count}, {limit, port_limit}]]; [emqx_ctl:print("ports/~-16s: ~w~n",
[Name, erlang:system_info(Key)]) ||
{Name, Key} <- [{count, port_count}, {limit, port_limit}]];
vm(_) -> vm(_) ->
emqx_ctl:usage([{"vm all", "Show info of Erlang VM"}, emqx_ctl:usage([{"vm all", "Show info of Erlang VM"},
@ -372,8 +379,9 @@ log(["primary-level", Level]) ->
emqx_ctl:print("~s~n", [emqx_logger:get_primary_log_level()]); emqx_ctl:print("~s~n", [emqx_logger:get_primary_log_level()]);
log(["handlers", "list"]) -> log(["handlers", "list"]) ->
_ = [emqx_ctl:print("LogHandler(id=~s, level=~s, destination=~s, status=~s)~n", [Id, Level, Dst, Status]) _ = [emqx_ctl:print("LogHandler(id=~s, level=~s, destination=~s, status=~s)~n",
|| #{id := Id, level := Level, dst := Dst, status := Status} <- emqx_logger:get_log_handlers()], [Id, Level, Dst, Status]) || #{id := Id, level := Level, dst := Dst, status := Status}
<- emqx_logger:get_log_handlers()],
ok; ok;
log(["handlers", "start", HandlerId]) -> log(["handlers", "start", HandlerId]) ->
@ -406,43 +414,51 @@ log(_) ->
{"log handlers list", "Show log handlers"}, {"log handlers list", "Show log handlers"},
{"log handlers start <HandlerId>", "Start a log handler"}, {"log handlers start <HandlerId>", "Start a log handler"},
{"log handlers stop <HandlerId>", "Stop a log handler"}, {"log handlers stop <HandlerId>", "Stop a log handler"},
{"log handlers set-level <HandlerId> <Level>", "Set log level of a log handler"}]). {"log handlers set-level <HandlerId> <Level>",
"Set log level of a log handler"}]).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% @doc Trace Command %% @doc Trace Command
trace(["list"]) -> trace(["list"]) ->
lists:foreach(fun({{Who, Name}, {Level, LogFile}}) -> lists:foreach(fun(Trace) ->
emqx_ctl:print("Trace(~s=~s, level=~s, destination=~p)~n", [Who, Name, Level, LogFile]) #{type := Type, filter := Filter, level := Level, dst := Dst} = Trace,
end, emqx_tracer:lookup_traces()); emqx_ctl:print("Trace(~s=~s, level=~s, destination=~p)~n", [Type, Filter, Level, Dst])
end, emqx_trace_handler:running());
trace(["stop", "client", ClientId]) -> trace(["stop", Operation, ClientId]) ->
trace_off(clientid, ClientId); case trace_type(Operation) of
{ok, Type} -> trace_off(Type, ClientId);
error -> trace([])
end;
trace(["start", "client", ClientId, LogFile]) -> trace(["start", Operation, ClientId, LogFile]) ->
trace_on(clientid, ClientId, all, LogFile); trace(["start", Operation, ClientId, LogFile, "all"]);
trace(["start", "client", ClientId, LogFile, Level]) -> trace(["start", Operation, ClientId, LogFile, Level]) ->
trace_on(clientid, ClientId, list_to_atom(Level), LogFile); case trace_type(Operation) of
{ok, Type} -> trace_on(Type, ClientId, list_to_existing_atom(Level), LogFile);
trace(["stop", "topic", Topic]) -> error -> trace([])
trace_off(topic, Topic); end;
trace(["start", "topic", Topic, LogFile]) ->
trace_on(topic, Topic, all, LogFile);
trace(["start", "topic", Topic, LogFile, Level]) ->
trace_on(topic, Topic, list_to_atom(Level), LogFile);
trace(_) -> trace(_) ->
emqx_ctl:usage([{"trace list", "List all traces started"}, emqx_ctl:usage([{"trace list", "List all traces started on local node"},
{"trace start client <ClientId> <File> [<Level>]", "Traces for a client"}, {"trace start client <ClientId> <File> [<Level>]",
{"trace stop client <ClientId>", "Stop tracing for a client"}, "Traces for a client on local node"},
{"trace start topic <Topic> <File> [<Level>] ", "Traces for a topic"}, {"trace stop client <ClientId>",
{"trace stop topic <Topic> ", "Stop tracing for a topic"}]). "Stop tracing for a client on local node"},
{"trace start topic <Topic> <File> [<Level>] ",
"Traces for a topic on local node"},
{"trace stop topic <Topic> ",
"Stop tracing for a topic on local node"},
{"trace start ip_address <IP> <File> [<Level>] ",
"Traces for a client ip on local node"},
{"trace stop ip_addresss <IP> ",
"Stop tracing for a client ip on local node"}
]).
trace_on(Who, Name, Level, LogFile) -> trace_on(Who, Name, Level, LogFile) ->
case emqx_tracer:start_trace({Who, iolist_to_binary(Name)}, Level, LogFile) of case emqx_trace_handler:install(Who, Name, Level, LogFile) of
ok -> ok ->
emqx_ctl:print("trace ~s ~s successfully~n", [Who, Name]); emqx_ctl:print("trace ~s ~s successfully~n", [Who, Name]);
{error, Error} -> {error, Error} ->
@ -450,13 +466,94 @@ trace_on(Who, Name, Level, LogFile) ->
end. end.
trace_off(Who, Name) -> trace_off(Who, Name) ->
case emqx_tracer:stop_trace({Who, iolist_to_binary(Name)}) of case emqx_trace_handler:uninstall(Who, Name) of
ok -> ok ->
emqx_ctl:print("stop tracing ~s ~s successfully~n", [Who, Name]); emqx_ctl:print("stop tracing ~s ~s successfully~n", [Who, Name]);
{error, Error} -> {error, Error} ->
emqx_ctl:print("[error] stop tracing ~s ~s: ~p~n", [Who, Name, Error]) emqx_ctl:print("[error] stop tracing ~s ~s: ~p~n", [Who, Name, Error])
end. end.
%%--------------------------------------------------------------------
%% @doc Trace Cluster Command
traces(["list"]) ->
{ok, List} = emqx_trace_api:list_trace(get, []),
case List of
[] ->
emqx_ctl:print("Cluster Trace is empty~n", []);
_ ->
lists:foreach(fun(Trace) ->
#{type := Type, name := Name, status := Status,
log_size := LogSize} = Trace,
emqx_ctl:print("Trace(~s: ~s=~s, ~s, LogSize:~p)~n",
[Name, Type, maps:get(Type, Trace), Status, LogSize])
end, List)
end,
length(List);
traces(["stop", Name]) ->
trace_cluster_off(Name);
traces(["delete", Name]) ->
trace_cluster_del(Name);
traces(["start", Name, Operation, Filter]) ->
traces(["start", Name, Operation, Filter, "900"]);
traces(["start", Name, Operation, Filter, DurationS]) ->
case trace_type(Operation) of
{ok, Type} -> trace_cluster_on(Name, Type, Filter, DurationS);
error -> traces([])
end;
traces(_) ->
emqx_ctl:usage([{"traces list", "List all cluster traces started"},
{"traces start <Name> client <ClientId>", "Traces for a client in cluster"},
{"traces start <Name> topic <Topic>", "Traces for a topic in cluster"},
{"traces start <Name> ip_address <IPAddr>", "Traces for a IP in cluster"},
{"traces stop <Name>", "Stop trace in cluster"},
{"traces delete <Name>", "Delete trace in cluster"}
]).
trace_cluster_on(Name, Type, Filter, DurationS0) ->
case erlang:whereis(emqx_trace) of
undefined ->
emqx_ctl:print("[error] Tracer module not started~n"
"Please run `emqx_ctl modules start tracer` "
"or `emqx_ctl modules start emqx_mod_trace` first~n", []);
_ ->
DurationS = list_to_integer(DurationS0),
Now = erlang:system_time(second),
Trace = #{ name => list_to_binary(Name)
, type => atom_to_binary(Type)
, Type => list_to_binary(Filter)
, start_at => list_to_binary(calendar:system_time_to_rfc3339(Now))
, end_at => list_to_binary(calendar:system_time_to_rfc3339(Now + DurationS))
},
case emqx_trace:create(Trace) of
ok ->
emqx_ctl:print("Cluster_trace ~p ~s ~s successfully~n", [Type, Filter, Name]);
{error, Error} ->
emqx_ctl:print("[error] Cluster_trace ~s ~s=~s ~p~n",
[Name, Type, Filter, Error])
end
end.
trace_cluster_del(Name) ->
case emqx_trace:delete(list_to_binary(Name)) of
ok -> emqx_ctl:print("Del cluster_trace ~s successfully~n", [Name]);
{error, Error} -> emqx_ctl:print("[error] Del cluster_trace ~s: ~p~n", [Name, Error])
end.
trace_cluster_off(Name) ->
case emqx_trace:update(list_to_binary(Name), false) of
ok -> emqx_ctl:print("Stop cluster_trace ~s successfully~n", [Name]);
{error, Error} -> emqx_ctl:print("[error] Stop cluster_trace ~s: ~p~n", [Name, Error])
end.
trace_type("client") -> {ok, clientid};
trace_type("topic") -> {ok, topic};
trace_type("ip_address") -> {ok, ip_address};
trace_type(_) -> error.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% @doc Listeners Command %% @doc Listeners Command
@ -473,8 +570,9 @@ listeners([]) ->
end, esockd:listeners()), end, esockd:listeners()),
lists:foreach(fun({Protocol, Opts}) -> lists:foreach(fun({Protocol, Opts}) ->
Port = proplists:get_value(port, Opts), Port = proplists:get_value(port, Opts),
Acceptors = maps:get(num_acceptors, proplists:get_value(transport_options, Opts, #{}), 0),
Info = [{listen_on, {string, emqx_listeners:format_listen_on(Port)}}, Info = [{listen_on, {string, emqx_listeners:format_listen_on(Port)}},
{acceptors, maps:get(num_acceptors, proplists:get_value(transport_options, Opts, #{}), 0)}, {acceptors, Acceptors},
{max_conns, proplists:get_value(max_connections, Opts)}, {max_conns, proplists:get_value(max_connections, Opts)},
{current_conn, proplists:get_value(all_connections, Opts)}, {current_conn, proplists:get_value(all_connections, Opts)},
{shutdown_count, []}], {shutdown_count, []}],
@ -483,7 +581,8 @@ listeners([]) ->
end, ranch:info()); end, ranch:info());
listeners(["stop", Name = "http" ++ _N | _MaybePort]) -> listeners(["stop", Name = "http" ++ _N | _MaybePort]) ->
%% _MaybePort is to be backward compatible, to stop http listener, there is no need for the port number %% _MaybePort is to be backward compatible, to stop http listener,
%% there is no need for the port number
case minirest:stop_http(list_to_atom(Name)) of case minirest:stop_http(list_to_atom(Name)) of
ok -> ok ->
emqx_ctl:print("Stop ~s listener successfully.~n", [Name]); emqx_ctl:print("Stop ~s listener successfully.~n", [Name]);
@ -564,7 +663,8 @@ data(["import", Filename, "--env", Env]) ->
{error, unsupported_version} -> {error, unsupported_version} ->
emqx_ctl:print("The emqx data import failed: Unsupported version.~n"); emqx_ctl:print("The emqx data import failed: Unsupported version.~n");
{error, Reason} -> {error, Reason} ->
emqx_ctl:print("The emqx data import failed: ~0p while reading ~s.~n", [Reason, Filename]) emqx_ctl:print("The emqx data import failed: ~0p while reading ~s.~n",
[Reason, Filename])
end; end;
data(_) -> data(_) ->
@ -657,15 +757,19 @@ print({client, {ClientId, ChanPid}}) ->
maps:with([created_at], Session)]), maps:with([created_at], Session)]),
InfoKeys = [clientid, username, peername, InfoKeys = [clientid, username, peername,
clean_start, keepalive, expiry_interval, clean_start, keepalive, expiry_interval,
subscriptions_cnt, inflight_cnt, awaiting_rel_cnt, send_msg, mqueue_len, mqueue_dropped, subscriptions_cnt, inflight_cnt, awaiting_rel_cnt,
connected, created_at, connected_at] ++ case maps:is_key(disconnected_at, Info) of send_msg, mqueue_len, mqueue_dropped,
connected, created_at, connected_at] ++
case maps:is_key(disconnected_at, Info) of
true -> [disconnected_at]; true -> [disconnected_at];
false -> [] false -> []
end, end,
emqx_ctl:print("Client(~s, username=~s, peername=~s, " emqx_ctl:print("Client(~s, username=~s, peername=~s, "
"clean_start=~s, keepalive=~w, session_expiry_interval=~w, " "clean_start=~s, keepalive=~w, session_expiry_interval=~w, "
"subscriptions=~w, inflight=~w, awaiting_rel=~w, delivered_msgs=~w, enqueued_msgs=~w, dropped_msgs=~w, " "subscriptions=~w, inflight=~w, awaiting_rel=~w, "
"connected=~s, created_at=~w, connected_at=~w" ++ case maps:is_key(disconnected_at, Info) of "delivered_msgs=~w, enqueued_msgs=~w, dropped_msgs=~w, "
"connected=~s, created_at=~w, connected_at=~w" ++
case maps:is_key(disconnected_at, Info) of
true -> ", disconnected_at=~w)~n"; true -> ", disconnected_at=~w)~n";
false -> ")~n" false -> ")~n"
end, end,
@ -721,3 +825,7 @@ restart_http_listener(Scheme, AppName) ->
http_mod_name(emqx_management) -> emqx_mgmt_http; http_mod_name(emqx_management) -> emqx_mgmt_http;
http_mod_name(Name) -> Name. http_mod_name(Name) -> Name.
print_app_info({AppId, AppSecret, Name, Desc, Status, Expired}) ->
emqx_ctl:print("app_id: ~s, secret: ~s, name: ~s, desc: ~s, status: ~s, expired: ~p~n",
[AppId, AppSecret, Name, Desc, Status, Expired]).

View File

@ -237,10 +237,12 @@ import_resource(#{<<"id">> := Id,
config => Config, config => Config,
created_at => NCreatedAt, created_at => NCreatedAt,
description => Desc}). description => Desc}).
import_resources_and_rules(Resources, Rules, FromVersion) import_resources_and_rules(Resources, Rules, FromVersion)
when FromVersion =:= "4.0" orelse when FromVersion =:= "4.0" orelse
FromVersion =:= "4.1" orelse FromVersion =:= "4.1" orelse
FromVersion =:= "4.2" -> FromVersion =:= "4.2" orelse
FromVersion =:= "4.3" ->
Configs = lists:foldl(fun compatible_version/2 , [], Resources), Configs = lists:foldl(fun compatible_version/2 , [], Resources),
lists:foreach(fun(#{<<"actions">> := Actions} = Rule) -> lists:foreach(fun(#{<<"actions">> := Actions} = Rule) ->
NActions = apply_new_config(Actions, Configs), NActions = apply_new_config(Actions, Configs),
@ -305,6 +307,17 @@ compatible_version(#{<<"id">> := ID,
{ok, _Resource} = import_resource(Resource#{<<"config">> := Cfg}), {ok, _Resource} = import_resource(Resource#{<<"config">> := Cfg}),
NHeaders = maps:put(<<"content-type">>, ContentType, covert_empty_headers(Headers)), NHeaders = maps:put(<<"content-type">>, ContentType, covert_empty_headers(Headers)),
[{ID, #{headers => NHeaders, method => Method}} | Acc]; [{ID, #{headers => NHeaders, method => Method}} | Acc];
compatible_version(#{<<"id">> := ID,
<<"type">> := Type,
<<"config">> := Config} = Resource, Acc)
when Type =:= <<"backend_mongo_single">>
orelse Type =:= <<"backend_mongo_sharded">>
orelse Type =:= <<"backend_mongo_rs">> ->
NewConfig = maps:merge(#{<<"srv_record">> => false}, Config),
{ok, _Resource} = import_resource(Resource#{<<"config">> := NewConfig}),
[{ID, NewConfig} | Acc];
% normal version % normal version
compatible_version(Resource, Acc) -> compatible_version(Resource, Acc) ->
{ok, _Resource} = import_resource(Resource), {ok, _Resource} = import_resource(Resource),
@ -511,6 +524,7 @@ import_modules(Modules) ->
undefined -> undefined ->
ok; ok;
_ -> _ ->
NModules = migrate_modules(Modules),
lists:foreach(fun(#{<<"id">> := Id, lists:foreach(fun(#{<<"id">> := Id,
<<"type">> := Type, <<"type">> := Type,
<<"config">> := Config, <<"config">> := Config,
@ -518,9 +532,31 @@ import_modules(Modules) ->
<<"created_at">> := CreatedAt, <<"created_at">> := CreatedAt,
<<"description">> := Description}) -> <<"description">> := Description}) ->
_ = emqx_modules:import_module({Id, any_to_atom(Type), Config, Enabled, CreatedAt, Description}) _ = emqx_modules:import_module({Id, any_to_atom(Type), Config, Enabled, CreatedAt, Description})
end, Modules) end, NModules)
end. end.
migrate_modules(Modules) ->
migrate_modules(Modules, []).
migrate_modules([], Acc) ->
lists:reverse(Acc);
migrate_modules([#{<<"type">> := <<"mongo_authentication">>,
<<"config">> := Config} = Module | More], Acc) ->
WMode = case maps:get(<<"w_mode">>, Config, <<"unsafe">>) of
<<"undef">> -> <<"unsafe">>;
Other -> Other
end,
RMode = case maps:get(<<"r_mode">>, Config, <<"master">>) of
<<"undef">> -> <<"master">>;
<<"slave-ok">> -> <<"slave_ok">>;
Other0 -> Other0
end,
NConfig = Config#{<<"srv_record">> => false,
<<"w_mode">> => WMode,
<<"r_mode">> => RMode},
migrate_modules(More, [Module#{<<"config">> => NConfig} | Acc]);
migrate_modules([Module | More], Acc) ->
migrate_modules(More, [Module | Acc]).
import_schemas(Schemas) -> import_schemas(Schemas) ->
case ets:info(emqx_schema) of case ets:info(emqx_schema) of

View File

@ -45,6 +45,7 @@ groups() ->
t_vm_cmd, t_vm_cmd,
t_plugins_cmd, t_plugins_cmd,
t_trace_cmd, t_trace_cmd,
t_traces_cmd,
t_broker_cmd, t_broker_cmd,
t_router_cmd, t_router_cmd,
t_subscriptions_cmd, t_subscriptions_cmd,
@ -64,6 +65,23 @@ init_per_suite(Config) ->
end_per_suite(_Config) -> end_per_suite(_Config) ->
emqx_ct_helpers:stop_apps(apps()). emqx_ct_helpers:stop_apps(apps()).
init_per_testcase(t_plugins_cmd, Config) ->
meck:new(emqx_plugins, [non_strict, passthrough]),
meck:expect(emqx_plugins, load, fun(_) -> ok end),
meck:expect(emqx_plugins, unload, fun(_) -> ok end),
meck:expect(emqx_plugins, reload, fun(_) -> ok end),
mock_print(),
Config;
init_per_testcase(_Case, Config) ->
mock_print(),
Config.
end_per_testcase(t_plugins_cmd, _Config) ->
meck:unload(emqx_plugins),
unmock_print();
end_per_testcase(_Case, _Config) ->
unmock_print().
t_app(_Config) -> t_app(_Config) ->
{ok, AppSecret} = emqx_mgmt_auth:add_app(<<"app_id">>, <<"app_name">>), {ok, AppSecret} = emqx_mgmt_auth:add_app(<<"app_id">>, <<"app_name">>),
?assert(emqx_mgmt_auth:is_authorized(<<"app_id">>, AppSecret)), ?assert(emqx_mgmt_auth:is_authorized(<<"app_id">>, AppSecret)),
@ -96,7 +114,6 @@ t_app(_Config) ->
ok. ok.
t_log_cmd(_) -> t_log_cmd(_) ->
mock_print(),
lists:foreach(fun(Level) -> lists:foreach(fun(Level) ->
emqx_mgmt_cli:log(["primary-level", Level]), emqx_mgmt_cli:log(["primary-level", Level]),
?assertEqual(Level ++ "\n", emqx_mgmt_cli:log(["primary-level"])) ?assertEqual(Level ++ "\n", emqx_mgmt_cli:log(["primary-level"]))
@ -109,12 +126,9 @@ t_log_cmd(_) ->
?assertEqual(Level ++ "\n", emqx_mgmt_cli:log(["handlers", "set-level", ?assertEqual(Level ++ "\n", emqx_mgmt_cli:log(["handlers", "set-level",
atom_to_list(Id), Level])) atom_to_list(Id), Level]))
end, ?LOG_LEVELS) end, ?LOG_LEVELS)
|| #{id := Id} <- emqx_logger:get_log_handlers()], || #{id := Id} <- emqx_logger:get_log_handlers()].
meck:unload().
t_mgmt_cmd(_) -> t_mgmt_cmd(_) ->
% ct:pal("start testing the mgmt command"),
mock_print(),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:mgmt( ?assertMatch({match, _}, re:run(emqx_mgmt_cli:mgmt(
["lookup", "emqx_appid"]), "Not Found.")), ["lookup", "emqx_appid"]), "Not Found.")),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:mgmt( ?assertMatch({match, _}, re:run(emqx_mgmt_cli:mgmt(
@ -127,28 +141,19 @@ t_mgmt_cmd(_) ->
["update", "emqx_appid", "ts"]), "update successfully")), ["update", "emqx_appid", "ts"]), "update successfully")),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:mgmt( ?assertMatch({match, _}, re:run(emqx_mgmt_cli:mgmt(
["delete", "emqx_appid"]), "ok")), ["delete", "emqx_appid"]), "ok")),
ok = emqx_mgmt_cli:mgmt(["list"]), ok = emqx_mgmt_cli:mgmt(["list"]).
meck:unload().
t_status_cmd(_) -> t_status_cmd(_) ->
% ct:pal("start testing status command"),
mock_print(),
%% init internal status seem to be always 'starting' when running ct tests %% init internal status seem to be always 'starting' when running ct tests
?assertMatch({match, _}, re:run(emqx_mgmt_cli:status([]), "Node\s.*@.*\sis\sstart(ed|ing)")), ?assertMatch({match, _}, re:run(emqx_mgmt_cli:status([]), "Node\s.*@.*\sis\sstart(ed|ing)")).
meck:unload().
t_broker_cmd(_) -> t_broker_cmd(_) ->
% ct:pal("start testing the broker command"),
mock_print(),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:broker([]), "sysdescr")), ?assertMatch({match, _}, re:run(emqx_mgmt_cli:broker([]), "sysdescr")),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:broker(["stats"]), "subscriptions.shared")), ?assertMatch({match, _}, re:run(emqx_mgmt_cli:broker(["stats"]), "subscriptions.shared")),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:broker(["metrics"]), "bytes.sent")), ?assertMatch({match, _}, re:run(emqx_mgmt_cli:broker(["metrics"]), "bytes.sent")),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:broker([undefined]), "broker")), ?assertMatch({match, _}, re:run(emqx_mgmt_cli:broker([undefined]), "broker")).
meck:unload().
t_clients_cmd(_) -> t_clients_cmd(_) ->
% ct:pal("start testing the client command"),
mock_print(),
process_flag(trap_exit, true), process_flag(trap_exit, true),
{ok, T} = emqtt:start_link([{clientid, <<"client12">>}, {ok, T} = emqtt:start_link([{clientid, <<"client12">>},
{username, <<"testuser1">>}, {username, <<"testuser1">>},
@ -164,7 +169,6 @@ t_clients_cmd(_) ->
receive receive
{'EXIT', T, _} -> {'EXIT', T, _} ->
ok ok
% ct:pal("Connection closed: ~p~n", [Reason])
after after
500 -> 500 ->
erlang:error("Client is not kick") erlang:error("Client is not kick")
@ -179,10 +183,11 @@ t_clients_cmd(_) ->
{ok, Connack, <<>>, _} = raw_recv_pase(Bin), {ok, Connack, <<>>, _} = raw_recv_pase(Bin),
timer:sleep(300), timer:sleep(300),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:clients(["show", "client13"]), "client13")), ?assertMatch({match, _}, re:run(emqx_mgmt_cli:clients(["show", "client13"]), "client13")),
meck:unload().
% emqx_mgmt_cli:clients(["kick", "client13"]), % emqx_mgmt_cli:clients(["kick", "client13"]),
% timer:sleep(500), % timer:sleep(500),
% ?assertMatch({match, _}, re:run(emqx_mgmt_cli:clients(["show", "client13"]), "Not Found")). % ?assertMatch({match, _}, re:run(emqx_mgmt_cli:clients(["show", "client13"]), "Not Found")).
ok.
raw_recv_pase(Packet) -> raw_recv_pase(Packet) ->
emqx_frame:parse(Packet). emqx_frame:parse(Packet).
@ -191,8 +196,6 @@ raw_send_serialize(Packet) ->
emqx_frame:serialize(Packet). emqx_frame:serialize(Packet).
t_vm_cmd(_) -> t_vm_cmd(_) ->
% ct:pal("start testing the vm command"),
mock_print(),
[[?assertMatch({match, _}, re:run(Result, Name)) [[?assertMatch({match, _}, re:run(Result, Name))
|| Result <- emqx_mgmt_cli:vm([Name])] || Result <- emqx_mgmt_cli:vm([Name])]
|| Name <- ["load", "memory", "process", "io", "ports"]], || Name <- ["load", "memory", "process", "io", "ports"]],
@ -205,12 +208,9 @@ t_vm_cmd(_) ->
[?assertMatch({match, _}, re:run(Result, "io")) [?assertMatch({match, _}, re:run(Result, "io"))
|| Result <- emqx_mgmt_cli:vm(["io"])], || Result <- emqx_mgmt_cli:vm(["io"])],
[?assertMatch({match, _}, re:run(Result, "ports")) [?assertMatch({match, _}, re:run(Result, "ports"))
|| Result <- emqx_mgmt_cli:vm(["ports"])], || Result <- emqx_mgmt_cli:vm(["ports"])].
unmock_print().
t_trace_cmd(_) -> t_trace_cmd(_) ->
% ct:pal("start testing the trace command"),
mock_print(),
logger:set_primary_config(level, debug), logger:set_primary_config(level, debug),
{ok, T} = emqtt:start_link([{clientid, <<"client">>}, {ok, T} = emqtt:start_link([{clientid, <<"client">>},
{username, <<"testuser">>}, {username, <<"testuser">>},
@ -237,12 +237,34 @@ t_trace_cmd(_) ->
Trace7 = emqx_mgmt_cli:trace(["start", "topic", "a/b/c", Trace7 = emqx_mgmt_cli:trace(["start", "topic", "a/b/c",
"log/clientid_trace.log", "error"]), "log/clientid_trace.log", "error"]),
?assertMatch({match, _}, re:run(Trace7, "successfully")), ?assertMatch({match, _}, re:run(Trace7, "successfully")),
logger:set_primary_config(level, error), logger:set_primary_config(level, error).
unmock_print().
t_traces_cmd(_) ->
emqx_trace:create_table(),
Count1 = emqx_mgmt_cli:traces(["list"]),
?assertEqual(0, Count1),
Error1 = emqx_mgmt_cli:traces(["start", "test-name", "client", "clientid-dev"]),
?assertMatch({match, _}, re:run(Error1, "Tracer module not started")),
emqx_trace:start_link(),
Trace1 = emqx_mgmt_cli:traces(["start", "test-name", "client", "clientid-dev"]),
?assertMatch({match, _}, re:run(Trace1, "successfully")),
Count2 = emqx_mgmt_cli:traces(["list"]),
?assertEqual(1, Count2),
Error2 = emqx_mgmt_cli:traces(["start", "test-name", "client", "clientid-dev"]),
?assertMatch({match, _}, re:run(Error2, "already_existed")),
Trace2 = emqx_mgmt_cli:traces(["stop", "test-name"]),
?assertMatch({match, _}, re:run(Trace2, "successfully")),
Count3 = emqx_mgmt_cli:traces(["list"]),
?assertEqual(1, Count3),
Trace3 = emqx_mgmt_cli:traces(["delete", "test-name"]),
?assertMatch({match, _}, re:run(Trace3, "successfully")),
Count4 = emqx_mgmt_cli:traces(["list"]),
?assertEqual(0, Count4),
Error3 = emqx_mgmt_cli:traces(["delete", "test-name"]),
?assertMatch({match, _}, re:run(Error3, "not_found")),
ok.
t_router_cmd(_) -> t_router_cmd(_) ->
% ct:pal("start testing the router command"),
mock_print(),
{ok, T} = emqtt:start_link([{clientid, <<"client1">>}, {ok, T} = emqtt:start_link([{clientid, <<"client1">>},
{username, <<"testuser1">>}, {username, <<"testuser1">>},
{password, <<"pass1">>} {password, <<"pass1">>}
@ -257,12 +279,9 @@ t_router_cmd(_) ->
emqtt:connect(T1), emqtt:connect(T1),
emqtt:subscribe(T1, <<"a/b/c/d">>), emqtt:subscribe(T1, <<"a/b/c/d">>),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:routes(["list"]), "a/b/c | a/b/c")), ?assertMatch({match, _}, re:run(emqx_mgmt_cli:routes(["list"]), "a/b/c | a/b/c")),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:routes(["show", "a/b/c"]), "a/b/c")), ?assertMatch({match, _}, re:run(emqx_mgmt_cli:routes(["show", "a/b/c"]), "a/b/c")).
unmock_print().
t_subscriptions_cmd(_) -> t_subscriptions_cmd(_) ->
% ct:pal("Start testing the subscriptions command"),
mock_print(),
{ok, T3} = emqtt:start_link([{clientid, <<"client">>}, {ok, T3} = emqtt:start_link([{clientid, <<"client">>},
{username, <<"testuser">>}, {username, <<"testuser">>},
{password, <<"pass">>} {password, <<"pass">>}
@ -273,22 +292,18 @@ t_subscriptions_cmd(_) ->
[?assertMatch({match, _} , re:run(Result, "b/b/c")) [?assertMatch({match, _} , re:run(Result, "b/b/c"))
|| Result <- emqx_mgmt_cli:subscriptions(["show", <<"client">>])], || Result <- emqx_mgmt_cli:subscriptions(["show", <<"client">>])],
?assertEqual(emqx_mgmt_cli:subscriptions(["add", "client", "b/b/c", "0"]), "ok~n"), ?assertEqual(emqx_mgmt_cli:subscriptions(["add", "client", "b/b/c", "0"]), "ok~n"),
?assertEqual(emqx_mgmt_cli:subscriptions(["del", "client", "b/b/c"]), "ok~n"), ?assertEqual(emqx_mgmt_cli:subscriptions(["del", "client", "b/b/c"]), "ok~n").
unmock_print().
t_listeners_cmd_old(_) -> t_listeners_cmd_old(_) ->
ok = emqx_listeners:ensure_all_started(), ok = emqx_listeners:ensure_all_started(),
mock_print(),
?assertEqual(emqx_mgmt_cli:listeners([]), ok), ?assertEqual(emqx_mgmt_cli:listeners([]), ok),
?assertEqual( ?assertEqual(
"Stop mqtt:wss:external listener on 0.0.0.0:8084 successfully.\n", "Stop mqtt:wss:external listener on 0.0.0.0:8084 successfully.\n",
emqx_mgmt_cli:listeners(["stop", "wss", "8084"]) emqx_mgmt_cli:listeners(["stop", "wss", "8084"])
), ).
unmock_print().
t_listeners_cmd_new(_) -> t_listeners_cmd_new(_) ->
ok = emqx_listeners:ensure_all_started(), ok = emqx_listeners:ensure_all_started(),
mock_print(),
?assertEqual(emqx_mgmt_cli:listeners([]), ok), ?assertEqual(emqx_mgmt_cli:listeners([]), ok),
?assertEqual( ?assertEqual(
"Stop mqtt:wss:external listener on 0.0.0.0:8084 successfully.\n", "Stop mqtt:wss:external listener on 0.0.0.0:8084 successfully.\n",
@ -304,16 +319,11 @@ t_listeners_cmd_new(_) ->
), ),
?assertEqual( ?assertEqual(
emqx_mgmt_cli:listeners(["restart", "bad:listener:identifier"]), emqx_mgmt_cli:listeners(["restart", "bad:listener:identifier"]),
"Failed to restart bad:listener:identifier listener: {no_such_listener,\"bad:listener:identifier\"}\n" "Failed to restart bad:listener:identifier listener: "
), "{no_such_listener,\"bad:listener:identifier\"}\n"
unmock_print(). ).
t_plugins_cmd(_) -> t_plugins_cmd(_) ->
mock_print(),
meck:new(emqx_plugins, [non_strict, passthrough]),
meck:expect(emqx_plugins, load, fun(_) -> ok end),
meck:expect(emqx_plugins, unload, fun(_) -> ok end),
meck:expect(emqx_plugins, reload, fun(_) -> ok end),
?assertEqual(emqx_mgmt_cli:plugins(["list"]), ok), ?assertEqual(emqx_mgmt_cli:plugins(["list"]), ok),
?assertEqual( ?assertEqual(
emqx_mgmt_cli:plugins(["unload", "emqx_auth_mnesia"]), emqx_mgmt_cli:plugins(["unload", "emqx_auth_mnesia"]),
@ -326,11 +336,9 @@ t_plugins_cmd(_) ->
?assertEqual( ?assertEqual(
emqx_mgmt_cli:plugins(["unload", "emqx_management"]), emqx_mgmt_cli:plugins(["unload", "emqx_management"]),
"Plugin emqx_management can not be unloaded.~n" "Plugin emqx_management can not be unloaded.~n"
), ).
unmock_print().
t_cli(_) -> t_cli(_) ->
mock_print(),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:status([""]), "status")), ?assertMatch({match, _}, re:run(emqx_mgmt_cli:status([""]), "status")),
[?assertMatch({match, _}, re:run(Value, "broker")) [?assertMatch({match, _}, re:run(Value, "broker"))
|| Value <- emqx_mgmt_cli:broker([""])], || Value <- emqx_mgmt_cli:broker([""])],
@ -352,9 +360,10 @@ t_cli(_) ->
|| Value <- emqx_mgmt_cli:mnesia([""])], || Value <- emqx_mgmt_cli:mnesia([""])],
[?assertMatch({match, _}, re:run(Value, "trace")) [?assertMatch({match, _}, re:run(Value, "trace"))
|| Value <- emqx_mgmt_cli:trace([""])], || Value <- emqx_mgmt_cli:trace([""])],
[?assertMatch({match, _}, re:run(Value, "traces"))
|| Value <- emqx_mgmt_cli:traces([""])],
[?assertMatch({match, _}, re:run(Value, "mgmt")) [?assertMatch({match, _}, re:run(Value, "mgmt"))
|| Value <- emqx_mgmt_cli:mgmt([""])], || Value <- emqx_mgmt_cli:mgmt([""])].
unmock_print().
mock_print() -> mock_print() ->
catch meck:unload(emqx_ctl), catch meck:unload(emqx_ctl),

View File

@ -29,6 +29,8 @@
-define(HOST, "http://127.0.0.1:8081/"). -define(HOST, "http://127.0.0.1:8081/").
-elvis([{elvis_style, line_length, disable}]).
-define(API_VERSION, "v4"). -define(API_VERSION, "v4").
-define(BASE_PATH, "api"). -define(BASE_PATH, "api").
@ -76,30 +78,40 @@ t_alarms(_) ->
?assert(is_existing(alarm2, emqx_alarm:get_alarms(activated))), ?assert(is_existing(alarm2, emqx_alarm:get_alarms(activated))),
{ok, Return1} = request_api(get, api_path(["alarms/activated"]), auth_header_()), {ok, Return1} = request_api(get, api_path(["alarms/activated"]), auth_header_()),
?assert(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return1))))), ?assert(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>,
?assert(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return1))))), lists:nth(1, get(<<"data">>, Return1))))),
?assert(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>,
lists:nth(1, get(<<"data">>, Return1))))),
emqx_alarm:deactivate(alarm1), emqx_alarm:deactivate(alarm1),
{ok, Return2} = request_api(get, api_path(["alarms"]), auth_header_()), {ok, Return2} = request_api(get, api_path(["alarms"]), auth_header_()),
?assert(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return2))))), ?assert(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>,
?assert(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return2))))), lists:nth(1, get(<<"data">>, Return2))))),
?assert(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>,
lists:nth(1, get(<<"data">>, Return2))))),
{ok, Return3} = request_api(get, api_path(["alarms/deactivated"]), auth_header_()), {ok, Return3} = request_api(get, api_path(["alarms/deactivated"]), auth_header_()),
?assert(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return3))))), ?assert(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>,
?assertNot(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return3))))), lists:nth(1, get(<<"data">>, Return3))))),
?assertNot(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>,
lists:nth(1, get(<<"data">>, Return3))))),
emqx_alarm:deactivate(alarm2), emqx_alarm:deactivate(alarm2),
{ok, Return4} = request_api(get, api_path(["alarms/deactivated"]), auth_header_()), {ok, Return4} = request_api(get, api_path(["alarms/deactivated"]), auth_header_()),
?assert(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return4))))), ?assert(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>,
?assert(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return4))))), lists:nth(1, get(<<"data">>, Return4))))),
?assert(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>,
lists:nth(1, get(<<"data">>, Return4))))),
{ok, _} = request_api(delete, api_path(["alarms/deactivated"]), auth_header_()), {ok, _} = request_api(delete, api_path(["alarms/deactivated"]), auth_header_()),
{ok, Return5} = request_api(get, api_path(["alarms/deactivated"]), auth_header_()), {ok, Return5} = request_api(get, api_path(["alarms/deactivated"]), auth_header_()),
?assertNot(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return5))))), ?assertNot(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>,
?assertNot(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return5))))). lists:nth(1, get(<<"data">>, Return5))))),
?assertNot(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>,
lists:nth(1, get(<<"data">>, Return5))))).
t_apps(_) -> t_apps(_) ->
AppId = <<"123456">>, AppId = <<"123456">>,
@ -153,7 +165,8 @@ t_banned(_) ->
[Banned] = get(<<"data">>, Result), [Banned] = get(<<"data">>, Result),
?assertEqual(Who, maps:get(<<"who">>, Banned)), ?assertEqual(Who, maps:get(<<"who">>, Banned)),
{ok, _} = request_api(delete, api_path(["banned", "clientid", binary_to_list(Who)]), auth_header_()), {ok, _} = request_api(delete, api_path(["banned", "clientid", binary_to_list(Who)]),
auth_header_()),
{ok, Result2} = request_api(get, api_path(["banned"]), auth_header_()), {ok, Result2} = request_api(get, api_path(["banned"]), auth_header_()),
?assertEqual([], get(<<"data">>, Result2)). ?assertEqual([], get(<<"data">>, Result2)).
@ -205,40 +218,50 @@ t_clients(_) ->
meck:new(emqx_mgmt, [passthrough, no_history]), meck:new(emqx_mgmt, [passthrough, no_history]),
meck:expect(emqx_mgmt, kickout_client, 1, fun(_) -> {error, undefined} end), meck:expect(emqx_mgmt, kickout_client, 1, fun(_) -> {error, undefined} end),
{ok, MeckRet1} = request_api(delete, api_path(["clients", binary_to_list(ClientId1)]), auth_header_()), {ok, MeckRet1} = request_api(delete, api_path(["clients", binary_to_list(ClientId1)]),
auth_header_()),
?assertEqual(?ERROR1, get(<<"code">>, MeckRet1)), ?assertEqual(?ERROR1, get(<<"code">>, MeckRet1)),
meck:expect(emqx_mgmt, clean_acl_cache, 1, fun(_) -> {error, undefined} end), meck:expect(emqx_mgmt, clean_acl_cache, 1, fun(_) -> {error, undefined} end),
{ok, MeckRet2} = request_api(delete, api_path(["clients", binary_to_list(ClientId1), "acl_cache"]), auth_header_()), {ok, MeckRet2} = request_api(delete,
api_path(["clients", binary_to_list(ClientId1), "acl_cache"]), auth_header_()),
?assertEqual(?ERROR1, get(<<"code">>, MeckRet2)), ?assertEqual(?ERROR1, get(<<"code">>, MeckRet2)),
meck:expect(emqx_mgmt, list_acl_cache, 1, fun(_) -> {error, undefined} end), meck:expect(emqx_mgmt, list_acl_cache, 1, fun(_) -> {error, undefined} end),
{ok, MeckRet3} = request_api(get, api_path(["clients", binary_to_list(ClientId2), "acl_cache"]), auth_header_()), {ok, MeckRet3} = request_api(get,
api_path(["clients", binary_to_list(ClientId2), "acl_cache"]), auth_header_()),
?assertEqual(?ERROR1, get(<<"code">>, MeckRet3)), ?assertEqual(?ERROR1, get(<<"code">>, MeckRet3)),
meck:unload(emqx_mgmt), meck:unload(emqx_mgmt),
{ok, Ok} = request_api(delete, api_path(["clients", binary_to_list(ClientId1)]), auth_header_()), {ok, Ok} = request_api(delete,
api_path(["clients", binary_to_list(ClientId1)]), auth_header_()),
?assertEqual(?SUCCESS, get(<<"code">>, Ok)), ?assertEqual(?SUCCESS, get(<<"code">>, Ok)),
timer:sleep(300), timer:sleep(300),
{ok, Ok1} = request_api(delete, api_path(["clients", binary_to_list(ClientId1)]), auth_header_()), {ok, Ok1} = request_api(delete,
api_path(["clients", binary_to_list(ClientId1)]), auth_header_()),
?assertEqual(?SUCCESS, get(<<"code">>, Ok1)), ?assertEqual(?SUCCESS, get(<<"code">>, Ok1)),
{ok, Clients6} = request_api(get, api_path(["clients"]), "_limit=100&_page=1", auth_header_()), {ok, Clients6} = request_api(get,
api_path(["clients"]), "_limit=100&_page=1", auth_header_()),
?assertEqual(1, maps:get(<<"count">>, get(<<"meta">>, Clients6))), ?assertEqual(1, maps:get(<<"count">>, get(<<"meta">>, Clients6))),
{ok, NotFound1} = request_api(get, api_path(["clients", binary_to_list(ClientId1), "acl_cache"]), auth_header_()), {ok, NotFound1} = request_api(get,
api_path(["clients", binary_to_list(ClientId1), "acl_cache"]), auth_header_()),
?assertEqual(?ERROR12, get(<<"code">>, NotFound1)), ?assertEqual(?ERROR12, get(<<"code">>, NotFound1)),
{ok, NotFound2} = request_api(delete, api_path(["clients", binary_to_list(ClientId1), "acl_cache"]), auth_header_()), {ok, NotFound2} = request_api(delete,
api_path(["clients", binary_to_list(ClientId1), "acl_cache"]), auth_header_()),
?assertEqual(?ERROR12, get(<<"code">>, NotFound2)), ?assertEqual(?ERROR12, get(<<"code">>, NotFound2)),
{ok, EmptyAclCache} = request_api(get, api_path(["clients", binary_to_list(ClientId2), "acl_cache"]), auth_header_()), {ok, EmptyAclCache} = request_api(get,
api_path(["clients", binary_to_list(ClientId2), "acl_cache"]), auth_header_()),
?assertEqual(0, length(get(<<"data">>, EmptyAclCache))), ?assertEqual(0, length(get(<<"data">>, EmptyAclCache))),
{ok, Ok1} = request_api(delete, api_path(["clients", binary_to_list(ClientId2), "acl_cache"]), auth_header_()), {ok, Ok1} = request_api(delete,
api_path(["clients", binary_to_list(ClientId2), "acl_cache"]), auth_header_()),
?assertEqual(?SUCCESS, get(<<"code">>, Ok1)). ?assertEqual(?SUCCESS, get(<<"code">>, Ok1)).
receive_exit(0) -> receive_exit(0) ->
@ -257,7 +280,8 @@ receive_exit(Count) ->
t_listeners(_) -> t_listeners(_) ->
{ok, _} = request_api(get, api_path(["listeners"]), auth_header_()), {ok, _} = request_api(get, api_path(["listeners"]), auth_header_()),
{ok, _} = request_api(get, api_path(["nodes", atom_to_list(node()), "listeners"]), auth_header_()), {ok, _} = request_api(get,
api_path(["nodes", atom_to_list(node()), "listeners"]), auth_header_()),
meck:new(emqx_mgmt, [passthrough, no_history]), meck:new(emqx_mgmt, [passthrough, no_history]),
meck:expect(emqx_mgmt, list_listeners, 0, fun() -> [{node(), {error, undefined}}] end), meck:expect(emqx_mgmt, list_listeners, 0, fun() -> [{node(), {error, undefined}}] end),
{ok, Return} = request_api(get, api_path(["listeners"]), auth_header_()), {ok, Return} = request_api(get, api_path(["listeners"]), auth_header_()),
@ -268,10 +292,12 @@ t_listeners(_) ->
t_metrics(_) -> t_metrics(_) ->
{ok, _} = request_api(get, api_path(["metrics"]), auth_header_()), {ok, _} = request_api(get, api_path(["metrics"]), auth_header_()),
{ok, _} = request_api(get, api_path(["nodes", atom_to_list(node()), "metrics"]), auth_header_()), {ok, _} = request_api(get,
api_path(["nodes", atom_to_list(node()), "metrics"]), auth_header_()),
meck:new(emqx_mgmt, [passthrough, no_history]), meck:new(emqx_mgmt, [passthrough, no_history]),
meck:expect(emqx_mgmt, get_metrics, 1, fun(_) -> {error, undefined} end), meck:expect(emqx_mgmt, get_metrics, 1, fun(_) -> {error, undefined} end),
{ok, "{\"message\":\"undefined\"}"} = request_api(get, api_path(["nodes", atom_to_list(node()), "metrics"]), auth_header_()), {ok, "{\"message\":\"undefined\"}"} =
request_api(get, api_path(["nodes", atom_to_list(node()), "metrics"]), auth_header_()),
meck:unload(emqx_mgmt). meck:unload(emqx_mgmt).
t_nodes(_) -> t_nodes(_) ->
@ -348,7 +374,8 @@ t_acl_cache(_) ->
{ok, _} = emqtt:connect(C1), {ok, _} = emqtt:connect(C1),
{ok, _, _} = emqtt:subscribe(C1, Topic, 2), {ok, _, _} = emqtt:subscribe(C1, Topic, 2),
%% get acl cache, should not be empty %% get acl cache, should not be empty
{ok, Result} = request_api(get, api_path(["clients", binary_to_list(ClientId), "acl_cache"]), [], auth_header_()), {ok, Result} = request_api(get,
api_path(["clients", binary_to_list(ClientId), "acl_cache"]), [], auth_header_()),
#{<<"code">> := 0, <<"data">> := Caches} = jiffy:decode(list_to_binary(Result), [return_maps]), #{<<"code">> := 0, <<"data">> := Caches} = jiffy:decode(list_to_binary(Result), [return_maps]),
?assert(length(Caches) > 0), ?assert(length(Caches) > 0),
?assertMatch(#{<<"access">> := <<"subscribe">>, ?assertMatch(#{<<"access">> := <<"subscribe">>,
@ -356,11 +383,14 @@ t_acl_cache(_) ->
<<"result">> := <<"allow">>, <<"result">> := <<"allow">>,
<<"updated_time">> := _}, hd(Caches)), <<"updated_time">> := _}, hd(Caches)),
%% clear acl cache %% clear acl cache
{ok, Result2} = request_api(delete, api_path(["clients", binary_to_list(ClientId), "acl_cache"]), [], auth_header_()), {ok, Result2} = request_api(delete,
api_path(["clients", binary_to_list(ClientId), "acl_cache"]), [], auth_header_()),
?assertMatch(#{<<"code">> := 0}, jiffy:decode(list_to_binary(Result2), [return_maps])), ?assertMatch(#{<<"code">> := 0}, jiffy:decode(list_to_binary(Result2), [return_maps])),
%% get acl cache again, after the acl cache is cleared %% get acl cache again, after the acl cache is cleared
{ok, Result3} = request_api(get, api_path(["clients", binary_to_list(ClientId), "acl_cache"]), [], auth_header_()), {ok, Result3} = request_api(get,
#{<<"code">> := 0, <<"data">> := Caches3} = jiffy:decode(list_to_binary(Result3), [return_maps]), api_path(["clients", binary_to_list(ClientId), "acl_cache"]), [], auth_header_()),
#{<<"code">> := 0, <<"data">> := Caches3}
= jiffy:decode(list_to_binary(Result3), [return_maps]),
?assertEqual(0, length(Caches3)), ?assertEqual(0, length(Caches3)),
ok = emqtt:disconnect(C1). ok = emqtt:disconnect(C1).
@ -371,7 +401,7 @@ t_pubsub(_) ->
ClientId = <<"client1">>, ClientId = <<"client1">>,
Options = #{clientid => ClientId, Options = #{clientid => ClientId,
proto_ver => 5}, proto_ver => v5},
Topic = <<"mytopic">>, Topic = <<"mytopic">>,
{ok, C1} = emqtt:start_link(Options), {ok, C1} = emqtt:start_link(Options),
{ok, _} = emqtt:connect(C1), {ok, _} = emqtt:connect(C1),
@ -482,12 +512,15 @@ t_pubsub(_) ->
Topic_list = [<<"mytopic1">>, <<"mytopic2">>], Topic_list = [<<"mytopic1">>, <<"mytopic2">>],
[ {ok, _, [2]} = emqtt:subscribe(C1, Topics, 2) || Topics <- Topic_list], [ {ok, _, [2]} = emqtt:subscribe(C1, Topics, 2) || Topics <- Topic_list],
Body1 = [ #{<<"clientid">> => ClientId, <<"topic">> => Topics, <<"qos">> => 2} || Topics <- Topic_list], Body1 = [ #{<<"clientid">> => ClientId,
<<"topic">> => Topics, <<"qos">> => 2} || Topics <- Topic_list],
{ok, Data1} = request_api(post, api_path(["mqtt/subscribe_batch"]), [], auth_header_(), Body1), {ok, Data1} = request_api(post, api_path(["mqtt/subscribe_batch"]), [], auth_header_(), Body1),
loop(maps:get(<<"data">>, jiffy:decode(list_to_binary(Data1), [return_maps]))), loop(maps:get(<<"data">>, jiffy:decode(list_to_binary(Data1), [return_maps]))),
%% tests publish_batch %% tests publish_batch
Body2 = [ #{<<"clientid">> => ClientId, <<"topic">> => Topics, <<"qos">> => 2, <<"retain">> => <<"false">>, <<"payload">> => #{body => "hello world"}} || Topics <- Topic_list ], Body2 = [ #{<<"clientid">> => ClientId, <<"topic">> => Topics, <<"qos">> => 2,
<<"retain">> => <<"false">>, <<"payload">> => #{body => "hello world"}}
|| Topics <- Topic_list ],
{ok, Data2} = request_api(post, api_path(["mqtt/publish_batch"]), [], auth_header_(), Body2), {ok, Data2} = request_api(post, api_path(["mqtt/publish_batch"]), [], auth_header_(), Body2),
loop(maps:get(<<"data">>, jiffy:decode(list_to_binary(Data2), [return_maps]))), loop(maps:get(<<"data">>, jiffy:decode(list_to_binary(Data2), [return_maps]))),
[ ?assert(receive [ ?assert(receive
@ -499,14 +532,33 @@ t_pubsub(_) ->
%% tests unsubscribe_batch %% tests unsubscribe_batch
Body3 = [#{<<"clientid">> => ClientId, <<"topic">> => Topics} || Topics <- Topic_list], Body3 = [#{<<"clientid">> => ClientId, <<"topic">> => Topics} || Topics <- Topic_list],
{ok, Data3} = request_api(post, api_path(["mqtt/unsubscribe_batch"]), [], auth_header_(), Body3), {ok, Data3} = request_api(post,
api_path(["mqtt/unsubscribe_batch"]), [], auth_header_(), Body3),
loop(maps:get(<<"data">>, jiffy:decode(list_to_binary(Data3), [return_maps]))), loop(maps:get(<<"data">>, jiffy:decode(list_to_binary(Data3), [return_maps]))),
{ok, _, [1]} = emqtt:subscribe(C1, <<"mytopic">>, qos1),
timer:sleep(50),
%% user properties
{ok, Code} = request_api(post, api_path(["mqtt/publish"]), [], auth_header_(),
#{<<"clientid">> => ClientId,
<<"topic">> => <<"mytopic">>,
<<"qos">> => 1,
<<"payload">> => <<"hello world">>,
<<"user_properties">> => #{<<"porp_1">> => <<"porp_1">>}}),
?assert(receive
{publish, #{payload := <<"hello world">>,
properties := #{'User-Property' := [{<<"porp_1">>,<<"porp_1">>}]}}} ->
true
after 100 ->
false
end),
ok = emqtt:disconnect(C1), ok = emqtt:disconnect(C1),
?assertEqual(3, emqx_metrics:val('messages.qos1.received') - Qos1Received), ?assertEqual(4, emqx_metrics:val('messages.qos1.received') - Qos1Received),
?assertEqual(2, emqx_metrics:val('messages.qos2.received') - Qos2Received), ?assertEqual(2, emqx_metrics:val('messages.qos2.received') - Qos2Received),
?assertEqual(5, emqx_metrics:val('messages.received') - Received). ?assertEqual(6, emqx_metrics:val('messages.received') - Received).
loop([]) -> []; loop([]) -> [];
@ -523,7 +575,8 @@ t_routes_and_subscriptions(_) ->
?assertEqual([], get(<<"data">>, NonRoute)), ?assertEqual([], get(<<"data">>, NonRoute)),
{ok, NonSubscription} = request_api(get, api_path(["subscriptions"]), auth_header_()), {ok, NonSubscription} = request_api(get, api_path(["subscriptions"]), auth_header_()),
?assertEqual([], get(<<"data">>, NonSubscription)), ?assertEqual([], get(<<"data">>, NonSubscription)),
{ok, NonSubscription1} = request_api(get, api_path(["nodes", atom_to_list(node()), "subscriptions"]), auth_header_()), {ok, NonSubscription1} = request_api(get,
api_path(["nodes", atom_to_list(node()), "subscriptions"]), auth_header_()),
?assertEqual([], get(<<"data">>, NonSubscription1)), ?assertEqual([], get(<<"data">>, NonSubscription1)),
{ok, NonSubscription2} = request_api(get, {ok, NonSubscription2} = request_api(get,
api_path(["subscriptions", binary_to_list(ClientId)]), api_path(["subscriptions", binary_to_list(ClientId)]),
@ -552,11 +605,14 @@ t_routes_and_subscriptions(_) ->
?assertEqual(Topic, maps:get(<<"topic">>, Subscription)), ?assertEqual(Topic, maps:get(<<"topic">>, Subscription)),
?assertEqual(ClientId, maps:get(<<"clientid">>, Subscription)), ?assertEqual(ClientId, maps:get(<<"clientid">>, Subscription)),
{ok, Result3} = request_api(get, api_path(["nodes", atom_to_list(node()), "subscriptions"]), auth_header_()), {ok, Result3} = request_api(get,
api_path(["nodes", atom_to_list(node()), "subscriptions"]), auth_header_()),
{ok, Result4} = request_api(get, api_path(["subscriptions", binary_to_list(ClientId)]), auth_header_()), {ok, Result4} = request_api(get,
api_path(["subscriptions", binary_to_list(ClientId)]), auth_header_()),
[Subscription] = get(<<"data">>, Result4), [Subscription] = get(<<"data">>, Result4),
{ok, Result4} = request_api(get, api_path(["nodes", atom_to_list(node()), "subscriptions", binary_to_list(ClientId)]) {ok, Result4} = request_api(get,
api_path(["nodes", atom_to_list(node()), "subscriptions", binary_to_list(ClientId)])
, auth_header_()), , auth_header_()),
ok = emqtt:disconnect(C1). ok = emqtt:disconnect(C1).
@ -566,7 +622,8 @@ t_stats(_) ->
{ok, _} = request_api(get, api_path(["nodes", atom_to_list(node()), "stats"]), auth_header_()), {ok, _} = request_api(get, api_path(["nodes", atom_to_list(node()), "stats"]), auth_header_()),
meck:new(emqx_mgmt, [passthrough, no_history]), meck:new(emqx_mgmt, [passthrough, no_history]),
meck:expect(emqx_mgmt, get_stats, 1, fun(_) -> {error, undefined} end), meck:expect(emqx_mgmt, get_stats, 1, fun(_) -> {error, undefined} end),
{ok, Return} = request_api(get, api_path(["nodes", atom_to_list(node()), "stats"]), auth_header_()), {ok, Return} = request_api(get,
api_path(["nodes", atom_to_list(node()), "stats"]), auth_header_()),
?assertEqual(<<"undefined">>, get(<<"message">>, Return)), ?assertEqual(<<"undefined">>, get(<<"message">>, Return)),
meck:unload(emqx_mgmt). meck:unload(emqx_mgmt).
@ -578,10 +635,15 @@ t_data(_) ->
{ok, Data} = request_api(post, api_path(["data","export"]), [], auth_header_(), [#{}]), {ok, Data} = request_api(post, api_path(["data","export"]), [], auth_header_(), [#{}]),
#{<<"filename">> := Filename, <<"node">> := Node} = emqx_ct_http:get_http_data(Data), #{<<"filename">> := Filename, <<"node">> := Node} = emqx_ct_http:get_http_data(Data),
{ok, DataList} = request_api(get, api_path(["data","export"]), auth_header_()), {ok, DataList} = request_api(get, api_path(["data","export"]), auth_header_()),
?assertEqual(true, lists:member(emqx_ct_http:get_http_data(Data), emqx_ct_http:get_http_data(DataList))), ?assertEqual(true,
lists:member(emqx_ct_http:get_http_data(Data), emqx_ct_http:get_http_data(DataList))),
?assertMatch({ok, _}, request_api(post, api_path(["data","import"]), [], auth_header_(), #{<<"filename">> => Filename, <<"node">> => Node})), ?assertMatch({ok, _}, request_api(post,
?assertMatch({ok, _}, request_api(post, api_path(["data","import"]), [], auth_header_(), #{<<"filename">> => Filename})), api_path(["data","import"]), [], auth_header_(),
#{<<"filename">> => Filename, <<"node">> => Node})),
?assertMatch({ok, _},
request_api(post, api_path(["data","import"]), [], auth_header_(),
#{<<"filename">> => Filename})),
application:stop(emqx_rule_engine), application:stop(emqx_rule_engine),
application:stop(emqx_dahboard), application:stop(emqx_dahboard),
ok. ok.
@ -596,9 +658,35 @@ t_data_import_content(_) ->
Dir = emqx:get_env(data_dir), Dir = emqx:get_env(data_dir),
{ok, Bin} = file:read_file(filename:join(Dir, Filename)), {ok, Bin} = file:read_file(filename:join(Dir, Filename)),
Content = emqx_json:decode(Bin), Content = emqx_json:decode(Bin),
?assertMatch({ok, "{\"code\":0}"}, request_api(post, api_path(["data","import"]), [], auth_header_(), Content)), ?assertMatch({ok, "{\"code\":0}"},
request_api(post, api_path(["data","import"]), [], auth_header_(), Content)),
application:stop(emqx_rule_engine), application:stop(emqx_rule_engine),
application:stop(emqx_dahboard). application:stop(emqx_dashboard).
%% Exercises PUT /clients/:clientid/keepalive: 404 for an unknown client,
%% success for a connected client (verifying the live channel's conninfo
%% picks up the new value), and rejection of out-of-range intervals.
t_keepalive(_Config) ->
    application:ensure_all_started(emqx_dashboard),
    Username = "user_keepalive",
    ClientId = "client_keepalive",
    AuthHeader = auth_header_(),
    Path = api_path(["clients", ClientId, "keepalive"]),
    %% client not connected yet -> not_found (code 112)
    {ok, NotFound} = request_api(put, Path, "interval=5", AuthHeader, [#{}]),
    ?assertEqual("{\"message\":\"not_found\",\"code\":112}", NotFound),
    {ok, C1} = emqtt:start_link(#{username => Username, clientid => ClientId}),
    {ok, _} = emqtt:connect(C1),
    {ok, Ok} = request_api(put, Path, "interval=5", AuthHeader, [#{}]),
    ?assertEqual("{\"code\":0}", Ok),
    %% the updated keepalive must be visible in the channel's conninfo
    [Pid] = emqx_cm:lookup_channels(list_to_binary(ClientId)),
    #{conninfo := #{keepalive := Keepalive}} = emqx_connection:info(Pid),
    ?assertEqual(5, Keepalive),
    %% keepalive is a 16-bit MQTT field: -1 and 65536 are both rejected
    %% with the same error body (code 102)
    {ok, Error1} = request_api(put, Path, "interval=-1", AuthHeader, [#{}]),
    {ok, Error2} = request_api(put, Path, "interval=65536", AuthHeader, [#{}]),
    ErrMsg = #{<<"code">> => 102,
               <<"message">> => <<"mqtt3.1.1 specification: keepalive must between 0~65535">>},
    ?assertEqual(ErrMsg, jiffy:decode(Error1, [return_maps])),
    ?assertEqual(Error1, Error2),
    emqtt:disconnect(C1),
    application:stop(emqx_dashboard),
    ok.
request_api(Method, Url, Auth) -> request_api(Method, Url, Auth) ->
request_api(Method, Url, [], Auth, []). request_api(Method, Url, [], Auth, []).

View File

@ -0,0 +1,68 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
%% Verifies that data-backup imports from e4.2.x and e4.3.x recreate the
%% MongoDB authentication module. Enterprise-only: without EMQX_ENTERPRISE
%% the suite compiles to an empty case list.
-module(emqx_mongo_auth_module_migration_SUITE).

-compile(export_all).
-compile(nowarn_export_all).

-include_lib("eunit/include/eunit.hrl").

-ifdef(EMQX_ENTERPRISE).
-include_lib("emqx_modules/include/emqx_modules.hrl").
-endif.

all() ->
    emqx_ct:all(?MODULE).

-ifdef(EMQX_ENTERPRISE).

init_per_suite(Config) ->
    application:load(emqx_modules_spec),
    emqx_ct_helpers:start_apps([emqx_management, emqx_modules]),
    Config.

end_per_suite(_Config) ->
    emqx_ct_helpers:stop_apps([emqx_modules, emqx_management]),
    application:unload(emqx_modules_spec),
    ok.

%% Import an e4.2.8 backup and expect a mongo_authentication module whose
%% config carries the <<"srv_record">> key.
t_import_4_2(Config) ->
    ?assertMatch(ok, import("e4.2.8.json", Config)),
    %% give the import's side effects time to settle before querying
    timer:sleep(100),
    MongoAuthNModule = emqx_modules_registry:find_module_by_type(mongo_authentication),
    ?assertNotEqual(not_found, MongoAuthNModule),
    ?assertMatch(#module{config = #{<<"srv_record">> := _}}, MongoAuthNModule),
    delete_modules().

%% Same check for an e4.3.5 backup file.
t_import_4_3(Config) ->
    ?assertMatch(ok, import("e4.3.5.json", Config)),
    timer:sleep(100),
    MongoAuthNModule = emqx_modules_registry:find_module_by_type(mongo_authentication),
    ?assertNotEqual(not_found, MongoAuthNModule),
    ?assertMatch(#module{config = #{<<"srv_record">> := _}}, MongoAuthNModule),
    delete_modules().

%% Run a backup file from the suite's data_dir through the importer.
import(File, Config) ->
    Filename = filename:join(proplists:get_value(data_dir, Config), File),
    emqx_mgmt_data_backup:import(Filename, "{}").

%% Remove every registered module so cases do not leak state into each other.
delete_modules() ->
    [emqx_modules_registry:remove_module(Mod) || Mod <- emqx_modules_registry:get_modules()].

-endif.

View File

@ -0,0 +1 @@
{"version":"4.2","date":"2021-11-15 01:52:40","modules":[{"id":"module:79002e0f","type":"retainer","config":{"storage_type":"ram","max_retained_messages":0,"max_payload_size":"1MB","expiry_interval":0},"enabled":true,"created_at":1636941076704,"description":""},{"id":"module:34834081","type":"presence","config":{"qos":0},"enabled":true,"created_at":1636941076704,"description":""},{"id":"module:f6eb69d1","type":"recon","config":{},"enabled":true,"created_at":1636941076704,"description":""},{"id":"module:7ae737b2","type":"mongo_authentication","config":{"w_mode":"undef","verify":false,"type":"single","super_query_selector":"","super_query_field":"","super_query_collection":"","ssl":false,"server":"127.0.0.1:27017","r_mode":"undef","pool_size":8,"password":"public","login":"admin","keyfile":{"filename":"","file":""},"database":"mqtt","certfile":{"filename":"","file":""},"cacertfile":{"filename":"","file":""},"auth_source":"admin","auth_query_selector":"username=%u","auth_query_password_hash":"sha256","auth_query_password_field":"password","auth_query_collection":"mqtt_user","acl_query_selectors":[],"acl_query_collection":"mqtt_acl"},"enabled":false,"created_at":1636941148794,"description":""},{"id":"module:e8c63201","type":"internal_acl","config":{"acl_rule_file":"etc/acl.conf"},"enabled":true,"created_at":1636941076704,"description":""}],"rules":[],"resources":[],"blacklist":[],"apps":[{"id":"admin","secret":"public","name":"Default","desc":"Application user","status":true,"expired":"undefined"}],"users":[{"username":"admin","password":"qP5m2iS9qnn51gHoGLbaiMo/GwE=","tags":"administrator"}],"auth_mnesia":[],"acl_mnesia":[],"schemas":[],"configs":[],"listeners_state":[]}

View File

@ -0,0 +1 @@
{"version":"4.3","rules":[],"resources":[],"blacklist":[],"apps":[{"id":"admin","secret":"public","name":"Default","desc":"Application user","status":true,"expired":"undefined"}],"users":[{"username":"admin","password":"/mWV4UgV0xmVUZX4qdIXQvxXZB0=","tags":"administrator"}],"auth_mnesia":[],"acl_mnesia":[],"modules":[{"id":"module:5881add2","type":"mongo_authentication","config":{"w_mode":"undef","verify":false,"type":"single","super_query_selector":"","super_query_field":"","super_query_collection":"","ssl":false,"server":"127.0.0.1:27017","r_mode":"undef","pool_size":8,"password":"public","login":"admin","keyfile":{"filename":"","file":""},"database":"mqtt","certfile":{"filename":"","file":""},"cacertfile":{"filename":"","file":""},"auth_source":"admin","auth_query_selector":"username=%u","auth_query_password_hash":"sha256","auth_query_password_field":"password","auth_query_collection":"mqtt_user","acl_query_selectors":[],"acl_query_collection":"mqtt_acl"},"enabled":false,"created_at":1636942609573,"description":""},{"id":"module:2adb6480","type":"presence","config":{"qos":0},"enabled":true,"created_at":1636942586725,"description":""},{"id":"module:24fabe8a","type":"internal_acl","config":{"acl_rule_file":"etc/acl.conf"},"enabled":true,"created_at":1636942586725,"description":""},{"id":"module:22c70ab8","type":"recon","config":{},"enabled":true,"created_at":1636942586725,"description":""},{"id":"module:a59f9a4a","type":"retainer","config":{"storage_type":"ram","max_retained_messages":0,"max_payload_size":"1MB","expiry_interval":0},"enabled":true,"created_at":1636942586725,"description":""}],"schemas":[],"configs":[],"listeners_state":[],"date":"2021-11-15 10:16:56"}

View File

@ -0,0 +1,28 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
%% Shared definitions for the slow-subscriptions (slow_subs) feature.

%% Named ETS table holding the current top-k slowest subscribers.
-define(TOPK_TAB, emqx_slow_subs_topk).

%% Table key: {Latency, ClientId}. Erlang term order on this tuple makes an
%% ordered_set table ascend by latency, so ets:first/1 yields the minimum.
-define(INDEX(Latency, ClientId), {Latency, ClientId}).

%% One ranking entry.
-record(top_k, { index :: index()  %% ?INDEX(Latency, ClientId), also the table key
               , type :: emqx_message_latency_stats:latency_type()
               , last_update_time :: pos_integer()  %% timestamp of the last refresh
               , extra = []  %% spare field, unused here
               }).

-type top_k() :: #top_k{}.
-type index() :: ?INDEX(non_neg_integer(), emqx_types:clientid()).

View File

@ -1,6 +1,6 @@
{application, emqx_plugin_libs, {application, emqx_plugin_libs,
[{description, "EMQ X Plugin utility libs"}, [{description, "EMQ X Plugin utility libs"},
{vsn, "4.3.1"}, {vsn, "4.4.0"},
{modules, []}, {modules, []},
{applications, [kernel,stdlib]}, {applications, [kernel,stdlib]},
{env, []} {env, []}

View File

@ -0,0 +1,327 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
%% Slow-subscription statistics collector.
%%
%% Maintains the clients with the highest message latency in the ordered
%% ETS table ?TOPK_TAB and periodically publishes the current ranking to
%% the $SYS "slow_subs" topic.
-module(emqx_slow_subs).

-behaviour(gen_server).

-include_lib("include/emqx.hrl").
-include_lib("include/logger.hrl").
-include_lib("emqx_plugin_libs/include/emqx_slow_subs.hrl").

-logger_header("[SLOW Subs]").

-export([ start_link/1, on_stats_update/2, enable/0
        , disable/0, clear_history/0, init_topk_tab/0
        ]).

%% gen_server callbacks
-export([ init/1
        , handle_call/3
        , handle_cast/2
        , handle_info/2
        , terminate/2
        , code_change/3
        ]).

-compile(nowarn_unused_type).

%% Server state: the raw config proplist, the enable flag, and the time of
%% the last notice tick (used as the window start in published reports).
-type state() :: #{ config := proplist:proplist()
                  , enable := boolean()
                  , last_tick_at := pos_integer()
                  }.

%% One entry of the published ranking.
-type log() :: #{ rank := pos_integer()
                , clientid := emqx_types:clientid()
                , latency := non_neg_integer()
                , type := emqx_message_latency_stats:latency_type()
                }.

%% Payload shape of one published notice message.
-type window_log() :: #{ last_tick_at := pos_integer()
                       , logs := [log()]
                       }.

-type message() :: #message{}.

-import(proplists, [get_value/2]).

%% Arguments delivered by the 'message.slow_subs_stats' hook;
%% last_insert_value is the latency previously stored for the client.
-type stats_update_args() :: #{ clientid := emqx_types:clientid()
                              , latency := non_neg_integer()
                              , type := emqx_message_latency_stats:latency_type()
                              , last_insert_value := non_neg_integer()
                              , update_time := timer:time()
                              }.

-type stats_update_env() :: #{max_size := pos_integer()}.

%% Short expiry sweep in tests so expiration is observable quickly.
-ifdef(TEST).
-define(EXPIRE_CHECK_INTERVAL, timer:seconds(1)).
-else.
-define(EXPIRE_CHECK_INTERVAL, timer:seconds(10)).
-endif.

-define(NOW, erlang:system_time(millisecond)).
-define(NOTICE_TOPIC_NAME, "slow_subs").
-define(DEF_CALL_TIMEOUT, timer:seconds(10)).

%% erlang term order
%% number < atom < reference < fun < port < pid < tuple < list < bit string

%% ets ordered_set is ascending by term order

%%--------------------------------------------------------------------
%%--------------------------------------------------------------------
%% APIs
%%--------------------------------------------------------------------
%% @doc Start the slow-subs statistics server, registered locally as
%% ?MODULE. Env is the feature's config proplist, passed through to init/1.
-spec(start_link(Env :: list()) -> emqx_types:startlink_ret()).
start_link(Env) ->
    gen_server:start_link({local, ?MODULE}, ?MODULE, [Env], []).
%% XXX NOTE: pay attention to the performance here
%% Hook callback for 'message.slow_subs_stats': folds one latency sample
%% for a client into ?TOPK_TAB. LIV is the latency value previously stored
%% for this client, so ?INDEX(LIV, ClientId) is its current table key.
-spec on_stats_update(stats_update_args(), stats_update_env()) -> true.
on_stats_update(#{clientid := ClientId,
                  latency := Latency,
                  type := Type,
                  last_insert_value := LIV,
                  update_time := Ts},
                #{max_size := MaxSize}) ->

    LastIndex = ?INDEX(LIV, ClientId),
    Index = ?INDEX(Latency, ClientId),

    %% check whether the client is in the table
    case ets:lookup(?TOPK_TAB, LastIndex) of
        [#top_k{index = Index}] ->
            %% NB: Index is already bound, so this clause only matches when
            %% the stored key equals the new key (latency unchanged);
            %% refresh the type and last_update_time in place
            %% XXX for clients whose latency are stable for a long time, is it possible to reduce updates?
            ets:insert(?TOPK_TAB,
                       #top_k{index = Index, type = Type, last_update_time = Ts});
        [_] ->
            %% client present under a different latency: insert the new key
            %% first, then drop the stale one.
            %% if Latency > minimum value, we should update it
            %% if Latency < minimum value, maybe it can replace the minimum value
            %% so always update at here
            %% do we need check if Latency == minimum ???
            ets:insert(?TOPK_TAB,
                       #top_k{index = Index, type = Type, last_update_time = Ts}),
            ets:delete(?TOPK_TAB, LastIndex);
        [] ->
            %% client not tracked yet: compete for a top-k slot
            try_insert_to_topk(MaxSize, Index, Latency, Type, Ts)
    end.
%% Drop every collected top-k entry (synchronous call to the server).
clear_history() ->
    gen_server:call(?MODULE, ?FUNCTION_NAME, ?DEF_CALL_TIMEOUT).

%% Turn stats collection on (re-installs the hook and threshold).
enable() ->
    gen_server:call(?MODULE, {enable, true}, ?DEF_CALL_TIMEOUT).

%% Turn stats collection off (removes the hook).
disable() ->
    gen_server:call(?MODULE, {enable, false}, ?DEF_CALL_TIMEOUT).

%% Create the shared top-k ETS table if it does not exist yet; idempotent.
%% ordered_set keyed on #top_k.index = {Latency, ClientId}, so iteration
%% order is ascending latency.
init_topk_tab() ->
    case ets:whereis(?TOPK_TAB) of
        undefined ->
            ?TOPK_TAB = ets:new(?TOPK_TAB,
                                [ ordered_set, public, named_table
                                , {keypos, #top_k.index}, {write_concurrency, true}
                                , {read_concurrency, true}
                                ]);
        _ ->
            ?TOPK_TAB
    end.
%%--------------------------------------------------------------------
%% gen_server callbacks
%%--------------------------------------------------------------------

%% Arm the notice and expiry timers, push the latency threshold down to
%% the stats module, install the hook, and start in the enabled state.
init([Conf]) ->
    notice_tick(Conf),
    expire_tick(Conf),
    update_threshold(Conf),
    load(Conf),
    {ok, #{config => Conf,
           last_tick_at => ?NOW,
           enable => true}}.
%% Toggle collection on/off. A no-op when the requested state matches the
%% current one; enabling re-applies the threshold and hook, disabling
%% removes the hook.
handle_call({enable, Enable}, _From,
            #{config := Cfg, enable := IsEnable} = State) ->
    State2 = case Enable of
                 IsEnable ->
                     %% already in the requested state
                     State;
                 true ->
                     update_threshold(Cfg),
                     load(Cfg),
                     State#{enable := true};
                 _ ->
                     unload(),
                     State#{enable := false}
             end,
    {reply, ok, State2};

%% Wipe the whole top-k table.
handle_call(clear_history, _, State) ->
    ets:delete_all_objects(?TOPK_TAB),
    {reply, ok, State};

handle_call(Req, _From, State) ->
    ?LOG(error, "Unexpected call: ~p", [Req]),
    {reply, ignored, State}.
handle_cast(Msg, State) ->
    ?LOG(error, "Unexpected cast: ~p", [Msg]),
    {noreply, State}.

%% Periodic sweep: drop entries older than the configured expire_interval,
%% then re-arm the timer.
handle_info(expire_tick, #{config := Cfg} = State) ->
    expire_tick(Cfg),
    Logs = ets:tab2list(?TOPK_TAB),
    do_clear(Cfg, Logs),
    {noreply, State};

%% Periodic report: publish the current ranking to $SYS and remember this
%% tick as the start of the next window.
handle_info(notice_tick, #{config := Cfg} = State) ->
    notice_tick(Cfg),
    Logs = ets:tab2list(?TOPK_TAB),
    do_notification(Logs, State),
    {noreply, State#{last_tick_at := ?NOW}};

handle_info(Info, State) ->
    ?LOG(error, "Unexpected info: ~p", [Info]),
    {noreply, State}.

%% Always remove the hook on shutdown.
terminate(_Reason, _) ->
    unload(),
    ok.

code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
%%--------------------------------------------------------------------
%% Internal functions
%%--------------------------------------------------------------------

%% Schedule the next expiry sweep (fixed interval; config is ignored).
expire_tick(_) ->
    erlang:send_after(?EXPIRE_CHECK_INTERVAL, self(), ?FUNCTION_NAME).

%% Schedule the next notice publication; a notice_interval of 0 disables
%% publishing entirely.
notice_tick(Cfg) ->
    case get_value(notice_interval, Cfg) of
        0 -> ok;
        Interval ->
            erlang:send_after(Interval, self(), ?FUNCTION_NAME),
            ok
    end.
%% Kick off publication of the current ranking; nothing is published when
%% the table is empty.
-spec do_notification(list(), state()) -> ok.
do_notification([], _) ->
    ok;
do_notification(Logs, #{last_tick_at := LastTickTime, config := Cfg}) ->
    start_publish(Logs, LastTickTime, Cfg),
    ok.

%% Publish asynchronously on the emqx worker pool; the initial rank equals
%% the number of entries (ranks count down as entries are consumed).
start_publish(Logs, TickTime, Cfg) ->
    emqx_pool:async_submit({fun do_publish/4, [Logs, erlang:length(Logs), TickTime, Cfg]}).
%% Publish the ranking in batches of notice_batch_size entries.
do_publish([], _, _, _) ->
    ok;
do_publish(Logs, Rank, TickTime, Cfg) ->
    BatchSize = get_value(notice_batch_size, Cfg),
    do_publish(Logs, BatchSize, Rank, TickTime, Cfg, []).

%% Accumulator loop. NOTE: clause order matters — the Size =:= 0 clause
%% must come before the final [] clause so a batch filled exactly at the
%% end of the list is flushed via the 4-arity entry (which then hits the
%% empty-list base case).
do_publish([Log | T], Size, Rank, TickTime, Cfg, Cache) when Size > 0 ->
    Cache2 = [convert_to_notice(Rank, Log) | Cache],
    do_publish(T, Size - 1, Rank - 1, TickTime, Cfg, Cache2);
do_publish(Logs, Size, Rank, TickTime, Cfg, Cache) when Size =:= 0 ->
    %% batch full: flush it and start a fresh batch for the remainder
    publish(TickTime, Cfg, Cache),
    do_publish(Logs, Rank, TickTime, Cfg);
do_publish([], _, _Rank, TickTime, Cfg, Cache) ->
    %% partial final batch
    publish(TickTime, Cfg, Cache),
    ok.
%% Translate a stored #top_k{} entry into the notice map that is published
%% to the $SYS slow-subs topic, tagged with its position in the ranking.
convert_to_notice(Rank, TopK) ->
    #top_k{index = ?INDEX(Latency, ClientId),
           type = Type,
           last_update_time = UpdatedAt} = TopK,
    #{rank => Rank,
      clientid => ClientId,
      latency => Latency,
      type => Type,
      timestamp => UpdatedAt}.
%% Encode one batch of notices as JSON and publish it on the $SYS
%% slow_subs topic. Notices were accumulated in reverse rank order, so
%% reverse before encoding. safe_publish is used so a publish failure
%% cannot crash the pool worker.
publish(TickTime, Cfg, Notices) ->
    WindowLog = #{last_tick_at => TickTime,
                  logs => lists:reverse(Notices)},
    Payload = emqx_json:encode(WindowLog),
    Msg = #message{ id = emqx_guid:gen()
                  , qos = get_value(notice_qos, Cfg)
                  , from = ?MODULE
                  , topic = emqx_topic:systop(?NOTICE_TOPIC_NAME)
                  , payload = Payload
                  , timestamp = ?NOW
                  },
    _ = emqx_broker:safe_publish(Msg),
    ok.
%% Install the stats hook, capturing the configured top_k_num as the
%% table-size cap passed to every on_stats_update/2 invocation.
load(Cfg) ->
    MaxSize = get_value(top_k_num, Cfg),
    _ = emqx:hook('message.slow_subs_stats',
                  fun ?MODULE:on_stats_update/2,
                  [#{max_size => MaxSize}]),
    ok.

%% Remove the stats hook installed by load/1.
unload() ->
    emqx:unhook('message.slow_subs_stats', fun ?MODULE:on_stats_update/2).
%% Drop every entry whose last update is at least expire_interval ms old.
%% Logs is a snapshot of the table taken by the caller.
do_clear(Cfg, Logs) ->
    Interval = get_value(expire_interval, Cfg),
    Now = ?NOW,
    lists:foreach(
      fun(#top_k{index = Key, last_update_time = UpdatedAt}) ->
              case Now - UpdatedAt >= Interval of
                  true ->
                      ets:delete(?TOPK_TAB, Key);
                  _ ->
                      true
              end
      end, Logs).
%% Insert a newly-seen client into the top-k table. Below the size cap the
%% insert is unconditional; at the cap the new latency must beat the
%% current minimum (ets:first/1 of the ordered_set), whose entry is then
%% evicted.
try_insert_to_topk(MaxSize, Index, Latency, Type, Ts) ->
    case ets:info(?TOPK_TAB, size) of
        Size when Size < MaxSize ->
            %% if the size is under limit, insert it directly
            ets:insert(?TOPK_TAB,
                       #top_k{index = Index, type = Type, last_update_time = Ts});
        _Size ->
            %% find the minimum value
            %% if the table emptied concurrently, fabricate a sentinel key
            %% with latency Latency - 1 so the comparison below still inserts
            ?INDEX(Min, _) = First =
                case ets:first(?TOPK_TAB) of
                    ?INDEX(_, _) = I ->  I;
                    _ -> ?INDEX(Latency - 1, <<>>)
                end,
            case Latency =< Min of
                true -> true;
                _ ->
                    ets:insert(?TOPK_TAB,
                               #top_k{index = Index, type = Type, last_update_time = Ts}),
                    ets:delete(?TOPK_TAB, First)
            end
    end.
%% Push the configured latency threshold down to the stats module.
update_threshold(Conf) ->
    _ = emqx_message_latency_stats:update_threshold(get_value(threshold, Conf)),
    ok.

View File

@ -0,0 +1,57 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
%% REST API over the slow-subscriptions data collected by emqx_slow_subs.
-module(emqx_slow_subs_api).

-rest_api(#{name => clear_history,
            method => 'DELETE',
            path => "/slow_subscriptions",
            func => clear_history,
            descr => "Clear current data and re count slow topic"}).

-rest_api(#{name => get_history,
            method => 'GET',
            path => "/slow_subscriptions",
            func => get_history,
            descr => "Get slow topics statistics record data"}).

-export([ clear_history/2
        , get_history/2
        ]).

-include("include/emqx_slow_subs.hrl").

-import(minirest, [return/1]).

%%--------------------------------------------------------------------
%% HTTP API
%%--------------------------------------------------------------------

%% DELETE /slow_subscriptions: drop all collected entries.
clear_history(_Bindings, _Params) ->
    ok = emqx_slow_subs:clear_history(),
    return(ok).

%% GET /slow_subscriptions: paginated ranking. The table is traversed
%% last->prev, i.e. descending {Latency, ClientId} key order, so the
%% slowest clients come first.
get_history(_Bindings, Params) ->
    RowFun = fun(#top_k{index = ?INDEX(Latency, ClientId),
                        type = Type,
                        last_update_time = Ts}) ->
                     [{clientid, ClientId},
                      {latency, Latency},
                      {type, Type},
                      {last_update_time, Ts}]
             end,
    Return = emqx_mgmt_api:paginate({?TOPK_TAB, [{traverse, last_prev}]}, Params, RowFun),
    return({ok, Return}).

View File

@ -0,0 +1,487 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_trace).

-behaviour(gen_server).

-include_lib("emqx/include/emqx.hrl").
-include_lib("emqx/include/logger.hrl").

-logger_header("[Tracer]").

%% Broker hook callbacks that write client activity into trace logs.
-export([ publish/1
        , subscribe/3
        , unsubscribe/2
        ]).

%% Trace-definition management API.
-export([ start_link/0
        , list/0
        , list/1
        , get_trace_filename/1
        , create/1
        , delete/1
        , clear/0
        , update/2
        ]).

%% Helpers used by the HTTP API layer.
-export([ format/1
        , zip_dir/0
        , filename/2
        , trace_dir/0
        , trace_file/1
        , delete_files_after_send/2
        ]).

-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).

%% The mnesia table is named after the module.
-define(TRACE, ?MODULE).
%% Hard cap on the number of trace definitions.
-define(MAX_SIZE, 30).

-ifdef(TEST).
-export([ log_file/2
        , create_table/0
        , find_closest_time/2
        ]).
-endif.

-export_type([ip_address/0]).
-type ip_address() :: string().

%% One trace definition; every field also admits '_' so a record
%% instance can double as an mnesia match pattern.
-record(?TRACE,
        { name :: binary() | undefined | '_'
        , type :: clientid | topic | ip_address | undefined | '_'
        , filter :: emqx_types:topic() | emqx_types:clientid() | ip_address() | undefined | '_'
        , enable = true :: boolean() | '_'
        , start_at :: integer() | undefined | '_'
        , end_at :: integer() | undefined | '_'
        }).
%% Hook: record a PUBLISH; $SYS traffic is never traced.
publish(#message{topic = <<"$SYS/", _/binary>>}) -> ignore;
publish(#message{from = From, topic = Topic, payload = Payload})
  when is_binary(From); is_atom(From) ->
    emqx_logger:info(
      #{topic => Topic, mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY}},
      "PUBLISH to ~s: ~0p",
      [Topic, Payload]
     ).

%% Hook: record a SUBSCRIBE; $SYS traffic is never traced.
subscribe(<<"$SYS/", _/binary>>, _SubId, _SubOpts) -> ignore;
subscribe(Topic, SubId, SubOpts) ->
    emqx_logger:info(
      #{topic => Topic, mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY}},
      "~ts SUBSCRIBE ~ts: Options: ~0p",
      [SubId, Topic, SubOpts]
     ).

%% Hook: record an UNSUBSCRIBE; $SYS traffic is never traced.
unsubscribe(<<"$SYS/", _/binary>>, _SubOpts) -> ignore;
unsubscribe(Topic, SubOpts) ->
    emqx_logger:info(
      #{topic => Topic, mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY}},
      "~ts UNSUBSCRIBE ~ts: Options: ~0p",
      [maps:get(subid, SubOpts, ""), Topic, SubOpts]
     ).
-spec(start_link() -> emqx_types:startlink_ret()).
%% Start the trace manager, registered locally under the module name.
start_link() ->
    gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).

-spec list() -> [tuple()].
%% All trace records, enabled or not.
list() ->
    ets:match_object(?TRACE, #?TRACE{_ = '_'}).

-spec list(boolean()) -> [tuple()].
%% Trace records filtered by their `enable` flag.
list(Enable) ->
    ets:match_object(?TRACE, #?TRACE{enable = Enable, _ = '_'}).
-spec create([{Key :: binary(), Value :: binary()}] | #{atom() => binary()}) ->
    ok | {error, {duplicate_condition, iodata()} | {already_existed, iodata()} | iodata()}.
%% Validate and persist a new trace definition, refusing to exceed
%% ?MAX_SIZE stored traces.
%% Fixed the user-facing error string: "has reache the maximum" ->
%% "has reached the maximum," (typo + missing separator).
create(Trace) ->
    case mnesia:table_info(?TRACE, size) < ?MAX_SIZE of
        true ->
            case to_trace(Trace) of
                {ok, TraceRec} -> insert_new_trace(TraceRec);
                {error, Reason} -> {error, Reason}
            end;
        false ->
            {error, "The number of traces created has reached the maximum,"
                    " please delete the useless ones first"}
    end.
-spec delete(Name :: binary()) -> ok | {error, not_found}.
%% Remove a trace definition by name.
delete(Name) ->
    transaction(
      fun() ->
              case mnesia:read(?TRACE, Name) of
                  [_] -> mnesia:delete(?TRACE, Name, write);
                  [] -> mnesia:abort(not_found)
              end
      end).

-spec clear() -> ok | {error, Reason :: term()}.
%% Drop every stored trace definition.
clear() ->
    case mnesia:clear_table(?TRACE) of
        {atomic, ok} -> ok;
        {aborted, Reason} -> {error, Reason}
    end.

-spec update(Name :: binary(), Enable :: boolean()) ->
    ok | {error, not_found | finished}.
%% Flip the enable flag; a no-op when already in the requested state,
%% and refused once the trace's end time has passed.
update(Name, Enable) ->
    transaction(
      fun() ->
              case mnesia:read(?TRACE, Name) of
                  [] -> mnesia:abort(not_found);
                  [#?TRACE{enable = Enable}] -> ok;
                  [Rec] ->
                      case erlang:system_time(second) >= Rec#?TRACE.end_at of
                          false -> mnesia:write(?TRACE, Rec#?TRACE{enable = Enable}, write);
                          true -> mnesia:abort(finished)
                      end
              end
      end).

-spec get_trace_filename(Name :: binary()) ->
    {ok, FileName :: string()} | {error, not_found}.
%% Log file name for a trace, derived from its name and start time.
get_trace_filename(Name) ->
    transaction(
      fun() ->
              case mnesia:read(?TRACE, Name, read) of
                  [] -> mnesia:abort(not_found);
                  [#?TRACE{start_at = Start}] -> {ok, filename(Name, Start)}
              end
      end).
-spec trace_file(File :: list()) ->
    {ok, Node :: list(), Binary :: binary()} |
    {error, Node :: list(), Reason :: term()}.
%% Read a trace log from this node's trace dir, tagging the result with
%% the local node name (this runs as an RPC target on every node).
trace_file(File) ->
    Node = atom_to_list(node()),
    case file:read_file(filename:join(trace_dir(), File)) of
        {ok, Bin} -> {ok, Node, Bin};
        {error, Reason} -> {error, Node, Reason}
    end.

%% Ask the server to delete the given files once the caller process
%% terminates (it is monitored; see handle_cast/handle_info 'DOWN').
delete_files_after_send(TraceLog, Zips) ->
    gen_server:cast(?MODULE, {delete_tag, self(), [TraceLog | Zips]}).

-spec format(list(#?TRACE{})) -> list(map()).
%% Convert trace records into maps keyed by the record field names.
format(Traces) ->
    Fields = record_info(fields, ?TRACE),
    [begin
         [_Tag | Values] = tuple_to_list(Trace),
         maps:from_list(lists:zip(Fields, Values))
     end || Trace = #?TRACE{} <- Traces].
%%--------------------------------------------------------------------
%% gen_server callbacks
%%--------------------------------------------------------------------

init([]) ->
    ok = create_table(),
    erlang:process_flag(trap_exit, true),
    %% Remember the primary log level so it can be restored when no
    %% trace is enabled and on terminate.
    OriginLogLevel = emqx_logger:get_primary_log_level(),
    ok = filelib:ensure_dir(trace_dir()),
    ok = filelib:ensure_dir(zip_dir()),
    %% React to table changes (create/delete/update from any API).
    {ok, _} = mnesia:subscribe({table, ?TRACE, simple}),
    Traces = get_enable_trace(),
    ok = update_log_primary_level(Traces, OriginLogLevel),
    TRef = update_trace(Traces),
    {ok, #{timer => TRef, monitors => #{}, primary_log_level => OriginLogLevel}}.

%% Create (or copy) the disc-backed trace table.
create_table() ->
    ok = ekka_mnesia:create_table(?TRACE, [
        {type, set},
        {disc_copies, [node()]},
        {record_name, ?TRACE},
        {attributes, record_info(fields, ?TRACE)}]),
    ok = ekka_mnesia:copy_table(?TRACE, disc_copies).

handle_call(Req, _From, State) ->
    ?LOG(error, "Unexpected call: ~p", [Req]),
    {reply, ok, State}.

%% Monitor the requesting process; its files are removed on 'DOWN'.
handle_cast({delete_tag, Pid, Files}, State = #{monitors := Monitors}) ->
    erlang:monitor(process, Pid),
    {noreply, State#{monitors => Monitors#{Pid => Files}}};
handle_cast(Msg, State) ->
    ?LOG(error, "Unexpected cast: ~p", [Msg]),
    {noreply, State}.

handle_info({'DOWN', _Ref, process, Pid, _Reason}, State = #{monitors := Monitors}) ->
    case maps:take(Pid, Monitors) of
        error -> {noreply, State};
        {Files, NewMonitors} ->
            lists:foreach(fun file:delete/1, Files),
            {noreply, State#{monitors => NewMonitors}}
    end;
handle_info({timeout, TRef, update_trace},
            #{timer := TRef, primary_log_level := OriginLogLevel} = State) ->
    %% Periodic re-evaluation of which traces should be running.
    Traces = get_enable_trace(),
    ok = update_log_primary_level(Traces, OriginLogLevel),
    NextTRef = update_trace(Traces),
    {noreply, State#{timer => NextTRef}};
handle_info({mnesia_table_event, _Events}, State = #{timer := TRef}) ->
    %% Table changed: cancel the pending timer and re-evaluate now.
    emqx_misc:cancel_timer(TRef),
    handle_info({timeout, TRef, update_trace}, State);
handle_info(Info, State) ->
    ?LOG(error, "Unexpected info: ~p", [Info]),
    {noreply, State}.

terminate(_Reason, #{timer := TRef, primary_log_level := OriginLogLevel}) ->
    ok = set_log_primary_level(OriginLogLevel),
    _ = mnesia:unsubscribe({table, ?TRACE, simple}),
    emqx_misc:cancel_timer(TRef),
    stop_all_trace_handler(),
    _ = file:del_dir_r(zip_dir()),
    ok.

code_change(_, State, _Extra) ->
    {ok, State}.
%% Store a validated trace, rejecting duplicate names and duplicate
%% (start_at, type, filter) conditions.
insert_new_trace(Trace) ->
    transaction(
      fun() ->
              case mnesia:read(?TRACE, Trace#?TRACE.name) of
                  [] ->
                      #?TRACE{start_at = StartAt, type = Type, filter = Filter} = Trace,
                      Match = #?TRACE{_ = '_', start_at = StartAt, type = Type, filter = Filter},
                      case mnesia:match_object(?TRACE, Match, read) of
                          [] -> mnesia:write(?TRACE, Trace, write);
                          [#?TRACE{name = Name}] -> mnesia:abort({duplicate_condition, Name})
                      end;
                  [#?TRACE{name = Name}] -> mnesia:abort({already_existed, Name})
              end
      end).

%% Reconcile the desired set of traces with the installed log handlers,
%% clean stale files, and arm the timer for the next transition.
update_trace(Traces) ->
    Now = erlang:system_time(second),
    {_Waiting, Running, Finished} = classify_by_time(Traces, Now),
    disable_finished(Finished),
    Started = emqx_trace_handler:running(),
    {NeedRunning, AllStarted} = start_trace(Running, Started),
    NeedStop = AllStarted -- NeedRunning,
    ok = stop_trace(NeedStop, Started),
    clean_stale_trace_files(),
    NextTime = find_closest_time(Traces, Now),
    emqx_misc:start_timer(NextTime, update_trace).

%% Uninstall every currently running trace handler.
stop_all_trace_handler() ->
    lists:foreach(fun(#{id := Id}) -> emqx_trace_handler:uninstall(Id) end,
                  emqx_trace_handler:running()).

%% All traces whose enable flag is set.
get_enable_trace() ->
    {atomic, Traces} =
        mnesia:transaction(
          fun() ->
                  mnesia:match_object(?TRACE, #?TRACE{enable = true, _ = '_'}, read)
          end),
    Traces.
%% Milliseconds until the nearest future start/end among enabled traces,
%% capped at 15 minutes when nothing is due sooner.
find_closest_time(Traces, Now) ->
    DefaultSec = 60 * 15,
    Sec = lists:foldl(
            fun(#?TRACE{start_at = Start, end_at = End, enable = true}, Best) ->
                    min(closest(End, Now, Best), closest(Start, Now, Best));
               (_Disabled, Best) ->
                    Best
            end, DefaultSec, Traces),
    timer:seconds(Sec).

%% A timestamp at or before Now cannot be "next"; otherwise take the
%% nearer of this delta and the best candidate so far.
closest(Time, Now, Best) when Now >= Time -> Best;
closest(Time, Now, Best) -> min(Time - Now, Best).
%% Persist enable=false for traces whose end time has passed.
disable_finished([]) -> ok;
disable_finished(Traces) ->
    transaction(
      fun() ->
              lists:map(
                fun(#?TRACE{name = Name}) ->
                        case mnesia:read(?TRACE, Name, write) of
                            [] -> ok;
                            [Trace] -> mnesia:write(?TRACE, Trace#?TRACE{enable = false}, write)
                        end
                end, Traces)
      end).

%% Install handlers for traces that should run but are not yet started.
%% Returns {NamesThatShouldRun, NamesNowStarted}.
start_trace(Traces, Started0) ->
    StartedNames = [Name || #{name := Name} <- Started0],
    lists:foldl(
      fun(#?TRACE{name = Name} = Trace, {Running, StartedAcc}) ->
              case lists:member(Name, StartedAcc) of
                  true ->
                      {[Name | Running], StartedAcc};
                  false ->
                      case start_trace(Trace) of
                          ok -> {[Name | Running], [Name | StartedAcc]};
                          {error, _Reason} -> {[Name | Running], StartedAcc}
                      end
              end
      end, {[], StartedNames}, Traces).

%% Install one trace handler at debug level, logging to its own file.
start_trace(Trace) ->
    #?TRACE{name = Name
           , type = Type
           , filter = Filter
           , start_at = Start
           } = Trace,
    Who = #{name => Name, type => Type, filter => Filter},
    emqx_trace_handler:install(Who, debug, log_file(Name, Start)).
%% Uninstall handlers whose trace names appear in Finished.
stop_trace(Finished, Started) ->
    lists:foreach(
      fun(#{name := Name, type := Type}) ->
              case lists:member(Name, Finished) of
                  true -> emqx_trace_handler:uninstall(Type, Name);
                  false -> ok
              end
      end, Started).

%% Delete log files that no stored trace refers to any more; the "zip"
%% sub-directory is always kept.
clean_stale_trace_files() ->
    TraceDir = trace_dir(),
    case file:list_dir(TraceDir) of
        {ok, AllFiles} when AllFiles =/= ["zip"] ->
            FileFun = fun(#?TRACE{name = Name, start_at = StartAt}) -> filename(Name, StartAt) end,
            KeepFiles = lists:map(FileFun, list()),
            case AllFiles -- ["zip" | KeepFiles] of
                [] -> ok;
                DeleteFiles ->
                    DelFun = fun(F) -> file:delete(filename:join(TraceDir, F)) end,
                    lists:foreach(DelFun, DeleteFiles)
            end;
        _ -> ok
    end.
%% Partition traces into {Waiting, Running, Finished} relative to Now
%% (seconds). As in the original recursion, each result list comes out
%% in reverse input order (elements are prepended while walking left
%% to right).
classify_by_time(Traces, Now) ->
    lists:foldl(
      fun(#?TRACE{start_at = Start} = Trace, {Wait, Run, Finish}) when Start > Now ->
              {[Trace | Wait], Run, Finish};
         (#?TRACE{end_at = End} = Trace, {Wait, Run, Finish}) when End =< Now ->
              {Wait, Run, [Trace | Finish]};
         (Trace, {Wait, Run, Finish}) ->
              {Wait, [Trace | Run], Finish}
      end, {[], [], []}, Traces).
%% Turn raw user parameters into a validated #?TRACE{} record, filling
%% default start/end times and checking required fields.
to_trace(TraceParam) ->
    case to_trace(ensure_proplists(TraceParam), #?TRACE{}) of
        {error, Reason} -> {error, Reason};
        {ok, #?TRACE{name = undefined}} ->
            {error, "name required"};
        {ok, #?TRACE{type = undefined}} ->
            {error, "type=[topic,clientid,ip_address] required"};
        {ok, #?TRACE{filter = undefined}} ->
            {error, "topic/clientid/ip_address filter required"};
        {ok, TraceRec0 = #?TRACE{}} ->
            case fill_default(TraceRec0) of
                #?TRACE{start_at = Start, end_at = End} when End =< Start ->
                    {error, "failed by start_at >= end_at"};
                TraceRec ->
                    {ok, TraceRec}
            end
    end.

%% Normalise input to a proplist with atom keys, dropping anything that
%% is not a 2-tuple. NOTE: like the original fold, the resulting list is
%% in reverse input order.
ensure_proplists(#{} = Trace) -> maps:to_list(Trace);
ensure_proplists(Trace) when is_list(Trace) ->
    lists:reverse(
      lists:filtermap(
        fun({K, V}) when is_binary(K) -> {true, {binary_to_existing_atom(K), V}};
           ({K, _V}) when is_atom(K) -> true;
           (_Other) -> false
        end, Trace)).

%% Default start: now. Default end: ten minutes after start.
fill_default(Trace = #?TRACE{start_at = undefined}) ->
    fill_default(Trace#?TRACE{start_at = erlang:system_time(second)});
fill_default(Trace = #?TRACE{end_at = undefined, start_at = StartAt}) ->
    fill_default(Trace#?TRACE{end_at = StartAt + 10 * 60});
fill_default(Trace) -> Trace.
%% Trace names: letters first, then letters/digits/dash/underscore.
-define(NAME_RE, "^[A-Za-z]+[A-Za-z0-9-_]*$").

%% Fold each user-supplied field into the record, validating as we go.
to_trace([], Rec) -> {ok, Rec};
to_trace([{name, Name} | Trace], Rec) ->
    case re:run(Name, ?NAME_RE) of
        nomatch -> {error, "Name should be " ?NAME_RE};
        _ -> to_trace(Trace, Rec#?TRACE{name = Name})
    end;
to_trace([{type, Type} | Trace], Rec) ->
    case lists:member(Type, [<<"clientid">>, <<"topic">>, <<"ip_address">>]) of
        true -> to_trace(Trace, Rec#?TRACE{type = binary_to_existing_atom(Type)});
        false -> {error, "incorrect type: only support clientid/topic/ip_address"}
    end;
to_trace([{topic, Topic} | Trace], Rec) ->
    case validate_topic(Topic) of
        ok -> to_trace(Trace, Rec#?TRACE{filter = Topic});
        {error, Reason} -> {error, Reason}
    end;
to_trace([{clientid, ClientId} | Trace], Rec) ->
    to_trace(Trace, Rec#?TRACE{filter = ClientId});
to_trace([{ip_address, IP} | Trace], Rec) ->
    %% The filter is stored as a string (list), not the parsed tuple.
    case inet:parse_address(binary_to_list(IP)) of
        {ok, _} -> to_trace(Trace, Rec#?TRACE{filter = binary_to_list(IP)});
        {error, Reason} -> {error, lists:flatten(io_lib:format("ip address: ~p", [Reason]))}
    end;
to_trace([{start_at, StartAt} | Trace], Rec) ->
    case to_system_second(StartAt) of
        {ok, Sec} -> to_trace(Trace, Rec#?TRACE{start_at = Sec});
        {error, Reason} -> {error, Reason}
    end;
to_trace([{end_at, EndAt} | Trace], Rec) ->
    Now = erlang:system_time(second),
    case to_system_second(EndAt) of
        {ok, Sec} when Sec > Now ->
            to_trace(Trace, Rec#?TRACE{end_at = Sec});
        {ok, _Sec} ->
            {error, "end_at time has already passed"};
        {error, Reason} ->
            {error, Reason}
    end;
to_trace([Unknown | _Trace], _Rec) -> {error, io_lib:format("unknown field: ~p", [Unknown])}.
%% Check that TopicName is a valid topic filter; emqx_topic:validate/2
%% raises on invalid input, which we translate into a readable error.
validate_topic(TopicName) ->
    try emqx_topic:validate(filter, TopicName) of
        true -> ok
    catch
        error:Reason ->
            {error, io_lib:format("topic: ~s invalid by ~p", [TopicName, Reason])}
    end.
%% Parse an RFC3339 binary timestamp into system seconds, clamped to be
%% no earlier than now.
%% Robustness fix: the original only caught error:{badmatch, _}, so any
%% other parse failure from calendar:rfc3339_to_system_time/2 (e.g.
%% badarg/function_clause on malformed input) crashed the caller instead
%% of returning the friendly {error, ...} tuple. Catch every error.
to_system_second(At) ->
    try
        Sec = calendar:rfc3339_to_system_time(binary_to_list(At), [{unit, second}]),
        Now = erlang:system_time(second),
        {ok, erlang:max(Now, Sec)}
    catch
        error:_Reason ->
            {error, ["The rfc3339 specification not satisfied: ", At]}
    end.
%% Directory where zipped trace bundles are staged.
zip_dir() ->
    trace_dir() ++ "zip/".

%% Root directory of all trace log files (under the data dir).
trace_dir() ->
    filename:join(emqx:get_env(data_dir), "trace") ++ "/".

%% Absolute path of one trace's log file.
log_file(Name, Start) ->
    filename:join(trace_dir(), filename(Name, Start)).

%% "trace_<name>_<date>.log" — only the date part of the RFC3339 start
%% time is used.
filename(Name, Start) ->
    [Date, _] = string:split(calendar:system_time_to_rfc3339(Start), "T", leading),
    lists:flatten(["trace_", binary_to_list(Name), "_", Date, ".log"]).

%% Run a fun in an mnesia transaction, mapping aborts to {error, _}.
transaction(Tran) ->
    case mnesia:transaction(Tran) of
        {atomic, Res} -> Res;
        {aborted, Reason} -> {error, Reason}
    end.

%% With no enabled trace, restore the original level; otherwise force
%% the primary level down to debug so trace handlers receive events.
update_log_primary_level([], OriginLevel) -> set_log_primary_level(OriginLevel);
update_log_primary_level(_, _) -> set_log_primary_level(debug).

%% Only touch the logger when the level actually changes.
set_log_primary_level(NewLevel) ->
    case NewLevel =/= emqx_logger:get_primary_log_level() of
        true -> emqx_logger:set_primary_log_level(NewLevel);
        false -> ok
    end.

View File

@ -0,0 +1,212 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_trace_api).

-include_lib("emqx/include/logger.hrl").
-include_lib("kernel/include/file.hrl").

%% API
-export([ list_trace/2
        , create_trace/2
        , update_trace/2
        , delete_trace/2
        , clear_traces/2
        , download_zip_log/2
        , stream_log_file/2
        ]).

%% RPC targets, executed on remote nodes.
-export([ read_trace_file/3
        , get_trace_size/0
        ]).

-define(TO_BIN(_B_), iolist_to_binary(_B_)).
-define(NOT_FOUND(N), {error, 'NOT_FOUND', ?TO_BIN([N, " NOT FOUND"])}).
%% List all traces, newest first, enriching each entry with per-node log
%% sizes (gathered cluster-wide), RFC3339 timestamps and a status field.
list_trace(_, _Params) ->
    case emqx_trace:list() of
        [] -> {ok, []};
        List0 ->
            List = lists:sort(fun(#{start_at := A}, #{start_at := B}) -> A > B end,
                              emqx_trace:format(List0)),
            Nodes = ekka_mnesia:running_nodes(),
            TraceSize = cluster_call(?MODULE, get_trace_size, [], 30000),
            AllFileSize = lists:foldl(fun(F, Acc) -> maps:merge(Acc, F) end, #{}, TraceSize),
            Now = erlang:system_time(second),
            Traces =
                lists:map(
                  fun(Trace = #{name := Name, start_at := Start,
                                end_at := End, enable := Enable, type := Type, filter := Filter}) ->
                          FileName = emqx_trace:filename(Name, Start),
                          LogSize = collect_file_size(Nodes, FileName, AllFileSize),
                          %% enable/filter are replaced by status and a
                          %% type-named filter key.
                          Trace0 = maps:without([enable, filter], Trace),
                          Trace0#{ log_size => LogSize
                                 , Type => iolist_to_binary(Filter)
                                 , start_at => list_to_binary(calendar:system_time_to_rfc3339(Start))
                                 , end_at => list_to_binary(calendar:system_time_to_rfc3339(End))
                                 , status => status(Enable, Start, End, Now)
                                 }
                  end, List),
            {ok, Traces}
    end.
%% Create a trace from the request parameters, mapping the storage
%% layer's error reasons to API error codes.
%% Fixed user-facing messages: the name and the message text were glued
%% together without a separator ("nameAlready Exists").
create_trace(_, Param) ->
    case emqx_trace:create(Param) of
        ok -> ok;
        {error, {already_existed, Name}} ->
            {error, 'ALREADY_EXISTED', ?TO_BIN([Name, " Already Exists"])};
        {error, {duplicate_condition, Name}} ->
            {error, 'DUPLICATE_CONDITION', ?TO_BIN([Name, " Duplication Condition"])};
        {error, Reason} ->
            {error, 'INCORRECT_PARAMS', ?TO_BIN(Reason)}
    end.
%% Delete a single trace by name.
delete_trace(#{name := Name}, _Param) ->
    case emqx_trace:delete(Name) of
        ok -> ok;
        {error, not_found} -> ?NOT_FOUND(Name)
    end.

%% Remove every stored trace.
clear_traces(_, _) ->
    emqx_trace:clear().

%% Enable or disable a trace according to the bound operation.
update_trace(#{name := Name, operation := Operation}, _Param) ->
    Enable = case Operation of disable -> false; enable -> true end,
    case emqx_trace:update(Name, Enable) of
        ok -> {ok, #{enable => Enable, name => Name}};
        {error, not_found} -> ?NOT_FOUND(Name)
    end.
%% if HTTP request headers include accept-encoding: gzip and file size > 300 bytes.
%% cowboy_compress_h will auto encode gzip format.
%% Collect this trace's log file from every node, bundle them into one
%% zip, and schedule the intermediate files for deletion after sending.
download_zip_log(#{name := Name}, _Param) ->
    case emqx_trace:get_trace_filename(Name) of
        {ok, TraceLog} ->
            TraceFiles = collect_trace_file(TraceLog),
            ZipDir = emqx_trace:zip_dir(),
            Zips = group_trace_file(ZipDir, TraceLog, TraceFiles),
            ZipFileName = ZipDir ++ binary_to_list(Name) ++ ".zip",
            {ok, ZipFile} = zip:zip(ZipFileName, Zips, [{cwd, ZipDir}]),
            emqx_trace:delete_files_after_send(ZipFileName, Zips),
            {ok, ZipFile};
        {error, Reason} ->
            {error, Reason}
    end.
%% Write each per-node log into the zip staging dir as "<node>-<log>";
%% nodes that failed to deliver their file are logged and skipped.
group_trace_file(ZipDir, TraceLog, TraceFiles) ->
    lists:foldl(
      fun({ok, Node, Bin}, Acc) ->
              ZipName = ZipDir ++ Node ++ "-" ++ TraceLog,
              case file:write_file(ZipName, Bin) of
                  ok -> [Node ++ "-" ++ TraceLog | Acc];
                  _ -> Acc
              end;
         ({error, Node, Reason}, Acc) ->
              ?LOG(error, "download trace log error:~p", [{Node, TraceLog, Reason}]),
              Acc
      end, [], TraceFiles).

%% Fetch the log file from all running nodes.
collect_trace_file(TraceLog) ->
    cluster_call(emqx_trace, trace_file, [TraceLog], 60000).

%% multicall helper: log unreachable nodes, return the good results.
cluster_call(Mod, Fun, Args, Timeout) ->
    Nodes = ekka_mnesia:running_nodes(),
    {GoodRes, BadNodes} = rpc:multicall(Nodes, Mod, Fun, Args, Timeout),
    BadNodes =/= [] andalso ?LOG(error, "rpc call failed on ~p ~p", [BadNodes, {Mod, Fun, Args}]),
    GoodRes.
%% Stream a chunk of a trace log from the requested node.
%% Robustness fix: rpc:call/5 can return {badrpc, Reason} for reasons
%% other than nodedown (e.g. timeout, remote exception); the original
%% case had no clause for those and crashed with case_clause. A final
%% {badrpc, _} clause now reports the failure instead.
stream_log_file(#{name := Name}, Params) ->
    Node0 = proplists:get_value(<<"node">>, Params, atom_to_binary(node())),
    Position0 = proplists:get_value(<<"position">>, Params, <<"0">>),
    Bytes0 = proplists:get_value(<<"bytes">>, Params, <<"1000">>),
    case to_node(Node0) of
        {ok, Node} ->
            Position = binary_to_integer(Position0),
            Bytes = binary_to_integer(Bytes0),
            case rpc:call(Node, ?MODULE, read_trace_file, [Name, Position, Bytes]) of
                {ok, Bin} ->
                    Meta = #{<<"position">> => Position + byte_size(Bin), <<"bytes">> => Bytes},
                    {ok, #{meta => Meta, items => Bin}};
                {eof, Size} ->
                    Meta = #{<<"position">> => Size, <<"bytes">> => Bytes},
                    {ok, #{meta => Meta, items => <<"">>}};
                {error, Reason} ->
                    logger:log(error, "read_file_failed ~p", [{Node, Name, Reason, Position, Bytes}]),
                    {error, Reason};
                {badrpc, nodedown} ->
                    {error, "BadRpc node down"};
                {badrpc, RpcReason} ->
                    logger:log(error, "rpc_failed ~p", [{Node, Name, RpcReason}]),
                    {error, "BadRpc"}
            end;
        {error, Reason} -> {error, Reason}
    end.
%% Map {Node, FileName} -> size in bytes for every trace log on this
%% node (the "zip" staging dir is excluded). Runs as an RPC target.
get_trace_size() ->
    TraceDir = emqx_trace:trace_dir(),
    Node = node(),
    case file:list_dir(TraceDir) of
        {ok, AllFiles} ->
            lists:foldl(
              fun(File, Acc) ->
                      FullFileName = filename:join(TraceDir, File),
                      Acc#{{Node, File} => filelib:file_size(FullFileName)}
              end, #{}, lists:delete("zip", AllFiles));
        _ -> #{}
    end.

%% this is an rpc call for stream_log_file/2
read_trace_file(Name, Position, Limit) ->
    case emqx_trace:get_trace_filename(Name) of
        {error, _} = Error -> Error;
        {ok, TraceFile} ->
            TraceDir = emqx_trace:trace_dir(),
            read_file(filename:join([TraceDir, TraceFile]), Position, Limit)
    end.
%% Read up to Bytes from Path starting at Offset; returns {eof, Size}
%% when the offset is at or past the end of the file. The device is
%% always closed via the after-clause.
read_file(Path, Offset, Bytes) ->
    case file:open(Path, [read, raw, binary]) of
        {ok, IoDevice} ->
            try
                _ = case Offset of
                        0 -> ok;
                        _ -> file:position(IoDevice, {bof, Offset})
                    end,
                case file:read(IoDevice, Bytes) of
                    {ok, Bin} -> {ok, Bin};
                    {error, Reason} -> {error, Reason};
                    eof ->
                        {ok, #file_info{size = Size}} = file:read_file_info(IoDevice),
                        {eof, Size}
                end
            after
                file:close(IoDevice)
            end;
        {error, Reason} -> {error, Reason}
    end.
%% Translate a node-name binary into an atom without creating new atoms.
to_node(NodeBin) ->
    try
        {ok, binary_to_existing_atom(NodeBin)}
    catch
        _:_ -> {error, "node not found"}
    end.

%% Per-node size of FileName, defaulting to 0 for nodes that reported
%% no such file.
collect_file_size(Nodes, FileName, AllFiles) ->
    maps:from_list([{Node, maps:get({Node, FileName}, AllFiles, 0)} || Node <- Nodes]).

%% Human-readable lifecycle state of a trace at time Now.
status(false, _Start, _End, _Now) -> <<"stopped">>;
status(true, Start, End, Now) ->
    if
        Now < Start -> <<"waiting">>;
        Now >= End -> <<"stopped">>;
        true -> <<"running">>
    end.

View File

@ -0,0 +1,334 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_trace_SUITE).

%% API
-compile(export_all).
-compile(nowarn_export_all).

-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("emqx/include/emqx.hrl").

%% Local mirror of emqx_trace's mnesia record, used for assertions.
-record(emqx_trace, {name, type, filter, enable = true, start_at, end_at}).

%%--------------------------------------------------------------------
%% Setups
%%--------------------------------------------------------------------

all() ->
    emqx_ct:all(?MODULE).

init_per_suite(Config) ->
    emqx_ct_helpers:start_apps([]),
    Config.

end_per_suite(_Config) ->
    emqx_ct_helpers:stop_apps([]).
%% Start the tracer and reset stored traces before each case.
init_per_testcase(_, Config) ->
    load(),
    ok = emqx_trace:clear(),
    Config.

%% Fixed arity: common_test invokes end_per_testcase/2 (TestCase,
%% Config); the original end_per_testcase/1 was never called, so the
%% tracer was not stopped between test cases.
end_per_testcase(_TestCase, _Config) ->
    unload(),
    ok.
%% Create/list/format/delete round-trip, including duplicate-name and
%% duplicate-condition rejections.
t_base_create_delete(_Config) ->
    Now = erlang:system_time(second),
    Start = to_rfc3339(Now),
    End = to_rfc3339(Now + 30 * 60),
    Name = <<"name1">>,
    ClientId = <<"test-device">>,
    Trace = #{
        name => Name,
        type => <<"clientid">>,
        clientid => ClientId,
        start_at => Start,
        end_at => End
    },
    AnotherTrace = Trace#{name => <<"anotherTrace">>},
    ok = emqx_trace:create(Trace),
    ?assertEqual({error, {already_existed, Name}}, emqx_trace:create(Trace)),
    ?assertEqual({error, {duplicate_condition, Name}}, emqx_trace:create(AnotherTrace)),
    [TraceRec] = emqx_trace:list(),
    Expect = #emqx_trace{
        name = Name,
        type = clientid,
        filter = ClientId,
        start_at = Now,
        end_at = Now + 30 * 60
    },
    ?assertEqual(Expect, TraceRec),
    ExpectFormat = [
        #{
            filter => <<"test-device">>,
            enable => true,
            type => clientid,
            name => <<"name1">>,
            start_at => Now,
            end_at => Now + 30 * 60
        }
    ],
    ?assertEqual(ExpectFormat, emqx_trace:format([TraceRec])),
    ?assertEqual(ok, emqx_trace:delete(Name)),
    ?assertEqual({error, not_found}, emqx_trace:delete(Name)),
    ?assertEqual([], emqx_trace:list()),
    ok.
%% The 31st trace is rejected until one of the first 30 is deleted.
t_create_size_max(_Config) ->
    lists:map(
      fun(Seq) ->
              Name = list_to_binary("name" ++ integer_to_list(Seq)),
              Trace = [{name, Name}, {type, <<"topic">>},
                       {topic, list_to_binary("/x/y/" ++ integer_to_list(Seq))}],
              ok = emqx_trace:create(Trace)
      end, lists:seq(1, 30)),
    Trace31 = [{<<"name">>, <<"name31">>},
               {<<"type">>, <<"topic">>}, {<<"topic">>, <<"/x/y/31">>}],
    {error, _} = emqx_trace:create(Trace31),
    ok = emqx_trace:delete(<<"name30">>),
    ok = emqx_trace:create(Trace31),
    ?assertEqual(30, erlang:length(emqx_trace:list())),
    ok.

%% Every invalid-parameter path yields its dedicated error message.
t_create_failed(_Config) ->
    UnknownField = [{<<"unknown">>, 12}],
    {error, Reason1} = emqx_trace:create(UnknownField),
    ?assertEqual(<<"unknown field: {unknown,12}">>, iolist_to_binary(Reason1)),
    InvalidTopic = [{<<"topic">>, "#/#//"}],
    {error, Reason2} = emqx_trace:create(InvalidTopic),
    ?assertEqual(<<"topic: #/#// invalid by function_clause">>, iolist_to_binary(Reason2)),
    InvalidStart = [{<<"start_at">>, <<"2021-12-3:12">>}],
    {error, Reason3} = emqx_trace:create(InvalidStart),
    ?assertEqual(<<"The rfc3339 specification not satisfied: 2021-12-3:12">>,
                 iolist_to_binary(Reason3)),
    InvalidEnd = [{<<"end_at">>, <<"2021-12-3:12">>}],
    {error, Reason4} = emqx_trace:create(InvalidEnd),
    ?assertEqual(<<"The rfc3339 specification not satisfied: 2021-12-3:12">>,
                 iolist_to_binary(Reason4)),
    {error, Reason7} = emqx_trace:create([{<<"name">>, <<"test">>}, {<<"type">>, <<"clientid">>}]),
    ?assertEqual(<<"topic/clientid/ip_address filter required">>, iolist_to_binary(Reason7)),
    InvalidPackets4 = [{<<"name">>, <<"/test">>}, {<<"clientid">>, <<"t">>},
                       {<<"type">>, <<"clientid">>}],
    {error, Reason9} = emqx_trace:create(InvalidPackets4),
    ?assertEqual(<<"Name should be ^[A-Za-z]+[A-Za-z0-9-_]*$">>, iolist_to_binary(Reason9)),
    ?assertEqual({error, "type=[topic,clientid,ip_address] required"},
                 emqx_trace:create([{<<"name">>, <<"test-name">>}, {<<"clientid">>, <<"good">>}])),
    ?assertEqual({error, "incorrect type: only support clientid/topic/ip_address"},
                 emqx_trace:create([{<<"name">>, <<"test-name">>},
                                    {<<"clientid">>, <<"good">>}, {<<"type">>, <<"typeerror">> }])),
    ?assertEqual({error, "ip address: einval"},
                 emqx_trace:create([{<<"ip_address">>, <<"test-name">>}])),
    ok.
%% Defaults: start_at := now, end_at := start + 10 minutes; past or
%% inverted time windows are rejected.
t_create_default(_Config) ->
    {error, "name required"} = emqx_trace:create([]),
    ok = emqx_trace:create([{<<"name">>, <<"test-name">>},
                            {<<"type">>, <<"clientid">>}, {<<"clientid">>, <<"good">>}]),
    [#emqx_trace{name = <<"test-name">>}] = emqx_trace:list(),
    ok = emqx_trace:clear(),
    Trace = [
        {<<"name">>, <<"test-name">>},
        {<<"type">>, <<"topic">>},
        {<<"topic">>, <<"/x/y/z">>},
        {<<"start_at">>, <<"2021-10-28T10:54:47+08:00">>},
        {<<"end_at">>, <<"2021-10-27T10:54:47+08:00">>}
    ],
    {error, "end_at time has already passed"} = emqx_trace:create(Trace),
    Now = erlang:system_time(second),
    Trace2 = [
        {<<"name">>, <<"test-name">>},
        {<<"type">>, <<"topic">>},
        {<<"topic">>, <<"/x/y/z">>},
        {<<"start_at">>, to_rfc3339(Now + 10)},
        {<<"end_at">>, to_rfc3339(Now + 3)}
    ],
    {error, "failed by start_at >= end_at"} = emqx_trace:create(Trace2),
    ok = emqx_trace:create([{<<"name">>, <<"test-name">>},
                            {<<"type">>, <<"topic">>}, {<<"topic">>, <<"/x/y/z">>}]),
    [#emqx_trace{start_at = Start, end_at = End}] = emqx_trace:list(),
    ?assertEqual(10 * 60, End - Start),
    ?assertEqual(true, Start - erlang:system_time(second) < 5),
    ok.

%% Enable flag round-trips; updating a finished trace is refused.
t_update_enable(_Config) ->
    Name = <<"test-name">>,
    Now = erlang:system_time(second),
    End = list_to_binary(calendar:system_time_to_rfc3339(Now + 2)),
    ok = emqx_trace:create([{<<"name">>, Name}, {<<"type">>, <<"topic">>},
                            {<<"topic">>, <<"/x/y/z">>}, {<<"end_at">>, End}]),
    [#emqx_trace{enable = Enable}] = emqx_trace:list(),
    ?assertEqual(Enable, true),
    ok = emqx_trace:update(Name, false),
    [#emqx_trace{enable = false}] = emqx_trace:list(),
    ok = emqx_trace:update(Name, false),
    [#emqx_trace{enable = false}] = emqx_trace:list(),
    ok = emqx_trace:update(Name, true),
    [#emqx_trace{enable = true}] = emqx_trace:list(),
    ok = emqx_trace:update(Name, false),
    [#emqx_trace{enable = false}] = emqx_trace:list(),
    ?assertEqual({error, not_found}, emqx_trace:update(<<"Name not found">>, true)),
    ct:sleep(2100),
    ?assertEqual({error, finished}, emqx_trace:update(Name, true)),
    ok.
%% Running traces auto-disable once their window closes; waiting ones
%% stay enabled.
t_load_state(_Config) ->
    Now = erlang:system_time(second),
    Running = #{name => <<"Running">>, type => <<"topic">>,
                topic => <<"/x/y/1">>, start_at => to_rfc3339(Now - 1),
                end_at => to_rfc3339(Now + 2)},
    Waiting = [{<<"name">>, <<"Waiting">>}, {<<"type">>, <<"topic">>},
               {<<"topic">>, <<"/x/y/2">>}, {<<"start_at">>, to_rfc3339(Now + 3)},
               {<<"end_at">>, to_rfc3339(Now + 8)}],
    Finished = [{<<"name">>, <<"Finished">>}, {<<"type">>, <<"topic">>},
                {<<"topic">>, <<"/x/y/3">>}, {<<"start_at">>, to_rfc3339(Now - 5)},
                {<<"end_at">>, to_rfc3339(Now)}],
    ok = emqx_trace:create(Running),
    ok = emqx_trace:create(Waiting),
    {error, "end_at time has already passed"} = emqx_trace:create(Finished),
    Traces = emqx_trace:format(emqx_trace:list()),
    ?assertEqual(2, erlang:length(Traces)),
    Enables = lists:map(fun(#{name := Name, enable := Enable}) -> {Name, Enable} end, Traces),
    ExpectEnables = [{<<"Running">>, true}, {<<"Waiting">>, true}],
    ?assertEqual(ExpectEnables, lists:sort(Enables)),
    ct:sleep(3500),
    Traces2 = emqx_trace:format(emqx_trace:list()),
    ?assertEqual(2, erlang:length(Traces2)),
    Enables2 = lists:map(fun(#{name := Name, enable := Enable}) -> {Name, Enable} end, Traces2),
    ExpectEnables2 = [{<<"Running">>, false}, {<<"Waiting">>, true}],
    ?assertEqual(ExpectEnables2, lists:sort(Enables2)),
    ok.

%% Client activity lands in the clientid-trace file, and topic traces
%% capture publishes made after they start.
t_client_event(_Config) ->
    application:set_env(emqx, allow_anonymous, true),
    ClientId = <<"client-test">>,
    Now = erlang:system_time(second),
    Start = to_rfc3339(Now),
    Name = <<"test_client_id_event">>,
    ok = emqx_trace:create([{<<"name">>, Name},
                            {<<"type">>, <<"clientid">>}, {<<"clientid">>, ClientId}, {<<"start_at">>, Start}]),
    ok = emqx_trace_handler_SUITE:filesync(Name, clientid),
    {ok, Client} = emqtt:start_link([{clean_start, true}, {clientid, ClientId}]),
    {ok, _} = emqtt:connect(Client),
    emqtt:ping(Client),
    ok = emqtt:publish(Client, <<"/test">>, #{}, <<"1">>, [{qos, 0}]),
    ok = emqtt:publish(Client, <<"/test">>, #{}, <<"2">>, [{qos, 0}]),
    ok = emqx_trace_handler_SUITE:filesync(Name, clientid),
    ok = emqx_trace:create([{<<"name">>, <<"test_topic">>},
                            {<<"type">>, <<"topic">>}, {<<"topic">>, <<"/test">>}, {<<"start_at">>, Start}]),
    ok = emqx_trace_handler_SUITE:filesync(<<"test_topic">>, topic),
    {ok, Bin} = file:read_file(emqx_trace:log_file(Name, Now)),
    ok = emqtt:publish(Client, <<"/test">>, #{}, <<"3">>, [{qos, 0}]),
    ok = emqtt:publish(Client, <<"/test">>, #{}, <<"4">>, [{qos, 0}]),
    ok = emqtt:disconnect(Client),
    ok = emqx_trace_handler_SUITE:filesync(Name, clientid),
    ok = emqx_trace_handler_SUITE:filesync(<<"test_topic">>, topic),
    {ok, Bin2} = file:read_file(emqx_trace:log_file(Name, Now)),
    {ok, Bin3} = file:read_file(emqx_trace:log_file(<<"test_topic">>, Now)),
    ct:pal("Bin ~p Bin2 ~p Bin3 ~p", [byte_size(Bin), byte_size(Bin2), byte_size(Bin3)]),
    ?assert(erlang:byte_size(Bin) > 0),
    ?assert(erlang:byte_size(Bin) < erlang:byte_size(Bin2)),
    ?assert(erlang:byte_size(Bin3) > 0),
    ok.
%% The filename stays resolvable even after the trace window ends.
t_get_log_filename(_Config) ->
    Now = erlang:system_time(second),
    Start = calendar:system_time_to_rfc3339(Now),
    End = calendar:system_time_to_rfc3339(Now + 2),
    Name = <<"name1">>,
    Trace = [
        {<<"name">>, Name},
        {<<"type">>, <<"ip_address">>},
        {<<"ip_address">>, <<"127.0.0.1">>},
        {<<"start_at">>, list_to_binary(Start)},
        {<<"end_at">>, list_to_binary(End)}
    ],
    ok = emqx_trace:create(Trace),
    ?assertEqual({error, not_found}, emqx_trace:get_trace_filename(<<"test">>)),
    ?assertEqual(ok, element(1, emqx_trace:get_trace_filename(Name))),
    ct:sleep(3000),
    ?assertEqual(ok, element(1, emqx_trace:get_trace_filename(Name))),
    ok.

%% trace_file/1 returns the node name and the exact file contents.
t_trace_file(_Config) ->
    FileName = "test.log",
    Content = <<"test \n test">>,
    TraceDir = emqx_trace:trace_dir(),
    File = filename:join(TraceDir, FileName),
    ok = file:write_file(File, Content),
    {ok, Node, Bin} = emqx_trace:trace_file(FileName),
    ?assertEqual(Node, atom_to_list(node())),
    ?assertEqual(Content, Bin),
    ok = file:delete(File),
    ok.

%% A non-empty zip bundle is produced for a trace with logged activity.
t_download_log(_Config) ->
    ClientId = <<"client-test">>,
    Now = erlang:system_time(second),
    Start = to_rfc3339(Now),
    Name = <<"test_client_id">>,
    ok = emqx_trace:create([{<<"name">>, Name},
                            {<<"type">>, <<"clientid">>}, {<<"clientid">>, ClientId}, {<<"start_at">>, Start}]),
    {ok, Client} = emqtt:start_link([{clean_start, true}, {clientid, ClientId}]),
    {ok, _} = emqtt:connect(Client),
    [begin _ = emqtt:ping(Client) end || _ <- lists:seq(1, 5)],
    ok = emqx_trace_handler_SUITE:filesync(Name, clientid),
    {ok, ZipFile} = emqx_trace_api:download_zip_log(#{name => Name}, []),
    ?assert(filelib:file_size(ZipFile) > 0),
    ok = emqtt:disconnect(Client),
    ok.
%% find_closest_time/2: disabled traces are ignored; the smallest
%% future start/end delta wins, capped at 15 minutes (in ms).
t_find_closed_time(_Config) ->
    DefaultMs = 60 * 15000,
    Now = erlang:system_time(second),
    Traces2 = [],
    ?assertEqual(DefaultMs, emqx_trace:find_closest_time(Traces2, Now)),
    Traces3 = [#emqx_trace{name = <<"disable">>, start_at = Now + 1,
                           end_at = Now + 2, enable = false}],
    ?assertEqual(DefaultMs, emqx_trace:find_closest_time(Traces3, Now)),
    Traces4 = [#emqx_trace{name = <<"running">>, start_at = Now, end_at = Now + 10, enable = true}],
    ?assertEqual(10000, emqx_trace:find_closest_time(Traces4, Now)),
    Traces5 = [#emqx_trace{name = <<"waiting">>, start_at = Now + 2,
                           end_at = Now + 10, enable = true}],
    ?assertEqual(2000, emqx_trace:find_closest_time(Traces5, Now)),
    Traces = [
        #emqx_trace{name = <<"waiting">>, start_at = Now + 1, end_at = Now + 2, enable = true},
        #emqx_trace{name = <<"running0">>, start_at = Now, end_at = Now + 5, enable = true},
        #emqx_trace{name = <<"running1">>, start_at = Now - 1, end_at = Now + 1, enable = true},
        #emqx_trace{name = <<"finished">>, start_at = Now - 2, end_at = Now - 1, enable = true},
        #emqx_trace{name = <<"waiting">>, start_at = Now + 1, end_at = Now + 1, enable = true},
        #emqx_trace{name = <<"stopped">>, start_at = Now, end_at = Now + 10, enable = false}
    ],
    ?assertEqual(1000, emqx_trace:find_closest_time(Traces, Now)),
    ok.
%% Render a system time in seconds as an RFC 3339 binary, e.g.
%% <<"2021-12-09T18:49:38+08:00">> (offset is the local time zone).
to_rfc3339(Second) ->
    iolist_to_binary(calendar:system_time_to_rfc3339(Second)).
%% Test helper: start the emqx_trace gen_server for a test run
%% (paired with unload/0).
load() ->
    emqx_trace:start_link().
%% Test helper: stop the emqx_trace gen_server started by load/0.
unload() ->
    gen_server:stop(emqx_trace).

View File

@ -1,6 +1,6 @@
{application, emqx_retainer, {application, emqx_retainer,
[{description, "EMQ X Retainer"}, [{description, "EMQ X Retainer"},
{vsn, "4.3.2"}, % strict semver, bump manually! {vsn, "4.4.0"}, % strict semver, bump manually!
{modules, []}, {modules, []},
{registered, [emqx_retainer_sup]}, {registered, [emqx_retainer_sup]},
{applications, [kernel,stdlib]}, {applications, [kernel,stdlib]},

View File

@ -78,7 +78,8 @@ dispatch(Pid, Topic) ->
false -> read_messages(Topic); false -> read_messages(Topic);
true -> match_messages(Topic) true -> match_messages(Topic)
end, end,
[Pid ! {deliver, Topic, Msg} || Msg <- sort_retained(Msgs)]. Now = erlang:system_time(millisecond),
[Pid ! {deliver, Topic, refresh_timestamp_expiry(Msg, Now)} || Msg <- sort_retained(Msgs)].
%% RETAIN flag set to 1 and payload containing zero bytes %% RETAIN flag set to 1 and payload containing zero bytes
on_message_publish(Msg = #message{flags = #{retain := true}, on_message_publish(Msg = #message{flags = #{retain := true},
@ -151,7 +152,7 @@ init([Env]) ->
ok ok
end, end,
StatsFun = emqx_stats:statsfun('retained.count', 'retained.max'), StatsFun = emqx_stats:statsfun('retained.count', 'retained.max'),
{ok, StatsTimer} = timer:send_interval(timer:seconds(1), stats), StatsTimer = erlang:send_after(timer:seconds(1), self(), stats),
State = #state{stats_fun = StatsFun, stats_timer = StatsTimer}, State = #state{stats_fun = StatsFun, stats_timer = StatsTimer},
{ok, start_expire_timer(proplists:get_value(expiry_interval, Env, 0), State)}. {ok, start_expire_timer(proplists:get_value(expiry_interval, Env, 0), State)}.
@ -160,7 +161,7 @@ start_expire_timer(0, State) ->
start_expire_timer(undefined, State) -> start_expire_timer(undefined, State) ->
State; State;
start_expire_timer(Ms, State) -> start_expire_timer(Ms, State) ->
{ok, Timer} = timer:send_interval(Ms, expire), Timer = erlang:send_after(Ms, self(), {expire, Ms}),
State#state{expiry_timer = Timer}. State#state{expiry_timer = Timer}.
handle_call(Req, _From, State) -> handle_call(Req, _From, State) ->
@ -172,12 +173,14 @@ handle_cast(Msg, State) ->
{noreply, State}. {noreply, State}.
handle_info(stats, State = #state{stats_fun = StatsFun}) -> handle_info(stats, State = #state{stats_fun = StatsFun}) ->
StatsTimer = erlang:send_after(timer:seconds(1), self(), stats),
StatsFun(retained_count()), StatsFun(retained_count()),
{noreply, State, hibernate}; {noreply, State#state{stats_timer = StatsTimer}, hibernate};
handle_info(expire, State) -> handle_info({expire, Ms} = Expire, State) ->
Timer = erlang:send_after(Ms, self(), Expire),
ok = expire_messages(), ok = expire_messages(),
{noreply, State, hibernate}; {noreply, State#state{expiry_timer = Timer}, hibernate};
handle_info(Info, State) -> handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]), ?LOG(error, "Unexpected info: ~p", [Info]),
@ -214,11 +217,13 @@ store_retained(Msg = #message{topic = Topic, payload = Payload}, Env) ->
fun() -> fun() ->
case mnesia:read(?TAB, Topic) of case mnesia:read(?TAB, Topic) of
[_] -> [_] ->
mnesia:write(?TAB, #retained{topic = topic2tokens(Topic), mnesia:write(?TAB,
#retained{topic = topic2tokens(Topic),
msg = Msg, msg = Msg,
expiry_time = get_expiry_time(Msg, Env)}, write); expiry_time = get_expiry_time(Msg, Env)}, write);
[] -> [] ->
?LOG(error, "Cannot retain message(topic=~s) for table is full!", [Topic]) ?LOG(error,
"Cannot retain message(topic=~s) for table is full!", [Topic])
end end
end), end),
ok; ok;
@ -242,7 +247,8 @@ is_too_big(Size, Env) ->
get_expiry_time(#message{headers = #{properties := #{'Message-Expiry-Interval' := 0}}}, _Env) -> get_expiry_time(#message{headers = #{properties := #{'Message-Expiry-Interval' := 0}}}, _Env) ->
0; 0;
get_expiry_time(#message{headers = #{properties := #{'Message-Expiry-Interval' := Interval}}, timestamp = Ts}, _Env) -> get_expiry_time(#message{headers = #{properties := #{'Message-Expiry-Interval' := Interval}},
timestamp = Ts}, _Env) ->
Ts + Interval * 1000; Ts + Interval * 1000;
get_expiry_time(#message{timestamp = Ts}, Env) -> get_expiry_time(#message{timestamp = Ts}, Env) ->
case proplists:get_value(expiry_interval, Env, 0) of case proplists:get_value(expiry_interval, Env, 0) of
@ -311,3 +317,18 @@ condition(Ws) ->
false -> Ws1; false -> Ws1;
_ -> (Ws1 -- ['#']) ++ '_' _ -> (Ws1 -- ['#']) ++ '_'
end. end.
-spec(refresh_timestamp_expiry(emqx_types:message(), pos_integer()) -> emqx_types:message()).
refresh_timestamp_expiry(Msg = #message{headers =
#{properties :=
#{'Message-Expiry-Interval' := Interval} = Props},
timestamp = CreatedAt},
Now) ->
Elapsed = max(0, Now - CreatedAt),
Interval1 = max(1, Interval - (Elapsed div 1000)),
emqx_message:set_header(properties,
Props#{'Message-Expiry-Interval' => Interval1},
Msg#message{timestamp = Now});
refresh_timestamp_expiry(Msg, Now) ->
Msg#message{timestamp = Now}.

View File

@ -1,6 +1,6 @@
{application, emqx_rule_engine, {application, emqx_rule_engine,
[{description, "EMQ X Rule Engine"}, [{description, "EMQ X Rule Engine"},
{vsn, "4.3.6"}, % strict semver, bump manually! {vsn, "4.4.0"}, % strict semver, bump manually!
{modules, []}, {modules, []},
{registered, [emqx_rule_engine_sup, emqx_rule_registry]}, {registered, [emqx_rule_engine_sup, emqx_rule_registry]},
{applications, [kernel,stdlib,rulesql,getopt]}, {applications, [kernel,stdlib,rulesql,getopt]},

View File

@ -454,7 +454,8 @@ columns_with_exam('message.publish') ->
, {<<"topic">>, <<"t/a">>} , {<<"topic">>, <<"t/a">>}
, {<<"qos">>, 1} , {<<"qos">>, 1}
, {<<"flags">>, #{}} , {<<"flags">>, #{}}
, {<<"headers">>, undefined} , {<<"headers">>, #{<<"properties">> => #{<<"User-Property">> =>
#{'prop_key' => <<"prop_val">>}}}}
, {<<"publish_received_at">>, erlang:system_time(millisecond)} , {<<"publish_received_at">>, erlang:system_time(millisecond)}
, {<<"timestamp">>, erlang:system_time(millisecond)} , {<<"timestamp">>, erlang:system_time(millisecond)}
, {<<"node">>, node()} , {<<"node">>, node()}

View File

@ -17,6 +17,9 @@
-module(emqx_rule_funcs). -module(emqx_rule_funcs).
-include("rule_engine.hrl"). -include("rule_engine.hrl").
-elvis([{elvis_style, god_modules, disable}]).
-elvis([{elvis_style, function_naming_convention, disable}]).
-elvis([{elvis_style, macro_names, disable}]).
%% IoT Funcs %% IoT Funcs
-export([ msgid/0 -export([ msgid/0
@ -438,7 +441,8 @@ subbits(Bits, Len) when is_integer(Len), is_bitstring(Bits) ->
subbits(Bits, Start, Len) when is_integer(Start), is_integer(Len), is_bitstring(Bits) -> subbits(Bits, Start, Len) when is_integer(Start), is_integer(Len), is_bitstring(Bits) ->
get_subbits(Bits, Start, Len, <<"integer">>, <<"unsigned">>, <<"big">>). get_subbits(Bits, Start, Len, <<"integer">>, <<"unsigned">>, <<"big">>).
subbits(Bits, Start, Len, Type, Signedness, Endianness) when is_integer(Start), is_integer(Len), is_bitstring(Bits) -> subbits(Bits, Start, Len, Type, Signedness, Endianness)
when is_integer(Start), is_integer(Len), is_bitstring(Bits) ->
get_subbits(Bits, Start, Len, Type, Signedness, Endianness). get_subbits(Bits, Start, Len, Type, Signedness, Endianness).
get_subbits(Bits, Start, Len, Type, Signedness, Endianness) -> get_subbits(Bits, Start, Len, Type, Signedness, Endianness) ->
@ -520,7 +524,7 @@ map(Data) ->
emqx_rule_utils:map(Data). emqx_rule_utils:map(Data).
bin2hexstr(Bin) when is_binary(Bin) -> bin2hexstr(Bin) when is_binary(Bin) ->
emqx_misc:bin2hexstr_A_F(Bin). emqx_misc:bin2hexstr_a_f_upper(Bin).
hexstr2bin(Str) when is_binary(Str) -> hexstr2bin(Str) when is_binary(Str) ->
emqx_misc:hexstr2bin(Str). emqx_misc:hexstr2bin(Str).
@ -608,7 +612,8 @@ tokens(S, Separators) ->
[list_to_binary(R) || R <- string:lexemes(binary_to_list(S), binary_to_list(Separators))]. [list_to_binary(R) || R <- string:lexemes(binary_to_list(S), binary_to_list(Separators))].
tokens(S, Separators, <<"nocrlf">>) -> tokens(S, Separators, <<"nocrlf">>) ->
[list_to_binary(R) || R <- string:lexemes(binary_to_list(S), binary_to_list(Separators) ++ [$\r,$\n,[$\r,$\n]])]. [list_to_binary(R) || R <- string:lexemes(binary_to_list(S),
binary_to_list(Separators) ++ [$\r,$\n,[$\r,$\n]])].
concat(S1, S2) when is_binary(S1), is_binary(S2) -> concat(S1, S2) when is_binary(S1), is_binary(S2) ->
unicode:characters_to_binary([S1, S2], unicode). unicode:characters_to_binary([S1, S2], unicode).
@ -646,7 +651,8 @@ replace(SrcStr, P, RepStr) when is_binary(SrcStr), is_binary(P), is_binary(RepSt
replace(SrcStr, P, RepStr, <<"all">>) when is_binary(SrcStr), is_binary(P), is_binary(RepStr) -> replace(SrcStr, P, RepStr, <<"all">>) when is_binary(SrcStr), is_binary(P), is_binary(RepStr) ->
iolist_to_binary(string:replace(SrcStr, P, RepStr, all)); iolist_to_binary(string:replace(SrcStr, P, RepStr, all));
replace(SrcStr, P, RepStr, <<"trailing">>) when is_binary(SrcStr), is_binary(P), is_binary(RepStr) -> replace(SrcStr, P, RepStr, <<"trailing">>)
when is_binary(SrcStr), is_binary(P), is_binary(RepStr) ->
iolist_to_binary(string:replace(SrcStr, P, RepStr, trailing)); iolist_to_binary(string:replace(SrcStr, P, RepStr, trailing));
replace(SrcStr, P, RepStr, <<"leading">>) when is_binary(SrcStr), is_binary(P), is_binary(RepStr) -> replace(SrcStr, P, RepStr, <<"leading">>) when is_binary(SrcStr), is_binary(P), is_binary(RepStr) ->
@ -662,7 +668,7 @@ regex_replace(SrcStr, RE, RepStr) ->
re:replace(SrcStr, RE, RepStr, [global, {return,binary}]). re:replace(SrcStr, RE, RepStr, [global, {return,binary}]).
ascii(Char) when is_binary(Char) -> ascii(Char) when is_binary(Char) ->
[FirstC| _] = binary_to_list(Char), [FirstC | _] = binary_to_list(Char),
FirstC. FirstC.
find(S, P) when is_binary(S), is_binary(P) -> find(S, P) when is_binary(S), is_binary(P) ->
@ -782,7 +788,7 @@ sha256(S) when is_binary(S) ->
hash(sha256, S). hash(sha256, S).
hash(Type, Data) -> hash(Type, Data) ->
emqx_misc:bin2hexstr_a_f(crypto:hash(Type, Data)). emqx_misc:bin2hexstr_a_f_lower(crypto:hash(Type, Data)).
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
%% Data encode and decode Funcs %% Data encode and decode Funcs
@ -875,23 +881,23 @@ time_unit(<<"nanosecond">>) -> nanosecond.
%% the function handling to the worker module. %% the function handling to the worker module.
%% @end %% @end
-ifdef(EMQX_ENTERPRISE). -ifdef(EMQX_ENTERPRISE).
'$handle_undefined_function'(schema_decode, [SchemaId, Data|MoreArgs]) -> '$handle_undefined_function'(schema_decode, [SchemaId, Data | MoreArgs]) ->
emqx_schema_parser:decode(SchemaId, Data, MoreArgs); emqx_schema_parser:decode(SchemaId, Data, MoreArgs);
'$handle_undefined_function'(schema_decode, Args) -> '$handle_undefined_function'(schema_decode, Args) ->
error({args_count_error, {schema_decode, Args}}); error({args_count_error, {schema_decode, Args}});
'$handle_undefined_function'(schema_encode, [SchemaId, Term|MoreArgs]) -> '$handle_undefined_function'(schema_encode, [SchemaId, Term | MoreArgs]) ->
emqx_schema_parser:encode(SchemaId, Term, MoreArgs); emqx_schema_parser:encode(SchemaId, Term, MoreArgs);
'$handle_undefined_function'(schema_encode, Args) -> '$handle_undefined_function'(schema_encode, Args) ->
error({args_count_error, {schema_encode, Args}}); error({args_count_error, {schema_encode, Args}});
'$handle_undefined_function'(sprintf, [Format|Args]) -> '$handle_undefined_function'(sprintf, [Format | Args]) ->
erlang:apply(fun sprintf_s/2, [Format, Args]); erlang:apply(fun sprintf_s/2, [Format, Args]);
'$handle_undefined_function'(Fun, Args) -> '$handle_undefined_function'(Fun, Args) ->
error({sql_function_not_supported, function_literal(Fun, Args)}). error({sql_function_not_supported, function_literal(Fun, Args)}).
-else. -else.
'$handle_undefined_function'(sprintf, [Format|Args]) -> '$handle_undefined_function'(sprintf, [Format | Args]) ->
erlang:apply(fun sprintf_s/2, [Format, Args]); erlang:apply(fun sprintf_s/2, [Format, Args]);
'$handle_undefined_function'(Fun, Args) -> '$handle_undefined_function'(Fun, Args) ->

View File

@ -73,6 +73,8 @@
, terminate/2 , terminate/2
]). ]).
-elvis([{elvis_style, god_modules, disable}]).
-ifndef(TEST). -ifndef(TEST).
-define(SECS_5M, 300). -define(SECS_5M, 300).
-define(SAMPLING, 10). -define(SAMPLING, 10).
@ -235,7 +237,7 @@ start_link() ->
init([]) -> init([]) ->
erlang:process_flag(trap_exit, true), erlang:process_flag(trap_exit, true),
%% the overall counters %% the overall counters
[ok = emqx_metrics:ensure(Metric)|| Metric <- overall_metrics()], [ok = emqx_metrics:ensure(Metric) || Metric <- overall_metrics()],
%% the speed metrics %% the speed metrics
erlang:send_after(timer:seconds(?SAMPLING), self(), ticking), erlang:send_after(timer:seconds(?SAMPLING), self(), ticking),
{ok, #state{overall_rule_speed = #rule_speed{}}}. {ok, #state{overall_rule_speed = #rule_speed{}}}.
@ -388,17 +390,19 @@ calculate_speed(CurrVal, #rule_speed{max = MaxSpeed0, last_v = LastVal,
%% calculate the max speed since the emqx startup %% calculate the max speed since the emqx startup
MaxSpeed = MaxSpeed =
if MaxSpeed0 >= CurrSpeed -> MaxSpeed0; case MaxSpeed0 >= CurrSpeed of
true -> CurrSpeed true -> MaxSpeed0;
false -> CurrSpeed
end, end,
%% calculate the average speed in last 5 mins %% calculate the average speed in last 5 mins
{Last5MinSamples, Acc5Min, Last5Min} = {Last5MinSamples, Acc5Min, Last5Min} =
if Tick =< ?SAMPCOUNT_5M -> case Tick =< ?SAMPCOUNT_5M of
true ->
Acc = AccSpeed5Min0 + CurrSpeed, Acc = AccSpeed5Min0 + CurrSpeed,
{lists:reverse([CurrSpeed | lists:reverse(Last5MinSamples0)]), {lists:reverse([CurrSpeed | lists:reverse(Last5MinSamples0)]),
Acc, Acc / Tick}; Acc, Acc / Tick};
true -> false ->
[FirstSpeed | Speeds] = Last5MinSamples0, [FirstSpeed | Speeds] = Last5MinSamples0,
Acc = AccSpeed5Min0 + CurrSpeed - FirstSpeed, Acc = AccSpeed5Min0 + CurrSpeed - FirstSpeed,
{lists:reverse([CurrSpeed | lists:reverse(Speeds)]), {lists:reverse([CurrSpeed | lists:reverse(Speeds)]),
@ -410,7 +414,7 @@ calculate_speed(CurrVal, #rule_speed{max = MaxSpeed0, last_v = LastVal,
last5m_smpl = Last5MinSamples, tick = Tick + 1}. last5m_smpl = Last5MinSamples, tick = Tick + 1}.
format_rule_speed(#rule_speed{max = Max, current = Current, last5m = Last5Min}) -> format_rule_speed(#rule_speed{max = Max, current = Current, last5m = Last5Min}) ->
#{max => Max, current => precision(Current, 2), last5m => precision(Last5Min, 2)}. #{max => precision(Max, 2), current => precision(Current, 2), last5m => precision(Last5Min, 2)}.
precision(Float, N) -> precision(Float, N) ->
Base = math:pow(10, N), Base = math:pow(10, N),

View File

@ -98,21 +98,8 @@ sql_test_action() ->
fill_default_values(Event, Context) -> fill_default_values(Event, Context) ->
maps:merge(envs_examp(Event), Context). maps:merge(envs_examp(Event), Context).
envs_examp(<<"$events/", _/binary>> = EVENT_TOPIC) -> envs_examp(EVENT_TOPIC) ->
EventName = emqx_rule_events:event_name(EVENT_TOPIC), EventName = emqx_rule_events:event_name(EVENT_TOPIC),
emqx_rule_maps:atom_key_map( emqx_rule_maps:atom_key_map(
maps:from_list( maps:from_list(
emqx_rule_events:columns_with_exam(EventName))); emqx_rule_events:columns_with_exam(EventName))).
envs_examp(_) ->
#{id => emqx_guid:to_hexstr(emqx_guid:gen()),
clientid => <<"c_emqx">>,
username => <<"u_emqx">>,
payload => <<"{\"id\": 1, \"name\": \"ha\"}">>,
peerhost => <<"127.0.0.1">>,
topic => <<"t/a">>,
qos => 1,
flags => #{sys => true, event => true},
publish_received_at => emqx_rule_utils:now_ms(),
timestamp => emqx_rule_utils:now_ms(),
node => node()
}.

View File

@ -50,6 +50,9 @@
%% erlang:system_time should be unique and random enough %% erlang:system_time should be unique and random enough
-define(CLIENTID, iolist_to_binary([atom_to_list(?FUNCTION_NAME), "-", -define(CLIENTID, iolist_to_binary([atom_to_list(?FUNCTION_NAME), "-",
integer_to_list(erlang:system_time())])). integer_to_list(erlang:system_time())])).
-elvis([{elvis_style, dont_repeat_yourself, disable}]).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Setups %% Setups
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -66,7 +69,9 @@ end_per_suite(_) ->
emqx_ct_helpers:stop_apps([emqx_sn]). emqx_ct_helpers:stop_apps([emqx_sn]).
set_special_confs(emqx) -> set_special_confs(emqx) ->
application:set_env(emqx, plugins_loaded_file, application:set_env(
emqx,
plugins_loaded_file,
emqx_ct_helpers:deps_path(emqx, "test/emqx_SUITE_data/loaded_plugins")); emqx_ct_helpers:deps_path(emqx, "test/emqx_SUITE_data/loaded_plugins"));
set_special_confs(emqx_sn) -> set_special_confs(emqx_sn) ->
application:set_env(emqx_sn, enable_qos3, ?ENABLE_QOS3), application:set_env(emqx_sn, enable_qos3, ?ENABLE_QOS3),
@ -113,7 +118,8 @@ t_subscribe(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
TopicName1 = <<"abcD">>, TopicName1 = <<"abcD">>,
send_register_msg(Socket, TopicName1, MsgId), send_register_msg(Socket, TopicName1, MsgId),
?assertEqual(<<7, ?SN_REGACK, TopicId:16, MsgId:16, 0:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId:16, MsgId:16, 0:8>>,
receive_response(Socket)),
send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1,
CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId:16, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId:16,
@ -145,7 +151,8 @@ t_subscribe_case01(_) ->
TopicName1 = <<"abcD">>, TopicName1 = <<"abcD">>,
send_register_msg(Socket, TopicName1, MsgId), send_register_msg(Socket, TopicName1, MsgId),
?assertEqual(<<7, ?SN_REGACK, TopicId:16, MsgId:16, 0:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId:16, MsgId:16, 0:8>>,
receive_response(Socket)),
send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1,
?SN_NORMAL_TOPIC:2, TopicId:16, MsgId:16, ReturnCode>>, ?SN_NORMAL_TOPIC:2, TopicId:16, MsgId:16, ReturnCode>>,
@ -166,17 +173,18 @@ t_subscribe_case02(_) ->
Will = 0, Will = 0,
CleanSession = 0, CleanSession = 0,
MsgId = 1, MsgId = 1,
TopicId = ?PREDEF_TOPIC_ID1, %this TopicId is the predefined topic id corresponding to ?PREDEF_TOPIC_NAME1 TopicId = ?PREDEF_TOPIC_ID1,
ReturnCode = 0, ReturnCode = 0,
{ok, Socket} = gen_udp:open(0, [binary]), {ok, Socket} = gen_udp:open(0, [binary]),
ClientId = ?CLIENTID, ClientId = ?CLIENTID,
send_connect_msg(Socket, ?CLIENTID), send_connect_msg(Socket, ClientId),
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
Topic1 = ?PREDEF_TOPIC_NAME1, Topic1 = ?PREDEF_TOPIC_NAME1,
send_register_msg(Socket, Topic1, MsgId), send_register_msg(Socket, Topic1, MsgId),
?assertEqual(<<7, ?SN_REGACK, TopicId:16, MsgId:16, 0:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId:16, MsgId:16, 0:8>>,
receive_response(Socket)),
send_subscribe_msg_predefined_topic(Socket, QoS, TopicId, MsgId), send_subscribe_msg_predefined_topic(Socket, QoS, TopicId, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1,
?SN_NORMAL_TOPIC:2, TopicId:16, MsgId:16, ReturnCode>>, ?SN_NORMAL_TOPIC:2, TopicId:16, MsgId:16, ReturnCode>>,
@ -206,9 +214,11 @@ t_subscribe_case03(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_short_topic(Socket, QoS, <<"te">>, MsgId), send_subscribe_msg_short_topic(Socket, QoS, <<"te">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1,
?SN_NORMAL_TOPIC:2, TopicId:16, MsgId:16, ReturnCode>>, CleanSession:1, ?SN_NORMAL_TOPIC:2,
receive_response(Socket)), TopicId:16, MsgId:16, ReturnCode>>,
receive_response(Socket)
),
send_unsubscribe_msg_short_topic(Socket, <<"te">>, MsgId), send_unsubscribe_msg_short_topic(Socket, <<"te">>, MsgId),
?assertEqual(<<4, ?SN_UNSUBACK, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_UNSUBACK, MsgId:16>>, receive_response(Socket)),
@ -217,8 +227,12 @@ t_subscribe_case03(_) ->
?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)), ?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)),
gen_udp:close(Socket). gen_udp:close(Socket).
%%In this case We use predefined topic name to register and subcribe, and expect to receive the corresponding predefined topic id but not a new generated topic id from broker. We design this case to illustrate %% In this case We use predefined topic name to register and subcribe, and
%% emqx_sn_gateway's compatibility of dealing with predefined and normal topics. Once we give more restrictions to different topic id type, this case would be deleted or modified. %% expect to receive the corresponding predefined topic id but not a new
%% generated topic id from broker. We design this case to illustrate
%% emqx_sn_gateway's compatibility of dealing with predefined and normal topics.
%% Once we give more restrictions to different topic id type, this case would
%% be deleted or modified.
t_subscribe_case04(_) -> t_subscribe_case04(_) ->
Dup = 0, Dup = 0,
QoS = 0, QoS = 0,
@ -226,7 +240,7 @@ t_subscribe_case04(_) ->
Will = 0, Will = 0,
CleanSession = 0, CleanSession = 0,
MsgId = 1, MsgId = 1,
TopicId = ?PREDEF_TOPIC_ID1, %this TopicId is the predefined topic id corresponding to ?PREDEF_TOPIC_NAME1 TopicId = ?PREDEF_TOPIC_ID1,
ReturnCode = 0, ReturnCode = 0,
{ok, Socket} = gen_udp:open(0, [binary]), {ok, Socket} = gen_udp:open(0, [binary]),
ClientId = ?CLIENTID, ClientId = ?CLIENTID,
@ -234,10 +248,14 @@ t_subscribe_case04(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
Topic1 = ?PREDEF_TOPIC_NAME1, Topic1 = ?PREDEF_TOPIC_NAME1,
send_register_msg(Socket, Topic1, MsgId), send_register_msg(Socket, Topic1, MsgId),
?assertEqual(<<7, ?SN_REGACK, TopicId:16, MsgId:16, 0:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId:16, MsgId:16, 0:8>>,
send_subscribe_msg_normal_topic(Socket, QoS, Topic1, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId:16, MsgId:16, ReturnCode>>,
receive_response(Socket)), receive_response(Socket)),
send_subscribe_msg_normal_topic(Socket, QoS, Topic1, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId:16, MsgId:16, ReturnCode>>,
receive_response(Socket)
),
send_unsubscribe_msg_normal_topic(Socket, Topic1, MsgId), send_unsubscribe_msg_normal_topic(Socket, Topic1, MsgId),
?assertEqual(<<4, ?SN_UNSUBACK, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_UNSUBACK, MsgId:16>>, receive_response(Socket)),
@ -264,19 +282,30 @@ t_subscribe_case05(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_register_msg(Socket, <<"abcD">>, MsgId), send_register_msg(Socket, <<"abcD">>, MsgId),
?assertEqual(<<7, ?SN_REGACK, TopicId1:16, MsgId:16, 0:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId1:16, MsgId:16, 0:8>>,
receive_response(Socket)
),
send_subscribe_msg_normal_topic(Socket, QoS, <<"abcD">>, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, <<"abcD">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, MsgId:16, ReturnCode>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, MsgId:16, ReturnCode>>,
receive_response(Socket)
),
send_subscribe_msg_normal_topic(Socket, QoS, <<"/sport/#">>, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, <<"/sport/#">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId0:16, MsgId:16, ReturnCode>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId0:16, MsgId:16, ReturnCode>>,
receive_response(Socket)
),
send_subscribe_msg_normal_topic(Socket, QoS, <<"/a/+/water">>, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, <<"/a/+/water">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId0:16, MsgId:16, ReturnCode>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId0:16, MsgId:16, ReturnCode>>,
receive_response(Socket)
),
send_subscribe_msg_normal_topic(Socket, QoS, <<"/Tom/Home">>, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, <<"/Tom/Home">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1,
@ -306,19 +335,32 @@ t_subscribe_case06(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_register_msg(Socket, <<"abc">>, MsgId), send_register_msg(Socket, <<"abc">>, MsgId),
?assertEqual(<<7, ?SN_REGACK, TopicId1:16, MsgId:16, 0:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId1:16, MsgId:16, 0:8>>,
receive_response(Socket)
),
send_register_msg(Socket, <<"/blue/#">>, MsgId), send_register_msg(Socket, <<"/blue/#">>, MsgId),
?assertEqual(<<7, ?SN_REGACK, TopicId0:16, MsgId:16, ?SN_RC_NOT_SUPPORTED:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId0:16,
MsgId:16, ?SN_RC_NOT_SUPPORTED:8>>,
receive_response(Socket)
),
send_register_msg(Socket, <<"/blue/+/white">>, MsgId), send_register_msg(Socket, <<"/blue/+/white">>, MsgId),
?assertEqual(<<7, ?SN_REGACK, TopicId0:16, MsgId:16, ?SN_RC_NOT_SUPPORTED:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId0:16,
MsgId:16, ?SN_RC_NOT_SUPPORTED:8>>,
receive_response(Socket)
),
send_register_msg(Socket, <<"/$sys/rain">>, MsgId), send_register_msg(Socket, <<"/$sys/rain">>, MsgId),
?assertEqual(<<7, ?SN_REGACK, TopicId2:16, MsgId:16, 0:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId2:16, MsgId:16, 0:8>>,
receive_response(Socket)
),
send_subscribe_msg_short_topic(Socket, QoS, <<"Q2">>, MsgId), send_subscribe_msg_short_topic(Socket, QoS, <<"Q2">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId0:16, MsgId:16, ReturnCode>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId0:16, MsgId:16, ReturnCode>>,
receive_response(Socket)
),
send_unsubscribe_msg_normal_topic(Socket, <<"Q2">>, MsgId), send_unsubscribe_msg_normal_topic(Socket, <<"Q2">>, MsgId),
?assertEqual(<<4, ?SN_UNSUBACK, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_UNSUBACK, MsgId:16>>, receive_response(Socket)),
@ -342,8 +384,11 @@ t_subscribe_case07(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_predefined_topic(Socket, QoS, TopicId1, MsgId), send_subscribe_msg_predefined_topic(Socket, QoS, TopicId1, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, MsgId:16, ?SN_RC_INVALID_TOPIC_ID>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, MsgId:16, ?SN_RC_INVALID_TOPIC_ID>>,
receive_response(Socket)
),
send_unsubscribe_msg_predefined_topic(Socket, TopicId2, MsgId), send_unsubscribe_msg_predefined_topic(Socket, TopicId2, MsgId),
?assertEqual(<<4, ?SN_UNSUBACK, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_UNSUBACK, MsgId:16>>, receive_response(Socket)),
@ -365,8 +410,11 @@ t_subscribe_case08(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_reserved_topic(Socket, QoS, TopicId2, MsgId), send_subscribe_msg_reserved_topic(Socket, QoS, TopicId2, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, ?SN_INVALID_TOPIC_ID:16, MsgId:16, ?SN_RC_INVALID_TOPIC_ID>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
?SN_INVALID_TOPIC_ID:16, MsgId:16, ?SN_RC_INVALID_TOPIC_ID>>,
receive_response(Socket)
),
send_disconnect_msg(Socket, undefined), send_disconnect_msg(Socket, undefined),
?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)), ?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)),
@ -390,15 +438,20 @@ t_publish_negqos_case09(_) ->
send_subscribe_msg_normal_topic(Socket, QoS, Topic, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, Topic, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, MsgId:16, ?SN_RC_ACCEPTED>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
MsgId1 = 3, MsgId1 = 3,
Payload1 = <<20, 21, 22, 23>>, Payload1 = <<20, 21, 22, 23>>,
send_publish_msg_normal_topic(Socket, NegQoS, MsgId1, TopicId1, Payload1), send_publish_msg_normal_topic(Socket, NegQoS, MsgId1, TopicId1, Payload1),
timer:sleep(100), timer:sleep(100),
case ?ENABLE_QOS3 of case ?ENABLE_QOS3 of
true -> true ->
Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>, Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>,
What = receive_response(Socket), What = receive_response(Socket),
?assertEqual(Eexp, What) ?assertEqual(Eexp, What)
end, end,
@ -431,7 +484,9 @@ t_publish_qos0_case01(_) ->
send_publish_msg_normal_topic(Socket, QoS, MsgId1, TopicId1, Payload1), send_publish_msg_normal_topic(Socket, QoS, MsgId1, TopicId1, Payload1),
timer:sleep(100), timer:sleep(100),
Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>, Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>,
What = receive_response(Socket), What = receive_response(Socket),
?assertEqual(Eexp, What), ?assertEqual(Eexp, What),
@ -453,15 +508,20 @@ t_publish_qos0_case02(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_predefined_topic(Socket, QoS, PredefTopicId, MsgId), send_subscribe_msg_predefined_topic(Socket, QoS, PredefTopicId, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, PredefTopicId:16, MsgId:16, ?SN_RC_ACCEPTED>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
PredefTopicId:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
MsgId1 = 3, MsgId1 = 3,
Payload1 = <<20, 21, 22, 23>>, Payload1 = <<20, 21, 22, 23>>,
send_publish_msg_predefined_topic(Socket, QoS, MsgId1, PredefTopicId, Payload1), send_publish_msg_predefined_topic(Socket, QoS, MsgId1, PredefTopicId, Payload1),
timer:sleep(100), timer:sleep(100),
Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_PREDEFINED_TOPIC:2, PredefTopicId:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>, Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_PREDEFINED_TOPIC:2,
PredefTopicId:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>,
What = receive_response(Socket), What = receive_response(Socket),
?assertEqual(Eexp, What), ?assertEqual(Eexp, What),
@ -484,15 +544,20 @@ t_publish_qos0_case3(_) ->
Topic = <<"/a/b/c">>, Topic = <<"/a/b/c">>,
send_subscribe_msg_normal_topic(Socket, QoS, Topic, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, Topic, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId:16, MsgId:16, ?SN_RC_ACCEPTED>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
MsgId1 = 3, MsgId1 = 3,
Payload1 = <<20, 21, 22, 23>>, Payload1 = <<20, 21, 22, 23>>,
send_publish_msg_predefined_topic(Socket, QoS, MsgId1, TopicId, Payload1), send_publish_msg_predefined_topic(Socket, QoS, MsgId1, TopicId, Payload1),
timer:sleep(100), timer:sleep(100),
Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>, Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>,
What = receive_response(Socket), What = receive_response(Socket),
?assertEqual(Eexp, What), ?assertEqual(Eexp, What),
@ -514,8 +579,11 @@ t_publish_qos0_case04(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_normal_topic(Socket, QoS, <<"#">>, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, <<"#">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId0:16, MsgId:16, ?SN_RC_ACCEPTED>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId0:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
MsgId1 = 2, MsgId1 = 2,
Payload1 = <<20, 21, 22, 23>>, Payload1 = <<20, 21, 22, 23>>,
@ -523,7 +591,9 @@ t_publish_qos0_case04(_) ->
send_publish_msg_short_topic(Socket, QoS, MsgId1, Topic, Payload1), send_publish_msg_short_topic(Socket, QoS, MsgId1, Topic, Payload1),
timer:sleep(100), timer:sleep(100),
Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_SHORT_TOPIC:2, Topic/binary, (mid(0)):16, <<20, 21, 22, 23>>/binary>>, Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_SHORT_TOPIC:2,
Topic/binary, (mid(0)):16, <<20, 21, 22, 23>>/binary>>,
What = receive_response(Socket), What = receive_response(Socket),
?assertEqual(Eexp, What), ?assertEqual(Eexp, What),
@ -544,8 +614,11 @@ t_publish_qos0_case05(_) ->
send_connect_msg(Socket, ClientId), send_connect_msg(Socket, ClientId),
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_short_topic(Socket, QoS, <<"/#">>, MsgId), send_subscribe_msg_short_topic(Socket, QoS, <<"/#">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId0:16, MsgId:16, ?SN_RC_ACCEPTED>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId0:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
send_disconnect_msg(Socket, undefined), send_disconnect_msg(Socket, undefined),
?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)), ?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)),
@ -567,15 +640,20 @@ t_publish_qos0_case06(_) ->
Topic = <<"abc">>, Topic = <<"abc">>,
send_subscribe_msg_normal_topic(Socket, QoS, Topic, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, Topic, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, MsgId:16, ?SN_RC_ACCEPTED>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
MsgId1 = 3, MsgId1 = 3,
Payload1 = <<20, 21, 22, 23>>, Payload1 = <<20, 21, 22, 23>>,
send_publish_msg_normal_topic(Socket, QoS, MsgId1, TopicId1, Payload1), send_publish_msg_normal_topic(Socket, QoS, MsgId1, TopicId1, Payload1),
timer:sleep(100), timer:sleep(100),
Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>, Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>,
What = receive_response(Socket), What = receive_response(Socket),
?assertEqual(Eexp, What), ?assertEqual(Eexp, What),
@ -597,16 +675,25 @@ t_publish_qos1_case01(_) ->
send_connect_msg(Socket, ClientId), send_connect_msg(Socket, ClientId),
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_normal_topic(Socket, QoS, Topic, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, Topic, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
?SN_NORMAL_TOPIC:2, TopicId1:16, MsgId:16, ?SN_RC_ACCEPTED>>, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
receive_response(Socket)), TopicId1:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
Payload1 = <<20, 21, 22, 23>>, Payload1 = <<20, 21, 22, 23>>,
send_publish_msg_normal_topic(Socket, QoS, MsgId, TopicId1, Payload1), send_publish_msg_normal_topic(Socket, QoS, MsgId, TopicId1, Payload1),
?assertEqual(<<7, ?SN_PUBACK, TopicId1:16, MsgId:16, ?SN_RC_ACCEPTED>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_PUBACK, TopicId1:16,
MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
timer:sleep(100), timer:sleep(100),
?assertEqual(<<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, MsgId:16, <<20, 21, 22, 23>>/binary>>, receive_response(Socket)), ?assertEqual(<<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, MsgId:16, <<20, 21, 22, 23>>/binary>>,
receive_response(Socket)
),
send_disconnect_msg(Socket, undefined), send_disconnect_msg(Socket, undefined),
gen_udp:close(Socket). gen_udp:close(Socket).
@ -625,12 +712,18 @@ t_publish_qos1_case02(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_predefined_topic(Socket, QoS, PredefTopicId, MsgId), send_subscribe_msg_predefined_topic(Socket, QoS, PredefTopicId, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, PredefTopicId:16, MsgId:16, ?SN_RC_ACCEPTED>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
PredefTopicId:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
Payload1 = <<20, 21, 22, 23>>, Payload1 = <<20, 21, 22, 23>>,
send_publish_msg_predefined_topic(Socket, QoS, MsgId, PredefTopicId, Payload1), send_publish_msg_predefined_topic(Socket, QoS, MsgId, PredefTopicId, Payload1),
?assertEqual(<<7, ?SN_PUBACK, PredefTopicId:16, MsgId:16, ?SN_RC_ACCEPTED>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_PUBACK, PredefTopicId:16,
MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
timer:sleep(100), timer:sleep(100),
send_disconnect_msg(Socket, undefined), send_disconnect_msg(Socket, undefined),
@ -645,7 +738,10 @@ t_publish_qos1_case03(_) ->
send_connect_msg(Socket, ClientId), send_connect_msg(Socket, ClientId),
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_publish_msg_predefined_topic(Socket, QoS, MsgId, tid(5), <<20, 21, 22, 23>>), send_publish_msg_predefined_topic(Socket, QoS, MsgId, tid(5), <<20, 21, 22, 23>>),
?assertEqual(<<7, ?SN_PUBACK, TopicId5:16, MsgId:16, ?SN_RC_INVALID_TOPIC_ID>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_PUBACK, TopicId5:16,
MsgId:16, ?SN_RC_INVALID_TOPIC_ID>>,
receive_response(Socket)
),
send_disconnect_msg(Socket, undefined), send_disconnect_msg(Socket, undefined),
?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)), ?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)),
@ -664,15 +760,20 @@ t_publish_qos1_case04(_) ->
send_connect_msg(Socket, ClientId), send_connect_msg(Socket, ClientId),
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_short_topic(Socket, QoS, <<"ab">>, MsgId), send_subscribe_msg_short_topic(Socket, QoS, <<"ab">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
?SN_NORMAL_TOPIC:2, TopicId0:16, MsgId:16, ?SN_RC_ACCEPTED>>, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
receive_response(Socket)), TopicId0:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
Topic = <<"ab">>, Topic = <<"ab">>,
Payload1 = <<20, 21, 22, 23>>, Payload1 = <<20, 21, 22, 23>>,
send_publish_msg_short_topic(Socket, QoS, MsgId, Topic, Payload1), send_publish_msg_short_topic(Socket, QoS, MsgId, Topic, Payload1),
<<TopicIdShort:16>> = Topic, <<TopicIdShort:16>> = Topic,
?assertEqual(<<7, ?SN_PUBACK, TopicIdShort:16, MsgId:16, ?SN_RC_ACCEPTED>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_PUBACK, TopicIdShort:16,
MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
timer:sleep(100), timer:sleep(100),
send_disconnect_msg(Socket, undefined), send_disconnect_msg(Socket, undefined),
@ -692,13 +793,18 @@ t_publish_qos1_case05(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_normal_topic(Socket, QoS, <<"ab">>, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, <<"ab">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
?SN_NORMAL_TOPIC:2, TopicId1:16, MsgId:16, ?SN_RC_ACCEPTED>>, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
receive_response(Socket)), TopicId1:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
send_publish_msg_short_topic(Socket, QoS, MsgId, <<"/#">>, <<20, 21, 22, 23>>), send_publish_msg_short_topic(Socket, QoS, MsgId, <<"/#">>, <<20, 21, 22, 23>>),
<<TopicIdShort:16>> = <<"/#">>, <<TopicIdShort:16>> = <<"/#">>,
?assertEqual(<<7, ?SN_PUBACK, TopicIdShort:16, MsgId:16, ?SN_RC_NOT_SUPPORTED>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_PUBACK, TopicIdShort:16,
MsgId:16, ?SN_RC_NOT_SUPPORTED>>,
receive_response(Socket)
),
send_disconnect_msg(Socket, undefined), send_disconnect_msg(Socket, undefined),
?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)), ?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)),
@ -724,7 +830,10 @@ t_publish_qos1_case06(_) ->
send_publish_msg_short_topic(Socket, QoS, MsgId, <<"/+">>, <<20, 21, 22, 23>>), send_publish_msg_short_topic(Socket, QoS, MsgId, <<"/+">>, <<20, 21, 22, 23>>),
<<TopicIdShort:16>> = <<"/+">>, <<TopicIdShort:16>> = <<"/+">>,
?assertEqual(<<7, ?SN_PUBACK, TopicIdShort:16, MsgId:16, ?SN_RC_NOT_SUPPORTED>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_PUBACK, TopicIdShort:16,
MsgId:16, ?SN_RC_NOT_SUPPORTED>>,
receive_response(Socket)
),
send_disconnect_msg(Socket, undefined), send_disconnect_msg(Socket, undefined),
?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)), ?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)),
@ -751,7 +860,11 @@ t_publish_qos2_case01(_) ->
send_publish_msg_normal_topic(Socket, QoS, MsgId, TopicId1, Payload1), send_publish_msg_normal_topic(Socket, QoS, MsgId, TopicId1, Payload1),
?assertEqual(<<4, ?SN_PUBREC, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_PUBREC, MsgId:16>>, receive_response(Socket)),
send_pubrel_msg(Socket, MsgId), send_pubrel_msg(Socket, MsgId),
?assertEqual(<<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, 1:16, <<20, 21, 22, 23>>/binary>>, receive_response(Socket)), ?assertEqual(<<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, 1:16, <<20, 21, 22, 23>>/binary>>,
receive_response(Socket)
),
?assertEqual(<<4, ?SN_PUBCOMP, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_PUBCOMP, MsgId:16>>, receive_response(Socket)),
timer:sleep(100), timer:sleep(100),
@ -773,15 +886,21 @@ t_publish_qos2_case02(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_predefined_topic(Socket, QoS, PredefTopicId, MsgId), send_subscribe_msg_predefined_topic(Socket, QoS, PredefTopicId, MsgId),
?assertEqual(<<8, ?SN_SUBACK, ?FNU:1, QoS:2, ?FNU:5, PredefTopicId:16, MsgId:16, ?SN_RC_ACCEPTED>>, ?assertEqual(<<8, ?SN_SUBACK, ?FNU:1, QoS:2, ?FNU:5,
receive_response(Socket)), PredefTopicId:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
Payload1 = <<20, 21, 22, 23>>, Payload1 = <<20, 21, 22, 23>>,
send_publish_msg_predefined_topic(Socket, QoS, MsgId, PredefTopicId, Payload1), send_publish_msg_predefined_topic(Socket, QoS, MsgId, PredefTopicId, Payload1),
?assertEqual(<<4, ?SN_PUBREC, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_PUBREC, MsgId:16>>, receive_response(Socket)),
send_pubrel_msg(Socket, MsgId), send_pubrel_msg(Socket, MsgId),
?assertEqual(<<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_PREDEFINED_TOPIC :2, PredefTopicId:16, 1:16, <<20, 21, 22, 23>>/binary>>, receive_response(Socket)), ?assertEqual(<<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_PREDEFINED_TOPIC:2,
PredefTopicId:16, 1:16, <<20, 21, 22, 23>>/binary>>,
receive_response(Socket)
),
?assertEqual(<<4, ?SN_PUBCOMP, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_PUBCOMP, MsgId:16>>, receive_response(Socket)),
timer:sleep(100), timer:sleep(100),
@ -812,7 +931,11 @@ t_publish_qos2_case03(_) ->
?assertEqual(<<4, ?SN_PUBREC, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_PUBREC, MsgId:16>>, receive_response(Socket)),
send_pubrel_msg(Socket, MsgId), send_pubrel_msg(Socket, MsgId),
?assertEqual(<<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_SHORT_TOPIC :2, <<"/a">>/binary, 1:16, <<20, 21, 22, 23>>/binary>>, receive_response(Socket)), ?assertEqual(<<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_SHORT_TOPIC:2,
"/a", 1:16, <<20, 21, 22, 23>>/binary>>,
receive_response(Socket)
),
?assertEqual(<<4, ?SN_PUBCOMP, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_PUBCOMP, MsgId:16>>, receive_response(Socket)),
timer:sleep(100), timer:sleep(100),
@ -1083,7 +1206,11 @@ t_asleep_test03_to_awake_qos1_dl_msg(_) ->
send_register_msg(Socket, TopicName1, MsgId1), send_register_msg(Socket, TopicName1, MsgId1),
?assertEqual(<<7, ?SN_REGACK, TopicId1:16, MsgId1:16, 0:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId1:16, MsgId1:16, 0:8>>, receive_response(Socket)),
send_subscribe_msg_predefined_topic(Socket, QoS, TopicId1, MsgId), send_subscribe_msg_predefined_topic(Socket, QoS, TopicId1, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, WillBit:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, MsgId:16, ReturnCode>>, receive_response(Socket)), ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
WillBit:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, MsgId:16, ReturnCode>>,
receive_response(Socket)
),
% goto asleep state % goto asleep state
send_disconnect_msg(Socket, 1), send_disconnect_msg(Socket, 1),
@ -1109,7 +1236,10 @@ t_asleep_test03_to_awake_qos1_dl_msg(_) ->
%% the broker should sent dl msgs to the awake client before sending the pingresp %% the broker should sent dl msgs to the awake client before sending the pingresp
UdpData = receive_response(Socket), UdpData = receive_response(Socket),
MsgId_udp = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, ?SN_NORMAL_TOPIC, TopicId1, Payload1}, UdpData), MsgId_udp = check_publish_msg_on_udp(
{Dup, QoS, Retain, WillBit,
CleanSession, ?SN_NORMAL_TOPIC,
TopicId1, Payload1}, UdpData),
send_puback_msg(Socket, TopicId1, MsgId_udp), send_puback_msg(Socket, TopicId1, MsgId_udp),
%% check the pingresp is received at last %% check the pingresp is received at last
@ -1141,8 +1271,11 @@ t_asleep_test04_to_awake_qos1_dl_msg(_) ->
CleanSession = 0, CleanSession = 0,
ReturnCode = 0, ReturnCode = 0,
send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId1), send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId1),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, WillBit:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId0:16, MsgId1:16, ReturnCode>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), WillBit:1,CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId0:16, MsgId1:16, ReturnCode>>,
receive_response(Socket)
),
% goto asleep state % goto asleep state
send_disconnect_msg(Socket, 1), send_disconnect_msg(Socket, 1),
@ -1176,11 +1309,17 @@ t_asleep_test04_to_awake_qos1_dl_msg(_) ->
send_regack_msg(Socket, TopicIdNew, MsgId3), send_regack_msg(Socket, TopicIdNew, MsgId3),
UdpData2 = receive_response(Socket), UdpData2 = receive_response(Socket),
MsgId_udp2 = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, ?SN_NORMAL_TOPIC, TopicIdNew, Payload1}, UdpData2), MsgId_udp2 = check_publish_msg_on_udp(
{Dup, QoS, Retain, WillBit,
CleanSession, ?SN_NORMAL_TOPIC,
TopicIdNew, Payload1}, UdpData2),
send_puback_msg(Socket, TopicIdNew, MsgId_udp2), send_puback_msg(Socket, TopicIdNew, MsgId_udp2),
UdpData3 = receive_response(Socket), UdpData3 = receive_response(Socket),
MsgId_udp3 = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, ?SN_NORMAL_TOPIC, TopicIdNew, Payload2}, UdpData3), MsgId_udp3 = check_publish_msg_on_udp(
{Dup, QoS, Retain, WillBit,
CleanSession, ?SN_NORMAL_TOPIC,
TopicIdNew, Payload2}, UdpData3),
send_puback_msg(Socket, TopicIdNew, MsgId_udp3), send_puback_msg(Socket, TopicIdNew, MsgId_udp3),
?assertEqual(<<2, ?SN_PINGRESP>>, receive_response(Socket)), ?assertEqual(<<2, ?SN_PINGRESP>>, receive_response(Socket)),
@ -1216,8 +1355,11 @@ t_asleep_test05_to_awake_qos1_dl_msg(_) ->
CleanSession = 0, CleanSession = 0,
ReturnCode = 0, ReturnCode = 0,
send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId1), send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId1),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, WillBit:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId0:16, MsgId1:16, ReturnCode>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), WillBit:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId0:16, MsgId1:16, ReturnCode>>,
receive_response(Socket)
),
% goto asleep state % goto asleep state
SleepDuration = 30, SleepDuration = 30,
@ -1250,19 +1392,26 @@ t_asleep_test05_to_awake_qos1_dl_msg(_) ->
send_regack_msg(Socket, TopicIdNew, MsgId_reg), send_regack_msg(Socket, TopicIdNew, MsgId_reg),
UdpData2 = receive_response(Socket), UdpData2 = receive_response(Socket),
MsgId2 = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, ?SN_NORMAL_TOPIC, TopicIdNew, Payload2}, UdpData2), MsgId2 = check_publish_msg_on_udp(
{Dup, QoS, Retain, WillBit,
CleanSession, ?SN_NORMAL_TOPIC,
TopicIdNew, Payload2}, UdpData2),
send_puback_msg(Socket, TopicIdNew, MsgId2), send_puback_msg(Socket, TopicIdNew, MsgId2),
timer:sleep(50), timer:sleep(50),
UdpData3 = wrap_receive_response(Socket), UdpData3 = wrap_receive_response(Socket),
MsgId3 = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, ?SN_NORMAL_TOPIC, TopicIdNew, Payload3}, UdpData3), MsgId3 = check_publish_msg_on_udp(
{Dup, QoS, Retain, WillBit,
CleanSession, ?SN_NORMAL_TOPIC,
TopicIdNew, Payload3}, UdpData3),
send_puback_msg(Socket, TopicIdNew, MsgId3), send_puback_msg(Socket, TopicIdNew, MsgId3),
timer:sleep(50), timer:sleep(50),
case receive_response(Socket) of case receive_response(Socket) of
<<2,23>> -> ok; <<2,23>> -> ok;
UdpData4 -> UdpData4 ->
MsgId4 = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, MsgId4 = check_publish_msg_on_udp(
{Dup, QoS, Retain, WillBit,
CleanSession, ?SN_NORMAL_TOPIC, CleanSession, ?SN_NORMAL_TOPIC,
TopicIdNew, Payload4}, UdpData4), TopicIdNew, Payload4}, UdpData4),
send_puback_msg(Socket, TopicIdNew, MsgId4) send_puback_msg(Socket, TopicIdNew, MsgId4)
@ -1322,7 +1471,10 @@ t_asleep_test06_to_awake_qos2_dl_msg(_) ->
send_pingreq_msg(Socket, ClientId), send_pingreq_msg(Socket, ClientId),
UdpData = wrap_receive_response(Socket), UdpData = wrap_receive_response(Socket),
MsgId_udp = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, ?SN_NORMAL_TOPIC, TopicId_tom, Payload1}, UdpData), MsgId_udp = check_publish_msg_on_udp(
{Dup, QoS, Retain, WillBit,
CleanSession, ?SN_NORMAL_TOPIC,
TopicId_tom, Payload1}, UdpData),
send_pubrec_msg(Socket, MsgId_udp), send_pubrec_msg(Socket, MsgId_udp),
?assertMatch(<<_:8, ?SN_PUBREL:8, _/binary>>, receive_response(Socket)), ?assertMatch(<<_:8, ?SN_PUBREL:8, _/binary>>, receive_response(Socket)),
send_pubcomp_msg(Socket, MsgId_udp), send_pubcomp_msg(Socket, MsgId_udp),
@ -1357,8 +1509,11 @@ t_asleep_test07_to_connected(_) ->
send_register_msg(Socket, TopicName_tom, MsgId1), send_register_msg(Socket, TopicName_tom, MsgId1),
TopicId_tom = check_regack_msg_on_udp(MsgId1, receive_response(Socket)), TopicId_tom = check_regack_msg_on_udp(MsgId1, receive_response(Socket)),
send_subscribe_msg_predefined_topic(Socket, QoS, TopicId_tom, MsgId1), send_subscribe_msg_predefined_topic(Socket, QoS, TopicId_tom, MsgId1),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, WillBit:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId_tom:16, MsgId1:16, ReturnCode>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), WillBit:1,CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId_tom:16, MsgId1:16, ReturnCode>>,
receive_response(Socket)
),
% goto asleep state % goto asleep state
send_disconnect_msg(Socket, SleepDuration), send_disconnect_msg(Socket, SleepDuration),
@ -1436,8 +1591,11 @@ t_asleep_test09_to_awake_again_qos1_dl_msg(_) ->
CleanSession = 0, CleanSession = 0,
ReturnCode = 0, ReturnCode = 0,
send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId1), send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId1),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, WillBit:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId0:16, MsgId1:16, ReturnCode>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), WillBit:1,CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId0:16, MsgId1:16, ReturnCode>>,
receive_response(Socket)
),
% goto asleep state % goto asleep state
SleepDuration = 30, SleepDuration = 30,
send_disconnect_msg(Socket, SleepDuration), send_disconnect_msg(Socket, SleepDuration),
@ -1471,7 +1629,10 @@ t_asleep_test09_to_awake_again_qos1_dl_msg(_) ->
udp_receive_timeout -> udp_receive_timeout ->
ok; ok;
UdpData2 -> UdpData2 ->
MsgId2 = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, ?SN_NORMAL_TOPIC, TopicIdNew, Payload2}, UdpData2), MsgId2 = check_publish_msg_on_udp(
{Dup, QoS, Retain, WillBit,
CleanSession, ?SN_NORMAL_TOPIC,
TopicIdNew, Payload2}, UdpData2),
send_puback_msg(Socket, TopicIdNew, MsgId2) send_puback_msg(Socket, TopicIdNew, MsgId2)
end, end,
timer:sleep(100), timer:sleep(100),
@ -1480,7 +1641,10 @@ t_asleep_test09_to_awake_again_qos1_dl_msg(_) ->
udp_receive_timeout -> udp_receive_timeout ->
ok; ok;
UdpData3 -> UdpData3 ->
MsgId3 = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, ?SN_NORMAL_TOPIC, TopicIdNew, Payload3}, UdpData3), MsgId3 = check_publish_msg_on_udp(
{Dup, QoS, Retain, WillBit,
CleanSession, ?SN_NORMAL_TOPIC,
TopicIdNew, Payload3}, UdpData3),
send_puback_msg(Socket, TopicIdNew, MsgId3) send_puback_msg(Socket, TopicIdNew, MsgId3)
end, end,
timer:sleep(100), timer:sleep(100),
@ -1489,7 +1653,8 @@ t_asleep_test09_to_awake_again_qos1_dl_msg(_) ->
udp_receive_timeout -> udp_receive_timeout ->
ok; ok;
UdpData4 -> UdpData4 ->
MsgId4 = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, MsgId4 = check_publish_msg_on_udp(
{Dup, QoS, Retain, WillBit,
CleanSession, ?SN_NORMAL_TOPIC, CleanSession, ?SN_NORMAL_TOPIC,
TopicIdNew, Payload4}, UdpData4), TopicIdNew, Payload4}, UdpData4),
send_puback_msg(Socket, TopicIdNew, MsgId4) send_puback_msg(Socket, TopicIdNew, MsgId4)
@ -1498,7 +1663,8 @@ t_asleep_test09_to_awake_again_qos1_dl_msg(_) ->
%% send PINGREQ again to enter awake state %% send PINGREQ again to enter awake state
send_pingreq_msg(Socket, ClientId), send_pingreq_msg(Socket, ClientId),
%% will not receive any buffered PUBLISH messages buffered before last awake, only receive PINGRESP here %% will not receive any buffered PUBLISH messages buffered before last
%% awake, only receive PINGRESP here
?assertEqual(<<2, ?SN_PINGRESP>>, receive_response(Socket)), ?assertEqual(<<2, ?SN_PINGRESP>>, receive_response(Socket)),
gen_udp:close(Socket). gen_udp:close(Socket).
@ -1901,8 +2067,12 @@ check_dispatched_message(Dup, QoS, Retain, TopicIdType, TopicId, Payload, Socket
PubMsg = receive_response(Socket), PubMsg = receive_response(Socket),
Length = 7 + byte_size(Payload), Length = 7 + byte_size(Payload),
?LOG("check_dispatched_message ~p~n", [PubMsg]), ?LOG("check_dispatched_message ~p~n", [PubMsg]),
?LOG("expected ~p xx ~p~n", [<<Length, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, ?FNU:2, TopicIdType:2, TopicId:16>>, Payload]), ?LOG("expected ~p xx ~p~n",
<<Length, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, ?FNU:2, TopicIdType:2, TopicId:16, MsgId:16, Payload/binary>> = PubMsg, [<<Length, ?SN_PUBLISH,
Dup:1, QoS:2, Retain:1, ?FNU:2, TopicIdType:2, TopicId:16>>, Payload]),
<<Length, ?SN_PUBLISH,
Dup:1, QoS:2, Retain:1, ?FNU:2, TopicIdType:2,
TopicId:16, MsgId:16, Payload/binary>> = PubMsg,
case QoS of case QoS of
0 -> ok; 0 -> ok;
1 -> send_puback_msg(Socket, TopicId, MsgId); 1 -> send_puback_msg(Socket, TopicId, MsgId);
@ -1914,11 +2084,14 @@ check_dispatched_message(Dup, QoS, Retain, TopicIdType, TopicId, Payload, Socket
get_udp_broadcast_address() -> get_udp_broadcast_address() ->
"255.255.255.255". "255.255.255.255".
check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, TopicType, TopicId, Payload}, UdpData) -> check_publish_msg_on_udp({Dup, QoS, Retain, WillBit,
CleanSession, TopicType, TopicId, Payload}, UdpData) ->
<<HeaderUdp:5/binary, MsgId:16, PayloadIn/binary>> = UdpData, <<HeaderUdp:5/binary, MsgId:16, PayloadIn/binary>> = UdpData,
ct:pal("UdpData: ~p, Payload: ~p, PayloadIn: ~p", [UdpData, Payload, PayloadIn]), ct:pal("UdpData: ~p, Payload: ~p, PayloadIn: ~p", [UdpData, Payload, PayloadIn]),
Size9 = byte_size(Payload) + 7, Size9 = byte_size(Payload) + 7,
Eexp = <<Size9:8, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, WillBit:1, CleanSession:1, TopicType:2, TopicId:16>>, Eexp = <<Size9:8,
?SN_PUBLISH, Dup:1, QoS:2, Retain:1, WillBit:1, CleanSession:1,
TopicType:2, TopicId:16>>,
?assertEqual(Eexp, HeaderUdp), % mqtt-sn header should be same ?assertEqual(Eexp, HeaderUdp), % mqtt-sn header should be same
?assertEqual(Payload, PayloadIn), % payload should be same ?assertEqual(Payload, PayloadIn), % payload should be same
MsgId. MsgId.

66
build
View File

@ -76,7 +76,7 @@ make_relup() {
rm -rf "$tmp_dir" rm -rf "$tmp_dir"
fi fi
releases+=( "$base_vsn" ) releases+=( "$base_vsn" )
done < <(find _upgrade_base -maxdepth 1 -name "*$PROFILE-$SYSTEM*-$ARCH.zip" -type f) done < <(find _upgrade_base -maxdepth 1 -name "*$PROFILE-otp${OTP_VSN}-$SYSTEM*-$ARCH.zip" -type f)
fi fi
if [ ${#releases[@]} -eq 0 ]; then if [ ${#releases[@]} -eq 0 ]; then
log "No upgrade base found, relup ignored" log "No upgrade base found, relup ignored"
@ -120,7 +120,7 @@ make_zip() {
log "ERROR: $tarball is not found" log "ERROR: $tarball is not found"
fi fi
local zipball local zipball
zipball="${pkgpath}/${PROFILE}-${SYSTEM}-${PKG_VSN}-${ARCH}.zip" zipball="${pkgpath}/${PROFILE}-${PKG_VSN}-otp${OTP_VSN}-${SYSTEM}-${ARCH}.zip"
tar zxf "${tarball}" -C "${tard}/emqx" tar zxf "${tarball}" -C "${tard}/emqx"
## try to be portable for zip packages. ## try to be portable for zip packages.
## for DEB and RPM packages the dependencies are resoved by yum and apt ## for DEB and RPM packages the dependencies are resoved by yum and apt
@ -128,6 +128,62 @@ make_zip() {
(cd "${tard}" && zip -qr - emqx) > "${zipball}" (cd "${tard}" && zip -qr - emqx) > "${zipball}"
} }
## This function builds the default docker image based on alpine:3.14 (by default)
make_docker() {
EMQX_BUILDER="${EMQX_BUILDER:-${EMQX_DEFAULT_BUILDER}}"
EMQX_RUNNER="${EMQX_RUNNER:-${EMQX_DEFAULT_RUNNER}}"
set -x
docker build --no-cache --pull \
--build-arg BUILD_FROM="${EMQX_BUILDER}" \
--build-arg RUN_FROM="${EMQX_RUNNER}" \
--build-arg EMQX_NAME="$PROFILE" \
--tag "emqx/$PROFILE:${PKG_VSN}" \
-f "${DOCKERFILE}" .
}
## This function accepts any base docker image,
## a emqx zip-image, and a image tag (for the image to be built),
## to build a docker image which runs EMQ X
##
## Export below variables to quickly build an image
##
## Name Default Example
## ---------------------------------------------------------------------
## EMQX_BASE_IMAGE current os centos:7
## EMQX_ZIP_PACKAGE _packages/<current-zip-target> /tmp/emqx-4.4.0-otp23.3.4.9-3-centos7-amd64.zip
## EMQX_IMAGE_TAG emqx/emqx:<current-vns-rel> emqx/emqx:testing-tag
##
make_docker_testing() {
if [ -z "${EMQX_BASE_IMAGE:-}" ]; then
case "$SYSTEM" in
ubuntu20*)
EMQX_BASE_IMAGE="ubuntu:20.04"
;;
centos8)
EMQX_BASE_IMAGE="centos:8"
;;
*)
echo "Unsupported testing base image for $SYSTEM"
exit 1
;;
esac
fi
EMQX_IMAGE_TAG="${EMQX_IMAGE_TAG:-emqx/$PROFILE:${PKG_VSN}-otp${OTP_VSN}-${SYSTEM}}"
local defaultzip
defaultzip="_packages/${PROFILE}/${PROFILE}-${PKG_VSN}-otp${OTP_VSN}-${SYSTEM}-${ARCH}.zip"
local zip="${EMQX_ZIP_PACKAGE:-$defaultzip}"
if [ ! -f "$zip" ]; then
log "ERROR: $zip not built?"
exit 1
fi
set -x
docker build \
--build-arg BUILD_FROM="${EMQX_BASE_IMAGE}" \
--build-arg EMQX_ZIP_PACKAGE="${zip}" \
--tag "$EMQX_IMAGE_TAG" \
-f "${DOCKERFILE_TESTING}" .
}
log "building artifact=$ARTIFACT for profile=$PROFILE" log "building artifact=$ARTIFACT for profile=$PROFILE"
case "$ARTIFACT" in case "$ARTIFACT" in
@ -148,6 +204,12 @@ case "$ARTIFACT" in
make -C "deploy/packages/${PKGERDIR}" clean make -C "deploy/packages/${PKGERDIR}" clean
EMQX_REL="$(pwd)" EMQX_BUILD="${PROFILE}" SYSTEM="${SYSTEM}" make -C "deploy/packages/${PKGERDIR}" EMQX_REL="$(pwd)" EMQX_BUILD="${PROFILE}" SYSTEM="${SYSTEM}" make -C "deploy/packages/${PKGERDIR}"
;; ;;
docker)
make_docker
;;
docker-testing)
make_docker_testing
;;
*) *)
log "Unknown artifact $ARTIFACT" log "Unknown artifact $ARTIFACT"
exit 1 exit 1

View File

@ -87,7 +87,9 @@ spec:
secret: secret:
secretName: {{ .Values.emqxLicneseSecretName }} secretName: {{ .Values.emqxLicneseSecretName }}
{{- end }} {{- end }}
{{- if eq (.Values.emqxConfig.EMQX_CLUSTER__DISCOVERY | default "k8s") "k8s" }}
serviceAccountName: {{ include "emqx.fullname" . }} serviceAccountName: {{ include "emqx.fullname" . }}
{{- end }}
{{- if .Values.podSecurityContext.enabled }} {{- if .Values.podSecurityContext.enabled }}
securityContext: {{- omit .Values.podSecurityContext "enabled" | toYaml | nindent 8 }} securityContext: {{- omit .Values.podSecurityContext "enabled" | toYaml | nindent 8 }}
{{- end }} {{- end }}
@ -134,17 +136,6 @@ spec:
envFrom: envFrom:
- configMapRef: - configMapRef:
name: {{ include "emqx.fullname" . }}-env name: {{ include "emqx.fullname" . }}-env
env:
- name: EMQX_NAME
value: {{ .Release.Name }}
- name: EMQX_CLUSTER__K8S__APP_NAME
value: {{ .Release.Name }}
- name: EMQX_CLUSTER__DISCOVERY
value: k8s
- name: EMQX_CLUSTER__K8S__SERVICE_NAME
value: {{ include "emqx.fullname" . }}-headless
- name: EMQX_CLUSTER__K8S__NAMESPACE
value: {{ .Release.Namespace }}
resources: resources:
{{ toYaml .Values.resources | indent 12 }} {{ toYaml .Values.resources | indent 12 }}
volumeMounts: volumeMounts:

View File

@ -10,7 +10,7 @@ metadata:
app.kubernetes.io/managed-by: {{ .Release.Service }} app.kubernetes.io/managed-by: {{ .Release.Service }}
data: data:
{{- range $index, $value := .Values.emqxConfig}} {{- range $index, $value := .Values.emqxConfig}}
{{$index}}: "{{ $value }}" {{$index}}: "{{ tpl (printf "%v" $value) $ }}"
{{- end}} {{- end}}
--- ---

View File

@ -1,3 +1,4 @@
{{- if eq (.Values.emqxConfig.EMQX_CLUSTER__DISCOVERY | default "k8s") "k8s" }}
apiVersion: v1 apiVersion: v1
kind: ServiceAccount kind: ServiceAccount
metadata: metadata:
@ -40,3 +41,4 @@ roleRef:
kind: Role kind: Role
name: {{ include "emqx.fullname" . }} name: {{ include "emqx.fullname" . }}
apiGroup: rbac.authorization.k8s.io apiGroup: rbac.authorization.k8s.io
{{- end }}

View File

@ -50,7 +50,20 @@ initContainers: {}
## EMQX configuration item, see the documentation (https://hub.docker.com/r/emqx/emqx) ## EMQX configuration item, see the documentation (https://hub.docker.com/r/emqx/emqx)
emqxConfig: emqxConfig:
EMQX_NAME: "{{ .Release.Name }}"
## Cluster discovery by dns
# EMQX_CLUSTER__DISCOVERY: "dns"
# EMQX_CLUSTER__DNS__NAME: "{{ .Release.Name }}-headless.{{ .Release.Namespace }}.svc.cluster.local"
# EMQX_CLUSTER__DNS__APP: "{{ .Release.Name }}"
# EMQX_CLUSTER__DNS__TYPE: "srv"
## Cluster discovery by k8s
EMQX_CLUSTER__DISCOVERY: "k8s"
EMQX_CLUSTER__K8S__APP_NAME: "{{ .Release.Name }}"
EMQX_CLUSTER__K8S__APISERVER: "https://kubernetes.default.svc:443" EMQX_CLUSTER__K8S__APISERVER: "https://kubernetes.default.svc:443"
EMQX_CLUSTER__K8S__SERVICE_NAME: "{{ .Release.Name }}-headless"
EMQX_CLUSTER__K8S__NAMESPACE: "{{ .Release.Namespace }}"
## The address type is used to extract host from k8s service. ## The address type is used to extract host from k8s service.
## Value: ip | dns | hostname ## Value: ip | dns | hostname
## NoteHostname is only supported after v4.0-rc.2 ## NoteHostname is only supported after v4.0-rc.2
@ -94,6 +107,8 @@ emqxLoadedPlugins: >
emqxLoadedModules: > emqxLoadedModules: >
{emqx_mod_acl_internal, true}. {emqx_mod_acl_internal, true}.
{emqx_mod_presence, true}. {emqx_mod_presence, true}.
{emqx_mod_trace, false}.
{emqx_mod_st_statistics, false}.
{emqx_mod_delayed, false}. {emqx_mod_delayed, false}.
{emqx_mod_rewrite, false}. {emqx_mod_rewrite, false}.
{emqx_mod_subscription, false}. {emqx_mod_subscription, false}.

View File

@ -1,10 +1,7 @@
ARG BUILD_FROM=emqx/build-env:erl23.2.7.2-emqx-3-alpine ARG BUILD_FROM=ghcr.io/emqx/emqx-builder/4.4-2:23.3.4.9-3-alpine3.14
ARG RUN_FROM=alpine:3.12 ARG RUN_FROM=alpine:3.14
FROM ${BUILD_FROM} AS builder FROM ${BUILD_FROM} AS builder
ARG QEMU_ARCH=x86_64
COPY tmp/qemu-$QEMU_ARCH-stati* /usr/bin/
RUN apk add --no-cache \ RUN apk add --no-cache \
git \ git \
curl \ curl \
@ -32,21 +29,9 @@ RUN cd /emqx \
FROM $RUN_FROM FROM $RUN_FROM
# Basic build-time metadata as defined at http://label-schema.org
LABEL org.label-schema.docker.dockerfile="Dockerfile" \
org.label-schema.license="GNU" \
org.label-schema.name="emqx" \
org.label-schema.version=${PKG_VSN} \
org.label-schema.description="EMQ (Erlang MQTT Broker) is a distributed, massively scalable, highly extensible MQTT messaging broker written in Erlang/OTP." \
org.label-schema.url="https://emqx.io" \
org.label-schema.vcs-type="Git" \
org.label-schema.vcs-url="https://github.com/emqx/emqx" \
maintainer="EMQ X Team <support@emqx.io>"
ARG QEMU_ARCH=x86_64
ARG EMQX_NAME=emqx ARG EMQX_NAME=emqx
COPY deploy/docker/docker-entrypoint.sh tmp/qemu-$QEMU_ARCH-stati* /usr/bin/ COPY deploy/docker/docker-entrypoint.sh /usr/bin/
COPY --from=builder /emqx/_build/$EMQX_NAME/rel/emqx /opt/emqx COPY --from=builder /emqx/_build/$EMQX_NAME/rel/emqx /opt/emqx
RUN ln -s /opt/emqx/bin/* /usr/local/bin/ RUN ln -s /opt/emqx/bin/* /usr/local/bin/

View File

@ -0,0 +1,43 @@
ARG BUILD_FROM
FROM ${BUILD_FROM}
## all we need is the unzip command
RUN if command -v yum; then yum update -y && yum install -y unzip; fi
RUN if command -v apt-get; then apt-get update -y && apt-get install unzip; fi
ARG EMQX_ZIP_PACKAGE
COPY ${EMQX_ZIP_PACKAGE} /opt/emqx.zip
RUN unzip -q /opt/emqx.zip -d /opt/ && rm /opt/emqx.zip
COPY deploy/docker/docker-entrypoint.sh /usr/bin/
RUN ln -s /opt/emqx/bin/* /usr/local/bin/
WORKDIR /opt/emqx
RUN adduser -u 1000 emqx
RUN echo "emqx ALL=(ALL) NOPASSWD: ALL" > /etc/sudoers
RUN chgrp -Rf emqx /opt/emqx && chmod -Rf g+w /opt/emqx \
&& chown -Rf emqx /opt/emqx
USER emqx
VOLUME ["/opt/emqx/log", "/opt/emqx/data", "/opt/emqx/etc"]
# emqx will occupy these port:
# - 1883 port for MQTT
# - 8081 for mgmt API
# - 8083 for WebSocket/HTTP
# - 8084 for WSS/HTTPS
# - 8883 port for MQTT(SSL)
# - 11883 port for internal MQTT/TCP
# - 18083 for dashboard
# - 4369 epmd (Erlang-distrbution port mapper daemon) listener (deprecated)
# - 4370 default Erlang distrbution port
# - 5369 for gen_rpc port mapping
# - 6369 6370 for distributed node
EXPOSE 1883 8081 8083 8084 8883 11883 18083 4369 4370 5369 6369 6370
ENTRYPOINT ["/usr/bin/docker-entrypoint.sh"]
CMD ["/opt/emqx/bin/emqx", "foreground"]

View File

@ -28,10 +28,18 @@ if [[ -z "$EMQX_NAME" ]]; then
fi fi
if [[ -z "$EMQX_HOST" ]]; then if [[ -z "$EMQX_HOST" ]]; then
if [[ "$EMQX_CLUSTER__K8S__ADDRESS_TYPE" == "dns" ]] && [[ -n "$EMQX_CLUSTER__K8S__NAMESPACE" ]]; then if [[ "$EMQX_CLUSTER__DISCOVERY" == "dns" ]] && \
[[ "$EMQX_CLUSTER__DNS__TYPE" == "srv" ]] && \
grep -q "$(hostname).$EMQX_CLUSTER__DNS__NAME" /etc/hosts; then
EMQX_HOST="$(hostname).$EMQX_CLUSTER__DNS__NAME"
elif [[ "$EMQX_CLUSTER__DISCOVERY" == "k8s" ]] && \
[[ "$EMQX_CLUSTER__K8S__ADDRESS_TYPE" == "dns" ]] && \
[[ -n "$EMQX_CLUSTER__K8S__NAMESPACE" ]]; then
EMQX_CLUSTER__K8S__SUFFIX=${EMQX_CLUSTER__K8S__SUFFIX:-"pod.cluster.local"} EMQX_CLUSTER__K8S__SUFFIX=${EMQX_CLUSTER__K8S__SUFFIX:-"pod.cluster.local"}
EMQX_HOST="${LOCAL_IP//./-}.$EMQX_CLUSTER__K8S__NAMESPACE.$EMQX_CLUSTER__K8S__SUFFIX" EMQX_HOST="${LOCAL_IP//./-}.$EMQX_CLUSTER__K8S__NAMESPACE.$EMQX_CLUSTER__K8S__SUFFIX"
elif [[ "$EMQX_CLUSTER__K8S__ADDRESS_TYPE" == 'hostname' ]] && [[ -n "$EMQX_CLUSTER__K8S__NAMESPACE" ]]; then elif [[ "$EMQX_CLUSTER__DISCOVERY" == "k8s" ]] && \
[[ "$EMQX_CLUSTER__K8S__ADDRESS_TYPE" == 'hostname' ]] && \
[[ -n "$EMQX_CLUSTER__K8S__NAMESPACE" ]]; then
EMQX_CLUSTER__K8S__SUFFIX=${EMQX_CLUSTER__K8S__SUFFIX:-'svc.cluster.local'} EMQX_CLUSTER__K8S__SUFFIX=${EMQX_CLUSTER__K8S__SUFFIX:-'svc.cluster.local'}
EMQX_HOST=$(grep -h "^$LOCAL_IP" /etc/hosts | grep -o "$(hostname).*.$EMQX_CLUSTER__K8S__NAMESPACE.$EMQX_CLUSTER__K8S__SUFFIX") EMQX_HOST=$(grep -h "^$LOCAL_IP" /etc/hosts | grep -o "$(hostname).*.$EMQX_CLUSTER__K8S__NAMESPACE.$EMQX_CLUSTER__K8S__SUFFIX")
else else

View File

@ -8,7 +8,7 @@ EMQX_NAME=$(subst -pkg,,$(EMQX_BUILD))
TAR_PKG := $(EMQX_REL)/_build/$(EMQX_BUILD)/rel/emqx/emqx-$(PKG_VSN).tar.gz TAR_PKG := $(EMQX_REL)/_build/$(EMQX_BUILD)/rel/emqx/emqx-$(PKG_VSN).tar.gz
SOURCE_PKG := $(EMQX_NAME)_$(PKG_VSN)_$(shell dpkg --print-architecture) SOURCE_PKG := $(EMQX_NAME)_$(PKG_VSN)_$(shell dpkg --print-architecture)
TARGET_PKG := $(EMQX_NAME)-$(SYSTEM)-$(PKG_VSN)-$(ARCH) TARGET_PKG := $(EMQX_NAME)-$(PKG_VSN)-otp$(OTP_VSN)-$(SYSTEM)-$(ARCH)
.PHONY: all .PHONY: all
all: | $(BUILT) all: | $(BUILT)

View File

@ -4,7 +4,7 @@ Priority: optional
Maintainer: emqx <contact@emqx.io> Maintainer: emqx <contact@emqx.io>
Build-Depends: debhelper (>=9) Build-Depends: debhelper (>=9)
Standards-Version: 3.9.6 Standards-Version: 3.9.6
Homepage: https://www.emqx.io Homepage: https://www.emqx.com
Package: emqx Package: emqx
Architecture: any Architecture: any

View File

@ -5,8 +5,9 @@ BUILT := $(SRCDIR)/BUILT
dash := - dash := -
none := none :=
space := $(none) $(none) space := $(none) $(none)
RPM_VSN ?= $(shell echo $(PKG_VSN) | grep -oE "[0-9]+\.[0-9]+(\.[0-9]+)?") ## RPM does not allow '-' in version nubmer and release string, replace with '_'
RPM_REL ?= $(shell echo $(PKG_VSN) | grep -oE "(alpha|beta|rc)\.[0-9]") RPM_VSN := $(subst -,_,$(PKG_VSN))
RPM_REL := otp$(subst -,_,$(OTP_VSN))
ARCH ?= amd64 ARCH ?= amd64
ifeq ($(ARCH),mips64) ifeq ($(ARCH),mips64)
@ -16,12 +17,8 @@ endif
EMQX_NAME=$(subst -pkg,,$(EMQX_BUILD)) EMQX_NAME=$(subst -pkg,,$(EMQX_BUILD))
TAR_PKG := $(EMQX_REL)/_build/$(EMQX_BUILD)/rel/emqx/emqx-$(PKG_VSN).tar.gz TAR_PKG := $(EMQX_REL)/_build/$(EMQX_BUILD)/rel/emqx/emqx-$(PKG_VSN).tar.gz
TARGET_PKG := $(EMQX_NAME)-$(SYSTEM)-$(PKG_VSN)-$(ARCH) TARGET_PKG := $(EMQX_NAME)-$(PKG_VSN)-otp$(OTP_VSN)-$(SYSTEM)-$(ARCH)
ifeq ($(RPM_REL),) SOURCE_PKG := emqx-$(RPM_VSN)-$(RPM_REL).$(shell uname -m)
# no tail
RPM_REL := 1
endif
SOURCE_PKG := emqx-$(SYSTEM)-$(RPM_VSN)-$(RPM_REL).$(shell uname -m)
SYSTEMD := $(shell if command -v systemctl >/dev/null 2>&1; then echo yes; fi) SYSTEMD := $(shell if command -v systemctl >/dev/null 2>&1; then echo yes; fi)
# Not $(PWD) as it does not work for make -C # Not $(PWD) as it does not work for make -C
@ -47,7 +44,6 @@ all: | $(BUILT)
--define "_service_dst $(SERVICE_DST)" \ --define "_service_dst $(SERVICE_DST)" \
--define "_post_addition $(POST_ADDITION)" \ --define "_post_addition $(POST_ADDITION)" \
--define "_preun_addition $(PREUN_ADDITION)" \ --define "_preun_addition $(PREUN_ADDITION)" \
--define "_ostype -$(SYSTEM)" \
--define "_sharedstatedir /var/lib" \ --define "_sharedstatedir /var/lib" \
emqx.spec emqx.spec
mkdir -p $(EMQX_REL)/_packages/$(EMQX_NAME) mkdir -p $(EMQX_REL)/_packages/$(EMQX_NAME)

View File

@ -5,7 +5,7 @@
%define _log_dir %{_var}/log/%{_name} %define _log_dir %{_var}/log/%{_name}
%define _lib_home /usr/lib/%{_name} %define _lib_home /usr/lib/%{_name}
%define _var_home %{_sharedstatedir}/%{_name} %define _var_home %{_sharedstatedir}/%{_name}
%define _build_name_fmt %{_arch}/%{_name}%{?_ostype}-%{_version}-%{_release}.%{_arch}.rpm %define _build_name_fmt %{_arch}/%{_name}-%{_version}-%{_release}.%{_arch}.rpm
%define _build_id_links none %define _build_id_links none
Name: %{_package_name} Name: %{_package_name}

188
docker.mk
View File

@ -1,188 +0,0 @@
#!/usr/bin/make -f
# -*- makefile -*-
## default globals.
## when built with `make docker` command the default profile is either emqx or emqx-ee (for enterprise)
## or the TARGET varialbe can be set beforehand to force a different name
TARGET ?= emqx/$(PROFILE)
QEMU_ARCH ?= x86_64
ARCH ?= amd64
QEMU_VERSION ?= v5.0.0-2
OS ?= alpine
export PKG_VSN ?= $(shell $(CURDIR)/pkg-vsn.sh)
ifeq ($(findstring emqx-ee, $(TARGET)), emqx-ee)
ARCH_LIST := amd64 arm64v8 arm32v7
EMQX_NAME := emqx-ee
else ifeq ($(findstring emqx-edge, $(TARGET)), emqx-edge)
ARCH_LIST := amd64 arm64v8 arm32v7 i386 s390x
EMQX_NAME := emqx-edge
else
ARCH_LIST := amd64 arm64v8 arm32v7 i386 s390x
EMQX_NAME := emqx
endif
.PHONY: docker
docker: docker-build docker-tag docker-save
.PHONY: docker-prepare
docker-prepare:
## Prepare the machine before any code installation scripts
# @echo "PREPARE: Setting up dependencies."
# @apt update -y
# @apt install --only-upgrade docker-ce -y
## Update docker configuration to enable docker manifest command
@echo "PREPARE: Updating docker configuration"
@mkdir -p $$HOME/.docker
# enable experimental to use docker manifest command
@echo '{ "experimental": "enabled" }' | tee $$HOME/.docker/config.json
# enable experimental
@echo '{ "experimental": true, "storage-driver": "overlay2", "max-concurrent-downloads": 50, "max-concurrent-uploads": 50 }' | tee /etc/docker/daemon.json
@service docker restart
.PHONY: docker-build
docker-build:
## Build Docker image
@echo "DOCKER BUILD: Build Docker image."
@echo "DOCKER BUILD: build version -> $(PKG_VSN)."
@echo "DOCKER BUILD: arch - $(ARCH)."
@echo "DOCKER BUILD: qemu arch - $(QEMU_ARCH)."
@echo "DOCKER BUILD: docker repo - $(TARGET) "
@echo "DOCKER BUILD: emqx name - $(EMQX_NAME)."
## Prepare qemu to build images other then x86_64 on travis
@echo "PREPARE: Qemu" \
&& docker run --rm --privileged multiarch/qemu-user-static:register --reset
@mkdir -p tmp \
&& cd tmp \
&& curl -L -o qemu-$(QEMU_ARCH)-static.tar.gz https://github.com/multiarch/qemu-user-static/releases/download/$(QEMU_VERSION)/qemu-$(QEMU_ARCH)-static.tar.gz \
&& tar xzf qemu-$(QEMU_ARCH)-static.tar.gz \
&& cd -
@docker build --no-cache \
--build-arg PKG_VSN=$(PKG_VSN) \
--build-arg BUILD_FROM=emqx/build-env:erl23.2.7.2-emqx-3-alpine \
--build-arg RUN_FROM=$(ARCH)/alpine:3.12 \
--build-arg EMQX_NAME=$(EMQX_NAME) \
--build-arg QEMU_ARCH=$(QEMU_ARCH) \
--tag $(TARGET):build-$(OS)-$(ARCH) \
-f deploy/docker/Dockerfile .
.PHONY: docker-tag
docker-tag:
@echo "DOCKER TAG: Tag Docker image."
@for arch in $(ARCH_LIST); do \
if [ -n "$$(docker images -q $(TARGET):build-$(OS)-$${arch})" ]; then \
docker tag $(TARGET):build-$(OS)-$${arch} $(TARGET):$(PKG_VSN)-$(OS)-$${arch}; \
echo "DOCKER TAG: $(TARGET):$(PKG_VSN)-$(OS)-$${arch}"; \
if [ $${arch} = amd64 ]; then \
docker tag $(TARGET):$(PKG_VSN)-$(OS)-amd64 $(TARGET):$(PKG_VSN); \
echo "DOCKER TAG: $(TARGET):$(PKG_VSN)"; \
fi; \
fi; \
done
.PHONY: docker-save
docker-save:
@echo "DOCKER SAVE: Save Docker image."
@mkdir -p _packages/$(EMQX_NAME)
@if [ -n "$$(docker images -q $(TARGET):$(PKG_VSN))" ]; then \
docker save $(TARGET):$(PKG_VSN) > $(EMQX_NAME)-docker-$(PKG_VSN); \
zip -r -m $(EMQX_NAME)-docker-$(PKG_VSN).zip $(EMQX_NAME)-docker-$(PKG_VSN); \
mv ./$(EMQX_NAME)-docker-$(PKG_VSN).zip _packages/$(EMQX_NAME)/$(EMQX_NAME)-docker-$(PKG_VSN).zip; \
fi
@for arch in $(ARCH_LIST); do \
if [ -n "$$(docker images -q $(TARGET):$(PKG_VSN)-$(OS)-$${arch})" ]; then \
docker save $(TARGET):$(PKG_VSN)-$(OS)-$${arch} > $(EMQX_NAME)-docker-$(PKG_VSN)-$(OS)-$${arch}; \
zip -r -m $(EMQX_NAME)-docker-$(PKG_VSN)-$(OS)-$${arch}.zip $(EMQX_NAME)-docker-$(PKG_VSN)-$(OS)-$${arch}; \
mv ./$(EMQX_NAME)-docker-$(PKG_VSN)-$(OS)-$${arch}.zip _packages/$(EMQX_NAME)/$(EMQX_NAME)-docker-$(PKG_VSN)-$(OS)-$${arch}.zip; \
fi; \
done
.PHONY: docker-push
docker-push:
@echo "DOCKER PUSH: Push Docker image.";
@echo "DOCKER PUSH: pushing - $(TARGET):$(PKG_VSN).";
@if [ -n "$$(docker images -q $(TARGET):$(PKG_VSN))" ]; then \
docker push $(TARGET):$(PKG_VSN); \
docker tag $(TARGET):$(PKG_VSN) $(TARGET):latest; \
docker push $(TARGET):latest; \
fi;
@for arch in $(ARCH_LIST); do \
if [ -n "$$(docker images -q $(TARGET):$(PKG_VSN)-$(OS)-$${arch})" ]; then \
docker push $(TARGET):$(PKG_VSN)-$(OS)-$${arch}; \
fi; \
done
.PHONY: docker-manifest-list
docker-manifest-list:
version="docker manifest create --amend $(TARGET):$(PKG_VSN)"; \
latest="docker manifest create --amend $(TARGET):latest"; \
for arch in $(ARCH_LIST); do \
if [ -n "$$(docker images -q $(TARGET):$(PKG_VSN)-$(OS)-$${arch})" ];then \
version="$${version} $(TARGET):$(PKG_VSN)-$(OS)-$${arch} "; \
latest="$${latest} $(TARGET):$(PKG_VSN)-$(OS)-$${arch} "; \
fi; \
done; \
eval $$version; \
eval $$latest;
for arch in $(ARCH_LIST); do \
case $${arch} in \
"amd64") \
if [ -n "$$(docker images -q $(TARGET):$(PKG_VSN)-$(OS)-$${arch})" ]; then \
docker manifest annotate $(TARGET):$(PKG_VSN) $(TARGET):$(PKG_VSN)-$(OS)-amd64 --os=linux --arch=amd64; \
docker manifest annotate $(TARGET):latest $(TARGET):$(PKG_VSN)-$(OS)-amd64 --os=linux --arch=amd64; \
fi; \
;; \
"arm64v8") \
if [ -n "$$(docker images -q $(TARGET):$(PKG_VSN)-$(OS)-$${arch})" ]; then \
docker manifest annotate $(TARGET):$(PKG_VSN) $(TARGET):$(PKG_VSN)-$(OS)-arm64v8 --os=linux --arch=arm64 --variant=v8; \
docker manifest annotate $(TARGET):latest $(TARGET):$(PKG_VSN)-$(OS)-arm64v8 --os=linux --arch=arm64 --variant=v8; \
fi; \
;; \
"arm32v7") \
if [ -n "$$(docker images -q $(TARGET):$(PKG_VSN)-$(OS)-$${arch})" ]; then \
docker manifest annotate $(TARGET):$(PKG_VSN) $(TARGET):$(PKG_VSN)-$(OS)-arm32v7 --os=linux --arch=arm --variant=v7; \
docker manifest annotate $(TARGET):latest $(TARGET):$(PKG_VSN)-$(OS)-arm32v7 --os=linux --arch=arm --variant=v7; \
fi; \
;; \
"i386") \
if [ -n "$$(docker images -q $(TARGET):$(PKG_VSN)-$(OS)-$${arch})" ]; then \
docker manifest annotate $(TARGET):$(PKG_VSN) $(TARGET):$(PKG_VSN)-$(OS)-i386 --os=linux --arch=386; \
docker manifest annotate $(TARGET):latest $(TARGET):$(PKG_VSN)-$(OS)-i386 --os=linux --arch=386; \
fi; \
;; \
"s390x") \
if [ -n "$$(docker images -q $(TARGET):$(PKG_VSN)-$(OS)-$${arch})" ]; then \
docker manifest annotate $(TARGET):$(PKG_VSN) $(TARGET):$(PKG_VSN)-$(OS)-s390x --os=linux --arch=s390x; \
docker manifest annotate $(TARGET):latest $(TARGET):$(PKG_VSN)-$(OS)-s390x --os=linux --arch=s390x; \
fi; \
;; \
esac; \
done;
docker manifest inspect $(TARGET):$(PKG_VSN)
docker manifest push $(TARGET):$(PKG_VSN);
docker manifest inspect $(TARGET):latest
docker manifest push $(TARGET):latest;
.PHONY: docker-clean
docker-clean:
@echo "DOCKER CLEAN: Clean Docker image."
@if [ -n "$$(docker images -q $(TARGET):$(PKG_VSN))" ]; then docker rmi -f $$(docker images -q $(TARGET):$(PKG_VSN)); fi
@for arch in $(ARCH_LIST); do \
if [ -n "$$(docker images -q $(TARGET):$(PKG_VSN)-$(OS)-$${arch})" ]; then \
docker rmi -f $$(docker images -q $(TARGET):$(PKG_VSN)-$(OS)-$${arch}); \
fi \
done

View File

@ -101,6 +101,11 @@ cluster.autoclean = 5m
## Value: String ## Value: String
## cluster.dns.app = emqx ## cluster.dns.app = emqx
## Type of dns record.
##
## Value: Value: a | srv
## cluster.dns.type = a
##-------------------------------------------------------------------- ##--------------------------------------------------------------------
## Cluster using etcd ## Cluster using etcd
@ -354,7 +359,7 @@ rpc.port_discovery = stateless
## ##
## Value: Interger [0-256] ## Value: Interger [0-256]
## Default = 1 ## Default = 1
#rpc.tcp_client_num = 1 #rpc.tcp_client_num = 0
## RCP Client connect timeout. ## RCP Client connect timeout.
## ##
@ -2214,6 +2219,36 @@ module.presence.qos = 1
## module.rewrite.pub.rule.1 = x/# ^x/y/(.+)$ z/y/$1 ## module.rewrite.pub.rule.1 = x/# ^x/y/(.+)$ z/y/$1
## module.rewrite.sub.rule.1 = y/+/z/# ^y/(.+)/z/(.+)$ y/z/$2 ## module.rewrite.sub.rule.1 = y/+/z/# ^y/(.+)/z/(.+)$ y/z/$2
##--------------------------------------------------------------------
## Slow Subscribers Statistics Module
## the expire time of the record which in topk
##
## Value: 5 minutes
#module.slow_subs.expire_interval = 5m
## maximum number of Top-K record
##
## Value: 10
#module.slow_subs.top_k_num = 10
## enable notification
## publish topk list to $SYS/brokers/${node}/slow_subs per notice_interval
## publish is disabled if set to 0s.
##
## Defaut: 0s
#module.slow_subs.notice_interval = 0s
## QoS of notification message in notice topic
##
## Defaut: 0
#module.slow_subs.notice_qos = 0
## Maximum information number in one notification
##
## Default: 100
#module.slow_subs.notice_batch_size = 100
## CONFIG_SECTION_END=modules ================================================== ## CONFIG_SECTION_END=modules ==================================================
##------------------------------------------------------------------- ##-------------------------------------------------------------------

View File

@ -542,4 +542,22 @@
-define(SHARE(Group, Topic), emqx_topic:join([<<?SHARE>>, Group, Topic])). -define(SHARE(Group, Topic), emqx_topic:join([<<?SHARE>>, Group, Topic])).
-define(IS_SHARE(Topic), case Topic of <<?SHARE, _/binary>> -> true; _ -> false end). -define(IS_SHARE(Topic), case Topic of <<?SHARE, _/binary>> -> true; _ -> false end).
-define(TYPE_NAMES, {
'CONNECT'
, 'CONNACK'
, 'PUBLISH'
, 'PUBACK'
, 'PUBREC'
, 'PUBREL'
, 'PUBCOMP'
, 'SUBSCRIBE'
, 'SUBACK'
, 'UNSUBSCRIBE'
, 'UNSUBACK'
, 'PINGREQ'
, 'PINGRESP'
, 'DISCONNECT'
, 'AUTH'
}).
-endif. -endif.

View File

@ -29,7 +29,7 @@
-ifndef(EMQX_ENTERPRISE). -ifndef(EMQX_ENTERPRISE).
-define(EMQX_RELEASE, {opensource, "4.3.10"}). -define(EMQX_RELEASE, {opensource, "4.4-alpha.2"}).
-else. -else.

View File

@ -1,6 +1,6 @@
{application, emqx_dashboard, {application, emqx_dashboard,
[{description, "EMQ X Web Dashboard"}, [{description, "EMQ X Web Dashboard"},
{vsn, "4.3.7"}, % strict semver, bump manually! {vsn, "4.4.0"}, % strict semver, bump manually!
{modules, []}, {modules, []},
{registered, [emqx_dashboard_sup]}, {registered, [emqx_dashboard_sup]},
{applications, [kernel,stdlib,mnesia,minirest]}, {applications, [kernel,stdlib,mnesia,minirest]},

View File

@ -41,18 +41,18 @@
start_listeners() -> start_listeners() ->
lists:foreach(fun(Listener) -> start_listener(Listener) end, listeners()). lists:foreach(fun(Listener) -> start_listener(Listener) end, listeners()).
%% Start HTTP Listener
start_listener({Proto, Port, Options}) when Proto == http ->
Dispatch = [{"/", cowboy_static, {priv_file, emqx_dashboard, "www/index.html"}},
{"/static/[...]", cowboy_static, {priv_dir, emqx_dashboard, "www/static"}},
{"/api/v4/[...]", minirest, http_handlers()}],
minirest:start_http(listener_name(Proto), ranch_opts(Port, Options), Dispatch);
start_listener({Proto, Port, Options}) when Proto == https -> %% Start HTTP(S) Listener
start_listener({Proto, Port, Options}) ->
Dispatch = [{"/", cowboy_static, {priv_file, emqx_dashboard, "www/index.html"}}, Dispatch = [{"/", cowboy_static, {priv_file, emqx_dashboard, "www/index.html"}},
{"/static/[...]", cowboy_static, {priv_dir, emqx_dashboard, "www/static"}}, {"/static/[...]", cowboy_static, {priv_dir, emqx_dashboard, "www/static"}},
{"/api/v4/[...]", minirest, http_handlers()}], {"/api/v4/[...]", minirest, http_handlers()}],
minirest:start_https(listener_name(Proto), ranch_opts(Port, Options), Dispatch). Server = listener_name(Proto),
RanchOpts = ranch_opts(Port, Options),
case Proto of
http -> minirest:start_http(Server, RanchOpts, Dispatch);
https -> minirest:start_https(Server, RanchOpts, Dispatch)
end.
ranch_opts(Port, Options0) -> ranch_opts(Port, Options0) ->
NumAcceptors = get_value(num_acceptors, Options0, 4), NumAcceptors = get_value(num_acceptors, Options0, 4),
@ -89,7 +89,7 @@ listener_name(Proto) ->
http_handlers() -> http_handlers() ->
Plugins = lists:map(fun(Plugin) -> Plugin#plugin.name end, emqx_plugins:list()), Plugins = lists:map(fun(Plugin) -> Plugin#plugin.name end, emqx_plugins:list()),
[{"/api/v4/", [{"/api/v4/",
minirest:handler(#{apps => Plugins ++ [emqx_modules], minirest:handler(#{apps => Plugins ++ [emqx_modules, emqx_plugin_libs],
filter => fun ?MODULE:filter/1}), filter => fun ?MODULE:filter/1}),
[{authorization, fun ?MODULE:is_authorized/1}]}]. [{authorization, fun ?MODULE:is_authorized/1}]}].
@ -116,6 +116,7 @@ is_authorized(_Path, Req) ->
_ -> false _ -> false
end. end.
filter(#{app := emqx_plugin_libs}) -> true;
filter(#{app := emqx_modules}) -> true; filter(#{app := emqx_modules}) -> true;
filter(#{app := App}) -> filter(#{app := App}) ->
case emqx_plugins:find_plugin(App) of case emqx_plugins:find_plugin(App) of

View File

@ -54,6 +54,7 @@ groups() ->
]. ].
init_per_suite(Config) -> init_per_suite(Config) ->
application:load(emqx_plugin_libs),
emqx_ct_helpers:start_apps([emqx_modules, emqx_management, emqx_dashboard]), emqx_ct_helpers:start_apps([emqx_modules, emqx_management, emqx_dashboard]),
Config. Config.
@ -165,4 +166,3 @@ api_path(Path) ->
json(Data) -> json(Data) ->
{ok, Jsx} = emqx_json:safe_decode(Data, [return_maps]), Jsx. {ok, Jsx} = emqx_json:safe_decode(Data, [return_maps]), Jsx.

View File

@ -0,0 +1,49 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_mod_slow_subs).
-behaviour(emqx_gen_mod).
-include_lib("include/emqx.hrl").
-include_lib("include/logger.hrl").
-logger_header("[SLOW Subs]").
%% emqx_gen_mod callbacks
-export([ load/1
, unload/1
, description/0
]).
-define(LIB, emqx_slow_subs).
%%--------------------------------------------------------------------
%% Load/Unload
%%--------------------------------------------------------------------
-spec(load(list()) -> ok).
load(Env) ->
emqx_mod_sup:start_child(?LIB, worker, [Env]),
ok.
-spec(unload(list()) -> ok).
unload(_Env) ->
_ = emqx_mod_sup:stop_child(?LIB),
ok.
description() ->
"EMQ X Slow Subscribers Statistics Module".

View File

@ -23,19 +23,23 @@
-export([ start_link/0 -export([ start_link/0
, start_child/1 , start_child/1
, start_child/2 , start_child/2
, start_child/3
, stop_child/1 , stop_child/1
]). ]).
-export([init/1]). -export([init/1]).
%% Helper macro for declaring children of supervisor %% Helper macro for declaring children of supervisor
-define(CHILD(Mod, Type), #{id => Mod, -define(CHILD(Mod, Type, Args),
start => {Mod, start_link, []}, #{id => Mod,
start => {Mod, start_link, Args},
restart => permanent, restart => permanent,
shutdown => 5000, shutdown => 5000,
type => Type, type => Type,
modules => [Mod]}). modules => [Mod]}).
-define(CHILD(MOD, Type), ?CHILD(MOD, Type, [])).
-spec(start_link() -> startlink_ret()). -spec(start_link() -> startlink_ret()).
start_link() -> start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []). supervisor:start_link({local, ?MODULE}, ?MODULE, []).
@ -48,6 +52,10 @@ start_child(ChildSpec) when is_map(ChildSpec) ->
start_child(Mod, Type) when is_atom(Mod) andalso is_atom(Type) -> start_child(Mod, Type) when is_atom(Mod) andalso is_atom(Type) ->
assert_started(supervisor:start_child(?MODULE, ?CHILD(Mod, Type))). assert_started(supervisor:start_child(?MODULE, ?CHILD(Mod, Type))).
-spec start_child(atom(), atom(), list(any())) -> ok.
start_child(Mod, Type, Args) when is_atom(Mod) andalso is_atom(Type) ->
assert_started(supervisor:start_child(?MODULE, ?CHILD(Mod, Type, Args))).
-spec(stop_child(any()) -> ok | {error, term()}). -spec(stop_child(any()) -> ok | {error, term()}).
stop_child(ChildId) -> stop_child(ChildId) ->
case supervisor:terminate_child(?MODULE, ChildId) of case supervisor:terminate_child(?MODULE, ChildId) of
@ -61,6 +69,7 @@ stop_child(ChildId) ->
init([]) -> init([]) ->
ok = emqx_tables:new(emqx_modules, [set, public, {write_concurrency, true}]), ok = emqx_tables:new(emqx_modules, [set, public, {write_concurrency, true}]),
emqx_slow_subs:init_topk_tab(),
{ok, {{one_for_one, 10, 100}, []}}. {ok, {{one_for_one, 10, 100}, []}}.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -69,6 +78,5 @@ init([]) ->
assert_started({ok, _Pid}) -> ok; assert_started({ok, _Pid}) -> ok;
assert_started({ok, _Pid, _Info}) -> ok; assert_started({ok, _Pid, _Info}) -> ok;
assert_started({error, {already_tarted, _Pid}}) -> ok; assert_started({error, {already_started, _Pid}}) -> ok;
assert_started({error, Reason}) -> erlang:error(Reason). assert_started({error, Reason}) -> erlang:error(Reason).

View File

@ -0,0 +1,39 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_mod_trace).
-behaviour(emqx_gen_mod).
-include_lib("emqx/include/emqx.hrl").
-include_lib("emqx/include/logger.hrl").
-export([ load/1
, unload/1
, description/0
]).
-spec description() -> string().
description() ->
"EMQ X Trace Module".
-spec load(any()) -> ok.
load(_Env) ->
emqx_mod_sup:start_child(emqx_trace, worker).
-spec unload(any()) -> ok.
unload(_Env) ->
emqx_mod_sup:stop_child(emqx_trace).

View File

@ -0,0 +1,98 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
%% @doc REST API glue for the tracing feature.  Each handler below simply
%% forwards to the matching function in emqx_trace_api and wraps the result
%% with minirest:return/1 so it is rendered as an HTTP response.
-module(emqx_mod_trace_api).
%% API
-export([ list_trace/2
, create_trace/2
, disable_trace/2
, delete_trace/2
, clear_traces/2
, download_zip_log/2
, stream_log_file/2
]).
%% return/1 turns {ok, ...} / {error, Code, Reason} into a minirest response.
-import(minirest, [return/1]).
%% GET /trace/ -> list_trace/2
-rest_api(#{name => list_trace,
method => 'GET',
path => "/trace/",
func => list_trace,
descr => "list all traces"}).
%% POST /trace/ -> create_trace/2
-rest_api(#{name => create_trace,
method => 'POST',
path => "/trace/",
func => create_trace,
descr => "create trace"}).
%% DELETE /trace/:name -> delete_trace/2
-rest_api(#{name => delete_trace,
method => 'DELETE',
path => "/trace/:bin:name",
func => delete_trace,
descr => "delete trace"}).
%% DELETE /trace/ -> clear_traces/2 (removes all traces)
-rest_api(#{name => clear_trace,
method => 'DELETE',
path => "/trace/",
func => clear_traces,
descr => "clear all traces"}).
%% PUT /trace/:name/stop -> disable_trace/2
-rest_api(#{name => disable_trace,
method => 'PUT',
path => "/trace/:bin:name/stop",
func => disable_trace,
descr => "stop trace"}).
%% GET /trace/:name/download -> download_zip_log/2
-rest_api(#{name => download_zip_log,
method => 'GET',
path => "/trace/:bin:name/download",
func => download_zip_log,
descr => "download trace's log"}).
%% GET /trace/:name/log -> stream_log_file/2
-rest_api(#{name => stream_log_file,
method => 'GET',
path => "/trace/:bin:name/log",
func => stream_log_file,
descr => "download trace's log"}).
%% List every known trace.
list_trace(Path, Params) ->
return(emqx_trace_api:list_trace(Path, Params)).
%% Create a new trace from the request parameters.
create_trace(Path, Params) ->
return(emqx_trace_api:create_trace(Path, Params)).
%% Delete the trace named in the path.
delete_trace(Path, Params) ->
return(emqx_trace_api:delete_trace(Path, Params)).
%% Remove all traces.
clear_traces(Path, Params) ->
return(emqx_trace_api:clear_traces(Path, Params)).
%% Stop (disable) the named trace; implemented as an update operation.
disable_trace(#{name := Name}, Params) ->
return(emqx_trace_api:update_trace(#{name => Name, operation => disable}, Params)).
%% Serve the zipped trace log as a file download; 'NOT_FOUND' on error.
download_zip_log(Path, Params) ->
case emqx_trace_api:download_zip_log(Path, Params) of
{ok, File} -> minirest:return_file(File);
{error, Reason} -> return({error, 'NOT_FOUND', Reason})
end.
%% Return a chunk of the trace log file; 'NOT_FOUND' on error.
stream_log_file(Path, Params) ->
case emqx_trace_api:stream_log_file(Path, Params) of
{ok, File} -> return({ok, File});
{error, Reason} -> return({error, 'NOT_FOUND', Reason})
end.

View File

@ -1,6 +1,6 @@
{application, emqx_modules, {application, emqx_modules,
[{description, "EMQ X Module Management"}, [{description, "EMQ X Module Management"},
{vsn, "4.3.3"}, {vsn, "4.4.0"},
{modules, []}, {modules, []},
{applications, [kernel,stdlib]}, {applications, [kernel,stdlib]},
{mod, {emqx_modules_app, []}}, {mod, {emqx_modules_app, []}},

View File

@ -1,6 +1,9 @@
%% -*-: erlang -*- %% -*-: erlang -*-
{VSN, {VSN,
[ [
{"4.3.3", [
{load_module, emqx_mod_st_statistics, brutal_purge, soft_purge, []}
]},
{"4.3.2", [ {"4.3.2", [
{load_module, emqx_mod_presence, brutal_purge, soft_purge, []} {load_module, emqx_mod_presence, brutal_purge, soft_purge, []}
]}, ]},
@ -16,6 +19,9 @@
{<<".*">>, []} {<<".*">>, []}
], ],
[ [
{"4.3.3", [
{load_module, emqx_mod_st_statistics, brutal_purge, soft_purge, []}
]},
{"4.3.2", [ {"4.3.2", [
{load_module, emqx_mod_presence, brutal_purge, soft_purge, []} {load_module, emqx_mod_presence, brutal_purge, soft_purge, []}
]}, ]},

View File

@ -62,7 +62,7 @@ t_mod_rewrite(_Config) ->
timer:sleep(100), timer:sleep(100),
?assertEqual([], emqx_broker:subscriptions(<<"rewrite_client">>)), ?assertEqual([], emqx_broker:subscriptions(<<"rewrite_client">>)),
%% Pub Rules %% Pub Rules
{ok, _Props, _} = emqtt:subscribe(C, [{Topic, ?QOS_1} || Topic <- PubDestTopics]), {ok, _Props1, _} = emqtt:subscribe(C, [{Topic, ?QOS_1} || Topic <- PubDestTopics]),
RecvTopics2 = [begin RecvTopics2 = [begin
ok = emqtt:publish(C, Topic, <<"payload">>), ok = emqtt:publish(C, Topic, <<"payload">>),
{ok, #{topic := RecvTopic}} = receive_publish(100), {ok, #{topic := RecvTopic}} = receive_publish(100),

View File

@ -41,9 +41,8 @@ t_start_child(_) ->
modules => [Mod]}, modules => [Mod]},
ok = emqx_mod_sup:start_child(Mod, worker), ok = emqx_mod_sup:start_child(Mod, worker),
?assertError({already_started, _}, emqx_mod_sup:start_child(Spec)), ?assertEqual(ok, emqx_mod_sup:start_child(Spec)),
ok = emqx_mod_sup:stop_child(Mod), ok = emqx_mod_sup:stop_child(Mod),
{error, not_found} = emqx_mod_sup:stop_child(Mod), {error, not_found} = emqx_mod_sup:stop_child(Mod),
ok. ok.

View File

@ -0,0 +1,187 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
%% @doc Integration tests for the trace REST API exposed by
%% emqx_mod_trace_api, driven over HTTP against a locally started
%% dashboard listener.
-module(emqx_mod_trace_api_SUITE).
%% API
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("emqx/include/emqx.hrl").
%% Base URL of the local dashboard listener the tests talk to.
-define(HOST, "http://127.0.0.1:18083/").
-define(API_VERSION, "v4").
-define(BASE_PATH, "api").
%%--------------------------------------------------------------------
%% Setups
%%--------------------------------------------------------------------
all() ->
emqx_ct:all(?MODULE).
init_per_suite(Config) ->
application:load(emqx_plugin_libs),
emqx_ct_helpers:start_apps([emqx_modules, emqx_dashboard]),
Config.
end_per_suite(_Config) ->
emqx_ct_helpers:stop_apps([emqx_modules, emqx_dashboard]).
%% Exercises the full CRUD cycle of the trace HTTP API:
%% list (empty) -> create (invalid, then valid) -> stop/update ->
%% delete (twice; the second returns NOT_FOUND) -> clear all.
t_http_test(_Config) ->
emqx_trace:clear(),
load(),
Header = auth_header_(),
%% list
{ok, Empty} = request_api(get, api_path("trace"), Header),
?assertEqual(#{<<"code">> => 0, <<"data">> => []}, json(Empty)),
%% create
ErrorTrace = #{},
{ok, Error} = request_api(post, api_path("trace"), Header, ErrorTrace),
?assertEqual(#{<<"message">> => <<"name required">>,
<<"code">> => <<"INCORRECT_PARAMS">>}, json(Error)),
Name = <<"test-name">>,
Trace = [
{<<"name">>, Name},
{<<"type">>, <<"topic">>},
{<<"topic">>, <<"/x/y/z">>}
],
{ok, Create} = request_api(post, api_path("trace"), Header, Trace),
?assertEqual(#{<<"code">> => 0}, json(Create)),
{ok, List} = request_api(get, api_path("trace"), Header),
#{<<"code">> := 0, <<"data">> := [Data]} = json(List),
?assertEqual(Name, maps:get(<<"name">>, Data)),
%% update
{ok, Update} = request_api(put, api_path("trace/test-name/stop"), Header, #{}),
?assertEqual(#{<<"code">> => 0,
<<"data">> => #{<<"enable">> => false,
<<"name">> => <<"test-name">>}}, json(Update)),
{ok, List1} = request_api(get, api_path("trace"), Header),
#{<<"code">> := 0, <<"data">> := [Data1]} = json(List1),
Node = atom_to_binary(node()),
?assertMatch(#{
<<"status">> := <<"stopped">>,
<<"name">> := <<"test-name">>,
<<"log_size">> := #{Node := _},
<<"start_at">> := _,
<<"end_at">> := _,
<<"type">> := <<"topic">>,
<<"topic">> := <<"/x/y/z">>
}, Data1),
%% delete
{ok, Delete} = request_api(delete, api_path("trace/test-name"), Header),
?assertEqual(#{<<"code">> => 0}, json(Delete)),
{ok, DeleteNotFound} = request_api(delete, api_path("trace/test-name"), Header),
?assertEqual(#{<<"code">> => <<"NOT_FOUND">>,
<<"message">> => <<"test-name NOT FOUND">>}, json(DeleteNotFound)),
{ok, List2} = request_api(get, api_path("trace"), Header),
?assertEqual(#{<<"code">> => 0, <<"data">> => []}, json(List2)),
%% clear
{ok, Create1} = request_api(post, api_path("trace"), Header, Trace),
?assertEqual(#{<<"code">> => 0}, json(Create1)),
{ok, Clear} = request_api(delete, api_path("trace"), Header),
?assertEqual(#{<<"code">> => 0}, json(Clear)),
unload(),
ok.
%% Creates a clientid trace, generates some traffic, then reads the log
%% back in two byte-ranged chunks via /trace/:name/log and checks the
%% reported position/bytes metadata advances accordingly.
t_stream_log(_Config) ->
application:set_env(emqx, allow_anonymous, true),
emqx_trace:clear(),
load(),
ClientId = <<"client-stream">>,
Now = erlang:system_time(second),
Name = <<"test_stream_log">>,
Start = to_rfc3339(Now - 10),
ok = emqx_trace:create([{<<"name">>, Name},
{<<"type">>, <<"clientid">>}, {<<"clientid">>, ClientId}, {<<"start_at">>, Start}]),
ct:sleep(200),
{ok, Client} = emqtt:start_link([{clean_start, true}, {clientid, ClientId}]),
{ok, _} = emqtt:connect(Client),
[begin _ = emqtt:ping(Client) end ||_ <- lists:seq(1, 5)],
emqtt:publish(Client, <<"/good">>, #{}, <<"ghood1">>, [{qos, 0}]),
emqtt:publish(Client, <<"/good">>, #{}, <<"ghood2">>, [{qos, 0}]),
ok = emqtt:disconnect(Client),
ct:sleep(200),
File = emqx_trace:log_file(Name, Now),
ct:pal("FileName: ~p", [File]),
{ok, FileBin} = file:read_file(File),
ct:pal("FileBin: ~p ~s", [byte_size(FileBin), FileBin]),
Header = auth_header_(),
{ok, Binary} = request_api(get, api_path("trace/test_stream_log/log?bytes=10"), Header),
#{<<"code">> := 0, <<"data">> := #{<<"meta">> := Meta, <<"items">> := Bin}} = json(Binary),
?assertEqual(10, byte_size(Bin)),
?assertEqual(#{<<"position">> => 10, <<"bytes">> => 10}, Meta),
Path = api_path("trace/test_stream_log/log?position=20&bytes=10"),
{ok, Binary1} = request_api(get, Path, Header),
#{<<"code">> := 0, <<"data">> := #{<<"meta">> := Meta1, <<"items">> := Bin1}} = json(Binary1),
?assertEqual(#{<<"position">> => 30, <<"bytes">> => 10}, Meta1),
?assertEqual(10, byte_size(Bin1)),
unload(),
ok.
%% Format a POSIX time (seconds) as an RFC3339 binary.
to_rfc3339(Second) ->
list_to_binary(calendar:system_time_to_rfc3339(Second)).
%% Default dashboard credentials used by the test HTTP requests.
auth_header_() ->
auth_header_("admin", "public").
%% Build an HTTP basic-auth header tuple for User/Pass.
auth_header_(User, Pass) ->
Encoded = base64:encode_to_string(lists:append([User, ":", Pass])),
{"Authorization", "Basic " ++ Encoded}.
%% Request helpers: body-less and JSON-body variants.
request_api(Method, Url, Auth) -> do_request_api(Method, {Url, [Auth]}).
request_api(Method, Url, Auth, Body) ->
Request = {Url, [Auth], "application/json", emqx_json:encode(Body)},
do_request_api(Method, Request).
%% Issue the request via httpc; 200/201 responses yield {ok, Body}.
do_request_api(Method, Request) ->
ct:pal("Method: ~p, Request: ~p", [Method, Request]),
case httpc:request(Method, Request, [], [{body_format, binary}]) of
{error, socket_closed_remotely} ->
{error, socket_closed_remotely};
{error,{shutdown, server_closed}} ->
{error, server_closed};
{ok, {{"HTTP/1.1", Code, _}, _Headers, Return} }
when Code =:= 200 orelse Code =:= 201 ->
{ok, Return};
{ok, {Reason, _, _}} ->
{error, Reason}
end.
%% Compose a full API URL for the given relative path.
api_path(Path) ->
?HOST ++ filename:join([?BASE_PATH, ?API_VERSION, Path]).
%% Decode a JSON binary into maps.
json(Data) ->
{ok, Jsx} = emqx_json:safe_decode(Data, [return_maps]), Jsx.
%% Start/stop the emqx_trace server used by the handlers under test.
load() ->
emqx_trace:start_link().
unload() ->
gen_server:stop(emqx_trace).

View File

@ -0,0 +1,125 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
%% @doc Tests for the slow-subscriptions module: verifies that slow
%% deliveries are recorded in the top-k ETS table, published as $SYS
%% notifications in batches, and eventually expire from the table.
-module(emqx_slow_subs_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("eunit/include/eunit.hrl").
-include("include/emqx_mqtt.hrl").
-include_lib("include/emqx.hrl").
%-define(LOGT(Format, Args), ct:pal(Format, Args)).
%% ETS table populated by the slow-subs module.
-define(TOPK_TAB, emqx_slow_subs_topk).
-define(NOW, erlang:system_time(millisecond)).
all() -> emqx_ct:all(?MODULE).
init_per_suite(Config) ->
emqx_ct_helpers:start_apps([emqx]),
Config.
end_per_suite(Config) ->
emqx_ct_helpers:stop_apps([emqx]),
Config.
%% Load/unload the module around each test case so every case starts clean.
init_per_testcase(_, Config) ->
emqx_mod_slow_subs:load(base_conf()),
Config.
end_per_testcase(_, _) ->
emqx_mod_slow_subs:unload([]),
ok.
%%--------------------------------------------------------------------
%% Test Cases
%%--------------------------------------------------------------------
%% Publishes messages with a back-dated timestamp (Now - 500ms, i.e. at the
%% configured threshold), then checks the top-k table size, the $SYS notice
%% batch sizes, and that all entries are gone after the expire interval.
t_log_and_pub(_) ->
%% Sub topic first
Subs = [{<<"/test1/+">>, ?QOS_1}, {<<"/test2/+">>, ?QOS_2}],
Clients = start_client(Subs),
emqx:subscribe("$SYS/brokers/+/slow_subs"),
timer:sleep(1000),
Now = ?NOW,
%% publish
lists:foreach(fun(I) ->
Topic = list_to_binary(io_lib:format("/test1/~p", [I])),
Msg = emqx_message:make(undefined, ?QOS_1, Topic, <<"Hello">>),
emqx:publish(Msg#message{timestamp = Now - 500})
end,
lists:seq(1, 10)),
lists:foreach(fun(I) ->
Topic = list_to_binary(io_lib:format("/test2/~p", [I])),
Msg = emqx_message:make(undefined, ?QOS_2, Topic, <<"Hello">>),
emqx:publish(Msg#message{timestamp = Now - 500})
end,
lists:seq(1, 10)),
timer:sleep(1000),
Size = ets:info(?TOPK_TAB, size),
%% some time record maybe delete due to it expired
?assert(Size =< 6 andalso Size >= 4),
timer:sleep(1500),
Recs = try_receive([]),
RecSum = lists:sum(Recs),
?assert(RecSum >= 5),
%% notice_batch_size is 3, so no batch may exceed 3 entries
?assert(lists:all(fun(E) -> E =< 3 end, Recs)),
timer:sleep(2000),
?assert(ets:info(?TOPK_TAB, size) =:= 0),
[Client ! stop || Client <- Clients],
ok.
%% Module configuration used by every test case.
base_conf() ->
[ {threshold, 500}
, {top_k_num, 5}
, {expire_interval, timer:seconds(3)}
, {notice_interval, 1500}
, {notice_qos, 0}
, {notice_batch_size, 3}
].
%% Spawn ten subscriber processes, each subscribing to one entry of Subs.
start_client(Subs) ->
[spawn(fun() -> client(I, Subs) end) || I <- lists:seq(1, 10)].
%% Connect an MQTT client, pick a subscription round-robin from Subs,
%% then block until told to stop.
client(I, Subs) ->
{ok, C} = emqtt:start_link([{host, "localhost"},
{clientid, io_lib:format("slow_subs_~p", [I])},
{username, <<"plain">>},
{password, <<"plain">>}]),
{ok, _} = emqtt:connect(C),
Len = erlang:length(Subs),
Sub = lists:nth(I rem Len + 1, Subs),
_ = emqtt:subscribe(C, Sub),
receive
stop ->
ok
end.
%% Drain $SYS slow-subs notifications, accumulating each batch's log count.
try_receive(Acc) ->
receive
{deliver, _, #message{payload = Payload}} ->
#{<<"logs">> := Logs} = emqx_json:decode(Payload, [return_maps]),
try_receive([length(Logs) | Acc])
after 500 ->
Acc
end.

View File

@ -0,0 +1,163 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
%% @doc HTTP API tests for the slow-subscriptions endpoints
%% (/slow_subscriptions): paginated history listing and clearing.
-module(emqx_slow_subs_api_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("eunit/include/eunit.hrl").
-include_lib("emqx/include/emqx.hrl").
-include_lib("emqx/include/emqx_mqtt.hrl").
-include_lib("emqx_management/include/emqx_mgmt.hrl").
-include_lib("emqx_plugin_libs/include/emqx_slow_subs.hrl").
-define(CONTENT_TYPE, "application/x-www-form-urlencoded").
%% Base URL of the local dashboard listener the tests talk to.
-define(HOST, "http://127.0.0.1:18083/").
-define(API_VERSION, "v4").
-define(BASE_PATH, "api").
-define(NOW, erlang:system_time(millisecond)).
all() ->
emqx_ct:all(?MODULE).
init_per_suite(Config) ->
emqx_ct_helpers:boot_modules(all),
application:load(emqx_plugin_libs),
emqx_ct_helpers:start_apps([emqx_modules, emqx_management, emqx_dashboard]),
Config.
end_per_suite(Config) ->
emqx_ct_helpers:stop_apps([emqx_management]),
Config.
%% Load/unload the slow-subs module around each test case.
init_per_testcase(_, Config) ->
emqx_mod_slow_subs:load(base_conf()),
Config.
end_per_testcase(_, Config) ->
emqx_mod_slow_subs:unload([]),
Config.
%% Module configuration used by the test cases.
base_conf() ->
[ {threshold, 500}
, {top_k_num, 5}
, {expire_interval, timer:seconds(60)}
, {notice_interval, 0}
, {notice_qos, 0}
, {notice_batch_size, 3}
].
%% Seeds five top-k records directly into ETS, then checks the paginated
%% GET /slow_subscriptions response: the meta block and the first
%% (highest-ranked) entry.
t_get_history(_) ->
Now = ?NOW,
Each = fun(I) ->
ClientId = erlang:list_to_binary(io_lib:format("test_~p", [I])),
ets:insert(?TOPK_TAB, #top_k{index = ?INDEX(I, ClientId),
type = average,
last_update_time = Now})
end,
lists:foreach(Each, lists:seq(1, 5)),
{ok, Data} = request_api(get, api_path(["slow_subscriptions"]), "_page=1&_limit=10",
auth_header_()),
#{meta := Meta, data := [First | _]} = decode(Data),
RMeta = #{page => 1, limit => 10, count => 5},
?assertEqual(RMeta, Meta),
RFirst = #{clientid => <<"test_5">>,
latency => 5,
type => <<"average">>,
last_update_time => Now},
?assertEqual(RFirst, First).
%% DELETE /slow_subscriptions must empty the top-k table.
t_clear(_) ->
ets:insert(?TOPK_TAB, #top_k{index = ?INDEX(1, <<"test">>),
type = average,
last_update_time = ?NOW}),
{ok, _} = request_api(delete, api_path(["slow_subscriptions"]), [],
auth_header_()),
?assertEqual(0, ets:info(?TOPK_TAB, size)).
%% Decode a JSON body into nested maps with atom keys.
decode(Data) ->
Pairs = emqx_json:decode(Data),
to_maps(Pairs).
%% Recursively convert emqx_json proplists into maps (lists of tuples
%% become maps; other lists are mapped element-wise; scalars pass through).
to_maps([H | _] = List) when is_tuple(H) ->
to_maps(List, #{});
to_maps([_ | _] = List) ->
[to_maps(X) || X <- List];
to_maps(V) -> V.
to_maps([{K, V} | T], Map) ->
AtomKey = erlang:binary_to_atom(K),
to_maps(T, Map#{AtomKey => to_maps(V)});
to_maps([], Map) ->
Map.
%% HTTP helpers: optional query string and optional JSON body.
request_api(Method, Url, Auth) ->
request_api(Method, Url, [], Auth, []).
request_api(Method, Url, QueryParams, Auth) ->
request_api(Method, Url, QueryParams, Auth, []).
request_api(Method, Url, QueryParams, Auth, []) ->
NewUrl = case QueryParams of
"" -> Url;
_ -> Url ++ "?" ++ QueryParams
end,
do_request_api(Method, {NewUrl, [Auth]});
request_api(Method, Url, QueryParams, Auth, Body) ->
NewUrl = case QueryParams of
"" -> Url;
_ -> Url ++ "?" ++ QueryParams
end,
do_request_api(Method, {NewUrl, [Auth], "application/json", emqx_json:encode(Body)}).
%% Issue the request via httpc; 200/201 responses yield {ok, Body}.
do_request_api(Method, Request)->
ct:pal("Method: ~p, Request: ~p", [Method, Request]),
case httpc:request(Method, Request, [], []) of
{error, socket_closed_remotely} ->
{error, socket_closed_remotely};
{ok, {{"HTTP/1.1", Code, _}, _, Return} }
when Code =:= 200 orelse Code =:= 201 ->
{ok, Return};
{ok, {Reason, _, _}} ->
{error, Reason}
end.
%% Default dashboard credentials / basic-auth header helpers.
auth_header_() ->
AppId = <<"admin">>,
AppSecret = <<"public">>,
auth_header_(binary_to_list(AppId), binary_to_list(AppSecret)).
auth_header_(User, Pass) ->
Encoded = base64:encode_to_string(lists:append([User,":",Pass])),
{"Authorization","Basic " ++ Encoded}.
%% Compose a full API URL from path segments.
api_path(Parts)->
?HOST ++ filename:join([?BASE_PATH, ?API_VERSION] ++ Parts).

View File

@ -96,6 +96,11 @@
{datatype, string} {datatype, string}
]}. ]}.
{mapping, "cluster.dns.type", "ekka.cluster_discovery", [
{datatype, {enum, [a, srv]}},
{default, a}
]}.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Cluster using etcd %% Cluster using etcd
@ -171,7 +176,8 @@
{loop, cuttlefish:conf_get("cluster.mcast.loop", Conf, true)}]; {loop, cuttlefish:conf_get("cluster.mcast.loop", Conf, true)}];
(dns) -> (dns) ->
[{name, cuttlefish:conf_get("cluster.dns.name", Conf)}, [{name, cuttlefish:conf_get("cluster.dns.name", Conf)},
{app, cuttlefish:conf_get("cluster.dns.app", Conf)}]; {app, cuttlefish:conf_get("cluster.dns.app", Conf)},
{type, cuttlefish:conf_get("cluster.dns.type", Conf)}];
(etcd) -> (etcd) ->
SslOpts = fun(Conf) -> SslOpts = fun(Conf) ->
Options = cuttlefish_variable:filter_by_prefix("cluster.etcd.ssl", Conf), Options = cuttlefish_variable:filter_by_prefix("cluster.etcd.ssl", Conf),
@ -362,11 +368,35 @@ end}.
]}. ]}.
%% RPC server port. %% RPC server port.
{mapping, "rpc.driver", "gen_rpc.driver",
[ {default, tcp}
, {datatype, {enum, [tcp, ssl]}}
]}.
{mapping, "rpc.tcp_server_port", "gen_rpc.tcp_server_port", [ {mapping, "rpc.tcp_server_port", "gen_rpc.tcp_server_port", [
{default, 5369}, {default, 5369},
{datatype, integer} {datatype, integer}
]}. ]}.
%% RPC SSL server port.
{mapping, "rpc.enable_ssl", "gen_rpc.ssl_server_port", [
{default, 5369},
{datatype, integer}
]}.
%% RPC SSL certificates
{mapping, "rpc.certfile", "gen_rpc.certfile", [
{datatype, string}
]}.
{mapping, "rpc.keyfile", "gen_rpc.keyfile", [
{datatype, string}
]}.
{mapping, "rpc.cacertfile", "gen_rpc.cacertfile", [
{datatype, string}
]}.
%% Number of tcp connections when connecting to RPC server %% Number of tcp connections when connecting to RPC server
{mapping, "rpc.tcp_client_num", "gen_rpc.tcp_client_num", [ {mapping, "rpc.tcp_client_num", "gen_rpc.tcp_client_num", [
{default, 0}, {default, 0},
@ -376,7 +406,7 @@ end}.
{translation, "gen_rpc.tcp_client_num", fun(Conf) -> {translation, "gen_rpc.tcp_client_num", fun(Conf) ->
case cuttlefish:conf_get("rpc.tcp_client_num", Conf) of case cuttlefish:conf_get("rpc.tcp_client_num", Conf) of
0 -> 1; %% keep allowing 0 for backward compatibility 0 -> max(1, erlang:system_info(schedulers) div 2);
V -> V V -> V
end end
end}. end}.
@ -977,6 +1007,12 @@ end}.
{datatype, {duration, s}} {datatype, {duration, s}}
]}. ]}.
%% @doc the number of smaples for calculate the average latency of delivery
{mapping, "zone.$name.latency_samples", "emqx.zones", [
{default, 10},
{datatype, integer}
]}.
%% @doc Max Packets that Awaiting PUBREL, 0 means no limit %% @doc Max Packets that Awaiting PUBREL, 0 means no limit
{mapping, "zone.$name.max_awaiting_rel", "emqx.zones", [ {mapping, "zone.$name.max_awaiting_rel", "emqx.zones", [
{default, 0}, {default, 0},
@ -2188,6 +2224,37 @@ end}.
{datatype, string} {datatype, string}
]}. ]}.
{mapping, "module.slow_subs.threshold", "emqx.modules", [
{default, "500ms"},
{datatype, {duration, ms}}
]}.
{mapping, "module.slow_subs.expire_interval", "emqx.modules", [
{default, "5m"},
{datatype, {duration, ms}}
]}.
{mapping, "module.slow_subs.top_k_num", "emqx.modules", [
{default, 500},
{datatype, integer}
]}.
{mapping, "module.slow_subs.notice_interval", "emqx.modules", [
{default, "0s"},
{datatype, {duration, ms}}
]}.
{mapping, "module.slow_subs.notice_qos", "emqx.modules", [
{default, 0},
{datatype, integer},
{validators, ["range:0-1"]}
]}.
{mapping, "module.slow_subs.notice_batch_size", "emqx.modules", [
{default, 500},
{datatype, integer}
]}.
{translation, "emqx.modules", fun(Conf, _, Conf1) -> {translation, "emqx.modules", fun(Conf, _, Conf1) ->
Subscriptions = fun() -> Subscriptions = fun() ->
List = cuttlefish_variable:filter_by_prefix("module.subscription", Conf), List = cuttlefish_variable:filter_by_prefix("module.subscription", Conf),
@ -2211,12 +2278,20 @@ end}.
{rewrite, list_to_atom(PubOrSub), list_to_binary(Topic), list_to_binary(Re), list_to_binary(Dest)} {rewrite, list_to_atom(PubOrSub), list_to_binary(Topic), list_to_binary(Re), list_to_binary(Dest)}
end, TotalRules) end, TotalRules)
end, end,
SlowSubs = fun() ->
List = cuttlefish_variable:filter_by_prefix("module.slow_subs", Conf),
[{erlang:list_to_atom(Key), Value} || {[_, _, Key], Value} <- List]
end,
lists:append([ lists:append([
[{emqx_mod_presence, [{qos, cuttlefish:conf_get("module.presence.qos", Conf, 1)}]}], [{emqx_mod_presence, [{qos, cuttlefish:conf_get("module.presence.qos", Conf, 1)}]}],
[{emqx_mod_subscription, Subscriptions()}], [{emqx_mod_subscription, Subscriptions()}],
[{emqx_mod_rewrite, Rewrites()}], [{emqx_mod_rewrite, Rewrites()}],
[{emqx_mod_topic_metrics, []}], [{emqx_mod_topic_metrics, []}],
[{emqx_mod_delayed, []}], [{emqx_mod_delayed, []}],
[{emqx_mod_trace, []}],
[{emqx_mod_slow_subs, SlowSubs()}],
[{emqx_mod_acl_internal, [{acl_file, cuttlefish:conf_get("acl_file", Conf1)}]}] [{emqx_mod_acl_internal, [{acl_file, cuttlefish:conf_get("acl_file", Conf1)}]}]
]) ])
end}. end}.

View File

@ -43,15 +43,15 @@
, {jiffy, {git, "https://github.com/emqx/jiffy", {tag, "1.0.5"}}} , {jiffy, {git, "https://github.com/emqx/jiffy", {tag, "1.0.5"}}}
, {cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.8.2"}}} , {cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.8.2"}}}
, {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.8.0"}}} , {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.8.0"}}}
, {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.8.1.6"}}} , {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.8.2"}}}
, {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.5.1"}}} , {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.7.0"}}}
, {cuttlefish, {git, "https://github.com/emqx/cuttlefish", {tag, "v3.3.6"}}} , {cuttlefish, {git, "https://github.com/emqx/cuttlefish", {tag, "v3.3.6"}}}
, {minirest, {git, "https://github.com/emqx/minirest", {tag, "0.3.7"}}} , {minirest, {git, "https://github.com/emqx/minirest", {tag, "0.3.7"}}}
, {ecpool, {git, "https://github.com/emqx/ecpool", {tag, "0.5.2"}}} , {ecpool, {git, "https://github.com/emqx/ecpool", {tag, "0.5.2"}}}
, {replayq, {git, "https://github.com/emqx/replayq", {tag, "0.3.2"}}} , {replayq, {git, "https://github.com/emqx/replayq", {tag, "0.3.2"}}}
, {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {branch, "2.0.4"}}} , {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {branch, "2.0.4"}}}
, {emqtt, {git, "https://github.com/emqx/emqtt", {tag, "1.2.3.1"}}} , {emqtt, {git, "https://github.com/emqx/emqtt", {tag, "1.2.3.1"}}}
, {rulesql, {git, "https://github.com/emqx/rulesql", {tag, "0.1.2"}}} , {rulesql, {git, "https://github.com/emqx/rulesql", {tag, "0.1.5"}}}
, {recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}} , {recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}}
, {observer_cli, "1.6.1"} % NOTE: depends on recon 2.5.1 , {observer_cli, "1.6.1"} % NOTE: depends on recon 2.5.1
, {getopt, "1.0.1"} , {getopt, "1.0.1"}

View File

@ -106,7 +106,7 @@ test_plugins() ->
test_deps() -> test_deps() ->
[ {bbmustache, "1.10.0"} [ {bbmustache, "1.10.0"}
, {emqx_ct_helpers, {git, "https://github.com/emqx/emqx-ct-helpers", {tag, "1.3.9"}}} , {emqx_ct_helpers, {git, "https://github.com/emqx/emqx-ct-helpers", {tag, "1.3.11"}}}
, meck , meck
]. ].

View File

@ -2,41 +2,68 @@
set -euo pipefail set -euo pipefail
latest_release=$(git describe --abbrev=0 --tags) latest_release=$(git describe --abbrev=0 --tags)
echo "Compare base: $latest_release"
bad_app_count=0 bad_app_count=0
while read -r app; do get_vsn() {
if [ "$app" != "emqx" ]; then commit="$1"
app_path="$app" app_src_file="$2"
else if [ "$commit" = 'HEAD' ]; then
app_path="." if [ -f "$app_src_file" ]; then
grep vsn "$app_src_file" | grep -oE '"[0-9]+.[0-9]+.[0-9]+"' | tr -d '"' || true
fi fi
src_file="$app_path/src/$(basename "$app").app.src" else
old_app_version="$(git show "$latest_release":"$src_file" | grep vsn | grep -oE '"[0-9]+.[0-9]+.[0-9]+"' | tr -d '"')" git show "$commit":"$app_src_file" 2>/dev/null | grep vsn | grep -oE '"[0-9]+.[0-9]+.[0-9]+"' | tr -d '"' || true
now_app_version=$(grep -E 'vsn' "$src_file" | grep -oE '"[0-9]+\.[0-9]+\.[0-9]+"' | tr -d '"') fi
if [ "$old_app_version" = "$now_app_version" ]; then }
changed="$(git diff --name-only "$latest_release"...HEAD \
check_apps() {
while read -r app_path; do
app=$(basename "$app_path")
src_file="$app_path/src/$app.app.src"
old_app_version="$(get_vsn "$latest_release" "$src_file")"
## TODO: delete it after new version is released with emqx app in apps dir
if [ "$app" = 'emqx' ] && [ "$old_app_version" = '' ]; then
old_app_version="$(get_vsn "$latest_release" 'src/emqx.app.src')"
fi
now_app_version="$(get_vsn 'HEAD' "$src_file")"
## TODO: delete it after new version is released with emqx app in apps dir
if [ "$app" = 'emqx' ] && [ "$now_app_version" = '' ]; then
now_app_version="$(get_vsn 'HEAD' 'src/emqx.app.src')"
fi
if [ -z "$now_app_version" ]; then
echo "failed_to_get_new_app_vsn for $app"
exit 1
fi
if [ -z "${old_app_version:-}" ]; then
echo "skiped checking new app ${app}"
elif [ "$old_app_version" = "$now_app_version" ]; then
lines="$(git diff --name-only "$latest_release"...HEAD \
-- "$app_path/src" \ -- "$app_path/src" \
-- "$app_path/priv" \ -- "$app_path/priv" \
-- "$app_path/c_src" | { grep -v -E 'appup\.src' || true; } | wc -l)" -- "$app_path/c_src")"
if [ "$changed" -gt 0 ]; then if [ "$lines" != '' ]; then
echo "$src_file needs a vsn bump" echo "$src_file needs a vsn bump (old=$old_app_version)"
bad_app_count=$(( bad_app_count + 1)) echo "changed: $lines"
elif [[ ${app_path} = *emqx_dashboard* ]]; then
## emqx_dashboard is ensured to be upgraded after all other plugins
## at the end of its appup instructions, there is the final instruction
## {apply, {emqx_plugins, load, []}
## since we don't know which plugins are stopped during the upgrade
## for safty, we just force a dashboard version bump for each and every release
## even if there is nothing changed in the app
echo "$src_file needs a vsn bump to ensure plugins loaded after upgrade"
bad_app_count=$(( bad_app_count + 1)) bad_app_count=$(( bad_app_count + 1))
fi fi
fi fi
done < <(./scripts/find-apps.sh) done < <(./scripts/find-apps.sh)
if [ $bad_app_count -gt 0 ]; then if [ $bad_app_count -gt 0 ]; then
exit 1 exit 1
else else
echo "apps version check successfully" echo "apps version check successfully"
fi fi
}
_main() {
if echo "${latest_release}" |grep -oE '[0-9]+.[0-9]+.[0-9]+' > /dev/null 2>&1; then
check_apps
else
echo "skiped unstable tag: ${latest_release}"
fi
}
_main

View File

@ -6,7 +6,9 @@
set -euo pipefail set -euo pipefail
if [ "$(uname -s)" = 'Darwin' ]; then if [ "$(uname -s)" = 'Darwin' ]; then
echo 'macos' DIST='macos'
VERSION_ID=$(sw_vers | gsed -n '/^ProductVersion:/p' | gsed -r 's/ProductVersion:(.*)/\1/g' | gsed -r 's/([0-9]+).*/\1/g' | gsed 's/^[ \t]*//g')
SYSTEM="$(echo "${DIST}${VERSION_ID}" | gsed -r 's/([a-zA-Z]*)-.*/\1/g')"
elif [ "$(uname -s)" = 'Linux' ]; then elif [ "$(uname -s)" = 'Linux' ]; then
if grep -q -i 'centos' /etc/*-release; then if grep -q -i 'centos' /etc/*-release; then
DIST='centos' DIST='centos'
@ -15,5 +17,6 @@ elif [ "$(uname -s)" = 'Linux' ]; then
DIST="$(sed -n '/^ID=/p' /etc/os-release | sed -r 's/ID=(.*)/\1/g' | sed 's/"//g')" DIST="$(sed -n '/^ID=/p' /etc/os-release | sed -r 's/ID=(.*)/\1/g' | sed 's/"//g')"
VERSION_ID="$(sed -n '/^VERSION_ID=/p' /etc/os-release | sed -r 's/VERSION_ID=(.*)/\1/g' | sed 's/"//g')" VERSION_ID="$(sed -n '/^VERSION_ID=/p' /etc/os-release | sed -r 's/VERSION_ID=(.*)/\1/g' | sed 's/"//g')"
fi fi
echo "${DIST}${VERSION_ID}" | sed -r 's/([a-zA-Z]*)-.*/\1/g' SYSTEM="$(echo "${DIST}${VERSION_ID}" | sed -r 's/([a-zA-Z]*)-.*/\1/g')"
fi fi
echo "$SYSTEM"

5
scripts/get-otp-vsn.sh Executable file
View File

@ -0,0 +1,5 @@
#!/bin/bash
set -euo pipefail
# Print the exact OTP patch version of the 'erl' found on PATH by reading
# the OTP_VERSION file shipped inside the installed release directory.
erl -noshell -eval '{ok, Version} = file:read_file(filename:join([code:root_dir(), "releases", erlang:system_info(otp_release), "OTP_VERSION"])), io:fwrite(Version), halt().'

View File

@ -82,7 +82,7 @@
-define(SUBSCRIPTION, emqx_subscription). -define(SUBSCRIPTION, emqx_subscription).
%% Guards %% Guards
-define(is_subid(Id), (is_binary(Id) orelse is_atom(Id))). -define(IS_SUBID(Id), (is_binary(Id) orelse is_atom(Id))).
-spec(start_link(atom(), pos_integer()) -> startlink_ret()). -spec(start_link(atom(), pos_integer()) -> startlink_ret()).
start_link(Pool, Id) -> start_link(Pool, Id) ->
@ -118,15 +118,17 @@ subscribe(Topic) when is_binary(Topic) ->
subscribe(Topic, undefined). subscribe(Topic, undefined).
-spec(subscribe(emqx_topic:topic(), emqx_types:subid() | emqx_types:subopts()) -> ok). -spec(subscribe(emqx_topic:topic(), emqx_types:subid() | emqx_types:subopts()) -> ok).
subscribe(Topic, SubId) when is_binary(Topic), ?is_subid(SubId) -> subscribe(Topic, SubId) when is_binary(Topic), ?IS_SUBID(SubId) ->
subscribe(Topic, SubId, ?DEFAULT_SUBOPTS); subscribe(Topic, SubId, ?DEFAULT_SUBOPTS);
subscribe(Topic, SubOpts) when is_binary(Topic), is_map(SubOpts) -> subscribe(Topic, SubOpts) when is_binary(Topic), is_map(SubOpts) ->
subscribe(Topic, undefined, SubOpts). subscribe(Topic, undefined, SubOpts).
-spec(subscribe(emqx_topic:topic(), emqx_types:subid(), emqx_types:subopts()) -> ok). -spec(subscribe(emqx_topic:topic(), emqx_types:subid(), emqx_types:subopts()) -> ok).
subscribe(Topic, SubId, SubOpts0) when is_binary(Topic), ?is_subid(SubId), is_map(SubOpts0) -> subscribe(Topic, SubId, SubOpts0) when is_binary(Topic), ?IS_SUBID(SubId), is_map(SubOpts0) ->
SubOpts = maps:merge(?DEFAULT_SUBOPTS, SubOpts0), SubOpts = maps:merge(?DEFAULT_SUBOPTS, SubOpts0),
case ets:member(?SUBOPTION, {SubPid = self(), Topic}) of _ = emqx_trace:subscribe(Topic, SubId, SubOpts),
SubPid = self(),
case ets:member(?SUBOPTION, {SubPid, Topic}) of
false -> %% New false -> %% New
ok = emqx_broker_helper:register_sub(SubPid, SubId), ok = emqx_broker_helper:register_sub(SubPid, SubId),
do_subscribe(Topic, SubPid, with_subid(SubId, SubOpts)); do_subscribe(Topic, SubPid, with_subid(SubId, SubOpts));
@ -171,6 +173,7 @@ unsubscribe(Topic) when is_binary(Topic) ->
SubPid = self(), SubPid = self(),
case ets:lookup(?SUBOPTION, {SubPid, Topic}) of case ets:lookup(?SUBOPTION, {SubPid, Topic}) of
[{_, SubOpts}] -> [{_, SubOpts}] ->
emqx_trace:unsubscribe(Topic, SubOpts),
_ = emqx_broker_helper:reclaim_seq(Topic), _ = emqx_broker_helper:reclaim_seq(Topic),
do_unsubscribe(Topic, SubPid, SubOpts); do_unsubscribe(Topic, SubPid, SubOpts);
[] -> ok [] -> ok
@ -183,13 +186,7 @@ do_unsubscribe(Topic, SubPid, SubOpts) ->
do_unsubscribe(Group, Topic, SubPid, SubOpts). do_unsubscribe(Group, Topic, SubPid, SubOpts).
do_unsubscribe(undefined, Topic, SubPid, SubOpts) -> do_unsubscribe(undefined, Topic, SubPid, SubOpts) ->
case maps:get(shard, SubOpts, 0) of clean_subscribe(SubOpts, Topic, SubPid);
0 -> true = ets:delete_object(?SUBSCRIBER, {Topic, SubPid}),
cast(pick(Topic), {unsubscribed, Topic});
I -> true = ets:delete_object(?SUBSCRIBER, {{shard, Topic, I}, SubPid}),
cast(pick({Topic, I}), {unsubscribed, Topic, I})
end;
do_unsubscribe(Group, Topic, SubPid, _SubOpts) -> do_unsubscribe(Group, Topic, SubPid, _SubOpts) ->
emqx_shared_sub:unsubscribe(Group, Topic, SubPid). emqx_shared_sub:unsubscribe(Group, Topic, SubPid).
@ -199,7 +196,7 @@ do_unsubscribe(Group, Topic, SubPid, _SubOpts) ->
-spec(publish(emqx_types:message()) -> emqx_types:publish_result()). -spec(publish(emqx_types:message()) -> emqx_types:publish_result()).
publish(Msg) when is_record(Msg, message) -> publish(Msg) when is_record(Msg, message) ->
_ = emqx_tracer:trace(publish, Msg), _ = emqx_trace:publish(Msg),
emqx_message:is_sys(Msg) orelse emqx_metrics:inc('messages.publish'), emqx_message:is_sys(Msg) orelse emqx_metrics:inc('messages.publish'),
case emqx_hooks:run_fold('message.publish', [], emqx_message:clean_dup(Msg)) of case emqx_hooks:run_fold('message.publish', [], emqx_message:clean_dup(Msg)) of
#message{headers = #{allow_publish := false}} -> #message{headers = #{allow_publish := false}} ->
@ -231,8 +228,7 @@ delivery(Msg) -> #delivery{sender = self(), message = Msg}.
-spec(route([emqx_types:route_entry()], emqx_types:delivery()) -spec(route([emqx_types:route_entry()], emqx_types:delivery())
-> emqx_types:publish_result()). -> emqx_types:publish_result()).
route([], #delivery{message = Msg}) -> route([], #delivery{message = Msg}) ->
ok = emqx_hooks:run('message.dropped', [Msg, #{node => node()}, no_subscribers]), drop_message(Msg),
ok = inc_dropped_cnt(Msg),
[]; [];
route(Routes, Delivery) -> route(Routes, Delivery) ->
@ -240,6 +236,10 @@ route(Routes, Delivery) ->
[do_route(Route, Delivery) | Acc] [do_route(Route, Delivery) | Acc]
end, [], Routes). end, [], Routes).
drop_message(Msg) ->
ok = emqx_hooks:run('message.dropped', [Msg, #{node => node()}, no_subscribers]),
ok = inc_dropped_cnt(Msg).
do_route({To, Node}, Delivery) when Node =:= node() -> do_route({To, Node}, Delivery) when Node =:= node() ->
{Node, To, dispatch(To, Delivery)}; {Node, To, dispatch(To, Delivery)};
do_route({To, Node}, Delivery) when is_atom(Node) -> do_route({To, Node}, Delivery) when is_atom(Node) ->
@ -261,7 +261,7 @@ aggre(Routes) ->
end, [], Routes). end, [], Routes).
%% @doc Forward message to another node. %% @doc Forward message to another node.
-spec(forward(node(), emqx_types:topic(), emqx_types:delivery(), RpcMode::sync|async) -spec(forward(node(), emqx_types:topic(), emqx_types:delivery(), RpcMode::sync | async)
-> emqx_types:deliver_result()). -> emqx_types:deliver_result()).
forward(Node, To, Delivery, async) -> forward(Node, To, Delivery, async) ->
case emqx_rpc:cast(To, Node, ?BROKER, dispatch, [To, Delivery]) of case emqx_rpc:cast(To, Node, ?BROKER, dispatch, [To, Delivery]) of
@ -288,8 +288,7 @@ dispatch(Topic, #delivery{message = Msg}) ->
end, 0, subscribers(Topic)), end, 0, subscribers(Topic)),
case DispN of case DispN of
0 -> 0 ->
ok = emqx_hooks:run('message.dropped', [Msg, #{node => node()}, no_subscribers]), drop_message(Msg),
ok = inc_dropped_cnt(Msg),
{error, no_subscribers}; {error, no_subscribers};
_ -> _ ->
{ok, DispN} {ok, DispN}
@ -336,16 +335,19 @@ subscriber_down(SubPid) ->
SubOpts when is_map(SubOpts) -> SubOpts when is_map(SubOpts) ->
_ = emqx_broker_helper:reclaim_seq(Topic), _ = emqx_broker_helper:reclaim_seq(Topic),
true = ets:delete(?SUBOPTION, {SubPid, Topic}), true = ets:delete(?SUBOPTION, {SubPid, Topic}),
clean_subscribe(SubOpts, Topic, SubPid);
undefined -> ok
end
end, lookup_value(?SUBSCRIPTION, SubPid, [])),
ets:delete(?SUBSCRIPTION, SubPid).
clean_subscribe(SubOpts, Topic, SubPid) ->
case maps:get(shard, SubOpts, 0) of case maps:get(shard, SubOpts, 0) of
0 -> true = ets:delete_object(?SUBSCRIBER, {Topic, SubPid}), 0 -> true = ets:delete_object(?SUBSCRIBER, {Topic, SubPid}),
ok = cast(pick(Topic), {unsubscribed, Topic}); ok = cast(pick(Topic), {unsubscribed, Topic});
I -> true = ets:delete_object(?SUBSCRIBER, {{shard, Topic, I}, SubPid}), I -> true = ets:delete_object(?SUBSCRIBER, {{shard, Topic, I}, SubPid}),
ok = cast(pick({Topic, I}), {unsubscribed, Topic, I}) ok = cast(pick({Topic, I}), {unsubscribed, Topic, I})
end; end.
undefined -> ok
end
end, lookup_value(?SUBSCRIPTION, SubPid, [])),
ets:delete(?SUBSCRIPTION, SubPid).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Management APIs %% Management APIs
@ -366,14 +368,14 @@ subscriptions(SubId) ->
-spec(subscribed(pid() | emqx_types:subid(), emqx_topic:topic()) -> boolean()). -spec(subscribed(pid() | emqx_types:subid(), emqx_topic:topic()) -> boolean()).
subscribed(SubPid, Topic) when is_pid(SubPid) -> subscribed(SubPid, Topic) when is_pid(SubPid) ->
ets:member(?SUBOPTION, {SubPid, Topic}); ets:member(?SUBOPTION, {SubPid, Topic});
subscribed(SubId, Topic) when ?is_subid(SubId) -> subscribed(SubId, Topic) when ?IS_SUBID(SubId) ->
SubPid = emqx_broker_helper:lookup_subpid(SubId), SubPid = emqx_broker_helper:lookup_subpid(SubId),
ets:member(?SUBOPTION, {SubPid, Topic}). ets:member(?SUBOPTION, {SubPid, Topic}).
-spec(get_subopts(pid(), emqx_topic:topic()) -> maybe(emqx_types:subopts())). -spec(get_subopts(pid(), emqx_topic:topic()) -> maybe(emqx_types:subopts())).
get_subopts(SubPid, Topic) when is_pid(SubPid), is_binary(Topic) -> get_subopts(SubPid, Topic) when is_pid(SubPid), is_binary(Topic) ->
lookup_value(?SUBOPTION, {SubPid, Topic}); lookup_value(?SUBOPTION, {SubPid, Topic});
get_subopts(SubId, Topic) when ?is_subid(SubId) -> get_subopts(SubId, Topic) when ?IS_SUBID(SubId) ->
case emqx_broker_helper:lookup_subpid(SubId) of case emqx_broker_helper:lookup_subpid(SubId) of
SubPid when is_pid(SubPid) -> SubPid when is_pid(SubPid) ->
get_subopts(SubPid, Topic); get_subopts(SubPid, Topic);
@ -498,4 +500,3 @@ code_change(_OldVsn, State, _Extra) ->
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Internal functions %% Internal functions
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------

View File

@ -102,7 +102,7 @@
-type(reply() :: {outgoing, emqx_types:packet()} -type(reply() :: {outgoing, emqx_types:packet()}
| {outgoing, [emqx_types:packet()]} | {outgoing, [emqx_types:packet()]}
| {event, conn_state()|updated} | {event, conn_state() | updated}
| {close, Reason :: atom()}). | {close, Reason :: atom()}).
-type(replies() :: emqx_types:packet() | reply() | [reply()]). -type(replies() :: emqx_types:packet() | reply() | [reply()]).
@ -131,7 +131,7 @@
info(Channel) -> info(Channel) ->
maps:from_list(info(?INFO_KEYS, Channel)). maps:from_list(info(?INFO_KEYS, Channel)).
-spec(info(list(atom())|atom(), channel()) -> term()). -spec(info(list(atom()) | atom(), channel()) -> term()).
info(Keys, Channel) when is_list(Keys) -> info(Keys, Channel) when is_list(Keys) ->
[{Key, info(Key, Channel)} || Key <- Keys]; [{Key, info(Key, Channel)} || Key <- Keys];
info(conninfo, #channel{conninfo = ConnInfo}) -> info(conninfo, #channel{conninfo = ConnInfo}) ->
@ -275,7 +275,7 @@ take_ws_cookie(ClientInfo, ConnInfo) ->
handle_in(?CONNECT_PACKET(), Channel = #channel{conn_state = connected}) -> handle_in(?CONNECT_PACKET(), Channel = #channel{conn_state = connected}) ->
handle_out(disconnect, ?RC_PROTOCOL_ERROR, Channel); handle_out(disconnect, ?RC_PROTOCOL_ERROR, Channel);
handle_in(?CONNECT_PACKET(ConnPkt), Channel) -> handle_in(?CONNECT_PACKET(ConnPkt) = Packet, Channel) ->
case pipeline([fun enrich_conninfo/2, case pipeline([fun enrich_conninfo/2,
fun run_conn_hooks/2, fun run_conn_hooks/2,
fun check_connect/2, fun check_connect/2,
@ -285,6 +285,7 @@ handle_in(?CONNECT_PACKET(ConnPkt), Channel) ->
fun auth_connect/2 fun auth_connect/2
], ConnPkt, Channel#channel{conn_state = connecting}) of ], ConnPkt, Channel#channel{conn_state = connecting}) of
{ok, NConnPkt, NChannel = #channel{clientinfo = ClientInfo}} -> {ok, NConnPkt, NChannel = #channel{clientinfo = ClientInfo}} ->
?LOG(debug, "RECV ~s", [emqx_packet:format(Packet)]),
NChannel1 = NChannel#channel{ NChannel1 = NChannel#channel{
will_msg = emqx_packet:will_msg(NConnPkt), will_msg = emqx_packet:will_msg(NConnPkt),
alias_maximum = init_alias_maximum(NConnPkt, ClientInfo) alias_maximum = init_alias_maximum(NConnPkt, ClientInfo)
@ -619,7 +620,7 @@ ensure_quota(PubRes, Channel = #channel{quota = Limiter}) ->
-compile({inline, [puback_reason_code/1]}). -compile({inline, [puback_reason_code/1]}).
puback_reason_code([]) -> ?RC_NO_MATCHING_SUBSCRIBERS; puback_reason_code([]) -> ?RC_NO_MATCHING_SUBSCRIBERS;
puback_reason_code([_|_]) -> ?RC_SUCCESS. puback_reason_code([_ | _]) -> ?RC_SUCCESS.
-compile({inline, [after_message_acked/3]}). -compile({inline, [after_message_acked/3]}).
after_message_acked(ClientInfo, Msg, PubAckProps) -> after_message_acked(ClientInfo, Msg, PubAckProps) ->
@ -638,7 +639,7 @@ process_subscribe(TopicFilters, SubProps, Channel) ->
process_subscribe([], _SubProps, Channel, Acc) -> process_subscribe([], _SubProps, Channel, Acc) ->
{lists:reverse(Acc), Channel}; {lists:reverse(Acc), Channel};
process_subscribe([Topic = {TopicFilter, SubOpts}|More], SubProps, Channel, Acc) -> process_subscribe([Topic = {TopicFilter, SubOpts} | More], SubProps, Channel, Acc) ->
case check_sub_caps(TopicFilter, SubOpts, Channel) of case check_sub_caps(TopicFilter, SubOpts, Channel) of
ok -> ok ->
{ReasonCode, NChannel} = do_subscribe(TopicFilter, {ReasonCode, NChannel} = do_subscribe(TopicFilter,
@ -676,9 +677,9 @@ process_unsubscribe(TopicFilters, UnSubProps, Channel) ->
process_unsubscribe([], _UnSubProps, Channel, Acc) -> process_unsubscribe([], _UnSubProps, Channel, Acc) ->
{lists:reverse(Acc), Channel}; {lists:reverse(Acc), Channel};
process_unsubscribe([{TopicFilter, SubOpts}|More], UnSubProps, Channel, Acc) -> process_unsubscribe([{TopicFilter, SubOpts} | More], UnSubProps, Channel, Acc) ->
{RC, NChannel} = do_unsubscribe(TopicFilter, SubOpts#{unsub_props => UnSubProps}, Channel), {RC, NChannel} = do_unsubscribe(TopicFilter, SubOpts#{unsub_props => UnSubProps}, Channel),
process_unsubscribe(More, UnSubProps, NChannel, [RC|Acc]). process_unsubscribe(More, UnSubProps, NChannel, [RC | Acc]).
do_unsubscribe(TopicFilter, SubOpts, Channel = do_unsubscribe(TopicFilter, SubOpts, Channel =
#channel{clientinfo = ClientInfo = #{mountpoint := MountPoint}, #channel{clientinfo = ClientInfo = #{mountpoint := MountPoint},
@ -943,6 +944,17 @@ handle_call({quota, Policy}, Channel) ->
Quota = emqx_limiter:init(Zone, Policy), Quota = emqx_limiter:init(Zone, Policy),
reply(ok, Channel#channel{quota = Quota}); reply(ok, Channel#channel{quota = Quota});
handle_call({keepalive, Interval}, Channel = #channel{keepalive = KeepAlive,
conninfo = ConnInfo}) ->
ClientId = info(clientid, Channel),
NKeepalive = emqx_keepalive:set(interval, Interval * 1000, KeepAlive),
NConnInfo = maps:put(keepalive, Interval, ConnInfo),
NChannel = Channel#channel{keepalive = NKeepalive, conninfo = NConnInfo},
SockInfo = maps:get(sockinfo, emqx_cm:get_chan_info(ClientId), #{}),
ChanInfo1 = info(NChannel),
emqx_cm:set_chan_info(ClientId, ChanInfo1#{sockinfo => SockInfo}),
reply(ok, reset_timer(alive_timer, NChannel));
handle_call(Req, Channel) -> handle_call(Req, Channel) ->
?LOG(error, "Unexpected call: ~p", [Req]), ?LOG(error, "Unexpected call: ~p", [Req]),
reply(ignored, Channel). reply(ignored, Channel).
@ -1629,6 +1641,8 @@ ensure_disconnected(Reason, Channel = #channel{conninfo = ConnInfo,
clientinfo = ClientInfo}) -> clientinfo = ClientInfo}) ->
NConnInfo = ConnInfo#{disconnected_at => erlang:system_time(millisecond)}, NConnInfo = ConnInfo#{disconnected_at => erlang:system_time(millisecond)},
ok = run_hooks('client.disconnected', [ClientInfo, Reason, NConnInfo]), ok = run_hooks('client.disconnected', [ClientInfo, Reason, NConnInfo]),
ChanPid = self(),
emqx_cm:mark_channel_disconnected(ChanPid),
Channel#channel{conninfo = NConnInfo, conn_state = disconnected}. Channel#channel{conninfo = NConnInfo, conn_state = disconnected}.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -1730,4 +1744,3 @@ flag(false) -> 0.
set_field(Name, Value, Channel) -> set_field(Name, Value, Channel) ->
Pos = emqx_misc:index_of(Name, record_info(fields, channel)), Pos = emqx_misc:index_of(Name, record_info(fields, channel)),
setelement(Pos+1, Channel, Value). setelement(Pos+1, Channel, Value).

View File

@ -22,6 +22,7 @@
-include("emqx.hrl"). -include("emqx.hrl").
-include("logger.hrl"). -include("logger.hrl").
-include("types.hrl"). -include("types.hrl").
-include_lib("stdlib/include/ms_transform.hrl").
-include_lib("snabbkaffe/include/snabbkaffe.hrl"). -include_lib("snabbkaffe/include/snabbkaffe.hrl").
-logger_header("[CM]"). -logger_header("[CM]").
@ -72,7 +73,12 @@
]). ]).
%% Internal export %% Internal export
-export([stats_fun/0, clean_down/1]). -export([ stats_fun/0
, clean_down/1
, mark_channel_connected/1
, mark_channel_disconnected/1
, get_connected_client_count/0
]).
-type(chan_pid() :: pid()). -type(chan_pid() :: pid()).
@ -80,11 +86,13 @@
-define(CHAN_TAB, emqx_channel). -define(CHAN_TAB, emqx_channel).
-define(CHAN_CONN_TAB, emqx_channel_conn). -define(CHAN_CONN_TAB, emqx_channel_conn).
-define(CHAN_INFO_TAB, emqx_channel_info). -define(CHAN_INFO_TAB, emqx_channel_info).
-define(CHAN_LIVE_TAB, emqx_channel_live).
-define(CHAN_STATS, -define(CHAN_STATS,
[{?CHAN_TAB, 'channels.count', 'channels.max'}, [{?CHAN_TAB, 'channels.count', 'channels.max'},
{?CHAN_TAB, 'sessions.count', 'sessions.max'}, {?CHAN_TAB, 'sessions.count', 'sessions.max'},
{?CHAN_CONN_TAB, 'connections.count', 'connections.max'} {?CHAN_CONN_TAB, 'connections.count', 'connections.max'},
{?CHAN_LIVE_TAB, 'live_connections.count', 'live_connections.max'}
]). ]).
%% Batch drain %% Batch drain
@ -129,6 +137,7 @@ register_channel(ClientId, ChanPid, #{conn_mod := ConnMod}) when is_pid(ChanPid)
true = ets:insert(?CHAN_TAB, Chan), true = ets:insert(?CHAN_TAB, Chan),
true = ets:insert(?CHAN_CONN_TAB, {Chan, ConnMod}), true = ets:insert(?CHAN_CONN_TAB, {Chan, ConnMod}),
ok = emqx_cm_registry:register_channel(Chan), ok = emqx_cm_registry:register_channel(Chan),
mark_channel_connected(ChanPid),
cast({registered, Chan}). cast({registered, Chan}).
%% @doc Unregister a channel. %% @doc Unregister a channel.
@ -437,8 +446,10 @@ init([]) ->
ok = emqx_tables:new(?CHAN_TAB, [bag, {read_concurrency, true}|TabOpts]), ok = emqx_tables:new(?CHAN_TAB, [bag, {read_concurrency, true}|TabOpts]),
ok = emqx_tables:new(?CHAN_CONN_TAB, [bag | TabOpts]), ok = emqx_tables:new(?CHAN_CONN_TAB, [bag | TabOpts]),
ok = emqx_tables:new(?CHAN_INFO_TAB, [set, compressed | TabOpts]), ok = emqx_tables:new(?CHAN_INFO_TAB, [set, compressed | TabOpts]),
ok = emqx_tables:new(?CHAN_LIVE_TAB, [set, {write_concurrency, true} | TabOpts]),
ok = emqx_stats:update_interval(chan_stats, fun ?MODULE:stats_fun/0), ok = emqx_stats:update_interval(chan_stats, fun ?MODULE:stats_fun/0),
{ok, #{chan_pmon => emqx_pmon:new()}}. State = #{chan_pmon => emqx_pmon:new()},
{ok, State}.
handle_call(Req, _From, State) -> handle_call(Req, _From, State) ->
?LOG(error, "Unexpected call: ~p", [Req]), ?LOG(error, "Unexpected call: ~p", [Req]),
@ -447,17 +458,17 @@ handle_call(Req, _From, State) ->
handle_cast({registered, {ClientId, ChanPid}}, State = #{chan_pmon := PMon}) -> handle_cast({registered, {ClientId, ChanPid}}, State = #{chan_pmon := PMon}) ->
PMon1 = emqx_pmon:monitor(ChanPid, ClientId, PMon), PMon1 = emqx_pmon:monitor(ChanPid, ClientId, PMon),
{noreply, State#{chan_pmon := PMon1}}; {noreply, State#{chan_pmon := PMon1}};
handle_cast(Msg, State) -> handle_cast(Msg, State) ->
?LOG(error, "Unexpected cast: ~p", [Msg]), ?LOG(error, "Unexpected cast: ~p", [Msg]),
{noreply, State}. {noreply, State}.
handle_info({'DOWN', _MRef, process, Pid, _Reason}, State = #{chan_pmon := PMon}) -> handle_info({'DOWN', _MRef, process, Pid, _Reason}, State = #{chan_pmon := PMon}) ->
?tp(emqx_cm_process_down, #{pid => Pid, reason => _Reason}),
ChanPids = [Pid | emqx_misc:drain_down(?BATCH_SIZE)], ChanPids = [Pid | emqx_misc:drain_down(?BATCH_SIZE)],
{Items, PMon1} = emqx_pmon:erase_all(ChanPids, PMon), {Items, PMon1} = emqx_pmon:erase_all(ChanPids, PMon),
lists:foreach(fun mark_channel_disconnected/1, ChanPids),
ok = emqx_pool:async_submit(fun lists:foreach/2, [fun ?MODULE:clean_down/1, Items]), ok = emqx_pool:async_submit(fun lists:foreach/2, [fun ?MODULE:clean_down/1, Items]),
{noreply, State#{chan_pmon := PMon1}}; {noreply, State#{chan_pmon := PMon1}};
handle_info(Info, State) -> handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]), ?LOG(error, "Unexpected info: ~p", [Info]),
{noreply, State}. {noreply, State}.
@ -493,3 +504,18 @@ get_chann_conn_mod(ClientId, ChanPid) when node(ChanPid) == node() ->
get_chann_conn_mod(ClientId, ChanPid) -> get_chann_conn_mod(ClientId, ChanPid) ->
rpc_call(node(ChanPid), get_chann_conn_mod, [ClientId, ChanPid], ?T_GET_INFO). rpc_call(node(ChanPid), get_chann_conn_mod, [ClientId, ChanPid], ?T_GET_INFO).
mark_channel_connected(ChanPid) ->
?tp(emqx_cm_connected_client_count_inc, #{}),
ets:insert_new(?CHAN_LIVE_TAB, {ChanPid, true}),
ok.
mark_channel_disconnected(ChanPid) ->
?tp(emqx_cm_connected_client_count_dec, #{}),
ets:delete(?CHAN_LIVE_TAB, ChanPid),
ok.
get_connected_client_count() ->
case ets:info(?CHAN_LIVE_TAB, size) of
undefined -> 0;
Size -> Size
end.

View File

@ -517,8 +517,8 @@ terminate(Reason, State = #state{channel = Channel, transport = Transport,
E : C : S -> E : C : S ->
?tp(warning, unclean_terminate, #{exception => E, context => C, stacktrace => S}) ?tp(warning, unclean_terminate, #{exception => E, context => C, stacktrace => S})
end, end,
?tp(debug, terminate, #{reason => Reason}), ?tp(info, terminate, #{reason => Reason}),
maybe_raise_excption(Reason). maybe_raise_exception(Reason).
%% close socket, discard new state, always return ok. %% close socket, discard new state, always return ok.
close_socket_ok(State) -> close_socket_ok(State) ->
@ -526,12 +526,12 @@ close_socket_ok(State) ->
ok. ok.
%% tell truth about the original exception %% tell truth about the original exception
maybe_raise_excption(#{exception := Exception, maybe_raise_exception(#{exception := Exception,
context := Context, context := Context,
stacktrace := Stacktrace stacktrace := Stacktrace
}) -> }) ->
erlang:raise(Exception, Context, Stacktrace); erlang:raise(Exception, Context, Stacktrace);
maybe_raise_excption(Reason) -> maybe_raise_exception(Reason) ->
exit(Reason). exit(Reason).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------

View File

@ -39,6 +39,8 @@
, from_base62/1 , from_base62/1
]). ]).
-elvis([{elvis_style, dont_repeat_yourself, disable}]).
-define(TAG_VERSION, 131). -define(TAG_VERSION, 131).
-define(PID_EXT, 103). -define(PID_EXT, 103).
-define(NEW_PID_EXT, 88). -define(NEW_PID_EXT, 88).
@ -137,7 +139,7 @@ npid() ->
NPid. NPid.
to_hexstr(I) when byte_size(I) =:= 16 -> to_hexstr(I) when byte_size(I) =:= 16 ->
emqx_misc:bin2hexstr_A_F(I). emqx_misc:bin2hexstr_a_f_upper(I).
from_hexstr(S) when byte_size(S) =:= 32 -> from_hexstr(S) when byte_size(S) =:= 32 ->
emqx_misc:hexstr2bin(S). emqx_misc:hexstr2bin(S).

View File

@ -20,9 +20,11 @@
, info/1 , info/1
, info/2 , info/2
, check/2 , check/2
, set/3
]). ]).
-export_type([keepalive/0]). -export_type([keepalive/0]).
-elvis([{elvis_style, no_if_expression, disable}]).
-record(keepalive, { -record(keepalive, {
interval :: pos_integer(), interval :: pos_integer(),
@ -49,7 +51,7 @@ info(#keepalive{interval = Interval,
repeat => Repeat repeat => Repeat
}. }.
-spec(info(interval|statval|repeat, keepalive()) -spec(info(interval | statval | repeat, keepalive())
-> non_neg_integer()). -> non_neg_integer()).
info(interval, #keepalive{interval = Interval}) -> info(interval, #keepalive{interval = Interval}) ->
Interval; Interval;
@ -71,3 +73,19 @@ check(NewVal, KeepAlive = #keepalive{statval = OldVal,
true -> {error, timeout} true -> {error, timeout}
end. end.
%% from mqtt-v3.1.1 specific
%% A Keep Alive value of zero (0) has the effect of turning off the keep alive mechanism.
%% This means that, in this case, the Server is not required
%% to disconnect the Client on the grounds of inactivity.
%% Note that a Server is permitted to disconnect a Client that it determines
%% to be inactive or non-responsive at any time,
%% regardless of the Keep Alive value provided by that Client.
%% Non normative comment
%%The actual value of the Keep Alive is application specific;
%% typically this is a few minutes.
%% The maximum value is (65535s) 18 hours 12 minutes and 15 seconds.
%% @doc Update keepalive's interval
-spec(set(interval, non_neg_integer(), keepalive()) -> keepalive()).
set(interval, Interval, KeepAlive) when Interval >= 0 andalso Interval =< 65535000 ->
KeepAlive#keepalive{interval = Interval}.

Some files were not shown because too many files have changed in this diff Show More