Merge branch 'main-v4.4' into copy-of-main-v4.3

This commit is contained in:
zhongwencool 2022-08-25 17:37:55 +08:00
commit 26956784ff
164 changed files with 8166 additions and 3287 deletions

View File

@ -1,16 +0,0 @@
ARG BUILD_FROM=emqx/build-env:erl23.3.4.9-3-ubuntu20.04
FROM ${BUILD_FROM}
ARG EMQX_NAME=emqx
COPY . /emqx
WORKDIR /emqx
RUN rm -rf _build/${EMQX_NAME}/lib _build/${EMQX_NAME}-pkg/lib
RUN make ${EMQX_NAME}-zip || cat rebar3.crashdump
RUN make ${EMQX_NAME}-pkg || cat rebar3.crashdump
RUN /emqx/.ci/build_packages/tests.sh

View File

@ -1,28 +1,56 @@
#!/bin/bash #!/usr/bin/env bash
## This script tests built package start/stop
## Accept 2 args PROFILE and PACKAGE_TYPE
set -x -e -u set -x -e -u
if [ -z "${1:-}" ]; then
echo "Usage $0 <PROFILE> e.g. emqx, emqx-edge"
exit 1
fi
if [ "${2:-}" != 'zip' ] && [ "${2:-}" != 'pkg' ]; then
echo "Usage $0 <PACKAGE_NAME> zip|pkg"
exit 1
fi
PROFILE="${1}"
PACKAGE_TYPE="${2}"
export CODE_PATH=${CODE_PATH:-"/emqx"} export CODE_PATH=${CODE_PATH:-"/emqx"}
export EMQX_NAME=${EMQX_NAME:-"emqx"} export PACKAGE_PATH="${CODE_PATH}/_packages/${PROFILE}"
export PACKAGE_PATH="${CODE_PATH}/_packages/${EMQX_NAME}"
export RELUP_PACKAGE_PATH="${CODE_PATH}/_upgrade_base" export RELUP_PACKAGE_PATH="${CODE_PATH}/_upgrade_base"
# export EMQX_NODE_NAME="emqx-on-$(uname -m)@127.0.0.1" # export EMQX_NODE_NAME="emqx-on-$(uname -m)@127.0.0.1"
# export EMQX_NODE_COOKIE=$(date +%s%N) # export EMQX_NODE_COOKIE=$(date +%s%N)
case "$(uname -m)" in SYSTEM="$("$CODE_PATH"/scripts/get-distro.sh)"
x86_64)
ARCH='amd64' if [ "$PACKAGE_TYPE" = 'zip' ]; then
;; PKG_SUFFIX="zip"
aarch64) else
ARCH='arm64' case "${SYSTEM:-}" in
;; ubuntu*|debian*|raspbian*)
arm*) PKG_SUFFIX='deb'
ARCH=arm ;;
;; *)
esac PKG_SUFFIX='rpm'
export ARCH ;;
esac
fi
PACKAGE_NAME="${PROFILE}-$("$CODE_PATH"/scripts/pkg-full-vsn.sh)"
OLD_PACKAGE_PATTERN="${PROFILE}-$("$CODE_PATH"/scripts/pkg-full-vsn.sh 'vsn_matcher')"
PACKAGE_FILE_NAME="${PACKAGE_NAME}.${PKG_SUFFIX}"
PACKAGE_FILE="${PACKAGE_PATH}/${PACKAGE_FILE_NAME}"
if ! [ -f "$PACKAGE_FILE" ]; then
echo "$PACKAGE_FILE is not a file"
exit 1
fi
emqx_prepare(){ emqx_prepare(){
mkdir -p "${PACKAGE_PATH}" mkdir -p "${PACKAGE_PATH}"
if [ ! -d "/paho-mqtt-testing" ]; then if [ ! -d "/paho-mqtt-testing" ]; then
git clone -b develop-4.0 https://github.com/emqx/paho.mqtt.testing.git /paho-mqtt-testing git clone -b develop-4.0 https://github.com/emqx/paho.mqtt.testing.git /paho-mqtt-testing
fi fi
@ -31,83 +59,80 @@ emqx_prepare(){
emqx_test(){ emqx_test(){
cd "${PACKAGE_PATH}" cd "${PACKAGE_PATH}"
local packagename="${PACKAGE_FILE_NAME}"
case "$PKG_SUFFIX" in
"zip")
unzip -q "${PACKAGE_PATH}/${packagename}"
export EMQX_ZONE__EXTERNAL__SERVER__KEEPALIVE=60 \
EMQX_MQTT__MAX_TOPIC_ALIAS=10
sed -i '/emqx_telemetry/d' "${PACKAGE_PATH}"/emqx/data/loaded_plugins
for var in "$PACKAGE_PATH"/"${EMQX_NAME}"-*;do echo "running ${packagename} start"
case ${var##*.} in if ! "${PACKAGE_PATH}"/emqx/bin/emqx start; then
"zip") cat "${PACKAGE_PATH}"/emqx/log/erlang.log.1 || true
packagename=$(basename "${PACKAGE_PATH}/${EMQX_NAME}"-*.zip) cat "${PACKAGE_PATH}"/emqx/log/emqx.log.1 || true
unzip -q "${PACKAGE_PATH}/${packagename}" exit 1
export EMQX_ZONE__EXTERNAL__SERVER__KEEPALIVE=60 \ fi
EMQX_MQTT__MAX_TOPIC_ALIAS=10 IDLE_TIME=0
sed -i '/emqx_telemetry/d' "${PACKAGE_PATH}"/emqx/data/loaded_plugins while ! "${PACKAGE_PATH}"/emqx/bin/emqx_ctl status | grep -qE 'Node\s.*@.*\sis\sstarted'
do
echo "running ${packagename} start" if [ $IDLE_TIME -gt 10 ]
"${PACKAGE_PATH}"/emqx/bin/emqx start || ( tail "${PACKAGE_PATH}"/emqx/log/emqx.log.1 && exit 1 )
IDLE_TIME=0
while ! "${PACKAGE_PATH}"/emqx/bin/emqx_ctl status | grep -qE 'Node\s.*@.*\sis\sstarted'
do
if [ $IDLE_TIME -gt 10 ]
then
echo "emqx running error"
exit 1
fi
sleep 10
IDLE_TIME=$((IDLE_TIME+1))
done
pytest -v /paho-mqtt-testing/interoperability/test_client/V5/test_connect.py::test_basic
"${PACKAGE_PATH}"/emqx/bin/emqx stop
echo "running ${packagename} stop"
rm -rf "${PACKAGE_PATH}"/emqx
;;
"deb")
packagename=$(basename "${PACKAGE_PATH}/${EMQX_NAME}"-*.deb)
dpkg -i "${PACKAGE_PATH}/${packagename}"
if [ "$(dpkg -l |grep emqx |awk '{print $1}')" != "ii" ]
then then
echo "package install error" echo "emqx running error"
exit 1 exit 1
fi fi
sleep 10
IDLE_TIME=$((IDLE_TIME+1))
done
pytest -v /paho-mqtt-testing/interoperability/test_client/V5/test_connect.py::test_basic
"${PACKAGE_PATH}"/emqx/bin/emqx stop
echo "running ${packagename} stop"
rm -rf "${PACKAGE_PATH}"/emqx
;;
"deb")
dpkg -i "${PACKAGE_PATH}/${packagename}"
if [ "$(dpkg -l |grep emqx |awk '{print $1}')" != "ii" ]
then
echo "package install error"
exit 1
fi
echo "running ${packagename} start" echo "running ${packagename} start"
running_test running_test
echo "running ${packagename} stop" echo "running ${packagename} stop"
dpkg -r "${EMQX_NAME}" dpkg -r "${PROFILE}"
if [ "$(dpkg -l |grep emqx |awk '{print $1}')" != "rc" ] if [ "$(dpkg -l |grep emqx |awk '{print $1}')" != "rc" ]
then then
echo "package remove error" echo "package remove error"
exit 1 exit 1
fi fi
dpkg -P "${EMQX_NAME}" dpkg -P "${PROFILE}"
if dpkg -l |grep -q emqx if dpkg -l |grep -q emqx
then then
echo "package uninstall error" echo "package uninstall error"
exit 1 exit 1
fi fi
;; ;;
"rpm") "rpm")
packagename=$(basename "${PACKAGE_PATH}/${EMQX_NAME}"-*.rpm) yum install -y "${PACKAGE_PATH}/${packagename}"
if ! rpm -q "${PROFILE}" | grep -q "${PROFILE}"; then
echo "package install error"
exit 1
fi
rpm -ivh "${PACKAGE_PATH}/${packagename}" echo "running ${packagename} start"
if ! rpm -q emqx | grep -q emqx; then running_test
echo "package install error" echo "running ${packagename} stop"
exit 1
fi
echo "running ${packagename} start" rpm -e "${PROFILE}"
running_test if [ "$(rpm -q emqx)" != "package emqx is not installed" ];then
echo "running ${packagename} stop" echo "package uninstall error"
exit 1
rpm -e "${EMQX_NAME}" fi
if [ "$(rpm -q emqx)" != "package emqx is not installed" ];then ;;
echo "package uninstall error" esac
exit 1
fi
;;
esac
done
} }
running_test(){ running_test(){
@ -115,7 +140,11 @@ running_test(){
EMQX_MQTT__MAX_TOPIC_ALIAS=10 EMQX_MQTT__MAX_TOPIC_ALIAS=10
sed -i '/emqx_telemetry/d' /var/lib/emqx/loaded_plugins sed -i '/emqx_telemetry/d' /var/lib/emqx/loaded_plugins
emqx start || ( tail /var/log/emqx/emqx.log.1 && exit 1 ) if ! emqx start; then
cat /var/log/emqx/erlang.log.1 || true
cat /var/log/emqx/emqx.log.1 || true
exit 1
fi
IDLE_TIME=0 IDLE_TIME=0
while ! emqx_ctl status | grep -qE 'Node\s.*@.*\sis\sstarted' while ! emqx_ctl status | grep -qE 'Node\s.*@.*\sis\sstarted'
do do
@ -134,45 +163,43 @@ running_test(){
relup_test(){ relup_test(){
TARGET_VERSION="$("$CODE_PATH"/pkg-vsn.sh)" TARGET_VERSION="$("$CODE_PATH"/pkg-vsn.sh)"
if [ -d "${RELUP_PACKAGE_PATH}" ];then if [ ! -d "${RELUP_PACKAGE_PATH}" ];then
cd "${RELUP_PACKAGE_PATH}" return 0
fi
cd "${RELUP_PACKAGE_PATH}"
while read -r pkg; do
packagename=$(basename "${pkg}")
unzip -q "$packagename"
if ! ./emqx/bin/emqx start; then
cat emqx/log/erlang.log.1 || true
cat emqx/log/emqx.log.1 || true
exit 1
fi
./emqx/bin/emqx_ctl status
./emqx/bin/emqx versions
OldVsn="$(./emqx/bin/emqx eval 'Versions=[{S, V} || {_,V,_, S} <- release_handler:which_releases()],
Current = proplists:get_value(current, Versions, proplists:get_value(permanent, Versions)),
io:format("~s", [Current])')"
cp "${PACKAGE_PATH}/${PROFILE}-${TARGET_VERSION}"-*.zip ./emqx/releases/
./emqx/bin/emqx install "${TARGET_VERSION}"
[ "$(./emqx/bin/emqx versions |grep permanent | awk '{print $2}')" = "${TARGET_VERSION}" ] || exit 1
export EMQX_WAIT_FOR_STOP=300
./emqx/bin/emqx_ctl status
find . -maxdepth 1 -name "${EMQX_NAME}-*-${ARCH}.zip" | # also test remove old rel
while read -r pkg; do ./emqx/bin/emqx uninstall "$OldVsn"
if [[ "${pkg}" == *4.3.13* ]]; then
echo "skipping upgrade test from 4.3.13 because this release had crypto linked with openssl 1.1.1n, it was in later version rolled back (to default 1.1.1k)."
continue
fi
packagename=$(basename "${pkg}")
unzip -q "$packagename"
./emqx/bin/emqx start || ( tail emqx/log/emqx.log.1 && exit 1 )
./emqx/bin/emqx_ctl status
./emqx/bin/emqx versions
OldVsn="$(./emqx/bin/emqx eval 'Versions=[{S, V} || {_,V,_, S} <- release_handler:which_releases()],
Current = proplists:get_value(current, Versions, proplists:get_value(permanent, Versions)),
io:format("~s", [Current])')"
cp "${PACKAGE_PATH}/${EMQX_NAME}"-*-"${TARGET_VERSION}-${ARCH}".zip ./emqx/releases
./emqx/bin/emqx install "${TARGET_VERSION}"
[ "$(./emqx/bin/emqx versions |grep permanent | awk '{print $2}')" = "${TARGET_VERSION}" ] || exit 1
export EMQX_WAIT_FOR_STOP=300
./emqx/bin/emqx_ctl status
# also test remove old rel # check emqx still runs
./emqx/bin/emqx uninstall "$OldVsn" ./emqx/bin/emqx ping
# check emqx still runs if ! ./emqx/bin/emqx stop; then
./emqx/bin/emqx ping cat emqx/log/erlang.log.1 || true
cat emqx/log/emqx.log.1 || true
if ! ./emqx/bin/emqx stop; then echo "failed to stop emqx"
cat emqx/log/erlang.log.1 || true exit 1
cat emqx/log/emqx.log.1 || true fi
echo "failed to stop emqx" rm -rf emqx
exit 1 done < <(find . -maxdepth 1 -name "${OLD_PACKAGE_PATTERN}.zip")
fi
rm -rf emqx
done
fi
} }
emqx_prepare emqx_prepare

View File

@ -3,7 +3,7 @@ version: '3.9'
services: services:
erlang: erlang:
container_name: erlang container_name: erlang
image: emqx/build-env:erl23.3.4.9-3-ubuntu20.04 image: ghcr.io/emqx/emqx-builder/4.4-19:24.1.5-3-ubuntu20.04
env_file: env_file:
- conf.env - conf.env
environment: environment:

View File

@ -15,6 +15,8 @@ PROFILE="$1"
VSN="$2" VSN="$2"
OLD_VSN="$3" OLD_VSN="$3"
PACKAGE_PATH="$4" PACKAGE_PATH="$4"
FROM_OTP_VSN="${5:-24.1.5-3}"
TO_OTP_VSN="${6:-24.1.5-3}"
TEMPDIR=$(mktemp -d) TEMPDIR=$(mktemp -d)
trap '{ rm -rf -- "$TEMPDIR"; }' EXIT trap '{ rm -rf -- "$TEMPDIR"; }' EXIT
@ -37,4 +39,6 @@ exec docker run \
--var ONE_MORE_EMQX_PATH="/relup_test/one_more_emqx" \ --var ONE_MORE_EMQX_PATH="/relup_test/one_more_emqx" \
--var VSN="$VSN" \ --var VSN="$VSN" \
--var OLD_VSN="$OLD_VSN" \ --var OLD_VSN="$OLD_VSN" \
--var FROM_OTP_VSN="$FROM_OTP_VSN" \
--var TO_OTP_VSN="$TO_OTP_VSN" \
relup.lux relup.lux

View File

@ -3,6 +3,8 @@
[config var=ONE_MORE_EMQX_PATH] [config var=ONE_MORE_EMQX_PATH]
[config var=VSN] [config var=VSN]
[config var=OLD_VSN] [config var=OLD_VSN]
[config var=FROM_OTP_VSN]
[config var=TO_OTP_VSN]
[config shell_cmd=/bin/bash] [config shell_cmd=/bin/bash]
[config timeout=600000] [config timeout=600000]
@ -20,7 +22,7 @@
[shell emqx] [shell emqx]
!OLD_VSN=$(echo $OLD_VSN | sed -r 's/[v|e]//g') !OLD_VSN=$(echo $OLD_VSN | sed -r 's/[v|e]//g')
!cd $PACKAGE_PATH !cd $PACKAGE_PATH
!unzip -q -o $PROFILE-ubuntu20.04-$${OLD_VSN}-amd64.zip !unzip -q -o $PROFILE-$(echo $OLD_VSN | sed -r 's/[v|e]//g')-otp${FROM_OTP_VSN}-ubuntu20.04-amd64.zip
?SH-PROMPT ?SH-PROMPT
!cd emqx !cd emqx
@ -76,13 +78,14 @@
[shell bench] [shell bench]
!emqtt_bench pub -c 10 -I 1000 -t t/%i -s 64 -L 300 !emqtt_bench pub -c 10 -I 1000 -t t/%i -s 64 -L 300
???sent # e.g. Start with 20 workers, addrs pool size: 1 and req interval: 200 ms
?^Start
[shell emqx] [shell emqx]
!echo "" > log/emqx.log.1 !echo "" > log/emqx.log.1
?SH-PROMPT ?SH-PROMPT
!cp -f ../$PROFILE-ubuntu20.04-$VSN-amd64.zip releases/ !cp -f ../$PROFILE-$VSN-otp${TO_OTP_VSN}-ubuntu20.04-amd64.zip releases/
## upgrade to the new version ## upgrade to the new version
!./bin/emqx install $VSN !./bin/emqx install $VSN
@ -128,7 +131,7 @@
!echo "" > log/emqx.log.1 !echo "" > log/emqx.log.1
?SH-PROMPT ?SH-PROMPT
!cp -f ../$PROFILE-ubuntu20.04-$VSN-amd64.zip releases/ !cp -f ../$PROFILE-$VSN-otp${TO_OTP_VSN}-ubuntu20.04-amd64.zip releases/
## upgrade to the new version ## upgrade to the new version
!./bin/emqx install $VSN !./bin/emqx install $VSN
@ -171,8 +174,8 @@
?SH-PROMPT ?SH-PROMPT
[shell bench] [shell bench]
???publish complete ?publish complete
??SH-PROMPT: ?SH-PROMPT
!sleep 30 !sleep 30
?SH-PROMPT ?SH-PROMPT

1
.gitattributes vendored
View File

@ -1,3 +1,4 @@
build text eol=lf
* text=auto * text=auto
*.* text eol=lf *.* text eol=lf
*.jpg -text *.jpg -text

View File

@ -9,11 +9,11 @@ jobs:
strategy: strategy:
matrix: matrix:
erl_otp: erl_otp:
- erl23.3.4.9-3 - 24.1.5-3
os: os:
- ubuntu20.04 - ubuntu20.04
container: emqx/build-env:${{ matrix.erl_otp }}-${{ matrix.os }} container: ghcr.io/emqx/emqx-builder/4.4-19:${{ matrix.erl_otp }}-${{ matrix.os }}
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
@ -21,6 +21,10 @@ jobs:
fetch-depth: 0 # need full history fetch-depth: 0 # need full history
- name: fix-git-unsafe-repository - name: fix-git-unsafe-repository
run: git config --global --add safe.directory /__w/emqx/emqx run: git config --global --add safe.directory /__w/emqx/emqx
- name: Check relup version DB
run: |
PKG_VSN=$(./pkg-vsn.sh)
./scripts/relup-base-vsns.escript check-vsn-db $PKG_VSN ./data/relup-paths.eterm
- name: Check relup (ce) - name: Check relup (ce)
if: endsWith(github.repository, 'emqx') if: endsWith(github.repository, 'emqx')
run: ./scripts/update-appup.sh emqx --check run: ./scripts/update-appup.sh emqx --check

View File

@ -18,8 +18,8 @@ on:
jobs: jobs:
prepare: prepare:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
container: emqx/build-env:erl23.3.4.9-3-ubuntu20.04 # prepare source with any OTP version, no need for a matrix
container: ghcr.io/emqx/emqx-builder/4.4-19:24.1.5-3-ubuntu20.04
outputs: outputs:
profiles: ${{ steps.set_profile.outputs.profiles}} profiles: ${{ steps.set_profile.outputs.profiles}}
@ -31,8 +31,9 @@ jobs:
- name: set profile - name: set profile
id: set_profile id: set_profile
shell: bash shell: bash
working-directory: source
run: | run: |
cd source git config --global --add safe.directory "$GITHUB_WORKSPACE"
if make emqx-ee --dry-run > /dev/null 2>&1; then if make emqx-ee --dry-run > /dev/null 2>&1; then
echo "::set-output name=profiles::[\"emqx-ee\"]" echo "::set-output name=profiles::[\"emqx-ee\"]"
else else
@ -43,7 +44,7 @@ jobs:
run: | run: |
make -C source deps-all make -C source deps-all
zip -ryq source.zip source/* source/.[^.]* zip -ryq source.zip source/* source/.[^.]*
- name: get_all_deps - name: get_all_deps_ee
if: endsWith(github.repository, 'enterprise') if: endsWith(github.repository, 'enterprise')
run: | run: |
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
@ -60,13 +61,13 @@ jobs:
needs: prepare needs: prepare
if: endsWith(github.repository, 'emqx') if: endsWith(github.repository, 'emqx')
strategy: strategy:
fail-fast: false
matrix: matrix:
profile: ${{fromJSON(needs.prepare.outputs.profiles)}} profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
otp: otp:
- 23.3.4.13 - 24.2.1
exclude: exclude:
- profile: emqx-edge - profile: emqx-edge
steps: steps:
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v2
with: with:
@ -82,56 +83,42 @@ jobs:
env: env:
PYTHON: python PYTHON: python
DIAGNOSTIC: 1 DIAGNOSTIC: 1
PROFILE: emqx
working-directory: source
run: | run: |
erl -eval "erlang:display(crypto:info_lib())" -s init stop erl -eval "erlang:display(crypto:info_lib())" -s init stop
$version = $( "${{ github.ref }}" -replace "^(.*)/(.*)/" ) make ${{ matrix.profile }}-zip
if ($version -match "^v[0-9]+\.[0-9]+(\.[0-9]+)?") {
$regex = "[0-9]+\.[0-9]+(-alpha|-beta|-rc)?\.[0-9]+"
$pkg_name = "${{ matrix.profile }}-windows-$([regex]::matches($version, $regex).value).zip"
}
else {
$pkg_name = "${{ matrix.profile }}-windows-$($version -replace '/').zip"
}
cd source
## We do not build/release bcrypt for windows package
Remove-Item -Recurse -Force -Path _build/default/lib/bcrypt/
if (Test-Path rebar.lock) {
Remove-Item -Force -Path rebar.lock
}
make ensure-rebar3
make ${{ matrix.profile }}
mkdir -p _packages/${{ matrix.profile }}
Compress-Archive -Path _build/${{ matrix.profile }}/rel/emqx -DestinationPath _build/${{ matrix.profile }}/rel/$pkg_name
mv _build/${{ matrix.profile }}/rel/$pkg_name _packages/${{ matrix.profile }}
sha256sum "_packages/${{ matrix.profile }}/$pkg_name" | head -c 64 > "_packages/${{ matrix.profile }}/${pkg_name}.sha256"
- name: run emqx - name: run emqx
timeout-minutes: 1 timeout-minutes: 1
working-directory: source
run: | run: |
cd source
./_build/${{ matrix.profile }}/rel/emqx/bin/emqx start ./_build/${{ matrix.profile }}/rel/emqx/bin/emqx start
Start-Sleep -s 5 Start-Sleep -s 5
echo "EMQX started"
./_build/${{ matrix.profile }}/rel/emqx/bin/emqx stop ./_build/${{ matrix.profile }}/rel/emqx/bin/emqx stop
echo "EMQX stopped"
./_build/${{ matrix.profile }}/rel/emqx/bin/emqx install ./_build/${{ matrix.profile }}/rel/emqx/bin/emqx install
echo "EQMX installed"
./_build/${{ matrix.profile }}/rel/emqx/bin/emqx uninstall ./_build/${{ matrix.profile }}/rel/emqx/bin/emqx uninstall
echo "EQMX uninstaled"
- uses: actions/upload-artifact@v1 - uses: actions/upload-artifact@v1
with: with:
name: ${{ matrix.profile }} name: ${{ matrix.profile }}-windows
path: source/_packages/${{ matrix.profile }}/. path: source/_packages/${{ matrix.profile }}/.
mac: mac:
needs: prepare needs: prepare
strategy: strategy:
fail-fast: false
matrix: matrix:
profile: ${{fromJSON(needs.prepare.outputs.profiles)}} profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
erl_otp: otp:
- 23.3.4.9-3 - 24.1.5-3
macos:
- macos-11
exclude: exclude:
- profile: emqx-edge - profile: emqx-edge
macos: runs-on: ${{ matrix.macos }}
- macos-10.15
runs-on: ${{ matrix.macos }}
steps: steps:
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v2
with: with:
@ -148,8 +135,8 @@ jobs:
- uses: actions/cache@v2 - uses: actions/cache@v2
id: cache id: cache
with: with:
path: ~/.kerl/${{ matrix.erl_otp }} path: ~/.kerl/${{ matrix.otp }}
key: otp-install-${{ matrix.erl_otp }}-${{ matrix.macos }} key: otp-install-${{ matrix.otp }}-${{ matrix.macos }}
- name: build erlang - name: build erlang
if: steps.cache.outputs.cache-hit != 'true' if: steps.cache.outputs.cache-hit != 'true'
timeout-minutes: 60 timeout-minutes: 60
@ -158,21 +145,22 @@ jobs:
OTP_GITHUB_URL: https://github.com/emqx/otp OTP_GITHUB_URL: https://github.com/emqx/otp
run: | run: |
kerl update releases kerl update releases
kerl build ${{ matrix.erl_otp }} kerl build ${{ matrix.otp }}
kerl install ${{ matrix.erl_otp }} $HOME/.kerl/${{ matrix.erl_otp }} kerl install ${{ matrix.otp }} $HOME/.kerl/${{ matrix.otp }}
- name: build - name: build
working-directory: source
run: | run: |
. $HOME/.kerl/${{ matrix.erl_otp }}/activate . $HOME/.kerl/${{ matrix.otp }}/activate
cd source
make ensure-rebar3 make ensure-rebar3
sudo cp rebar3 /usr/local/bin/rebar3 sudo cp rebar3 /usr/local/bin/rebar3
rm -rf _build/${{ matrix.profile }}/lib rm -rf _build/${{ matrix.profile }}/lib
make ${{ matrix.profile }}-zip make ${{ matrix.profile }}-zip
- name: test - name: test
working-directory: source
run: | run: |
cd source set -x
pkg_name=$(basename _packages/${{ matrix.profile }}/${{ matrix.profile }}-*.zip) pkg_name=$(find _packages/${{ matrix.profile }} -mindepth 1 -maxdepth 1 -iname \*.zip)
unzip -q _packages/${{ matrix.profile }}/$pkg_name unzip -q $pkg_name
gsed -i '/emqx_telemetry/d' ./emqx/data/loaded_plugins gsed -i '/emqx_telemetry/d' ./emqx/data/loaded_plugins
./emqx/bin/emqx start || cat emqx/log/erlang.log.1 ./emqx/bin/emqx start || cat emqx/log/erlang.log.1
ready='no' ready='no'
@ -196,11 +184,9 @@ jobs:
exit 1 exit 1
fi fi
rm -rf emqx rm -rf emqx
#sha256sum ./_packages/${{ matrix.profile }}/$pkg_name | head -c64 > ./_packages/${{ matrix.profile }}/$pkg_name.sha256
openssl dgst -sha256 ./_packages/${{ matrix.profile }}/$pkg_name | awk '{print $2}' > ./_packages/${{ matrix.profile }}/$pkg_name.sha256
- uses: actions/upload-artifact@v1 - uses: actions/upload-artifact@v1
with: with:
name: ${{ matrix.profile }} name: ${{ matrix.profile }}-${{ matrix.otp }}
path: source/_packages/${{ matrix.profile }}/. path: source/_packages/${{ matrix.profile }}/.
linux: linux:
@ -212,6 +198,11 @@ jobs:
fail-fast: false fail-fast: false
matrix: matrix:
profile: ${{fromJSON(needs.prepare.outputs.profiles)}} profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
package:
- zip
- pkg
otp:
- 24.1.5-3
arch: arch:
- amd64 - amd64
- arm64 - arm64
@ -219,20 +210,13 @@ jobs:
- ubuntu20.04 - ubuntu20.04
- ubuntu18.04 - ubuntu18.04
- ubuntu16.04 - ubuntu16.04
- debian11
- debian10 - debian10
- debian9 - debian9
# - opensuse - el8
- centos8 - el7
- centos7
- centos6
# - raspbian10 #armv6l is too slow to emulate # - raspbian10 #armv6l is too slow to emulate
# - raspbian9
otp_version:
#- 23.2.7.2-emqx-3
- 23.3.4.9-3
exclude: exclude:
- os: centos6
arch: arm64
- os: raspbian9 - os: raspbian9
arch: amd64 arch: amd64
- os: raspbian10 - os: raspbian10
@ -251,15 +235,11 @@ jobs:
shell: bash shell: bash
steps: steps:
- name: prepare docker - uses: docker/setup-buildx-action@v1
run: | - uses: docker/setup-qemu-action@v1
mkdir -p $HOME/.docker with:
echo '{ "experimental": "enabled" }' | tee $HOME/.docker/config.json image: tonistiigi/binfmt:latest
echo '{ "experimental": true, "storage-driver": "overlay2", "max-concurrent-downloads": 50, "max-concurrent-uploads": 50}' | sudo tee /etc/docker/daemon.json platforms: all
sudo systemctl restart docker
docker info
docker buildx create --use --name mybuild
docker run --rm --privileged tonistiigi/binfmt --install all
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v2
with: with:
name: source name: source
@ -268,63 +248,33 @@ jobs:
run: unzip -q source.zip run: unzip -q source.zip
- name: build emqx packages - name: build emqx packages
env: env:
ERL_OTP: erl${{ matrix.otp_version }} OTP: ${{ matrix.otp }}
PROFILE: ${{ matrix.profile }} PROFILE: ${{ matrix.profile }}
PACKAGE: ${{ matrix.package}}
ARCH: ${{ matrix.arch }} ARCH: ${{ matrix.arch }}
SYSTEM: ${{ matrix.os }} SYSTEM: ${{ matrix.os }}
working-directory: source
run: | run: |
set -e -u ./scripts/buildx.sh \
cd source --profile "${PROFILE}" \
docker buildx build --no-cache \ --pkgtype "${PACKAGE}" \
--platform=linux/$ARCH \ --arch "${ARCH}" \
-t cross_build_emqx_for_$SYSTEM \ --builder "ghcr.io/emqx/emqx-builder/4.4-19:${OTP}-${SYSTEM}"
-f .ci/build_packages/Dockerfile \
--build-arg BUILD_FROM=emqx/build-env:$ERL_OTP-$SYSTEM \
--build-arg EMQX_NAME=$PROFILE \
--output type=tar,dest=/tmp/cross-build-$PROFILE-for-$SYSTEM.tar .
mkdir -p /tmp/packages/$PROFILE
tar -xvf /tmp/cross-build-$PROFILE-for-$SYSTEM.tar --wildcards emqx/_packages/$PROFILE/*
mv emqx/_packages/$PROFILE/* /tmp/packages/$PROFILE/
rm -rf /tmp/cross-build-$PROFILE-for-$SYSTEM.tar
docker rm -f $(docker ps -a -q)
docker volume prune -f
- name: create sha256
env:
PROFILE: ${{ matrix.profile }}
ERL_OTP: erl${{ matrix.otp_version }}
ARCH: ${{ matrix.arch }}
run: |
if [ -d /tmp/packages/$PROFILE ]; then
cd /tmp/packages/$PROFILE
for var in $(ls emqx-* ); do
if [[ $ERL_OTP == erl23.2* ]]; then
# Keep package with new OTP as default
# But move package with old otp to track 2
echo "rename track 2 package"
oldfile="$var"
var="${var/${ARCH}/otp23.2-${ARCH}}"
mv "$oldfile" "$var"
fi
bash -c "echo $(sha256sum $var | awk '{print $1}') > $var.sha256"
done
cd -
fi
- uses: actions/upload-artifact@v1 - uses: actions/upload-artifact@v1
with: with:
name: ${{ matrix.profile }} name: ${{ matrix.profile }}-${{ matrix.otp }}
path: /tmp/packages/${{ matrix.profile }}/. path: source/_packages/${{ matrix.profile }}/.
docker: docker:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
needs: prepare needs: prepare
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
profile: ${{fromJSON(needs.prepare.outputs.profiles)}} profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
otp:
- 24.1.5-3
registry: registry:
- 'docker.io' - 'docker.io'
- 'public.ecr.aws' - 'public.ecr.aws'
@ -367,7 +317,7 @@ jobs:
images: ${{ matrix.registry }}/${{ github.repository_owner }}/${{ matrix.profile }} images: ${{ matrix.registry }}/${{ github.repository_owner }}/${{ matrix.profile }}
## only 5.0 is latest ## only 5.0 is latest
flavor: | flavor: |
latest=false latest=false # latest is now 5.0
tags: | tags: |
type=ref,event=branch type=ref,event=branch
type=ref,event=pr type=ref,event=pr
@ -385,8 +335,8 @@ jobs:
tags: ${{ steps.meta.outputs.tags }} tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }} labels: ${{ steps.meta.outputs.labels }}
build-args: | build-args: |
BUILD_FROM=emqx/build-env:erl23.3.4.9-3-alpine BUILD_FROM=ghcr.io/emqx/emqx-builder/4.4-19:${{ matrix.otp }}-alpine3.15.1
RUN_FROM=alpine:3.12 RUN_FROM=alpine:3.15.1
EMQX_NAME=${{ matrix.profile }} EMQX_NAME=${{ matrix.profile }}
file: source/deploy/docker/Dockerfile file: source/deploy/docker/Dockerfile
context: source context: source
@ -401,8 +351,8 @@ jobs:
tags: ${{ steps.meta.outputs.tags }} tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }} labels: ${{ steps.meta.outputs.labels }}
build-args: | build-args: |
BUILD_FROM=emqx/build-env:erl23.3.4.9-3-alpine BUILD_FROM=ghcr.io/emqx/emqx-builder/4.4-19:${{ matrix.otp }}-alpine3.15.1
RUN_FROM=alpine:3.12 RUN_FROM=alpine:3.15.1
EMQX_NAME=${{ matrix.profile }} EMQX_NAME=${{ matrix.profile }}
file: source/deploy/docker/Dockerfile.enterprise file: source/deploy/docker/Dockerfile.enterprise
context: source context: source
@ -418,10 +368,16 @@ jobs:
fail-fast: false fail-fast: false
matrix: matrix:
profile: ${{fromJSON(needs.prepare.outputs.profiles)}} profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
otp:
- 24.1.5-3
include:
- profile: emqx
otp: windows # otp version on windows is rather fixed
steps: steps:
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v2
with: with:
name: ${{ matrix.profile }} name: ${{ matrix.profile }}-${{ matrix.otp }}
path: packages/${{ matrix.profile }} path: packages/${{ matrix.profile }}
- name: install dos2unix - name: install dos2unix
run: sudo apt-get update && sudo apt install -y dos2unix run: sudo apt-get update && sudo apt install -y dos2unix

View File

@ -1,5 +1,10 @@
name: Build slim packages name: Build slim packages
concurrency:
group: slim-${{ github.event_name }}-${{ github.ref }}
cancel-in-progress: true
on: on:
pull_request: pull_request:
workflow_dispatch: workflow_dispatch:
@ -9,19 +14,21 @@ jobs:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
strategy: strategy:
fail-fast: false
matrix: matrix:
erl_otp: erl_otp:
- erl23.3.4.9-3 - 24.1.5-3
os: os:
- ubuntu20.04 - ubuntu20.04
- centos7 - el8
container: emqx/build-env:${{ matrix.erl_otp }}-${{ matrix.os }} container: ghcr.io/emqx/emqx-builder/4.4-19:${{ matrix.erl_otp }}-${{ matrix.os }}
steps: steps:
- uses: actions/checkout@v1 - uses: actions/checkout@v1
- name: prepare - name: prepare
run: | run: |
git config --global --add safe.directory "$GITHUB_WORKSPACE"
if make emqx-ee --dry-run > /dev/null 2>&1; then if make emqx-ee --dry-run > /dev/null 2>&1; then
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
git config --global credential.helper store git config --global credential.helper store
@ -42,23 +49,63 @@ jobs:
with: with:
name: rebar3.crashdump name: rebar3.crashdump
path: ./rebar3.crashdump path: ./rebar3.crashdump
- name: pakcages test - name: packages test
run: | run: |
export CODE_PATH=$GITHUB_WORKSPACE export CODE_PATH="$GITHUB_WORKSPACE"
.ci/build_packages/tests.sh .ci/build_packages/tests.sh "${EMQX_NAME}" zip
.ci/build_packages/tests.sh "${EMQX_NAME}" pkg
- uses: actions/upload-artifact@v2 - uses: actions/upload-artifact@v2
with: with:
name: ${{ matrix.os }} name: ${{ matrix.os }}
path: _packages/**/*.zip path: _packages/**/*.zip
windows:
runs-on: windows-2019
if: endsWith(github.repository, 'emqx')
strategy:
fail-fast: false
matrix:
profile:
- emqx
otp:
- 24.2.1
steps:
- uses: actions/checkout@v2
- uses: ilammy/msvc-dev-cmd@v1
- uses: erlef/setup-beam@v1
with:
otp-version: ${{ matrix.otp }}
- name: build
env:
PYTHON: python
DIAGNOSTIC: 1
run: |
erl -eval "erlang:display(crypto:info_lib())" -s init stop
make ${{ matrix.profile }}-zip
- name: run emqx
timeout-minutes: 1
run: |
./_build/${{ matrix.profile }}/rel/emqx/bin/emqx start
Start-Sleep -s 5
echo "EMQX started"
./_build/${{ matrix.profile }}/rel/emqx/bin/emqx stop
echo "EMQX stopped"
./_build/${{ matrix.profile }}/rel/emqx/bin/emqx install
echo "EQMX installed"
./_build/${{ matrix.profile }}/rel/emqx/bin/emqx uninstall
echo "EQMX uninstaled"
mac: mac:
strategy: strategy:
fail-fast: false
matrix: matrix:
erl_otp: otp:
- 23.3.4.9-3 - 24.1.5-3
macos: macos:
- macos-11 - macos-11
runs-on: ${{ matrix.macos }}
runs-on: ${{ matrix.macos }}
steps: steps:
- uses: actions/checkout@v1 - uses: actions/checkout@v1
- name: prepare - name: prepare
@ -79,8 +126,8 @@ jobs:
- uses: actions/cache@v2 - uses: actions/cache@v2
id: cache id: cache
with: with:
path: ~/.kerl/${{ matrix.erl_otp }} path: ~/.kerl/${{ matrix.otp }}
key: otp-install-${{ matrix.erl_otp }}-${{ matrix.macos }} key: otp-install-${{ matrix.otp }}-${{ matrix.macos }}
- name: build erlang - name: build erlang
if: steps.cache.outputs.cache-hit != 'true' if: steps.cache.outputs.cache-hit != 'true'
timeout-minutes: 60 timeout-minutes: 60
@ -89,11 +136,11 @@ jobs:
OTP_GITHUB_URL: https://github.com/emqx/otp OTP_GITHUB_URL: https://github.com/emqx/otp
run: | run: |
kerl update releases kerl update releases
kerl build ${{ matrix.erl_otp }} kerl build ${{ matrix.otp }}
kerl install ${{ matrix.erl_otp }} $HOME/.kerl/${{ matrix.erl_otp }} kerl install ${{ matrix.otp }} $HOME/.kerl/${{ matrix.otp }}
- name: build - name: build
run: | run: |
. $HOME/.kerl/${{ matrix.erl_otp }}/activate . $HOME/.kerl/${{ matrix.otp }}/activate
make ensure-rebar3 make ensure-rebar3
sudo cp rebar3 /usr/local/bin/rebar3 sudo cp rebar3 /usr/local/bin/rebar3
make ${EMQX_NAME}-zip make ${EMQX_NAME}-zip
@ -104,8 +151,8 @@ jobs:
path: ./rebar3.crashdump path: ./rebar3.crashdump
- name: test - name: test
run: | run: |
pkg_name=$(basename _packages/${EMQX_NAME}/emqx-*.zip) pkg_name=$(find _packages/${EMQX_NAME} -mindepth 1 -maxdepth 1 -iname \*.zip)
unzip -q _packages/${EMQX_NAME}/$pkg_name unzip -q $pkg_name
gsed -i '/emqx_telemetry/d' ./emqx/data/loaded_plugins gsed -i '/emqx_telemetry/d' ./emqx/data/loaded_plugins
./emqx/bin/emqx start || cat emqx/log/erlang.log.1 ./emqx/bin/emqx start || cat emqx/log/erlang.log.1
ready='no' ready='no'

View File

@ -5,7 +5,7 @@ on: [pull_request]
jobs: jobs:
check_deps_integrity: check_deps_integrity:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
container: emqx/build-env:erl23.3.4.9-3-ubuntu20.04 container: ghcr.io/emqx/emqx-builder/4.4-19:24.1.5-3-ubuntu20.04
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2

View File

@ -7,7 +7,7 @@ on:
jobs: jobs:
prepare: prepare:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
container: emqx/build-env:erl23.3.4.9-3-ubuntu20.04 container: ghcr.io/emqx/emqx-builder/4.4-19:24.1.5-3-ubuntu20.04
outputs: outputs:
profiles: ${{ steps.set_profile.outputs.profiles}} profiles: ${{ steps.set_profile.outputs.profiles}}

View File

@ -5,7 +5,7 @@ on: workflow_dispatch
jobs: jobs:
test: test:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
container: emqx/build-env:erl23.3.4.9-3-ubuntu20.04 container: ghcr.io/emqx/emqx-builder/4.4-19:24.1.5-3-ubuntu20.04
strategy: strategy:
fail-fast: true fail-fast: true
env: env:

View File

@ -12,8 +12,8 @@ jobs:
build: build:
runs-on: ubuntu-latest runs-on: ubuntu-latest
outputs: outputs:
imgname: ${{ steps.build_docker.outputs.imgname}} imgname: ${{ steps.prepare.outputs.imgname}}
version: ${{ steps.build_docker.outputs.version}} version: ${{ steps.prepare.outputs.version}}
steps: steps:
- name: download jmeter - name: download jmeter
id: dload_jmeter id: dload_jmeter
@ -29,29 +29,30 @@ jobs:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- uses: erlef/setup-beam@v1 - uses: erlef/setup-beam@v1
with: with:
otp-version: "23.3.4.13" otp-version: "24.1.5"
- name: build docker - name: prepare
id: build_docker id: prepare
run: | run: |
if [ -f EMQX_ENTERPRISE ]; then if [ -f EMQX_ENTERPRISE ]; then
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
git config --global credential.helper store git config --global credential.helper store
make deps-emqx-ee make deps-emqx-ee
make clean make clean
make emqx-ee-docker
echo "::set-output name=imgname::emqx-ee" echo "::set-output name=imgname::emqx-ee"
echo "::set-output name=version::$(./pkg-vsn.sh)" echo "::set-output name=version::$(./pkg-vsn.sh)"
docker save emqx/emqx-ee:$(./pkg-vsn.sh) -o emqx.tar
else else
make emqx-docker make emqx-docker
echo "::set-output name=imgname::emqx" echo "::set-output name=imgname::emqx"
echo "::set-output name=version::$(./pkg-vsn.sh)" echo "::set-output name=version::$(./pkg-vsn.sh)"
docker save emqx/emqx:$(./pkg-vsn.sh) -o emqx.tar
fi fi
- name: build docker image
run: |
make ${{ steps.prepare.outputs.imgname }}-docker
docker save emqx/${{ steps.prepare.outputs.imgname }}:${{ steps.prepare.outputs.version }} -o image.tar.gz
- uses: actions/upload-artifact@v2 - uses: actions/upload-artifact@v2
with: with:
name: emqx-docker-image name: image
path: emqx.tar path: image.tar.gz
webhook: webhook:
runs-on: ubuntu-latest runs-on: ubuntu-latest
@ -67,10 +68,11 @@ jobs:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v2
with: with:
name: emqx-docker-image name: image
path: /tmp path: /tmp
- name: load docker image - name: load docker image
run: docker load < /tmp/emqx.tar run: |
docker load < /tmp/image.tar.gz
- name: docker compose up - name: docker compose up
timeout-minutes: 5 timeout-minutes: 5
env: env:
@ -165,10 +167,11 @@ jobs:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v2
with: with:
name: emqx-docker-image name: image
path: /tmp path: /tmp
- name: load docker image - name: load docker image
run: docker load < /tmp/emqx.tar run: |
docker load < /tmp/image.tar.gz
- name: docker compose up - name: docker compose up
timeout-minutes: 5 timeout-minutes: 5
env: env:
@ -270,10 +273,11 @@ jobs:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v2
with: with:
name: emqx-docker-image name: image
path: /tmp path: /tmp
- name: load docker image - name: load docker image
run: docker load < /tmp/emqx.tar run: |
docker load < /tmp/image.tar.gz
- name: docker compose up - name: docker compose up
timeout-minutes: 5 timeout-minutes: 5
env: env:
@ -364,10 +368,11 @@ jobs:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v2
with: with:
name: emqx-docker-image name: image
path: /tmp path: /tmp
- name: load docker image - name: load docker image
run: docker load < /tmp/emqx.tar run: |
docker load < /tmp/image.tar.gz
- name: docker compose up - name: docker compose up
timeout-minutes: 5 timeout-minutes: 5
env: env:

View File

@ -15,21 +15,26 @@ jobs:
- uses: actions/checkout@v1 - uses: actions/checkout@v1
- uses: erlef/setup-beam@v1 - uses: erlef/setup-beam@v1
with: with:
otp-version: "23.3.4.9" otp-version: "24.1.5"
- name: make docker - name: prepare
run: | run: |
if make emqx-ee --dry-run > /dev/null 2>&1; then if make emqx-ee --dry-run > /dev/null 2>&1; then
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
git config --global credential.helper store git config --global credential.helper store
make deps-emqx-ee make deps-emqx-ee
make clean
echo "TARGET=emqx/emqx-ee" >> $GITHUB_ENV echo "TARGET=emqx/emqx-ee" >> $GITHUB_ENV
echo "PROFILE=emqx-ee" >> $GITHUB_ENV
echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV
make emqx-ee-docker make emqx-ee-docker
else else
echo "TARGET=emqx/emqx" >> $GITHUB_ENV echo "TARGET=emqx/emqx" >> $GITHUB_ENV
echo "PROFILE=emqx" >> $GITHUB_ENV
echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV
make emqx-docker make emqx-docker
fi fi
- name: make emqx image
run: make ${PROFILE}-docker
- name: run emqx - name: run emqx
timeout-minutes: 5 timeout-minutes: 5
run: | run: |
@ -62,24 +67,35 @@ jobs:
helm_test: helm_test:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
strategy:
fail-fast: false
matrix:
discovery:
- k8s
- dns
steps: steps:
- uses: actions/checkout@v1 - uses: actions/checkout@v1
- uses: erlef/setup-beam@v1 - uses: erlef/setup-beam@v1
with: with:
otp-version: "23.3.4.9" otp-version: "24.1.5"
- name: prepare - name: prepare
run: | run: |
if make emqx-ee --dry-run > /dev/null 2>&1; then if make emqx-ee --dry-run > /dev/null 2>&1; then
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
git config --global credential.helper store git config --global credential.helper store
make deps-emqx-ee make deps-emqx-ee
make clean
echo "TARGET=emqx/emqx-ee" >> $GITHUB_ENV echo "TARGET=emqx/emqx-ee" >> $GITHUB_ENV
make emqx-ee-docker echo "PROFILE=emqx-ee" >> $GITHUB_ENV
echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV
else else
echo "TARGET=emqx/emqx" >> $GITHUB_ENV echo "TARGET=emqx/emqx" >> $GITHUB_ENV
make emqx-docker echo "PROFILE=emqx" >> $GITHUB_ENV
echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV
fi fi
- name: make emqx image
run: make ${PROFILE}-docker
- name: install k3s - name: install k3s
env: env:
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml" KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
@ -96,18 +112,18 @@ jobs:
sudo chmod 700 get_helm.sh sudo chmod 700 get_helm.sh
sudo ./get_helm.sh sudo ./get_helm.sh
helm version helm version
- name: run emqx on chart - name: setup emqx chart
env:
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
timeout-minutes: 5
run: | run: |
version=$(./pkg-vsn.sh) sudo docker save ${TARGET}:${EMQX_TAG} -o emqx.tar.gz
sudo docker save ${TARGET}:$version -o emqx.tar.gz
sudo k3s ctr image import emqx.tar.gz sudo k3s ctr image import emqx.tar.gz
sed -i -r "s/^appVersion: .*$/appVersion: \"${version}\"/g" deploy/charts/emqx/Chart.yaml sed -i -r "s/^appVersion: .*$/appVersion: \"${EMQX_TAG}\"/g" deploy/charts/emqx/Chart.yaml
sed -i '/emqx_telemetry/d' deploy/charts/emqx/values.yaml sed -i '/emqx_telemetry/d' deploy/charts/emqx/values.yaml
- name: run emqx on chart
if: matrix.discovery == 'k8s'
env:
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
run: |
helm install emqx \ helm install emqx \
--set image.repository=${TARGET} \ --set image.repository=${TARGET} \
--set image.pullPolicy=Never \ --set image.pullPolicy=Never \
@ -117,7 +133,29 @@ jobs:
--set emqxConfig.EMQX_MQTT__MAX_TOPIC_ALIAS=10 \ --set emqxConfig.EMQX_MQTT__MAX_TOPIC_ALIAS=10 \
deploy/charts/emqx \ deploy/charts/emqx \
--debug --debug
- name: run emqx on chart
if: matrix.discovery == 'dns'
env:
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
run: |
helm install emqx \
--set emqxConfig.EMQX_CLUSTER__DISCOVERY="dns" \
--set emqxConfig.EMQX_CLUSTER__DNS__NAME="emqx-headless.default.svc.cluster.local" \
--set emqxConfig.EMQX_CLUSTER__DNS__APP="emqx" \
--set emqxConfig.EMQX_CLUSTER__DNS__TYPE="srv" \
--set image.repository=${TARGET} \
--set image.pullPolicy=Never \
--set emqxAclConfig="" \
--set image.pullPolicy=Never \
--set emqxConfig.EMQX_ZONE__EXTERNAL__RETRY_INTERVAL=2s \
--set emqxConfig.EMQX_MQTT__MAX_TOPIC_ALIAS=10 \
deploy/charts/emqx \
--debug
- name: waiting emqx started
env:
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
timeout-minutes: 5
run: |
while [ "$(kubectl get StatefulSet -l app.kubernetes.io/name=emqx -o jsonpath='{.items[0].status.replicas}')" \ while [ "$(kubectl get StatefulSet -l app.kubernetes.io/name=emqx -o jsonpath='{.items[0].status.replicas}')" \
!= "$(kubectl get StatefulSet -l app.kubernetes.io/name=emqx -o jsonpath='{.items[0].status.readyReplicas}')" ]; do != "$(kubectl get StatefulSet -l app.kubernetes.io/name=emqx -o jsonpath='{.items[0].status.readyReplicas}')" ]; do
echo "=============================="; echo "==============================";
@ -126,6 +164,18 @@ jobs:
echo "waiting emqx started"; echo "waiting emqx started";
sleep 10; sleep 10;
done done
- name: Check ${{ matrix.kind[0]}} cluster
env:
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
timeout-minutes: 10
run: |
while
nodes="$(kubectl exec -i emqx-0 -- curl --silent --basic -u admin:public -X GET http://localhost:8081/api/v4/brokers | jq '.data|length')";
[ "$nodes" != "3" ];
do
echo "waiting emqx cluster scale"
sleep 1
done
- name: get emqx-0 pods log - name: get emqx-0 pods log
if: failure() if: failure()
env: env:
@ -176,7 +226,7 @@ jobs:
relup_test_plan: relup_test_plan:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
container: emqx/build-env:erl23.3.4.9-3-ubuntu20.04 container: ghcr.io/emqx/emqx-builder/4.4-19:24.1.5-3-ubuntu20.04
outputs: outputs:
profile: ${{ steps.profile-and-versions.outputs.profile }} profile: ${{ steps.profile-and-versions.outputs.profile }}
vsn: ${{ steps.profile-and-versions.outputs.vsn }} vsn: ${{ steps.profile-and-versions.outputs.vsn }}
@ -221,8 +271,13 @@ jobs:
relup_test_build: relup_test_build:
needs: relup_test_plan needs: relup_test_plan
strategy:
fail-fast: false
matrix:
otp:
- 24.1.5-3
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
container: emqx/build-env:erl23.3.4.9-3-ubuntu20.04 container: ghcr.io/emqx/emqx-builder/4.4-19:24.1.5-3-ubuntu20.04
defaults: defaults:
run: run:
shell: bash shell: bash
@ -257,11 +312,13 @@ jobs:
- relup_test_plan - relup_test_plan
- relup_test_build - relup_test_build
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
container: emqx/relup-test-env:erl23.2.7.2-emqx-3-ubuntu20.04 container: ghcr.io/emqx/emqx-builder/4.4-19:24.1.5-3-ubuntu20.04
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
old_vsn: ${{ fromJson(needs.relup_test_plan.outputs.matrix) }} old_vsn: ${{ fromJson(needs.relup_test_plan.outputs.matrix) }}
otp:
- 24.1.5-3
env: env:
OLD_VSN: "${{ matrix.old_vsn }}" OLD_VSN: "${{ matrix.old_vsn }}"
PROFILE: "${{ needs.relup_test_plan.outputs.profile }}" PROFILE: "${{ needs.relup_test_plan.outputs.profile }}"
@ -288,7 +345,7 @@ jobs:
mkdir -p packages mkdir -p packages
cp emqx_built/_packages/*/*.zip packages cp emqx_built/_packages/*/*.zip packages
cd packages cd packages
wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$BROKER/$OLD_VSN/$PROFILE-ubuntu20.04-${OLD_VSN#[e|v]}-amd64.zip wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$BROKER/$OLD_VSN/$PROFILE-${OLD_VSN#[e|v]}-otp${{ matrix.otp }}-ubuntu20.04-amd64.zip
- name: Run relup test scenario - name: Run relup test scenario
timeout-minutes: 5 timeout-minutes: 5
run: | run: |
@ -300,6 +357,8 @@ jobs:
--var ONE_MORE_EMQX_PATH=$(pwd)/one_more_emqx \ --var ONE_MORE_EMQX_PATH=$(pwd)/one_more_emqx \
--var VSN="$VSN" \ --var VSN="$VSN" \
--var OLD_VSN="$OLD_VSN" \ --var OLD_VSN="$OLD_VSN" \
--var FROM_OTP_VSN="24.1.5-3" \
--var TO_OTP_VSN="24.1.5-3" \
emqx_built/.ci/fvt_tests/relup.lux emqx_built/.ci/fvt_tests/relup.lux
- uses: actions/upload-artifact@v2 - uses: actions/upload-artifact@v2
name: Save debug data name: Save debug data

View File

@ -10,7 +10,7 @@ on:
jobs: jobs:
run_proper_test: run_proper_test:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
container: emqx/build-env:erl23.3.4.9-3-ubuntu20.04 container: ghcr.io/emqx/emqx-builder/4.4-19:24.1.5-3-ubuntu20.04
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2

View File

@ -1 +1 @@
erlang 23.3.4.9-3 erlang 24.1.5-3

View File

@ -176,6 +176,7 @@ File format:
to force an immediate reload of all certificates after the files are updated on disk. to force an immediate reload of all certificates after the files are updated on disk.
* Refactor the ExProto so that anonymous clients can also be displayed on the dashboard [#6983] * Refactor the ExProto so that anonymous clients can also be displayed on the dashboard [#6983]
* Force shutdown of processes that cannot answer takeover event [#7026] * Force shutdown of processes that cannot answer takeover event [#7026]
* Support set keepalive via queryString & Body HTTP API.
* `topic` parameter in bridge configuration can have `${node}` substitution (just like in `clientid` parameter) * `topic` parameter in bridge configuration can have `${node}` substitution (just like in `clientid` parameter)
* Add UTF-8 string validity check in `strict_mode` for MQTT packet. * Add UTF-8 string validity check in `strict_mode` for MQTT packet.
When set to true, invalid UTF-8 strings will cause the client to be disconnected. i.e. client ID, topic name. [#7261] When set to true, invalid UTF-8 strings will cause the client to be disconnected. i.e. client ID, topic name. [#7261]

221
CHANGES-4.4.md Normal file
View File

@ -0,0 +1,221 @@
# EMQX 4.4 Changes
## v4.4.8
### Enhancements (synced from v4.3.19)
* Support HTTP API `/trace/:name/detail`.
### Bug fixes
- Fix: Check if emqx_mod_trace is enabled when the trace file is not found.
## v4.4.5
### Enhancements (synced from v4.3.16)
* HTTP API `mqtt/publish` support to publish with properties and user_properties.
### Bug fixes
- Clean trace zip files when file has been downloaded.
- Remove unnecessary error messages generated by JWT auth.
## v4.4.4
### Enhancements (synced from v4.3.15)
* Refactored `bin/emqx` help messages.
* Upgrade script refuses upgrade from incompatible versions. (e.g. hot upgrade from 4.3 to 4.4 will fail fast).
* Made possible for EMQX to boot from a Linux directory which has white spaces in its path.
* Add support for JWT authorization [#7596]
Now MQTT clients may be authorized with respect to a specific claim containing publish/subscribe topic whitelists.
* Better randomisation of app secrets (changed from timestamp-seeded SHA hash (UUID) to crypto:strong_rand_bytes)
* Return a client_identifier_not_valid error when username is empty and username_as_clientid is set to true [#7862]
* Add more rule engine date functions: format_date/3, format_date/4, date_to_unix_ts/4 [#7894]
* Add proto_name and proto_ver fields for $event/client_disconnected event.
* Mnesia auth/acl http api support multiple condition queries.
* Inflight QoS1 Messages for shared topics are now redispatched to another alive subscribers upon chosen subscriber session termination.
* Make auth metrics name more understandable.
* Allow emqx_management http listener binding to specific interface [#8005]
* Add rule-engine function float2str/2, user can specify the float output precision [#7991]
### Bug fixes
- Allow uploading or referencing a backup file outside the
`data/backup` directory when restoring a backup. [#7996]
### Bug fixes (synced from v4.3.15)
* List subscription topic (/api/v4/subscriptions): the results did not match when multiple query conditions were given.
* SSL closed error bug fixed for redis client.
* Fix mqtt-sn client disconnected due to re-send a duplicated qos2 message
* Rule-engine function hexstr2bin/1 support half byte [#7977]
* Shared message delivery when all alive shared subs have full inflight [#7984]
* Improved resilience against autocluster partitioning during cluster
startup. [#7876]
[ekka-158](https://github.com/emqx/ekka/pull/158)
* Add regular expression check ^[0-9A-Za-z_\-]+$ for node name [#7979]
* Fix `node_dump` variable sourcing. [#8026]
* Fix heap size is growing too fast when trace large message.
* Support customized timestamp format of the log messages.
## v4.4.3
**NOTE**: v4.4.3 is in sync with v4.3.14
### Enhancements
* Add rule events: client.connack, client.check_acl_complete
- client.connack The rule event is triggered when the server sends a CONNACK packet to the client. reason_code contains the error reason code.
- client.check_acl_complete The rule event is triggered when the client check acl complete.
### Enhancements (synced from v4.3.14)
* Add `RequestMeta` for exhook.proto in order to expose `cluster_name` of emqx in each gRPC request. [#7524]
* Support customize emqx_exhook execution priority. [#7408]
* add api: PUT /rules/{id}/reset_metrics.
This api reset the metrics of the rule engine of a rule, and reset the metrics of the action related to this rule. [#7474]
* Enhanced rule engine error handling when json parsing error.
* Add support for `RSA-PSK-AES256-GCM-SHA384`, `RSA-PSK-AES256-CBC-SHA384`,
`RSA-PSK-AES128-GCM-SHA256`, `RSA-PSK-AES128-CBC-SHA256` PSK ciphers, and remove `PSK-3DES-EDE-CBC-SHA`,
`PSK-RC4-SHA` from the default configuration. [#7427]
* Diagnostic logging for mnesia `wait_for_table`
- prints check points of mnesia internal stats
- prints check points of per table loading stats
Help to locate the problem of long table loading time.
* Add `local` strategy for Shared Subscription.
  That will preferentially dispatch messages to a shared subscriber at the same
  node. It improves the efficiency of shared message dispatching in certain
  scenarios, especially when the emqx-bridge-mqtt plugin is configured as shared
subscription. [#7462]
* Add some compression functions to rule-engine: gzip, gunzip, zip, unzip, zip_compress, zip_uncompress
### Bug Fixes (synced from v4.3.14)
* Prohibit empty topics in strict mode
* Make sure ehttpc delete useless pool always succeed.
* Update mongodb driver to fix potential process leak.
* Fix a potential security issue #3155 with emqx-dashboard plugin.
In the earlier implementation, the Dashboard password is reset back to the
default value of emqx_dashboard.conf after the node left cluster.
Now we persist changed password to protect against reset. [#7518]
* Silence grep/sed warnings in docker-entrypoint.sh. [#7520]
* Generate `loaded_modules` and `loaded_plugins` files with default values when no such files exists. [#7520]
* Fix the configuration `server_name_indication` set to disable does not take effect.
* Fix backup files are not deleted and downloaded correctly when the API path has ISO8859-1 escape characters.
## v4.4.2
**NOTE**: v4.4.2 is in sync with: v4.3.13
### Important changes
* Docker image is based on alpine-3.15.1 (OpenSSL-1.1.1n)
* For docker image, /opt/emqx/etc has been removed from the VOLUME list,
this made it easier for the users to rebuild image on top with changed configs.
* CentOS 7 Erlang runtime is rebuilt on OpenSSL-1.1.1n (previously on 1.0),
Prior to v4.4.1, EMQX may pick certain cipher suites proposed by the clients,
but then fail to handshake resulting in a `malformed_handshake_data` exception.
### Enhancements
* Windows package is built on Erlang/OTP 24
### Enhancements (synced from v4.3.13)
* CLI `emqx_ctl pem_cache clean` to force purge x509 certificate cache,
to force an immediate reload of all certificates after the files are updated on disk.
* Refactor the ExProto so that anonymous clients can also be displayed on the dashboard [#6983]
* Force shutdown of processes that cannot answer takeover event [#7026]
* Support setting keepalive via query string & body of the HTTP API.
* `topic` parameter in bridge configuration can have `${node}` substitution (just like in `clientid` parameter)
* Add UTF-8 string validity check in `strict_mode` for MQTT packet.
When set to true, invalid UTF-8 strings will cause the client to be disconnected. i.e. client ID, topic name. [#7261]
* Changed systemd service restart delay from 10 seconds to 60 seconds.
* MQTT-SN gateway supports initiative to synchronize registered topics after session resumed. [#7300]
* Add load control app for future development.
* Change the precision of float to 17 digits after the decimal point when formatting a
float using payload templates of rule actions. The old precision is 10 digits before
this change.
### Bug fixes (synced from v4.3.13)
* Fix the `{error,eexist}` error when do release upgrade again if last run failed. [#7121]
* Fix case where publishing to a non-existent topic alias would crash the connection [#6979]
* Fix HTTP-API 500 error on querying the lwm2m client list on the another node [#7009]
* Fix the ExProto connection registry is not released after the client process abnormally exits [#6983]
* Fix Server-KeepAlive wrongly applied on MQTT v3.0/v3.1 [#7085]
* Fix Stomp client can not trigger `$event/client_connection` message [#7096]
* Fix system memory false alarm at boot
* Fix the MQTT-SN message replay when the topic is not registered to the client [#6970]
* Fix rpc get node info maybe crash when other nodes is not ready.
* Fix false alert level log “cannot_find_plugins” caused by duplicate plugin names in `loaded_plugins` files.
* Prompt user how to change the dashboard's initial default password when emqx start.
* Fix errno=13 'Permission denied' Cannot create FIFO boot error in Amazon Linux 2022 (el8 package)
* Fix user or appid created, name only allow `^[A-Za-z]+[A-Za-z0-9-_]*$`
* Fix subscribe http api crash by bad_qos `/mqtt/subscribe`,`/mqtt/subscribe_batch`.
* Send DISCONNECT packet with reason code 0x98 if connection has been kicked [#7309]
* Fix make all traces stopped when emqx_trace_module is disabled.
## v4.4.1
This patch release is only to fix windows build which failed on v4.4.0.
## v4.4.0
**NOTE**: v4.4.0 is in sync with: v4.3.12
### Important changes
- **For Debian/Ubuntu users**, Debian/Ubuntu package (deb) installed EMQ X is now started from systemd.
This is to use systemd's supervision functionality to ensure that EMQ X service restarts after a crash.
The package installation service upgrade from init.d to systemd has been verified,
it is still recommended that you verify and confirm again before deploying to the production environment,
at least to ensure that systemd is available in your system
- Package name scheme changed comparing to 4.3.
4.3 format: emqx-centos8-4.3.8-amd64.zip
4.4 format: emqx-4.4.0-rc.1-otp24.1.5-3-el8-amd64.zip
* Erlang/OTP version is included in the package name,
providing the possibility to release EMQX on multiple Erlang/OTP versions
* `centos` is renamed to `el`. This is mainly due to centos8 being dead (replaced with rockylinux8)
- MongoDB authentication supports DNS SRV and TXT Records resolution, which can seamlessly connect with MongoDB Altas
- Support dynamic modification of MQTT Keep Alive to adapt to different energy consumption strategies.
- Support 4.3 to 4.4 rolling upgrade of clustered nodes. See upgrade document for more details.
- TLS for cluster backplane (RPC) connections. See clustering document for more details.
- Support real-time tracing in the dashboard, with Client ID, Client IP address, and topic name based filtering.
- Add the Slow Subscriptions module to count the time spent during the message transmission. This feature will list the Clients and Topics with higher time consumption in Dashboard
### Minor changes
- Bumped default boot wait time from 15 seconds to 150 seconds
because in some simulated environments it may take up to 70 seconds to boot in build CI
- Dashboard supports relative paths and custom access paths
- Supports configuring whether to forward retained messages with empty payload to suit users
who are still using MQTT v3.1. The relevant configurable item is `retainer.stop_publish_clear_msg`
- Multi-language hook extension (ExHook) supports dynamic cancellation of subsequent forwarding of client messages
- Rule engine SQL supports the use of single quotes in `FROM` clauses, for example: `SELECT * FROM 't/#'`
- Change the default value of the `max_topic_levels` configurable item to 128.
Previously, it had no limit (configured to 0), which may be a potential DoS threat
- Improve the error log content when the Proxy Protocol message is received without `proxy_protocol` configured.
- Add additional message attributes to the message reported by the gateway.
Messages from gateways such as CoAP, LwM2M, Stomp, ExProto, etc., when converted to EMQ X messages,
add fields such as protocol name, protocol version, user name, client IP, etc.,
which can be used for multi-language hook extension (ExHook)
- HTTP client performance improvement
- Add openssl-1.1 to RPM dependency

View File

@ -1,13 +1,14 @@
$(shell $(CURDIR)/scripts/git-hooks-init.sh) $(shell $(CURDIR)/scripts/git-hooks-init.sh)
REBAR_VERSION = 3.14.3-emqx-8
REBAR = $(CURDIR)/rebar3 REBAR = $(CURDIR)/rebar3
BUILD = $(CURDIR)/build BUILD = $(CURDIR)/build
SCRIPTS = $(CURDIR)/scripts SCRIPTS = $(CURDIR)/scripts
export EMQX_RELUP ?= true export EMQX_RELUP ?= true
export EMQX_DEFAULT_BUILDER = emqx/build-env:erl23.3.4.9-3-alpine export EMQX_DEFAULT_BUILDER = ghcr.io/emqx/emqx-builder/4.4-19:24.1.5-3-alpine3.15.1
export EMQX_DEFAULT_RUNNER = alpine:3.12 export EMQX_DEFAULT_RUNNER = alpine:3.15.1
export OTP_VSN ?= $(shell $(CURDIR)/scripts/get-otp-vsn.sh)
export PKG_VSN ?= $(shell $(CURDIR)/pkg-vsn.sh) export PKG_VSN ?= $(shell $(CURDIR)/pkg-vsn.sh)
export DOCKERFILE := deploy/docker/Dockerfile export DOCKERFILE := deploy/docker/Dockerfile
export DOCKERFILE_TESTING := deploy/docker/Dockerfile.testing
ifeq ($(OS),Windows_NT) ifeq ($(OS),Windows_NT)
export REBAR_COLOR=none export REBAR_COLOR=none
FIND=/usr/bin/find FIND=/usr/bin/find
@ -20,7 +21,7 @@ REL_PROFILES := emqx emqx-edge
PKG_PROFILES := emqx-pkg emqx-edge-pkg PKG_PROFILES := emqx-pkg emqx-edge-pkg
PROFILES := $(REL_PROFILES) $(PKG_PROFILES) default PROFILES := $(REL_PROFILES) $(PKG_PROFILES) default
export REBAR_GIT_CLONE_OPTIONS += --depth=1 export REBAR_GIT_CLONE_OPTIONS += --depth=1 --quiet
.PHONY: default .PHONY: default
default: $(REBAR) $(PROFILE) default: $(REBAR) $(PROFILE)
@ -31,7 +32,7 @@ all: $(REBAR) $(PROFILES)
.PHONY: ensure-rebar3 .PHONY: ensure-rebar3
ensure-rebar3: ensure-rebar3:
@$(SCRIPTS)/fail-on-old-otp-version.escript @$(SCRIPTS)/fail-on-old-otp-version.escript
@$(SCRIPTS)/ensure-rebar3.sh $(REBAR_VERSION) @$(SCRIPTS)/ensure-rebar3.sh
$(REBAR): ensure-rebar3 $(REBAR): ensure-rebar3
@ -98,6 +99,7 @@ $(PROFILES:%=clean-%):
.PHONY: clean-all .PHONY: clean-all
clean-all: clean-all:
@rm -f rebar.lock
@rm -rf _build @rm -rf _build
@rm -f rebar.lock @rm -f rebar.lock
@ -172,6 +174,18 @@ endef
ALL_ZIPS = $(REL_PROFILES) ALL_ZIPS = $(REL_PROFILES)
$(foreach zt,$(ALL_ZIPS),$(eval $(call gen-docker-target,$(zt)))) $(foreach zt,$(ALL_ZIPS),$(eval $(call gen-docker-target,$(zt))))
## emqx-docker-testing
## emqx-ee-docker-testing
## is to directly copy a unzipped zip-package to a
## base image such as ubuntu20.04. Mostly for testing
.PHONY: $(REL_PROFILES:%=%-docker-testing)
define gen-docker-target-testing
$1-docker-testing: $(COMMON_DEPS)
@$(BUILD) $1 docker-testing
endef
ALL_ZIPS = $(REL_PROFILES)
$(foreach zt,$(ALL_ZIPS),$(eval $(call gen-docker-target-testing,$(zt))))
.PHONY: run .PHONY: run
run: $(PROFILE) quickrun run: $(PROFILE) quickrun

View File

@ -29,7 +29,7 @@ The second path is for CMD to setup environment variables.
### Erlang/OTP ### Erlang/OTP
Install Erlang/OTP 23.3 from https://www.erlang.org/downloads Install Erlang/OTP 24.2.1 from https://www.erlang.org/downloads
You may need to edit the `Path` environment variable to allow running You may need to edit the `Path` environment variable to allow running
Erlang commands such as `erl` from CMD. Erlang commands such as `erl` from CMD.
@ -45,7 +45,7 @@ e.g.
``` ```
PS C:\Users\zmsto> erl PS C:\Users\zmsto> erl
Eshell V11.1.4 (abort with ^G) Eshell V12.2.1 (abort with ^G)
1> halt(). 1> halt().
``` ```

View File

@ -1,6 +1,6 @@
{deps, {deps,
[ [
{jose, {git, "https://github.com/potatosalad/erlang-jose", {tag, "1.11.1"}}} {jose, {git, "https://github.com/potatosalad/erlang-jose", {tag, "1.11.2"}}}
]}. ]}.
{edoc_opts, [{preprocess, true}]}. {edoc_opts, [{preprocess, true}]}.

View File

@ -1,13 +1,13 @@
{application, emqx_auth_jwt, {application, emqx_auth_jwt,
[{description, "EMQ X Authentication with JWT"}, [{description, "EMQ X Authentication with JWT"},
{vsn, "4.3.5"}, % strict semver, bump manually! {vsn, "4.4.4"}, % strict semver, bump manually!
{modules, []}, {modules, []},
{registered, [emqx_auth_jwt_sup]}, {registered, [emqx_auth_jwt_sup]},
{applications, [kernel,stdlib,jose]}, {applications, [kernel,stdlib,jose]},
{mod, {emqx_auth_jwt_app, []}}, {mod, {emqx_auth_jwt_app, []}},
{env, []}, {env, []},
{licenses, ["Apache-2.0"]}, {licenses, ["Apache-2.0"]},
{maintainers, ["EMQ X Team <contact@emqx.io>"]}, {maintainers, ["EMQX Team <contact@emqx.io>"]},
{links, [{"Homepage", "https://emqx.io/"}, {links, [{"Homepage", "https://emqx.io/"},
{"Github", "https://github.com/emqx/emqx-auth-jwt"} {"Github", "https://github.com/emqx/emqx-auth-jwt"}
]} ]}

View File

@ -1,11 +1,13 @@
%% -*- mode: erlang -*- %% -*- mode: erlang -*-
%% Unless you know what you are doing, DO NOT edit manually!! %% Unless you know what you are doing, DO NOT edit manually!!
{VSN, {VSN,
[{"4.3.4",[{load_module,emqx_auth_jwt_svr,brutal_purge,soft_purge,[]}]}, [{"4.4.3",[{load_module,emqx_auth_jwt_svr,brutal_purge,soft_purge,[]}]},
{"4.3.3",[{load_module,emqx_auth_jwt_svr,brutal_purge,soft_purge,[]}]}, {"4.4.2",[{load_module,emqx_auth_jwt_svr,brutal_purge,soft_purge,[]},
{<<"4\\.3\\.[0-2]">>,[{restart_application,emqx_auth_jwt}]}, {load_module,emqx_auth_jwt,brutal_purge,soft_purge,[]}]},
{<<"4\\.4\\.[0-1]">>,[{restart_application,emqx_auth_jwt}]},
{<<".*">>,[]}], {<<".*">>,[]}],
[{"4.3.4",[{load_module,emqx_auth_jwt_svr,brutal_purge,soft_purge,[]}]}, [{"4.4.3",[{load_module,emqx_auth_jwt_svr,brutal_purge,soft_purge,[]}]},
{"4.3.3",[{load_module,emqx_auth_jwt_svr,brutal_purge,soft_purge,[]}]}, {"4.4.2",[{load_module,emqx_auth_jwt_svr,brutal_purge,soft_purge,[]},
{<<"4\\.3\\.[0-2]">>,[{restart_application,emqx_auth_jwt}]}, {load_module,emqx_auth_jwt,brutal_purge,soft_purge,[]}]},
{<<"4\\.4\\.[0-1]">>,[{restart_application,emqx_auth_jwt}]},
{<<".*">>,[]}]}. {<<".*">>,[]}]}.

View File

@ -72,7 +72,13 @@ check_acl(ClientInfo = #{jwt_claims := Claims},
_ -> _ ->
?DEBUG("no_acl_jwt_claim", []), ?DEBUG("no_acl_jwt_claim", []),
ignore ignore
end. end;
check_acl(_ClientInfo,
_PubSub,
_Topic,
_NoMatchAction,
_AclEnv) ->
ignore.
is_expired(Exp) when is_binary(Exp) -> is_expired(Exp) when is_binary(Exp) ->
ExpInt = binary_to_integer(Exp), ExpInt = binary_to_integer(Exp),

View File

@ -58,7 +58,7 @@ insert_user(User = #emqx_user{login = Login}) ->
[_|_] -> mnesia:abort(existed) [_|_] -> mnesia:abort(existed)
end. end.
-spec(add_default_user(clientid | username, tuple(), binary()) -> ok | {error, any()}). -spec(add_default_user(clientid | username, binary(), binary()) -> ok | {error, any()}).
add_default_user(Type, Key, Password) -> add_default_user(Type, Key, Password) ->
Login = {Type, Key}, Login = {Type, Key},
case add_user(Login, Password) of case add_user(Login, Password) of

View File

@ -48,6 +48,7 @@ groups() ->
]}]. ]}].
init_per_suite(Config) -> init_per_suite(Config) ->
application:load(emqx_plugin_libs),
emqx_ct_helpers:start_apps( [emqx_modules, emqx_management, emqx_auth_mnesia] emqx_ct_helpers:start_apps( [emqx_modules, emqx_management, emqx_auth_mnesia]
, fun set_special_configs/1 , fun set_special_configs/1
), ),

View File

@ -7,6 +7,12 @@
## Value: single | unknown | sharded | rs ## Value: single | unknown | sharded | rs
auth.mongo.type = single auth.mongo.type = single
## Whether to use SRV and TXT records.
##
## Value: true | false
## Default: false
auth.mongo.srv_record = false
## The set name if type is rs. ## The set name if type is rs.
## ##
## Value: String ## Value: String
@ -37,7 +43,6 @@ auth.mongo.pool = 8
## MongoDB AuthSource ## MongoDB AuthSource
## ##
## Value: String ## Value: String
## Default: mqtt
## auth.mongo.auth_source = admin ## auth.mongo.auth_source = admin
## MongoDB database ## MongoDB database

View File

@ -6,8 +6,12 @@
{datatype, {enum, [single, unknown, sharded, rs]}} {datatype, {enum, [single, unknown, sharded, rs]}}
]}. ]}.
{mapping, "auth.mongo.srv_record", "emqx_auth_mongo.server", [
{default, false},
{datatype, {enum, [true, false]}}
]}.
{mapping, "auth.mongo.rs_set_name", "emqx_auth_mongo.server", [ {mapping, "auth.mongo.rs_set_name", "emqx_auth_mongo.server", [
{default, "mqtt"},
{datatype, string} {datatype, string}
]}. ]}.
@ -41,7 +45,6 @@
]}. ]}.
{mapping, "auth.mongo.auth_source", "emqx_auth_mongo.server", [ {mapping, "auth.mongo.auth_source", "emqx_auth_mongo.server", [
{default, "mqtt"},
{datatype, string} {datatype, string}
]}. ]}.
@ -101,9 +104,9 @@
]}. ]}.
{translation, "emqx_auth_mongo.server", fun(Conf) -> {translation, "emqx_auth_mongo.server", fun(Conf) ->
H = cuttlefish:conf_get("auth.mongo.server", Conf), SrvRecord = cuttlefish:conf_get("auth.mongo.srv_record", Conf, false),
Hosts = string:tokens(H, ","), Server = cuttlefish:conf_get("auth.mongo.server", Conf),
Type0 = cuttlefish:conf_get("auth.mongo.type", Conf), Type = cuttlefish:conf_get("auth.mongo.type", Conf),
Pool = cuttlefish:conf_get("auth.mongo.pool", Conf), Pool = cuttlefish:conf_get("auth.mongo.pool", Conf),
%% FIXME: compatible with 4.0-4.2 version format, plan to delete in 5.0 %% FIXME: compatible with 4.0-4.2 version format, plan to delete in 5.0
Login = cuttlefish:conf_get("auth.mongo.username", Conf, Login = cuttlefish:conf_get("auth.mongo.username", Conf,
@ -111,7 +114,10 @@
), ),
Passwd = cuttlefish:conf_get("auth.mongo.password", Conf), Passwd = cuttlefish:conf_get("auth.mongo.password", Conf),
DB = cuttlefish:conf_get("auth.mongo.database", Conf), DB = cuttlefish:conf_get("auth.mongo.database", Conf),
AuthSrc = cuttlefish:conf_get("auth.mongo.auth_source", Conf), AuthSource = case cuttlefish:conf_get("auth.mongo.auth_source", Conf, undefined) of
undefined -> [];
AuthSource0 -> [{auth_source, list_to_binary(AuthSource0)}]
end,
R = cuttlefish:conf_get("auth.mongo.w_mode", Conf), R = cuttlefish:conf_get("auth.mongo.w_mode", Conf),
W = cuttlefish:conf_get("auth.mongo.r_mode", Conf), W = cuttlefish:conf_get("auth.mongo.r_mode", Conf),
Login0 = case Login =:= [] of Login0 = case Login =:= [] of
@ -157,8 +163,8 @@
false -> [] false -> []
end, end,
WorkerOptions = [{database, list_to_binary(DB)}, {auth_source, list_to_binary(AuthSrc)}] WorkerOptions = [{database, list_to_binary(DB)}]
++ Login0 ++ Passwd0 ++ W0 ++ R0 ++ Ssl, ++ Login0 ++ Passwd0 ++ W0 ++ R0 ++ Ssl ++ AuthSource,
Vars = cuttlefish_variable:fuzzy_matches(["auth", "mongo", "topology", "$name"], Conf), Vars = cuttlefish_variable:fuzzy_matches(["auth", "mongo", "topology", "$name"], Conf),
Options = lists:map(fun({_, Name}) -> Options = lists:map(fun({_, Name}) ->
@ -175,16 +181,17 @@
{list_to_atom(Name2), cuttlefish:conf_get("auth.mongo.topology."++Name, Conf)} {list_to_atom(Name2), cuttlefish:conf_get("auth.mongo.topology."++Name, Conf)}
end, Vars), end, Vars),
Type = case Type0 =:= rs of ReplicaSet = case cuttlefish:conf_get("auth.mongo.rs_set_name", Conf, undefined) of
true -> {Type0, list_to_binary(cuttlefish:conf_get("auth.mongo.rs_set_name", Conf))}; undefined -> [];
false -> Type0 ReplicaSet0 -> [{rs_set_name, list_to_binary(ReplicaSet0)}]
end, end,
[{type, Type}, [{srv_record, SrvRecord},
{hosts, Hosts}, {type, Type},
{server, Server},
{options, Options}, {options, Options},
{worker_options, WorkerOptions}, {worker_options, WorkerOptions},
{auto_reconnect, 1}, {auto_reconnect, 1},
{pool_size, Pool}] {pool_size, Pool}] ++ ReplicaSet
end}. end}.
%% The mongodb operation timeout is specified by the value of `cursor_timeout` from application config, %% The mongodb operation timeout is specified by the value of `cursor_timeout` from application config,

View File

@ -1,6 +1,6 @@
{application, emqx_auth_mongo, {application, emqx_auth_mongo,
[{description, "EMQ X Authentication/ACL with MongoDB"}, [{description, "EMQ X Authentication/ACL with MongoDB"},
{vsn, "4.3.4"}, % strict semver, bump manually! {vsn, "4.4.4"}, % strict semver, bump manually!
{modules, []}, {modules, []},
{registered, [emqx_auth_mongo_sup]}, {registered, [emqx_auth_mongo_sup]},
{applications, [kernel,stdlib,mongodb,ecpool]}, {applications, [kernel,stdlib,mongodb,ecpool]},

View File

@ -1,19 +1,29 @@
%% -*- mode: erlang -*- %% -*- mode: erlang -*-
%% Unless you know what you are doing, DO NOT edit manually!! %% Unless you know what you are doing, DO NOT edit manually!!
{VSN, {VSN,
[{<<"4\\.3\\.[1-3]">>, [{<<"4\\.4\\.[2-3]">>,
[{load_module,emqx_auth_mongo_app,brutal_purge,soft_purge,[]}, [{load_module,emqx_auth_mongo_app,brutal_purge,soft_purge,[]},
{load_module,emqx_auth_mongo,brutal_purge,soft_purge,[]}]}, {load_module,emqx_auth_mongo,brutal_purge,soft_purge,[]}]},
{"4.3.0", {"4.4.1",
[{load_module,emqx_auth_mongo_app,brutal_purge,soft_purge,[]}, [{load_module,emqx_auth_mongo_app,brutal_purge,soft_purge,[]},
{load_module,emqx_auth_mongo_sup,brutal_purge,soft_purge,[]},
{load_module,emqx_auth_mongo,brutal_purge,soft_purge,[]}]},
{"4.4.0",
[{load_module,emqx_auth_mongo_sup,brutal_purge,soft_purge,[]},
{load_module,emqx_auth_mongo_app,brutal_purge,soft_purge,[]},
{load_module,emqx_auth_mongo,brutal_purge,soft_purge,[]}, {load_module,emqx_auth_mongo,brutal_purge,soft_purge,[]},
{load_module,emqx_acl_mongo,brutal_purge,soft_purge,[]}]}, {load_module,emqx_acl_mongo,brutal_purge,soft_purge,[]}]},
{<<".*">>,[]}], {<<".*">>,[]}],
[{<<"4\\.3\\.[1-3]">>, [{<<"4\\.4\\.[2-3]">>,
[{load_module,emqx_auth_mongo_app,brutal_purge,soft_purge,[]}, [{load_module,emqx_auth_mongo_app,brutal_purge,soft_purge,[]},
{load_module,emqx_auth_mongo,brutal_purge,soft_purge,[]}]}, {load_module,emqx_auth_mongo,brutal_purge,soft_purge,[]}]},
{"4.3.0", {"4.4.1",
[{load_module,emqx_auth_mongo_app,brutal_purge,soft_purge,[]}, [{load_module,emqx_auth_mongo_app,brutal_purge,soft_purge,[]},
{load_module,emqx_auth_mongo_sup,brutal_purge,soft_purge,[]},
{load_module,emqx_auth_mongo,brutal_purge,soft_purge,[]}]},
{"4.4.0",
[{load_module,emqx_auth_mongo_sup,brutal_purge,soft_purge,[]},
{load_module,emqx_auth_mongo_app,brutal_purge,soft_purge,[]},
{load_module,emqx_auth_mongo,brutal_purge,soft_purge,[]}, {load_module,emqx_auth_mongo,brutal_purge,soft_purge,[]},
{load_module,emqx_acl_mongo,brutal_purge,soft_purge,[]}]}, {load_module,emqx_acl_mongo,brutal_purge,soft_purge,[]}]},
{<<".*">>,[]}]}. {<<".*">>,[]}]}.

View File

@ -28,7 +28,97 @@ start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []). supervisor:start_link({local, ?MODULE}, ?MODULE, []).
init([]) -> init([]) ->
{ok, PoolEnv} = application:get_env(?APP, server), {ok, Opts} = application:get_env(?APP, server),
PoolSpec = ecpool:pool_spec(?APP, ?APP, ?APP, PoolEnv), NOpts = may_parse_srv_and_txt_records(Opts),
PoolSpec = ecpool:pool_spec(?APP, ?APP, ?APP, NOpts),
{ok, {{one_for_all, 10, 100}, [PoolSpec]}}. {ok, {{one_for_all, 10, 100}, [PoolSpec]}}.
may_parse_srv_and_txt_records(Opts) when is_list(Opts) ->
Default = #{srv_record => false},
maps:to_list(may_parse_srv_and_txt_records(maps:merge(Default, maps:from_list(Opts))));
may_parse_srv_and_txt_records(#{type := Type,
srv_record := false,
server := Server} = Opts) ->
Hosts = to_hosts(Server),
case Type =:= rs of
true ->
case maps:get(rs_set_name, Opts, undefined) of
undefined ->
error({missing_parameter, rs_set_name});
ReplicaSet ->
Opts#{type => {rs, ReplicaSet},
hosts => Hosts}
end;
false ->
Opts#{hosts => Hosts}
end;
may_parse_srv_and_txt_records(#{type := Type,
srv_record := true,
server := Server,
worker_options := WorkerOptions} = Opts) ->
Hosts = parse_srv_records(Server),
Opts0 = parse_txt_records(Type, Server),
NWorkerOptions = maps:to_list(maps:merge(maps:from_list(WorkerOptions), maps:with([auth_source], Opts0))),
NOpts = Opts#{hosts => Hosts, worker_options => NWorkerOptions},
case Type =:= rs of
true ->
case maps:get(rs_set_name, Opts0, maps:get(rs_set_name, NOpts, undefined)) of
undefined ->
error({missing_parameter, rs_set_name});
ReplicaSet ->
NOpts#{type => {Type, ReplicaSet}}
end;
false ->
NOpts
end.
to_hosts(Server) ->
[string:trim(H) || H <- string:tokens(Server, ",")].
parse_srv_records(Server) ->
case inet_res:lookup("_mongodb._tcp." ++ Server, in, srv) of
[] ->
error(service_not_found);
Services ->
[Host ++ ":" ++ integer_to_list(Port) || {_, _, Port, Host} <- Services]
end.
parse_txt_records(Type, Server) ->
case inet_res:lookup(Server, in, txt) of
[] ->
#{};
[[QueryString]] ->
case uri_string:dissect_query(QueryString) of
{error, _, _} ->
error({invalid_txt_record, invalid_query_string});
Options ->
Fields = case Type of
rs -> ["authSource", "replicaSet"];
_ -> ["authSource"]
end,
take_and_convert(Fields, Options)
end;
_ ->
error({invalid_txt_record, multiple_records})
end.
take_and_convert(Fields, Options) ->
take_and_convert(Fields, Options, #{}).
take_and_convert([], [_ | _], _Acc) ->
error({invalid_txt_record, invalid_option});
take_and_convert([], [], Acc) ->
Acc;
take_and_convert([Field | More], Options, Acc) ->
case lists:keytake(Field, 1, Options) of
{value, {"authSource", V}, NOptions} ->
take_and_convert(More, NOptions, Acc#{auth_source => list_to_binary(V)});
{value, {"replicaSet", V}, NOptions} ->
take_and_convert(More, NOptions, Acc#{rs_set_name => list_to_binary(V)});
{value, _, _} ->
error({invalid_txt_record, invalid_option});
false ->
take_and_convert(More, Options, Acc)
end.

View File

@ -1,6 +1,6 @@
{application, emqx_auth_pgsql, {application, emqx_auth_pgsql,
[{description, "EMQ X Authentication/ACL with PostgreSQL"}, [{description, "EMQ X Authentication/ACL with PostgreSQL"},
{vsn, "4.3.3"}, % strict semver, bump manually! {vsn, "4.4.3"}, % strict semver, bump manually!
{modules, []}, {modules, []},
{registered, [emqx_auth_pgsql_sup]}, {registered, [emqx_auth_pgsql_sup]},
{applications, [kernel,stdlib,epgsql,ecpool]}, {applications, [kernel,stdlib,epgsql,ecpool]},

View File

@ -1,11 +1,15 @@
%% -*- mode: erlang -*- %% -*- mode: erlang -*-
%% Unless you know what you are doing, DO NOT edit manually!! %% Unless you know what you are doing, DO NOT edit manually!!
{VSN, {VSN,
[{<<"4\\.3\\.[0-2]">>, [{<<"4\\.4\\.[0-2]">>,
%% restart it due to epgsql upgraded from 4.4.0 to 4.6.0 %% restart it due to epgsql upgraded from 4.4.0 to 4.6.0
%% in emqx_auth_pgsql:v4.3.3 %% in emqx_auth_pgsql:v4.3.3
[{restart_application,emqx_auth_pgsql}]}, [{load_module,emqx_auth_pgsql,brutal_purge,soft_purge,[]},
{load_module,emqx_auth_pgsql_app,brutal_purge,soft_purge,[]}
]},
{<<".*">>,[]}], {<<".*">>,[]}],
[{<<"4\\.3\\.[0-2]">>, [{<<"4\\.4\\.[0-2]">>,
[{restart_application,emqx_auth_pgsql}]}, [{load_module,emqx_auth_pgsql,brutal_purge,soft_purge,[]},
{load_module,emqx_auth_pgsql_app,brutal_purge,soft_purge,[]}
]},
{<<".*">>,[]}]}. {<<".*">>,[]}]}.

View File

@ -24,6 +24,12 @@
## Value: false | Duration ## Value: false | Duration
#exhook.auto_reconnect = 60s #exhook.auto_reconnect = 60s
## The process pool size for gRPC client
##
## Default: Equals cpu cores
## Value: Integer
#exhook.pool_size = 16
## The exhook execution priority on the Chain of the emqx hooks. ## The exhook execution priority on the Chain of the emqx hooks.
## ##
## Modify the field to fix the exhook execute order before/after other plugins/modules. ## Modify the field to fix the exhook execute order before/after other plugins/modules.

View File

@ -31,6 +31,10 @@
end end
end}. end}.
{mapping, "exhook.pool_size", "emqx_exhook.pool_size", [
{datatype, integer}
]}.
{mapping, "exhook.server.$name.url", "emqx_exhook.servers", [ {mapping, "exhook.server.$name.url", "emqx_exhook.servers", [
{datatype, string} {datatype, string}
]}. ]}.

View File

@ -403,6 +403,31 @@ message Message {
bytes payload = 6; bytes payload = 6;
uint64 timestamp = 7; uint64 timestamp = 7;
// The key of header can be:
// - username:
// * Readonly
// * The username of sender client
// * Value type: utf8 string
// - protocol:
// * Readonly
// * The protocol name of sender client
// * Value type: string enum with "mqtt", "mqtt-sn", ...
// - peerhost:
// * Readonly
// * The peerhost of sender client
// * Value type: ip address string
// - allow_publish:
// * Writable
// * Whether to allow the message to be published by emqx
// * Value type: string enum with "true", "false", default is "true"
//
// Notes: All header may be missing, which means that the message does not
// carry these headers. We can guarantee that clients coming from MQTT,
// MQTT-SN, CoAP, LwM2M and other natively supported protocol clients will
// carry these headers, but there is no guarantee that messages published
// by other means will do, e.g. messages published by HTTP-API
map<string, string> headers = 8;
} }
message Property { message Property {

View File

@ -1,7 +1,7 @@
%% -*- mode: erlang -*- %% -*- mode: erlang -*-
{application, emqx_exhook, {application, emqx_exhook,
[{description, "EMQ X Extension for Hook"}, [{description, "EMQ X Extension for Hook"},
{vsn, "4.3.7"}, {vsn, "4.4.3"},
{modules, []}, {modules, []},
{registered, []}, {registered, []},
{mod, {emqx_exhook_app, []}}, {mod, {emqx_exhook_app, []}},

View File

@ -1,30 +1,26 @@
%% -*- mode: erlang -*- %% -*- mode: erlang -*-
%% Unless you know what you are doing, DO NOT edit manually!! %% Unless you know what you are doing, DO NOT edit manually!!
{VSN, {VSN,
[{<<"4\\.3\\.[5-6]">>, [
[{load_module,emqx_exhook_server,brutal_purge,soft_purge,[]}, {<<"4\\.4\\.[1-2]">>,
{load_module,emqx_exhook_mngr,brutal_purge,soft_purge,[]}]}, [{load_module, emqx_exhook_server, brutal_purge, soft_purge, []},
{"4.3.4", {load_module,emqx_exhook_mngr,brutal_purge,soft_purge,[]}]},
[{load_module,emqx_exhook_sup,brutal_purge,soft_purge,[]}, {"4.4.0",
{load_module,emqx_exhook_server,brutal_purge,soft_purge,[]}, [{load_module,emqx_exhook_pb,brutal_purge,soft_purge,[]},
{load_module,emqx_exhook_handler,brutal_purge,soft_purge,[]}, {load_module,emqx_exhook,brutal_purge,soft_purge,[]},
{load_module,emqx_exhook_pb,brutal_purge,soft_purge,[]}, {load_module,emqx_exhook_sup,brutal_purge,soft_purge,[]},
{load_module,emqx_exhook,brutal_purge,soft_purge,[]}, {load_module,emqx_exhook_server,brutal_purge,soft_purge,[]},
{update,emqx_exhook_mngr,{advanced,["4.3.4"]}}]}, {load_module,emqx_exhook_handler,brutal_purge,soft_purge,[]},
{<<"4\\.3\\.[0-3]">>,[{restart_application,emqx_exhook}]}, {update, emqx_exhook_mngr, {advanced, ["4.4.0"]}}]},
{<<".*">>,[]}], {<<".*">>,[]}],
[{<<"4\\.3\\.[5-6]">>, [{<<"4\\.4\\.[1-2]">>,
[{load_module,emqx_exhook_server,brutal_purge,soft_purge,[]}, [{load_module, emqx_exhook_server, brutal_purge, soft_purge, []},
{load_module,emqx_exhook_mngr,brutal_purge,soft_purge,[]}]}, {load_module,emqx_exhook_mngr,brutal_purge,soft_purge,[]}]},
{"4.3.5", {"4.4.0",
[{load_module,emqx_exhook_mngr,brutal_purge,soft_purge,[]}, [{load_module,emqx_exhook_pb,brutal_purge,soft_purge,[]},
{load_module,emqx_exhook_server,brutal_purge,soft_purge,[]}]}, {load_module,emqx_exhook,brutal_purge,soft_purge,[]},
{"4.3.4", {load_module,emqx_exhook_sup,brutal_purge,soft_purge,[]},
[{load_module,emqx_exhook_sup,brutal_purge,soft_purge,[]}, {load_module,emqx_exhook_server,brutal_purge,soft_purge,[]},
{load_module,emqx_exhook_server,brutal_purge,soft_purge,[]}, {load_module,emqx_exhook_handler,brutal_purge,soft_purge,[]},
{load_module,emqx_exhook_handler,brutal_purge,soft_purge,[]}, {update, emqx_exhook_mngr, {advanced, ["4.4.0"]}}]},
{load_module,emqx_exhook_pb,brutal_purge,soft_purge,[]}, {<<".*">>,[]}]}.
{load_module,emqx_exhook,brutal_purge,soft_purge,[]},
{update,emqx_exhook_mngr,{advanced,["4.3.4"]}}]},
{<<"4\\.3\\.[0-3]">>,[{restart_application,emqx_exhook}]},
{<<".*">>,[]}]}.

View File

@ -50,6 +50,7 @@
%% Utils %% Utils
-export([ message/1 -export([ message/1
, headers/1
, stringfy/1 , stringfy/1
, merge_responsed_bool/2 , merge_responsed_bool/2
, merge_responsed_message/2 , merge_responsed_message/2
@ -62,6 +63,8 @@
, call_fold/3 , call_fold/3
]). ]).
-elvis([{elvis_style, god_modules, disable}]).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Clients %% Clients
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -258,17 +261,58 @@ clientinfo(ClientInfo =
cn => maybe(maps:get(cn, ClientInfo, undefined)), cn => maybe(maps:get(cn, ClientInfo, undefined)),
dn => maybe(maps:get(dn, ClientInfo, undefined))}. dn => maybe(maps:get(dn, ClientInfo, undefined))}.
message(#message{id = Id, qos = Qos, from = From, topic = Topic, payload = Payload, timestamp = Ts}) -> message(#message{id = Id, qos = Qos, from = From, topic = Topic,
payload = Payload, timestamp = Ts, headers = Headers}) ->
#{node => stringfy(node()), #{node => stringfy(node()),
id => emqx_guid:to_hexstr(Id), id => emqx_guid:to_hexstr(Id),
qos => Qos, qos => Qos,
from => stringfy(From), from => stringfy(From),
topic => Topic, topic => Topic,
payload => Payload, payload => Payload,
timestamp => Ts}. timestamp => Ts,
headers => headers(Headers)
}.
assign_to_message(#{qos := Qos, topic := Topic, payload := Payload}, Message) -> headers(Headers) ->
Message#message{qos = Qos, topic = Topic, payload = Payload}. Ls = [username, protocol, peerhost, allow_publish],
maps:fold(
fun
(_, undefined, Acc) ->
Acc; %% Ignore undefined value
(K, V, Acc) ->
case lists:member(K, Ls) of
true ->
Acc#{atom_to_binary(K) => bin(K, V)};
_ ->
Acc
end
end, #{}, Headers).
bin(K, V) when K == username;
K == protocol;
K == allow_publish ->
bin(V);
bin(peerhost, V) ->
bin(inet:ntoa(V)).
bin(V) when is_binary(V) -> V;
bin(V) when is_atom(V) -> atom_to_binary(V);
bin(V) when is_list(V) -> iolist_to_binary(V).
assign_to_message(InMessage = #{qos := Qos, topic := Topic,
payload := Payload}, Message) ->
NMsg = Message#message{qos = Qos, topic = Topic, payload = Payload},
enrich_header(maps:get(headers, InMessage, #{}), NMsg).
enrich_header(Headers, Message) ->
case maps:get(<<"allow_publish">>, Headers, undefined) of
<<"false">> ->
emqx_message:set_header(allow_publish, false, Message);
<<"true">> ->
emqx_message:set_header(allow_publish, true, Message);
_ ->
Message
end.
topicfilters(Tfs) when is_list(Tfs) -> topicfilters(Tfs) when is_list(Tfs) ->
[#{name => Topic, qos => Qos} || {Topic, #{qos := Qos}} <- Tfs]. [#{name => Topic, qos => Qos} || {Topic, #{qos := Qos}} <- Tfs].
@ -301,11 +345,7 @@ merge_responsed_bool(_Req, #{type := 'IGNORE'}) ->
ignore; ignore;
merge_responsed_bool(Req, #{type := Type, value := {bool_result, NewBool}}) merge_responsed_bool(Req, #{type := Type, value := {bool_result, NewBool}})
when is_boolean(NewBool) -> when is_boolean(NewBool) ->
NReq = Req#{result => NewBool}, {ret(Type), Req#{result => NewBool}};
case Type of
'CONTINUE' -> {ok, NReq};
'STOP_AND_RETURN' -> {stop, NReq}
end;
merge_responsed_bool(_Req, Resp) -> merge_responsed_bool(_Req, Resp) ->
?LOG(warning, "Unknown responsed value ~0p to merge to callback chain", [Resp]), ?LOG(warning, "Unknown responsed value ~0p to merge to callback chain", [Resp]),
ignore. ignore.
@ -313,11 +353,10 @@ merge_responsed_bool(_Req, Resp) ->
merge_responsed_message(_Req, #{type := 'IGNORE'}) -> merge_responsed_message(_Req, #{type := 'IGNORE'}) ->
ignore; ignore;
merge_responsed_message(Req, #{type := Type, value := {message, NMessage}}) -> merge_responsed_message(Req, #{type := Type, value := {message, NMessage}}) ->
NReq = Req#{message => NMessage}, {ret(Type), Req#{message => NMessage}};
case Type of
'CONTINUE' -> {ok, NReq};
'STOP_AND_RETURN' -> {stop, NReq}
end;
merge_responsed_message(_Req, Resp) -> merge_responsed_message(_Req, Resp) ->
?LOG(warning, "Unknown responsed value ~0p to merge to callback chain", [Resp]), ?LOG(warning, "Unknown responsed value ~0p to merge to callback chain", [Resp]),
ignore. ignore.
ret('CONTINUE') -> ok;
ret('STOP_AND_RETURN') -> stop.

View File

@ -36,6 +36,8 @@
, server/1 , server/1
, put_request_failed_action/1 , put_request_failed_action/1
, get_request_failed_action/0 , get_request_failed_action/0
, put_pool_size/1
, get_pool_size/0
]). ]).
%% gen_server callbacks %% gen_server callbacks
@ -93,11 +95,11 @@
start_link(Servers, AutoReconnect, ReqOpts, HooksOpts) -> start_link(Servers, AutoReconnect, ReqOpts, HooksOpts) ->
gen_server:start_link(?MODULE, [Servers, AutoReconnect, ReqOpts, HooksOpts], []). gen_server:start_link(?MODULE, [Servers, AutoReconnect, ReqOpts, HooksOpts], []).
-spec enable(pid(), atom()|string()) -> ok | {error, term()}. -spec enable(pid(), atom() | string()) -> ok | {error, term()}.
enable(Pid, Name) -> enable(Pid, Name) ->
call(Pid, {load, Name}). call(Pid, {load, Name}).
-spec disable(pid(), atom()|string()) -> ok | {error, term()}. -spec disable(pid(), atom() | string()) -> ok | {error, term()}.
disable(Pid, Name) -> disable(Pid, Name) ->
call(Pid, {unload, Name}). call(Pid, {unload, Name}).
@ -126,6 +128,9 @@ init([Servers, AutoReconnect, ReqOpts0, HooksOpts]) ->
put_request_failed_action( put_request_failed_action(
maps:get(request_failed_action, ReqOpts0, deny) maps:get(request_failed_action, ReqOpts0, deny)
), ),
put_pool_size(
maps:get(pool_size, ReqOpts0, erlang:system_info(schedulers))
),
%% Load the hook servers %% Load the hook servers
ReqOpts = maps:without([request_failed_action], ReqOpts0), ReqOpts = maps:without([request_failed_action], ReqOpts0),
@ -144,6 +149,7 @@ init([Servers, AutoReconnect, ReqOpts0, HooksOpts]) ->
%% @private %% @private
load_all_servers(Servers, ReqOpts, HooksOpts) -> load_all_servers(Servers, ReqOpts, HooksOpts) ->
load_all_servers(Servers, ReqOpts, HooksOpts, #{}, #{}). load_all_servers(Servers, ReqOpts, HooksOpts, #{}, #{}).
load_all_servers([], _Request, _HooksOpts, Waiting, Running) -> load_all_servers([], _Request, _HooksOpts, Waiting, Running) ->
{Waiting, Running}; {Waiting, Running};
load_all_servers([{Name, Options} | More], ReqOpts, HooksOpts, Waiting, Running) -> load_all_servers([{Name, Options} | More], ReqOpts, HooksOpts, Waiting, Running) ->
@ -223,7 +229,7 @@ terminate(_Reason, State = #state{running = Running}) ->
%% in the emqx_exhook:v4.3.5, we have added one new field in the state last: %% in the emqx_exhook:v4.3.5, we have added one new field in the state last:
%% - hooks_options :: map() %% - hooks_options :: map()
code_change({down, _Vsn}, State, [ToVsn]) -> code_change({down, _Vsn}, State, [ToVsn]) ->
case re:run(ToVsn, "4\\.3\\.[0-4]") of case re:run(ToVsn, "4\\.4\\.0") of
{match, _} -> {match, _} ->
NState = list_to_tuple( NState = list_to_tuple(
lists:droplast( lists:droplast(
@ -233,7 +239,7 @@ code_change({down, _Vsn}, State, [ToVsn]) ->
{ok, State} {ok, State}
end; end;
code_change(_Vsn, State, [FromVsn]) -> code_change(_Vsn, State, [FromVsn]) ->
case re:run(FromVsn, "4\\.3\\.[0-4]") of case re:run(FromVsn, "4\\.4\\.0") of
{match, _} -> {match, _} ->
NState = list_to_tuple( NState = list_to_tuple(
tuple_to_list(State) ++ [?DEFAULT_HOOK_OPTS]), tuple_to_list(State) ++ [?DEFAULT_HOOK_OPTS]),
@ -327,6 +333,14 @@ put_request_failed_action(Val) ->
get_request_failed_action() -> get_request_failed_action() ->
persistent_term:get({?APP, request_failed_action}). persistent_term:get({?APP, request_failed_action}).
put_pool_size(Val) ->
persistent_term:put({?APP, pool_size}, Val).
get_pool_size() ->
%% Avoid the scenario that the parameter is not set after
%% the hot upgrade completed.
persistent_term:get({?APP, pool_size}, erlang:system_info(schedulers)).
save(Name, ServerState) -> save(Name, ServerState) ->
Saved = persistent_term:get(?APP, []), Saved = persistent_term:get(?APP, []),
persistent_term:put(?APP, lists:usort(lists:reverse([Name | Saved]))), persistent_term:put(?APP, lists:usort(lists:reverse([Name | Saved]))),

View File

@ -77,6 +77,8 @@
-dialyzer({nowarn_function, [inc_metrics/2]}). -dialyzer({nowarn_function, [inc_metrics/2]}).
-elvis([{elvis_style, dont_repeat_yourself, disable}]).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Load/Unload APIs %% Load/Unload APIs
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -127,7 +129,11 @@ channel_opts(Opts) ->
SockOpts = proplists:get_value(socket_options, Opts), SockOpts = proplists:get_value(socket_options, Opts),
ClientOpts = case Scheme of ClientOpts = case Scheme of
https -> https ->
SslOpts = lists:keydelete(ssl, 1, proplists:get_value(ssl_options, Opts, [])), SslOpts = lists:keydelete(
ssl,
1,
proplists:get_value(ssl_options, Opts, [])
),
#{gun_opts => #{gun_opts =>
#{transport => ssl, #{transport => ssl,
transport_opts => SockOpts ++ SslOpts}}; transport_opts => SockOpts ++ SslOpts}};
@ -135,7 +141,8 @@ channel_opts(Opts) ->
#{gun_opts => #{gun_opts =>
#{transport_opts => SockOpts}} #{transport_opts => SockOpts}}
end, end,
{SvrAddr, ClientOpts}. NClientOpts = ClientOpts#{pool_size => emqx_exhook_mngr:get_pool_size()},
{SvrAddr, NClientOpts}.
format_http_uri(Scheme, Host0, Port) -> format_http_uri(Scheme, Host0, Port) ->
Host = case is_tuple(Host0) of Host = case is_tuple(Host0) of
@ -261,7 +268,7 @@ call(Hookpoint, Req, #server{name = ChannName, options = ReqOpts,
%% @private %% @private
inc_metrics(IncFun, Name) when is_function(IncFun) -> inc_metrics(IncFun, Name) when is_function(IncFun) ->
%% BACKW: e4.2.0-e4.2.2 %% BACKW: e4.2.0-e4.2.2
{env, [Prefix|_]} = erlang:fun_info(IncFun, env), {env, [Prefix | _]} = erlang:fun_info(IncFun, env),
inc_metrics(Prefix, Name); inc_metrics(Prefix, Name);
inc_metrics(Prefix, Name) when is_list(Prefix) -> inc_metrics(Prefix, Name) when is_list(Prefix) ->
emqx_metrics:inc(list_to_atom(Prefix ++ atom_to_list(Name))). emqx_metrics:inc(list_to_atom(Prefix ++ atom_to_list(Name))).
@ -278,8 +285,8 @@ do_call(ChannName, Fun, Req, ReqOpts) ->
Options = ReqOpts#{channel => ChannName, key_dispatch => key_dispatch(NReq)}, Options = ReqOpts#{channel => ChannName, key_dispatch => key_dispatch(NReq)},
?LOG(debug, "Call ~0p:~0p(~0p, ~0p)", [?PB_CLIENT_MOD, Fun, NReq, Options]), ?LOG(debug, "Call ~0p:~0p(~0p, ~0p)", [?PB_CLIENT_MOD, Fun, NReq, Options]),
case catch apply(?PB_CLIENT_MOD, Fun, [NReq, Options]) of case catch apply(?PB_CLIENT_MOD, Fun, [NReq, Options]) of
{ok, Resp, _Metadata} -> {ok, Resp, Metadata} ->
?LOG(debug, "Response {ok, ~0p, ~0p}", [Resp, _Metadata]), ?LOG(debug, "Response {ok, ~0p, ~0p}", [Resp, Metadata]),
{ok, Resp}; {ok, Resp};
{error, {Code, Msg}, _Metadata} -> {error, {Code, Msg}, _Metadata} ->
?LOG(error, "CALL ~0p:~0p(~0p, ~0p) response errcode: ~0p, errmsg: ~0p", ?LOG(error, "CALL ~0p:~0p(~0p, ~0p) response errcode: ~0p, errmsg: ~0p",

View File

@ -56,7 +56,8 @@ auto_reconnect() ->
request_options() -> request_options() ->
#{timeout => env(request_timeout, 5000), #{timeout => env(request_timeout, 5000),
request_failed_action => env(request_failed_action, deny) request_failed_action => env(request_failed_action, deny),
pool_size => env(pool_size, erlang:system_info(schedulers))
}. }.
hooks_options() -> hooks_options() ->
@ -73,7 +74,7 @@ env(Key, Def) ->
-spec start_grpc_client_channel( -spec start_grpc_client_channel(
string(), string(),
uri_string:uri_string(), uri_string:uri_string(),
grpc_client:options()) -> {ok, pid()} | {error, term()}. grpc_client_sup:options()) -> {ok, pid()} | {error, term()}.
start_grpc_client_channel(Name, SvrAddr, Options) -> start_grpc_client_channel(Name, SvrAddr, Options) ->
grpc_client_sup:create_channel_pool(Name, SvrAddr, Options). grpc_client_sup:create_channel_pool(Name, SvrAddr, Options).

View File

@ -308,21 +308,31 @@ on_message_publish(#{message := #{from := From} = Msg} = Req, Md) ->
%% some cases for testing %% some cases for testing
case From of case From of
<<"baduser">> -> <<"baduser">> ->
NMsg = Msg#{qos => 0, NMsg = deny(Msg#{qos => 0,
topic => <<"">>, topic => <<"">>,
payload => <<"">> payload => <<"">>
}, }),
{ok, #{type => 'STOP_AND_RETURN', {ok, #{type => 'STOP_AND_RETURN',
value => {message, NMsg}}, Md}; value => {message, NMsg}}, Md};
<<"gooduser">> -> <<"gooduser">> ->
NMsg = Msg#{topic => From, NMsg = allow(Msg#{topic => From,
payload => From}, payload => From}),
{ok, #{type => 'STOP_AND_RETURN', {ok, #{type => 'STOP_AND_RETURN',
value => {message, NMsg}}, Md}; value => {message, NMsg}}, Md};
_ -> _ ->
{ok, #{type => 'IGNORE'}, Md} {ok, #{type => 'IGNORE'}, Md}
end. end.
deny(Msg) ->
NHeader = maps:put(<<"allow_publish">>, <<"false">>,
maps:get(headers, Msg, #{})),
maps:put(headers, NHeader, Msg).
allow(Msg) ->
NHeader = maps:put(<<"allow_publish">>, <<"true">>,
maps:get(headers, Msg, #{})),
maps:put(headers, NHeader, Msg).
-spec on_message_delivered(emqx_exhook_pb:message_delivered_request(), grpc:metadata()) -spec on_message_delivered(emqx_exhook_pb:message_delivered_request(), grpc:metadata())
-> {ok, emqx_exhook_pb:empty_success(), grpc:metadata()} -> {ok, emqx_exhook_pb:empty_success(), grpc:metadata()}
| {error, grpc_cowboy_h:error_response()}. | {error, grpc_cowboy_h:error_response()}.

View File

@ -320,19 +320,24 @@ prop_message_publish() ->
_ -> _ ->
ExpectedOutMsg = case emqx_message:from(Msg) of ExpectedOutMsg = case emqx_message:from(Msg) of
<<"baduser">> -> <<"baduser">> ->
MsgMap = emqx_message:to_map(Msg), MsgMap = #{headers := Headers}
= emqx_message:to_map(Msg),
emqx_message:from_map( emqx_message:from_map(
MsgMap#{qos => 0, MsgMap#{qos => 0,
topic => <<"">>, topic => <<"">>,
payload => <<"">> payload => <<"">>,
headers => maps:put(allow_publish, false, Headers)
}); });
<<"gooduser">> = From -> <<"gooduser">> = From ->
MsgMap = emqx_message:to_map(Msg), MsgMap = #{headers := Headers}
= emqx_message:to_map(Msg),
emqx_message:from_map( emqx_message:from_map(
MsgMap#{topic => From, MsgMap#{topic => From,
payload => From payload => From,
headers => maps:put(allow_publish, true, Headers)
}); });
_ -> Msg _ ->
Msg
end, end,
?assertEqual(ExpectedOutMsg, OutMsg), ?assertEqual(ExpectedOutMsg, OutMsg),
@ -494,7 +499,9 @@ from_message(Msg) ->
from => stringfy(emqx_message:from(Msg)), from => stringfy(emqx_message:from(Msg)),
topic => emqx_message:topic(Msg), topic => emqx_message:topic(Msg),
payload => emqx_message:payload(Msg), payload => emqx_message:payload(Msg),
timestamp => emqx_message:timestamp(Msg) timestamp => emqx_message:timestamp(Msg),
headers => emqx_exhook_handler:headers(
emqx_message:get_headers(Msg))
}. }.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------

View File

@ -1,10 +1,10 @@
{deps, {deps,
[{lwm2m_coap, {git, "https://github.com/emqx/lwm2m-coap", {tag, "v1.1.5"}}} [{lwm2m_coap, {git, "https://github.com/emqx/lwm2m-coap", {tag, "v2.0.1"}}}
]}. ]}.
{profiles, {profiles,
[{test, [{test,
[{deps, [{er_coap_client, {git, "https://github.com/emqx/er_coap_client", {tag, "v1.0"}}}, [{deps, [{er_coap_client, {git, "https://github.com/emqx/er_coap_client", {tag, "v1.0.4"}}},
{emqx_ct_helpers, {git, "https://github.com/emqx/emqx-ct-helpers", {tag, "1.2.2"}}}, {emqx_ct_helpers, {git, "https://github.com/emqx/emqx-ct-helpers", {tag, "1.2.2"}}},
{emqtt, {git, "https://github.com/emqx/emqtt", {tag, "1.2.0"}}} {emqtt, {git, "https://github.com/emqx/emqtt", {tag, "1.2.0"}}}
]} ]}

View File

@ -33,4 +33,4 @@
-define(ERROR15, 115). %% bad topic -define(ERROR15, 115). %% bad topic
-define(ERROR16, 116). %% bad QoS -define(ERROR16, 116). %% bad QoS
-define(VERSIONS, ["4.0", "4.1", "4.2", "4.3"]). -define(VERSIONS, ["4.0", "4.1", "4.2", "4.3", "4.4"]).

View File

@ -1,9 +1,9 @@
{application, emqx_management, {application, emqx_management,
[{description, "EMQ X Management API and CLI"}, [{description, "EMQ X Management API and CLI"},
{vsn, "4.3.16"}, % strict semver, bump manually! {vsn, "4.4.7"}, % strict semver, bump manually!
{modules, []}, {modules, []},
{registered, [emqx_management_sup]}, {registered, [emqx_management_sup]},
{applications, [kernel,stdlib,minirest]}, {applications, [kernel,stdlib,emqx_plugin_libs,minirest]},
{mod, {emqx_mgmt_app,[]}}, {mod, {emqx_mgmt_app,[]}},
{env, []}, {env, []},
{licenses, ["Apache-2.0"]}, {licenses, ["Apache-2.0"]},

View File

@ -1,20 +1,14 @@
%% -*- mode: erlang -*- %% -*- mode: erlang -*-
{VSN, {VSN,
[ {<<"4\\.3\\.[0-9]+">>, [{<<".*">>,
[ %% Stop the http listener to load the latest http handlers. [%% Stop the http listener to load the latest http handlers.
%% We can only stop these listeners here because we can't get the list of %% We can only stop these listeners here because we can't get the list of
%% currently started http listener via app-env during the hot upgrade. %% currently started http listener via app-env during the hot upgrade.
{apply,{minirest,stop_http,['http:management']}}, {apply,{minirest,stop_http,['http:management']}},
{apply,{minirest,stop_http,['https:management']}}, {apply,{minirest,stop_http,['https:management']}},
{restart_application, emqx_management} {restart_application, emqx_management}]}],
]}, [{<<".*">>,
{<<".*">>, []} [{apply,{minirest,stop_http,['http:management']}},
], {apply,{minirest,stop_http,['https:management']}},
[ {<<"4\\.3\\.[0-9]+">>, {restart_application, emqx_management}]}]
[ {apply,{minirest,stop_http,['http:management']}},
{apply,{minirest,stop_http,['https:management']}},
{restart_application, emqx_management}
]},
{<<".*">>, []}
]
}. }.

View File

@ -22,6 +22,9 @@
-include_lib("emqx/include/emqx.hrl"). -include_lib("emqx/include/emqx.hrl").
-include_lib("emqx/include/emqx_mqtt.hrl"). -include_lib("emqx/include/emqx_mqtt.hrl").
-elvis([{elvis_style, invalid_dynamic_call, #{ignore => [emqx_mgmt]}}]).
-elvis([{elvis_style, god_modules, #{ignore => [emqx_mgmt]}}]).
%% Nodes and Brokers API %% Nodes and Brokers API
-export([ list_nodes/0 -export([ list_nodes/0
, lookup_node/1 , lookup_node/1
@ -49,6 +52,7 @@
, clean_acl_cache_all/1 , clean_acl_cache_all/1
, set_ratelimit_policy/2 , set_ratelimit_policy/2
, set_quota_policy/2 , set_quota_policy/2
, set_keepalive/2
]). ]).
-export([ clean_pem_cache/0 -export([ clean_pem_cache/0
@ -145,9 +149,8 @@ node_info(Node) when Node =:= node() ->
memory_used => erlang:round(Total * UsedRatio), memory_used => erlang:round(Total * UsedRatio),
process_available => erlang:system_info(process_limit), process_available => erlang:system_info(process_limit),
process_used => erlang:system_info(process_count), process_used => erlang:system_info(process_count),
max_fds => max_fds => proplists:get_value(max_fds,
proplists:get_value( max_fds lists:usort(lists:flatten(erlang:system_info(check_io)))),
, lists:usort(lists:flatten(erlang:system_info(check_io)))),
connections => ets:info(emqx_channel, size), connections => ets:info(emqx_channel, size),
node_status => 'Running', node_status => 'Running',
uptime => iolist_to_binary(proplists:get_value(uptime, BrokerInfo)), uptime => iolist_to_binary(proplists:get_value(uptime, BrokerInfo)),
@ -210,11 +213,11 @@ get_stats(Node) ->
lookup_client({clientid, ClientId}, FormatFun) -> lookup_client({clientid, ClientId}, FormatFun) ->
lists:append([lookup_client(Node, {clientid, ClientId}, FormatFun) lists:append([lookup_client(Node, {clientid, ClientId}, FormatFun)
|| Node <- ekka_mnesia:running_nodes()]); || Node <- ekka_mnesia:running_nodes()]);
lookup_client({username, Username}, FormatFun) -> lookup_client({username, Username}, FormatFun) ->
lists:append([lookup_client(Node, {username, Username}, FormatFun) lists:append([lookup_client(Node, {username, Username}, FormatFun)
|| Node <- ekka_mnesia:running_nodes()]). || Node <- ekka_mnesia:running_nodes()]).
lookup_client(Node, {clientid, ClientId}, {M,F}) when Node =:= node() -> lookup_client(Node, {clientid, ClientId}, {M,F}) when Node =:= node() ->
lists:append(lists:map( lists:append(lists:map(
@ -237,7 +240,7 @@ lookup_client(Node, {username, Username}, FormatFun) ->
kickout_client(ClientId) -> kickout_client(ClientId) ->
Results = [kickout_client(Node, ClientId) || Node <- ekka_mnesia:running_nodes()], Results = [kickout_client(Node, ClientId) || Node <- ekka_mnesia:running_nodes()],
check_every_ok(Results). has_any_ok(Results).
kickout_client(Node, ClientId) when Node =:= node() -> kickout_client(Node, ClientId) when Node =:= node() ->
emqx_cm:kick_session(ClientId); emqx_cm:kick_session(ClientId);
@ -250,7 +253,7 @@ list_acl_cache(ClientId) ->
clean_acl_cache(ClientId) -> clean_acl_cache(ClientId) ->
Results = [clean_acl_cache(Node, ClientId) || Node <- ekka_mnesia:running_nodes()], Results = [clean_acl_cache(Node, ClientId) || Node <- ekka_mnesia:running_nodes()],
check_every_ok(Results). has_any_ok(Results).
clean_acl_cache(Node, ClientId) when Node =:= node() -> clean_acl_cache(Node, ClientId) when Node =:= node() ->
case emqx_cm:lookup_channels(ClientId) of case emqx_cm:lookup_channels(ClientId) of
@ -284,6 +287,11 @@ set_ratelimit_policy(ClientId, Policy) ->
set_quota_policy(ClientId, Policy) -> set_quota_policy(ClientId, Policy) ->
call_client(ClientId, {quota, Policy}). call_client(ClientId, {quota, Policy}).
set_keepalive(ClientId, Interval)when Interval >= 0 andalso Interval =< 65535 ->
call_client(ClientId, {keepalive, Interval});
set_keepalive(_ClientId, _Interval) ->
{error, ?ERROR2, <<"mqtt3.1.1 specification: keepalive must between 0~65535">>}.
clean_pem_cache() -> clean_pem_cache() ->
for_nodes(fun clean_pem_cache/1). for_nodes(fun clean_pem_cache/1).
@ -334,7 +342,8 @@ list_subscriptions(Node) ->
list_subscriptions_via_topic(Topic, FormatFun) -> list_subscriptions_via_topic(Topic, FormatFun) ->
lists:append([list_subscriptions_via_topic(Node, Topic, FormatFun) lists:append([list_subscriptions_via_topic(Node, Topic, FormatFun)
|| Node <- ekka_mnesia:running_nodes()]). || Node <- ekka_mnesia:running_nodes()]).
list_subscriptions_via_topic(Node, Topic, {M,F}) when Node =:= node() -> list_subscriptions_via_topic(Node, Topic, {M,F}) when Node =:= node() ->
MatchSpec = [{{{'_', '$1'}, '_'}, [{'=:=','$1', Topic}], ['$_']}], MatchSpec = [{{{'_', '$1'}, '_'}, [{'=:=','$1', Topic}], ['$_']}],
@ -459,8 +468,8 @@ list_listeners(Node) when Node =:= node() ->
Http = lists:map(fun({Protocol, Opts}) -> Http = lists:map(fun({Protocol, Opts}) ->
#{protocol => Protocol, #{protocol => Protocol,
listen_on => proplists:get_value(port, Opts), listen_on => proplists:get_value(port, Opts),
acceptors => maps:get( num_acceptors acceptors => maps:get(num_acceptors,
, proplists:get_value(transport_options, Opts, #{}), 0), proplists:get_value(transport_options, Opts, #{}), 0),
max_conns => proplists:get_value(max_connections, Opts), max_conns => proplists:get_value(max_connections, Opts),
current_conns => proplists:get_value(all_connections, Opts), current_conns => proplists:get_value(all_connections, Opts),
shutdown_count => []} shutdown_count => []}
@ -509,10 +518,8 @@ add_duration_field([], _Now, Acc) ->
Acc; Acc;
add_duration_field([Alarm = #{activated := true, activate_at := ActivateAt} | Rest], Now, Acc) -> add_duration_field([Alarm = #{activated := true, activate_at := ActivateAt} | Rest], Now, Acc) ->
add_duration_field(Rest, Now, [Alarm#{duration => Now - ActivateAt} | Acc]); add_duration_field(Rest, Now, [Alarm#{duration => Now - ActivateAt} | Acc]);
add_duration_field([Alarm = #{ activated := false add_duration_field([Alarm = #{activated := false,
, activate_at := ActivateAt activate_at := ActivateAt, deactivate_at := DeactivateAt} | Rest], Now, Acc) ->
, deactivate_at := DeactivateAt}
| Rest], Now, Acc) ->
add_duration_field(Rest, Now, [Alarm#{duration => DeactivateAt - ActivateAt} | Acc]). add_duration_field(Rest, Now, [Alarm#{duration => DeactivateAt - ActivateAt} | Acc]).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -593,13 +600,13 @@ check_row_limit([Tab | Tables], Limit) ->
false -> check_row_limit(Tables, Limit) false -> check_row_limit(Tables, Limit)
end. end.
check_every_ok(Results) ->
case lists:any(fun(Item) -> Item =:= ok end, Results) of
true -> ok;
false -> lists:last(Results)
end.
max_row_limit() -> max_row_limit() ->
application:get_env(?APP, max_row_limit, ?MAX_ROW_LIMIT). application:get_env(?APP, max_row_limit, ?MAX_ROW_LIMIT).
table_size(Tab) -> ets:info(Tab, size). table_size(Tab) -> ets:info(Tab, size).
has_any_ok(Results) ->
case lists:any(fun(Item) -> Item =:= ok end, Results) of
true -> ok;
false -> lists:last(Results)
end.

View File

@ -55,10 +55,26 @@ paginate(Tables, Params, RowFun) ->
query_handle(Table) when is_atom(Table) -> query_handle(Table) when is_atom(Table) ->
qlc:q([R|| R <- ets:table(Table)]); qlc:q([R|| R <- ets:table(Table)]);
query_handle({Table, Opts}) when is_atom(Table) ->
qlc:q([R|| R <- ets:table(Table, Opts)]);
query_handle([Table]) when is_atom(Table) -> query_handle([Table]) when is_atom(Table) ->
qlc:q([R|| R <- ets:table(Table)]); qlc:q([R|| R <- ets:table(Table)]);
query_handle([{Table, Opts}]) when is_atom(Table) ->
qlc:q([R|| R <- ets:table(Table, Opts)]);
query_handle(Tables) -> query_handle(Tables) ->
qlc:append([qlc:q([E || E <- ets:table(T)]) || T <- Tables]). Fold = fun({Table, Opts}, Acc) ->
Handle = qlc:q([R|| R <- ets:table(Table, Opts)]),
[Handle | Acc];
(Table, Acc) ->
Handle = qlc:q([R|| R <- ets:table(Table)]),
[Handle | Acc]
end,
Handles = lists:foldl(Fold, [], Tables),
qlc:append(lists:reverse(Handles)).
count_size(Table, undefined) -> count_size(Table, undefined) ->
count(Table); count(Table);
@ -67,10 +83,23 @@ count_size(_Table, CountFun) ->
count(Table) when is_atom(Table) -> count(Table) when is_atom(Table) ->
ets:info(Table, size); ets:info(Table, size);
count({Table, _Opts}) when is_atom(Table) ->
ets:info(Table, size);
count([Table]) when is_atom(Table) -> count([Table]) when is_atom(Table) ->
ets:info(Table, size); ets:info(Table, size);
count([{Table, _Opts}]) when is_atom(Table) ->
ets:info(Table, size);
count(Tables) -> count(Tables) ->
lists:sum([count(T) || T <- Tables]). Fold = fun({Table, _Opts}, Acc) ->
count(Table) ++ Acc;
(Table, Acc) ->
count(Table) ++ Acc
end,
lists:foldl(Fold, 0, Tables).
count(Table, Nodes) -> count(Table, Nodes) ->
lists:sum([rpc_call(Node, ets, info, [Table, size], 5000) || Node <- Nodes]). lists:sum([rpc_call(Node, ets, info, [Table, size], 5000) || Node <- Nodes]).

View File

@ -121,6 +121,12 @@
func => clean_quota, func => clean_quota,
descr => "Clear the quota policy"}). descr => "Clear the quota policy"}).
-rest_api(#{name => set_keepalive,
method => 'PUT',
path => "/clients/:bin:clientid/keepalive",
func => set_keepalive,
descr => "Set the client keepalive"}).
-import(emqx_mgmt_util, [ ntoa/1 -import(emqx_mgmt_util, [ ntoa/1
, strftime/1 , strftime/1
]). ]).
@ -134,23 +140,24 @@
, set_quota_policy/2 , set_quota_policy/2
, clean_ratelimit/2 , clean_ratelimit/2
, clean_quota/2 , clean_quota/2
, set_keepalive/2
]). ]).
-export([ query/3 -export([ query/3
, format_channel_info/1 , format_channel_info/1
]). ]).
-define(query_fun, {?MODULE, query}). -define(QUERY_FUN, {?MODULE, query}).
-define(format_fun, {?MODULE, format_channel_info}). -define(FORMAT_FUN, {?MODULE, format_channel_info}).
list(Bindings, Params) when map_size(Bindings) == 0 -> list(Bindings, Params) when map_size(Bindings) == 0 ->
fence(fun() -> fence(fun() ->
emqx_mgmt_api:cluster_query(Params, ?CLIENT_QS_SCHEMA, ?query_fun) emqx_mgmt_api:cluster_query(Params, ?CLIENT_QS_SCHEMA, ?QUERY_FUN)
end); end);
list(#{node := Node}, Params) when Node =:= node() -> list(#{node := Node}, Params) when Node =:= node() ->
fence(fun() -> fence(fun() ->
emqx_mgmt_api:node_query(Node, Params, ?CLIENT_QS_SCHEMA, ?query_fun) emqx_mgmt_api:node_query(Node, Params, ?CLIENT_QS_SCHEMA, ?QUERY_FUN)
end); end);
list(Bindings = #{node := Node}, Params) -> list(Bindings = #{node := Node}, Params) ->
@ -173,16 +180,20 @@ fence(Func) ->
end. end.
lookup(#{node := Node, clientid := ClientId}, _Params) -> lookup(#{node := Node, clientid := ClientId}, _Params) ->
minirest:return({ok, emqx_mgmt:lookup_client(Node, {clientid, emqx_mgmt_util:urldecode(ClientId)}, ?format_fun)}); minirest:return({ok, emqx_mgmt:lookup_client(Node,
{clientid, emqx_mgmt_util:urldecode(ClientId)}, ?FORMAT_FUN)});
lookup(#{clientid := ClientId}, _Params) -> lookup(#{clientid := ClientId}, _Params) ->
minirest:return({ok, emqx_mgmt:lookup_client({clientid, emqx_mgmt_util:urldecode(ClientId)}, ?format_fun)}); minirest:return({ok, emqx_mgmt:lookup_client(
{clientid, emqx_mgmt_util:urldecode(ClientId)}, ?FORMAT_FUN)});
lookup(#{node := Node, username := Username}, _Params) -> lookup(#{node := Node, username := Username}, _Params) ->
minirest:return({ok, emqx_mgmt:lookup_client(Node, {username, emqx_mgmt_util:urldecode(Username)}, ?format_fun)}); minirest:return({ok, emqx_mgmt:lookup_client(Node,
{username, emqx_mgmt_util:urldecode(Username)}, ?FORMAT_FUN)});
lookup(#{username := Username}, _Params) -> lookup(#{username := Username}, _Params) ->
minirest:return({ok, emqx_mgmt:lookup_client({username, emqx_mgmt_util:urldecode(Username)}, ?format_fun)}). minirest:return({ok, emqx_mgmt:lookup_client({username,
emqx_mgmt_util:urldecode(Username)}, ?FORMAT_FUN)}).
kickout(#{clientid := ClientId}, _Params) -> kickout(#{clientid := ClientId}, _Params) ->
case emqx_mgmt:kickout_client(emqx_mgmt_util:urldecode(ClientId)) of case emqx_mgmt:kickout_client(emqx_mgmt_util:urldecode(ClientId)) of
@ -208,7 +219,7 @@ list_acl_cache(#{clientid := ClientId}, _Params) ->
set_ratelimit_policy(#{clientid := ClientId}, Params) -> set_ratelimit_policy(#{clientid := ClientId}, Params) ->
P = [{conn_bytes_in, proplists:get_value(<<"conn_bytes_in">>, Params)}, P = [{conn_bytes_in, proplists:get_value(<<"conn_bytes_in">>, Params)},
{conn_messages_in, proplists:get_value(<<"conn_messages_in">>, Params)}], {conn_messages_in, proplists:get_value(<<"conn_messages_in">>, Params)}],
case [{K, parse_ratelimit_str(V)} || {K, V} <- P, V =/= undefined] of case filter_ratelimit_params(P) of
[] -> minirest:return(); [] -> minirest:return();
Policy -> Policy ->
case emqx_mgmt:set_ratelimit_policy(emqx_mgmt_util:urldecode(ClientId), Policy) of case emqx_mgmt:set_ratelimit_policy(emqx_mgmt_util:urldecode(ClientId), Policy) of
@ -227,7 +238,7 @@ clean_ratelimit(#{clientid := ClientId}, _Params) ->
set_quota_policy(#{clientid := ClientId}, Params) -> set_quota_policy(#{clientid := ClientId}, Params) ->
P = [{conn_messages_routing, proplists:get_value(<<"conn_messages_routing">>, Params)}], P = [{conn_messages_routing, proplists:get_value(<<"conn_messages_routing">>, Params)}],
case [{K, parse_ratelimit_str(V)} || {K, V} <- P, V =/= undefined] of case filter_ratelimit_params(P) of
[] -> minirest:return(); [] -> minirest:return();
Policy -> Policy ->
case emqx_mgmt:set_quota_policy(emqx_mgmt_util:urldecode(ClientId), Policy) of case emqx_mgmt:set_quota_policy(emqx_mgmt_util:urldecode(ClientId), Policy) of
@ -237,6 +248,7 @@ set_quota_policy(#{clientid := ClientId}, Params) ->
end end
end. end.
clean_quota(#{clientid := ClientId}, _Params) -> clean_quota(#{clientid := ClientId}, _Params) ->
case emqx_mgmt:set_quota_policy(emqx_mgmt_util:urldecode(ClientId), []) of case emqx_mgmt:set_quota_policy(emqx_mgmt_util:urldecode(ClientId), []) of
ok -> minirest:return(); ok -> minirest:return();
@ -244,6 +256,23 @@ clean_quota(#{clientid := ClientId}, _Params) ->
{error, Reason} -> minirest:return({error, ?ERROR1, Reason}) {error, Reason} -> minirest:return({error, ?ERROR1, Reason})
end. end.
set_keepalive(#{clientid := ClientId}, Params) ->
case proplists:get_value(<<"interval">>, Params) of
undefined ->
minirest:return({error, ?ERROR7, params_not_found});
Interval0 ->
Interval = to_integer(Interval0),
case emqx_mgmt:set_keepalive(emqx_mgmt_util:urldecode(ClientId), Interval) of
ok -> minirest:return();
{error, not_found} -> minirest:return({error, ?ERROR12, not_found});
{error, Code, Reason} -> minirest:return({error, Code, Reason});
{error, Reason} -> minirest:return({error, ?ERROR1, Reason})
end
end.
to_integer(Int)when is_integer(Int) -> Int;
to_integer(Bin) when is_binary(Bin) -> binary_to_integer(Bin).
%% @private %% @private
%% S = 100,1s %% S = 100,1s
%% | 100KB, 1m %% | 100KB, 1m
@ -270,7 +299,7 @@ format_channel_info({_Key, Info, Stats0}) ->
ConnInfo = maps:get(conninfo, Info, #{}), ConnInfo = maps:get(conninfo, Info, #{}),
Session = case maps:get(session, Info, #{}) of Session = case maps:get(session, Info, #{}) of
undefined -> #{}; undefined -> #{};
_Sess -> _Sess Sess -> Sess
end, end,
SessCreated = maps:get(created_at, Session, maps:get(connected_at, ConnInfo)), SessCreated = maps:get(created_at, Session, maps:get(connected_at, ConnInfo)),
Connected = case maps:get(conn_state, Info, connected) of Connected = case maps:get(conn_state, Info, connected) of
@ -292,8 +321,14 @@ format_channel_info({_Key, Info, Stats0}) ->
inflight, max_inflight, awaiting_rel, inflight, max_inflight, awaiting_rel,
max_awaiting_rel, mqueue_len, mqueue_dropped, max_awaiting_rel, mqueue_len, mqueue_dropped,
max_mqueue, heap_size, reductions, mailbox_len, max_mqueue, heap_size, reductions, mailbox_len,
recv_cnt, recv_msg, recv_oct, recv_pkt, send_cnt, recv_cnt,
send_msg, send_oct, send_pkt], NStats), recv_msg, 'recv_msg.qos0', 'recv_msg.qos1', 'recv_msg.qos2',
'recv_msg.dropped', 'recv_msg.dropped.expired',
recv_oct, recv_pkt, send_cnt,
send_msg, 'send_msg.qos0', 'send_msg.qos1', 'send_msg.qos2',
'send_msg.dropped', 'send_msg.dropped.expired',
'send_msg.dropped.queue_full', 'send_msg.dropped.too_large',
send_oct, send_pkt], NStats),
maps:with([clientid, username, mountpoint, is_bridge, zone], ClientInfo), maps:with([clientid, username, mountpoint, is_bridge, zone], ClientInfo),
maps:with([clean_start, keepalive, expiry_interval, proto_name, maps:with([clean_start, keepalive, expiry_interval, proto_name,
proto_ver, peername, connected_at, disconnected_at], ConnInfo), proto_ver, peername, connected_at, disconnected_at], ConnInfo),
@ -311,7 +346,8 @@ format(Data) when is_map(Data)->
created_at => iolist_to_binary(strftime(CreatedAt div 1000))}, created_at => iolist_to_binary(strftime(CreatedAt div 1000))},
case maps:get(disconnected_at, Data, undefined) of case maps:get(disconnected_at, Data, undefined) of
undefined -> #{}; undefined -> #{};
DisconnectedAt -> #{disconnected_at => iolist_to_binary(strftime(DisconnectedAt div 1000))} DisconnectedAt -> #{disconnected_at =>
iolist_to_binary(strftime(DisconnectedAt div 1000))}
end). end).
format_acl_cache({{PubSub, Topic}, {AclResult, Timestamp}}) -> format_acl_cache({{PubSub, Topic}, {AclResult, Timestamp}}) ->
@ -326,7 +362,7 @@ format_acl_cache({{PubSub, Topic}, {AclResult, Timestamp}}) ->
query({Qs, Fuzzy}, Start, Limit) -> query({Qs, Fuzzy}, Start, Limit) ->
case qs2ms(Qs) of case qs2ms(Qs) of
{Ms, []}when Fuzzy =:= [] -> {Ms, []} when Fuzzy =:= [] ->
emqx_mgmt_api:select_table(emqx_channel_info, Ms, Start, Limit, fun format_channel_info/1); emqx_mgmt_api:select_table(emqx_channel_info, Ms, Start, Limit, fun format_channel_info/1);
{Ms, FuzzyStats} -> {Ms, FuzzyStats} ->
MatchFun = match_fun(Ms, Fuzzy ++ FuzzyStats), MatchFun = match_fun(Ms, Fuzzy ++ FuzzyStats),
@ -350,7 +386,7 @@ match_fun(Ms, Fuzzy) ->
run_fuzzy_match(_, []) -> run_fuzzy_match(_, []) ->
true; true;
run_fuzzy_match(E = {_, #{clientinfo := ClientInfo}, _}, [{Key, like, SubStr}|Fuzzy]) -> run_fuzzy_match(E = {_, #{clientinfo := ClientInfo}, _}, [{Key, like, SubStr} | Fuzzy]) ->
Val = case maps:get(Key, ClientInfo, undefined) of Val = case maps:get(Key, ClientInfo, undefined) of
undefined -> <<>>; undefined -> <<>>;
V -> V V -> V
@ -429,6 +465,9 @@ ms(mqueue_len, _X) ->
ms(mqueue_dropped, _X) -> ms(mqueue_dropped, _X) ->
fuzzy_stats. fuzzy_stats.
filter_ratelimit_params(P) ->
[{K, parse_ratelimit_str(V)} || {K, V} <- P, V =/= undefined].
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% EUnits %% EUnits
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------

View File

@ -73,39 +73,17 @@
export(_Bindings, _Params) -> export(_Bindings, _Params) ->
case emqx_mgmt_data_backup:export() of case emqx_mgmt_data_backup:export() of
{ok, File = #{filename := Filename}} -> {ok, File = #{filename := Filename}} ->
minirest:return({ok, File#{filename => filename:basename(Filename)}}); minirest:return({ok, File#{filename => list_to_binary(filename:basename(Filename))}});
Return -> minirest:return(Return) Return -> minirest:return(Return)
end. end.
list_exported(_Bindings, _Params) -> list_exported(_Bindings, _Params) ->
List = [ rpc:call(Node, ?MODULE, get_list_exported, []) || Node <- ekka_mnesia:running_nodes() ], List = [rpc:call(Node, ?MODULE, get_list_exported, []) || Node <- ekka_mnesia:running_nodes()],
NList = lists:map(fun({_, FileInfo}) -> FileInfo end, lists:keysort(1, lists:append(List))), NList = lists:map(fun({_, FileInfo}) -> FileInfo end, lists:keysort(1, lists:append(List))),
minirest:return({ok, NList}). minirest:return({ok, NList}).
get_list_exported() -> get_list_exported() ->
Dir = emqx:get_env(data_dir), emqx_mgmt_data_backup:list_backup_file().
{ok, Files} = file:list_dir_all(Dir),
lists:foldl(
fun(File, Acc) ->
case filename:extension(File) =:= ".json" of
true ->
FullFile = filename:join([Dir, File]),
case file:read_file_info(FullFile) of
{ok, #file_info{size = Size, ctime = CTime = {{Y, M, D}, {H, MM, S}}}} ->
CreatedAt = io_lib:format("~p-~p-~p ~p:~p:~p", [Y, M, D, H, MM, S]),
Seconds = calendar:datetime_to_gregorian_seconds(CTime),
[{Seconds, [{filename, list_to_binary(File)},
{size, Size},
{created_at, list_to_binary(CreatedAt)},
{node, node()}
]} | Acc];
{error, Reason} ->
logger:error("Read file info of ~s failed with: ~p", [File, Reason]),
Acc
end;
false -> Acc
end
end, [], Files).
import(_Bindings, Params) -> import(_Bindings, Params) ->
case proplists:get_value(<<"filename">>, Params) of case proplists:get_value(<<"filename">>, Params) of
@ -121,22 +99,27 @@ import(_Bindings, Params) ->
case lists:member(Node, case lists:member(Node,
[ erlang:atom_to_binary(N, utf8) || N <- ekka_mnesia:running_nodes() ] [ erlang:atom_to_binary(N, utf8) || N <- ekka_mnesia:running_nodes() ]
) of ) of
true -> minirest:return(rpc:call(erlang:binary_to_atom(Node, utf8), ?MODULE, do_import, [Filename])); true ->
N = erlang:binary_to_atom(Node, utf8),
case rpc:call(N, ?MODULE, do_import, [Filename]) of
{badrpc, Reason} ->
minirest:return({error, Reason});
Res ->
minirest:return(Res)
end;
false -> minirest:return({error, no_existent_node}) false -> minirest:return({error, no_existent_node})
end end
end end
end. end.
do_import(Filename) -> do_import(Filename) ->
FullFilename = fullname(Filename), emqx_mgmt_data_backup:import(Filename, "{}").
emqx_mgmt_data_backup:import(FullFilename, "{}").
download(#{filename := Filename}, _Params) -> download(#{filename := Filename0}, _Params) ->
FullFilename = fullname(Filename), Filename = filename_decode(Filename0),
case file:read_file(FullFilename) of case emqx_mgmt_data_backup:read_backup_file(Filename) of
{ok, Bin} -> {ok, Res} ->
{ok, #{filename => list_to_binary(Filename), {ok, Res};
file => Bin}};
{error, Reason} -> {error, Reason} ->
minirest:return({error, Reason}) minirest:return({error, Reason})
end. end.
@ -146,8 +129,7 @@ upload(Bindings, Params) ->
do_upload(_Bindings, #{<<"filename">> := Filename, do_upload(_Bindings, #{<<"filename">> := Filename,
<<"file">> := Bin}) -> <<"file">> := Bin}) ->
FullFilename = fullname(Filename), case emqx_mgmt_data_backup:upload_backup_file(Filename, Bin) of
case file:write_file(FullFilename, Bin) of
ok -> ok ->
minirest:return({ok, [{node, node()}]}); minirest:return({ok, [{node, node()}]});
{error, Reason} -> {error, Reason} ->
@ -158,9 +140,9 @@ do_upload(Bindings, Params = #{<<"file">> := _}) ->
do_upload(_Bindings, _Params) -> do_upload(_Bindings, _Params) ->
minirest:return({error, missing_required_params}). minirest:return({error, missing_required_params}).
delete(#{filename := Filename}, _Params) -> delete(#{filename := Filename0}, _Params) ->
FullFilename = fullname(Filename), Filename = filename_decode(Filename0),
case file:delete(FullFilename) of case emqx_mgmt_data_backup:delete_backup_file(Filename) of
ok -> ok ->
minirest:return(); minirest:return();
{error, Reason} -> {error, Reason} ->
@ -168,20 +150,19 @@ delete(#{filename := Filename}, _Params) ->
end. end.
import_content(Content) -> import_content(Content) ->
File = dump_to_tmp_file(Content),
do_import(File).
dump_to_tmp_file(Content) ->
Bin = emqx_json:encode(Content), Bin = emqx_json:encode(Content),
Filename = tmp_filename(), Filename = tmp_filename(),
ok = file:write_file(fullname(Filename), Bin), case emqx_mgmt_data_backup:upload_backup_file(Filename, Bin) of
Filename. ok ->
do_import(Filename);
fullname(Name0) -> {error, Reason} ->
Name = uri_string:percent_decode(Name0), {error, Reason}
filename:join(emqx:get_env(data_dir), Name). end.
tmp_filename() -> tmp_filename() ->
Seconds = erlang:system_time(second), Seconds = erlang:system_time(second),
{{Y, M, D}, {H, MM, S}} = emqx_mgmt_util:datetime(Seconds), {{Y, M, D}, {H, MM, S}} = emqx_mgmt_util:datetime(Seconds),
io_lib:format("emqx-export-~p-~p-~p-~p-~p-~p.json", [Y, M, D, H, MM, S]). list_to_binary(io_lib:format("emqx-export-~p-~p-~p-~p-~p-~p.json", [Y, M, D, H, MM, S])).
filename_decode(Filename) ->
uri_string:percent_decode(Filename).

View File

@ -71,19 +71,12 @@ subscribe(_Bindings, Params) ->
publish(_Bindings, Params) -> publish(_Bindings, Params) ->
logger:debug("API publish Params:~p", [Params]), logger:debug("API publish Params:~p", [Params]),
{ClientId, Topic, Qos, Retain, Payload} = parse_publish_params(Params), try parse_publish_params(Params) of
case do_publish(ClientId, Topic, Qos, Retain, Payload) of Result -> do_publish(Params, Result)
{ok, MsgIds} -> catch
case proplists:get_value(<<"return">>, Params, undefined) of _E : _R ->
undefined -> minirest:return(ok); logger:debug("API publish result:~p ~p", [_E, _R]),
_Val -> minirest:return({ok, ?ERROR8, bad_params})
case proplists:get_value(<<"topics">>, Params, undefined) of
undefined -> minirest:return({ok, #{msgid => lists:last(MsgIds)}});
_ -> minirest:return({ok, #{msgids => MsgIds}})
end
end;
Result ->
minirest:return(Result)
end. end.
unsubscribe(_Bindings, Params) -> unsubscribe(_Bindings, Params) ->
@ -114,7 +107,8 @@ loop_subscribe([Params | ParamsN], Acc) ->
{_, Code0, _Reason} -> Code0 {_, Code0, _Reason} -> Code0
end, end,
Result = #{clientid => ClientId, Result = #{clientid => ClientId,
topic => resp_topic(proplists:get_value(<<"topic">>, Params), proplists:get_value(<<"topics">>, Params, <<"">>)), topic => resp_topic(proplists:get_value(<<"topic">>, Params),
proplists:get_value(<<"topics">>, Params, <<"">>)),
code => Code}, code => Code},
loop_subscribe(ParamsN, [Result | Acc]). loop_subscribe(ParamsN, [Result | Acc]).
@ -123,13 +117,19 @@ loop_publish(Params) ->
loop_publish([], Result) -> loop_publish([], Result) ->
lists:reverse(Result); lists:reverse(Result);
loop_publish([Params | ParamsN], Acc) -> loop_publish([Params | ParamsN], Acc) ->
{ClientId, Topic, Qos, Retain, Payload} = parse_publish_params(Params), Result =
Code = case do_publish(ClientId, Topic, Qos, Retain, Payload) of try parse_publish_params(Params) of
{ok, _} -> 0; Res ->
{_, Code0, _} -> Code0 Code = case do_publish(Params, Res) of
end, {ok, _} -> 0;
Result = #{topic => resp_topic(proplists:get_value(<<"topic">>, Params), proplists:get_value(<<"topics">>, Params, <<"">>)), {_, Code0, _} -> Code0
code => Code}, end,
#{topic => resp_topic(proplists:get_value(<<"topic">>, Params),
proplists:get_value(<<"topics">>, Params, <<"">>)),
code => Code}
catch
_E : _R -> #{code => ?ERROR8, message => <<"bad_params">>}
end,
loop_publish(ParamsN, [Result | Acc]). loop_publish(ParamsN, [Result | Acc]).
loop_unsubscribe(Params) -> loop_unsubscribe(Params) ->
@ -143,7 +143,8 @@ loop_unsubscribe([Params | ParamsN], Acc) ->
{_, Code0, _} -> Code0 {_, Code0, _} -> Code0
end, end,
Result = #{clientid => ClientId, Result = #{clientid => ClientId,
topic => resp_topic(proplists:get_value(<<"topic">>, Params), proplists:get_value(<<"topics">>, Params, <<"">>)), topic => resp_topic(proplists:get_value(<<"topic">>, Params),
proplists:get_value(<<"topics">>, Params, <<"">>)),
code => Code}, code => Code},
loop_unsubscribe(ParamsN, [Result | Acc]). loop_unsubscribe(ParamsN, [Result | Acc]).
@ -160,14 +161,31 @@ do_subscribe(ClientId, Topics, QoS) ->
_ -> ok _ -> ok
end. end.
do_publish(ClientId, _Topics, _Qos, _Retain, _Payload) when not (is_binary(ClientId) or (ClientId =:= undefined)) -> do_publish(Params, {ClientId, Topic, Qos, Retain, Payload, Props}) ->
case do_publish(ClientId, Topic, Qos, Retain, Payload, Props) of
{ok, MsgIds} ->
case proplists:get_value(<<"return">>, Params, undefined) of
undefined -> minirest:return(ok);
_Val ->
case proplists:get_value(<<"topics">>, Params, undefined) of
undefined -> minirest:return({ok, #{msgid => lists:last(MsgIds)}});
_ -> minirest:return({ok, #{msgids => MsgIds}})
end
end;
Result ->
minirest:return(Result)
end.
do_publish(ClientId, _Topics, _Qos, _Retain, _Payload, _Props)
when not (is_binary(ClientId) or (ClientId =:= undefined)) ->
{ok, ?ERROR8, <<"bad clientid: must be string">>}; {ok, ?ERROR8, <<"bad clientid: must be string">>};
do_publish(_ClientId, [], _Qos, _Retain, _Payload) -> do_publish(_ClientId, [], _Qos, _Retain, _Payload, _Props) ->
{ok, ?ERROR15, bad_topic}; {ok, ?ERROR15, bad_topic};
do_publish(ClientId, Topics, Qos, Retain, Payload) -> do_publish(ClientId, Topics, Qos, Retain, Payload, Props) ->
MsgIds = lists:map(fun(Topic) -> MsgIds = lists:map(fun(Topic) ->
Msg = emqx_message:make(ClientId, Qos, Topic, Payload), Msg = emqx_message:make(ClientId, Qos, Topic, Payload,
_ = emqx_mgmt:publish(Msg#message{flags = #{retain => Retain}}), #{retain => Retain}, Props),
_ = emqx_mgmt:publish(Msg),
emqx_guid:to_hexstr(Msg#message.id) emqx_guid:to_hexstr(Msg#message.id)
end, Topics), end, Topics),
{ok, MsgIds}. {ok, MsgIds}.
@ -187,19 +205,22 @@ do_unsubscribe(ClientId, Topic) ->
parse_subscribe_params(Params) -> parse_subscribe_params(Params) ->
ClientId = proplists:get_value(<<"clientid">>, Params), ClientId = proplists:get_value(<<"clientid">>, Params),
Topics = topics(filter, proplists:get_value(<<"topic">>, Params), proplists:get_value(<<"topics">>, Params, <<"">>)), Topics = topics(filter, proplists:get_value(<<"topic">>, Params),
proplists:get_value(<<"topics">>, Params, <<"">>)),
QoS = proplists:get_value(<<"qos">>, Params, 0), QoS = proplists:get_value(<<"qos">>, Params, 0),
{ClientId, Topics, QoS}. {ClientId, Topics, QoS}.
parse_publish_params(Params) -> parse_publish_params(Params) ->
Topics = topics(name, proplists:get_value(<<"topic">>, Params), proplists:get_value(<<"topics">>, Params, <<"">>)), Topics = topics(name, proplists:get_value(<<"topic">>, Params),
ClientId = proplists:get_value(<<"clientid">>, Params), proplists:get_value(<<"topics">>, Params, <<"">>)),
Payload = decode_payload(proplists:get_value(<<"payload">>, Params, <<>>), ClientId = proplists:get_value(<<"clientid">>, Params),
proplists:get_value(<<"encoding">>, Params, <<"plain">>)), Payload = decode_payload(proplists:get_value(<<"payload">>, Params, <<>>),
Qos = proplists:get_value(<<"qos">>, Params, 0), proplists:get_value(<<"encoding">>, Params, <<"plain">>)),
Retain = proplists:get_value(<<"retain">>, Params, false), Qos = proplists:get_value(<<"qos">>, Params, 0),
Payload1 = maybe_maps_to_binary(Payload), Retain = proplists:get_value(<<"retain">>, Params, false),
{ClientId, Topics, Qos, Retain, Payload1}. Payload1 = maybe_maps_to_binary(Payload),
Props = parse_props(Params),
{ClientId, Topics, Qos, Retain, Payload1, Props}.
parse_unsubscribe_params(Params) -> parse_unsubscribe_params(Params) ->
ClientId = proplists:get_value(<<"clientid">>, Params), ClientId = proplists:get_value(<<"clientid">>, Params),
@ -253,3 +274,50 @@ maybe_maps_to_binary(Payload) ->
_C : _E : S -> _C : _E : S ->
error({encode_payload_fail, S}) error({encode_payload_fail, S})
end. end.
-define(PROP_MAPPING,
#{<<"payload_format_indicator">> => 'Payload-Format-Indicator',
<<"message_expiry_interval">> => 'Message-Expiry-Interval',
<<"response_topic">> => 'Response-Topic',
<<"correlation_data">> => 'Correlation-Data',
<<"user_properties">> => 'User-Property',
<<"subscription_identifier">> => 'Subscription-Identifier',
<<"content_type">> => 'Content-Type'
}).
parse_props(Params) ->
Properties0 = proplists:get_value(<<"properties">>, Params, []),
Properties1 = lists:foldl(fun({Name, Value}, Acc) ->
case maps:find(Name, ?PROP_MAPPING) of
{ok, Key} -> Acc#{Key => Value};
error -> error({invalid_property, Name})
end
end, #{}, Properties0),
%% Compatible with older API
UserProp1 = generate_user_props(proplists:get_value(<<"user_properties">>, Params, [])),
UserProp2 =
case Properties1 of
#{'User-Property' := UserProp1List} -> generate_user_props(UserProp1List);
_ -> []
end,
#{properties => Properties1#{'User-Property' => UserProp1 ++ UserProp2}}.
generate_user_props(UserProps) when is_list(UserProps)->
lists:map(fun
({Name, Value}) -> {bin(Name), bin(Value)};
(Invalid) -> error({invalid_user_property, Invalid})
end
, UserProps);
generate_user_props(UserProps) ->
error({user_properties_type_error, UserProps}).
bin(Bin) when is_binary(Bin) -> Bin;
bin(Num) when is_number(Num) -> number_to_binary(Num);
bin(Boolean) when is_boolean(Boolean) -> atom_to_binary(Boolean);
bin(Other) -> error({user_properties_type_error, Other}).
-define(FLOAT_PRECISION, 17).
number_to_binary(Int) when is_integer(Int) ->
integer_to_binary(Int);
number_to_binary(Float) when is_float(Float) ->
float_to_binary(Float, [{decimals, ?FLOAT_PRECISION}, compact]).

View File

@ -21,7 +21,9 @@
-include("emqx_mgmt.hrl"). -include("emqx_mgmt.hrl").
-define(PRINT_CMD(Cmd, Descr), io:format("~-48s# ~s~n", [Cmd, Descr])). -elvis([{elvis_style, invalid_dynamic_call, disable}]).
-define(PRINT_CMD(Cmd, Desc), io:format("~-48s# ~s~n", [Cmd, Desc])).
-export([load/0]). -export([load/0]).
@ -36,6 +38,7 @@
, vm/1 , vm/1
, mnesia/1 , mnesia/1
, trace/1 , trace/1
, traces/1
, log/1 , log/1
, mgmt/1 , mgmt/1
, data/1 , data/1
@ -75,11 +78,8 @@ mgmt(["insert", AppId, Name]) ->
mgmt(["lookup", AppId]) -> mgmt(["lookup", AppId]) ->
case emqx_mgmt_auth:lookup_app(list_to_binary(AppId)) of case emqx_mgmt_auth:lookup_app(list_to_binary(AppId)) of
{AppId1, AppSecret, Name, Desc, Status, Expired} -> undefined -> emqx_ctl:print("Not Found.~n");
emqx_ctl:print("app_id: ~s~nsecret: ~s~nname: ~s~ndesc: ~s~nstatus: ~s~nexpired: ~p~n", App -> print_app_info(App)
[AppId1, AppSecret, Name, Desc, Status, Expired]);
undefined ->
emqx_ctl:print("Not Found.~n")
end; end;
mgmt(["update", AppId, Status]) -> mgmt(["update", AppId, Status]) ->
@ -100,10 +100,7 @@ mgmt(["delete", AppId]) ->
end; end;
mgmt(["list"]) -> mgmt(["list"]) ->
lists:foreach(fun({AppId, AppSecret, Name, Desc, Status, Expired}) -> lists:foreach(fun print_app_info/1, emqx_mgmt_auth:list_apps());
emqx_ctl:print("app_id: ~s, secret: ~s, name: ~s, desc: ~s, status: ~s, expired: ~p~n",
[AppId, AppSecret, Name, Desc, Status, Expired])
end, emqx_mgmt_auth:list_apps());
mgmt(_) -> mgmt(_) ->
emqx_ctl:usage([{"mgmt list", "List Applications"}, emqx_ctl:usage([{"mgmt list", "List Applications"},
@ -129,10 +126,12 @@ broker([]) ->
[emqx_ctl:print("~-10s: ~s~n", [Fun, emqx_sys:Fun()]) || Fun <- Funs]; [emqx_ctl:print("~-10s: ~s~n", [Fun, emqx_sys:Fun()]) || Fun <- Funs];
broker(["stats"]) -> broker(["stats"]) ->
[emqx_ctl:print("~-30s: ~w~n", [Stat, Val]) || {Stat, Val} <- lists:sort(emqx_stats:getstats())]; [emqx_ctl:print("~-30s: ~w~n", [Stat, Val]) ||
{Stat, Val} <- lists:sort(emqx_stats:getstats())];
broker(["metrics"]) -> broker(["metrics"]) ->
[emqx_ctl:print("~-30s: ~w~n", [Metric, Val]) || {Metric, Val} <- lists:sort(emqx_metrics:all())]; [emqx_ctl:print("~-30s: ~w~n", [Metric, Val]) ||
{Metric, Val} <- lists:sort(emqx_metrics:all())];
broker(_) -> broker(_) ->
emqx_ctl:usage([{"broker", "Show broker version, uptime and description"}, emqx_ctl:usage([{"broker", "Show broker version, uptime and description"},
@ -257,10 +256,12 @@ subscriptions(["del", ClientId, Topic]) ->
end; end;
subscriptions(_) -> subscriptions(_) ->
emqx_ctl:usage([{"subscriptions list", "List all subscriptions"}, emqx_ctl:usage([{"subscriptions list", "List all subscriptions"},
{"subscriptions show <ClientId>", "Show subscriptions of a client"}, {"subscriptions show <ClientId>", "Show subscriptions of a client"},
{"subscriptions add <ClientId> <Topic> <QoS>", "Add a static subscription manually"}, {"subscriptions add <ClientId> <Topic> <QoS>",
{"subscriptions del <ClientId> <Topic>", "Delete a static subscription manually"}]). "Add a static subscription manually"},
{"subscriptions del <ClientId> <Topic>",
"Delete a static subscription manually"}]).
if_valid_qos(QoS, Fun) -> if_valid_qos(QoS, Fun) ->
try list_to_integer(QoS) of try list_to_integer(QoS) of
@ -329,14 +330,20 @@ vm(["memory"]) ->
[emqx_ctl:print("memory/~-17s: ~w~n", [Cat, Val]) || {Cat, Val} <- erlang:memory()]; [emqx_ctl:print("memory/~-17s: ~w~n", [Cat, Val]) || {Cat, Val} <- erlang:memory()];
vm(["process"]) -> vm(["process"]) ->
[emqx_ctl:print("process/~-16s: ~w~n", [Name, erlang:system_info(Key)]) || {Name, Key} <- [{limit, process_limit}, {count, process_count}]]; [emqx_ctl:print("process/~-16s: ~w~n",
[Name, erlang:system_info(Key)]) ||
{Name, Key} <- [{limit, process_limit}, {count, process_count}]];
vm(["io"]) -> vm(["io"]) ->
IoInfo = lists:usort(lists:flatten(erlang:system_info(check_io))), IoInfo = lists:usort(lists:flatten(erlang:system_info(check_io))),
[emqx_ctl:print("io/~-21s: ~w~n", [Key, proplists:get_value(Key, IoInfo)]) || Key <- [max_fds, active_fds]]; [emqx_ctl:print("io/~-21s: ~w~n",
[Key, proplists:get_value(Key, IoInfo)]) ||
Key <- [max_fds, active_fds]];
vm(["ports"]) -> vm(["ports"]) ->
[emqx_ctl:print("ports/~-16s: ~w~n", [Name, erlang:system_info(Key)]) || {Name, Key} <- [{count, port_count}, {limit, port_limit}]]; [emqx_ctl:print("ports/~-16s: ~w~n",
[Name, erlang:system_info(Key)]) ||
{Name, Key} <- [{count, port_count}, {limit, port_limit}]];
vm(_) -> vm(_) ->
emqx_ctl:usage([{"vm all", "Show info of Erlang VM"}, emqx_ctl:usage([{"vm all", "Show info of Erlang VM"},
@ -373,8 +380,9 @@ log(["primary-level", Level]) ->
emqx_ctl:print("~s~n", [emqx_logger:get_primary_log_level()]); emqx_ctl:print("~s~n", [emqx_logger:get_primary_log_level()]);
log(["handlers", "list"]) -> log(["handlers", "list"]) ->
_ = [emqx_ctl:print("LogHandler(id=~s, level=~s, destination=~s, status=~s)~n", [Id, Level, Dst, Status]) _ = [emqx_ctl:print("LogHandler(id=~s, level=~s, destination=~s, status=~s)~n",
|| #{id := Id, level := Level, dst := Dst, status := Status} <- emqx_logger:get_log_handlers()], [Id, Level, Dst, Status]) || #{id := Id, level := Level, dst := Dst, status := Status}
<- emqx_logger:get_log_handlers()],
ok; ok;
log(["handlers", "start", HandlerId]) -> log(["handlers", "start", HandlerId]) ->
@ -407,43 +415,51 @@ log(_) ->
{"log handlers list", "Show log handlers"}, {"log handlers list", "Show log handlers"},
{"log handlers start <HandlerId>", "Start a log handler"}, {"log handlers start <HandlerId>", "Start a log handler"},
{"log handlers stop <HandlerId>", "Stop a log handler"}, {"log handlers stop <HandlerId>", "Stop a log handler"},
{"log handlers set-level <HandlerId> <Level>", "Set log level of a log handler"}]). {"log handlers set-level <HandlerId> <Level>",
"Set log level of a log handler"}]).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% @doc Trace Command %% @doc Trace Command
trace(["list"]) -> trace(["list"]) ->
lists:foreach(fun({{Who, Name}, {Level, LogFile}}) -> lists:foreach(fun(Trace) ->
emqx_ctl:print("Trace(~s=~s, level=~s, destination=~p)~n", [Who, Name, Level, LogFile]) #{type := Type, filter := Filter, level := Level, dst := Dst} = Trace,
end, emqx_tracer:lookup_traces()); emqx_ctl:print("Trace(~s=~s, level=~s, destination=~p)~n", [Type, Filter, Level, Dst])
end, emqx_trace_handler:running());
trace(["stop", "client", ClientId]) -> trace(["stop", Operation, ClientId]) ->
trace_off(clientid, ClientId); case trace_type(Operation) of
{ok, Type} -> trace_off(Type, ClientId);
error -> trace([])
end;
trace(["start", "client", ClientId, LogFile]) -> trace(["start", Operation, ClientId, LogFile]) ->
trace_on(clientid, ClientId, all, LogFile); trace(["start", Operation, ClientId, LogFile, "all"]);
trace(["start", "client", ClientId, LogFile, Level]) -> trace(["start", Operation, ClientId, LogFile, Level]) ->
trace_on(clientid, ClientId, list_to_atom(Level), LogFile); case trace_type(Operation) of
{ok, Type} -> trace_on(Type, ClientId, list_to_existing_atom(Level), LogFile);
trace(["stop", "topic", Topic]) -> error -> trace([])
trace_off(topic, Topic); end;
trace(["start", "topic", Topic, LogFile]) ->
trace_on(topic, Topic, all, LogFile);
trace(["start", "topic", Topic, LogFile, Level]) ->
trace_on(topic, Topic, list_to_atom(Level), LogFile);
trace(_) -> trace(_) ->
emqx_ctl:usage([{"trace list", "List all traces started"}, emqx_ctl:usage([{"trace list", "List all traces started on local node"},
{"trace start client <ClientId> <File> [<Level>]", "Traces for a client"}, {"trace start client <ClientId> <File> [<Level>]",
{"trace stop client <ClientId>", "Stop tracing for a client"}, "Traces for a client on local node"},
{"trace start topic <Topic> <File> [<Level>] ", "Traces for a topic"}, {"trace stop client <ClientId>",
{"trace stop topic <Topic> ", "Stop tracing for a topic"}]). "Stop tracing for a client on local node"},
{"trace start topic <Topic> <File> [<Level>] ",
"Traces for a topic on local node"},
{"trace stop topic <Topic> ",
"Stop tracing for a topic on local node"},
{"trace start ip_address <IP> <File> [<Level>] ",
"Traces for a client ip on local node"},
{"trace stop ip_addresss <IP> ",
"Stop tracing for a client ip on local node"}
]).
trace_on(Who, Name, Level, LogFile) -> trace_on(Who, Name, Level, LogFile) ->
case emqx_tracer:start_trace({Who, iolist_to_binary(Name)}, Level, LogFile) of case emqx_trace_handler:install(Who, Name, Level, LogFile) of
ok -> ok ->
emqx_ctl:print("trace ~s ~s successfully~n", [Who, Name]); emqx_ctl:print("trace ~s ~s successfully~n", [Who, Name]);
{error, Error} -> {error, Error} ->
@ -451,13 +467,94 @@ trace_on(Who, Name, Level, LogFile) ->
end. end.
trace_off(Who, Name) -> trace_off(Who, Name) ->
case emqx_tracer:stop_trace({Who, iolist_to_binary(Name)}) of case emqx_trace_handler:uninstall(Who, Name) of
ok -> ok ->
emqx_ctl:print("stop tracing ~s ~s successfully~n", [Who, Name]); emqx_ctl:print("stop tracing ~s ~s successfully~n", [Who, Name]);
{error, Error} -> {error, Error} ->
emqx_ctl:print("[error] stop tracing ~s ~s: ~p~n", [Who, Name, Error]) emqx_ctl:print("[error] stop tracing ~s ~s: ~p~n", [Who, Name, Error])
end. end.
%%--------------------------------------------------------------------
%% @doc Trace Cluster Command
traces(["list"]) ->
{ok, List} = emqx_trace_api:list_trace(get, []),
case List of
[] ->
emqx_ctl:print("Cluster Trace is empty~n", []);
_ ->
lists:foreach(fun(Trace) ->
#{type := Type, name := Name, status := Status,
log_size := LogSize} = Trace,
emqx_ctl:print("Trace(~s: ~s=~s, ~s, LogSize:~p)~n",
[Name, Type, maps:get(Type, Trace), Status, LogSize])
end, List)
end,
length(List);
traces(["stop", Name]) ->
trace_cluster_off(Name);
traces(["delete", Name]) ->
trace_cluster_del(Name);
traces(["start", Name, Operation, Filter]) ->
traces(["start", Name, Operation, Filter, "900"]);
traces(["start", Name, Operation, Filter, DurationS]) ->
case trace_type(Operation) of
{ok, Type} -> trace_cluster_on(Name, Type, Filter, DurationS);
error -> traces([])
end;
traces(_) ->
emqx_ctl:usage([{"traces list", "List all cluster traces started"},
{"traces start <Name> client <ClientId>", "Traces for a client in cluster"},
{"traces start <Name> topic <Topic>", "Traces for a topic in cluster"},
{"traces start <Name> ip_address <IPAddr>", "Traces for a IP in cluster"},
{"traces stop <Name>", "Stop trace in cluster"},
{"traces delete <Name>", "Delete trace in cluster"}
]).
trace_cluster_on(Name, Type, Filter, DurationS0) ->
case erlang:whereis(emqx_trace) of
undefined ->
emqx_ctl:print("[error] Tracer module not started~n"
"Please run `emqx_ctl modules start tracer` "
"or `emqx_ctl modules start emqx_mod_trace` first~n", []);
_ ->
DurationS = list_to_integer(DurationS0),
Now = erlang:system_time(second),
Trace = #{ name => list_to_binary(Name)
, type => atom_to_binary(Type)
, Type => list_to_binary(Filter)
, start_at => list_to_binary(calendar:system_time_to_rfc3339(Now))
, end_at => list_to_binary(calendar:system_time_to_rfc3339(Now + DurationS))
},
case emqx_trace:create(Trace) of
ok ->
emqx_ctl:print("Cluster_trace ~p ~s ~s successfully~n", [Type, Filter, Name]);
{error, Error} ->
emqx_ctl:print("[error] Cluster_trace ~s ~s=~s ~p~n",
[Name, Type, Filter, Error])
end
end.
trace_cluster_del(Name) ->
case emqx_trace:delete(list_to_binary(Name)) of
ok -> emqx_ctl:print("Del cluster_trace ~s successfully~n", [Name]);
{error, Error} -> emqx_ctl:print("[error] Del cluster_trace ~s: ~p~n", [Name, Error])
end.
trace_cluster_off(Name) ->
case emqx_trace:update(list_to_binary(Name), false) of
ok -> emqx_ctl:print("Stop cluster_trace ~s successfully~n", [Name]);
{error, Error} -> emqx_ctl:print("[error] Stop cluster_trace ~s: ~p~n", [Name, Error])
end.
trace_type("client") -> {ok, clientid};
trace_type("topic") -> {ok, topic};
trace_type("ip_address") -> {ok, ip_address};
trace_type(_) -> error.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% @doc Listeners Command %% @doc Listeners Command
@ -473,18 +570,20 @@ listeners([]) ->
lists:foreach(fun indent_print/1, Info) lists:foreach(fun indent_print/1, Info)
end, esockd:listeners()), end, esockd:listeners()),
lists:foreach(fun({Protocol, Opts}) -> lists:foreach(fun({Protocol, Opts}) ->
Port = proplists:get_value(port, Opts), Port = proplists:get_value(port, Opts),
Info = [{listen_on, {string, emqx_listeners:format_listen_on(Port)}}, Acceptors = maps:get(num_acceptors, proplists:get_value(transport_options, Opts, #{}), 0),
{acceptors, maps:get(num_acceptors, proplists:get_value(transport_options, Opts, #{}), 0)}, Info = [{listen_on, {string, emqx_listeners:format_listen_on(Port)}},
{max_conns, proplists:get_value(max_connections, Opts)}, {acceptors, Acceptors},
{current_conn, proplists:get_value(all_connections, Opts)}, {max_conns, proplists:get_value(max_connections, Opts)},
{shutdown_count, []}], {current_conn, proplists:get_value(all_connections, Opts)},
emqx_ctl:print("~s~n", [listener_identifier(Protocol, Port)]), {shutdown_count, []}],
lists:foreach(fun indent_print/1, Info) emqx_ctl:print("~s~n", [listener_identifier(Protocol, Port)]),
end, ranch:info()); lists:foreach(fun indent_print/1, Info)
end, ranch:info());
listeners(["stop", Name = "http" ++ _N | _MaybePort]) -> listeners(["stop", Name = "http" ++ _N | _MaybePort]) ->
%% _MaybePort is to be backward compatible, to stop http listener, there is no need for the port number %% _MaybePort is to be backward compatible, to stop http listener,
%% there is no need for the port number
case minirest:stop_http(list_to_atom(Name)) of case minirest:stop_http(list_to_atom(Name)) of
ok -> ok ->
emqx_ctl:print("Stop ~s listener successfully.~n", [Name]); emqx_ctl:print("Stop ~s listener successfully.~n", [Name]);
@ -565,7 +664,8 @@ data(["import", Filename, "--env", Env]) ->
{error, unsupported_version} -> {error, unsupported_version} ->
emqx_ctl:print("The emqx data import failed: Unsupported version.~n"); emqx_ctl:print("The emqx data import failed: Unsupported version.~n");
{error, Reason} -> {error, Reason} ->
emqx_ctl:print("The emqx data import failed: ~0p while reading ~s.~n", [Reason, Filename]) emqx_ctl:print("The emqx data import failed: ~0p while reading ~s.~n",
[Reason, Filename])
end; end;
data(_) -> data(_) ->
@ -657,19 +757,23 @@ print({client, {ClientId, ChanPid}}) ->
maps:with([created_at], Session)]), maps:with([created_at], Session)]),
InfoKeys = [clientid, username, peername, InfoKeys = [clientid, username, peername,
clean_start, keepalive, expiry_interval, clean_start, keepalive, expiry_interval,
subscriptions_cnt, inflight_cnt, awaiting_rel_cnt, send_msg, mqueue_len, mqueue_dropped, subscriptions_cnt, inflight_cnt, awaiting_rel_cnt,
connected, created_at, connected_at] ++ case maps:is_key(disconnected_at, Info) of send_msg, mqueue_len, mqueue_dropped,
true -> [disconnected_at]; connected, created_at, connected_at] ++
false -> [] case maps:is_key(disconnected_at, Info) of
end, true -> [disconnected_at];
false -> []
end,
emqx_ctl:print("Client(~s, username=~s, peername=~s, " emqx_ctl:print("Client(~s, username=~s, peername=~s, "
"clean_start=~s, keepalive=~w, session_expiry_interval=~w, " "clean_start=~s, keepalive=~w, session_expiry_interval=~w, "
"subscriptions=~w, inflight=~w, awaiting_rel=~w, delivered_msgs=~w, enqueued_msgs=~w, dropped_msgs=~w, " "subscriptions=~w, inflight=~w, awaiting_rel=~w, "
"connected=~s, created_at=~w, connected_at=~w" ++ case maps:is_key(disconnected_at, Info) of "delivered_msgs=~w, enqueued_msgs=~w, dropped_msgs=~w, "
true -> ", disconnected_at=~w)~n"; "connected=~s, created_at=~w, connected_at=~w" ++
false -> ")~n" case maps:is_key(disconnected_at, Info) of
end, true -> ", disconnected_at=~w)~n";
[format(K, maps:get(K, Info)) || K <- InfoKeys]); false -> ")~n"
end,
[format(K, maps:get(K, Info)) || K <- InfoKeys]);
print({emqx_route, #route{topic = Topic, dest = {_, Node}}}) -> print({emqx_route, #route{topic = Topic, dest = {_, Node}}}) ->
emqx_ctl:print("~s -> ~s~n", [Topic, Node]); emqx_ctl:print("~s -> ~s~n", [Topic, Node]);
@ -722,6 +826,10 @@ restart_http_listener(Scheme, AppName) ->
http_mod_name(emqx_management) -> emqx_mgmt_http; http_mod_name(emqx_management) -> emqx_mgmt_http;
http_mod_name(Name) -> Name. http_mod_name(Name) -> Name.
print_app_info({AppId, AppSecret, Name, Desc, Status, Expired}) ->
emqx_ctl:print("app_id: ~s, secret: ~s, name: ~s, desc: ~s, status: ~s, expired: ~p~n",
[AppId, AppSecret, Name, Desc, Status, Expired]).
for_node(Fun, Node) -> for_node(Fun, Node) ->
try list_to_existing_atom(Node) of try list_to_existing_atom(Node) of
NodeAtom -> NodeAtom ->

View File

@ -31,6 +31,8 @@
]). ]).
-endif. -endif.
-define(BACKUP_DIR, backup).
-export([ export_rules/0 -export([ export_rules/0
, export_resources/0 , export_resources/0
, export_blacklist/0 , export_blacklist/0
@ -53,8 +55,18 @@
-export([ export/0 -export([ export/0
, import/2 , import/2
, upload_backup_file/2
, list_backup_file/0
, read_backup_file/1
, delete_backup_file/1
]). ]).
-ifdef(TEST).
-export([ backup_dir/0
, delete_all_backup_file/0
]).
-endif.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Data Export and Import %% Data Export and Import
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -253,10 +265,12 @@ import_resource(#{<<"id">> := Id,
config => Config, config => Config,
created_at => NCreatedAt, created_at => NCreatedAt,
description => Desc}). description => Desc}).
import_resources_and_rules(Resources, Rules, FromVersion) import_resources_and_rules(Resources, Rules, FromVersion)
when FromVersion =:= "4.0" orelse when FromVersion =:= "4.0" orelse
FromVersion =:= "4.1" orelse FromVersion =:= "4.1" orelse
FromVersion =:= "4.2" -> FromVersion =:= "4.2" orelse
FromVersion =:= "4.3" ->
Configs = lists:foldl(fun compatible_version/2 , [], Resources), Configs = lists:foldl(fun compatible_version/2 , [], Resources),
lists:foreach(fun(#{<<"actions">> := Actions} = Rule) -> lists:foreach(fun(#{<<"actions">> := Actions} = Rule) ->
NActions = apply_new_config(Actions, Configs), NActions = apply_new_config(Actions, Configs),
@ -321,6 +335,17 @@ compatible_version(#{<<"id">> := ID,
{ok, _Resource} = import_resource(Resource#{<<"config">> := Cfg}), {ok, _Resource} = import_resource(Resource#{<<"config">> := Cfg}),
NHeaders = maps:put(<<"content-type">>, ContentType, covert_empty_headers(Headers)), NHeaders = maps:put(<<"content-type">>, ContentType, covert_empty_headers(Headers)),
[{ID, #{headers => NHeaders, method => Method}} | Acc]; [{ID, #{headers => NHeaders, method => Method}} | Acc];
compatible_version(#{<<"id">> := ID,
<<"type">> := Type,
<<"config">> := Config} = Resource, Acc)
when Type =:= <<"backend_mongo_single">>
orelse Type =:= <<"backend_mongo_sharded">>
orelse Type =:= <<"backend_mongo_rs">> ->
NewConfig = maps:merge(#{<<"srv_record">> => false}, Config),
{ok, _Resource} = import_resource(Resource#{<<"config">> := NewConfig}),
[{ID, NewConfig} | Acc];
% normal version % normal version
compatible_version(Resource, Acc) -> compatible_version(Resource, Acc) ->
{ok, _Resource} = import_resource(Resource), {ok, _Resource} = import_resource(Resource),
@ -543,16 +568,39 @@ import_modules(Modules) ->
undefined -> undefined ->
ok; ok;
_ -> _ ->
lists:foreach(fun(#{<<"id">> := Id, NModules = migrate_modules(Modules),
<<"type">> := Type, lists:foreach(fun(#{<<"id">> := Id,
<<"config">> := Config, <<"type">> := Type,
<<"enabled">> := Enabled, <<"config">> := Config,
<<"created_at">> := CreatedAt, <<"enabled">> := Enabled,
<<"description">> := Description}) -> <<"created_at">> := CreatedAt,
_ = emqx_modules:import_module({Id, any_to_atom(Type), Config, Enabled, CreatedAt, Description}) <<"description">> := Description}) ->
end, Modules) _ = emqx_modules:import_module({Id, any_to_atom(Type), Config, Enabled, CreatedAt, Description})
end, NModules)
end. end.
migrate_modules(Modules) ->
migrate_modules(Modules, []).
migrate_modules([], Acc) ->
lists:reverse(Acc);
migrate_modules([#{<<"type">> := <<"mongo_authentication">>,
<<"config">> := Config} = Module | More], Acc) ->
WMode = case maps:get(<<"w_mode">>, Config, <<"unsafe">>) of
<<"undef">> -> <<"unsafe">>;
Other -> Other
end,
RMode = case maps:get(<<"r_mode">>, Config, <<"master">>) of
<<"undef">> -> <<"master">>;
<<"slave-ok">> -> <<"slave_ok">>;
Other0 -> Other0
end,
NConfig = Config#{<<"srv_record">> => false,
<<"w_mode">> => WMode,
<<"r_mode">> => RMode},
migrate_modules(More, [Module#{<<"config">> => NConfig} | Acc]);
migrate_modules([Module | More], Acc) ->
migrate_modules(More, [Module | Acc]).
import_schemas(Schemas) -> import_schemas(Schemas) ->
case ets:info(emqx_schema) of case ets:info(emqx_schema) of
@ -580,19 +628,129 @@ to_version(Version) when is_binary(Version) ->
to_version(Version) when is_list(Version) -> to_version(Version) when is_list(Version) ->
Version. Version.
upload_backup_file(Filename0, Bin) ->
case ensure_file_name(Filename0) of
{ok, Filename} ->
case check_json(Bin) of
{ok, _} ->
logger:info("write backup file ~p", [Filename]),
file:write_file(Filename, Bin);
{error, Reason} ->
{error, Reason}
end;
{error, Reason} ->
{error, Reason}
end.
list_backup_file() ->
Filter =
fun(File) ->
case file:read_file_info(File) of
{ok, #file_info{size = Size, ctime = CTime = {{Y, M, D}, {H, MM, S}}}} ->
Seconds = calendar:datetime_to_gregorian_seconds(CTime),
BaseFilename = to_binary(filename:basename(File)),
CreatedAt = to_binary(io_lib:format("~p-~p-~p ~p:~p:~p", [Y, M, D, H, MM, S])),
Info = {
Seconds,
[{filename, BaseFilename},
{size, Size},
{created_at, CreatedAt},
{node, node()}
]
},
{true, Info};
_ ->
false
end
end,
lists:filtermap(Filter, backup_files()).
backup_files() ->
backup_files(backup_dir()) ++ backup_files(backup_dir_old_version()).
backup_files(Dir) ->
{ok, FilesAll} = file:list_dir_all(Dir),
Files = lists:filtermap(fun legal_filename/1, FilesAll),
[filename:join([Dir, File]) || File <- Files].
look_up_file(Filename) when is_binary(Filename) ->
look_up_file(binary_to_list(Filename));
look_up_file(Filename) ->
DefOnNotFound = fun(_Filename) -> {error, not_found} end,
do_look_up_file(Filename, DefOnNotFound).
do_look_up_file(Filename, OnNotFound) when is_binary(Filename) ->
do_look_up_file(binary_to_list(Filename), OnNotFound);
do_look_up_file(Filename, OnNotFound) ->
Filter =
fun(MaybeFile) ->
filename:basename(MaybeFile) == Filename
end,
case lists:filter(Filter, backup_files()) of
[] ->
OnNotFound(Filename);
List ->
{ok, hd(List)}
end.
read_backup_file(Filename0) ->
case look_up_file(Filename0) of
{ok, Filename} ->
case file:read_file(Filename) of
{ok, Bin} ->
{ok, #{filename => to_binary(Filename0),
file => Bin}};
{error, Reason} ->
logger:error("read file ~p failed ~p", [Filename, Reason]),
{error, bad_file}
end;
{error, not_found} ->
{error, not_found}
end.
delete_backup_file(Filename0) ->
case look_up_file(Filename0) of
{ok, Filename} ->
case file:read_file_info(Filename) of
{ok, #file_info{}} ->
case file:delete(Filename) of
ok ->
logger:info("delete backup file ~p", [Filename]),
ok;
{error, Reason} ->
logger:error(
"delete backup file ~p error:~p", [Filename, Reason]),
{error, Reason}
end;
_ ->
{error, not_found}
end;
{error, not_found} ->
{error, not_found}
end.
-ifdef(TEST).
%% clean all for test
delete_all_backup_file() ->
[begin
Filename = proplists:get_value(filename, Info),
_ = delete_backup_file(Filename)
end || {_, Info} <- list_backup_file()],
ok.
-endif.
export() -> export() ->
Seconds = erlang:system_time(second), Seconds = erlang:system_time(second),
Data = do_export_data() ++ [{date, erlang:list_to_binary(emqx_mgmt_util:strftime(Seconds))}], Data = do_export_data() ++ [{date, erlang:list_to_binary(emqx_mgmt_util:strftime(Seconds))}],
{{Y, M, D}, {H, MM, S}} = emqx_mgmt_util:datetime(Seconds), {{Y, M, D}, {H, MM, S}} = emqx_mgmt_util:datetime(Seconds),
Filename = io_lib:format("emqx-export-~p-~p-~p-~p-~p-~p.json", [Y, M, D, H, MM, S]), BaseFilename = io_lib:format("emqx-export-~p-~p-~p-~p-~p-~p.json", [Y, M, D, H, MM, S]),
NFilename = filename:join([emqx:get_env(data_dir), Filename]), {ok, Filename} = ensure_file_name(BaseFilename),
ok = filelib:ensure_dir(NFilename), case file:write_file(Filename, emqx_json:encode(Data)) of
case file:write_file(NFilename, emqx_json:encode(Data)) of
ok -> ok ->
case file:read_file_info(NFilename) of case file:read_file_info(Filename) of
{ok, #file_info{size = Size, ctime = {{Y1, M1, D1}, {H1, MM1, S1}}}} -> {ok, #file_info{size = Size, ctime = {{Y1, M1, D1}, {H1, MM1, S1}}}} ->
CreatedAt = io_lib:format("~p-~p-~p ~p:~p:~p", [Y1, M1, D1, H1, MM1, S1]), CreatedAt = io_lib:format("~p-~p-~p ~p:~p:~p", [Y1, M1, D1, H1, MM1, S1]),
{ok, #{filename => list_to_binary(NFilename), {ok, #{filename => Filename,
size => Size, size => Size,
created_at => list_to_binary(CreatedAt), created_at => list_to_binary(CreatedAt),
node => node() node => node()
@ -628,9 +786,8 @@ do_export_extra_data() -> [].
-ifdef(EMQX_ENTERPRISE). -ifdef(EMQX_ENTERPRISE).
import(Filename, OverridesJson) -> import(Filename, OverridesJson) ->
case file:read_file(Filename) of case check_import_json(Filename) of
{ok, Json} -> {ok, Imported} ->
Imported = emqx_json:decode(Json, [return_maps]),
Overrides = emqx_json:decode(OverridesJson, [return_maps]), Overrides = emqx_json:decode(OverridesJson, [return_maps]),
Data = maps:merge(Imported, Overrides), Data = maps:merge(Imported, Overrides),
Version = to_version(maps:get(<<"version">>, Data)), Version = to_version(maps:get(<<"version">>, Data)),
@ -643,13 +800,13 @@ import(Filename, OverridesJson) ->
logger:error("The emqx data import failed: ~0p", [{Class, Reason, Stack}]), logger:error("The emqx data import failed: ~0p", [{Class, Reason, Stack}]),
{error, import_failed} {error, import_failed}
end; end;
Error -> Error {error, Reason} ->
{error, Reason}
end. end.
-else. -else.
import(Filename, OverridesJson) -> import(Filename, OverridesJson) ->
case file:read_file(Filename) of case check_import_json(Filename) of
{ok, Json} -> {ok, Imported} ->
Imported = emqx_json:decode(Json, [return_maps]),
Overrides = emqx_json:decode(OverridesJson, [return_maps]), Overrides = emqx_json:decode(OverridesJson, [return_maps]),
Data = maps:merge(Imported, Overrides), Data = maps:merge(Imported, Overrides),
Version = to_version(maps:get(<<"version">>, Data)), Version = to_version(maps:get(<<"version">>, Data)),
@ -668,10 +825,65 @@ import(Filename, OverridesJson) ->
logger:error("Unsupported version: ~p", [Version]), logger:error("Unsupported version: ~p", [Version]),
{error, unsupported_version, Version} {error, unsupported_version, Version}
end; end;
Error -> Error {error, Reason} ->
{error, Reason}
end. end.
-endif. -endif.
-spec(check_import_json(binary() | string()) -> {ok, map()} | {error, term()}).
check_import_json(Filename) ->
OnNotFound =
fun(F) ->
case filelib:is_file(F) of
true -> {ok, F};
false -> {error, not_found}
end
end,
FunList = [
fun(F) -> do_look_up_file(F, OnNotFound) end,
fun(F) -> file:read_file(F) end,
fun check_json/1
],
do_check_import_json(Filename, FunList).
do_check_import_json(Res, []) ->
{ok, Res};
do_check_import_json(Acc, [Fun | FunList]) ->
case Fun(Acc) of
{ok, Next} ->
do_check_import_json(Next, FunList);
{error, Reason} ->
{error, Reason}
end.
ensure_file_name(Filename) ->
case legal_filename(Filename) of
true ->
{ok, filename:join(backup_dir(), Filename)};
false ->
{error, bad_filename}
end.
backup_dir() ->
Dir = filename:join(emqx:get_env(data_dir), ?BACKUP_DIR),
ok = filelib:ensure_dir(filename:join([Dir, dummy])),
Dir.
backup_dir_old_version() ->
emqx:get_env(data_dir).
legal_filename(Filename) ->
MaybeJson = filename:extension(Filename),
MaybeJson == ".json" orelse MaybeJson == <<".json">>.
check_json(MaybeJson) ->
case emqx_json:safe_decode(MaybeJson, [return_maps]) of
{ok, Json} ->
{ok, Json};
{error, _} ->
{error, bad_json}
end.
do_import_data(Data, Version) -> do_import_data(Data, Version) ->
import_resources_and_rules(maps:get(<<"resources">>, Data, []), maps:get(<<"rules">>, Data, []), Version), import_resources_and_rules(maps:get(<<"resources">>, Data, []), maps:get(<<"rules">>, Data, []), Version),
import_blacklist(maps:get(<<"blacklist">>, Data, [])), import_blacklist(maps:get(<<"blacklist">>, Data, [])),
@ -725,6 +937,10 @@ is_version_supported2("4.1") ->
true; true;
is_version_supported2("4.3") -> is_version_supported2("4.3") ->
true; true;
is_version_supported2("4.4") ->
true;
is_version_supported2("4.5") ->
true;
is_version_supported2(Version) -> is_version_supported2(Version) ->
case re:run(Version, "^4.[02].\\d+$", [{capture, none}]) of case re:run(Version, "^4.[02].\\d+$", [{capture, none}]) of
match -> match ->
@ -790,3 +1006,6 @@ get_old_type() ->
set_old_type(Type) -> set_old_type(Type) ->
application:set_env(emqx_auth_mnesia, as, Type). application:set_env(emqx_auth_mnesia, as, Type).
to_binary(Bin) when is_binary(Bin) -> Bin;
to_binary(Str) when is_list(Str) -> list_to_binary(Str).

View File

@ -91,7 +91,8 @@ listener_name(Proto) ->
http_handlers() -> http_handlers() ->
Plugins = lists:map(fun(Plugin) -> Plugin#plugin.name end, emqx_plugins:list()), Plugins = lists:map(fun(Plugin) -> Plugin#plugin.name end, emqx_plugins:list()),
[{"/api/v4", minirest:handler(#{apps => (Plugins ++ [emqx_modules]) -- ?EXCEPT_PLUGIN, [{"/api/v4", minirest:handler(#{apps => (Plugins ++
[emqx_plugin_libs, emqx_modules]) -- ?EXCEPT_PLUGIN,
except => ?EXCEPT, except => ?EXCEPT,
filter => fun ?MODULE:filter/1}), filter => fun ?MODULE:filter/1}),
[{authorization, fun ?MODULE:authorize_appid/1}]}]. [{authorization, fun ?MODULE:authorize_appid/1}]}].
@ -131,6 +132,7 @@ filter(_) ->
true. true.
-else. -else.
filter(#{app := emqx_modules}) -> true; filter(#{app := emqx_modules}) -> true;
filter(#{app := emqx_plugin_libs}) -> true;
filter(#{app := App}) -> filter(#{app := App}) ->
case emqx_plugins:find_plugin(App) of case emqx_plugins:find_plugin(App) of
false -> false; false -> false;

View File

@ -183,7 +183,10 @@ do_import(File, Config, Overrides) ->
mnesia:clear_table(?ACL_TABLE2), mnesia:clear_table(?ACL_TABLE2),
mnesia:clear_table(emqx_user), mnesia:clear_table(emqx_user),
emqx_acl_mnesia_migrator:migrate_records(), emqx_acl_mnesia_migrator:migrate_records(),
Filename = filename:join(proplists:get_value(data_dir, Config), File), Filename = filename:basename(File),
FilePath = filename:join([proplists:get_value(data_dir, Config), File]),
{ok, Bin} = file:read_file(FilePath),
ok = emqx_mgmt_data_backup:upload_backup_file(Filename, Bin),
emqx_mgmt_data_backup:import(Filename, Overrides). emqx_mgmt_data_backup:import(Filename, Overrides).
test_import(username, {Username, Password}) -> test_import(username, {Username, Password}) ->

View File

@ -34,14 +34,18 @@ init_per_suite(Cfg) ->
Cfg. Cfg.
end_per_suite(Cfg) -> end_per_suite(Cfg) ->
emqx_mgmt_data_backup:delete_all_backup_file(),
emqx_ct_helpers:stop_apps([emqx_management, emqx_rule_engine]), emqx_ct_helpers:stop_apps([emqx_management, emqx_rule_engine]),
Cfg. Cfg.
get_data_path() -> get_data_path() ->
emqx_ct_helpers:deps_path(emqx_management, "test/emqx_bridge_mqtt_data_export_import_SUITE_data/"). emqx_ct_helpers:deps_path(emqx_management, "test/emqx_bridge_mqtt_data_export_import_SUITE_data/").
import(FilePath, Version) -> import(FilePath0, Version) ->
ok = emqx_mgmt_data_backup:import(get_data_path() ++ "/" ++ FilePath, <<"{}">>), Filename = filename:basename(FilePath0),
FilePath = filename:join([get_data_path(), FilePath0]),
{ok, Bin} = file:read_file(FilePath),
ok = emqx_mgmt_data_backup:upload_backup_file(Filename, Bin),
timer:sleep(500), timer:sleep(500),
lists:foreach(fun(#resource{id = Id, config = Config} = _Resource) -> lists:foreach(fun(#resource{id = Id, config = Config} = _Resource) ->
case Id of case Id of
@ -181,4 +185,4 @@ remove_resources() ->
lists:foreach(fun(#resource{id = Id}) -> lists:foreach(fun(#resource{id = Id}) ->
emqx_rule_engine:delete_resource(Id) emqx_rule_engine:delete_resource(Id)
end, emqx_rule_registry:get_resources()), end, emqx_rule_registry:get_resources()),
timer:sleep(500). timer:sleep(500).

View File

@ -29,28 +29,35 @@
-define(LOG_HANDLER_ID, [file, default]). -define(LOG_HANDLER_ID, [file, default]).
all() -> all() ->
OtherTCs = (emqx_ct:all(?MODULE) -- manage_apps_tests()) -- check_cli_tests(),
[{group, manage_apps}, [{group, manage_apps},
{group, check_cli}]. {group, check_cli}] ++ OtherTCs.
manage_apps_tests() ->
[t_app].
check_cli_tests() ->
[t_cli,
t_log_cmd,
t_mgmt_cmd,
t_status_cmd,
t_clients_cmd,
t_vm_cmd,
t_plugins_cmd,
t_trace_cmd,
t_traces_cmd,
t_broker_cmd,
t_router_cmd,
t_subscriptions_cmd,
t_listeners_cmd_old,
t_listeners_cmd_new
].
groups() -> groups() ->
[{manage_apps, [sequence], [{manage_apps, [sequence],
[t_app manage_apps_tests()},
]}, {check_cli, [sequence],
{check_cli, [sequence], check_cli_tests()}].
[t_cli,
t_log_cmd,
t_mgmt_cmd,
t_status_cmd,
t_clients_cmd,
t_vm_cmd,
t_plugins_cmd,
t_trace_cmd,
t_broker_cmd,
t_router_cmd,
t_subscriptions_cmd,
t_listeners_cmd_old,
t_listeners_cmd_new
]}].
apps() -> apps() ->
[emqx_management, emqx_auth_mnesia, emqx_modules]. [emqx_management, emqx_auth_mnesia, emqx_modules].
@ -64,6 +71,62 @@ init_per_suite(Config) ->
end_per_suite(_Config) -> end_per_suite(_Config) ->
emqx_ct_helpers:stop_apps(apps()). emqx_ct_helpers:stop_apps(apps()).
init_per_testcase(t_plugins_cmd, Config) ->
meck:new(emqx_plugins, [non_strict, passthrough]),
meck:expect(emqx_plugins, load, fun(_) -> ok end),
meck:expect(emqx_plugins, unload, fun(_) -> ok end),
meck:expect(emqx_plugins, reload, fun(_) -> ok end),
mock_print(),
Config;
init_per_testcase(t_import_outside_backup_dir, Config) ->
BackupDir = emqx_mgmt_data_backup:backup_dir(),
{ok, Files} = file:list_dir(BackupDir),
lists:foreach(
fun(F) ->
file:delete(filename:join(BackupDir, F))
end, Files),
RandomName = emqx_guid:to_hexstr(emqx_guid:gen()),
Filepath = "/tmp/" ++ binary_to_list(RandomName) ++ ".json",
FakeData = #{version => "4.4"},
ok = file:write_file(Filepath, emqx_json:encode(FakeData)),
[ {tmp_file, Filepath}
| Config];
init_per_testcase(t_backup_file, Config) ->
BackupDir = emqx_mgmt_data_backup:backup_dir(),
{ok, Files} = file:list_dir(BackupDir),
lists:foreach(
fun(F) ->
file:delete(filename:join(BackupDir, F))
end, Files),
Config;
init_per_testcase(_Case, Config) ->
mock_print(),
Config.
end_per_testcase(t_plugins_cmd, _Config) ->
meck:unload(emqx_plugins),
unmock_print();
end_per_testcase(t_import_outside_backup_dir, Config) ->
Filepath = ?config(tmp_file, Config),
file:delete(Filepath),
BackupDir = emqx_mgmt_data_backup:backup_dir(),
{ok, Files} = file:list_dir(BackupDir),
lists:foreach(
fun(F) ->
file:delete(filename:join(BackupDir, F))
end, Files),
ok;
end_per_testcase(t_backup_file, _Config) ->
BackupDir = emqx_mgmt_data_backup:backup_dir(),
{ok, Files} = file:list_dir(BackupDir),
lists:foreach(
fun(F) ->
file:delete(filename:join(BackupDir, F))
end, Files),
ok;
end_per_testcase(_Case, _Config) ->
unmock_print().
t_app(_Config) -> t_app(_Config) ->
{ok, AppSecret} = emqx_mgmt_auth:add_app(<<"app_id">>, <<"app_name">>), {ok, AppSecret} = emqx_mgmt_auth:add_app(<<"app_id">>, <<"app_name">>),
?assert(emqx_mgmt_auth:is_authorized(<<"app_id">>, AppSecret)), ?assert(emqx_mgmt_auth:is_authorized(<<"app_id">>, AppSecret)),
@ -96,7 +159,6 @@ t_app(_Config) ->
ok. ok.
t_log_cmd(_) -> t_log_cmd(_) ->
mock_print(),
lists:foreach(fun(Level) -> lists:foreach(fun(Level) ->
emqx_mgmt_cli:log(["primary-level", Level]), emqx_mgmt_cli:log(["primary-level", Level]),
?assertEqual(Level ++ "\n", emqx_mgmt_cli:log(["primary-level"])) ?assertEqual(Level ++ "\n", emqx_mgmt_cli:log(["primary-level"]))
@ -109,12 +171,9 @@ t_log_cmd(_) ->
?assertEqual(Level ++ "\n", emqx_mgmt_cli:log(["handlers", "set-level", ?assertEqual(Level ++ "\n", emqx_mgmt_cli:log(["handlers", "set-level",
atom_to_list(Id), Level])) atom_to_list(Id), Level]))
end, ?LOG_LEVELS) end, ?LOG_LEVELS)
|| #{id := Id} <- emqx_logger:get_log_handlers()], || #{id := Id} <- emqx_logger:get_log_handlers()].
meck:unload().
t_mgmt_cmd(_) -> t_mgmt_cmd(_) ->
% ct:pal("start testing the mgmt command"),
mock_print(),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:mgmt( ?assertMatch({match, _}, re:run(emqx_mgmt_cli:mgmt(
["lookup", "emqx_appid"]), "Not Found.")), ["lookup", "emqx_appid"]), "Not Found.")),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:mgmt( ?assertMatch({match, _}, re:run(emqx_mgmt_cli:mgmt(
@ -127,28 +186,19 @@ t_mgmt_cmd(_) ->
["update", "emqx_appid", "ts"]), "update successfully")), ["update", "emqx_appid", "ts"]), "update successfully")),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:mgmt( ?assertMatch({match, _}, re:run(emqx_mgmt_cli:mgmt(
["delete", "emqx_appid"]), "ok")), ["delete", "emqx_appid"]), "ok")),
ok = emqx_mgmt_cli:mgmt(["list"]), ok = emqx_mgmt_cli:mgmt(["list"]).
meck:unload().
t_status_cmd(_) -> t_status_cmd(_) ->
% ct:pal("start testing status command"),
mock_print(),
%% init internal status seem to be always 'starting' when running ct tests %% init internal status seem to be always 'starting' when running ct tests
?assertMatch({match, _}, re:run(emqx_mgmt_cli:status([]), "Node\s.*@.*\sis\sstart(ed|ing)")), ?assertMatch({match, _}, re:run(emqx_mgmt_cli:status([]), "Node\s.*@.*\sis\sstart(ed|ing)")).
meck:unload().
t_broker_cmd(_) -> t_broker_cmd(_) ->
% ct:pal("start testing the broker command"),
mock_print(),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:broker([]), "sysdescr")), ?assertMatch({match, _}, re:run(emqx_mgmt_cli:broker([]), "sysdescr")),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:broker(["stats"]), "subscriptions.shared")), ?assertMatch({match, _}, re:run(emqx_mgmt_cli:broker(["stats"]), "subscriptions.shared")),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:broker(["metrics"]), "bytes.sent")), ?assertMatch({match, _}, re:run(emqx_mgmt_cli:broker(["metrics"]), "bytes.sent")),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:broker([undefined]), "broker")), ?assertMatch({match, _}, re:run(emqx_mgmt_cli:broker([undefined]), "broker")).
meck:unload().
t_clients_cmd(_) -> t_clients_cmd(_) ->
% ct:pal("start testing the client command"),
mock_print(),
process_flag(trap_exit, true), process_flag(trap_exit, true),
{ok, T} = emqtt:start_link([{clientid, <<"client12">>}, {ok, T} = emqtt:start_link([{clientid, <<"client12">>},
{username, <<"testuser1">>}, {username, <<"testuser1">>},
@ -164,7 +214,6 @@ t_clients_cmd(_) ->
receive receive
{'EXIT', T, _} -> {'EXIT', T, _} ->
ok ok
% ct:pal("Connection closed: ~p~n", [Reason])
after after
500 -> 500 ->
erlang:error("Client is not kick") erlang:error("Client is not kick")
@ -179,10 +228,11 @@ t_clients_cmd(_) ->
{ok, Connack, <<>>, _} = raw_recv_pase(Bin), {ok, Connack, <<>>, _} = raw_recv_pase(Bin),
timer:sleep(300), timer:sleep(300),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:clients(["show", "client13"]), "client13")), ?assertMatch({match, _}, re:run(emqx_mgmt_cli:clients(["show", "client13"]), "client13")),
meck:unload().
% emqx_mgmt_cli:clients(["kick", "client13"]), % emqx_mgmt_cli:clients(["kick", "client13"]),
% timer:sleep(500), % timer:sleep(500),
% ?assertMatch({match, _}, re:run(emqx_mgmt_cli:clients(["show", "client13"]), "Not Found")). % ?assertMatch({match, _}, re:run(emqx_mgmt_cli:clients(["show", "client13"]), "Not Found")).
ok.
raw_recv_pase(Packet) -> raw_recv_pase(Packet) ->
emqx_frame:parse(Packet). emqx_frame:parse(Packet).
@ -191,8 +241,6 @@ raw_send_serialize(Packet) ->
emqx_frame:serialize(Packet). emqx_frame:serialize(Packet).
t_vm_cmd(_) -> t_vm_cmd(_) ->
% ct:pal("start testing the vm command"),
mock_print(),
[[?assertMatch({match, _}, re:run(Result, Name)) [[?assertMatch({match, _}, re:run(Result, Name))
|| Result <- emqx_mgmt_cli:vm([Name])] || Result <- emqx_mgmt_cli:vm([Name])]
|| Name <- ["load", "memory", "process", "io", "ports"]], || Name <- ["load", "memory", "process", "io", "ports"]],
@ -205,12 +253,9 @@ t_vm_cmd(_) ->
[?assertMatch({match, _}, re:run(Result, "io")) [?assertMatch({match, _}, re:run(Result, "io"))
|| Result <- emqx_mgmt_cli:vm(["io"])], || Result <- emqx_mgmt_cli:vm(["io"])],
[?assertMatch({match, _}, re:run(Result, "ports")) [?assertMatch({match, _}, re:run(Result, "ports"))
|| Result <- emqx_mgmt_cli:vm(["ports"])], || Result <- emqx_mgmt_cli:vm(["ports"])].
unmock_print().
t_trace_cmd(_) -> t_trace_cmd(_) ->
% ct:pal("start testing the trace command"),
mock_print(),
logger:set_primary_config(level, debug), logger:set_primary_config(level, debug),
{ok, T} = emqtt:start_link([{clientid, <<"client">>}, {ok, T} = emqtt:start_link([{clientid, <<"client">>},
{username, <<"testuser">>}, {username, <<"testuser">>},
@ -237,12 +282,34 @@ t_trace_cmd(_) ->
Trace7 = emqx_mgmt_cli:trace(["start", "topic", "a/b/c", Trace7 = emqx_mgmt_cli:trace(["start", "topic", "a/b/c",
"log/clientid_trace.log", "error"]), "log/clientid_trace.log", "error"]),
?assertMatch({match, _}, re:run(Trace7, "successfully")), ?assertMatch({match, _}, re:run(Trace7, "successfully")),
logger:set_primary_config(level, error), logger:set_primary_config(level, error).
unmock_print().
t_traces_cmd(_) ->
emqx_trace:create_table(),
Count1 = emqx_mgmt_cli:traces(["list"]),
?assertEqual(0, Count1),
Error1 = emqx_mgmt_cli:traces(["start", "test-name", "client", "clientid-dev"]),
?assertMatch({match, _}, re:run(Error1, "Tracer module not started")),
emqx_trace:start_link(),
Trace1 = emqx_mgmt_cli:traces(["start", "test-name", "client", "clientid-dev"]),
?assertMatch({match, _}, re:run(Trace1, "successfully")),
Count2 = emqx_mgmt_cli:traces(["list"]),
?assertEqual(1, Count2),
Error2 = emqx_mgmt_cli:traces(["start", "test-name", "client", "clientid-dev"]),
?assertMatch({match, _}, re:run(Error2, "already_existed")),
Trace2 = emqx_mgmt_cli:traces(["stop", "test-name"]),
?assertMatch({match, _}, re:run(Trace2, "successfully")),
Count3 = emqx_mgmt_cli:traces(["list"]),
?assertEqual(1, Count3),
Trace3 = emqx_mgmt_cli:traces(["delete", "test-name"]),
?assertMatch({match, _}, re:run(Trace3, "successfully")),
Count4 = emqx_mgmt_cli:traces(["list"]),
?assertEqual(0, Count4),
Error3 = emqx_mgmt_cli:traces(["delete", "test-name"]),
?assertMatch({match, _}, re:run(Error3, "not_found")),
ok.
t_router_cmd(_) -> t_router_cmd(_) ->
% ct:pal("start testing the router command"),
mock_print(),
{ok, T} = emqtt:start_link([{clientid, <<"client1">>}, {ok, T} = emqtt:start_link([{clientid, <<"client1">>},
{username, <<"testuser1">>}, {username, <<"testuser1">>},
{password, <<"pass1">>} {password, <<"pass1">>}
@ -257,12 +324,9 @@ t_router_cmd(_) ->
emqtt:connect(T1), emqtt:connect(T1),
emqtt:subscribe(T1, <<"a/b/c/d">>), emqtt:subscribe(T1, <<"a/b/c/d">>),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:routes(["list"]), "a/b/c | a/b/c")), ?assertMatch({match, _}, re:run(emqx_mgmt_cli:routes(["list"]), "a/b/c | a/b/c")),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:routes(["show", "a/b/c"]), "a/b/c")), ?assertMatch({match, _}, re:run(emqx_mgmt_cli:routes(["show", "a/b/c"]), "a/b/c")).
unmock_print().
t_subscriptions_cmd(_) -> t_subscriptions_cmd(_) ->
% ct:pal("Start testing the subscriptions command"),
mock_print(),
{ok, T3} = emqtt:start_link([{clientid, <<"client">>}, {ok, T3} = emqtt:start_link([{clientid, <<"client">>},
{username, <<"testuser">>}, {username, <<"testuser">>},
{password, <<"pass">>} {password, <<"pass">>}
@ -273,22 +337,18 @@ t_subscriptions_cmd(_) ->
[?assertMatch({match, _} , re:run(Result, "b/b/c")) [?assertMatch({match, _} , re:run(Result, "b/b/c"))
|| Result <- emqx_mgmt_cli:subscriptions(["show", <<"client">>])], || Result <- emqx_mgmt_cli:subscriptions(["show", <<"client">>])],
?assertEqual(emqx_mgmt_cli:subscriptions(["add", "client", "b/b/c", "0"]), "ok\n"), ?assertEqual(emqx_mgmt_cli:subscriptions(["add", "client", "b/b/c", "0"]), "ok\n"),
?assertEqual(emqx_mgmt_cli:subscriptions(["del", "client", "b/b/c"]), "ok\n"), ?assertEqual(emqx_mgmt_cli:subscriptions(["del", "client", "b/b/c"]), "ok\n").
unmock_print().
t_listeners_cmd_old(_) -> t_listeners_cmd_old(_) ->
ok = emqx_listeners:ensure_all_started(), ok = emqx_listeners:ensure_all_started(),
mock_print(),
?assertEqual(emqx_mgmt_cli:listeners([]), ok), ?assertEqual(emqx_mgmt_cli:listeners([]), ok),
?assertEqual( ?assertEqual(
"Stop mqtt:wss:external listener on 0.0.0.0:8084 successfully.\n", "Stop mqtt:wss:external listener on 0.0.0.0:8084 successfully.\n",
emqx_mgmt_cli:listeners(["stop", "wss", "8084"]) emqx_mgmt_cli:listeners(["stop", "wss", "8084"])
), ).
unmock_print().
t_listeners_cmd_new(_) -> t_listeners_cmd_new(_) ->
ok = emqx_listeners:ensure_all_started(), ok = emqx_listeners:ensure_all_started(),
mock_print(),
?assertEqual(emqx_mgmt_cli:listeners([]), ok), ?assertEqual(emqx_mgmt_cli:listeners([]), ok),
?assertEqual( ?assertEqual(
"Stop mqtt:wss:external listener on 0.0.0.0:8084 successfully.\n", "Stop mqtt:wss:external listener on 0.0.0.0:8084 successfully.\n",
@ -306,15 +366,9 @@ t_listeners_cmd_new(_) ->
emqx_mgmt_cli:listeners(["restart", "bad:listener:identifier"]), emqx_mgmt_cli:listeners(["restart", "bad:listener:identifier"]),
"Failed to restart bad:listener:identifier listener:" "Failed to restart bad:listener:identifier listener:"
" {no_such_listener,\"bad:listener:identifier\"}\n" " {no_such_listener,\"bad:listener:identifier\"}\n"
), ).
unmock_print().
t_plugins_cmd(_) -> t_plugins_cmd(_) ->
mock_print(),
meck:new(emqx_plugins, [non_strict, passthrough]),
meck:expect(emqx_plugins, load, fun(_) -> ok end),
meck:expect(emqx_plugins, unload, fun(_) -> ok end),
meck:expect(emqx_plugins, reload, fun(_) -> ok end),
?assertEqual(emqx_mgmt_cli:plugins(["list"]), ok), ?assertEqual(emqx_mgmt_cli:plugins(["list"]), ok),
?assertEqual( ?assertEqual(
emqx_mgmt_cli:plugins(["unload", "emqx_auth_mnesia"]), emqx_mgmt_cli:plugins(["unload", "emqx_auth_mnesia"]),
@ -327,11 +381,9 @@ t_plugins_cmd(_) ->
?assertEqual( ?assertEqual(
emqx_mgmt_cli:plugins(["unload", "emqx_management"]), emqx_mgmt_cli:plugins(["unload", "emqx_management"]),
"Plugin emqx_management can not be unloaded.\n" "Plugin emqx_management can not be unloaded.\n"
), ).
unmock_print().
t_cli(_) -> t_cli(_) ->
mock_print(),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:status([""]), "status")), ?assertMatch({match, _}, re:run(emqx_mgmt_cli:status([""]), "status")),
[?assertMatch({match, _}, re:run(Value, "broker")) [?assertMatch({match, _}, re:run(Value, "broker"))
|| Value <- emqx_mgmt_cli:broker([""])], || Value <- emqx_mgmt_cli:broker([""])],
@ -353,12 +405,38 @@ t_cli(_) ->
|| Value <- emqx_mgmt_cli:mnesia([""])], || Value <- emqx_mgmt_cli:mnesia([""])],
[?assertMatch({match, _}, re:run(Value, "trace")) [?assertMatch({match, _}, re:run(Value, "trace"))
|| Value <- emqx_mgmt_cli:trace([""])], || Value <- emqx_mgmt_cli:trace([""])],
[?assertMatch({match, _}, re:run(Value, "traces"))
|| Value <- emqx_mgmt_cli:traces([""])],
[?assertMatch({match, _}, re:run(Value, "mgmt")) [?assertMatch({match, _}, re:run(Value, "mgmt"))
|| Value <- emqx_mgmt_cli:mgmt([""])], || Value <- emqx_mgmt_cli:mgmt([""])].
unmock_print().
t_backup_file(_)->
Filename = <<"test.json">>,
BadFilename = <<"bad.notjson">>,
Bin = emqx_json:encode(#{a => b}),
BadBin = <<"[bad json]">>,
{error, bad_filename} = emqx_mgmt_data_backup:upload_backup_file(BadFilename, Bin),
{error, bad_json} = emqx_mgmt_data_backup:upload_backup_file(Filename, BadBin),
ok = emqx_mgmt_data_backup:upload_backup_file(Filename, Bin),
{ok, #{file := <<"{\"a\":\"b\"}">>, filename := <<"test.json">>}} =
emqx_mgmt_data_backup:read_backup_file(Filename),
[{_, FileInfoList}] = emqx_mgmt_data_backup:list_backup_file(),
Filename = proplists:get_value(filename, FileInfoList),
ok = emqx_mgmt_data_backup:delete_backup_file(Filename),
{error, not_found} = emqx_mgmt_data_backup:delete_backup_file(BadFilename),
ok.
t_import_outside_backup_dir(Config) ->
Filepath = ?config(tmp_file, Config),
Env = "{}",
?assertEqual(ok, emqx_mgmt_data_backup:import(Filepath, Env)),
ok.
mock_print() -> mock_print() ->
catch meck:unload(emqx_ctl), ok = safe_unmeck(emqx_ctl),
meck:new(emqx_ctl, [non_strict, passthrough]), meck:new(emqx_ctl, [non_strict, passthrough]),
meck:expect(emqx_ctl, print, fun(Arg) -> emqx_ctl:format(Arg, []) end), meck:expect(emqx_ctl, print, fun(Arg) -> emqx_ctl:format(Arg, []) end),
meck:expect(emqx_ctl, print, fun(Msg, Arg) -> emqx_ctl:format(Msg, Arg) end), meck:expect(emqx_ctl, print, fun(Msg, Arg) -> emqx_ctl:format(Msg, Arg) end),
@ -367,3 +445,12 @@ mock_print() ->
unmock_print() -> unmock_print() ->
meck:unload(emqx_ctl). meck:unload(emqx_ctl).
safe_unmeck(Module) ->
try
meck:unload(Module),
ok
catch
_ : _ ->
ok
end.

View File

@ -75,30 +75,40 @@ t_alarms(_) ->
?assert(is_existing(alarm2, emqx_alarm:get_alarms(activated))), ?assert(is_existing(alarm2, emqx_alarm:get_alarms(activated))),
{ok, Return1} = request_api(get, api_path(["alarms/activated"]), auth_header_()), {ok, Return1} = request_api(get, api_path(["alarms/activated"]), auth_header_()),
?assert(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return1))))), ?assert(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>,
?assert(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return1))))), lists:nth(1, get(<<"data">>, Return1))))),
?assert(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>,
lists:nth(1, get(<<"data">>, Return1))))),
emqx_alarm:deactivate(alarm1), emqx_alarm:deactivate(alarm1),
{ok, Return2} = request_api(get, api_path(["alarms"]), auth_header_()), {ok, Return2} = request_api(get, api_path(["alarms"]), auth_header_()),
?assert(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return2))))), ?assert(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>,
?assert(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return2))))), lists:nth(1, get(<<"data">>, Return2))))),
?assert(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>,
lists:nth(1, get(<<"data">>, Return2))))),
{ok, Return3} = request_api(get, api_path(["alarms/deactivated"]), auth_header_()), {ok, Return3} = request_api(get, api_path(["alarms/deactivated"]), auth_header_()),
?assert(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return3))))), ?assert(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>,
?assertNot(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return3))))), lists:nth(1, get(<<"data">>, Return3))))),
?assertNot(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>,
lists:nth(1, get(<<"data">>, Return3))))),
emqx_alarm:deactivate(alarm2), emqx_alarm:deactivate(alarm2),
{ok, Return4} = request_api(get, api_path(["alarms/deactivated"]), auth_header_()), {ok, Return4} = request_api(get, api_path(["alarms/deactivated"]), auth_header_()),
?assert(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return4))))), ?assert(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>,
?assert(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return4))))), lists:nth(1, get(<<"data">>, Return4))))),
?assert(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>,
lists:nth(1, get(<<"data">>, Return4))))),
{ok, _} = request_api(delete, api_path(["alarms/deactivated"]), auth_header_()), {ok, _} = request_api(delete, api_path(["alarms/deactivated"]), auth_header_()),
{ok, Return5} = request_api(get, api_path(["alarms/deactivated"]), auth_header_()), {ok, Return5} = request_api(get, api_path(["alarms/deactivated"]), auth_header_()),
?assertNot(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return5))))), ?assertNot(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>,
?assertNot(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return5))))). lists:nth(1, get(<<"data">>, Return5))))),
?assertNot(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>,
lists:nth(1, get(<<"data">>, Return5))))).
t_apps(_) -> t_apps(_) ->
AppId = <<"123456">>, AppId = <<"123456">>,
@ -152,7 +162,8 @@ t_banned(_) ->
[Banned] = get(<<"data">>, Result), [Banned] = get(<<"data">>, Result),
?assertEqual(Who, maps:get(<<"who">>, Banned)), ?assertEqual(Who, maps:get(<<"who">>, Banned)),
{ok, _} = request_api(delete, api_path(["banned", "clientid", binary_to_list(Who)]), auth_header_()), {ok, _} = request_api(delete, api_path(["banned", "clientid", binary_to_list(Who)]),
auth_header_()),
{ok, Result2} = request_api(get, api_path(["banned"]), auth_header_()), {ok, Result2} = request_api(get, api_path(["banned"]), auth_header_()),
?assertEqual([], get(<<"data">>, Result2)). ?assertEqual([], get(<<"data">>, Result2)).
@ -204,40 +215,50 @@ t_clients(_) ->
meck:new(emqx_mgmt, [passthrough, no_history]), meck:new(emqx_mgmt, [passthrough, no_history]),
meck:expect(emqx_mgmt, kickout_client, 1, fun(_) -> {error, undefined} end), meck:expect(emqx_mgmt, kickout_client, 1, fun(_) -> {error, undefined} end),
{ok, MeckRet1} = request_api(delete, api_path(["clients", binary_to_list(ClientId1)]), auth_header_()), {ok, MeckRet1} = request_api(delete, api_path(["clients", binary_to_list(ClientId1)]),
auth_header_()),
?assertEqual(?ERROR1, get(<<"code">>, MeckRet1)), ?assertEqual(?ERROR1, get(<<"code">>, MeckRet1)),
meck:expect(emqx_mgmt, clean_acl_cache, 1, fun(_) -> {error, undefined} end), meck:expect(emqx_mgmt, clean_acl_cache, 1, fun(_) -> {error, undefined} end),
{ok, MeckRet2} = request_api(delete, api_path(["clients", binary_to_list(ClientId1), "acl_cache"]), auth_header_()), {ok, MeckRet2} = request_api(delete,
api_path(["clients", binary_to_list(ClientId1), "acl_cache"]), auth_header_()),
?assertEqual(?ERROR1, get(<<"code">>, MeckRet2)), ?assertEqual(?ERROR1, get(<<"code">>, MeckRet2)),
meck:expect(emqx_mgmt, list_acl_cache, 1, fun(_) -> {error, undefined} end), meck:expect(emqx_mgmt, list_acl_cache, 1, fun(_) -> {error, undefined} end),
{ok, MeckRet3} = request_api(get, api_path(["clients", binary_to_list(ClientId2), "acl_cache"]), auth_header_()), {ok, MeckRet3} = request_api(get,
api_path(["clients", binary_to_list(ClientId2), "acl_cache"]), auth_header_()),
?assertEqual(?ERROR1, get(<<"code">>, MeckRet3)), ?assertEqual(?ERROR1, get(<<"code">>, MeckRet3)),
meck:unload(emqx_mgmt), meck:unload(emqx_mgmt),
{ok, Ok} = request_api(delete, api_path(["clients", binary_to_list(ClientId1)]), auth_header_()), {ok, Ok} = request_api(delete,
api_path(["clients", binary_to_list(ClientId1)]), auth_header_()),
?assertEqual(?SUCCESS, get(<<"code">>, Ok)), ?assertEqual(?SUCCESS, get(<<"code">>, Ok)),
timer:sleep(300), timer:sleep(300),
{ok, Ok1} = request_api(delete, api_path(["clients", binary_to_list(ClientId1)]), auth_header_()), {ok, Ok1} = request_api(delete,
api_path(["clients", binary_to_list(ClientId1)]), auth_header_()),
?assertEqual(?SUCCESS, get(<<"code">>, Ok1)), ?assertEqual(?SUCCESS, get(<<"code">>, Ok1)),
{ok, Clients6} = request_api(get, api_path(["clients"]), "_limit=100&_page=1", auth_header_()), {ok, Clients6} = request_api(get,
api_path(["clients"]), "_limit=100&_page=1", auth_header_()),
?assertEqual(1, maps:get(<<"count">>, get(<<"meta">>, Clients6))), ?assertEqual(1, maps:get(<<"count">>, get(<<"meta">>, Clients6))),
{ok, NotFound1} = request_api(get, api_path(["clients", binary_to_list(ClientId1), "acl_cache"]), auth_header_()), {ok, NotFound1} = request_api(get,
api_path(["clients", binary_to_list(ClientId1), "acl_cache"]), auth_header_()),
?assertEqual(?ERROR12, get(<<"code">>, NotFound1)), ?assertEqual(?ERROR12, get(<<"code">>, NotFound1)),
{ok, NotFound2} = request_api(delete, api_path(["clients", binary_to_list(ClientId1), "acl_cache"]), auth_header_()), {ok, NotFound2} = request_api(delete,
api_path(["clients", binary_to_list(ClientId1), "acl_cache"]), auth_header_()),
?assertEqual(?ERROR12, get(<<"code">>, NotFound2)), ?assertEqual(?ERROR12, get(<<"code">>, NotFound2)),
{ok, EmptyAclCache} = request_api(get, api_path(["clients", binary_to_list(ClientId2), "acl_cache"]), auth_header_()), {ok, EmptyAclCache} = request_api(get,
api_path(["clients", binary_to_list(ClientId2), "acl_cache"]), auth_header_()),
?assertEqual(0, length(get(<<"data">>, EmptyAclCache))), ?assertEqual(0, length(get(<<"data">>, EmptyAclCache))),
{ok, Ok1} = request_api(delete, api_path(["clients", binary_to_list(ClientId2), "acl_cache"]), auth_header_()), {ok, Ok1} = request_api(delete,
api_path(["clients", binary_to_list(ClientId2), "acl_cache"]), auth_header_()),
?assertEqual(?SUCCESS, get(<<"code">>, Ok1)). ?assertEqual(?SUCCESS, get(<<"code">>, Ok1)).
receive_exit(0) -> receive_exit(0) ->
@ -256,7 +277,8 @@ receive_exit(Count) ->
t_listeners(_) -> t_listeners(_) ->
{ok, _} = request_api(get, api_path(["listeners"]), auth_header_()), {ok, _} = request_api(get, api_path(["listeners"]), auth_header_()),
{ok, _} = request_api(get, api_path(["nodes", atom_to_list(node()), "listeners"]), auth_header_()), {ok, _} = request_api(get,
api_path(["nodes", atom_to_list(node()), "listeners"]), auth_header_()),
meck:new(emqx_mgmt, [passthrough, no_history]), meck:new(emqx_mgmt, [passthrough, no_history]),
meck:expect(emqx_mgmt, list_listeners, 0, fun() -> [{node(), {error, undefined}}] end), meck:expect(emqx_mgmt, list_listeners, 0, fun() -> [{node(), {error, undefined}}] end),
{ok, Return} = request_api(get, api_path(["listeners"]), auth_header_()), {ok, Return} = request_api(get, api_path(["listeners"]), auth_header_()),
@ -267,10 +289,12 @@ t_listeners(_) ->
t_metrics(_) -> t_metrics(_) ->
{ok, _} = request_api(get, api_path(["metrics"]), auth_header_()), {ok, _} = request_api(get, api_path(["metrics"]), auth_header_()),
{ok, _} = request_api(get, api_path(["nodes", atom_to_list(node()), "metrics"]), auth_header_()), {ok, _} = request_api(get,
api_path(["nodes", atom_to_list(node()), "metrics"]), auth_header_()),
meck:new(emqx_mgmt, [passthrough, no_history]), meck:new(emqx_mgmt, [passthrough, no_history]),
meck:expect(emqx_mgmt, get_metrics, 1, fun(_) -> {error, undefined} end), meck:expect(emqx_mgmt, get_metrics, 1, fun(_) -> {error, undefined} end),
{ok, "{\"message\":\"undefined\"}"} = request_api(get, api_path(["nodes", atom_to_list(node()), "metrics"]), auth_header_()), {ok, "{\"message\":\"undefined\"}"} =
request_api(get, api_path(["nodes", atom_to_list(node()), "metrics"]), auth_header_()),
meck:unload(emqx_mgmt). meck:unload(emqx_mgmt).
t_nodes(_) -> t_nodes(_) ->
@ -347,7 +371,8 @@ t_acl_cache(_) ->
{ok, _} = emqtt:connect(C1), {ok, _} = emqtt:connect(C1),
{ok, _, _} = emqtt:subscribe(C1, Topic, 2), {ok, _, _} = emqtt:subscribe(C1, Topic, 2),
%% get acl cache, should not be empty %% get acl cache, should not be empty
{ok, Result} = request_api(get, api_path(["clients", binary_to_list(ClientId), "acl_cache"]), [], auth_header_()), {ok, Result} = request_api(get,
api_path(["clients", binary_to_list(ClientId), "acl_cache"]), [], auth_header_()),
#{<<"code">> := 0, <<"data">> := Caches} = jiffy:decode(list_to_binary(Result), [return_maps]), #{<<"code">> := 0, <<"data">> := Caches} = jiffy:decode(list_to_binary(Result), [return_maps]),
?assert(length(Caches) > 0), ?assert(length(Caches) > 0),
?assertMatch(#{<<"access">> := <<"subscribe">>, ?assertMatch(#{<<"access">> := <<"subscribe">>,
@ -355,11 +380,14 @@ t_acl_cache(_) ->
<<"result">> := <<"allow">>, <<"result">> := <<"allow">>,
<<"updated_time">> := _}, hd(Caches)), <<"updated_time">> := _}, hd(Caches)),
%% clear acl cache %% clear acl cache
{ok, Result2} = request_api(delete, api_path(["clients", binary_to_list(ClientId), "acl_cache"]), [], auth_header_()), {ok, Result2} = request_api(delete,
api_path(["clients", binary_to_list(ClientId), "acl_cache"]), [], auth_header_()),
?assertMatch(#{<<"code">> := 0}, jiffy:decode(list_to_binary(Result2), [return_maps])), ?assertMatch(#{<<"code">> := 0}, jiffy:decode(list_to_binary(Result2), [return_maps])),
%% get acl cache again, after the acl cache is cleared %% get acl cache again, after the acl cache is cleared
{ok, Result3} = request_api(get, api_path(["clients", binary_to_list(ClientId), "acl_cache"]), [], auth_header_()), {ok, Result3} = request_api(get,
#{<<"code">> := 0, <<"data">> := Caches3} = jiffy:decode(list_to_binary(Result3), [return_maps]), api_path(["clients", binary_to_list(ClientId), "acl_cache"]), [], auth_header_()),
#{<<"code">> := 0, <<"data">> := Caches3}
= jiffy:decode(list_to_binary(Result3), [return_maps]),
?assertEqual(0, length(Caches3)), ?assertEqual(0, length(Caches3)),
ok = emqtt:disconnect(C1). ok = emqtt:disconnect(C1).
@ -370,7 +398,7 @@ t_pubsub(_) ->
ClientId = <<"client1">>, ClientId = <<"client1">>,
Options = #{clientid => ClientId, Options = #{clientid => ClientId,
proto_ver => 5}, proto_ver => v5},
Topic = <<"mytopic">>, Topic = <<"mytopic">>,
{ok, C1} = emqtt:start_link(Options), {ok, C1} = emqtt:start_link(Options),
{ok, _} = emqtt:connect(C1), {ok, _} = emqtt:connect(C1),
@ -416,6 +444,20 @@ t_pubsub(_) ->
<<"payload">> => <<"hello">>}), <<"payload">> => <<"hello">>}),
?assertEqual(?ERROR8, get(<<"code">>, BadClient2)), ?assertEqual(?ERROR8, get(<<"code">>, BadClient2)),
{ok, BadParams} = request_api(post, api_path(["mqtt/publish"]), [], auth_header_(),
#{<<"clientid">> => 1,
<<"topics">> => <<"mytopic">>,
<<"qos">> => 1,
<<"payload">> => <<"hello">>,
<<"user_properties">> =>
#{<<"id">> => 10010,
<<"name">> => <<"emqx">>,
<<"foo">> => ["bad_properties1", "bad_properties2"],
<<"boolean">> => false
}
}),
?assertEqual(?ERROR8, get(<<"code">>, BadParams)),
{ok, BadClient3} = request_api(post, api_path(["mqtt/unsubscribe"]), [], auth_header_(), {ok, BadClient3} = request_api(post, api_path(["mqtt/unsubscribe"]), [], auth_header_(),
#{<<"clientid">> => 1, #{<<"clientid">> => 1,
<<"topic">> => <<"mytopic">>}), <<"topic">> => <<"mytopic">>}),
@ -481,12 +523,15 @@ t_pubsub(_) ->
Topic_list = [<<"mytopic1">>, <<"mytopic2">>], Topic_list = [<<"mytopic1">>, <<"mytopic2">>],
[ {ok, _, [2]} = emqtt:subscribe(C1, Topics, 2) || Topics <- Topic_list], [ {ok, _, [2]} = emqtt:subscribe(C1, Topics, 2) || Topics <- Topic_list],
Body1 = [ #{<<"clientid">> => ClientId, <<"topic">> => Topics, <<"qos">> => 2} || Topics <- Topic_list], Body1 = [ #{<<"clientid">> => ClientId,
<<"topic">> => Topics, <<"qos">> => 2} || Topics <- Topic_list],
{ok, Data1} = request_api(post, api_path(["mqtt/subscribe_batch"]), [], auth_header_(), Body1), {ok, Data1} = request_api(post, api_path(["mqtt/subscribe_batch"]), [], auth_header_(), Body1),
loop(maps:get(<<"data">>, jiffy:decode(list_to_binary(Data1), [return_maps]))), loop(maps:get(<<"data">>, jiffy:decode(list_to_binary(Data1), [return_maps]))),
%% tests publish_batch %% tests publish_batch
Body2 = [ #{<<"clientid">> => ClientId, <<"topic">> => Topics, <<"qos">> => 2, <<"retain">> => <<"false">>, <<"payload">> => #{body => "hello world"}} || Topics <- Topic_list ], Body2 = [ #{<<"clientid">> => ClientId, <<"topic">> => Topics, <<"qos">> => 2,
<<"retain">> => <<"false">>, <<"payload">> => #{body => "hello world"}}
|| Topics <- Topic_list ],
{ok, Data2} = request_api(post, api_path(["mqtt/publish_batch"]), [], auth_header_(), Body2), {ok, Data2} = request_api(post, api_path(["mqtt/publish_batch"]), [], auth_header_(), Body2),
loop(maps:get(<<"data">>, jiffy:decode(list_to_binary(Data2), [return_maps]))), loop(maps:get(<<"data">>, jiffy:decode(list_to_binary(Data2), [return_maps]))),
[ ?assert(receive [ ?assert(receive
@ -498,14 +543,57 @@ t_pubsub(_) ->
%% tests unsubscribe_batch %% tests unsubscribe_batch
Body3 = [#{<<"clientid">> => ClientId, <<"topic">> => Topics} || Topics <- Topic_list], Body3 = [#{<<"clientid">> => ClientId, <<"topic">> => Topics} || Topics <- Topic_list],
{ok, Data3} = request_api(post, api_path(["mqtt/unsubscribe_batch"]), [], auth_header_(), Body3), {ok, Data3} = request_api(post,
api_path(["mqtt/unsubscribe_batch"]), [], auth_header_(), Body3),
loop(maps:get(<<"data">>, jiffy:decode(list_to_binary(Data3), [return_maps]))), loop(maps:get(<<"data">>, jiffy:decode(list_to_binary(Data3), [return_maps]))),
{ok, _, [1]} = emqtt:subscribe(C1, <<"mytopic">>, qos1),
timer:sleep(50),
%% user properties
{ok, Code} = request_api(post, api_path(["mqtt/publish"]), [], auth_header_(),
#{<<"clientid">> => ClientId,
<<"topic">> => <<"mytopic">>,
<<"qos">> => 1,
<<"payload">> => <<"hello world">>,
<<"user_properties">> => #{<<"porp_1">> => <<"porp_1">>}}),
?assert(receive
{publish, #{payload := <<"hello world">>,
properties := #{'User-Property' := [{<<"porp_1">>,<<"porp_1">>}]}}} ->
true
after 100 ->
false
end),
%% properties
{ok, Code} = request_api(post, api_path(["mqtt/publish"]), [], auth_header_(),
#{<<"clientid">> => ClientId,
<<"topic">> => <<"mytopic">>,
<<"qos">> => 1,
<<"payload">> => <<"hello properties">>,
<<"user_properties">> => #{<<"prop_key1">> => <<"prop_val1">>},
<<"properties">> => #{
<<"message_expiry_interval">> => 1000,
<<"user_properties">> => #{<<"prop_key2">> => <<"prop_val2">>}}
}),
Msg = receive
{publish, MsgTmp} ->
MsgTmp
after 150 ->
false
end,
?assertMatch(#{payload := <<"hello properties">>,
qos := 1,
properties := #{
'Message-Expiry-Interval' := 1000,
'User-Property' := [{<<"prop_key1">>,<<"prop_val1">>},
{<<"prop_key2">>,<<"prop_val2">>}]}}, Msg),
ok = emqtt:disconnect(C1), ok = emqtt:disconnect(C1),
?assertEqual(3, emqx_metrics:val('messages.qos1.received') - Qos1Received), ?assertEqual(5, emqx_metrics:val('messages.qos1.received') - Qos1Received),
?assertEqual(2, emqx_metrics:val('messages.qos2.received') - Qos2Received), ?assertEqual(2, emqx_metrics:val('messages.qos2.received') - Qos2Received),
?assertEqual(5, emqx_metrics:val('messages.received') - Received). ?assertEqual(7, emqx_metrics:val('messages.received') - Received).
loop([]) -> []; loop([]) -> [];
@ -522,7 +610,8 @@ t_routes_and_subscriptions(_) ->
?assertEqual([], get(<<"data">>, NonRoute)), ?assertEqual([], get(<<"data">>, NonRoute)),
{ok, NonSubscription} = request_api(get, api_path(["subscriptions"]), auth_header_()), {ok, NonSubscription} = request_api(get, api_path(["subscriptions"]), auth_header_()),
?assertEqual([], get(<<"data">>, NonSubscription)), ?assertEqual([], get(<<"data">>, NonSubscription)),
{ok, NonSubscription1} = request_api(get, api_path(["nodes", atom_to_list(node()), "subscriptions"]), auth_header_()), {ok, NonSubscription1} = request_api(get,
api_path(["nodes", atom_to_list(node()), "subscriptions"]), auth_header_()),
?assertEqual([], get(<<"data">>, NonSubscription1)), ?assertEqual([], get(<<"data">>, NonSubscription1)),
{ok, NonSubscription2} = request_api(get, {ok, NonSubscription2} = request_api(get,
api_path(["subscriptions", binary_to_list(ClientId)]), api_path(["subscriptions", binary_to_list(ClientId)]),
@ -553,11 +642,14 @@ t_routes_and_subscriptions(_) ->
?assertMatch(#{<<"page">> := 1, <<"limit">> := 10000, <<"hasnext">> := false, <<"count">> := 1}, ?assertMatch(#{<<"page">> := 1, <<"limit">> := 10000, <<"hasnext">> := false, <<"count">> := 1},
get(<<"meta">>, Result3)), get(<<"meta">>, Result3)),
{ok, Result3} = request_api(get, api_path(["nodes", atom_to_list(node()), "subscriptions"]), auth_header_()), {ok, Result3} = request_api(get,
api_path(["nodes", atom_to_list(node()), "subscriptions"]), auth_header_()),
{ok, Result4} = request_api(get, api_path(["subscriptions", binary_to_list(ClientId)]), auth_header_()), {ok, Result4} = request_api(get,
api_path(["subscriptions", binary_to_list(ClientId)]), auth_header_()),
[Subscription] = get(<<"data">>, Result4), [Subscription] = get(<<"data">>, Result4),
{ok, Result4} = request_api(get, api_path(["nodes", atom_to_list(node()), "subscriptions", binary_to_list(ClientId)]) {ok, Result4} = request_api(get,
api_path(["nodes", atom_to_list(node()), "subscriptions", binary_to_list(ClientId)])
, auth_header_()), , auth_header_()),
ok = emqtt:disconnect(C1). ok = emqtt:disconnect(C1).
@ -622,7 +714,8 @@ t_stats(_) ->
{ok, _} = request_api(get, api_path(["nodes", atom_to_list(node()), "stats"]), auth_header_()), {ok, _} = request_api(get, api_path(["nodes", atom_to_list(node()), "stats"]), auth_header_()),
meck:new(emqx_mgmt, [passthrough, no_history]), meck:new(emqx_mgmt, [passthrough, no_history]),
meck:expect(emqx_mgmt, get_stats, 1, fun(_) -> {error, undefined} end), meck:expect(emqx_mgmt, get_stats, 1, fun(_) -> {error, undefined} end),
{ok, Return} = request_api(get, api_path(["nodes", atom_to_list(node()), "stats"]), auth_header_()), {ok, Return} = request_api(get,
api_path(["nodes", atom_to_list(node()), "stats"]), auth_header_()),
?assertEqual(<<"undefined">>, get(<<"message">>, Return)), ?assertEqual(<<"undefined">>, get(<<"message">>, Return)),
meck:unload(emqx_mgmt). meck:unload(emqx_mgmt).
@ -631,13 +724,21 @@ t_data(_) ->
ok = emqx_dashboard_admin:mnesia(boot), ok = emqx_dashboard_admin:mnesia(boot),
application:ensure_all_started(emqx_rule_engine), application:ensure_all_started(emqx_rule_engine),
application:ensure_all_started(emqx_dashboard), application:ensure_all_started(emqx_dashboard),
emqx_mgmt_data_backup:delete_all_backup_file(),
{ok, Data} = request_api(post, api_path(["data","export"]), [], auth_header_(), [#{}]), {ok, Data} = request_api(post, api_path(["data","export"]), [], auth_header_(), [#{}]),
#{<<"filename">> := Filename, <<"node">> := Node} = emqx_ct_http:get_http_data(Data), #{<<"filename">> := Filename, <<"node">> := Node} = emqx_ct_http:get_http_data(Data),
{ok, DataList} = request_api(get, api_path(["data","export"]), auth_header_()), {ok, DataList} = request_api(get, api_path(["data","export"]), auth_header_()),
?assertEqual(true, lists:member(emqx_ct_http:get_http_data(Data), emqx_ct_http:get_http_data(DataList))), ?assertEqual(true,
lists:member(emqx_ct_http:get_http_data(Data), emqx_ct_http:get_http_data(DataList))),
?assertMatch({ok, _}, request_api(post, api_path(["data","import"]), [], auth_header_(), #{<<"filename">> => Filename, <<"node">> => Node})), ?assertMatch({ok, _}, request_api(post,
?assertMatch({ok, _}, request_api(post, api_path(["data","import"]), [], auth_header_(), #{<<"filename">> => Filename})), api_path(["data","import"]), [], auth_header_(),
#{<<"filename">> => Filename, <<"node">> => Node})),
?assertMatch({ok, _},
request_api(post, api_path(["data","import"]), [], auth_header_(),
#{<<"filename">> => Filename})),
_ = emqx_mgmt_data_backup:delete_backup_file(Filename),
emqx_mgmt_data_backup:delete_all_backup_file(),
application:stop(emqx_rule_engine), application:stop(emqx_rule_engine),
application:stop(emqx_dashboard), application:stop(emqx_dashboard),
ok. ok.
@ -647,15 +748,45 @@ t_data_import_content(_) ->
ok = emqx_dashboard_admin:mnesia(boot), ok = emqx_dashboard_admin:mnesia(boot),
application:ensure_all_started(emqx_rule_engine), application:ensure_all_started(emqx_rule_engine),
application:ensure_all_started(emqx_dashboard), application:ensure_all_started(emqx_dashboard),
emqx_mgmt_data_backup:delete_all_backup_file(),
{ok, Data} = request_api(post, api_path(["data","export"]), [], auth_header_(), [#{}]), {ok, Data} = request_api(post, api_path(["data","export"]), [], auth_header_(), [#{}]),
#{<<"filename">> := Filename} = emqx_ct_http:get_http_data(Data), #{<<"filename">> := Filename} = emqx_ct_http:get_http_data(Data),
Dir = emqx:get_env(data_dir), Dir = emqx_mgmt_data_backup:backup_dir(),
{ok, Bin} = file:read_file(filename:join(Dir, Filename)), {ok, Bin} = file:read_file(filename:join(Dir, Filename)),
Content = emqx_json:decode(Bin), Content = emqx_json:decode(Bin),
?assertMatch({ok, "{\"code\":0}"}, request_api(post, api_path(["data","import"]), [], auth_header_(), Content)), ct:pal("Content:::: ~p~n", [Content]),
?assertMatch({ok, "{\"code\":0}"},
request_api(post, api_path(["data","import"]), [], auth_header_(), Content)),
emqx_mgmt_data_backup:delete_all_backup_file(),
application:stop(emqx_rule_engine), application:stop(emqx_rule_engine),
application:stop(emqx_dashboard). application:stop(emqx_dashboard).
%% Test the PUT /clients/:clientid/keepalive API:
%% - 404-style payload (code 112) when the client is not connected,
%% - code 0 and an updated keepalive in the channel conninfo on success,
%% - code 102 validation error for out-of-range intervals (-1, 65536).
t_keepalive(_Config) ->
    application:ensure_all_started(emqx_dashboard),
    Username = "user_keepalive",
    ClientId = "client_keepalive",
    AuthHeader = auth_header_(),
    Path = api_path(["clients", ClientId, "keepalive"]),
    %% no such client connected yet -> not_found
    {ok, NotFound} = request_api(put, Path, "interval=5", AuthHeader, [#{}]),
    ?assertEqual("{\"message\":\"not_found\",\"code\":112}", NotFound),
    {ok, C1} = emqtt:start_link(#{username => Username, clientid => ClientId}),
    {ok, _} = emqtt:connect(C1),
    {ok, Ok} = request_api(put, Path, "interval=5", AuthHeader, [#{}]),
    ?assertEqual("{\"code\":0}", Ok),
    %% verify the new keepalive is reflected in the live channel's conninfo
    [Pid] = emqx_cm:lookup_channels(list_to_binary(ClientId)),
    #{conninfo := #{keepalive := Keepalive}} = emqx_connection:info(Pid),
    ?assertEqual(5, Keepalive),
    %% both out-of-range values must yield the same validation error
    {ok, Error1} = request_api(put, Path, "interval=-1", AuthHeader, [#{}]),
    {ok, Error2} = request_api(put, Path, "interval=65536", AuthHeader, [#{}]),
    ErrMsg = #{<<"code">> => 102,
               <<"message">> => <<"mqtt3.1.1 specification: keepalive must between 0~65535">>},
    ?assertEqual(ErrMsg, jiffy:decode(Error1, [return_maps])),
    ?assertEqual(Error1, Error2),
    emqtt:disconnect(C1),
    application:stop(emqx_dashboard),
    ok.
filter(List, Key, Value) -> filter(List, Key, Value) ->
lists:filter(fun(Item) -> lists:filter(fun(Item) ->
maps:get(Key, Item) == Value maps:get(Key, Item) == Value

View File

@ -0,0 +1,71 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_mongo_auth_module_migration_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("eunit/include/eunit.hrl").
-ifdef(EMQX_ENTERPRISE).
-include_lib("emqx_modules/include/emqx_modules.hrl").
-endif.
%% Common Test entry point: run every t_* case in this module.
all() ->
    emqx_ct:all(?MODULE).
-ifdef(EMQX_ENTERPRISE).
%% Load the module-spec app and start management/modules apps for the suite.
init_per_suite(Config) ->
    application:load(emqx_modules_spec),
    emqx_ct_helpers:start_apps([emqx_management, emqx_modules]),
    Config.
%% Stop the apps started in init_per_suite (reverse order) and unload the spec app.
end_per_suite(_Config) ->
    emqx_ct_helpers:stop_apps([emqx_modules, emqx_management]),
    application:unload(emqx_modules_spec),
    ok.
%% Importing a 4.2 enterprise backup must register a mongo_authentication
%% module whose migrated config contains the <<"srv_record">> key.
t_import_4_2(Config) ->
    ?assertMatch(ok, import("e4.2.8.json", Config)),
    %% import is applied asynchronously; give the registry time to settle
    timer:sleep(100),
    MongoAuthNModule = emqx_modules_registry:find_module_by_type(mongo_authentication),
    ?assertNotEqual(not_found, MongoAuthNModule),
    ?assertMatch(#module{config = #{<<"srv_record">> := _}}, MongoAuthNModule),
    delete_modules().
%% Same as t_import_4_2 but for a 4.3 enterprise backup file.
t_import_4_3(Config) ->
    ?assertMatch(ok, import("e4.3.5.json", Config)),
    %% import is applied asynchronously; give the registry time to settle
    timer:sleep(100),
    MongoAuthNModule = emqx_modules_registry:find_module_by_type(mongo_authentication),
    ?assertNotEqual(not_found, MongoAuthNModule),
    ?assertMatch(#module{config = #{<<"srv_record">> := _}}, MongoAuthNModule),
    delete_modules().
%% Copy a fixture backup file from the CT data_dir into the node's data_dir,
%% then run the management import on it with empty ("{}") overrides.
import(File, Config) ->
    Filename = filename:join(proplists:get_value(data_dir, Config), File),
    {ok, Content} = file:read_file(Filename),
    %% emqx_mgmt_data_backup:import/2 resolves File relative to the data dir,
    %% so the fixture must be staged there first
    BackupFile = filename:join(emqx:get_env(data_dir), File),
    ok = file:write_file(BackupFile, Content),
    emqx_mgmt_data_backup:import(File, "{}").
%% Remove every registered module so cases do not leak state into each other.
delete_modules() ->
    [emqx_modules_registry:remove_module(Mod) || Mod <- emqx_modules_registry:get_modules()].
-endif.

View File

@ -0,0 +1 @@
{"version":"4.2","date":"2021-11-15 01:52:40","modules":[{"id":"module:79002e0f","type":"retainer","config":{"storage_type":"ram","max_retained_messages":0,"max_payload_size":"1MB","expiry_interval":0},"enabled":true,"created_at":1636941076704,"description":""},{"id":"module:34834081","type":"presence","config":{"qos":0},"enabled":true,"created_at":1636941076704,"description":""},{"id":"module:f6eb69d1","type":"recon","config":{},"enabled":true,"created_at":1636941076704,"description":""},{"id":"module:7ae737b2","type":"mongo_authentication","config":{"w_mode":"undef","verify":false,"type":"single","super_query_selector":"","super_query_field":"","super_query_collection":"","ssl":false,"server":"127.0.0.1:27017","r_mode":"undef","pool_size":8,"password":"public","login":"admin","keyfile":{"filename":"","file":""},"database":"mqtt","certfile":{"filename":"","file":""},"cacertfile":{"filename":"","file":""},"auth_source":"admin","auth_query_selector":"username=%u","auth_query_password_hash":"sha256","auth_query_password_field":"password","auth_query_collection":"mqtt_user","acl_query_selectors":[],"acl_query_collection":"mqtt_acl"},"enabled":false,"created_at":1636941148794,"description":""},{"id":"module:e8c63201","type":"internal_acl","config":{"acl_rule_file":"etc/acl.conf"},"enabled":true,"created_at":1636941076704,"description":""}],"rules":[],"resources":[],"blacklist":[],"apps":[{"id":"admin","secret":"public","name":"Default","desc":"Application user","status":true,"expired":"undefined"}],"users":[{"username":"admin","password":"qP5m2iS9qnn51gHoGLbaiMo/GwE=","tags":"administrator"}],"auth_mnesia":[],"acl_mnesia":[],"schemas":[],"configs":[],"listeners_state":[]}

View File

@ -0,0 +1 @@
{"version":"4.3","rules":[],"resources":[],"blacklist":[],"apps":[{"id":"admin","secret":"public","name":"Default","desc":"Application user","status":true,"expired":"undefined"}],"users":[{"username":"admin","password":"/mWV4UgV0xmVUZX4qdIXQvxXZB0=","tags":"administrator"}],"auth_mnesia":[],"acl_mnesia":[],"modules":[{"id":"module:5881add2","type":"mongo_authentication","config":{"w_mode":"undef","verify":false,"type":"single","super_query_selector":"","super_query_field":"","super_query_collection":"","ssl":false,"server":"127.0.0.1:27017","r_mode":"undef","pool_size":8,"password":"public","login":"admin","keyfile":{"filename":"","file":""},"database":"mqtt","certfile":{"filename":"","file":""},"cacertfile":{"filename":"","file":""},"auth_source":"admin","auth_query_selector":"username=%u","auth_query_password_hash":"sha256","auth_query_password_field":"password","auth_query_collection":"mqtt_user","acl_query_selectors":[],"acl_query_collection":"mqtt_acl"},"enabled":false,"created_at":1636942609573,"description":""},{"id":"module:2adb6480","type":"presence","config":{"qos":0},"enabled":true,"created_at":1636942586725,"description":""},{"id":"module:24fabe8a","type":"internal_acl","config":{"acl_rule_file":"etc/acl.conf"},"enabled":true,"created_at":1636942586725,"description":""},{"id":"module:22c70ab8","type":"recon","config":{},"enabled":true,"created_at":1636942586725,"description":""},{"id":"module:a59f9a4a","type":"retainer","config":{"storage_type":"ram","max_retained_messages":0,"max_payload_size":"1MB","expiry_interval":0},"enabled":true,"created_at":1636942586725,"description":""}],"schemas":[],"configs":[],"listeners_state":[],"date":"2021-11-15 10:16:56"}

View File

@ -46,8 +46,11 @@ remove_resource(Id) ->
emqx_rule_registry:remove_resource(Id), emqx_rule_registry:remove_resource(Id),
emqx_rule_registry:remove_resource_params(Id). emqx_rule_registry:remove_resource_params(Id).
import(FilePath, Version) -> import(FilePath0, Version) ->
ok = emqx_mgmt_data_backup:import(get_data_path() ++ "/" ++ FilePath, <<"{}">>), Filename = filename:basename(FilePath0),
FilePath = filename:join([get_data_path(), FilePath0]),
{ok, Bin} = file:read_file(FilePath),
ok = emqx_mgmt_data_backup:upload_backup_file(Filename, Bin),
lists:foreach(fun(#resource{id = Id, config = Config} = _Resource) -> lists:foreach(fun(#resource{id = Id, config = Config} = _Resource) ->
case Id of case Id of
<<"webhook">> -> <<"webhook">> ->

View File

@ -0,0 +1,38 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
%% ETS table holding the current slow-subscription top-k records.
-define(TOPK_TAB, emqx_slow_subs_topk).
%% Secondary ETS index used to find a record's latest timespan by Id.
-define(INDEX_TAB, emqx_slow_subs_index).
%% A record identity: one (client, topic) subscription.
-define(ID(ClientId, Topic), {ClientId, Topic}).
%% Index-table key: Id first so lookups by Id are ordered scans.
-define(INDEX(TimeSpan, Id), {Id, TimeSpan}).
%% Top-k table key: TimeSpan first so ets:first/1 yields the smallest timespan.
-define(TOPK_INDEX(TimeSpan, Id), {TimeSpan, Id}).
%% Upper bound on how many records the API layer will return.
-define(MAX_SIZE, 1000).
-record(top_k, { index :: topk_index()
                 %% wall-clock ms of the last update; used for expiry
               , last_update_time :: pos_integer()
                 %% reserved for future use
               , extra = []
               }).
-record(index_tab, { index :: index()}).
-type top_k() :: #top_k{}.
-type index_tab() :: #index_tab{}.
-type id() :: {emqx_types:clientid(), emqx_types:topic()}.
-type index() :: ?INDEX(non_neg_integer(), id()).
-type topk_index() :: ?TOPK_INDEX(non_neg_integer(), id()).

View File

@ -1,6 +1,6 @@
{application, emqx_plugin_libs, {application, emqx_plugin_libs,
[{description, "EMQ X Plugin utility libs"}, [{description, "EMQ X Plugin utility libs"},
{vsn, "4.3.2"}, {vsn, "4.4.5"},
{modules, []}, {modules, []},
{applications, [kernel,stdlib]}, {applications, [kernel,stdlib]},
{env, []} {env, []}

View File

@ -1,16 +1,47 @@
%% -*-: erlang -*- %% -*- mode: erlang -*-
%% Unless you know what you are doing, DO NOT edit manually!!
{VSN, {VSN,
[ [
{<<"4\\.3\\.[0-1]">>, [ {"4.4.4",
{load_module, emqx_plugin_libs_ssl, brutal_purge, soft_purge, []} [{load_module,emqx_trace,brutal_purge,soft_purge,[]},
]}, {load_module,emqx_trace_api,brutal_purge,soft_purge,[]}]},
{<<".*">>, []} {"4.4.3",
], [{load_module,emqx_trace,brutal_purge,soft_purge,[]},
{load_module,emqx_trace_api,brutal_purge,soft_purge,[]}]},
{"4.4.2",[
{load_module,emqx_plugin_libs_ssl,brutal_purge,soft_purge,[]},
{load_module,emqx_trace,brutal_purge,soft_purge,[]},
{load_module,emqx_trace_api,brutal_purge,soft_purge,[]}]},
{"4.4.1",
[{load_module,emqx_plugin_libs_ssl,brutal_purge,soft_purge,[]},
{load_module,emqx_trace,brutal_purge,soft_purge,[]},
{load_module,emqx_trace_api,brutal_purge,soft_purge,[]}]},
{"4.4.0",
[{load_module,emqx_plugin_libs_ssl,brutal_purge,soft_purge,[]},
{load_module,emqx_trace,brutal_purge,soft_purge,[]},
{load_module,emqx_trace_api,brutal_purge,soft_purge,[]},
{update,emqx_slow_subs,{advanced,["4.4.0"]}},
{load_module,emqx_slow_subs_api,brutal_purge,soft_purge,[]}]},
{<<".*">>,[]}],
[ [
{<<"4\\.3\\.[0-1]">>, [ {"4.4.4",
{load_module, emqx_plugin_libs_ssl, brutal_purge, soft_purge, []} [{load_module,emqx_trace,brutal_purge,soft_purge,[]},
]}, {load_module,emqx_trace_api,brutal_purge,soft_purge,[]}]},
{<<".*">>, []} {"4.4.3",
] [{load_module,emqx_trace,brutal_purge,soft_purge,[]},
}. {load_module,emqx_trace_api,brutal_purge,soft_purge,[]}]},
{"4.4.2",
[{load_module,emqx_plugin_libs_ssl,brutal_purge,soft_purge,[]},
{load_module,emqx_trace,brutal_purge,soft_purge,[]},
{load_module,emqx_trace_api,brutal_purge,soft_purge,[]}]},
{"4.4.1",
[{load_module,emqx_plugin_libs_ssl,brutal_purge,soft_purge,[]},
{load_module,emqx_trace,brutal_purge,soft_purge,[]},
{load_module,emqx_trace_api,brutal_purge,soft_purge,[]}]},
{"4.4.0",
[{load_module,emqx_plugin_libs_ssl,brutal_purge,soft_purge,[]},
{load_module,emqx_trace,brutal_purge,soft_purge,[]},
{load_module,emqx_trace_api,brutal_purge,soft_purge,[]},
{update,emqx_slow_subs,{advanced,["4.4.0"]}},
{load_module,emqx_slow_subs_api,brutal_purge,soft_purge,[]}]},
{<<".*">>,[]}]}.

View File

@ -0,0 +1,337 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_slow_subs).
-behaviour(gen_server).
-include_lib("include/emqx.hrl").
-include_lib("include/logger.hrl").
-include_lib("emqx_plugin_libs/include/emqx_slow_subs.hrl").
-logger_header("[SLOW Subs]").
-export([ start_link/1, on_delivery_completed/4, enable/0
, disable/0, clear_history/0, init_tab/0
]).
%% gen_server callbacks
-export([ init/1
, handle_call/3
, handle_cast/2
, handle_info/2
, terminate/2
, code_change/3
]).
-compile(nowarn_unused_type).
-type state() :: #{ config := proplist:proplist()
, enable := boolean()
, last_tick_at := pos_integer()
}.
-type message() :: #message{}.
-import(proplists, [get_value/2, get_value/3]).
-type stats_type() :: whole %% whole = internal + response
| internal %% timespan from message in to deliver
| response. %% timespan from delivery to client response
-type stats_update_args() :: #{session_birth_time := pos_integer()}.
-type stats_update_env() :: #{ threshold := non_neg_integer()
, stats_type := stats_type()
, max_size := pos_integer()}.
-ifdef(TEST).
-define(EXPIRE_CHECK_INTERVAL, timer:seconds(1)).
-else.
-define(EXPIRE_CHECK_INTERVAL, timer:seconds(10)).
-endif.
-define(NOW, erlang:system_time(millisecond)).
-define(DEF_CALL_TIMEOUT, timer:seconds(10)).
%% erlang term order
%% number < atom < reference < fun < port < pid < tuple < list < bit string
%% ets ordered_set is ascending by term order
%%--------------------------------------------------------------------
%%--------------------------------------------------------------------
%% APIs
%%--------------------------------------------------------------------
%% @doc Start the st_statistics
%% @doc Start the slow-subs statistics server, registered locally as ?MODULE.
%% Env is the plugin's configuration proplist, handed to init/1.
-spec(start_link(Env :: list()) -> emqx_types:startlink_ret()).
start_link(Env) ->
    gen_server:start_link({local, ?MODULE}, ?MODULE, [Env], []).
%% 'delivery.completed' hook callback.
%% Messages timestamped before the session was born are ignored — they were
%% queued before this session existed and would skew the statistics.
on_delivery_completed(_ClientInfo, #message{timestamp = Ts}, #{session_birth_time := BirthTime}, _Cfg)
  when Ts =< BirthTime ->
    ok;
on_delivery_completed(ClientInfo, Msg, Env, Cfg) ->
    on_delivery_completed(ClientInfo, Msg, Env, erlang:system_time(millisecond), Cfg).

%% Record a latency sample for (ClientId, Topic) when it exceeds Threshold
%% AND beats that Id's previously recorded timespan; otherwise do nothing.
on_delivery_completed(#{clientid := ClientId},
                      #message{topic = Topic} = Msg,
                      _Env,
                      Now,
                      #{threshold := Threshold,
                        stats_type := StatsType,
                        max_size := MaxSize}) ->
    TimeSpan = calc_timespan(StatsType, Msg, Now),
    case TimeSpan =< Threshold of
        true -> ok;
        _ ->
            Id = ?ID(ClientId, Topic),
            %% 0 when the Id has no record yet (see find_last_update_value/1)
            LastUpdateValue = find_last_update_value(Id),
            case TimeSpan =< LastUpdateValue of
                true -> ok;
                _ ->
                    try_insert_to_topk(MaxSize, Now, LastUpdateValue, TimeSpan, Id)
            end
    end.
%% @doc Synchronously drop all collected top-k records on this node.
clear_history() ->
    gen_server:call(?MODULE, ?FUNCTION_NAME, ?DEF_CALL_TIMEOUT).
%% @doc Enable collection: installs the 'delivery.completed' hook (no-op if already enabled).
enable() ->
    gen_server:call(?MODULE, {enable, true}, ?DEF_CALL_TIMEOUT).
%% @doc Disable collection: removes the hook and clears history (no-op if already disabled).
disable() ->
    gen_server:call(?MODULE, {enable, false}, ?DEF_CALL_TIMEOUT).
%% Create the two public named ETS tables if they do not already exist.
%% Both are ordered_set: the top-k table is ordered by {TimeSpan, Id} so the
%% smallest timespan is ets:first/1; the index table by {Id, TimeSpan} so an
%% Id's current timespan can be found with ets:next/2.
init_tab() ->
    safe_create_tab(?TOPK_TAB, [ ordered_set, public, named_table
                               , {keypos, #top_k.index}, {write_concurrency, true}
                               , {read_concurrency, true}
                               ]),
    safe_create_tab(?INDEX_TAB, [ ordered_set, public, named_table
                                , {keypos, #index_tab.index}, {write_concurrency, true}
                                , {read_concurrency, true}
                                ]).
%%--------------------------------------------------------------------
%% gen_server callbacks
%%--------------------------------------------------------------------
%% gen_server init: trap exits so terminate/2 runs and can unhook,
%% arm the first expiry tick, install the hook, start enabled.
init([Conf]) ->
    erlang:process_flag(trap_exit, true),
    expire_tick(Conf),
    load(Conf),
    {ok, #{config => Conf,
           last_tick_at => ?NOW,
           enable => true}}.
%% {enable, Bool}: toggle collection. The first case clause matches when the
%% requested state equals the current one, making the call idempotent.
handle_call({enable, Enable}, _From,
            #{config := Cfg, enable := IsEnable} = State) ->
    State2 = case Enable of
                 IsEnable ->
                     State;
                 true ->
                     load(Cfg),
                     State#{enable := true};
                 _ ->
                     unload(),
                     State#{enable := false}
             end,
    {reply, ok, State2};
%% clear_history: wipe both ETS tables.
handle_call(clear_history, _, State) ->
    do_clear_history(),
    {reply, ok, State};
%% Catch-all: log and ignore unknown calls rather than crash the server.
handle_call(Req, _From, State) ->
    ?LOG(error, "Unexpected call: ~p", [Req]),
    {reply, ignored, State}.
%% No casts are expected; log and ignore.
handle_cast(Msg, State) ->
    ?LOG(error, "Unexpected cast: ~p", [Msg]),
    {noreply, State}.
%% Periodic expiry tick: re-arm the timer first, then delete every top-k
%% entry whose last_update_time is older than the configured interval.
handle_info(expire_tick, #{config := Cfg} = State) ->
    expire_tick(Cfg),
    Logs = ets:tab2list(?TOPK_TAB),
    do_clear(Cfg, Logs),
    {noreply, State};
handle_info(Info, State) ->
    ?LOG(error, "Unexpected info: ~p", [Info]),
    {noreply, State}.
%% On shutdown remove the hook and clear collected data (requires trap_exit,
%% which init/1 sets).
terminate(_Reason, _) ->
    unload(),
    ok.
%% Hot-upgrade/downgrade handling for release 4.4.0.
%% Downgrade to 4.4.0: remove the new-style hook, re-install the old
%% 'message.slow_subs_stats' hook and restart the old code's timer.
code_change({down, _Vsn}, #{config := Cfg} = State, ["4.4.0"]) ->
    unload(),
    MaxSize = get_value(top_k_num, Cfg),
    _ = emqx:hook('message.slow_subs_stats',
                  {?MODULE, on_stats_update, [#{max_size => MaxSize}]}),
    %% NOTE(review): ?FUNCTION_NAME expands to 'code_change' here, so the
    %% message sent after the interval is 'code_change', not 'expire_tick'.
    %% Presumably the post-downgrade (4.4.0) code handles it — verify.
    erlang:send_after(?EXPIRE_CHECK_INTERVAL, self(), ?FUNCTION_NAME),
    {ok, State};
%% Upgrade from 4.4.0: tear down the old hook and data, create the new
%% tables, hand the index table to the supervisor, then enable normally.
code_change(_OldVsn, #{config := Conf} = State, ["4.4.0"]) ->
    %% clear old data
    HookPoint = 'message.slow_subs_stats',
    Callbacks = emqx_hooks:lookup(HookPoint),
    _ = [emqx_hooks:del(HookPoint, Action) ||
            {callback, Action, _Filter, _Priority} <- Callbacks],
    %% the old table may not exist; ignore any failure
    try
        ets:delete_all_objects(?TOPK_TAB)
    catch _:_ ->
        ok
    end,
    %% add new table
    init_tab(),
    %% give ?INDEX_TAB to the supervisor so it survives this process
    [_Sup, SupPid] = erlang:get('$ancestors'),
    ets:give_away(?INDEX_TAB, SupPid, undefined),
    %% enable
    expire_tick(Conf),
    load(Conf),
    {ok, State};
%% Any other version transition: state is unchanged.
code_change(_OldVsn, State, _Extras) ->
    {ok, State}.
%%--------------------------------------------------------------------
%% Internal functions
%%--------------------------------------------------------------------
%% Arm the next expiry tick; the message sent is the atom 'expire_tick'
%% (?FUNCTION_NAME), handled in handle_info/2.
expire_tick(_) ->
    erlang:send_after(?EXPIRE_CHECK_INTERVAL, self(), ?FUNCTION_NAME).
%% Install the 'delivery.completed' hook with the collection parameters
%% read from config (stats_type defaults to 'whole').
load(Cfg) ->
    MaxSize = get_value(top_k_num, Cfg),
    StatsType = get_value(stats_type, Cfg, whole),
    Threshold = get_value(threshold, Cfg),
    _ = emqx:hook('delivery.completed',
                  fun ?MODULE:on_delivery_completed/4,
                  [#{max_size => MaxSize,
                     stats_type => StatsType,
                     threshold => Threshold
                    }]),
    ok.
%% Remove the hook and wipe all collected data.
unload() ->
    emqx:unhook('delivery.completed', fun ?MODULE:on_delivery_completed/4),
    do_clear_history().
%% Expire stale records: delete every entry in Logs whose last update is at
%% least expire_interval milliseconds in the past (record and its index).
do_clear(Cfg, Logs) ->
    Now = ?NOW,
    Interval = get_value(expire_interval, Cfg),
    Each = fun(#top_k{index = ?TOPK_INDEX(TimeSpan, Id), last_update_time = Ts}) ->
                   case Now - Ts >= Interval of
                       true ->
                           delete_with_index(TimeSpan, Id);
                       _ ->
                           true
                   end
           end,
    lists:foreach(Each, Logs).
-spec calc_timespan(stats_type(), emqx_types:message(), non_neg_integer()) -> non_neg_integer().
%% Compute one latency sample, in milliseconds, for a delivered message:
%%   whole    - from the message timestamp until Now
%%   internal - from the message timestamp until delivery began
%%   response - from the start of delivery until Now
%% The deliver_begin_at header falls back to Now when it is absent.
calc_timespan(whole, #message{timestamp = Arrived}, Now) ->
    Now - Arrived;
calc_timespan(internal, #message{timestamp = Arrived} = Msg, Now) ->
    DeliverBegin = emqx_message:get_header(deliver_begin_at, Msg, Now),
    DeliverBegin - Arrived;
calc_timespan(response, Msg, Now) ->
    DeliverBegin = emqx_message:get_header(deliver_begin_at, Msg, Now),
    Now - DeliverBegin.
%% update_topk is safe, because each process has a unique clientid
%% insert or delete are bind to this clientid, so there is no race condition
%%
%% but, the delete_with_index call below may have a race condition,
%% because data belonging to another clientid can be deleted here
%% (i.e. data written by another process is deleted by this one).
%% So it may happen that while one process is deleting a record, another
%% process is performing an update operation on that same record.
%% To solve this race condition, the index table also uses the ordered_set
%% type, so that even if the above situation occurs, it will only cause the
%% old data to be deleted twice, and the correctness of the data is not affected.
%% Insert/refresh a sample while keeping at most MaxSize records:
%% - table not full (or empty): just insert;
%% - the table minimum belongs to this Id: update in place;
%% - otherwise replace the current minimum only when TimeSpan beats it,
%%   else reject the sample (returns false).
try_insert_to_topk(MaxSize, Now, LastUpdateValue, TimeSpan, Id) ->
    case ets:info(?TOPK_TAB, size) of
        Size when Size < MaxSize ->
            update_topk(Now, LastUpdateValue, TimeSpan, Id);
        _Size ->
            %% ordered_set keyed on {TimeSpan, Id}: first/1 is the smallest timespan
            case ets:first(?TOPK_TAB) of
                '$end_of_table' ->
                    update_topk(Now, LastUpdateValue, TimeSpan, Id);
                ?TOPK_INDEX(_, Id) ->
                    update_topk(Now, LastUpdateValue, TimeSpan, Id);
                ?TOPK_INDEX(Min, MinId) ->
                    case TimeSpan =< Min of
                        true -> false;
                        _ ->
                            update_topk(Now, LastUpdateValue, TimeSpan, Id),
                            delete_with_index(Min, MinId)
                    end
            end
    end.
%% Look up the most recent timespan recorded for Id, or 0 when none exists.
%% The index table is an ordered_set keyed on {Id, TimeSpan}, so ets:next/2
%% from {Id, 0} lands on Id's entry if it has one.
-spec find_last_update_value(id()) -> non_neg_integer().
find_last_update_value(Id) ->
    case ets:next(?INDEX_TAB, ?INDEX(0, Id)) of
        ?INDEX(LastUpdateValue, Id) ->
            LastUpdateValue;
        _ ->
            0
    end.
%% Write the new sample into both tables, then remove the previous record
%% for the same Id (keyed by its old timespan). Always returns true.
-spec update_topk(pos_integer(), non_neg_integer(), non_neg_integer(), id()) -> true.
update_topk(Now, LastUpdateValue, TimeSpan, Id) ->
    %% update record
    ets:insert(?TOPK_TAB, #top_k{index = ?TOPK_INDEX(TimeSpan, Id),
                                 last_update_time = Now,
                                 extra = []
                                }),
    %% update index
    ets:insert(?INDEX_TAB, #index_tab{index = ?INDEX(TimeSpan, Id)}),
    %% delete the old record & index
    delete_with_index(LastUpdateValue, Id).
%% Remove a record and its index entry. A timespan of 0 means "no previous
%% record" (the sentinel from find_last_update_value/1), so nothing to delete.
-spec delete_with_index(non_neg_integer(), id()) -> true.
delete_with_index(0, _) ->
    true;
delete_with_index(TimeSpan, Id) ->
    ets:delete(?INDEX_TAB, ?INDEX(TimeSpan, Id)),
    ets:delete(?TOPK_TAB, ?TOPK_INDEX(TimeSpan, Id)).
%% Create the named ETS table Tab with Opts, unless a table with that name
%% already exists. Returns the table name either way, so repeated calls
%% (e.g. across restarts or upgrades) are idempotent.
safe_create_tab(Tab, Opts) ->
    AlreadyExists = ets:whereis(Tab) =/= undefined,
    case AlreadyExists of
        true ->
            Tab;
        false ->
            %% ets:new/2 returns the name for named tables; assert that here
            Tab = ets:new(Tab, Opts)
    end.
%% Empty both tables; index first so lookups never see an index entry
%% pointing at already-removed top-k data.
do_clear_history() ->
    ets:delete_all_objects(?INDEX_TAB),
    ets:delete_all_objects(?TOPK_TAB).

View File

@ -0,0 +1,116 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_slow_subs_api).
-rest_api(#{name => clear_history,
method => 'DELETE',
path => "/slow_subscriptions",
func => clear_history,
descr => "Clear current data and re count slow topic"}).
-rest_api(#{name => get_history,
method => 'GET',
path => "/slow_subscriptions",
func => get_history,
descr => "Get slow topics statistics record data"}).
-export([ clear_history/2
, get_history/2
, get_history/0
]).
-include_lib("emqx_plugin_libs/include/emqx_slow_subs.hrl").
-define(DEFAULT_RPC_TIMEOUT, timer:seconds(5)).
-import(minirest, [return/1]).
%%--------------------------------------------------------------------
%% HTTP API
%%--------------------------------------------------------------------
%% DELETE handler: broadcast the history reset to every running node.
%% Per-node RPC failures are ignored (rpc_call falls back to ok).
clear_history(_Bindings, _Params) ->
    Nodes = ekka_mnesia:running_nodes(),
    lists:foreach(
      fun(Node) ->
              _ = rpc_call(Node, emqx_slow_subs, clear_history, [],
                           ok, ?DEFAULT_RPC_TIMEOUT)
      end, Nodes),
    return(ok).
%% GET handler: only served when the slow-subs module is enabled.
get_history(_Bindings, _Params) ->
    execute_when_enabled(fun do_get_history/0).
%% Snapshot of this node's slow-subscription ranking table, one map per
%% record. Also called remotely by do_get_history/0 on peer nodes.
get_history() ->
    Node = node(),
    Records = ets:tab2list(?TOPK_TAB),
    [#{ clientid => ClientId
      , node => Node
      , topic => Topic
      , timespan => TimeSpan
      , last_update_time => LastUpdateTime
      } || #top_k{index = ?TOPK_INDEX(TimeSpan, ?ID(ClientId, Topic)),
                  last_update_time = LastUpdateTime} <- Records].
%%--------------------------------------------------------------------
%% Internal functions
%%--------------------------------------------------------------------
%% Collect every node's ranking, then keep the ?MAX_SIZE slowest entries
%% ordered by descending timespan.
do_get_history() ->
    Nodes = ekka_mnesia:running_nodes(),
    Collect =
        fun(Node, Acc) ->
                Ranking = rpc_call(Node,
                                   ?MODULE,
                                   get_history,
                                   [],
                                   [],
                                   ?DEFAULT_RPC_TIMEOUT),
                Ranking ++ Acc
        end,
    AllRanks = lists:foldl(Collect, [], Nodes),
    ByTimespanDesc = fun(#{timespan := A}, #{timespan := B}) -> A > B end,
    Top = lists:sublist(lists:sort(ByTimespanDesc, AllRanks), ?MAX_SIZE),
    return({ok, Top}).
%% Apply locally when the target is this node; otherwise rpc:call with a
%% timeout, mapping any badrpc onto the supplied default value.
rpc_call(Node, M, F, A, _Default, _Timeout) when Node =:= node() ->
    erlang:apply(M, F, A);
rpc_call(Node, M, F, A, Default, Timeout) ->
    Result = rpc:call(Node, M, F, A, Timeout),
    case Result of
        {badrpc, _Reason} -> Default;
        Other -> Other
    end.
-ifdef(EMQX_ENTERPRISE).
%% Enterprise builds: the feature is always available, run directly.
execute_when_enabled(Fun) ->
    Fun().
-else.
%% this code from emqx_mod_api_topics_metrics:execute_when_enabled
%% Run Fun only when emqx_mod_slow_subs is loaded and enabled; otherwise
%% answer the API call with module_not_loaded.
execute_when_enabled(Fun) ->
    case emqx_modules:find_module(emqx_mod_slow_subs) of
        [{_, true}] -> Fun();
        _ -> return({error, module_not_loaded})
    end.
-endif.

View File

@ -0,0 +1,513 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_trace).
-behaviour(gen_server).
-include_lib("emqx/include/emqx.hrl").
-include_lib("emqx/include/logger.hrl").
-include_lib("kernel/include/file.hrl").
-logger_header("[Tracer]").
-export([ publish/1
, subscribe/3
, unsubscribe/2
]).
-export([ start_link/0
, list/0
, list/1
, get_trace_filename/1
, create/1
, delete/1
, clear/0
, update/2
]).
-export([ format/1
, zip_dir/0
, filename/2
, trace_dir/0
, trace_file/1
, trace_file_detail/1
, delete_files_after_send/2
, is_enable/0
]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
-define(TRACE, ?MODULE).
-define(MAX_SIZE, 30).
-ifdef(TEST).
-export([ log_file/2
, create_table/0
, find_closest_time/2
]).
-endif.
-export_type([ip_address/0]).
-type ip_address() :: string().
-record(?TRACE,
{ name :: binary() | undefined | '_'
, type :: clientid | topic | ip_address | undefined | '_'
, filter :: emqx_types:topic() | emqx_types:clientid() | ip_address() | undefined | '_'
, enable = true :: boolean() | '_'
, start_at :: integer() | undefined | '_'
, end_at :: integer() | undefined | '_'
}).
%% Trace hook for PUBLISH. $SYS messages are never traced.
publish(#message{topic = <<"$SYS/", _/binary>>}) -> ignore;
publish(#message{from = From, topic = Topic, payload = Payload}) when
    is_binary(From); is_atom(From) ->
    emqx_logger:info(
        #{topic => Topic, mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY}},
        "PUBLISH to ~s: ~0p",
        [Topic, Payload]
    ).

%% Trace hook for SUBSCRIBE. $SYS subscriptions are never traced.
subscribe(<<"$SYS/", _/binary>>, _SubId, _SubOpts) -> ignore;
subscribe(Topic, SubId, SubOpts) ->
    emqx_logger:info(
        #{topic => Topic, mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY}},
        "~ts SUBSCRIBE ~ts: Options: ~0p",
        [SubId, Topic, SubOpts]
    ).

%% Trace hook for UNSUBSCRIBE. $SYS subscriptions are never traced.
unsubscribe(<<"$SYS/", _/binary>>, _SubOpts) -> ignore;
unsubscribe(Topic, SubOpts) ->
    emqx_logger:info(
        #{topic => Topic, mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY}},
        "~ts UNSUBSCRIBE ~ts: Options: ~0p",
        [maps:get(subid, SubOpts, ""), Topic, SubOpts]
    ).
-spec(start_link() -> emqx_types:startlink_ret()).
%% Start the trace manager, registered locally under the module name.
start_link() ->
    gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).

-spec list() -> [tuple()].
%% All trace records currently stored in the trace table.
list() ->
    ets:match_object(?TRACE, #?TRACE{_ = '_'}).
-spec is_enable() -> boolean().
%% The tracer is considered enabled when its registered server is alive.
is_enable() ->
    erlang:whereis(?MODULE) =/= undefined.
-spec list(boolean()) -> [tuple()].
%% Trace records filtered by their enable flag.
list(Enable) ->
    ets:match_object(?TRACE, #?TRACE{enable = Enable, _ = '_'}).
-spec create([{Key :: binary(), Value :: binary()}] | #{atom() => binary()}) ->
    ok | {error, {duplicate_condition, iodata()} | {already_existed, iodata()} | iodata()}.
%% Validate and store a new trace, enforcing the ?MAX_SIZE table cap.
create(Trace) ->
    case mnesia:table_info(?TRACE, size) < ?MAX_SIZE of
        true ->
            case to_trace(Trace) of
                {ok, TraceRec} -> insert_new_trace(TraceRec);
                {error, Reason} -> {error, Reason}
            end;
        false ->
            %% Fixed typo in the user-facing error: "reache" -> "reached".
            {error, "The number of traces created has reached the maximum"
                    " please delete the useless ones first"}
    end.
-spec delete(Name :: binary()) -> ok | {error, not_found}.
%% Remove the named trace inside a transaction; abort when it is absent.
delete(Name) ->
    transaction(
      fun() ->
              case mnesia:read(?TRACE, Name) of
                  [_Trace] -> mnesia:delete(?TRACE, Name, write);
                  [] -> mnesia:abort(not_found)
              end
      end).
-spec clear() -> ok | {error, Reason :: term()}.
%% Drop every trace record from the table.
clear() ->
    Result = mnesia:clear_table(?TRACE),
    case Result of
        {atomic, ok} -> ok;
        {aborted, Reason} -> {error, Reason}
    end.
-spec update(Name :: binary(), Enable :: boolean()) ->
    ok | {error, not_found | finished}.
%% Flip the enable flag of a trace. No-op if the flag already has the
%% requested value; refuses to touch a trace whose end time has passed.
update(Name, Enable) ->
    Tran = fun() ->
               case mnesia:read(?TRACE, Name) of
                   [] -> mnesia:abort(not_found);
                   %% Bound Enable in the pattern: flag already as requested.
                   [#?TRACE{enable = Enable}] -> ok;
                   [Rec] ->
                       case erlang:system_time(second) >= Rec#?TRACE.end_at of
                           false -> mnesia:write(?TRACE, Rec#?TRACE{enable = Enable}, write);
                           true -> mnesia:abort(finished)
                       end
               end
           end,
    transaction(Tran).
-spec get_trace_filename(Name :: binary()) ->
    {ok, FileName :: string()} | {error, not_found}.
%% Resolve the on-disk log file name for a trace (derived from its start time).
get_trace_filename(Name) ->
    Tran = fun() ->
               case mnesia:read(?TRACE, Name, read) of
                   [] -> mnesia:abort(not_found);
                   [#?TRACE{start_at = Start}] -> {ok, filename(Name, Start)}
               end end,
    transaction(Tran).
-spec trace_file(File :: list()) ->
    {ok, Node :: list(), Binary :: binary()} |
    {error, Node :: list(), Reason :: term()}.
%% Read a trace log from this node's trace dir. A missing file is reported
%% as trace_disabled when the tracer server itself is not running.
trace_file(File) ->
    FileName = filename:join(trace_dir(), File),
    Node = atom_to_list(node()),
    case file:read_file(FileName) of
        {ok, Bin} -> {ok, Node, Bin};
        {error, enoent} ->
            case emqx_trace:is_enable() of
                false -> {error, Node, trace_disabled};
                true -> {error, Node, enoent}
            end;
        {error, Reason} ->
            {error, Node, Reason}
    end.

%% Size and mtime (posix seconds) of a trace log file on this node.
trace_file_detail(File) ->
    FileName = filename:join(trace_dir(), File),
    Node = atom_to_binary(node()),
    case file:read_file_info(FileName, [{'time', 'posix'}]) of
        {ok, #file_info{size = Size, mtime = Mtime}} -> {ok, Node, #{size => Size, mtime => Mtime}};
        {error, Reason} -> {error, Node, Reason}
    end.

%% Ask the server to delete the given files once the calling (sender)
%% process terminates; see the {delete_tag, ...} cast handler.
delete_files_after_send(TraceLog, Zips) ->
    gen_server:cast(?MODULE, {delete_tag, self(), [TraceLog | Zips]}).
-spec format(list(#?TRACE{})) -> list(map()).
%% Convert each trace record into a map keyed by its record field names.
format(Traces) ->
    Fields = record_info(fields, ?TRACE),
    [begin
         %% Drop the record tag, then pair field names with their values.
         [_Tag | Values] = tuple_to_list(Trace),
         maps:from_list(lists:zip(Fields, Values))
     end || Trace = #?TRACE{} <- Traces].
%% gen_server init: create the table, prepare trace/zip directories,
%% subscribe to mnesia events on the trace table, and arm the update timer.
init([]) ->
    ok = create_table(),
    erlang:process_flag(trap_exit, true),
    %% Remember the primary log level so terminate/2 can restore it.
    OriginLogLevel = emqx_logger:get_primary_log_level(),
    ok = filelib:ensure_dir(filename:join([trace_dir(), dummy])),
    ok = filelib:ensure_dir(filename:join([zip_dir(), dummy])),
    {ok, _} = mnesia:subscribe({table, ?TRACE, simple}),
    Traces = get_enable_trace(),
    ok = update_log_primary_level(Traces, OriginLogLevel),
    TRef = update_trace(Traces),
    {ok, #{timer => TRef, monitors => #{}, primary_log_level => OriginLogLevel}}.

%% Create (or copy to this node) the replicated trace table.
create_table() ->
    ok = ekka_mnesia:create_table(?TRACE, [
        {type, set},
        {disc_copies, [node()]},
        {record_name, ?TRACE},
        {attributes, record_info(fields, ?TRACE)}]),
    ok = ekka_mnesia:copy_table(?TRACE, disc_copies).
handle_call(Req, _From, State) ->
    ?LOG(error, "Unexpected call: ~p", [Req]),
    {reply, ok, State}.

%% Monitor the downloading process; its files are deleted when it exits.
handle_cast({delete_tag, Pid, Files}, State = #{monitors := Monitors}) ->
    erlang:monitor(process, Pid),
    {noreply, State#{monitors => Monitors#{Pid => Files}}};
handle_cast(Msg, State) ->
    ?LOG(error, "Unexpected cast: ~p", [Msg]),
    {noreply, State}.

%% A monitored downloader exited: remove its temporary zip files.
handle_info({'DOWN', _Ref, process, Pid, _Reason}, State = #{monitors := Monitors}) ->
    case maps:take(Pid, Monitors) of
        error -> {noreply, State};
        {Files, NewMonitors} ->
            ZipDir = emqx_trace:zip_dir(),
            lists:foreach(fun(F) -> file:delete(filename:join([ZipDir, F])) end, Files),
            {noreply, State#{monitors => NewMonitors}}
    end;
%% Periodic tick: reconcile trace handlers and re-arm the timer.
handle_info({timeout, TRef, update_trace},
            #{timer := TRef, primary_log_level := OriginLogLevel} = State) ->
    Traces = get_enable_trace(),
    ok = update_log_primary_level(Traces, OriginLogLevel),
    NextTRef = update_trace(Traces),
    {noreply, State#{timer => NextTRef}};
%% Any change to the trace table triggers an immediate refresh.
handle_info({mnesia_table_event, _Events}, State = #{timer := TRef}) ->
    emqx_misc:cancel_timer(TRef),
    handle_info({timeout, TRef, update_trace}, State);
handle_info(Info, State) ->
    ?LOG(error, "Unexpected info: ~p", [Info]),
    {noreply, State}.

%% Restore the original log level, stop all handlers and remove zip temp files.
terminate(_Reason, #{timer := TRef, primary_log_level := OriginLogLevel}) ->
    ok = set_log_primary_level(OriginLogLevel),
    _ = mnesia:unsubscribe({table, ?TRACE, simple}),
    emqx_misc:cancel_timer(TRef),
    stop_all_trace_handler(),
    _ = file:del_dir_r(zip_dir()),
    ok.

code_change(_, State, _Extra) ->
    {ok, State}.
%% Write a new trace unless its name, or an identical
%% {start_at, type, filter} condition, already exists.
insert_new_trace(Trace) ->
    Tran = fun() ->
               case mnesia:read(?TRACE, Trace#?TRACE.name) of
                   [] ->
                       #?TRACE{start_at = StartAt, type = Type, filter = Filter} = Trace,
                       Match = #?TRACE{_ = '_', start_at = StartAt, type = Type, filter = Filter},
                       case mnesia:match_object(?TRACE, Match, read) of
                           [] -> mnesia:write(?TRACE, Trace, write);
                           [#?TRACE{name = Name}] -> mnesia:abort({duplicate_condition, Name})
                       end;
                   [#?TRACE{name = Name}] -> mnesia:abort({already_existed, Name})
               end
           end,
    transaction(Tran).
%% Reconcile installed log handlers with the trace table: disable finished
%% traces, start due ones, stop leftovers, clean stale files, and schedule
%% the next tick.
update_trace(Traces) ->
    Now = erlang:system_time(second),
    {_Waiting, Running, Finished} = classify_by_time(Traces, Now),
    disable_finished(Finished),
    Started = emqx_trace_handler:running(),
    {NeedRunning, AllStarted} = start_trace(Running, Started),
    %% Handlers that are installed but no longer scheduled to run.
    NeedStop = AllStarted -- NeedRunning,
    ok = stop_trace(NeedStop, Started),
    clean_stale_trace_files(),
    NextTime = find_closest_time(Traces, Now),
    emqx_misc:start_timer(NextTime, update_trace).
%% Uninstall every currently-installed trace log handler.
stop_all_trace_handler() ->
    _ = [emqx_trace_handler:uninstall(Id)
         || #{id := Id} <- emqx_trace_handler:running()],
    ok.
%% All traces whose enable flag is set, read in a transaction.
get_enable_trace() ->
    {atomic, Traces} =
        mnesia:transaction(fun() ->
            mnesia:match_object(?TRACE, #?TRACE{enable = true, _ = '_'}, read)
        end),
    Traces.
%% Milliseconds until the next start/end boundary of any enabled trace,
%% capped at 15 minutes.
find_closest_time(Traces, Now) ->
    MaxWaitSec = 60 * 15,
    Fold = fun(#?TRACE{start_at = Start, end_at = End, enable = true}, Acc) ->
                   min(closest(Start, Now, Acc), closest(End, Now, Acc));
              (_Trace, Acc) ->
                   Acc
           end,
    timer:seconds(lists:foldl(Fold, MaxWaitSec, Traces)).

%% Boundaries already in the past do not shrink the wait time.
closest(Time, Now, Closest) when Time =< Now -> Closest;
closest(Time, Now, Closest) -> min(Time - Now, Closest).
%% Persist enable=false for traces whose time window has ended.
disable_finished([]) -> ok;
disable_finished(Traces) ->
    transaction(fun() ->
        lists:map(fun(#?TRACE{name = Name}) ->
            case mnesia:read(?TRACE, Name, write) of
                [] -> ok;
                [Trace] -> mnesia:write(?TRACE, Trace#?TRACE{enable = false}, write)
            end end, Traces)
    end).
%% Ensure a handler is installed for every trace that should be running.
%% Returns {NamesThatShouldRun, NamesWithInstalledHandlers}.
start_trace(Traces, Started0) ->
    Started = lists:map(fun(#{name := Name}) -> Name end, Started0),
    lists:foldl(fun(#?TRACE{name = Name} = Trace, {Running, StartedAcc}) ->
                    case lists:member(Name, StartedAcc) of
                        true ->
                            {[Name | Running], StartedAcc};
                        false ->
                            case start_trace(Trace) of
                                ok -> {[Name | Running], [Name | StartedAcc]};
                                %% Keep it scheduled even if installation failed;
                                %% it will be retried on the next tick.
                                {error, _Reason} -> {[Name | Running], StartedAcc}
                            end
                    end
                end, {[], Started}, Traces).
%% Install a debug-level log handler for a single trace.
start_trace(Trace) ->
    #?TRACE{name = Name, type = Type, filter = Filter, start_at = StartAt} = Trace,
    Handler = #{name => Name, type => Type, filter => Filter},
    emqx_trace_handler:install(Handler, debug, log_file(Name, StartAt)).
%% Uninstall the handler of every started trace whose name is in Finished.
stop_trace(Finished, Started) ->
    Stop = fun(#{name := Name, type := Type}) ->
                   lists:member(Name, Finished)
                       andalso emqx_trace_handler:uninstall(Type, Name)
           end,
    lists:foreach(Stop, Started).
%% Delete log files (everything except the zip dir) that no stored trace
%% record refers to any more.
clean_stale_trace_files() ->
    TraceDir = trace_dir(),
    case file:list_dir(TraceDir) of
        {ok, AllFiles} when AllFiles =/= ["zip"] ->
            FileFun = fun(#?TRACE{name = Name, start_at = StartAt}) -> filename(Name, StartAt) end,
            KeepFiles = lists:map(FileFun, list()),
            case AllFiles -- ["zip" | KeepFiles] of
                [] -> ok;
                DeleteFiles ->
                    DelFun = fun(F) -> file:delete(filename:join(TraceDir, F)) end,
                    lists:foreach(DelFun, DeleteFiles)
            end;
        _ -> ok
    end.
%% Split traces into {Waiting, Running, Finished} relative to Now:
%% not yet started / inside their window / past their end time.
classify_by_time(Traces, Now) ->
    Classify =
        fun(#?TRACE{start_at = Start} = Trace, {Wait, Run, Finish}) when Start > Now ->
                {[Trace | Wait], Run, Finish};
           (#?TRACE{end_at = End} = Trace, {Wait, Run, Finish}) when End =< Now ->
                {Wait, Run, [Trace | Finish]};
           (Trace, {Wait, Run, Finish}) ->
                {Wait, [Trace | Run], Finish}
        end,
    lists:foldl(Classify, {[], [], []}, Traces).
%% Convert raw API input (proplist or map) into a validated #?TRACE{}
%% record, filling defaults and rejecting impossible time windows.
to_trace(TraceParam) ->
    case to_trace(ensure_map(TraceParam), #?TRACE{}) of
        {error, Reason} -> {error, Reason};
        {ok, #?TRACE{name = undefined}} ->
            {error, "name required"};
        {ok, #?TRACE{type = undefined}} ->
            {error, "type=[topic,clientid,ip_address] required"};
        {ok, TraceRec0 = #?TRACE{}} ->
            case fill_default(TraceRec0) of
                #?TRACE{start_at = Start, end_at = End} when End =< Start ->
                    {error, "failed by start_at >= end_at"};
                TraceRec ->
                    {ok, TraceRec}
            end
    end.
%% Normalize the input to a map. Proplist entries with binary keys are
%% converted via existing atoms (no atom-table growth); anything that is
%% not a 2-tuple is silently dropped. Later keys overwrite earlier ones.
ensure_map(#{} = Trace) ->
    Trace;
ensure_map(Trace) when is_list(Trace) ->
    Pairs = lists:filtermap(
              fun({K, V}) when is_binary(K) -> {true, {binary_to_existing_atom(K), V}};
                 ({K, V}) when is_atom(K) -> {true, {K, V}};
                 (_Other) -> false
              end, Trace),
    maps:from_list(Pairs).
%% Default the start time to "now" and the end time to start + 10 minutes.
fill_default(#?TRACE{start_at = undefined} = Trace) ->
    Now = erlang:system_time(second),
    fill_default(Trace#?TRACE{start_at = Now});
fill_default(#?TRACE{end_at = undefined, start_at = StartAt} = Trace) ->
    fill_default(Trace#?TRACE{end_at = StartAt + 10 * 60});
fill_default(Trace) ->
    Trace.
%% Trace names: alphanumeric first char, then alphanumerics, '-' or '_'.
-define(NAME_RE, "^[0-9A-Za-z]+[A-Za-z0-9-_]*$").

%% Fold the remaining input fields into the record, validating each one.
%% Consumed keys are removed from the map; the final clause accepts the
%% record once nothing recognizable is left.
to_trace(#{name := Name} = Trace, Rec) ->
    case re:run(Name, ?NAME_RE) of
        nomatch -> {error, "Name should be " ?NAME_RE};
        _ -> to_trace(maps:remove(name, Trace), Rec#?TRACE{name = Name})
    end;
to_trace(#{type := <<"clientid">>, clientid := Filter} = Trace, Rec) ->
    Trace0 = maps:without([type, clientid], Trace),
    to_trace(Trace0, Rec#?TRACE{type = clientid, filter = Filter});
to_trace(#{type := <<"topic">>, topic := Filter} = Trace, Rec) ->
    case validate_topic(Filter) of
        ok ->
            Trace0 = maps:without([type, topic], Trace),
            to_trace(Trace0, Rec#?TRACE{type = topic, filter = Filter});
        Error -> Error
    end;
to_trace(#{type := <<"ip_address">>, ip_address := Filter} = Trace, Rec) ->
    case validate_ip_address(Filter) of
        ok ->
            Trace0 = maps:without([type, ip_address], Trace),
            to_trace(Trace0, Rec#?TRACE{type = ip_address, filter = Filter});
        Error -> Error
    end;
%% A type whose matching filter key is missing lands here.
to_trace(#{type := Type}, _Rec) -> {error, io_lib:format("required ~s field", [Type])};
to_trace(#{start_at := StartAt} = Trace, Rec) ->
    case to_system_second(StartAt) of
        {ok, Sec} -> to_trace(maps:remove(start_at, Trace), Rec#?TRACE{start_at = Sec});
        {error, Reason} -> {error, Reason}
    end;
to_trace(#{end_at := EndAt} = Trace, Rec) ->
    Now = erlang:system_time(second),
    case to_system_second(EndAt) of
        {ok, Sec} when Sec > Now ->
            to_trace(maps:remove(end_at, Trace), Rec#?TRACE{end_at = Sec});
        {ok, _Sec} ->
            {error, "end_at time has already passed"};
        {error, Reason} ->
            {error, Reason}
    end;
to_trace(_, Rec) -> {ok, Rec}.
%% Validate a topic filter, mapping the validation crash to {error, Reason}.
validate_topic(TopicName) ->
    try emqx_topic:validate(filter, TopicName) of
        true -> ok
    catch
        error:Error ->
            {error, io_lib:format("topic: ~s invalid by ~p", [TopicName, Error])}
    end.
%% Accept anything inet can parse — IPv4 or IPv6 literals.
validate_ip_address(IP) ->
    case inet:parse_address(binary_to_list(IP)) of
        {ok, _Addr} ->
            ok;
        {error, Reason} ->
            Msg = io_lib:format("ip address: ~p", [Reason]),
            {error, lists:flatten(Msg)}
    end.
%% Parse an RFC3339 binary into epoch seconds, clamped to "now" at the
%% earliest so freshly-created traces never start in the past.
to_system_second(At) ->
    try
        Sec = calendar:rfc3339_to_system_time(binary_to_list(At), [{unit, second}]),
        Now = erlang:system_time(second),
        {ok, erlang:max(Now, Sec)}
    catch
        %% rfc3339_to_system_time raises different error reasons depending on
        %% how the input is malformed (badmatch, function_clause, badarg, ...).
        %% Catch all of them so callers always get a uniform {error, _}
        %% instead of a crash; previously only {badmatch, _} was handled.
        error:_ ->
            {error, ["The rfc3339 specification not satisfied: ", At]}
    end.
%% Directory used for zipped trace downloads (inside the trace dir).
zip_dir() ->
    filename:join(trace_dir(), "zip").

%% Root directory for trace log files under the configured data_dir.
trace_dir() ->
    filename:join(emqx:get_env(data_dir), "trace").

%% Absolute path of the log file for a trace.
log_file(Name, Start) ->
    filename:join(trace_dir(), filename(Name, Start)).

%% "trace_<name>_<date>.log" — the date part of the RFC3339 start time.
filename(Name, Start) ->
    [Time, _] = string:split(calendar:system_time_to_rfc3339(Start), "T", leading),
    lists:flatten(["trace_", binary_to_list(Name), "_", Time, ".log"]).
%% Run a mnesia transaction and unwrap its result envelope.
transaction(Tran) ->
    case mnesia:transaction(Tran) of
        {atomic, Res} -> Res;
        {aborted, Reason} -> {error, Reason}
    end.
%% Force debug as the primary log level while any trace is enabled;
%% restore the remembered original level once none remain.
update_log_primary_level([], OriginLevel) -> set_log_primary_level(OriginLevel);
update_log_primary_level(_Traces, _OriginLevel) -> set_log_primary_level(debug).

%% Skip the logger call when the level is already in effect.
set_log_primary_level(NewLevel) ->
    case emqx_logger:get_primary_log_level() of
        NewLevel -> ok;
        _Current -> emqx_logger:set_primary_log_level(NewLevel)
    end.

View File

@ -0,0 +1,251 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_trace_api).
-include_lib("emqx/include/logger.hrl").
-include_lib("kernel/include/file.hrl").
%% API
-export([ list_trace/2
, create_trace/2
, update_trace/2
, delete_trace/2
, clear_traces/2
, trace_file_detail/2
, download_zip_log/2
, stream_log_file/2
]).
-export([ read_trace_file/3
, get_trace_size/0
]).
-define(TO_BIN(_B_), iolist_to_binary(_B_)).
-define(NOT_FOUND(N), {error, 'NOT_FOUND', ?TO_BIN([N, " NOT FOUND"])}).
%% GET handler: every trace plus its per-node log size and computed status.
list_trace(_, _Params) ->
    case emqx_trace:list() of
        [] -> {ok, []};
        List0 ->
            %% Newest-first by start time.
            List = lists:sort(fun(#{start_at := A}, #{start_at := B}) -> A > B end,
                              emqx_trace:format(List0)),
            Nodes = ekka_mnesia:running_nodes(),
            TraceSize = cluster_call(?MODULE, get_trace_size, [], 30000),
            %% Merge every node's {node, file} -> size maps into one.
            AllFileSize = lists:foldl(fun(F, Acc) -> maps:merge(Acc, F) end, #{}, TraceSize),
            Now = erlang:system_time(second),
            Traces =
                lists:map(fun(Trace = #{name := Name, start_at := Start,
                                        end_at := End, enable := Enable, type := Type, filter := Filter}) ->
                              FileName = emqx_trace:filename(Name, Start),
                              LogSize = collect_file_size(Nodes, FileName, AllFileSize),
                              Trace0 = maps:without([enable, filter], Trace),
                              ModEnable = emqx_trace:is_enable(),
                              Trace0#{ log_size => LogSize
                                     , Type => iolist_to_binary(Filter)
                                     , start_at => list_to_binary(calendar:system_time_to_rfc3339(Start))
                                     , end_at => list_to_binary(calendar:system_time_to_rfc3339(End))
                                     , status => status(ModEnable, Enable, Start, End, Now)
                                     }
                          end, List),
            {ok, Traces}
    end.
%% POST handler: create a trace, mapping domain errors onto API codes.
create_trace(_, Param) ->
    case emqx_trace:create(Param) of
        ok -> ok;
        {error, {already_existed, Name}} ->
            %% Fixed message: a separator was missing between the name and
            %% the text (previously rendered as e.g. "fooAlready Exists").
            {error, 'ALREADY_EXISTED', ?TO_BIN([Name, " Already Exists"])};
        {error, {duplicate_condition, Name}} ->
            {error, 'DUPLICATE_CONDITION', ?TO_BIN([Name, " Duplication Condition"])};
        {error, Reason} ->
            {error, 'INCORRECT_PARAMS', ?TO_BIN(Reason)}
    end.
%% DELETE handler for one named trace; 404 when it does not exist.
delete_trace(#{name := Name}, _Param) ->
    case emqx_trace:delete(Name) of
        ok -> ok;
        {error, not_found} -> ?NOT_FOUND(Name)
    end.

%% DELETE handler that removes every stored trace.
clear_traces(_, _) ->
    emqx_trace:clear().
%% PUT handler: enable or disable a trace by name.
update_trace(#{name := Name, operation := Operation}, _Param) ->
    Enable = case Operation of
                 enable -> true;
                 disable -> false
             end,
    case emqx_trace:update(Name, Enable) of
        ok -> {ok, #{enable => Enable, name => Name}};
        {error, not_found} -> ?NOT_FOUND(Name)
    end.
%% if HTTP request headers include accept-encoding: gzip and file size > 300 bytes.
%% cowboy_compress_h will auto encode gzip format.
%% Collect this trace's log from every node and serve them as one zip.
download_zip_log(#{name := Name}, _Param) ->
    case emqx_trace:get_trace_filename(Name) of
        {ok, TraceLog} ->
            TraceFiles = collect_trace_file(TraceLog),
            ZipDir = emqx_trace:zip_dir(),
            Zips = group_trace_file(ZipDir, TraceLog, TraceFiles),
            ZipFileName0 = binary_to_list(Name) ++ ".zip",
            ZipFileName = filename:join([ZipDir, ZipFileName0]),
            {ok, ZipFile} = zip:zip(ZipFileName, Zips, [{cwd, ZipDir}]),
            %% The tracer server deletes the temp files once this process exits.
            emqx_trace:delete_files_after_send(ZipFileName0, Zips),
            {ok, ZipFile};
        {error, Reason} ->
            {error, Reason}
    end.

%% Per-node size/mtime details of one trace's log file.
trace_file_detail(#{name := Name}, _Param) ->
    case emqx_trace:get_trace_filename(Name) of
        {ok, TraceLog} ->
            TraceFiles = collect_trace_file_detail(TraceLog),
            {ok, group_trace_file_detail(TraceLog, TraceFiles)};
        {error, Reason} ->
            {error, Reason}
    end.
%% Write each node's log content into ZipDir as "<node>-<log>"; returns
%% the list of file names written successfully. Nodes that failed are
%% only logged, so a partial cluster still yields a usable zip.
group_trace_file(ZipDir, TraceLog, TraceFiles) ->
    lists:foldl(fun(Res, Acc) ->
                    case Res of
                        {ok, Node, Bin} ->
                            FileName = Node ++ "-" ++ TraceLog,
                            ZipName = filename:join([ZipDir, FileName]),
                            case file:write_file(ZipName, Bin) of
                                ok -> [FileName | Acc];
                                _ -> Acc
                            end;
                        {error, Node, trace_disabled} ->
                            ?LOG(warning, "emqx_mod_trace modules is disabled on ~s ~s", [Node, TraceLog]),
                            Acc;
                        {error, Node, Reason} ->
                            ?LOG(error, "download trace log error:~p", [{Node, TraceLog, Reason}]),
                            Acc
                    end
                end, [], TraceFiles).
%% Keep successful per-node file details (tagged with the node name);
%% failures are only logged.
group_trace_file_detail(TraceLog, TraceFiles) ->
    lists:foldl(
      fun({ok, Node, Info}, Acc) ->
              [Info#{node => Node} | Acc];
         ({error, Node, Reason}, Acc) ->
              ?LOG(error, "read trace file detail failed:~p", [{Node, TraceLog, Reason}]),
              Acc
      end, [], TraceFiles).
%% Fetch the trace log content from every running node.
collect_trace_file(TraceLog) ->
    cluster_call(emqx_trace, trace_file, [TraceLog], 60000).

%% Fetch trace file metadata (size/mtime) from every running node.
collect_trace_file_detail(TraceLog) ->
    cluster_call(emqx_trace, trace_file_detail, [TraceLog], 25000).

%% multicall on all running nodes; unreachable nodes are only logged.
cluster_call(Mod, Fun, Args, Timeout) ->
    Nodes = ekka_mnesia:running_nodes(),
    {GoodRes, BadNodes} = rpc:multicall(Nodes, Mod, Fun, Args, Timeout),
    BadNodes =/= [] andalso ?LOG(error, "rpc call failed on ~p ~p", [BadNodes, {Mod, Fun, Args}]),
    GoodRes.
%% Stream a chunk of the trace log from the requested node, returning the
%% next read position so the client can poll incrementally.
stream_log_file(#{name := Name}, Params) ->
    Node0 = proplists:get_value(<<"node">>, Params, atom_to_binary(node())),
    Position0 = proplists:get_value(<<"position">>, Params, <<"0">>),
    Bytes0 = proplists:get_value(<<"bytes">>, Params, <<"1000">>),
    case to_node(Node0) of
        {ok, Node} ->
            Position = binary_to_integer(Position0),
            Bytes = binary_to_integer(Bytes0),
            case rpc:call(Node, ?MODULE, read_trace_file, [Name, Position, Bytes]) of
                {ok, Bin} ->
                    Meta = #{<<"position">> => Position + byte_size(Bin), <<"bytes">> => Bytes},
                    {ok, #{meta => Meta, items => Bin}};
                {eof, Size} ->
                    %% End of file: report the file size as the next position.
                    Meta = #{<<"position">> => Size, <<"bytes">> => Bytes},
                    {ok, #{meta => Meta, items => <<"">>}};
                {error, trace_disabled} ->
                    {error, io_lib:format("trace_disable_on_~s", [Node0])};
                {error, Reason} ->
                    logger:log(error, "read_file_failed ~p", [{Node, Name, Reason, Position, Bytes}]),
                    {error, Reason};
                {badrpc, nodedown} ->
                    {error, "BadRpc node down"}
            end;
        {error, Reason} -> {error, Reason}
    end.
%% Map {node, filename} -> size for every trace file on this node; the
%% zip directory entry is excluded. Used via cluster_call by list_trace.
get_trace_size() ->
    TraceDir = emqx_trace:trace_dir(),
    Node = node(),
    case file:list_dir(TraceDir) of
        {ok, AllFiles} ->
            Files = lists:delete("zip", AllFiles),
            lists:foldl(
              fun(File, Acc) ->
                      Path = filename:join(TraceDir, File),
                      Acc#{{Node, File} => filelib:file_size(Path)}
              end, #{}, Files);
        _Error -> #{}
    end.
%% this is an rpc call for stream_log_file/2
read_trace_file(Name, Position, Limit) ->
    case emqx_trace:get_trace_filename(Name) of
        {error, _} = Error -> Error;
        {ok, TraceFile} ->
            TraceDir = emqx_trace:trace_dir(),
            TracePath = filename:join([TraceDir, TraceFile]),
            read_file(TracePath, Position, Limit)
    end.

%% Read up to Bytes from Offset. Returns {eof, Size} at end of file, and
%% {error, trace_disabled} when the file is missing because the tracer
%% server is not running.
read_file(Path, Offset, Bytes) ->
    case file:open(Path, [read, raw, binary]) of
        {ok, IoDevice} ->
            try
                _ = case Offset of
                        0 -> ok;
                        _ -> file:position(IoDevice, {bof, Offset})
                    end,
                case file:read(IoDevice, Bytes) of
                    {ok, Bin} -> {ok, Bin};
                    {error, Reason} -> {error, Reason};
                    eof ->
                        {ok, #file_info{size = Size}} = file:read_file_info(IoDevice),
                        {eof, Size}
                end
            after
                %% Always release the fd, even if read/position raised.
                file:close(IoDevice)
            end;
        {error, enoent} ->
            case emqx_trace:is_enable() of
                false -> {error, trace_disabled};
                true -> {error, enoent}
            end;
        {error, Reason} -> {error, Reason}
    end.
%% Only pre-existing atoms are accepted, so arbitrary client input cannot
%% grow the atom table; unknown node names yield an error.
to_node(NodeBin) ->
    try
        {ok, binary_to_existing_atom(NodeBin)}
    catch
        _:_ -> {error, "node not found"}
    end.
%% Per-node size of FileName; 0 for nodes that reported no such file.
collect_file_size(Nodes, FileName, AllFiles) ->
    maps:from_list(
      [{Node, maps:get({Node, FileName}, AllFiles, 0)} || Node <- Nodes]).
%% if the module is not running, it will return stopped, user can download the trace file.
%% Arguments: (tracer running?, trace enabled?, StartAt, EndAt, Now).
status(false, _Enable, _Start, _End, _Now) -> <<"stopped">>;
status(true, false, _Start, _End, _Now) -> <<"stopped">>;
status(true, true, Start, _End, Now) when Now < Start -> <<"waiting">>;
status(true, true, _Start, End, Now) when Now >= End -> <<"stopped">>;
status(true, true, _Start, _End, _Now) -> <<"running">>.

View File

@ -0,0 +1,369 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_trace_SUITE).
%% API
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("emqx/include/emqx.hrl").
-record(emqx_trace, {name, type, filter, enable = true, start_at, end_at}).
%%--------------------------------------------------------------------
%% Setups
%%--------------------------------------------------------------------
%% Common Test boilerplate: run every t_* case in this module.
all() ->
    emqx_ct:all(?MODULE).

init_per_suite(Config) ->
    emqx_ct_helpers:start_apps([]),
    Config.

end_per_suite(_Config) ->
    emqx_ct_helpers:stop_apps([]).

%% Each case starts from a loaded tracer and an empty trace table.
init_per_testcase(_, Config) ->
    load(),
    ok = emqx_trace:clear(),
    Config.
%% Fixed arity: common_test invokes end_per_testcase/2 (TestCase, Config);
%% the previous one-argument version was never called by the framework,
%% so unload() did not run between cases.
end_per_testcase(_TestCase, _Config) ->
    unload(),
    ok.
%% Create/read/format/delete round-trip, including duplicate-name and
%% duplicate-condition rejection.
t_base_create_delete(_Config) ->
    Now = erlang:system_time(second),
    Start = to_rfc3339(Now),
    End = to_rfc3339(Now + 30 * 60),
    Name = <<"name1">>,
    ClientId = <<"test-device">>,
    Trace = #{
        name => Name,
        type => <<"clientid">>,
        clientid => ClientId,
        start_at => Start,
        end_at => End
    },
    AnotherTrace = Trace#{name => <<"anotherTrace">>},
    ok = emqx_trace:create(Trace),
    ?assertEqual({error, {already_existed, Name}}, emqx_trace:create(Trace)),
    %% Same {start, type, filter} under a different name is also rejected.
    ?assertEqual({error, {duplicate_condition, Name}}, emqx_trace:create(AnotherTrace)),
    [TraceRec] = emqx_trace:list(),
    Expect = #emqx_trace{
        name = Name,
        type = clientid,
        filter = ClientId,
        start_at = Now,
        end_at = Now + 30 * 60
    },
    ?assertEqual(Expect, TraceRec),
    ExpectFormat = [
        #{
            filter => <<"test-device">>,
            enable => true,
            type => clientid,
            name => <<"name1">>,
            start_at => Now,
            end_at => Now + 30 * 60
        }
    ],
    ?assertEqual(ExpectFormat, emqx_trace:format([TraceRec])),
    ?assertEqual(ok, emqx_trace:delete(Name)),
    ?assertEqual({error, not_found}, emqx_trace:delete(Name)),
    ?assertEqual([], emqx_trace:list()),
    ok.
%% The trace table is capped at 30 entries: the 31st create fails until an
%% existing trace is deleted to make room.
t_create_size_max(_Config) ->
    lists:foreach(
        fun(N) ->
            TraceName = list_to_binary("name" ++ integer_to_list(N)),
            ok = emqx_trace:create([{name, TraceName}, {type, <<"topic">>},
                {topic, list_to_binary("/x/y/" ++ integer_to_list(N))}])
        end, lists:seq(1, 30)),
    Trace31 = [{<<"name">>, <<"name31">>},
        {<<"type">>, <<"topic">>}, {<<"topic">>, <<"/x/y/31">>}],
    {error, _} = emqx_trace:create(Trace31),
    ok = emqx_trace:delete(<<"name30">>),
    ok = emqx_trace:create(Trace31),
    ?assertEqual(30, erlang:length(emqx_trace:list())),
    ok.
%% Exercises the validation error paths of emqx_trace:create/1: missing or
%% unknown type, rejected topic filter, malformed RFC3339 timestamps,
%% missing clientid, disallowed name characters, and unparsable ip_address.
t_create_failed(_Config) ->
Name = {<<"name">>, <<"test">>},
%% No recognised type field -> "type=... required" error.
UnknownField = [Name, {<<"unknown">>, 12}],
{error, Reason1} = emqx_trace:create(UnknownField),
?assertEqual(<<"type=[topic,clientid,ip_address] required">>, iolist_to_binary(Reason1)),
%% The filter "#/#//" is rejected as an invalid topic.
InvalidTopic = [Name, {<<"topic">>, "#/#//"}, {<<"type">>, <<"topic">>}],
{error, Reason2} = emqx_trace:create(InvalidTopic),
?assertEqual(<<"topic: #/#// invalid by function_clause">>, iolist_to_binary(Reason2)),
%% start_at / end_at must be valid RFC3339 timestamps.
InvalidStart = [Name, {<<"type">>, <<"topic">>}, {<<"topic">>, <<"/sys/">>},
{<<"start_at">>, <<"2021-12-3:12">>}],
{error, Reason3} = emqx_trace:create(InvalidStart),
?assertEqual(<<"The rfc3339 specification not satisfied: 2021-12-3:12">>,
iolist_to_binary(Reason3)),
InvalidEnd = [Name, {<<"type">>, <<"topic">>}, {<<"topic">>, <<"/sys/">>},
{<<"end_at">>, <<"2021-12-3:12">>}],
{error, Reason4} = emqx_trace:create(InvalidEnd),
?assertEqual(<<"The rfc3339 specification not satisfied: 2021-12-3:12">>,
iolist_to_binary(Reason4)),
%% A clientid-typed trace must carry a clientid field.
{error, Reason7} = emqx_trace:create([Name, {<<"type">>, <<"clientid">>}]),
?assertEqual(<<"required clientid field">>, iolist_to_binary(Reason7)),
%% Trace names are restricted to the pattern asserted below.
InvalidPackets4 = [{<<"name">>, <<"/test">>}, {<<"clientid">>, <<"t">>},
{<<"type">>, <<"clientid">>}],
{error, Reason9} = emqx_trace:create(InvalidPackets4),
?assertEqual(<<"Name should be ^[0-9A-Za-z]+[A-Za-z0-9-_]*$">>, iolist_to_binary(Reason9)),
?assertEqual({error, "type=[topic,clientid,ip_address] required"},
emqx_trace:create([{<<"name">>, <<"test-name">>}, {<<"clientid">>, <<"good">>}])),
%% A non-address string in ip_address fails with einval.
?assertEqual({error, "ip address: einval"},
emqx_trace:create([Name, {<<"type">>, <<"ip_address">>},
{<<"ip_address">>, <<"test-name">>}])),
ok.
%% Creating a trace without explicit timestamps falls back to defaults:
%% end_at - start_at = 10 minutes and start_at close to "now". Also checks
%% the validation errors for an empty proplist, an already-passed end_at,
%% and an inverted start_at/end_at range.
t_create_default(_Config) ->
    {error, "name required"} = emqx_trace:create([]),
    ok = emqx_trace:create([{<<"name">>, <<"test-name">>},
        {<<"type">>, <<"clientid">>}, {<<"clientid">>, <<"good">>}]),
    [#emqx_trace{name = <<"test-name">>}] = emqx_trace:list(),
    ok = emqx_trace:clear(),
    PassedEndTrace = [
        {<<"name">>, <<"test-name">>},
        {<<"type">>, <<"topic">>},
        {<<"topic">>, <<"/x/y/z">>},
        {<<"start_at">>, <<"2021-10-28T10:54:47+08:00">>},
        {<<"end_at">>, <<"2021-10-27T10:54:47+08:00">>}
    ],
    {error, "end_at time has already passed"} = emqx_trace:create(PassedEndTrace),
    NowSec = erlang:system_time(second),
    InvertedRangeTrace = [
        {<<"name">>, <<"test-name">>},
        {<<"type">>, <<"topic">>},
        {<<"topic">>, <<"/x/y/z">>},
        {<<"start_at">>, to_rfc3339(NowSec + 10)},
        {<<"end_at">>, to_rfc3339(NowSec + 3)}
    ],
    {error, "failed by start_at >= end_at"} = emqx_trace:create(InvertedRangeTrace),
    %% No timestamps at all: defaults are applied.
    ok = emqx_trace:create([{<<"name">>, <<"test-name">>},
        {<<"type">>, <<"topic">>}, {<<"topic">>, <<"/x/y/z">>}]),
    [#emqx_trace{start_at = StartAt, end_at = EndAt}] = emqx_trace:list(),
    ?assertEqual(10 * 60, EndAt - StartAt),
    ?assertEqual(true, StartAt - erlang:system_time(second) < 5),
    ok.
%% Extra fields that do not belong to the declared type (clientid and
%% ip_address on a topic trace) are ignored: only the topic filter is stored.
t_create_with_extra_fields(_Config) ->
    ok = emqx_trace:clear(),
    TraceProps = [
        {<<"name">>, <<"test-name">>},
        {<<"type">>, <<"topic">>},
        {<<"topic">>, <<"/x/y/z">>},
        {<<"clientid">>, <<"dev001">>},
        {<<"ip_address">>, <<"127.0.0.1">>}
    ],
    ok = emqx_trace:create(TraceProps),
    ?assertMatch([#emqx_trace{name = <<"test-name">>, filter = <<"/x/y/z">>, type = topic}],
        emqx_trace:list()),
    ok.
%% Toggle a trace's enable flag and verify: idempotent disable, re-enable,
%% the not_found error for an unknown name, and the finished error once the
%% trace's end_at has passed.
t_update_enable(_Config) ->
    Name = <<"test-name">>,
    Now = erlang:system_time(second),
    End = list_to_binary(calendar:system_time_to_rfc3339(Now + 2)),
    ok = emqx_trace:create([{<<"name">>, Name}, {<<"type">>, <<"topic">>},
        {<<"topic">>, <<"/x/y/z">>}, {<<"end_at">>, End}]),
    [#emqx_trace{enable = Enable}] = emqx_trace:list(),
    %% Expected value first, to match ?assertEqual(Expect, Expr) usage in
    %% the rest of this suite (arguments were reversed before).
    ?assertEqual(true, Enable),
    ok = emqx_trace:update(Name, false),
    [#emqx_trace{enable = false}] = emqx_trace:list(),
    %% Disabling an already-disabled trace is accepted and is a no-op.
    ok = emqx_trace:update(Name, false),
    [#emqx_trace{enable = false}] = emqx_trace:list(),
    ok = emqx_trace:update(Name, true),
    [#emqx_trace{enable = true}] = emqx_trace:list(),
    ok = emqx_trace:update(Name, false),
    [#emqx_trace{enable = false}] = emqx_trace:list(),
    ?assertEqual({error, not_found}, emqx_trace:update(<<"Name not found">>, true)),
    %% Sleep past end_at (Now + 2s): a finished trace can no longer be updated.
    ct:sleep(2100),
    ?assertEqual({error, finished}, emqx_trace:update(Name, true)),
    ok.
%% Traces are enabled/disabled according to their start_at/end_at window:
%% a trace whose window already closed cannot be created, and a running
%% trace shows enable=false once its window passes.
t_load_state(_Config) ->
Now = erlang:system_time(second),
%% Window covers "now" -> running.
Running = #{name => <<"Running">>, type => <<"topic">>,
topic => <<"/x/y/1">>, start_at => to_rfc3339(Now - 1),
end_at => to_rfc3339(Now + 2)},
%% Window starts in the future -> waiting.
Waiting = [{<<"name">>, <<"Waiting">>}, {<<"type">>, <<"topic">>},
{<<"topic">>, <<"/x/y/2">>}, {<<"start_at">>, to_rfc3339(Now + 3)},
{<<"end_at">>, to_rfc3339(Now + 8)}],
%% Window already closed -> creation rejected.
Finished = [{<<"name">>, <<"Finished">>}, {<<"type">>, <<"topic">>},
{<<"topic">>, <<"/x/y/3">>}, {<<"start_at">>, to_rfc3339(Now - 5)},
{<<"end_at">>, to_rfc3339(Now)}],
ok = emqx_trace:create(Running),
ok = emqx_trace:create(Waiting),
{error, "end_at time has already passed"} = emqx_trace:create(Finished),
Traces = emqx_trace:format(emqx_trace:list()),
?assertEqual(2, erlang:length(Traces)),
Enables = lists:map(fun(#{name := Name, enable := Enable}) -> {Name, Enable} end, Traces),
ExpectEnables = [{<<"Running">>, true}, {<<"Waiting">>, true}],
?assertEqual(ExpectEnables, lists:sort(Enables)),
%% After 3.5s the Running trace's window (ends at Now + 2) has closed.
ct:sleep(3500),
Traces2 = emqx_trace:format(emqx_trace:list()),
?assertEqual(2, erlang:length(Traces2)),
Enables2 = lists:map(fun(#{name := Name, enable := Enable}) -> {Name, Enable} end, Traces2),
ExpectEnables2 = [{<<"Running">>, false}, {<<"Waiting">>, true}],
?assertEqual(ExpectEnables2, lists:sort(Enables2)),
ok.
%% End-to-end: activity of a traced client (and of a traced topic) ends up
%% in the corresponding trace log files, and the clientid log keeps growing
%% as the client publishes more messages.
t_client_event(_Config) ->
application:set_env(emqx, allow_anonymous, true),
ClientId = <<"client-test">>,
Now = erlang:system_time(second),
Start = to_rfc3339(Now),
Name = <<"test_client_id_event">>,
ok = emqx_trace:create([{<<"name">>, Name},
{<<"type">>, <<"clientid">>}, {<<"clientid">>, ClientId}, {<<"start_at">>, Start}]),
%% Flush the trace handler before generating traffic.
ok = emqx_trace_handler_SUITE:filesync(Name, clientid),
{ok, Client} = emqtt:start_link([{clean_start, true}, {clientid, ClientId}]),
{ok, _} = emqtt:connect(Client),
emqtt:ping(Client),
ok = emqtt:publish(Client, <<"/test">>, #{}, <<"1">>, [{qos, 0}]),
ok = emqtt:publish(Client, <<"/test">>, #{}, <<"2">>, [{qos, 0}]),
ok = emqx_trace_handler_SUITE:filesync(Name, clientid),
%% Second trace on the topic the client publishes to.
ok = emqx_trace:create([{<<"name">>, <<"test_topic">>},
{<<"type">>, <<"topic">>}, {<<"topic">>, <<"/test">>}, {<<"start_at">>, Start}]),
ok = emqx_trace_handler_SUITE:filesync(<<"test_topic">>, topic),
%% Snapshot of the clientid log before the second batch of publishes.
{ok, Bin} = file:read_file(emqx_trace:log_file(Name, Now)),
ok = emqtt:publish(Client, <<"/test">>, #{}, <<"3">>, [{qos, 0}]),
ok = emqtt:publish(Client, <<"/test">>, #{}, <<"4">>, [{qos, 0}]),
ok = emqtt:disconnect(Client),
ok = emqx_trace_handler_SUITE:filesync(Name, clientid),
ok = emqx_trace_handler_SUITE:filesync(<<"test_topic">>, topic),
{ok, Bin2} = file:read_file(emqx_trace:log_file(Name, Now)),
{ok, Bin3} = file:read_file(emqx_trace:log_file(<<"test_topic">>, Now)),
ct:pal("Bin ~p Bin2 ~p Bin3 ~p", [byte_size(Bin), byte_size(Bin2), byte_size(Bin3)]),
%% Clientid log is non-empty and grew between snapshots; topic log is non-empty.
?assert(erlang:byte_size(Bin) > 0),
?assert(erlang:byte_size(Bin) < erlang:byte_size(Bin2)),
?assert(erlang:byte_size(Bin3) > 0),
ok.
%% get_trace_filename/1 resolves the log file name of an existing trace,
%% both while the trace is running and after its window has closed; an
%% unknown name yields not_found.
t_get_log_filename(_Config) ->
    NowSec = erlang:system_time(second),
    StartAt = calendar:system_time_to_rfc3339(NowSec),
    EndAt = calendar:system_time_to_rfc3339(NowSec + 2),
    Name = <<"name1">>,
    ok = emqx_trace:create([
        {<<"name">>, Name},
        {<<"type">>, <<"ip_address">>},
        {<<"ip_address">>, <<"127.0.0.1">>},
        {<<"start_at">>, list_to_binary(StartAt)},
        {<<"end_at">>, list_to_binary(EndAt)}
    ]),
    ?assertEqual({error, not_found}, emqx_trace:get_trace_filename(<<"test">>)),
    ?assertEqual(ok, element(1, emqx_trace:get_trace_filename(Name))),
    %% Still resolvable after the trace finished (end_at = NowSec + 2 passed).
    ct:sleep(3000),
    ?assertEqual(ok, element(1, emqx_trace:get_trace_filename(Name))),
    ok.
%% trace_file/1 reads a file from the trace directory and reports which
%% node served it.
t_trace_file(_Config) ->
    FileName = "test.log",
    Payload = <<"test \n test">>,
    Path = filename:join(emqx_trace:trace_dir(), FileName),
    ok = file:write_file(Path, Payload),
    {ok, Node, Content} = emqx_trace:trace_file(FileName),
    ?assertEqual(Node, atom_to_list(node())),
    ?assertEqual(Payload, Content),
    ok = file:delete(Path),
    ok.
%% download_zip_log/2 produces a non-empty zip archive for a trace that has
%% captured some client activity.
t_download_log(_Config) ->
    ClientId = <<"client-test">>,
    %% Start the window 2s in the past so the trace is already running.
    StartSec = erlang:system_time(second) - 2,
    Name = <<"test_client_id">>,
    ok = emqx_trace:create([{<<"name">>, Name},
        {<<"type">>, <<"clientid">>}, {<<"clientid">>, ClientId},
        {<<"start_at">>, to_rfc3339(StartSec)}]),
    {ok, Client} = emqtt:start_link([{clean_start, true}, {clientid, ClientId}]),
    {ok, _} = emqtt:connect(Client),
    %% Generate a little traced traffic.
    lists:foreach(fun(_) -> _ = emqtt:ping(Client) end, lists:seq(1, 5)),
    ok = emqx_trace_handler_SUITE:filesync(Name, clientid),
    {ok, ZipFile} = emqx_trace_api:download_zip_log(#{name => Name}, []),
    ?assert(filelib:file_size(ZipFile) > 0),
    ok = emqtt:disconnect(Client),
    ok.
%% trace_file_detail/2 reports the node, size and mtime of a trace's
%% log file.
t_trace_file_detail(_Config) ->
ClientId = <<"client-test1">>,
%% Start the trace window 10s in the past so it is already running.
Now = erlang:system_time(second) - 10,
Start = to_rfc3339(Now),
Name = <<"test_client_id1">>,
ok = emqx_trace:create([{<<"name">>, Name},
{<<"type">>, <<"clientid">>}, {<<"clientid">>, ClientId}, {<<"start_at">>, Start}]),
{ok, Client} = emqtt:start_link([{clean_start, true}, {clientid, ClientId}]),
{ok, _} = emqtt:connect(Client),
[begin _ = emqtt:ping(Client) end ||_ <- lists:seq(1, 10)],
ct:sleep(200),
ok = emqx_trace_handler_SUITE:filesync(Name, clientid),
{ok, [#{mtime := Mtime, node := Node, size := Size} = Detail]}
= emqx_trace_api:trace_file_detail(#{name => Name}, []),
ct:pal("~p detail:~p~n", [{Name, Now}, Detail]),
?assertEqual(atom_to_binary(node()), Node),
?assert(Size >= 0),
%% The file was written after the (past) trace start time.
?assert(Mtime >= Now),
ok = emqtt:disconnect(Client),
ok.
%% find_closest_time/2 returns the milliseconds until the next trace state
%% change (an enabled trace starting or stopping), or a default of 15
%% minutes when no enabled trace is pending.
t_find_closed_time(_Config) ->
    DefaultMs = 60 * 15000,
    Now = erlang:system_time(second),
    %% No traces at all -> default interval.
    ?assertEqual(DefaultMs, emqx_trace:find_closest_time([], Now)),
    %% A disabled trace is ignored.
    Disabled = [#emqx_trace{name = <<"disable">>, start_at = Now + 1,
        end_at = Now + 2, enable = false}],
    ?assertEqual(DefaultMs, emqx_trace:find_closest_time(Disabled, Now)),
    %% A running trace: closest event is its end (10s away).
    OneRunning = [#emqx_trace{name = <<"running">>, start_at = Now, end_at = Now + 10, enable = true}],
    ?assertEqual(10000, emqx_trace:find_closest_time(OneRunning, Now)),
    %% A waiting trace: closest event is its start (2s away).
    OneWaiting = [#emqx_trace{name = <<"waiting">>, start_at = Now + 2,
        end_at = Now + 10, enable = true}],
    ?assertEqual(2000, emqx_trace:find_closest_time(OneWaiting, Now)),
    %% Mixed set: the nearest enabled event is 1s away.
    Mixed = [
        #emqx_trace{name = <<"waiting">>, start_at = Now + 1, end_at = Now + 2, enable = true},
        #emqx_trace{name = <<"running0">>, start_at = Now, end_at = Now + 5, enable = true},
        #emqx_trace{name = <<"running1">>, start_at = Now - 1, end_at = Now + 1, enable = true},
        #emqx_trace{name = <<"finished">>, start_at = Now - 2, end_at = Now - 1, enable = true},
        #emqx_trace{name = <<"waiting">>, start_at = Now + 1, end_at = Now + 1, enable = true},
        #emqx_trace{name = <<"stopped">>, start_at = Now, end_at = Now + 10, enable = false}
    ],
    ?assertEqual(1000, emqx_trace:find_closest_time(Mixed, Now)),
    ok.
%% Convert a system time in seconds to an RFC3339 timestamp binary.
to_rfc3339(Second) ->
    Rfc3339 = calendar:system_time_to_rfc3339(Second),
    list_to_binary(Rfc3339).
%% Start the emqx_trace gen_server used by the test cases.
load() ->
emqx_trace:start_link().
%% Stop the emqx_trace gen_server started by load/0.
unload() ->
gen_server:stop(emqx_trace).

View File

@ -1,6 +1,6 @@
{application, emqx_retainer, {application, emqx_retainer,
[{description, "EMQ X Retainer"}, [{description, "EMQ X Retainer"},
{vsn, "4.3.4"}, % strict semver, bump manually! {vsn, "4.4.2"}, % strict semver, bump manually!
{modules, []}, {modules, []},
{registered, [emqx_retainer_sup]}, {registered, [emqx_retainer_sup]},
{applications, [kernel,stdlib]}, {applications, [kernel,stdlib]},

View File

@ -1,21 +1,12 @@
%% -*- mode: erlang -*- %% -*- mode: erlang -*-
%% Unless you know what you are doing, DO NOT edit manually!! %% Unless you know what you are doing, DO NOT edit manually!!
{VSN, {VSN,
[{"4.3.3",[{load_module,emqx_retainer_sup,brutal_purge,soft_purge,[]}]}, [{"4.4.1",[{load_module,emqx_retainer_sup,brutal_purge,soft_purge,[]}]},
{"4.3.2", {"4.4.0",[{load_module,emqx_retainer_cli,brutal_purge,soft_purge,[]},
[{load_module,emqx_retainer_sup,brutal_purge,soft_purge,[]}, {load_module,emqx_retainer_sup,brutal_purge,soft_purge,[]}]},
{load_module,emqx_retainer_cli,brutal_purge,soft_purge,[]}]},
{<<"4\\.3\\.[0-1]">>,
[{load_module,emqx_retainer_sup,brutal_purge,soft_purge,[]},
{load_module,emqx_retainer_cli,brutal_purge,soft_purge,[]},
{load_module,emqx_retainer,brutal_purge,soft_purge,[]}]},
{<<".*">>,[]}], {<<".*">>,[]}],
[{"4.3.3",[{load_module,emqx_retainer_sup,brutal_purge,soft_purge,[]}]}, [{"4.4.1",[{load_module,emqx_retainer_sup,brutal_purge,soft_purge,[]}]},
{"4.3.2", {"4.4.0",[{load_module,emqx_retainer_sup,brutal_purge,soft_purge,[]},
[{load_module,emqx_retainer_sup,brutal_purge,soft_purge,[]}, {load_module,emqx_retainer_cli,brutal_purge,soft_purge,[]}]},
{load_module,emqx_retainer_cli,brutal_purge,soft_purge,[]}]}, {<<".*">>,[]}]
{<<"4\\.3\\.[0-1]">>, }.
[{load_module,emqx_retainer_sup,brutal_purge,soft_purge,[]},
{load_module,emqx_retainer_cli,brutal_purge,soft_purge,[]},
{load_module,emqx_retainer,brutal_purge,soft_purge,[]}]},
{<<".*">>,[]}]}.

View File

@ -78,7 +78,8 @@ dispatch(Pid, Topic) ->
false -> read_messages(Topic); false -> read_messages(Topic);
true -> match_messages(Topic) true -> match_messages(Topic)
end, end,
[Pid ! {deliver, Topic, Msg} || Msg <- sort_retained(Msgs)]. Now = erlang:system_time(millisecond),
[Pid ! {deliver, Topic, refresh_timestamp_expiry(Msg, Now)} || Msg <- sort_retained(Msgs)].
%% RETAIN flag set to 1 and payload containing zero bytes %% RETAIN flag set to 1 and payload containing zero bytes
on_message_publish(Msg = #message{flags = #{retain := true}, on_message_publish(Msg = #message{flags = #{retain := true},
@ -151,7 +152,7 @@ init([Env]) ->
ok ok
end, end,
StatsFun = emqx_stats:statsfun('retained.count', 'retained.max'), StatsFun = emqx_stats:statsfun('retained.count', 'retained.max'),
{ok, StatsTimer} = timer:send_interval(timer:seconds(1), stats), StatsTimer = erlang:send_after(timer:seconds(1), self(), stats),
State = #state{stats_fun = StatsFun, stats_timer = StatsTimer}, State = #state{stats_fun = StatsFun, stats_timer = StatsTimer},
{ok, start_expire_timer(proplists:get_value(expiry_interval, Env, 0), State)}. {ok, start_expire_timer(proplists:get_value(expiry_interval, Env, 0), State)}.
@ -160,7 +161,7 @@ start_expire_timer(0, State) ->
start_expire_timer(undefined, State) -> start_expire_timer(undefined, State) ->
State; State;
start_expire_timer(Ms, State) -> start_expire_timer(Ms, State) ->
{ok, Timer} = timer:send_interval(Ms, expire), Timer = erlang:send_after(Ms, self(), {expire, Ms}),
State#state{expiry_timer = Timer}. State#state{expiry_timer = Timer}.
handle_call(Req, _From, State) -> handle_call(Req, _From, State) ->
@ -172,12 +173,14 @@ handle_cast(Msg, State) ->
{noreply, State}. {noreply, State}.
handle_info(stats, State = #state{stats_fun = StatsFun}) -> handle_info(stats, State = #state{stats_fun = StatsFun}) ->
StatsTimer = erlang:send_after(timer:seconds(1), self(), stats),
StatsFun(retained_count()), StatsFun(retained_count()),
{noreply, State, hibernate}; {noreply, State#state{stats_timer = StatsTimer}, hibernate};
handle_info(expire, State) -> handle_info({expire, Ms} = Expire, State) ->
Timer = erlang:send_after(Ms, self(), Expire),
ok = expire_messages(), ok = expire_messages(),
{noreply, State, hibernate}; {noreply, State#state{expiry_timer = Timer}, hibernate};
handle_info(Info, State) -> handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]), ?LOG(error, "Unexpected info: ~p", [Info]),
@ -199,7 +202,7 @@ sort_retained([]) -> [];
sort_retained([Msg]) -> [Msg]; sort_retained([Msg]) -> [Msg];
sort_retained(Msgs) -> sort_retained(Msgs) ->
lists:sort(fun(#message{timestamp = Ts1}, #message{timestamp = Ts2}) -> lists:sort(fun(#message{timestamp = Ts1}, #message{timestamp = Ts2}) ->
Ts1 =< Ts2 Ts1 =< Ts2
end, Msgs). end, Msgs).
store_retained(Msg = #message{topic = Topic, payload = Payload}, Env) -> store_retained(Msg = #message{topic = Topic, payload = Payload}, Env) ->
@ -214,11 +217,13 @@ store_retained(Msg = #message{topic = Topic, payload = Payload}, Env) ->
fun() -> fun() ->
case mnesia:read(?TAB, Topic) of case mnesia:read(?TAB, Topic) of
[_] -> [_] ->
mnesia:write(?TAB, #retained{topic = topic2tokens(Topic), mnesia:write(?TAB,
msg = Msg, #retained{topic = topic2tokens(Topic),
expiry_time = get_expiry_time(Msg, Env)}, write); msg = Msg,
expiry_time = get_expiry_time(Msg, Env)}, write);
[] -> [] ->
?LOG(error, "Cannot retain message(topic=~s) for table is full!", [Topic]) ?LOG(error,
"Cannot retain message(topic=~s) for table is full!", [Topic])
end end
end), end),
ok; ok;
@ -242,7 +247,8 @@ is_too_big(Size, Env) ->
get_expiry_time(#message{headers = #{properties := #{'Message-Expiry-Interval' := 0}}}, _Env) -> get_expiry_time(#message{headers = #{properties := #{'Message-Expiry-Interval' := 0}}}, _Env) ->
0; 0;
get_expiry_time(#message{headers = #{properties := #{'Message-Expiry-Interval' := Interval}}, timestamp = Ts}, _Env) -> get_expiry_time(#message{headers = #{properties := #{'Message-Expiry-Interval' := Interval}},
timestamp = Ts}, _Env) ->
Ts + Interval * 1000; Ts + Interval * 1000;
get_expiry_time(#message{timestamp = Ts}, Env) -> get_expiry_time(#message{timestamp = Ts}, Env) ->
case proplists:get_value(expiry_interval, Env, 0) of case proplists:get_value(expiry_interval, Env, 0) of
@ -311,3 +317,18 @@ condition(Ws) ->
false -> Ws1; false -> Ws1;
_ -> (Ws1 -- ['#']) ++ '_' _ -> (Ws1 -- ['#']) ++ '_'
end. end.
-spec(refresh_timestamp_expiry(emqx_types:message(), pos_integer()) -> emqx_types:message()).
refresh_timestamp_expiry(Msg = #message{headers =
#{properties :=
#{'Message-Expiry-Interval' := Interval} = Props},
timestamp = CreatedAt},
Now) ->
Elapsed = max(0, Now - CreatedAt),
Interval1 = max(1, Interval - (Elapsed div 1000)),
emqx_message:set_header(properties,
Props#{'Message-Expiry-Interval' => Interval1},
Msg#message{timestamp = Now});
refresh_timestamp_expiry(Msg, Now) ->
Msg#message{timestamp = Now}.

View File

@ -1,6 +1,6 @@
{application, emqx_rule_engine, {application, emqx_rule_engine,
[{description, "EMQ X Rule Engine"}, [{description, "EMQ X Rule Engine"},
{vsn, "4.3.14"}, % strict semver, bump manually! {vsn, "4.4.8"}, % strict semver, bump manually!
{modules, []}, {modules, []},
{registered, [emqx_rule_engine_sup, emqx_rule_registry]}, {registered, [emqx_rule_engine_sup, emqx_rule_registry]},
{applications, [kernel,stdlib,rulesql,getopt]}, {applications, [kernel,stdlib,rulesql,getopt]},

View File

@ -1,48 +1,56 @@
%% -*- mode: erlang -*- %% -*- mode: erlang -*-
%% Unless you know what you are doing, DO NOT edit manually!! %% Unless you know what you are doing, DO NOT edit manually!!
{VSN, {VSN,
[{"4.3.13", [{<<"4\\.4\\.[6-7]">>,
[{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]}, [{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]}]},
{"4.4.5",
[{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]}]}, {load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{"4.3.12",
[{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]}]},
{"4.3.11",
[{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_validator,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}]}, {load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}]},
{"4.3.10", {"4.4.4",
[{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]}, [{load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_registry,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_engine,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]}]},
{"4.4.3",
[{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_events,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]},
{add_module,emqx_rule_date},
{load_module,emqx_rule_maps,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}]},
{"4.4.2",
[{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_maps,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_cli,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_sqltester,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_registry,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}]}, {load_module,emqx_rule_metrics,brutal_purge,soft_purge,[]},
{"4.3.9",
[{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_maps,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]},
{add_module,emqx_rule_date},
{load_module,emqx_rule_events,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_events,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_engine,brutal_purge,soft_purge,[]}]},
{add_module,emqx_rule_date}, {"4.4.1",
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]}]},
{"4.3.8",
[{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]}, [{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{add_module,emqx_rule_date},
{load_module,emqx_rule_maps,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_maps,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_cli,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_engine_cli,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_sqltester,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_sqltester,brutal_purge,soft_purge,[]},
@ -51,173 +59,76 @@
{load_module,emqx_rule_events,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_events,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_engine,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]}]},
{"4.3.7",
[{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{add_module,emqx_rule_date},
{load_module,emqx_rule_maps,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_cli,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_sqltester,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_metrics,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_events,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]},
{add_module,emqx_rule_date},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]}]}, {load_module,emqx_rule_registry,brutal_purge,soft_purge,[]}]},
{"4.3.6", {"4.4.0",
[{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]}, [{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{add_module,emqx_rule_date},
{load_module,emqx_rule_maps,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_cli,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_sqltester,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]},
{update,emqx_rule_metrics,{advanced,["4.3.6"]}},
{load_module,emqx_rule_events,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}]},
{"4.3.5",
[{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{add_module,emqx_rule_date},
{load_module,emqx_rule_maps,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_cli,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_sqltester,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{update,emqx_rule_metrics,{advanced,["4.3.5"]}},
{load_module,emqx_rule_events,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}]},
{"4.3.4",
[{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{add_module,emqx_rule_date},
{load_module,emqx_rule_maps,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_cli,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_sqltester,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{update,emqx_rule_metrics,{advanced,["4.3.4"]}},
{load_module,emqx_rule_events,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}]},
{"4.3.3",
[{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]},
{add_module,emqx_rule_date},
{load_module,emqx_rule_maps,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_cli,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_sqltester,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{update,emqx_rule_metrics,{advanced,["4.3.3"]}},
{load_module,emqx_rule_events,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}]},
{"4.3.2",
[{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]},
{add_module,emqx_rule_date},
{load_module,emqx_rule_maps,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_cli,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_sqltester,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{update,emqx_rule_metrics,{advanced,["4.3.2"]}},
{load_module,emqx_rule_events,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]},
{apply,{emqx_stats,cancel_update,[rule_registery_stats]}},
{load_module,emqx_rule_engine,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}]},
{"4.3.1",
[{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]},
{add_module,emqx_rule_date},
{load_module,emqx_rule_maps,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_cli,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_sqltester,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{update,emqx_rule_metrics,{advanced,["4.3.1"]}},
{load_module,emqx_rule_events,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]},
{apply,{emqx_stats,cancel_update,[rule_registery_stats]}},
{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}]},
{"4.3.0",
[{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]},
{add_module,emqx_rule_date},
{load_module,emqx_rule_maps,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_maps,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_cli,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_engine_cli,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_sqltester,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_sqltester,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{update,emqx_rule_metrics,{advanced,["4.3.0"]}},
{load_module,emqx_rule_events,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine,brutal_purge,soft_purge,[]}, {add_module,emqx_rule_date},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_registry,brutal_purge,soft_purge,[]},
{apply,{emqx_stats,cancel_update,[rule_registery_stats]}}, {update,emqx_rule_metrics,{advanced,["4.4.0"]}},
{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_events,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}]}, {load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}]},
{<<".*">>,[]}], {<<".*">>,[]}],
[{"4.3.13", [{<<"4\\.4\\.[6-7]">>,
[{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]}, [{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]}]},
{"4.4.5",
[{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]}]},
{"4.3.12",
[{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]}]},
{"4.3.11",
[{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_validator,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}]}, {load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}]},
{"4.3.10", {"4.4.4",
[{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]}, [{load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_engine,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]}]},
{"4.4.3",
[{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_events,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]},
{delete_module,emqx_rule_date},
{load_module,emqx_rule_maps,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_registry,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}]}, {load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}]},
{"4.3.9", {"4.4.2",
[{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]}, [{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_maps,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_maps,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_cli,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_sqltester,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_metrics,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]},
{delete_module,emqx_rule_date},
{load_module,emqx_rule_events,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_events,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_engine,brutal_purge,soft_purge,[]}]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]}, {"4.4.1",
{delete_module,emqx_rule_date}]},
{"4.3.8",
[{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]}, [{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_maps,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_maps,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_cli,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_engine_cli,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_sqltester,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_sqltester,brutal_purge,soft_purge,[]},
@ -226,130 +137,23 @@
{load_module,emqx_rule_events,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_events,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_engine,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]},
{delete_module,emqx_rule_date}]}, {delete_module,emqx_rule_date},
{"4.3.7", {load_module,emqx_rule_registry,brutal_purge,soft_purge,[]}]},
{"4.4.0",
[{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]}, [{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_maps,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_maps,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_cli,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_engine_cli,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_sqltester,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_sqltester,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_metrics,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_events,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]},
{delete_module,emqx_rule_date}]},
{"4.3.6",
[{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_maps,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_cli,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_sqltester,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_registry,brutal_purge,soft_purge,[]},
{update,emqx_rule_metrics,{advanced,["4.3.6"]}}, {update,emqx_rule_metrics,{advanced,["4.4.0"]}},
{load_module,emqx_rule_events,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_events,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_engine,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}, {load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]},
{delete_module,emqx_rule_date}]}, {delete_module,emqx_rule_date}]},
{"4.3.5",
[{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_maps,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_cli,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_sqltester,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{update,emqx_rule_metrics,{advanced,["4.3.5"]}},
{load_module,emqx_rule_events,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]},
{delete_module,emqx_rule_date}]},
{"4.3.4",
[{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_maps,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_cli,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_sqltester,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{update,emqx_rule_metrics,{advanced,["4.3.4"]}},
{load_module,emqx_rule_events,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]},
{delete_module,emqx_rule_date}]},
{"4.3.3",
[{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_maps,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_cli,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_sqltester,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{update,emqx_rule_metrics,{advanced,["4.3.3"]}},
{load_module,emqx_rule_events,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]},
{delete_module,emqx_rule_date}]},
{"4.3.2",
[{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_maps,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_cli,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_sqltester,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{update,emqx_rule_metrics,{advanced,["4.3.2"]}},
{load_module,emqx_rule_events,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]},
{apply,{emqx_stats,cancel_update,[rule_registery_stats]}},
{load_module,emqx_rule_engine,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]},
{delete_module,emqx_rule_date}]},
{"4.3.1",
[{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_maps,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_cli,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_sqltester,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{update,emqx_rule_metrics,{advanced,["4.3.1"]}},
{load_module,emqx_rule_events,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]},
{apply,{emqx_stats,cancel_update,[rule_registery_stats]}},
{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]},
{delete_module,emqx_rule_date}]},
{"4.3.0",
[{load_module,emqx_rule_validator,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_maps,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_cli,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_sqltester,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_utils,brutal_purge,soft_purge,[]},
{update,emqx_rule_metrics,{advanced,["4.3.0"]}},
{load_module,emqx_rule_events,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_registry,brutal_purge,soft_purge,[]},
{apply,{emqx_stats,cancel_update,[rule_registery_stats]}},
{load_module,emqx_rule_actions,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]},
{load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]},
{delete_module,emqx_rule_date}]},
{<<".*">>,[]}]}. {<<".*">>,[]}]}.

View File

@ -31,6 +31,7 @@
-export([ on_client_connected/3 -export([ on_client_connected/3
, on_client_disconnected/4 , on_client_disconnected/4
, on_client_connack/4
, on_session_subscribed/4 , on_session_subscribed/4
, on_session_unsubscribed/4 , on_session_unsubscribed/4
, on_message_publish/2 , on_message_publish/2
@ -38,6 +39,7 @@
, on_message_delivered/3 , on_message_delivered/3
, on_message_acked/3 , on_message_acked/3
, on_delivery_dropped/4 , on_delivery_dropped/4
, on_client_check_acl_complete/6
]). ]).
-export([ event_info/0 -export([ event_info/0
@ -48,6 +50,7 @@
-define(SUPPORTED_HOOK, -define(SUPPORTED_HOOK,
[ 'client.connected' [ 'client.connected'
, 'client.disconnected' , 'client.disconnected'
, 'client.connack'
, 'session.subscribed' , 'session.subscribed'
, 'session.unsubscribed' , 'session.unsubscribed'
, 'message.publish' , 'message.publish'
@ -55,6 +58,7 @@
, 'message.acked' , 'message.acked'
, 'message.dropped' , 'message.dropped'
, 'delivery.dropped' , 'delivery.dropped'
, 'client.check_acl_complete'
]). ]).
-ifdef(TEST). -ifdef(TEST).
@ -106,6 +110,18 @@ on_client_disconnected(ClientInfo, Reason, ConnInfo, Env) ->
may_publish_and_apply('client.disconnected', may_publish_and_apply('client.disconnected',
fun() -> eventmsg_disconnected(ClientInfo, ConnInfo, Reason) end, Env). fun() -> eventmsg_disconnected(ClientInfo, ConnInfo, Reason) end, Env).
on_client_connack(ConnInfo, Reason, _, Env) ->
may_publish_and_apply('client.connack',
fun() -> eventmsg_connack(ConnInfo, Reason) end, Env).
on_client_check_acl_complete(ClientInfo, PubSub, Topic, Result, IsCache, Env) ->
may_publish_and_apply('client.check_acl_complete',
fun() -> eventmsg_check_acl_complete(ClientInfo,
PubSub,
Topic,
Result,
IsCache) end, Env).
on_session_subscribed(ClientInfo, Topic, SubOpts, Env) -> on_session_subscribed(ClientInfo, Topic, SubOpts, Env) ->
may_publish_and_apply('session.subscribed', may_publish_and_apply('session.subscribed',
fun() -> eventmsg_sub_or_unsub('session.subscribed', ClientInfo, Topic, SubOpts) end, Env). fun() -> eventmsg_sub_or_unsub('session.subscribed', ClientInfo, Topic, SubOpts) end, Env).
@ -224,6 +240,46 @@ eventmsg_disconnected(_ClientInfo = #{
disconnected_at => DisconnectedAt disconnected_at => DisconnectedAt
}). }).
eventmsg_connack(ConnInfo = #{
clientid := ClientId,
clean_start := CleanStart,
username := Username,
peername := PeerName,
sockname := SockName,
proto_name := ProtoName,
proto_ver := ProtoVer
}, Reason) ->
Keepalive = maps:get(keepalive, ConnInfo, 0),
ConnProps = maps:get(conn_props, ConnInfo, #{}),
ExpiryInterval = maps:get(expiry_interval, ConnInfo, 0),
with_basic_columns('client.connack',
#{reason_code => reason(Reason),
clientid => ClientId,
clean_start => CleanStart,
username => Username,
peername => ntoa(PeerName),
sockname => ntoa(SockName),
proto_name => ProtoName,
proto_ver => ProtoVer,
keepalive => Keepalive,
expiry_interval => ExpiryInterval,
conn_props => printable_maps(ConnProps)
}).
eventmsg_check_acl_complete(_ClientInfo = #{
clientid := ClientId,
username := Username,
peerhost := PeerHost
}, PubSub, Topic, Result, IsCache) ->
with_basic_columns('client.check_acl_complete',
#{clientid => ClientId,
username => Username,
peerhost => ntoa(PeerHost),
topic => Topic,
action => PubSub,
is_cache => IsCache,
result => Result
}).
eventmsg_sub_or_unsub(Event, _ClientInfo = #{ eventmsg_sub_or_unsub(Event, _ClientInfo = #{
clientid := ClientId, clientid := ClientId,
username := Username, username := Username,
@ -376,8 +432,10 @@ event_info() ->
, event_info_delivery_dropped() , event_info_delivery_dropped()
, event_info_client_connected() , event_info_client_connected()
, event_info_client_disconnected() , event_info_client_disconnected()
, event_info_client_connack()
, event_info_session_subscribed() , event_info_session_subscribed()
, event_info_session_unsubscribed() , event_info_session_unsubscribed()
, event_info_client_check_acl_complete()
]. ].
event_info_message_publish() -> event_info_message_publish() ->
@ -431,6 +489,13 @@ event_info_client_disconnected() ->
{<<"client disconnected">>, <<"连接断开"/utf8>>}, {<<"client disconnected">>, <<"连接断开"/utf8>>},
<<"SELECT * FROM \"$events/client_disconnected\" WHERE topic =~ 't/#'">> <<"SELECT * FROM \"$events/client_disconnected\" WHERE topic =~ 't/#'">>
). ).
event_info_client_connack() ->
event_info_common(
'client.connack',
{<<"client connack">>, <<"连接确认"/utf8>>},
{<<"client connack">>, <<"连接确认"/utf8>>},
<<"SELECT * FROM \"$events/client_connack\"">>
).
event_info_session_subscribed() -> event_info_session_subscribed() ->
event_info_common( event_info_common(
'session.subscribed', 'session.subscribed',
@ -445,6 +510,13 @@ event_info_session_unsubscribed() ->
{<<"session unsubscribed">>, <<"会话取消订阅完成"/utf8>>}, {<<"session unsubscribed">>, <<"会话取消订阅完成"/utf8>>},
<<"SELECT * FROM \"$events/session_unsubscribed\" WHERE topic =~ 't/#'">> <<"SELECT * FROM \"$events/session_unsubscribed\" WHERE topic =~ 't/#'">>
). ).
event_info_client_check_acl_complete() ->
event_info_common(
'client.check_acl_complete',
{<<"client check acl complete">>, <<"鉴权结果"/utf8>>},
{<<"client check acl complete">>, <<"鉴权结果"/utf8>>},
<<"SELECT * FROM \"$events/client_check_acl_complete\"">>
).
event_info_common(Event, {TitleEN, TitleZH}, {DescrEN, DescrZH}, SqlExam) -> event_info_common(Event, {TitleEN, TitleZH}, {DescrEN, DescrZH}, SqlExam) ->
#{event => event_topic(Event), #{event => event_topic(Event),
@ -489,6 +561,11 @@ test_columns('client.disconnected') ->
, {<<"username">>, <<"u_emqx">>} , {<<"username">>, <<"u_emqx">>}
, {<<"reason">>, <<"normal">>} , {<<"reason">>, <<"normal">>}
]; ];
test_columns('client.connack') ->
[ {<<"clientid">>, <<"c_emqx">>}
, {<<"username">>, <<"u_emqx">>}
, {<<"reason_code">>, <<"sucess">>}
];
test_columns('session.unsubscribed') -> test_columns('session.unsubscribed') ->
test_columns('session.subscribed'); test_columns('session.subscribed');
test_columns('session.subscribed') -> test_columns('session.subscribed') ->
@ -496,6 +573,13 @@ test_columns('session.subscribed') ->
, {<<"username">>, <<"u_emqx">>} , {<<"username">>, <<"u_emqx">>}
, {<<"topic">>, <<"t/a">>} , {<<"topic">>, <<"t/a">>}
, {<<"qos">>, 1} , {<<"qos">>, 1}
];
test_columns('client.check_acl_complete') ->
[ {<<"clientid">>, <<"c_emqx">>}
, {<<"username">>, <<"u_emqx">>}
, {<<"topic">>, <<"t/1">>}
, {<<"action">>, <<"publish">>}
, {<<"result">>, <<"allow">>}
]. ].
columns_with_exam('message.publish') -> columns_with_exam('message.publish') ->
@ -508,8 +592,8 @@ columns_with_exam('message.publish') ->
, {<<"topic">>, <<"t/a">>} , {<<"topic">>, <<"t/a">>}
, {<<"qos">>, 1} , {<<"qos">>, 1}
, {<<"flags">>, #{}} , {<<"flags">>, #{}}
, {<<"headers">>, undefined}
, {<<"publish_received_at">>, erlang:system_time(millisecond)} , {<<"publish_received_at">>, erlang:system_time(millisecond)}
, columns_example_props(pub_props)
, {<<"timestamp">>, erlang:system_time(millisecond)} , {<<"timestamp">>, erlang:system_time(millisecond)}
, {<<"node">>, node()} , {<<"node">>, node()}
]; ];
@ -526,6 +610,7 @@ columns_with_exam('message.delivered') ->
, {<<"qos">>, 1} , {<<"qos">>, 1}
, {<<"flags">>, #{}} , {<<"flags">>, #{}}
, {<<"publish_received_at">>, erlang:system_time(millisecond)} , {<<"publish_received_at">>, erlang:system_time(millisecond)}
, columns_example_props(pub_props)
, {<<"timestamp">>, erlang:system_time(millisecond)} , {<<"timestamp">>, erlang:system_time(millisecond)}
, {<<"node">>, node()} , {<<"node">>, node()}
]; ];
@ -542,6 +627,8 @@ columns_with_exam('message.acked') ->
, {<<"qos">>, 1} , {<<"qos">>, 1}
, {<<"flags">>, #{}} , {<<"flags">>, #{}}
, {<<"publish_received_at">>, erlang:system_time(millisecond)} , {<<"publish_received_at">>, erlang:system_time(millisecond)}
, columns_example_props(pub_props)
, columns_example_props(puback_props)
, {<<"timestamp">>, erlang:system_time(millisecond)} , {<<"timestamp">>, erlang:system_time(millisecond)}
, {<<"node">>, node()} , {<<"node">>, node()}
]; ];
@ -557,6 +644,7 @@ columns_with_exam('message.dropped') ->
, {<<"qos">>, 1} , {<<"qos">>, 1}
, {<<"flags">>, #{}} , {<<"flags">>, #{}}
, {<<"publish_received_at">>, erlang:system_time(millisecond)} , {<<"publish_received_at">>, erlang:system_time(millisecond)}
, columns_example_props(pub_props)
, {<<"timestamp">>, erlang:system_time(millisecond)} , {<<"timestamp">>, erlang:system_time(millisecond)}
, {<<"node">>, node()} , {<<"node">>, node()}
]; ];
@ -591,6 +679,7 @@ columns_with_exam('client.connected') ->
, {<<"expiry_interval">>, 3600} , {<<"expiry_interval">>, 3600}
, {<<"is_bridge">>, false} , {<<"is_bridge">>, false}
, {<<"connected_at">>, erlang:system_time(millisecond)} , {<<"connected_at">>, erlang:system_time(millisecond)}
, columns_example_props(conn_props)
, {<<"timestamp">>, erlang:system_time(millisecond)} , {<<"timestamp">>, erlang:system_time(millisecond)}
, {<<"node">>, node()} , {<<"node">>, node()}
]; ];
@ -604,6 +693,24 @@ columns_with_exam('client.disconnected') ->
, {<<"proto_name">>, <<"MQTT">>} , {<<"proto_name">>, <<"MQTT">>}
, {<<"proto_ver">>, 5} , {<<"proto_ver">>, 5}
, {<<"disconnected_at">>, erlang:system_time(millisecond)} , {<<"disconnected_at">>, erlang:system_time(millisecond)}
, columns_example_props(disconn_props)
, {<<"timestamp">>, erlang:system_time(millisecond)}
, {<<"node">>, node()}
];
columns_with_exam('client.connack') ->
[ {<<"event">>, 'client.connected'}
, {<<"reason_code">>, success}
, {<<"clientid">>, <<"c_emqx">>}
, {<<"username">>, <<"u_emqx">>}
, {<<"peername">>, <<"192.168.0.10:56431">>}
, {<<"sockname">>, <<"0.0.0.0:1883">>}
, {<<"proto_name">>, <<"MQTT">>}
, {<<"proto_ver">>, 5}
, {<<"keepalive">>, 60}
, {<<"clean_start">>, true}
, {<<"expiry_interval">>, 3600}
, {<<"connected_at">>, erlang:system_time(millisecond)}
, columns_example_props(conn_props)
, {<<"timestamp">>, erlang:system_time(millisecond)} , {<<"timestamp">>, erlang:system_time(millisecond)}
, {<<"node">>, node()} , {<<"node">>, node()}
]; ];
@ -614,6 +721,7 @@ columns_with_exam('session.subscribed') ->
, {<<"peerhost">>, <<"192.168.0.10">>} , {<<"peerhost">>, <<"192.168.0.10">>}
, {<<"topic">>, <<"t/a">>} , {<<"topic">>, <<"t/a">>}
, {<<"qos">>, 1} , {<<"qos">>, 1}
, columns_example_props(sub_props)
, {<<"timestamp">>, erlang:system_time(millisecond)} , {<<"timestamp">>, erlang:system_time(millisecond)}
, {<<"node">>, node()} , {<<"node">>, node()}
]; ];
@ -624,10 +732,54 @@ columns_with_exam('session.unsubscribed') ->
, {<<"peerhost">>, <<"192.168.0.10">>} , {<<"peerhost">>, <<"192.168.0.10">>}
, {<<"topic">>, <<"t/a">>} , {<<"topic">>, <<"t/a">>}
, {<<"qos">>, 1} , {<<"qos">>, 1}
, columns_example_props(unsub_props)
, {<<"timestamp">>, erlang:system_time(millisecond)}
, {<<"node">>, node()}
];
columns_with_exam('client.check_acl_complete') ->
[ {<<"event">>, 'client.check_acl_complete'}
, {<<"clientid">>, <<"c_emqx">>}
, {<<"username">>, <<"u_emqx">>}
, {<<"peerhost">>, <<"192.168.0.10">>}
, {<<"topic">>, <<"t/a">>}
, {<<"action">>, <<"publish">>}
, {<<"is_cache">>, <<"false">>}
, {<<"result">>, <<"allow">>}
, {<<"timestamp">>, erlang:system_time(millisecond)} , {<<"timestamp">>, erlang:system_time(millisecond)}
, {<<"node">>, node()} , {<<"node">>, node()}
]. ].
columns_example_props(PropType) ->
Props = columns_example_props_specific(PropType),
UserProps = #{
'User-Property' => #{<<"foo">> => <<"bar">>},
'User-Property-Pairs' => [
#{key => <<"foo">>}, #{value => <<"bar">>}
]
},
{PropType, maps:merge(Props, UserProps)}.
columns_example_props_specific(pub_props) ->
#{ 'Payload-Format-Indicator' => 0
, 'Message-Expiry-Interval' => 30
};
columns_example_props_specific(puback_props) ->
#{ 'Reason-String' => <<"OK">>
};
columns_example_props_specific(conn_props) ->
#{ 'Session-Expiry-Interval' => 7200
, 'Receive-Maximum' => 32
};
columns_example_props_specific(disconn_props) ->
#{ 'Session-Expiry-Interval' => 7200
, 'Reason-String' => <<"Redirect to another server">>
, 'Server Reference' => <<"192.168.22.129">>
};
columns_example_props_specific(sub_props) ->
#{};
columns_example_props_specific(unsub_props) ->
#{}.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Helper functions %% Helper functions
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -661,6 +813,7 @@ ntoa(IpAddr) ->
event_name(<<"$events/client_connected", _/binary>>) -> 'client.connected'; event_name(<<"$events/client_connected", _/binary>>) -> 'client.connected';
event_name(<<"$events/client_disconnected", _/binary>>) -> 'client.disconnected'; event_name(<<"$events/client_disconnected", _/binary>>) -> 'client.disconnected';
event_name(<<"$events/client_connack", _/binary>>) -> 'client.connack';
event_name(<<"$events/session_subscribed", _/binary>>) -> 'session.subscribed'; event_name(<<"$events/session_subscribed", _/binary>>) -> 'session.subscribed';
event_name(<<"$events/session_unsubscribed", _/binary>>) -> event_name(<<"$events/session_unsubscribed", _/binary>>) ->
'session.unsubscribed'; 'session.unsubscribed';
@ -668,17 +821,20 @@ event_name(<<"$events/message_delivered", _/binary>>) -> 'message.delivered';
event_name(<<"$events/message_acked", _/binary>>) -> 'message.acked'; event_name(<<"$events/message_acked", _/binary>>) -> 'message.acked';
event_name(<<"$events/message_dropped", _/binary>>) -> 'message.dropped'; event_name(<<"$events/message_dropped", _/binary>>) -> 'message.dropped';
event_name(<<"$events/delivery_dropped", _/binary>>) -> 'delivery.dropped'; event_name(<<"$events/delivery_dropped", _/binary>>) -> 'delivery.dropped';
event_name(<<"$events/client_check_acl_complete", _/binary>>) -> 'client.check_acl_complete';
event_name(_) -> 'message.publish'. event_name(_) -> 'message.publish'.
event_topic('client.connected') -> <<"$events/client_connected">>; event_topic('client.connected') -> <<"$events/client_connected">>;
event_topic('client.disconnected') -> <<"$events/client_disconnected">>; event_topic('client.disconnected') -> <<"$events/client_disconnected">>;
event_topic('client.connack') -> <<"$events/client_connack">>;
event_topic('session.subscribed') -> <<"$events/session_subscribed">>; event_topic('session.subscribed') -> <<"$events/session_subscribed">>;
event_topic('session.unsubscribed') -> <<"$events/session_unsubscribed">>; event_topic('session.unsubscribed') -> <<"$events/session_unsubscribed">>;
event_topic('message.delivered') -> <<"$events/message_delivered">>; event_topic('message.delivered') -> <<"$events/message_delivered">>;
event_topic('message.acked') -> <<"$events/message_acked">>; event_topic('message.acked') -> <<"$events/message_acked">>;
event_topic('message.dropped') -> <<"$events/message_dropped">>; event_topic('message.dropped') -> <<"$events/message_dropped">>;
event_topic('delivery.dropped') -> <<"$events/delivery_dropped">>; event_topic('delivery.dropped') -> <<"$events/delivery_dropped">>;
event_topic('message.publish') -> <<"$events/message_publish">>. event_topic('message.publish') -> <<"$events/message_publish">>;
event_topic('client.check_acl_complete') -> <<"$events/client_check_acl_complete">>.
printable_maps(undefined) -> #{}; printable_maps(undefined) -> #{};
printable_maps(Headers) -> printable_maps(Headers) ->
@ -687,6 +843,10 @@ printable_maps(Headers) ->
AccIn#{K => ntoa(V0)}; AccIn#{K => ntoa(V0)};
('User-Property', V0, AccIn) when is_list(V0) -> ('User-Property', V0, AccIn) when is_list(V0) ->
AccIn#{ AccIn#{
%% The 'User-Property' field is for the convenience of querying properties
%% using the '.' syntax, e.g. "SELECT 'User-Property'.foo as foo"
%% However, this does not allow duplicate property keys. To allow
%% duplicate keys, we have to use the 'User-Property-Pairs' field instead.
'User-Property' => maps:from_list(V0), 'User-Property' => maps:from_list(V0),
'User-Property-Pairs' => [#{ 'User-Property-Pairs' => [#{
key => Key, key => Key,

View File

@ -17,6 +17,9 @@
-module(emqx_rule_funcs). -module(emqx_rule_funcs).
-include("rule_engine.hrl"). -include("rule_engine.hrl").
-elvis([{elvis_style, god_modules, disable}]).
-elvis([{elvis_style, function_naming_convention, disable}]).
-elvis([{elvis_style, macro_names, disable}]).
%% IoT Funcs %% IoT Funcs
-export([ msgid/0 -export([ msgid/0
@ -462,7 +465,8 @@ subbits(Bits, Len) when is_integer(Len), is_bitstring(Bits) ->
subbits(Bits, Start, Len) when is_integer(Start), is_integer(Len), is_bitstring(Bits) -> subbits(Bits, Start, Len) when is_integer(Start), is_integer(Len), is_bitstring(Bits) ->
get_subbits(Bits, Start, Len, <<"integer">>, <<"unsigned">>, <<"big">>). get_subbits(Bits, Start, Len, <<"integer">>, <<"unsigned">>, <<"big">>).
subbits(Bits, Start, Len, Type, Signedness, Endianness) when is_integer(Start), is_integer(Len), is_bitstring(Bits) -> subbits(Bits, Start, Len, Type, Signedness, Endianness)
when is_integer(Start), is_integer(Len), is_bitstring(Bits) ->
get_subbits(Bits, Start, Len, Type, Signedness, Endianness). get_subbits(Bits, Start, Len, Type, Signedness, Endianness).
get_subbits(Bits, Start, Len, Type, Signedness, Endianness) -> get_subbits(Bits, Start, Len, Type, Signedness, Endianness) ->
@ -547,7 +551,7 @@ map(Data) ->
emqx_rule_utils:map(Data). emqx_rule_utils:map(Data).
bin2hexstr(Bin) when is_binary(Bin) -> bin2hexstr(Bin) when is_binary(Bin) ->
emqx_misc:bin2hexstr_A_F(Bin). emqx_misc:bin2hexstr_a_f_upper(Bin).
hexstr2bin(Str) when is_binary(Str) -> hexstr2bin(Str) when is_binary(Str) ->
emqx_misc:hexstr2bin(Str). emqx_misc:hexstr2bin(Str).
@ -635,7 +639,8 @@ tokens(S, Separators) ->
[list_to_binary(R) || R <- string:lexemes(binary_to_list(S), binary_to_list(Separators))]. [list_to_binary(R) || R <- string:lexemes(binary_to_list(S), binary_to_list(Separators))].
tokens(S, Separators, <<"nocrlf">>) -> tokens(S, Separators, <<"nocrlf">>) ->
[list_to_binary(R) || R <- string:lexemes(binary_to_list(S), binary_to_list(Separators) ++ [$\r,$\n,[$\r,$\n]])]. [list_to_binary(R) || R <- string:lexemes(binary_to_list(S),
binary_to_list(Separators) ++ [$\r,$\n,[$\r,$\n]])].
concat(S1, S2) when is_binary(S1), is_binary(S2) -> concat(S1, S2) when is_binary(S1), is_binary(S2) ->
unicode:characters_to_binary([S1, S2], unicode). unicode:characters_to_binary([S1, S2], unicode).
@ -673,7 +678,8 @@ replace(SrcStr, P, RepStr) when is_binary(SrcStr), is_binary(P), is_binary(RepSt
replace(SrcStr, P, RepStr, <<"all">>) when is_binary(SrcStr), is_binary(P), is_binary(RepStr) -> replace(SrcStr, P, RepStr, <<"all">>) when is_binary(SrcStr), is_binary(P), is_binary(RepStr) ->
iolist_to_binary(string:replace(SrcStr, P, RepStr, all)); iolist_to_binary(string:replace(SrcStr, P, RepStr, all));
replace(SrcStr, P, RepStr, <<"trailing">>) when is_binary(SrcStr), is_binary(P), is_binary(RepStr) -> replace(SrcStr, P, RepStr, <<"trailing">>)
when is_binary(SrcStr), is_binary(P), is_binary(RepStr) ->
iolist_to_binary(string:replace(SrcStr, P, RepStr, trailing)); iolist_to_binary(string:replace(SrcStr, P, RepStr, trailing));
replace(SrcStr, P, RepStr, <<"leading">>) when is_binary(SrcStr), is_binary(P), is_binary(RepStr) -> replace(SrcStr, P, RepStr, <<"leading">>) when is_binary(SrcStr), is_binary(P), is_binary(RepStr) ->
@ -689,7 +695,7 @@ regex_replace(SrcStr, RE, RepStr) ->
re:replace(SrcStr, RE, RepStr, [global, {return,binary}]). re:replace(SrcStr, RE, RepStr, [global, {return,binary}]).
ascii(Char) when is_binary(Char) -> ascii(Char) when is_binary(Char) ->
[FirstC| _] = binary_to_list(Char), [FirstC | _] = binary_to_list(Char),
FirstC. FirstC.
find(S, P) when is_binary(S), is_binary(P) -> find(S, P) when is_binary(S), is_binary(P) ->
@ -809,7 +815,7 @@ sha256(S) when is_binary(S) ->
hash(sha256, S). hash(sha256, S).
hash(Type, Data) -> hash(Type, Data) ->
emqx_misc:bin2hexstr_a_f(crypto:hash(Type, Data)). emqx_misc:bin2hexstr_a_f_lower(crypto:hash(Type, Data)).
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
%% gzip Funcs %% gzip Funcs
@ -980,23 +986,23 @@ convert_timestamp(MillisecondsTimestamp) ->
%% the function handling to the worker module. %% the function handling to the worker module.
%% @end %% @end
-ifdef(EMQX_ENTERPRISE). -ifdef(EMQX_ENTERPRISE).
'$handle_undefined_function'(schema_decode, [SchemaId, Data|MoreArgs]) -> '$handle_undefined_function'(schema_decode, [SchemaId, Data | MoreArgs]) ->
emqx_schema_parser:decode(SchemaId, Data, MoreArgs); emqx_schema_parser:decode(SchemaId, Data, MoreArgs);
'$handle_undefined_function'(schema_decode, Args) -> '$handle_undefined_function'(schema_decode, Args) ->
error({args_count_error, {schema_decode, Args}}); error({args_count_error, {schema_decode, Args}});
'$handle_undefined_function'(schema_encode, [SchemaId, Term|MoreArgs]) -> '$handle_undefined_function'(schema_encode, [SchemaId, Term | MoreArgs]) ->
emqx_schema_parser:encode(SchemaId, Term, MoreArgs); emqx_schema_parser:encode(SchemaId, Term, MoreArgs);
'$handle_undefined_function'(schema_encode, Args) -> '$handle_undefined_function'(schema_encode, Args) ->
error({args_count_error, {schema_encode, Args}}); error({args_count_error, {schema_encode, Args}});
'$handle_undefined_function'(sprintf, [Format|Args]) -> '$handle_undefined_function'(sprintf, [Format | Args]) ->
erlang:apply(fun sprintf_s/2, [Format, Args]); erlang:apply(fun sprintf_s/2, [Format, Args]);
'$handle_undefined_function'(Fun, Args) -> '$handle_undefined_function'(Fun, Args) ->
error({sql_function_not_supported, function_literal(Fun, Args)}). error({sql_function_not_supported, function_literal(Fun, Args)}).
-else. -else.
'$handle_undefined_function'(sprintf, [Format|Args]) -> '$handle_undefined_function'(sprintf, [Format | Args]) ->
erlang:apply(fun sprintf_s/2, [Format, Args]); erlang:apply(fun sprintf_s/2, [Format, Args]);
'$handle_undefined_function'(Fun, Args) -> '$handle_undefined_function'(Fun, Args) ->

View File

@ -79,6 +79,8 @@
, terminate/2 , terminate/2
]). ]).
-elvis([{elvis_style, god_modules, disable}]).
-ifndef(TEST). -ifndef(TEST).
-define(SECS_5M, 300). -define(SECS_5M, 300).
-define(SAMPLING, 10). -define(SAMPLING, 10).
@ -368,10 +370,10 @@ handle_info(_Info, State) ->
{noreply, State}. {noreply, State}.
code_change({down, _Vsn}, State = #state{metric_ids = MIDs}, [Vsn]) -> code_change({down, _Vsn}, State = #state{metric_ids = MIDs}, [Vsn]) ->
case string:tokens(Vsn, ".") of case string:tokens(Vsn, ".") of
["4", "3", SVal] -> ["4", "4", SVal] ->
{Val, []} = string:to_integer(SVal), {Val, []} = string:to_integer(SVal),
case Val =< 6 of case Val == 0 of
true -> true ->
[begin [begin
Passed = get_rules_passed(Id), Passed = get_rules_passed(Id),
@ -381,7 +383,7 @@ code_change({down, _Vsn}, State = #state{metric_ids = MIDs}, [Vsn]) ->
Exception = get_actions_exception(Id), Exception = get_actions_exception(Id),
Retry = get_actions_retry(Id), Retry = get_actions_retry(Id),
ok = delete_counters(Id), ok = delete_counters(Id),
ok = create_counters(Id, 7), ok = create_counters(Id, max_counters_size_old()),
inc_rules_matched(Id, Passed), inc_rules_matched(Id, Passed),
inc_actions_taken(Id, Take), inc_actions_taken(Id, Take),
inc_actions_success(Id, Success), inc_actions_success(Id, Success),
@ -397,9 +399,9 @@ code_change({down, _Vsn}, State = #state{metric_ids = MIDs}, [Vsn]) ->
code_change(_Vsn, State = #state{metric_ids = MIDs}, [Vsn]) -> code_change(_Vsn, State = #state{metric_ids = MIDs}, [Vsn]) ->
case string:tokens(Vsn, ".") of case string:tokens(Vsn, ".") of
["4", "3", SVal] -> ["4", "4", SVal] ->
{Val, []} = string:to_integer(SVal), {Val, []} = string:to_integer(SVal),
case Val =< 6 of case Val == 0 of
true -> true ->
[begin [begin
Matched = get_rules_matched(Id), Matched = get_rules_matched(Id),
@ -471,17 +473,19 @@ calculate_speed(CurrVal, #rule_speed{max = MaxSpeed0, last_v = LastVal,
%% calculate the max speed since the emqx startup %% calculate the max speed since the emqx startup
MaxSpeed = MaxSpeed =
if MaxSpeed0 >= CurrSpeed -> MaxSpeed0; case MaxSpeed0 >= CurrSpeed of
true -> CurrSpeed true -> MaxSpeed0;
false -> CurrSpeed
end, end,
%% calculate the average speed in last 5 mins %% calculate the average speed in last 5 mins
{Last5MinSamples, Acc5Min, Last5Min} = {Last5MinSamples, Acc5Min, Last5Min} =
if Tick =< ?SAMPCOUNT_5M -> case Tick =< ?SAMPCOUNT_5M of
true ->
Acc = AccSpeed5Min0 + CurrSpeed, Acc = AccSpeed5Min0 + CurrSpeed,
{lists:reverse([CurrSpeed | lists:reverse(Last5MinSamples0)]), {lists:reverse([CurrSpeed | lists:reverse(Last5MinSamples0)]),
Acc, Acc / Tick}; Acc, Acc / Tick};
true -> false ->
[FirstSpeed | Speeds] = Last5MinSamples0, [FirstSpeed | Speeds] = Last5MinSamples0,
Acc = AccSpeed5Min0 + CurrSpeed - FirstSpeed, Acc = AccSpeed5Min0 + CurrSpeed - FirstSpeed,
{lists:reverse([CurrSpeed | lists:reverse(Speeds)]), {lists:reverse([CurrSpeed | lists:reverse(Speeds)]),
@ -493,7 +497,7 @@ calculate_speed(CurrVal, #rule_speed{max = MaxSpeed0, last_v = LastVal,
last5m_smpl = Last5MinSamples, tick = Tick + 1}. last5m_smpl = Last5MinSamples, tick = Tick + 1}.
format_rule_speed(#rule_speed{max = Max, current = Current, last5m = Last5Min}) -> format_rule_speed(#rule_speed{max = Max, current = Current, last5m = Last5Min}) ->
#{max => Max, current => precision(Current, 2), last5m => precision(Last5Min, 2)}. #{max => precision(Max, 2), current => precision(Current, 2), last5m => precision(Last5Min, 2)}.
precision(Float, N) -> precision(Float, N) ->
Base = math:pow(10, N), Base = math:pow(10, N),
@ -503,6 +507,9 @@ precision(Float, N) ->
%% Metrics Definitions %% Metrics Definitions
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
%% for code hot upgrade
max_counters_size_old() -> 7.
max_counters_size() -> 11. max_counters_size() -> 11.
metrics_idx('rules.matched') -> 1; metrics_idx('rules.matched') -> 1;

View File

@ -98,21 +98,8 @@ sql_test_action() ->
fill_default_values(Event, Context) -> fill_default_values(Event, Context) ->
maps:merge(envs_examp(Event), Context). maps:merge(envs_examp(Event), Context).
envs_examp(<<"$events/", _/binary>> = EVENT_TOPIC) -> envs_examp(EVENT_TOPIC) ->
EventName = emqx_rule_events:event_name(EVENT_TOPIC), EventName = emqx_rule_events:event_name(EVENT_TOPIC),
emqx_rule_maps:atom_key_map( emqx_rule_maps:atom_key_map(
maps:from_list( maps:from_list(
emqx_rule_events:columns_with_exam(EventName))); emqx_rule_events:columns_with_exam(EventName))).
envs_examp(_) ->
#{id => emqx_guid:to_hexstr(emqx_guid:gen()),
clientid => <<"c_emqx">>,
username => <<"u_emqx">>,
payload => <<"{\"id\": 1, \"name\": \"ha\"}">>,
peerhost => <<"127.0.0.1">>,
topic => <<"t/a">>,
qos => 1,
flags => #{sys => true, event => true},
publish_received_at => emqx_rule_utils:now_ms(),
timestamp => emqx_rule_utils:now_ms(),
node => node()
}.

View File

@ -205,6 +205,8 @@ init_per_testcase(t_events, Config) ->
description = #{en => <<"Hook metrics action">>}}), description = #{en => <<"Hook metrics action">>}}),
SQL = "SELECT * FROM \"$events/client_connected\", " SQL = "SELECT * FROM \"$events/client_connected\", "
"\"$events/client_disconnected\", " "\"$events/client_disconnected\", "
"\"$events/client_connack\", "
"\"$events/client_check_acl_complete\", "
"\"$events/session_subscribed\", " "\"$events/session_subscribed\", "
"\"$events/session_unsubscribed\", " "\"$events/session_unsubscribed\", "
"\"$events/message_acked\", " "\"$events/message_acked\", "
@ -1164,9 +1166,10 @@ t_events(_Config) ->
, {proto_ver, v5} , {proto_ver, v5}
, {properties, #{'Session-Expiry-Interval' => 60}} , {properties, #{'Session-Expiry-Interval' => 60}}
]), ]),
ct:pal("====== verify $events/client_connected"),
ct:pal("====== verify $events/client_connected, $events/client_connack"),
client_connected(Client, Client2), client_connected(Client, Client2),
ct:pal("====== verify $events/session_subscribed"), ct:pal("====== verify $events/session_subscribed, $events/client_check_acl_complete"),
session_subscribed(Client2), session_subscribed(Client2),
ct:pal("====== verify t1"), ct:pal("====== verify t1"),
message_publish(Client), message_publish(Client),
@ -1180,6 +1183,8 @@ t_events(_Config) ->
message_dropped(Client), message_dropped(Client),
ct:pal("====== verify $events/client_disconnected"), ct:pal("====== verify $events/client_disconnected"),
client_disconnected(Client, Client2), client_disconnected(Client, Client2),
ct:pal("====== verify $events/client_connack"),
client_connack_failed(),
ok. ok.
message_publish(Client) -> message_publish(Client) ->
@ -1187,11 +1192,33 @@ message_publish(Client) ->
<<"{\"id\": 1, \"name\": \"ha\"}">>, [{qos, 1}]), <<"{\"id\": 1, \"name\": \"ha\"}">>, [{qos, 1}]),
verify_event('message.publish'), verify_event('message.publish'),
ok. ok.
client_connected(Client, Client2) -> client_connected(Client, Client2) ->
{ok, _} = emqtt:connect(Client), {ok, _} = emqtt:connect(Client),
{ok, _} = emqtt:connect(Client2), {ok, _} = emqtt:connect(Client2),
verify_event('client.connack'),
verify_event('client.connected'), verify_event('client.connected'),
ok. ok.
client_connack_failed() ->
{ok, Client} = emqtt:start_link(
[ {username, <<"u_event3">>}
, {clientid, <<"c_event3">>}
, {proto_ver, v5}
, {properties, #{'Session-Expiry-Interval' => 60}}
]),
try
meck:new(emqx_access_control, [non_strict, passthrough]),
meck:expect(emqx_access_control, authenticate,
fun(_) -> {error, bad_username_or_password} end),
process_flag(trap_exit, true),
?assertMatch({error, _}, emqtt:connect(Client)),
timer:sleep(300),
verify_event('client.connack')
after
meck:unload(emqx_access_control)
end,
ok.
client_disconnected(Client, Client2) -> client_disconnected(Client, Client2) ->
ok = emqtt:disconnect(Client, 0, #{'User-Property' => {<<"reason">>, <<"normal">>}}), ok = emqtt:disconnect(Client, 0, #{'User-Property' => {<<"reason">>, <<"normal">>}}),
ok = emqtt:disconnect(Client2, 0, #{'User-Property' => {<<"reason">>, <<"normal">>}}), ok = emqtt:disconnect(Client2, 0, #{'User-Property' => {<<"reason">>, <<"normal">>}}),
@ -1204,6 +1231,7 @@ session_subscribed(Client2) ->
, 1 , 1
), ),
verify_event('session.subscribed'), verify_event('session.subscribed'),
verify_event('client.check_acl_complete'),
ok. ok.
session_unsubscribed(Client2) -> session_unsubscribed(Client2) ->
{ok, _, _} = emqtt:unsubscribe( Client2 {ok, _, _} = emqtt:unsubscribe( Client2
@ -3048,6 +3076,35 @@ verify_event_fields('client.disconnected', Fields) ->
?assert(0 =< RcvdAtElapse andalso RcvdAtElapse =< 60*1000), ?assert(0 =< RcvdAtElapse andalso RcvdAtElapse =< 60*1000),
?assert(EventAt =< Timestamp); ?assert(EventAt =< Timestamp);
verify_event_fields('client.connack', Fields) ->
#{clientid := ClientId,
clean_start := CleanStart,
username := Username,
peername := PeerName,
sockname := SockName,
proto_name := ProtoName,
proto_ver := ProtoVer,
keepalive := Keepalive,
expiry_interval := ExpiryInterval,
conn_props := Properties,
reason_code := Reason,
timestamp := Timestamp
} = Fields,
Now = erlang:system_time(millisecond),
TimestampElapse = Now - Timestamp,
?assert(lists:member(Reason, [success, bad_username_or_password])),
?assert(lists:member(ClientId, [<<"c_event">>, <<"c_event2">>, <<"c_event3">>])),
?assert(lists:member(Username, [<<"u_event">>, <<"u_event2">>, <<"u_event3">>])),
verify_peername(PeerName),
verify_peername(SockName),
?assertEqual(<<"MQTT">>, ProtoName),
?assertEqual(5, ProtoVer),
?assert(is_integer(Keepalive)),
?assert(is_boolean(CleanStart)),
?assertEqual(60, ExpiryInterval),
?assertMatch(#{'Session-Expiry-Interval' := 60}, Properties),
?assert(0 =< TimestampElapse andalso TimestampElapse =< 60*1000);
verify_event_fields(SubUnsub, Fields) when SubUnsub == 'session.subscribed' verify_event_fields(SubUnsub, Fields) when SubUnsub == 'session.subscribed'
; SubUnsub == 'session.unsubscribed' -> ; SubUnsub == 'session.unsubscribed' ->
#{clientid := ClientId, #{clientid := ClientId,
@ -3171,7 +3228,22 @@ verify_event_fields('message.acked', Fields) ->
?assert(is_map(PubAckProps)), ?assert(is_map(PubAckProps)),
?assert(0 =< TimestampElapse andalso TimestampElapse =< 60*1000), ?assert(0 =< TimestampElapse andalso TimestampElapse =< 60*1000),
?assert(0 =< RcvdAtElapse andalso RcvdAtElapse =< 60*1000), ?assert(0 =< RcvdAtElapse andalso RcvdAtElapse =< 60*1000),
?assert(EventAt =< Timestamp). ?assert(EventAt =< Timestamp);
verify_event_fields('client.check_acl_complete', Fields) ->
#{clientid := ClientId,
action := Action,
result := Result,
topic := Topic,
is_cache := IsCache,
username := Username
} = Fields,
?assertEqual(<<"t1">>, Topic),
?assert(lists:member(Action, [subscribe, publish])),
?assert(lists:member(Result, [allow, deny])),
?assert(lists:member(IsCache, [true, false])),
?assert(lists:member(ClientId, [<<"c_event">>, <<"c_event2">>])),
?assert(lists:member(Username, [<<"u_event">>, <<"u_event2">>])).
verify_peername(PeerName) -> verify_peername(PeerName) ->
case string:split(PeerName, ":") of case string:split(PeerName, ":") of

View File

@ -50,6 +50,9 @@
%% erlang:system_time should be unique and random enough %% erlang:system_time should be unique and random enough
-define(CLIENTID, iolist_to_binary([atom_to_list(?FUNCTION_NAME), "-", -define(CLIENTID, iolist_to_binary([atom_to_list(?FUNCTION_NAME), "-",
integer_to_list(erlang:system_time())])). integer_to_list(erlang:system_time())])).
-elvis([{elvis_style, dont_repeat_yourself, disable}]).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Setups %% Setups
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -66,8 +69,10 @@ end_per_suite(_) ->
emqx_ct_helpers:stop_apps([emqx_sn]). emqx_ct_helpers:stop_apps([emqx_sn]).
set_special_confs(emqx) -> set_special_confs(emqx) ->
application:set_env(emqx, plugins_loaded_file, application:set_env(
emqx_ct_helpers:deps_path(emqx, "test/emqx_SUITE_data/loaded_plugins")); emqx,
plugins_loaded_file,
emqx_ct_helpers:deps_path(emqx, "test/emqx_SUITE_data/loaded_plugins"));
set_special_confs(emqx_sn) -> set_special_confs(emqx_sn) ->
application:set_env(emqx_sn, enable_qos3, ?ENABLE_QOS3), application:set_env(emqx_sn, enable_qos3, ?ENABLE_QOS3),
application:set_env(emqx_sn, enable_stats, true), application:set_env(emqx_sn, enable_stats, true),
@ -119,7 +124,8 @@ t_subscribe(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
TopicName1 = <<"abcD">>, TopicName1 = <<"abcD">>,
send_register_msg(Socket, TopicName1, MsgId), send_register_msg(Socket, TopicName1, MsgId),
?assertEqual(<<7, ?SN_REGACK, TopicId:16, MsgId:16, 0:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId:16, MsgId:16, 0:8>>,
receive_response(Socket)),
send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1,
CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId:16, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId:16,
@ -151,7 +157,8 @@ t_subscribe_case01(_) ->
TopicName1 = <<"abcD">>, TopicName1 = <<"abcD">>,
send_register_msg(Socket, TopicName1, MsgId), send_register_msg(Socket, TopicName1, MsgId),
?assertEqual(<<7, ?SN_REGACK, TopicId:16, MsgId:16, 0:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId:16, MsgId:16, 0:8>>,
receive_response(Socket)),
send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1,
?SN_NORMAL_TOPIC:2, TopicId:16, MsgId:16, ReturnCode>>, ?SN_NORMAL_TOPIC:2, TopicId:16, MsgId:16, ReturnCode>>,
@ -172,16 +179,18 @@ t_subscribe_case02(_) ->
Will = 0, Will = 0,
CleanSession = 0, CleanSession = 0,
MsgId = 1, MsgId = 1,
TopicId = ?PREDEF_TOPIC_ID1, %this TopicId is the predefined topic id corresponding to ?PREDEF_TOPIC_NAME1 TopicId = ?PREDEF_TOPIC_ID1,
ReturnCode = 0, ReturnCode = 0,
{ok, Socket} = gen_udp:open(0, [binary]), {ok, Socket} = gen_udp:open(0, [binary]),
send_connect_msg(Socket, ?CLIENTID), ClientId = ?CLIENTID,
send_connect_msg(Socket, ClientId),
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
Topic1 = ?PREDEF_TOPIC_NAME1, Topic1 = ?PREDEF_TOPIC_NAME1,
send_register_msg(Socket, Topic1, MsgId), send_register_msg(Socket, Topic1, MsgId),
?assertEqual(<<7, ?SN_REGACK, TopicId:16, MsgId:16, 0:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId:16, MsgId:16, 0:8>>,
receive_response(Socket)),
send_subscribe_msg_predefined_topic(Socket, QoS, TopicId, MsgId), send_subscribe_msg_predefined_topic(Socket, QoS, TopicId, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1,
?SN_NORMAL_TOPIC:2, TopicId:16, MsgId:16, ReturnCode>>, ?SN_NORMAL_TOPIC:2, TopicId:16, MsgId:16, ReturnCode>>,
@ -211,9 +220,11 @@ t_subscribe_case03(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_short_topic(Socket, QoS, <<"te">>, MsgId), send_subscribe_msg_short_topic(Socket, QoS, <<"te">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1,
?SN_NORMAL_TOPIC:2, TopicId:16, MsgId:16, ReturnCode>>, CleanSession:1, ?SN_NORMAL_TOPIC:2,
receive_response(Socket)), TopicId:16, MsgId:16, ReturnCode>>,
receive_response(Socket)
),
send_unsubscribe_msg_short_topic(Socket, <<"te">>, MsgId), send_unsubscribe_msg_short_topic(Socket, <<"te">>, MsgId),
?assertEqual(<<4, ?SN_UNSUBACK, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_UNSUBACK, MsgId:16>>, receive_response(Socket)),
@ -222,8 +233,12 @@ t_subscribe_case03(_) ->
?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)), ?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)),
gen_udp:close(Socket). gen_udp:close(Socket).
%%In this case We use predefined topic name to register and subcribe, and expect to receive the corresponding predefined topic id but not a new generated topic id from broker. We design this case to illustrate %% In this case We use predefined topic name to register and subcribe, and
%% emqx_sn_gateway's compatibility of dealing with predefined and normal topics. Once we give more restrictions to different topic id type, this case would be deleted or modified. %% expect to receive the corresponding predefined topic id but not a new
%% generated topic id from broker. We design this case to illustrate
%% emqx_sn_gateway's compatibility of dealing with predefined and normal topics.
%% Once we give more restrictions to different topic id type, this case would
%% be deleted or modified.
t_subscribe_case04(_) -> t_subscribe_case04(_) ->
Dup = 0, Dup = 0,
QoS = 0, QoS = 0,
@ -231,7 +246,7 @@ t_subscribe_case04(_) ->
Will = 0, Will = 0,
CleanSession = 0, CleanSession = 0,
MsgId = 1, MsgId = 1,
TopicId = ?PREDEF_TOPIC_ID1, %this TopicId is the predefined topic id corresponding to ?PREDEF_TOPIC_NAME1 TopicId = ?PREDEF_TOPIC_ID1,
ReturnCode = 0, ReturnCode = 0,
{ok, Socket} = gen_udp:open(0, [binary]), {ok, Socket} = gen_udp:open(0, [binary]),
ClientId = ?CLIENTID, ClientId = ?CLIENTID,
@ -239,10 +254,14 @@ t_subscribe_case04(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
Topic1 = ?PREDEF_TOPIC_NAME1, Topic1 = ?PREDEF_TOPIC_NAME1,
send_register_msg(Socket, Topic1, MsgId), send_register_msg(Socket, Topic1, MsgId),
?assertEqual(<<7, ?SN_REGACK, TopicId:16, MsgId:16, 0:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId:16, MsgId:16, 0:8>>,
receive_response(Socket)),
send_subscribe_msg_normal_topic(Socket, QoS, Topic1, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, Topic1, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId:16, MsgId:16, ReturnCode>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId:16, MsgId:16, ReturnCode>>,
receive_response(Socket)
),
send_unsubscribe_msg_normal_topic(Socket, Topic1, MsgId), send_unsubscribe_msg_normal_topic(Socket, Topic1, MsgId),
?assertEqual(<<4, ?SN_UNSUBACK, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_UNSUBACK, MsgId:16>>, receive_response(Socket)),
@ -269,19 +288,30 @@ t_subscribe_case05(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_register_msg(Socket, <<"abcD">>, MsgId), send_register_msg(Socket, <<"abcD">>, MsgId),
?assertEqual(<<7, ?SN_REGACK, TopicId1:16, MsgId:16, 0:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId1:16, MsgId:16, 0:8>>,
receive_response(Socket)
),
send_subscribe_msg_normal_topic(Socket, QoS, <<"abcD">>, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, <<"abcD">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, MsgId:16, ReturnCode>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, MsgId:16, ReturnCode>>,
receive_response(Socket)
),
send_subscribe_msg_normal_topic(Socket, QoS, <<"/sport/#">>, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, <<"/sport/#">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId0:16, MsgId:16, ReturnCode>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId0:16, MsgId:16, ReturnCode>>,
receive_response(Socket)
),
send_subscribe_msg_normal_topic(Socket, QoS, <<"/a/+/water">>, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, <<"/a/+/water">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId0:16, MsgId:16, ReturnCode>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId0:16, MsgId:16, ReturnCode>>,
receive_response(Socket)
),
send_subscribe_msg_normal_topic(Socket, QoS, <<"/Tom/Home">>, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, <<"/Tom/Home">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1,
@ -311,19 +341,32 @@ t_subscribe_case06(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_register_msg(Socket, <<"abc">>, MsgId), send_register_msg(Socket, <<"abc">>, MsgId),
?assertEqual(<<7, ?SN_REGACK, TopicId1:16, MsgId:16, 0:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId1:16, MsgId:16, 0:8>>,
receive_response(Socket)
),
send_register_msg(Socket, <<"/blue/#">>, MsgId), send_register_msg(Socket, <<"/blue/#">>, MsgId),
?assertEqual(<<7, ?SN_REGACK, TopicId0:16, MsgId:16, ?SN_RC_NOT_SUPPORTED:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId0:16,
MsgId:16, ?SN_RC_NOT_SUPPORTED:8>>,
receive_response(Socket)
),
send_register_msg(Socket, <<"/blue/+/white">>, MsgId), send_register_msg(Socket, <<"/blue/+/white">>, MsgId),
?assertEqual(<<7, ?SN_REGACK, TopicId0:16, MsgId:16, ?SN_RC_NOT_SUPPORTED:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId0:16,
MsgId:16, ?SN_RC_NOT_SUPPORTED:8>>,
receive_response(Socket)
),
send_register_msg(Socket, <<"/$sys/rain">>, MsgId), send_register_msg(Socket, <<"/$sys/rain">>, MsgId),
?assertEqual(<<7, ?SN_REGACK, TopicId2:16, MsgId:16, 0:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId2:16, MsgId:16, 0:8>>,
receive_response(Socket)
),
send_subscribe_msg_short_topic(Socket, QoS, <<"Q2">>, MsgId), send_subscribe_msg_short_topic(Socket, QoS, <<"Q2">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId0:16, MsgId:16, ReturnCode>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId0:16, MsgId:16, ReturnCode>>,
receive_response(Socket)
),
send_unsubscribe_msg_normal_topic(Socket, <<"Q2">>, MsgId), send_unsubscribe_msg_normal_topic(Socket, <<"Q2">>, MsgId),
?assertEqual(<<4, ?SN_UNSUBACK, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_UNSUBACK, MsgId:16>>, receive_response(Socket)),
@ -347,8 +390,11 @@ t_subscribe_case07(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_predefined_topic(Socket, QoS, TopicId1, MsgId), send_subscribe_msg_predefined_topic(Socket, QoS, TopicId1, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, MsgId:16, ?SN_RC_INVALID_TOPIC_ID>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, MsgId:16, ?SN_RC_INVALID_TOPIC_ID>>,
receive_response(Socket)
),
send_unsubscribe_msg_predefined_topic(Socket, TopicId2, MsgId), send_unsubscribe_msg_predefined_topic(Socket, TopicId2, MsgId),
?assertEqual(<<4, ?SN_UNSUBACK, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_UNSUBACK, MsgId:16>>, receive_response(Socket)),
@ -370,8 +416,11 @@ t_subscribe_case08(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_reserved_topic(Socket, QoS, TopicId2, MsgId), send_subscribe_msg_reserved_topic(Socket, QoS, TopicId2, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, ?SN_INVALID_TOPIC_ID:16, MsgId:16, ?SN_RC_INVALID_TOPIC_ID>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
?SN_INVALID_TOPIC_ID:16, MsgId:16, ?SN_RC_INVALID_TOPIC_ID>>,
receive_response(Socket)
),
send_disconnect_msg(Socket, undefined), send_disconnect_msg(Socket, undefined),
?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)), ?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)),
@ -395,15 +444,20 @@ t_publish_negqos_case09(_) ->
send_subscribe_msg_normal_topic(Socket, QoS, Topic, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, Topic, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, MsgId:16, ?SN_RC_ACCEPTED>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
MsgId1 = 3, MsgId1 = 3,
Payload1 = <<20, 21, 22, 23>>, Payload1 = <<20, 21, 22, 23>>,
send_publish_msg_normal_topic(Socket, NegQoS, MsgId1, TopicId1, Payload1), send_publish_msg_normal_topic(Socket, NegQoS, MsgId1, TopicId1, Payload1),
timer:sleep(100), timer:sleep(100),
case ?ENABLE_QOS3 of case ?ENABLE_QOS3 of
true -> true ->
Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>, Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>,
What = receive_response(Socket), What = receive_response(Socket),
?assertEqual(Eexp, What) ?assertEqual(Eexp, What)
end, end,
@ -436,7 +490,9 @@ t_publish_qos0_case01(_) ->
send_publish_msg_normal_topic(Socket, QoS, MsgId1, TopicId1, Payload1), send_publish_msg_normal_topic(Socket, QoS, MsgId1, TopicId1, Payload1),
timer:sleep(100), timer:sleep(100),
Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>, Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>,
What = receive_response(Socket), What = receive_response(Socket),
?assertEqual(Eexp, What), ?assertEqual(Eexp, What),
@ -458,15 +514,20 @@ t_publish_qos0_case02(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_predefined_topic(Socket, QoS, PredefTopicId, MsgId), send_subscribe_msg_predefined_topic(Socket, QoS, PredefTopicId, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, PredefTopicId:16, MsgId:16, ?SN_RC_ACCEPTED>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
PredefTopicId:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
MsgId1 = 3, MsgId1 = 3,
Payload1 = <<20, 21, 22, 23>>, Payload1 = <<20, 21, 22, 23>>,
send_publish_msg_predefined_topic(Socket, QoS, MsgId1, PredefTopicId, Payload1), send_publish_msg_predefined_topic(Socket, QoS, MsgId1, PredefTopicId, Payload1),
timer:sleep(100), timer:sleep(100),
Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_PREDEFINED_TOPIC:2, PredefTopicId:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>, Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_PREDEFINED_TOPIC:2,
PredefTopicId:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>,
What = receive_response(Socket), What = receive_response(Socket),
?assertEqual(Eexp, What), ?assertEqual(Eexp, What),
@ -489,15 +550,20 @@ t_publish_qos0_case3(_) ->
Topic = <<"/a/b/c">>, Topic = <<"/a/b/c">>,
send_subscribe_msg_normal_topic(Socket, QoS, Topic, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, Topic, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId:16, MsgId:16, ?SN_RC_ACCEPTED>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
MsgId1 = 3, MsgId1 = 3,
Payload1 = <<20, 21, 22, 23>>, Payload1 = <<20, 21, 22, 23>>,
send_publish_msg_predefined_topic(Socket, QoS, MsgId1, TopicId, Payload1), send_publish_msg_predefined_topic(Socket, QoS, MsgId1, TopicId, Payload1),
timer:sleep(100), timer:sleep(100),
Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>, Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>,
What = receive_response(Socket), What = receive_response(Socket),
?assertEqual(Eexp, What), ?assertEqual(Eexp, What),
@ -519,8 +585,11 @@ t_publish_qos0_case04(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_normal_topic(Socket, QoS, <<"#">>, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, <<"#">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId0:16, MsgId:16, ?SN_RC_ACCEPTED>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId0:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
MsgId1 = 2, MsgId1 = 2,
Payload1 = <<20, 21, 22, 23>>, Payload1 = <<20, 21, 22, 23>>,
@ -528,7 +597,9 @@ t_publish_qos0_case04(_) ->
send_publish_msg_short_topic(Socket, QoS, MsgId1, Topic, Payload1), send_publish_msg_short_topic(Socket, QoS, MsgId1, Topic, Payload1),
timer:sleep(100), timer:sleep(100),
Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_SHORT_TOPIC:2, Topic/binary, (mid(0)):16, <<20, 21, 22, 23>>/binary>>, Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_SHORT_TOPIC:2,
Topic/binary, (mid(0)):16, <<20, 21, 22, 23>>/binary>>,
What = receive_response(Socket), What = receive_response(Socket),
?assertEqual(Eexp, What), ?assertEqual(Eexp, What),
@ -549,8 +620,11 @@ t_publish_qos0_case05(_) ->
send_connect_msg(Socket, ClientId), send_connect_msg(Socket, ClientId),
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_short_topic(Socket, QoS, <<"/#">>, MsgId), send_subscribe_msg_short_topic(Socket, QoS, <<"/#">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId0:16, MsgId:16, ?SN_RC_ACCEPTED>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId0:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
send_disconnect_msg(Socket, undefined), send_disconnect_msg(Socket, undefined),
?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)), ?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)),
@ -572,15 +646,20 @@ t_publish_qos0_case06(_) ->
Topic = <<"abc">>, Topic = <<"abc">>,
send_subscribe_msg_normal_topic(Socket, QoS, Topic, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, Topic, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, MsgId:16, ?SN_RC_ACCEPTED>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
MsgId1 = 3, MsgId1 = 3,
Payload1 = <<20, 21, 22, 23>>, Payload1 = <<20, 21, 22, 23>>,
send_publish_msg_normal_topic(Socket, QoS, MsgId1, TopicId1, Payload1), send_publish_msg_normal_topic(Socket, QoS, MsgId1, TopicId1, Payload1),
timer:sleep(100), timer:sleep(100),
Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>, Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>,
What = receive_response(Socket), What = receive_response(Socket),
?assertEqual(Eexp, What), ?assertEqual(Eexp, What),
@ -602,16 +681,25 @@ t_publish_qos1_case01(_) ->
send_connect_msg(Socket, ClientId), send_connect_msg(Socket, ClientId),
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_normal_topic(Socket, QoS, Topic, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, Topic, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
?SN_NORMAL_TOPIC:2, TopicId1:16, MsgId:16, ?SN_RC_ACCEPTED>>, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
receive_response(Socket)), TopicId1:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
Payload1 = <<20, 21, 22, 23>>, Payload1 = <<20, 21, 22, 23>>,
send_publish_msg_normal_topic(Socket, QoS, MsgId, TopicId1, Payload1), send_publish_msg_normal_topic(Socket, QoS, MsgId, TopicId1, Payload1),
?assertEqual(<<7, ?SN_PUBACK, TopicId1:16, MsgId:16, ?SN_RC_ACCEPTED>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_PUBACK, TopicId1:16,
MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
timer:sleep(100), timer:sleep(100),
?assertEqual(<<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, MsgId:16, <<20, 21, 22, 23>>/binary>>, receive_response(Socket)), ?assertEqual(<<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, MsgId:16, <<20, 21, 22, 23>>/binary>>,
receive_response(Socket)
),
send_disconnect_msg(Socket, undefined), send_disconnect_msg(Socket, undefined),
gen_udp:close(Socket). gen_udp:close(Socket).
@ -630,12 +718,18 @@ t_publish_qos1_case02(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_predefined_topic(Socket, QoS, PredefTopicId, MsgId), send_subscribe_msg_predefined_topic(Socket, QoS, PredefTopicId, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, PredefTopicId:16, MsgId:16, ?SN_RC_ACCEPTED>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
PredefTopicId:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
Payload1 = <<20, 21, 22, 23>>, Payload1 = <<20, 21, 22, 23>>,
send_publish_msg_predefined_topic(Socket, QoS, MsgId, PredefTopicId, Payload1), send_publish_msg_predefined_topic(Socket, QoS, MsgId, PredefTopicId, Payload1),
?assertEqual(<<7, ?SN_PUBACK, PredefTopicId:16, MsgId:16, ?SN_RC_ACCEPTED>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_PUBACK, PredefTopicId:16,
MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
timer:sleep(100), timer:sleep(100),
send_disconnect_msg(Socket, undefined), send_disconnect_msg(Socket, undefined),
@ -650,7 +744,10 @@ t_publish_qos1_case03(_) ->
send_connect_msg(Socket, ClientId), send_connect_msg(Socket, ClientId),
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_publish_msg_predefined_topic(Socket, QoS, MsgId, tid(5), <<20, 21, 22, 23>>), send_publish_msg_predefined_topic(Socket, QoS, MsgId, tid(5), <<20, 21, 22, 23>>),
?assertEqual(<<7, ?SN_PUBACK, TopicId5:16, MsgId:16, ?SN_RC_INVALID_TOPIC_ID>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_PUBACK, TopicId5:16,
MsgId:16, ?SN_RC_INVALID_TOPIC_ID>>,
receive_response(Socket)
),
send_disconnect_msg(Socket, undefined), send_disconnect_msg(Socket, undefined),
?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)), ?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)),
@ -669,15 +766,20 @@ t_publish_qos1_case04(_) ->
send_connect_msg(Socket, ClientId), send_connect_msg(Socket, ClientId),
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_short_topic(Socket, QoS, <<"ab">>, MsgId), send_subscribe_msg_short_topic(Socket, QoS, <<"ab">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
?SN_NORMAL_TOPIC:2, TopicId0:16, MsgId:16, ?SN_RC_ACCEPTED>>, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
receive_response(Socket)), TopicId0:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
Topic = <<"ab">>, Topic = <<"ab">>,
Payload1 = <<20, 21, 22, 23>>, Payload1 = <<20, 21, 22, 23>>,
send_publish_msg_short_topic(Socket, QoS, MsgId, Topic, Payload1), send_publish_msg_short_topic(Socket, QoS, MsgId, Topic, Payload1),
<<TopicIdShort:16>> = Topic, <<TopicIdShort:16>> = Topic,
?assertEqual(<<7, ?SN_PUBACK, TopicIdShort:16, MsgId:16, ?SN_RC_ACCEPTED>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_PUBACK, TopicIdShort:16,
MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
timer:sleep(100), timer:sleep(100),
send_disconnect_msg(Socket, undefined), send_disconnect_msg(Socket, undefined),
@ -697,13 +799,18 @@ t_publish_qos1_case05(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_normal_topic(Socket, QoS, <<"ab">>, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, <<"ab">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
?SN_NORMAL_TOPIC:2, TopicId1:16, MsgId:16, ?SN_RC_ACCEPTED>>, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
receive_response(Socket)), TopicId1:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
send_publish_msg_short_topic(Socket, QoS, MsgId, <<"/#">>, <<20, 21, 22, 23>>), send_publish_msg_short_topic(Socket, QoS, MsgId, <<"/#">>, <<20, 21, 22, 23>>),
<<TopicIdShort:16>> = <<"/#">>, <<TopicIdShort:16>> = <<"/#">>,
?assertEqual(<<7, ?SN_PUBACK, TopicIdShort:16, MsgId:16, ?SN_RC_NOT_SUPPORTED>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_PUBACK, TopicIdShort:16,
MsgId:16, ?SN_RC_NOT_SUPPORTED>>,
receive_response(Socket)
),
send_disconnect_msg(Socket, undefined), send_disconnect_msg(Socket, undefined),
?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)), ?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)),
@ -729,7 +836,10 @@ t_publish_qos1_case06(_) ->
send_publish_msg_short_topic(Socket, QoS, MsgId, <<"/+">>, <<20, 21, 22, 23>>), send_publish_msg_short_topic(Socket, QoS, MsgId, <<"/+">>, <<20, 21, 22, 23>>),
<<TopicIdShort:16>> = <<"/+">>, <<TopicIdShort:16>> = <<"/+">>,
?assertEqual(<<7, ?SN_PUBACK, TopicIdShort:16, MsgId:16, ?SN_RC_NOT_SUPPORTED>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_PUBACK, TopicIdShort:16,
MsgId:16, ?SN_RC_NOT_SUPPORTED>>,
receive_response(Socket)
),
send_disconnect_msg(Socket, undefined), send_disconnect_msg(Socket, undefined),
?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)), ?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)),
@ -756,7 +866,11 @@ t_publish_qos2_case01(_) ->
send_publish_msg_normal_topic(Socket, QoS, MsgId, TopicId1, Payload1), send_publish_msg_normal_topic(Socket, QoS, MsgId, TopicId1, Payload1),
?assertEqual(<<4, ?SN_PUBREC, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_PUBREC, MsgId:16>>, receive_response(Socket)),
send_pubrel_msg(Socket, MsgId), send_pubrel_msg(Socket, MsgId),
?assertEqual(<<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, 1:16, <<20, 21, 22, 23>>/binary>>, receive_response(Socket)), ?assertEqual(<<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, 1:16, <<20, 21, 22, 23>>/binary>>,
receive_response(Socket)
),
?assertEqual(<<4, ?SN_PUBCOMP, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_PUBCOMP, MsgId:16>>, receive_response(Socket)),
timer:sleep(100), timer:sleep(100),
@ -778,15 +892,21 @@ t_publish_qos2_case02(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_predefined_topic(Socket, QoS, PredefTopicId, MsgId), send_subscribe_msg_predefined_topic(Socket, QoS, PredefTopicId, MsgId),
?assertEqual(<<8, ?SN_SUBACK, ?FNU:1, QoS:2, ?FNU:5, PredefTopicId:16, MsgId:16, ?SN_RC_ACCEPTED>>, ?assertEqual(<<8, ?SN_SUBACK, ?FNU:1, QoS:2, ?FNU:5,
receive_response(Socket)), PredefTopicId:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
Payload1 = <<20, 21, 22, 23>>, Payload1 = <<20, 21, 22, 23>>,
send_publish_msg_predefined_topic(Socket, QoS, MsgId, PredefTopicId, Payload1), send_publish_msg_predefined_topic(Socket, QoS, MsgId, PredefTopicId, Payload1),
?assertEqual(<<4, ?SN_PUBREC, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_PUBREC, MsgId:16>>, receive_response(Socket)),
send_pubrel_msg(Socket, MsgId), send_pubrel_msg(Socket, MsgId),
?assertEqual(<<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_PREDEFINED_TOPIC :2, PredefTopicId:16, 1:16, <<20, 21, 22, 23>>/binary>>, receive_response(Socket)), ?assertEqual(<<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_PREDEFINED_TOPIC:2,
PredefTopicId:16, 1:16, <<20, 21, 22, 23>>/binary>>,
receive_response(Socket)
),
?assertEqual(<<4, ?SN_PUBCOMP, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_PUBCOMP, MsgId:16>>, receive_response(Socket)),
timer:sleep(100), timer:sleep(100),
@ -817,7 +937,11 @@ t_publish_qos2_case03(_) ->
?assertEqual(<<4, ?SN_PUBREC, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_PUBREC, MsgId:16>>, receive_response(Socket)),
send_pubrel_msg(Socket, MsgId), send_pubrel_msg(Socket, MsgId),
?assertEqual(<<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_SHORT_TOPIC :2, <<"/a">>/binary, 1:16, <<20, 21, 22, 23>>/binary>>, receive_response(Socket)), ?assertEqual(<<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_SHORT_TOPIC:2,
"/a", 1:16, <<20, 21, 22, 23>>/binary>>,
receive_response(Socket)
),
?assertEqual(<<4, ?SN_PUBCOMP, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_PUBCOMP, MsgId:16>>, receive_response(Socket)),
timer:sleep(100), timer:sleep(100),
@ -1167,7 +1291,11 @@ t_asleep_test03_to_awake_qos1_dl_msg(_) ->
send_register_msg(Socket, TopicName1, MsgId1), send_register_msg(Socket, TopicName1, MsgId1),
?assertEqual(<<7, ?SN_REGACK, TopicId1:16, MsgId1:16, 0:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId1:16, MsgId1:16, 0:8>>, receive_response(Socket)),
send_subscribe_msg_predefined_topic(Socket, QoS, TopicId1, MsgId), send_subscribe_msg_predefined_topic(Socket, QoS, TopicId1, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, WillBit:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, MsgId:16, ReturnCode>>, receive_response(Socket)), ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
WillBit:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, MsgId:16, ReturnCode>>,
receive_response(Socket)
),
% goto asleep state % goto asleep state
send_disconnect_msg(Socket, 1), send_disconnect_msg(Socket, 1),
@ -1193,7 +1321,10 @@ t_asleep_test03_to_awake_qos1_dl_msg(_) ->
%% the broker should sent dl msgs to the awake client before sending the pingresp %% the broker should sent dl msgs to the awake client before sending the pingresp
UdpData = receive_response(Socket), UdpData = receive_response(Socket),
MsgId_udp = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, ?SN_NORMAL_TOPIC, TopicId1, Payload1}, UdpData), MsgId_udp = check_publish_msg_on_udp(
{Dup, QoS, Retain, WillBit,
CleanSession, ?SN_NORMAL_TOPIC,
TopicId1, Payload1}, UdpData),
send_puback_msg(Socket, TopicId1, MsgId_udp), send_puback_msg(Socket, TopicId1, MsgId_udp),
%% check the pingresp is received at last %% check the pingresp is received at last
@ -1225,8 +1356,11 @@ t_asleep_test04_to_awake_qos1_dl_msg(_) ->
CleanSession = 0, CleanSession = 0,
ReturnCode = 0, ReturnCode = 0,
send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId1), send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId1),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, WillBit:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId0:16, MsgId1:16, ReturnCode>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), WillBit:1,CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId0:16, MsgId1:16, ReturnCode>>,
receive_response(Socket)
),
% goto asleep state % goto asleep state
send_disconnect_msg(Socket, 1), send_disconnect_msg(Socket, 1),
@ -1260,11 +1394,17 @@ t_asleep_test04_to_awake_qos1_dl_msg(_) ->
send_regack_msg(Socket, TopicIdNew, MsgId3), send_regack_msg(Socket, TopicIdNew, MsgId3),
UdpData2 = receive_response(Socket), UdpData2 = receive_response(Socket),
MsgId_udp2 = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, ?SN_NORMAL_TOPIC, TopicIdNew, Payload1}, UdpData2), MsgId_udp2 = check_publish_msg_on_udp(
{Dup, QoS, Retain, WillBit,
CleanSession, ?SN_NORMAL_TOPIC,
TopicIdNew, Payload1}, UdpData2),
send_puback_msg(Socket, TopicIdNew, MsgId_udp2), send_puback_msg(Socket, TopicIdNew, MsgId_udp2),
UdpData3 = receive_response(Socket), UdpData3 = receive_response(Socket),
MsgId_udp3 = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, ?SN_NORMAL_TOPIC, TopicIdNew, Payload2}, UdpData3), MsgId_udp3 = check_publish_msg_on_udp(
{Dup, QoS, Retain, WillBit,
CleanSession, ?SN_NORMAL_TOPIC,
TopicIdNew, Payload2}, UdpData3),
send_puback_msg(Socket, TopicIdNew, MsgId_udp3), send_puback_msg(Socket, TopicIdNew, MsgId_udp3),
?assertEqual(<<2, ?SN_PINGRESP>>, receive_response(Socket)), ?assertEqual(<<2, ?SN_PINGRESP>>, receive_response(Socket)),
@ -1300,8 +1440,11 @@ t_asleep_test05_to_awake_qos1_dl_msg(_) ->
CleanSession = 0, CleanSession = 0,
ReturnCode = 0, ReturnCode = 0,
send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId1), send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId1),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, WillBit:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId0:16, MsgId1:16, ReturnCode>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), WillBit:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId0:16, MsgId1:16, ReturnCode>>,
receive_response(Socket)
),
% goto asleep state % goto asleep state
SleepDuration = 30, SleepDuration = 30,
@ -1334,21 +1477,28 @@ t_asleep_test05_to_awake_qos1_dl_msg(_) ->
send_regack_msg(Socket, TopicIdNew, MsgId_reg), send_regack_msg(Socket, TopicIdNew, MsgId_reg),
UdpData2 = receive_response(Socket), UdpData2 = receive_response(Socket),
MsgId2 = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, ?SN_NORMAL_TOPIC, TopicIdNew, Payload2}, UdpData2), MsgId2 = check_publish_msg_on_udp(
{Dup, QoS, Retain, WillBit,
CleanSession, ?SN_NORMAL_TOPIC,
TopicIdNew, Payload2}, UdpData2),
send_puback_msg(Socket, TopicIdNew, MsgId2), send_puback_msg(Socket, TopicIdNew, MsgId2),
timer:sleep(50), timer:sleep(50),
UdpData3 = wrap_receive_response(Socket), UdpData3 = wrap_receive_response(Socket),
MsgId3 = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, ?SN_NORMAL_TOPIC, TopicIdNew, Payload3}, UdpData3), MsgId3 = check_publish_msg_on_udp(
{Dup, QoS, Retain, WillBit,
CleanSession, ?SN_NORMAL_TOPIC,
TopicIdNew, Payload3}, UdpData3),
send_puback_msg(Socket, TopicIdNew, MsgId3), send_puback_msg(Socket, TopicIdNew, MsgId3),
timer:sleep(50), timer:sleep(50),
case receive_response(Socket) of case receive_response(Socket) of
<<2,23>> -> ok; <<2,23>> -> ok;
UdpData4 -> UdpData4 ->
MsgId4 = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, MsgId4 = check_publish_msg_on_udp(
CleanSession, ?SN_NORMAL_TOPIC, {Dup, QoS, Retain, WillBit,
TopicIdNew, Payload4}, UdpData4), CleanSession, ?SN_NORMAL_TOPIC,
TopicIdNew, Payload4}, UdpData4),
send_puback_msg(Socket, TopicIdNew, MsgId4) send_puback_msg(Socket, TopicIdNew, MsgId4)
end, end,
?assertEqual(<<2, ?SN_PINGRESP>>, receive_response(Socket)), ?assertEqual(<<2, ?SN_PINGRESP>>, receive_response(Socket)),
@ -1406,7 +1556,10 @@ t_asleep_test06_to_awake_qos2_dl_msg(_) ->
send_pingreq_msg(Socket, ClientId), send_pingreq_msg(Socket, ClientId),
UdpData = wrap_receive_response(Socket), UdpData = wrap_receive_response(Socket),
MsgId_udp = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, ?SN_NORMAL_TOPIC, TopicId_tom, Payload1}, UdpData), MsgId_udp = check_publish_msg_on_udp(
{Dup, QoS, Retain, WillBit,
CleanSession, ?SN_NORMAL_TOPIC,
TopicId_tom, Payload1}, UdpData),
send_pubrec_msg(Socket, MsgId_udp), send_pubrec_msg(Socket, MsgId_udp),
?assertMatch(<<_:8, ?SN_PUBREL:8, _/binary>>, receive_response(Socket)), ?assertMatch(<<_:8, ?SN_PUBREL:8, _/binary>>, receive_response(Socket)),
send_pubcomp_msg(Socket, MsgId_udp), send_pubcomp_msg(Socket, MsgId_udp),
@ -1441,8 +1594,11 @@ t_asleep_test07_to_connected(_) ->
send_register_msg(Socket, TopicName_tom, MsgId1), send_register_msg(Socket, TopicName_tom, MsgId1),
TopicId_tom = check_regack_msg_on_udp(MsgId1, receive_response(Socket)), TopicId_tom = check_regack_msg_on_udp(MsgId1, receive_response(Socket)),
send_subscribe_msg_predefined_topic(Socket, QoS, TopicId_tom, MsgId1), send_subscribe_msg_predefined_topic(Socket, QoS, TopicId_tom, MsgId1),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, WillBit:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId_tom:16, MsgId1:16, ReturnCode>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), WillBit:1,CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId_tom:16, MsgId1:16, ReturnCode>>,
receive_response(Socket)
),
% goto asleep state % goto asleep state
send_disconnect_msg(Socket, SleepDuration), send_disconnect_msg(Socket, SleepDuration),
@ -1520,8 +1676,11 @@ t_asleep_test09_to_awake_again_qos1_dl_msg(_) ->
CleanSession = 0, CleanSession = 0,
ReturnCode = 0, ReturnCode = 0,
send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId1), send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId1),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, WillBit:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId0:16, MsgId1:16, ReturnCode>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), WillBit:1,CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId0:16, MsgId1:16, ReturnCode>>,
receive_response(Socket)
),
% goto asleep state % goto asleep state
SleepDuration = 30, SleepDuration = 30,
send_disconnect_msg(Socket, SleepDuration), send_disconnect_msg(Socket, SleepDuration),
@ -1555,7 +1714,10 @@ t_asleep_test09_to_awake_again_qos1_dl_msg(_) ->
udp_receive_timeout -> udp_receive_timeout ->
ok; ok;
UdpData2 -> UdpData2 ->
MsgId2 = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, ?SN_NORMAL_TOPIC, TopicIdNew, Payload2}, UdpData2), MsgId2 = check_publish_msg_on_udp(
{Dup, QoS, Retain, WillBit,
CleanSession, ?SN_NORMAL_TOPIC,
TopicIdNew, Payload2}, UdpData2),
send_puback_msg(Socket, TopicIdNew, MsgId2) send_puback_msg(Socket, TopicIdNew, MsgId2)
end, end,
timer:sleep(100), timer:sleep(100),
@ -1564,7 +1726,10 @@ t_asleep_test09_to_awake_again_qos1_dl_msg(_) ->
udp_receive_timeout -> udp_receive_timeout ->
ok; ok;
UdpData3 -> UdpData3 ->
MsgId3 = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, ?SN_NORMAL_TOPIC, TopicIdNew, Payload3}, UdpData3), MsgId3 = check_publish_msg_on_udp(
{Dup, QoS, Retain, WillBit,
CleanSession, ?SN_NORMAL_TOPIC,
TopicIdNew, Payload3}, UdpData3),
send_puback_msg(Socket, TopicIdNew, MsgId3) send_puback_msg(Socket, TopicIdNew, MsgId3)
end, end,
timer:sleep(100), timer:sleep(100),
@ -1573,16 +1738,18 @@ t_asleep_test09_to_awake_again_qos1_dl_msg(_) ->
udp_receive_timeout -> udp_receive_timeout ->
ok; ok;
UdpData4 -> UdpData4 ->
MsgId4 = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, MsgId4 = check_publish_msg_on_udp(
CleanSession, ?SN_NORMAL_TOPIC, {Dup, QoS, Retain, WillBit,
TopicIdNew, Payload4}, UdpData4), CleanSession, ?SN_NORMAL_TOPIC,
TopicIdNew, Payload4}, UdpData4),
send_puback_msg(Socket, TopicIdNew, MsgId4) send_puback_msg(Socket, TopicIdNew, MsgId4)
end, end,
?assertEqual(<<2, ?SN_PINGRESP>>, receive_response(Socket)), ?assertEqual(<<2, ?SN_PINGRESP>>, receive_response(Socket)),
%% send PINGREQ again to enter awake state %% send PINGREQ again to enter awake state
send_pingreq_msg(Socket, ClientId), send_pingreq_msg(Socket, ClientId),
%% will not receive any buffered PUBLISH messages buffered before last awake, only receive PINGRESP here %% will not receive any buffered PUBLISH messages buffered before last
%% awake, only receive PINGRESP here
?assertEqual(<<2, ?SN_PINGRESP>>, receive_response(Socket)), ?assertEqual(<<2, ?SN_PINGRESP>>, receive_response(Socket)),
gen_udp:close(Socket). gen_udp:close(Socket).
@ -2312,8 +2479,12 @@ check_dispatched_message(Dup, QoS, Retain, TopicIdType, TopicId, Payload, Socket
PubMsg = receive_response(Socket), PubMsg = receive_response(Socket),
Length = 7 + byte_size(Payload), Length = 7 + byte_size(Payload),
?LOG("check_dispatched_message ~p~n", [PubMsg]), ?LOG("check_dispatched_message ~p~n", [PubMsg]),
?LOG("expected ~p xx ~p~n", [<<Length, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, ?FNU:2, TopicIdType:2, TopicId:16>>, Payload]), ?LOG("expected ~p xx ~p~n",
<<Length, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, ?FNU:2, TopicIdType:2, TopicId:16, MsgId:16, Payload/binary>> = PubMsg, [<<Length, ?SN_PUBLISH,
Dup:1, QoS:2, Retain:1, ?FNU:2, TopicIdType:2, TopicId:16>>, Payload]),
<<Length, ?SN_PUBLISH,
Dup:1, QoS:2, Retain:1, ?FNU:2, TopicIdType:2,
TopicId:16, MsgId:16, Payload/binary>> = PubMsg,
case QoS of case QoS of
0 -> ok; 0 -> ok;
1 -> send_puback_msg(Socket, TopicId, MsgId); 1 -> send_puback_msg(Socket, TopicId, MsgId);
@ -2325,11 +2496,14 @@ check_dispatched_message(Dup, QoS, Retain, TopicIdType, TopicId, Payload, Socket
get_udp_broadcast_address() -> get_udp_broadcast_address() ->
"255.255.255.255". "255.255.255.255".
check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, TopicType, TopicId, Payload}, UdpData) -> check_publish_msg_on_udp({Dup, QoS, Retain, WillBit,
CleanSession, TopicType, TopicId, Payload}, UdpData) ->
<<HeaderUdp:5/binary, MsgId:16, PayloadIn/binary>> = UdpData, <<HeaderUdp:5/binary, MsgId:16, PayloadIn/binary>> = UdpData,
ct:pal("UdpData: ~p, Payload: ~p, PayloadIn: ~p", [UdpData, Payload, PayloadIn]), ct:pal("UdpData: ~p, Payload: ~p, PayloadIn: ~p", [UdpData, Payload, PayloadIn]),
Size9 = byte_size(Payload) + 7, Size9 = byte_size(Payload) + 7,
Eexp = <<Size9:8, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, WillBit:1, CleanSession:1, TopicType:2, TopicId:16>>, Eexp = <<Size9:8,
?SN_PUBLISH, Dup:1, QoS:2, Retain:1, WillBit:1, CleanSession:1,
TopicType:2, TopicId:16>>,
?assertEqual(Eexp, HeaderUdp), % mqtt-sn header should be same ?assertEqual(Eexp, HeaderUdp), % mqtt-sn header should be same
?assertEqual(Payload, PayloadIn), % payload should be same ?assertEqual(Payload, PayloadIn), % payload should be same
MsgId. MsgId.

View File

@ -4,6 +4,11 @@
set -e set -e
DEBUG="${DEBUG:-0}"
if [ "$DEBUG" -eq 1 ]; then
set -x
fi
RUNNER_ROOT_DIR="$(cd "$(dirname "$(readlink "$0" || echo "$0")")"/..; pwd -P)" RUNNER_ROOT_DIR="$(cd "$(dirname "$(readlink "$0" || echo "$0")")"/..; pwd -P)"
# shellcheck disable=SC1090 # shellcheck disable=SC1090
. "$RUNNER_ROOT_DIR"/releases/emqx_vars . "$RUNNER_ROOT_DIR"/releases/emqx_vars
@ -424,6 +429,43 @@ generate_config() {
fi fi
} }
# check if a PID is down
is_down() {
PID="$1"
if ps -p "$PID" >/dev/null; then
# still around
# shellcheck disable=SC2009 # this grep pattern is not a part of the progra names
if ps -p "$PID" | grep -q 'defunct'; then
# zombie state, print parent pid
parent="$(ps -o ppid= -p "$PID" | tr -d ' ')"
echo "WARN: $PID is marked <defunct>, parent:"
ps -p "$parent"
return 0
fi
return 1
fi
# it's gone
return 0
}
wait_for() {
local WAIT_TIME
local CMD
WAIT_TIME="$1"
shift
CMD="$*"
while true; do
if $CMD >/dev/null 2>&1; then
return 0
fi
if [ "$WAIT_TIME" -le 0 ]; then
return 1
fi
WAIT_TIME=$((WAIT_TIME - 1))
sleep 1
done
}
# Call bootstrapd for daemon commands like start/stop/console # Call bootstrapd for daemon commands like start/stop/console
bootstrapd() { bootstrapd() {
if [ -e "$RUNNER_DATA_DIR/.erlang.cookie" ]; then if [ -e "$RUNNER_DATA_DIR/.erlang.cookie" ]; then
@ -611,7 +653,7 @@ case "$1" in
"$BINDIR/run_erl" -daemon "$PIPE_DIR" "$RUNNER_LOG_DIR" \ "$BINDIR/run_erl" -daemon "$PIPE_DIR" "$RUNNER_LOG_DIR" \
"$(relx_start_command)" "$(relx_start_command)"
WAIT_TIME=${WAIT_FOR_ERLANG:-15} WAIT_TIME=${WAIT_FOR_ERLANG:-150}
while [ "$WAIT_TIME" -gt 0 ]; do while [ "$WAIT_TIME" -gt 0 ]; do
if ! relx_nodetool "ping" >/dev/null 2>&1; then if ! relx_nodetool "ping" >/dev/null 2>&1; then
WAIT_TIME=$((WAIT_TIME - 1)) WAIT_TIME=$((WAIT_TIME - 1))
@ -623,7 +665,7 @@ case "$1" in
echo "$EMQX_DESCRIPTION $REL_VSN is started successfully!" echo "$EMQX_DESCRIPTION $REL_VSN is started successfully!"
exit 0 exit 0
fi fi
done && echo "$EMQX_DESCRIPTION $REL_VSN failed to start within ${WAIT_FOR_ERLANG:-15} seconds," done && echo "$EMQX_DESCRIPTION $REL_VSN failed to start within ${WAIT_FOR_ERLANG:-150} seconds,"
echo "see the output of '$0 console' for more information." echo "see the output of '$0 console' for more information."
echo "If you want to wait longer, set the environment variable" echo "If you want to wait longer, set the environment variable"
echo "WAIT_FOR_ERLANG to the number of seconds to wait." echo "WAIT_FOR_ERLANG to the number of seconds to wait."
@ -634,6 +676,7 @@ case "$1" in
# Wait for the node to completely stop... # Wait for the node to completely stop...
PID="$(relx_get_pid)" PID="$(relx_get_pid)"
if ! relx_nodetool "stop"; then if ! relx_nodetool "stop"; then
echoerr "Graceful shutdown failed PID=[$PID]"
exit 1 exit 1
fi fi
WAIT_TIME="${EMQX_WAIT_FOR_STOP:-120}" WAIT_TIME="${EMQX_WAIT_FOR_STOP:-120}"

View File

@ -437,6 +437,8 @@ validate_target_version(TargetVersion, TargetNode) ->
CurrentVersion = current_release_version(TargetNode), CurrentVersion = current_release_version(TargetNode),
case {get_major_minor_vsn(CurrentVersion), get_major_minor_vsn(TargetVersion)} of case {get_major_minor_vsn(CurrentVersion), get_major_minor_vsn(TargetVersion)} of
{{Major, Minor}, {Major, Minor}} -> ok; {{Major, Minor}, {Major, Minor}} -> ok;
{{<<"4">>, <<"5">>}, {<<"4">>, <<"4">>}} -> ok;
{{<<"4">>, <<"4">>}, {<<"4">>, <<"5">>}} -> ok;
_ -> _ ->
?INFO("Cannot upgrade/downgrade to ~s from ~s~n" ?INFO("Cannot upgrade/downgrade to ~s from ~s~n"
"We only support relup between patch versions", "We only support relup between patch versions",

177
build
View File

@ -18,21 +18,7 @@ cd -P -- "$(dirname -- "${BASH_SOURCE[0]}")"
PKG_VSN="${PKG_VSN:-$(./pkg-vsn.sh)}" PKG_VSN="${PKG_VSN:-$(./pkg-vsn.sh)}"
export PKG_VSN export PKG_VSN
SYSTEM="$(./scripts/get-distro.sh)" SYSTEM="${SYSTEM:-$(./scripts/get-distro.sh)}"
ARCH="$(uname -m)"
case "$ARCH" in
x86_64)
ARCH='amd64'
;;
aarch64)
ARCH='arm64'
;;
arm*)
ARCH=arm
;;
esac
export ARCH
## ##
## Support RPM and Debian based linux systems ## Support RPM and Debian based linux systems
@ -55,6 +41,21 @@ else
FIND='find' FIND='find'
fi fi
UNAME="$(uname -m)"
case "$UNAME" in
x86_64)
ARCH='amd64'
;;
aarch64)
ARCH='arm64'
;;
arm*)
ARCH=arm
;;
esac
# used in -pkg Makefile
export ARCH
log() { log() {
local msg="$1" local msg="$1"
# rebar3 prints ===>, so we print ===< # rebar3 prints ===>, so we print ===<
@ -62,31 +63,35 @@ log() {
} }
make_rel() { make_rel() {
# shellcheck disable=SC1010 ./rebar3 as "$PROFILE" tar
./rebar3 as "$PROFILE" do release,tar }
relup_db() {
./scripts/relup-base-vsns.escript "$@" ./data/relup-paths.eterm
} }
## unzip previous version .zip files to _build/$PROFILE/rel/emqx/releases before making relup ## unzip previous version .zip files to _build/$PROFILE/rel/emqx/releases before making relup
make_relup() { make_relup() {
local lib_dir="_build/$PROFILE/rel/emqx/lib" local lib_dir="_build/$PROFILE/rel/emqx/lib"
local releases_dir="_build/$PROFILE/rel/emqx/releases" local releases_dir="_build/$PROFILE/rel/emqx/releases"
mkdir -p "$lib_dir" "$releases_dir" local zip_file
mkdir -p "$lib_dir" "$releases_dir" '_upgrade_base'
local releases=() local releases=()
if [ -d "$releases_dir" ]; then if [ -d "$releases_dir" ]; then
while read -r zip; do for BASE_VSN in $(relup_db base-vsns "$PKG_VSN"); do
local base_vsn OTP_BASE=$(relup_db otp-vsn-for "$PKG_VSN")
base_vsn="$(echo "$zip" | grep -oE "[0-9]+\.[0-9]+\.[0-9]+(-[0-9a-f]{8})?")" zip_file="_upgrade_base/${PROFILE}-$(env OTP_VSN="$OTP_BASE" PKG_VSN="$BASE_VSN" ./scripts/pkg-full-vsn.sh 'vsn_exact').zip"
if [ ! -d "$releases_dir/$base_vsn" ]; then if [ ! -d "$releases_dir/$BASE_VSN" ]; then
local tmp_dir local tmp_dir
tmp_dir="$(mktemp -d -t emqx.XXXXXXX)" tmp_dir="$(mktemp -d -t emqx.XXXXXXX)"
unzip -q "$zip" "emqx/releases/*" -d "$tmp_dir" unzip -q "$zip_file" "emqx/releases/*" -d "$tmp_dir"
unzip -q "$zip" "emqx/lib/*" -d "$tmp_dir" unzip -q "$zip_file" "emqx/lib/*" -d "$tmp_dir"
cp -r -n "$tmp_dir/emqx/releases"/* "$releases_dir" || true cp -r -n "$tmp_dir/emqx/releases"/* "$releases_dir" || true
cp -r -n "$tmp_dir/emqx/lib"/* "$lib_dir" || true cp -r -n "$tmp_dir/emqx/lib"/* "$lib_dir" || true
rm -rf "$tmp_dir" rm -rf "$tmp_dir"
fi fi
releases+=( "$base_vsn" ) releases+=( "$BASE_VSN" )
done < <("$FIND" _upgrade_base -maxdepth 1 -name "*$PROFILE-$SYSTEM*-$ARCH.zip" -type f) done
fi fi
if [ ${#releases[@]} -eq 0 ]; then if [ ${#releases[@]} -eq 0 ]; then
log "No upgrade base found, relup ignored" log "No upgrade base found, relup ignored"
@ -127,6 +132,8 @@ make_relup() {
fi fi
} }
## try to be portable for zip packages.
## for DEB and RPM packages the dependencies are resoved by yum and apt
cp_dyn_libs() { cp_dyn_libs() {
local rel_dir="$1" local rel_dir="$1"
local target_dir="${rel_dir}/dynlibs" local target_dir="${rel_dir}/dynlibs"
@ -143,32 +150,75 @@ cp_dyn_libs() {
| sort -u) | sort -u)
} }
## make_zip turns .tar.gz into a .zip with a slightly different name. ## make_zip turns .tar.gz into a .zip with a slightly different name.
## It assumes the .tar.gz has been built -- relies on Makefile dependency ## It assumes the .tar.gz has been built -- relies on Makefile dependency
make_zip() { make_zip() {
# build the tarball again to ensure relup is included # build the tarball again to ensure relup is included
make_rel make_rel
# use relative path because abs path is tricky in windows
tard="/tmp/emqx_untar_${PKG_VSN}" tard="tmp/zip-wd-${PKG_VSN}"
rm -rf "${tard}" rm -rf "${tard}/emqx"
mkdir -p "${tard}/emqx" mkdir -p "${tard}/emqx"
local relpath="_build/${PROFILE}/rel/emqx" local relpath="_build/${PROFILE}/rel/emqx"
local pkgpath="_packages/${PROFILE}" local pkgpath="_packages/${PROFILE}"
local pkgname
pkgname="${PROFILE}-$(./scripts/pkg-full-vsn.sh).zip"
mkdir -p "${pkgpath}" mkdir -p "${pkgpath}"
local tarball="${relpath}/emqx-${PKG_VSN}.tar.gz" local tarname="emqx-${PKG_VSN}.tar.gz"
if [ ! -f "$tarball" ]; then local tarball="${relpath}/${tarname}"
log "ERROR: $tarball is not found" local target_zip="${pkgpath}/${pkgname}"
fi
local zipball
zipball="${pkgpath}/${PROFILE}-${SYSTEM}-${PKG_VSN}-${ARCH}.zip"
tar zxf "${tarball}" -C "${tard}/emqx" tar zxf "${tarball}" -C "${tard}/emqx"
## try to be portable for zip packages. has_relup='yes'
## for DEB and RPM packages the dependencies are resoved by yum and apt case "$SYSTEM" in
windows*)
# no relup support for windows
has_relup='no'
;;
debian11)
case "$PKG_VSN" in
4.4.2*)
# this is the first version for debian11, no relup
has_relup='no'
;;
esac
;;
esac
# shellcheck disable=SC2207
bases=($(relup_db base-vsns "$PKG_VSN"))
if [[ "${#bases[@]}" -eq 0 ]]; then
has_relup='no'
fi
if [ "$has_relup" = 'yes' ]; then
./scripts/inject-relup.escript "${tard}/emqx/releases/${PKG_VSN}/relup"
fi
cp_dyn_libs "${tard}/emqx" cp_dyn_libs "${tard}/emqx"
(cd "${tard}" && zip -qr - emqx) > "${zipball}" pushd "${tard}" >/dev/null
case "$SYSTEM" in
windows*)
7z a "${pkgname}" emqx
;;
*)
zip -qr "${pkgname}" emqx
;;
esac
popd >/dev/null
mv "${tard}/${pkgname}" "${target_zip}"
case "$SYSTEM" in
macos*)
# sha256sum may not be available on macos
openssl dgst -sha256 "${target_zip}" | cut -d ' ' -f 2 > "${target_zip}.sha256"
;;
*)
sha256sum "${target_zip}" | head -c 64 > "${target_zip}.sha256"
;;
esac
log "Zip package successfully created: ${target_zip}"
log "Zip package sha256sum: $(cat "${target_zip}.sha256")"
} }
## This function builds the default docker image based on alpine:3.14 (by default) ## This function builds the default docker image
## based images is by default $EMQX_DEFAULT_BUILDER (see Makefile)
make_docker() { make_docker() {
EMQX_BUILDER="${EMQX_BUILDER:-${EMQX_DEFAULT_BUILDER}}" EMQX_BUILDER="${EMQX_BUILDER:-${EMQX_DEFAULT_BUILDER}}"
EMQX_RUNNER="${EMQX_RUNNER:-${EMQX_DEFAULT_RUNNER}}" EMQX_RUNNER="${EMQX_RUNNER:-${EMQX_DEFAULT_RUNNER}}"
@ -181,6 +231,49 @@ make_docker() {
-f "${DOCKERFILE}" . -f "${DOCKERFILE}" .
} }
## This function accepts any base docker image,
## a emqx zip-image, and a image tag (for the image to be built),
## to build a docker image which runs EMQ X
##
## Export below variables to quickly build an image
##
## Name Default Example
## ---------------------------------------------------------------------
## EMQX_BASE_IMAGE current os centos:7
## EMQX_ZIP_PACKAGE _packages/<current-zip-target> /tmp/emqx-4.4.0-otp24.1.5-3-el7-amd64.zip
## EMQX_IMAGE_TAG emqx/emqx:<current-vns-rel> emqx/emqx:testing-tag
##
make_docker_testing() {
if [ -z "${EMQX_BASE_IMAGE:-}" ]; then
case "$SYSTEM" in
ubuntu20*)
EMQX_BASE_IMAGE="ubuntu:20.04"
;;
el8)
EMQX_BASE_IMAGE="rockylinux:8"
;;
*)
echo "Unsupported testing base image for $SYSTEM"
exit 1
;;
esac
fi
EMQX_IMAGE_TAG="${EMQX_IMAGE_TAG:-emqx/$PROFILE:${PKG_VSN}-otp${OTP_VSN}-${SYSTEM}}"
local defaultzip
defaultzip="_packages/${PROFILE}/${PROFILE}-$(./scripts/pkg-full-vsn.sh).zip"
local zip="${EMQX_ZIP_PACKAGE:-$defaultzip}"
if [ ! -f "$zip" ]; then
log "ERROR: $zip not built?"
exit 1
fi
set -x
docker build \
--build-arg BUILD_FROM="${EMQX_BASE_IMAGE}" \
--build-arg EMQX_ZIP_PACKAGE="${zip}" \
--tag "$EMQX_IMAGE_TAG" \
-f "${DOCKERFILE_TESTING}" .
}
log "building artifact=$ARTIFACT for profile=$PROFILE" log "building artifact=$ARTIFACT for profile=$PROFILE"
case "$ARTIFACT" in case "$ARTIFACT" in
@ -198,13 +291,15 @@ case "$ARTIFACT" in
log "Skipped making deb/rpm package for $SYSTEM" log "Skipped making deb/rpm package for $SYSTEM"
exit 0 exit 0
fi fi
make -C "deploy/packages/${PKGERDIR}" clean EMQX_REL="$(pwd)" make -C "deploy/packages/${PKGERDIR}" clean
EMQX_REL="$(pwd)" EMQX_BUILD="${PROFILE}" SYSTEM="${SYSTEM}" make -C "deploy/packages/${PKGERDIR}" EMQX_REL="$(pwd)" EMQX_BUILD="${PROFILE}" SYSTEM="${SYSTEM}" make -C "deploy/packages/${PKGERDIR}"
;; ;;
docker) docker)
make_docker make_docker
;; ;;
docker-testing)
make_docker_testing
;;
*) *)
log "Unknown artifact $ARTIFACT" log "Unknown artifact $ARTIFACT"
exit 1 exit 1

32
data/relup-paths.eterm Normal file
View File

@ -0,0 +1,32 @@
%% -*- mode: erlang; -*-
{<<"4.4.0">>,#{from_versions => [],otp => <<"24.1.5-3">>}}.
{<<"4.4.1">>,#{from_versions => [<<"4.4.0">>],otp => <<"24.1.5-3">>}}.
{<<"4.4.2">>,
#{from_versions => [<<"4.4.0">>,<<"4.4.1">>],otp => <<"24.1.5-3">>}}.
{<<"4.4.3">>,
#{from_versions => [<<"4.4.0">>,<<"4.4.1">>,<<"4.4.2">>],
otp => <<"24.1.5-3">>}}.
{<<"4.4.4">>,
#{from_versions => [<<"4.4.0">>,<<"4.4.1">>,<<"4.4.2">>,<<"4.4.3">>],
otp => <<"24.1.5-3">>}}.
{<<"4.4.5">>,
#{from_versions =>
[<<"4.4.0">>,<<"4.4.1">>,<<"4.4.2">>,<<"4.4.3">>,<<"4.4.4">>],
otp => <<"24.1.5-3">>}}.
{<<"4.4.6">>,
#{from_versions =>
[<<"4.4.0">>,<<"4.4.1">>,<<"4.4.2">>,<<"4.4.3">>,<<"4.4.4">>,
<<"4.4.5">>],
otp => <<"24.1.5-3">>}}.
{<<"4.4.7">>,
#{from_versions =>
[<<"4.4.0">>,<<"4.4.1">>,<<"4.4.2">>,<<"4.4.3">>,<<"4.4.4">>,
<<"4.4.5">>,<<"4.4.6">>],
otp => <<"24.1.5-3">>}}.
{<<"4.4.8">>,
#{from_versions =>
[<<"4.4.0">>,<<"4.4.1">>,<<"4.4.2">>,<<"4.4.3">>,<<"4.4.4">>,
<<"4.4.5">>,<<"4.4.6">>,<<"4.4.7">>],
otp => <<"24.1.5-3">>}}.
{<<"4.5.0">>,#{from_versions => [<<"4.4.8">>],otp => <<"24.3.4.2-1">>}}.

View File

@ -13,8 +13,8 @@ type: application
# This is the chart version. This version number should be incremented each time you make changes # This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version. # to the chart and its templates, including the app version.
version: 4.3.18 version: 4.4.7
# This is the version number of the application being deployed. This version number should be # This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. # incremented each time you make changes to the application.
appVersion: 4.3.18 appVersion: 4.4.7

View File

@ -5,7 +5,7 @@
{{ $configData := printf "%s\n%s\n%s\n%s" $cfgEnv $cfgAcl $cfgPlugins $cfgModules}} {{ $configData := printf "%s\n%s\n%s\n%s" $cfgEnv $cfgAcl $cfgPlugins $cfgModules}}
## Compatible with previous misspellings ## Compatible with previous misspellings
{{ $licenseSecretName := coalesce .Values.emqxLicenseSecretName .Values.emqxLicneseSecretName }} {{ $licenseSecretName := coalesce .Values.emqxLicenseSecretName .Values.emqxLicneseSecretName }}
{{ $image := printf "%s:%s" .Values.image.repository (default .Values.image.tag .Chart.AppVersion) }} {{ $image := printf "%s:%s" .Values.image.repository (.Values.image.tag | default .Chart.AppVersion) }}
apiVersion: apps/v1 apiVersion: apps/v1
kind: StatefulSet kind: StatefulSet
@ -107,7 +107,9 @@ spec:
secret: secret:
secretName: {{ $licenseSecretName }} secretName: {{ $licenseSecretName }}
{{- end }} {{- end }}
{{- if eq (.Values.emqxConfig.EMQX_CLUSTER__DISCOVERY | default "k8s") "k8s" }}
serviceAccountName: {{ include "emqx.fullname" . }} serviceAccountName: {{ include "emqx.fullname" . }}
{{- end }}
{{- if .Values.podSecurityContext.enabled }} {{- if .Values.podSecurityContext.enabled }}
securityContext: {{- omit .Values.podSecurityContext "enabled" | toYaml | nindent 8 }} securityContext: {{- omit .Values.podSecurityContext "enabled" | toYaml | nindent 8 }}
{{- end }} {{- end }}
@ -157,18 +159,8 @@ spec:
{{- if .Values.extraEnvFrom }} {{- if .Values.extraEnvFrom }}
{{ toYaml .Values.extraEnvFrom | indent 10 }} {{ toYaml .Values.extraEnvFrom | indent 10 }}
{{- end }} {{- end }}
env:
- name: EMQX_NAME
value: {{ .Release.Name }}
- name: EMQX_CLUSTER__K8S__APP_NAME
value: {{ .Release.Name }}
- name: EMQX_CLUSTER__DISCOVERY
value: k8s
- name: EMQX_CLUSTER__K8S__SERVICE_NAME
value: {{ include "emqx.fullname" . }}-headless
- name: EMQX_CLUSTER__K8S__NAMESPACE
value: {{ .Release.Namespace }}
{{- if .Values.extraEnv }} {{- if .Values.extraEnv }}
env:
{{ toYaml .Values.extraEnv | indent 10 }} {{ toYaml .Values.extraEnv | indent 10 }}
{{- end }} {{- end }}
resources: resources:

View File

@ -13,7 +13,7 @@ data:
{{- range $index, $value := .Values.emqxConfig }} {{- range $index, $value := .Values.emqxConfig }}
{{- if $value }} {{- if $value }}
{{- $key := (regexReplaceAllLiteral "\\." (regexReplaceAllLiteral "EMQX[_\\.]" (upper (trimAll " " $index)) "") "__") }} {{- $key := (regexReplaceAllLiteral "\\." (regexReplaceAllLiteral "EMQX[_\\.]" (upper (trimAll " " $index)) "") "__") }}
{{ print "EMQX_" $key }}: {{ $value | quote }} {{ print "EMQX_" $key }}: "{{ tpl (printf "%v" $value) $ }}"
{{- end }} {{- end }}
{{- end}} {{- end}}

View File

@ -1,3 +1,4 @@
{{- if eq (.Values.emqxConfig.EMQX_CLUSTER__DISCOVERY | default "k8s") "k8s" }}
apiVersion: v1 apiVersion: v1
kind: ServiceAccount kind: ServiceAccount
metadata: metadata:
@ -17,8 +18,8 @@ rules:
- apiGroups: - apiGroups:
- "" - ""
resources: resources:
- endpoints - endpoints
verbs: verbs:
- get - get
- watch - watch
- list - list
@ -40,3 +41,4 @@ roleRef:
kind: Role kind: Role
name: {{ include "emqx.fullname" . }} name: {{ include "emqx.fullname" . }}
apiGroup: rbac.authorization.k8s.io apiGroup: rbac.authorization.k8s.io
{{- end }}

View File

@ -72,9 +72,20 @@ initContainers: {}
## EMQX configuration item, see the documentation (https://hub.docker.com/r/emqx/emqx) ## EMQX configuration item, see the documentation (https://hub.docker.com/r/emqx/emqx)
emqxConfig: emqxConfig:
EMQX_MANAGEMENT__DEFAULT_APPLICATION__ID: "admin" EMQX_NAME: "{{ .Release.Name }}"
EMQX_MANAGEMENT__DEFAULT_APPLICATION__SECRET: "public"
## Cluster discovery by dns
# EMQX_CLUSTER__DISCOVERY: "dns"
# EMQX_CLUSTER__DNS__NAME: "{{ .Release.Name }}-headless.{{ .Release.Namespace }}.svc.cluster.local"
# EMQX_CLUSTER__DNS__APP: "{{ .Release.Name }}"
# EMQX_CLUSTER__DNS__TYPE: "srv"
## Cluster discovery by k8s
EMQX_CLUSTER__DISCOVERY: "k8s"
EMQX_CLUSTER__K8S__APP_NAME: "{{ .Release.Name }}"
EMQX_CLUSTER__K8S__APISERVER: "https://kubernetes.default.svc:443" EMQX_CLUSTER__K8S__APISERVER: "https://kubernetes.default.svc:443"
EMQX_CLUSTER__K8S__SERVICE_NAME: "{{ .Release.Name }}-headless"
EMQX_CLUSTER__K8S__NAMESPACE: "{{ .Release.Namespace }}"
## The address type is used to extract host from k8s service. ## The address type is used to extract host from k8s service.
## Value: ip | dns | hostname ## Value: ip | dns | hostname
## NoteHostname is only supported after v4.0-rc.2 ## NoteHostname is only supported after v4.0-rc.2
@ -82,6 +93,8 @@ emqxConfig:
EMQX_CLUSTER__K8S__SUFFIX: "svc.cluster.local" EMQX_CLUSTER__K8S__SUFFIX: "svc.cluster.local"
## if EMQX_CLUSTER__K8S__ADDRESS_TYPE eq dns ## if EMQX_CLUSTER__K8S__ADDRESS_TYPE eq dns
# EMQX_CLUSTER__K8S__SUFFIX: "pod.cluster.local" # EMQX_CLUSTER__K8S__SUFFIX: "pod.cluster.local"
EMQX_MANAGEMENT__DEFAULT_APPLICATION__ID: "admin"
EMQX_MANAGEMENT__DEFAULT_APPLICATION__SECRET: "public"
## -------------------------------------------------------------------- ## --------------------------------------------------------------------
## [ACL](https://docs.emqx.io/broker/latest/en/advanced/acl-file.html) ## [ACL](https://docs.emqx.io/broker/latest/en/advanced/acl-file.html)
@ -118,6 +131,8 @@ emqxLoadedPlugins: >
emqxLoadedModules: > emqxLoadedModules: >
{emqx_mod_acl_internal, true}. {emqx_mod_acl_internal, true}.
{emqx_mod_presence, true}. {emqx_mod_presence, true}.
{emqx_mod_trace, false}.
{emqx_mod_slow_subs, false}.
{emqx_mod_delayed, false}. {emqx_mod_delayed, false}.
{emqx_mod_rewrite, false}. {emqx_mod_rewrite, false}.
{emqx_mod_subscription, false}. {emqx_mod_subscription, false}.

View File

@ -1,5 +1,5 @@
ARG BUILD_FROM=emqx/build-env:erl23.3.4.9-3-alpine ARG BUILD_FROM=ghcr.io/emqx/emqx-builder/4.4-19:24.1.5-3-alpine3.15.1
ARG RUN_FROM=alpine:3.12 ARG RUN_FROM=alpine:3.15.1
FROM ${BUILD_FROM} AS builder FROM ${BUILD_FROM} AS builder
RUN apk add --no-cache \ RUN apk add --no-cache \
@ -32,17 +32,6 @@ RUN cd /emqx \
FROM $RUN_FROM FROM $RUN_FROM
# Basic build-time metadata as defined at http://label-schema.org
LABEL org.label-schema.docker.dockerfile="Dockerfile" \
org.label-schema.license="GNU" \
org.label-schema.name="emqx" \
org.label-schema.version=${PKG_VSN} \
org.label-schema.description="EMQ (Erlang MQTT Broker) is a distributed, massively scalable, highly extensible MQTT messaging broker written in Erlang/OTP." \
org.label-schema.url="https://emqx.io" \
org.label-schema.vcs-type="Git" \
org.label-schema.vcs-url="https://github.com/emqx/emqx" \
maintainer="EMQX Team <support@emqx.io>"
ARG EMQX_NAME=emqx ARG EMQX_NAME=emqx
COPY deploy/docker/docker-entrypoint.sh /usr/bin/ COPY deploy/docker/docker-entrypoint.sh /usr/bin/

View File

@ -0,0 +1,43 @@
ARG BUILD_FROM
FROM ${BUILD_FROM}
## all we need is the unzip command
RUN if command -v yum; then yum update -y && yum install -y unzip; fi
RUN if command -v apt-get; then apt-get update -y && apt-get install unzip; fi
ARG EMQX_ZIP_PACKAGE
COPY ${EMQX_ZIP_PACKAGE} /opt/emqx.zip
RUN unzip -q /opt/emqx.zip -d /opt/ && rm /opt/emqx.zip
COPY deploy/docker/docker-entrypoint.sh /usr/bin/
RUN ln -s /opt/emqx/bin/* /usr/local/bin/
WORKDIR /opt/emqx
RUN adduser -u 1000 emqx
RUN echo "emqx ALL=(ALL) NOPASSWD: ALL" > /etc/sudoers
RUN chgrp -Rf emqx /opt/emqx && chmod -Rf g+w /opt/emqx \
&& chown -Rf emqx /opt/emqx
USER emqx
VOLUME ["/opt/emqx/log", "/opt/emqx/data", "/opt/emqx/etc"]
# emqx will occupy these port:
# - 1883 port for MQTT
# - 8081 for mgmt API
# - 8083 for WebSocket/HTTP
# - 8084 for WSS/HTTPS
# - 8883 port for MQTT(SSL)
# - 11883 port for internal MQTT/TCP
# - 18083 for dashboard
# - 4369 epmd (Erlang-distrbution port mapper daemon) listener (deprecated)
# - 4370 default Erlang distrbution port
# - 5369 for gen_rpc port mapping
# - 6369 6370 for distributed node
EXPOSE 1883 8081 8083 8084 8883 11883 18083 4369 4370 5369 6369 6370
ENTRYPOINT ["/usr/bin/docker-entrypoint.sh"]
CMD ["/opt/emqx/bin/emqx", "foreground"]

View File

@ -28,12 +28,20 @@ if [[ -z "$EMQX_NAME" ]]; then
fi fi
if [[ -z "$EMQX_HOST" ]]; then if [[ -z "$EMQX_HOST" ]]; then
if [[ "$EMQX_CLUSTER__K8S__ADDRESS_TYPE" == "dns" ]] && [[ -n "$EMQX_CLUSTER__K8S__NAMESPACE" ]]; then if [[ "$EMQX_CLUSTER__DISCOVERY" == "dns" ]] && \
EMQX_CLUSTER__K8S__SUFFIX=${EMQX_CLUSTER__K8S__SUFFIX:-"pod.cluster.local"} [[ "$EMQX_CLUSTER__DNS__TYPE" == "srv" ]] && \
EMQX_HOST="${LOCAL_IP//./-}.$EMQX_CLUSTER__K8S__NAMESPACE.$EMQX_CLUSTER__K8S__SUFFIX" grep -q "$(hostname).$EMQX_CLUSTER__DNS__NAME" /etc/hosts; then
elif [[ "$EMQX_CLUSTER__K8S__ADDRESS_TYPE" == 'hostname' ]] && [[ -n "$EMQX_CLUSTER__K8S__NAMESPACE" ]]; then EMQX_HOST="$(hostname).$EMQX_CLUSTER__DNS__NAME"
EMQX_CLUSTER__K8S__SUFFIX=${EMQX_CLUSTER__K8S__SUFFIX:-'svc.cluster.local'} elif [[ "$EMQX_CLUSTER__DISCOVERY" == "k8s" ]] && \
EMQX_HOST=$(grep -h "^$LOCAL_IP" /etc/hosts | grep -o "$(hostname).*.$EMQX_CLUSTER__K8S__NAMESPACE.$EMQX_CLUSTER__K8S__SUFFIX") [[ "$EMQX_CLUSTER__K8S__ADDRESS_TYPE" == "dns" ]] && \
[[ -n "$EMQX_CLUSTER__K8S__NAMESPACE" ]]; then
EMQX_CLUSTER__K8S__SUFFIX=${EMQX_CLUSTER__K8S__SUFFIX:-"pod.cluster.local"}
EMQX_HOST="${LOCAL_IP//./-}.$EMQX_CLUSTER__K8S__NAMESPACE.$EMQX_CLUSTER__K8S__SUFFIX"
elif [[ "$EMQX_CLUSTER__DISCOVERY" == "k8s" ]] && \
[[ "$EMQX_CLUSTER__K8S__ADDRESS_TYPE" == 'hostname' ]] && \
[[ -n "$EMQX_CLUSTER__K8S__NAMESPACE" ]]; then
EMQX_CLUSTER__K8S__SUFFIX=${EMQX_CLUSTER__K8S__SUFFIX:-'svc.cluster.local'}
EMQX_HOST=$(grep -h "^$LOCAL_IP" /etc/hosts | grep -o "$(hostname).*.$EMQX_CLUSTER__K8S__NAMESPACE.$EMQX_CLUSTER__K8S__SUFFIX")
else else
EMQX_HOST="$LOCAL_IP" EMQX_HOST="$LOCAL_IP"
fi fi

View File

@ -1,4 +1,3 @@
ARCH ?= amd64
TOPDIR := /tmp/emqx TOPDIR := /tmp/emqx
# Keep this short to avoid bloating beam files with long file path info # Keep this short to avoid bloating beam files with long file path info
SRCDIR := $(TOPDIR)/$(PKG_VSN) SRCDIR := $(TOPDIR)/$(PKG_VSN)
@ -8,7 +7,7 @@ EMQX_NAME=$(subst -pkg,,$(EMQX_BUILD))
TAR_PKG := $(EMQX_REL)/_build/$(EMQX_BUILD)/rel/emqx/emqx-$(PKG_VSN).tar.gz TAR_PKG := $(EMQX_REL)/_build/$(EMQX_BUILD)/rel/emqx/emqx-$(PKG_VSN).tar.gz
SOURCE_PKG := $(EMQX_NAME)_$(PKG_VSN)_$(shell dpkg --print-architecture) SOURCE_PKG := $(EMQX_NAME)_$(PKG_VSN)_$(shell dpkg --print-architecture)
TARGET_PKG := $(EMQX_NAME)-$(SYSTEM)-$(PKG_VSN)-$(ARCH) TARGET_PKG := $(EMQX_NAME)-$(shell $(EMQX_REL)/scripts/pkg-full-vsn.sh)
.PHONY: all .PHONY: all
all: | $(BUILT) all: | $(BUILT)
@ -22,6 +21,8 @@ all: | $(BUILT)
cd $(SRCDIR) && dpkg-buildpackage -us -uc cd $(SRCDIR) && dpkg-buildpackage -us -uc
mkdir -p $(EMQX_REL)/_packages/$(EMQX_NAME) mkdir -p $(EMQX_REL)/_packages/$(EMQX_NAME)
cp $(SRCDIR)/../$(SOURCE_PKG).deb $(EMQX_REL)/_packages/$(EMQX_NAME)/$(TARGET_PKG).deb cp $(SRCDIR)/../$(SOURCE_PKG).deb $(EMQX_REL)/_packages/$(EMQX_NAME)/$(TARGET_PKG).deb
sha256sum $(EMQX_REL)/_packages/$(EMQX_NAME)/$(TARGET_PKG).deb | head -c 64 > \
$(EMQX_REL)/_packages/$(EMQX_NAME)/$(TARGET_PKG).deb.sha256
$(BUILT): $(BUILT):
mkdir -p $(TOPDIR) $(SRCDIR) mkdir -p $(TOPDIR) $(SRCDIR)

View File

@ -4,7 +4,7 @@ Priority: optional
Maintainer: emqx <contact@emqx.io> Maintainer: emqx <contact@emqx.io>
Build-Depends: debhelper (>=9) Build-Depends: debhelper (>=9)
Standards-Version: 3.9.6 Standards-Version: 3.9.6
Homepage: https://www.emqx.io Homepage: https://www.emqx.com
Package: emqx Package: emqx
Architecture: any Architecture: any

View File

@ -5,8 +5,9 @@ BUILT := $(SRCDIR)/BUILT
dash := - dash := -
none := none :=
space := $(none) $(none) space := $(none) $(none)
RPM_VSN ?= $(shell echo $(PKG_VSN) | grep -oE "[0-9]+\.[0-9]+(\.[0-9]+)?") ## RPM does not allow '-' in version nubmer and release string, replace with '_'
RPM_REL ?= $(shell echo $(PKG_VSN) | grep -oE "(alpha|beta|rc)\.[0-9]") RPM_VSN := $(subst -,_,$(PKG_VSN))
RPM_REL := otp$(subst -,_,$(OTP_VSN))
ARCH ?= amd64 ARCH ?= amd64
ifeq ($(ARCH),mips64) ifeq ($(ARCH),mips64)
@ -16,12 +17,8 @@ endif
EMQX_NAME=$(subst -pkg,,$(EMQX_BUILD)) EMQX_NAME=$(subst -pkg,,$(EMQX_BUILD))
TAR_PKG := $(EMQX_REL)/_build/$(EMQX_BUILD)/rel/emqx/emqx-$(PKG_VSN).tar.gz TAR_PKG := $(EMQX_REL)/_build/$(EMQX_BUILD)/rel/emqx/emqx-$(PKG_VSN).tar.gz
TARGET_PKG := $(EMQX_NAME)-$(SYSTEM)-$(PKG_VSN)-$(ARCH) SOURCE_PKG := emqx-$(RPM_VSN)-$(RPM_REL).$(shell uname -m)
ifeq ($(RPM_REL),) TARGET_PKG := $(EMQX_NAME)-$(shell $(EMQX_REL)/scripts/pkg-full-vsn.sh)
# no tail
RPM_REL := 1
endif
SOURCE_PKG := emqx-$(SYSTEM)-$(RPM_VSN)-$(RPM_REL).$(shell uname -m)
SYSTEMD := $(shell if command -v systemctl >/dev/null 2>&1; then echo yes; fi) SYSTEMD := $(shell if command -v systemctl >/dev/null 2>&1; then echo yes; fi)
# Not $(PWD) as it does not work for make -C # Not $(PWD) as it does not work for make -C
@ -47,11 +44,12 @@ all: | $(BUILT)
--define "_service_dst $(SERVICE_DST)" \ --define "_service_dst $(SERVICE_DST)" \
--define "_post_addition $(POST_ADDITION)" \ --define "_post_addition $(POST_ADDITION)" \
--define "_preun_addition $(PREUN_ADDITION)" \ --define "_preun_addition $(PREUN_ADDITION)" \
--define "_ostype -$(SYSTEM)" \
--define "_sharedstatedir /var/lib" \ --define "_sharedstatedir /var/lib" \
emqx.spec emqx.spec
mkdir -p $(EMQX_REL)/_packages/$(EMQX_NAME) mkdir -p $(EMQX_REL)/_packages/$(EMQX_NAME)
cp $(TOPDIR)/RPMS/$(shell uname -m)/$(SOURCE_PKG).rpm $(EMQX_REL)/_packages/$(EMQX_NAME)/$(TARGET_PKG).rpm cp $(TOPDIR)/RPMS/$(shell uname -m)/$(SOURCE_PKG).rpm $(EMQX_REL)/_packages/$(EMQX_NAME)/$(TARGET_PKG).rpm
sha256sum $(EMQX_REL)/_packages/$(EMQX_NAME)/$(TARGET_PKG).rpm | head -c 64 > \
$(EMQX_REL)/_packages/$(EMQX_NAME)/$(TARGET_PKG).rpm.sha256
$(BUILT): $(BUILT):
mkdir -p $(TOPDIR) $(SRCDIR) $(SRCDIR)/BUILT mkdir -p $(TOPDIR) $(SRCDIR) $(SRCDIR)/BUILT

View File

@ -5,7 +5,7 @@
%define _log_dir %{_var}/log/%{_name} %define _log_dir %{_var}/log/%{_name}
%define _lib_home /usr/lib/%{_name} %define _lib_home /usr/lib/%{_name}
%define _var_home %{_sharedstatedir}/%{_name} %define _var_home %{_sharedstatedir}/%{_name}
%define _build_name_fmt %{_arch}/%{_name}%{?_ostype}-%{_version}-%{_release}.%{_arch}.rpm %define _build_name_fmt %{_arch}/%{_name}-%{_version}-%{_release}.%{_arch}.rpm
%define _build_id_links none %define _build_id_links none
Name: %{_package_name} Name: %{_package_name}

View File

@ -1,4 +1,4 @@
## EMQX Configuration 4.3 ## EMQX Configuration
## NOTE: Do not change format of CONFIG_SECTION_{BGN,END} comments! ## NOTE: Do not change format of CONFIG_SECTION_{BGN,END} comments!
@ -101,6 +101,11 @@ cluster.autoclean = 5m
## Value: String ## Value: String
## cluster.dns.app = emqx ## cluster.dns.app = emqx
## Type of dns record.
##
## Value: Value: a | srv
## cluster.dns.type = a
##-------------------------------------------------------------------- ##--------------------------------------------------------------------
## Cluster using etcd ## Cluster using etcd
@ -376,7 +381,7 @@ rpc.port_discovery = stateless
## ##
## Value: Interger [0-256] ## Value: Interger [0-256]
## Default = 1 ## Default = 1
#rpc.tcp_client_num = 1 #rpc.tcp_client_num = 0
## RCP Client connect timeout. ## RCP Client connect timeout.
## ##
@ -2298,6 +2303,29 @@ module.presence.qos = 1
## module.rewrite.pub.rule.1 = x/# ^x/y/(.+)$ z/y/$1 ## module.rewrite.pub.rule.1 = x/# ^x/y/(.+)$ z/y/$1
## module.rewrite.sub.rule.1 = y/+/z/# ^y/(.+)/z/(.+)$ y/z/$2 ## module.rewrite.sub.rule.1 = y/+/z/# ^y/(.+)/z/(.+)$ y/z/$2
##--------------------------------------------------------------------
## Slow Subscribers Statistics Module
## the expire time of the record which in topk
##
## Value: 5 minutes
#module.slow_subs.expire_interval = 5m
## maximum number of Top-K record
##
## Defalut: 10
#module.slow_subs.top_k_num = 10
## Stats Type
##
## Default: whole
#module.slow_subs.stats_type = whole
## Stats Threshold
##
## Default: 500ms
#module.slow_subs.threshold = 500ms
## CONFIG_SECTION_END=modules ================================================== ## CONFIG_SECTION_END=modules ==================================================
##------------------------------------------------------------------- ##-------------------------------------------------------------------

Some files were not shown because too many files have changed in this diff Show More