Merge branch 'main-v4.4' into sync-v4.3-to-v4.4

Author: zhongwencool
Date:   2022-02-08 09:59:21 +08:00
Commit: 617402ed09
137 changed files with 5926 additions and 2083 deletions


@ -1,4 +1,4 @@
ARG BUILD_FROM=emqx/build-env:erl23.2.7.2-emqx-3-ubuntu20.04 ARG BUILD_FROM=ghcr.io/emqx/emqx-builder/4.4-4:24.1.5-3-ubuntu20.04
FROM ${BUILD_FROM} FROM ${BUILD_FROM}
ARG EMQX_NAME=emqx ARG EMQX_NAME=emqx


@ -1,5 +1,23 @@
#!/bin/bash #!/usr/bin/env bash
## This script tests built package start/stop
## Accept 2 args PACKAGE_NAME and PACKAGE_TYPE
set -x -e -u set -x -e -u
if [ -z "${1:-}" ]; then
echo "Usage $0 <PACKAGE_NAME> zip|pkg"
exit 1
fi
if [ "${2:-}" != 'zip' ] && [ "${2:-}" != 'pkg' ]; then
echo "Usage $0 <PACKAGE_NAME> zip|pkg"
exit 1
fi
PACKAGE_NAME="${1}"
PACKAGE_TYPE="${2}"
export CODE_PATH=${CODE_PATH:-"/emqx"} export CODE_PATH=${CODE_PATH:-"/emqx"}
export EMQX_NAME=${EMQX_NAME:-"emqx"} export EMQX_NAME=${EMQX_NAME:-"emqx"}
export PACKAGE_PATH="${CODE_PATH}/_packages/${EMQX_NAME}" export PACKAGE_PATH="${CODE_PATH}/_packages/${EMQX_NAME}"
@ -7,6 +25,27 @@ export RELUP_PACKAGE_PATH="${CODE_PATH}/_upgrade_base"
# export EMQX_NODE_NAME="emqx-on-$(uname -m)@127.0.0.1" # export EMQX_NODE_NAME="emqx-on-$(uname -m)@127.0.0.1"
# export EMQX_NODE_COOKIE=$(date +%s%N) # export EMQX_NODE_COOKIE=$(date +%s%N)
if [ "$PACKAGE_TYPE" = 'zip' ]; then
PKG_SUFFIX="zip"
else
SYSTEM="$($CODE_PATH/scripts/get-distro.sh)"
case "${SYSTEM:-}" in
ubuntu*|debian*|raspbian*)
PKG_SUFFIX='deb'
;;
*)
PKG_SUFFIX='rpm'
;;
esac
fi
PACKAGE_FILE_NAME="${PACKAGE_NAME}.${PKG_SUFFIX}"
PACKAGE_FILE="${PACKAGE_PATH}/${PACKAGE_FILE_NAME}"
if ! [ -f "$PACKAGE_FILE" ]; then
echo "$PACKAGE_FILE is not a file"
exit 1
fi
case "$(uname -m)" in case "$(uname -m)" in
x86_64) x86_64)
ARCH='amd64' ARCH='amd64'
@ -22,7 +61,6 @@ export ARCH
emqx_prepare(){ emqx_prepare(){
mkdir -p "${PACKAGE_PATH}" mkdir -p "${PACKAGE_PATH}"
if [ ! -d "/paho-mqtt-testing" ]; then if [ ! -d "/paho-mqtt-testing" ]; then
git clone -b develop-4.0 https://github.com/emqx/paho.mqtt.testing.git /paho-mqtt-testing git clone -b develop-4.0 https://github.com/emqx/paho.mqtt.testing.git /paho-mqtt-testing
fi fi
@ -31,18 +69,20 @@ emqx_prepare(){
emqx_test(){ emqx_test(){
cd "${PACKAGE_PATH}" cd "${PACKAGE_PATH}"
local packagename="${PACKAGE_FILE_NAME}"
for var in "$PACKAGE_PATH"/"${EMQX_NAME}"-*;do case "$PKG_SUFFIX" in
case ${var##*.} in
"zip") "zip")
packagename=$(basename "${PACKAGE_PATH}/${EMQX_NAME}"-*.zip)
unzip -q "${PACKAGE_PATH}/${packagename}" unzip -q "${PACKAGE_PATH}/${packagename}"
export EMQX_ZONE__EXTERNAL__SERVER__KEEPALIVE=60 \ export EMQX_ZONE__EXTERNAL__SERVER__KEEPALIVE=60 \
EMQX_MQTT__MAX_TOPIC_ALIAS=10 EMQX_MQTT__MAX_TOPIC_ALIAS=10
sed -i '/emqx_telemetry/d' "${PACKAGE_PATH}"/emqx/data/loaded_plugins sed -i '/emqx_telemetry/d' "${PACKAGE_PATH}"/emqx/data/loaded_plugins
echo "running ${packagename} start" echo "running ${packagename} start"
"${PACKAGE_PATH}"/emqx/bin/emqx start || ( tail "${PACKAGE_PATH}"/emqx/log/emqx.log.1 && exit 1 ) if ! "${PACKAGE_PATH}"/emqx/bin/emqx start; then
cat "${PACKAGE_PATH}"/emqx/log/erlang.log.1 || true
cat "${PACKAGE_PATH}"/emqx/log/emqx.log.1 || true
exit 1
fi
IDLE_TIME=0 IDLE_TIME=0
while ! "${PACKAGE_PATH}"/emqx/bin/emqx_ctl status | grep -qE 'Node\s.*@.*\sis\sstarted' while ! "${PACKAGE_PATH}"/emqx/bin/emqx_ctl status | grep -qE 'Node\s.*@.*\sis\sstarted'
do do
@ -60,7 +100,6 @@ emqx_test(){
rm -rf "${PACKAGE_PATH}"/emqx rm -rf "${PACKAGE_PATH}"/emqx
;; ;;
"deb") "deb")
packagename=$(basename "${PACKAGE_PATH}/${EMQX_NAME}"-*.deb)
dpkg -i "${PACKAGE_PATH}/${packagename}" dpkg -i "${PACKAGE_PATH}/${packagename}"
if [ "$(dpkg -l |grep emqx |awk '{print $1}')" != "ii" ] if [ "$(dpkg -l |grep emqx |awk '{print $1}')" != "ii" ]
then then
@ -87,15 +126,8 @@ emqx_test(){
fi fi
;; ;;
"rpm") "rpm")
packagename=$(basename "${PACKAGE_PATH}/${EMQX_NAME}"-*.rpm) yum install -y "${PACKAGE_PATH}/${packagename}"
if ! rpm -q ${EMQX_NAME} | grep -q "${EMQX_NAME}"; then
if [[ "${ARCH}" == "amd64" && $(rpm -E '%{rhel}') == 7 ]] ; then
# EMQX OTP requires openssl11 to have TLS1.3 support
yum install -y openssl11
fi
rpm -ivh "${PACKAGE_PATH}/${packagename}"
if ! rpm -q emqx | grep -q emqx; then
echo "package install error" echo "package install error"
exit 1 exit 1
fi fi
@ -110,9 +142,7 @@ emqx_test(){
exit 1 exit 1
fi fi
;; ;;
esac esac
done
} }
running_test(){ running_test(){
@ -120,7 +150,11 @@ running_test(){
EMQX_MQTT__MAX_TOPIC_ALIAS=10 EMQX_MQTT__MAX_TOPIC_ALIAS=10
sed -i '/emqx_telemetry/d' /var/lib/emqx/loaded_plugins sed -i '/emqx_telemetry/d' /var/lib/emqx/loaded_plugins
emqx start || ( tail /var/log/emqx/emqx.log.1 && exit 1 ) if ! emqx start; then
cat /var/log/emqx/erlang.log.1 || true
cat /var/log/emqx/emqx.log.1 || true
exit 1
fi
IDLE_TIME=0 IDLE_TIME=0
while ! emqx_ctl status | grep -qE 'Node\s.*@.*\sis\sstarted' while ! emqx_ctl status | grep -qE 'Node\s.*@.*\sis\sstarted'
do do
@ -146,7 +180,11 @@ relup_test(){
while read -r pkg; do while read -r pkg; do
packagename=$(basename "${pkg}") packagename=$(basename "${pkg}")
unzip "$packagename" unzip "$packagename"
./emqx/bin/emqx start || ( tail emqx/log/emqx.log.1 && exit 1 ) if ! ./emqx/bin/emqx start; then
cat emqx/log/erlang.log.1 || true
cat emqx/log/emqx.log.1 || true
exit 1
fi
./emqx/bin/emqx_ctl status ./emqx/bin/emqx_ctl status
./emqx/bin/emqx versions ./emqx/bin/emqx versions
cp "${PACKAGE_PATH}/${EMQX_NAME}"-*-"${TARGET_VERSION}-${ARCH}".zip ./emqx/releases cp "${PACKAGE_PATH}/${EMQX_NAME}"-*-"${TARGET_VERSION}-${ARCH}".zip ./emqx/releases


@ -3,7 +3,7 @@ version: '3.9'
services: services:
erlang: erlang:
container_name: erlang container_name: erlang
image: emqx/build-env:erl23.2.7.2-emqx-3-ubuntu20.04 image: ghcr.io/emqx/emqx-builder/4.4-4:24.1.5-3-ubuntu20.04
env_file: env_file:
- conf.env - conf.env
environment: environment:


@ -15,6 +15,8 @@ PROFILE="$1"
VSN="$2" VSN="$2"
OLD_VSN="$3" OLD_VSN="$3"
PACKAGE_PATH="$4" PACKAGE_PATH="$4"
FROM_OTP_VSN="${5:-24.1.5-3}"
TO_OTP_VSN="${6:-24.1.5-3}"
TEMPDIR=$(mktemp -d) TEMPDIR=$(mktemp -d)
trap '{ rm -rf -- "$TEMPDIR"; }' EXIT trap '{ rm -rf -- "$TEMPDIR"; }' EXIT
@ -37,4 +39,6 @@ exec docker run \
--var ONE_MORE_EMQX_PATH="/relup_test/one_more_emqx" \ --var ONE_MORE_EMQX_PATH="/relup_test/one_more_emqx" \
--var VSN="$VSN" \ --var VSN="$VSN" \
--var OLD_VSN="$OLD_VSN" \ --var OLD_VSN="$OLD_VSN" \
--var FROM_OTP_VSN="$FROM_OTP_VSN" \
--var TO_OTP_VSN="$TO_OTP_VSN" \
relup.lux relup.lux


@ -3,6 +3,8 @@
[config var=ONE_MORE_EMQX_PATH] [config var=ONE_MORE_EMQX_PATH]
[config var=VSN] [config var=VSN]
[config var=OLD_VSN] [config var=OLD_VSN]
[config var=FROM_OTP_VSN]
[config var=TO_OTP_VSN]
[config shell_cmd=/bin/bash] [config shell_cmd=/bin/bash]
[config timeout=600000] [config timeout=600000]
@ -19,7 +21,7 @@
[shell emqx] [shell emqx]
!cd $PACKAGE_PATH !cd $PACKAGE_PATH
!unzip -q -o $PROFILE-ubuntu20.04-$(echo $OLD_VSN | sed -r 's/[v|e]//g')-amd64.zip !unzip -q -o $PROFILE-$(echo $OLD_VSN | sed -r 's/[v|e]//g')-otp${FROM_OTP_VSN}-ubuntu20.04-amd64.zip
?SH-PROMPT ?SH-PROMPT
!cd emqx !cd emqx
@ -80,7 +82,7 @@
!echo "" > log/emqx.log.1 !echo "" > log/emqx.log.1
?SH-PROMPT ?SH-PROMPT
!cp -f ../$PROFILE-ubuntu20.04-$VSN-amd64.zip releases/ !cp -f ../$PROFILE-$VSN-otp${TO_OTP_VSN}-ubuntu20.04-amd64.zip releases/
!./bin/emqx install $VSN !./bin/emqx install $VSN
?Made release permanent: "$VSN" ?Made release permanent: "$VSN"
@ -105,7 +107,7 @@
!echo "" > log/emqx.log.1 !echo "" > log/emqx.log.1
?SH-PROMPT ?SH-PROMPT
!cp -f ../$PROFILE-ubuntu20.04-$VSN-amd64.zip releases/ !cp -f ../$PROFILE-$VSN-otp${TO_OTP_VSN}-ubuntu20.04-amd64.zip releases/
!./bin/emqx install $VSN !./bin/emqx install $VSN
?Made release permanent: "$VSN" ?Made release permanent: "$VSN"


@ -1,5 +1,9 @@
name: Cross build packages name: Cross build packages
concurrency:
group: build-${{ github.event_name }}-${{ github.ref }}
cancel-in-progress: true
on: on:
schedule: schedule:
- cron: '0 */6 * * *' - cron: '0 */6 * * *'
@ -11,7 +15,8 @@ on:
jobs: jobs:
prepare: prepare:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
container: emqx/build-env:erl23.2.7.2-emqx-3-ubuntu20.04 # prepare source with any OTP version, no need for a matrix
container: ghcr.io/emqx/emqx-builder/4.4-4:24.1.5-3-ubuntu20.04
outputs: outputs:
profiles: ${{ steps.set_profile.outputs.profiles }} profiles: ${{ steps.set_profile.outputs.profiles }}
@ -25,8 +30,8 @@ jobs:
- name: set profile - name: set profile
id: set_profile id: set_profile
shell: bash shell: bash
working-directory: source
run: | run: |
cd source
if make emqx-ee --dry-run > /dev/null 2>&1; then if make emqx-ee --dry-run > /dev/null 2>&1; then
old_vsns="$(./scripts/relup-base-vsns.sh enterprise | xargs)" old_vsns="$(./scripts/relup-base-vsns.sh enterprise | xargs)"
echo "::set-output name=old_vsns::$old_vsns" echo "::set-output name=old_vsns::$old_vsns"
@ -41,7 +46,7 @@ jobs:
run: | run: |
make -C source deps-all make -C source deps-all
zip -ryq source.zip source/* source/.[^.]* zip -ryq source.zip source/* source/.[^.]*
- name: get_all_deps - name: get_all_deps_ee
if: endsWith(github.repository, 'enterprise') if: endsWith(github.repository, 'enterprise')
run: | run: |
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
@ -61,8 +66,11 @@ jobs:
if: endsWith(github.repository, 'emqx') if: endsWith(github.repository, 'emqx')
strategy: strategy:
fail-fast: false
matrix: matrix:
profile: ${{fromJSON(needs.prepare.outputs.profiles)}} profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
otp:
- 23.2
exclude: exclude:
- profile: emqx-edge - profile: emqx-edge
@ -74,14 +82,16 @@ jobs:
- name: unzip source code - name: unzip source code
run: Expand-Archive -Path source.zip -DestinationPath ./ run: Expand-Archive -Path source.zip -DestinationPath ./
- uses: ilammy/msvc-dev-cmd@v1 - uses: ilammy/msvc-dev-cmd@v1
- uses: gleam-lang/setup-erlang@v1.1.0 - uses: gleam-lang/setup-erlang@v1.1.2
id: install_erlang id: install_erlang
## gleam-lang/setup-erlang does not yet support the installation of otp24 on windows
with: with:
otp-version: 23.2 otp-version: ${{ matrix.otp }}
- name: build - name: build
env: env:
PYTHON: python PYTHON: python
DIAGNOSTIC: 1 DIAGNOSTIC: 1
working-directory: source
run: | run: |
$env:PATH = "${{ steps.install_erlang.outputs.erlpath }}\bin;$env:PATH" $env:PATH = "${{ steps.install_erlang.outputs.erlpath }}\bin;$env:PATH"
erl -eval "erlang:display(crypto:info_lib())" -s init stop erl -eval "erlang:display(crypto:info_lib())" -s init stop
@ -89,12 +99,11 @@ jobs:
$version = $( "${{ github.ref }}" -replace "^(.*)/(.*)/" ) $version = $( "${{ github.ref }}" -replace "^(.*)/(.*)/" )
if ($version -match "^v[0-9]+\.[0-9]+(\.[0-9]+)?") { if ($version -match "^v[0-9]+\.[0-9]+(\.[0-9]+)?") {
$regex = "[0-9]+\.[0-9]+(-alpha|-beta|-rc)?\.[0-9]+" $regex = "[0-9]+\.[0-9]+(-alpha|-beta|-rc)?\.[0-9]+"
$pkg_name = "${{ matrix.profile }}-windows-$([regex]::matches($version, $regex).value).zip" $pkg_name = "${{ matrix.profile }}-$([regex]::matches($version, $regex).value)-otp${{ matrix.otp }}-windows-amd64.zip"
} }
else { else {
$pkg_name = "${{ matrix.profile }}-windows-$($version -replace '/').zip" $pkg_name = "${{ matrix.profile }}-$($version -replace '/')-otp${{ matrix.otp }}-windows-amd64.zip"
} }
cd source
## We do not build/release bcrypt for windows package ## We do not build/release bcrypt for windows package
Remove-Item -Recurse -Force -Path _build/default/lib/bcrypt/ Remove-Item -Recurse -Force -Path _build/default/lib/bcrypt/
if (Test-Path rebar.lock) { if (Test-Path rebar.lock) {
@ -103,6 +112,8 @@ jobs:
make ensure-rebar3 make ensure-rebar3
copy rebar3 "${{ steps.install_erlang.outputs.erlpath }}\bin" copy rebar3 "${{ steps.install_erlang.outputs.erlpath }}\bin"
ls "${{ steps.install_erlang.outputs.erlpath }}\bin" ls "${{ steps.install_erlang.outputs.erlpath }}\bin"
head -2 rebar3
which rebar3
rebar3 --help rebar3 --help
make ${{ matrix.profile }} make ${{ matrix.profile }}
mkdir -p _packages/${{ matrix.profile }} mkdir -p _packages/${{ matrix.profile }}
@ -111,8 +122,8 @@ jobs:
Get-FileHash -Path "_packages/${{ matrix.profile }}/$pkg_name" | Format-List | grep 'Hash' | awk '{print $3}' > _packages/${{ matrix.profile }}/$pkg_name.sha256 Get-FileHash -Path "_packages/${{ matrix.profile }}/$pkg_name" | Format-List | grep 'Hash' | awk '{print $3}' > _packages/${{ matrix.profile }}/$pkg_name.sha256
- name: run emqx - name: run emqx
timeout-minutes: 1 timeout-minutes: 1
working-directory: source
run: | run: |
cd source
./_build/${{ matrix.profile }}/rel/emqx/bin/emqx start ./_build/${{ matrix.profile }}/rel/emqx/bin/emqx start
Start-Sleep -s 5 Start-Sleep -s 5
./_build/${{ matrix.profile }}/rel/emqx/bin/emqx stop ./_build/${{ matrix.profile }}/rel/emqx/bin/emqx stop
@ -126,18 +137,18 @@ jobs:
mac: mac:
needs: prepare needs: prepare
strategy: strategy:
fail-fast: false
matrix: matrix:
profile: ${{fromJSON(needs.prepare.outputs.profiles)}} profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
erl_otp: otp:
- 23.2.7.2-emqx-3 - 24.1.5-3
macos:
- macos-11
- macos-10.15
exclude: exclude:
- profile: emqx-edge - profile: emqx-edge
macos:
- macos-10.15
runs-on: ${{ matrix.macos }} runs-on: ${{ matrix.macos }}
steps: steps:
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v2
with: with:
@ -154,8 +165,8 @@ jobs:
- uses: actions/cache@v2 - uses: actions/cache@v2
id: cache id: cache
with: with:
path: ~/.kerl/${{ matrix.erl_otp }} path: ~/.kerl/${{ matrix.otp }}
key: otp-install-${{ matrix.erl_otp }}-${{ matrix.macos }} key: otp-install-${{ matrix.otp }}-${{ matrix.macos }}
- name: build erlang - name: build erlang
if: steps.cache.outputs.cache-hit != 'true' if: steps.cache.outputs.cache-hit != 'true'
timeout-minutes: 60 timeout-minutes: 60
@ -164,21 +175,22 @@ jobs:
OTP_GITHUB_URL: https://github.com/emqx/otp OTP_GITHUB_URL: https://github.com/emqx/otp
run: | run: |
kerl update releases kerl update releases
kerl build ${{ matrix.erl_otp }} kerl build ${{ matrix.otp }}
kerl install ${{ matrix.erl_otp }} $HOME/.kerl/${{ matrix.erl_otp }} kerl install ${{ matrix.otp }} $HOME/.kerl/${{ matrix.otp }}
- name: build - name: build
working-directory: source
run: | run: |
. $HOME/.kerl/${{ matrix.erl_otp }}/activate . $HOME/.kerl/${{ matrix.otp }}/activate
cd source
make ensure-rebar3 make ensure-rebar3
sudo cp rebar3 /usr/local/bin/rebar3 sudo cp rebar3 /usr/local/bin/rebar3
rm -rf _build/${{ matrix.profile }}/lib rm -rf _build/${{ matrix.profile }}/lib
make ${{ matrix.profile }}-zip make ${{ matrix.profile }}-zip
- name: test - name: test
working-directory: source
run: | run: |
cd source set -x
pkg_name=$(basename _packages/${{ matrix.profile }}/${{ matrix.profile }}-*.zip) pkg_name=$(find _packages/${{ matrix.profile }} -mindepth 1 -maxdepth 1 -iname \*.zip)
unzip -q _packages/${{ matrix.profile }}/$pkg_name unzip -q $pkg_name
gsed -i '/emqx_telemetry/d' ./emqx/data/loaded_plugins gsed -i '/emqx_telemetry/d' ./emqx/data/loaded_plugins
./emqx/bin/emqx start || cat emqx/log/erlang.log.1 ./emqx/bin/emqx start || cat emqx/log/erlang.log.1
ready='no' ready='no'
@ -197,11 +209,11 @@ jobs:
./emqx/bin/emqx_ctl status ./emqx/bin/emqx_ctl status
./emqx/bin/emqx stop ./emqx/bin/emqx stop
rm -rf emqx rm -rf emqx
openssl dgst -sha256 ./_packages/${{ matrix.profile }}/$pkg_name | awk '{print $2}' > ./_packages/${{ matrix.profile }}/$pkg_name.sha256 openssl dgst -sha256 $pkg_name | awk '{print $2}' > $pkg_name.sha256
- uses: actions/upload-artifact@v1 - uses: actions/upload-artifact@v1
if: startsWith(github.ref, 'refs/tags/') if: startsWith(github.ref, 'refs/tags/')
with: with:
name: ${{ matrix.profile }} name: ${{ matrix.profile }}-${{ matrix.otp }}
path: source/_packages/${{ matrix.profile }}/. path: source/_packages/${{ matrix.profile }}/.
linux: linux:
@ -210,8 +222,15 @@ jobs:
needs: prepare needs: prepare
strategy: strategy:
fail-fast: false
matrix: matrix:
profile: ${{fromJSON(needs.prepare.outputs.profiles)}} profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
package:
- zip
- pkg
otp:
- 23.3.4.9-3
- 24.1.5-3
arch: arch:
- amd64 - amd64
- arm64 - arm64
@ -228,6 +247,8 @@ jobs:
- raspbian10 - raspbian10
# - raspbian9 # - raspbian9
exclude: exclude:
- package: pkg
otp: 23.3.4.9-3
- os: centos6 - os: centos6
arch: arm64 arch: arm64
- os: raspbian9 - os: raspbian9
@ -248,15 +269,11 @@ jobs:
shell: bash shell: bash
steps: steps:
- name: prepare docker - uses: docker/setup-buildx-action@v1
run: | - uses: docker/setup-qemu-action@v1
mkdir -p $HOME/.docker with:
echo '{ "experimental": "enabled" }' | tee $HOME/.docker/config.json image: tonistiigi/binfmt:latest
echo '{ "experimental": true, "storage-driver": "overlay2", "max-concurrent-downloads": 50, "max-concurrent-uploads": 50}' | sudo tee /etc/docker/daemon.json platforms: all
sudo systemctl restart docker
docker info
docker buildx create --use --name mybuild
docker run --rm --privileged tonistiigi/binfmt --install all
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v2
with: with:
name: source name: source
@ -264,11 +281,14 @@ jobs:
- name: unzip source code - name: unzip source code
run: unzip -q source.zip run: unzip -q source.zip
- name: downloads old emqx zip packages - name: downloads old emqx zip packages
if: matrix.package == 'zip'
env: env:
OTP_VSN: ${{ matrix.otp }}
PROFILE: ${{ matrix.profile }} PROFILE: ${{ matrix.profile }}
ARCH: ${{ matrix.arch }} ARCH: ${{ matrix.arch }}
SYSTEM: ${{ matrix.os }} SYSTEM: ${{ matrix.os }}
OLD_VSNS: ${{ needs.prepare.outputs.old_vsns }} OLD_VSNS: ${{ needs.prepare.outputs.old_vsns }}
working-directory: source
run: | run: |
set -e -x -u set -e -x -u
broker=$PROFILE broker=$PROFILE
@ -279,65 +299,59 @@ jobs:
export ARCH="arm" export ARCH="arm"
fi fi
mkdir -p source/_upgrade_base mkdir -p _upgrade_base
cd source/_upgrade_base cd _upgrade_base
old_vsns=($(echo $OLD_VSNS | tr ' ' ' ')) old_vsns=($(echo $OLD_VSNS | tr ' ' ' '))
for tag in ${old_vsns[@]}; do for tag in ${old_vsns[@]}; do
if [ ! -z "$(echo $(curl -I -m 10 -o /dev/null -s -w %{http_code} https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip) | grep -oE "^[23]+")" ];then package_name="${PROFILE}-${tag#[e|v]}-otp${OTP_VSN}-${SYSTEM}-${ARCH}"
wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip if [ ! -z "$(echo $(curl -I -m 10 -o /dev/null -s -w %{http_code} https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$package_name.zip) | grep -oE "^[23]+")" ]; then
wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip.sha256 wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$package_name.zip
echo "$(cat $PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip.sha256) $PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip" | sha256sum -c || exit 1 wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$package_name.zip.sha256
echo "$(cat $package_name.zip.sha256) $package_name.zip" | sha256sum -c || exit 1
fi fi
done done
- name: build emqx packages - name: build emqx packages
env: env:
ERL_OTP: erl23.2.7.2-emqx-3 OTP: ${{ matrix.otp }}
PROFILE: ${{ matrix.profile }} PROFILE: ${{ matrix.profile }}
PACKAGE: ${{ matrix.package}}
ARCH: ${{ matrix.arch }} ARCH: ${{ matrix.arch }}
SYSTEM: ${{ matrix.os }} SYSTEM: ${{ matrix.os }}
working-directory: source
run: | run: |
set -e -u ./scripts/buildx.sh \
cd source --profile "${PROFILE}" \
docker buildx build --no-cache \ --pkgtype "${PACKAGE}" \
--platform=linux/$ARCH \ --arch "${ARCH}" \
-t cross_build_emqx_for_$SYSTEM \ --builder "ghcr.io/emqx/emqx-builder/4.4-4:${OTP}-${SYSTEM}"
-f .ci/build_packages/Dockerfile \
--build-arg BUILD_FROM=emqx/build-env:$ERL_OTP-$SYSTEM \
--build-arg EMQX_NAME=$PROFILE \
--output type=tar,dest=/tmp/cross-build-$PROFILE-for-$SYSTEM.tar .
mkdir -p /tmp/packages/$PROFILE
tar -xvf /tmp/cross-build-$PROFILE-for-$SYSTEM.tar --wildcards emqx/_packages/$PROFILE/*
mv emqx/_packages/$PROFILE/* /tmp/packages/$PROFILE/
rm -rf /tmp/cross-build-$PROFILE-for-$SYSTEM.tar
docker rm -f $(docker ps -a -q)
docker volume prune -f
- name: create sha256 - name: create sha256
working-directory: source
env: env:
PROFILE: ${{ matrix.profile}} PROFILE: ${{ matrix.profile}}
run: | run: |
if [ -d /tmp/packages/$PROFILE ]; then if [ -d _packages/$PROFILE ]; then
cd /tmp/packages/$PROFILE cd _packages/$PROFILE
for var in $(ls emqx-* ); do for var in $(ls emqx-* ); do
bash -c "echo $(sha256sum $var | awk '{print $1}') > $var.sha256" sudo bash -c "echo $(sha256sum $var | awk '{print $1}') > $var.sha256"
done done
cd - cd -
fi fi
- uses: actions/upload-artifact@v1 - uses: actions/upload-artifact@v1
if: startsWith(github.ref, 'refs/tags/') if: startsWith(github.ref, 'refs/tags/')
with: with:
name: ${{ matrix.profile }} name: ${{ matrix.profile }}-${{ matrix.otp }}
path: /tmp/packages/${{ matrix.profile }}/. path: source/_packages/${{ matrix.profile }}/.
docker: docker:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
needs: prepare needs: prepare
strategy: strategy:
fail-fast: false
matrix: matrix:
profile: ${{fromJSON(needs.prepare.outputs.profiles)}} profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
otp:
- 24.1.5-3
steps: steps:
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v2
@ -378,8 +392,8 @@ jobs:
tags: ${{ steps.meta.outputs.tags }} tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }} labels: ${{ steps.meta.outputs.labels }}
build-args: | build-args: |
BUILD_FROM=emqx/build-env:erl23.2.7.2-emqx-3-alpine BUILD_FROM=ghcr.io/emqx/emqx-builder/4.4-4:${{ matrix.otp }}-alpine3.14
RUN_FROM=alpine:3.12 RUN_FROM=alpine:3.14
EMQX_NAME=${{ matrix.profile }} EMQX_NAME=${{ matrix.profile }}
file: source/deploy/docker/Dockerfile file: source/deploy/docker/Dockerfile
context: source context: source
@ -393,8 +407,8 @@ jobs:
tags: ${{ steps.meta.outputs.tags }} tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }} labels: ${{ steps.meta.outputs.labels }}
build-args: | build-args: |
BUILD_FROM=emqx/build-env:erl23.2.7.2-emqx-3-alpine BUILD_FROM=ghcr.io/emqx/emqx-builder/4.4-4:${{ matrix.otp }}-alpine3.14
RUN_FROM=alpine:3.12 RUN_FROM=alpine:3.14
EMQX_NAME=${{ matrix.profile }} EMQX_NAME=${{ matrix.profile }}
file: source/deploy/docker/Dockerfile.enterprise file: source/deploy/docker/Dockerfile.enterprise
context: source context: source
@ -431,6 +445,9 @@ jobs:
strategy: strategy:
matrix: matrix:
profile: ${{fromJSON(needs.prepare.outputs.profiles)}} profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
otp:
- 23.3.4.9-3
- 24.1.5-3
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
@ -441,7 +458,7 @@ jobs:
echo 'EOF' >> $GITHUB_ENV echo 'EOF' >> $GITHUB_ENV
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v2
with: with:
name: ${{ matrix.profile }} name: ${{ matrix.profile }}-${{ matrix.otp }}
path: ./_packages/${{ matrix.profile }} path: ./_packages/${{ matrix.profile }}
- name: install dos2unix - name: install dos2unix
run: sudo apt-get update && sudo apt install -y dos2unix run: sudo apt-get update && sudo apt install -y dos2unix
@ -491,30 +508,23 @@ jobs:
-X POST \ -X POST \
-d "{\"repo\":\"emqx/emqx\", \"tag\": \"${{ env.version }}\" }" \ -d "{\"repo\":\"emqx/emqx\", \"tag\": \"${{ env.version }}\" }" \
${{ secrets.EMQX_IO_RELEASE_API }} ${{ secrets.EMQX_IO_RELEASE_API }}
- name: push docker image to aws ecr
if: github.event_name == 'release' && matrix.profile == 'emqx'
run: |
set -e -x -u
aws ecr-public get-login-password --region us-east-1 | docker login --username AWS --password-stdin public.ecr.aws
docker tag emqx/emqx:${version#v} public.ecr.aws/emqx/emqx:${version#v}
docker push public.ecr.aws/emqx/emqx:${version#v}
- name: update repo.emqx.io - name: update repo.emqx.io
if: github.event_name == 'release' && endsWith(github.repository, 'enterprise') && matrix.profile == 'emqx-ee' if: github.event_name == 'release' && matrix.profile == 'emqx-ee'
run: | run: |
curl --silent --show-error \ curl --silent --show-error \
-H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \ -H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \
-H "Accept: application/vnd.github.v3+json" \ -H "Accept: application/vnd.github.v3+json" \
-X POST \ -X POST \
-d "{\"ref\":\"v1.0.3\",\"inputs\":{\"version\": \"${{ env.version }}\", \"emqx_ee\": \"true\"}}" \ -d "{\"ref\":\"v1.0.4\",\"inputs\":{\"version\": \"${{ env.version }}\", \"emqx_ee\": \"true\"}}" \
"https://api.github.com/repos/emqx/emqx-ci-helper/actions/workflows/update_emqx_repos.yaml/dispatches" "https://api.github.com/repos/emqx/emqx-ci-helper/actions/workflows/update_emqx_repos.yaml/dispatches"
- name: update repo.emqx.io - name: update repo.emqx.io
if: github.event_name == 'release' && endsWith(github.repository, 'emqx') && matrix.profile == 'emqx' if: github.event_name == 'release' && matrix.profile == 'emqx'
run: | run: |
curl --silent --show-error \ curl --silent --show-error \
-H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \ -H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \
-H "Accept: application/vnd.github.v3+json" \ -H "Accept: application/vnd.github.v3+json" \
-X POST \ -X POST \
-d "{\"ref\":\"v1.0.3\",\"inputs\":{\"version\": \"${{ env.version }}\", \"emqx_ce\": \"true\"}}" \ -d "{\"ref\":\"v1.0.4\",\"inputs\":{\"version\": \"${{ env.version }}\", \"emqx_ce\": \"true\"}}" \
"https://api.github.com/repos/emqx/emqx-ci-helper/actions/workflows/update_emqx_repos.yaml/dispatches" "https://api.github.com/repos/emqx/emqx-ci-helper/actions/workflows/update_emqx_repos.yaml/dispatches"
- name: update homebrew packages - name: update homebrew packages
if: github.event_name == 'release' && endsWith(github.repository, 'emqx') && matrix.profile == 'emqx' if: github.event_name == 'release' && endsWith(github.repository, 'emqx') && matrix.profile == 'emqx'
@ -524,7 +534,7 @@ jobs:
-H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \ -H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \
-H "Accept: application/vnd.github.v3+json" \ -H "Accept: application/vnd.github.v3+json" \
-X POST \ -X POST \
-d "{\"ref\":\"v1.0.3\",\"inputs\":{\"version\": \"${{ env.version }}\"}}" \ -d "{\"ref\":\"v1.0.4\",\"inputs\":{\"version\": \"${{ env.version }}\"}}" \
"https://api.github.com/repos/emqx/emqx-ci-helper/actions/workflows/update_emqx_homebrew.yaml/dispatches" "https://api.github.com/repos/emqx/emqx-ci-helper/actions/workflows/update_emqx_homebrew.yaml/dispatches"
fi fi
- uses: geekyeggo/delete-artifact@v1 - uses: geekyeggo/delete-artifact@v1


@ -1,5 +1,10 @@
name: Build slim packages name: Build slim packages
concurrency:
group: slim-${{ github.event_name }}-${{ github.ref }}
cancel-in-progress: true
on: on:
push: push:
tags: tags:
@ -13,14 +18,16 @@ jobs:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
strategy: strategy:
fail-fast: false
matrix: matrix:
erl_otp: erl_otp:
- erl23.2.7.2-emqx-3 - 23.3.4.9-3
- 24.1.5-3
os: os:
- ubuntu20.04 - ubuntu20.04
- centos7 - centos7
container: emqx/build-env:${{ matrix.erl_otp }}-${{ matrix.os }} container: ghcr.io/emqx/emqx-builder/4.4-4:${{ matrix.erl_otp }}-${{ matrix.os }}
steps: steps:
- uses: actions/checkout@v1 - uses: actions/checkout@v1
@ -43,23 +50,31 @@ jobs:
with: with:
name: rebar3.crashdump name: rebar3.crashdump
path: ./rebar3.crashdump path: ./rebar3.crashdump
- name: pakcages test - name: packages test
run: | run: |
export CODE_PATH=$GITHUB_WORKSPACE PKG_VSN="$(./pkg-vsn.sh)"
.ci/build_packages/tests.sh PKG_NAME="${EMQX_NAME}-${PKG_VSN}-otp${{ matrix.erl_otp }}-${{ matrix.os }}-amd64"
export CODE_PATH="$GITHUB_WORKSPACE"
.ci/build_packages/tests.sh "$PKG_NAME" zip
.ci/build_packages/tests.sh "$PKG_NAME" pkg
- uses: actions/upload-artifact@v2 - uses: actions/upload-artifact@v2
with: with:
name: ${{ matrix.os }} name: ${{ matrix.os }}
path: _packages/**/*.zip path: _packages/**/*.zip
mac: mac:
strategy: strategy:
fail-fast: false
matrix: matrix:
erl_otp: otp:
- 23.2.7.2-emqx-3 - 24.1.5-3
macos: macos:
- macos-11 - macos-11
- macos-10.15
runs-on: ${{ matrix.macos }} runs-on: ${{ matrix.macos }}
steps: steps:
- uses: actions/checkout@v1 - uses: actions/checkout@v1
- name: prepare - name: prepare
@ -81,8 +96,8 @@ jobs:
- uses: actions/cache@v2 - uses: actions/cache@v2
id: cache id: cache
with: with:
path: ~/.kerl/${{ matrix.erl_otp }} path: ~/.kerl/${{ matrix.otp }}
key: otp-install-${{ matrix.erl_otp }}-${{ matrix.macos }} key: otp-install-${{ matrix.otp }}-${{ matrix.macos }}
- name: build erlang - name: build erlang
if: steps.cache.outputs.cache-hit != 'true' if: steps.cache.outputs.cache-hit != 'true'
timeout-minutes: 60 timeout-minutes: 60
@ -91,11 +106,11 @@ jobs:
OTP_GITHUB_URL: https://github.com/emqx/otp OTP_GITHUB_URL: https://github.com/emqx/otp
run: | run: |
kerl update releases kerl update releases
kerl build ${{ matrix.erl_otp }} kerl build ${{ matrix.otp }}
kerl install ${{ matrix.erl_otp }} $HOME/.kerl/${{ matrix.erl_otp }} kerl install ${{ matrix.otp }} $HOME/.kerl/${{ matrix.otp }}
- name: build - name: build
run: | run: |
. $HOME/.kerl/${{ matrix.erl_otp }}/activate . $HOME/.kerl/${{ matrix.otp }}/activate
make ensure-rebar3 make ensure-rebar3
sudo cp rebar3 /usr/local/bin/rebar3 sudo cp rebar3 /usr/local/bin/rebar3
make ${EMQX_NAME}-zip make ${EMQX_NAME}-zip
@ -106,8 +121,8 @@ jobs:
path: ./rebar3.crashdump path: ./rebar3.crashdump
- name: test - name: test
run: | run: |
pkg_name=$(basename _packages/${EMQX_NAME}/emqx-*.zip) pkg_name=$(find _packages/${EMQX_NAME} -mindepth 1 -maxdepth 1 -iname \*.zip)
unzip -q _packages/${EMQX_NAME}/$pkg_name unzip -q $pkg_name
gsed -i '/emqx_telemetry/d' ./emqx/data/loaded_plugins gsed -i '/emqx_telemetry/d' ./emqx/data/loaded_plugins
./emqx/bin/emqx start || cat emqx/log/erlang.log.1 ./emqx/bin/emqx start || cat emqx/log/erlang.log.1
ready='no' ready='no'


@ -5,7 +5,7 @@ on: [pull_request]
jobs: jobs:
check_deps_integrity: check_deps_integrity:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
container: emqx/build-env:erl23.2.7.2-emqx-3-ubuntu20.04 container: ghcr.io/emqx/emqx-builder/4.4-4:24.1.5-3-ubuntu20.04
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2


@ -5,7 +5,7 @@ on: workflow_dispatch
jobs: jobs:
test: test:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
container: emqx/build-env:erl23.2.7.2-emqx-3-ubuntu20.04 container: ghcr.io/emqx/emqx-builder/4.4-4:24.1.5-3-ubuntu20.04
strategy: strategy:
fail-fast: true fail-fast: true
env: env:


@ -12,8 +12,8 @@ jobs:
build: build:
runs-on: ubuntu-latest runs-on: ubuntu-latest
outputs: outputs:
imgname: ${{ steps.build_docker.outputs.imgname}} imgname: ${{ steps.prepare.outputs.imgname}}
version: ${{ steps.build_docker.outputs.version}} version: ${{ steps.prepare.outputs.version}}
steps: steps:
- name: download jmeter - name: download jmeter
id: dload_jmeter id: dload_jmeter
@ -27,8 +27,8 @@ jobs:
name: apache-jmeter.tgz name: apache-jmeter.tgz
path: /tmp/apache-jmeter.tgz path: /tmp/apache-jmeter.tgz
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- name: build docker - name: prepare
id: build_docker id: prepare
run: | run: |
if [ -f EMQX_ENTERPRISE ]; then if [ -f EMQX_ENTERPRISE ]; then
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
@ -36,20 +36,23 @@ jobs:
echo "${{ secrets.CI_GIT_TOKEN }}" >> scripts/git-token echo "${{ secrets.CI_GIT_TOKEN }}" >> scripts/git-token
make deps-emqx-ee make deps-emqx-ee
make clean make clean
make emqx-ee-docker
echo "::set-output name=imgname::emqx-ee" echo "::set-output name=imgname::emqx-ee"
echo "::set-output name=version::$(./pkg-vsn.sh)" echo "::set-output name=version::$(./pkg-vsn.sh)"
docker save emqx/emqx-ee:$(./pkg-vsn.sh) -o emqx.tar
else else
make emqx-docker make emqx-docker
echo "::set-output name=imgname::emqx" echo "::set-output name=imgname::emqx"
echo "::set-output name=version::$(./pkg-vsn.sh)" echo "::set-output name=version::$(./pkg-vsn.sh)"
docker save emqx/emqx:$(./pkg-vsn.sh) -o emqx.tar
fi fi
- name: build docker image
env:
OTP_VSN: 24.1.5-3
run: |
make ${{ steps.prepare.outputs.imgname }}-docker
docker save emqx/${{ steps.prepare.outputs.imgname }}:${{ steps.prepare.outputs.version }} -o image.tar.gz
- uses: actions/upload-artifact@v2 - uses: actions/upload-artifact@v2
with: with:
name: emqx-docker-image name: image
path: emqx.tar path: image.tar.gz
webhook: webhook:
runs-on: ubuntu-latest runs-on: ubuntu-latest
@ -65,10 +68,11 @@ jobs:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v2
with: with:
name: emqx-docker-image name: image
path: /tmp path: /tmp
- name: load docker image - name: load docker image
run: docker load < /tmp/emqx.tar run: |
docker load < /tmp/image.tar.gz
- name: docker compose up - name: docker compose up
timeout-minutes: 5 timeout-minutes: 5
env: env:
@ -163,10 +167,11 @@ jobs:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v2
with: with:
name: emqx-docker-image name: image
path: /tmp path: /tmp
- name: load docker image - name: load docker image
run: docker load < /tmp/emqx.tar run: |
docker load < /tmp/image.tar.gz
- name: docker compose up - name: docker compose up
timeout-minutes: 5 timeout-minutes: 5
env: env:
@ -268,10 +273,11 @@ jobs:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v2
with: with:
name: emqx-docker-image name: image
path: /tmp path: /tmp
- name: load docker image - name: load docker image
run: docker load < /tmp/emqx.tar run: |
docker load < /tmp/image.tar.gz
- name: docker compose up - name: docker compose up
timeout-minutes: 5 timeout-minutes: 5
env: env:
@ -362,10 +368,11 @@ jobs:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v2
with: with:
name: emqx-docker-image name: image
path: /tmp path: /tmp
- name: load docker image - name: load docker image
run: docker load < /tmp/emqx.tar run: |
docker load < /tmp/image.tar.gz
- name: docker compose up - name: docker compose up
timeout-minutes: 5 timeout-minutes: 5
env: env:

View File

@ -13,25 +13,28 @@ jobs:
steps: steps:
- uses: actions/checkout@v1 - uses: actions/checkout@v1
- uses: gleam-lang/setup-erlang@v1.1.2 - name: prepare
id: install_erlang
with:
otp-version: 23.2
- name: make docker
run: | run: |
if make emqx-ee --dry-run > /dev/null 2>&1; then if make emqx-ee --dry-run > /dev/null 2>&1; then
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
git config --global credential.helper store git config --global credential.helper store
echo "${{ secrets.CI_GIT_TOKEN }}" >> scripts/git-token echo "${{ secrets.CI_GIT_TOKEN }}" >> scripts/git-token
make deps-emqx-ee make deps-emqx-ee
make clean
echo "TARGET=emqx/emqx-ee" >> $GITHUB_ENV echo "TARGET=emqx/emqx-ee" >> $GITHUB_ENV
echo "PROFILE=emqx-ee" >> $GITHUB_ENV
echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV
make emqx-ee-docker make emqx-ee-docker
else else
echo "TARGET=emqx/emqx" >> $GITHUB_ENV echo "TARGET=emqx/emqx" >> $GITHUB_ENV
echo "PROFILE=emqx" >> $GITHUB_ENV
echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV
make emqx-docker make emqx-docker
fi fi
- name: make emqx image
env:
OTP_VSN: 24.1.5-3
run: make ${PROFILE}-docker
- name: run emqx - name: run emqx
timeout-minutes: 5 timeout-minutes: 5
run: | run: |
@ -64,13 +67,15 @@ jobs:
helm_test: helm_test:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
strategy:
fail-fast: false
matrix:
discovery:
- k8s
- dns
steps: steps:
- uses: actions/checkout@v1 - uses: actions/checkout@v1
- uses: gleam-lang/setup-erlang@v1.1.2
id: install_erlang
with:
otp-version: 23.2
- name: prepare - name: prepare
run: | run: |
if make emqx-ee --dry-run > /dev/null 2>&1; then if make emqx-ee --dry-run > /dev/null 2>&1; then
@ -78,12 +83,19 @@ jobs:
git config --global credential.helper store git config --global credential.helper store
echo "${{ secrets.CI_GIT_TOKEN }}" >> scripts/git-token echo "${{ secrets.CI_GIT_TOKEN }}" >> scripts/git-token
make deps-emqx-ee make deps-emqx-ee
make clean
echo "TARGET=emqx/emqx-ee" >> $GITHUB_ENV echo "TARGET=emqx/emqx-ee" >> $GITHUB_ENV
make emqx-ee-docker echo "PROFILE=emqx-ee" >> $GITHUB_ENV
echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV
else else
echo "TARGET=emqx/emqx" >> $GITHUB_ENV echo "TARGET=emqx/emqx" >> $GITHUB_ENV
make emqx-docker echo "PROFILE=emqx" >> $GITHUB_ENV
echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV
fi fi
- name: make emqx image
env:
OTP_VSN: 24.1.5-3
run: make ${PROFILE}-docker
- name: install k3s - name: install k3s
env: env:
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml" KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
@ -100,18 +112,18 @@ jobs:
sudo chmod 700 get_helm.sh sudo chmod 700 get_helm.sh
sudo ./get_helm.sh sudo ./get_helm.sh
helm version helm version
- name: run emqx on chart - name: setup emqx chart
env:
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
timeout-minutes: 5
run: | run: |
version=$(./pkg-vsn.sh) sudo docker save ${TARGET}:${EMQX_TAG} -o emqx.tar.gz
sudo docker save ${TARGET}:$version -o emqx.tar.gz
sudo k3s ctr image import emqx.tar.gz sudo k3s ctr image import emqx.tar.gz
sed -i -r "s/^appVersion: .*$/appVersion: \"${version}\"/g" deploy/charts/emqx/Chart.yaml sed -i -r "s/^appVersion: .*$/appVersion: \"${EMQX_TAG}\"/g" deploy/charts/emqx/Chart.yaml
sed -i '/emqx_telemetry/d' deploy/charts/emqx/values.yaml sed -i '/emqx_telemetry/d' deploy/charts/emqx/values.yaml
- name: run emqx on chart
if: matrix.discovery == 'k8s'
env:
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
run: |
helm install emqx \ helm install emqx \
--set image.repository=${TARGET} \ --set image.repository=${TARGET} \
--set image.pullPolicy=Never \ --set image.pullPolicy=Never \
@ -121,7 +133,29 @@ jobs:
--set emqxConfig.EMQX_MQTT__MAX_TOPIC_ALIAS=10 \ --set emqxConfig.EMQX_MQTT__MAX_TOPIC_ALIAS=10 \
deploy/charts/emqx \ deploy/charts/emqx \
--debug --debug
- name: run emqx on chart
if: matrix.discovery == 'dns'
env:
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
run: |
helm install emqx \
--set emqxConfig.EMQX_CLUSTER__DISCOVERY="dns" \
--set emqxConfig.EMQX_CLUSTER__DNS__NAME="emqx-headless.default.svc.cluster.local" \
--set emqxConfig.EMQX_CLUSTER__DNS__APP="emqx" \
--set emqxConfig.EMQX_CLUSTER__DNS__TYPE="srv" \
--set image.repository=${TARGET} \
--set image.pullPolicy=Never \
--set emqxAclConfig="" \
--set image.pullPolicy=Never \
--set emqxConfig.EMQX_ZONE__EXTERNAL__RETRY_INTERVAL=2s \
--set emqxConfig.EMQX_MQTT__MAX_TOPIC_ALIAS=10 \
deploy/charts/emqx \
--debug
- name: waiting emqx started
env:
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
timeout-minutes: 5
run: |
while [ "$(kubectl get StatefulSet -l app.kubernetes.io/name=emqx -o jsonpath='{.items[0].status.replicas}')" \ while [ "$(kubectl get StatefulSet -l app.kubernetes.io/name=emqx -o jsonpath='{.items[0].status.replicas}')" \
!= "$(kubectl get StatefulSet -l app.kubernetes.io/name=emqx -o jsonpath='{.items[0].status.readyReplicas}')" ]; do != "$(kubectl get StatefulSet -l app.kubernetes.io/name=emqx -o jsonpath='{.items[0].status.readyReplicas}')" ]; do
echo "=============================="; echo "==============================";
@ -130,6 +164,18 @@ jobs:
echo "waiting emqx started"; echo "waiting emqx started";
sleep 10; sleep 10;
done done
- name: Check ${{ matrix.kind[0]}} cluster
env:
KUBECONFIG: "/etc/rancher/k3s/k3s.yaml"
timeout-minutes: 10
run: |
while
nodes="$(kubectl exec -i emqx-0 -- curl --silent --basic -u admin:public -X GET http://localhost:8081/api/v4/brokers | jq '.data|length')";
[ "$nodes" != "3" ];
do
echo "waiting emqx cluster scale"
sleep 1
done
- name: get emqx-0 pods log - name: get emqx-0 pods log
if: failure() if: failure()
env: env:
@ -180,7 +226,7 @@ jobs:
relup_test_plan: relup_test_plan:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
container: emqx/build-env:erl23.2.7.2-emqx-3-ubuntu20.04 container: ghcr.io/emqx/emqx-builder/4.4-4:24.1.5-3-ubuntu20.04
outputs: outputs:
profile: ${{ steps.profile-and-versions.outputs.profile }} profile: ${{ steps.profile-and-versions.outputs.profile }}
vsn: ${{ steps.profile-and-versions.outputs.vsn }} vsn: ${{ steps.profile-and-versions.outputs.vsn }}
@ -225,8 +271,13 @@ jobs:
relup_test_build: relup_test_build:
needs: relup_test_plan needs: relup_test_plan
strategy:
fail-fast: false
matrix:
otp:
- 24.1.5-3
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
container: emqx/build-env:erl23.2.7.2-emqx-3-ubuntu20.04 container: ghcr.io/emqx/emqx-builder/4.4-4:24.1.5-3-ubuntu20.04
defaults: defaults:
run: run:
shell: bash shell: bash
@ -253,7 +304,7 @@ jobs:
cd emqx/_upgrade_base cd emqx/_upgrade_base
old_vsns=($(echo $OLD_VSNS | tr ' ' ' ')) old_vsns=($(echo $OLD_VSNS | tr ' ' ' '))
for old_vsn in ${old_vsns[@]}; do for old_vsn in ${old_vsns[@]}; do
wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$BROKER/$old_vsn/$PROFILE-ubuntu20.04-${old_vsn#[e|v]}-amd64.zip wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$BROKER/$old_vsn/$PROFILE-${old_vsn#[e|v]}-otp${{ matrix.otp }}-ubuntu20.04-amd64.zip
done done
- name: Build emqx - name: Build emqx
run: make -C emqx ${PROFILE}-zip run: make -C emqx ${PROFILE}-zip
@ -270,11 +321,13 @@ jobs:
- relup_test_plan - relup_test_plan
- relup_test_build - relup_test_build
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
container: emqx/relup-test-env:erl23.2.7.2-emqx-3-ubuntu20.04 container: emqx/relup-test-env:erl23.2.7.2-emqx-2-ubuntu20.04
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
old_vsn: ${{ fromJson(needs.relup_test_plan.outputs.matrix) }} old_vsn: ${{ fromJson(needs.relup_test_plan.outputs.matrix) }}
otp:
- 24.1.5-3
env: env:
OLD_VSN: "${{ matrix.old_vsn }}" OLD_VSN: "${{ matrix.old_vsn }}"
PROFILE: "${{ needs.relup_test_plan.outputs.profile }}" PROFILE: "${{ needs.relup_test_plan.outputs.profile }}"
@ -301,7 +354,7 @@ jobs:
mkdir -p packages mkdir -p packages
cp emqx_built/_packages/*/*.zip packages cp emqx_built/_packages/*/*.zip packages
cd packages cd packages
wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$BROKER/$OLD_VSN/$PROFILE-ubuntu20.04-${OLD_VSN#[e|v]}-amd64.zip wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$BROKER/$OLD_VSN/$PROFILE-${OLD_VSN#[e|v]}-otp${{ matrix.otp }}-ubuntu20.04-amd64.zip
- name: Run relup test scenario - name: Run relup test scenario
timeout-minutes: 5 timeout-minutes: 5
run: | run: |
@ -313,6 +366,8 @@ jobs:
--var ONE_MORE_EMQX_PATH=$(pwd)/one_more_emqx \ --var ONE_MORE_EMQX_PATH=$(pwd)/one_more_emqx \
--var VSN="$VSN" \ --var VSN="$VSN" \
--var OLD_VSN="$OLD_VSN" \ --var OLD_VSN="$OLD_VSN" \
--var FROM_OTP_VSN="24.1.5-3" \
--var TO_OTP_VSN="24.1.5-3" \
emqx_built/.ci/fvt_tests/relup.lux emqx_built/.ci/fvt_tests/relup.lux
- uses: actions/upload-artifact@v2 - uses: actions/upload-artifact@v2
name: Save debug data name: Save debug data

View File

@ -10,7 +10,7 @@ on:
jobs: jobs:
run_proper_test: run_proper_test:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
container: emqx/build-env:erl23.2.7.2-emqx-3-ubuntu20.04 container: ghcr.io/emqx/emqx-builder/4.4-4:24.1.5-3-ubuntu20.04
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2


@ -1 +1 @@
erlang 23.2.7.2-emqx-3 erlang 24.1.5-3

CHANGES-4.4.md (new file, 61 lines)

@ -0,0 +1,61 @@
# EMQ X 4.4 Changes
## 4.4-beta.1
### Important changes
- **For Debian/Ubuntu users**, the Debian/Ubuntu package (deb) now starts EMQ X from systemd.
  This uses systemd's supervision functionality to ensure that the EMQ X service restarts after a crash.
  The upgrade of the package installation service from init.d to systemd has been verified,
  but it is still recommended that you verify it again before deploying to a production environment,
  at least to ensure that systemd is available on your system (see the sketch right after this list)
- MongoDB authentication supports DNS SRV and TXT record resolution, which allows seamless connection to MongoDB Atlas
  (a DNS lookup sketch follows at the end of this changelog)
- Support dynamic modification of MQTT Keep Alive to adapt to different energy consumption strategies.
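
A minimal sketch of managing the systemd-supervised service mentioned above, assuming the deb package registers a unit named `emqx` (the unit name is not spelled out in this changelog):

```bash
# Assumption: the deb installation creates a systemd unit called "emqx".
sudo systemctl status emqx                        # verify the broker is running under systemd
sudo systemctl restart emqx                       # systemd restarts it, and will also restart it after a crash
sudo journalctl -u emqx --since "10 minutes ago"  # inspect recent service logs
```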
### Minor changes
- Bumped the default boot wait time from 15 seconds to 150 seconds,
  because in some simulated environments it may take up to 70 seconds to boot in the build CI
- Dashboard supports relative paths and custom access paths
- Support configuring whether to forward retained messages with an empty payload, to suit users
  who are still using MQTT v3.1. The relevant configuration item is `retainer.stop_publish_clear_msg`
- Multi-language hook extension (ExHook) supports dynamically cancelling the subsequent forwarding of client messages
- Rule engine SQL supports the use of single quotes in `FROM` clauses, for example: `SELECT * FROM 't/#'`
- Change the default value of the `max_topic_levels` configuration item to 128.
  Previously it had no limit (configured as 0), which was a potential DoS threat
- Improve the error log content when a Proxy Protocol message is received but `proxy_protocol` is not configured
- Add additional attributes to messages reported by gateways.
  Messages from gateways such as CoAP, LwM2M, Stomp, and ExProto, when converted to EMQ X messages,
  now carry fields such as protocol name, protocol version, username, and client IP,
  which can be used by the multi-language hook extension (ExHook)
- Improve HTTP client performance
- Add openssl-1.1 to the RPM dependencies
### Bug fixes
- Fix the issue where client processes become unresponsive because RPC calls between nodes are blocked
- Fix the issue where the lock management process `ekka_locker` crashes after a suspended lock owner is killed
- Fix the issue where the Path parameter of the rule engine's WebHook action cannot use rule engine variables
- Fix the MongoDB authentication module being unable to use Replica Set mode, among other issues
- Fix out-of-order message forwarding between clusters. The relevant configuration item is `rpc.tcp_client_num`
- Fix incorrect calculation of memory usage
- Fix an MQTT bridge malfunction when the remote host is unreachable (the connection would hang)
- Fix the issue where HTTP headers may be duplicated
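
For the DNS SRV/TXT resolution mentioned under Important changes (implemented in `emqx_auth_mongo_sup` later in this diff), a quick way to confirm that the records EMQ X will query actually exist; the Atlas-style hostname below is purely illustrative:

```bash
# Hypothetical seed hostname; use the value you would put in auth.mongo.server.
SERVER="cluster0.example.mongodb.net"
# Mirrors inet_res:lookup("_mongodb._tcp." ++ Server, in, srv) in emqx_auth_mongo_sup
dig +short SRV "_mongodb._tcp.${SERVER}"
# Mirrors inet_res:lookup(Server, in, txt); Atlas typically publishes
# authSource/replicaSet options here
dig +short TXT "${SERVER}"
```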


@ -1,14 +1,15 @@
$(shell $(CURDIR)/scripts/git-hooks-init.sh) $(shell $(CURDIR)/scripts/git-hooks-init.sh)
REBAR_VERSION = 3.14.3-emqx-8
REBAR = $(CURDIR)/rebar3 REBAR = $(CURDIR)/rebar3
BUILD = $(CURDIR)/build BUILD = $(CURDIR)/build
SCRIPTS = $(CURDIR)/scripts SCRIPTS = $(CURDIR)/scripts
export EMQX_DEFAULT_BUILDER = emqx/build-env:erl23.2.7.2-emqx-3-alpine export EMQX_DEFAULT_BUILDER = ghcr.io/emqx/emqx-builder/4.4-4:24.1.5-3-alpine3.14
export EMQX_DEFAULT_RUNNER = alpine:3.12 export EMQX_DEFAULT_RUNNER = alpine:3.14
export OTP_VSN ?= $(shell $(CURDIR)/scripts/get-otp-vsn.sh)
export PKG_VSN ?= $(shell $(CURDIR)/pkg-vsn.sh) export PKG_VSN ?= $(shell $(CURDIR)/pkg-vsn.sh)
export EMQX_DESC ?= EMQ X export EMQX_DESC ?= EMQ X
export EMQX_CE_DASHBOARD_VERSION ?= v4.3.4 export EMQX_CE_DASHBOARD_VERSION ?= v4.4.0-beta.1
export DOCKERFILE := deploy/docker/Dockerfile export DOCKERFILE := deploy/docker/Dockerfile
export DOCKERFILE_TESTING := deploy/docker/Dockerfile.testing
ifeq ($(OS),Windows_NT) ifeq ($(OS),Windows_NT)
export REBAR_COLOR=none export REBAR_COLOR=none
endif endif
@ -29,7 +30,7 @@ all: $(REBAR) $(PROFILES)
.PHONY: ensure-rebar3 .PHONY: ensure-rebar3
ensure-rebar3: ensure-rebar3:
@$(SCRIPTS)/fail-on-old-otp-version.escript @$(SCRIPTS)/fail-on-old-otp-version.escript
@$(SCRIPTS)/ensure-rebar3.sh $(REBAR_VERSION) @$(SCRIPTS)/ensure-rebar3.sh
$(REBAR): ensure-rebar3 $(REBAR): ensure-rebar3
@ -96,6 +97,7 @@ $(PROFILES:%=clean-%):
.PHONY: clean-all .PHONY: clean-all
clean-all: clean-all:
@rm -f rebar.lock
@rm -rf _build @rm -rf _build
.PHONY: deps-all .PHONY: deps-all
@ -160,6 +162,18 @@ endef
ALL_ZIPS = $(REL_PROFILES) ALL_ZIPS = $(REL_PROFILES)
$(foreach zt,$(ALL_ZIPS),$(eval $(call gen-docker-target,$(zt)))) $(foreach zt,$(ALL_ZIPS),$(eval $(call gen-docker-target,$(zt))))
## emqx-docker-testing
## emqx-ee-docker-testing
## is to directly copy a unzipped zip-package to a
## base image such as ubuntu20.04. Mostly for testing
.PHONY: $(REL_PROFILES:%=%-docker-testing)
define gen-docker-target-testing
$1-docker-testing: $(COMMON_DEPS)
@$(BUILD) $1 docker-testing
endef
ALL_ZIPS = $(REL_PROFILES)
$(foreach zt,$(ALL_ZIPS),$(eval $(call gen-docker-target-testing,$(zt))))
.PHONY: run .PHONY: run
run: $(PROFILE) quickrun run: $(PROFILE) quickrun


@ -1,6 +1,6 @@
{deps, {deps,
[ [
{jose, {git, "https://github.com/potatosalad/erlang-jose", {tag, "1.11.1"}}} {jose, {git, "https://github.com/potatosalad/erlang-jose", {tag, "1.11.2"}}}
]}. ]}.
{edoc_opts, [{preprocess, true}]}. {edoc_opts, [{preprocess, true}]}.


@ -1,6 +1,6 @@
{application, emqx_auth_jwt, {application, emqx_auth_jwt,
[{description, "EMQ X Authentication with JWT"}, [{description, "EMQ X Authentication with JWT"},
{vsn, "4.3.1"}, % strict semver, bump manually! {vsn, "4.4.0"}, % strict semver, bump manually!
{modules, []}, {modules, []},
{registered, [emqx_auth_jwt_sup]}, {registered, [emqx_auth_jwt_sup]},
{applications, [kernel,stdlib,jose]}, {applications, [kernel,stdlib,jose]},


@ -58,7 +58,7 @@ insert_user(User = #emqx_user{login = Login}) ->
[_|_] -> mnesia:abort(existed) [_|_] -> mnesia:abort(existed)
end. end.
-spec(add_default_user(clientid | username, tuple(), binary()) -> ok | {error, any()}). -spec(add_default_user(clientid | username, binary(), binary()) -> ok | {error, any()}).
add_default_user(Type, Key, Password) -> add_default_user(Type, Key, Password) ->
Login = {Type, Key}, Login = {Type, Key},
case add_user(Login, Password) of case add_user(Login, Password) of


@ -7,6 +7,12 @@
## Value: single | unknown | sharded | rs ## Value: single | unknown | sharded | rs
auth.mongo.type = single auth.mongo.type = single
## Whether to use SRV and TXT records.
##
## Value: true | false
## Default: false
auth.mongo.srv_record = false
## The set name if type is rs. ## The set name if type is rs.
## ##
## Value: String ## Value: String
@ -37,7 +43,6 @@ auth.mongo.pool = 8
## MongoDB AuthSource ## MongoDB AuthSource
## ##
## Value: String ## Value: String
## Default: mqtt
## auth.mongo.auth_source = admin ## auth.mongo.auth_source = admin
## MongoDB database ## MongoDB database


@ -6,8 +6,12 @@
{datatype, {enum, [single, unknown, sharded, rs]}} {datatype, {enum, [single, unknown, sharded, rs]}}
]}. ]}.
{mapping, "auth.mongo.srv_record", "emqx_auth_mongo.server", [
{default, false},
{datatype, {enum, [true, false]}}
]}.
{mapping, "auth.mongo.rs_set_name", "emqx_auth_mongo.server", [ {mapping, "auth.mongo.rs_set_name", "emqx_auth_mongo.server", [
{default, "mqtt"},
{datatype, string} {datatype, string}
]}. ]}.
@ -41,7 +45,6 @@
]}. ]}.
{mapping, "auth.mongo.auth_source", "emqx_auth_mongo.server", [ {mapping, "auth.mongo.auth_source", "emqx_auth_mongo.server", [
{default, "mqtt"},
{datatype, string} {datatype, string}
]}. ]}.
@ -101,9 +104,9 @@
]}. ]}.
{translation, "emqx_auth_mongo.server", fun(Conf) -> {translation, "emqx_auth_mongo.server", fun(Conf) ->
H = cuttlefish:conf_get("auth.mongo.server", Conf), SrvRecord = cuttlefish:conf_get("auth.mongo.srv_record", Conf, false),
Hosts = string:tokens(H, ","), Server = cuttlefish:conf_get("auth.mongo.server", Conf),
Type0 = cuttlefish:conf_get("auth.mongo.type", Conf), Type = cuttlefish:conf_get("auth.mongo.type", Conf),
Pool = cuttlefish:conf_get("auth.mongo.pool", Conf), Pool = cuttlefish:conf_get("auth.mongo.pool", Conf),
%% FIXME: compatible with 4.0-4.2 version format, plan to delete in 5.0 %% FIXME: compatible with 4.0-4.2 version format, plan to delete in 5.0
Login = cuttlefish:conf_get("auth.mongo.username", Conf, Login = cuttlefish:conf_get("auth.mongo.username", Conf,
@ -111,7 +114,10 @@
), ),
Passwd = cuttlefish:conf_get("auth.mongo.password", Conf), Passwd = cuttlefish:conf_get("auth.mongo.password", Conf),
DB = cuttlefish:conf_get("auth.mongo.database", Conf), DB = cuttlefish:conf_get("auth.mongo.database", Conf),
AuthSrc = cuttlefish:conf_get("auth.mongo.auth_source", Conf), AuthSource = case cuttlefish:conf_get("auth.mongo.auth_source", Conf, undefined) of
undefined -> [];
AuthSource0 -> [{auth_source, list_to_binary(AuthSource0)}]
end,
R = cuttlefish:conf_get("auth.mongo.w_mode", Conf), R = cuttlefish:conf_get("auth.mongo.w_mode", Conf),
W = cuttlefish:conf_get("auth.mongo.r_mode", Conf), W = cuttlefish:conf_get("auth.mongo.r_mode", Conf),
Login0 = case Login =:= [] of Login0 = case Login =:= [] of
@ -156,8 +162,8 @@
false -> [] false -> []
end, end,
WorkerOptions = [{database, list_to_binary(DB)}, {auth_source, list_to_binary(AuthSrc)}] WorkerOptions = [{database, list_to_binary(DB)}]
++ Login0 ++ Passwd0 ++ W0 ++ R0 ++ Ssl, ++ Login0 ++ Passwd0 ++ W0 ++ R0 ++ Ssl ++ AuthSource,
Vars = cuttlefish_variable:fuzzy_matches(["auth", "mongo", "topology", "$name"], Conf), Vars = cuttlefish_variable:fuzzy_matches(["auth", "mongo", "topology", "$name"], Conf),
Options = lists:map(fun({_, Name}) -> Options = lists:map(fun({_, Name}) ->
@ -174,16 +180,17 @@
{list_to_atom(Name2), cuttlefish:conf_get("auth.mongo.topology."++Name, Conf)} {list_to_atom(Name2), cuttlefish:conf_get("auth.mongo.topology."++Name, Conf)}
end, Vars), end, Vars),
Type = case Type0 =:= rs of ReplicaSet = case cuttlefish:conf_get("auth.mongo.rs_set_name", Conf, undefined) of
true -> {Type0, list_to_binary(cuttlefish:conf_get("auth.mongo.rs_set_name", Conf))}; undefined -> [];
false -> Type0 ReplicaSet0 -> [{rs_set_name, list_to_binary(ReplicaSet0)}]
end, end,
[{type, Type}, [{srv_record, SrvRecord},
{hosts, Hosts}, {type, Type},
{server, Server},
{options, Options}, {options, Options},
{worker_options, WorkerOptions}, {worker_options, WorkerOptions},
{auto_reconnect, 1}, {auto_reconnect, 1},
{pool_size, Pool}] {pool_size, Pool}] ++ ReplicaSet
end}. end}.
%% The mongodb operation timeout is specified by the value of `cursor_timeout` from application config, %% The mongodb operation timeout is specified by the value of `cursor_timeout` from application config,


@ -1,6 +1,6 @@
{application, emqx_auth_mongo, {application, emqx_auth_mongo,
[{description, "EMQ X Authentication/ACL with MongoDB"}, [{description, "EMQ X Authentication/ACL with MongoDB"},
{vsn, "4.3.1"}, % strict semver, bump manually! {vsn, "4.4.1"}, % strict semver, bump manually!
{modules, []}, {modules, []},
{registered, [emqx_auth_mongo_sup]}, {registered, [emqx_auth_mongo_sup]},
{applications, [kernel,stdlib,mongodb,ecpool]}, {applications, [kernel,stdlib,mongodb,ecpool]},


@ -1,10 +1,10 @@
%% -*- mode: erlang -*- %% -*- mode: erlang -*-
{VSN, {VSN,
[{"4.3.0", [{"4.4.0",
[{load_module,emqx_auth_mongo_app,brutal_purge,soft_purge,[]}, [{load_module,emqx_auth_mongo_app,brutal_purge,soft_purge,[]},
{load_module,emqx_acl_mongo,brutal_purge,soft_purge,[]}]}, {load_module,emqx_acl_mongo,brutal_purge,soft_purge,[]}]},
{<<".*">>,[]}], {<<".*">>,[]}],
[{"4.3.0", [{"4.4.0",
[{load_module,emqx_auth_mongo_app,brutal_purge,soft_purge,[]}, [{load_module,emqx_auth_mongo_app,brutal_purge,soft_purge,[]},
{load_module,emqx_acl_mongo,brutal_purge,soft_purge,[]}]}, {load_module,emqx_acl_mongo,brutal_purge,soft_purge,[]}]},
{<<".*">>,[]}] {<<".*">>,[]}]


@ -28,7 +28,96 @@ start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []). supervisor:start_link({local, ?MODULE}, ?MODULE, []).
init([]) -> init([]) ->
{ok, PoolEnv} = application:get_env(?APP, server), {ok, Opts} = application:get_env(?APP, server),
PoolSpec = ecpool:pool_spec(?APP, ?APP, ?APP, PoolEnv), NOpts = may_parse_srv_and_txt_records(Opts),
PoolSpec = ecpool:pool_spec(?APP, ?APP, ?APP, NOpts),
{ok, {{one_for_all, 10, 100}, [PoolSpec]}}. {ok, {{one_for_all, 10, 100}, [PoolSpec]}}.
may_parse_srv_and_txt_records(Opts) when is_list(Opts) ->
maps:to_list(may_parse_srv_and_txt_records(maps:from_list(Opts)));
may_parse_srv_and_txt_records(#{type := Type,
srv_record := false,
server := Server} = Opts) ->
Hosts = to_hosts(Server),
case Type =:= rs of
true ->
case maps:get(rs_set_name, Opts, undefined) of
undefined ->
error({missing_parameter, rs_set_name});
ReplicaSet ->
Opts#{type => {rs, ReplicaSet},
hosts => Hosts}
end;
false ->
Opts#{hosts => Hosts}
end;
may_parse_srv_and_txt_records(#{type := Type,
srv_record := true,
server := Server,
worker_options := WorkerOptions} = Opts) ->
Hosts = parse_srv_records(Server),
Opts0 = parse_txt_records(Type, Server),
NWorkerOptions = maps:to_list(maps:merge(maps:from_list(WorkerOptions), maps:with([auth_source], Opts0))),
NOpts = Opts#{hosts => Hosts, worker_options => NWorkerOptions},
case Type =:= rs of
true ->
case maps:get(rs_set_name, Opts0, maps:get(rs_set_name, NOpts, undefined)) of
undefined ->
error({missing_parameter, rs_set_name});
ReplicaSet ->
NOpts#{type => {Type, ReplicaSet}}
end;
false ->
NOpts
end.
to_hosts(Server) ->
[string:trim(H) || H <- string:tokens(Server, ",")].
parse_srv_records(Server) ->
case inet_res:lookup("_mongodb._tcp." ++ Server, in, srv) of
[] ->
error(service_not_found);
Services ->
[Host ++ ":" ++ integer_to_list(Port) || {_, _, Port, Host} <- Services]
end.
parse_txt_records(Type, Server) ->
case inet_res:lookup(Server, in, txt) of
[] ->
#{};
[[QueryString]] ->
case uri_string:dissect_query(QueryString) of
{error, _, _} ->
error({invalid_txt_record, invalid_query_string});
Options ->
Fields = case Type of
rs -> ["authSource", "replicaSet"];
_ -> ["authSource"]
end,
take_and_convert(Fields, Options)
end;
_ ->
error({invalid_txt_record, multiple_records})
end.
take_and_convert(Fields, Options) ->
take_and_convert(Fields, Options, #{}).
take_and_convert([], [_ | _], _Acc) ->
error({invalid_txt_record, invalid_option});
take_and_convert([], [], Acc) ->
Acc;
take_and_convert([Field | More], Options, Acc) ->
case lists:keytake(Field, 1, Options) of
{value, {"authSource", V}, NOptions} ->
take_and_convert(More, NOptions, Acc#{auth_source => list_to_binary(V)});
{value, {"replicaSet", V}, NOptions} ->
take_and_convert(More, NOptions, Acc#{rs_set_name => list_to_binary(V)});
{value, _, _} ->
error({invalid_txt_record, invalid_option});
false ->
take_and_convert(More, Options, Acc)
end.
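
A minimal sketch of what the new SRV/TXT resolution is meant to produce, assuming a hypothetical mongodb+srv style deployment; the host names, database and replica set below are made up:

    %% Hypothetical input, roughly as assembled by the schema above (srv_record = true):
    Opts = [{type, rs},
            {srv_record, true},
            {server, "cluster0.example.com"},
            {worker_options, [{database, <<"mqtt">>}]},
            {auto_reconnect, 1},
            {pool_size, 8}],
    %% may_parse_srv_and_txt_records/1 looks up _mongodb._tcp.cluster0.example.com (SRV)
    %% for the hosts and cluster0.example.com (TXT) for authSource/replicaSet,
    %% so the result could look like:
    %% [{type, {rs, <<"rs0">>}},
    %%  {srv_record, true},
    %%  {server, "cluster0.example.com"},
    %%  {hosts, ["shard0.example.com:27017", "shard1.example.com:27017"]},
    %%  {worker_options, [{database, <<"mqtt">>}, {auth_source, <<"admin">>}]},
    %%  {auto_reconnect, 1},
    %%  {pool_size, 8}]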

View File

@ -1,5 +1,5 @@
{deps, {deps,
[{epgsql, {git, "https://github.com/epgsql/epgsql.git", {tag, "4.4.0"}}} [{epgsql, {git, "https://github.com/emqx/epgsql.git", {tag, "4.6.0"}}}
]}. ]}.
{erl_opts, [warn_unused_vars, {erl_opts, [warn_unused_vars,

View File

@ -1,6 +1,6 @@
{application, emqx_auth_pgsql, {application, emqx_auth_pgsql,
[{description, "EMQ X Authentication/ACL with PostgreSQL"}, [{description, "EMQ X Authentication/ACL with PostgreSQL"},
{vsn, "4.3.1"}, % strict semver, bump manually! {vsn, "4.4.1"}, % strict semver, bump manually!
{modules, []}, {modules, []},
{registered, [emqx_auth_pgsql_sup]}, {registered, [emqx_auth_pgsql_sup]},
{applications, [kernel,stdlib,epgsql,ecpool]}, {applications, [kernel,stdlib,epgsql,ecpool]},

View File

@ -1,10 +1,10 @@
%% -*- mode: erlang -*- %% -*- mode: erlang -*-
{VSN, {VSN,
[{"4.3.0", [{"4.4.0",
[{load_module,emqx_auth_pgsql_app,brutal_purge,soft_purge,[]}, [{load_module,emqx_auth_pgsql_app,brutal_purge,soft_purge,[]},
{load_module,emqx_acl_pgsql,brutal_purge,soft_purge,[]}]}, {load_module,emqx_acl_pgsql,brutal_purge,soft_purge,[]}]},
{<<".*">>,[]}], {<<".*">>,[]}],
[{"4.3.0", [{"4.4.0",
[{load_module,emqx_auth_pgsql_app,brutal_purge,soft_purge,[]}, [{load_module,emqx_auth_pgsql_app,brutal_purge,soft_purge,[]},
{load_module,emqx_acl_pgsql,brutal_purge,soft_purge,[]}]}, {load_module,emqx_acl_pgsql,brutal_purge,soft_purge,[]}]},
{<<".*">>,[]}] {<<".*">>,[]}]

View File

@ -24,6 +24,11 @@
## Value: false | Duration ## Value: false | Duration
#exhook.auto_reconnect = 60s #exhook.auto_reconnect = 60s
## The process pool size for gRPC client
##
## Default: equals the number of CPU cores ##
## Value: Integer
#exhook.pool_size = 16
##-------------------------------------------------------------------- ##--------------------------------------------------------------------
## The Hook callback servers ## The Hook callback servers

View File

@ -26,6 +26,10 @@
end end
end}. end}.
{mapping, "exhook.pool_size", "emqx_exhook.pool_size", [
{datatype, integer}
]}.
{mapping, "exhook.server.$name.url", "emqx_exhook.servers", [ {mapping, "exhook.server.$name.url", "emqx_exhook.servers", [
{datatype, string} {datatype, string}
]}. ]}.

View File

@ -358,6 +358,31 @@ message Message {
bytes payload = 6; bytes payload = 6;
uint64 timestamp = 7; uint64 timestamp = 7;
// The keys of the headers map can be: // The keys of the headers map can be:
// - username: // - username:
//   * Read-only //   * Read-only
//   * The username of the sender client //   * The username of the sender client
//   * Value type: utf8 string //   * Value type: utf8 string
// - protocol: // - protocol:
//   * Read-only //   * Read-only
//   * The protocol name of the sender client //   * The protocol name of the sender client
//   * Value type: string enum, e.g. "mqtt", "mqtt-sn", ... //   * Value type: string enum, e.g. "mqtt", "mqtt-sn", ...
// - peerhost: // - peerhost:
//   * Read-only //   * Read-only
//   * The peer host (IP address) of the sender client //   * The peer host (IP address) of the sender client
//   * Value type: IP address string //   * Value type: IP address string
// - allow_publish: // - allow_publish:
//   * Writable //   * Writable
//   * Whether emqx is allowed to publish the message //   * Whether emqx is allowed to publish the message
//   * Value type: string enum "true" | "false", default is "true" //   * Value type: string enum "true" | "false", default is "true"
// //
// Note: any of these headers may be missing, which means the message does // Note: any of these headers may be missing, which means the message does
// not carry them. Clients connecting over MQTT, MQTT-SN, CoAP, LwM2M and // not carry them. Clients connecting over MQTT, MQTT-SN, CoAP, LwM2M and
// other natively supported protocols are guaranteed to carry these headers, // other natively supported protocols are guaranteed to carry these headers,
// but there is no such guarantee for messages published by other means, // but there is no such guarantee for messages published by other means,
// e.g. via the HTTP API. // e.g. via the HTTP API.
map<string, string> headers = 8;
} }
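
For illustration only: mirroring the test handler further down in this change, an Erlang hook server that wants to reject a publish can return the message with the writable header flipped (key and enum values as documented above):

    %% Sketch of an on_message_publish clause in a hook server; Req/Md come from gRPC.
    on_message_publish(#{message := Msg} = _Req, Md) ->
        NHeaders = maps:put(<<"allow_publish">>, <<"false">>, maps:get(headers, Msg, #{})),
        NMsg = Msg#{headers => NHeaders},
        {ok, #{type => 'STOP_AND_RETURN', value => {message, NMsg}}, Md}.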
message Property { message Property {

View File

@ -5,7 +5,7 @@
]}. ]}.
{deps, {deps,
[{grpc, {git, "https://github.com/emqx/grpc-erl", {tag, "0.6.3"}}} [{grpc, {git, "https://github.com/emqx/grpc-erl", {tag, "0.6.4"}}}
]}. ]}.
{grpc, {grpc,

View File

@ -1,6 +1,6 @@
{application, emqx_exhook, {application, emqx_exhook,
[{description, "EMQ X Extension for Hook"}, [{description, "EMQ X Extension for Hook"},
{vsn, "4.3.4"}, {vsn, "4.4.0"},
{modules, []}, {modules, []},
{registered, []}, {registered, []},
{mod, {emqx_exhook_app, []}}, {mod, {emqx_exhook_app, []}},

View File

@ -1,15 +1,7 @@
%% -*-: erlang -*- %% -*-: erlang -*-
{VSN, {VSN,
[ [{<<".*">>, []}
{<<"4.3.[0-3]">>, [
{restart_application, emqx_exhook}
]},
{<<".*">>, []}
], ],
[ [{<<".*">>, []}
{<<"4.3.[0-3]">>, [
{restart_application, emqx_exhook}
]},
{<<".*">>, []}
] ]
}. }.

View File

@ -50,6 +50,7 @@
%% Utils %% Utils
-export([ message/1 -export([ message/1
, headers/1
, stringfy/1 , stringfy/1
, merge_responsed_bool/2 , merge_responsed_bool/2
, merge_responsed_message/2 , merge_responsed_message/2
@ -62,6 +63,8 @@
, call_fold/3 , call_fold/3
]). ]).
-elvis([{elvis_style, god_modules, disable}]).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Clients %% Clients
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -258,17 +261,58 @@ clientinfo(ClientInfo =
cn => maybe(maps:get(cn, ClientInfo, undefined)), cn => maybe(maps:get(cn, ClientInfo, undefined)),
dn => maybe(maps:get(dn, ClientInfo, undefined))}. dn => maybe(maps:get(dn, ClientInfo, undefined))}.
message(#message{id = Id, qos = Qos, from = From, topic = Topic, payload = Payload, timestamp = Ts}) -> message(#message{id = Id, qos = Qos, from = From, topic = Topic,
payload = Payload, timestamp = Ts, headers = Headers}) ->
#{node => stringfy(node()), #{node => stringfy(node()),
id => emqx_guid:to_hexstr(Id), id => emqx_guid:to_hexstr(Id),
qos => Qos, qos => Qos,
from => stringfy(From), from => stringfy(From),
topic => Topic, topic => Topic,
payload => Payload, payload => Payload,
timestamp => Ts}. timestamp => Ts,
headers => headers(Headers)
}.
assign_to_message(#{qos := Qos, topic := Topic, payload := Payload}, Message) -> headers(Headers) ->
Message#message{qos = Qos, topic = Topic, payload = Payload}. Ls = [username, protocol, peerhost, allow_publish],
maps:fold(
fun
(_, undefined, Acc) ->
Acc; %% Ignore undefined value
(K, V, Acc) ->
case lists:member(K, Ls) of
true ->
Acc#{atom_to_binary(K) => bin(K, V)};
_ ->
Acc
end
end, #{}, Headers).
bin(K, V) when K == username;
K == protocol;
K == allow_publish ->
bin(V);
bin(peerhost, V) ->
bin(inet:ntoa(V)).
bin(V) when is_binary(V) -> V;
bin(V) when is_atom(V) -> atom_to_binary(V);
bin(V) when is_list(V) -> iolist_to_binary(V).
assign_to_message(InMessage = #{qos := Qos, topic := Topic,
payload := Payload}, Message) ->
NMsg = Message#message{qos = Qos, topic = Topic, payload = Payload},
enrich_header(maps:get(headers, InMessage, #{}), NMsg).
enrich_header(Headers, Message) ->
case maps:get(<<"allow_publish">>, Headers, undefined) of
<<"false">> ->
emqx_message:set_header(allow_publish, false, Message);
<<"true">> ->
emqx_message:set_header(allow_publish, true, Message);
_ ->
Message
end.
topicfilters(Tfs) when is_list(Tfs) -> topicfilters(Tfs) when is_list(Tfs) ->
[#{name => Topic, qos => Qos} || {Topic, #{qos := Qos}} <- Tfs]. [#{name => Topic, qos => Qos} || {Topic, #{qos := Qos}} <- Tfs].
@ -299,11 +343,7 @@ merge_responsed_bool(_Req, #{type := 'IGNORE'}) ->
ignore; ignore;
merge_responsed_bool(Req, #{type := Type, value := {bool_result, NewBool}}) merge_responsed_bool(Req, #{type := Type, value := {bool_result, NewBool}})
when is_boolean(NewBool) -> when is_boolean(NewBool) ->
NReq = Req#{result => NewBool}, {ret(Type), Req#{result => NewBool}};
case Type of
'CONTINUE' -> {ok, NReq};
'STOP_AND_RETURN' -> {stop, NReq}
end;
merge_responsed_bool(_Req, Resp) -> merge_responsed_bool(_Req, Resp) ->
?LOG(warning, "Unknown responsed value ~0p to merge to callback chain", [Resp]), ?LOG(warning, "Unknown responsed value ~0p to merge to callback chain", [Resp]),
ignore. ignore.
@ -311,11 +351,10 @@ merge_responsed_bool(_Req, Resp) ->
merge_responsed_message(_Req, #{type := 'IGNORE'}) -> merge_responsed_message(_Req, #{type := 'IGNORE'}) ->
ignore; ignore;
merge_responsed_message(Req, #{type := Type, value := {message, NMessage}}) -> merge_responsed_message(Req, #{type := Type, value := {message, NMessage}}) ->
NReq = Req#{message => NMessage}, {ret(Type), Req#{message => NMessage}};
case Type of
'CONTINUE' -> {ok, NReq};
'STOP_AND_RETURN' -> {stop, NReq}
end;
merge_responsed_message(_Req, Resp) -> merge_responsed_message(_Req, Resp) ->
?LOG(warning, "Unknown responsed value ~0p to merge to callback chain", [Resp]), ?LOG(warning, "Unknown responsed value ~0p to merge to callback chain", [Resp]),
ignore. ignore.
ret('CONTINUE') -> ok;
ret('STOP_AND_RETURN') -> stop.

View File

@ -36,6 +36,8 @@
, server/1 , server/1
, put_request_failed_action/1 , put_request_failed_action/1
, get_request_failed_action/0 , get_request_failed_action/0
, put_pool_size/1
, get_pool_size/0
]). ]).
%% gen_server callbacks %% gen_server callbacks
@ -117,6 +119,9 @@ init([Servers, AutoReconnect, ReqOpts0]) ->
put_request_failed_action( put_request_failed_action(
maps:get(request_failed_action, ReqOpts0, deny) maps:get(request_failed_action, ReqOpts0, deny)
), ),
put_pool_size(
maps:get(pool_size, ReqOpts0, erlang:system_info(schedulers))
),
%% Load the hook servers %% Load the hook servers
ReqOpts = maps:without([request_failed_action], ReqOpts0), ReqOpts = maps:without([request_failed_action], ReqOpts0),
@ -286,6 +291,14 @@ put_request_failed_action(Val) ->
get_request_failed_action() -> get_request_failed_action() ->
persistent_term:get({?APP, request_failed_action}). persistent_term:get({?APP, request_failed_action}).
put_pool_size(Val) ->
persistent_term:put({?APP, pool_size}, Val).
get_pool_size() ->
%% Avoid the case where the parameter has not been set yet,
%% e.g. right after a hot upgrade has completed.
persistent_term:get({?APP, pool_size}, erlang:system_info(schedulers)).
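
A quick sketch of the intended behaviour, using illustrative values; the fallback matters when the term is read before it has been stored, e.g. right after a hot upgrade:

    %% Illustration only, inside this module:
    ok = put_pool_size(16),
    16 = get_pool_size(),
    %% with nothing stored yet, get_pool_size/0 falls back to
    %% erlang:system_info(schedulers).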
save(Name, ServerState) -> save(Name, ServerState) ->
Saved = persistent_term:get(?APP, []), Saved = persistent_term:get(?APP, []),
persistent_term:put(?APP, lists:reverse([Name | Saved])), persistent_term:put(?APP, lists:reverse([Name | Saved])),

View File

@ -77,6 +77,8 @@
-dialyzer({nowarn_function, [inc_metrics/2]}). -dialyzer({nowarn_function, [inc_metrics/2]}).
-elvis([{elvis_style, dont_repeat_yourself, disable}]).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Load/Unload APIs %% Load/Unload APIs
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -125,13 +127,18 @@ channel_opts(Opts) ->
SvrAddr = format_http_uri(Scheme, Host, Port), SvrAddr = format_http_uri(Scheme, Host, Port),
ClientOpts = case Scheme of ClientOpts = case Scheme of
https -> https ->
SslOpts = lists:keydelete(ssl, 1, proplists:get_value(ssl_options, Opts, [])), SslOpts = lists:keydelete(
ssl,
1,
proplists:get_value(ssl_options, Opts, [])
),
#{gun_opts => #{gun_opts =>
#{transport => ssl, #{transport => ssl,
transport_opts => SslOpts}}; transport_opts => SslOpts}};
_ -> #{} _ -> #{}
end, end,
{SvrAddr, ClientOpts}. NClientOpts = ClientOpts#{pool_size => emqx_exhook_mngr:get_pool_size()},
{SvrAddr, NClientOpts}.
format_http_uri(Scheme, Host0, Port) -> format_http_uri(Scheme, Host0, Port) ->
Host = case is_tuple(Host0) of Host = case is_tuple(Host0) of
@ -174,16 +181,18 @@ resovle_hookspec(HookSpecs) when is_list(HookSpecs) ->
case maps:get(name, HookSpec, undefined) of case maps:get(name, HookSpec, undefined) of
undefined -> Acc; undefined -> Acc;
Name0 -> Name0 ->
Name = try binary_to_existing_atom(Name0, utf8) catch T:R:_ -> {T,R} end, Name = try
case lists:member(Name, AvailableHooks) of binary_to_existing_atom(Name0, utf8)
true -> catch T:R -> {T,R}
case lists:member(Name, MessageHooks) of end,
true -> case {lists:member(Name, AvailableHooks),
Acc#{Name => #{topics => maps:get(topics, HookSpec, [])}}; lists:member(Name, MessageHooks)} of
_ -> {false, _} ->
Acc#{Name => #{}} error({unknown_hookpoint, Name});
end; {true, false} ->
_ -> error({unknown_hookpoint, Name}) Acc#{Name => #{}};
{true, true} ->
Acc#{Name => #{topics => maps:get(topics, HookSpec, [])}}
end end
end end
end, #{}, HookSpecs). end, #{}, HookSpecs).
@ -271,8 +280,8 @@ do_call(ChannName, Fun, Req, ReqOpts) ->
Options = ReqOpts#{channel => ChannName}, Options = ReqOpts#{channel => ChannName},
?LOG(debug, "Call ~0p:~0p(~0p, ~0p)", [?PB_CLIENT_MOD, Fun, Req, Options]), ?LOG(debug, "Call ~0p:~0p(~0p, ~0p)", [?PB_CLIENT_MOD, Fun, Req, Options]),
case catch apply(?PB_CLIENT_MOD, Fun, [Req, Options]) of case catch apply(?PB_CLIENT_MOD, Fun, [Req, Options]) of
{ok, Resp, _Metadata} -> {ok, Resp, Metadata} ->
?LOG(debug, "Response {ok, ~0p, ~0p}", [Resp, _Metadata]), ?LOG(debug, "Response {ok, ~0p, ~0p}", [Resp, Metadata]),
{ok, Resp}; {ok, Resp};
{error, {Code, Msg}, _Metadata} -> {error, {Code, Msg}, _Metadata} ->
?LOG(error, "CALL ~0p:~0p(~0p, ~0p) response errcode: ~0p, errmsg: ~0p", ?LOG(error, "CALL ~0p:~0p(~0p, ~0p) response errcode: ~0p, errmsg: ~0p",

View File

@ -54,7 +54,8 @@ auto_reconnect() ->
request_options() -> request_options() ->
#{timeout => env(request_timeout, 5000), #{timeout => env(request_timeout, 5000),
request_failed_action => env(request_failed_action, deny) request_failed_action => env(request_failed_action, deny),
pool_size => env(pool_size, erlang:system_info(schedulers))
}. }.
env(Key, Def) -> env(Key, Def) ->
@ -67,7 +68,7 @@ env(Key, Def) ->
-spec start_grpc_client_channel( -spec start_grpc_client_channel(
string(), string(),
uri_string:uri_string(), uri_string:uri_string(),
grpc_client:options()) -> {ok, pid()} | {error, term()}. grpc_client_sup:options()) -> {ok, pid()} | {error, term()}.
start_grpc_client_channel(Name, SvrAddr, Options) -> start_grpc_client_channel(Name, SvrAddr, Options) ->
grpc_client_sup:create_channel_pool(Name, SvrAddr, Options). grpc_client_sup:create_channel_pool(Name, SvrAddr, Options).

View File

@ -299,21 +299,31 @@ on_message_publish(#{message := #{from := From} = Msg} = Req, Md) ->
%% some cases for testing %% some cases for testing
case From of case From of
<<"baduser">> -> <<"baduser">> ->
NMsg = Msg#{qos => 0, NMsg = deny(Msg#{qos => 0,
topic => <<"">>, topic => <<"">>,
payload => <<"">> payload => <<"">>
}, }),
{ok, #{type => 'STOP_AND_RETURN', {ok, #{type => 'STOP_AND_RETURN',
value => {message, NMsg}}, Md}; value => {message, NMsg}}, Md};
<<"gooduser">> -> <<"gooduser">> ->
NMsg = Msg#{topic => From, NMsg = allow(Msg#{topic => From,
payload => From}, payload => From}),
{ok, #{type => 'STOP_AND_RETURN', {ok, #{type => 'STOP_AND_RETURN',
value => {message, NMsg}}, Md}; value => {message, NMsg}}, Md};
_ -> _ ->
{ok, #{type => 'IGNORE'}, Md} {ok, #{type => 'IGNORE'}, Md}
end. end.
deny(Msg) ->
NHeader = maps:put(<<"allow_publish">>, <<"false">>,
maps:get(headers, Msg, #{})),
maps:put(headers, NHeader, Msg).
allow(Msg) ->
NHeader = maps:put(<<"allow_publish">>, <<"true">>,
maps:get(headers, Msg, #{})),
maps:put(headers, NHeader, Msg).
-spec on_message_delivered(emqx_exhook_pb:message_delivered_request(), grpc:metadata()) -spec on_message_delivered(emqx_exhook_pb:message_delivered_request(), grpc:metadata())
-> {ok, emqx_exhook_pb:empty_success(), grpc:metadata()} -> {ok, emqx_exhook_pb:empty_success(), grpc:metadata()}
| {error, grpc_cowboy_h:error_response()}. | {error, grpc_cowboy_h:error_response()}.

View File

@ -299,19 +299,24 @@ prop_message_publish() ->
_ -> _ ->
ExpectedOutMsg = case emqx_message:from(Msg) of ExpectedOutMsg = case emqx_message:from(Msg) of
<<"baduser">> -> <<"baduser">> ->
MsgMap = emqx_message:to_map(Msg), MsgMap = #{headers := Headers}
= emqx_message:to_map(Msg),
emqx_message:from_map( emqx_message:from_map(
MsgMap#{qos => 0, MsgMap#{qos => 0,
topic => <<"">>, topic => <<"">>,
payload => <<"">> payload => <<"">>,
headers => maps:put(allow_publish, false, Headers)
}); });
<<"gooduser">> = From -> <<"gooduser">> = From ->
MsgMap = emqx_message:to_map(Msg), MsgMap = #{headers := Headers}
= emqx_message:to_map(Msg),
emqx_message:from_map( emqx_message:from_map(
MsgMap#{topic => From, MsgMap#{topic => From,
payload => From payload => From,
headers => maps:put(allow_publish, true, Headers)
}); });
_ -> Msg _ ->
Msg
end, end,
?assertEqual(ExpectedOutMsg, OutMsg), ?assertEqual(ExpectedOutMsg, OutMsg),
@ -464,7 +469,9 @@ from_message(Msg) ->
from => stringfy(emqx_message:from(Msg)), from => stringfy(emqx_message:from(Msg)),
topic => emqx_message:topic(Msg), topic => emqx_message:topic(Msg),
payload => emqx_message:payload(Msg), payload => emqx_message:payload(Msg),
timestamp => emqx_message:timestamp(Msg) timestamp => emqx_message:timestamp(Msg),
headers => emqx_exhook_handler:headers(
emqx_message:get_headers(Msg))
}. }.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------

View File

@ -13,7 +13,7 @@
]}. ]}.
{deps, {deps,
[{grpc, {git, "https://github.com/emqx/grpc-erl", {tag, "0.6.3"}}} [{grpc, {git, "https://github.com/emqx/grpc-erl", {tag, "0.6.4"}}}
]}. ]}.
{grpc, {grpc,

View File

@ -1,10 +1,10 @@
{deps, {deps,
[{lwm2m_coap, {git, "https://github.com/emqx/lwm2m-coap", {tag, "v1.1.5"}}} [{lwm2m_coap, {git, "https://github.com/emqx/lwm2m-coap", {tag, "v2.0.1"}}}
]}. ]}.
{profiles, {profiles,
[{test, [{test,
[{deps, [{er_coap_client, {git, "https://github.com/emqx/er_coap_client", {tag, "v1.0"}}}, [{deps, [{er_coap_client, {git, "https://github.com/emqx/er_coap_client", {tag, "v1.0.4"}}},
{emqx_ct_helpers, {git, "https://github.com/emqx/emqx-ct-helpers", {tag, "1.2.2"}}}, {emqx_ct_helpers, {git, "https://github.com/emqx/emqx-ct-helpers", {tag, "1.2.2"}}},
{emqtt, {git, "https://github.com/emqx/emqtt", {tag, "1.2.0"}}} {emqtt, {git, "https://github.com/emqx/emqtt", {tag, "1.2.0"}}}
]} ]}

View File

@ -32,4 +32,4 @@
-define(ERROR14, 114). %% OldPassword error -define(ERROR14, 114). %% OldPassword error
-define(ERROR15, 115). %% bad topic -define(ERROR15, 115). %% bad topic
-define(VERSIONS, ["4.0", "4.1", "4.2", "4.3"]). -define(VERSIONS, ["4.0", "4.1", "4.2", "4.3", "4.4"]).

View File

@ -1,6 +1,6 @@
{application, emqx_management, {application, emqx_management,
[{description, "EMQ X Management API and CLI"}, [{description, "EMQ X Management API and CLI"},
{vsn, "4.3.10"}, % strict semver, bump manually! {vsn, "4.4.1"}, % strict semver, bump manually!
{modules, []}, {modules, []},
{registered, [emqx_management_sup]}, {registered, [emqx_management_sup]},
{applications, [kernel,stdlib,minirest]}, {applications, [kernel,stdlib,minirest]},

View File

@ -1,17 +1,11 @@
%% -*- mode: erlang -*- %% -*- mode: erlang -*-
{VSN, {VSN,
[ {<<"4\\.3\\.[0-9]+">>, [{<<".*">>,
[{apply,{minirest,stop_http,['http:management']}}, [{apply,{minirest,stop_http,['http:management']}},
{apply,{minirest,stop_http,['https:management']}}, {apply,{minirest,stop_http,['https:management']}},
{restart_application, emqx_management} {restart_application, emqx_management}]}],
]}, [{<<".*">>,
{<<".*">>, []}
],
[ {<<"4\\.3\\.[0-9]+">>,
[{apply,{minirest,stop_http,['http:management']}}, [{apply,{minirest,stop_http,['http:management']}},
{apply,{minirest,stop_http,['https:management']}}, {apply,{minirest,stop_http,['https:management']}},
{restart_application, emqx_management} {restart_application, emqx_management}]}]
]},
{<<".*">>, []}
]
}. }.

View File

@ -22,6 +22,9 @@
-include_lib("emqx/include/emqx.hrl"). -include_lib("emqx/include/emqx.hrl").
-include_lib("emqx/include/emqx_mqtt.hrl"). -include_lib("emqx/include/emqx_mqtt.hrl").
-elvis([{elvis_style, invalid_dynamic_call, #{ignore => [emqx_mgmt]}}]).
-elvis([{elvis_style, god_modules, #{ignore => [emqx_mgmt]}}]).
%% Nodes and Brokers API %% Nodes and Brokers API
-export([ list_nodes/0 -export([ list_nodes/0
, lookup_node/1 , lookup_node/1
@ -49,6 +52,7 @@
, clean_acl_cache_all/1 , clean_acl_cache_all/1
, set_ratelimit_policy/2 , set_ratelimit_policy/2
, set_quota_policy/2 , set_quota_policy/2
, set_keepalive/2
]). ]).
%% Internal funcs %% Internal funcs
@ -143,9 +147,8 @@ node_info(Node) when Node =:= node() ->
memory_used => proplists:get_value(used, Memory), memory_used => proplists:get_value(used, Memory),
process_available => erlang:system_info(process_limit), process_available => erlang:system_info(process_limit),
process_used => erlang:system_info(process_count), process_used => erlang:system_info(process_count),
max_fds => max_fds => proplists:get_value(max_fds,
proplists:get_value( max_fds lists:usort(lists:flatten(erlang:system_info(check_io)))),
, lists:usort(lists:flatten(erlang:system_info(check_io)))),
connections => ets:info(emqx_channel, size), connections => ets:info(emqx_channel, size),
node_status => 'Running', node_status => 'Running',
uptime => iolist_to_binary(proplists:get_value(uptime, BrokerInfo)), uptime => iolist_to_binary(proplists:get_value(uptime, BrokerInfo)),
@ -227,7 +230,7 @@ lookup_client(Node, {username, Username}, FormatFun) ->
kickout_client(ClientId) -> kickout_client(ClientId) ->
Results = [kickout_client(Node, ClientId) || Node <- ekka_mnesia:running_nodes()], Results = [kickout_client(Node, ClientId) || Node <- ekka_mnesia:running_nodes()],
check_every_ok(Results). has_any_ok(Results).
kickout_client(Node, ClientId) when Node =:= node() -> kickout_client(Node, ClientId) when Node =:= node() ->
emqx_cm:kick_session(ClientId); emqx_cm:kick_session(ClientId);
@ -240,7 +243,7 @@ list_acl_cache(ClientId) ->
clean_acl_cache(ClientId) -> clean_acl_cache(ClientId) ->
Results = [clean_acl_cache(Node, ClientId) || Node <- ekka_mnesia:running_nodes()], Results = [clean_acl_cache(Node, ClientId) || Node <- ekka_mnesia:running_nodes()],
check_every_ok(Results). has_any_ok(Results).
clean_acl_cache(Node, ClientId) when Node =:= node() -> clean_acl_cache(Node, ClientId) when Node =:= node() ->
case emqx_cm:lookup_channels(ClientId) of case emqx_cm:lookup_channels(ClientId) of
@ -272,6 +275,11 @@ set_ratelimit_policy(ClientId, Policy) ->
set_quota_policy(ClientId, Policy) -> set_quota_policy(ClientId, Policy) ->
call_client(ClientId, {quota, Policy}). call_client(ClientId, {quota, Policy}).
set_keepalive(ClientId, Interval) when Interval >= 0 andalso Interval =< 65535 ->
call_client(ClientId, {keepalive, Interval});
set_keepalive(_ClientId, _Interval) ->
{error, ?ERROR2, <<"mqtt3.1.1 specification: keepalive must be between 0 and 65535">>}.
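
A brief sketch with a made-up client id; out-of-range values are rejected locally, before any node is contacted:

    %% Illustration only; assumes <<"client-1">> is currently connected:
    ok = emqx_mgmt:set_keepalive(<<"client-1">>, 300),
    {error, _Code, <<"mqtt3.1.1 specification: ", _/binary>>} =
        emqx_mgmt:set_keepalive(<<"client-1">>, 70000).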
%% @private %% @private
call_client(ClientId, Req) -> call_client(ClientId, Req) ->
Results = [call_client(Node, ClientId, Req) || Node <- ekka_mnesia:running_nodes()], Results = [call_client(Node, ClientId, Req) || Node <- ekka_mnesia:running_nodes()],
@ -315,6 +323,7 @@ list_subscriptions_via_topic(Topic, FormatFun) ->
lists:append([list_subscriptions_via_topic(Node, Topic, FormatFun) lists:append([list_subscriptions_via_topic(Node, Topic, FormatFun)
|| Node <- ekka_mnesia:running_nodes()]). || Node <- ekka_mnesia:running_nodes()]).
list_subscriptions_via_topic(Node, Topic, {M,F}) when Node =:= node() -> list_subscriptions_via_topic(Node, Topic, {M,F}) when Node =:= node() ->
MatchSpec = [{{{'_', '$1'}, '_'}, [{'=:=','$1', Topic}], ['$_']}], MatchSpec = [{{{'_', '$1'}, '_'}, [{'=:=','$1', Topic}], ['$_']}],
erlang:apply(M, F, [ets:select(emqx_suboption, MatchSpec)]); erlang:apply(M, F, [ets:select(emqx_suboption, MatchSpec)]);
@ -438,8 +447,8 @@ list_listeners(Node) when Node =:= node() ->
Http = lists:map(fun({Protocol, Opts}) -> Http = lists:map(fun({Protocol, Opts}) ->
#{protocol => Protocol, #{protocol => Protocol,
listen_on => proplists:get_value(port, Opts), listen_on => proplists:get_value(port, Opts),
acceptors => maps:get( num_acceptors acceptors => maps:get(num_acceptors,
, proplists:get_value(transport_options, Opts, #{}), 0), proplists:get_value(transport_options, Opts, #{}), 0),
max_conns => proplists:get_value(max_connections, Opts), max_conns => proplists:get_value(max_connections, Opts),
current_conns => proplists:get_value(all_connections, Opts), current_conns => proplists:get_value(all_connections, Opts),
shutdown_count => []} shutdown_count => []}
@ -488,10 +497,8 @@ add_duration_field([], _Now, Acc) ->
Acc; Acc;
add_duration_field([Alarm = #{activated := true, activate_at := ActivateAt} | Rest], Now, Acc) -> add_duration_field([Alarm = #{activated := true, activate_at := ActivateAt} | Rest], Now, Acc) ->
add_duration_field(Rest, Now, [Alarm#{duration => Now - ActivateAt} | Acc]); add_duration_field(Rest, Now, [Alarm#{duration => Now - ActivateAt} | Acc]);
add_duration_field([Alarm = #{ activated := false add_duration_field([Alarm = #{activated := false,
, activate_at := ActivateAt activate_at := ActivateAt, deactivate_at := DeactivateAt} | Rest], Now, Acc) ->
, deactivate_at := DeactivateAt}
| Rest], Now, Acc) ->
add_duration_field(Rest, Now, [Alarm#{duration => DeactivateAt - ActivateAt} | Acc]). add_duration_field(Rest, Now, [Alarm#{duration => DeactivateAt - ActivateAt} | Acc]).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -572,13 +579,13 @@ check_row_limit([Tab | Tables], Limit) ->
false -> check_row_limit(Tables, Limit) false -> check_row_limit(Tables, Limit)
end. end.
check_every_ok(Results) ->
case lists:any(fun(Item) -> Item =:= ok end, Results) of
true -> ok;
false -> lists:last(Results)
end.
max_row_limit() -> max_row_limit() ->
application:get_env(?APP, max_row_limit, ?MAX_ROW_LIMIT). application:get_env(?APP, max_row_limit, ?MAX_ROW_LIMIT).
table_size(Tab) -> ets:info(Tab, size). table_size(Tab) -> ets:info(Tab, size).
has_any_ok(Results) ->
case lists:any(fun(Item) -> Item =:= ok end, Results) of
true -> ok;
false -> lists:last(Results)
end.

View File

@ -53,17 +53,46 @@ paginate(Tables, Params, RowFun) ->
query_handle(Table) when is_atom(Table) -> query_handle(Table) when is_atom(Table) ->
qlc:q([R|| R <- ets:table(Table)]); qlc:q([R|| R <- ets:table(Table)]);
query_handle({Table, Opts}) when is_atom(Table) ->
qlc:q([R|| R <- ets:table(Table, Opts)]);
query_handle([Table]) when is_atom(Table) -> query_handle([Table]) when is_atom(Table) ->
qlc:q([R|| R <- ets:table(Table)]); qlc:q([R|| R <- ets:table(Table)]);
query_handle([{Table, Opts}]) when is_atom(Table) ->
qlc:q([R|| R <- ets:table(Table, Opts)]);
query_handle(Tables) -> query_handle(Tables) ->
qlc:append([qlc:q([E || E <- ets:table(T)]) || T <- Tables]). Fold = fun({Table, Opts}, Acc) ->
Handle = qlc:q([R|| R <- ets:table(Table, Opts)]),
[Handle | Acc];
(Table, Acc) ->
Handle = qlc:q([R|| R <- ets:table(Table)]),
[Handle | Acc]
end,
Handles = lists:foldl(Fold, [], Tables),
qlc:append(lists:reverse(Handles)).
count(Table) when is_atom(Table) -> count(Table) when is_atom(Table) ->
ets:info(Table, size); ets:info(Table, size);
count({Table, _Opts}) when is_atom(Table) ->
ets:info(Table, size);
count([Table]) when is_atom(Table) -> count([Table]) when is_atom(Table) ->
ets:info(Table, size); ets:info(Table, size);
count([{Table, _Opts}]) when is_atom(Table) ->
ets:info(Table, size);
count(Tables) -> count(Tables) ->
lists:sum([count(T) || T <- Tables]). Fold = fun({Table, _Opts}, Acc) ->
count(Table) + Acc;
(Table, Acc) ->
count(Table) + Acc
end,
lists:foldl(Fold, 0, Tables).
count(Table, Nodes) -> count(Table, Nodes) ->
lists:sum([rpc_call(Node, ets, info, [Table, size], 5000) || Node <- Nodes]). lists:sum([rpc_call(Node, ets, info, [Table, size], 5000) || Node <- Nodes]).
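
A minimal sketch of the widened table specs (inside this module, with tables that exist on a running broker); plain table names and {Table, Opts} tuples can be mixed, and the per-table sizes are summed:

    %% Illustration only:
    Handle = query_handle([emqx_suboption, {emqx_channel_info, []}]),
    Total  = count([emqx_suboption, {emqx_channel_info, []}]).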

View File

@ -117,6 +117,12 @@
func => clean_quota, func => clean_quota,
descr => "Clear the quota policy"}). descr => "Clear the quota policy"}).
-rest_api(#{name => set_keepalive,
method => 'PUT',
path => "/clients/:bin:clientid/keepalive",
func => set_keepalive,
descr => "Set the client keepalive"}).
-import(emqx_mgmt_util, [ ntoa/1 -import(emqx_mgmt_util, [ ntoa/1
, strftime/1 , strftime/1
]). ]).
@ -130,23 +136,24 @@
, set_quota_policy/2 , set_quota_policy/2
, clean_ratelimit/2 , clean_ratelimit/2
, clean_quota/2 , clean_quota/2
, set_keepalive/2
]). ]).
-export([ query/3 -export([ query/3
, format_channel_info/1 , format_channel_info/1
]). ]).
-define(query_fun, {?MODULE, query}). -define(QUERY_FUN, {?MODULE, query}).
-define(format_fun, {?MODULE, format_channel_info}). -define(FORMAT_FUN, {?MODULE, format_channel_info}).
list(Bindings, Params) when map_size(Bindings) == 0 -> list(Bindings, Params) when map_size(Bindings) == 0 ->
fence(fun() -> fence(fun() ->
emqx_mgmt_api:cluster_query(Params, ?CLIENT_QS_SCHEMA, ?query_fun) emqx_mgmt_api:cluster_query(Params, ?CLIENT_QS_SCHEMA, ?QUERY_FUN)
end); end);
list(#{node := Node}, Params) when Node =:= node() -> list(#{node := Node}, Params) when Node =:= node() ->
fence(fun() -> fence(fun() ->
emqx_mgmt_api:node_query(Node, Params, ?CLIENT_QS_SCHEMA, ?query_fun) emqx_mgmt_api:node_query(Node, Params, ?CLIENT_QS_SCHEMA, ?QUERY_FUN)
end); end);
list(Bindings = #{node := Node}, Params) -> list(Bindings = #{node := Node}, Params) ->
@ -169,16 +176,20 @@ fence(Func) ->
end. end.
lookup(#{node := Node, clientid := ClientId}, _Params) -> lookup(#{node := Node, clientid := ClientId}, _Params) ->
minirest:return({ok, emqx_mgmt:lookup_client(Node, {clientid, emqx_mgmt_util:urldecode(ClientId)}, ?format_fun)}); minirest:return({ok, emqx_mgmt:lookup_client(Node,
{clientid, emqx_mgmt_util:urldecode(ClientId)}, ?FORMAT_FUN)});
lookup(#{clientid := ClientId}, _Params) -> lookup(#{clientid := ClientId}, _Params) ->
minirest:return({ok, emqx_mgmt:lookup_client({clientid, emqx_mgmt_util:urldecode(ClientId)}, ?format_fun)}); minirest:return({ok, emqx_mgmt:lookup_client(
{clientid, emqx_mgmt_util:urldecode(ClientId)}, ?FORMAT_FUN)});
lookup(#{node := Node, username := Username}, _Params) -> lookup(#{node := Node, username := Username}, _Params) ->
minirest:return({ok, emqx_mgmt:lookup_client(Node, {username, emqx_mgmt_util:urldecode(Username)}, ?format_fun)}); minirest:return({ok, emqx_mgmt:lookup_client(Node,
{username, emqx_mgmt_util:urldecode(Username)}, ?FORMAT_FUN)});
lookup(#{username := Username}, _Params) -> lookup(#{username := Username}, _Params) ->
minirest:return({ok, emqx_mgmt:lookup_client({username, emqx_mgmt_util:urldecode(Username)}, ?format_fun)}). minirest:return({ok, emqx_mgmt:lookup_client({username,
emqx_mgmt_util:urldecode(Username)}, ?FORMAT_FUN)}).
kickout(#{clientid := ClientId}, _Params) -> kickout(#{clientid := ClientId}, _Params) ->
case emqx_mgmt:kickout_client(emqx_mgmt_util:urldecode(ClientId)) of case emqx_mgmt:kickout_client(emqx_mgmt_util:urldecode(ClientId)) of
@ -204,7 +215,7 @@ list_acl_cache(#{clientid := ClientId}, _Params) ->
set_ratelimit_policy(#{clientid := ClientId}, Params) -> set_ratelimit_policy(#{clientid := ClientId}, Params) ->
P = [{conn_bytes_in, proplists:get_value(<<"conn_bytes_in">>, Params)}, P = [{conn_bytes_in, proplists:get_value(<<"conn_bytes_in">>, Params)},
{conn_messages_in, proplists:get_value(<<"conn_messages_in">>, Params)}], {conn_messages_in, proplists:get_value(<<"conn_messages_in">>, Params)}],
case [{K, parse_ratelimit_str(V)} || {K, V} <- P, V =/= undefined] of case filter_ratelimit_params(P) of
[] -> minirest:return(); [] -> minirest:return();
Policy -> Policy ->
case emqx_mgmt:set_ratelimit_policy(emqx_mgmt_util:urldecode(ClientId), Policy) of case emqx_mgmt:set_ratelimit_policy(emqx_mgmt_util:urldecode(ClientId), Policy) of
@ -223,7 +234,7 @@ clean_ratelimit(#{clientid := ClientId}, _Params) ->
set_quota_policy(#{clientid := ClientId}, Params) -> set_quota_policy(#{clientid := ClientId}, Params) ->
P = [{conn_messages_routing, proplists:get_value(<<"conn_messages_routing">>, Params)}], P = [{conn_messages_routing, proplists:get_value(<<"conn_messages_routing">>, Params)}],
case [{K, parse_ratelimit_str(V)} || {K, V} <- P, V =/= undefined] of case filter_ratelimit_params(P) of
[] -> minirest:return(); [] -> minirest:return();
Policy -> Policy ->
case emqx_mgmt:set_quota_policy(emqx_mgmt_util:urldecode(ClientId), Policy) of case emqx_mgmt:set_quota_policy(emqx_mgmt_util:urldecode(ClientId), Policy) of
@ -233,6 +244,7 @@ set_quota_policy(#{clientid := ClientId}, Params) ->
end end
end. end.
clean_quota(#{clientid := ClientId}, _Params) -> clean_quota(#{clientid := ClientId}, _Params) ->
case emqx_mgmt:set_quota_policy(emqx_mgmt_util:urldecode(ClientId), []) of case emqx_mgmt:set_quota_policy(emqx_mgmt_util:urldecode(ClientId), []) of
ok -> minirest:return(); ok -> minirest:return();
@ -240,6 +252,20 @@ clean_quota(#{clientid := ClientId}, _Params) ->
{error, Reason} -> minirest:return({error, ?ERROR1, Reason}) {error, Reason} -> minirest:return({error, ?ERROR1, Reason})
end. end.
set_keepalive(#{clientid := ClientId}, Params) ->
case proplists:get_value(<<"interval">>, Params) of
undefined ->
minirest:return({error, ?ERROR7, params_not_found});
Interval0 ->
Interval = binary_to_integer(Interval0),
case emqx_mgmt:set_keepalive(emqx_mgmt_util:urldecode(ClientId), Interval) of
ok -> minirest:return();
{error, not_found} -> minirest:return({error, ?ERROR12, not_found});
{error, Code, Reason} -> minirest:return({error, Code, Reason});
{error, Reason} -> minirest:return({error, ?ERROR1, Reason})
end
end.
%% @private %% @private
%% S = 100,1s %% S = 100,1s
%% | 100KB, 1m %% | 100KB, 1m
@ -266,7 +292,7 @@ format_channel_info({_Key, Info, Stats0}) ->
ConnInfo = maps:get(conninfo, Info, #{}), ConnInfo = maps:get(conninfo, Info, #{}),
Session = case maps:get(session, Info, #{}) of Session = case maps:get(session, Info, #{}) of
undefined -> #{}; undefined -> #{};
_Sess -> _Sess Sess -> Sess
end, end,
SessCreated = maps:get(created_at, Session, maps:get(connected_at, ConnInfo)), SessCreated = maps:get(created_at, Session, maps:get(connected_at, ConnInfo)),
Connected = case maps:get(conn_state, Info, connected) of Connected = case maps:get(conn_state, Info, connected) of
@ -287,8 +313,14 @@ format_channel_info({_Key, Info, Stats0}) ->
inflight, max_inflight, awaiting_rel, inflight, max_inflight, awaiting_rel,
max_awaiting_rel, mqueue_len, mqueue_dropped, max_awaiting_rel, mqueue_len, mqueue_dropped,
max_mqueue, heap_size, reductions, mailbox_len, max_mqueue, heap_size, reductions, mailbox_len,
recv_cnt, recv_msg, recv_oct, recv_pkt, send_cnt, recv_cnt,
send_msg, send_oct, send_pkt], NStats), recv_msg, 'recv_msg.qos0', 'recv_msg.qos1', 'recv_msg.qos2',
'recv_msg.dropped', 'recv_msg.dropped.expired',
recv_oct, recv_pkt, send_cnt,
send_msg, 'send_msg.qos0', 'send_msg.qos1', 'send_msg.qos2',
'send_msg.dropped', 'send_msg.dropped.expired',
'send_msg.dropped.queue_full', 'send_msg.dropped.too_large',
send_oct, send_pkt], NStats),
maps:with([clientid, username, mountpoint, is_bridge, zone], ClientInfo), maps:with([clientid, username, mountpoint, is_bridge, zone], ClientInfo),
maps:with([clean_start, keepalive, expiry_interval, proto_name, maps:with([clean_start, keepalive, expiry_interval, proto_name,
proto_ver, peername, connected_at, disconnected_at], ConnInfo), proto_ver, peername, connected_at, disconnected_at], ConnInfo),
@ -306,7 +338,8 @@ format(Data) when is_map(Data)->
created_at => iolist_to_binary(strftime(CreatedAt div 1000))}, created_at => iolist_to_binary(strftime(CreatedAt div 1000))},
case maps:get(disconnected_at, Data, undefined) of case maps:get(disconnected_at, Data, undefined) of
undefined -> #{}; undefined -> #{};
DisconnectedAt -> #{disconnected_at => iolist_to_binary(strftime(DisconnectedAt div 1000))} DisconnectedAt -> #{disconnected_at =>
iolist_to_binary(strftime(DisconnectedAt div 1000))}
end). end).
format_acl_cache({{PubSub, Topic}, {AclResult, Timestamp}}) -> format_acl_cache({{PubSub, Topic}, {AclResult, Timestamp}}) ->
@ -326,7 +359,8 @@ query({Qs, []}, Start, Limit) ->
query({Qs, Fuzzy}, Start, Limit) -> query({Qs, Fuzzy}, Start, Limit) ->
Ms = qs2ms(Qs), Ms = qs2ms(Qs),
MatchFun = match_fun(Ms, Fuzzy), MatchFun = match_fun(Ms, Fuzzy),
emqx_mgmt_api:traverse_table(emqx_channel_info, MatchFun, Start, Limit, fun format_channel_info/1). emqx_mgmt_api:traverse_table(emqx_channel_info, MatchFun,
Start, Limit, fun format_channel_info/1).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Match funcs %% Match funcs
@ -399,6 +433,9 @@ ms(connected_at, X) ->
ms(created_at, X) -> ms(created_at, X) ->
#{session => #{created_at => X}}. #{session => #{created_at => X}}.
filter_ratelimit_params(P) ->
[{K, parse_ratelimit_str(V)} || {K, V} <- P, V =/= undefined].
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% EUnits %% EUnits
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------

View File

@ -71,8 +71,8 @@ subscribe(_Bindings, Params) ->
publish(_Bindings, Params) -> publish(_Bindings, Params) ->
logger:debug("API publish Params:~p", [Params]), logger:debug("API publish Params:~p", [Params]),
{ClientId, Topic, Qos, Retain, Payload} = parse_publish_params(Params), {ClientId, Topic, Qos, Retain, Payload, UserProps} = parse_publish_params(Params),
case do_publish(ClientId, Topic, Qos, Retain, Payload) of case do_publish(ClientId, Topic, Qos, Retain, Payload, UserProps) of
{ok, MsgIds} -> {ok, MsgIds} ->
case proplists:get_value(<<"return">>, Params, undefined) of case proplists:get_value(<<"return">>, Params, undefined) of
undefined -> minirest:return(ok); undefined -> minirest:return(ok);
@ -114,7 +114,8 @@ loop_subscribe([Params | ParamsN], Acc) ->
{_, Code0, _Reason} -> Code0 {_, Code0, _Reason} -> Code0
end, end,
Result = #{clientid => ClientId, Result = #{clientid => ClientId,
topic => resp_topic(proplists:get_value(<<"topic">>, Params), proplists:get_value(<<"topics">>, Params, <<"">>)), topic => resp_topic(proplists:get_value(<<"topic">>, Params),
proplists:get_value(<<"topics">>, Params, <<"">>)),
code => Code}, code => Code},
loop_subscribe(ParamsN, [Result | Acc]). loop_subscribe(ParamsN, [Result | Acc]).
@ -123,12 +124,13 @@ loop_publish(Params) ->
loop_publish([], Result) -> loop_publish([], Result) ->
lists:reverse(Result); lists:reverse(Result);
loop_publish([Params | ParamsN], Acc) -> loop_publish([Params | ParamsN], Acc) ->
{ClientId, Topic, Qos, Retain, Payload} = parse_publish_params(Params), {ClientId, Topic, Qos, Retain, Payload, UserProps} = parse_publish_params(Params),
Code = case do_publish(ClientId, Topic, Qos, Retain, Payload) of Code = case do_publish(ClientId, Topic, Qos, Retain, Payload, UserProps) of
{ok, _} -> 0; {ok, _} -> 0;
{_, Code0, _} -> Code0 {_, Code0, _} -> Code0
end, end,
Result = #{topic => resp_topic(proplists:get_value(<<"topic">>, Params), proplists:get_value(<<"topics">>, Params, <<"">>)), Result = #{topic => resp_topic(proplists:get_value(<<"topic">>, Params),
proplists:get_value(<<"topics">>, Params, <<"">>)),
code => Code}, code => Code},
loop_publish(ParamsN, [Result | Acc]). loop_publish(ParamsN, [Result | Acc]).
@ -143,7 +145,8 @@ loop_unsubscribe([Params | ParamsN], Acc) ->
{_, Code0, _} -> Code0 {_, Code0, _} -> Code0
end, end,
Result = #{clientid => ClientId, Result = #{clientid => ClientId,
topic => resp_topic(proplists:get_value(<<"topic">>, Params), proplists:get_value(<<"topics">>, Params, <<"">>)), topic => resp_topic(proplists:get_value(<<"topic">>, Params),
proplists:get_value(<<"topics">>, Params, <<"">>)),
code => Code}, code => Code},
loop_unsubscribe(ParamsN, [Result | Acc]). loop_unsubscribe(ParamsN, [Result | Acc]).
@ -158,14 +161,17 @@ do_subscribe(ClientId, Topics, QoS) ->
_ -> ok _ -> ok
end. end.
do_publish(ClientId, _Topics, _Qos, _Retain, _Payload) when not (is_binary(ClientId) or (ClientId =:= undefined)) -> do_publish(ClientId, _Topics, _Qos, _Retain, _Payload, _UserProps)
when not (is_binary(ClientId) or (ClientId =:= undefined)) ->
{ok, ?ERROR8, <<"bad clientid: must be string">>}; {ok, ?ERROR8, <<"bad clientid: must be string">>};
do_publish(_ClientId, [], _Qos, _Retain, _Payload) -> do_publish(_ClientId, [], _Qos, _Retain, _Payload, _UserProps) ->
{ok, ?ERROR15, bad_topic}; {ok, ?ERROR15, bad_topic};
do_publish(ClientId, Topics, Qos, Retain, Payload) -> do_publish(ClientId, Topics, Qos, Retain, Payload, UserProps) ->
MsgIds = lists:map(fun(Topic) -> MsgIds = lists:map(fun(Topic) ->
Msg = emqx_message:make(ClientId, Qos, Topic, Payload), Msg = emqx_message:make(ClientId, Qos, Topic, Payload),
_ = emqx_mgmt:publish(Msg#message{flags = #{retain => Retain}}), UserProps1 = #{'User-Property' => UserProps},
_ = emqx_mgmt:publish(Msg#message{flags = #{retain => Retain},
headers = #{properties => UserProps1}}),
emqx_guid:to_hexstr(Msg#message.id) emqx_guid:to_hexstr(Msg#message.id)
end, Topics), end, Topics),
{ok, MsgIds}. {ok, MsgIds}.
@ -185,19 +191,22 @@ do_unsubscribe(ClientId, Topic) ->
parse_subscribe_params(Params) -> parse_subscribe_params(Params) ->
ClientId = proplists:get_value(<<"clientid">>, Params), ClientId = proplists:get_value(<<"clientid">>, Params),
Topics = topics(filter, proplists:get_value(<<"topic">>, Params), proplists:get_value(<<"topics">>, Params, <<"">>)), Topics = topics(filter, proplists:get_value(<<"topic">>, Params),
proplists:get_value(<<"topics">>, Params, <<"">>)),
QoS = proplists:get_value(<<"qos">>, Params, 0), QoS = proplists:get_value(<<"qos">>, Params, 0),
{ClientId, Topics, QoS}. {ClientId, Topics, QoS}.
parse_publish_params(Params) -> parse_publish_params(Params) ->
Topics = topics(name, proplists:get_value(<<"topic">>, Params), proplists:get_value(<<"topics">>, Params, <<"">>)), Topics = topics(name, proplists:get_value(<<"topic">>, Params),
proplists:get_value(<<"topics">>, Params, <<"">>)),
ClientId = proplists:get_value(<<"clientid">>, Params), ClientId = proplists:get_value(<<"clientid">>, Params),
Payload = decode_payload(proplists:get_value(<<"payload">>, Params, <<>>), Payload = decode_payload(proplists:get_value(<<"payload">>, Params, <<>>),
proplists:get_value(<<"encoding">>, Params, <<"plain">>)), proplists:get_value(<<"encoding">>, Params, <<"plain">>)),
Qos = proplists:get_value(<<"qos">>, Params, 0), Qos = proplists:get_value(<<"qos">>, Params, 0),
Retain = proplists:get_value(<<"retain">>, Params, false), Retain = proplists:get_value(<<"retain">>, Params, false),
Payload1 = maybe_maps_to_binary(Payload), Payload1 = maybe_maps_to_binary(Payload),
{ClientId, Topics, Qos, Retain, Payload1}. UserProps = check_user_props(proplists:get_value(<<"user_properties">>, Params, [])),
{ClientId, Topics, Qos, Retain, Payload1, UserProps}.
parse_unsubscribe_params(Params) -> parse_unsubscribe_params(Params) ->
ClientId = proplists:get_value(<<"clientid">>, Params), ClientId = proplists:get_value(<<"clientid">>, Params),
@ -251,3 +260,8 @@ maybe_maps_to_binary(Payload) ->
_C : _E : S -> _C : _E : S ->
error({encode_payload_fail, S}) error({encode_payload_fail, S})
end. end.
check_user_props(UserProps) when is_list(UserProps) ->
UserProps;
check_user_props(UserProps) ->
error({user_properties_type_error, UserProps}).
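
A short sketch of the new request shape as seen by this handler (parameter names are taken from the code above, the values are made up); the user properties end up on the published message as MQTT 5.0 'User-Property' pairs:

    %% Illustration only, inside this module:
    Params = [{<<"topic">>, <<"t/1">>},
              {<<"payload">>, <<"hello">>},
              {<<"qos">>, 1},
              {<<"retain">>, false},
              {<<"user_properties">>, [{<<"region">>, <<"eu-1">>}]}],
    {_ClientId, _Topics, 1, false, _Payload, UserProps} = parse_publish_params(Params),
    %% do_publish/6 then attaches them as
    %% headers = #{properties => #{'User-Property' => UserProps}}.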

View File

@ -21,7 +21,9 @@
-include("emqx_mgmt.hrl"). -include("emqx_mgmt.hrl").
-define(PRINT_CMD(Cmd, Descr), io:format("~-48s# ~s~n", [Cmd, Descr])). -elvis([{elvis_style, invalid_dynamic_call, disable}]).
-define(PRINT_CMD(Cmd, Desc), io:format("~-48s# ~s~n", [Cmd, Desc])).
-export([load/0]). -export([load/0]).
@ -36,6 +38,7 @@
, vm/1 , vm/1
, mnesia/1 , mnesia/1
, trace/1 , trace/1
, traces/1
, log/1 , log/1
, mgmt/1 , mgmt/1
, data/1 , data/1
@ -74,11 +77,8 @@ mgmt(["insert", AppId, Name]) ->
mgmt(["lookup", AppId]) -> mgmt(["lookup", AppId]) ->
case emqx_mgmt_auth:lookup_app(list_to_binary(AppId)) of case emqx_mgmt_auth:lookup_app(list_to_binary(AppId)) of
{AppId1, AppSecret, Name, Desc, Status, Expired} -> undefined -> emqx_ctl:print("Not Found.~n");
emqx_ctl:print("app_id: ~s~nsecret: ~s~nname: ~s~ndesc: ~s~nstatus: ~s~nexpired: ~p~n", App -> print_app_info(App)
[AppId1, AppSecret, Name, Desc, Status, Expired]);
undefined ->
emqx_ctl:print("Not Found.~n")
end; end;
mgmt(["update", AppId, Status]) -> mgmt(["update", AppId, Status]) ->
@ -99,10 +99,7 @@ mgmt(["delete", AppId]) ->
end; end;
mgmt(["list"]) -> mgmt(["list"]) ->
lists:foreach(fun({AppId, AppSecret, Name, Desc, Status, Expired}) -> lists:foreach(fun print_app_info/1, emqx_mgmt_auth:list_apps());
emqx_ctl:print("app_id: ~s, secret: ~s, name: ~s, desc: ~s, status: ~s, expired: ~p~n",
[AppId, AppSecret, Name, Desc, Status, Expired])
end, emqx_mgmt_auth:list_apps());
mgmt(_) -> mgmt(_) ->
emqx_ctl:usage([{"mgmt list", "List Applications"}, emqx_ctl:usage([{"mgmt list", "List Applications"},
@ -128,10 +125,12 @@ broker([]) ->
[emqx_ctl:print("~-10s: ~s~n", [Fun, emqx_sys:Fun()]) || Fun <- Funs]; [emqx_ctl:print("~-10s: ~s~n", [Fun, emqx_sys:Fun()]) || Fun <- Funs];
broker(["stats"]) -> broker(["stats"]) ->
[emqx_ctl:print("~-30s: ~w~n", [Stat, Val]) || {Stat, Val} <- lists:sort(emqx_stats:getstats())]; [emqx_ctl:print("~-30s: ~w~n", [Stat, Val]) ||
{Stat, Val} <- lists:sort(emqx_stats:getstats())];
broker(["metrics"]) -> broker(["metrics"]) ->
[emqx_ctl:print("~-30s: ~w~n", [Metric, Val]) || {Metric, Val} <- lists:sort(emqx_metrics:all())]; [emqx_ctl:print("~-30s: ~w~n", [Metric, Val]) ||
{Metric, Val} <- lists:sort(emqx_metrics:all())];
broker(_) -> broker(_) ->
emqx_ctl:usage([{"broker", "Show broker version, uptime and description"}, emqx_ctl:usage([{"broker", "Show broker version, uptime and description"},
@ -258,8 +257,10 @@ subscriptions(["del", ClientId, Topic]) ->
subscriptions(_) -> subscriptions(_) ->
emqx_ctl:usage([{"subscriptions list", "List all subscriptions"}, emqx_ctl:usage([{"subscriptions list", "List all subscriptions"},
{"subscriptions show <ClientId>", "Show subscriptions of a client"}, {"subscriptions show <ClientId>", "Show subscriptions of a client"},
{"subscriptions add <ClientId> <Topic> <QoS>", "Add a static subscription manually"}, {"subscriptions add <ClientId> <Topic> <QoS>",
{"subscriptions del <ClientId> <Topic>", "Delete a static subscription manually"}]). "Add a static subscription manually"},
{"subscriptions del <ClientId> <Topic>",
"Delete a static subscription manually"}]).
if_valid_qos(QoS, Fun) -> if_valid_qos(QoS, Fun) ->
try list_to_integer(QoS) of try list_to_integer(QoS) of
@ -328,14 +329,20 @@ vm(["memory"]) ->
[emqx_ctl:print("memory/~-17s: ~w~n", [Cat, Val]) || {Cat, Val} <- erlang:memory()]; [emqx_ctl:print("memory/~-17s: ~w~n", [Cat, Val]) || {Cat, Val} <- erlang:memory()];
vm(["process"]) -> vm(["process"]) ->
[emqx_ctl:print("process/~-16s: ~w~n", [Name, erlang:system_info(Key)]) || {Name, Key} <- [{limit, process_limit}, {count, process_count}]]; [emqx_ctl:print("process/~-16s: ~w~n",
[Name, erlang:system_info(Key)]) ||
{Name, Key} <- [{limit, process_limit}, {count, process_count}]];
vm(["io"]) -> vm(["io"]) ->
IoInfo = lists:usort(lists:flatten(erlang:system_info(check_io))), IoInfo = lists:usort(lists:flatten(erlang:system_info(check_io))),
[emqx_ctl:print("io/~-21s: ~w~n", [Key, proplists:get_value(Key, IoInfo)]) || Key <- [max_fds, active_fds]]; [emqx_ctl:print("io/~-21s: ~w~n",
[Key, proplists:get_value(Key, IoInfo)]) ||
Key <- [max_fds, active_fds]];
vm(["ports"]) -> vm(["ports"]) ->
[emqx_ctl:print("ports/~-16s: ~w~n", [Name, erlang:system_info(Key)]) || {Name, Key} <- [{count, port_count}, {limit, port_limit}]]; [emqx_ctl:print("ports/~-16s: ~w~n",
[Name, erlang:system_info(Key)]) ||
{Name, Key} <- [{count, port_count}, {limit, port_limit}]];
vm(_) -> vm(_) ->
emqx_ctl:usage([{"vm all", "Show info of Erlang VM"}, emqx_ctl:usage([{"vm all", "Show info of Erlang VM"},
@ -372,8 +379,9 @@ log(["primary-level", Level]) ->
emqx_ctl:print("~s~n", [emqx_logger:get_primary_log_level()]); emqx_ctl:print("~s~n", [emqx_logger:get_primary_log_level()]);
log(["handlers", "list"]) -> log(["handlers", "list"]) ->
_ = [emqx_ctl:print("LogHandler(id=~s, level=~s, destination=~s, status=~s)~n", [Id, Level, Dst, Status]) _ = [emqx_ctl:print("LogHandler(id=~s, level=~s, destination=~s, status=~s)~n",
|| #{id := Id, level := Level, dst := Dst, status := Status} <- emqx_logger:get_log_handlers()], [Id, Level, Dst, Status]) || #{id := Id, level := Level, dst := Dst, status := Status}
<- emqx_logger:get_log_handlers()],
ok; ok;
log(["handlers", "start", HandlerId]) -> log(["handlers", "start", HandlerId]) ->
@ -406,43 +414,51 @@ log(_) ->
{"log handlers list", "Show log handlers"}, {"log handlers list", "Show log handlers"},
{"log handlers start <HandlerId>", "Start a log handler"}, {"log handlers start <HandlerId>", "Start a log handler"},
{"log handlers stop <HandlerId>", "Stop a log handler"}, {"log handlers stop <HandlerId>", "Stop a log handler"},
{"log handlers set-level <HandlerId> <Level>", "Set log level of a log handler"}]). {"log handlers set-level <HandlerId> <Level>",
"Set log level of a log handler"}]).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% @doc Trace Command %% @doc Trace Command
trace(["list"]) -> trace(["list"]) ->
lists:foreach(fun({{Who, Name}, {Level, LogFile}}) -> lists:foreach(fun(Trace) ->
emqx_ctl:print("Trace(~s=~s, level=~s, destination=~p)~n", [Who, Name, Level, LogFile]) #{type := Type, filter := Filter, level := Level, dst := Dst} = Trace,
end, emqx_tracer:lookup_traces()); emqx_ctl:print("Trace(~s=~s, level=~s, destination=~p)~n", [Type, Filter, Level, Dst])
end, emqx_trace_handler:running());
trace(["stop", "client", ClientId]) -> trace(["stop", Operation, ClientId]) ->
trace_off(clientid, ClientId); case trace_type(Operation) of
{ok, Type} -> trace_off(Type, ClientId);
error -> trace([])
end;
trace(["start", "client", ClientId, LogFile]) -> trace(["start", Operation, ClientId, LogFile]) ->
trace_on(clientid, ClientId, all, LogFile); trace(["start", Operation, ClientId, LogFile, "all"]);
trace(["start", "client", ClientId, LogFile, Level]) -> trace(["start", Operation, ClientId, LogFile, Level]) ->
trace_on(clientid, ClientId, list_to_atom(Level), LogFile); case trace_type(Operation) of
{ok, Type} -> trace_on(Type, ClientId, list_to_existing_atom(Level), LogFile);
trace(["stop", "topic", Topic]) -> error -> trace([])
trace_off(topic, Topic); end;
trace(["start", "topic", Topic, LogFile]) ->
trace_on(topic, Topic, all, LogFile);
trace(["start", "topic", Topic, LogFile, Level]) ->
trace_on(topic, Topic, list_to_atom(Level), LogFile);
trace(_) -> trace(_) ->
emqx_ctl:usage([{"trace list", "List all traces started"}, emqx_ctl:usage([{"trace list", "List all traces started on local node"},
{"trace start client <ClientId> <File> [<Level>]", "Traces for a client"}, {"trace start client <ClientId> <File> [<Level>]",
{"trace stop client <ClientId>", "Stop tracing for a client"}, "Traces for a client on local node"},
{"trace start topic <Topic> <File> [<Level>] ", "Traces for a topic"}, {"trace stop client <ClientId>",
{"trace stop topic <Topic> ", "Stop tracing for a topic"}]). "Stop tracing for a client on local node"},
{"trace start topic <Topic> <File> [<Level>] ",
"Traces for a topic on local node"},
{"trace stop topic <Topic> ",
"Stop tracing for a topic on local node"},
{"trace start ip_address <IP> <File> [<Level>] ",
"Traces for a client IP on local node"},
{"trace stop ip_address <IP> ",
"Stop tracing for a client IP on local node"}
]).
trace_on(Who, Name, Level, LogFile) -> trace_on(Who, Name, Level, LogFile) ->
case emqx_tracer:start_trace({Who, iolist_to_binary(Name)}, Level, LogFile) of case emqx_trace_handler:install(Who, Name, Level, LogFile) of
ok -> ok ->
emqx_ctl:print("trace ~s ~s successfully~n", [Who, Name]); emqx_ctl:print("trace ~s ~s successfully~n", [Who, Name]);
{error, Error} -> {error, Error} ->
@ -450,13 +466,94 @@ trace_on(Who, Name, Level, LogFile) ->
end. end.
trace_off(Who, Name) -> trace_off(Who, Name) ->
case emqx_tracer:stop_trace({Who, iolist_to_binary(Name)}) of case emqx_trace_handler:uninstall(Who, Name) of
ok -> ok ->
emqx_ctl:print("stop tracing ~s ~s successfully~n", [Who, Name]); emqx_ctl:print("stop tracing ~s ~s successfully~n", [Who, Name]);
{error, Error} -> {error, Error} ->
emqx_ctl:print("[error] stop tracing ~s ~s: ~p~n", [Who, Name, Error]) emqx_ctl:print("[error] stop tracing ~s ~s: ~p~n", [Who, Name, Error])
end. end.
%%--------------------------------------------------------------------
%% @doc Trace Cluster Command
traces(["list"]) ->
{ok, List} = emqx_trace_api:list_trace(get, []),
case List of
[] ->
emqx_ctl:print("Cluster Trace is empty~n", []);
_ ->
lists:foreach(fun(Trace) ->
#{type := Type, name := Name, status := Status,
log_size := LogSize} = Trace,
emqx_ctl:print("Trace(~s: ~s=~s, ~s, LogSize:~p)~n",
[Name, Type, maps:get(Type, Trace), Status, LogSize])
end, List)
end,
length(List);
traces(["stop", Name]) ->
trace_cluster_off(Name);
traces(["delete", Name]) ->
trace_cluster_del(Name);
traces(["start", Name, Operation, Filter]) ->
traces(["start", Name, Operation, Filter, "900"]);
traces(["start", Name, Operation, Filter, DurationS]) ->
case trace_type(Operation) of
{ok, Type} -> trace_cluster_on(Name, Type, Filter, DurationS);
error -> traces([])
end;
traces(_) ->
emqx_ctl:usage([{"traces list", "List all cluster traces started"},
{"traces start <Name> client <ClientId>", "Traces for a client in cluster"},
{"traces start <Name> topic <Topic>", "Traces for a topic in cluster"},
{"traces start <Name> ip_address <IPAddr>", "Traces for an IP in cluster"},
{"traces stop <Name>", "Stop trace in cluster"},
{"traces delete <Name>", "Delete trace in cluster"}
]).
trace_cluster_on(Name, Type, Filter, DurationS0) ->
case erlang:whereis(emqx_trace) of
undefined ->
emqx_ctl:print("[error] Tracer module not started~n"
"Please run `emqx_ctl modules start tracer` "
"or `emqx_ctl modules start emqx_mod_trace` first~n", []);
_ ->
DurationS = list_to_integer(DurationS0),
Now = erlang:system_time(second),
Trace = #{ name => list_to_binary(Name)
, type => atom_to_binary(Type)
, Type => list_to_binary(Filter)
, start_at => list_to_binary(calendar:system_time_to_rfc3339(Now))
, end_at => list_to_binary(calendar:system_time_to_rfc3339(Now + DurationS))
},
case emqx_trace:create(Trace) of
ok ->
emqx_ctl:print("Cluster_trace ~p ~s ~s successfully~n", [Type, Filter, Name]);
{error, Error} ->
emqx_ctl:print("[error] Cluster_trace ~s ~s=~s ~p~n",
[Name, Type, Filter, Error])
end
end.
trace_cluster_del(Name) ->
case emqx_trace:delete(list_to_binary(Name)) of
ok -> emqx_ctl:print("Del cluster_trace ~s successfully~n", [Name]);
{error, Error} -> emqx_ctl:print("[error] Del cluster_trace ~s: ~p~n", [Name, Error])
end.
trace_cluster_off(Name) ->
case emqx_trace:update(list_to_binary(Name), false) of
ok -> emqx_ctl:print("Stop cluster_trace ~s successfully~n", [Name]);
{error, Error} -> emqx_ctl:print("[error] Stop cluster_trace ~s: ~p~n", [Name, Error])
end.
trace_type("client") -> {ok, clientid};
trace_type("topic") -> {ok, topic};
trace_type("ip_address") -> {ok, ip_address};
trace_type(_) -> error.
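
A sketch of how the cluster-trace commands map onto these functions, assuming this module is the management CLI (emqx_mgmt_cli) and the tracer module has been started; trace name, client id and duration are made up:

    %% Equivalent to running `emqx_ctl traces start t1 client c-123 600` etc.:
    emqx_mgmt_cli:traces(["start", "t1", "client", "c-123", "600"]),
    emqx_mgmt_cli:traces(["list"]),
    emqx_mgmt_cli:traces(["stop", "t1"]),
    emqx_mgmt_cli:traces(["delete", "t1"]).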
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% @doc Listeners Command %% @doc Listeners Command
@ -473,8 +570,9 @@ listeners([]) ->
end, esockd:listeners()), end, esockd:listeners()),
lists:foreach(fun({Protocol, Opts}) -> lists:foreach(fun({Protocol, Opts}) ->
Port = proplists:get_value(port, Opts), Port = proplists:get_value(port, Opts),
Acceptors = maps:get(num_acceptors, proplists:get_value(transport_options, Opts, #{}), 0),
Info = [{listen_on, {string, emqx_listeners:format_listen_on(Port)}}, Info = [{listen_on, {string, emqx_listeners:format_listen_on(Port)}},
{acceptors, maps:get(num_acceptors, proplists:get_value(transport_options, Opts, #{}), 0)}, {acceptors, Acceptors},
{max_conns, proplists:get_value(max_connections, Opts)}, {max_conns, proplists:get_value(max_connections, Opts)},
{current_conn, proplists:get_value(all_connections, Opts)}, {current_conn, proplists:get_value(all_connections, Opts)},
{shutdown_count, []}], {shutdown_count, []}],
@ -483,7 +581,8 @@ listeners([]) ->
end, ranch:info()); end, ranch:info());
listeners(["stop", Name = "http" ++ _N | _MaybePort]) -> listeners(["stop", Name = "http" ++ _N | _MaybePort]) ->
%% _MaybePort is to be backward compatible, to stop http listener, there is no need for the port number %% _MaybePort is to be backward compatible, to stop http listener,
%% there is no need for the port number
case minirest:stop_http(list_to_atom(Name)) of case minirest:stop_http(list_to_atom(Name)) of
ok -> ok ->
emqx_ctl:print("Stop ~s listener successfully.~n", [Name]); emqx_ctl:print("Stop ~s listener successfully.~n", [Name]);
@ -564,7 +663,8 @@ data(["import", Filename, "--env", Env]) ->
{error, unsupported_version} -> {error, unsupported_version} ->
emqx_ctl:print("The emqx data import failed: Unsupported version.~n"); emqx_ctl:print("The emqx data import failed: Unsupported version.~n");
{error, Reason} -> {error, Reason} ->
emqx_ctl:print("The emqx data import failed: ~0p while reading ~s.~n", [Reason, Filename]) emqx_ctl:print("The emqx data import failed: ~0p while reading ~s.~n",
[Reason, Filename])
end; end;
data(_) -> data(_) ->
@ -657,15 +757,19 @@ print({client, {ClientId, ChanPid}}) ->
maps:with([created_at], Session)]), maps:with([created_at], Session)]),
InfoKeys = [clientid, username, peername, InfoKeys = [clientid, username, peername,
clean_start, keepalive, expiry_interval, clean_start, keepalive, expiry_interval,
subscriptions_cnt, inflight_cnt, awaiting_rel_cnt, send_msg, mqueue_len, mqueue_dropped, subscriptions_cnt, inflight_cnt, awaiting_rel_cnt,
connected, created_at, connected_at] ++ case maps:is_key(disconnected_at, Info) of send_msg, mqueue_len, mqueue_dropped,
connected, created_at, connected_at] ++
case maps:is_key(disconnected_at, Info) of
true -> [disconnected_at]; true -> [disconnected_at];
false -> [] false -> []
end, end,
emqx_ctl:print("Client(~s, username=~s, peername=~s, " emqx_ctl:print("Client(~s, username=~s, peername=~s, "
"clean_start=~s, keepalive=~w, session_expiry_interval=~w, " "clean_start=~s, keepalive=~w, session_expiry_interval=~w, "
"subscriptions=~w, inflight=~w, awaiting_rel=~w, delivered_msgs=~w, enqueued_msgs=~w, dropped_msgs=~w, " "subscriptions=~w, inflight=~w, awaiting_rel=~w, "
"connected=~s, created_at=~w, connected_at=~w" ++ case maps:is_key(disconnected_at, Info) of "delivered_msgs=~w, enqueued_msgs=~w, dropped_msgs=~w, "
"connected=~s, created_at=~w, connected_at=~w" ++
case maps:is_key(disconnected_at, Info) of
true -> ", disconnected_at=~w)~n"; true -> ", disconnected_at=~w)~n";
false -> ")~n" false -> ")~n"
end, end,
@ -721,3 +825,7 @@ restart_http_listener(Scheme, AppName) ->
http_mod_name(emqx_management) -> emqx_mgmt_http; http_mod_name(emqx_management) -> emqx_mgmt_http;
http_mod_name(Name) -> Name. http_mod_name(Name) -> Name.
print_app_info({AppId, AppSecret, Name, Desc, Status, Expired}) ->
emqx_ctl:print("app_id: ~s, secret: ~s, name: ~s, desc: ~s, status: ~s, expired: ~p~n",
[AppId, AppSecret, Name, Desc, Status, Expired]).

View File

@ -237,10 +237,12 @@ import_resource(#{<<"id">> := Id,
config => Config, config => Config,
created_at => NCreatedAt, created_at => NCreatedAt,
description => Desc}). description => Desc}).
import_resources_and_rules(Resources, Rules, FromVersion) import_resources_and_rules(Resources, Rules, FromVersion)
when FromVersion =:= "4.0" orelse when FromVersion =:= "4.0" orelse
FromVersion =:= "4.1" orelse FromVersion =:= "4.1" orelse
FromVersion =:= "4.2" -> FromVersion =:= "4.2" orelse
FromVersion =:= "4.3" ->
Configs = lists:foldl(fun compatible_version/2 , [], Resources), Configs = lists:foldl(fun compatible_version/2 , [], Resources),
lists:foreach(fun(#{<<"actions">> := Actions} = Rule) -> lists:foreach(fun(#{<<"actions">> := Actions} = Rule) ->
NActions = apply_new_config(Actions, Configs), NActions = apply_new_config(Actions, Configs),
@ -305,6 +307,17 @@ compatible_version(#{<<"id">> := ID,
{ok, _Resource} = import_resource(Resource#{<<"config">> := Cfg}), {ok, _Resource} = import_resource(Resource#{<<"config">> := Cfg}),
NHeaders = maps:put(<<"content-type">>, ContentType, covert_empty_headers(Headers)), NHeaders = maps:put(<<"content-type">>, ContentType, covert_empty_headers(Headers)),
[{ID, #{headers => NHeaders, method => Method}} | Acc]; [{ID, #{headers => NHeaders, method => Method}} | Acc];
compatible_version(#{<<"id">> := ID,
<<"type">> := Type,
<<"config">> := Config} = Resource, Acc)
when Type =:= <<"backend_mongo_single">>
orelse Type =:= <<"backend_mongo_sharded">>
orelse Type =:= <<"backend_mongo_rs">> ->
NewConfig = maps:merge(#{<<"srv_record">> => false}, Config),
{ok, _Resource} = import_resource(Resource#{<<"config">> := NewConfig}),
[{ID, NewConfig} | Acc];
% normal version % normal version
compatible_version(Resource, Acc) -> compatible_version(Resource, Acc) ->
{ok, _Resource} = import_resource(Resource), {ok, _Resource} = import_resource(Resource),
@ -527,6 +540,7 @@ import_modules(Modules) ->
undefined -> undefined ->
ok; ok;
_ -> _ ->
NModules = migrate_modules(Modules),
lists:foreach(fun(#{<<"id">> := Id, lists:foreach(fun(#{<<"id">> := Id,
<<"type">> := Type, <<"type">> := Type,
<<"config">> := Config, <<"config">> := Config,
@ -534,9 +548,31 @@ import_modules(Modules) ->
<<"created_at">> := CreatedAt, <<"created_at">> := CreatedAt,
<<"description">> := Description}) -> <<"description">> := Description}) ->
_ = emqx_modules:import_module({Id, any_to_atom(Type), Config, Enabled, CreatedAt, Description}) _ = emqx_modules:import_module({Id, any_to_atom(Type), Config, Enabled, CreatedAt, Description})
end, Modules) end, NModules)
end. end.
migrate_modules(Modules) ->
migrate_modules(Modules, []).
migrate_modules([], Acc) ->
lists:reverse(Acc);
migrate_modules([#{<<"type">> := <<"mongo_authentication">>,
<<"config">> := Config} = Module | More], Acc) ->
WMode = case maps:get(<<"w_mode">>, Config, <<"unsafe">>) of
<<"undef">> -> <<"unsafe">>;
Other -> Other
end,
RMode = case maps:get(<<"r_mode">>, Config, <<"master">>) of
<<"undef">> -> <<"master">>;
<<"slave-ok">> -> <<"slave_ok">>;
Other0 -> Other0
end,
NConfig = Config#{<<"srv_record">> => false,
<<"w_mode">> => WMode,
<<"r_mode">> => RMode},
migrate_modules(More, [Module#{<<"config">> => NConfig} | Acc]);
migrate_modules([Module | More], Acc) ->
migrate_modules(More, [Module | Acc]).
import_schemas(Schemas) -> import_schemas(Schemas) ->
case ets:info(emqx_schema) of case ets:info(emqx_schema) of
@ -697,6 +733,8 @@ is_version_supported2("4.1") ->
true; true;
is_version_supported2("4.3") -> is_version_supported2("4.3") ->
true; true;
is_version_supported2("4.4") ->
true;
is_version_supported2(Version) -> is_version_supported2(Version) ->
case re:run(Version, "^4.[02].\\d+$", [{capture, none}]) of case re:run(Version, "^4.[02].\\d+$", [{capture, none}]) of
match -> match ->

View File

@ -45,6 +45,7 @@ groups() ->
t_vm_cmd, t_vm_cmd,
t_plugins_cmd, t_plugins_cmd,
t_trace_cmd, t_trace_cmd,
t_traces_cmd,
t_broker_cmd, t_broker_cmd,
t_router_cmd, t_router_cmd,
t_subscriptions_cmd, t_subscriptions_cmd,
@ -64,6 +65,23 @@ init_per_suite(Config) ->
end_per_suite(_Config) -> end_per_suite(_Config) ->
emqx_ct_helpers:stop_apps(apps()). emqx_ct_helpers:stop_apps(apps()).
init_per_testcase(t_plugins_cmd, Config) ->
meck:new(emqx_plugins, [non_strict, passthrough]),
meck:expect(emqx_plugins, load, fun(_) -> ok end),
meck:expect(emqx_plugins, unload, fun(_) -> ok end),
meck:expect(emqx_plugins, reload, fun(_) -> ok end),
mock_print(),
Config;
init_per_testcase(_Case, Config) ->
mock_print(),
Config.
end_per_testcase(t_plugins_cmd, _Config) ->
meck:unload(emqx_plugins),
unmock_print();
end_per_testcase(_Case, _Config) ->
unmock_print().
t_app(_Config) -> t_app(_Config) ->
{ok, AppSecret} = emqx_mgmt_auth:add_app(<<"app_id">>, <<"app_name">>), {ok, AppSecret} = emqx_mgmt_auth:add_app(<<"app_id">>, <<"app_name">>),
?assert(emqx_mgmt_auth:is_authorized(<<"app_id">>, AppSecret)), ?assert(emqx_mgmt_auth:is_authorized(<<"app_id">>, AppSecret)),
@ -96,7 +114,6 @@ t_app(_Config) ->
ok. ok.
t_log_cmd(_) -> t_log_cmd(_) ->
mock_print(),
lists:foreach(fun(Level) -> lists:foreach(fun(Level) ->
emqx_mgmt_cli:log(["primary-level", Level]), emqx_mgmt_cli:log(["primary-level", Level]),
?assertEqual(Level ++ "\n", emqx_mgmt_cli:log(["primary-level"])) ?assertEqual(Level ++ "\n", emqx_mgmt_cli:log(["primary-level"]))
@ -109,12 +126,9 @@ t_log_cmd(_) ->
?assertEqual(Level ++ "\n", emqx_mgmt_cli:log(["handlers", "set-level", ?assertEqual(Level ++ "\n", emqx_mgmt_cli:log(["handlers", "set-level",
atom_to_list(Id), Level])) atom_to_list(Id), Level]))
end, ?LOG_LEVELS) end, ?LOG_LEVELS)
|| #{id := Id} <- emqx_logger:get_log_handlers()], || #{id := Id} <- emqx_logger:get_log_handlers()].
meck:unload().
t_mgmt_cmd(_) -> t_mgmt_cmd(_) ->
% ct:pal("start testing the mgmt command"),
mock_print(),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:mgmt( ?assertMatch({match, _}, re:run(emqx_mgmt_cli:mgmt(
["lookup", "emqx_appid"]), "Not Found.")), ["lookup", "emqx_appid"]), "Not Found.")),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:mgmt( ?assertMatch({match, _}, re:run(emqx_mgmt_cli:mgmt(
@ -127,28 +141,19 @@ t_mgmt_cmd(_) ->
["update", "emqx_appid", "ts"]), "update successfully")), ["update", "emqx_appid", "ts"]), "update successfully")),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:mgmt( ?assertMatch({match, _}, re:run(emqx_mgmt_cli:mgmt(
["delete", "emqx_appid"]), "ok")), ["delete", "emqx_appid"]), "ok")),
ok = emqx_mgmt_cli:mgmt(["list"]), ok = emqx_mgmt_cli:mgmt(["list"]).
meck:unload().
t_status_cmd(_) -> t_status_cmd(_) ->
% ct:pal("start testing status command"),
mock_print(),
%% init internal status seems to be always 'starting' when running ct tests %% init internal status seems to be always 'starting' when running ct tests
?assertMatch({match, _}, re:run(emqx_mgmt_cli:status([]), "Node\s.*@.*\sis\sstart(ed|ing)")), ?assertMatch({match, _}, re:run(emqx_mgmt_cli:status([]), "Node\s.*@.*\sis\sstart(ed|ing)")).
meck:unload().
t_broker_cmd(_) -> t_broker_cmd(_) ->
% ct:pal("start testing the broker command"),
mock_print(),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:broker([]), "sysdescr")), ?assertMatch({match, _}, re:run(emqx_mgmt_cli:broker([]), "sysdescr")),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:broker(["stats"]), "subscriptions.shared")), ?assertMatch({match, _}, re:run(emqx_mgmt_cli:broker(["stats"]), "subscriptions.shared")),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:broker(["metrics"]), "bytes.sent")), ?assertMatch({match, _}, re:run(emqx_mgmt_cli:broker(["metrics"]), "bytes.sent")),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:broker([undefined]), "broker")), ?assertMatch({match, _}, re:run(emqx_mgmt_cli:broker([undefined]), "broker")).
meck:unload().
t_clients_cmd(_) -> t_clients_cmd(_) ->
% ct:pal("start testing the client command"),
mock_print(),
process_flag(trap_exit, true), process_flag(trap_exit, true),
{ok, T} = emqtt:start_link([{clientid, <<"client12">>}, {ok, T} = emqtt:start_link([{clientid, <<"client12">>},
{username, <<"testuser1">>}, {username, <<"testuser1">>},
@ -164,7 +169,6 @@ t_clients_cmd(_) ->
receive receive
{'EXIT', T, _} -> {'EXIT', T, _} ->
ok ok
% ct:pal("Connection closed: ~p~n", [Reason])
after after
500 -> 500 ->
erlang:error("Client is not kick") erlang:error("Client is not kick")
@ -179,10 +183,11 @@ t_clients_cmd(_) ->
{ok, Connack, <<>>, _} = raw_recv_pase(Bin), {ok, Connack, <<>>, _} = raw_recv_pase(Bin),
timer:sleep(300), timer:sleep(300),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:clients(["show", "client13"]), "client13")), ?assertMatch({match, _}, re:run(emqx_mgmt_cli:clients(["show", "client13"]), "client13")),
meck:unload().
% emqx_mgmt_cli:clients(["kick", "client13"]), % emqx_mgmt_cli:clients(["kick", "client13"]),
% timer:sleep(500), % timer:sleep(500),
% ?assertMatch({match, _}, re:run(emqx_mgmt_cli:clients(["show", "client13"]), "Not Found")). % ?assertMatch({match, _}, re:run(emqx_mgmt_cli:clients(["show", "client13"]), "Not Found")).
ok.
raw_recv_pase(Packet) -> raw_recv_pase(Packet) ->
emqx_frame:parse(Packet). emqx_frame:parse(Packet).
@ -191,8 +196,6 @@ raw_send_serialize(Packet) ->
emqx_frame:serialize(Packet). emqx_frame:serialize(Packet).
t_vm_cmd(_) -> t_vm_cmd(_) ->
% ct:pal("start testing the vm command"),
mock_print(),
[[?assertMatch({match, _}, re:run(Result, Name)) [[?assertMatch({match, _}, re:run(Result, Name))
|| Result <- emqx_mgmt_cli:vm([Name])] || Result <- emqx_mgmt_cli:vm([Name])]
|| Name <- ["load", "memory", "process", "io", "ports"]], || Name <- ["load", "memory", "process", "io", "ports"]],
@ -205,12 +208,9 @@ t_vm_cmd(_) ->
[?assertMatch({match, _}, re:run(Result, "io")) [?assertMatch({match, _}, re:run(Result, "io"))
|| Result <- emqx_mgmt_cli:vm(["io"])], || Result <- emqx_mgmt_cli:vm(["io"])],
[?assertMatch({match, _}, re:run(Result, "ports")) [?assertMatch({match, _}, re:run(Result, "ports"))
|| Result <- emqx_mgmt_cli:vm(["ports"])], || Result <- emqx_mgmt_cli:vm(["ports"])].
unmock_print().
t_trace_cmd(_) -> t_trace_cmd(_) ->
% ct:pal("start testing the trace command"),
mock_print(),
logger:set_primary_config(level, debug), logger:set_primary_config(level, debug),
{ok, T} = emqtt:start_link([{clientid, <<"client">>}, {ok, T} = emqtt:start_link([{clientid, <<"client">>},
{username, <<"testuser">>}, {username, <<"testuser">>},
@ -237,12 +237,34 @@ t_trace_cmd(_) ->
Trace7 = emqx_mgmt_cli:trace(["start", "topic", "a/b/c", Trace7 = emqx_mgmt_cli:trace(["start", "topic", "a/b/c",
"log/clientid_trace.log", "error"]), "log/clientid_trace.log", "error"]),
?assertMatch({match, _}, re:run(Trace7, "successfully")), ?assertMatch({match, _}, re:run(Trace7, "successfully")),
logger:set_primary_config(level, error), logger:set_primary_config(level, error).
unmock_print().
t_traces_cmd(_) ->
emqx_trace:create_table(),
Count1 = emqx_mgmt_cli:traces(["list"]),
?assertEqual(0, Count1),
Error1 = emqx_mgmt_cli:traces(["start", "test-name", "client", "clientid-dev"]),
?assertMatch({match, _}, re:run(Error1, "Tracer module not started")),
emqx_trace:start_link(),
Trace1 = emqx_mgmt_cli:traces(["start", "test-name", "client", "clientid-dev"]),
?assertMatch({match, _}, re:run(Trace1, "successfully")),
Count2 = emqx_mgmt_cli:traces(["list"]),
?assertEqual(1, Count2),
Error2 = emqx_mgmt_cli:traces(["start", "test-name", "client", "clientid-dev"]),
?assertMatch({match, _}, re:run(Error2, "already_existed")),
Trace2 = emqx_mgmt_cli:traces(["stop", "test-name"]),
?assertMatch({match, _}, re:run(Trace2, "successfully")),
Count3 = emqx_mgmt_cli:traces(["list"]),
?assertEqual(1, Count3),
Trace3 = emqx_mgmt_cli:traces(["delete", "test-name"]),
?assertMatch({match, _}, re:run(Trace3, "successfully")),
Count4 = emqx_mgmt_cli:traces(["list"]),
?assertEqual(0, Count4),
Error3 = emqx_mgmt_cli:traces(["delete", "test-name"]),
?assertMatch({match, _}, re:run(Error3, "not_found")),
ok.
t_router_cmd(_) -> t_router_cmd(_) ->
% ct:pal("start testing the router command"),
mock_print(),
{ok, T} = emqtt:start_link([{clientid, <<"client1">>}, {ok, T} = emqtt:start_link([{clientid, <<"client1">>},
{username, <<"testuser1">>}, {username, <<"testuser1">>},
{password, <<"pass1">>} {password, <<"pass1">>}
@ -257,12 +279,9 @@ t_router_cmd(_) ->
emqtt:connect(T1), emqtt:connect(T1),
emqtt:subscribe(T1, <<"a/b/c/d">>), emqtt:subscribe(T1, <<"a/b/c/d">>),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:routes(["list"]), "a/b/c | a/b/c")), ?assertMatch({match, _}, re:run(emqx_mgmt_cli:routes(["list"]), "a/b/c | a/b/c")),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:routes(["show", "a/b/c"]), "a/b/c")), ?assertMatch({match, _}, re:run(emqx_mgmt_cli:routes(["show", "a/b/c"]), "a/b/c")).
unmock_print().
t_subscriptions_cmd(_) -> t_subscriptions_cmd(_) ->
% ct:pal("Start testing the subscriptions command"),
mock_print(),
{ok, T3} = emqtt:start_link([{clientid, <<"client">>}, {ok, T3} = emqtt:start_link([{clientid, <<"client">>},
{username, <<"testuser">>}, {username, <<"testuser">>},
{password, <<"pass">>} {password, <<"pass">>}
@ -273,22 +292,18 @@ t_subscriptions_cmd(_) ->
[?assertMatch({match, _} , re:run(Result, "b/b/c")) [?assertMatch({match, _} , re:run(Result, "b/b/c"))
|| Result <- emqx_mgmt_cli:subscriptions(["show", <<"client">>])], || Result <- emqx_mgmt_cli:subscriptions(["show", <<"client">>])],
?assertEqual(emqx_mgmt_cli:subscriptions(["add", "client", "b/b/c", "0"]), "ok~n"), ?assertEqual(emqx_mgmt_cli:subscriptions(["add", "client", "b/b/c", "0"]), "ok~n"),
?assertEqual(emqx_mgmt_cli:subscriptions(["del", "client", "b/b/c"]), "ok~n"), ?assertEqual(emqx_mgmt_cli:subscriptions(["del", "client", "b/b/c"]), "ok~n").
unmock_print().
t_listeners_cmd_old(_) -> t_listeners_cmd_old(_) ->
ok = emqx_listeners:ensure_all_started(), ok = emqx_listeners:ensure_all_started(),
mock_print(),
?assertEqual(emqx_mgmt_cli:listeners([]), ok), ?assertEqual(emqx_mgmt_cli:listeners([]), ok),
?assertEqual( ?assertEqual(
"Stop mqtt:wss:external listener on 0.0.0.0:8084 successfully.\n", "Stop mqtt:wss:external listener on 0.0.0.0:8084 successfully.\n",
emqx_mgmt_cli:listeners(["stop", "wss", "8084"]) emqx_mgmt_cli:listeners(["stop", "wss", "8084"])
), ).
unmock_print().
t_listeners_cmd_new(_) -> t_listeners_cmd_new(_) ->
ok = emqx_listeners:ensure_all_started(), ok = emqx_listeners:ensure_all_started(),
mock_print(),
?assertEqual(emqx_mgmt_cli:listeners([]), ok), ?assertEqual(emqx_mgmt_cli:listeners([]), ok),
?assertEqual( ?assertEqual(
"Stop mqtt:wss:external listener on 0.0.0.0:8084 successfully.\n", "Stop mqtt:wss:external listener on 0.0.0.0:8084 successfully.\n",
@ -304,16 +319,11 @@ t_listeners_cmd_new(_) ->
), ),
?assertEqual( ?assertEqual(
emqx_mgmt_cli:listeners(["restart", "bad:listener:identifier"]), emqx_mgmt_cli:listeners(["restart", "bad:listener:identifier"]),
"Failed to restart bad:listener:identifier listener: {no_such_listener,\"bad:listener:identifier\"}\n" "Failed to restart bad:listener:identifier listener: "
), "{no_such_listener,\"bad:listener:identifier\"}\n"
unmock_print(). ).
t_plugins_cmd(_) -> t_plugins_cmd(_) ->
mock_print(),
meck:new(emqx_plugins, [non_strict, passthrough]),
meck:expect(emqx_plugins, load, fun(_) -> ok end),
meck:expect(emqx_plugins, unload, fun(_) -> ok end),
meck:expect(emqx_plugins, reload, fun(_) -> ok end),
?assertEqual(emqx_mgmt_cli:plugins(["list"]), ok), ?assertEqual(emqx_mgmt_cli:plugins(["list"]), ok),
?assertEqual( ?assertEqual(
emqx_mgmt_cli:plugins(["unload", "emqx_auth_mnesia"]), emqx_mgmt_cli:plugins(["unload", "emqx_auth_mnesia"]),
@ -326,11 +336,9 @@ t_plugins_cmd(_) ->
?assertEqual( ?assertEqual(
emqx_mgmt_cli:plugins(["unload", "emqx_management"]), emqx_mgmt_cli:plugins(["unload", "emqx_management"]),
"Plugin emqx_management can not be unloaded.~n" "Plugin emqx_management can not be unloaded.~n"
), ).
unmock_print().
t_cli(_) -> t_cli(_) ->
mock_print(),
?assertMatch({match, _}, re:run(emqx_mgmt_cli:status([""]), "status")), ?assertMatch({match, _}, re:run(emqx_mgmt_cli:status([""]), "status")),
[?assertMatch({match, _}, re:run(Value, "broker")) [?assertMatch({match, _}, re:run(Value, "broker"))
|| Value <- emqx_mgmt_cli:broker([""])], || Value <- emqx_mgmt_cli:broker([""])],
@ -352,9 +360,10 @@ t_cli(_) ->
|| Value <- emqx_mgmt_cli:mnesia([""])], || Value <- emqx_mgmt_cli:mnesia([""])],
[?assertMatch({match, _}, re:run(Value, "trace")) [?assertMatch({match, _}, re:run(Value, "trace"))
|| Value <- emqx_mgmt_cli:trace([""])], || Value <- emqx_mgmt_cli:trace([""])],
[?assertMatch({match, _}, re:run(Value, "traces"))
|| Value <- emqx_mgmt_cli:traces([""])],
[?assertMatch({match, _}, re:run(Value, "mgmt")) [?assertMatch({match, _}, re:run(Value, "mgmt"))
|| Value <- emqx_mgmt_cli:mgmt([""])], || Value <- emqx_mgmt_cli:mgmt([""])].
unmock_print().
mock_print() -> mock_print() ->
catch meck:unload(emqx_ctl), catch meck:unload(emqx_ctl),

View File

@ -29,6 +29,8 @@
-define(HOST, "http://127.0.0.1:8081/"). -define(HOST, "http://127.0.0.1:8081/").
-elvis([{elvis_style, line_length, disable}]).
-define(API_VERSION, "v4"). -define(API_VERSION, "v4").
-define(BASE_PATH, "api"). -define(BASE_PATH, "api").
@ -76,30 +78,40 @@ t_alarms(_) ->
?assert(is_existing(alarm2, emqx_alarm:get_alarms(activated))), ?assert(is_existing(alarm2, emqx_alarm:get_alarms(activated))),
{ok, Return1} = request_api(get, api_path(["alarms/activated"]), auth_header_()), {ok, Return1} = request_api(get, api_path(["alarms/activated"]), auth_header_()),
?assert(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return1))))), ?assert(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>,
?assert(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return1))))), lists:nth(1, get(<<"data">>, Return1))))),
?assert(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>,
lists:nth(1, get(<<"data">>, Return1))))),
emqx_alarm:deactivate(alarm1), emqx_alarm:deactivate(alarm1),
{ok, Return2} = request_api(get, api_path(["alarms"]), auth_header_()), {ok, Return2} = request_api(get, api_path(["alarms"]), auth_header_()),
?assert(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return2))))), ?assert(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>,
?assert(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return2))))), lists:nth(1, get(<<"data">>, Return2))))),
?assert(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>,
lists:nth(1, get(<<"data">>, Return2))))),
{ok, Return3} = request_api(get, api_path(["alarms/deactivated"]), auth_header_()), {ok, Return3} = request_api(get, api_path(["alarms/deactivated"]), auth_header_()),
?assert(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return3))))), ?assert(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>,
?assertNot(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return3))))), lists:nth(1, get(<<"data">>, Return3))))),
?assertNot(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>,
lists:nth(1, get(<<"data">>, Return3))))),
emqx_alarm:deactivate(alarm2), emqx_alarm:deactivate(alarm2),
{ok, Return4} = request_api(get, api_path(["alarms/deactivated"]), auth_header_()), {ok, Return4} = request_api(get, api_path(["alarms/deactivated"]), auth_header_()),
?assert(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return4))))), ?assert(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>,
?assert(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return4))))), lists:nth(1, get(<<"data">>, Return4))))),
?assert(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>,
lists:nth(1, get(<<"data">>, Return4))))),
{ok, _} = request_api(delete, api_path(["alarms/deactivated"]), auth_header_()), {ok, _} = request_api(delete, api_path(["alarms/deactivated"]), auth_header_()),
{ok, Return5} = request_api(get, api_path(["alarms/deactivated"]), auth_header_()), {ok, Return5} = request_api(get, api_path(["alarms/deactivated"]), auth_header_()),
?assertNot(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return5))))), ?assertNot(lookup_alarm(<<"alarm1">>, maps:get(<<"alarms">>,
?assertNot(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>, lists:nth(1, get(<<"data">>, Return5))))). lists:nth(1, get(<<"data">>, Return5))))),
?assertNot(lookup_alarm(<<"alarm2">>, maps:get(<<"alarms">>,
lists:nth(1, get(<<"data">>, Return5))))).
t_apps(_) -> t_apps(_) ->
AppId = <<"123456">>, AppId = <<"123456">>,
@ -153,7 +165,8 @@ t_banned(_) ->
[Banned] = get(<<"data">>, Result), [Banned] = get(<<"data">>, Result),
?assertEqual(Who, maps:get(<<"who">>, Banned)), ?assertEqual(Who, maps:get(<<"who">>, Banned)),
{ok, _} = request_api(delete, api_path(["banned", "clientid", binary_to_list(Who)]), auth_header_()), {ok, _} = request_api(delete, api_path(["banned", "clientid", binary_to_list(Who)]),
auth_header_()),
{ok, Result2} = request_api(get, api_path(["banned"]), auth_header_()), {ok, Result2} = request_api(get, api_path(["banned"]), auth_header_()),
?assertEqual([], get(<<"data">>, Result2)). ?assertEqual([], get(<<"data">>, Result2)).
@ -205,40 +218,50 @@ t_clients(_) ->
meck:new(emqx_mgmt, [passthrough, no_history]), meck:new(emqx_mgmt, [passthrough, no_history]),
meck:expect(emqx_mgmt, kickout_client, 1, fun(_) -> {error, undefined} end), meck:expect(emqx_mgmt, kickout_client, 1, fun(_) -> {error, undefined} end),
{ok, MeckRet1} = request_api(delete, api_path(["clients", binary_to_list(ClientId1)]), auth_header_()), {ok, MeckRet1} = request_api(delete, api_path(["clients", binary_to_list(ClientId1)]),
auth_header_()),
?assertEqual(?ERROR1, get(<<"code">>, MeckRet1)), ?assertEqual(?ERROR1, get(<<"code">>, MeckRet1)),
meck:expect(emqx_mgmt, clean_acl_cache, 1, fun(_) -> {error, undefined} end), meck:expect(emqx_mgmt, clean_acl_cache, 1, fun(_) -> {error, undefined} end),
{ok, MeckRet2} = request_api(delete, api_path(["clients", binary_to_list(ClientId1), "acl_cache"]), auth_header_()), {ok, MeckRet2} = request_api(delete,
api_path(["clients", binary_to_list(ClientId1), "acl_cache"]), auth_header_()),
?assertEqual(?ERROR1, get(<<"code">>, MeckRet2)), ?assertEqual(?ERROR1, get(<<"code">>, MeckRet2)),
meck:expect(emqx_mgmt, list_acl_cache, 1, fun(_) -> {error, undefined} end), meck:expect(emqx_mgmt, list_acl_cache, 1, fun(_) -> {error, undefined} end),
{ok, MeckRet3} = request_api(get, api_path(["clients", binary_to_list(ClientId2), "acl_cache"]), auth_header_()), {ok, MeckRet3} = request_api(get,
api_path(["clients", binary_to_list(ClientId2), "acl_cache"]), auth_header_()),
?assertEqual(?ERROR1, get(<<"code">>, MeckRet3)), ?assertEqual(?ERROR1, get(<<"code">>, MeckRet3)),
meck:unload(emqx_mgmt), meck:unload(emqx_mgmt),
{ok, Ok} = request_api(delete, api_path(["clients", binary_to_list(ClientId1)]), auth_header_()), {ok, Ok} = request_api(delete,
api_path(["clients", binary_to_list(ClientId1)]), auth_header_()),
?assertEqual(?SUCCESS, get(<<"code">>, Ok)), ?assertEqual(?SUCCESS, get(<<"code">>, Ok)),
timer:sleep(300), timer:sleep(300),
{ok, Ok1} = request_api(delete, api_path(["clients", binary_to_list(ClientId1)]), auth_header_()), {ok, Ok1} = request_api(delete,
api_path(["clients", binary_to_list(ClientId1)]), auth_header_()),
?assertEqual(?SUCCESS, get(<<"code">>, Ok1)), ?assertEqual(?SUCCESS, get(<<"code">>, Ok1)),
{ok, Clients6} = request_api(get, api_path(["clients"]), "_limit=100&_page=1", auth_header_()), {ok, Clients6} = request_api(get,
api_path(["clients"]), "_limit=100&_page=1", auth_header_()),
?assertEqual(1, maps:get(<<"count">>, get(<<"meta">>, Clients6))), ?assertEqual(1, maps:get(<<"count">>, get(<<"meta">>, Clients6))),
{ok, NotFound1} = request_api(get, api_path(["clients", binary_to_list(ClientId1), "acl_cache"]), auth_header_()), {ok, NotFound1} = request_api(get,
api_path(["clients", binary_to_list(ClientId1), "acl_cache"]), auth_header_()),
?assertEqual(?ERROR12, get(<<"code">>, NotFound1)), ?assertEqual(?ERROR12, get(<<"code">>, NotFound1)),
{ok, NotFound2} = request_api(delete, api_path(["clients", binary_to_list(ClientId1), "acl_cache"]), auth_header_()), {ok, NotFound2} = request_api(delete,
api_path(["clients", binary_to_list(ClientId1), "acl_cache"]), auth_header_()),
?assertEqual(?ERROR12, get(<<"code">>, NotFound2)), ?assertEqual(?ERROR12, get(<<"code">>, NotFound2)),
{ok, EmptyAclCache} = request_api(get, api_path(["clients", binary_to_list(ClientId2), "acl_cache"]), auth_header_()), {ok, EmptyAclCache} = request_api(get,
api_path(["clients", binary_to_list(ClientId2), "acl_cache"]), auth_header_()),
?assertEqual(0, length(get(<<"data">>, EmptyAclCache))), ?assertEqual(0, length(get(<<"data">>, EmptyAclCache))),
{ok, Ok1} = request_api(delete, api_path(["clients", binary_to_list(ClientId2), "acl_cache"]), auth_header_()), {ok, Ok1} = request_api(delete,
api_path(["clients", binary_to_list(ClientId2), "acl_cache"]), auth_header_()),
?assertEqual(?SUCCESS, get(<<"code">>, Ok1)). ?assertEqual(?SUCCESS, get(<<"code">>, Ok1)).
receive_exit(0) -> receive_exit(0) ->
@ -257,7 +280,8 @@ receive_exit(Count) ->
t_listeners(_) -> t_listeners(_) ->
{ok, _} = request_api(get, api_path(["listeners"]), auth_header_()), {ok, _} = request_api(get, api_path(["listeners"]), auth_header_()),
{ok, _} = request_api(get, api_path(["nodes", atom_to_list(node()), "listeners"]), auth_header_()), {ok, _} = request_api(get,
api_path(["nodes", atom_to_list(node()), "listeners"]), auth_header_()),
meck:new(emqx_mgmt, [passthrough, no_history]), meck:new(emqx_mgmt, [passthrough, no_history]),
meck:expect(emqx_mgmt, list_listeners, 0, fun() -> [{node(), {error, undefined}}] end), meck:expect(emqx_mgmt, list_listeners, 0, fun() -> [{node(), {error, undefined}}] end),
{ok, Return} = request_api(get, api_path(["listeners"]), auth_header_()), {ok, Return} = request_api(get, api_path(["listeners"]), auth_header_()),
@ -268,10 +292,12 @@ t_listeners(_) ->
t_metrics(_) -> t_metrics(_) ->
{ok, _} = request_api(get, api_path(["metrics"]), auth_header_()), {ok, _} = request_api(get, api_path(["metrics"]), auth_header_()),
{ok, _} = request_api(get, api_path(["nodes", atom_to_list(node()), "metrics"]), auth_header_()), {ok, _} = request_api(get,
api_path(["nodes", atom_to_list(node()), "metrics"]), auth_header_()),
meck:new(emqx_mgmt, [passthrough, no_history]), meck:new(emqx_mgmt, [passthrough, no_history]),
meck:expect(emqx_mgmt, get_metrics, 1, fun(_) -> {error, undefined} end), meck:expect(emqx_mgmt, get_metrics, 1, fun(_) -> {error, undefined} end),
{ok, "{\"message\":\"undefined\"}"} = request_api(get, api_path(["nodes", atom_to_list(node()), "metrics"]), auth_header_()), {ok, "{\"message\":\"undefined\"}"} =
request_api(get, api_path(["nodes", atom_to_list(node()), "metrics"]), auth_header_()),
meck:unload(emqx_mgmt). meck:unload(emqx_mgmt).
t_nodes(_) -> t_nodes(_) ->
@ -348,7 +374,8 @@ t_acl_cache(_) ->
{ok, _} = emqtt:connect(C1), {ok, _} = emqtt:connect(C1),
{ok, _, _} = emqtt:subscribe(C1, Topic, 2), {ok, _, _} = emqtt:subscribe(C1, Topic, 2),
%% get acl cache, should not be empty %% get acl cache, should not be empty
{ok, Result} = request_api(get, api_path(["clients", binary_to_list(ClientId), "acl_cache"]), [], auth_header_()), {ok, Result} = request_api(get,
api_path(["clients", binary_to_list(ClientId), "acl_cache"]), [], auth_header_()),
#{<<"code">> := 0, <<"data">> := Caches} = jiffy:decode(list_to_binary(Result), [return_maps]), #{<<"code">> := 0, <<"data">> := Caches} = jiffy:decode(list_to_binary(Result), [return_maps]),
?assert(length(Caches) > 0), ?assert(length(Caches) > 0),
?assertMatch(#{<<"access">> := <<"subscribe">>, ?assertMatch(#{<<"access">> := <<"subscribe">>,
@ -356,11 +383,14 @@ t_acl_cache(_) ->
<<"result">> := <<"allow">>, <<"result">> := <<"allow">>,
<<"updated_time">> := _}, hd(Caches)), <<"updated_time">> := _}, hd(Caches)),
%% clear acl cache %% clear acl cache
{ok, Result2} = request_api(delete, api_path(["clients", binary_to_list(ClientId), "acl_cache"]), [], auth_header_()), {ok, Result2} = request_api(delete,
api_path(["clients", binary_to_list(ClientId), "acl_cache"]), [], auth_header_()),
?assertMatch(#{<<"code">> := 0}, jiffy:decode(list_to_binary(Result2), [return_maps])), ?assertMatch(#{<<"code">> := 0}, jiffy:decode(list_to_binary(Result2), [return_maps])),
%% get acl cache again, after the acl cache is cleared %% get acl cache again, after the acl cache is cleared
{ok, Result3} = request_api(get, api_path(["clients", binary_to_list(ClientId), "acl_cache"]), [], auth_header_()), {ok, Result3} = request_api(get,
#{<<"code">> := 0, <<"data">> := Caches3} = jiffy:decode(list_to_binary(Result3), [return_maps]), api_path(["clients", binary_to_list(ClientId), "acl_cache"]), [], auth_header_()),
#{<<"code">> := 0, <<"data">> := Caches3}
= jiffy:decode(list_to_binary(Result3), [return_maps]),
?assertEqual(0, length(Caches3)), ?assertEqual(0, length(Caches3)),
ok = emqtt:disconnect(C1). ok = emqtt:disconnect(C1).
@ -371,7 +401,7 @@ t_pubsub(_) ->
ClientId = <<"client1">>, ClientId = <<"client1">>,
Options = #{clientid => ClientId, Options = #{clientid => ClientId,
proto_ver => 5}, proto_ver => v5},
Topic = <<"mytopic">>, Topic = <<"mytopic">>,
{ok, C1} = emqtt:start_link(Options), {ok, C1} = emqtt:start_link(Options),
{ok, _} = emqtt:connect(C1), {ok, _} = emqtt:connect(C1),
@ -482,12 +512,15 @@ t_pubsub(_) ->
Topic_list = [<<"mytopic1">>, <<"mytopic2">>], Topic_list = [<<"mytopic1">>, <<"mytopic2">>],
[ {ok, _, [2]} = emqtt:subscribe(C1, Topics, 2) || Topics <- Topic_list], [ {ok, _, [2]} = emqtt:subscribe(C1, Topics, 2) || Topics <- Topic_list],
Body1 = [ #{<<"clientid">> => ClientId, <<"topic">> => Topics, <<"qos">> => 2} || Topics <- Topic_list], Body1 = [ #{<<"clientid">> => ClientId,
<<"topic">> => Topics, <<"qos">> => 2} || Topics <- Topic_list],
{ok, Data1} = request_api(post, api_path(["mqtt/subscribe_batch"]), [], auth_header_(), Body1), {ok, Data1} = request_api(post, api_path(["mqtt/subscribe_batch"]), [], auth_header_(), Body1),
loop(maps:get(<<"data">>, jiffy:decode(list_to_binary(Data1), [return_maps]))), loop(maps:get(<<"data">>, jiffy:decode(list_to_binary(Data1), [return_maps]))),
%% tests publish_batch %% tests publish_batch
Body2 = [ #{<<"clientid">> => ClientId, <<"topic">> => Topics, <<"qos">> => 2, <<"retain">> => <<"false">>, <<"payload">> => #{body => "hello world"}} || Topics <- Topic_list ], Body2 = [ #{<<"clientid">> => ClientId, <<"topic">> => Topics, <<"qos">> => 2,
<<"retain">> => <<"false">>, <<"payload">> => #{body => "hello world"}}
|| Topics <- Topic_list ],
{ok, Data2} = request_api(post, api_path(["mqtt/publish_batch"]), [], auth_header_(), Body2), {ok, Data2} = request_api(post, api_path(["mqtt/publish_batch"]), [], auth_header_(), Body2),
loop(maps:get(<<"data">>, jiffy:decode(list_to_binary(Data2), [return_maps]))), loop(maps:get(<<"data">>, jiffy:decode(list_to_binary(Data2), [return_maps]))),
[ ?assert(receive [ ?assert(receive
@ -499,14 +532,33 @@ t_pubsub(_) ->
%% tests unsubscribe_batch %% tests unsubscribe_batch
Body3 = [#{<<"clientid">> => ClientId, <<"topic">> => Topics} || Topics <- Topic_list], Body3 = [#{<<"clientid">> => ClientId, <<"topic">> => Topics} || Topics <- Topic_list],
{ok, Data3} = request_api(post, api_path(["mqtt/unsubscribe_batch"]), [], auth_header_(), Body3), {ok, Data3} = request_api(post,
api_path(["mqtt/unsubscribe_batch"]), [], auth_header_(), Body3),
loop(maps:get(<<"data">>, jiffy:decode(list_to_binary(Data3), [return_maps]))), loop(maps:get(<<"data">>, jiffy:decode(list_to_binary(Data3), [return_maps]))),
{ok, _, [1]} = emqtt:subscribe(C1, <<"mytopic">>, qos1),
timer:sleep(50),
%% user properties
{ok, Code} = request_api(post, api_path(["mqtt/publish"]), [], auth_header_(),
#{<<"clientid">> => ClientId,
<<"topic">> => <<"mytopic">>,
<<"qos">> => 1,
<<"payload">> => <<"hello world">>,
<<"user_properties">> => #{<<"porp_1">> => <<"porp_1">>}}),
?assert(receive
{publish, #{payload := <<"hello world">>,
properties := #{'User-Property' := [{<<"porp_1">>,<<"porp_1">>}]}}} ->
true
after 100 ->
false
end),
ok = emqtt:disconnect(C1), ok = emqtt:disconnect(C1),
?assertEqual(3, emqx_metrics:val('messages.qos1.received') - Qos1Received), ?assertEqual(4, emqx_metrics:val('messages.qos1.received') - Qos1Received),
?assertEqual(2, emqx_metrics:val('messages.qos2.received') - Qos2Received), ?assertEqual(2, emqx_metrics:val('messages.qos2.received') - Qos2Received),
?assertEqual(5, emqx_metrics:val('messages.received') - Received). ?assertEqual(6, emqx_metrics:val('messages.received') - Received).
loop([]) -> []; loop([]) -> [];
@ -523,7 +575,8 @@ t_routes_and_subscriptions(_) ->
?assertEqual([], get(<<"data">>, NonRoute)), ?assertEqual([], get(<<"data">>, NonRoute)),
{ok, NonSubscription} = request_api(get, api_path(["subscriptions"]), auth_header_()), {ok, NonSubscription} = request_api(get, api_path(["subscriptions"]), auth_header_()),
?assertEqual([], get(<<"data">>, NonSubscription)), ?assertEqual([], get(<<"data">>, NonSubscription)),
{ok, NonSubscription1} = request_api(get, api_path(["nodes", atom_to_list(node()), "subscriptions"]), auth_header_()), {ok, NonSubscription1} = request_api(get,
api_path(["nodes", atom_to_list(node()), "subscriptions"]), auth_header_()),
?assertEqual([], get(<<"data">>, NonSubscription1)), ?assertEqual([], get(<<"data">>, NonSubscription1)),
{ok, NonSubscription2} = request_api(get, {ok, NonSubscription2} = request_api(get,
api_path(["subscriptions", binary_to_list(ClientId)]), api_path(["subscriptions", binary_to_list(ClientId)]),
@ -554,11 +607,14 @@ t_routes_and_subscriptions(_) ->
?assertMatch(#{<<"page">> := 1, <<"limit">> := 10000, <<"hasnext">> := false, <<"count">> := 1}, ?assertMatch(#{<<"page">> := 1, <<"limit">> := 10000, <<"hasnext">> := false, <<"count">> := 1},
get(<<"meta">>, Result3)), get(<<"meta">>, Result3)),
{ok, Result3} = request_api(get, api_path(["nodes", atom_to_list(node()), "subscriptions"]), auth_header_()), {ok, Result3} = request_api(get,
api_path(["nodes", atom_to_list(node()), "subscriptions"]), auth_header_()),
{ok, Result4} = request_api(get, api_path(["subscriptions", binary_to_list(ClientId)]), auth_header_()), {ok, Result4} = request_api(get,
api_path(["subscriptions", binary_to_list(ClientId)]), auth_header_()),
[Subscription] = get(<<"data">>, Result4), [Subscription] = get(<<"data">>, Result4),
{ok, Result4} = request_api(get, api_path(["nodes", atom_to_list(node()), "subscriptions", binary_to_list(ClientId)]) {ok, Result4} = request_api(get,
api_path(["nodes", atom_to_list(node()), "subscriptions", binary_to_list(ClientId)])
, auth_header_()), , auth_header_()),
ok = emqtt:disconnect(C1). ok = emqtt:disconnect(C1).
@ -623,7 +679,8 @@ t_stats(_) ->
{ok, _} = request_api(get, api_path(["nodes", atom_to_list(node()), "stats"]), auth_header_()), {ok, _} = request_api(get, api_path(["nodes", atom_to_list(node()), "stats"]), auth_header_()),
meck:new(emqx_mgmt, [passthrough, no_history]), meck:new(emqx_mgmt, [passthrough, no_history]),
meck:expect(emqx_mgmt, get_stats, 1, fun(_) -> {error, undefined} end), meck:expect(emqx_mgmt, get_stats, 1, fun(_) -> {error, undefined} end),
{ok, Return} = request_api(get, api_path(["nodes", atom_to_list(node()), "stats"]), auth_header_()), {ok, Return} = request_api(get,
api_path(["nodes", atom_to_list(node()), "stats"]), auth_header_()),
?assertEqual(<<"undefined">>, get(<<"message">>, Return)), ?assertEqual(<<"undefined">>, get(<<"message">>, Return)),
meck:unload(emqx_mgmt). meck:unload(emqx_mgmt).
@ -635,10 +692,15 @@ t_data(_) ->
{ok, Data} = request_api(post, api_path(["data","export"]), [], auth_header_(), [#{}]), {ok, Data} = request_api(post, api_path(["data","export"]), [], auth_header_(), [#{}]),
#{<<"filename">> := Filename, <<"node">> := Node} = emqx_ct_http:get_http_data(Data), #{<<"filename">> := Filename, <<"node">> := Node} = emqx_ct_http:get_http_data(Data),
{ok, DataList} = request_api(get, api_path(["data","export"]), auth_header_()), {ok, DataList} = request_api(get, api_path(["data","export"]), auth_header_()),
?assertEqual(true, lists:member(emqx_ct_http:get_http_data(Data), emqx_ct_http:get_http_data(DataList))), ?assertEqual(true,
lists:member(emqx_ct_http:get_http_data(Data), emqx_ct_http:get_http_data(DataList))),
?assertMatch({ok, _}, request_api(post, api_path(["data","import"]), [], auth_header_(), #{<<"filename">> => Filename, <<"node">> => Node})), ?assertMatch({ok, _}, request_api(post,
?assertMatch({ok, _}, request_api(post, api_path(["data","import"]), [], auth_header_(), #{<<"filename">> => Filename})), api_path(["data","import"]), [], auth_header_(),
#{<<"filename">> => Filename, <<"node">> => Node})),
?assertMatch({ok, _},
request_api(post, api_path(["data","import"]), [], auth_header_(),
#{<<"filename">> => Filename})),
application:stop(emqx_rule_engine), application:stop(emqx_rule_engine),
application:stop(emqx_dashboard), application:stop(emqx_dashboard),
ok. ok.
@ -653,10 +715,36 @@ t_data_import_content(_) ->
Dir = emqx:get_env(data_dir), Dir = emqx:get_env(data_dir),
{ok, Bin} = file:read_file(filename:join(Dir, Filename)), {ok, Bin} = file:read_file(filename:join(Dir, Filename)),
Content = emqx_json:decode(Bin), Content = emqx_json:decode(Bin),
?assertMatch({ok, "{\"code\":0}"}, request_api(post, api_path(["data","import"]), [], auth_header_(), Content)), ?assertMatch({ok, "{\"code\":0}"},
request_api(post, api_path(["data","import"]), [], auth_header_(), Content)),
application:stop(emqx_rule_engine), application:stop(emqx_rule_engine),
application:stop(emqx_dashboard). application:stop(emqx_dashboard).
t_keepalive(_Config) ->
application:ensure_all_started(emqx_dashboard),
Username = "user_keepalive",
ClientId = "client_keepalive",
AuthHeader = auth_header_(),
Path = api_path(["clients", ClientId, "keepalive"]),
{ok, NotFound} = request_api(put, Path, "interval=5", AuthHeader, [#{}]),
?assertEqual("{\"message\":\"not_found\",\"code\":112}", NotFound),
{ok, C1} = emqtt:start_link(#{username => Username, clientid => ClientId}),
{ok, _} = emqtt:connect(C1),
{ok, Ok} = request_api(put, Path, "interval=5", AuthHeader, [#{}]),
?assertEqual("{\"code\":0}", Ok),
[Pid] = emqx_cm:lookup_channels(list_to_binary(ClientId)),
#{conninfo := #{keepalive := Keepalive}} = emqx_connection:info(Pid),
?assertEqual(5, Keepalive),
{ok, Error1} = request_api(put, Path, "interval=-1", AuthHeader, [#{}]),
{ok, Error2} = request_api(put, Path, "interval=65536", AuthHeader, [#{}]),
ErrMsg = #{<<"code">> => 102,
<<"message">> => <<"mqtt3.1.1 specification: keepalive must between 0~65535">>},
?assertEqual(ErrMsg, jiffy:decode(Error1, [return_maps])),
?assertEqual(Error1, Error2),
emqtt:disconnect(C1),
application:stop(emqx_dashboard),
ok.
request_api(Method, Url, Auth) -> request_api(Method, Url, Auth) ->
request_api(Method, Url, [], Auth, []). request_api(Method, Url, [], Auth, []).

View File

@ -0,0 +1,68 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_mongo_auth_module_migration_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("eunit/include/eunit.hrl").
-ifdef(EMQX_ENTERPRISE).
-include_lib("emqx_modules/include/emqx_modules.hrl").
-endif.
all() ->
emqx_ct:all(?MODULE).
-ifdef(EMQX_ENTERPRISE).
init_per_suite(Config) ->
application:load(emqx_modules_spec),
emqx_ct_helpers:start_apps([emqx_management, emqx_modules]),
Config.
end_per_suite(_Config) ->
emqx_ct_helpers:stop_apps([emqx_modules, emqx_management]),
application:unload(emqx_modules_spec),
ok.
t_import_4_2(Config) ->
?assertMatch(ok, import("e4.2.8.json", Config)),
timer:sleep(100),
MongoAuthNModule = emqx_modules_registry:find_module_by_type(mongo_authentication),
?assertNotEqual(not_found, MongoAuthNModule),
?assertMatch(#module{config = #{<<"srv_record">> := _}}, MongoAuthNModule),
delete_modules().
t_import_4_3(Config) ->
?assertMatch(ok, import("e4.3.5.json", Config)),
timer:sleep(100),
MongoAuthNModule = emqx_modules_registry:find_module_by_type(mongo_authentication),
?assertNotEqual(not_found, MongoAuthNModule),
?assertMatch(#module{config = #{<<"srv_record">> := _}}, MongoAuthNModule),
delete_modules().
import(File, Config) ->
Filename = filename:join(proplists:get_value(data_dir, Config), File),
emqx_mgmt_data_backup:import(Filename, "{}").
delete_modules() ->
[emqx_modules_registry:remove_module(Mod) || Mod <- emqx_modules_registry:get_modules()].
-endif.

View File

@ -0,0 +1 @@
{"version":"4.2","date":"2021-11-15 01:52:40","modules":[{"id":"module:79002e0f","type":"retainer","config":{"storage_type":"ram","max_retained_messages":0,"max_payload_size":"1MB","expiry_interval":0},"enabled":true,"created_at":1636941076704,"description":""},{"id":"module:34834081","type":"presence","config":{"qos":0},"enabled":true,"created_at":1636941076704,"description":""},{"id":"module:f6eb69d1","type":"recon","config":{},"enabled":true,"created_at":1636941076704,"description":""},{"id":"module:7ae737b2","type":"mongo_authentication","config":{"w_mode":"undef","verify":false,"type":"single","super_query_selector":"","super_query_field":"","super_query_collection":"","ssl":false,"server":"127.0.0.1:27017","r_mode":"undef","pool_size":8,"password":"public","login":"admin","keyfile":{"filename":"","file":""},"database":"mqtt","certfile":{"filename":"","file":""},"cacertfile":{"filename":"","file":""},"auth_source":"admin","auth_query_selector":"username=%u","auth_query_password_hash":"sha256","auth_query_password_field":"password","auth_query_collection":"mqtt_user","acl_query_selectors":[],"acl_query_collection":"mqtt_acl"},"enabled":false,"created_at":1636941148794,"description":""},{"id":"module:e8c63201","type":"internal_acl","config":{"acl_rule_file":"etc/acl.conf"},"enabled":true,"created_at":1636941076704,"description":""}],"rules":[],"resources":[],"blacklist":[],"apps":[{"id":"admin","secret":"public","name":"Default","desc":"Application user","status":true,"expired":"undefined"}],"users":[{"username":"admin","password":"qP5m2iS9qnn51gHoGLbaiMo/GwE=","tags":"administrator"}],"auth_mnesia":[],"acl_mnesia":[],"schemas":[],"configs":[],"listeners_state":[]}

View File

@ -0,0 +1 @@
{"version":"4.3","rules":[],"resources":[],"blacklist":[],"apps":[{"id":"admin","secret":"public","name":"Default","desc":"Application user","status":true,"expired":"undefined"}],"users":[{"username":"admin","password":"/mWV4UgV0xmVUZX4qdIXQvxXZB0=","tags":"administrator"}],"auth_mnesia":[],"acl_mnesia":[],"modules":[{"id":"module:5881add2","type":"mongo_authentication","config":{"w_mode":"undef","verify":false,"type":"single","super_query_selector":"","super_query_field":"","super_query_collection":"","ssl":false,"server":"127.0.0.1:27017","r_mode":"undef","pool_size":8,"password":"public","login":"admin","keyfile":{"filename":"","file":""},"database":"mqtt","certfile":{"filename":"","file":""},"cacertfile":{"filename":"","file":""},"auth_source":"admin","auth_query_selector":"username=%u","auth_query_password_hash":"sha256","auth_query_password_field":"password","auth_query_collection":"mqtt_user","acl_query_selectors":[],"acl_query_collection":"mqtt_acl"},"enabled":false,"created_at":1636942609573,"description":""},{"id":"module:2adb6480","type":"presence","config":{"qos":0},"enabled":true,"created_at":1636942586725,"description":""},{"id":"module:24fabe8a","type":"internal_acl","config":{"acl_rule_file":"etc/acl.conf"},"enabled":true,"created_at":1636942586725,"description":""},{"id":"module:22c70ab8","type":"recon","config":{},"enabled":true,"created_at":1636942586725,"description":""},{"id":"module:a59f9a4a","type":"retainer","config":{"storage_type":"ram","max_retained_messages":0,"max_payload_size":"1MB","expiry_interval":0},"enabled":true,"created_at":1636942586725,"description":""}],"schemas":[],"configs":[],"listeners_state":[],"date":"2021-11-15 10:16:56"}

View File

@ -0,0 +1,38 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-define(TOPK_TAB, emqx_slow_subs_topk).
-define(INDEX_TAB, emqx_slow_subs_index).
-define(ID(ClientId, Topic), {ClientId, Topic}).
-define(INDEX(TimeSpan, Id), {Id, TimeSpan}).
-define(TOPK_INDEX(TimeSpan, Id), {TimeSpan, Id}).
-define(MAX_SIZE, 1000).
-record(top_k, { index :: topk_index()
, last_update_time :: pos_integer()
, extra = []
}).
-record(index_tab, { index :: index()}).
-type top_k() :: #top_k{}.
-type index_tab() :: #index_tab{}.
-type id() :: {emqx_types:clientid(), emqx_types:topic()}.
-type index() :: ?INDEX(non_neg_integer(), id()).
-type topk_index() :: ?TOPK_INDEX(non_neg_integer(), id()).

View File

@ -1,6 +1,6 @@
{application, emqx_plugin_libs, {application, emqx_plugin_libs,
[{description, "EMQ X Plugin utility libs"}, [{description, "EMQ X Plugin utility libs"},
{vsn, "4.3.1"}, {vsn, "4.4.1"},
{modules, []}, {modules, []},
{applications, [kernel,stdlib]}, {applications, [kernel,stdlib]},
{env, []} {env, []}

View File

@ -1,16 +1,13 @@
%% -*-: erlang -*- %% -*- mode: erlang -*-
{VSN, {VSN,
[ [{"4.4.0",
{<<"4.3.0">>, [ [ {load_module,emqx_slow_subs,brutal_purge,soft_purge,[]}
{load_module, emqx_plugin_libs_ssl, brutal_purge, soft_purge, []} , {load_module,emqx_slow_subs_api,brutal_purge,soft_purge,[]}
]}, ]},
{<<".*">>, []} {<<".*">>,[]}],
], [{"4.4.0",
[ [ {load_module,emqx_slow_subs,brutal_purge,soft_purge,[]}
{<<"4.3.0">>, [ , {load_module,emqx_slow_subs_api,brutal_purge,soft_purge,[]}
{load_module, emqx_plugin_libs_ssl, brutal_purge, soft_purge, []}
]}, ]},
{<<".*">>, []} {<<".*">>,[]}]
]
}. }.

View File

@ -0,0 +1,303 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_slow_subs).
-behaviour(gen_server).
-include_lib("include/emqx.hrl").
-include_lib("include/logger.hrl").
-include_lib("emqx_plugin_libs/include/emqx_slow_subs.hrl").
-logger_header("[SLOW Subs]").
-export([ start_link/1, on_delivery_completed/4, enable/0
, disable/0, clear_history/0, init_tab/0
]).
%% gen_server callbacks
-export([ init/1
, handle_call/3
, handle_cast/2
, handle_info/2
, terminate/2
, code_change/3
]).
-compile(nowarn_unused_type).
-type state() :: #{ config := proplist:proplist()
, enable := boolean()
, last_tick_at := pos_integer()
}.
-type message() :: #message{}.
-import(proplists, [get_value/2, get_value/3]).
-type stats_type() :: whole %% whole = internal + response
| internal %% timespan from message arrival to delivery
| response. %% timespan from delivery to client response
-type stats_update_args() :: #{session_birth_time := pos_integer()}.
-type stats_update_env() :: #{ threshold := non_neg_integer()
, stats_type := stats_type()
, max_size := pos_integer()}.
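%% Worked example (illustrative numbers, not part of the configuration): for a message
%% created at T=0ms, picked up for delivery at T=300ms (deliver_begin_at) and completed
%% at T=800ms, the computed timespans are whole=800, internal=300 and response=500;
%% with threshold=400 the entry is recorded for stats_type whole or response, but not
%% for internal (see calc_timespan/3 and on_delivery_completed/5 below).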
-ifdef(TEST).
-define(EXPIRE_CHECK_INTERVAL, timer:seconds(1)).
-else.
-define(EXPIRE_CHECK_INTERVAL, timer:seconds(10)).
-endif.
-define(NOW, erlang:system_time(millisecond)).
-define(DEF_CALL_TIMEOUT, timer:seconds(10)).
%% erlang term order
%% number < atom < reference < fun < port < pid < tuple < list < bit string
%% ets ordered_set is ascending by term order
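%% Illustrative shell snippet (assumed example, not part of this module): because
%% ?TOPK_TAB is an ordered_set keyed by ?TOPK_INDEX(TimeSpan, Id), ets:first/1 yields
%% the entry with the smallest TimeSpan, which is what try_insert_to_topk/5 relies on
%% when evicting the minimum:
%%
%% T = ets:new(demo, [ordered_set]),
%% true = ets:insert(T, [{{5, a}, x}, {{1, b}, y}, {{9, c}, z}]),
%% {1, b} = ets:first(T).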
%%--------------------------------------------------------------------
%%--------------------------------------------------------------------
%% APIs
%%--------------------------------------------------------------------
%% @doc Start the slow subscriptions statistics process
-spec(start_link(Env :: list()) -> emqx_types:startlink_ret()).
start_link(Env) ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [Env], []).
on_delivery_completed(_ClientInfo, #message{timestamp = Ts}, #{session_birth_time := BirthTime}, _Cfg)
when Ts =< BirthTime ->
ok;
on_delivery_completed(ClientInfo, Msg, Env, Cfg) ->
on_delivery_completed(ClientInfo, Msg, Env, erlang:system_time(millisecond), Cfg).
on_delivery_completed(#{clientid := ClientId},
#message{topic = Topic} = Msg,
_Env,
Now,
#{threshold := Threshold,
stats_type := StatsType,
max_size := MaxSize}) ->
TimeSpan = calc_timespan(StatsType, Msg, Now),
case TimeSpan =< Threshold of
true -> ok;
_ ->
Id = ?ID(ClientId, Topic),
LastUpdateValue = find_last_update_value(Id),
case TimeSpan =< LastUpdateValue of
true -> ok;
_ ->
try_insert_to_topk(MaxSize, Now, LastUpdateValue, TimeSpan, Id)
end
end.
clear_history() ->
gen_server:call(?MODULE, ?FUNCTION_NAME, ?DEF_CALL_TIMEOUT).
enable() ->
gen_server:call(?MODULE, {enable, true}, ?DEF_CALL_TIMEOUT).
disable() ->
gen_server:call(?MODULE, {enable, false}, ?DEF_CALL_TIMEOUT).
init_tab() ->
safe_create_tab(?TOPK_TAB, [ ordered_set, public, named_table
, {keypos, #top_k.index}, {write_concurrency, true}
, {read_concurrency, true}
]),
safe_create_tab(?INDEX_TAB, [ ordered_set, public, named_table
, {keypos, #index_tab.index}, {write_concurrency, true}
, {read_concurrency, true}
]).
%%--------------------------------------------------------------------
%% gen_server callbacks
%%--------------------------------------------------------------------
init([Conf]) ->
expire_tick(Conf),
load(Conf),
{ok, #{config => Conf,
last_tick_at => ?NOW,
enable => true}}.
handle_call({enable, Enable}, _From,
#{config := Cfg, enable := IsEnable} = State) ->
State2 = case Enable of
IsEnable ->
State;
true ->
load(Cfg),
State#{enable := true};
_ ->
unload(),
State#{enable := false}
end,
{reply, ok, State2};
handle_call(clear_history, _, State) ->
do_clear_history(),
{reply, ok, State};
handle_call(Req, _From, State) ->
?LOG(error, "Unexpected call: ~p", [Req]),
{reply, ignored, State}.
handle_cast(Msg, State) ->
?LOG(error, "Unexpected cast: ~p", [Msg]),
{noreply, State}.
handle_info(expire_tick, #{config := Cfg} = State) ->
expire_tick(Cfg),
Logs = ets:tab2list(?TOPK_TAB),
do_clear(Cfg, Logs),
{noreply, State};
handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]),
{noreply, State}.
terminate(_Reason, _) ->
unload(),
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%--------------------------------------------------------------------
%% Internal functions
%%--------------------------------------------------------------------
expire_tick(_) ->
erlang:send_after(?EXPIRE_CHECK_INTERVAL, self(), ?FUNCTION_NAME).
load(Cfg) ->
MaxSize = get_value(top_k_num, Cfg),
StatsType = get_value(stats_type, Cfg, whole),
Threshold = get_value(threshold, Cfg),
_ = emqx:hook('delivery.completed',
fun ?MODULE:on_delivery_completed/4,
[#{max_size => MaxSize,
stats_type => StatsType,
threshold => Threshold
}]),
ok.
unload() ->
emqx:unhook('delivery.completed', fun ?MODULE:on_delivery_completed/4 ),
do_clear_history().
do_clear(Cfg, Logs) ->
Now = ?NOW,
Interval = get_value(expire_interval, Cfg),
Each = fun(#top_k{index = ?TOPK_INDEX(TimeSpan, Id), last_update_time = Ts}) ->
case Now - Ts >= Interval of
true ->
delete_with_index(TimeSpan, Id);
_ ->
true
end
end,
lists:foreach(Each, Logs).
-spec calc_timespan(stats_type(), emqx_types:message(), non_neg_integer()) -> non_neg_integer().
calc_timespan(whole, #message{timestamp = Ts}, Now) ->
Now - Ts;
calc_timespan(internal, #message{timestamp = Ts} = Msg, Now) ->
End = emqx_message:get_header(deliver_begin_at, Msg, Now),
End - Ts;
calc_timespan(response, Msg, Now) ->
Begin = emqx_message:get_header(deliver_begin_at, Msg, Now),
Now - Begin.
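%% Illustrative example (not part of the original code, hypothetical values):
%% for a message published at Ts = 1000 ms, with deliver_begin_at = 1200 ms in its
%% headers and delivery completed at Now = 1500 ms, the three stats types yield:
%%   whole    -> Now - Ts               = 500 ms
%%   internal -> deliver_begin_at - Ts  = 200 ms
%%   response -> Now - deliver_begin_at = 300 ms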
%% update_topk is safe, because each process has a unique clientid,
%% and insert or delete operations are bound to this clientid, so there is no race condition.
%%
%% However, the delete_with_index in L249 may have a race condition,
%% because data belonging to another clientid may be deleted here
%% (i.e. data written by other processes), so it may happen that
%% while a record is being deleted, another process is performing an update on that record.
%% To solve this race condition, the index table also uses the ordered_set type,
%% so that even if the above situation occurs, it will only cause the old data to be deleted twice,
%% and the correctness of the data will not be affected.
try_insert_to_topk(MaxSize, Now, LastUpdateValue, TimeSpan, Id) ->
case ets:info(?TOPK_TAB, size) of
Size when Size < MaxSize ->
update_topk(Now, LastUpdateValue, TimeSpan, Id);
_Size ->
case ets:first(?TOPK_TAB) of
'$end_of_table' ->
update_topk(Now, LastUpdateValue, TimeSpan, Id);
?TOPK_INDEX(_, Id) ->
update_topk(Now, LastUpdateValue, TimeSpan, Id);
?TOPK_INDEX(Min, MinId) ->
case TimeSpan =< Min of
true -> false;
_ ->
update_topk(Now, LastUpdateValue, TimeSpan, Id),
delete_with_index(Min, MinId)
end
end
end.
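%% Illustrative example (not part of the original code): with MaxSize = 3 and the
%% table already holding timespans [30, 50, 80], a new sample of 40 ms evicts the
%% current minimum (30) and is inserted, while a new sample of 20 ms is discarded
%% because it does not exceed the smallest recorded timespan.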
-spec find_last_update_value(id()) -> non_neg_integer().
find_last_update_value(Id) ->
case ets:next(?INDEX_TAB, ?INDEX(0, Id)) of
?INDEX(LastUpdateValue, Id) ->
LastUpdateValue;
_ ->
0
end.
-spec update_topk(pos_integer(), non_neg_integer(), non_neg_integer(), id()) -> true.
update_topk(Now, LastUpdateValue, TimeSpan, Id) ->
%% update record
ets:insert(?TOPK_TAB, #top_k{index = ?TOPK_INDEX(TimeSpan, Id),
last_update_time = Now,
extra = []
}),
%% update index
ets:insert(?INDEX_TAB, #index_tab{index = ?INDEX(TimeSpan, Id)}),
%% delete the old record & index
delete_with_index(LastUpdateValue, Id).
-spec delete_with_index(non_neg_integer(), id()) -> true.
delete_with_index(0, _) ->
true;
delete_with_index(TimeSpan, Id) ->
ets:delete(?INDEX_TAB, ?INDEX(TimeSpan, Id)),
ets:delete(?TOPK_TAB, ?TOPK_INDEX(TimeSpan, Id)).
safe_create_tab(Name, Opts) ->
case ets:whereis(Name) of
undefined ->
Name = ets:new(Name, Opts);
_ ->
Name
end.
do_clear_history() ->
ets:delete_all_objects(?INDEX_TAB),
ets:delete_all_objects(?TOPK_TAB).

View File

@ -0,0 +1,116 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_slow_subs_api).
-rest_api(#{name => clear_history,
method => 'DELETE',
path => "/slow_subscriptions",
func => clear_history,
descr => "Clear current data and re count slow topic"}).
-rest_api(#{name => get_history,
method => 'GET',
path => "/slow_subscriptions",
func => get_history,
descr => "Get slow topics statistics record data"}).
-export([ clear_history/2
, get_history/2
, get_history/0
]).
-include_lib("emqx_plugin_libs/include/emqx_slow_subs.hrl").
-define(DEFAULT_RPC_TIMEOUT, timer:seconds(5)).
-import(minirest, [return/1]).
%%--------------------------------------------------------------------
%% HTTP API
%%--------------------------------------------------------------------
clear_history(_Bindings, _Params) ->
Nodes = ekka_mnesia:running_nodes(),
_ = [rpc_call(Node, emqx_slow_subs, clear_history, [], ok, ?DEFAULT_RPC_TIMEOUT)
|| Node <- Nodes],
return(ok).
get_history(_Bindings, _Params) ->
execute_when_enabled(fun do_get_history/0).
get_history() ->
Node = node(),
RankL = ets:tab2list(?TOPK_TAB),
ConvFun = fun(#top_k{index = ?TOPK_INDEX(TimeSpan, ?ID(ClientId, Topic)),
last_update_time = LastUpdateTime
}) ->
#{ clientid => ClientId
, node => Node
, topic => Topic
, timespan => TimeSpan
, last_update_time => LastUpdateTime
}
end,
lists:map(ConvFun, RankL).
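%% Illustrative result shape (hypothetical values): get_history/0 returns a list of
%% maps such as
%%   #{clientid => <<"client-1">>, node => 'emqx@127.0.0.1',
%%     topic => <<"t/slow">>, timespan => 1200,
%%     last_update_time => 1640000000000}
%% where timespan and last_update_time are in milliseconds.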
%%--------------------------------------------------------------------
%% Internal functions
%%--------------------------------------------------------------------
do_get_history() ->
Nodes = ekka_mnesia:running_nodes(),
Fun = fun(Node, Acc) ->
NodeRankL = rpc_call(Node,
?MODULE,
get_history,
[],
[],
?DEFAULT_RPC_TIMEOUT),
NodeRankL ++ Acc
end,
RankL = lists:foldl(Fun, [], Nodes),
SortFun = fun(#{timespan := A}, #{timespan := B}) ->
A > B
end,
SortedL = lists:sort(SortFun, RankL),
SortedL2 = lists:sublist(SortedL, ?MAX_SIZE),
return({ok, SortedL2}).
rpc_call(Node, M, F, A, _ErrorR, _T) when Node =:= node() ->
erlang:apply(M, F, A);
rpc_call(Node, M, F, A, ErrorR, T) ->
case rpc:call(Node, M, F, A, T) of
{badrpc, _} -> ErrorR;
Res -> Res
end.
-ifdef(EMQX_ENTERPRISE).
execute_when_enabled(Fun) ->
Fun().
-else.
%% this code is from emqx_mod_api_topics_metrics:execute_when_enabled
execute_when_enabled(Fun) ->
case emqx_modules:find_module(emqx_mod_slow_subs) of
[{_, true}] -> Fun();
_ -> return({error, module_not_loaded})
end.
-endif.

View File

@ -0,0 +1,490 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_trace).
-behaviour(gen_server).
-include_lib("emqx/include/emqx.hrl").
-include_lib("emqx/include/logger.hrl").
-logger_header("[Tracer]").
-export([ publish/1
, subscribe/3
, unsubscribe/2
]).
-export([ start_link/0
, list/0
, list/1
, get_trace_filename/1
, create/1
, delete/1
, clear/0
, update/2
]).
-export([ format/1
, zip_dir/0
, filename/2
, trace_dir/0
, trace_file/1
, delete_files_after_send/2
]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
-define(TRACE, ?MODULE).
-define(MAX_SIZE, 30).
-ifdef(TEST).
-export([ log_file/2
, create_table/0
, find_closest_time/2
]).
-endif.
-export_type([ip_address/0]).
-type ip_address() :: string().
-record(?TRACE,
{ name :: binary() | undefined | '_'
, type :: clientid | topic | ip_address | undefined | '_'
, filter :: emqx_types:topic() | emqx_types:clientid() | ip_address() | undefined | '_'
, enable = true :: boolean() | '_'
, start_at :: integer() | undefined | '_'
, end_at :: integer() | undefined | '_'
}).
publish(#message{topic = <<"$SYS/", _/binary>>}) -> ignore;
publish(#message{from = From, topic = Topic, payload = Payload}) when
is_binary(From); is_atom(From) ->
emqx_logger:info(
#{topic => Topic, mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY}},
"PUBLISH to ~s: ~0p",
[Topic, Payload]
).
subscribe(<<"$SYS/", _/binary>>, _SubId, _SubOpts) -> ignore;
subscribe(Topic, SubId, SubOpts) ->
emqx_logger:info(
#{topic => Topic, mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY}},
"~ts SUBSCRIBE ~ts: Options: ~0p",
[SubId, Topic, SubOpts]
).
unsubscribe(<<"$SYS/", _/binary>>, _SubOpts) -> ignore;
unsubscribe(Topic, SubOpts) ->
emqx_logger:info(
#{topic => Topic, mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY}},
"~ts UNSUBSCRIBE ~ts: Options: ~0p",
[maps:get(subid, SubOpts, ""), Topic, SubOpts]
).
-spec(start_link() -> emqx_types:startlink_ret()).
start_link() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
-spec list() -> [tuple()].
list() ->
ets:match_object(?TRACE, #?TRACE{_ = '_'}).
-spec list(boolean()) -> [tuple()].
list(Enable) ->
ets:match_object(?TRACE, #?TRACE{enable = Enable, _ = '_'}).
-spec create([{Key :: binary(), Value :: binary()}] | #{atom() => binary()}) ->
ok | {error, {duplicate_condition, iodata()} | {already_existed, iodata()} | iodata()}.
create(Trace) ->
case mnesia:table_info(?TRACE, size) < ?MAX_SIZE of
true ->
case to_trace(Trace) of
{ok, TraceRec} -> insert_new_trace(TraceRec);
{error, Reason} -> {error, Reason}
end;
false ->
{error, "The number of traces created has reache the maximum"
" please delete the useless ones first"}
end.
-spec delete(Name :: binary()) -> ok | {error, not_found}.
delete(Name) ->
Tran = fun() ->
case mnesia:read(?TRACE, Name) of
[_] -> mnesia:delete(?TRACE, Name, write);
[] -> mnesia:abort(not_found)
end
end,
transaction(Tran).
-spec clear() -> ok | {error, Reason :: term()}.
clear() ->
case mnesia:clear_table(?TRACE) of
{atomic, ok} -> ok;
{aborted, Reason} -> {error, Reason}
end.
-spec update(Name :: binary(), Enable :: boolean()) ->
ok | {error, not_found | finished}.
update(Name, Enable) ->
Tran = fun() ->
case mnesia:read(?TRACE, Name) of
[] -> mnesia:abort(not_found);
[#?TRACE{enable = Enable}] -> ok;
[Rec] ->
case erlang:system_time(second) >= Rec#?TRACE.end_at of
false -> mnesia:write(?TRACE, Rec#?TRACE{enable = Enable}, write);
true -> mnesia:abort(finished)
end
end
end,
transaction(Tran).
-spec get_trace_filename(Name :: binary()) ->
{ok, FileName :: string()} | {error, not_found}.
get_trace_filename(Name) ->
Tran = fun() ->
case mnesia:read(?TRACE, Name, read) of
[] -> mnesia:abort(not_found);
[#?TRACE{start_at = Start}] -> {ok, filename(Name, Start)}
end end,
transaction(Tran).
-spec trace_file(File :: list()) ->
{ok, Node :: list(), Binary :: binary()} |
{error, Node :: list(), Reason :: term()}.
trace_file(File) ->
FileName = filename:join(trace_dir(), File),
Node = atom_to_list(node()),
case file:read_file(FileName) of
{ok, Bin} -> {ok, Node, Bin};
{error, Reason} -> {error, Node, Reason}
end.
delete_files_after_send(TraceLog, Zips) ->
gen_server:cast(?MODULE, {delete_tag, self(), [TraceLog | Zips]}).
-spec format(list(#?TRACE{})) -> list(map()).
format(Traces) ->
Fields = record_info(fields, ?TRACE),
lists:map(fun(Trace0 = #?TRACE{}) ->
[_ | Values] = tuple_to_list(Trace0),
maps:from_list(lists:zip(Fields, Values))
end, Traces).
init([]) ->
ok = create_table(),
erlang:process_flag(trap_exit, true),
OriginLogLevel = emqx_logger:get_primary_log_level(),
ok = filelib:ensure_dir(trace_dir()),
ok = filelib:ensure_dir(zip_dir()),
{ok, _} = mnesia:subscribe({table, ?TRACE, simple}),
Traces = get_enable_trace(),
ok = update_log_primary_level(Traces, OriginLogLevel),
TRef = update_trace(Traces),
{ok, #{timer => TRef, monitors => #{}, primary_log_level => OriginLogLevel}}.
create_table() ->
ok = ekka_mnesia:create_table(?TRACE, [
{type, set},
{disc_copies, [node()]},
{record_name, ?TRACE},
{attributes, record_info(fields, ?TRACE)}]),
ok = ekka_mnesia:copy_table(?TRACE, disc_copies).
handle_call(Req, _From, State) ->
?LOG(error, "Unexpected call: ~p", [Req]),
{reply, ok, State}.
handle_cast({delete_tag, Pid, Files}, State = #{monitors := Monitors}) ->
erlang:monitor(process, Pid),
{noreply, State#{monitors => Monitors#{Pid => Files}}};
handle_cast(Msg, State) ->
?LOG(error, "Unexpected cast: ~p", [Msg]),
{noreply, State}.
handle_info({'DOWN', _Ref, process, Pid, _Reason}, State = #{monitors := Monitors}) ->
case maps:take(Pid, Monitors) of
error -> {noreply, State};
{Files, NewMonitors} ->
lists:foreach(fun file:delete/1, Files),
{noreply, State#{monitors => NewMonitors}}
end;
handle_info({timeout, TRef, update_trace},
#{timer := TRef, primary_log_level := OriginLogLevel} = State) ->
Traces = get_enable_trace(),
ok = update_log_primary_level(Traces, OriginLogLevel),
NextTRef = update_trace(Traces),
{noreply, State#{timer => NextTRef}};
handle_info({mnesia_table_event, _Events}, State = #{timer := TRef}) ->
emqx_misc:cancel_timer(TRef),
handle_info({timeout, TRef, update_trace}, State);
handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]),
{noreply, State}.
terminate(_Reason, #{timer := TRef, primary_log_level := OriginLogLevel}) ->
ok = set_log_primary_level(OriginLogLevel),
_ = mnesia:unsubscribe({table, ?TRACE, simple}),
emqx_misc:cancel_timer(TRef),
stop_all_trace_handler(),
_ = file:del_dir_r(zip_dir()),
ok.
code_change(_, State, _Extra) ->
{ok, State}.
insert_new_trace(Trace) ->
Tran = fun() ->
case mnesia:read(?TRACE, Trace#?TRACE.name) of
[] ->
#?TRACE{start_at = StartAt, type = Type, filter = Filter} = Trace,
Match = #?TRACE{_ = '_', start_at = StartAt, type = Type, filter = Filter},
case mnesia:match_object(?TRACE, Match, read) of
[] -> mnesia:write(?TRACE, Trace, write);
[#?TRACE{name = Name}] -> mnesia:abort({duplicate_condition, Name})
end;
[#?TRACE{name = Name}] -> mnesia:abort({already_existed, Name})
end
end,
transaction(Tran).
update_trace(Traces) ->
Now = erlang:system_time(second),
{_Waiting, Running, Finished} = classify_by_time(Traces, Now),
disable_finished(Finished),
Started = emqx_trace_handler:running(),
{NeedRunning, AllStarted} = start_trace(Running, Started),
NeedStop = AllStarted -- NeedRunning,
ok = stop_trace(NeedStop, Started),
clean_stale_trace_files(),
NextTime = find_closest_time(Traces, Now),
emqx_misc:start_timer(NextTime, update_trace).
stop_all_trace_handler() ->
lists:foreach(fun(#{id := Id}) -> emqx_trace_handler:uninstall(Id) end,
emqx_trace_handler:running()).
get_enable_trace() ->
{atomic, Traces} =
mnesia:transaction(fun() ->
mnesia:match_object(?TRACE, #?TRACE{enable = true, _ = '_'}, read)
end),
Traces.
find_closest_time(Traces, Now) ->
Sec =
lists:foldl(
fun(#?TRACE{start_at = Start, end_at = End, enable = true}, Closest) ->
min(closest(End, Now, Closest), closest(Start, Now, Closest));
(_, Closest) -> Closest
end, 60 * 15, Traces),
timer:seconds(Sec).
closest(Time, Now, Closest) when Now >= Time -> Closest;
closest(Time, Now, Closest) -> min(Time - Now, Closest).
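%% Illustrative example (not part of the original code): with Now = 100 s and a
%% single enabled trace having start_at = 102 and end_at = 110, the fold gives
%% min(closest(110, 100, 900), closest(102, 100, 900)) = 2, so the returned delay
%% is timer:seconds(2) = 2000 ms; with no enabled traces the default is
%% timer:seconds(900), i.e. 15 minutes.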
disable_finished([]) -> ok;
disable_finished(Traces) ->
transaction(fun() ->
lists:map(fun(#?TRACE{name = Name}) ->
case mnesia:read(?TRACE, Name, write) of
[] -> ok;
[Trace] -> mnesia:write(?TRACE, Trace#?TRACE{enable = false}, write)
end end, Traces)
end).
start_trace(Traces, Started0) ->
Started = lists:map(fun(#{name := Name}) -> Name end, Started0),
lists:foldl(fun(#?TRACE{name = Name} = Trace, {Running, StartedAcc}) ->
case lists:member(Name, StartedAcc) of
true ->
{[Name | Running], StartedAcc};
false ->
case start_trace(Trace) of
ok -> {[Name | Running], [Name | StartedAcc]};
{error, _Reason} -> {[Name | Running], StartedAcc}
end
end
end, {[], Started}, Traces).
start_trace(Trace) ->
#?TRACE{name = Name
, type = Type
, filter = Filter
, start_at = Start
} = Trace,
Who = #{name => Name, type => Type, filter => Filter},
emqx_trace_handler:install(Who, debug, log_file(Name, Start)).
stop_trace(Finished, Started) ->
lists:foreach(fun(#{name := Name, type := Type}) ->
case lists:member(Name, Finished) of
true -> emqx_trace_handler:uninstall(Type, Name);
false -> ok
end
end, Started).
clean_stale_trace_files() ->
TraceDir = trace_dir(),
case file:list_dir(TraceDir) of
{ok, AllFiles} when AllFiles =/= ["zip"] ->
FileFun = fun(#?TRACE{name = Name, start_at = StartAt}) -> filename(Name, StartAt) end,
KeepFiles = lists:map(FileFun, list()),
case AllFiles -- ["zip" | KeepFiles] of
[] -> ok;
DeleteFiles ->
DelFun = fun(F) -> file:delete(filename:join(TraceDir, F)) end,
lists:foreach(DelFun, DeleteFiles)
end;
_ -> ok
end.
classify_by_time(Traces, Now) ->
classify_by_time(Traces, Now, [], [], []).
classify_by_time([], _Now, Wait, Run, Finish) -> {Wait, Run, Finish};
classify_by_time([Trace = #?TRACE{start_at = Start} | Traces],
Now, Wait, Run, Finish) when Start > Now ->
classify_by_time(Traces, Now, [Trace | Wait], Run, Finish);
classify_by_time([Trace = #?TRACE{end_at = End} | Traces],
Now, Wait, Run, Finish) when End =< Now ->
classify_by_time(Traces, Now, Wait, Run, [Trace | Finish]);
classify_by_time([Trace | Traces], Now, Wait, Run, Finish) ->
classify_by_time(Traces, Now, Wait, [Trace | Run], Finish).
to_trace(TraceParam) ->
case to_trace(ensure_map(TraceParam), #?TRACE{}) of
{error, Reason} -> {error, Reason};
{ok, #?TRACE{name = undefined}} ->
{error, "name required"};
{ok, #?TRACE{type = undefined}} ->
{error, "type=[topic,clientid,ip_address] required"};
{ok, TraceRec0 = #?TRACE{}} ->
case fill_default(TraceRec0) of
#?TRACE{start_at = Start, end_at = End} when End =< Start ->
{error, "failed by start_at >= end_at"};
TraceRec ->
{ok, TraceRec}
end
end.
ensure_map(#{} = Trace) -> Trace;
ensure_map(Trace) when is_list(Trace) ->
lists:foldl(
fun({K, V}, Acc) when is_binary(K) -> Acc#{binary_to_existing_atom(K) => V};
({K, V}, Acc) when is_atom(K) -> Acc#{K => V};
(_, Acc) -> Acc
end, #{}, Trace).
fill_default(Trace = #?TRACE{start_at = undefined}) ->
fill_default(Trace#?TRACE{start_at = erlang:system_time(second)});
fill_default(Trace = #?TRACE{end_at = undefined, start_at = StartAt}) ->
fill_default(Trace#?TRACE{end_at = StartAt + 10 * 60});
fill_default(Trace) -> Trace.
-define(NAME_RE, "^[A-Za-z]+[A-Za-z0-9-_]*$").
to_trace(#{name := Name} = Trace, Rec) ->
case re:run(Name, ?NAME_RE) of
nomatch -> {error, "Name should be " ?NAME_RE};
_ -> to_trace(maps:remove(name, Trace), Rec#?TRACE{name = Name})
end;
to_trace(#{type := <<"clientid">>, clientid := Filter} = Trace, Rec) ->
Trace0 = maps:without([type, clientid], Trace),
to_trace(Trace0, Rec#?TRACE{type = clientid, filter = Filter});
to_trace(#{type := <<"topic">>, topic := Filter} = Trace, Rec) ->
case validate_topic(Filter) of
ok ->
Trace0 = maps:without([type, topic], Trace),
to_trace(Trace0, Rec#?TRACE{type = topic, filter = Filter});
Error -> Error
end;
to_trace(#{type := <<"ip_address">>, ip_address := Filter} = Trace, Rec) ->
case validate_ip_address(Filter) of
ok ->
Trace0 = maps:without([type, ip_address], Trace),
to_trace(Trace0, Rec#?TRACE{type = ip_address, filter = Filter});
Error -> Error
end;
to_trace(#{type := Type}, _Rec) -> {error, io_lib:format("required ~s field", [Type])};
to_trace(#{start_at := StartAt} = Trace, Rec) ->
case to_system_second(StartAt) of
{ok, Sec} -> to_trace(maps:remove(start_at, Trace), Rec#?TRACE{start_at = Sec});
{error, Reason} -> {error, Reason}
end;
to_trace(#{end_at := EndAt} = Trace, Rec) ->
Now = erlang:system_time(second),
case to_system_second(EndAt) of
{ok, Sec} when Sec > Now ->
to_trace(maps:remove(end_at, Trace), Rec#?TRACE{end_at = Sec});
{ok, _Sec} ->
{error, "end_at time has already passed"};
{error, Reason} ->
{error, Reason}
end;
to_trace(_, Rec) -> {ok, Rec}.
validate_topic(TopicName) ->
try emqx_topic:validate(filter, TopicName) of
true -> ok
catch
error:Error ->
{error, io_lib:format("topic: ~s invalid by ~p", [TopicName, Error])}
end.
validate_ip_address(IP) ->
case inet:parse_address(binary_to_list(IP)) of
{ok, _} -> ok;
{error, Reason} -> {error, lists:flatten(io_lib:format("ip address: ~p", [Reason]))}
end.
to_system_second(At) ->
try
Sec = calendar:rfc3339_to_system_time(binary_to_list(At), [{unit, second}]),
Now = erlang:system_time(second),
{ok, erlang:max(Now, Sec)}
catch error: {badmatch, _} ->
{error, ["The rfc3339 specification not satisfied: ", At]}
end.
zip_dir() ->
trace_dir() ++ "zip/".
trace_dir() ->
filename:join(emqx:get_env(data_dir), "trace") ++ "/".
log_file(Name, Start) ->
filename:join(trace_dir(), filename(Name, Start)).
filename(Name, Start) ->
[Time, _] = string:split(calendar:system_time_to_rfc3339(Start), "T", leading),
lists:flatten(["trace_", binary_to_list(Name), "_", Time, ".log"]).
transaction(Tran) ->
case mnesia:transaction(Tran) of
{atomic, Res} -> Res;
{aborted, Reason} -> {error, Reason}
end.
update_log_primary_level([], OriginLevel) -> set_log_primary_level(OriginLevel);
update_log_primary_level(_, _) -> set_log_primary_level(debug).
set_log_primary_level(NewLevel) ->
case NewLevel =/= emqx_logger:get_primary_log_level() of
true -> emqx_logger:set_primary_log_level(NewLevel);
false -> ok
end.

View File

@ -0,0 +1,212 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_trace_api).
-include_lib("emqx/include/logger.hrl").
-include_lib("kernel/include/file.hrl").
%% API
-export([ list_trace/2
, create_trace/2
, update_trace/2
, delete_trace/2
, clear_traces/2
, download_zip_log/2
, stream_log_file/2
]).
-export([ read_trace_file/3
, get_trace_size/0
]).
-define(TO_BIN(_B_), iolist_to_binary(_B_)).
-define(NOT_FOUND(N), {error, 'NOT_FOUND', ?TO_BIN([N, " NOT FOUND"])}).
list_trace(_, _Params) ->
case emqx_trace:list() of
[] -> {ok, []};
List0 ->
List = lists:sort(fun(#{start_at := A}, #{start_at := B}) -> A > B end,
emqx_trace:format(List0)),
Nodes = ekka_mnesia:running_nodes(),
TraceSize = cluster_call(?MODULE, get_trace_size, [], 30000),
AllFileSize = lists:foldl(fun(F, Acc) -> maps:merge(Acc, F) end, #{}, TraceSize),
Now = erlang:system_time(second),
Traces =
lists:map(fun(Trace = #{name := Name, start_at := Start,
end_at := End, enable := Enable, type := Type, filter := Filter}) ->
FileName = emqx_trace:filename(Name, Start),
LogSize = collect_file_size(Nodes, FileName, AllFileSize),
Trace0 = maps:without([enable, filter], Trace),
Trace0#{ log_size => LogSize
, Type => iolist_to_binary(Filter)
, start_at => list_to_binary(calendar:system_time_to_rfc3339(Start))
, end_at => list_to_binary(calendar:system_time_to_rfc3339(End))
, status => status(Enable, Start, End, Now)
}
end, List),
{ok, Traces}
end.
create_trace(_, Param) ->
case emqx_trace:create(Param) of
ok -> ok;
        {error, {already_existed, Name}} ->
            {error, 'ALREADY_EXISTED', ?TO_BIN([Name, " Already Exists"])};
        {error, {duplicate_condition, Name}} ->
            {error, 'DUPLICATE_CONDITION', ?TO_BIN([Name, " Duplicate Condition"])};
{error, Reason} ->
{error, 'INCORRECT_PARAMS', ?TO_BIN(Reason)}
end.
delete_trace(#{name := Name}, _Param) ->
case emqx_trace:delete(Name) of
ok -> ok;
{error, not_found} -> ?NOT_FOUND(Name)
end.
clear_traces(_, _) ->
emqx_trace:clear().
update_trace(#{name := Name, operation := Operation}, _Param) ->
Enable = case Operation of disable -> false; enable -> true end,
case emqx_trace:update(Name, Enable) of
ok -> {ok, #{enable => Enable, name => Name}};
{error, not_found} -> ?NOT_FOUND(Name)
end.
%% If the HTTP request headers include accept-encoding: gzip and the file size is > 300 bytes,
%% cowboy_compress_h will automatically encode the response in gzip format.
download_zip_log(#{name := Name}, _Param) ->
case emqx_trace:get_trace_filename(Name) of
{ok, TraceLog} ->
TraceFiles = collect_trace_file(TraceLog),
ZipDir = emqx_trace:zip_dir(),
Zips = group_trace_file(ZipDir, TraceLog, TraceFiles),
ZipFileName = ZipDir ++ binary_to_list(Name) ++ ".zip",
{ok, ZipFile} = zip:zip(ZipFileName, Zips, [{cwd, ZipDir}]),
emqx_trace:delete_files_after_send(ZipFileName, Zips),
{ok, ZipFile};
{error, Reason} ->
{error, Reason}
end.
group_trace_file(ZipDir, TraceLog, TraceFiles) ->
lists:foldl(fun(Res, Acc) ->
case Res of
{ok, Node, Bin} ->
ZipName = ZipDir ++ Node ++ "-" ++ TraceLog,
case file:write_file(ZipName, Bin) of
ok -> [Node ++ "-" ++ TraceLog | Acc];
_ -> Acc
end;
{error, Node, Reason} ->
?LOG(error, "download trace log error:~p", [{Node, TraceLog, Reason}]),
Acc
end
end, [], TraceFiles).
collect_trace_file(TraceLog) ->
cluster_call(emqx_trace, trace_file, [TraceLog], 60000).
cluster_call(Mod, Fun, Args, Timeout) ->
Nodes = ekka_mnesia:running_nodes(),
{GoodRes, BadNodes} = rpc:multicall(Nodes, Mod, Fun, Args, Timeout),
BadNodes =/= [] andalso ?LOG(error, "rpc call failed on ~p ~p", [BadNodes, {Mod, Fun, Args}]),
GoodRes.
stream_log_file(#{name := Name}, Params) ->
Node0 = proplists:get_value(<<"node">>, Params, atom_to_binary(node())),
Position0 = proplists:get_value(<<"position">>, Params, <<"0">>),
Bytes0 = proplists:get_value(<<"bytes">>, Params, <<"1000">>),
case to_node(Node0) of
{ok, Node} ->
Position = binary_to_integer(Position0),
Bytes = binary_to_integer(Bytes0),
case rpc:call(Node, ?MODULE, read_trace_file, [Name, Position, Bytes]) of
{ok, Bin} ->
Meta = #{<<"position">> => Position + byte_size(Bin), <<"bytes">> => Bytes},
{ok, #{meta => Meta, items => Bin}};
{eof, Size} ->
Meta = #{<<"position">> => Size, <<"bytes">> => Bytes},
{ok, #{meta => Meta, items => <<"">>}};
{error, Reason} ->
logger:log(error, "read_file_failed ~p", [{Node, Name, Reason, Position, Bytes}]),
{error, Reason};
{badrpc, nodedown} ->
{error, "BadRpc node down"}
end;
{error, Reason} -> {error, Reason}
end.
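%% Illustrative usage (hypothetical values, not part of the original code): the caller
%% pages through a trace log by feeding the returned position back into the next request:
%%   {ok, #{meta := #{<<"position">> := Next}, items := Chunk}} =
%%       stream_log_file(#{name => <<"trace1">>},
%%                       [{<<"node">>, atom_to_binary(node())},
%%                        {<<"position">>, <<"0">>}, {<<"bytes">>, <<"1000">>}]),
%% then pass integer_to_binary(Next) as <<"position">> to fetch the following chunk.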
get_trace_size() ->
TraceDir = emqx_trace:trace_dir(),
Node = node(),
case file:list_dir(TraceDir) of
{ok, AllFiles} ->
lists:foldl(fun(File, Acc) ->
FullFileName = filename:join(TraceDir, File),
Acc#{{Node, File} => filelib:file_size(FullFileName)}
end, #{}, lists:delete("zip", AllFiles));
_ -> #{}
end.
%% this is an rpc call for stream_log_file/2
read_trace_file(Name, Position, Limit) ->
case emqx_trace:get_trace_filename(Name) of
{error, _} = Error -> Error;
{ok, TraceFile} ->
TraceDir = emqx_trace:trace_dir(),
TracePath = filename:join([TraceDir, TraceFile]),
read_file(TracePath, Position, Limit)
end.
read_file(Path, Offset, Bytes) ->
case file:open(Path, [read, raw, binary]) of
{ok, IoDevice} ->
try
_ = case Offset of
0 -> ok;
_ -> file:position(IoDevice, {bof, Offset})
end,
case file:read(IoDevice, Bytes) of
{ok, Bin} -> {ok, Bin};
{error, Reason} -> {error, Reason};
eof ->
{ok, #file_info{size = Size}} = file:read_file_info(IoDevice),
{eof, Size}
end
after
file:close(IoDevice)
end;
{error, Reason} -> {error, Reason}
end.
to_node(Node) ->
try {ok, binary_to_existing_atom(Node)}
catch _:_ ->
{error, "node not found"}
end.
collect_file_size(Nodes, FileName, AllFiles) ->
lists:foldl(fun(Node, Acc) ->
Size = maps:get({Node, FileName}, AllFiles, 0),
Acc#{Node => Size}
end, #{}, Nodes).
status(false, _Start, _End, _Now) -> <<"stopped">>;
status(true, Start, _End, Now) when Now < Start -> <<"waiting">>;
status(true, _Start, End, Now) when Now >= End -> <<"stopped">>;
status(true, _Start, _End, _Now) -> <<"running">>.

View File

@ -0,0 +1,348 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_trace_SUITE).
%% API
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("emqx/include/emqx.hrl").
-record(emqx_trace, {name, type, filter, enable = true, start_at, end_at}).
%%--------------------------------------------------------------------
%% Setups
%%--------------------------------------------------------------------
all() ->
emqx_ct:all(?MODULE).
init_per_suite(Config) ->
emqx_ct_helpers:start_apps([]),
Config.
end_per_suite(_Config) ->
emqx_ct_helpers:stop_apps([]).
init_per_testcase(_, Config) ->
load(),
ok = emqx_trace:clear(),
Config.
end_per_testcase(_) ->
unload(),
ok.
t_base_create_delete(_Config) ->
Now = erlang:system_time(second),
Start = to_rfc3339(Now),
End = to_rfc3339(Now + 30 * 60),
Name = <<"name1">>,
ClientId = <<"test-device">>,
Trace = #{
name => Name,
type => <<"clientid">>,
clientid => ClientId,
start_at => Start,
end_at => End
},
AnotherTrace = Trace#{name => <<"anotherTrace">>},
ok = emqx_trace:create(Trace),
?assertEqual({error, {already_existed, Name}}, emqx_trace:create(Trace)),
?assertEqual({error, {duplicate_condition, Name}}, emqx_trace:create(AnotherTrace)),
[TraceRec] = emqx_trace:list(),
Expect = #emqx_trace{
name = Name,
type = clientid,
filter = ClientId,
start_at = Now,
end_at = Now + 30 * 60
},
?assertEqual(Expect, TraceRec),
ExpectFormat = [
#{
filter => <<"test-device">>,
enable => true,
type => clientid,
name => <<"name1">>,
start_at => Now,
end_at => Now + 30 * 60
}
],
?assertEqual(ExpectFormat, emqx_trace:format([TraceRec])),
?assertEqual(ok, emqx_trace:delete(Name)),
?assertEqual({error, not_found}, emqx_trace:delete(Name)),
?assertEqual([], emqx_trace:list()),
ok.
t_create_size_max(_Config) ->
lists:map(fun(Seq) ->
Name = list_to_binary("name" ++ integer_to_list(Seq)),
Trace = [{name, Name}, {type, <<"topic">>},
{topic, list_to_binary("/x/y/" ++ integer_to_list(Seq))}],
ok = emqx_trace:create(Trace)
end, lists:seq(1, 30)),
Trace31 = [{<<"name">>, <<"name31">>},
{<<"type">>, <<"topic">>}, {<<"topic">>, <<"/x/y/31">>}],
{error, _} = emqx_trace:create(Trace31),
ok = emqx_trace:delete(<<"name30">>),
ok = emqx_trace:create(Trace31),
?assertEqual(30, erlang:length(emqx_trace:list())),
ok.
t_create_failed(_Config) ->
Name = {<<"name">>, <<"test">>},
UnknownField = [Name, {<<"unknown">>, 12}],
{error, Reason1} = emqx_trace:create(UnknownField),
?assertEqual(<<"type=[topic,clientid,ip_address] required">>, iolist_to_binary(Reason1)),
InvalidTopic = [Name, {<<"topic">>, "#/#//"}, {<<"type">>, <<"topic">>}],
{error, Reason2} = emqx_trace:create(InvalidTopic),
?assertEqual(<<"topic: #/#// invalid by function_clause">>, iolist_to_binary(Reason2)),
InvalidStart = [Name, {<<"type">>, <<"topic">>}, {<<"topic">>, <<"/sys/">>},
{<<"start_at">>, <<"2021-12-3:12">>}],
{error, Reason3} = emqx_trace:create(InvalidStart),
?assertEqual(<<"The rfc3339 specification not satisfied: 2021-12-3:12">>,
iolist_to_binary(Reason3)),
InvalidEnd = [Name, {<<"type">>, <<"topic">>}, {<<"topic">>, <<"/sys/">>},
{<<"end_at">>, <<"2021-12-3:12">>}],
{error, Reason4} = emqx_trace:create(InvalidEnd),
?assertEqual(<<"The rfc3339 specification not satisfied: 2021-12-3:12">>,
iolist_to_binary(Reason4)),
{error, Reason7} = emqx_trace:create([Name, {<<"type">>, <<"clientid">>}]),
?assertEqual(<<"required clientid field">>, iolist_to_binary(Reason7)),
InvalidPackets4 = [{<<"name">>, <<"/test">>}, {<<"clientid">>, <<"t">>},
{<<"type">>, <<"clientid">>}],
{error, Reason9} = emqx_trace:create(InvalidPackets4),
?assertEqual(<<"Name should be ^[A-Za-z]+[A-Za-z0-9-_]*$">>, iolist_to_binary(Reason9)),
?assertEqual({error, "type=[topic,clientid,ip_address] required"},
emqx_trace:create([{<<"name">>, <<"test-name">>}, {<<"clientid">>, <<"good">>}])),
?assertEqual({error, "ip address: einval"},
emqx_trace:create([Name, {<<"type">>, <<"ip_address">>},
{<<"ip_address">>, <<"test-name">>}])),
ok.
t_create_default(_Config) ->
{error, "name required"} = emqx_trace:create([]),
ok = emqx_trace:create([{<<"name">>, <<"test-name">>},
{<<"type">>, <<"clientid">>}, {<<"clientid">>, <<"good">>}]),
[#emqx_trace{name = <<"test-name">>}] = emqx_trace:list(),
ok = emqx_trace:clear(),
Trace = [
{<<"name">>, <<"test-name">>},
{<<"type">>, <<"topic">>},
{<<"topic">>, <<"/x/y/z">>},
{<<"start_at">>, <<"2021-10-28T10:54:47+08:00">>},
{<<"end_at">>, <<"2021-10-27T10:54:47+08:00">>}
],
{error, "end_at time has already passed"} = emqx_trace:create(Trace),
Now = erlang:system_time(second),
Trace2 = [
{<<"name">>, <<"test-name">>},
{<<"type">>, <<"topic">>},
{<<"topic">>, <<"/x/y/z">>},
{<<"start_at">>, to_rfc3339(Now + 10)},
{<<"end_at">>, to_rfc3339(Now + 3)}
],
{error, "failed by start_at >= end_at"} = emqx_trace:create(Trace2),
ok = emqx_trace:create([{<<"name">>, <<"test-name">>},
{<<"type">>, <<"topic">>}, {<<"topic">>, <<"/x/y/z">>}]),
[#emqx_trace{start_at = Start, end_at = End}] = emqx_trace:list(),
?assertEqual(10 * 60, End - Start),
?assertEqual(true, Start - erlang:system_time(second) < 5),
ok.
t_create_with_extra_fields(_Config) ->
ok = emqx_trace:clear(),
Trace = [
{<<"name">>, <<"test-name">>},
{<<"type">>, <<"topic">>},
{<<"topic">>, <<"/x/y/z">>},
{<<"clientid">>, <<"dev001">>},
{<<"ip_address">>, <<"127.0.0.1">>}
],
ok = emqx_trace:create(Trace),
?assertMatch([#emqx_trace{name = <<"test-name">>, filter = <<"/x/y/z">>, type = topic}],
emqx_trace:list()),
ok.
t_update_enable(_Config) ->
Name = <<"test-name">>,
Now = erlang:system_time(second),
End = list_to_binary(calendar:system_time_to_rfc3339(Now + 2)),
ok = emqx_trace:create([{<<"name">>, Name}, {<<"type">>, <<"topic">>},
{<<"topic">>, <<"/x/y/z">>}, {<<"end_at">>, End}]),
[#emqx_trace{enable = Enable}] = emqx_trace:list(),
?assertEqual(Enable, true),
ok = emqx_trace:update(Name, false),
[#emqx_trace{enable = false}] = emqx_trace:list(),
ok = emqx_trace:update(Name, false),
[#emqx_trace{enable = false}] = emqx_trace:list(),
ok = emqx_trace:update(Name, true),
[#emqx_trace{enable = true}] = emqx_trace:list(),
ok = emqx_trace:update(Name, false),
[#emqx_trace{enable = false}] = emqx_trace:list(),
?assertEqual({error, not_found}, emqx_trace:update(<<"Name not found">>, true)),
ct:sleep(2100),
?assertEqual({error, finished}, emqx_trace:update(Name, true)),
ok.
t_load_state(_Config) ->
Now = erlang:system_time(second),
Running = #{name => <<"Running">>, type => <<"topic">>,
topic => <<"/x/y/1">>, start_at => to_rfc3339(Now - 1),
end_at => to_rfc3339(Now + 2)},
Waiting = [{<<"name">>, <<"Waiting">>}, {<<"type">>, <<"topic">>},
{<<"topic">>, <<"/x/y/2">>}, {<<"start_at">>, to_rfc3339(Now + 3)},
{<<"end_at">>, to_rfc3339(Now + 8)}],
Finished = [{<<"name">>, <<"Finished">>}, {<<"type">>, <<"topic">>},
{<<"topic">>, <<"/x/y/3">>}, {<<"start_at">>, to_rfc3339(Now - 5)},
{<<"end_at">>, to_rfc3339(Now)}],
ok = emqx_trace:create(Running),
ok = emqx_trace:create(Waiting),
{error, "end_at time has already passed"} = emqx_trace:create(Finished),
Traces = emqx_trace:format(emqx_trace:list()),
?assertEqual(2, erlang:length(Traces)),
Enables = lists:map(fun(#{name := Name, enable := Enable}) -> {Name, Enable} end, Traces),
ExpectEnables = [{<<"Running">>, true}, {<<"Waiting">>, true}],
?assertEqual(ExpectEnables, lists:sort(Enables)),
ct:sleep(3500),
Traces2 = emqx_trace:format(emqx_trace:list()),
?assertEqual(2, erlang:length(Traces2)),
Enables2 = lists:map(fun(#{name := Name, enable := Enable}) -> {Name, Enable} end, Traces2),
ExpectEnables2 = [{<<"Running">>, false}, {<<"Waiting">>, true}],
?assertEqual(ExpectEnables2, lists:sort(Enables2)),
ok.
t_client_event(_Config) ->
application:set_env(emqx, allow_anonymous, true),
ClientId = <<"client-test">>,
Now = erlang:system_time(second),
Start = to_rfc3339(Now),
Name = <<"test_client_id_event">>,
ok = emqx_trace:create([{<<"name">>, Name},
{<<"type">>, <<"clientid">>}, {<<"clientid">>, ClientId}, {<<"start_at">>, Start}]),
ok = emqx_trace_handler_SUITE:filesync(Name, clientid),
{ok, Client} = emqtt:start_link([{clean_start, true}, {clientid, ClientId}]),
{ok, _} = emqtt:connect(Client),
emqtt:ping(Client),
ok = emqtt:publish(Client, <<"/test">>, #{}, <<"1">>, [{qos, 0}]),
ok = emqtt:publish(Client, <<"/test">>, #{}, <<"2">>, [{qos, 0}]),
ok = emqx_trace_handler_SUITE:filesync(Name, clientid),
ok = emqx_trace:create([{<<"name">>, <<"test_topic">>},
{<<"type">>, <<"topic">>}, {<<"topic">>, <<"/test">>}, {<<"start_at">>, Start}]),
ok = emqx_trace_handler_SUITE:filesync(<<"test_topic">>, topic),
{ok, Bin} = file:read_file(emqx_trace:log_file(Name, Now)),
ok = emqtt:publish(Client, <<"/test">>, #{}, <<"3">>, [{qos, 0}]),
ok = emqtt:publish(Client, <<"/test">>, #{}, <<"4">>, [{qos, 0}]),
ok = emqtt:disconnect(Client),
ok = emqx_trace_handler_SUITE:filesync(Name, clientid),
ok = emqx_trace_handler_SUITE:filesync(<<"test_topic">>, topic),
{ok, Bin2} = file:read_file(emqx_trace:log_file(Name, Now)),
{ok, Bin3} = file:read_file(emqx_trace:log_file(<<"test_topic">>, Now)),
ct:pal("Bin ~p Bin2 ~p Bin3 ~p", [byte_size(Bin), byte_size(Bin2), byte_size(Bin3)]),
?assert(erlang:byte_size(Bin) > 0),
?assert(erlang:byte_size(Bin) < erlang:byte_size(Bin2)),
?assert(erlang:byte_size(Bin3) > 0),
ok.
t_get_log_filename(_Config) ->
Now = erlang:system_time(second),
Start = calendar:system_time_to_rfc3339(Now),
End = calendar:system_time_to_rfc3339(Now + 2),
Name = <<"name1">>,
Trace = [
{<<"name">>, Name},
{<<"type">>, <<"ip_address">>},
{<<"ip_address">>, <<"127.0.0.1">>},
{<<"start_at">>, list_to_binary(Start)},
{<<"end_at">>, list_to_binary(End)}
],
ok = emqx_trace:create(Trace),
?assertEqual({error, not_found}, emqx_trace:get_trace_filename(<<"test">>)),
?assertEqual(ok, element(1, emqx_trace:get_trace_filename(Name))),
ct:sleep(3000),
?assertEqual(ok, element(1, emqx_trace:get_trace_filename(Name))),
ok.
t_trace_file(_Config) ->
FileName = "test.log",
Content = <<"test \n test">>,
TraceDir = emqx_trace:trace_dir(),
File = filename:join(TraceDir, FileName),
ok = file:write_file(File, Content),
{ok, Node, Bin} = emqx_trace:trace_file(FileName),
?assertEqual(Node, atom_to_list(node())),
?assertEqual(Content, Bin),
ok = file:delete(File),
ok.
t_download_log(_Config) ->
ClientId = <<"client-test">>,
Now = erlang:system_time(second),
Start = to_rfc3339(Now),
Name = <<"test_client_id">>,
ok = emqx_trace:create([{<<"name">>, Name},
{<<"type">>, <<"clientid">>}, {<<"clientid">>, ClientId}, {<<"start_at">>, Start}]),
{ok, Client} = emqtt:start_link([{clean_start, true}, {clientid, ClientId}]),
{ok, _} = emqtt:connect(Client),
    [begin _ = emqtt:ping(Client) end || _ <- lists:seq(1, 5)],
ok = emqx_trace_handler_SUITE:filesync(Name, clientid),
{ok, ZipFile} = emqx_trace_api:download_zip_log(#{name => Name}, []),
?assert(filelib:file_size(ZipFile) > 0),
ok = emqtt:disconnect(Client),
ok.
t_find_closed_time(_Config) ->
DefaultMs = 60 * 15000,
Now = erlang:system_time(second),
Traces2 = [],
?assertEqual(DefaultMs, emqx_trace:find_closest_time(Traces2, Now)),
Traces3 = [#emqx_trace{name = <<"disable">>, start_at = Now + 1,
end_at = Now + 2, enable = false}],
?assertEqual(DefaultMs, emqx_trace:find_closest_time(Traces3, Now)),
Traces4 = [#emqx_trace{name = <<"running">>, start_at = Now, end_at = Now + 10, enable = true}],
?assertEqual(10000, emqx_trace:find_closest_time(Traces4, Now)),
Traces5 = [#emqx_trace{name = <<"waiting">>, start_at = Now + 2,
end_at = Now + 10, enable = true}],
?assertEqual(2000, emqx_trace:find_closest_time(Traces5, Now)),
Traces = [
#emqx_trace{name = <<"waiting">>, start_at = Now + 1, end_at = Now + 2, enable = true},
#emqx_trace{name = <<"running0">>, start_at = Now, end_at = Now + 5, enable = true},
#emqx_trace{name = <<"running1">>, start_at = Now - 1, end_at = Now + 1, enable = true},
#emqx_trace{name = <<"finished">>, start_at = Now - 2, end_at = Now - 1, enable = true},
#emqx_trace{name = <<"waiting">>, start_at = Now + 1, end_at = Now + 1, enable = true},
#emqx_trace{name = <<"stopped">>, start_at = Now, end_at = Now + 10, enable = false}
],
?assertEqual(1000, emqx_trace:find_closest_time(Traces, Now)),
ok.
to_rfc3339(Second) ->
list_to_binary(calendar:system_time_to_rfc3339(Second)).
load() ->
emqx_trace:start_link().
unload() ->
gen_server:stop(emqx_trace).

View File

@ -1,6 +1,6 @@
{application, emqx_retainer, {application, emqx_retainer,
[{description, "EMQ X Retainer"}, [{description, "EMQ X Retainer"},
{vsn, "4.3.3"}, % strict semver, bump manually! {vsn, "4.4.1"}, % strict semver, bump manually!
{modules, []}, {modules, []},
{registered, [emqx_retainer_sup]}, {registered, [emqx_retainer_sup]},
{applications, [kernel,stdlib]}, {applications, [kernel,stdlib]},

View File

@ -1,14 +1,7 @@
%% -*- mode: erlang -*- %% -*- mode: erlang -*-
{VSN, {VSN,
[{"4.3.2", [{"4.4.0",[{load_module,emqx_retainer_cli,brutal_purge,soft_purge,[]}]},
[{load_module,emqx_retainer_cli,brutal_purge,soft_purge,[]}]},
{<<"4\\.3\\.[0-1]">>,
[{load_module,emqx_retainer_cli,brutal_purge,soft_purge,[]},
{load_module,emqx_retainer,brutal_purge,soft_purge,[]}]},
{<<".*">>,[]}], {<<".*">>,[]}],
[{"4.3.2", [{"4.4.0",[{load_module,emqx_retainer_cli,brutal_purge,soft_purge,[]}]},
[{load_module,emqx_retainer_cli,brutal_purge,soft_purge,[]}]}, {<<".*">>,[]}]
{<<"4\\.3\\.[0-1]">>, }.
[{load_module,emqx_retainer_cli,brutal_purge,soft_purge,[]},
{load_module,emqx_retainer,brutal_purge,soft_purge,[]}]},
{<<".*">>,[]}]}.

View File

@ -78,7 +78,8 @@ dispatch(Pid, Topic) ->
false -> read_messages(Topic); false -> read_messages(Topic);
true -> match_messages(Topic) true -> match_messages(Topic)
end, end,
[Pid ! {deliver, Topic, Msg} || Msg <- sort_retained(Msgs)]. Now = erlang:system_time(millisecond),
[Pid ! {deliver, Topic, refresh_timestamp_expiry(Msg, Now)} || Msg <- sort_retained(Msgs)].
%% RETAIN flag set to 1 and payload containing zero bytes %% RETAIN flag set to 1 and payload containing zero bytes
on_message_publish(Msg = #message{flags = #{retain := true}, on_message_publish(Msg = #message{flags = #{retain := true},
@ -151,7 +152,7 @@ init([Env]) ->
ok ok
end, end,
StatsFun = emqx_stats:statsfun('retained.count', 'retained.max'), StatsFun = emqx_stats:statsfun('retained.count', 'retained.max'),
{ok, StatsTimer} = timer:send_interval(timer:seconds(1), stats), StatsTimer = erlang:send_after(timer:seconds(1), self(), stats),
State = #state{stats_fun = StatsFun, stats_timer = StatsTimer}, State = #state{stats_fun = StatsFun, stats_timer = StatsTimer},
{ok, start_expire_timer(proplists:get_value(expiry_interval, Env, 0), State)}. {ok, start_expire_timer(proplists:get_value(expiry_interval, Env, 0), State)}.
@ -160,7 +161,7 @@ start_expire_timer(0, State) ->
start_expire_timer(undefined, State) -> start_expire_timer(undefined, State) ->
State; State;
start_expire_timer(Ms, State) -> start_expire_timer(Ms, State) ->
{ok, Timer} = timer:send_interval(Ms, expire), Timer = erlang:send_after(Ms, self(), {expire, Ms}),
State#state{expiry_timer = Timer}. State#state{expiry_timer = Timer}.
handle_call(Req, _From, State) -> handle_call(Req, _From, State) ->
@ -172,12 +173,14 @@ handle_cast(Msg, State) ->
{noreply, State}. {noreply, State}.
handle_info(stats, State = #state{stats_fun = StatsFun}) -> handle_info(stats, State = #state{stats_fun = StatsFun}) ->
StatsTimer = erlang:send_after(timer:seconds(1), self(), stats),
StatsFun(retained_count()), StatsFun(retained_count()),
{noreply, State, hibernate}; {noreply, State#state{stats_timer = StatsTimer}, hibernate};
handle_info(expire, State) -> handle_info({expire, Ms} = Expire, State) ->
Timer = erlang:send_after(Ms, self(), Expire),
ok = expire_messages(), ok = expire_messages(),
{noreply, State, hibernate}; {noreply, State#state{expiry_timer = Timer}, hibernate};
handle_info(Info, State) -> handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]), ?LOG(error, "Unexpected info: ~p", [Info]),
@ -214,11 +217,13 @@ store_retained(Msg = #message{topic = Topic, payload = Payload}, Env) ->
fun() -> fun() ->
case mnesia:read(?TAB, Topic) of case mnesia:read(?TAB, Topic) of
[_] -> [_] ->
mnesia:write(?TAB, #retained{topic = topic2tokens(Topic), mnesia:write(?TAB,
#retained{topic = topic2tokens(Topic),
msg = Msg, msg = Msg,
expiry_time = get_expiry_time(Msg, Env)}, write); expiry_time = get_expiry_time(Msg, Env)}, write);
[] -> [] ->
?LOG(error, "Cannot retain message(topic=~s) for table is full!", [Topic]) ?LOG(error,
"Cannot retain message(topic=~s) for table is full!", [Topic])
end end
end), end),
ok; ok;
@ -242,7 +247,8 @@ is_too_big(Size, Env) ->
get_expiry_time(#message{headers = #{properties := #{'Message-Expiry-Interval' := 0}}}, _Env) -> get_expiry_time(#message{headers = #{properties := #{'Message-Expiry-Interval' := 0}}}, _Env) ->
0; 0;
get_expiry_time(#message{headers = #{properties := #{'Message-Expiry-Interval' := Interval}}, timestamp = Ts}, _Env) -> get_expiry_time(#message{headers = #{properties := #{'Message-Expiry-Interval' := Interval}},
timestamp = Ts}, _Env) ->
Ts + Interval * 1000; Ts + Interval * 1000;
get_expiry_time(#message{timestamp = Ts}, Env) -> get_expiry_time(#message{timestamp = Ts}, Env) ->
case proplists:get_value(expiry_interval, Env, 0) of case proplists:get_value(expiry_interval, Env, 0) of
@ -311,3 +317,18 @@ condition(Ws) ->
false -> Ws1; false -> Ws1;
_ -> (Ws1 -- ['#']) ++ '_' _ -> (Ws1 -- ['#']) ++ '_'
end. end.
-spec(refresh_timestamp_expiry(emqx_types:message(), pos_integer()) -> emqx_types:message()).
refresh_timestamp_expiry(Msg = #message{headers =
#{properties :=
#{'Message-Expiry-Interval' := Interval} = Props},
timestamp = CreatedAt},
Now) ->
Elapsed = max(0, Now - CreatedAt),
Interval1 = max(1, Interval - (Elapsed div 1000)),
emqx_message:set_header(properties,
Props#{'Message-Expiry-Interval' => Interval1},
Msg#message{timestamp = Now});
refresh_timestamp_expiry(Msg, Now) ->
Msg#message{timestamp = Now}.
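%% Illustrative example (not part of this change, hypothetical values): a retained
%% message created at CreatedAt = 10000 ms with 'Message-Expiry-Interval' = 30 s,
%% dispatched at Now = 25000 ms, is delivered with timestamp = Now and the interval
%% reduced to 30 - 15 = 15 s; if the elapsed time exceeds the interval, it is clamped
%% to the minimum of 1 s.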

View File

@ -1,6 +1,6 @@
{application, emqx_rule_engine, {application, emqx_rule_engine,
[{description, "EMQ X Rule Engine"}, [{description, "EMQ X Rule Engine"},
{vsn, "4.3.7"}, % strict semver, bump manually! {vsn, "4.4.1"}, % strict semver, bump manually!
{modules, []}, {modules, []},
{registered, [emqx_rule_engine_sup, emqx_rule_registry]}, {registered, [emqx_rule_engine_sup, emqx_rule_registry]},
{applications, [kernel,stdlib,rulesql,getopt]}, {applications, [kernel,stdlib,rulesql,getopt]},

View File

@ -1,135 +1,19 @@
%% -*- mode: erlang -*- %% -*- mode: erlang -*-
{VSN, {VSN,
[ [{"4.4.0",
{"4.3.6", [ {update, emqx_rule_metrics, {advanced, ["4.4.0"]}}
[ {update, emqx_rule_metrics, {advanced, ["4.3.6"]}}
, {load_module,emqx_rule_events,brutal_purge,soft_purge,[]} , {load_module,emqx_rule_events,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_engine,brutal_purge,soft_purge,[]} , {load_module,emqx_rule_engine,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]} , {load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]} , {load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}
]}, ]},
{"4.3.5", {<<".*">>,[]}],
[ {update, emqx_rule_metrics, {advanced, ["4.3.5"]}} [{"4.4.0",
, {load_module,emqx_rule_events,brutal_purge,soft_purge,[]} [ {update, emqx_rule_metrics, {advanced, ["4.4.0"]}}
, {load_module,emqx_rule_registry,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_engine,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}
]},
{"4.3.4",
[ {update, emqx_rule_metrics, {advanced, ["4.3.4"]}}
, {load_module,emqx_rule_events,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_registry,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_engine,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}
]},
{"4.3.3",
[ {update, emqx_rule_metrics, {advanced, ["4.3.3"]}}
, {load_module,emqx_rule_events,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_registry,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_actions,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_engine,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}
]},
{"4.3.2",
[ {update, emqx_rule_metrics, {advanced, ["4.3.2"]}}
, {load_module,emqx_rule_events,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_registry,brutal_purge,soft_purge,[]}
, {apply,{emqx_stats,cancel_update,[rule_registery_stats]}}
, {load_module,emqx_rule_engine,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_actions,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}
]},
{"4.3.1",
[ {update, emqx_rule_metrics, {advanced, ["4.3.1"]}}
, {load_module,emqx_rule_events,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_engine,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_registry,brutal_purge,soft_purge,[]}
, {apply,{emqx_stats,cancel_update,[rule_registery_stats]}}
, {load_module,emqx_rule_actions,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}
]},
{"4.3.0",
[ {update, emqx_rule_metrics, {advanced, ["4.3.0"]}}
, {load_module,emqx_rule_events,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_engine,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_registry,brutal_purge,soft_purge,[]}
, {apply,{emqx_stats,cancel_update,[rule_registery_stats]}}
, {load_module,emqx_rule_actions,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}
]},
{<<".*">>, []}
],
[
{"4.3.6",
[ {update, emqx_rule_metrics, {advanced, ["4.3.6"]}}
, {load_module,emqx_rule_events,brutal_purge,soft_purge,[]} , {load_module,emqx_rule_events,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_engine,brutal_purge,soft_purge,[]} , {load_module,emqx_rule_engine,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]} , {load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]} , {load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}
]}, ]},
{"4.3.5", {<<".*">>,[]}]
[ {update, emqx_rule_metrics, {advanced, ["4.3.5"]}}
, {load_module,emqx_rule_events,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_registry,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_engine,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}
]},
{"4.3.4",
[ {update, emqx_rule_metrics, {advanced, ["4.3.4"]}}
, {load_module,emqx_rule_events,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_registry,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_engine,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}
]},
{"4.3.3",
[ {update, emqx_rule_metrics, {advanced, ["4.3.3"]}}
, {load_module,emqx_rule_events,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_registry,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_actions,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_engine,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}
]},
{"4.3.2",
[ {update, emqx_rule_metrics, {advanced, ["4.3.2"]}}
, {load_module,emqx_rule_events,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_registry,brutal_purge,soft_purge,[]}
, {apply,{emqx_stats,cancel_update,[rule_registery_stats]}}
, {load_module,emqx_rule_engine,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_actions,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}
]},
{"4.3.1",
[ {update, emqx_rule_metrics, {advanced, ["4.3.1"]}}
, {load_module,emqx_rule_events,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_engine,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_registry,brutal_purge,soft_purge,[]}
, {apply,{emqx_stats,cancel_update,[rule_registery_stats]}}
, {load_module,emqx_rule_actions,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}
]},
{"4.3.0",
[ {update, emqx_rule_metrics, {advanced, ["4.3.0"]}}
, {load_module,emqx_rule_events,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_funcs,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_engine,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_registry,brutal_purge,soft_purge,[]}
, {apply,{emqx_stats,cancel_update,[rule_registery_stats]}}
, {load_module,emqx_rule_actions,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_runtime,brutal_purge,soft_purge,[]}
, {load_module,emqx_rule_engine_api,brutal_purge,soft_purge,[]}
]},
{<<".*">>, []}
]
}. }.

View File

@ -504,8 +504,8 @@ columns_with_exam('message.publish') ->
, {<<"topic">>, <<"t/a">>} , {<<"topic">>, <<"t/a">>}
, {<<"qos">>, 1} , {<<"qos">>, 1}
, {<<"flags">>, #{}} , {<<"flags">>, #{}}
, {<<"headers">>, undefined}
, {<<"publish_received_at">>, erlang:system_time(millisecond)} , {<<"publish_received_at">>, erlang:system_time(millisecond)}
, columns_example_props(pub_props)
, {<<"timestamp">>, erlang:system_time(millisecond)} , {<<"timestamp">>, erlang:system_time(millisecond)}
, {<<"node">>, node()} , {<<"node">>, node()}
]; ];
@ -522,6 +522,7 @@ columns_with_exam('message.delivered') ->
, {<<"qos">>, 1} , {<<"qos">>, 1}
, {<<"flags">>, #{}} , {<<"flags">>, #{}}
, {<<"publish_received_at">>, erlang:system_time(millisecond)} , {<<"publish_received_at">>, erlang:system_time(millisecond)}
, columns_example_props(pub_props)
, {<<"timestamp">>, erlang:system_time(millisecond)} , {<<"timestamp">>, erlang:system_time(millisecond)}
, {<<"node">>, node()} , {<<"node">>, node()}
]; ];
@ -538,6 +539,8 @@ columns_with_exam('message.acked') ->
, {<<"qos">>, 1} , {<<"qos">>, 1}
, {<<"flags">>, #{}} , {<<"flags">>, #{}}
, {<<"publish_received_at">>, erlang:system_time(millisecond)} , {<<"publish_received_at">>, erlang:system_time(millisecond)}
, columns_example_props(pub_props)
, columns_example_props(puback_props)
, {<<"timestamp">>, erlang:system_time(millisecond)} , {<<"timestamp">>, erlang:system_time(millisecond)}
, {<<"node">>, node()} , {<<"node">>, node()}
]; ];
@ -553,6 +556,7 @@ columns_with_exam('message.dropped') ->
, {<<"qos">>, 1} , {<<"qos">>, 1}
, {<<"flags">>, #{}} , {<<"flags">>, #{}}
, {<<"publish_received_at">>, erlang:system_time(millisecond)} , {<<"publish_received_at">>, erlang:system_time(millisecond)}
, columns_example_props(pub_props)
, {<<"timestamp">>, erlang:system_time(millisecond)} , {<<"timestamp">>, erlang:system_time(millisecond)}
, {<<"node">>, node()} , {<<"node">>, node()}
]; ];
@ -587,6 +591,7 @@ columns_with_exam('client.connected') ->
, {<<"expiry_interval">>, 3600} , {<<"expiry_interval">>, 3600}
, {<<"is_bridge">>, false} , {<<"is_bridge">>, false}
, {<<"connected_at">>, erlang:system_time(millisecond)} , {<<"connected_at">>, erlang:system_time(millisecond)}
, columns_example_props(conn_props)
, {<<"timestamp">>, erlang:system_time(millisecond)} , {<<"timestamp">>, erlang:system_time(millisecond)}
, {<<"node">>, node()} , {<<"node">>, node()}
]; ];
@ -598,6 +603,7 @@ columns_with_exam('client.disconnected') ->
, {<<"peername">>, <<"192.168.0.10:56431">>} , {<<"peername">>, <<"192.168.0.10:56431">>}
, {<<"sockname">>, <<"0.0.0.0:1883">>} , {<<"sockname">>, <<"0.0.0.0:1883">>}
, {<<"disconnected_at">>, erlang:system_time(millisecond)} , {<<"disconnected_at">>, erlang:system_time(millisecond)}
, columns_example_props(disconn_props)
, {<<"timestamp">>, erlang:system_time(millisecond)} , {<<"timestamp">>, erlang:system_time(millisecond)}
, {<<"node">>, node()} , {<<"node">>, node()}
]; ];
@ -608,6 +614,7 @@ columns_with_exam('session.subscribed') ->
, {<<"peerhost">>, <<"192.168.0.10">>} , {<<"peerhost">>, <<"192.168.0.10">>}
, {<<"topic">>, <<"t/a">>} , {<<"topic">>, <<"t/a">>}
, {<<"qos">>, 1} , {<<"qos">>, 1}
, columns_example_props(sub_props)
, {<<"timestamp">>, erlang:system_time(millisecond)} , {<<"timestamp">>, erlang:system_time(millisecond)}
, {<<"node">>, node()} , {<<"node">>, node()}
]; ];
@ -618,10 +625,42 @@ columns_with_exam('session.unsubscribed') ->
, {<<"peerhost">>, <<"192.168.0.10">>} , {<<"peerhost">>, <<"192.168.0.10">>}
, {<<"topic">>, <<"t/a">>} , {<<"topic">>, <<"t/a">>}
, {<<"qos">>, 1} , {<<"qos">>, 1}
, columns_example_props(unsub_props)
, {<<"timestamp">>, erlang:system_time(millisecond)} , {<<"timestamp">>, erlang:system_time(millisecond)}
, {<<"node">>, node()} , {<<"node">>, node()}
]. ].
columns_example_props(PropType) ->
Props = columns_example_props_specific(PropType),
UserProps = #{
'User-Property' => #{<<"foo">> => <<"bar">>},
'User-Property-Pairs' => [
#{key => <<"foo">>}, #{value => <<"bar">>}
]
},
{PropType, maps:merge(Props, UserProps)}.
columns_example_props_specific(pub_props) ->
#{ 'Payload-Format-Indicator' => 0
, 'Message-Expiry-Interval' => 30
};
columns_example_props_specific(puback_props) ->
#{ 'Reason-String' => <<"OK">>
};
columns_example_props_specific(conn_props) ->
#{ 'Session-Expiry-Interval' => 7200
, 'Receive-Maximum' => 32
};
columns_example_props_specific(disconn_props) ->
#{ 'Session-Expiry-Interval' => 7200
, 'Reason-String' => <<"Redirect to another server">>
, 'Server-Reference' => <<"192.168.22.129">>
};
columns_example_props_specific(sub_props) ->
#{};
columns_example_props_specific(unsub_props) ->
#{}.
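For orientation (not part of the diff itself), the tuple built by columns_example_props(pub_props) from the clauses above would look roughly like this sketch:

%% Sketch of the merged example-property entry appended to the columns:
{pub_props, #{ 'Payload-Format-Indicator' => 0
             , 'Message-Expiry-Interval' => 30
             , 'User-Property' => #{<<"foo">> => <<"bar">>}
             , 'User-Property-Pairs' => [#{key => <<"foo">>, value => <<"bar">>}]
             }}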
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Helper functions %% Helper functions
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -681,6 +720,10 @@ printable_maps(Headers) ->
AccIn#{K => ntoa(V0)}; AccIn#{K => ntoa(V0)};
('User-Property', V0, AccIn) when is_list(V0) -> ('User-Property', V0, AccIn) when is_list(V0) ->
AccIn#{ AccIn#{
%% The 'User-Property' field is for the convenience of querying properties
%% using the '.' syntax, e.g. "SELECT 'User-Property'.foo as foo"
%% However, this does not allow duplicate property keys. To allow
%% duplicate keys, we have to use the 'User-Property-Pairs' field instead.
'User-Property' => maps:from_list(V0), 'User-Property' => maps:from_list(V0),
'User-Property-Pairs' => [#{ 'User-Property-Pairs' => [#{
key => Key, key => Key,

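The duplicate-key limitation described in the comment above is easy to see in a shell; the following is an illustration only, not part of the change:

%% maps:from_list/1 keeps only the right-most value for a repeated key,
%% so duplicate MQTT User-Property keys collapse in 'User-Property' ...
1> maps:from_list([{<<"k">>, <<"v1">>}, {<<"k">>, <<"v2">>}]).
#{<<"k">> => <<"v2">>}
%% ... whereas the 'User-Property-Pairs' list keeps every pair:
2> [#{key => K, value => V} || {K, V} <- [{<<"k">>, <<"v1">>}, {<<"k">>, <<"v2">>}]].
[#{key => <<"k">>,value => <<"v1">>},#{key => <<"k">>,value => <<"v2">>}]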
View File

@ -17,6 +17,9 @@
-module(emqx_rule_funcs). -module(emqx_rule_funcs).
-include("rule_engine.hrl"). -include("rule_engine.hrl").
-elvis([{elvis_style, god_modules, disable}]).
-elvis([{elvis_style, function_naming_convention, disable}]).
-elvis([{elvis_style, macro_names, disable}]).
%% IoT Funcs %% IoT Funcs
-export([ msgid/0 -export([ msgid/0
@ -438,7 +441,8 @@ subbits(Bits, Len) when is_integer(Len), is_bitstring(Bits) ->
subbits(Bits, Start, Len) when is_integer(Start), is_integer(Len), is_bitstring(Bits) -> subbits(Bits, Start, Len) when is_integer(Start), is_integer(Len), is_bitstring(Bits) ->
get_subbits(Bits, Start, Len, <<"integer">>, <<"unsigned">>, <<"big">>). get_subbits(Bits, Start, Len, <<"integer">>, <<"unsigned">>, <<"big">>).
subbits(Bits, Start, Len, Type, Signedness, Endianness) when is_integer(Start), is_integer(Len), is_bitstring(Bits) -> subbits(Bits, Start, Len, Type, Signedness, Endianness)
when is_integer(Start), is_integer(Len), is_bitstring(Bits) ->
get_subbits(Bits, Start, Len, Type, Signedness, Endianness). get_subbits(Bits, Start, Len, Type, Signedness, Endianness).
get_subbits(Bits, Start, Len, Type, Signedness, Endianness) -> get_subbits(Bits, Start, Len, Type, Signedness, Endianness) ->
@ -520,7 +524,7 @@ map(Data) ->
emqx_rule_utils:map(Data). emqx_rule_utils:map(Data).
bin2hexstr(Bin) when is_binary(Bin) -> bin2hexstr(Bin) when is_binary(Bin) ->
emqx_misc:bin2hexstr_A_F(Bin). emqx_misc:bin2hexstr_a_f_upper(Bin).
hexstr2bin(Str) when is_binary(Str) -> hexstr2bin(Str) when is_binary(Str) ->
emqx_misc:hexstr2bin(Str). emqx_misc:hexstr2bin(Str).
@ -608,7 +612,8 @@ tokens(S, Separators) ->
[list_to_binary(R) || R <- string:lexemes(binary_to_list(S), binary_to_list(Separators))]. [list_to_binary(R) || R <- string:lexemes(binary_to_list(S), binary_to_list(Separators))].
tokens(S, Separators, <<"nocrlf">>) -> tokens(S, Separators, <<"nocrlf">>) ->
[list_to_binary(R) || R <- string:lexemes(binary_to_list(S), binary_to_list(Separators) ++ [$\r,$\n,[$\r,$\n]])]. [list_to_binary(R) || R <- string:lexemes(binary_to_list(S),
binary_to_list(Separators) ++ [$\r,$\n,[$\r,$\n]])].
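A quick sketch of what the re-wrapped tokens/3 clause returns (illustration only; it assumes the clause is exported as the other rule SQL functions are):

%% "nocrlf" adds CR, LF and CRLF to the separator set, so line breaks
%% never end up inside the returned tokens:
1> emqx_rule_funcs:tokens(<<"a,b\r\nc\n">>, <<",">>, <<"nocrlf">>).
[<<"a">>,<<"b">>,<<"c">>]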
concat(S1, S2) when is_binary(S1), is_binary(S2) -> concat(S1, S2) when is_binary(S1), is_binary(S2) ->
unicode:characters_to_binary([S1, S2], unicode). unicode:characters_to_binary([S1, S2], unicode).
@ -646,7 +651,8 @@ replace(SrcStr, P, RepStr) when is_binary(SrcStr), is_binary(P), is_binary(RepSt
replace(SrcStr, P, RepStr, <<"all">>) when is_binary(SrcStr), is_binary(P), is_binary(RepStr) -> replace(SrcStr, P, RepStr, <<"all">>) when is_binary(SrcStr), is_binary(P), is_binary(RepStr) ->
iolist_to_binary(string:replace(SrcStr, P, RepStr, all)); iolist_to_binary(string:replace(SrcStr, P, RepStr, all));
replace(SrcStr, P, RepStr, <<"trailing">>) when is_binary(SrcStr), is_binary(P), is_binary(RepStr) -> replace(SrcStr, P, RepStr, <<"trailing">>)
when is_binary(SrcStr), is_binary(P), is_binary(RepStr) ->
iolist_to_binary(string:replace(SrcStr, P, RepStr, trailing)); iolist_to_binary(string:replace(SrcStr, P, RepStr, trailing));
replace(SrcStr, P, RepStr, <<"leading">>) when is_binary(SrcStr), is_binary(P), is_binary(RepStr) -> replace(SrcStr, P, RepStr, <<"leading">>) when is_binary(SrcStr), is_binary(P), is_binary(RepStr) ->
@ -782,7 +788,7 @@ sha256(S) when is_binary(S) ->
hash(sha256, S). hash(sha256, S).
hash(Type, Data) -> hash(Type, Data) ->
emqx_misc:bin2hexstr_a_f(crypto:hash(Type, Data)). emqx_misc:bin2hexstr_a_f_lower(crypto:hash(Type, Data)).
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
%% Data encode and decode Funcs %% Data encode and decode Funcs

View File

@ -78,6 +78,8 @@
, terminate/2 , terminate/2
]). ]).
-elvis([{elvis_style, god_modules, disable}]).
-ifndef(TEST). -ifndef(TEST).
-define(SECS_5M, 300). -define(SECS_5M, 300).
-define(SAMPLING, 10). -define(SAMPLING, 10).
@ -326,9 +328,9 @@ handle_info(_Info, State) ->
code_change({down, _Vsn}, State = #state{metric_ids = MIDs}, [Vsn]) -> code_change({down, _Vsn}, State = #state{metric_ids = MIDs}, [Vsn]) ->
case string:tokens(Vsn, ".") of case string:tokens(Vsn, ".") of
["4", "3", SVal] -> ["4", "4", SVal] ->
{Val, []} = string:to_integer(SVal), {Val, []} = string:to_integer(SVal),
case Val =< 6 of case Val == 0 of
true -> true ->
[begin [begin
Passed = get_rules_passed(Id), Passed = get_rules_passed(Id),
@ -354,9 +356,9 @@ code_change({down, _Vsn}, State = #state{metric_ids = MIDs}, [Vsn]) ->
code_change(_Vsn, State = #state{metric_ids = MIDs}, [Vsn]) -> code_change(_Vsn, State = #state{metric_ids = MIDs}, [Vsn]) ->
case string:tokens(Vsn, ".") of case string:tokens(Vsn, ".") of
["4", "3", SVal] -> ["4", "4", SVal] ->
{Val, []} = string:to_integer(SVal), {Val, []} = string:to_integer(SVal),
case Val =< 6 of case Val == 0 of
true -> true ->
[begin [begin
Matched = get_rules_matched(Id), Matched = get_rules_matched(Id),
@ -428,17 +430,19 @@ calculate_speed(CurrVal, #rule_speed{max = MaxSpeed0, last_v = LastVal,
%% calculate the max speed since the emqx startup %% calculate the max speed since the emqx startup
MaxSpeed = MaxSpeed =
if MaxSpeed0 >= CurrSpeed -> MaxSpeed0; case MaxSpeed0 >= CurrSpeed of
true -> CurrSpeed true -> MaxSpeed0;
false -> CurrSpeed
end, end,
%% calculate the average speed in last 5 mins %% calculate the average speed in last 5 mins
{Last5MinSamples, Acc5Min, Last5Min} = {Last5MinSamples, Acc5Min, Last5Min} =
if Tick =< ?SAMPCOUNT_5M -> case Tick =< ?SAMPCOUNT_5M of
true ->
Acc = AccSpeed5Min0 + CurrSpeed, Acc = AccSpeed5Min0 + CurrSpeed,
{lists:reverse([CurrSpeed | lists:reverse(Last5MinSamples0)]), {lists:reverse([CurrSpeed | lists:reverse(Last5MinSamples0)]),
Acc, Acc / Tick}; Acc, Acc / Tick};
true -> false ->
[FirstSpeed | Speeds] = Last5MinSamples0, [FirstSpeed | Speeds] = Last5MinSamples0,
Acc = AccSpeed5Min0 + CurrSpeed - FirstSpeed, Acc = AccSpeed5Min0 + CurrSpeed - FirstSpeed,
{lists:reverse([CurrSpeed | lists:reverse(Speeds)]), {lists:reverse([CurrSpeed | lists:reverse(Speeds)]),
@ -450,7 +454,7 @@ calculate_speed(CurrVal, #rule_speed{max = MaxSpeed0, last_v = LastVal,
last5m_smpl = Last5MinSamples, tick = Tick + 1}. last5m_smpl = Last5MinSamples, tick = Tick + 1}.
format_rule_speed(#rule_speed{max = Max, current = Current, last5m = Last5Min}) -> format_rule_speed(#rule_speed{max = Max, current = Current, last5m = Last5Min}) ->
#{max => Max, current => precision(Current, 2), last5m => precision(Last5Min, 2)}. #{max => precision(Max, 2), current => precision(Current, 2), last5m => precision(Last5Min, 2)}.
precision(Float, N) -> precision(Float, N) ->
Base = math:pow(10, N), Base = math:pow(10, N),

View File
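To make the one-line tweak above concrete, a sketch of precision/2 as used here, assuming the usual round(Float * Base) / Base body that the excerpt truncates:

%% With max now also passed through precision/2, all three speeds are
%% reported with two decimal places:
%% precision(15.34567, 2)  -> 15.35
%% format_rule_speed(#rule_speed{max = 15.34567, current = 0.0, last5m = 3.333})
%%   -> #{max => 15.35, current => 0.0, last5m => 3.33}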

@ -98,21 +98,8 @@ sql_test_action() ->
fill_default_values(Event, Context) -> fill_default_values(Event, Context) ->
maps:merge(envs_examp(Event), Context). maps:merge(envs_examp(Event), Context).
envs_examp(<<"$events/", _/binary>> = EVENT_TOPIC) -> envs_examp(EVENT_TOPIC) ->
EventName = emqx_rule_events:event_name(EVENT_TOPIC), EventName = emqx_rule_events:event_name(EVENT_TOPIC),
emqx_rule_maps:atom_key_map( emqx_rule_maps:atom_key_map(
maps:from_list( maps:from_list(
emqx_rule_events:columns_with_exam(EventName))); emqx_rule_events:columns_with_exam(EventName))).
envs_examp(_) ->
#{id => emqx_guid:to_hexstr(emqx_guid:gen()),
clientid => <<"c_emqx">>,
username => <<"u_emqx">>,
payload => <<"{\"id\": 1, \"name\": \"ha\"}">>,
peerhost => <<"127.0.0.1">>,
topic => <<"t/a">>,
qos => 1,
flags => #{sys => true, event => true},
publish_received_at => emqx_rule_utils:now_ms(),
timestamp => emqx_rule_utils:now_ms(),
node => node()
}.
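Since every event now goes through columns_with_exam/1, the only remaining knob is the Context argument; a quick sketch of the merge direction (illustration only, values shortened):

%% maps:merge(Defaults, Context): keys present in Context win, so the
%% caller can override any generated example column:
1> maps:merge(#{clientid => <<"c_emqx">>, qos => 1}, #{clientid => <<"my_client">>}).
#{clientid => <<"my_client">>,qos => 1}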

View File

@ -50,6 +50,9 @@
%% erlang:system_time should be unique and random enough %% erlang:system_time should be unique and random enough
-define(CLIENTID, iolist_to_binary([atom_to_list(?FUNCTION_NAME), "-", -define(CLIENTID, iolist_to_binary([atom_to_list(?FUNCTION_NAME), "-",
integer_to_list(erlang:system_time())])). integer_to_list(erlang:system_time())])).
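As a rough illustration only, inside a test case named t_subscribe the macro expands to something like the following (the concrete integer differs on every call):

%% ?CLIENTID ==
%%   iolist_to_binary(["t_subscribe", "-", integer_to_list(erlang:system_time())])
%%   => <<"t_subscribe-1644286761634018000">>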
-elvis([{elvis_style, dont_repeat_yourself, disable}]).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Setups %% Setups
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -66,7 +69,9 @@ end_per_suite(_) ->
emqx_ct_helpers:stop_apps([emqx_sn]). emqx_ct_helpers:stop_apps([emqx_sn]).
set_special_confs(emqx) -> set_special_confs(emqx) ->
application:set_env(emqx, plugins_loaded_file, application:set_env(
emqx,
plugins_loaded_file,
emqx_ct_helpers:deps_path(emqx, "test/emqx_SUITE_data/loaded_plugins")); emqx_ct_helpers:deps_path(emqx, "test/emqx_SUITE_data/loaded_plugins"));
set_special_confs(emqx_sn) -> set_special_confs(emqx_sn) ->
application:set_env(emqx_sn, enable_qos3, ?ENABLE_QOS3), application:set_env(emqx_sn, enable_qos3, ?ENABLE_QOS3),
@ -113,7 +118,8 @@ t_subscribe(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
TopicName1 = <<"abcD">>, TopicName1 = <<"abcD">>,
send_register_msg(Socket, TopicName1, MsgId), send_register_msg(Socket, TopicName1, MsgId),
?assertEqual(<<7, ?SN_REGACK, TopicId:16, MsgId:16, 0:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId:16, MsgId:16, 0:8>>,
receive_response(Socket)),
send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1,
CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId:16, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId:16,
@ -145,7 +151,8 @@ t_subscribe_case01(_) ->
TopicName1 = <<"abcD">>, TopicName1 = <<"abcD">>,
send_register_msg(Socket, TopicName1, MsgId), send_register_msg(Socket, TopicName1, MsgId),
?assertEqual(<<7, ?SN_REGACK, TopicId:16, MsgId:16, 0:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId:16, MsgId:16, 0:8>>,
receive_response(Socket)),
send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1,
?SN_NORMAL_TOPIC:2, TopicId:16, MsgId:16, ReturnCode>>, ?SN_NORMAL_TOPIC:2, TopicId:16, MsgId:16, ReturnCode>>,
@ -166,17 +173,18 @@ t_subscribe_case02(_) ->
Will = 0, Will = 0,
CleanSession = 0, CleanSession = 0,
MsgId = 1, MsgId = 1,
TopicId = ?PREDEF_TOPIC_ID1, %this TopicId is the predefined topic id corresponding to ?PREDEF_TOPIC_NAME1 TopicId = ?PREDEF_TOPIC_ID1,
ReturnCode = 0, ReturnCode = 0,
{ok, Socket} = gen_udp:open(0, [binary]), {ok, Socket} = gen_udp:open(0, [binary]),
ClientId = ?CLIENTID, ClientId = ?CLIENTID,
send_connect_msg(Socket, ?CLIENTID), send_connect_msg(Socket, ClientId),
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
Topic1 = ?PREDEF_TOPIC_NAME1, Topic1 = ?PREDEF_TOPIC_NAME1,
send_register_msg(Socket, Topic1, MsgId), send_register_msg(Socket, Topic1, MsgId),
?assertEqual(<<7, ?SN_REGACK, TopicId:16, MsgId:16, 0:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId:16, MsgId:16, 0:8>>,
receive_response(Socket)),
send_subscribe_msg_predefined_topic(Socket, QoS, TopicId, MsgId), send_subscribe_msg_predefined_topic(Socket, QoS, TopicId, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1,
?SN_NORMAL_TOPIC:2, TopicId:16, MsgId:16, ReturnCode>>, ?SN_NORMAL_TOPIC:2, TopicId:16, MsgId:16, ReturnCode>>,
@ -206,9 +214,11 @@ t_subscribe_case03(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_short_topic(Socket, QoS, <<"te">>, MsgId), send_subscribe_msg_short_topic(Socket, QoS, <<"te">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1,
?SN_NORMAL_TOPIC:2, TopicId:16, MsgId:16, ReturnCode>>, CleanSession:1, ?SN_NORMAL_TOPIC:2,
receive_response(Socket)), TopicId:16, MsgId:16, ReturnCode>>,
receive_response(Socket)
),
send_unsubscribe_msg_short_topic(Socket, <<"te">>, MsgId), send_unsubscribe_msg_short_topic(Socket, <<"te">>, MsgId),
?assertEqual(<<4, ?SN_UNSUBACK, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_UNSUBACK, MsgId:16>>, receive_response(Socket)),
@ -217,8 +227,12 @@ t_subscribe_case03(_) ->
?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)), ?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)),
gen_udp:close(Socket). gen_udp:close(Socket).
%%In this case we use predefined topic name to register and subscribe, and %% In this case we use predefined topic name to register and subscribe, and
%% emqx_sn_gateway's compatibility of dealing with predefined and normal topics. Once we give more restrictions to different topic id type, this case would be deleted or modified. %% expect to receive the corresponding predefined topic id but not a new
%% generated topic id from broker. We design this case to illustrate
%% emqx_sn_gateway's compatibility of dealing with predefined and normal topics.
%% Once we give more restrictions to different topic id type, this case would
%% be deleted or modified.
t_subscribe_case04(_) -> t_subscribe_case04(_) ->
Dup = 0, Dup = 0,
QoS = 0, QoS = 0,
@ -226,7 +240,7 @@ t_subscribe_case04(_) ->
Will = 0, Will = 0,
CleanSession = 0, CleanSession = 0,
MsgId = 1, MsgId = 1,
TopicId = ?PREDEF_TOPIC_ID1, %this TopicId is the predefined topic id corresponding to ?PREDEF_TOPIC_NAME1 TopicId = ?PREDEF_TOPIC_ID1,
ReturnCode = 0, ReturnCode = 0,
{ok, Socket} = gen_udp:open(0, [binary]), {ok, Socket} = gen_udp:open(0, [binary]),
ClientId = ?CLIENTID, ClientId = ?CLIENTID,
@ -234,10 +248,14 @@ t_subscribe_case04(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
Topic1 = ?PREDEF_TOPIC_NAME1, Topic1 = ?PREDEF_TOPIC_NAME1,
send_register_msg(Socket, Topic1, MsgId), send_register_msg(Socket, Topic1, MsgId),
?assertEqual(<<7, ?SN_REGACK, TopicId:16, MsgId:16, 0:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId:16, MsgId:16, 0:8>>,
send_subscribe_msg_normal_topic(Socket, QoS, Topic1, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId:16, MsgId:16, ReturnCode>>,
receive_response(Socket)), receive_response(Socket)),
send_subscribe_msg_normal_topic(Socket, QoS, Topic1, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId:16, MsgId:16, ReturnCode>>,
receive_response(Socket)
),
send_unsubscribe_msg_normal_topic(Socket, Topic1, MsgId), send_unsubscribe_msg_normal_topic(Socket, Topic1, MsgId),
?assertEqual(<<4, ?SN_UNSUBACK, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_UNSUBACK, MsgId:16>>, receive_response(Socket)),
@ -264,19 +282,30 @@ t_subscribe_case05(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_register_msg(Socket, <<"abcD">>, MsgId), send_register_msg(Socket, <<"abcD">>, MsgId),
?assertEqual(<<7, ?SN_REGACK, TopicId1:16, MsgId:16, 0:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId1:16, MsgId:16, 0:8>>,
receive_response(Socket)
),
send_subscribe_msg_normal_topic(Socket, QoS, <<"abcD">>, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, <<"abcD">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, MsgId:16, ReturnCode>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, MsgId:16, ReturnCode>>,
receive_response(Socket)
),
send_subscribe_msg_normal_topic(Socket, QoS, <<"/sport/#">>, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, <<"/sport/#">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId0:16, MsgId:16, ReturnCode>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId0:16, MsgId:16, ReturnCode>>,
receive_response(Socket)
),
send_subscribe_msg_normal_topic(Socket, QoS, <<"/a/+/water">>, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, <<"/a/+/water">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId0:16, MsgId:16, ReturnCode>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId0:16, MsgId:16, ReturnCode>>,
receive_response(Socket)
),
send_subscribe_msg_normal_topic(Socket, QoS, <<"/Tom/Home">>, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, <<"/Tom/Home">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1,
@ -306,19 +335,32 @@ t_subscribe_case06(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_register_msg(Socket, <<"abc">>, MsgId), send_register_msg(Socket, <<"abc">>, MsgId),
?assertEqual(<<7, ?SN_REGACK, TopicId1:16, MsgId:16, 0:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId1:16, MsgId:16, 0:8>>,
receive_response(Socket)
),
send_register_msg(Socket, <<"/blue/#">>, MsgId), send_register_msg(Socket, <<"/blue/#">>, MsgId),
?assertEqual(<<7, ?SN_REGACK, TopicId0:16, MsgId:16, ?SN_RC_NOT_SUPPORTED:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId0:16,
MsgId:16, ?SN_RC_NOT_SUPPORTED:8>>,
receive_response(Socket)
),
send_register_msg(Socket, <<"/blue/+/white">>, MsgId), send_register_msg(Socket, <<"/blue/+/white">>, MsgId),
?assertEqual(<<7, ?SN_REGACK, TopicId0:16, MsgId:16, ?SN_RC_NOT_SUPPORTED:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId0:16,
MsgId:16, ?SN_RC_NOT_SUPPORTED:8>>,
receive_response(Socket)
),
send_register_msg(Socket, <<"/$sys/rain">>, MsgId), send_register_msg(Socket, <<"/$sys/rain">>, MsgId),
?assertEqual(<<7, ?SN_REGACK, TopicId2:16, MsgId:16, 0:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId2:16, MsgId:16, 0:8>>,
receive_response(Socket)
),
send_subscribe_msg_short_topic(Socket, QoS, <<"Q2">>, MsgId), send_subscribe_msg_short_topic(Socket, QoS, <<"Q2">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId0:16, MsgId:16, ReturnCode>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId0:16, MsgId:16, ReturnCode>>,
receive_response(Socket)
),
send_unsubscribe_msg_normal_topic(Socket, <<"Q2">>, MsgId), send_unsubscribe_msg_normal_topic(Socket, <<"Q2">>, MsgId),
?assertEqual(<<4, ?SN_UNSUBACK, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_UNSUBACK, MsgId:16>>, receive_response(Socket)),
@ -342,8 +384,11 @@ t_subscribe_case07(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_predefined_topic(Socket, QoS, TopicId1, MsgId), send_subscribe_msg_predefined_topic(Socket, QoS, TopicId1, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, MsgId:16, ?SN_RC_INVALID_TOPIC_ID>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, MsgId:16, ?SN_RC_INVALID_TOPIC_ID>>,
receive_response(Socket)
),
send_unsubscribe_msg_predefined_topic(Socket, TopicId2, MsgId), send_unsubscribe_msg_predefined_topic(Socket, TopicId2, MsgId),
?assertEqual(<<4, ?SN_UNSUBACK, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_UNSUBACK, MsgId:16>>, receive_response(Socket)),
@ -365,8 +410,11 @@ t_subscribe_case08(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_reserved_topic(Socket, QoS, TopicId2, MsgId), send_subscribe_msg_reserved_topic(Socket, QoS, TopicId2, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, ?SN_INVALID_TOPIC_ID:16, MsgId:16, ?SN_RC_INVALID_TOPIC_ID>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
?SN_INVALID_TOPIC_ID:16, MsgId:16, ?SN_RC_INVALID_TOPIC_ID>>,
receive_response(Socket)
),
send_disconnect_msg(Socket, undefined), send_disconnect_msg(Socket, undefined),
?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)), ?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)),
@ -390,15 +438,20 @@ t_publish_negqos_case09(_) ->
send_subscribe_msg_normal_topic(Socket, QoS, Topic, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, Topic, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, MsgId:16, ?SN_RC_ACCEPTED>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
MsgId1 = 3, MsgId1 = 3,
Payload1 = <<20, 21, 22, 23>>, Payload1 = <<20, 21, 22, 23>>,
send_publish_msg_normal_topic(Socket, NegQoS, MsgId1, TopicId1, Payload1), send_publish_msg_normal_topic(Socket, NegQoS, MsgId1, TopicId1, Payload1),
timer:sleep(100), timer:sleep(100),
case ?ENABLE_QOS3 of case ?ENABLE_QOS3 of
true -> true ->
Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>, Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>,
What = receive_response(Socket), What = receive_response(Socket),
?assertEqual(Eexp, What) ?assertEqual(Eexp, What)
end, end,
@ -431,7 +484,9 @@ t_publish_qos0_case01(_) ->
send_publish_msg_normal_topic(Socket, QoS, MsgId1, TopicId1, Payload1), send_publish_msg_normal_topic(Socket, QoS, MsgId1, TopicId1, Payload1),
timer:sleep(100), timer:sleep(100),
Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>, Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>,
What = receive_response(Socket), What = receive_response(Socket),
?assertEqual(Eexp, What), ?assertEqual(Eexp, What),
@ -453,15 +508,20 @@ t_publish_qos0_case02(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_predefined_topic(Socket, QoS, PredefTopicId, MsgId), send_subscribe_msg_predefined_topic(Socket, QoS, PredefTopicId, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, PredefTopicId:16, MsgId:16, ?SN_RC_ACCEPTED>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
PredefTopicId:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
MsgId1 = 3, MsgId1 = 3,
Payload1 = <<20, 21, 22, 23>>, Payload1 = <<20, 21, 22, 23>>,
send_publish_msg_predefined_topic(Socket, QoS, MsgId1, PredefTopicId, Payload1), send_publish_msg_predefined_topic(Socket, QoS, MsgId1, PredefTopicId, Payload1),
timer:sleep(100), timer:sleep(100),
Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_PREDEFINED_TOPIC:2, PredefTopicId:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>, Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_PREDEFINED_TOPIC:2,
PredefTopicId:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>,
What = receive_response(Socket), What = receive_response(Socket),
?assertEqual(Eexp, What), ?assertEqual(Eexp, What),
@ -484,15 +544,20 @@ t_publish_qos0_case3(_) ->
Topic = <<"/a/b/c">>, Topic = <<"/a/b/c">>,
send_subscribe_msg_normal_topic(Socket, QoS, Topic, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, Topic, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId:16, MsgId:16, ?SN_RC_ACCEPTED>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
MsgId1 = 3, MsgId1 = 3,
Payload1 = <<20, 21, 22, 23>>, Payload1 = <<20, 21, 22, 23>>,
send_publish_msg_predefined_topic(Socket, QoS, MsgId1, TopicId, Payload1), send_publish_msg_predefined_topic(Socket, QoS, MsgId1, TopicId, Payload1),
timer:sleep(100), timer:sleep(100),
Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>, Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>,
What = receive_response(Socket), What = receive_response(Socket),
?assertEqual(Eexp, What), ?assertEqual(Eexp, What),
@ -514,8 +579,11 @@ t_publish_qos0_case04(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_normal_topic(Socket, QoS, <<"#">>, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, <<"#">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId0:16, MsgId:16, ?SN_RC_ACCEPTED>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId0:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
MsgId1 = 2, MsgId1 = 2,
Payload1 = <<20, 21, 22, 23>>, Payload1 = <<20, 21, 22, 23>>,
@ -523,7 +591,9 @@ t_publish_qos0_case04(_) ->
send_publish_msg_short_topic(Socket, QoS, MsgId1, Topic, Payload1), send_publish_msg_short_topic(Socket, QoS, MsgId1, Topic, Payload1),
timer:sleep(100), timer:sleep(100),
Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_SHORT_TOPIC:2, Topic/binary, (mid(0)):16, <<20, 21, 22, 23>>/binary>>, Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_SHORT_TOPIC:2,
Topic/binary, (mid(0)):16, <<20, 21, 22, 23>>/binary>>,
What = receive_response(Socket), What = receive_response(Socket),
?assertEqual(Eexp, What), ?assertEqual(Eexp, What),
@ -544,8 +614,11 @@ t_publish_qos0_case05(_) ->
send_connect_msg(Socket, ClientId), send_connect_msg(Socket, ClientId),
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_short_topic(Socket, QoS, <<"/#">>, MsgId), send_subscribe_msg_short_topic(Socket, QoS, <<"/#">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId0:16, MsgId:16, ?SN_RC_ACCEPTED>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId0:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
send_disconnect_msg(Socket, undefined), send_disconnect_msg(Socket, undefined),
?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)), ?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)),
@ -567,15 +640,20 @@ t_publish_qos0_case06(_) ->
Topic = <<"abc">>, Topic = <<"abc">>,
send_subscribe_msg_normal_topic(Socket, QoS, Topic, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, Topic, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, MsgId:16, ?SN_RC_ACCEPTED>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
MsgId1 = 3, MsgId1 = 3,
Payload1 = <<20, 21, 22, 23>>, Payload1 = <<20, 21, 22, 23>>,
send_publish_msg_normal_topic(Socket, QoS, MsgId1, TopicId1, Payload1), send_publish_msg_normal_topic(Socket, QoS, MsgId1, TopicId1, Payload1),
timer:sleep(100), timer:sleep(100),
Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>, Eexp = <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>,
What = receive_response(Socket), What = receive_response(Socket),
?assertEqual(Eexp, What), ?assertEqual(Eexp, What),
@ -597,16 +675,25 @@ t_publish_qos1_case01(_) ->
send_connect_msg(Socket, ClientId), send_connect_msg(Socket, ClientId),
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_normal_topic(Socket, QoS, Topic, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, Topic, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
?SN_NORMAL_TOPIC:2, TopicId1:16, MsgId:16, ?SN_RC_ACCEPTED>>, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
receive_response(Socket)), TopicId1:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
Payload1 = <<20, 21, 22, 23>>, Payload1 = <<20, 21, 22, 23>>,
send_publish_msg_normal_topic(Socket, QoS, MsgId, TopicId1, Payload1), send_publish_msg_normal_topic(Socket, QoS, MsgId, TopicId1, Payload1),
?assertEqual(<<7, ?SN_PUBACK, TopicId1:16, MsgId:16, ?SN_RC_ACCEPTED>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_PUBACK, TopicId1:16,
MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
timer:sleep(100), timer:sleep(100),
?assertEqual(<<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, MsgId:16, <<20, 21, 22, 23>>/binary>>, receive_response(Socket)), ?assertEqual(<<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, MsgId:16, <<20, 21, 22, 23>>/binary>>,
receive_response(Socket)
),
send_disconnect_msg(Socket, undefined), send_disconnect_msg(Socket, undefined),
gen_udp:close(Socket). gen_udp:close(Socket).
@ -625,12 +712,18 @@ t_publish_qos1_case02(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_predefined_topic(Socket, QoS, PredefTopicId, MsgId), send_subscribe_msg_predefined_topic(Socket, QoS, PredefTopicId, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, PredefTopicId:16, MsgId:16, ?SN_RC_ACCEPTED>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
PredefTopicId:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
Payload1 = <<20, 21, 22, 23>>, Payload1 = <<20, 21, 22, 23>>,
send_publish_msg_predefined_topic(Socket, QoS, MsgId, PredefTopicId, Payload1), send_publish_msg_predefined_topic(Socket, QoS, MsgId, PredefTopicId, Payload1),
?assertEqual(<<7, ?SN_PUBACK, PredefTopicId:16, MsgId:16, ?SN_RC_ACCEPTED>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_PUBACK, PredefTopicId:16,
MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
timer:sleep(100), timer:sleep(100),
send_disconnect_msg(Socket, undefined), send_disconnect_msg(Socket, undefined),
@ -645,7 +738,10 @@ t_publish_qos1_case03(_) ->
send_connect_msg(Socket, ClientId), send_connect_msg(Socket, ClientId),
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_publish_msg_predefined_topic(Socket, QoS, MsgId, tid(5), <<20, 21, 22, 23>>), send_publish_msg_predefined_topic(Socket, QoS, MsgId, tid(5), <<20, 21, 22, 23>>),
?assertEqual(<<7, ?SN_PUBACK, TopicId5:16, MsgId:16, ?SN_RC_INVALID_TOPIC_ID>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_PUBACK, TopicId5:16,
MsgId:16, ?SN_RC_INVALID_TOPIC_ID>>,
receive_response(Socket)
),
send_disconnect_msg(Socket, undefined), send_disconnect_msg(Socket, undefined),
?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)), ?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)),
@ -664,15 +760,20 @@ t_publish_qos1_case04(_) ->
send_connect_msg(Socket, ClientId), send_connect_msg(Socket, ClientId),
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_short_topic(Socket, QoS, <<"ab">>, MsgId), send_subscribe_msg_short_topic(Socket, QoS, <<"ab">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
?SN_NORMAL_TOPIC:2, TopicId0:16, MsgId:16, ?SN_RC_ACCEPTED>>, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
receive_response(Socket)), TopicId0:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
Topic = <<"ab">>, Topic = <<"ab">>,
Payload1 = <<20, 21, 22, 23>>, Payload1 = <<20, 21, 22, 23>>,
send_publish_msg_short_topic(Socket, QoS, MsgId, Topic, Payload1), send_publish_msg_short_topic(Socket, QoS, MsgId, Topic, Payload1),
<<TopicIdShort:16>> = Topic, <<TopicIdShort:16>> = Topic,
?assertEqual(<<7, ?SN_PUBACK, TopicIdShort:16, MsgId:16, ?SN_RC_ACCEPTED>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_PUBACK, TopicIdShort:16,
MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
timer:sleep(100), timer:sleep(100),
send_disconnect_msg(Socket, undefined), send_disconnect_msg(Socket, undefined),
@ -692,13 +793,18 @@ t_publish_qos1_case05(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_normal_topic(Socket, QoS, <<"ab">>, MsgId), send_subscribe_msg_normal_topic(Socket, QoS, <<"ab">>, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
?SN_NORMAL_TOPIC:2, TopicId1:16, MsgId:16, ?SN_RC_ACCEPTED>>, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
receive_response(Socket)), TopicId1:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
send_publish_msg_short_topic(Socket, QoS, MsgId, <<"/#">>, <<20, 21, 22, 23>>), send_publish_msg_short_topic(Socket, QoS, MsgId, <<"/#">>, <<20, 21, 22, 23>>),
<<TopicIdShort:16>> = <<"/#">>, <<TopicIdShort:16>> = <<"/#">>,
?assertEqual(<<7, ?SN_PUBACK, TopicIdShort:16, MsgId:16, ?SN_RC_NOT_SUPPORTED>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_PUBACK, TopicIdShort:16,
MsgId:16, ?SN_RC_NOT_SUPPORTED>>,
receive_response(Socket)
),
send_disconnect_msg(Socket, undefined), send_disconnect_msg(Socket, undefined),
?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)), ?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)),
@ -724,7 +830,10 @@ t_publish_qos1_case06(_) ->
send_publish_msg_short_topic(Socket, QoS, MsgId, <<"/+">>, <<20, 21, 22, 23>>), send_publish_msg_short_topic(Socket, QoS, MsgId, <<"/+">>, <<20, 21, 22, 23>>),
<<TopicIdShort:16>> = <<"/+">>, <<TopicIdShort:16>> = <<"/+">>,
?assertEqual(<<7, ?SN_PUBACK, TopicIdShort:16, MsgId:16, ?SN_RC_NOT_SUPPORTED>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_PUBACK, TopicIdShort:16,
MsgId:16, ?SN_RC_NOT_SUPPORTED>>,
receive_response(Socket)
),
send_disconnect_msg(Socket, undefined), send_disconnect_msg(Socket, undefined),
?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)), ?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)),
@ -751,7 +860,11 @@ t_publish_qos2_case01(_) ->
send_publish_msg_normal_topic(Socket, QoS, MsgId, TopicId1, Payload1), send_publish_msg_normal_topic(Socket, QoS, MsgId, TopicId1, Payload1),
?assertEqual(<<4, ?SN_PUBREC, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_PUBREC, MsgId:16>>, receive_response(Socket)),
send_pubrel_msg(Socket, MsgId), send_pubrel_msg(Socket, MsgId),
?assertEqual(<<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, 1:16, <<20, 21, 22, 23>>/binary>>, receive_response(Socket)), ?assertEqual(<<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, 1:16, <<20, 21, 22, 23>>/binary>>,
receive_response(Socket)
),
?assertEqual(<<4, ?SN_PUBCOMP, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_PUBCOMP, MsgId:16>>, receive_response(Socket)),
timer:sleep(100), timer:sleep(100),
@ -773,15 +886,21 @@ t_publish_qos2_case02(_) ->
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
send_subscribe_msg_predefined_topic(Socket, QoS, PredefTopicId, MsgId), send_subscribe_msg_predefined_topic(Socket, QoS, PredefTopicId, MsgId),
?assertEqual(<<8, ?SN_SUBACK, ?FNU:1, QoS:2, ?FNU:5, PredefTopicId:16, MsgId:16, ?SN_RC_ACCEPTED>>, ?assertEqual(<<8, ?SN_SUBACK, ?FNU:1, QoS:2, ?FNU:5,
receive_response(Socket)), PredefTopicId:16, MsgId:16, ?SN_RC_ACCEPTED>>,
receive_response(Socket)
),
Payload1 = <<20, 21, 22, 23>>, Payload1 = <<20, 21, 22, 23>>,
send_publish_msg_predefined_topic(Socket, QoS, MsgId, PredefTopicId, Payload1), send_publish_msg_predefined_topic(Socket, QoS, MsgId, PredefTopicId, Payload1),
?assertEqual(<<4, ?SN_PUBREC, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_PUBREC, MsgId:16>>, receive_response(Socket)),
send_pubrel_msg(Socket, MsgId), send_pubrel_msg(Socket, MsgId),
?assertEqual(<<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_PREDEFINED_TOPIC :2, PredefTopicId:16, 1:16, <<20, 21, 22, 23>>/binary>>, receive_response(Socket)), ?assertEqual(<<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_PREDEFINED_TOPIC:2,
PredefTopicId:16, 1:16, <<20, 21, 22, 23>>/binary>>,
receive_response(Socket)
),
?assertEqual(<<4, ?SN_PUBCOMP, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_PUBCOMP, MsgId:16>>, receive_response(Socket)),
timer:sleep(100), timer:sleep(100),
@ -812,7 +931,11 @@ t_publish_qos2_case03(_) ->
?assertEqual(<<4, ?SN_PUBREC, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_PUBREC, MsgId:16>>, receive_response(Socket)),
send_pubrel_msg(Socket, MsgId), send_pubrel_msg(Socket, MsgId),
?assertEqual(<<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_SHORT_TOPIC :2, <<"/a">>/binary, 1:16, <<20, 21, 22, 23>>/binary>>, receive_response(Socket)), ?assertEqual(<<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1,
Will:1, CleanSession:1, ?SN_SHORT_TOPIC:2,
"/a", 1:16, <<20, 21, 22, 23>>/binary>>,
receive_response(Socket)
),
?assertEqual(<<4, ?SN_PUBCOMP, MsgId:16>>, receive_response(Socket)), ?assertEqual(<<4, ?SN_PUBCOMP, MsgId:16>>, receive_response(Socket)),
timer:sleep(100), timer:sleep(100),
@ -1083,7 +1206,11 @@ t_asleep_test03_to_awake_qos1_dl_msg(_) ->
send_register_msg(Socket, TopicName1, MsgId1), send_register_msg(Socket, TopicName1, MsgId1),
?assertEqual(<<7, ?SN_REGACK, TopicId1:16, MsgId1:16, 0:8>>, receive_response(Socket)), ?assertEqual(<<7, ?SN_REGACK, TopicId1:16, MsgId1:16, 0:8>>, receive_response(Socket)),
send_subscribe_msg_predefined_topic(Socket, QoS, TopicId1, MsgId), send_subscribe_msg_predefined_topic(Socket, QoS, TopicId1, MsgId),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, WillBit:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId1:16, MsgId:16, ReturnCode>>, receive_response(Socket)), ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
WillBit:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId1:16, MsgId:16, ReturnCode>>,
receive_response(Socket)
),
% goto asleep state % goto asleep state
send_disconnect_msg(Socket, 1), send_disconnect_msg(Socket, 1),
@ -1109,7 +1236,10 @@ t_asleep_test03_to_awake_qos1_dl_msg(_) ->
%% the broker should send dl msgs to the awake client before sending the pingresp %% the broker should send dl msgs to the awake client before sending the pingresp
UdpData = receive_response(Socket), UdpData = receive_response(Socket),
MsgId_udp = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, ?SN_NORMAL_TOPIC, TopicId1, Payload1}, UdpData), MsgId_udp = check_publish_msg_on_udp(
{Dup, QoS, Retain, WillBit,
CleanSession, ?SN_NORMAL_TOPIC,
TopicId1, Payload1}, UdpData),
send_puback_msg(Socket, TopicId1, MsgId_udp), send_puback_msg(Socket, TopicId1, MsgId_udp),
%% check the pingresp is received at last %% check the pingresp is received at last
@ -1141,8 +1271,11 @@ t_asleep_test04_to_awake_qos1_dl_msg(_) ->
CleanSession = 0, CleanSession = 0,
ReturnCode = 0, ReturnCode = 0,
send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId1), send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId1),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, WillBit:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId0:16, MsgId1:16, ReturnCode>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), WillBit:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId0:16, MsgId1:16, ReturnCode>>,
receive_response(Socket)
),
% goto asleep state % goto asleep state
send_disconnect_msg(Socket, 1), send_disconnect_msg(Socket, 1),
@ -1176,11 +1309,17 @@ t_asleep_test04_to_awake_qos1_dl_msg(_) ->
send_regack_msg(Socket, TopicIdNew, MsgId3), send_regack_msg(Socket, TopicIdNew, MsgId3),
UdpData2 = receive_response(Socket), UdpData2 = receive_response(Socket),
MsgId_udp2 = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, ?SN_NORMAL_TOPIC, TopicIdNew, Payload1}, UdpData2), MsgId_udp2 = check_publish_msg_on_udp(
{Dup, QoS, Retain, WillBit,
CleanSession, ?SN_NORMAL_TOPIC,
TopicIdNew, Payload1}, UdpData2),
send_puback_msg(Socket, TopicIdNew, MsgId_udp2), send_puback_msg(Socket, TopicIdNew, MsgId_udp2),
UdpData3 = receive_response(Socket), UdpData3 = receive_response(Socket),
MsgId_udp3 = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, ?SN_NORMAL_TOPIC, TopicIdNew, Payload2}, UdpData3), MsgId_udp3 = check_publish_msg_on_udp(
{Dup, QoS, Retain, WillBit,
CleanSession, ?SN_NORMAL_TOPIC,
TopicIdNew, Payload2}, UdpData3),
send_puback_msg(Socket, TopicIdNew, MsgId_udp3), send_puback_msg(Socket, TopicIdNew, MsgId_udp3),
?assertEqual(<<2, ?SN_PINGRESP>>, receive_response(Socket)), ?assertEqual(<<2, ?SN_PINGRESP>>, receive_response(Socket)),
@ -1216,8 +1355,11 @@ t_asleep_test05_to_awake_qos1_dl_msg(_) ->
CleanSession = 0, CleanSession = 0,
ReturnCode = 0, ReturnCode = 0,
send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId1), send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId1),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, WillBit:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId0:16, MsgId1:16, ReturnCode>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), WillBit:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId0:16, MsgId1:16, ReturnCode>>,
receive_response(Socket)
),
% goto asleep state % goto asleep state
SleepDuration = 30, SleepDuration = 30,
@ -1250,19 +1392,26 @@ t_asleep_test05_to_awake_qos1_dl_msg(_) ->
send_regack_msg(Socket, TopicIdNew, MsgId_reg), send_regack_msg(Socket, TopicIdNew, MsgId_reg),
UdpData2 = receive_response(Socket), UdpData2 = receive_response(Socket),
MsgId2 = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, ?SN_NORMAL_TOPIC, TopicIdNew, Payload2}, UdpData2), MsgId2 = check_publish_msg_on_udp(
{Dup, QoS, Retain, WillBit,
CleanSession, ?SN_NORMAL_TOPIC,
TopicIdNew, Payload2}, UdpData2),
send_puback_msg(Socket, TopicIdNew, MsgId2), send_puback_msg(Socket, TopicIdNew, MsgId2),
timer:sleep(50), timer:sleep(50),
UdpData3 = wrap_receive_response(Socket), UdpData3 = wrap_receive_response(Socket),
MsgId3 = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, ?SN_NORMAL_TOPIC, TopicIdNew, Payload3}, UdpData3), MsgId3 = check_publish_msg_on_udp(
{Dup, QoS, Retain, WillBit,
CleanSession, ?SN_NORMAL_TOPIC,
TopicIdNew, Payload3}, UdpData3),
send_puback_msg(Socket, TopicIdNew, MsgId3), send_puback_msg(Socket, TopicIdNew, MsgId3),
timer:sleep(50), timer:sleep(50),
case receive_response(Socket) of case receive_response(Socket) of
<<2,23>> -> ok; <<2,23>> -> ok;
UdpData4 -> UdpData4 ->
MsgId4 = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, MsgId4 = check_publish_msg_on_udp(
{Dup, QoS, Retain, WillBit,
CleanSession, ?SN_NORMAL_TOPIC, CleanSession, ?SN_NORMAL_TOPIC,
TopicIdNew, Payload4}, UdpData4), TopicIdNew, Payload4}, UdpData4),
send_puback_msg(Socket, TopicIdNew, MsgId4) send_puback_msg(Socket, TopicIdNew, MsgId4)
@ -1322,7 +1471,10 @@ t_asleep_test06_to_awake_qos2_dl_msg(_) ->
send_pingreq_msg(Socket, ClientId), send_pingreq_msg(Socket, ClientId),
UdpData = wrap_receive_response(Socket), UdpData = wrap_receive_response(Socket),
MsgId_udp = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, ?SN_NORMAL_TOPIC, TopicId_tom, Payload1}, UdpData), MsgId_udp = check_publish_msg_on_udp(
{Dup, QoS, Retain, WillBit,
CleanSession, ?SN_NORMAL_TOPIC,
TopicId_tom, Payload1}, UdpData),
send_pubrec_msg(Socket, MsgId_udp), send_pubrec_msg(Socket, MsgId_udp),
?assertMatch(<<_:8, ?SN_PUBREL:8, _/binary>>, receive_response(Socket)), ?assertMatch(<<_:8, ?SN_PUBREL:8, _/binary>>, receive_response(Socket)),
send_pubcomp_msg(Socket, MsgId_udp), send_pubcomp_msg(Socket, MsgId_udp),
@ -1357,8 +1509,11 @@ t_asleep_test07_to_connected(_) ->
send_register_msg(Socket, TopicName_tom, MsgId1), send_register_msg(Socket, TopicName_tom, MsgId1),
TopicId_tom = check_regack_msg_on_udp(MsgId1, receive_response(Socket)), TopicId_tom = check_regack_msg_on_udp(MsgId1, receive_response(Socket)),
send_subscribe_msg_predefined_topic(Socket, QoS, TopicId_tom, MsgId1), send_subscribe_msg_predefined_topic(Socket, QoS, TopicId_tom, MsgId1),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, WillBit:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId_tom:16, MsgId1:16, ReturnCode>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), WillBit:1,CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId_tom:16, MsgId1:16, ReturnCode>>,
receive_response(Socket)
),
% goto asleep state % goto asleep state
send_disconnect_msg(Socket, SleepDuration), send_disconnect_msg(Socket, SleepDuration),
@ -1436,8 +1591,11 @@ t_asleep_test09_to_awake_again_qos1_dl_msg(_) ->
CleanSession = 0, CleanSession = 0,
ReturnCode = 0, ReturnCode = 0,
send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId1), send_subscribe_msg_normal_topic(Socket, QoS, TopicName1, MsgId1),
?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, WillBit:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, TopicId0:16, MsgId1:16, ReturnCode>>, ?assertEqual(<<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1,
receive_response(Socket)), WillBit:1, CleanSession:1, ?SN_NORMAL_TOPIC:2,
TopicId0:16, MsgId1:16, ReturnCode>>,
receive_response(Socket)
),
% goto asleep state % goto asleep state
SleepDuration = 30, SleepDuration = 30,
send_disconnect_msg(Socket, SleepDuration), send_disconnect_msg(Socket, SleepDuration),
@ -1471,7 +1629,10 @@ t_asleep_test09_to_awake_again_qos1_dl_msg(_) ->
udp_receive_timeout -> udp_receive_timeout ->
ok; ok;
UdpData2 -> UdpData2 ->
MsgId2 = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, ?SN_NORMAL_TOPIC, TopicIdNew, Payload2}, UdpData2), MsgId2 = check_publish_msg_on_udp(
{Dup, QoS, Retain, WillBit,
CleanSession, ?SN_NORMAL_TOPIC,
TopicIdNew, Payload2}, UdpData2),
send_puback_msg(Socket, TopicIdNew, MsgId2) send_puback_msg(Socket, TopicIdNew, MsgId2)
end, end,
timer:sleep(100), timer:sleep(100),
@ -1480,7 +1641,10 @@ t_asleep_test09_to_awake_again_qos1_dl_msg(_) ->
udp_receive_timeout -> udp_receive_timeout ->
ok; ok;
UdpData3 -> UdpData3 ->
MsgId3 = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, ?SN_NORMAL_TOPIC, TopicIdNew, Payload3}, UdpData3), MsgId3 = check_publish_msg_on_udp(
{Dup, QoS, Retain, WillBit,
CleanSession, ?SN_NORMAL_TOPIC,
TopicIdNew, Payload3}, UdpData3),
send_puback_msg(Socket, TopicIdNew, MsgId3) send_puback_msg(Socket, TopicIdNew, MsgId3)
end, end,
timer:sleep(100), timer:sleep(100),
@ -1489,7 +1653,8 @@ t_asleep_test09_to_awake_again_qos1_dl_msg(_) ->
udp_receive_timeout -> udp_receive_timeout ->
ok; ok;
UdpData4 -> UdpData4 ->
MsgId4 = check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, MsgId4 = check_publish_msg_on_udp(
{Dup, QoS, Retain, WillBit,
CleanSession, ?SN_NORMAL_TOPIC, CleanSession, ?SN_NORMAL_TOPIC,
TopicIdNew, Payload4}, UdpData4), TopicIdNew, Payload4}, UdpData4),
send_puback_msg(Socket, TopicIdNew, MsgId4) send_puback_msg(Socket, TopicIdNew, MsgId4)
@ -1498,7 +1663,8 @@ t_asleep_test09_to_awake_again_qos1_dl_msg(_) ->
%% send PINGREQ again to enter awake state %% send PINGREQ again to enter awake state
send_pingreq_msg(Socket, ClientId), send_pingreq_msg(Socket, ClientId),
%% will not receive any buffered PUBLISH messages buffered before last awake, only receive PINGRESP here %% will not receive any buffered PUBLISH messages buffered before last
%% awake, only receive PINGRESP here
?assertEqual(<<2, ?SN_PINGRESP>>, receive_response(Socket)), ?assertEqual(<<2, ?SN_PINGRESP>>, receive_response(Socket)),
gen_udp:close(Socket). gen_udp:close(Socket).
@ -1901,8 +2067,12 @@ check_dispatched_message(Dup, QoS, Retain, TopicIdType, TopicId, Payload, Socket
PubMsg = receive_response(Socket), PubMsg = receive_response(Socket),
Length = 7 + byte_size(Payload), Length = 7 + byte_size(Payload),
?LOG("check_dispatched_message ~p~n", [PubMsg]), ?LOG("check_dispatched_message ~p~n", [PubMsg]),
?LOG("expected ~p xx ~p~n", [<<Length, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, ?FNU:2, TopicIdType:2, TopicId:16>>, Payload]), ?LOG("expected ~p xx ~p~n",
<<Length, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, ?FNU:2, TopicIdType:2, TopicId:16, MsgId:16, Payload/binary>> = PubMsg, [<<Length, ?SN_PUBLISH,
Dup:1, QoS:2, Retain:1, ?FNU:2, TopicIdType:2, TopicId:16>>, Payload]),
<<Length, ?SN_PUBLISH,
Dup:1, QoS:2, Retain:1, ?FNU:2, TopicIdType:2,
TopicId:16, MsgId:16, Payload/binary>> = PubMsg,
case QoS of case QoS of
0 -> ok; 0 -> ok;
1 -> send_puback_msg(Socket, TopicId, MsgId); 1 -> send_puback_msg(Socket, TopicId, MsgId);
@ -1914,11 +2084,14 @@ check_dispatched_message(Dup, QoS, Retain, TopicIdType, TopicId, Payload, Socket
get_udp_broadcast_address() -> get_udp_broadcast_address() ->
"255.255.255.255". "255.255.255.255".
check_publish_msg_on_udp({Dup, QoS, Retain, WillBit, CleanSession, TopicType, TopicId, Payload}, UdpData) -> check_publish_msg_on_udp({Dup, QoS, Retain, WillBit,
CleanSession, TopicType, TopicId, Payload}, UdpData) ->
<<HeaderUdp:5/binary, MsgId:16, PayloadIn/binary>> = UdpData, <<HeaderUdp:5/binary, MsgId:16, PayloadIn/binary>> = UdpData,
ct:pal("UdpData: ~p, Payload: ~p, PayloadIn: ~p", [UdpData, Payload, PayloadIn]), ct:pal("UdpData: ~p, Payload: ~p, PayloadIn: ~p", [UdpData, Payload, PayloadIn]),
Size9 = byte_size(Payload) + 7, Size9 = byte_size(Payload) + 7,
Eexp = <<Size9:8, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, WillBit:1, CleanSession:1, TopicType:2, TopicId:16>>, Eexp = <<Size9:8,
?SN_PUBLISH, Dup:1, QoS:2, Retain:1, WillBit:1, CleanSession:1,
TopicType:2, TopicId:16>>,
?assertEqual(Eexp, HeaderUdp), % mqtt-sn header should be same ?assertEqual(Eexp, HeaderUdp), % mqtt-sn header should be same
?assertEqual(Payload, PayloadIn), % payload should be same ?assertEqual(Payload, PayloadIn), % payload should be same
MsgId. MsgId.
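For readers less used to Erlang bit syntax, a throwaway sketch of the layout check_publish_msg_on_udp/2 verifies; the 16#0C message-type byte for PUBLISH is taken from the MQTT-SN spec and is an assumption here, since the macro value is not shown in this diff:

%% 1-byte length, 1-byte msg type, 1-byte flags, 2-byte topic id,
%% 2-byte msg id, then the payload (length = 7 + payload size):
1> UdpData = <<11, 16#0C, 2#00000000, 1:16, 7:16, 20, 21, 22, 23>>.
2> <<HeaderUdp:5/binary, MsgId:16, PayloadIn/binary>> = UdpData.
3> {HeaderUdp, MsgId, PayloadIn}.
{<<11,12,0,0,1>>,7,<<20,21,22,23>>}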

View File

@ -4,6 +4,11 @@
set -e set -e
DEBUG="${DEBUG:-0}"
if [ "$DEBUG" -eq 1 ]; then
set -x
fi
ROOT_DIR="$(cd "$(dirname "$(readlink "$0" || echo "$0")")"/..; pwd -P)" ROOT_DIR="$(cd "$(dirname "$(readlink "$0" || echo "$0")")"/..; pwd -P)"
# shellcheck disable=SC1090 # shellcheck disable=SC1090
. "$ROOT_DIR"/releases/emqx_vars . "$ROOT_DIR"/releases/emqx_vars
@ -299,6 +304,43 @@ generate_config() {
fi fi
} }
# check if a PID is down
is_down() {
PID="$1"
if ps -p "$PID" >/dev/null; then
# still around
# shellcheck disable=SC2009 # this grep pattern is not a part of the program names
if ps -p "$PID" | grep -q 'defunct'; then
# zombie state, print parent pid
parent="$(ps -o ppid= -p "$PID" | tr -d ' ')"
echo "WARN: $PID is marked <defunct>, parent:"
ps -p "$parent"
return 0
fi
return 1
fi
# it's gone
return 0
}
wait_for() {
local WAIT_TIME
local CMD
WAIT_TIME="$1"
shift
CMD="$*"
while true; do
if $CMD >/dev/null 2>&1; then
return 0
fi
if [ "$WAIT_TIME" -le 0 ]; then
return 1
fi
WAIT_TIME=$((WAIT_TIME - 1))
sleep 1
done
}
 # Call bootstrapd for daemon commands like start/stop/console
 bootstrapd() {
     if [ -e "$RUNNER_DATA_DIR/.erlang.cookie" ]; then

@@ -495,7 +537,7 @@ case "$1" in
         "$BINDIR/run_erl" -daemon "$PIPE_DIR" "$RUNNER_LOG_DIR" \
             "$(relx_start_command)"

-        WAIT_TIME=${WAIT_FOR_ERLANG:-15}
+        WAIT_TIME=${WAIT_FOR_ERLANG:-150}
         while [ "$WAIT_TIME" -gt 0 ]; do
             if ! relx_nodetool "ping" >/dev/null 2>&1; then
                 WAIT_TIME=$((WAIT_TIME - 1))

@@ -507,7 +549,7 @@ case "$1" in
                 echo "$EMQX_DESCRIPTION $REL_VSN is started successfully!"
                 exit 0
             fi
-        done && echo "$EMQX_DESCRIPTION $REL_VSN failed to start within ${WAIT_FOR_ERLANG:-15} seconds,"
+        done && echo "$EMQX_DESCRIPTION $REL_VSN failed to start within ${WAIT_FOR_ERLANG:-150} seconds,"
         echo "see the output of '$0 console' for more information."
         echo "If you want to wait longer, set the environment variable"
         echo "WAIT_FOR_ERLANG to the number of seconds to wait."

@@ -518,6 +560,7 @@ case "$1" in
         # Wait for the node to completely stop...
         PID="$(relx_get_pid)"
         if ! relx_nodetool "stop"; then
+            echoerr "Graceful shutdown failed PID=[$PID]"
             exit 1
         fi
         WAIT_TIME="${EMQX_WAIT_FOR_STOP:-120}"
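Both wait loops read their limits from the environment, so a slow host can simply raise them instead of patching the script; the values below are only examples:

WAIT_FOR_ERLANG=300 ./bin/emqx start      # allow 300 s for the node to report "started"
EMQX_WAIT_FOR_STOP=600 ./bin/emqx stop    # allow 600 s for a graceful shutdown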


@@ -65,18 +65,18 @@ make_relup() {
     if [ -d "$releases_dir" ]; then
         while read -r zip; do
             local base_vsn
-            base_vsn="$(echo "$zip" | grep -oE "[0-9]+\.[0-9]+\.[0-9]+(-[0-9a-f]{8})?")"
+            base_vsn="$(echo "$zip" | grep -oE "[0-9]+\.[0-9]+\.[0-9]+(-[0-9a-f]{8})?" | head -1)"
             if [ ! -d "$releases_dir/$base_vsn" ]; then
                 local tmp_dir
                 tmp_dir="$(mktemp -d -t emqx.XXXXXXX)"
                 unzip -q "$zip" "emqx/releases/*" -d "$tmp_dir"
                 unzip -q "$zip" "emqx/lib/*" -d "$tmp_dir"
-                cp -r -n "$tmp_dir/emqx/releases"/* "$releases_dir"
-                cp -r -n "$tmp_dir/emqx/lib"/* "$lib_dir"
+                cp -r -n "$tmp_dir/emqx/releases"/* "$releases_dir" || true
+                cp -r -n "$tmp_dir/emqx/lib"/* "$lib_dir" || true
                 rm -rf "$tmp_dir"
             fi
             releases+=( "$base_vsn" )
-        done < <(find _upgrade_base -maxdepth 1 -name "*$PROFILE-$SYSTEM*-$ARCH.zip" -type f)
+        done < <(find _upgrade_base -maxdepth 1 -name "${PROFILE}-*-otp${OTP_VSN}-${SYSTEM}-${ARCH}.zip" -type f)
     fi
     if [ ${#releases[@]} -eq 0 ]; then
         log "No upgrade base found, relup ignored"

@@ -120,7 +120,7 @@ make_zip() {
         log "ERROR: $tarball is not found"
     fi
     local zipball
-    zipball="${pkgpath}/${PROFILE}-${SYSTEM}-${PKG_VSN}-${ARCH}.zip"
+    zipball="${pkgpath}/${PROFILE}-${PKG_VSN}-otp${OTP_VSN}-${SYSTEM}-${ARCH}.zip"
     tar zxf "${tarball}" -C "${tard}/emqx"
     ## try to be portable for zip packages.
     ## for DEB and RPM packages the dependencies are resolved by yum and apt

@@ -141,6 +141,49 @@ make_docker() {
         -f "${DOCKERFILE}" .
 }
## This function accepts any base docker image,
## an emqx zip package, and an image tag (for the image to be built),
## to build a docker image which runs EMQ X
##
## Export below variables to quickly build an image
##
## Name               Default                          Example
## ---------------------------------------------------------------------
## EMQX_BASE_IMAGE    current os                       centos:7
## EMQX_ZIP_PACKAGE   _packages/<current-zip-target>   /tmp/emqx-4.4.0-otp24.1.5-3-centos7-amd64.zip
## EMQX_IMAGE_TAG     emqx/emqx:<current-vsn-rel>      emqx/emqx:testing-tag
##
make_docker_testing() {
if [ -z "${EMQX_BASE_IMAGE:-}" ]; then
case "$SYSTEM" in
ubuntu20*)
EMQX_BASE_IMAGE="ubuntu:20.04"
;;
centos8)
EMQX_BASE_IMAGE="centos:8"
;;
*)
echo "Unsupported testing base image for $SYSTEM"
exit 1
;;
esac
fi
EMQX_IMAGE_TAG="${EMQX_IMAGE_TAG:-emqx/$PROFILE:${PKG_VSN}-otp${OTP_VSN}-${SYSTEM}}"
local defaultzip
defaultzip="_packages/${PROFILE}/${PROFILE}-${PKG_VSN}-otp${OTP_VSN}-${SYSTEM}-${ARCH}.zip"
local zip="${EMQX_ZIP_PACKAGE:-$defaultzip}"
if [ ! -f "$zip" ]; then
log "ERROR: $zip not built?"
exit 1
fi
set -x
docker build \
--build-arg BUILD_FROM="${EMQX_BASE_IMAGE}" \
--build-arg EMQX_ZIP_PACKAGE="${zip}" \
--tag "$EMQX_IMAGE_TAG" \
-f "${DOCKERFILE_TESTING}" .
}
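A sketch of how this target might be driven; the ./build <profile> <artifact> calling convention and the zip file name are assumptions, while the three variables are the documented overrides above:

EMQX_BASE_IMAGE="ubuntu:20.04" \
EMQX_ZIP_PACKAGE="_packages/emqx/emqx-4.4.0-otp24.1.5-3-ubuntu20.04-amd64.zip" \
EMQX_IMAGE_TAG="emqx/emqx:testing-tag" \
./build emqx docker-testing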
 log "building artifact=$ARTIFACT for profile=$PROFILE"
 case "$ARTIFACT" in

@@ -161,10 +204,12 @@ case "$ARTIFACT" in
         make -C "deploy/packages/${PKGERDIR}" clean
         EMQX_REL="$(pwd)" EMQX_BUILD="${PROFILE}" SYSTEM="${SYSTEM}" make -C "deploy/packages/${PKGERDIR}"
         ;;
     docker)
         make_docker
         ;;
+    docker-testing)
+        make_docker_testing
+        ;;
     *)
         log "Unknown artifact $ARTIFACT"
         exit 1


@@ -105,7 +105,9 @@ spec:
         secret:
           secretName: {{ $licenseSecretName }}
       {{- end }}
+      {{- if eq (.Values.emqxConfig.EMQX_CLUSTER__DISCOVERY | default "k8s") "k8s" }}
       serviceAccountName: {{ include "emqx.fullname" . }}
+      {{- end }}
       {{- if .Values.podSecurityContext.enabled }}
       securityContext: {{- omit .Values.podSecurityContext "enabled" | toYaml | nindent 8 }}
       {{- end }}


@@ -0,0 +1,59 @@
apiVersion: v1
kind: ConfigMap
metadata:
name: {{ include "emqx.fullname" . }}-env
namespace: {{ .Release.Namespace }}
labels:
app.kubernetes.io/name: {{ include "emqx.name" . }}
helm.sh/chart: {{ include "emqx.chart" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
data:
{{- range $index, $value := .Values.emqxConfig}}
{{$index}}: "{{ tpl (printf "%v" $value) $ }}"
{{- end}}
---
apiVersion: v1
kind: ConfigMap
metadata:
name: {{ include "emqx.fullname" . }}-acl
namespace: {{ .Release.Namespace }}
labels:
app.kubernetes.io/name: {{ include "emqx.name" . }}
helm.sh/chart: {{ include "emqx.chart" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
data:
"acl.conf": |
{{ .Values.emqxAclConfig }}
---
apiVersion: v1
kind: ConfigMap
metadata:
name: {{ include "emqx.fullname" . }}-loaded-plugins
namespace: {{ .Release.Namespace }}
labels:
app.kubernetes.io/name: {{ include "emqx.name" . }}
helm.sh/chart: {{ include "emqx.chart" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
data:
"loaded_plugins": |
{{ .Values.emqxLoadedPlugins }}
---
apiVersion: v1
kind: ConfigMap
metadata:
name: {{ include "emqx.fullname" . }}-loaded-modules
namespace: {{ .Release.Namespace }}
labels:
app.kubernetes.io/name: {{ include "emqx.name" . }}
helm.sh/chart: {{ include "emqx.chart" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
data:
"loaded_modules": |
{{ .Values.emqxLoadedModules }}


@@ -1,3 +1,4 @@
+{{- if eq (.Values.emqxConfig.EMQX_CLUSTER__DISCOVERY | default "k8s") "k8s" }}
 apiVersion: v1
 kind: ServiceAccount
 metadata:

@@ -40,3 +41,4 @@ roleRef:
   kind: Role
   name: {{ include "emqx.fullname" . }}
   apiGroup: rbac.authorization.k8s.io
+{{- end }}


@@ -70,7 +70,20 @@ initContainers: {}
 ## EMQ X configuration item, see the documentation (https://hub.docker.com/r/emqx/emqx)
 emqxConfig:
+  EMQX_NAME: "{{ .Release.Name }}"
+  ## Cluster discovery by dns
+  # EMQX_CLUSTER__DISCOVERY: "dns"
+  # EMQX_CLUSTER__DNS__NAME: "{{ .Release.Name }}-headless.{{ .Release.Namespace }}.svc.cluster.local"
+  # EMQX_CLUSTER__DNS__APP: "{{ .Release.Name }}"
+  # EMQX_CLUSTER__DNS__TYPE: "srv"
+  ## Cluster discovery by k8s
+  EMQX_CLUSTER__DISCOVERY: "k8s"
+  EMQX_CLUSTER__K8S__APP_NAME: "{{ .Release.Name }}"
   EMQX_CLUSTER__K8S__APISERVER: "https://kubernetes.default.svc:443"
+  EMQX_CLUSTER__K8S__SERVICE_NAME: "{{ .Release.Name }}-headless"
+  EMQX_CLUSTER__K8S__NAMESPACE: "{{ .Release.Namespace }}"
   ## The address type is used to extract host from k8s service.
   ## Value: ip | dns | hostname
   ## Note: Hostname is only supported after v4.0-rc.2

@@ -114,6 +127,8 @@ emqxLoadedPlugins: >
 emqxLoadedModules: >
   {emqx_mod_acl_internal, true}.
   {emqx_mod_presence, true}.
+  {emqx_mod_trace, false}.
+  {emqx_mod_st_statistics, false}.
   {emqx_mod_delayed, false}.
   {emqx_mod_rewrite, false}.
   {emqx_mod_subscription, false}.
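Together with the conditional ServiceAccount/RBAC templates above, switching the chart from k8s to dns discovery is a matter of overriding these emqxConfig keys; a hedged example, where the chart path and release name are assumptions:

helm install my-emqx ./deploy/charts/emqx \
  --namespace emqx --create-namespace \
  --set emqxConfig.EMQX_CLUSTER__DISCOVERY=dns \
  --set emqxConfig.EMQX_CLUSTER__DNS__TYPE=srv \
  --set emqxConfig.EMQX_CLUSTER__DNS__NAME="my-emqx-headless.emqx.svc.cluster.local"
# with discovery != k8s the ServiceAccount, Role and RoleBinding objects are no longer rendered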


@@ -1,5 +1,5 @@
-ARG BUILD_FROM=emqx/build-env:erl23.2.7.2-emqx-3-alpine
+ARG BUILD_FROM=ghcr.io/emqx/emqx-builder/4.4-4:24.1.5-3-alpine3.14
-ARG RUN_FROM=alpine:3.12
+ARG RUN_FROM=alpine:3.14

 FROM ${BUILD_FROM} AS builder

 RUN apk add --no-cache \

@@ -29,17 +29,6 @@ RUN cd /emqx \
 FROM $RUN_FROM

-# Basic build-time metadata as defined at http://label-schema.org
-LABEL org.label-schema.docker.dockerfile="Dockerfile" \
-    org.label-schema.license="GNU" \
-    org.label-schema.name="emqx" \
-    org.label-schema.version=${PKG_VSN} \
-    org.label-schema.description="EMQ (Erlang MQTT Broker) is a distributed, massively scalable, highly extensible MQTT messaging broker written in Erlang/OTP." \
-    org.label-schema.url="https://emqx.io" \
-    org.label-schema.vcs-type="Git" \
-    org.label-schema.vcs-url="https://github.com/emqx/emqx" \
-    maintainer="EMQ X Team <support@emqx.io>"

 ARG EMQX_NAME=emqx

 COPY deploy/docker/docker-entrypoint.sh /usr/bin/


@@ -0,0 +1,43 @@
ARG BUILD_FROM
FROM ${BUILD_FROM}
## all we need is the unzip command
RUN if command -v yum; then yum update -y && yum install -y unzip; fi
RUN if command -v apt-get; then apt-get update -y && apt-get install -y unzip; fi
ARG EMQX_ZIP_PACKAGE
COPY ${EMQX_ZIP_PACKAGE} /opt/emqx.zip
RUN unzip -q /opt/emqx.zip -d /opt/ && rm /opt/emqx.zip
COPY deploy/docker/docker-entrypoint.sh /usr/bin/
RUN ln -s /opt/emqx/bin/* /usr/local/bin/
WORKDIR /opt/emqx
RUN adduser -u 1000 emqx
RUN echo "emqx ALL=(ALL) NOPASSWD: ALL" > /etc/sudoers
RUN chgrp -Rf emqx /opt/emqx && chmod -Rf g+w /opt/emqx \
&& chown -Rf emqx /opt/emqx
USER emqx
VOLUME ["/opt/emqx/log", "/opt/emqx/data", "/opt/emqx/etc"]
# emqx will occupy these ports:
# - 1883 port for MQTT
# - 8081 for mgmt API
# - 8083 for WebSocket/HTTP
# - 8084 for WSS/HTTPS
# - 8883 port for MQTT(SSL)
# - 11883 port for internal MQTT/TCP
# - 18083 for dashboard
# - 4369 epmd (Erlang distribution port mapper daemon) listener (deprecated)
# - 4370 default Erlang distribution port
# - 5369 for gen_rpc port mapping
# - 6369 6370 for distributed node
EXPOSE 1883 8081 8083 8084 8883 11883 18083 4369 4370 5369 6369 6370
ENTRYPOINT ["/usr/bin/docker-entrypoint.sh"]
CMD ["/opt/emqx/bin/emqx", "foreground"]
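For reference, a hand-built equivalent of what make_docker_testing in the build script does with this Dockerfile; the Dockerfile path and the zip name here are assumptions:

docker build \
  --build-arg BUILD_FROM="ubuntu:20.04" \
  --build-arg EMQX_ZIP_PACKAGE="_packages/emqx/emqx-4.4.0-otp24.1.5-3-ubuntu20.04-amd64.zip" \
  --tag emqx/emqx:testing-tag \
  -f deploy/docker/Dockerfile.testing .
docker run -d --name emqx-testing -p 1883:1883 -p 18083:18083 emqx/emqx:testing-tag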


@@ -28,10 +28,18 @@ if [[ -z "$EMQX_NAME" ]]; then
 fi

 if [[ -z "$EMQX_HOST" ]]; then
-    if [[ "$EMQX_CLUSTER__K8S__ADDRESS_TYPE" == "dns" ]] && [[ -n "$EMQX_CLUSTER__K8S__NAMESPACE" ]]; then
+    if [[ "$EMQX_CLUSTER__DISCOVERY" == "dns" ]] && \
+       [[ "$EMQX_CLUSTER__DNS__TYPE" == "srv" ]] && \
+       grep -q "$(hostname).$EMQX_CLUSTER__DNS__NAME" /etc/hosts; then
+        EMQX_HOST="$(hostname).$EMQX_CLUSTER__DNS__NAME"
+    elif [[ "$EMQX_CLUSTER__DISCOVERY" == "k8s" ]] && \
+         [[ "$EMQX_CLUSTER__K8S__ADDRESS_TYPE" == "dns" ]] && \
+         [[ -n "$EMQX_CLUSTER__K8S__NAMESPACE" ]]; then
         EMQX_CLUSTER__K8S__SUFFIX=${EMQX_CLUSTER__K8S__SUFFIX:-"pod.cluster.local"}
         EMQX_HOST="${LOCAL_IP//./-}.$EMQX_CLUSTER__K8S__NAMESPACE.$EMQX_CLUSTER__K8S__SUFFIX"
-    elif [[ "$EMQX_CLUSTER__K8S__ADDRESS_TYPE" == 'hostname' ]] && [[ -n "$EMQX_CLUSTER__K8S__NAMESPACE" ]]; then
+    elif [[ "$EMQX_CLUSTER__DISCOVERY" == "k8s" ]] && \
+         [[ "$EMQX_CLUSTER__K8S__ADDRESS_TYPE" == 'hostname' ]] && \
+         [[ -n "$EMQX_CLUSTER__K8S__NAMESPACE" ]]; then
         EMQX_CLUSTER__K8S__SUFFIX=${EMQX_CLUSTER__K8S__SUFFIX:-'svc.cluster.local'}
         EMQX_HOST=$(grep -h "^$LOCAL_IP" /etc/hosts | grep -o "$(hostname).*.$EMQX_CLUSTER__K8S__NAMESPACE.$EMQX_CLUSTER__K8S__SUFFIX")
     else
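A sketch of the environment a container would need for the new dns/srv branch to be taken (names are illustrative); EMQX_HOST is only derived when the container's own hostname under EMQX_CLUSTER__DNS__NAME resolves via /etc/hosts, which is what a StatefulSet with a headless service produces:

docker run -d --name emqx \
  -e EMQX_NAME=emqx \
  -e EMQX_CLUSTER__DISCOVERY=dns \
  -e EMQX_CLUSTER__DNS__TYPE=srv \
  -e EMQX_CLUSTER__DNS__NAME="my-emqx-headless.emqx.svc.cluster.local" \
  emqx/emqx:4.4.0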


@@ -8,7 +8,7 @@ EMQX_NAME=$(subst -pkg,,$(EMQX_BUILD))
 TAR_PKG := $(EMQX_REL)/_build/$(EMQX_BUILD)/rel/emqx/emqx-$(PKG_VSN).tar.gz
 SOURCE_PKG := $(EMQX_NAME)_$(PKG_VSN)_$(shell dpkg --print-architecture)
-TARGET_PKG := $(EMQX_NAME)-$(SYSTEM)-$(PKG_VSN)-$(ARCH)
+TARGET_PKG := $(EMQX_NAME)-$(PKG_VSN)-otp$(OTP_VSN)-$(SYSTEM)-$(ARCH)

 .PHONY: all
 all: | $(BUILT)


@@ -4,7 +4,7 @@ Priority: optional
 Maintainer: emqx <contact@emqx.io>
 Build-Depends: debhelper (>=9)
 Standards-Version: 3.9.6
-Homepage: https://www.emqx.io
+Homepage: https://www.emqx.com

 Package: emqx
 Architecture: any


@@ -5,8 +5,9 @@ BUILT := $(SRCDIR)/BUILT
 dash := -
 none :=
 space := $(none) $(none)
-RPM_VSN ?= $(shell echo $(PKG_VSN) | grep -oE "[0-9]+\.[0-9]+(\.[0-9]+)?")
-RPM_REL ?= $(shell echo $(PKG_VSN) | grep -oE "(alpha|beta|rc)\.[0-9]")
+## RPM does not allow '-' in version number and release string, replace with '_'
+RPM_VSN := $(subst -,_,$(PKG_VSN))
+RPM_REL := otp$(subst -,_,$(OTP_VSN))

 ARCH ?= amd64
 ifeq ($(ARCH),mips64)

@@ -16,12 +17,8 @@ endif
 EMQX_NAME=$(subst -pkg,,$(EMQX_BUILD))
 TAR_PKG := $(EMQX_REL)/_build/$(EMQX_BUILD)/rel/emqx/emqx-$(PKG_VSN).tar.gz
-TARGET_PKG := $(EMQX_NAME)-$(SYSTEM)-$(PKG_VSN)-$(ARCH)
+TARGET_PKG := $(EMQX_NAME)-$(PKG_VSN)-otp$(OTP_VSN)-$(SYSTEM)-$(ARCH)
-ifeq ($(RPM_REL),)
-    # no tail
-    RPM_REL := 1
-endif
-SOURCE_PKG := emqx-$(SYSTEM)-$(RPM_VSN)-$(RPM_REL).$(shell uname -m)
+SOURCE_PKG := emqx-$(RPM_VSN)-$(RPM_REL).$(shell uname -m)

 SYSTEMD := $(shell if command -v systemctl >/dev/null 2>&1; then echo yes; fi)
 # Not $(PWD) as it does not work for make -C

@@ -47,7 +44,6 @@ all: | $(BUILT)
 	--define "_service_dst $(SERVICE_DST)" \
 	--define "_post_addition $(POST_ADDITION)" \
 	--define "_preun_addition $(PREUN_ADDITION)" \
-	--define "_ostype -$(SYSTEM)" \
 	--define "_sharedstatedir /var/lib" \
 	emqx.spec
 	mkdir -p $(EMQX_REL)/_packages/$(EMQX_NAME)
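With OTP_VSN folded into TARGET_PKG (and the distro prefix dropped from the intermediate rpm name), the emitted artifacts end up looking roughly like the following; the concrete version strings are illustrative:

# deb:  $(EMQX_NAME)-$(PKG_VSN)-otp$(OTP_VSN)-$(SYSTEM)-$(ARCH).deb
#       e.g. _packages/emqx/emqx-4.4.0-otp24.1.5-3-ubuntu20.04-amd64.deb
# rpm:  same TARGET_PKG pattern, renamed from the rpmbuild output emqx-<vsn>-otp<otp>.x86_64.rpm
#       e.g. _packages/emqx/emqx-4.4.0-otp24.1.5-3-centos7-amd64.rpm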


@@ -5,7 +5,7 @@
 %define _log_dir %{_var}/log/%{_name}
 %define _lib_home /usr/lib/%{_name}
 %define _var_home %{_sharedstatedir}/%{_name}
-%define _build_name_fmt %{_arch}/%{_name}%{?_ostype}-%{_version}-%{_release}.%{_arch}.rpm
+%define _build_name_fmt %{_arch}/%{_name}-%{_version}-%{_release}.%{_arch}.rpm
 %define _build_id_links none

 Name: %{_package_name}


@@ -101,6 +101,11 @@ cluster.autoclean = 5m
 ## Value: String
 ## cluster.dns.app = emqx

+## Type of dns record.
+##
+## Value: a | srv
+## cluster.dns.type = a
+
 ##--------------------------------------------------------------------
 ## Cluster using etcd

@@ -354,7 +359,7 @@ rpc.port_discovery = stateless
 ##
 ## Value: Integer [0-256]
 ## Default = 1
-#rpc.tcp_client_num = 1
+#rpc.tcp_client_num = 0

 ## RPC Client connect timeout.
 ##

@@ -2213,6 +2218,29 @@ module.presence.qos = 1
 ## module.rewrite.pub.rule.1 = x/# ^x/y/(.+)$ z/y/$1
 ## module.rewrite.sub.rule.1 = y/+/z/# ^y/(.+)/z/(.+)$ y/z/$2

+##--------------------------------------------------------------------
+## Slow Subscribers Statistics Module
+
+## The expire time of the records in the top-k table
+##
+## Value: 5 minutes
+#module.slow_subs.expire_interval = 5m
+
+## Maximum number of top-k records
+##
+## Default: 10
+#module.slow_subs.top_k_num = 10
+
+## Stats Type
+##
+## Default: whole
+#module.slow_subs.stats_type = whole
+
+## Stats Threshold
+##
+## Default: 500ms
+#module.slow_subs.threshold = 500ms
+
 ## CONFIG_SECTION_END=modules ==================================================

 ##-------------------------------------------------------------------
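To actually collect slow-subscriber statistics these keys must be uncommented (or appended) and the module enabled at boot; a minimal sketch, assuming the usual etc/ and data/ layout of a 4.x install:

cat >> etc/emqx.conf <<'EOF'
module.slow_subs.threshold = 500ms
module.slow_subs.top_k_num = 10
module.slow_subs.expire_interval = 5m
module.slow_subs.stats_type = whole
EOF
# the module must also be switched on in data/loaded_modules; this diff refers to it
# both as emqx_mod_st_statistics (Helm values) and emqx_mod_slow_subs (module further below),
# so check the loaded_modules file shipped with your build for the exact entry name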


@@ -542,4 +542,22 @@
-define(SHARE(Group, Topic), emqx_topic:join([<<?SHARE>>, Group, Topic])).
-define(IS_SHARE(Topic), case Topic of <<?SHARE, _/binary>> -> true; _ -> false end).
-define(TYPE_NAMES, {
'CONNECT'
, 'CONNACK'
, 'PUBLISH'
, 'PUBACK'
, 'PUBREC'
, 'PUBREL'
, 'PUBCOMP'
, 'SUBSCRIBE'
, 'SUBACK'
, 'UNSUBSCRIBE'
, 'UNSUBACK'
, 'PINGREQ'
, 'PINGRESP'
, 'DISCONNECT'
, 'AUTH'
}).
-endif.


@@ -29,7 +29,7 @@
 -ifndef(EMQX_ENTERPRISE).

--define(EMQX_RELEASE, {opensource, "4.3.11"}).
+-define(EMQX_RELEASE, {opensource, "4.4-beta.1"}).

 -else.


@@ -1,6 +1,6 @@
 {application, emqx_dashboard,
  [{description, "EMQ X Web Dashboard"},
-  {vsn, "4.3.8"}, % strict semver, bump manually!
+  {vsn, "4.4.0"}, % strict semver, bump manually!
   {modules, []},
   {registered, [emqx_dashboard_sup]},
   {applications, [kernel,stdlib,mnesia,minirest]},


@@ -41,18 +41,18 @@
 start_listeners() ->
     lists:foreach(fun(Listener) -> start_listener(Listener) end, listeners()).

-%% Start HTTP Listener
-start_listener({Proto, Port, Options}) when Proto == http ->
-    Dispatch = [{"/", cowboy_static, {priv_file, emqx_dashboard, "www/index.html"}},
-                {"/static/[...]", cowboy_static, {priv_dir, emqx_dashboard, "www/static"}},
-                {"/api/v4/[...]", minirest, http_handlers()}],
-    minirest:start_http(listener_name(Proto), ranch_opts(Port, Options), Dispatch);
-start_listener({Proto, Port, Options}) when Proto == https ->
+%% Start HTTP(S) Listener
+start_listener({Proto, Port, Options}) ->
     Dispatch = [{"/", cowboy_static, {priv_file, emqx_dashboard, "www/index.html"}},
                 {"/static/[...]", cowboy_static, {priv_dir, emqx_dashboard, "www/static"}},
                 {"/api/v4/[...]", minirest, http_handlers()}],
-    minirest:start_https(listener_name(Proto), ranch_opts(Port, Options), Dispatch).
+    Server = listener_name(Proto),
+    RanchOpts = ranch_opts(Port, Options),
+    case Proto of
+        http -> minirest:start_http(Server, RanchOpts, Dispatch);
+        https -> minirest:start_https(Server, RanchOpts, Dispatch)
+    end.

 ranch_opts(Port, Options0) ->
     NumAcceptors = get_value(num_acceptors, Options0, 4),

@@ -89,7 +89,7 @@ listener_name(Proto) ->
 http_handlers() ->
     Plugins = lists:map(fun(Plugin) -> Plugin#plugin.name end, emqx_plugins:list()),
     [{"/api/v4/",
-        minirest:handler(#{apps => Plugins ++ [emqx_modules],
+        minirest:handler(#{apps => Plugins ++ [emqx_modules, emqx_plugin_libs],
                            filter => fun ?MODULE:filter/1}),
         [{authorization, fun ?MODULE:is_authorized/1}]}].

@@ -116,6 +116,7 @@ is_authorized(_Path, Req) ->
         _ -> false
     end.

+filter(#{app := emqx_plugin_libs}) -> true;
 filter(#{app := emqx_modules}) -> true;
 filter(#{app := App}) ->
     case emqx_plugins:find_plugin(App) of
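Since http_handlers/0 now whitelists emqx_plugin_libs alongside emqx_modules, the REST handlers from both apps (the trace endpoints later in this diff, for example) are served under the same /api/v4 prefix behind the same basic-auth check; a quick smoke test, using the default dashboard credentials that the test suites below rely on:

curl -s -u admin:public http://127.0.0.1:18083/api/v4/trace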


@@ -54,6 +54,7 @@ groups() ->
     ].

 init_per_suite(Config) ->
+    application:load(emqx_plugin_libs),
     emqx_ct_helpers:start_apps([emqx_modules, emqx_management, emqx_dashboard]),
     Config.

@@ -165,4 +166,3 @@ api_path(Path) ->
 json(Data) ->
     {ok, Jsx} = emqx_json:safe_decode(Data, [return_maps]), Jsx.


@@ -0,0 +1,49 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_mod_slow_subs).
-behaviour(emqx_gen_mod).
-include_lib("include/emqx.hrl").
-include_lib("include/logger.hrl").
-logger_header("[SLOW Subs]").
%% emqx_gen_mod callbacks
-export([ load/1
, unload/1
, description/0
]).
-define(LIB, emqx_slow_subs).
%%--------------------------------------------------------------------
%% Load/Unload
%%--------------------------------------------------------------------
-spec(load(list()) -> ok).
load(Env) ->
emqx_mod_sup:start_child(?LIB, worker, [Env]),
ok.
-spec(unload(list()) -> ok).
unload(_Env) ->
_ = emqx_mod_sup:stop_child(?LIB),
ok.
description() ->
"EMQ X Slow Subscribers Statistics Module".

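Like the other emqx_gen_mod implementations, the slow-subs module above is toggled through data/loaded_modules or at runtime; the CLI form below assumes the 4.x `emqx_ctl modules` command set:

grep slow data/loaded_modules                      # check the persisted entry
./bin/emqx_ctl modules load emqx_mod_slow_subs     # assumption: runtime load via emqx_ctl
./bin/emqx_ctl modules unload emqx_mod_slow_subs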

@@ -23,19 +23,23 @@
 -export([ start_link/0
         , start_child/1
         , start_child/2
+        , start_child/3
         , stop_child/1
         ]).

 -export([init/1]).

 %% Helper macro for declaring children of supervisor
--define(CHILD(Mod, Type), #{id => Mod,
-                            start => {Mod, start_link, []},
+-define(CHILD(Mod, Type, Args),
+        #{id => Mod,
+          start => {Mod, start_link, Args},
           restart => permanent,
           shutdown => 5000,
           type => Type,
           modules => [Mod]}).

+-define(CHILD(MOD, Type), ?CHILD(MOD, Type, [])).
+
 -spec(start_link() -> startlink_ret()).
 start_link() ->
     supervisor:start_link({local, ?MODULE}, ?MODULE, []).

@@ -48,6 +52,10 @@ start_child(ChildSpec) when is_map(ChildSpec) ->
 start_child(Mod, Type) when is_atom(Mod) andalso is_atom(Type) ->
     assert_started(supervisor:start_child(?MODULE, ?CHILD(Mod, Type))).

+-spec start_child(atom(), atom(), list(any())) -> ok.
+start_child(Mod, Type, Args) when is_atom(Mod) andalso is_atom(Type) ->
+    assert_started(supervisor:start_child(?MODULE, ?CHILD(Mod, Type, Args))).
+
 -spec(stop_child(any()) -> ok | {error, term()}).
 stop_child(ChildId) ->
     case supervisor:terminate_child(?MODULE, ChildId) of

@@ -61,6 +69,7 @@ stop_child(ChildId) ->
 init([]) ->
     ok = emqx_tables:new(emqx_modules, [set, public, {write_concurrency, true}]),
+    emqx_slow_subs:init_tab(),
     {ok, {{one_for_one, 10, 100}, []}}.

 %%--------------------------------------------------------------------

@@ -69,6 +78,5 @@ init([]) ->
 assert_started({ok, _Pid}) -> ok;
 assert_started({ok, _Pid, _Info}) -> ok;
-assert_started({error, {already_tarted, _Pid}}) -> ok;
+assert_started({error, {already_started, _Pid}}) -> ok;
 assert_started({error, Reason}) -> erlang:error(Reason).


@@ -0,0 +1,39 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_mod_trace).
-behaviour(emqx_gen_mod).
-include_lib("emqx/include/emqx.hrl").
-include_lib("emqx/include/logger.hrl").
-export([ load/1
, unload/1
, description/0
]).
-spec description() -> string().
description() ->
"EMQ X Trace Module".
-spec load(any()) -> ok.
load(_Env) ->
emqx_mod_sup:start_child(emqx_trace, worker).
-spec unload(any()) -> ok.
unload(_Env) ->
emqx_mod_sup:stop_child(emqx_trace).


@@ -0,0 +1,98 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_mod_trace_api).
%% API
-export([ list_trace/2
, create_trace/2
, disable_trace/2
, delete_trace/2
, clear_traces/2
, download_zip_log/2
, stream_log_file/2
]).
-import(minirest, [return/1]).
-rest_api(#{name => list_trace,
method => 'GET',
path => "/trace/",
func => list_trace,
descr => "list all traces"}).
-rest_api(#{name => create_trace,
method => 'POST',
path => "/trace/",
func => create_trace,
descr => "create trace"}).
-rest_api(#{name => delete_trace,
method => 'DELETE',
path => "/trace/:bin:name",
func => delete_trace,
descr => "delete trace"}).
-rest_api(#{name => clear_trace,
method => 'DELETE',
path => "/trace/",
func => clear_traces,
descr => "clear all traces"}).
-rest_api(#{name => disable_trace,
method => 'PUT',
path => "/trace/:bin:name/stop",
func => disable_trace,
descr => "stop trace"}).
-rest_api(#{name => download_zip_log,
method => 'GET',
path => "/trace/:bin:name/download",
func => download_zip_log,
descr => "download trace's log"}).
-rest_api(#{name => stream_log_file,
method => 'GET',
path => "/trace/:bin:name/log",
func => stream_log_file,
descr => "download trace's log"}).
list_trace(Path, Params) ->
return(emqx_trace_api:list_trace(Path, Params)).
create_trace(Path, Params) ->
return(emqx_trace_api:create_trace(Path, Params)).
delete_trace(Path, Params) ->
return(emqx_trace_api:delete_trace(Path, Params)).
clear_traces(Path, Params) ->
return(emqx_trace_api:clear_traces(Path, Params)).
disable_trace(#{name := Name}, Params) ->
return(emqx_trace_api:update_trace(#{name => Name, operation => disable}, Params)).
download_zip_log(Path, Params) ->
case emqx_trace_api:download_zip_log(Path, Params) of
{ok, File} -> minirest:return_file(File);
{error, Reason} -> return({error, 'NOT_FOUND', Reason})
end.
stream_log_file(Path, Params) ->
case emqx_trace_api:stream_log_file(Path, Params) of
{ok, File} -> return({ok, File});
{error, Reason} -> return({error, 'NOT_FOUND', Reason})
end.
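A hedged end-to-end walk over the endpoints declared above, mirroring what the suite at the end of this diff does via httpc; host, credentials and the trace name are the suite's defaults:

AUTH='admin:public'
BASE='http://127.0.0.1:18083/api/v4'
curl -s -u "$AUTH" -X POST "$BASE/trace" -H 'Content-Type: application/json' \
     -d '{"name":"test-name","type":"topic","topic":"/x/y/z"}'       # create
curl -s -u "$AUTH" "$BASE/trace"                                      # list
curl -s -u "$AUTH" -X PUT "$BASE/trace/test-name/stop" -d '{}'        # stop
curl -s -u "$AUTH" "$BASE/trace/test-name/log?bytes=10"               # stream first bytes of the log
curl -s -u "$AUTH" -X DELETE "$BASE/trace/test-name"                  # delete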


@@ -1,6 +1,6 @@
 {application, emqx_modules,
  [{description, "EMQ X Module Management"},
-  {vsn, "4.3.4"},
+  {vsn, "4.4.1"},
   {modules, []},
   {applications, [kernel,stdlib]},
   {mod, {emqx_modules_app, []}},


@@ -1,33 +1,13 @@
 %% -*-: erlang -*-
 {VSN,
-  [
-   {<<"4\\.3\\.[2-3]">>, [
-     {load_module, emqx_mod_presence, brutal_purge, soft_purge, []}
-   ]},
-   {"4.3.1", [
-     {load_module, emqx_mod_presence, brutal_purge, soft_purge, []},
-     {load_module, emqx_mod_api_topic_metrics, brutal_purge, soft_purge, []}
-   ]},
-   {"4.3.0", [
-     {update, emqx_mod_delayed, {advanced, []}},
-     {load_module, emqx_mod_presence, brutal_purge, soft_purge, []},
-     {load_module, emqx_mod_api_topic_metrics, brutal_purge, soft_purge, []}
-   ]},
+  [{"4.4.0",
+    [{load_module, emqx_mod_presence, brutal_purge, soft_purge, []},
+     {load_module, emqx_mod_sup, brutal_purge, soft_purge, []}]},
    {<<".*">>, []}
   ],
-  [
-   {<<"4\\.3\\.[2-3]">>, [
-     {load_module, emqx_mod_presence, brutal_purge, soft_purge, []}
-   ]},
-   {"4.3.1", [
-     {load_module, emqx_mod_presence, brutal_purge, soft_purge, []},
-     {load_module, emqx_mod_api_topic_metrics, brutal_purge, soft_purge, []}
-   ]},
-   {"4.3.0", [
-     {update, emqx_mod_delayed, {advanced, []}},
-     {load_module, emqx_mod_presence, brutal_purge, soft_purge, []},
-     {load_module, emqx_mod_api_topic_metrics, brutal_purge, soft_purge, []}
-   ]},
+  [{"4.4.0",
+    [{load_module, emqx_mod_presence, brutal_purge, soft_purge, []},
+     {load_module, emqx_mod_sup, brutal_purge, soft_purge, []}]},
    {<<".*">>, []}
   ]
 }.


@@ -62,7 +62,7 @@ t_mod_rewrite(_Config) ->
     timer:sleep(100),
     ?assertEqual([], emqx_broker:subscriptions(<<"rewrite_client">>)),
     %% Pub Rules
-    {ok, _Props, _} = emqtt:subscribe(C, [{Topic, ?QOS_1} || Topic <- PubDestTopics]),
+    {ok, _Props1, _} = emqtt:subscribe(C, [{Topic, ?QOS_1} || Topic <- PubDestTopics]),
     RecvTopics2 = [begin
                        ok = emqtt:publish(C, Topic, <<"payload">>),
                        {ok, #{topic := RecvTopic}} = receive_publish(100),


@@ -41,9 +41,8 @@ t_start_child(_) ->
               modules => [Mod]},
     ok = emqx_mod_sup:start_child(Mod, worker),
-    ?assertError({already_started, _}, emqx_mod_sup:start_child(Spec)),
+    ?assertEqual(ok, emqx_mod_sup:start_child(Spec)),

     ok = emqx_mod_sup:stop_child(Mod),
     {error, not_found} = emqx_mod_sup:stop_child(Mod),
     ok.


@@ -0,0 +1,187 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_mod_trace_api_SUITE).
%% API
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("emqx/include/emqx.hrl").
-define(HOST, "http://127.0.0.1:18083/").
-define(API_VERSION, "v4").
-define(BASE_PATH, "api").
%%--------------------------------------------------------------------
%% Setups
%%--------------------------------------------------------------------
all() ->
emqx_ct:all(?MODULE).
init_per_suite(Config) ->
application:load(emqx_plugin_libs),
emqx_ct_helpers:start_apps([emqx_modules, emqx_dashboard]),
Config.
end_per_suite(_Config) ->
emqx_ct_helpers:stop_apps([emqx_modules, emqx_dashboard]).
t_http_test(_Config) ->
emqx_trace:clear(),
load(),
Header = auth_header_(),
%% list
{ok, Empty} = request_api(get, api_path("trace"), Header),
?assertEqual(#{<<"code">> => 0, <<"data">> => []}, json(Empty)),
%% create
ErrorTrace = #{},
{ok, Error} = request_api(post, api_path("trace"), Header, ErrorTrace),
?assertEqual(#{<<"message">> => <<"name required">>,
<<"code">> => <<"INCORRECT_PARAMS">>}, json(Error)),
Name = <<"test-name">>,
Trace = [
{<<"name">>, Name},
{<<"type">>, <<"topic">>},
{<<"topic">>, <<"/x/y/z">>}
],
{ok, Create} = request_api(post, api_path("trace"), Header, Trace),
?assertEqual(#{<<"code">> => 0}, json(Create)),
{ok, List} = request_api(get, api_path("trace"), Header),
#{<<"code">> := 0, <<"data">> := [Data]} = json(List),
?assertEqual(Name, maps:get(<<"name">>, Data)),
%% update
{ok, Update} = request_api(put, api_path("trace/test-name/stop"), Header, #{}),
?assertEqual(#{<<"code">> => 0,
<<"data">> => #{<<"enable">> => false,
<<"name">> => <<"test-name">>}}, json(Update)),
{ok, List1} = request_api(get, api_path("trace"), Header),
#{<<"code">> := 0, <<"data">> := [Data1]} = json(List1),
Node = atom_to_binary(node()),
?assertMatch(#{
<<"status">> := <<"stopped">>,
<<"name">> := <<"test-name">>,
<<"log_size">> := #{Node := _},
<<"start_at">> := _,
<<"end_at">> := _,
<<"type">> := <<"topic">>,
<<"topic">> := <<"/x/y/z">>
}, Data1),
%% delete
{ok, Delete} = request_api(delete, api_path("trace/test-name"), Header),
?assertEqual(#{<<"code">> => 0}, json(Delete)),
{ok, DeleteNotFound} = request_api(delete, api_path("trace/test-name"), Header),
?assertEqual(#{<<"code">> => <<"NOT_FOUND">>,
<<"message">> => <<"test-name NOT FOUND">>}, json(DeleteNotFound)),
{ok, List2} = request_api(get, api_path("trace"), Header),
?assertEqual(#{<<"code">> => 0, <<"data">> => []}, json(List2)),
%% clear
{ok, Create1} = request_api(post, api_path("trace"), Header, Trace),
?assertEqual(#{<<"code">> => 0}, json(Create1)),
{ok, Clear} = request_api(delete, api_path("trace"), Header),
?assertEqual(#{<<"code">> => 0}, json(Clear)),
unload(),
ok.
t_stream_log(_Config) ->
application:set_env(emqx, allow_anonymous, true),
emqx_trace:clear(),
load(),
ClientId = <<"client-stream">>,
Now = erlang:system_time(second),
Name = <<"test_stream_log">>,
Start = to_rfc3339(Now - 10),
ok = emqx_trace:create([{<<"name">>, Name},
{<<"type">>, <<"clientid">>}, {<<"clientid">>, ClientId}, {<<"start_at">>, Start}]),
ct:sleep(200),
{ok, Client} = emqtt:start_link([{clean_start, true}, {clientid, ClientId}]),
{ok, _} = emqtt:connect(Client),
[begin _ = emqtt:ping(Client) end ||_ <- lists:seq(1, 5)],
emqtt:publish(Client, <<"/good">>, #{}, <<"ghood1">>, [{qos, 0}]),
emqtt:publish(Client, <<"/good">>, #{}, <<"ghood2">>, [{qos, 0}]),
ok = emqtt:disconnect(Client),
ct:sleep(200),
File = emqx_trace:log_file(Name, Now),
ct:pal("FileName: ~p", [File]),
{ok, FileBin} = file:read_file(File),
ct:pal("FileBin: ~p ~s", [byte_size(FileBin), FileBin]),
Header = auth_header_(),
{ok, Binary} = request_api(get, api_path("trace/test_stream_log/log?bytes=10"), Header),
#{<<"code">> := 0, <<"data">> := #{<<"meta">> := Meta, <<"items">> := Bin}} = json(Binary),
?assertEqual(10, byte_size(Bin)),
?assertEqual(#{<<"position">> => 10, <<"bytes">> => 10}, Meta),
Path = api_path("trace/test_stream_log/log?position=20&bytes=10"),
{ok, Binary1} = request_api(get, Path, Header),
#{<<"code">> := 0, <<"data">> := #{<<"meta">> := Meta1, <<"items">> := Bin1}} = json(Binary1),
?assertEqual(#{<<"position">> => 30, <<"bytes">> => 10}, Meta1),
?assertEqual(10, byte_size(Bin1)),
unload(),
ok.
to_rfc3339(Second) ->
list_to_binary(calendar:system_time_to_rfc3339(Second)).
auth_header_() ->
auth_header_("admin", "public").
auth_header_(User, Pass) ->
Encoded = base64:encode_to_string(lists:append([User, ":", Pass])),
{"Authorization", "Basic " ++ Encoded}.
request_api(Method, Url, Auth) -> do_request_api(Method, {Url, [Auth]}).
request_api(Method, Url, Auth, Body) ->
Request = {Url, [Auth], "application/json", emqx_json:encode(Body)},
do_request_api(Method, Request).
do_request_api(Method, Request) ->
ct:pal("Method: ~p, Request: ~p", [Method, Request]),
case httpc:request(Method, Request, [], [{body_format, binary}]) of
{error, socket_closed_remotely} ->
{error, socket_closed_remotely};
{error,{shutdown, server_closed}} ->
{error, server_closed};
{ok, {{"HTTP/1.1", Code, _}, _Headers, Return} }
when Code =:= 200 orelse Code =:= 201 ->
{ok, Return};
{ok, {Reason, _, _}} ->
{error, Reason}
end.
api_path(Path) ->
?HOST ++ filename:join([?BASE_PATH, ?API_VERSION, Path]).
json(Data) ->
{ok, Jsx} = emqx_json:safe_decode(Data, [return_maps]), Jsx.
load() ->
emqx_trace:start_link().
unload() ->
gen_server:stop(emqx_trace).
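To run this suite locally, something along these lines should work; the app path and the vendored rebar3 are assumptions about the repository layout:

./rebar3 ct --suite apps/emqx_modules/test/emqx_mod_trace_api_SUITE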
