Merge remote-tracking branch 'origin/master' into build-with-mix

Thales Macedo Garitezi, 2021-11-23 16:19:11 -03:00
commit f6d0159371 (GPG Key ID: DD279F8152A9B6DD; no known key found for this signature in database)
307 changed files with 16532 additions and 8788 deletions


@@ -53,7 +53,7 @@ emqx_test(){
         exit 1
     fi
     IDLE_TIME=0
-    while ! curl http://localhost:18083/api/v5/status >/dev/null 2>&1; do
+    while ! curl http://127.0.0.1:18083/api/v5/status >/dev/null 2>&1; do
         if [ $IDLE_TIME -gt 10 ]
         then
             echo "emqx running error"
@@ -155,7 +155,7 @@ EOF
         exit 1
     fi
     IDLE_TIME=0
-    while ! curl http://localhost:18083/api/v5/status >/dev/null 2>&1; do
+    while ! curl http://127.0.0.1:18083/api/v5/status >/dev/null 2>&1; do
         if [ $IDLE_TIME -gt 10 ]
         then
             echo "emqx running error"
@@ -184,7 +184,7 @@ EOF
         exit 1
     fi
     IDLE_TIME=0
-    while ! curl http://localhost:18083/api/v5/status >/dev/null 2>&1; do
+    while ! curl http://127.0.0.1:18083/api/v5/status >/dev/null 2>&1; do
         if [ $IDLE_TIME -gt 10 ]
         then
             echo "emqx service error"


@@ -0,0 +1,49 @@
.PHONY: help up down ct ct-all bash run
define usage
make -f .ci/docker-compose-file/Makefile.local up
make -f .ci/docker-compose-file/Makefile.local ct CONTAINER=erlang24 SUITE=apps/emqx_authn/test/emqx_authn_mnesia_SUITE.erl
make -f .ci/docker-compose-file/Makefile.local down
endef
export usage
help:
@echo "$$usage"
up:
env \
MYSQL_TAG=8 \
REDIS_TAG=6 \
MONGO_TAG=4 \
PGSQL_TAG=13 \
LDAP_TAG=2.4.50 \
docker-compose \
-f .ci/docker-compose-file/docker-compose.yaml \
-f .ci/docker-compose-file/docker-compose-ldap-tcp.yaml \
-f .ci/docker-compose-file/docker-compose-mongo-single-tcp.yaml \
-f .ci/docker-compose-file/docker-compose-mysql-tcp.yaml \
-f .ci/docker-compose-file/docker-compose-pgsql-tcp.yaml \
-f .ci/docker-compose-file/docker-compose-redis-single-tcp.yaml \
up -d --build
down:
docker-compose \
-f .ci/docker-compose-file/docker-compose.yaml \
-f .ci/docker-compose-file/docker-compose-ldap-tcp.yaml \
-f .ci/docker-compose-file/docker-compose-mongo-single-tcp.yaml \
-f .ci/docker-compose-file/docker-compose-mysql-tcp.yaml \
-f .ci/docker-compose-file/docker-compose-pgsql-tcp.yaml \
-f .ci/docker-compose-file/docker-compose-redis-single-tcp.yaml \
down
ct:
docker exec -i "$(CONTAINER)" bash -c "rebar3 ct --name 'test@127.0.0.1' -v --suite $(SUITE)"
ct-all:
docker exec -i "$(CONTAINER)" bash -c "make ct"
bash:
docker exec -it "$(CONTAINER)" bash
run:
docker exec -it "$(CONTAINER)" bash -c "make run";
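For example, to run a single authn suite locally with the erlang24 builder container defined above, the targets compose as follows (same invocation as in the usage text):

make -f .ci/docker-compose-file/Makefile.local up
make -f .ci/docker-compose-file/Makefile.local bash CONTAINER=erlang24
make -f .ci/docker-compose-file/Makefile.local ct CONTAINER=erlang24 SUITE=apps/emqx_authn/test/emqx_authn_mnesia_SUITE.erl
make -f .ci/docker-compose-file/Makefile.local down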


@@ -0,0 +1,36 @@
x-default-emqx: &default-emqx
env_file:
- conf.cluster.env
healthcheck:
test: ["CMD", "/opt/emqx/bin/emqx_ctl", "status"]
interval: 5s
timeout: 25s
retries: 5
services:
emqx1:
<<: *default-emqx
container_name: node1.emqx.io
restart: on-failure
environment:
- "EMQX_HOST=node1.emqx.io"
- "EMQX_DB__BACKEND=rlog"
- "EMQX_DB__ROLE=core"
- "EMQX_CLUSTER__STATIC__SEEDS=[emqx@node1.emqx.io]"
- "EMQX_LISTENERS__TCP__DEFAULT__PROXY_PROTOCOL=false"
- "EMQX_LISTENERS__WS__DEFAULT__PROXY_PROTOCOL=false"
emqx2:
<<: *default-emqx
container_name: node2.emqx.io
depends_on:
- emqx1
restart: on-failure
environment:
- "EMQX_HOST=node2.emqx.io"
- "EMQX_DB__BACKEND=rlog"
- "EMQX_DB__ROLE=replicant"
- "EMQX_DB__CORE_NODES=emqx@node1.emqx.io"
- "EMQX_CLUSTER__STATIC__SEEDS=[emqx@node1.emqx.io]"
- "EMQX_LISTENERS__TCP__DEFAULT__PROXY_PROTOCOL=false"
- "EMQX_LISTENERS__WS__DEFAULT__PROXY_PROTOCOL=false"


@@ -1,5 +1,15 @@
 version: '3.9'
+x-default-emqx: &default-emqx
+  image: ${_EMQX_DOCKER_IMAGE_TAG}
+  env_file:
+    - conf.cluster.env
+  healthcheck:
+    test: ["CMD", "/opt/emqx/bin/emqx_ctl", "status"]
+    interval: 5s
+    timeout: 25s
+    retries: 5
 services:
   haproxy:
     container_name: haproxy
@@ -28,34 +38,20 @@ services:
       haproxy -f /usr/local/etc/haproxy/haproxy.cfg
   emqx1:
+    <<: *default-emqx
     container_name: node1.emqx.io
-    image: $TARGET:$EMQX_TAG
-    env_file:
-      - conf.cluster.env
     environment:
       - "EMQX_HOST=node1.emqx.io"
-    healthcheck:
-      test: ["CMD", "/opt/emqx/bin/emqx_ctl", "status"]
-      interval: 5s
-      timeout: 25s
-      retries: 5
     networks:
       emqx_bridge:
         aliases:
           - node1.emqx.io
   emqx2:
+    <<: *default-emqx
     container_name: node2.emqx.io
-    image: $TARGET:$EMQX_TAG
-    env_file:
-      - conf.cluster.env
     environment:
       - "EMQX_HOST=node2.emqx.io"
-    healthcheck:
-      test: ["CMD", "/opt/emqx/bin/emqx", "ping"]
-      interval: 5s
-      timeout: 25s
-      retries: 5
     networks:
       emqx_bridge:
         aliases:


@@ -5,6 +5,8 @@ services:
     container_name: mysql
     image: mysql:${MYSQL_TAG}
     restart: always
+    ports:
+      - "3306:3306"
     environment:
       MYSQL_ROOT_PASSWORD: public
       MYSQL_DATABASE: mqtt


@@ -3,7 +3,7 @@ version: '3.9'
 services:
   erlang23:
     container_name: erlang23
-    image: ghcr.io/emqx/emqx-builder/5.0:23.2.7.2-emqx-2-ubuntu20.04
+    image: ghcr.io/emqx/emqx-builder/5.0-2:23.3.4.9-3-ubuntu20.04
     env_file:
       - conf.env
     environment:
@@ -23,7 +23,7 @@ services:
   erlang24:
     container_name: erlang24
-    image: ghcr.io/emqx/emqx-builder/5.0:24.1.1-emqx-1-ubuntu20.04
+    image: ghcr.io/emqx/emqx-builder/5.0-2:24.1.5-2-ubuntu20.04
     env_file:
       - conf.env
     environment:


@@ -54,7 +54,6 @@ backend emqx_dashboard_back
     server emqx-1 node1.emqx.io:18083
     server emqx-2 node2.emqx.io:18083
 ##----------------------------------------------------------------
 ## public
 ##----------------------------------------------------------------


@@ -10,9 +10,9 @@ RUN wget ftp://ftp.openldap.org/pub/OpenLDAP/openldap-release/openldap-${LDAP_TA
     && cd .. && rm -rf openldap-${LDAP_TAG}
 COPY .ci/docker-compose-file/openldap/slapd.conf /usr/local/etc/openldap/slapd.conf
-COPY apps/emqx_auth_ldap/emqx.io.ldif /usr/local/etc/openldap/schema/emqx.io.ldif
-COPY apps/emqx_auth_ldap/emqx.schema /usr/local/etc/openldap/schema/emqx.schema
-COPY apps/emqx_auth_ldap/test/certs/*.pem /usr/local/etc/openldap/
+COPY apps/emqx_authn/test/data/emqx.io.ldif /usr/local/etc/openldap/schema/emqx.io.ldif
+COPY apps/emqx_authn/test/data/emqx.schema /usr/local/etc/openldap/schema/emqx.schema
+COPY apps/emqx_authn/test/data/certs/*.pem /usr/local/etc/openldap/
 RUN mkdir -p /usr/local/etc/openldap/data \
     && slapadd -l /usr/local/etc/openldap/schema/emqx.io.ldif -f /usr/local/etc/openldap/slapd.conf


@@ -1,21 +1,30 @@
 #!/bin/sh
 ## This script is to run emqx cluster smoke tests (fvt) in github action
-## This script is executed in pacho_client
+## This script is executed in paho_client
 set -x
 set +e
-LB="haproxy"
+EMQX_TEST_DB_BACKEND=$1
+if [ "$EMQX_TEST_DB_BACKEND" = "rlog" ]
+then
+    # TODO: target only replica to avoid replication races
+    # see: https://github.com/emqx/emqx/issues/6094
+    TARGET_HOST="node2.emqx.io"
+else
+    # use loadbalancer
+    TARGET_HOST="haproxy"
+fi
 apk update && apk add git curl
 git clone -b develop-4.0 https://github.com/emqx/paho.mqtt.testing.git /paho.mqtt.testing
 pip install pytest
-pytest -v /paho.mqtt.testing/interoperability/test_client/V5/test_connect.py -k test_basic --host "$LB"
+pytest -v /paho.mqtt.testing/interoperability/test_client/V5/test_connect.py -k test_basic --host "$TARGET_HOST"
 RESULT=$?
-pytest -v /paho.mqtt.testing/interoperability/test_client --host "$LB"
+pytest -v /paho.mqtt.testing/interoperability/test_client --host "$TARGET_HOST"
 RESULT=$(( RESULT + $? ))
 # pytest -v /paho.mqtt.testing/interoperability/test_cluster --host1 "node1.emqx.io" --host2 "node2.emqx.io"
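The script now takes the cluster DB backend as its first positional argument; the docker-test workflow further below invokes it along these lines (the backend value is one of mnesia or rlog):

docker exec -i python /scripts/pytest.sh rlog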


@@ -0,0 +1,51 @@
#!/bin/bash
set -euxo pipefail
# _EMQX_DOCKER_IMAGE_TAG is shared with docker-compose file
export _EMQX_DOCKER_IMAGE_TAG="$1"
_EMQX_TEST_DB_BACKEND="${2:-${_EMQX_TEST_DB_BACKEND:-mnesia}}"
if [ "$_EMQX_TEST_DB_BACKEND" = "rlog" ]
then
CLUSTER_OVERRIDES="-f .ci/docker-compose-file/docker-compose-emqx-cluster-rlog.override.yaml"
else
CLUSTER_OVERRIDES=""
fi
{
echo "HOCON_ENV_OVERRIDE_PREFIX=EMQX_"
echo "EMQX_ZONES__DEFAULT__MQTT__RETRY_INTERVAL=2s"
echo "EMQX_ZONES__DEFAULT__MQTT__MAX_TOPIC_ALIAS=10"
} >> .ci/docker-compose-file/conf.cluster.env
is_node_up() {
local node
node="$1"
docker exec -i "$node" \
bash -c "emqx eval \"['emqx@node1.emqx.io','emqx@node2.emqx.io'] = maps:get(running_nodes, ekka_cluster:info()).\"" > /dev/null 2>&1
}
is_node_listening() {
local node
node="$1"
docker exec -i "$node" \
emqx eval "ok = case gen_tcp:connect(\"localhost\", 1883, []) of {ok, P} -> gen_tcp:close(P), ok; _ -> exit(1) end." > /dev/null 2>&1
}
is_cluster_up() {
is_node_up node1.emqx.io && \
is_node_up node2.emqx.io && \
is_node_listening node1.emqx.io && \
is_node_listening node2.emqx.io
}
docker-compose \
-f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml \
$CLUSTER_OVERRIDES \
-f .ci/docker-compose-file/docker-compose-python.yaml \
up -d
while ! is_cluster_up; do
echo "['$(date -u +"%Y-%m-%dT%H:%M:%SZ")']:waiting emqx";
sleep 5;
done
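A hedged usage sketch of this helper, mirroring how the docker-test workflow calls it (the image tag comes from ./pkg-vsn.sh; any other locally built tag would work the same way):

# bring up a two-node cluster from a locally built image, using the rlog DB backend
./.ci/docker-compose-file/scripts/run-emqx.sh "emqx/emqx:$(./pkg-vsn.sh)" rlog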


@@ -9,6 +9,9 @@ labels: "Support, needs-triage"
 <!-- Note, lacking of information will delay the handling of issue
 See our github issue handling flow here:
 https://github.com/emqx/emqx/blob/master/.github/ISSUE_TEMPLATE/assets/issue-handling.png
+For support in Chinese, please visit https://askemq.com/
+中文支持 请访问: https://askemq.com/
 -->
 ### Subject of the support


@@ -11,92 +11,69 @@ on:
     types:
       - published
   workflow_dispatch:
+    inputs:
+      which_branch:
+        required: false
 jobs:
   prepare:
-    strategy:
-      matrix:
-        otp:
-          - "23.2.7.2-emqx-2"
-          - "24.1.1-emqx-1"
     runs-on: ubuntu-20.04
-    container: "ghcr.io/emqx/emqx-builder/5.0:${{ matrix.otp }}-ubuntu20.04"
+    # prepare source with any OTP version, no need for a matrix
+    container: "ghcr.io/emqx/emqx-builder/5.0-2:24.1.5-2-ubuntu20.04"
     outputs:
-      profiles: ${{ steps.set_profile.outputs.profiles }}
-      old_vsns: ${{ steps.set_profile.outputs.old_vsns }}
+      old_vsns: ${{ steps.find_old_versons.outputs.old_vsns }}
     steps:
       - uses: actions/checkout@v2
         with:
+          ref: ${{ github.event.inputs.which_branch }}
           path: source
           fetch-depth: 0
-      - name: set profile
-        id: set_profile
+      - name: find old versions
+        id: find_old_versons
        shell: bash
        working-directory: source
        run: |
          vsn="$(./pkg-vsn.sh)"
          pre_vsn="$(echo $vsn | grep -oE '^[0-9]+.[0-9]')"
-          if make emqx-ee --dry-run > /dev/null 2>&1; then
-            old_vsns="$(git tag -l "e$pre_vsn.[0-9]" | xargs echo -n | sed "s/e$vsn//")"
-            echo "::set-output name=old_vsns::$old_vsns"
-            echo "::set-output name=profiles::[\"emqx-ee\"]"
-          else
-            old_vsns="$(git tag -l "v$pre_vsn.[0-9]" | xargs echo -n | sed "s/v$vsn//")"
-            echo "::set-output name=old_vsns::$old_vsns"
-            echo "::set-output name=profiles::[\"emqx\", \"emqx-edge\"]"
-          fi
-      - name: get otp version
-        id: get_otp_version
-        run: |
-          otp="$(erl -eval '{ok, Version} = file:read_file(filename:join([code:root_dir(), "releases", erlang:system_info(otp_release), "OTP_VERSION"])), io:fwrite(Version), halt().' -noshell)"
-          echo "::set-output name=otp::$otp"
-      - name: set get token
-        if: endsWith(github.repository, 'enterprise')
-        run: |
-          echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
-          git config --global credential.helper store
-      - name: get deps
-        working-directory: source
-        run: |
-          make ensure-rebar3
-          ./rebar3 as default get-deps
-          rm -rf rebar.lock
-      - name: gen zip file
-        run: zip -ryq source-${{ steps.get_otp_version.outputs.otp }}.zip source/* source/.[^.]*
+          old_vsns="$(git tag -l "v$pre_vsn.[0-9]" | xargs echo -n | sed "s/v$vsn//")"
+          echo "::set-output name=old_vsns::$old_vsns"
+      - name: get_all_deps
+        run: |
+          make -C source deps-all
+          zip -ryq source.zip source/* source/.[^.]*
       - uses: actions/upload-artifact@v2
         with:
-          name: source-${{ steps.get_otp_version.outputs.otp }}
-          path: source-${{ steps.get_otp_version.outputs.otp }}.zip
+          name: source
+          path: source.zip
   windows:
     runs-on: windows-2019
     needs: prepare
-    if: endsWith(github.repository, 'emqx')
     strategy:
       fail-fast: false
       matrix:
-        profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
-        exclude:
-          - profile: emqx-edge
+        profile: # only CE for windows
+          - emqx
+        otp:
+          - 23.2
     steps:
       - uses: actions/download-artifact@v2
         with:
-          name: source-23.2.7.2-emqx-2
+          name: source
           path: .
       - name: unzip source code
-        run: Expand-Archive -Path source-23.2.7.2-emqx-2.zip -DestinationPath ./
+        run: Expand-Archive -Path source.zip -DestinationPath ./
       - uses: ilammy/msvc-dev-cmd@v1
       - uses: gleam-lang/setup-erlang@v1.1.2
         id: install_erlang
         ## gleam-lang/setup-erlang does not yet support the installation of otp24 on windows
         with:
-          otp-version: 23.2
+          otp-version: ${{ matrix.otp }}
       - name: build
         env:
           PYTHON: python
@@ -108,10 +85,10 @@ jobs:
          $version = $( "${{ github.ref }}" -replace "^(.*)/(.*)/" )
          if ($version -match "^v[0-9]+\.[0-9]+(\.[0-9]+)?") {
            $regex = "[0-9]+\.[0-9]+(-alpha|-beta|-rc)?\.[0-9]+"
-            $pkg_name = "${{ matrix.profile }}-windows-$([regex]::matches($version, $regex).value).zip"
+            $pkg_name = "${{ matrix.profile }}-$([regex]::matches($version, $regex).value)-otp${{ matrix.otp }}-windows-amd64.zip"
          }
          else {
-            $pkg_name = "${{ matrix.profile }}-windows-$($version -replace '/').zip"
+            $pkg_name = "${{ matrix.profile }}-$($version -replace '/')-otp${{ matrix.otp }}-windows-amd64.zip"
          }
          ## We do not build/release bcrypt and quic for windows package
          Remove-Item -Recurse -Force -Path _build/default/lib/bcrypt/
@@ -140,34 +117,32 @@ jobs:
       - uses: actions/upload-artifact@v1
         if: startsWith(github.ref, 'refs/tags/')
         with:
-          name: ${{ matrix.profile }}-23.2.7.2-emqx-2
+          name: ${{ matrix.profile }}
           path: source/_packages/${{ matrix.profile }}/.
   mac:
     needs: prepare
     strategy:
       fail-fast: false
       matrix:
-        profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
+        profile: # no EDGE for mac
+          - emqx
+          - emqx-ee
+        otp:
+          - 24.1.5-2
         macos:
           - macos-11
           - macos-10.15
-        otp:
-          - 24.1.1-emqx-1
         exclude:
           - profile: emqx-edge
     runs-on: ${{ matrix.macos }}
     steps:
       - uses: actions/download-artifact@v2
         with:
-          name: source-${{ matrix.otp }}
+          name: source
           path: .
       - name: unzip source code
-        run: unzip -q source-${{ matrix.otp }}.zip
+        run: unzip -q source.zip
       - name: prepare
         run: |
           brew update
@@ -182,8 +157,12 @@ jobs:
       - name: build erlang
         if: steps.cache.outputs.cache-hit != 'true'
         timeout-minutes: 60
+        env:
+          KERL_BUILD_BACKEND: git
+          OTP_GITHUB_URL: https://github.com/emqx/otp
         run: |
-          kerl build git https://github.com/emqx/otp.git OTP-${{ matrix.otp }} ${{ matrix.otp }}
+          kerl update releases
+          kerl build ${{ matrix.otp }}
           kerl install ${{ matrix.otp }} $HOME/.kerl/${{ matrix.otp }}
       - name: build
         working-directory: source
@@ -191,11 +170,12 @@ jobs:
          . $HOME/.kerl/${{ matrix.otp }}/activate
          make ensure-rebar3
          sudo cp rebar3 /usr/local/bin/rebar3
+          rm -rf _build/${{ matrix.profile }}/lib
          make ${{ matrix.profile }}-zip
       - name: test
         working-directory: source
         run: |
-          pkg_name=$(find _packages/${{ matrix.profile }} -mindepth 1 -maxdepth 1 -iname \*.zip | head)
+          pkg_name=$(find _packages/${{ matrix.profile }} -mindepth 1 -maxdepth 1 -iname \*.zip)
          unzip -q $pkg_name
          # gsed -i '/emqx_telemetry/d' ./emqx/data/loaded_plugins
          ./emqx/bin/emqx start || cat emqx/log/erlang.log.1
@@ -230,7 +210,15 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
+        profile: ## all editions for linux
+          - emqx-edge
+          - emqx
+          - emqx-ee
+        otp:
+          - 24.1.5-2 # we test with OTP 23, but only build package on OTP 24 versions
+        arch:
+          - amd64
+          - arm64
         os:
           - ubuntu20.04
           - ubuntu18.04
@@ -240,18 +228,9 @@ jobs:
           # - opensuse
           - centos8
           - centos7
-          - centos6
           - raspbian10
           # - raspbian9
-        arch:
-          - amd64
-          - arm64
-        otp:
-          - 23.2.7.2-emqx-2
-          - 24.1.1-emqx-1
         exclude:
-          - os: centos6
-            arch: arm64
           - os: raspbian9
             arch: amd64
           - os: raspbian10
@@ -277,12 +256,13 @@ jobs:
           platforms: all
       - uses: actions/download-artifact@v2
         with:
-          name: source-${{ matrix.otp }}
+          name: source
           path: .
       - name: unzip source code
-        run: unzip -q source-${{ matrix.otp }}.zip
+        run: unzip -q source.zip
       - name: downloads old emqx zip packages
         env:
+          OTP_VSN: ${{ matrix.otp }}
           PROFILE: ${{ matrix.profile }}
           ARCH: ${{ matrix.arch }}
           SYSTEM: ${{ matrix.os }}
@@ -302,10 +282,11 @@ jobs:
          cd _upgrade_base
          old_vsns=($(echo $OLD_VSNS | tr ' ' ' '))
          for tag in ${old_vsns[@]}; do
-            if [ ! -z "$(echo $(curl -I -m 10 -o /dev/null -s -w %{http_code} https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip) | grep -oE "^[23]+")" ];then
-              wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip
-              wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip.sha256
-              echo "$(cat $PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip.sha256) $PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip" | sha256sum -c || exit 1
+            package_name="${PROFILE}-${tag#[e|v]}-otp${OTP_VSN}-${SYSTEM}-${ARCH}"
+            if [ ! -z "$(echo $(curl -I -m 10 -o /dev/null -s -w %{http_code} https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$package_name.zip) | grep -oE "^[23]+")" ]; then
+              wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$package_name.zip
+              wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$package_name.zip.sha256
+              echo "$(cat $package_name.zip.sha256) $package_name.zip" | sha256sum -c || exit 1
            fi
          done
       - name: build emqx packages
@@ -320,7 +301,7 @@ jobs:
            -v $(pwd):/emqx \
            --workdir /emqx \
            --platform linux/$ARCH \
-            ghcr.io/emqx/emqx-builder/5.0:$OTP-$SYSTEM \
+            ghcr.io/emqx/emqx-builder/5.0-2:$OTP-$SYSTEM \
            bash -euc "make $PROFILE-zip || cat rebar3.crashdump; \
              make $PROFILE-pkg || cat rebar3.crashdump; \
              EMQX_NAME=$PROFILE && .ci/build_packages/tests.sh"
@@ -349,17 +330,21 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
+        profile: # all editions for docker
+          - emqx-edge
+          - emqx
+          - emqx-ee
+        # NOTE: for docker, only support latest otp version, not a matrix
         otp:
-          - 24.1.1-emqx-1
+          - 24.1.5-2 # update to latest
     steps:
       - uses: actions/download-artifact@v2
         with:
-          name: source-${{ matrix.otp }}
+          name: source
           path: .
       - name: unzip source code
-        run: unzip -q source-${{ matrix.otp }}.zip
+        run: unzip -q source.zip
       - uses: docker/setup-buildx-action@v1
       - uses: docker/setup-qemu-action@v1
         with:
@@ -376,7 +361,8 @@ jobs:
            type=ref,event=pr
            type=ref,event=tag
            type=semver,pattern={{version}}
-            type=semver,pattern={{major}}.{{minor}}
+          labels:
+            org.opencontainers.image.otp.version=${{ matrix.otp }}
       - uses: docker/login-action@v1
         if: github.event_name == 'release'
         with:
@@ -384,32 +370,26 @@ jobs:
           password: ${{ secrets.DOCKER_HUB_TOKEN }}
       - uses: docker/build-push-action@v2
         with:
-          push: ${{ github.event_name == 'release' }}
+          push: ${{ github.event_name == 'release' && !github.event.release.prerelease }}
           pull: true
           no-cache: true
           platforms: linux/amd64,linux/arm64
           tags: ${{ steps.meta.outputs.tags }}
           labels: ${{ steps.meta.outputs.labels }}
           build-args: |
-            BUILD_FROM=ghcr.io/emqx/emqx-builder/5.0:${{ matrix.otp }}-alpine3.14
+            BUILD_FROM=ghcr.io/emqx/emqx-builder/5.0-2:${{ matrix.otp }}-alpine3.14
             RUN_FROM=alpine:3.14
             EMQX_NAME=${{ matrix.profile }}
           file: source/deploy/docker/Dockerfile
           context: source
   delete-artifact:
     runs-on: ubuntu-20.04
-    strategy:
-      matrix:
-        otp:
-          - 23.2.7.2-emqx-2
-          - 24.1.1-emqx-1
     needs: [prepare, mac, linux, docker]
     steps:
       - uses: geekyeggo/delete-artifact@v1
         with:
-          name: source-${{ matrix.otp }}
+          name: source
   upload:
     runs-on: ubuntu-20.04
@@ -420,9 +400,12 @@ jobs:
     strategy:
       matrix:
-        profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
+        profile:
+          - emqx-edge
+          - emqx
+          - emqx-ee
         otp:
-          - 24.1.1-emqx-1
+          - 24.1.5-2
     steps:
       - uses: actions/checkout@v2
@@ -461,17 +444,11 @@ jobs:
          aws s3 cp --recursive _packages/${{ matrix.profile }} s3://${{ secrets.AWS_S3_BUCKET }}/$broker/${{ env.version }}
          aws cloudfront create-invalidation --distribution-id ${{ secrets.AWS_CLOUDFRONT_ID }} --paths "/$broker/${{ env.version }}/*"
       - uses: Rory-Z/upload-release-asset@v1
-        if: github.event_name == 'release' && matrix.profile != 'emqx-ee'
+        if: github.event_name == 'release'
         with:
           repo: emqx
           path: "_packages/${{ matrix.profile }}/emqx-*"
           token: ${{ github.token }}
-      - uses: Rory-Z/upload-release-asset@v1
-        if: github.event_name == 'release' && matrix.profile == 'emqx-ee'
-        with:
-          repo: emqx-enterprise
-          path: "_packages/${{ matrix.profile }}/emqx-*"
-          token: ${{ github.token }}
       - name: update to emqx.io
         if: github.event_name == 'release'
         run: |
@@ -484,32 +461,28 @@ jobs:
            -d "{\"repo\":\"emqx/emqx\", \"tag\": \"${{ env.version }}\" }" \
            ${{ secrets.EMQX_IO_RELEASE_API }}
       - name: update repo.emqx.io
-        if: github.event_name == 'release' && endsWith(github.repository, 'enterprise') && matrix.profile == 'emqx-ee'
+        if: github.event_name == 'release'
         run: |
+          if [ "${{ matrix. profile }}" = 'emqx-ee' ]; then
+            BOOL_FLAG_NAME="emqx_ee"
+          else
+            BOOL_FLAG_NAME="emqx_ce"
+          fi
          curl --silent --show-error \
            -H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \
            -H "Accept: application/vnd.github.v3+json" \
            -X POST \
-            -d "{\"ref\":\"v1.0.3\",\"inputs\":{\"version\": \"${{ env.version }}\", \"emqx_ee\": \"true\"}}" \
-            "https://api.github.com/repos/emqx/emqx-ci-helper/actions/workflows/update_emqx_repos.yaml/dispatches"
-      - name: update repo.emqx.io
-        if: github.event_name == 'release' && endsWith(github.repository, 'emqx') && matrix.profile == 'emqx'
-        run: |
-          curl --silent --show-error \
-            -H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \
-            -H "Accept: application/vnd.github.v3+json" \
-            -X POST \
-            -d "{\"ref\":\"v1.0.3\",\"inputs\":{\"version\": \"${{ env.version }}\", \"emqx_ce\": \"true\"}}" \
+            -d "{\"ref\":\"v1.0.4\",\"inputs\":{\"version\": \"${{ env.version }}\", \"${BOOL_FLAG_NAME}\": \"true\"}}" \
            "https://api.github.com/repos/emqx/emqx-ci-helper/actions/workflows/update_emqx_repos.yaml/dispatches"
       - name: update homebrew packages
-        if: github.event_name == 'release' && endsWith(github.repository, 'emqx') && matrix.profile == 'emqx'
+        if: github.event_name == 'release' && matrix.profile == 'emqx'
         run: |
          if [ -z $(echo $version | grep -oE "(alpha|beta|rc)\.[0-9]") ]; then
            curl --silent --show-error \
              -H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \
              -H "Accept: application/vnd.github.v3+json" \
              -X POST \
-              -d "{\"ref\":\"v1.0.3\",\"inputs\":{\"version\": \"${{ env.version }}\"}}" \
+              -d "{\"ref\":\"v1.0.4\",\"inputs\":{\"version\": \"${{ env.version }}\"}}" \
              "https://api.github.com/repos/emqx/emqx-ci-helper/actions/workflows/update_emqx_homebrew.yaml/dispatches"
          fi
       - uses: geekyeggo/delete-artifact@v1


@@ -4,7 +4,6 @@ concurrency:
   group: slim-${{ github.event_name }}-${{ github.ref }}
   cancel-in-progress: true
 on:
   push:
     tags:
@@ -14,48 +13,37 @@ on:
   workflow_dispatch:
 jobs:
-  build:
+  linux:
     runs-on: ubuntu-20.04
     strategy:
       fail-fast: false
       matrix:
+        profile:
+          - emqx-edge
+          - emqx
+          - emqx-ee
         otp:
-          - 24.1.1-emqx-1
+          - 24.1.5-2
         os:
           - ubuntu20.04
           - centos7
-    container: "ghcr.io/emqx/emqx-builder/5.0:${{ matrix.otp }}-${{ matrix.os }}"
+    container: "ghcr.io/emqx/emqx-builder/5.0-2:${{ matrix.otp }}-${{ matrix.os }}"
     steps:
       - uses: actions/checkout@v1
-      - name: prepare
-        run: |
-          if make emqx-ee --dry-run > /dev/null 2>&1; then
-            echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
-            git config --global credential.helper store
-            echo "${{ secrets.CI_GIT_TOKEN }}" >> ./scripts/git-token
-            echo "EMQX_NAME=emqx-ee" >> $GITHUB_ENV
-          else
-            echo "EMQX_NAME=emqx" >> $GITHUB_ENV
-          fi
-      - name: build zip packages
-        run: make ${EMQX_NAME}-zip
+      - name: build zip package
+        run: make ${{ matrix.profile }}-zip
       - name: build deb/rpm packages
-        run: make ${EMQX_NAME}-pkg
+        run: make ${{ matrix.profile }}-pkg
+      - uses: actions/upload-artifact@v1
+        if: failure()
+        with:
+          name: rebar3.crashdump
+          path: ./rebar3.crashdump
       - name: packages test
         run: |
           export CODE_PATH=$GITHUB_WORKSPACE
-          .ci/build_packages/tests.sh
+          EMQX_NAME=${{ matrix.profile }} .ci/build_packages/tests.sh
       - uses: actions/upload-artifact@v2
         with:
-          name: ${{ matrix.os }}
+          name: ${{ matrix.profile}}-${{ matrix.otp }}-${{ matrix.os }}
           path: _packages/**/*.zip
   mac:
@@ -63,32 +51,25 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
+        profile:
+          - emqx
+          - emqx-ee
+        otp:
+          - 24.1.5-2
         macos:
           - macos-11
           - macos-10.15
-        otp:
-          - 24.1.1-emqx-1
     runs-on: ${{ matrix.macos }}
     steps:
-      - uses: actions/checkout@v1
-      - name: prepare
-        run: |
-          if make emqx-ee --dry-run > /dev/null 2>&1; then
-            echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
-            git config --global credential.helper store
-            echo "${{ secrets.CI_GIT_TOKEN }}" >> ./scripts/git-token
-            echo "EMQX_NAME=emqx-ee" >> $GITHUB_ENV
-          else
-            echo "EMQX_NAME=emqx" >> $GITHUB_ENV
-          fi
+      - uses: actions/checkout@v2
       - name: prepare
         run: |
           brew update
           brew install curl zip unzip gnu-sed kerl unixodbc freetds
           echo "/usr/local/bin" >> $GITHUB_PATH
-          git config --global credential.helper store
+          echo "EMQX_NAME=${{ matrix.profile }}" >> $GITHUB_ENV
       - uses: actions/cache@v2
         id: cache
         with:
@@ -97,23 +78,23 @@ jobs:
       - name: build erlang
         if: steps.cache.outputs.cache-hit != 'true'
         timeout-minutes: 60
+        env:
+          KERL_BUILD_BACKEND: git
+          OTP_GITHUB_URL: https://github.com/emqx/otp
         run: |
-          kerl build git https://github.com/emqx/otp.git OTP-${{ matrix.otp }} ${{ matrix.otp }}
+          kerl update releases
+          kerl build ${{ matrix.otp }}
           kerl install ${{ matrix.otp }} $HOME/.kerl/${{ matrix.otp }}
-      - name: build
+      - name: build ${{ matrix.profile }}
         run: |
           . $HOME/.kerl/${{ matrix.otp }}/activate
           make ensure-rebar3
           sudo cp rebar3 /usr/local/bin/rebar3
-          make ${EMQX_NAME}-zip
+          make ${{ matrix.profile }}-zip
+      - uses: actions/upload-artifact@v1
+        if: failure()
+        with:
+          name: rebar3.crashdump
+          path: ./rebar3.crashdump
       - name: test
         run: |
-          unzip -q $(find _packages/${EMQX_NAME} -mindepth 1 -maxdepth 1 -iname \*.zip | head)
+          pkg_name=$(find _packages/${{ matrix.profile }} -mindepth 1 -maxdepth 1 -iname \*.zip)
+          unzip -q $pkg_name
           # gsed -i '/emqx_telemetry/d' ./emqx/data/loaded_plugins
           ./emqx/bin/emqx start || cat emqx/log/erlang.log.1
           ready='no'


@@ -5,7 +5,7 @@ on: [pull_request]
 jobs:
   check_deps_integrity:
     runs-on: ubuntu-20.04
-    container: "ghcr.io/emqx/emqx-builder/5.0:24.1.1-emqx-1-ubuntu20.04"
+    container: "ghcr.io/emqx/emqx-builder/5.0-2:24.1.5-2-ubuntu20.04"
     steps:
       - uses: actions/checkout@v2

.github/workflows/code_style_check.yaml (new file)

@@ -0,0 +1,17 @@
name: Code style check
on: [pull_request]
jobs:
build:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 1000
- name: Run elvis check
run: |
./scripts/elvis-check.sh $GITHUB_BASE_REF
- name: Check line-break at EOF
run: |
./scripts/check-nl-at-eof.sh
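Both checks can also be run locally before pushing; for example, against master as the comparison branch:

./scripts/elvis-check.sh master
./scripts/check-nl-at-eof.sh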


@@ -1,16 +0,0 @@
name: Elvis Linter
on: [pull_request]
jobs:
build:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v2
- name: Set git token
if: endsWith(github.repository, 'enterprise')
run: |
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
git config --global credential.helper store
- run: |
./scripts/elvis-check.sh $GITHUB_BASE_REF


@@ -24,11 +24,7 @@ jobs:
         id: create_pull_request
         run: |
           set -euo pipefail
-          if [ "$GITHUB_REF" = "refs/heads/master" ]; then
-            EE_REF="refs/heads/enterprise"
-          else
-            EE_REF="${GITHUB_REF}-enterprise"
-          fi
+          EE_REF="${GITHUB_REF}-enterprise"
           R=$(curl --silent --show-error \
             -H "Accept: application/vnd.github.v3+json" \
             -H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \


@@ -61,7 +61,7 @@ jobs:
       - uses: actions/checkout@v2
         with:
           repository: emqx/emqx-fvt
-          ref: v1.3.0
+          ref: 1.0.2-dev1
           path: .
       - uses: actions/setup-java@v1
         with:


@@ -12,11 +12,11 @@ jobs:
     strategy:
       matrix:
         otp:
-          - "23.2.7.2-emqx-2"
-          - "24.1.1-emqx-1"
+          - 23.3.4.9-3
+          - 24.1.5-2
     runs-on: ubuntu-20.04
-    container: "ghcr.io/emqx/emqx-builder/5.0:${{ matrix.otp }}-ubuntu20.04"
+    container: "ghcr.io/emqx/emqx-builder/5.0-2:${{ matrix.otp }}-ubuntu20.04"
     steps:
       - uses: actions/checkout@v2


@@ -8,58 +8,27 @@ on:
   push:
     tags:
       - v*
-      - e*
   pull_request:
 jobs:
   prepare:
-    strategy:
-      matrix:
-        otp:
-          - "23.2.7.2-emqx-2"
-          - "24.1.1-emqx-1"
     runs-on: ubuntu-20.04
-    container: "ghcr.io/emqx/emqx-builder/5.0:${{ matrix.otp }}-ubuntu20.04"
-    outputs:
-      profile: ${{ steps.profile.outputs.profile }}
+    # prepare source with any OTP version, no need for a matrix
+    container: ghcr.io/emqx/emqx-builder/5.0-2:24.1.5-2-alpine3.14
     steps:
-      - name: get otp version
-        id: get_otp_version
-        run: |
-          otp="$(erl -eval '{ok, Version} = file:read_file(filename:join([code:root_dir(), "releases", erlang:system_info(otp_release), "OTP_VERSION"])), io:fwrite(Version), halt().' -noshell)"
-          echo "::set-output name=otp::$otp"
       - uses: actions/checkout@v2
         with:
           path: source
           fetch-depth: 0
-      - name: set profile
-        id: profile
-        shell: bash
-        working-directory: source
-        run: |
-          vsn="$(./pkg-vsn.sh)"
-          if make emqx-ee --dry-run > /dev/null 2>&1; then
-            echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
-            git config --global credential.helper store
-            echo "::set-output name=profile::emqx-ee"
-          else
-            echo "::set-output name=profile::emqx"
-          fi
       - name: get deps
-        working-directory: source
         run: |
-          make ensure-rebar3
-          ./rebar3 as default get-deps
-          rm -rf rebar.lock
-      - name: gen zip file
-        run: zip -ryq source-${{ steps.get_otp_version.outputs.otp }}.zip source/* source/.[^.]*
+          make -C source deps-all
+          zip -ryq source.zip source/* source/.[^.]*
       - uses: actions/upload-artifact@v2
         with:
-          name: source-${{ steps.get_otp_version.outputs.otp }}
-          path: source-${{ steps.get_otp_version.outputs.otp }}.zip
+          name: source
+          path: source.zip
   docker_test:
     runs-on: ubuntu-20.04
@@ -68,47 +37,43 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        otp:
-          - 23.2.7.2-emqx-2
-          - 24.1.1-emqx-1
+        profile:
+          - emqx-edge
+          - emqx
+          - emqx-ee
+        cluster_db_backend:
+          - mnesia
+          - rlog
     steps:
       - uses: actions/download-artifact@v2
         with:
-          name: source-${{ matrix.otp }}
+          name: source
           path: .
       - name: unzip source code
-        run: unzip -q source-${{ matrix.otp }}.zip
+        run: unzip -q source.zip
       - name: make docker image
         working-directory: source
         env:
-          OTP: ${{ matrix.otp }}
+          EMQX_BUILDER: ghcr.io/emqx/emqx-builder/5.0-2:24.1.5-2-alpine3.14
         run: |
-          make ${{ needs.prepare.outputs.profile }}-docker
-          echo "TARGET=emqx/${{ needs.prepare.outputs.profile }}" >> $GITHUB_ENV
-          echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV
+          make ${{ matrix.profile }}-docker
       - name: run emqx
         timeout-minutes: 5
         working-directory: source
         run: |
-          set -e -u -x
-          echo "HOCON_ENV_OVERRIDE_PREFIX=EMQX_" >> .ci/docker-compose-file/conf.cluster.env
-          echo "EMQX_ZONES__DEFAULT__MQTT__RETRY_INTERVAL=2s" >> .ci/docker-compose-file/conf.cluster.env
-          echo "EMQX_ZONES__DEFAULT__MQTT__MAX_TOPIC_ALIAS=10" >> .ci/docker-compose-file/conf.cluster.env
-          docker-compose \
-            -f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml \
-            -f .ci/docker-compose-file/docker-compose-python.yaml \
-            up -d
-          while ! docker exec -i node1.emqx.io bash -c "emqx eval \"['emqx@node1.emqx.io','emqx@node2.emqx.io'] = maps:get(running_nodes, ekka_cluster:info()).\"" > /dev/null 2>&1; do
-            echo "['$(date -u +"%Y-%m-%dT%H:%M:%SZ")']:waiting emqx";
-            sleep 5;
-          done
+          set -x
+          IMAGE=emqx/${{ matrix.profile }}:$(./pkg-vsn.sh)
+          ./.ci/docker-compose-file/scripts/run-emqx.sh $IMAGE ${{ matrix.cluster_db_backend }}
       - name: make paho tests
         run: |
-          if ! docker exec -i python /scripts/pytest.sh; then
+          if ! docker exec -i python /scripts/pytest.sh "${{ matrix.cluster_db_backend }}"; then
             echo "DUMP_CONTAINER_LOGS_BGN"
+            echo "============== haproxy =============="
             docker logs haproxy
+            echo "============== node1 =============="
             docker logs node1.emqx.io
+            echo "============== node2 =============="
             docker logs node2.emqx.io
             echo "DUMP_CONTAINER_LOGS_END"
             exit 1
@@ -121,24 +86,24 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        otp:
-          - 23.2.7.2-emqx-2
-          - 24.1.1-emqx-1
+        profile:
+          - emqx
+          # - emqx-ee # TODO test enterprise
     steps:
       - uses: actions/download-artifact@v2
         with:
-          name: source-${{ matrix.otp }}
+          name: source
           path: .
       - name: unzip source code
-        run: unzip -q source-${{ matrix.otp }}.zip
+        run: unzip -q source.zip
       - name: make docker image
         working-directory: source
         env:
-          OTP: ${{ matrix.otp }}
+          EMQX_BUILDER: ghcr.io/emqx/emqx-builder/5.0-2:24.1.5-2-alpine3.14
         run: |
-          make ${{ needs.prepare.outputs.profile }}-docker
-          echo "TARGET=emqx/${{ needs.prepare.outputs.profile }}" >> $GITHUB_ENV
+          make ${{ matrix.profile }}-docker
+          echo "TARGET=emqx/${{ matrix.profile }}" >> $GITHUB_ENV
           echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV
       - run: minikube start
       - name: run emqx on chart


@@ -15,12 +15,14 @@ jobs:
   relup_test:
     strategy:
       matrix:
-        otp:
-          - "23.2.7.2-emqx-2"
-          - "24.1.1-emqx-1"
+        profile:
+          - emqx
+          - emqx-ee
+        otp_vsn:
+          - 24.1.5-2
     runs-on: ubuntu-20.04
-    container: "ghcr.io/emqx/emqx-builder/5.0:${{ matrix.otp }}-ubuntu20.04"
+    container: "ghcr.io/emqx/emqx-builder/5.0-2:${{ matrix.otp_vsn }}-ubuntu20.04"
     defaults:
       run:
@@ -43,7 +45,7 @@ jobs:
       - uses: actions/checkout@v2
         with:
           repository: emqx/emqtt-bench
           ref: 0.3.4
           path: emqtt-bench
       - uses: actions/checkout@v2
         with:
@@ -55,26 +57,18 @@ jobs:
           repository: ${{ github.repository }}
           path: emqx
           fetch-depth: 0
-      - name: prepare
-        run: |
-          if make -C emqx emqx-ee --dry-run > /dev/null 2>&1; then
-            echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
-            git config --global credential.helper store
-            echo "${{ secrets.CI_GIT_TOKEN }}" >> emqx/scripts/git-token
-            echo "PROFILE=emqx-ee" >> $GITHUB_ENV
-          else
-            echo "PROFILE=emqx" >> $GITHUB_ENV
-          fi
       - name: get version
         run: |
           set -e -x -u
           cd emqx
+          export PROFILE=${{ matrix.profile }}
+          export OTP_VSN=${{ matrix.otp_vsn }}
+          echo "PROFILE=$PROFILE" >> $GITHUB_ENV
+          echo "OTP_VSN=$OTP_VSN" >> $GITHUB_ENV
          if [ $PROFILE = "emqx" ];then
            broker="emqx-ce"
-            edition='opensource'
          else
            broker="emqx-ee"
-            edition='enterprise'
          fi
          echo "BROKER=$broker" >> $GITHUB_ENV
@@ -82,11 +76,7 @@ jobs:
          echo "VSN=$vsn" >> $GITHUB_ENV
          pre_vsn="$(echo $vsn | grep -oE '^[0-9]+.[0-9]')"
-          if [ $PROFILE = "emqx" ]; then
-            old_vsns="$(git tag -l "v$pre_vsn.[0-9]" | xargs echo -n | sed "s/v$vsn//")"
-          else
-            old_vsns="$(git tag -l "e$pre_vsn.[0-9]" | xargs echo -n | sed "s/e$vsn//")"
-          fi
+          old_vsns="$(git tag -l "v$pre_vsn.[0-9]" | xargs echo -n | sed "s/v$vsn//")"
          echo "OLD_VSNS=$old_vsns" >> $GITHUB_ENV
       - name: download emqx
         run: |
@@ -95,7 +85,7 @@ jobs:
          cd emqx/_upgrade_base
          old_vsns=($(echo $OLD_VSNS | tr ' ' ' '))
          for old_vsn in ${old_vsns[@]}; do
-            wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$BROKER/$old_vsn/$PROFILE-ubuntu20.04-${old_vsn#[e|v]}-amd64.zip
+            wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$BROKER/$old_vsn/$PROFILE-${old_vsn#[e|v]}-otp${OTP_VSN}-ubuntu20.04-amd64.zip
          done
       - name: build emqx
         run: make -C emqx ${PROFILE}-zip


@@ -15,12 +15,11 @@ jobs:
   run_static_analysis:
     strategy:
       matrix:
-        otp:
-          - "23.2.7.2-emqx-2"
-          - "24.1.1-emqx-1"
+        emqx_builder:
+          - 5.0-2:24.1.5-2 # run dialyzer on latest OTP
     runs-on: ubuntu-20.04
-    container: "ghcr.io/emqx/emqx-builder/5.0:${{ matrix.otp }}-ubuntu20.04"
+    container: "ghcr.io/emqx/emqx-builder/${{ matrix.emqx_builder }}-ubuntu20.04"
     steps:
       - uses: actions/checkout@v2
@@ -38,12 +37,11 @@ jobs:
   run_proper_test:
     strategy:
       matrix:
-        otp:
-          - "23.2.7.2-emqx-2"
-          - "24.1.1-emqx-1"
+        emqx_builder:
+          - 5.0-2:24.1.5-2
     runs-on: ubuntu-20.04
-    container: "ghcr.io/emqx/emqx-builder/5.0:${{ matrix.otp }}-ubuntu20.04"
+    container: "ghcr.io/emqx/emqx-builder/${{ matrix.emqx_builder }}-ubuntu20.04"
     steps:
       - uses: actions/checkout@v2
@@ -67,32 +65,19 @@ jobs:
     steps:
       - uses: actions/checkout@v2
-      - name: set edition
-        id: set_edition
-        run: |
-          if make emqx-ee --dry-run > /dev/null 2>&1; then
-            echo "EDITION=enterprise" >> $GITHUB_ENV
-          else
-            echo "EDITION=opensource" >> $GITHUB_ENV
-          fi
       - name: docker compose up
-        if: env.EDITION == 'opensource'
         env:
-          MYSQL_TAG: 8
-          PGSQL_TAG: 13
-          REDIS_TAG: 6
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         run: |
           docker-compose \
-            -f .ci/docker-compose-file/docker-compose-mysql-tcp.yaml \
-            -f .ci/docker-compose-file/docker-compose-pgsql-tcp.yaml \
-            -f .ci/docker-compose-file/docker-compose-redis-single-tcp.yaml \
             -f .ci/docker-compose-file/docker-compose.yaml \
             up -d --build
-      - name: docker compose up
-        if: env.EDITION == 'enterprise'
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        timeout-minutes: 20
-        run: |
-          docker-compose \
-            -f .ci/docker-compose-file/docker-compose.yaml \
-            -f .ci/docker-compose-file/docker-compose-enterprise.yaml \
-            up -d --build
       - name: run eunit
         run: |
           docker exec -i ${{ matrix.otp_release }} bash -c "make eunit"

.gitignore

@@ -58,3 +58,5 @@ erlang_ls.config
 # Emacs temporary files
 .#*
 *#
+# For direnv
+.envrc


@@ -1 +1 @@
-erlang 24.0.5-emqx-1
+erlang 24.1.5-2


@@ -79,4 +79,4 @@ Just as in the **subject**, use the imperative, present tense: "change" not "cha
 The footer should contain any information about **Breaking Changes** and is also the place to reference GitHub issues that this commit **Closes**.
 **Breaking Changes** should start with the word `BREAKING CHANGE:` with a space or two newlines. The rest of the commit message is then used for this.


@@ -3,16 +3,20 @@ REBAR_VERSION = 3.16.1-emqx-1
 REBAR = $(CURDIR)/rebar3
 BUILD = $(CURDIR)/build
 SCRIPTS = $(CURDIR)/scripts
+export EMQX_DEFAULT_BUILDER = ghcr.io/emqx/emqx-builder/4.4-2:23.3.4.9-3-alpine3.14
+export EMQX_DEFAULT_RUNNER = alpine:3.14
+export OTP_VSN ?= $(shell $(CURDIR)/scripts/get-otp-vsn.sh)
 export PKG_VSN ?= $(shell $(CURDIR)/pkg-vsn.sh)
+export EMQX_DESC ?= EMQ X
 export EMQX_DASHBOARD_VERSION ?= v5.0.0-beta.18
+export DOCKERFILE := deploy/docker/Dockerfile
+export DOCKERFILE_TESTING := deploy/docker/Dockerfile.testing
 ifeq ($(OS),Windows_NT)
 export REBAR_COLOR=none
 endif
 PROFILE ?= emqx
-REL_PROFILES := emqx emqx-edge
-PKG_PROFILES := emqx-pkg emqx-edge-pkg
+REL_PROFILES := emqx emqx-edge emqx-ee
+PKG_PROFILES := emqx-pkg emqx-edge-pkg emqx-ee-pkg
 PROFILES := $(REL_PROFILES) $(PKG_PROFILES) default
 CT_NODE_NAME ?= 'test@127.0.0.1'
@@ -85,7 +89,6 @@ coveralls: $(REBAR)
 	@ENABLE_COVER_COMPILE=1 $(REBAR) as test coveralls send
 .PHONY: $(REL_PROFILES)
 $(REL_PROFILES:%=%): $(REBAR) get-dashboard conf-segs
 	@$(REBAR) as $(@) do compile,release
@@ -98,8 +101,10 @@ $(REL_PROFILES:%=%): $(REBAR) get-dashboard conf-segs
 clean: $(PROFILES:%=clean-%)
 $(PROFILES:%=clean-%):
 	@if [ -d _build/$(@:clean-%=%) ]; then \
+		rm rebar.lock \
 		rm -rf _build/$(@:clean-%=%)/rel; \
 		find _build/$(@:clean-%=%) -name '*.beam' -o -name '*.so' -o -name '*.app' -o -name '*.appup' -o -name '*.o' -o -name '*.d' -type f | xargs rm -f; \
+		find _build/$(@:clean-%=%) -type l -delete; \
 	fi
 .PHONY: clean-all
@@ -109,6 +114,7 @@ clean-all:
 .PHONY: deps-all
 deps-all: $(REBAR) $(PROFILES:%=deps-%)
+	@make clean # ensure clean at the end
 ## deps-<profile> is used in CI scripts to download deps and the
 ## share downloads between CI steps and/or copied into containers
@@ -116,6 +122,7 @@ deps-all: $(REBAR) $(PROFILES:%=deps-%)
 .PHONY: $(PROFILES:%=deps-%)
 $(PROFILES:%=deps-%): $(REBAR) get-dashboard
 	@$(REBAR) as $(@:deps-%=%) get-deps
+	@rm -f rebar.lock
 .PHONY: xref
 xref: $(REBAR)
@@ -174,5 +181,17 @@ endef
 ALL_ZIPS = $(REL_PROFILES)
 $(foreach zt,$(ALL_ZIPS),$(eval $(call gen-docker-target,$(zt))))
+## emqx-docker-testing
+## emqx-ee-docker-testing
+## is to directly copy a unzipped zip-package to a
+## base image such as ubuntu20.04. Mostly for testing
+.PHONY: $(REL_PROFILES:%=%-docker-testing)
+define gen-docker-target-testing
+$1-docker-testing: $(COMMON_DEPS)
+	@$(BUILD) $1 docker-testing
+endef
+ALL_ZIPS = $(REL_PROFILES)
+$(foreach zt,$(ALL_ZIPS),$(eval $(call gen-docker-target-testing,$(zt))))
 conf-segs:
 	@scripts/merge-config.escript
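With the generated *-docker-testing targets in place, a smoke-test image for the open-source profile can be built from the already unzipped package with, for example:

make emqx-docker-testing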


@@ -86,8 +86,8 @@ listeners.tcp.default {
     ## Set to "" to disable the feature.
     ##
     ## Variables in mountpoint string:
-    ## - %c: clientid
-    ## - %u: username
+    ## - ${clientid}: clientid
+    ## - ${username}: username
     ##
     ## @doc listeners.tcp.<name>.mountpoint
     ## ValueType: String
@@ -185,8 +185,8 @@ listeners.ssl.default {
     ## Set to "" to disable the feature.
     ##
     ## Variables in mountpoint string:
-    ## - %c: clientid
-    ## - %u: username
+    ## - ${clientid}: clientid
+    ## - ${username}: username
     ##
     ## @doc listeners.ssl.<name>.mountpoint
     ## ValueType: String
@@ -278,8 +278,8 @@ listeners.quic.default {
     ## Set to "" to disable the feature.
     ##
     ## Variables in mountpoint string:
-    ## - %c: clientid
-    ## - %u: username
+    ## - ${clientid}: clientid
+    ## - ${username}: username
     ##
     ## @doc listeners.quic.<name>.mountpoint
     ## ValueType: String
@@ -372,8 +372,8 @@ listeners.ws.default {
     ## Set to "" to disable the feature.
     ##
     ## Variables in mountpoint string:
-    ## - %c: clientid
-    ## - %u: username
+    ## - ${clientid}: clientid
+    ## - ${username}: username
     ##
     ## @doc listeners.ws.<name>.mountpoint
     ## ValueType: String
@@ -475,8 +475,8 @@ listeners.wss.default {
     ## Set to "" to disable the feature.
     ##
     ## Variables in mountpoint string:
-    ## - %c: clientid
-    ## - %u: username
+    ## - ${clientid}: clientid
+    ## - ${username}: username
     ##
     ## @doc listeners.wss.<name>.mountpoint
     ## ValueType: String


@@ -116,3 +116,7 @@
 ## patches dir
 -pa {{ platform_data_dir }}/patches
+## Mnesia thresholds
+-mnesia dump_log_write_threshold 5000
+-mnesia dump_log_time_threshold 60000


@@ -114,3 +114,7 @@
 ## patches dir
 -pa {{ platform_data_dir }}/patches
+## Mnesia thresholds
+-mnesia dump_log_write_threshold 5000
+-mnesia dump_log_time_threshold 60000


@@ -48,6 +48,12 @@
 %% Queue topic
 -define(QUEUE, <<"$queue/">>).
+%%--------------------------------------------------------------------
+%% alarms
+%%--------------------------------------------------------------------
+-define(ACTIVATED_ALARM, emqx_activated_alarm).
+-define(DEACTIVATED_ALARM, emqx_deactivated_alarm).
 %%--------------------------------------------------------------------
 %% Message and Delivery
 %%--------------------------------------------------------------------


@@ -0,0 +1,115 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2017-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-ifndef(EMQ_X_PLACEHOLDER_HRL).
-define(EMQ_X_PLACEHOLDER_HRL, true).
-define(PH(Type), <<"${", Type/binary, "}">> ).
%% action: publish/subscribe/all
-define(PH_ACTION, <<"${action}">> ).
%% cert
-define(PH_CERT_SUBJECT, <<"${cert_subject}">> ).
-define(PH_CERT_CN_NAME, <<"${cert_common_name}">> ).
%% MQTT
-define(PH_PASSWORD, <<"${password}">> ).
-define(PH_CLIENTID, <<"${clientid}">> ).
-define(PH_FROM_CLIENTID, <<"${from_clientid}">> ).
-define(PH_USERNAME, <<"${username}">> ).
-define(PH_FROM_USERNAME, <<"${from_username}">> ).
-define(PH_TOPIC, <<"${topic}">> ).
%% MQTT payload
-define(PH_PAYLOAD, <<"${payload}">> ).
%% client IPAddress
-define(PH_PEERHOST, <<"${peerhost}">> ).
%% ip & port
-define(PH_HOST, <<"${host}">> ).
-define(PH_PORT, <<"${port}">> ).
%% Enumeration of message QoS 0,1,2
-define(PH_QOS, <<"${qos}">> ).
-define(PH_FLAGS, <<"${flags}">> ).
%% Additional data related to process within the MQTT message
-define(PH_HEADERS, <<"${headers}">> ).
%% protocol name
-define(PH_PROTONAME, <<"${proto_name}">> ).
%% protocol version
-define(PH_PROTOVER, <<"${proto_ver}">> ).
%% MQTT keepalive interval
-define(PH_KEEPALIVE, <<"${keepalive}">> ).
%% MQTT clean_start
-define(PH_CLEAR_START, <<"${clean_start}">> ).
%% MQTT Session Expiration time
-define(PH_EXPIRY_INTERVAL, <<"${expiry_interval}">> ).
%% Time when PUBLISH message reaches Broker (ms)
-define(PH_PUBLISH_RECEIVED_AT, <<"${publish_received_at}">>).
%% Mountpoint for bridging messages
-define(PH_MOUNTPOINT, <<"${mountpoint}">> ).
%% IPAddress and Port of terminal
-define(PH_PEERNAME, <<"${peername}">> ).
%% IPAddress and Port listened by emqx
-define(PH_SOCKNAME, <<"${sockname}">> ).
%% whether it is MQTT bridge connection
-define(PH_IS_BRIDGE, <<"${is_bridge}">> ).
%% Terminal connection completion time (s)
-define(PH_CONNECTED_AT, <<"${connected_at}">> ).
%% Event trigger time(millisecond)
-define(PH_TIMESTAMP, <<"${timestamp}">> ).
%% Terminal disconnection completion time (s)
-define(PH_DISCONNECTED_AT, <<"${disconnected_at}">> ).
-define(PH_NODE, <<"${node}">> ).
-define(PH_REASON, <<"${reason}">> ).
-define(PH_ENDPOINT_NAME, <<"${endpoint_name}">> ).
%% Keep these string placeholders in sync with the binary definitions above.
-define(PH_S_ACTION, "${action}" ).
-define(PH_S_CERT_SUBJECT, "${cert_subject}" ).
-define(PH_S_CERT_CN_NAME, "${cert_common_name}" ).
-define(PH_S_PASSWORD, "${password}" ).
-define(PH_S_CLIENTID, "${clientid}" ).
-define(PH_S_FROM_CLIENTID, "${from_clientid}" ).
-define(PH_S_USERNAME, "${username}" ).
-define(PH_S_FROM_USERNAME, "${from_username}" ).
-define(PH_S_TOPIC, "${topic}" ).
-define(PH_S_PAYLOAD, "${payload}" ).
-define(PH_S_PEERHOST, "${peerhost}" ).
-define(PH_S_HOST, "${host}" ).
-define(PH_S_PORT, "${port}" ).
-define(PH_S_QOS, "${qos}" ).
-define(PH_S_FLAGS, "${flags}" ).
-define(PH_S_HEADERS, "${headers}" ).
-define(PH_S_PROTONAME, "${proto_name}" ).
-define(PH_S_PROTOVER, "${proto_ver}" ).
-define(PH_S_KEEPALIVE, "${keepalive}" ).
-define(PH_S_CLEAR_START, "${clean_start}" ).
-define(PH_S_EXPIRY_INTERVAL, "${expiry_interval}" ).
-define(PH_S_PUBLISH_RECEIVED_AT, "${publish_received_at}" ).
-define(PH_S_MOUNTPOINT, "${mountpoint}" ).
-define(PH_S_PEERNAME, "${peername}" ).
-define(PH_S_SOCKNAME, "${sockname}" ).
-define(PH_S_IS_BRIDGE, "${is_bridge}" ).
-define(PH_S_CONNECTED_AT, "${connected_at}" ).
-define(PH_S_TIMESTAMP, "${timestamp}" ).
-define(PH_S_DISCONNECTED_AT, "${disconnected_at}" ).
-define(PH_S_NODE, "${node}" ).
-define(PH_S_REASON, "${reason}" ).
-define(PH_S_ENDPOINT_NAME, "${endpoint_name}" ).
-endif.
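Illustration only, not part of this commit: a minimal sketch of substituting these placeholders in a template binary. The render/2 helper and the example values are invented; binary:replace/4 is standard OTP.

    %% Hypothetical helper; assumes the template only uses these two placeholders.
    -include("emqx_placeholder.hrl").

    render(Template, #{clientid := ClientId, username := Username}) ->
        T1 = binary:replace(Template, ?PH_CLIENTID, ClientId, [global]),
        binary:replace(T1, ?PH_USERNAME, Username, [global]).

    %% render(<<"client ${clientid} as ${username}">>,
    %%        #{clientid => <<"c1">>, username => <<"u1">>})
    %% => <<"client c1 as u1">>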

View File

@ -14,9 +14,6 @@
%% limitations under the License. %% limitations under the License.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
-ifndef(EMQX_RELEASE_HRL).
-define(EMQX_RELEASE_HRL, true).
%% NOTE: this is the release version which is not always the same %% NOTE: this is the release version which is not always the same
%% as the emqx app version defined in emqx.app.src %% as the emqx app version defined in emqx.app.src
%% App (plugin) versions are bumped independently. %% App (plugin) versions are bumped independently.
@ -27,13 +24,4 @@
%% NOTE: This version number should be manually bumped for each release %% NOTE: This version number should be manually bumped for each release
-ifndef(EMQX_ENTERPRISE). -define(EMQX_RELEASE, "5.0-beta.2").
-define(EMQX_RELEASE, {opensource, "5.0-beta.1"}).
-else.
-endif.
-endif.

View File

@ -17,10 +17,10 @@
, {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.0"}}} , {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.0"}}}
, {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.11.1"}}} , {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.11.1"}}}
, {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.5.1"}}} , {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.5.1"}}}
, {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.20.5"}}} , {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.20.6"}}}
, {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}} , {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}}
, {recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}} , {recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}}
, {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "0.14.1"}}} , {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "0.15.0"}}}
]}. ]}.
{plugins, [{rebar3_proper, "0.12.1"}]}. {plugins, [{rebar3_proper, "0.12.1"}]}.

View File

@ -20,6 +20,7 @@
-include("logger.hrl"). -include("logger.hrl").
-include("types.hrl"). -include("types.hrl").
-elvis([{elvis_style, god_modules, disable}]).
%% Start/Stop the application %% Start/Stop the application
-export([ start/0 -export([ start/0
@ -51,10 +52,6 @@
, run_fold_hook/3 , run_fold_hook/3
]). ]).
%% Troubleshooting
-export([ set_debug_secret/1
]).
%% Configs APIs %% Configs APIs
-export([ get_config/1 -export([ get_config/1
, get_config/2 , get_config/2
@ -71,29 +68,6 @@
-define(APP, ?MODULE). -define(APP, ?MODULE).
%% @hidden Path to the file which has debug_info encryption secret in it.
%% Evaluate this function if there is a need to access encrypted debug_info.
%% NOTE: Do not change the API to accept the secret text because it may
%% get logged everywhere.
set_debug_secret(PathToSecretFile) ->
SecretText =
case file:read_file(PathToSecretFile) of
{ok, Secret} ->
try string:trim(binary_to_list(Secret))
catch _ : _ -> error({badfile, PathToSecretFile})
end;
{error, Reason} ->
?ULOG("Failed to read debug_info encryption key file ~ts: ~p~n",
[PathToSecretFile, Reason]),
error(Reason)
end,
F = fun(init) -> ok;
(clear) -> ok;
({debug_info, _Mode, _Module, _Filename}) -> SecretText
end,
_ = beam_lib:clear_crypto_key_fun(),
ok = beam_lib:crypto_key_fun(F).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Bootstrap, is_running... %% Bootstrap, is_running...
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
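The removed set_debug_secret/1 wrapped OTP's beam_lib crypto-key hooks. For reference, a rough sketch of doing the same directly (the function name set_crypto_key_from_file/1 is invented; it assumes the file contains only the key text):

    set_crypto_key_from_file(Path) ->
        {ok, Bin} = file:read_file(Path),
        Secret = string:trim(binary_to_list(Bin)),
        KeyFun = fun(init) -> ok;
                    (clear) -> ok;
                    ({debug_info, _Mode, _Module, _Filename}) -> Secret
                 end,
        _ = beam_lib:clear_crypto_key_fun(),
        ok = beam_lib:crypto_key_fun(KeyFun).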

View File

@ -17,7 +17,6 @@
-module(emqx_alarm). -module(emqx_alarm).
-behaviour(gen_server). -behaviour(gen_server).
-behaviour(emqx_config_handler).
-include("emqx.hrl"). -include("emqx.hrl").
-include("logger.hrl"). -include("logger.hrl").
@ -27,22 +26,19 @@
-boot_mnesia({mnesia, [boot]}). -boot_mnesia({mnesia, [boot]}).
-export([post_config_update/4]). -export([start_link/0
-export([ start_link/0
, stop/0
]). ]).
-export([format/1]).
%% API %% API
-export([ activate/1 -export([ activate/1
, activate/2 , activate/2
, activate/3
, deactivate/1 , deactivate/1
, deactivate/2 , deactivate/2
, deactivate/3
, delete_all_deactivated_alarms/0 , delete_all_deactivated_alarms/0
, get_alarms/0 , get_alarms/0
, get_alarms/1 , get_alarms/1
, format/1
]). ]).
%% gen_server callbacks %% gen_server callbacks
@ -56,34 +52,19 @@
-record(activated_alarm, { -record(activated_alarm, {
name :: binary() | atom(), name :: binary() | atom(),
details :: map() | list(), details :: map() | list(),
message :: binary(), message :: binary(),
activate_at :: integer() activate_at :: integer()
}). }).
-record(deactivated_alarm, { -record(deactivated_alarm, {
activate_at :: integer(), activate_at :: integer(),
name :: binary() | atom(), name :: binary() | atom(),
details :: map() | list(), details :: map() | list(),
message :: binary(), message :: binary(),
deactivate_at :: integer() | infinity deactivate_at :: integer() | infinity
}). }).
-record(state, {
timer :: reference()
}).
-define(ACTIVATED_ALARM, emqx_activated_alarm).
-define(DEACTIVATED_ALARM, emqx_deactivated_alarm).
-ifdef(TEST). -ifdef(TEST).
-compile(export_all). -compile(export_all).
-compile(nowarn_export_all). -compile(nowarn_export_all).
@ -114,20 +95,23 @@ mnesia(boot) ->
start_link() -> start_link() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []). gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
stop() ->
gen_server:stop(?MODULE).
activate(Name) -> activate(Name) ->
activate(Name, #{}). activate(Name, #{}).
activate(Name, Details) -> activate(Name, Details) ->
gen_server:call(?MODULE, {activate_alarm, Name, Details}). activate(Name, Details, <<"">>).
activate(Name, Details, Message) ->
gen_server:call(?MODULE, {activate_alarm, Name, Details, Message}).
deactivate(Name) -> deactivate(Name) ->
gen_server:call(?MODULE, {deactivate_alarm, Name, no_details}). deactivate(Name, no_details, <<"">>).
deactivate(Name, Details) -> deactivate(Name, Details) ->
gen_server:call(?MODULE, {deactivate_alarm, Name, Details}). deactivate(Name, Details, <<"">>).
deactivate(Name, Details, Message) ->
gen_server:call(?MODULE, {deactivate_alarm, Name, Details, Message}).
delete_all_deactivated_alarms() -> delete_all_deactivated_alarms() ->
gen_server:call(?MODULE, delete_all_deactivated_alarms). gen_server:call(?MODULE, delete_all_deactivated_alarms).
@ -144,12 +128,10 @@ get_alarms(activated) ->
get_alarms(deactivated) -> get_alarms(deactivated) ->
gen_server:call(?MODULE, {get_alarms, deactivated}). gen_server:call(?MODULE, {get_alarms, deactivated}).
post_config_update(_, #{validity_period := Period0}, _OldConf, _AppEnv) ->
?MODULE ! {update_timer, Period0},
ok.
format(#activated_alarm{name = Name, message = Message, activate_at = At, details = Details}) -> format(#activated_alarm{name = Name, message = Message, activate_at = At, details = Details}) ->
Now = erlang:system_time(microsecond), Now = erlang:system_time(microsecond),
%% mnesia stores the timestamp in microseconds to tell high-frequency alarms apart
%% format it as milliseconds for the dashboard
#{ #{
node => node(), node => node(),
name => Name, name => Name,
@ -159,20 +141,19 @@ format(#activated_alarm{name = Name, message = Message, activate_at = At, detail
details => Details details => Details
}; };
format(#deactivated_alarm{name = Name, message = Message, activate_at = At, details = Details, format(#deactivated_alarm{name = Name, message = Message, activate_at = At, details = Details,
deactivate_at = DAt}) -> deactivate_at = DAt}) ->
#{ #{
node => node(), node => node(),
name => Name, name => Name,
message => Message, message => Message,
duration => DAt - At, duration => (DAt - At) div 1000, %% to millisecond
activate_at => to_rfc3339(At), activate_at => to_rfc3339(At),
deactivate_at => to_rfc3339(DAt), deactivate_at => to_rfc3339(DAt),
details => Details details => Details
}; }.
format(_) ->
{error, unknow_alarm}.
to_rfc3339(Timestamp) -> to_rfc3339(Timestamp) ->
%% rfc3339 with millisecond precision
list_to_binary(calendar:system_time_to_rfc3339(Timestamp div 1000, [{unit, millisecond}])). list_to_binary(calendar:system_time_to_rfc3339(Timestamp div 1000, [{unit, millisecond}])).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -180,85 +161,72 @@ to_rfc3339(Timestamp) ->
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
init([]) -> init([]) ->
_ = mria:wait_for_tables([?ACTIVATED_ALARM, ?DEACTIVATED_ALARM]), ok = mria:wait_for_tables([?ACTIVATED_ALARM, ?DEACTIVATED_ALARM]),
deactivate_all_alarms(), deactivate_all_alarms(),
ok = emqx_config_handler:add_handler([alarm], ?MODULE), {ok, #{}, get_validity_period()}.
{ok, #state{timer = ensure_timer(undefined, get_validity_period())}}.
%% suppress dialyzer warning due to dirty read/write race condition. handle_call({activate_alarm, Name, Details, Message}, _From, State) ->
%% TODO: change from dirty_read/write to transactional. Res = mria:transaction(mria:local_content_shard(),
%% TODO: handle mnesia write errors. fun create_activate_alarm/3,
-dialyzer([{nowarn_function, [handle_call/3]}]). [Name, Details, Message]),
handle_call({activate_alarm, Name, Details}, _From, State) -> case Res of
case mnesia:dirty_read(?ACTIVATED_ALARM, Name) of {atomic, Alarm} ->
[#activated_alarm{name = Name}] ->
{reply, {error, already_existed}, State};
[] ->
Alarm = #activated_alarm{name = Name,
details = Details,
message = normalize_message(Name, Details),
activate_at = erlang:system_time(microsecond)},
mria:dirty_write(?ACTIVATED_ALARM, Alarm),
do_actions(activate, Alarm, emqx:get_config([alarm, actions])), do_actions(activate, Alarm, emqx:get_config([alarm, actions])),
{reply, ok, State} {reply, ok, State, get_validity_period()};
{aborted, Reason} ->
{reply, Reason, State, get_validity_period()}
end; end;
handle_call({deactivate_alarm, Name, Details}, _From, State) -> handle_call({deactivate_alarm, Name, Details, Message}, _From, State) ->
case mnesia:dirty_read(?ACTIVATED_ALARM, Name) of case mnesia:dirty_read(?ACTIVATED_ALARM, Name) of
[] -> [] ->
{reply, {error, not_found}, State}; {reply, {error, not_found}, State};
[Alarm] -> [Alarm] ->
deactivate_alarm(Details, Alarm), deactivate_alarm(Alarm, Details, Message),
{reply, ok, State} {reply, ok, State, get_validity_period()}
end; end;
handle_call(delete_all_deactivated_alarms, _From, State) -> handle_call(delete_all_deactivated_alarms, _From, State) ->
clear_table(?DEACTIVATED_ALARM), clear_table(?DEACTIVATED_ALARM),
{reply, ok, State}; {reply, ok, State, get_validity_period()};
handle_call({get_alarms, all}, _From, State) -> handle_call({get_alarms, all}, _From, State) ->
{atomic, Alarms} = {atomic, Alarms} =
mria:ro_transaction( mria:ro_transaction(
?COMMON_SHARD, mria:local_content_shard(),
fun() -> fun() ->
[normalize(Alarm) || [normalize(Alarm) ||
Alarm <- ets:tab2list(?ACTIVATED_ALARM) Alarm <- ets:tab2list(?ACTIVATED_ALARM)
++ ets:tab2list(?DEACTIVATED_ALARM)] ++ ets:tab2list(?DEACTIVATED_ALARM)]
end), end),
{reply, Alarms, State}; {reply, Alarms, State, get_validity_period()};
handle_call({get_alarms, activated}, _From, State) -> handle_call({get_alarms, activated}, _From, State) ->
Alarms = [normalize(Alarm) || Alarm <- ets:tab2list(?ACTIVATED_ALARM)], Alarms = [normalize(Alarm) || Alarm <- ets:tab2list(?ACTIVATED_ALARM)],
{reply, Alarms, State}; {reply, Alarms, State, get_validity_period()};
handle_call({get_alarms, deactivated}, _From, State) -> handle_call({get_alarms, deactivated}, _From, State) ->
Alarms = [normalize(Alarm) || Alarm <- ets:tab2list(?DEACTIVATED_ALARM)], Alarms = [normalize(Alarm) || Alarm <- ets:tab2list(?DEACTIVATED_ALARM)],
{reply, Alarms, State}; {reply, Alarms, State, get_validity_period()};
handle_call(Req, _From, State) -> handle_call(Req, From, State) ->
?SLOG(error, #{msg => "unexpected_call", call => Req}), ?SLOG(error, #{msg => "unexpected_call", call_req => Req, from => From}),
{reply, ignored, State}. {reply, ignored, State, get_validity_period()}.
handle_cast(Msg, State) -> handle_cast(Msg, State) ->
?SLOG(error, #{msg => "unexpected_cast", cast => Msg}), ?SLOG(error, #{msg => "unexpected_cast", cast_req => Msg}),
{noreply, State}. {noreply, State, get_validity_period()}.
handle_info({timeout, _TRef, delete_expired_deactivated_alarm}, handle_info(timeout, State) ->
#state{timer = TRef} = State) ->
Period = get_validity_period(), Period = get_validity_period(),
delete_expired_deactivated_alarms(erlang:system_time(microsecond) - Period * 1000), delete_expired_deactivated_alarms(erlang:system_time(microsecond) - Period * 1000),
{noreply, State#state{timer = ensure_timer(TRef, Period)}}; {noreply, State, Period};
handle_info({update_timer, Period}, #state{timer = TRef} = State) ->
?SLOG(warning, #{msg => "validity_timer_updated", period => Period}),
{noreply, State#state{timer = ensure_timer(TRef, Period)}};
handle_info(Info, State) -> handle_info(Info, State) ->
?SLOG(error, #{msg => "unexpected_info", info => Info}), ?SLOG(error, #{msg => "unexpected_info", info_req => Info}),
{noreply, State}. {noreply, State, get_validity_period()}.
terminate(_Reason, _State) -> terminate(_Reason, _State) ->
ok = emqx_config_handler:remove_handler([alarm]),
ok. ok.
code_change(_OldVsn, State, _Extra) -> code_change(_OldVsn, State, _Extra) ->
@ -271,8 +239,21 @@ code_change(_OldVsn, State, _Extra) ->
get_validity_period() -> get_validity_period() ->
emqx:get_config([alarm, validity_period]). emqx:get_config([alarm, validity_period]).
deactivate_alarm(Details, #activated_alarm{activate_at = ActivateAt, name = Name, create_activate_alarm(Name, Details, Message) ->
details = Details0, message = Msg0}) -> case mnesia:read(?ACTIVATED_ALARM, Name) of
[#activated_alarm{name = Name}] ->
mnesia:abort({error, already_existed});
[] ->
Alarm = #activated_alarm{name = Name,
details = Details,
message = normalize_message(Name, iolist_to_binary(Message)),
activate_at = erlang:system_time(microsecond)},
ok = mnesia:write(?ACTIVATED_ALARM, Alarm, write),
Alarm
end.
deactivate_alarm(#activated_alarm{activate_at = ActivateAt, name = Name,
details = Details0, message = Msg0}, Details, Message) ->
SizeLimit = emqx:get_config([alarm, size_limit]), SizeLimit = emqx:get_config([alarm, size_limit]),
case SizeLimit > 0 andalso (mnesia:table_info(?DEACTIVATED_ALARM, size) >= SizeLimit) of case SizeLimit > 0 andalso (mnesia:table_info(?DEACTIVATED_ALARM, size) >= SizeLimit) of
true -> true ->
@ -286,7 +267,7 @@ deactivate_alarm(Details, #activated_alarm{activate_at = ActivateAt, name = Name
HistoryAlarm = make_deactivated_alarm(ActivateAt, Name, Details0, Msg0, HistoryAlarm = make_deactivated_alarm(ActivateAt, Name, Details0, Msg0,
erlang:system_time(microsecond)), erlang:system_time(microsecond)),
DeActAlarm = make_deactivated_alarm(ActivateAt, Name, Details, DeActAlarm = make_deactivated_alarm(ActivateAt, Name, Details,
normalize_message(Name, Details), normalize_message(Name, iolist_to_binary(Message)),
erlang:system_time(microsecond)), erlang:system_time(microsecond)),
mria:dirty_write(?DEACTIVATED_ALARM, HistoryAlarm), mria:dirty_write(?DEACTIVATED_ALARM, HistoryAlarm),
mria:dirty_delete(?ACTIVATED_ALARM, Name), mria:dirty_delete(?ACTIVATED_ALARM, Name),
@ -329,13 +310,6 @@ clear_table(TableName) ->
ok ok
end. end.
ensure_timer(OldTRef, Period) ->
_ = case is_reference(OldTRef) of
true -> erlang:cancel_timer(OldTRef);
false -> ok
end,
emqx_misc:start_timer(Period, delete_expired_deactivated_alarm).
delete_expired_deactivated_alarms(Checkpoint) -> delete_expired_deactivated_alarms(Checkpoint) ->
delete_expired_deactivated_alarms(mnesia:dirty_first(?DEACTIVATED_ALARM), Checkpoint). delete_expired_deactivated_alarms(mnesia:dirty_first(?DEACTIVATED_ALARM), Checkpoint).
@ -368,16 +342,12 @@ do_actions(deactivate, Alarm = #deactivated_alarm{name = Name}, [log | More]) ->
do_actions(deactivate, Alarm, More); do_actions(deactivate, Alarm, More);
do_actions(Operation, Alarm, [publish | More]) -> do_actions(Operation, Alarm, [publish | More]) ->
Topic = topic(Operation), Topic = topic(Operation),
{ok, Payload} = encode_to_json(Alarm), {ok, Payload} = emqx_json:safe_encode(normalize(Alarm)),
Message = emqx_message:make(?MODULE, 0, Topic, Payload, #{sys => true}, Message = emqx_message:make(?MODULE, 0, Topic, Payload, #{sys => true},
#{properties => #{'Content-Type' => <<"application/json">>}}), #{properties => #{'Content-Type' => <<"application/json">>}}),
%% TODO log failed publishes
_ = emqx_broker:safe_publish(Message), _ = emqx_broker:safe_publish(Message),
do_actions(Operation, Alarm, More). do_actions(Operation, Alarm, More).
encode_to_json(Alarm) ->
emqx_json:safe_encode(normalize(Alarm)).
topic(activate) -> topic(activate) ->
emqx_topic:systop(<<"alarms/activate">>); emqx_topic:systop(<<"alarms/activate">>);
topic(deactivate) -> topic(deactivate) ->
@ -405,25 +375,6 @@ normalize(#deactivated_alarm{activate_at = ActivateAt,
deactivate_at => DeactivateAt, deactivate_at => DeactivateAt,
activated => false}. activated => false}.
normalize_message(Name, no_details) -> normalize_message(Name, <<"">>) ->
list_to_binary(io_lib:format("~p", [Name])); list_to_binary(io_lib:format("~p", [Name]));
normalize_message(runq_overload, #{node := Node, runq_length := Len}) -> normalize_message(_Name, Message) -> Message.
list_to_binary(io_lib:format("VM is overloaded on node: ~p: ~p", [Node, Len]));
normalize_message(high_system_memory_usage, #{high_watermark := HighWatermark}) ->
list_to_binary(io_lib:format("System memory usage is higher than ~p%", [HighWatermark]));
normalize_message(high_process_memory_usage, #{high_watermark := HighWatermark}) ->
list_to_binary(io_lib:format("Process memory usage is higher than ~p%", [HighWatermark]));
normalize_message(high_cpu_usage, #{usage := Usage}) ->
list_to_binary(io_lib:format("~ts cpu usage", [Usage]));
normalize_message(too_many_processes, #{usage := Usage}) ->
list_to_binary(io_lib:format("~ts process usage", [Usage]));
normalize_message(cluster_rpc_apply_failed, #{tnx_id := TnxId}) ->
list_to_binary(io_lib:format("cluster_rpc_apply_failed:~w", [TnxId]));
normalize_message(partition, #{occurred := Node}) ->
list_to_binary(io_lib:format("Partition occurs at node ~ts", [Node]));
normalize_message(<<"resource", _/binary>>, #{type := Type, id := ID}) ->
list_to_binary(io_lib:format("Resource ~ts(~ts) is down", [Type, ID]));
normalize_message(<<"conn_congestion/", Info/binary>>, _) ->
list_to_binary(io_lib:format("connection congested: ~ts", [Info]));
normalize_message(_Name, _UnknownDetails) ->
<<"Unknown alarm">>.

View File

@ -56,14 +56,18 @@ init(_) ->
{ok, []}. {ok, []}.
handle_event({set_alarm, {system_memory_high_watermark, []}}, State) -> handle_event({set_alarm, {system_memory_high_watermark, []}}, State) ->
HighWatermark = emqx_os_mon:get_sysmem_high_watermark(),
Message = to_bin("System memory usage is higher than ~p%", [HighWatermark]),
emqx_alarm:activate(high_system_memory_usage, emqx_alarm:activate(high_system_memory_usage,
#{high_watermark => emqx_os_mon:get_sysmem_high_watermark()}), #{high_watermark => HighWatermark}, Message),
{ok, State}; {ok, State};
handle_event({set_alarm, {process_memory_high_watermark, Pid}}, State) -> handle_event({set_alarm, {process_memory_high_watermark, Pid}}, State) ->
HighWatermark = emqx_os_mon:get_procmem_high_watermark(),
Message = to_bin("Process memory usage is higher than ~p%", [HighWatermark]),
emqx_alarm:activate(high_process_memory_usage, emqx_alarm:activate(high_process_memory_usage,
#{pid => list_to_binary(pid_to_list(Pid)), #{pid => list_to_binary(pid_to_list(Pid)),
high_watermark => emqx_os_mon:get_procmem_high_watermark()}), high_watermark => HighWatermark}, Message),
{ok, State}; {ok, State};
handle_event({clear_alarm, system_memory_high_watermark}, State) -> handle_event({clear_alarm, system_memory_high_watermark}, State) ->
@ -75,7 +79,9 @@ handle_event({clear_alarm, process_memory_high_watermark}, State) ->
{ok, State}; {ok, State};
handle_event({set_alarm, {?LC_ALARM_ID_RUNQ, Info}}, State) -> handle_event({set_alarm, {?LC_ALARM_ID_RUNQ, Info}}, State) ->
emqx_alarm:activate(runq_overload, Info), #{node := Node, runq_length := Len} = Info,
Message = to_bin("VM is overloaded on node: ~p: ~p", [Node, Len]),
emqx_alarm:activate(runq_overload, Info, Message),
{ok, State}; {ok, State};
handle_event({clear_alarm, ?LC_ALARM_ID_RUNQ}, State) -> handle_event({clear_alarm, ?LC_ALARM_ID_RUNQ}, State) ->
@ -95,3 +101,6 @@ terminate(swap, _State) ->
{emqx_alarm_handler, []}; {emqx_alarm_handler, []};
terminate(_, _) -> terminate(_, _) ->
ok. ok.
to_bin(Format, Args) ->
io_lib:format(Format, Args).

View File

@ -30,7 +30,6 @@
]). ]).
-include("emqx.hrl"). -include("emqx.hrl").
-include("emqx_release.hrl").
-include("logger.hrl"). -include("logger.hrl").
-define(APP, emqx). -define(APP, emqx).
@ -40,6 +39,7 @@
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
start(_Type, _Args) -> start(_Type, _Args) ->
ok = emqx_release:put_edition(),
ok = maybe_load_config(), ok = maybe_load_config(),
ok = emqx_persistent_session:init_db_backend(), ok = emqx_persistent_session:init_db_backend(),
ok = maybe_start_quicer(), ok = maybe_start_quicer(),
@ -107,30 +107,7 @@ is_quicer_app_present() ->
is_quic_listener_configured() -> is_quic_listener_configured() ->
emqx_listeners:has_enabled_listener_conf_by_type(quic). emqx_listeners:has_enabled_listener_conf_by_type(quic).
get_description() -> get_description() -> emqx_release:description().
{ok, Descr0} = application:get_key(?APP, description),
case os:getenv("EMQX_DESCRIPTION") of
false -> Descr0;
"" -> Descr0;
Str -> string:strip(Str, both, $\n)
end.
get_release() -> get_release() ->
case lists:keyfind(emqx_vsn, 1, ?MODULE:module_info(compile)) of emqx_release:version().
false -> %% For TEST build or dependency build.
release_in_macro();
{_, Vsn} -> %% For emqx release build
VsnStr = release_in_macro(),
case string:str(Vsn, VsnStr) of
1 -> ok;
_ ->
erlang:error(#{ reason => version_mismatch
, source => VsnStr
, built_for => Vsn
})
end,
Vsn
end.
release_in_macro() ->
element(2, ?EMQX_RELEASE).
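The description and release version now come from emqx_release; a minimal sketch of the call sites assumed by this diff (return values shown as examples only):

    ok = emqx_release:put_edition(),      %% record the edition at startup
    Descr = emqx_release:description(),   %% banner/description text
    Vsn = emqx_release:version().         %% e.g. "5.0-beta.2"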

View File

@ -25,6 +25,8 @@
-include("emqx.hrl"). -include("emqx.hrl").
-include("logger.hrl"). -include("logger.hrl").
-include_lib("stdlib/include/ms_transform.hrl").
%% The authentication entrypoint. %% The authentication entrypoint.
-export([ authenticate/2 -export([ authenticate/2
]). ]).
@ -45,6 +47,7 @@
, delete_chain/1 , delete_chain/1
, lookup_chain/1 , lookup_chain/1
, list_chains/0 , list_chains/0
, list_chain_names/0
, create_authenticator/2 , create_authenticator/2
, delete_authenticator/2 , delete_authenticator/2
, update_authenticator/3 , update_authenticator/3
@ -76,8 +79,8 @@
]). ]).
%% proxy callback %% proxy callback
-export([ pre_config_update/2 -export([ pre_config_update/3
, post_config_update/4 , post_config_update/5
]). ]).
-export_type([ authenticator_id/0 -export_type([ authenticator_id/0
@ -92,9 +95,6 @@
-define(CHAINS_TAB, emqx_authn_chains). -define(CHAINS_TAB, emqx_authn_chains).
-define(VER_1, <<"1">>).
-define(VER_2, <<"2">>).
-type chain_name() :: atom(). -type chain_name() :: atom().
-type authenticator_id() :: binary(). -type authenticator_id() :: binary().
-type position() :: top | bottom | {before, authenticator_id()}. -type position() :: top | bottom | {before, authenticator_id()}.
@ -120,10 +120,10 @@
%% parse and validate it, and return parsed result. %% parse and validate it, and return parsed result.
-callback check_config(config()) -> config(). -callback check_config(config()) -> config().
-callback create(Config) -callback create(AuthenticatorID, Config)
-> {ok, State} -> {ok, State}
| {error, term()} | {error, term()}
when Config::config(), State::state(). when AuthenticatorID::authenticator_id(), Config::config(), State::state().
-callback update(Config, State) -callback update(Config, State)
-> {ok, NewState} -> {ok, NewState}
@ -192,29 +192,6 @@ authenticate(#{listener := Listener, protocol := Protocol} = Credential, _AuthRe
NAuthenticators -> do_authenticate(NAuthenticators, Credential) NAuthenticators -> do_authenticate(NAuthenticators, Credential)
end. end.
do_authenticate([], _) ->
{stop, {error, not_authorized}};
do_authenticate([#authenticator{id = ID, provider = Provider, state = State} | More], Credential) ->
try Provider:authenticate(Credential, State) of
ignore ->
do_authenticate(More, Credential);
Result ->
%% {ok, Extra}
%% {ok, Extra, AuthData}
%% {continue, AuthCache}
%% {continue, AuthData, AuthCache}
%% {error, Reason}
{stop, Result}
catch
Class:Reason:Stacktrace ->
?SLOG(warning, #{msg => "unexpected_error_in_authentication",
exception => Class,
reason => Reason,
stacktrace => Stacktrace,
authenticator => ID}),
do_authenticate(More, Credential)
end.
get_authenticators(Listener, Global) -> get_authenticators(Listener, Global) ->
case ets:lookup(?CHAINS_TAB, Listener) of case ets:lookup(?CHAINS_TAB, Listener) of
[#chain{authenticators = Authenticators}] -> [#chain{authenticators = Authenticators}] ->
@ -235,11 +212,11 @@ get_enabled(Authenticators) ->
%% APIs %% APIs
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
pre_config_update(UpdateReq, OldConfig) -> pre_config_update(Path, UpdateReq, OldConfig) ->
emqx_authentication_config:pre_config_update(UpdateReq, OldConfig). emqx_authentication_config:pre_config_update(Path, UpdateReq, OldConfig).
post_config_update(UpdateReq, NewConfig, OldConfig, AppEnvs) -> post_config_update(Path, UpdateReq, NewConfig, OldConfig, AppEnvs) ->
emqx_authentication_config:post_config_update(UpdateReq, NewConfig, OldConfig, AppEnvs). emqx_authentication_config:post_config_update(Path, UpdateReq, NewConfig, OldConfig, AppEnvs).
%% @doc Get all registered authentication providers. %% @doc Get all registered authentication providers.
get_providers() -> get_providers() ->
@ -274,6 +251,9 @@ initialize_authentication(ChainName, AuthenticatorsConfig) ->
-spec start_link() -> {ok, pid()} | ignore | {error, term()}. -spec start_link() -> {ok, pid()} | ignore | {error, term()}.
start_link() -> start_link() ->
%% Create chains ETS table here so that it belongs to the supervisor
%% and survives `emqx_authentication` crashes.
ok = create_chain_table(),
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []). gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
-spec stop() -> ok. -spec stop() -> ok.
@ -312,13 +292,24 @@ delete_chain(Name) ->
-spec lookup_chain(chain_name()) -> {ok, chain()} | {error, term()}. -spec lookup_chain(chain_name()) -> {ok, chain()} | {error, term()}.
lookup_chain(Name) -> lookup_chain(Name) ->
call({lookup_chain, Name}). case ets:lookup(?CHAINS_TAB, Name) of
[] ->
{error, {not_found, {chain, Name}}};
[Chain] ->
{ok, serialize_chain(Chain)}
end.
-spec list_chains() -> {ok, [chain()]}. -spec list_chains() -> {ok, [chain()]}.
list_chains() -> list_chains() ->
Chains = ets:tab2list(?CHAINS_TAB), Chains = ets:tab2list(?CHAINS_TAB),
{ok, [serialize_chain(Chain) || Chain <- Chains]}. {ok, [serialize_chain(Chain) || Chain <- Chains]}.
-spec list_chain_names() -> {ok, [atom()]}.
list_chain_names() ->
Select = ets:fun2ms(fun(#chain{name = Name}) -> Name end),
ChainNames = ets:select(?CHAINS_TAB, Select),
{ok, ChainNames}.
-spec create_authenticator(chain_name(), config()) -> {ok, authenticator()} | {error, term()}. -spec create_authenticator(chain_name(), config()) -> {ok, authenticator()} | {error, term()}.
create_authenticator(ChainName, Config) -> create_authenticator(ChainName, Config) ->
call({create_authenticator, ChainName, Config}). call({create_authenticator, ChainName, Config}).
@ -327,11 +318,13 @@ create_authenticator(ChainName, Config) ->
delete_authenticator(ChainName, AuthenticatorID) -> delete_authenticator(ChainName, AuthenticatorID) ->
call({delete_authenticator, ChainName, AuthenticatorID}). call({delete_authenticator, ChainName, AuthenticatorID}).
-spec update_authenticator(chain_name(), authenticator_id(), config()) -> {ok, authenticator()} | {error, term()}. -spec update_authenticator(chain_name(), authenticator_id(), config()) ->
{ok, authenticator()} | {error, term()}.
update_authenticator(ChainName, AuthenticatorID, Config) -> update_authenticator(ChainName, AuthenticatorID, Config) ->
call({update_authenticator, ChainName, AuthenticatorID, Config}). call({update_authenticator, ChainName, AuthenticatorID, Config}).
-spec lookup_authenticator(chain_name(), authenticator_id()) -> {ok, authenticator()} | {error, term()}. -spec lookup_authenticator(chain_name(), authenticator_id()) ->
{ok, authenticator()} | {error, term()}.
lookup_authenticator(ChainName, AuthenticatorID) -> lookup_authenticator(ChainName, AuthenticatorID) ->
case ets:lookup(?CHAINS_TAB, ChainName) of case ets:lookup(?CHAINS_TAB, ChainName) of
[] -> [] ->
@ -362,7 +355,8 @@ move_authenticator(ChainName, AuthenticatorID, Position) ->
import_users(ChainName, AuthenticatorID, Filename) -> import_users(ChainName, AuthenticatorID, Filename) ->
call({import_users, ChainName, AuthenticatorID, Filename}). call({import_users, ChainName, AuthenticatorID, Filename}).
-spec add_user(chain_name(), authenticator_id(), user_info()) -> {ok, user_info()} | {error, term()}. -spec add_user(chain_name(), authenticator_id(), user_info()) ->
{ok, user_info()} | {error, term()}.
add_user(ChainName, AuthenticatorID, UserInfo) -> add_user(ChainName, AuthenticatorID, UserInfo) ->
call({add_user, ChainName, AuthenticatorID, UserInfo}). call({add_user, ChainName, AuthenticatorID, UserInfo}).
@ -370,11 +364,13 @@ add_user(ChainName, AuthenticatorID, UserInfo) ->
delete_user(ChainName, AuthenticatorID, UserID) -> delete_user(ChainName, AuthenticatorID, UserID) ->
call({delete_user, ChainName, AuthenticatorID, UserID}). call({delete_user, ChainName, AuthenticatorID, UserID}).
-spec update_user(chain_name(), authenticator_id(), binary(), map()) -> {ok, user_info()} | {error, term()}. -spec update_user(chain_name(), authenticator_id(), binary(), map()) ->
{ok, user_info()} | {error, term()}.
update_user(ChainName, AuthenticatorID, UserID, NewUserInfo) -> update_user(ChainName, AuthenticatorID, UserID, NewUserInfo) ->
call({update_user, ChainName, AuthenticatorID, UserID, NewUserInfo}). call({update_user, ChainName, AuthenticatorID, UserID, NewUserInfo}).
-spec lookup_user(chain_name(), authenticator_id(), binary()) -> {ok, user_info()} | {error, term()}. -spec lookup_user(chain_name(), authenticator_id(), binary()) ->
{ok, user_info()} | {error, term()}.
lookup_user(ChainName, AuthenticatorID, UserID) -> lookup_user(ChainName, AuthenticatorID, UserID) ->
call({lookup_user, ChainName, AuthenticatorID, UserID}). call({lookup_user, ChainName, AuthenticatorID, UserID}).
@ -387,9 +383,6 @@ list_users(ChainName, AuthenticatorID, Params) ->
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
init(_Opts) -> init(_Opts) ->
_ = ets:new(?CHAINS_TAB, [ named_table, set, public
, {keypos, #chain.name}
, {read_concurrency, true}]),
ok = emqx_config_handler:add_handler([authentication], ?MODULE), ok = emqx_config_handler:add_handler([authentication], ?MODULE),
ok = emqx_config_handler:add_handler([listeners, '?', '?', authentication], ?MODULE), ok = emqx_config_handler:add_handler([listeners, '?', '?', authentication], ?MODULE),
{ok, #{hooked => false, providers => #{}}}. {ok, #{hooked => false, providers => #{}}}.
@ -427,95 +420,36 @@ handle_call({delete_chain, Name}, _From, State) ->
[] -> [] ->
reply({error, {not_found, {chain, Name}}}, State); reply({error, {not_found, {chain, Name}}}, State);
[#chain{authenticators = Authenticators}] -> [#chain{authenticators = Authenticators}] ->
_ = [do_delete_authenticator(Authenticator) || Authenticator <- Authenticators], _ = [do_destroy_authenticator(Authenticator) || Authenticator <- Authenticators],
true = ets:delete(?CHAINS_TAB, Name), true = ets:delete(?CHAINS_TAB, Name),
reply(ok, maybe_unhook(State)) reply(ok, maybe_unhook(State))
end; end;
handle_call({lookup_chain, Name}, _From, State) ->
case ets:lookup(?CHAINS_TAB, Name) of
[] ->
reply({error, {not_found, {chain, Name}}}, State);
[Chain] ->
reply({ok, serialize_chain(Chain)}, State)
end;
handle_call({create_authenticator, ChainName, Config}, _From, #{providers := Providers} = State) -> handle_call({create_authenticator, ChainName, Config}, _From, #{providers := Providers} = State) ->
UpdateFun = UpdateFun = fun(Chain) ->
fun(#chain{authenticators = Authenticators} = Chain) -> handle_create_authenticator(Chain, Config, Providers)
AuthenticatorID = authenticator_id(Config), end,
case lists:keymember(AuthenticatorID, #authenticator.id, Authenticators) of
true ->
{error, {already_exists, {authenticator, AuthenticatorID}}};
false ->
case do_create_authenticator(ChainName, AuthenticatorID, Config, Providers) of
{ok, Authenticator} ->
NAuthenticators = Authenticators ++ [Authenticator#authenticator{enable = maps:get(enable, Config)}],
true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = NAuthenticators}),
{ok, serialize_authenticator(Authenticator)};
{error, Reason} ->
{error, Reason}
end
end
end,
Reply = update_chain(ChainName, UpdateFun), Reply = update_chain(ChainName, UpdateFun),
reply(Reply, maybe_hook(State)); reply(Reply, maybe_hook(State));
handle_call({delete_authenticator, ChainName, AuthenticatorID}, _From, State) -> handle_call({delete_authenticator, ChainName, AuthenticatorID}, _From, State) ->
UpdateFun = UpdateFun = fun(Chain) ->
fun(#chain{authenticators = Authenticators} = Chain) -> handle_delete_authenticator(Chain, AuthenticatorID)
case lists:keytake(AuthenticatorID, #authenticator.id, Authenticators) of end,
false ->
{error, {not_found, {authenticator, AuthenticatorID}}};
{value, Authenticator, NAuthenticators} ->
_ = do_delete_authenticator(Authenticator),
true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = NAuthenticators}),
ok
end
end,
Reply = update_chain(ChainName, UpdateFun), Reply = update_chain(ChainName, UpdateFun),
reply(Reply, maybe_unhook(State)); reply(Reply, maybe_unhook(State));
handle_call({update_authenticator, ChainName, AuthenticatorID, Config}, _From, State) -> handle_call({update_authenticator, ChainName, AuthenticatorID, Config}, _From, State) ->
UpdateFun = UpdateFun = fun(Chain) ->
fun(#chain{authenticators = Authenticators} = Chain) -> handle_update_authenticator(Chain, AuthenticatorID, Config)
case lists:keyfind(AuthenticatorID, #authenticator.id, Authenticators) of end,
false ->
{error, {not_found, {authenticator, AuthenticatorID}}};
#authenticator{provider = Provider,
state = #{version := Version} = ST} = Authenticator ->
case AuthenticatorID =:= authenticator_id(Config) of
true ->
Unique = unique(ChainName, AuthenticatorID, Version),
case Provider:update(Config#{'_unique' => Unique}, ST) of
{ok, NewST} ->
NewAuthenticator = Authenticator#authenticator{state = switch_version(NewST),
enable = maps:get(enable, Config)},
NewAuthenticators = replace_authenticator(AuthenticatorID, NewAuthenticator, Authenticators),
true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = NewAuthenticators}),
{ok, serialize_authenticator(NewAuthenticator)};
{error, Reason} ->
{error, Reason}
end;
false ->
{error, change_of_authentication_type_is_not_allowed}
end
end
end,
Reply = update_chain(ChainName, UpdateFun), Reply = update_chain(ChainName, UpdateFun),
reply(Reply, State); reply(Reply, State);
handle_call({move_authenticator, ChainName, AuthenticatorID, Position}, _From, State) -> handle_call({move_authenticator, ChainName, AuthenticatorID, Position}, _From, State) ->
UpdateFun = UpdateFun = fun(Chain) ->
fun(#chain{authenticators = Authenticators} = Chain) -> handle_move_authenticator(Chain, AuthenticatorID, Position)
case do_move_authenticator(AuthenticatorID, Authenticators, Position) of end,
{ok, NAuthenticators} ->
true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = NAuthenticators}),
ok;
{error, Reason} ->
{error, Reason}
end
end,
Reply = update_chain(ChainName, UpdateFun), Reply = update_chain(ChainName, UpdateFun),
reply(Reply, State); reply(Reply, State);
@ -569,9 +503,115 @@ terminate(Reason, _State) ->
code_change(_OldVsn, State, _Extra) -> code_change(_OldVsn, State, _Extra) ->
{ok, State}. {ok, State}.
%%------------------------------------------------------------------------------
%% Private functions
%%------------------------------------------------------------------------------
handle_update_authenticator(Chain, AuthenticatorID, Config) ->
#chain{authenticators = Authenticators} = Chain,
case lists:keyfind(AuthenticatorID, #authenticator.id, Authenticators) of
false ->
{error, {not_found, {authenticator, AuthenticatorID}}};
#authenticator{provider = Provider, state = ST} = Authenticator ->
case AuthenticatorID =:= authenticator_id(Config) of
true ->
case Provider:update(Config, ST) of
{ok, NewST} ->
NewAuthenticator = Authenticator#authenticator{
state = NewST,
enable = maps:get(enable, Config)},
NewAuthenticators = replace_authenticator(
AuthenticatorID,
NewAuthenticator,
Authenticators),
true = ets:insert(
?CHAINS_TAB,
Chain#chain{authenticators = NewAuthenticators}),
{ok, serialize_authenticator(NewAuthenticator)};
{error, Reason} ->
{error, Reason}
end;
false ->
{error, change_of_authentication_type_is_not_allowed}
end
end.
handle_delete_authenticator(Chain, AuthenticatorID) ->
MatchFun = fun(#authenticator{id = ID}) ->
ID =:= AuthenticatorID
end,
case do_delete_authenticators(MatchFun, Chain) of
[] -> {error, {not_found, {authenticator, AuthenticatorID}}};
[AuthenticatorID] -> ok
end.
handle_move_authenticator(Chain, AuthenticatorID, Position) ->
#chain{authenticators = Authenticators} = Chain,
case do_move_authenticator(AuthenticatorID, Authenticators, Position) of
{ok, NAuthenticators} ->
true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = NAuthenticators}),
ok;
{error, Reason} ->
{error, Reason}
end.
handle_create_authenticator(Chain, Config, Providers) ->
#chain{authenticators = Authenticators} = Chain,
AuthenticatorID = authenticator_id(Config),
case lists:keymember(AuthenticatorID, #authenticator.id, Authenticators) of
true ->
{error, {already_exists, {authenticator, AuthenticatorID}}};
false ->
case do_create_authenticator(AuthenticatorID, Config, Providers) of
{ok, Authenticator} ->
NAuthenticators =
Authenticators ++
[Authenticator#authenticator{enable = maps:get(enable, Config)}],
true = ets:insert(?CHAINS_TAB,
Chain#chain{authenticators = NAuthenticators}),
{ok, serialize_authenticator(Authenticator)};
{error, Reason} ->
{error, Reason}
end
end.
do_authenticate([], _) ->
{stop, {error, not_authorized}};
do_authenticate([#authenticator{id = ID, provider = Provider, state = State} | More], Credential) ->
try Provider:authenticate(Credential, State) of
ignore ->
do_authenticate(More, Credential);
Result ->
%% {ok, Extra}
%% {ok, Extra, AuthData}
%% {continue, AuthCache}
%% {continue, AuthData, AuthCache}
%% {error, Reason}
{stop, Result}
catch
Class:Reason:Stacktrace ->
?SLOG(warning, #{msg => "unexpected_error_in_authentication",
exception => Class,
reason => Reason,
stacktrace => Stacktrace,
authenticator => ID}),
do_authenticate(More, Credential)
end.
reply(Reply, State) -> reply(Reply, State) ->
{reply, Reply, State}. {reply, Reply, State}.
create_chain_table() ->
try
_ = ets:new(?CHAINS_TAB, [named_table, set, public,
{keypos, #chain.name},
{read_concurrency, true}]),
ok
catch
error:badarg -> ok
end.
global_chain(mqtt) -> global_chain(mqtt) ->
'mqtt:global'; 'mqtt:global';
global_chain('mqtt-sn') -> global_chain('mqtt-sn') ->
@ -611,25 +651,35 @@ maybe_unhook(#{hooked := true} = State) ->
maybe_unhook(State) -> maybe_unhook(State) ->
State. State.
do_create_authenticator(ChainName, AuthenticatorID, #{enable := Enable} = Config, Providers) -> do_create_authenticator(AuthenticatorID, #{enable := Enable} = Config, Providers) ->
case maps:get(authn_type(Config), Providers, undefined) of case maps:get(authn_type(Config), Providers, undefined) of
undefined -> undefined ->
{error, no_available_provider}; {error, no_available_provider};
Provider -> Provider ->
Unique = unique(ChainName, AuthenticatorID, ?VER_1), case Provider:create(AuthenticatorID, Config) of
case Provider:create(Config#{'_unique' => Unique}) of
{ok, State} -> {ok, State} ->
Authenticator = #authenticator{id = AuthenticatorID, Authenticator = #authenticator{id = AuthenticatorID,
provider = Provider, provider = Provider,
enable = Enable, enable = Enable,
state = switch_version(State)}, state = State},
{ok, Authenticator}; {ok, Authenticator};
{error, Reason} -> {error, Reason} ->
{error, Reason} {error, Reason}
end end
end. end.
do_delete_authenticator(#authenticator{provider = Provider, state = State}) -> do_delete_authenticators(MatchFun, #chain{authenticators = Authenticators} = Chain) ->
{Matching, Others} = lists:partition(MatchFun, Authenticators),
MatchingIDs = lists:map(
fun(#authenticator{id = ID}) -> ID end,
Matching),
ok = lists:foreach(fun do_destroy_authenticator/1, Matching),
true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = Others}),
MatchingIDs.
do_destroy_authenticator(#authenticator{provider = Provider, state = State}) ->
_ = Provider:destroy(State), _ = Provider:destroy(State),
ok. ok.
@ -702,17 +752,6 @@ serialize_authenticator(#authenticator{id = ID,
, state => State , state => State
}. }.
unique(ChainName, AuthenticatorID, Version) ->
NChainName = atom_to_binary(ChainName),
<<NChainName/binary, "/", AuthenticatorID/binary, ":", Version/binary>>.
switch_version(State = #{version := ?VER_1}) ->
State#{version := ?VER_2};
switch_version(State = #{version := ?VER_2}) ->
State#{version := ?VER_1};
switch_version(State) ->
State#{version => ?VER_2}.
authn_type(#{mechanism := Mechanism, backend := Backend}) -> authn_type(#{mechanism := Mechanism, backend := Backend}) ->
{Mechanism, Backend}; {Mechanism, Backend};
authn_type(#{mechanism := Mechanism}) -> authn_type(#{mechanism := Mechanism}) ->
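With create/2 now taking the authenticator ID, a provider module would look roughly like this. Everything below (the module name my_authn_provider, the state map, the credential match) is invented for illustration; only the callback names and arities come from this diff:

    -module(my_authn_provider).
    -behaviour(emqx_authentication).

    -export([check_config/1, create/2, update/2, authenticate/2, destroy/1]).

    check_config(Config) ->
        Config.

    create(AuthenticatorID, Config) ->
        %% The ID is available at creation time (e.g. for naming resources).
        {ok, #{id => AuthenticatorID, config => Config}}.

    update(Config, State) ->
        {ok, State#{config := Config}}.

    authenticate(#{username := _}, _State) ->
        {ok, #{is_superuser => false}};
    authenticate(_Credential, _State) ->
        ignore.

    destroy(_State) ->
        ok.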

View File

@ -19,8 +19,8 @@
-behaviour(emqx_config_handler). -behaviour(emqx_config_handler).
-export([ pre_config_update/2 -export([ pre_config_update/3
, post_config_update/4 , post_config_update/5
]). ]).
-export([ authenticator_id/1 -export([ authenticator_id/1
@ -53,9 +53,9 @@
%% Callbacks of config handler %% Callbacks of config handler
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
-spec pre_config_update(update_request(), emqx_config:raw_config()) -spec pre_config_update(list(atom()), update_request(), emqx_config:raw_config())
-> {ok, map() | list()} | {error, term()}. -> {ok, map() | list()} | {error, term()}.
pre_config_update(UpdateReq, OldConfig) -> pre_config_update(_, UpdateReq, OldConfig) ->
try do_pre_config_update(UpdateReq, to_list(OldConfig)) of try do_pre_config_update(UpdateReq, to_list(OldConfig)) of
{error, Reason} -> {error, Reason}; {error, Reason} -> {error, Reason};
{ok, NewConfig} -> {ok, return_map(NewConfig)} {ok, NewConfig} -> {ok, return_map(NewConfig)}
@ -102,34 +102,34 @@ do_pre_config_update({move_authenticator, _ChainName, AuthenticatorID, Position}
end end
end. end.
-spec post_config_update(update_request(), map() | list(), emqx_config:raw_config(), emqx_config:app_envs()) -spec post_config_update(list(atom()), update_request(), map() | list(), emqx_config:raw_config(), emqx_config:app_envs())
-> ok | {ok, map()} | {error, term()}. -> ok | {ok, map()} | {error, term()}.
post_config_update(UpdateReq, NewConfig, OldConfig, AppEnvs) -> post_config_update(_, UpdateReq, NewConfig, OldConfig, AppEnvs) ->
do_post_config_update(UpdateReq, check_configs(to_list(NewConfig)), OldConfig, AppEnvs). do_post_config_update(UpdateReq, check_configs(to_list(NewConfig)), OldConfig, AppEnvs).
do_post_config_update({create_authenticator, ChainName, Config}, _NewConfig, _OldConfig, _AppEnvs) -> do_post_config_update({create_authenticator, ChainName, Config}, NewConfig, _OldConfig, _AppEnvs) ->
NConfig = check_config(Config), NConfig = get_authenticator_config(authenticator_id(Config), NewConfig),
_ = emqx_authentication:create_chain(ChainName), _ = emqx_authentication:create_chain(ChainName),
emqx_authentication:create_authenticator(ChainName, NConfig); emqx_authentication:create_authenticator(ChainName, NConfig);
do_post_config_update({delete_authenticator, ChainName, AuthenticatorID}, _NewConfig, OldConfig, _AppEnvs) -> do_post_config_update({delete_authenticator, ChainName, AuthenticatorID}, _NewConfig, OldConfig, _AppEnvs) ->
case emqx_authentication:delete_authenticator(ChainName, AuthenticatorID) of case emqx_authentication:delete_authenticator(ChainName, AuthenticatorID) of
ok -> ok ->
[Config] = [Config0 || Config0 <- to_list(OldConfig), AuthenticatorID == authenticator_id(Config0)], Config = get_authenticator_config(AuthenticatorID, to_list(OldConfig)),
CertsDir = certs_dir(ChainName, AuthenticatorID), CertsDir = certs_dir(ChainName, AuthenticatorID),
ok = clear_certs(CertsDir, Config); ok = clear_certs(CertsDir, Config);
{error, Reason} -> {error, Reason} ->
{error, Reason} {error, Reason}
end; end;
do_post_config_update({update_authenticator, ChainName, AuthenticatorID, Config}, _NewConfig, _OldConfig, _AppEnvs) -> do_post_config_update({update_authenticator, ChainName, AuthenticatorID, Config}, NewConfig, _OldConfig, _AppEnvs) ->
NConfig = check_config(Config), case get_authenticator_config(authenticator_id(Config), NewConfig) of
emqx_authentication:update_authenticator(ChainName, AuthenticatorID, NConfig); {error, not_found} ->
{error, {not_found, {authenticator, AuthenticatorID}}};
NConfig ->
emqx_authentication:update_authenticator(ChainName, AuthenticatorID, NConfig)
end;
do_post_config_update({move_authenticator, ChainName, AuthenticatorID, Position}, _NewConfig, _OldConfig, _AppEnvs) -> do_post_config_update({move_authenticator, ChainName, AuthenticatorID, Position}, _NewConfig, _OldConfig, _AppEnvs) ->
emqx_authentication:move_authenticator(ChainName, AuthenticatorID, Position). emqx_authentication:move_authenticator(ChainName, AuthenticatorID, Position).
check_config(Config) ->
[Checked] = check_configs([Config]),
Checked.
check_configs(Configs) -> check_configs(Configs) ->
Providers = emqx_authentication:get_providers(), Providers = emqx_authentication:get_providers(),
lists:map(fun(C) -> do_check_conifg(C, Providers) end, Configs). lists:map(fun(C) -> do_check_conifg(C, Providers) end, Configs).
@ -208,6 +208,12 @@ clear_certs(CertsDir, Config) ->
OldSSL = maps:get(<<"ssl">>, Config, undefined), OldSSL = maps:get(<<"ssl">>, Config, undefined),
ok = emqx_tls_lib:delete_ssl_files(CertsDir, undefined, OldSSL). ok = emqx_tls_lib:delete_ssl_files(CertsDir, undefined, OldSSL).
get_authenticator_config(AuthenticatorID, AuthenticatorsConfig) ->
case [C0 || C0 <- AuthenticatorsConfig, AuthenticatorID == authenticator_id(C0)] of
[C | _] -> C;
[] -> {error, not_found}
end.
split_by_id(ID, AuthenticatorsConfig) -> split_by_id(ID, AuthenticatorsConfig) ->
case lists:foldl( case lists:foldl(
fun(C, {P1, P2, F0}) -> fun(C, {P1, P2, F0}) ->
@ -268,4 +274,3 @@ dir(ChainName, ID) when is_binary(ID) ->
binary:replace(iolist_to_binary([to_bin(ChainName), "-", ID]), <<":">>, <<"-">>); binary:replace(iolist_to_binary([to_bin(ChainName), "-", ID]), <<":">>, <<"-">>);
dir(ChainName, Config) when is_map(Config) -> dir(ChainName, Config) when is_map(Config) ->
dir(ChainName, authenticator_id(Config)). dir(ChainName, authenticator_id(Config)).
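The handler callbacks gain an explicit config path argument. A minimal sketch of an emqx_config_handler callback module using the new arities (the module name my_config_handler is invented; the pass-through bodies are placeholders):

    -module(my_config_handler).
    -behaviour(emqx_config_handler).

    -export([pre_config_update/3, post_config_update/5]).

    %% Path is e.g. [authentication] or [listeners, Type, Name, authentication].
    pre_config_update(_Path, UpdateReq, _OldRawConf) ->
        {ok, UpdateReq}.

    post_config_update(_Path, _UpdateReq, _NewConf, _OldConf, _AppEnvs) ->
        ok.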

View File

@ -37,6 +37,7 @@
, info/1 , info/1
, format/1 , format/1
, parse/1 , parse/1
, to_timestamp/1
]). ]).
%% gen_server callbacks %% gen_server callbacks
@ -108,8 +109,8 @@ parse(Params) ->
Who = pares_who(Params), Who = pares_who(Params),
By = maps:get(<<"by">>, Params, <<"mgmt_api">>), By = maps:get(<<"by">>, Params, <<"mgmt_api">>),
Reason = maps:get(<<"reason">>, Params, <<"">>), Reason = maps:get(<<"reason">>, Params, <<"">>),
At = pares_time(maps:get(<<"at">>, Params, undefined), erlang:system_time(second)), At = parse_time(maps:get(<<"at">>, Params, undefined), erlang:system_time(second)),
Until = pares_time(maps:get(<<"until">>, Params, undefined), At + 5 * 60), Until = parse_time(maps:get(<<"until">>, Params, undefined), At + 5 * 60),
#banned{ #banned{
who = Who, who = Who,
by = By, by = By,
@ -120,15 +121,15 @@ parse(Params) ->
pares_who(#{as := As, who := Who}) -> pares_who(#{as := As, who := Who}) ->
pares_who(#{<<"as">> => As, <<"who">> => Who}); pares_who(#{<<"as">> => As, <<"who">> => Who});
pares_who(#{<<"as">> := <<"peerhost">>, <<"who">> := Peerhost0}) -> pares_who(#{<<"as">> := peerhost, <<"who">> := Peerhost0}) ->
{ok, Peerhost} = inet:parse_address(binary_to_list(Peerhost0)), {ok, Peerhost} = inet:parse_address(binary_to_list(Peerhost0)),
{peerhost, Peerhost}; {peerhost, Peerhost};
pares_who(#{<<"as">> := As, <<"who">> := Who}) -> pares_who(#{<<"as">> := As, <<"who">> := Who}) ->
{binary_to_atom(As, utf8), Who}. {As, Who}.
pares_time(undefined, Default) -> parse_time(undefined, Default) ->
Default; Default;
pares_time(Rfc3339, _Default) -> parse_time(Rfc3339, _Default) ->
to_timestamp(Rfc3339). to_timestamp(Rfc3339).
maybe_format_host({peerhost, Host}) -> maybe_format_host({peerhost, Host}) ->
@ -145,19 +146,36 @@ to_timestamp(Rfc3339) when is_binary(Rfc3339) ->
to_timestamp(Rfc3339) -> to_timestamp(Rfc3339) ->
calendar:rfc3339_to_system_time(Rfc3339, [{unit, second}]). calendar:rfc3339_to_system_time(Rfc3339, [{unit, second}]).
-spec(create(emqx_types:banned() | map()) -> ok). -spec(create(emqx_types:banned() | map()) ->
{ok, emqx_types:banned()} | {error, {already_exist, emqx_types:banned()}}).
create(#{who := Who, create(#{who := Who,
by := By, by := By,
reason := Reason, reason := Reason,
at := At, at := At,
until := Until}) -> until := Until}) ->
mria:dirty_write(?BANNED_TAB, #banned{who = Who, Banned = #banned{
by = By, who = Who,
reason = Reason, by = By,
at = At, reason = Reason,
until = Until}); at = At,
create(Banned) when is_record(Banned, banned) -> until = Until
mria:dirty_write(?BANNED_TAB, Banned). },
create(Banned);
create(Banned = #banned{who = Who}) ->
case look_up(Who) of
[] ->
mria:dirty_write(?BANNED_TAB, Banned),
{ok, Banned};
[OldBanned = #banned{until = Until}] ->
case Until < erlang:system_time(second) of
true ->
{error, {already_exist, OldBanned}};
false ->
mria:dirty_write(?BANNED_TAB, Banned),
{ok, Banned}
end
end.
look_up(Who) when is_map(Who) -> look_up(Who) when is_map(Who) ->
look_up(pares_who(Who)); look_up(pares_who(Who));
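create/1 now reports an existing, still-valid ban instead of silently overwriting it. A usage sketch (the who tuple and the values are made up; the map keys follow the clause above):

    Now = erlang:system_time(second),
    case emqx_banned:create(#{who    => {clientid, <<"bad-client">>},
                              by     => <<"mgmt_api">>,
                              reason => <<"abuse">>,
                              at     => Now,
                              until  => Now + 300}) of
        {ok, _Banned}                  -> ok;
        {error, {already_exist, _Old}} -> already_banned
    end.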

View File

@ -81,7 +81,7 @@
-define(SUBSCRIPTION, emqx_subscription). -define(SUBSCRIPTION, emqx_subscription).
%% Guards %% Guards
-define(is_subid(Id), (is_binary(Id) orelse is_atom(Id))). -define(IS_SUBID(Id), (is_binary(Id) orelse is_atom(Id))).
-spec(start_link(atom(), pos_integer()) -> startlink_ret()). -spec(start_link(atom(), pos_integer()) -> startlink_ret()).
start_link(Pool, Id) -> start_link(Pool, Id) ->
@ -117,15 +117,17 @@ subscribe(Topic) when is_binary(Topic) ->
subscribe(Topic, undefined). subscribe(Topic, undefined).
-spec(subscribe(emqx_types:topic(), emqx_types:subid() | emqx_types:subopts()) -> ok). -spec(subscribe(emqx_types:topic(), emqx_types:subid() | emqx_types:subopts()) -> ok).
subscribe(Topic, SubId) when is_binary(Topic), ?is_subid(SubId) -> subscribe(Topic, SubId) when is_binary(Topic), ?IS_SUBID(SubId) ->
subscribe(Topic, SubId, ?DEFAULT_SUBOPTS); subscribe(Topic, SubId, ?DEFAULT_SUBOPTS);
subscribe(Topic, SubOpts) when is_binary(Topic), is_map(SubOpts) -> subscribe(Topic, SubOpts) when is_binary(Topic), is_map(SubOpts) ->
subscribe(Topic, undefined, SubOpts). subscribe(Topic, undefined, SubOpts).
-spec(subscribe(emqx_types:topic(), emqx_types:subid(), emqx_types:subopts()) -> ok). -spec(subscribe(emqx_types:topic(), emqx_types:subid(), emqx_types:subopts()) -> ok).
subscribe(Topic, SubId, SubOpts0) when is_binary(Topic), ?is_subid(SubId), is_map(SubOpts0) -> subscribe(Topic, SubId, SubOpts0) when is_binary(Topic), ?IS_SUBID(SubId), is_map(SubOpts0) ->
SubOpts = maps:merge(?DEFAULT_SUBOPTS, SubOpts0), SubOpts = maps:merge(?DEFAULT_SUBOPTS, SubOpts0),
case ets:member(?SUBOPTION, {SubPid = self(), Topic}) of _ = emqx_trace:subscribe(Topic, SubId, SubOpts),
SubPid = self(),
case ets:member(?SUBOPTION, {SubPid, Topic}) of
false -> %% New false -> %% New
ok = emqx_broker_helper:register_sub(SubPid, SubId), ok = emqx_broker_helper:register_sub(SubPid, SubId),
do_subscribe(Topic, SubPid, with_subid(SubId, SubOpts)); do_subscribe(Topic, SubPid, with_subid(SubId, SubOpts));
@ -171,6 +173,7 @@ unsubscribe(Topic) when is_binary(Topic) ->
case ets:lookup(?SUBOPTION, {SubPid, Topic}) of case ets:lookup(?SUBOPTION, {SubPid, Topic}) of
[{_, SubOpts}] -> [{_, SubOpts}] ->
_ = emqx_broker_helper:reclaim_seq(Topic), _ = emqx_broker_helper:reclaim_seq(Topic),
_ = emqx_trace:unsubscribe(Topic, SubOpts),
do_unsubscribe(Topic, SubPid, SubOpts); do_unsubscribe(Topic, SubPid, SubOpts);
[] -> ok [] -> ok
end. end.
@ -198,7 +201,7 @@ do_unsubscribe(Group, Topic, SubPid, _SubOpts) ->
-spec(publish(emqx_types:message()) -> emqx_types:publish_result()). -spec(publish(emqx_types:message()) -> emqx_types:publish_result()).
publish(Msg) when is_record(Msg, message) -> publish(Msg) when is_record(Msg, message) ->
_ = emqx_tracer:trace(publish, Msg), _ = emqx_trace:publish(Msg),
emqx_message:is_sys(Msg) orelse emqx_metrics:inc('messages.publish'), emqx_message:is_sys(Msg) orelse emqx_metrics:inc('messages.publish'),
case emqx_hooks:run_fold('message.publish', [], emqx_message:clean_dup(Msg)) of case emqx_hooks:run_fold('message.publish', [], emqx_message:clean_dup(Msg)) of
#message{headers = #{allow_publish := false}} -> #message{headers = #{allow_publish := false}} ->
@ -267,7 +270,7 @@ aggre(Routes) ->
end, [], Routes). end, [], Routes).
%% @doc Forward message to another node. %% @doc Forward message to another node.
-spec(forward(node(), emqx_types:topic(), emqx_types:delivery(), RpcMode::sync|async) -spec(forward(node(), emqx_types:topic(), emqx_types:delivery(), RpcMode::sync | async)
-> emqx_types:deliver_result()). -> emqx_types:deliver_result()).
forward(Node, To, Delivery, async) -> forward(Node, To, Delivery, async) ->
case emqx_rpc:cast(To, Node, ?BROKER, dispatch, [To, Delivery]) of case emqx_rpc:cast(To, Node, ?BROKER, dispatch, [To, Delivery]) of
@ -380,14 +383,14 @@ subscriptions(SubId) ->
-spec(subscribed(pid() | emqx_types:subid(), emqx_types:topic()) -> boolean()). -spec(subscribed(pid() | emqx_types:subid(), emqx_types:topic()) -> boolean()).
subscribed(SubPid, Topic) when is_pid(SubPid) -> subscribed(SubPid, Topic) when is_pid(SubPid) ->
ets:member(?SUBOPTION, {SubPid, Topic}); ets:member(?SUBOPTION, {SubPid, Topic});
subscribed(SubId, Topic) when ?is_subid(SubId) -> subscribed(SubId, Topic) when ?IS_SUBID(SubId) ->
SubPid = emqx_broker_helper:lookup_subpid(SubId), SubPid = emqx_broker_helper:lookup_subpid(SubId),
ets:member(?SUBOPTION, {SubPid, Topic}). ets:member(?SUBOPTION, {SubPid, Topic}).
-spec(get_subopts(pid(), emqx_types:topic()) -> maybe(emqx_types:subopts())). -spec(get_subopts(pid(), emqx_types:topic()) -> maybe(emqx_types:subopts())).
get_subopts(SubPid, Topic) when is_pid(SubPid), is_binary(Topic) -> get_subopts(SubPid, Topic) when is_pid(SubPid), is_binary(Topic) ->
lookup_value(?SUBOPTION, {SubPid, Topic}); lookup_value(?SUBOPTION, {SubPid, Topic});
get_subopts(SubId, Topic) when ?is_subid(SubId) -> get_subopts(SubId, Topic) when ?IS_SUBID(SubId) ->
case emqx_broker_helper:lookup_subpid(SubId) of case emqx_broker_helper:lookup_subpid(SubId) of
SubPid when is_pid(SubPid) -> SubPid when is_pid(SubPid) ->
get_subopts(SubPid, Topic); get_subopts(SubPid, Topic);
@ -455,7 +458,8 @@ handle_call({subscribe, Topic}, _From, State) ->
{reply, Ok, State}; {reply, Ok, State};
handle_call({subscribe, Topic, I}, _From, State) -> handle_call({subscribe, Topic, I}, _From, State) ->
Ok = case get(Shard = {Topic, I}) of Shard = {Topic, I},
Ok = case get(Shard) of
undefined -> undefined ->
_ = put(Shard, true), _ = put(Shard, true),
true = ets:insert(?SUBSCRIBER, {Topic, {shard, I}}), true = ets:insert(?SUBSCRIBER, {Topic, {shard, I}}),
@ -512,4 +516,3 @@ code_change(_OldVsn, State, _Extra) ->
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Internal functions %% Internal functions
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
View File
@ -103,7 +103,7 @@
-type(reply() :: {outgoing, emqx_types:packet()} -type(reply() :: {outgoing, emqx_types:packet()}
| {outgoing, [emqx_types:packet()]} | {outgoing, [emqx_types:packet()]}
| {event, conn_state()|updated} | {event, conn_state() | updated}
| {close, Reason :: atom()}). | {close, Reason :: atom()}).
-type(replies() :: emqx_types:packet() | reply() | [reply()]). -type(replies() :: emqx_types:packet() | reply() | [reply()]).
@ -132,7 +132,7 @@
info(Channel) -> info(Channel) ->
maps:from_list(info(?INFO_KEYS, Channel)). maps:from_list(info(?INFO_KEYS, Channel)).
-spec(info(list(atom())|atom(), channel()) -> term()). -spec(info(list(atom()) | atom(), channel()) -> term()).
info(Keys, Channel) when is_list(Keys) -> info(Keys, Channel) when is_list(Keys) ->
[{Key, info(Key, Channel)} || Key <- Keys]; [{Key, info(Key, Channel)} || Key <- Keys];
info(conninfo, #channel{conninfo = ConnInfo}) -> info(conninfo, #channel{conninfo = ConnInfo}) ->
@ -287,7 +287,7 @@ handle_in(?CONNECT_PACKET(), Channel = #channel{conn_state = ConnState})
handle_in(?CONNECT_PACKET(), Channel = #channel{conn_state = connecting}) -> handle_in(?CONNECT_PACKET(), Channel = #channel{conn_state = connecting}) ->
handle_out(connack, ?RC_PROTOCOL_ERROR, Channel); handle_out(connack, ?RC_PROTOCOL_ERROR, Channel);
handle_in(?CONNECT_PACKET(ConnPkt), Channel) -> handle_in(?CONNECT_PACKET(ConnPkt) = Packet, Channel) ->
case pipeline([fun overload_protection/2, case pipeline([fun overload_protection/2,
fun enrich_conninfo/2, fun enrich_conninfo/2,
fun run_conn_hooks/2, fun run_conn_hooks/2,
@ -297,6 +297,7 @@ handle_in(?CONNECT_PACKET(ConnPkt), Channel) ->
fun check_banned/2 fun check_banned/2
], ConnPkt, Channel#channel{conn_state = connecting}) of ], ConnPkt, Channel#channel{conn_state = connecting}) of
{ok, NConnPkt, NChannel = #channel{clientinfo = ClientInfo}} -> {ok, NConnPkt, NChannel = #channel{clientinfo = ClientInfo}} ->
?LOG(debug, "RECV ~s", [emqx_packet:format(Packet)]),
NChannel1 = NChannel#channel{ NChannel1 = NChannel#channel{
will_msg = emqx_packet:will_msg(NConnPkt), will_msg = emqx_packet:will_msg(NConnPkt),
alias_maximum = init_alias_maximum(NConnPkt, ClientInfo) alias_maximum = init_alias_maximum(NConnPkt, ClientInfo)
@ -328,17 +329,23 @@ handle_in(Packet = ?AUTH_PACKET(ReasonCode, _Properties),
connecting -> connecting ->
process_connect(NProperties, ensure_connected(NChannel)); process_connect(NProperties, ensure_connected(NChannel));
_ -> _ ->
handle_out(auth, {?RC_SUCCESS, NProperties}, NChannel#channel{conn_state = connected}) handle_out( auth
, {?RC_SUCCESS, NProperties}
, NChannel#channel{conn_state = connected}
)
end; end;
{continue, NProperties, NChannel} -> {continue, NProperties, NChannel} ->
handle_out(auth, {?RC_CONTINUE_AUTHENTICATION, NProperties}, NChannel#channel{conn_state = reauthenticating}); handle_out( auth
, {?RC_CONTINUE_AUTHENTICATION, NProperties}
, NChannel#channel{conn_state = reauthenticating}
);
{error, NReasonCode} -> {error, NReasonCode} ->
case ConnState of case ConnState of
connecting -> connecting ->
handle_out(connack, NReasonCode, Channel); handle_out(connack, NReasonCode, Channel);
_ -> _ ->
handle_out(disconnect, NReasonCode, Channel) handle_out(disconnect, NReasonCode, Channel)
end end
end end
catch catch
_Class:_Reason -> _Class:_Reason ->
@ -632,7 +639,7 @@ do_publish(PacketId, Msg = #message{qos = ?QOS_2},
?SLOG(warning, #{ ?SLOG(warning, #{
msg => "dropped_qos2_packet", msg => "dropped_qos2_packet",
reason => emqx_reason_codes:name(RC), reason => emqx_reason_codes:name(RC),
packetId => PacketId packet_id => PacketId
}), }),
ok = emqx_metrics:inc('packets.publish.dropped'), ok = emqx_metrics:inc('packets.publish.dropped'),
handle_out(pubrec, {PacketId, RC}, Channel) handle_out(pubrec, {PacketId, RC}, Channel)
@ -655,7 +662,7 @@ ensure_quota(PubRes, Channel = #channel{quota = Limiter}) ->
-compile({inline, [puback_reason_code/1]}). -compile({inline, [puback_reason_code/1]}).
puback_reason_code([]) -> ?RC_NO_MATCHING_SUBSCRIBERS; puback_reason_code([]) -> ?RC_NO_MATCHING_SUBSCRIBERS;
puback_reason_code([_|_]) -> ?RC_SUCCESS. puback_reason_code([_ | _]) -> ?RC_SUCCESS.
-compile({inline, [after_message_acked/3]}). -compile({inline, [after_message_acked/3]}).
after_message_acked(ClientInfo, Msg, PubAckProps) -> after_message_acked(ClientInfo, Msg, PubAckProps) ->
@ -674,7 +681,7 @@ process_subscribe(TopicFilters, SubProps, Channel) ->
process_subscribe([], _SubProps, Channel, Acc) -> process_subscribe([], _SubProps, Channel, Acc) ->
{lists:reverse(Acc), Channel}; {lists:reverse(Acc), Channel};
process_subscribe([Topic = {TopicFilter, SubOpts}|More], SubProps, Channel, Acc) -> process_subscribe([Topic = {TopicFilter, SubOpts} | More], SubProps, Channel, Acc) ->
case check_sub_caps(TopicFilter, SubOpts, Channel) of case check_sub_caps(TopicFilter, SubOpts, Channel) of
ok -> ok ->
{ReasonCode, NChannel} = do_subscribe(TopicFilter, {ReasonCode, NChannel} = do_subscribe(TopicFilter,
@ -716,9 +723,9 @@ process_unsubscribe(TopicFilters, UnSubProps, Channel) ->
process_unsubscribe([], _UnSubProps, Channel, Acc) -> process_unsubscribe([], _UnSubProps, Channel, Acc) ->
{lists:reverse(Acc), Channel}; {lists:reverse(Acc), Channel};
process_unsubscribe([{TopicFilter, SubOpts}|More], UnSubProps, Channel, Acc) -> process_unsubscribe([{TopicFilter, SubOpts} | More], UnSubProps, Channel, Acc) ->
{RC, NChannel} = do_unsubscribe(TopicFilter, SubOpts#{unsub_props => UnSubProps}, Channel), {RC, NChannel} = do_unsubscribe(TopicFilter, SubOpts#{unsub_props => UnSubProps}, Channel),
process_unsubscribe(More, UnSubProps, NChannel, [RC|Acc]). process_unsubscribe(More, UnSubProps, NChannel, [RC | Acc]).
do_unsubscribe(TopicFilter, SubOpts, Channel = do_unsubscribe(TopicFilter, SubOpts, Channel =
#channel{clientinfo = ClientInfo = #{mountpoint := MountPoint}, #channel{clientinfo = ClientInfo = #{mountpoint := MountPoint},
@ -790,7 +797,9 @@ handle_deliver(Delivers, Channel = #channel{takeover = true,
pendings = Pendings, pendings = Pendings,
session = Session, session = Session,
clientinfo = #{clientid := ClientId}}) -> clientinfo = #{clientid := ClientId}}) ->
NPendings = lists:append(Pendings, emqx_session:ignore_local(maybe_nack(Delivers), ClientId, Session)), NPendings = lists:append(
Pendings,
emqx_session:ignore_local(maybe_nack(Delivers), ClientId, Session)),
{ok, Channel#channel{pendings = NPendings}}; {ok, Channel#channel{pendings = NPendings}};
handle_deliver(Delivers, Channel = #channel{session = Session, handle_deliver(Delivers, Channel = #channel{session = Session,
@ -995,6 +1004,17 @@ handle_call({quota, Policy}, Channel) ->
Quota = emqx_limiter:init(Zone, Policy), Quota = emqx_limiter:init(Zone, Policy),
reply(ok, Channel#channel{quota = Quota}); reply(ok, Channel#channel{quota = Quota});
handle_call({keepalive, Interval}, Channel = #channel{keepalive = KeepAlive,
conninfo = ConnInfo}) ->
ClientId = info(clientid, Channel),
NKeepalive = emqx_keepalive:set(interval, Interval * 1000, KeepAlive),
NConnInfo = maps:put(keepalive, Interval, ConnInfo),
NChannel = Channel#channel{keepalive = NKeepalive, conninfo = NConnInfo},
SockInfo = maps:get(sockinfo, emqx_cm:get_chan_info(ClientId), #{}),
ChanInfo1 = info(NChannel),
emqx_cm:set_chan_info(ClientId, ChanInfo1#{sockinfo => SockInfo}),
reply(ok, reset_timer(alive_timer, NChannel));
handle_call(Req, Channel) -> handle_call(Req, Channel) ->
?SLOG(error, #{msg => "unexpected_call", call => Req}), ?SLOG(error, #{msg => "unexpected_call", call => Req}),
reply(ignored, Channel). reply(ignored, Channel).
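A hedged usage sketch of the new runtime keepalive update above, assuming the connection module registered for the channel exposes the same call/3 that emqx_cm uses elsewhere in this change; the set_keepalive/2 helper name and the 5000 ms timeout are illustrative only:

%% Illustrative only: bump a connected client's keepalive to IntervalSec seconds;
%% the request ends up in the {keepalive, Interval} clause above.
set_keepalive(ClientId, IntervalSec) when is_integer(IntervalSec), IntervalSec > 0 ->
    case emqx_cm:lookup_channels(ClientId) of
        [] ->
            {error, not_found};
        [ChanPid | _] ->
            case emqx_cm:get_chan_info(ClientId, ChanPid) of
                #{conninfo := #{conn_mod := ConnMod}} ->
                    ConnMod:call(ChanPid, {keepalive, IntervalSec}, 5000);
                _ ->
                    {error, not_found}
            end
    end.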
@ -1045,7 +1065,7 @@ handle_info(clean_authz_cache, Channel) ->
handle_info(die_if_test = Info, Channel) -> handle_info(die_if_test = Info, Channel) ->
die_if_test_compiled(), die_if_test_compiled(),
?LOG(error, "Unexpected info: ~p", [Info]), ?SLOG(error, #{msg => "unexpected_info", info => Info}),
{ok, Channel}; {ok, Channel};
handle_info(Info, Channel) -> handle_info(Info, Channel) ->
@ -1125,7 +1145,7 @@ handle_timeout(_TRef, expire_quota_limit, Channel) ->
{ok, clean_timer(quota_timer, Channel)}; {ok, clean_timer(quota_timer, Channel)};
handle_timeout(_TRef, Msg, Channel) -> handle_timeout(_TRef, Msg, Channel) ->
?SLOG(error, #{msg => "unexpected_timeout", timeout_message => Msg}), ?SLOG(error, #{msg => "unexpected_timeout", timeout_msg => Msg}),
{ok, Channel}. {ok, Channel}.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -1179,20 +1199,26 @@ terminate(_, #channel{conn_state = idle}) -> ok;
terminate(normal, Channel) -> terminate(normal, Channel) ->
run_terminate_hook(normal, Channel); run_terminate_hook(normal, Channel);
terminate({shutdown, kicked}, Channel) -> terminate({shutdown, kicked}, Channel) ->
_ = emqx_persistent_session:persist(Channel#channel.clientinfo,
Channel#channel.conninfo,
Channel#channel.session),
run_terminate_hook(kicked, Channel); run_terminate_hook(kicked, Channel);
terminate({shutdown, Reason}, Channel) when Reason =:= discarded; terminate({shutdown, Reason}, Channel) when Reason =:= discarded;
Reason =:= takeovered -> Reason =:= takeovered ->
run_terminate_hook(Reason, Channel); run_terminate_hook(Reason, Channel);
terminate(Reason, Channel = #channel{will_msg = WillMsg}) -> terminate(Reason, Channel = #channel{will_msg = WillMsg}) ->
(WillMsg =/= undefined) andalso publish_will_msg(WillMsg), (WillMsg =/= undefined) andalso publish_will_msg(WillMsg),
_ = emqx_persistent_session:persist(Channel#channel.clientinfo, (Reason =:= expired) andalso persist_if_session(Channel),
Channel#channel.conninfo,
Channel#channel.session),
run_terminate_hook(Reason, Channel). run_terminate_hook(Reason, Channel).
persist_if_session(#channel{session = Session} = Channel) ->
case emqx_session:is_session(Session) of
true ->
_ = emqx_persistent_session:persist(Channel#channel.clientinfo,
Channel#channel.conninfo,
Channel#channel.session),
ok;
false ->
ok
end.
run_terminate_hook(_Reason, #channel{session = undefined}) -> ok; run_terminate_hook(_Reason, #channel{session = undefined}) -> ok;
run_terminate_hook(Reason, #channel{clientinfo = ClientInfo, session = Session}) -> run_terminate_hook(Reason, #channel{clientinfo = ClientInfo, session = Session}) ->
emqx_session:terminate(ClientInfo, Reason, Session). emqx_session:terminate(ClientInfo, Reason, Session).
@ -1359,17 +1385,20 @@ authenticate(?AUTH_PACKET(_, #{'Authentication-Method' := AuthMethod} = Properti
{error, ?RC_BAD_AUTHENTICATION_METHOD} {error, ?RC_BAD_AUTHENTICATION_METHOD}
end. end.
do_authenticate(#{auth_method := AuthMethod} = Credential, #channel{clientinfo = ClientInfo} = Channel) -> do_authenticate(#{auth_method := AuthMethod} = Credential,
#channel{clientinfo = ClientInfo} = Channel) ->
Properties = #{'Authentication-Method' => AuthMethod}, Properties = #{'Authentication-Method' => AuthMethod},
case emqx_access_control:authenticate(Credential) of case emqx_access_control:authenticate(Credential) of
{ok, Result} -> {ok, Result} ->
{ok, Properties, {ok, Properties,
Channel#channel{clientinfo = ClientInfo#{is_superuser => maps:get(is_superuser, Result, false)}, Channel#channel{
auth_cache = #{}}}; clientinfo = ClientInfo#{is_superuser => maps:get(is_superuser, Result, false)},
auth_cache = #{}}};
{ok, Result, AuthData} -> {ok, Result, AuthData} ->
{ok, Properties#{'Authentication-Data' => AuthData}, {ok, Properties#{'Authentication-Data' => AuthData},
Channel#channel{clientinfo = ClientInfo#{is_superuser => maps:get(is_superuser, Result, false)}, Channel#channel{
auth_cache = #{}}}; clientinfo = ClientInfo#{is_superuser => maps:get(is_superuser, Result, false)},
auth_cache = #{}}};
{continue, AuthCache} -> {continue, AuthCache} ->
{continue, Properties, Channel#channel{auth_cache = AuthCache}}; {continue, Properties, Channel#channel{auth_cache = AuthCache}};
{continue, AuthData, AuthCache} -> {continue, AuthData, AuthCache} ->
@ -1606,6 +1635,8 @@ ensure_connected(Channel = #channel{conninfo = ConnInfo,
clientinfo = ClientInfo}) -> clientinfo = ClientInfo}) ->
NConnInfo = ConnInfo#{connected_at => erlang:system_time(millisecond)}, NConnInfo = ConnInfo#{connected_at => erlang:system_time(millisecond)},
ok = run_hooks('client.connected', [ClientInfo, NConnInfo]), ok = run_hooks('client.connected', [ClientInfo, NConnInfo]),
ChanPid = self(),
emqx_cm:mark_channel_connected(ChanPid),
Channel#channel{conninfo = NConnInfo, Channel#channel{conninfo = NConnInfo,
conn_state = connected conn_state = connected
}. }.
@ -1691,6 +1722,8 @@ ensure_disconnected(Reason, Channel = #channel{conninfo = ConnInfo,
clientinfo = ClientInfo}) -> clientinfo = ClientInfo}) ->
NConnInfo = ConnInfo#{disconnected_at => erlang:system_time(millisecond)}, NConnInfo = ConnInfo#{disconnected_at => erlang:system_time(millisecond)},
ok = run_hooks('client.disconnected', [ClientInfo, Reason, NConnInfo]), ok = run_hooks('client.disconnected', [ClientInfo, Reason, NConnInfo]),
ChanPid = self(),
emqx_cm:mark_channel_disconnected(ChanPid),
Channel#channel{conninfo = NConnInfo, conn_state = disconnected}. Channel#channel{conninfo = NConnInfo, conn_state = disconnected}.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
View File
@ -58,7 +58,10 @@
, lookup_channels/2 , lookup_channels/2
]). ]).
-export([all_channels/0]). %% Test/debug interface
-export([ all_channels/0
, all_client_ids/0
]).
%% gen_server callbacks %% gen_server callbacks
-export([ init/1 -export([ init/1
@ -70,7 +73,12 @@
]). ]).
%% Internal export %% Internal export
-export([stats_fun/0]). -export([ stats_fun/0
, clean_down/1
, mark_channel_connected/1
, mark_channel_disconnected/1
, get_connected_client_count/0
]).
-type(chan_pid() :: pid()). -type(chan_pid() :: pid()).
@ -78,11 +86,13 @@
-define(CHAN_TAB, emqx_channel). -define(CHAN_TAB, emqx_channel).
-define(CHAN_CONN_TAB, emqx_channel_conn). -define(CHAN_CONN_TAB, emqx_channel_conn).
-define(CHAN_INFO_TAB, emqx_channel_info). -define(CHAN_INFO_TAB, emqx_channel_info).
-define(CHAN_LIVE_TAB, emqx_channel_live).
-define(CHAN_STATS, -define(CHAN_STATS,
[{?CHAN_TAB, 'channels.count', 'channels.max'}, [{?CHAN_TAB, 'channels.count', 'channels.max'},
{?CHAN_TAB, 'sessions.count', 'sessions.max'}, {?CHAN_TAB, 'sessions.count', 'sessions.max'},
{?CHAN_CONN_TAB, 'connections.count', 'connections.max'} {?CHAN_CONN_TAB, 'connections.count', 'connections.max'},
{?CHAN_LIVE_TAB, 'live_connections.count', 'live_connections.max'}
]). ]).
%% Batch drain %% Batch drain
@ -91,7 +101,14 @@
%% Server name %% Server name
-define(CM, ?MODULE). -define(CM, ?MODULE).
-define(T_TAKEOVER, 15000). -define(T_KICK, 5_000).
-define(T_GET_INFO, 5_000).
-define(T_TAKEOVER, 15_000).
%% linting overrides
-elvis([ {elvis_style, invalid_dynamic_call, #{ignore => [emqx_cm]}}
, {elvis_style, god_modules, #{ignore => [emqx_cm]}}
]).
%% @doc Start the channel manager. %% @doc Start the channel manager.
-spec(start_link() -> startlink_ret()). -spec(start_link() -> startlink_ret()).
@ -162,7 +179,7 @@ get_chan_info(ClientId, ChanPid) when node(ChanPid) == node() ->
error:badarg -> undefined error:badarg -> undefined
end; end;
get_chan_info(ClientId, ChanPid) -> get_chan_info(ClientId, ChanPid) ->
rpc_call(node(ChanPid), get_chan_info, [ClientId, ChanPid]). rpc_call(node(ChanPid), get_chan_info, [ClientId, ChanPid], ?T_GET_INFO).
%% @doc Update infos of the channel. %% @doc Update infos of the channel.
-spec(set_chan_info(emqx_types:clientid(), emqx_types:attrs()) -> boolean()). -spec(set_chan_info(emqx_types:clientid(), emqx_types:attrs()) -> boolean()).
@ -187,7 +204,7 @@ get_chan_stats(ClientId, ChanPid) when node(ChanPid) == node() ->
error:badarg -> undefined error:badarg -> undefined
end; end;
get_chan_stats(ClientId, ChanPid) -> get_chan_stats(ClientId, ChanPid) ->
rpc_call(node(ChanPid), get_chan_stats, [ClientId, ChanPid]). rpc_call(node(ChanPid), get_chan_stats, [ClientId, ChanPid], ?T_GET_INFO).
%% @doc Set channel's stats. %% @doc Set channel's stats.
-spec(set_chan_stats(emqx_types:clientid(), emqx_types:stats()) -> boolean()). -spec(set_chan_stats(emqx_types:clientid(), emqx_types:stats()) -> boolean()).
@ -236,7 +253,10 @@ open_session(false, ClientInfo = #{clientid := ClientId}, ConnInfo) ->
pendings => Pendings}}; pendings => Pendings}};
{living, ConnMod, ChanPid, Session} -> {living, ConnMod, ChanPid, Session} ->
ok = emqx_session:resume(ClientInfo, Session), ok = emqx_session:resume(ClientInfo, Session),
Session1 = emqx_persistent_session:persist(ClientInfo, ConnInfo, Session), Session1 = emqx_persistent_session:persist( ClientInfo
, ConnInfo
, Session
),
Pendings = ConnMod:call(ChanPid, {takeover, 'end'}, ?T_TAKEOVER), Pendings = ConnMod:call(ChanPid, {takeover, 'end'}, ?T_TAKEOVER),
register_channel(ClientId, Self, ConnInfo), register_channel(ClientId, Self, ConnInfo),
{ok, #{session => Session1, {ok, #{session => Session1,
@ -245,12 +265,18 @@ open_session(false, ClientInfo = #{clientid := ClientId}, ConnInfo) ->
{expired, OldSession} -> {expired, OldSession} ->
_ = emqx_persistent_session:discard(ClientId, OldSession), _ = emqx_persistent_session:discard(ClientId, OldSession),
Session = create_session(ClientInfo, ConnInfo), Session = create_session(ClientInfo, ConnInfo),
Session1 = emqx_persistent_session:persist(ClientInfo, ConnInfo, Session), Session1 = emqx_persistent_session:persist( ClientInfo
, ConnInfo
, Session
),
register_channel(ClientId, Self, ConnInfo), register_channel(ClientId, Self, ConnInfo),
{ok, #{session => Session1, present => false}}; {ok, #{session => Session1, present => false}};
none -> none ->
Session = create_session(ClientInfo, ConnInfo), Session = create_session(ClientInfo, ConnInfo),
Session1 = emqx_persistent_session:persist(ClientInfo, ConnInfo, Session), Session1 = emqx_persistent_session:persist( ClientInfo
, ConnInfo
, Session
),
register_channel(ClientId, Self, ConnInfo), register_channel(ClientId, Self, ConnInfo),
{ok, #{session => Session1, present => false}} {ok, #{session => Session1, present => false}}
end end
@ -300,7 +326,7 @@ takeover_session(ClientId) ->
[ChanPid] -> [ChanPid] ->
takeover_session(ClientId, ChanPid); takeover_session(ClientId, ChanPid);
ChanPids -> ChanPids ->
[ChanPid|StalePids] = lists:reverse(ChanPids), [ChanPid | StalePids] = lists:reverse(ChanPids),
?SLOG(warning, #{msg => "more_than_one_channel_found", chan_pids => ChanPids}), ?SLOG(warning, #{msg => "more_than_one_channel_found", chan_pids => ChanPids}),
lists:foreach(fun(StalePid) -> lists:foreach(fun(StalePid) ->
catch discard_session(ClientId, StalePid) catch discard_session(ClientId, StalePid)
@ -308,82 +334,136 @@ takeover_session(ClientId) ->
takeover_session(ClientId, ChanPid) takeover_session(ClientId, ChanPid)
end. end.
takeover_session(ClientId, ChanPid) when node(ChanPid) == node() -> takeover_session(ClientId, Pid) ->
try do_takeover_session(ClientId, Pid)
catch
_ : noproc -> % emqx_ws_connection: call
emqx_persistent_session:lookup(ClientId);
_ : {noproc, _} -> % emqx_connection: gen_server:call
emqx_persistent_session:lookup(ClientId);
_ : {'EXIT', {noproc, _}} -> % rpc_call/3
emqx_persistent_session:lookup(ClientId)
end.
do_takeover_session(ClientId, ChanPid) when node(ChanPid) == node() ->
case get_chann_conn_mod(ClientId, ChanPid) of case get_chann_conn_mod(ClientId, ChanPid) of
undefined -> undefined ->
emqx_persistent_session:lookup(ClientId); emqx_persistent_session:lookup(ClientId);
ConnMod when is_atom(ConnMod) -> ConnMod when is_atom(ConnMod) ->
%% TODO: if takeover times out, maybe kill the old?
Session = ConnMod:call(ChanPid, {takeover, 'begin'}, ?T_TAKEOVER), Session = ConnMod:call(ChanPid, {takeover, 'begin'}, ?T_TAKEOVER),
{living, ConnMod, ChanPid, Session} {living, ConnMod, ChanPid, Session}
end; end;
do_takeover_session(ClientId, ChanPid) ->
takeover_session(ClientId, ChanPid) -> rpc_call(node(ChanPid), takeover_session, [ClientId, ChanPid], ?T_TAKEOVER).
rpc_call(node(ChanPid), takeover_session, [ClientId, ChanPid]).
%% @doc Discard all the sessions identified by the ClientId. %% @doc Discard all the sessions identified by the ClientId.
-spec(discard_session(emqx_types:clientid()) -> ok). -spec(discard_session(emqx_types:clientid()) -> ok).
discard_session(ClientId) when is_binary(ClientId) -> discard_session(ClientId) when is_binary(ClientId) ->
case lookup_channels(ClientId) of case lookup_channels(ClientId) of
[] -> ok; [] -> ok;
ChanPids -> lists:foreach(fun(Pid) -> do_discard_session(ClientId, Pid) end, ChanPids) ChanPids -> lists:foreach(fun(Pid) -> discard_session(ClientId, Pid) end, ChanPids)
end. end.
do_discard_session(ClientId, Pid) -> %% @private Kick a local stale session to force it to step down.
%% If kicking fails (e.g. timeout), force a kill.
%% Keeping the stale pid around, returning an error, or raising an exception
%% benefits nobody.
-spec kick_or_kill(kick | discard, module(), pid()) -> ok.
kick_or_kill(Action, ConnMod, Pid) ->
try try
discard_session(ClientId, Pid) %% this is essentially a gen_server:call implemented in emqx_connection
%% and emqx_ws_connection.
%% the handle_call is implemented in emqx_channel
ok = apply(ConnMod, call, [Pid, Action, ?T_KICK])
catch catch
_ : noproc -> % emqx_ws_connection: call _ : noproc -> % emqx_ws_connection: call
?tp(debug, "session_already_gone", #{pid => Pid}), ok = ?tp(debug, "session_already_gone", #{pid => Pid, action => Action});
ok;
_ : {noproc, _} -> % emqx_connection: gen_server:call _ : {noproc, _} -> % emqx_connection: gen_server:call
?tp(debug, "session_already_gone", #{pid => Pid}), ok = ?tp(debug, "session_already_gone", #{pid => Pid, action => Action});
ok; _ : {shutdown, _} ->
_ : {'EXIT', {noproc, _}} -> % rpc_call/3 ok = ?tp(debug, "session_already_shutdown", #{pid => Pid, action => Action});
?tp(debug, "session_already_gone", #{pid => Pid}),
ok;
_ : {{shutdown, _}, _} -> _ : {{shutdown, _}, _} ->
?tp(debug, "session_already_shutdown", #{pid => Pid}), ok = ?tp(debug, "session_already_shutdown", #{pid => Pid, action => Action});
ok; _ : {timeout, {gen_server, call, _}} ->
?tp(warning, "session_kick_timeout",
#{pid => Pid,
action => Action,
stale_channel => stale_channel_info(Pid)
}),
ok = force_kill(Pid);
_ : Error : St -> _ : Error : St ->
?tp(error, "failed_to_discard_session", ?tp(error, "session_kick_exception",
#{pid => Pid, reason => Error, stacktrace=>St}) #{pid => Pid,
action => Action,
reason => Error,
stacktrace => St,
stale_channel => stale_channel_info(Pid)
}),
ok = force_kill(Pid)
end. end.
discard_session(ClientId, ChanPid) when node(ChanPid) == node() -> force_kill(Pid) ->
case get_chann_conn_mod(ClientId, ChanPid) of exit(Pid, kill),
undefined -> ok; ok.
ConnMod when is_atom(ConnMod) ->
ConnMod:call(ChanPid, discard, ?T_TAKEOVER) stale_channel_info(Pid) ->
end; process_info(Pid, [status, message_queue_len, current_stacktrace]).
discard_session(ClientId, ChanPid) -> discard_session(ClientId, ChanPid) ->
rpc_call(node(ChanPid), discard_session, [ClientId, ChanPid]). kick_session(discard, ClientId, ChanPid).
kick_session(ClientId, ChanPid) ->
kick_session(kick, ClientId, ChanPid).
%% @private This function is shared for session 'kick' and 'discard' (as the first arg Action).
kick_session(Action, ClientId, ChanPid) when node(ChanPid) == node() ->
case get_chann_conn_mod(ClientId, ChanPid) of
undefined ->
%% already deregistered
ok;
ConnMod when is_atom(ConnMod) ->
ok = kick_or_kill(Action, ConnMod, ChanPid)
end;
kick_session(Action, ClientId, ChanPid) ->
%% call the remote node via the old API names because we do not know if it has
%% been upgraded to have kick_session/3
Function = case Action of
discard -> discard_session;
kick -> kick_session
end,
try
rpc_call(node(ChanPid), Function, [ClientId, ChanPid], ?T_KICK)
catch
Error : Reason ->
%% These should mostly be RPC failures.
%% However, if the node is still running the old version of the
%% code (prior to emqx app 4.3.10), some of the RPC handler
%% exceptions may get propagated to a new-version node
?SLOG(error, #{ msg => "failed_to_kick_session_on_remote_node"
, node => node(ChanPid)
, action => Action
, error => Error
, reason => Reason
})
end.
kick_session(ClientId) -> kick_session(ClientId) ->
case lookup_channels(ClientId) of case lookup_channels(ClientId) of
[] -> {error, not_found}; [] ->
[ChanPid] -> ?SLOG(warning, #{msg => "kicked_an_unknown_session",
kick_session(ClientId, ChanPid); clientid => ClientId}),
ok;
ChanPids -> ChanPids ->
[ChanPid|StalePids] = lists:reverse(ChanPids), case length(ChanPids) > 1 of
?SLOG(warning, #{msg => "more_than_one_channel_found", chan_pids => ChanPids}), true ->
lists:foreach(fun(StalePid) -> ?SLOG(warning, #{msg => "more_than_one_channel_found",
catch discard_session(ClientId, StalePid) chan_pids => ChanPids});
end, StalePids), false -> ok
kick_session(ClientId, ChanPid) end,
lists:foreach(fun(Pid) -> kick_session(ClientId, Pid) end, ChanPids)
end. end.
kick_session(ClientId, ChanPid) when node(ChanPid) == node() ->
case get_chan_info(ClientId, ChanPid) of
#{conninfo := #{conn_mod := ConnMod}} ->
ConnMod:call(ChanPid, kick, ?T_TAKEOVER);
undefined ->
{error, not_found}
end;
kick_session(ClientId, ChanPid) ->
rpc_call(node(ChanPid), kick_session, [ClientId, ChanPid]).
%% @doc Is clean start? %% @doc Is clean start?
% is_clean_start(#{clean_start := false}) -> false; % is_clean_start(#{clean_start := false}) -> false;
% is_clean_start(_Attrs) -> true. % is_clean_start(_Attrs) -> true.
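A hedged usage sketch of the reworked kick/discard flow shown above, assuming the client id exists; note that kick_session/1 now always returns ok and only logs a warning when no channel is found:

%% Illustrative only: drop any stale sessions registered under a client id,
%% then kick whatever channels remain; both calls are idempotent.
force_disconnect(ClientId) when is_binary(ClientId) ->
    ok = emqx_cm:discard_session(ClientId),
    ok = emqx_cm:kick_session(ClientId).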
@ -395,11 +475,17 @@ with_channel(ClientId, Fun) ->
Pids -> Fun(lists:last(Pids)) Pids -> Fun(lists:last(Pids))
end. end.
%% @doc Get all channels registered. %% @doc Get all registered channel pids. Debug/test interface
all_channels() -> all_channels() ->
Pat = [{{'_', '$1'}, [], ['$1']}], Pat = [{{'_', '$1'}, [], ['$1']}],
ets:select(?CHAN_TAB, Pat). ets:select(?CHAN_TAB, Pat).
%% @doc Get all registered clientIDs. Debug/test interface
all_client_ids() ->
Pat = [{{'$1', '_'}, [], ['$1']}],
ets:select(?CHAN_TAB, Pat).
%% @doc Lookup channels. %% @doc Lookup channels.
-spec(lookup_channels(emqx_types:clientid()) -> list(chan_pid())). -spec(lookup_channels(emqx_types:clientid()) -> list(chan_pid())).
lookup_channels(ClientId) -> lookup_channels(ClientId) ->
@ -419,10 +505,16 @@ lookup_channels(local, ClientId) ->
[ChanPid || {_, ChanPid} <- ets:lookup(?CHAN_TAB, ClientId)]. [ChanPid || {_, ChanPid} <- ets:lookup(?CHAN_TAB, ClientId)].
%% @private %% @private
rpc_call(Node, Fun, Args) -> rpc_call(Node, Fun, Args, Timeout) ->
case rpc:call(Node, ?MODULE, Fun, Args) of case rpc:call(Node, ?MODULE, Fun, Args, 2 * Timeout) of
{badrpc, Reason} -> error(Reason); {badrpc, Reason} ->
Res -> Res %% since emqx app 4.3.10, the 'kick' and 'discard' call handlers
%% should catch all exceptions and always return 'ok'.
%% This leaves 'badrpc' only possible when there is a problem
%% calling the remote node.
error({badrpc, Reason});
Res ->
Res
end. end.
%% @private %% @private
@ -437,8 +529,10 @@ init([]) ->
ok = emqx_tables:new(?CHAN_TAB, [bag, {read_concurrency, true} | TabOpts]), ok = emqx_tables:new(?CHAN_TAB, [bag, {read_concurrency, true} | TabOpts]),
ok = emqx_tables:new(?CHAN_CONN_TAB, [bag | TabOpts]), ok = emqx_tables:new(?CHAN_CONN_TAB, [bag | TabOpts]),
ok = emqx_tables:new(?CHAN_INFO_TAB, [set, compressed | TabOpts]), ok = emqx_tables:new(?CHAN_INFO_TAB, [set, compressed | TabOpts]),
ok = emqx_tables:new(?CHAN_LIVE_TAB, [set, {write_concurrency, true} | TabOpts]),
ok = emqx_stats:update_interval(chan_stats, fun ?MODULE:stats_fun/0), ok = emqx_stats:update_interval(chan_stats, fun ?MODULE:stats_fun/0),
{ok, #{chan_pmon => emqx_pmon:new()}}. State = #{chan_pmon => emqx_pmon:new()},
{ok, State}.
handle_call(Req, _From, State) -> handle_call(Req, _From, State) ->
?SLOG(error, #{msg => "unexpected_call", call => Req}), ?SLOG(error, #{msg => "unexpected_call", call => Req}),
@ -447,17 +541,21 @@ handle_call(Req, _From, State) ->
handle_cast({registered, {ClientId, ChanPid}}, State = #{chan_pmon := PMon}) -> handle_cast({registered, {ClientId, ChanPid}}, State = #{chan_pmon := PMon}) ->
PMon1 = emqx_pmon:monitor(ChanPid, ClientId, PMon), PMon1 = emqx_pmon:monitor(ChanPid, ClientId, PMon),
{noreply, State#{chan_pmon := PMon1}}; {noreply, State#{chan_pmon := PMon1}};
handle_cast(Msg, State) -> handle_cast(Msg, State) ->
?SLOG(error, #{msg => "unexpected_cast", cast => Msg}), ?SLOG(error, #{msg => "unexpected_cast", cast => Msg}),
{noreply, State}. {noreply, State}.
handle_info({'DOWN', _MRef, process, Pid, _Reason}, State = #{chan_pmon := PMon}) -> handle_info({'DOWN', _MRef, process, Pid, _Reason}, State = #{chan_pmon := PMon}) ->
?tp(emqx_cm_process_down, #{pid => Pid, reason => _Reason}),
ChanPids = [Pid | emqx_misc:drain_down(?BATCH_SIZE)], ChanPids = [Pid | emqx_misc:drain_down(?BATCH_SIZE)],
{Items, PMon1} = emqx_pmon:erase_all(ChanPids, PMon), {Items, PMon1} = emqx_pmon:erase_all(ChanPids, PMon),
ok = emqx_pool:async_submit(fun lists:foreach/2, [fun clean_down/1, Items]), lists:foreach(
fun({ChanPid, _ClientID}) ->
mark_channel_disconnected(ChanPid)
end,
Items),
ok = emqx_pool:async_submit(fun lists:foreach/2, [fun ?MODULE:clean_down/1, Items]),
{noreply, State#{chan_pmon := PMon1}}; {noreply, State#{chan_pmon := PMon1}};
handle_info(Info, State) -> handle_info(Info, State) ->
?SLOG(error, #{msg => "unexpected_info", info => Info}), ?SLOG(error, #{msg => "unexpected_info", info => Info}),
@ -492,5 +590,20 @@ get_chann_conn_mod(ClientId, ChanPid) when node(ChanPid) == node() ->
error:badarg -> undefined error:badarg -> undefined
end; end;
get_chann_conn_mod(ClientId, ChanPid) -> get_chann_conn_mod(ClientId, ChanPid) ->
rpc_call(node(ChanPid), get_chann_conn_mod, [ClientId, ChanPid]). rpc_call(node(ChanPid), get_chann_conn_mod, [ClientId, ChanPid], ?T_GET_INFO).
mark_channel_connected(ChanPid) ->
?tp(emqx_cm_connected_client_count_inc, #{}),
ets:insert_new(?CHAN_LIVE_TAB, {ChanPid, true}),
ok.
mark_channel_disconnected(ChanPid) ->
?tp(emqx_cm_connected_client_count_dec, #{}),
ets:delete(?CHAN_LIVE_TAB, ChanPid),
ok.
get_connected_client_count() ->
case ets:info(?CHAN_LIVE_TAB, size) of
undefined -> 0;
Size -> Size
end.
View File
@ -16,6 +16,7 @@
-module(emqx_config). -module(emqx_config).
-compile({no_auto_import, [get/0, get/1, put/2, erase/1]}). -compile({no_auto_import, [get/0, get/1, put/2, erase/1]}).
-elvis([{elvis_style, god_modules, disable}]).
-export([ init_load/1 -export([ init_load/1
, init_load/2 , init_load/2
@ -138,10 +139,9 @@ get(KeyPath, Default) -> do_get(?CONF, KeyPath, Default).
{ok, term()} | {not_found, emqx_map_lib:config_key_path(), term()}. {ok, term()} | {not_found, emqx_map_lib:config_key_path(), term()}.
find([]) -> find([]) ->
Ref = make_ref(), Ref = make_ref(),
Res = do_get(?CONF, [], Ref), case do_get(?CONF, [], Ref) of
case Res =:= Ref of Ref -> {not_found, []};
true -> {not_found, []}; Res -> {ok, Res}
false -> {ok, Res}
end; end;
find(KeyPath) -> find(KeyPath) ->
?ATOM_CONF_PATH(KeyPath, emqx_map_lib:deep_find(AtomKeyPath, get_root(KeyPath)), ?ATOM_CONF_PATH(KeyPath, emqx_map_lib:deep_find(AtomKeyPath, get_root(KeyPath)),
@ -151,10 +151,9 @@ find(KeyPath) ->
{ok, term()} | {not_found, emqx_map_lib:config_key_path(), term()}. {ok, term()} | {not_found, emqx_map_lib:config_key_path(), term()}.
find_raw([]) -> find_raw([]) ->
Ref = make_ref(), Ref = make_ref(),
Res = do_get(?RAW_CONF, [], Ref), case do_get(?RAW_CONF, [], Ref) of
case Res =:= Ref of Ref -> {not_found, []};
true -> {not_found, []}; Res -> {ok, Res}
false -> {ok, Res}
end; end;
find_raw(KeyPath) -> find_raw(KeyPath) ->
emqx_map_lib:deep_find([bin(Key) || Key <- KeyPath], get_root_raw(KeyPath)). emqx_map_lib:deep_find([bin(Key) || Key <- KeyPath], get_root_raw(KeyPath)).
@ -288,8 +287,7 @@ check_config(SchemaMod, RawConf) ->
}, },
{AppEnvs, CheckedConf} = {AppEnvs, CheckedConf} =
hocon_schema:map_translate(SchemaMod, RawConf, Opts), hocon_schema:map_translate(SchemaMod, RawConf, Opts),
Conf = maps:with(maps:keys(RawConf), CheckedConf), {AppEnvs, emqx_map_lib:unsafe_atom_key_map(CheckedConf)}.
{AppEnvs, emqx_map_lib:unsafe_atom_key_map(Conf)}.
-spec fill_defaults(raw_config()) -> map(). -spec fill_defaults(raw_config()) -> map().
fill_defaults(RawConf) -> fill_defaults(RawConf) ->
@ -349,7 +347,8 @@ get_root_names() ->
get_atom_root_names() -> get_atom_root_names() ->
[atom(N) || N <- get_root_names()]. [atom(N) || N <- get_root_names()].
-spec save_configs(app_envs(), config(), raw_config(), raw_config(), update_opts()) -> ok | {error, term()}. -spec save_configs(app_envs(), config(), raw_config(), raw_config(), update_opts()) ->
ok | {error, term()}.
save_configs(_AppEnvs, Conf, RawConf, OverrideConf, Opts) -> save_configs(_AppEnvs, Conf, RawConf, OverrideConf, Opts) ->
%% We may also need to support hot config updates for the apps that use application envs. %% We may also need to support hot config updates for the apps that use application envs.
%% If that is the case, uncomment the following line to update the configs to app env %% If that is the case, uncomment the following line to update the configs to app env
View File
@ -45,14 +45,14 @@
-type handler_name() :: module(). -type handler_name() :: module().
-type handlers() :: #{emqx_config:config_key() => handlers(), ?MOD => handler_name()}. -type handlers() :: #{emqx_config:config_key() => handlers(), ?MOD => handler_name()}.
-optional_callbacks([ pre_config_update/2 -optional_callbacks([ pre_config_update/3
, post_config_update/4 , post_config_update/5
]). ]).
-callback pre_config_update(emqx_config:update_request(), emqx_config:raw_config()) -> -callback pre_config_update([atom()], emqx_config:update_request(), emqx_config:raw_config()) ->
{ok, emqx_config:update_request()} | {error, term()}. {ok, emqx_config:update_request()} | {error, term()}.
-callback post_config_update(emqx_config:update_request(), emqx_config:config(), -callback post_config_update([atom()], emqx_config:update_request(), emqx_config:config(),
emqx_config:config(), emqx_config:app_envs()) -> emqx_config:config(), emqx_config:app_envs()) ->
ok | {ok, Result::any()} | {error, Reason::term()}. ok | {ok, Result::any()} | {error, Reason::term()}.
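A hedged sketch of a handler module written against the new callback arities, which now receive the config key path as an extra first argument; the module name and the [my_app, limits] root are hypothetical:

%% Illustrative only: a config handler for a hypothetical [my_app, limits] root.
-module(my_app_config_handler).
-behaviour(emqx_config_handler).

-export([ pre_config_update/3
        , post_config_update/5
        ]).

%% Reject negative limits before they are validated and written.
pre_config_update([my_app, limits], #{<<"max">> := Max}, _OldRawConf)
  when is_integer(Max), Max < 0 ->
    {error, negative_limit};
pre_config_update([my_app, limits], UpdateReq, _OldRawConf) ->
    {ok, UpdateReq}.

%% Report whether the checked config actually changed.
post_config_update([my_app, limits], _UpdateReq, NewConf, OldConf, _AppEnvs) ->
    case NewConf =:= OldConf of
        true  -> ok;
        false -> {ok, #{changed => true}}
    end.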
@ -181,14 +181,20 @@ process_update_request(ConfKeyPath, Handlers, {{update, UpdateReq}, Opts}) ->
Error -> Error Error -> Error
end. end.
do_update_config([], Handlers, OldRawConf, UpdateReq) -> do_update_config(ConfKeyPath, Handlers, OldRawConf, UpdateReq) ->
call_pre_config_update(Handlers, OldRawConf, UpdateReq); do_update_config(ConfKeyPath, Handlers, OldRawConf, UpdateReq, []).
do_update_config([ConfKey | ConfKeyPath], Handlers, OldRawConf, UpdateReq) ->
do_update_config([], Handlers, OldRawConf, UpdateReq, ConfKeyPath) ->
call_pre_config_update(Handlers, OldRawConf, UpdateReq, ConfKeyPath);
do_update_config([ConfKey | SubConfKeyPath], Handlers, OldRawConf,
UpdateReq, ConfKeyPath0) ->
ConfKeyPath = ConfKeyPath0 ++ [ConfKey],
SubOldRawConf = get_sub_config(bin(ConfKey), OldRawConf), SubOldRawConf = get_sub_config(bin(ConfKey), OldRawConf),
SubHandlers = get_sub_handlers(ConfKey, Handlers), SubHandlers = get_sub_handlers(ConfKey, Handlers),
case do_update_config(ConfKeyPath, SubHandlers, SubOldRawConf, UpdateReq) of case do_update_config(SubConfKeyPath, SubHandlers, SubOldRawConf, UpdateReq, ConfKeyPath) of
{ok, NewUpdateReq} -> {ok, NewUpdateReq} ->
call_pre_config_update(Handlers, OldRawConf, #{bin(ConfKey) => NewUpdateReq}); call_pre_config_update(Handlers, OldRawConf, #{bin(ConfKey) => NewUpdateReq},
ConfKeyPath);
Error -> Error ->
Error Error
end. end.
@ -211,18 +217,25 @@ check_and_save_configs(SchemaModule, ConfKeyPath, Handlers, NewRawConf, Override
Error -> Error Error -> Error
end. end.
do_post_config_update([], Handlers, OldConf, NewConf, AppEnvs, UpdateArgs, Result) -> do_post_config_update(ConfKeyPath, Handlers, OldConf, NewConf, AppEnvs, UpdateArgs, Result) ->
call_post_config_update(Handlers, OldConf, NewConf, AppEnvs, up_req(UpdateArgs), Result); do_post_config_update(ConfKeyPath, Handlers, OldConf, NewConf, AppEnvs, UpdateArgs,
do_post_config_update([ConfKey | ConfKeyPath], Handlers, OldConf, NewConf, AppEnvs, UpdateArgs, Result, []).
Result) ->
do_post_config_update([], Handlers, OldConf, NewConf, AppEnvs, UpdateArgs, Result,
ConfKeyPath) ->
call_post_config_update(Handlers, OldConf, NewConf, AppEnvs, up_req(UpdateArgs),
Result, ConfKeyPath);
do_post_config_update([ConfKey | SubConfKeyPath], Handlers, OldConf, NewConf, AppEnvs,
UpdateArgs, Result, ConfKeyPath0) ->
ConfKeyPath = ConfKeyPath0 ++ [ConfKey],
SubOldConf = get_sub_config(ConfKey, OldConf), SubOldConf = get_sub_config(ConfKey, OldConf),
SubNewConf = get_sub_config(ConfKey, NewConf), SubNewConf = get_sub_config(ConfKey, NewConf),
SubHandlers = get_sub_handlers(ConfKey, Handlers), SubHandlers = get_sub_handlers(ConfKey, Handlers),
case do_post_config_update(ConfKeyPath, SubHandlers, SubOldConf, SubNewConf, AppEnvs, case do_post_config_update(SubConfKeyPath, SubHandlers, SubOldConf, SubNewConf, AppEnvs,
UpdateArgs, Result) of UpdateArgs, Result, ConfKeyPath) of
{ok, Result1} -> {ok, Result1} ->
call_post_config_update(Handlers, OldConf, NewConf, AppEnvs, up_req(UpdateArgs), call_post_config_update(Handlers, OldConf, NewConf, AppEnvs, up_req(UpdateArgs),
Result1); Result1, ConfKeyPath);
Error -> Error Error -> Error
end. end.
@ -237,22 +250,23 @@ get_sub_config(ConfKey, Conf) when is_map(Conf) ->
get_sub_config(_, _Conf) -> %% the Conf is a primitive get_sub_config(_, _Conf) -> %% the Conf is a primitive
undefined. undefined.
call_pre_config_update(Handlers, OldRawConf, UpdateReq) -> call_pre_config_update(Handlers, OldRawConf, UpdateReq, ConfKeyPath) ->
HandlerName = maps:get(?MOD, Handlers, undefined), HandlerName = maps:get(?MOD, Handlers, undefined),
case erlang:function_exported(HandlerName, pre_config_update, 2) of case erlang:function_exported(HandlerName, pre_config_update, 3) of
true -> true ->
case HandlerName:pre_config_update(UpdateReq, OldRawConf) of case HandlerName:pre_config_update(ConfKeyPath, UpdateReq, OldRawConf) of
{ok, NewUpdateReq} -> {ok, NewUpdateReq}; {ok, NewUpdateReq} -> {ok, NewUpdateReq};
{error, Reason} -> {error, {pre_config_update, HandlerName, Reason}} {error, Reason} -> {error, {pre_config_update, HandlerName, Reason}}
end; end;
false -> merge_to_old_config(UpdateReq, OldRawConf) false -> merge_to_old_config(UpdateReq, OldRawConf)
end. end.
call_post_config_update(Handlers, OldConf, NewConf, AppEnvs, UpdateReq, Result) -> call_post_config_update(Handlers, OldConf, NewConf, AppEnvs, UpdateReq, Result, ConfKeyPath) ->
HandlerName = maps:get(?MOD, Handlers, undefined), HandlerName = maps:get(?MOD, Handlers, undefined),
case erlang:function_exported(HandlerName, post_config_update, 4) of case erlang:function_exported(HandlerName, post_config_update, 5) of
true -> true ->
case HandlerName:post_config_update(UpdateReq, NewConf, OldConf, AppEnvs) of case HandlerName:post_config_update(ConfKeyPath, UpdateReq, NewConf, OldConf,
AppEnvs) of
ok -> {ok, Result}; ok -> {ok, Result};
{ok, Result1} -> {ok, Result1} ->
{ok, Result#{HandlerName => Result1}}; {ok, Result#{HandlerName => Result1}};
View File
@ -78,13 +78,15 @@ cancel_alarm_congestion(Socket, Transport, Channel, Reason) ->
do_alarm_congestion(Socket, Transport, Channel, Reason) -> do_alarm_congestion(Socket, Transport, Channel, Reason) ->
ok = update_alarm_sent_at(Reason), ok = update_alarm_sent_at(Reason),
AlarmDetails = tcp_congestion_alarm_details(Socket, Transport, Channel), AlarmDetails = tcp_congestion_alarm_details(Socket, Transport, Channel),
emqx_alarm:activate(?ALARM_CONN_CONGEST(Channel, Reason), AlarmDetails), Message = io_lib:format("connection congested: ~ts", [AlarmDetails]),
emqx_alarm:activate(?ALARM_CONN_CONGEST(Channel, Reason), AlarmDetails, Message),
ok. ok.
do_cancel_alarm_congestion(Socket, Transport, Channel, Reason) -> do_cancel_alarm_congestion(Socket, Transport, Channel, Reason) ->
ok = remove_alarm_sent_at(Reason), ok = remove_alarm_sent_at(Reason),
AlarmDetails = tcp_congestion_alarm_details(Socket, Transport, Channel), AlarmDetails = tcp_congestion_alarm_details(Socket, Transport, Channel),
emqx_alarm:deactivate(?ALARM_CONN_CONGEST(Channel, Reason), AlarmDetails), Message = io_lib:format("connection congested: ~ts", [AlarmDetails]),
emqx_alarm:deactivate(?ALARM_CONN_CONGEST(Channel, Reason), AlarmDetails, Message),
ok. ok.
is_tcp_congested(Socket, Transport) -> is_tcp_congested(Socket, Transport) ->
View File
@ -149,7 +149,7 @@ start_link(Transport, Socket, Options) ->
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% @doc Get infos of the connection/channel. %% @doc Get infos of the connection/channel.
-spec(info(pid()|state()) -> emqx_types:infos()). -spec(info(pid() | state()) -> emqx_types:infos()).
info(CPid) when is_pid(CPid) -> info(CPid) when is_pid(CPid) ->
call(CPid, info); call(CPid, info);
info(State = #state{channel = Channel}) -> info(State = #state{channel = Channel}) ->
@ -176,7 +176,7 @@ info(limiter, #state{limiter = Limiter}) ->
maybe_apply(fun emqx_limiter:info/1, Limiter). maybe_apply(fun emqx_limiter:info/1, Limiter).
%% @doc Get stats of the connection/channel. %% @doc Get stats of the connection/channel.
-spec(stats(pid()|state()) -> emqx_types:stats()). -spec(stats(pid() | state()) -> emqx_types:stats()).
stats(CPid) when is_pid(CPid) -> stats(CPid) when is_pid(CPid) ->
call(CPid, stats); call(CPid, stats);
stats(#state{transport = Transport, stats(#state{transport = Transport,
@ -373,7 +373,7 @@ cancel_stats_timer(State) -> State.
process_msg([], State) -> process_msg([], State) ->
{ok, State}; {ok, State};
process_msg([Msg|More], State) -> process_msg([Msg | More], State) ->
try try
case handle_msg(Msg, State) of case handle_msg(Msg, State) of
ok -> ok ->
@ -475,7 +475,7 @@ handle_msg({Passive, _Sock}, State)
handle_msg(Deliver = {deliver, _Topic, _Msg}, #state{ handle_msg(Deliver = {deliver, _Topic, _Msg}, #state{
listener = {Type, Listener}} = State) -> listener = {Type, Listener}} = State) ->
ActiveN = get_active_n(Type, Listener), ActiveN = get_active_n(Type, Listener),
Delivers = [Deliver|emqx_misc:drain_deliver(ActiveN)], Delivers = [Deliver | emqx_misc:drain_deliver(ActiveN)],
with_channel(handle_deliver, [Delivers], State); with_channel(handle_deliver, [Delivers], State);
%% Something sent %% Something sent
@ -540,7 +540,7 @@ terminate(Reason, State = #state{channel = Channel, transport = Transport,
?tp(warning, unclean_terminate, #{exception => E, context => C, stacktrace => S}) ?tp(warning, unclean_terminate, #{exception => E, context => C, stacktrace => S})
end, end,
?tp(info, terminate, #{reason => Reason}), ?tp(info, terminate, #{reason => Reason}),
maybe_raise_excption(Reason). maybe_raise_exception(Reason).
%% close socket, discard new state, always return ok. %% close socket, discard new state, always return ok.
close_socket_ok(State) -> close_socket_ok(State) ->
@ -548,12 +548,12 @@ close_socket_ok(State) ->
ok. ok.
%% tell truth about the original exception %% tell truth about the original exception
maybe_raise_excption(#{exception := Exception, maybe_raise_exception(#{exception := Exception,
context := Context, context := Context,
stacktrace := Stacktrace stacktrace := Stacktrace
}) -> }) ->
erlang:raise(Exception, Context, Stacktrace); erlang:raise(Exception, Context, Stacktrace);
maybe_raise_excption(Reason) -> maybe_raise_exception(Reason) ->
exit(Reason). exit(Reason).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -649,7 +649,7 @@ parse_incoming(Data, Packets, State = #state{parse_state = ParseState}) ->
{Packets, State#state{parse_state = NParseState}}; {Packets, State#state{parse_state = NParseState}};
{ok, Packet, Rest, NParseState} -> {ok, Packet, Rest, NParseState} ->
NState = State#state{parse_state = NParseState}, NState = State#state{parse_state = NParseState},
parse_incoming(Rest, [Packet|Packets], NState) parse_incoming(Rest, [Packet | Packets], NState)
catch catch
throw : ?FRAME_PARSE_ERROR(Reason) -> throw : ?FRAME_PARSE_ERROR(Reason) ->
?SLOG(info, #{ reason => Reason ?SLOG(info, #{ reason => Reason
@ -679,7 +679,7 @@ next_incoming_msgs(Packets) ->
handle_incoming(Packet, State) when is_record(Packet, mqtt_packet) -> handle_incoming(Packet, State) when is_record(Packet, mqtt_packet) ->
ok = inc_incoming_stats(Packet), ok = inc_incoming_stats(Packet),
?SLOG(debug, #{msg => "RECV_packet", packet => Packet}), ?SLOG(debug, #{msg => "RECV_packet", packet => emqx_packet:format(Packet)}),
with_channel(handle_in, [Packet], State); with_channel(handle_in, [Packet], State);
handle_incoming(FrameError, State) -> handle_incoming(FrameError, State) ->
@ -752,7 +752,7 @@ send(IoData, #state{transport = Transport, socket = Socket, channel = Channel})
ok = emqx_metrics:inc('bytes.sent', Oct), ok = emqx_metrics:inc('bytes.sent', Oct),
inc_counter(outgoing_bytes, Oct), inc_counter(outgoing_bytes, Oct),
emqx_congestion:maybe_alarm_conn_congestion(Socket, Transport, Channel), emqx_congestion:maybe_alarm_conn_congestion(Socket, Transport, Channel),
case Transport:async_send(Socket, IoData, [nosuspend]) of case Transport:async_send(Socket, IoData, []) of
ok -> ok; ok -> ok;
Error = {error, _Reason} -> Error = {error, _Reason} ->
%% Send an inet_reply to postpone handling the error %% Send an inet_reply to postpone handling the error
View File
@ -129,7 +129,8 @@ handle_cast({detected, #flapping{clientid = ClientId,
reason = <<"flapping is detected">>, reason = <<"flapping is detected">>,
at = Now, at = Now,
until = Now + (Interval div 1000)}, until = Now + (Interval div 1000)},
emqx_banned:create(Banned); {ok, _} = emqx_banned:create(Banned),
ok;
false -> false ->
?SLOG(warning, #{ ?SLOG(warning, #{
msg => "client_disconnected", msg => "client_disconnected",
View File
@ -77,6 +77,8 @@
priority :: integer() priority :: integer()
}). }).
-type(callback() :: #callback{}).
-record(hook, { -record(hook, {
name :: hookpoint(), name :: hookpoint(),
callbacks :: list(#callback{}) callbacks :: list(#callback{})
@ -112,7 +114,7 @@ callback_priority(#callback{priority= P}) -> P.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% @doc Register a callback %% @doc Register a callback
-spec(add(hookpoint(), action() | #callback{}) -> ok_or_error(already_exists)). -spec(add(hookpoint(), action() | callback()) -> ok_or_error(already_exists)).
add(HookPoint, Callback) when is_record(Callback, callback) -> add(HookPoint, Callback) when is_record(Callback, callback) ->
gen_server:call(?SERVER, {add, HookPoint, Callback}, infinity); gen_server:call(?SERVER, {add, HookPoint, Callback}, infinity);
add(HookPoint, Action) when is_function(Action); is_tuple(Action) -> add(HookPoint, Action) when is_function(Action); is_tuple(Action) ->
@ -131,7 +133,7 @@ add(HookPoint, Action, Filter, Priority) when is_integer(Priority) ->
add(HookPoint, #callback{action = Action, filter = Filter, priority = Priority}). add(HookPoint, #callback{action = Action, filter = Filter, priority = Priority}).
%% @doc Like add/2, it register a callback, discard 'already_exists' error. %% @doc Like add/2, it register a callback, discard 'already_exists' error.
-spec(put(hookpoint(), action() | #callback{}) -> ok). -spec(put(hookpoint(), action() | callback()) -> ok).
put(HookPoint, Callback) when is_record(Callback, callback) -> put(HookPoint, Callback) when is_record(Callback, callback) ->
case add(HookPoint, Callback) of case add(HookPoint, Callback) of
ok -> ok; ok -> ok;
@ -211,7 +213,7 @@ safe_execute({M, F, A}, Args) ->
exception => Error, exception => Error,
reason => Reason, reason => Reason,
stacktrace => Stacktrace, stacktrace => Stacktrace,
failed_call => {M, F, A} failed_call => {M, F, Args ++ A}
}) })
end. end.
@ -220,7 +222,7 @@ execute({M, F, A}, Args) ->
erlang:apply(M, F, Args ++ A). erlang:apply(M, F, Args ++ A).
%% @doc Lookup callbacks. %% @doc Lookup callbacks.
-spec(lookup(hookpoint()) -> [#callback{}]). -spec(lookup(hookpoint()) -> [callback()]).
lookup(HookPoint) -> lookup(HookPoint) ->
case ets:lookup(?TAB, HookPoint) of case ets:lookup(?TAB, HookPoint) of
[#hook{callbacks = Callbacks}] -> [#hook{callbacks = Callbacks}] ->
@ -292,10 +294,10 @@ add_callback(C, Callbacks) ->
add_callback(C, Callbacks, []). add_callback(C, Callbacks, []).
add_callback(C, [], Acc) -> add_callback(C, [], Acc) ->
lists:reverse([C|Acc]); lists:reverse([C | Acc]);
add_callback(C1 = #callback{priority = P1}, [C2 = #callback{priority = P2}|More], Acc) add_callback(C1 = #callback{priority = P1}, [C2 = #callback{priority = P2} | More], Acc)
when P1 =< P2 -> when P1 =< P2 ->
add_callback(C1, More, [C2|Acc]); add_callback(C1, More, [C2 | Acc]);
add_callback(C1, More, Acc) -> add_callback(C1, More, Acc) ->
lists:append(lists:reverse(Acc), [C1 | More]). lists:append(lists:reverse(Acc), [C1 | More]).
@ -310,4 +312,3 @@ del_callback(Action = {M, F}, [#callback{action = {M, F, _A}} | Callbacks], Acc)
del_callback(Action, Callbacks, Acc); del_callback(Action, Callbacks, Acc);
del_callback(Action, [Callback | Callbacks], Acc) -> del_callback(Action, [Callback | Callbacks], Acc) ->
del_callback(Action, Callbacks, [Callback | Acc]). del_callback(Action, Callbacks, [Callback | Acc]).
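A hedged usage sketch of the registration API documented above, assuming a my_mod:on_publish/1 callback exists; the hook point and callback are illustrative only:

%% Illustrative only: register an {M, F, A} callback on 'message.publish';
%% a second registration of the same action is rejected with already_exists.
hook_demo() ->
    ok = emqx_hooks:add('message.publish', {my_mod, on_publish, []}),
    {error, already_exists} = emqx_hooks:add('message.publish', {my_mod, on_publish, []}).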
View File
@ -20,8 +20,11 @@
, info/1 , info/1
, info/2 , info/2
, check/2 , check/2
, set/3
]). ]).
-elvis([{elvis_style, no_if_expression, disable}]).
-export_type([keepalive/0]). -export_type([keepalive/0]).
-record(keepalive, { -record(keepalive, {
@ -49,7 +52,7 @@ info(#keepalive{interval = Interval,
repeat => Repeat repeat => Repeat
}. }.
-spec(info(interval|statval|repeat, keepalive()) -spec(info(interval | statval | repeat, keepalive())
-> non_neg_integer()). -> non_neg_integer()).
info(interval, #keepalive{interval = Interval}) -> info(interval, #keepalive{interval = Interval}) ->
Interval; Interval;
@ -71,3 +74,7 @@ check(NewVal, KeepAlive = #keepalive{statval = OldVal,
true -> {error, timeout} true -> {error, timeout}
end. end.
%% @doc Update keepalive's interval
-spec(set(interval, non_neg_integer(), keepalive()) -> keepalive()).
set(interval, Interval, KeepAlive) ->
KeepAlive#keepalive{interval = Interval}.
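A minimal sketch of the new set/3 next to the existing accessors, given an existing keepalive() value; the 30000 ms figure is illustrative only:

%% Illustrative only: widen the interval at runtime and read it back.
bump_keepalive(KeepAlive) ->
    KeepAlive1 = emqx_keepalive:set(interval, 30000, KeepAlive),
    30000 = emqx_keepalive:info(interval, KeepAlive1),
    KeepAlive1.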
View File
@ -17,6 +17,8 @@
%% @doc Start/Stop MQTT listeners. %% @doc Start/Stop MQTT listeners.
-module(emqx_listeners). -module(emqx_listeners).
-elvis([{elvis_style, dont_repeat_yourself, #{min_complexity => 10000}}]).
-include("emqx_mqtt.hrl"). -include("emqx_mqtt.hrl").
-include("logger.hrl"). -include("logger.hrl").
@ -28,6 +30,7 @@
, is_running/1 , is_running/1
, current_conns/2 , current_conns/2
, max_conns/2 , max_conns/2
, id_example/0
]). ]).
-export([ start_listener/1 -export([ start_listener/1
@ -43,11 +46,23 @@
, parse_listener_id/1 , parse_listener_id/1
]). ]).
-export([post_config_update/4]). -export([post_config_update/5]).
-define(CONF_KEY_PATH, [listeners]). -define(CONF_KEY_PATH, [listeners]).
-define(TYPES_STRING, ["tcp","ssl","ws","wss","quic"]). -define(TYPES_STRING, ["tcp","ssl","ws","wss","quic"]).
-spec(id_example() -> atom()).
id_example() ->
id_example(list()).
id_example([]) ->
{ID, _} = hd(list()),
ID;
id_example([{'tcp:default', _} | _]) ->
'tcp:default';
id_example([_ | Listeners]) ->
id_example(Listeners).
%% @doc List configured listeners. %% @doc List configured listeners.
-spec(list() -> [{ListenerId :: atom(), ListenerConf :: map()}]). -spec(list() -> [{ListenerId :: atom(), ListenerConf :: map()}]).
list() -> list() ->
@ -235,10 +250,10 @@ do_start_listener(quic, ListenerName, #{bind := ListenOn} = Opts) ->
, {key, maps:get(keyfile, Opts)} , {key, maps:get(keyfile, Opts)}
, {alpn, ["mqtt"]} , {alpn, ["mqtt"]}
, {conn_acceptors, lists:max([DefAcceptors, maps:get(acceptors, Opts, 0)])} , {conn_acceptors, lists:max([DefAcceptors, maps:get(acceptors, Opts, 0)])}
, {idle_timeout_ms, lists:max([ , {idle_timeout_ms,
emqx_config:get_zone_conf(zone(Opts), [mqtt, idle_timeout]) * 3 lists:max([
, timer:seconds(maps:get(idle_timeout, Opts))] emqx_config:get_zone_conf(zone(Opts), [mqtt, idle_timeout]) * 3,
)} timer:seconds(maps:get(idle_timeout, Opts))])}
], ],
ConnectionOpts = #{ conn_callback => emqx_quic_connection ConnectionOpts = #{ conn_callback => emqx_quic_connection
, peer_unidi_stream_count => 1 , peer_unidi_stream_count => 1
@ -257,7 +272,7 @@ delete_authentication(Type, ListenerName, _Conf) ->
emqx_authentication:delete_chain(listener_id(Type, ListenerName)). emqx_authentication:delete_chain(listener_id(Type, ListenerName)).
%% Update the listeners at runtime %% Update the listeners at runtime
post_config_update(_Req, NewListeners, OldListeners, _AppEnvs) -> post_config_update(_, _Req, NewListeners, OldListeners, _AppEnvs) ->
#{added := Added, removed := Removed, changed := Updated} #{added := Added, removed := Removed, changed := Updated}
= diff_listeners(NewListeners, OldListeners), = diff_listeners(NewListeners, OldListeners),
perform_listener_changes(fun stop_listener/3, Removed), perform_listener_changes(fun stop_listener/3, Removed),
@ -281,7 +296,8 @@ flatten_listeners(Conf0) ->
|| {Type, Conf} <- maps:to_list(Conf0)])). || {Type, Conf} <- maps:to_list(Conf0)])).
do_flatten_listeners(Type, Conf0) -> do_flatten_listeners(Type, Conf0) ->
[{listener_id(Type, Name), maps:remove(authentication, Conf)} || {Name, Conf} <- maps:to_list(Conf0)]. [{listener_id(Type, Name), maps:remove(authentication, Conf)} ||
{Name, Conf} <- maps:to_list(Conf0)].
esockd_opts(Type, Opts0) -> esockd_opts(Type, Opts0) ->
Opts1 = maps:with([acceptors, max_connections, proxy_protocol, proxy_protocol_timeout], Opts0), Opts1 = maps:with([acceptors, max_connections, proxy_protocol, proxy_protocol_timeout], Opts0),
@ -352,10 +368,13 @@ listener_id(Type, ListenerName) ->
list_to_atom(lists:append([str(Type), ":", str(ListenerName)])). list_to_atom(lists:append([str(Type), ":", str(ListenerName)])).
parse_listener_id(Id) -> parse_listener_id(Id) ->
[Type, Name] = string:split(str(Id), ":", leading), case string:split(str(Id), ":", leading) of
case lists:member(Type, ?TYPES_STRING) of [Type, Name] ->
true -> {list_to_existing_atom(Type), list_to_atom(Name)}; case lists:member(Type, ?TYPES_STRING) of
false -> {error, {invalid_listener_id, Id}} true -> {list_to_existing_atom(Type), list_to_atom(Name)};
false -> {error, {invalid_listener_id, Id}}
end;
_ -> {error, {invalid_listener_id, Id}}
end. end.
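A hedged sketch of the now more defensive id parsing, assuming the 'tcp' listener type atom already exists; the ill-formed id literal is illustrative only:

%% Illustrative only: a well-formed id parses into {Type, Name}; anything
%% without a type prefix now yields an error tuple instead of a crash.
parse_demo() ->
    {tcp, default} = emqx_listeners:parse_listener_id('tcp:default'),
    {error, {invalid_listener_id, "no-colon"}} = emqx_listeners:parse_listener_id("no-colon").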
zone(Opts) -> zone(Opts) ->
View File
@ -20,6 +20,7 @@
-behaviour(gen_server). -behaviour(gen_server).
-behaviour(emqx_config_handler). -behaviour(emqx_config_handler).
-elvis([{elvis_style, god_modules, disable}]).
%% gen_server callbacks %% gen_server callbacks
-export([ start_link/0 -export([ start_link/0
@ -70,7 +71,7 @@
, stop_log_handler/1 , stop_log_handler/1
]). ]).
-export([post_config_update/4]). -export([post_config_update/5]).
-type(peername_str() :: list()). -type(peername_str() :: list()).
-type(logger_dst() :: file:filename() | console | unknown). -type(logger_dst() :: file:filename() | console | unknown).
@ -78,10 +79,11 @@
id := logger:handler_id(), id := logger:handler_id(),
level := logger:level(), level := logger:level(),
dst := logger_dst(), dst := logger_dst(),
filters := [{logger:filter_id(), logger:filter()}],
status := started | stopped status := started | stopped
}). }).
-define(stopped_handlers, {?MODULE, stopped_handlers}). -define(STOPPED_HANDLERS, {?MODULE, stopped_handlers}).
-define(CONF_PATH, [log]). -define(CONF_PATH, [log]).
start_link() -> start_link() ->
@ -123,7 +125,7 @@ code_change(_OldVsn, State, _Extra) ->
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% emqx_config_handler callbacks %% emqx_config_handler callbacks
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
post_config_update(_Req, _NewConf, _OldConf, AppEnvs) -> post_config_update(_, _Req, _NewConf, _OldConf, AppEnvs) ->
gen_server:call(?MODULE, {update_config, AppEnvs}, 5000). gen_server:call(?MODULE, {update_config, AppEnvs}, 5000).
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -238,19 +240,19 @@ get_log_handlers() ->
-spec(get_log_handlers(started | stopped) -> [logger_handler_info()]). -spec(get_log_handlers(started | stopped) -> [logger_handler_info()]).
get_log_handlers(started) -> get_log_handlers(started) ->
[log_hanlder_info(Conf, started) || Conf <- logger:get_handler_config()]; [log_handler_info(Conf, started) || Conf <- logger:get_handler_config()];
get_log_handlers(stopped) -> get_log_handlers(stopped) ->
[log_hanlder_info(Conf, stopped) || Conf <- list_stopped_handler_config()]. [log_handler_info(Conf, stopped) || Conf <- list_stopped_handler_config()].
-spec(get_log_handler(logger:handler_id()) -> logger_handler_info()). -spec(get_log_handler(logger:handler_id()) -> logger_handler_info()).
get_log_handler(HandlerId) -> get_log_handler(HandlerId) ->
case logger:get_handler_config(HandlerId) of case logger:get_handler_config(HandlerId) of
{ok, Conf} -> {ok, Conf} ->
log_hanlder_info(Conf, started); log_handler_info(Conf, started);
{error, _} -> {error, _} ->
case read_stopped_handler_config(HandlerId) of case read_stopped_handler_config(HandlerId) of
error -> {error, {not_found, HandlerId}}; error -> {error, {not_found, HandlerId}};
{ok, Conf} -> log_hanlder_info(Conf, stopped) {ok, Conf} -> log_handler_info(Conf, stopped)
end end
end. end.
@ -305,21 +307,21 @@ set_log_level(Level) ->
%% Internal Functions %% Internal Functions
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
log_hanlder_info(#{id := Id, level := Level, module := logger_std_h, log_handler_info(#{id := Id, level := Level, module := logger_std_h,
config := #{type := Type}}, Status) when filters := Filters, config := #{type := Type}}, Status) when
Type =:= standard_io; Type =:= standard_io;
Type =:= standard_error -> Type =:= standard_error ->
#{id => Id, level => Level, dst => console, status => Status}; #{id => Id, level => Level, dst => console, status => Status, filters => Filters};
log_hanlder_info(#{id := Id, level := Level, module := logger_std_h, log_handler_info(#{id := Id, level := Level, module := logger_std_h,
config := Config = #{type := file}}, Status) -> filters := Filters, config := Config = #{type := file}}, Status) ->
#{id => Id, level => Level, status => Status, #{id => Id, level => Level, status => Status, filters => Filters,
dst => maps:get(file, Config, atom_to_list(Id))}; dst => maps:get(file, Config, atom_to_list(Id))};
log_hanlder_info(#{id := Id, level := Level, module := logger_disk_log_h, log_handler_info(#{id := Id, level := Level, module := logger_disk_log_h,
config := #{file := Filename}}, Status) -> filters := Filters, config := #{file := Filename}}, Status) ->
#{id => Id, level => Level, dst => Filename, status => Status}; #{id => Id, level => Level, dst => Filename, status => Status, filters => Filters};
log_hanlder_info(#{id := Id, level := Level, module := _OtherModule}, Status) -> log_handler_info(#{id := Id, level := Level, filters := Filters}, Status) ->
#{id => Id, level => Level, dst => unknown, status => Status}. #{id => Id, level => Level, dst => unknown, status => Status, filters => Filters}.
%% set level for all log handlers in one command %% set level for all log handlers in one command
set_all_log_handlers_level(Level) -> set_all_log_handlers_level(Level) ->
@ -341,29 +343,29 @@ rollback([{ID, Level} | List]) ->
rollback([]) -> ok. rollback([]) -> ok.
save_stopped_handler_config(HandlerId, Config) -> save_stopped_handler_config(HandlerId, Config) ->
case persistent_term:get(?stopped_handlers, undefined) of case persistent_term:get(?STOPPED_HANDLERS, undefined) of
undefined -> undefined ->
persistent_term:put(?stopped_handlers, #{HandlerId => Config}); persistent_term:put(?STOPPED_HANDLERS, #{HandlerId => Config});
ConfList -> ConfList ->
persistent_term:put(?stopped_handlers, ConfList#{HandlerId => Config}) persistent_term:put(?STOPPED_HANDLERS, ConfList#{HandlerId => Config})
end. end.
read_stopped_handler_config(HandlerId) -> read_stopped_handler_config(HandlerId) ->
case persistent_term:get(?stopped_handlers, undefined) of case persistent_term:get(?STOPPED_HANDLERS, undefined) of
undefined -> error; undefined -> error;
ConfList -> maps:find(HandlerId, ConfList) ConfList -> maps:find(HandlerId, ConfList)
end. end.
remove_stopped_handler_config(HandlerId) -> remove_stopped_handler_config(HandlerId) ->
case persistent_term:get(?stopped_handlers, undefined) of case persistent_term:get(?STOPPED_HANDLERS, undefined) of
undefined -> ok; undefined -> ok;
ConfList -> ConfList ->
case maps:find(HandlerId, ConfList) of case maps:find(HandlerId, ConfList) of
error -> ok; error -> ok;
{ok, _} -> {ok, _} ->
persistent_term:put(?stopped_handlers, maps:remove(HandlerId, ConfList)) persistent_term:put(?STOPPED_HANDLERS, maps:remove(HandlerId, ConfList))
end end
end. end.
list_stopped_handler_config() -> list_stopped_handler_config() ->
case persistent_term:get(?stopped_handlers, undefined) of case persistent_term:get(?STOPPED_HANDLERS, undefined) of
undefined -> []; undefined -> [];
ConfList -> maps:values(ConfList) ConfList -> maps:values(ConfList)
end. end.

View File

@ -17,6 +17,7 @@
-module(emqx_misc). -module(emqx_misc).
-compile(inline). -compile(inline).
-elvis([{elvis_style, god_modules, disable}]).
-include("types.hrl"). -include("types.hrl").
-include("logger.hrl"). -include("logger.hrl").
@ -65,21 +66,13 @@ maybe_parse_ip(Host) ->
end. end.
%% @doc Add `ipv6_probe' socket option if it's supported. %% @doc Add `ipv6_probe' socket option if it's supported.
%% The gen_tcp:ipv6_probe/0 function (returning true) is only added in EMQ X's OTP forks.
ipv6_probe(Opts) -> ipv6_probe(Opts) ->
case persistent_term:get({?MODULE, ipv6_probe_supported}, unknown) of case erlang:function_exported(gen_tcp, ipv6_probe, 0) of
unknown -> true -> [{ipv6_probe, true} | Opts];
%% e.g. 23.2.7.1-emqx-2-x86_64-unknown-linux-gnu-64 false -> Opts
OtpVsn = emqx_vm:get_otp_version(),
Bool = (match =:= re:run(OtpVsn, "emqx", [{capture, none}])),
_ = persistent_term:put({?MODULE, ipv6_probe_supported}, Bool),
ipv6_probe(Bool, Opts);
Bool ->
ipv6_probe(Bool, Opts)
end. end.
ipv6_probe(false, Opts) -> Opts;
ipv6_probe(true, Opts) -> [{ipv6_probe, true} | Opts].
%% @doc Merge options %% @doc Merge options
-spec(merge_opts(Opts, Opts) -> Opts when Opts :: proplists:proplist()). -spec(merge_opts(Opts, Opts) -> Opts when Opts :: proplists:proplist()).
merge_opts(Defaults, Options) -> merge_opts(Defaults, Options) ->
@ -100,9 +93,9 @@ maybe_apply(Fun, Arg) when is_function(Fun) ->
-spec(compose(list(F)) -> G -spec(compose(list(F)) -> G
when F :: fun((any()) -> any()), when F :: fun((any()) -> any()),
G :: fun((any()) -> any())). G :: fun((any()) -> any())).
compose([F|More]) -> compose(F, More). compose([F | More]) -> compose(F, More).
-spec(compose(F, G|[Gs]) -> C -spec(compose(F, G | [Gs]) -> C
when F :: fun((X1) -> X2), when F :: fun((X1) -> X2),
G :: fun((X2) -> X3), G :: fun((X2) -> X3),
Gs :: [fun((Xn) -> Xn1)], Gs :: [fun((Xn) -> Xn1)],
@ -110,19 +103,19 @@ compose([F|More]) -> compose(F, More).
X3 :: any(), Xn :: any(), Xn1 :: any(), Xm :: any()). X3 :: any(), Xn :: any(), Xn1 :: any(), Xm :: any()).
compose(F, G) when is_function(G) -> fun(X) -> G(F(X)) end; compose(F, G) when is_function(G) -> fun(X) -> G(F(X)) end;
compose(F, [G]) -> compose(F, G); compose(F, [G]) -> compose(F, G);
compose(F, [G|More]) -> compose(compose(F, G), More). compose(F, [G | More]) -> compose(compose(F, G), More).
%% @doc RunFold %% @doc RunFold
run_fold([], Acc, _State) -> run_fold([], Acc, _State) ->
Acc; Acc;
run_fold([Fun|More], Acc, State) -> run_fold([Fun | More], Acc, State) ->
run_fold(More, Fun(Acc, State), State). run_fold(More, Fun(Acc, State), State).
%% @doc Pipeline %% @doc Pipeline
pipeline([], Input, State) -> pipeline([], Input, State) ->
{ok, Input, State}; {ok, Input, State};
pipeline([Fun|More], Input, State) -> pipeline([Fun | More], Input, State) ->
case apply_fun(Fun, Input, State) of case apply_fun(Fun, Input, State) of
ok -> pipeline(More, Input, State); ok -> pipeline(More, Input, State);
{ok, NState} -> {ok, NState} ->
@ -171,7 +164,7 @@ drain_deliver(0, Acc) ->
drain_deliver(N, Acc) -> drain_deliver(N, Acc) ->
receive receive
Deliver = {deliver, _Topic, _Msg} -> Deliver = {deliver, _Topic, _Msg} ->
drain_deliver(N-1, [Deliver|Acc]) drain_deliver(N-1, [Deliver | Acc])
after 0 -> after 0 ->
lists:reverse(Acc) lists:reverse(Acc)
end. end.
@ -186,7 +179,7 @@ drain_down(0, Acc) ->
drain_down(Cnt, Acc) -> drain_down(Cnt, Acc) ->
receive receive
{'DOWN', _MRef, process, Pid, _Reason} -> {'DOWN', _MRef, process, Pid, _Reason} ->
drain_down(Cnt-1, [Pid|Acc]) drain_down(Cnt-1, [Pid | Acc])
after 0 -> after 0 ->
lists:reverse(Acc) lists:reverse(Acc)
end. end.
@ -213,7 +206,7 @@ check_oom(Pid, #{max_message_queue_len := MaxQLen,
end. end.
do_check_oom([]) -> ok; do_check_oom([]) -> ok;
do_check_oom([{Val, Max, Reason}|Rest]) -> do_check_oom([{Val, Max, Reason} | Rest]) ->
case is_integer(Max) andalso (0 < Max) andalso (Max < Val) of case is_integer(Max) andalso (0 < Max) andalso (Max < Val) of
true -> {shutdown, Reason}; true -> {shutdown, Reason};
false -> do_check_oom(Rest) false -> do_check_oom(Rest)
@ -256,8 +249,8 @@ proc_stats(Pid) ->
reductions, reductions,
memory]) of memory]) of
undefined -> []; undefined -> [];
[{message_queue_len, Len}|ProcStats] -> [{message_queue_len, Len} | ProcStats] ->
[{mailbox_len, Len}|ProcStats] [{mailbox_len, Len} | ProcStats]
end. end.
rand_seed() -> rand_seed() ->
@ -277,9 +270,9 @@ index_of(E, L) ->
index_of(_E, _I, []) -> index_of(_E, _I, []) ->
error(badarg); error(badarg);
index_of(E, I, [E|_]) -> index_of(E, I, [E | _]) ->
I; I;
index_of(E, I, [_|L]) -> index_of(E, I, [_ | L]) ->
index_of(E, I+1, L). index_of(E, I+1, L).
-spec(bin2hexstr_A_F(binary()) -> binary()). -spec(bin2hexstr_A_F(binary()) -> binary()).
@ -339,6 +332,12 @@ pad(L, Count) ->
-include_lib("eunit/include/eunit.hrl"). -include_lib("eunit/include/eunit.hrl").
ipv6_probe_test() -> ipv6_probe_test() ->
?assertEqual([{ipv6_probe, true}], ipv6_probe([])). try gen_tcp:ipv6_probe() of
true ->
?assertEqual([{ipv6_probe, true}], ipv6_probe([]))
catch
_ : _ ->
ok
end.
-endif. -endif.

View File

@ -17,6 +17,7 @@
-module(emqx_mountpoint). -module(emqx_mountpoint).
-include("emqx.hrl"). -include("emqx.hrl").
-include("emqx_placeholder.hrl").
-include("types.hrl"). -include("types.hrl").
-export([ mount/2 -export([ mount/2
@ -66,14 +67,17 @@ unmount(MountPoint, Msg = #message{topic = Topic}) ->
-spec(replvar(maybe(mountpoint()), map()) -> maybe(mountpoint())). -spec(replvar(maybe(mountpoint()), map()) -> maybe(mountpoint())).
replvar(undefined, _Vars) -> replvar(undefined, _Vars) ->
undefined; undefined;
replvar(MountPoint, #{clientid := ClientId, username := Username}) -> replvar(MountPoint, Vars) ->
lists:foldl(fun feed_var/2, MountPoint, ClientID = maps:get(clientid, Vars, undefined),
[{<<"%c">>, ClientId}, {<<"%u">>, Username}]). UserName = maps:get(username, Vars, undefined),
EndpointName = maps:get(endpoint_name, Vars, undefined),
List = [ {?PH_CLIENTID, ClientID}
, {?PH_USERNAME, UserName}
, {?PH_ENDPOINT_NAME, EndpointName}
],
lists:foldl(fun feed_var/2, MountPoint, List).
feed_var({<<"%c">>, ClientId}, MountPoint) -> feed_var({_PlaceHolder, undefined}, MountPoint) ->
emqx_topic:feed_var(<<"%c">>, ClientId, MountPoint);
feed_var({<<"%u">>, undefined}, MountPoint) ->
MountPoint; MountPoint;
feed_var({<<"%u">>, Username}, MountPoint) -> feed_var({PlaceHolder, Value}, MountPoint) ->
emqx_topic:feed_var(<<"%u">>, Username, MountPoint). emqx_topic:feed_var(PlaceHolder, Value, MountPoint).

View File

@ -96,12 +96,26 @@ handle_info({timeout, _Timer, check}, State) ->
_ = case emqx_vm:cpu_util() of %% TODO: should be improved? _ = case emqx_vm:cpu_util() of %% TODO: should be improved?
0 -> ok; 0 -> ok;
Busy when Busy >= CPUHighWatermark -> Busy when Busy >= CPUHighWatermark ->
emqx_alarm:activate(high_cpu_usage, #{usage => io_lib:format("~p%", [Busy]), Usage = io_lib:format("~p%", [Busy]),
high_watermark => CPUHighWatermark, Message = [Usage, " cpu usage"],
low_watermark => CPULowWatermark}), emqx_alarm:activate(high_cpu_usage,
#{
usage => Usage,
high_watermark => CPUHighWatermark,
low_watermark => CPULowWatermark
},
Message),
start_check_timer(); start_check_timer();
Busy when Busy =< CPULowWatermark -> Busy when Busy =< CPULowWatermark ->
emqx_alarm:deactivate(high_cpu_usage), Usage = io_lib:format("~p%", [Busy]),
Message = [Usage, " cpu usage"],
emqx_alarm:deactivate(high_cpu_usage,
#{
usage => Usage,
high_watermark => CPUHighWatermark,
low_watermark => CPULowWatermark
},
Message),
start_check_timer(); start_check_timer();
_Busy -> _Busy ->
start_check_timer() start_check_timer()

View File

@ -22,7 +22,7 @@
-include("logger.hrl"). -include("logger.hrl").
-type(hash_type() :: plain | md5 | sha | sha256 | pbkdf2 | bcrypt). -type(hash_type() :: plain | md5 | sha | sha256 | sha512 | pbkdf2 | bcrypt).
-export_type([hash_type/0]). -export_type([hash_type/0]).
@ -95,4 +95,3 @@ hexstring(<<X:256/big-unsigned-integer>>) ->
iolist_to_binary(io_lib:format("~64.16.0b", [X])); iolist_to_binary(io_lib:format("~64.16.0b", [X]));
hexstring(<<X:512/big-unsigned-integer>>) -> hexstring(<<X:512/big-unsigned-integer>>) ->
iolist_to_binary(io_lib:format("~128.16.0b", [X])). iolist_to_binary(io_lib:format("~128.16.0b", [X])).

View File

@ -179,12 +179,17 @@ timestamp_from_conninfo(ConnInfo) ->
end. end.
lookup(ClientID) when is_binary(ClientID) -> lookup(ClientID) when is_binary(ClientID) ->
case lookup_session_store(ClientID) of case is_store_enabled() of
none -> none; false ->
{value, #session_store{session = S} = SS} -> none;
case persistent_session_status(SS) of true ->
expired -> {expired, S}; case lookup_session_store(ClientID) of
persistent -> {persistent, S} none -> none;
{value, #session_store{session = S} = SS} ->
case persistent_session_status(SS) of
expired -> {expired, S};
persistent -> {persistent, S}
end
end end
end. end.

View File

@ -32,7 +32,7 @@
]). ]).
-ifdef(TEST). -ifdef(TEST).
-export([worker/0]). -export([worker/0, flush_async_tasks/0]).
-endif. -endif.
%% gen_server callbacks %% gen_server callbacks
@ -139,3 +139,15 @@ run({F, A}) when is_function(F), is_list(A) ->
run(Fun) when is_function(Fun) -> run(Fun) when is_function(Fun) ->
Fun(). Fun().
-ifdef(TEST).
%% This helper function creates a large enough number of async tasks
%% to force flush the pool workers.
%% The number of tasks should be large enough to ensure all workers have
%% the chance to work on at least one of the tasks.
flush_async_tasks() ->
Ref = make_ref(),
Self = self(),
L = lists:seq(1, 997),
lists:foreach(fun(I) -> emqx_pool:async_submit(fun() -> Self ! {done, Ref, I} end, []) end, L),
lists:foreach(fun(I) -> receive {done, Ref, I} -> ok end end, L).
-endif.

View File

@ -0,0 +1,86 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_release).
-export([ edition/0
, put_edition/0
, put_edition/1
, description/0
, version/0
]).
-include("emqx_release.hrl").
%% @doc Return EMQ X description.
description() ->
case os:getenv("EMQX_DESCRIPTION") of
false -> "EMQ X Community Edition";
"" -> "EMQ X Community Edition";
Str -> string:strip(Str, both, $\n)
end.
%% @doc Return EMQ X edition info.
%% Read info from persistent_term at runtime.
%% Or meck this function to run tests for another edition.
-spec edition() -> ce | ee | edge.
edition() ->
try persistent_term:get(emqx_edition)
catch error : badarg -> get_edition() end.
%% @private initiate EMQ X edition info in persistent_term.
put_edition() ->
ok = put_edition(get_edition()).
%% @hidden This function is mostly for testing.
%% Switch to another edition at runtime to run edition-specific tests.
-spec put_edition(ce | ee | edge) -> ok.
put_edition(Which) ->
persistent_term:put(emqx_edition, Which),
ok.
-spec get_edition() -> ce | ee | edge.
get_edition() ->
edition(description()).
edition(Desc) ->
case re:run(Desc, "enterprise", [caseless]) of
{match, _} ->
ee;
_ ->
case re:run(Desc, "edge", [caseless]) of
{match, _} -> edge;
_ -> ce
end
end.
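%% Editor's note: an illustrative sketch, not part of this commit. The
%% case-insensitive matches on the description string give, for these example
%% description strings:
%%   edition("EMQ X Enterprise")        -> ee
%%   edition("EMQ X Edge Edition")      -> edge
%%   edition("EMQ X Community Edition") -> ce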
%% @doc Return the release version.
version() ->
case lists:keyfind(emqx_vsn, 1, ?MODULE:module_info(compile)) of
false -> %% For TEST build or dependency build. false -> %% For TEST build or dependency build.
?EMQX_RELEASE;
{_, Vsn} -> %% For emqx release build
VsnStr = ?EMQX_RELEASE,
case string:str(Vsn, VsnStr) of
1 -> ok;
_ ->
erlang:error(#{ reason => version_mismatch
, source => VsnStr
, built_for => Vsn
})
end,
Vsn
end.

View File

@ -51,6 +51,7 @@
-export([ validate_heap_size/1 -export([ validate_heap_size/1
, parse_user_lookup_fun/1 , parse_user_lookup_fun/1
, validate_alarm_actions/1
]). ]).
% workaround: prevent being recognized as unused functions % workaround: prevent being recognized as unused functions
@ -74,6 +75,8 @@
-export([server_ssl_opts_schema/2, client_ssl_opts_schema/1, ciphers_schema/1, default_ciphers/1]). -export([server_ssl_opts_schema/2, client_ssl_opts_schema/1, ciphers_schema/1, default_ciphers/1]).
-export([sc/2, map/2]). -export([sc/2, map/2]).
-elvis([{elvis_style, god_modules, disable}]).
namespace() -> undefined. namespace() -> undefined.
roots() -> roots() ->
@ -889,17 +892,36 @@ fields("sysmon_os") ->
fields("alarm") -> fields("alarm") ->
[ {"actions", [ {"actions",
sc(hoconsc:array(atom()), sc(hoconsc:array(atom()),
#{ default => [log, publish] #{ default => [log, publish],
validator => fun ?MODULE:validate_alarm_actions/1,
example => [log, publish],
desc =>
"""The actions triggered when the alarm is activated.<\br>
Currently supports two actions, 'log' and 'publish'.
'log' is to write the alarm to log (console or file).
'publish' is to publish the alarm as an MQTT message to the system topics:
<code>$SYS/brokers/emqx@xx.xx.xx.x/alarms/activate</code> and
<code>$SYS/brokers/emqx@xx.xx.xx.x/alarms/deactivate</code>"""
}) })
} }
, {"size_limit", , {"size_limit",
sc(integer(), sc(range(1, 3000),
#{ default => 1000 #{ default => 1000,
example => 1000,
desc =>
"""The maximum total number of deactivated alarms to keep as history.<br>
When this limit is exceeded, the oldest deactivated alarms are deleted to cap the total number.
"""
}) })
} }
, {"validity_period", , {"validity_period",
sc(duration(), sc(duration(),
#{ default => "24h" #{ default => "24h",
example => "24h",
desc =>
"""Retention time of deactivated alarms. Alarms are not deleted immediately
when deactivated, but after the retention time.
"""
}) })
} }
]. ].
@ -1141,7 +1163,7 @@ client_ssl_opts_schema(Defaults) ->
common_ssl_opts_schema(Defaults) ++ common_ssl_opts_schema(Defaults) ++
[ { "server_name_indication", [ { "server_name_indication",
sc(hoconsc:union([disable, string()]), sc(hoconsc:union([disable, string()]),
#{ default => disable #{ nullable => true
, desc => , desc =>
"""Specify the host name to be used in TLS Server Name Indication extension.<br> """Specify the host name to be used in TLS Server Name Indication extension.<br>
For instance, when connecting to \"server.example.net\", the genuine server For instance, when connecting to \"server.example.net\", the genuine server
@ -1163,7 +1185,8 @@ default_tls_vsns(dtls_all_available) ->
default_tls_vsns(tls_all_available) -> default_tls_vsns(tls_all_available) ->
emqx_tls_lib:default_versions(). emqx_tls_lib:default_versions().
-spec ciphers_schema(quic | dtls_all_available | tls_all_available | undefined) -> hocon_schema:field_schema(). -spec ciphers_schema(quic | dtls_all_available | tls_all_available | undefined)
-> hocon_schema:field_schema().
ciphers_schema(Default) -> ciphers_schema(Default) ->
sc(hoconsc:array(string()), sc(hoconsc:array(string()),
#{ default => default_ciphers(Default) #{ default => default_ciphers(Default)
@ -1303,7 +1326,7 @@ to_bar_separated_list(Str) ->
{ok, string:tokens(Str, "| ")}. {ok, string:tokens(Str, "| ")}.
to_ip_port(Str) -> to_ip_port(Str) ->
case string:tokens(Str, ":") of case string:tokens(Str, ": ") of
[Ip, Port] -> [Ip, Port] ->
PortVal = list_to_integer(Port), PortVal = list_to_integer(Port),
case inet:parse_address(Ip) of case inet:parse_address(Ip) of
@ -1345,8 +1368,16 @@ validate_heap_size(Siz) ->
true -> error(io_lib:format("force_shutdown_policy: heap-size ~ts is too large", [Siz])); true -> error(io_lib:format("force_shutdown_policy: heap-size ~ts is too large", [Siz]));
false -> ok false -> ok
end. end.
validate_alarm_actions(Actions) ->
UnSupported = lists:filter(fun(Action) -> Action =/= log andalso Action =/= publish end, Actions),
case UnSupported of
[] -> ok;
Error -> {error, Error}
end.
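%% Editor's note: an illustrative sketch, not part of this commit. The validator
%% accepts only the two supported actions and returns the unsupported ones
%% (here 'email' stands for any hypothetical unsupported action):
%%   validate_alarm_actions([log, publish]) -> ok
%%   validate_alarm_actions([log, email])   -> {error, [email]}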
parse_user_lookup_fun(StrConf) -> parse_user_lookup_fun(StrConf) ->
[ModStr, FunStr] = string:tokens(str(StrConf), ":"), [ModStr, FunStr] = string:tokens(str(StrConf), ": "),
Mod = list_to_atom(ModStr), Mod = list_to_atom(ModStr),
Fun = list_to_atom(FunStr), Fun = list_to_atom(FunStr),
{fun Mod:Fun/3, undefined}. {fun Mod:Fun/3, undefined}.

View File

@ -58,6 +58,7 @@
-export([ info/1 -export([ info/1
, info/2 , info/2
, is_session/1
, stats/1 , stats/1
]). ]).
@ -202,6 +203,9 @@ init(Opts) ->
%% Info, Stats %% Info, Stats
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
is_session(#session{}) -> true;
is_session(_) -> false.
%% @doc Get infos of the session. %% @doc Get infos of the session.
-spec(info(session()) -> emqx_types:infos()). -spec(info(session()) -> emqx_types:infos()).
info(Session) -> info(Session) ->

View File

@ -215,7 +215,7 @@ handle_call({pending, SessionID, MarkerIDs}, _From, State) ->
Res = emqx_persistent_session:pending_messages_in_db(SessionID, MarkerIDs), Res = emqx_persistent_session:pending_messages_in_db(SessionID, MarkerIDs),
{reply, Res, State}; {reply, Res, State};
handle_call(Req, _From, State) -> handle_call(Req, _From, State) ->
?LOG(error, "Unexpected call: ~p", [Req]), ?SLOG(error, #{msg => "unexpected_call", req => Req}),
{reply, ignored, State}. {reply, ignored, State}.
handle_cast({delete_routes, SessionID, Subscriptions}, State) -> handle_cast({delete_routes, SessionID, Subscriptions}, State) ->
@ -233,11 +233,11 @@ handle_cast({resume_end, SessionID, Pid}, State) ->
_ = emqx_session_router_worker_sup:abort_worker(Pid), _ = emqx_session_router_worker_sup:abort_worker(Pid),
{noreply, State#{ pmon => Pmon }}; {noreply, State#{ pmon => Pmon }};
handle_cast(Msg, State) -> handle_cast(Msg, State) ->
?LOG(error, "Unexpected cast: ~p", [Msg]), ?SLOG(error, #{msg => "unexpected_cast", cast => Msg}),
{noreply, State}. {noreply, State}.
handle_info(Info, State) -> handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]), ?SLOG(error, #{msg => "unexpected_info", info => Info}),
{noreply, State}. {noreply, State}.
terminate(_Reason, #{pool := Pool, id := Id}) -> terminate(_Reason, #{pool := Pool, id := Id}) ->

View File

@ -292,6 +292,7 @@ subscribers(Group, Topic) ->
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
init([]) -> init([]) ->
ok = mria:wait_for_tables([?TAB]),
{ok, _} = mnesia:subscribe({table, ?TAB, simple}), {ok, _} = mnesia:subscribe({table, ?TAB, simple}),
{atomic, PMon} = mria:transaction(?SHARED_SUB_SHARD, fun init_monitors/0), {atomic, PMon} = mria:transaction(?SHARED_SUB_SHARD, fun init_monitors/0),
ok = emqx_tables:new(?SHARED_SUBS, [protected, bag]), ok = emqx_tables:new(?SHARED_SUBS, [protected, bag]),

View File

@ -21,6 +21,7 @@
-include("emqx.hrl"). -include("emqx.hrl").
-include("logger.hrl"). -include("logger.hrl").
-include("types.hrl"). -include("types.hrl").
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
%% APIs %% APIs
@ -66,8 +67,10 @@
%% Connection stats %% Connection stats
-define(CONNECTION_STATS, -define(CONNECTION_STATS,
['connections.count', %% Count of Concurrent Connections [ 'connections.count' %% Count of Concurrent Connections
'connections.max' %% Maximum Number of Concurrent Connections , 'connections.max' %% Maximum Number of Concurrent Connections
, 'live_connections.count' %% Count of connected clients
, 'live_connections.max' %% Maximum number of connected clients
]). ]).
%% Channel stats %% Channel stats
@ -215,6 +218,11 @@ handle_cast({setstat, Stat, MaxStat, Val}, State) ->
ets:insert(?TAB, {MaxStat, Val}) ets:insert(?TAB, {MaxStat, Val})
end, end,
safe_update_element(Stat, Val), safe_update_element(Stat, Val),
?tp(emqx_stats_setstat,
#{ count_stat => Stat
, max_stat => MaxStat
, value => Val
}),
{noreply, State}; {noreply, State};
handle_cast({update_interval, Update = #update{name = Name}}, handle_cast({update_interval, Update = #update{name = Name}},
@ -225,7 +233,7 @@ handle_cast({update_interval, Update = #update{name = Name}},
name => Name name => Name
}), }),
State; State;
false -> State#state{updates = [Update|Updates]} false -> State#state{updates = [Update | Updates]}
end, end,
{noreply, NState}; {noreply, NState};

View File

@ -170,9 +170,11 @@ code_change(_OldVsn, State, _Extra) ->
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
handle_partition_event({partition, {occurred, Node}}) -> handle_partition_event({partition, {occurred, Node}}) ->
emqx_alarm:activate(partition, #{occurred => Node}); Message = io_lib:format("Partition occurs at node ~ts", [Node]),
handle_partition_event({partition, {healed, _Node}}) -> emqx_alarm:activate(partition, #{occurred => Node}, Message);
emqx_alarm:deactivate(partition). handle_partition_event({partition, {healed, Node}}) ->
Message = io_lib:format("Partition healed at node ~ts", [Node]),
emqx_alarm:deactivate(partition, no_details, Message).
suppress(Key, SuccFun, State = #{events := Events}) -> suppress(Key, SuccFun, State = #{events := Events}) ->
case lists:member(Key, Events) of case lists:member(Key, Events) of

View File

@ -0,0 +1,486 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_trace).
-behaviour(gen_server).
-include_lib("emqx/include/emqx.hrl").
-include_lib("emqx/include/logger.hrl").
%% Mnesia bootstrap
-export([mnesia/1]).
-boot_mnesia({mnesia, [boot]}).
-export([ publish/1
, subscribe/3
, unsubscribe/2
]).
-export([ start_link/0
, list/0
, list/1
, get_trace_filename/1
, create/1
, delete/1
, clear/0
, update/2
]).
-export([ format/1
, zip_dir/0
, filename/2
, trace_dir/0
, trace_file/1
, delete_files_after_send/2
]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
-define(TRACE, ?MODULE).
-define(MAX_SIZE, 30).
-ifdef(TEST).
-export([log_file/2]).
-endif.
-export_type([ip_address/0]).
-type ip_address() :: string().
-record(?TRACE,
{ name :: binary() | undefined | '_'
, type :: clientid | topic | ip_address | undefined | '_'
, filter :: emqx_types:topic() | emqx_types:clientid() | ip_address() | undefined | '_'
, enable = true :: boolean() | '_'
, start_at :: integer() | undefined | '_'
, end_at :: integer() | undefined | '_'
}).
mnesia(boot) ->
ok = mria:create_table(?TRACE, [
{type, set},
{rlog_shard, ?COMMON_SHARD},
{storage, disc_copies},
{record_name, ?TRACE},
{attributes, record_info(fields, ?TRACE)}]).
publish(#message{topic = <<"$SYS/", _/binary>>}) -> ignore;
publish(#message{from = From, topic = Topic, payload = Payload}) when
is_binary(From); is_atom(From) ->
emqx_logger:info(
#{topic => Topic, mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY}},
"PUBLISH to ~s: ~0p",
[Topic, Payload]
).
subscribe(<<"$SYS/", _/binary>>, _SubId, _SubOpts) -> ignore;
subscribe(Topic, SubId, SubOpts) ->
emqx_logger:info(
#{topic => Topic, mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY}},
"~ts SUBSCRIBE ~ts: Options: ~0p",
[SubId, Topic, SubOpts]
).
unsubscribe(<<"$SYS/", _/binary>>, _SubOpts) -> ignore;
unsubscribe(Topic, SubOpts) ->
emqx_logger:info(
#{topic => Topic, mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY}},
"~ts UNSUBSCRIBE ~ts: Options: ~0p",
[maps:get(subid, SubOpts, ""), Topic, SubOpts]
).
-spec(start_link() -> emqx_types:startlink_ret()).
start_link() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
-spec list() -> [tuple()].
list() ->
ets:match_object(?TRACE, #?TRACE{_ = '_'}).
-spec list(boolean()) -> [tuple()].
list(Enable) ->
ets:match_object(?TRACE, #?TRACE{enable = Enable, _ = '_'}).
-spec create([{Key :: binary(), Value :: binary()}] | #{atom() => binary()}) ->
ok | {error, {duplicate_condition, iodata()} | {already_existed, iodata()} | iodata()}.
create(Trace) ->
case mnesia:table_info(?TRACE, size) < ?MAX_SIZE of
true ->
case to_trace(Trace) of
{ok, TraceRec} -> insert_new_trace(TraceRec);
{error, Reason} -> {error, Reason}
end;
false ->
{error, "The number of traces created has reache the maximum"
" please delete the useless ones first"}
end.
-spec delete(Name :: binary()) -> ok | {error, not_found}.
delete(Name) ->
Tran = fun() ->
case mnesia:read(?TRACE, Name) of
[_] -> mnesia:delete(?TRACE, Name, write);
[] -> mnesia:abort(not_found)
end
end,
transaction(Tran).
-spec clear() -> ok | {error, Reason :: term()}.
clear() ->
case mria:clear_table(?TRACE) of
{atomic, ok} -> ok;
{aborted, Reason} -> {error, Reason}
end.
-spec update(Name :: binary(), Enable :: boolean()) ->
ok | {error, not_found | finished}.
update(Name, Enable) ->
Tran = fun() ->
case mnesia:read(?TRACE, Name) of
[] -> mnesia:abort(not_found);
[#?TRACE{enable = Enable}] -> ok;
[Rec] ->
case erlang:system_time(second) >= Rec#?TRACE.end_at of
false -> mnesia:write(?TRACE, Rec#?TRACE{enable = Enable}, write);
true -> mnesia:abort(finished)
end
end
end,
transaction(Tran).
-spec get_trace_filename(Name :: binary()) ->
{ok, FileName :: string()} | {error, not_found}.
get_trace_filename(Name) ->
Tran = fun() ->
case mnesia:read(?TRACE, Name, read) of
[] -> mnesia:abort(not_found);
[#?TRACE{start_at = Start}] -> {ok, filename(Name, Start)}
end end,
transaction(Tran).
-spec trace_file(File :: list()) ->
{ok, Node :: list(), Binary :: binary()} |
{error, Node :: list(), Reason :: term()}.
trace_file(File) ->
FileName = filename:join(trace_dir(), File),
Node = atom_to_list(node()),
case file:read_file(FileName) of
{ok, Bin} -> {ok, Node, Bin};
{error, Reason} -> {error, Node, Reason}
end.
delete_files_after_send(TraceLog, Zips) ->
gen_server:cast(?MODULE, {delete_tag, self(), [TraceLog | Zips]}).
-spec format(list(#?TRACE{})) -> list(map()).
format(Traces) ->
Fields = record_info(fields, ?TRACE),
lists:map(fun(Trace0 = #?TRACE{}) ->
[_ | Values] = tuple_to_list(Trace0),
maps:from_list(lists:zip(Fields, Values))
end, Traces).
init([]) ->
erlang:process_flag(trap_exit, true),
OriginLogLevel = emqx_logger:get_primary_log_level(),
ok = filelib:ensure_dir(trace_dir()),
ok = filelib:ensure_dir(zip_dir()),
{ok, _} = mnesia:subscribe({table, ?TRACE, simple}),
Traces = get_enable_trace(),
ok = update_log_primary_level(Traces, OriginLogLevel),
TRef = update_trace(Traces),
{ok, #{timer => TRef, monitors => #{}, primary_log_level => OriginLogLevel}}.
handle_call(Req, _From, State) ->
?LOG(error, "Unexpected call: ~p", [Req]),
{reply, ok, State}.
handle_cast({delete_tag, Pid, Files}, State = #{monitors := Monitors}) ->
erlang:monitor(process, Pid),
{noreply, State#{monitors => Monitors#{Pid => Files}}};
handle_cast(Msg, State) ->
?LOG(error, "Unexpected cast: ~p", [Msg]),
{noreply, State}.
handle_info({'DOWN', _Ref, process, Pid, _Reason}, State = #{monitors := Monitors}) ->
case maps:take(Pid, Monitors) of
error -> {noreply, State};
{Files, NewMonitors} ->
lists:foreach(fun file:delete/1, Files),
{noreply, State#{monitors => NewMonitors}}
end;
handle_info({timeout, TRef, update_trace},
#{timer := TRef, primary_log_level := OriginLogLevel} = State) ->
Traces = get_enable_trace(),
ok = update_log_primary_level(Traces, OriginLogLevel),
NextTRef = update_trace(Traces),
{noreply, State#{timer => NextTRef}};
handle_info({mnesia_table_event, _Events}, State = #{timer := TRef}) ->
emqx_misc:cancel_timer(TRef),
handle_info({timeout, TRef, update_trace}, State);
handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]),
{noreply, State}.
terminate(_Reason, #{timer := TRef, primary_log_level := OriginLogLevel}) ->
ok = set_log_primary_level(OriginLogLevel),
_ = mnesia:unsubscribe({table, ?TRACE, simple}),
emqx_misc:cancel_timer(TRef),
stop_all_trace_handler(),
_ = file:del_dir_r(zip_dir()),
ok.
code_change(_, State, _Extra) ->
{ok, State}.
insert_new_trace(Trace) ->
Tran = fun() ->
case mnesia:read(?TRACE, Trace#?TRACE.name) of
[] ->
#?TRACE{start_at = StartAt, type = Type, filter = Filter} = Trace,
Match = #?TRACE{_ = '_', start_at = StartAt, type = Type, filter = Filter},
case mnesia:match_object(?TRACE, Match, read) of
[] -> mnesia:write(?TRACE, Trace, write);
[#?TRACE{name = Name}] -> mnesia:abort({duplicate_condition, Name})
end;
[#?TRACE{name = Name}] -> mnesia:abort({already_existed, Name})
end
end,
transaction(Tran).
update_trace(Traces) ->
Now = erlang:system_time(second),
{_Waiting, Running, Finished} = classify_by_time(Traces, Now),
disable_finished(Finished),
Started = emqx_trace_handler:running(),
{NeedRunning, AllStarted} = start_trace(Running, Started),
NeedStop = AllStarted -- NeedRunning,
ok = stop_trace(NeedStop, Started),
clean_stale_trace_files(),
NextTime = find_closest_time(Traces, Now),
emqx_misc:start_timer(NextTime, update_trace).
stop_all_trace_handler() ->
lists:foreach(fun(#{id := Id}) -> emqx_trace_handler:uninstall(Id) end,
emqx_trace_handler:running()).
get_enable_trace() ->
{atomic, Traces} =
mria:transaction(?COMMON_SHARD, fun() ->
mnesia:match_object(?TRACE, #?TRACE{enable = true, _ = '_'}, read)
end),
Traces.
find_closest_time(Traces, Now) ->
Sec =
lists:foldl(
fun(#?TRACE{start_at = Start, end_at = End}, Closest)
when Start >= Now andalso Now < End -> %% running
min(End - Now, Closest);
(#?TRACE{start_at = Start}, Closest) when Start < Now -> %% waiting
min(Now - Start, Closest);
(_, Closest) -> Closest %% finished
end, 60 * 15, Traces),
timer:seconds(Sec).
disable_finished([]) -> ok;
disable_finished(Traces) ->
transaction(fun() ->
lists:map(fun(#?TRACE{name = Name}) ->
case mnesia:read(?TRACE, Name, write) of
[] -> ok;
[Trace] -> mnesia:write(?TRACE, Trace#?TRACE{enable = false}, write)
end end, Traces)
end).
start_trace(Traces, Started0) ->
Started = lists:map(fun(#{name := Name}) -> Name end, Started0),
lists:foldl(fun(#?TRACE{name = Name} = Trace, {Running, StartedAcc}) ->
case lists:member(Name, StartedAcc) of
true ->
{[Name | Running], StartedAcc};
false ->
case start_trace(Trace) of
ok -> {[Name | Running], [Name | StartedAcc]};
{error, _Reason} -> {[Name | Running], StartedAcc}
end
end
end, {[], Started}, Traces).
start_trace(Trace) ->
#?TRACE{name = Name
, type = Type
, filter = Filter
, start_at = Start
} = Trace,
Who = #{name => Name, type => Type, filter => Filter},
emqx_trace_handler:install(Who, debug, log_file(Name, Start)).
stop_trace(Finished, Started) ->
lists:foreach(fun(#{name := Name, type := Type}) ->
case lists:member(Name, Finished) of
true -> emqx_trace_handler:uninstall(Type, Name);
false -> ok
end
end, Started).
clean_stale_trace_files() ->
TraceDir = trace_dir(),
case file:list_dir(TraceDir) of
{ok, AllFiles} when AllFiles =/= ["zip"] ->
FileFun = fun(#?TRACE{name = Name, start_at = StartAt}) -> filename(Name, StartAt) end,
KeepFiles = lists:map(FileFun, list()),
case AllFiles -- ["zip" | KeepFiles] of
[] -> ok;
DeleteFiles ->
DelFun = fun(F) -> file:delete(filename:join(TraceDir, F)) end,
lists:foreach(DelFun, DeleteFiles)
end;
_ -> ok
end.
classify_by_time(Traces, Now) ->
classify_by_time(Traces, Now, [], [], []).
classify_by_time([], _Now, Wait, Run, Finish) -> {Wait, Run, Finish};
classify_by_time([Trace = #?TRACE{start_at = Start} | Traces],
Now, Wait, Run, Finish) when Start > Now ->
classify_by_time(Traces, Now, [Trace | Wait], Run, Finish);
classify_by_time([Trace = #?TRACE{end_at = End} | Traces],
Now, Wait, Run, Finish) when End =< Now ->
classify_by_time(Traces, Now, Wait, Run, [Trace | Finish]);
classify_by_time([Trace | Traces], Now, Wait, Run, Finish) ->
classify_by_time(Traces, Now, Wait, [Trace | Run], Finish).
to_trace(TraceParam) ->
case to_trace(ensure_proplists(TraceParam), #?TRACE{}) of
{error, Reason} -> {error, Reason};
{ok, #?TRACE{name = undefined}} ->
{error, "name required"};
{ok, #?TRACE{type = undefined}} ->
{error, "type=[topic,clientid,ip_address] required"};
{ok, #?TRACE{filter = undefined}} ->
{error, "topic/clientid/ip_address filter required"};
{ok, TraceRec0} ->
case fill_default(TraceRec0) of
#?TRACE{start_at = Start, end_at = End} when End =< Start ->
{error, "failed by start_at >= end_at"};
TraceRec -> {ok, TraceRec}
end
end.
ensure_proplists(#{} = Trace) -> maps:to_list(Trace);
ensure_proplists(Trace) when is_list(Trace) ->
lists:foldl(
fun({K, V}, Acc) when is_binary(K) -> [{binary_to_existing_atom(K), V} | Acc];
({K, V}, Acc) when is_atom(K) -> [{K, V} | Acc];
(_, Acc) -> Acc
end, [], Trace).
fill_default(Trace = #?TRACE{start_at = undefined}) ->
fill_default(Trace#?TRACE{start_at = erlang:system_time(second)});
fill_default(Trace = #?TRACE{end_at = undefined, start_at = StartAt}) ->
fill_default(Trace#?TRACE{end_at = StartAt + 10 * 60});
fill_default(Trace) -> Trace.
to_trace([], Rec) -> {ok, Rec};
to_trace([{name, Name} | Trace], Rec) ->
case io_lib:printable_unicode_list(unicode:characters_to_list(Name, utf8)) of
true ->
case binary:match(Name, [<<"/">>], []) of
nomatch -> to_trace(Trace, Rec#?TRACE{name = Name});
_ -> {error, "name cannot contain /"}
end;
false -> {error, "name must printable unicode"}
end;
to_trace([{type, Type} | Trace], Rec) ->
case lists:member(Type, [<<"clientid">>, <<"topic">>, <<"ip_address">>]) of
true -> to_trace(Trace, Rec#?TRACE{type = binary_to_existing_atom(Type)});
false -> {error, "incorrect type: only support clientid/topic/ip_address"}
end;
to_trace([{topic, Topic} | Trace], Rec) ->
case validate_topic(Topic) of
ok -> to_trace(Trace, Rec#?TRACE{filter = Topic});
{error, Reason} -> {error, Reason}
end;
to_trace([{clientid, ClientId} | Trace], Rec) ->
to_trace(Trace, Rec#?TRACE{filter = ClientId});
to_trace([{ip_address, IP} | Trace], Rec) ->
case inet:parse_address(binary_to_list(IP)) of
{ok, _} -> to_trace(Trace, Rec#?TRACE{filter = binary_to_list(IP)});
{error, Reason} -> {error, lists:flatten(io_lib:format("ip address: ~p", [Reason]))}
end;
to_trace([{start_at, StartAt} | Trace], Rec) ->
case to_system_second(StartAt) of
{ok, Sec} -> to_trace(Trace, Rec#?TRACE{start_at = Sec});
{error, Reason} -> {error, Reason}
end;
to_trace([{end_at, EndAt} | Trace], Rec) ->
Now = erlang:system_time(second),
case to_system_second(EndAt) of
{ok, Sec} when Sec > Now ->
to_trace(Trace, Rec#?TRACE{end_at = Sec});
{ok, _Sec} ->
{error, "end_at time has already passed"};
{error, Reason} ->
{error, Reason}
end;
to_trace([Unknown | _Trace], _Rec) -> {error, io_lib:format("unknown field: ~p", [Unknown])}.
validate_topic(TopicName) ->
try emqx_topic:validate(filter, TopicName) of
true -> ok
catch
error:Error ->
{error, io_lib:format("topic: ~s invalid by ~p", [TopicName, Error])}
end.
to_system_second(At) ->
try
Sec = calendar:rfc3339_to_system_time(binary_to_list(At), [{unit, second}]),
{ok, Sec}
catch error: {badmatch, _} ->
{error, ["The rfc3339 specification not satisfied: ", At]}
end.
zip_dir() ->
trace_dir() ++ "zip/".
trace_dir() ->
filename:join(emqx:data_dir(), "trace") ++ "/".
log_file(Name, Start) ->
filename:join(trace_dir(), filename(Name, Start)).
filename(Name, Start) ->
[Time, _] = string:split(calendar:system_time_to_rfc3339(Start), "T", leading),
lists:flatten(["trace_", binary_to_list(Name), "_", Time, ".log"]).
transaction(Tran) ->
case mria:transaction(?COMMON_SHARD, Tran) of
{atomic, Res} -> Res;
{aborted, Reason} -> {error, Reason}
end.
update_log_primary_level([], OriginLevel) -> set_log_primary_level(OriginLevel);
update_log_primary_level(_, _) -> set_log_primary_level(debug).
set_log_primary_level(NewLevel) ->
case NewLevel =/= emqx_logger:get_primary_log_level() of
true -> emqx_logger:set_primary_log_level(NewLevel);
false -> ok
end.

View File

@ -0,0 +1,210 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_trace_api).
-include_lib("emqx/include/logger.hrl").
-include_lib("kernel/include/file.hrl").
%% API
-export([ list_trace/2
, create_trace/2
, update_trace/2
, delete_trace/2
, clear_traces/2
, download_zip_log/2
, stream_log_file/2
]).
-export([ read_trace_file/3
, get_trace_size/0
]).
-define(TO_BIN(_B_), iolist_to_binary(_B_)).
-define(NOT_FOUND(N), {error, 'NOT_FOUND', ?TO_BIN([N, " NOT FOUND"])}).
list_trace(_, _Params) ->
case emqx_trace:list() of
[] -> {ok, []};
List0 ->
List = lists:sort(fun(#{start_at := A}, #{start_at := B}) -> A > B end, List0),
Nodes = mria_mnesia:running_nodes(),
TraceSize = cluster_call(?MODULE, get_trace_size, [], 30000),
AllFileSize = lists:foldl(fun(F, Acc) -> maps:merge(Acc, F) end, #{}, TraceSize),
Now = erlang:system_time(second),
Traces =
lists:map(fun(Trace = #{name := Name, start_at := Start,
end_at := End, enable := Enable, type := Type, filter := Filter}) ->
FileName = emqx_trace:filename(Name, Start),
LogSize = collect_file_size(Nodes, FileName, AllFileSize),
Trace0 = maps:without([enable, filter], Trace),
Trace0#{ log_size => LogSize
, Type => Filter
, start_at => list_to_binary(calendar:system_time_to_rfc3339(Start))
, end_at => list_to_binary(calendar:system_time_to_rfc3339(End))
, status => status(Enable, Start, End, Now)
}
end, emqx_trace:format(List)),
{ok, Traces}
end.
create_trace(_, Param) ->
case emqx_trace:create(Param) of
ok -> ok;
{error, {already_existed, Name}} ->
{error, 'ALREADY_EXISTED', ?TO_BIN([Name, " already exists"])};
{error, {duplicate_condition, Name}} ->
{error, 'DUPLICATE_CONDITION', ?TO_BIN([Name, " duplicate condition"])};
{error, Reason} ->
{error, 'INCORRECT_PARAMS', ?TO_BIN(Reason)}
end.
delete_trace(#{name := Name}, _Param) ->
case emqx_trace:delete(Name) of
ok -> ok;
{error, not_found} -> ?NOT_FOUND(Name)
end.
clear_traces(_, _) ->
emqx_trace:clear().
update_trace(#{name := Name, operation := Operation}, _Param) ->
Enable = case Operation of disable -> false; enable -> true end,
case emqx_trace:update(Name, Enable) of
ok -> {ok, #{enable => Enable, name => Name}};
{error, not_found} -> ?NOT_FOUND(Name)
end.
%% If the HTTP request headers include accept-encoding: gzip and the file size is > 300 bytes,
%% cowboy_compress_h will automatically encode the response as gzip.
download_zip_log(#{name := Name}, _Param) ->
case emqx_trace:get_trace_filename(Name) of
{ok, TraceLog} ->
TraceFiles = collect_trace_file(TraceLog),
ZipDir = emqx_trace:zip_dir(),
Zips = group_trace_file(ZipDir, TraceLog, TraceFiles),
ZipFileName = ZipDir ++ binary_to_list(Name) ++ ".zip",
{ok, ZipFile} = zip:zip(ZipFileName, Zips, [{cwd, ZipDir}]),
emqx_trace:delete_files_after_send(ZipFileName, Zips),
{ok, ZipFile};
{error, Reason} ->
{error, Reason}
end.
group_trace_file(ZipDir, TraceLog, TraceFiles) ->
lists:foldl(fun(Res, Acc) ->
case Res of
{ok, Node, Bin} ->
ZipName = ZipDir ++ Node ++ "-" ++ TraceLog,
ok = file:write_file(ZipName, Bin),
[Node ++ "-" ++ TraceLog | Acc];
{error, Node, Reason} ->
?LOG(error, "download trace log error:~p", [{Node, TraceLog, Reason}]),
Acc
end
end, [], TraceFiles).
collect_trace_file(TraceLog) ->
cluster_call(emqx_trace, trace_file, [TraceLog], 60000).
cluster_call(Mod, Fun, Args, Timeout) ->
Nodes = mria_mnesia:running_nodes(),
{GoodRes, BadNodes} = rpc:multicall(Nodes, Mod, Fun, Args, Timeout),
BadNodes =/= [] andalso ?LOG(error, "rpc call failed on ~p ~p", [BadNodes, {Mod, Fun, Args}]),
GoodRes.
stream_log_file(#{name := Name}, Params) ->
Node0 = proplists:get_value(<<"node">>, Params, atom_to_binary(node())),
Position0 = proplists:get_value(<<"position">>, Params, <<"0">>),
Bytes0 = proplists:get_value(<<"bytes">>, Params, <<"1000">>),
case to_node(Node0) of
{ok, Node} ->
Position = binary_to_integer(Position0),
Bytes = binary_to_integer(Bytes0),
case rpc:call(Node, ?MODULE, read_trace_file, [Name, Position, Bytes]) of
{ok, Bin} ->
Meta = #{<<"position">> => Position + byte_size(Bin), <<"bytes">> => Bytes},
{ok, #{meta => Meta, items => Bin}};
{eof, Size} ->
Meta = #{<<"position">> => Size, <<"bytes">> => Bytes},
{ok, #{meta => Meta, items => <<"">>}};
{error, Reason} ->
logger:log(error, "read_file_failed by ~p", [{Name, Reason, Position, Bytes}]),
{error, Reason};
{badrpc, nodedown} ->
{error, "BadRpc node down"}
end;
{error, Reason} -> {error, Reason}
end.
get_trace_size() ->
TraceDir = emqx_trace:trace_dir(),
Node = node(),
case file:list_dir(TraceDir) of
{ok, AllFiles} ->
lists:foldl(fun(File, Acc) ->
FullFileName = filename:join(TraceDir, File),
Acc#{{Node, File} => filelib:file_size(FullFileName)}
end, #{}, lists:delete("zip", AllFiles));
_ -> #{}
end.
%% this is an rpc call for stream_log_file/2
read_trace_file(Name, Position, Limit) ->
TraceDir = emqx_trace:trace_dir(),
{ok, AllFiles} = file:list_dir(TraceDir),
TracePrefix = "trace_" ++ binary_to_list(Name) ++ "_",
Filter = fun(FileName) -> nomatch =/= string:prefix(FileName, TracePrefix) end,
case lists:filter(Filter, AllFiles) of
[TraceFile] ->
TracePath = filename:join([TraceDir, TraceFile]),
read_file(TracePath, Position, Limit);
[] -> {error, not_found}
end.
read_file(Path, Offset, Bytes) ->
{ok, IoDevice} = file:open(Path, [read, raw, binary]),
try
_ = case Offset of
0 -> ok;
_ -> file:position(IoDevice, {bof, Offset})
end,
case file:read(IoDevice, Bytes) of
{ok, Bin} -> {ok, Bin};
{error, Reason} -> {error, Reason};
eof ->
{ok, #file_info{size = Size}} = file:read_file_info(IoDevice),
{eof, Size}
end
after
file:close(IoDevice)
end.
to_node(Node) ->
try {ok, binary_to_existing_atom(Node)}
catch _:_ ->
{error, "node not found"}
end.
collect_file_size(Nodes, FileName, AllFiles) ->
lists:foldl(fun(Node, Acc) ->
Size = maps:get({Node, FileName}, AllFiles, 0),
Acc#{Node => Size}
end, #{}, Nodes).
%% status(false, _Start, End, Now) when End > Now -> <<"stopped">>;
status(false, _Start, _End, _Now) -> <<"stopped">>;
status(true, Start, _End, Now) when Now < Start -> <<"waiting">>;
status(true, _Start, End, Now) when Now >= End -> <<"stopped">>;
status(true, _Start, _End, _Now) -> <<"running">>.

View File

@ -0,0 +1,218 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2018-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_trace_handler).
-include("emqx.hrl").
-include("logger.hrl").
-logger_header("[Tracer]").
%% APIs
-export([ running/0
, install/3
, install/4
, uninstall/1
, uninstall/2
]).
%% For logger handler filters callbacks
-export([ filter_clientid/2
, filter_topic/2
, filter_ip_address/2
]).
-export([handler_id/2]).
-type tracer() :: #{
name := binary(),
type := clientid | topic | ip_address,
filter := emqx_types:clientid() | emqx_types:topic() | emqx_trace:ip_address()
}.
-define(FORMAT,
{logger_formatter, #{
template => [
time, " [", level, "] ",
{clientid,
[{peername, [clientid, "@", peername, " "], [clientid, " "]}],
[{peername, [peername, " "], []}]
},
msg, "\n"
],
single_line => false,
max_size => unlimited,
depth => unlimited
}}
).
-define(CONFIG(_LogFile_), #{
type => halt,
file => _LogFile_,
max_no_bytes => 512 * 1024 * 1024,
overload_kill_enable => true,
overload_kill_mem_size => 50 * 1024 * 1024,
overload_kill_qlen => 20000,
%% disable restart
overload_kill_restart_after => infinity
}).
%%------------------------------------------------------------------------------
%% APIs
%%------------------------------------------------------------------------------
-spec install(Name :: binary() | list(),
Type :: clientid | topic | ip_address,
Filter ::emqx_types:clientid() | emqx_types:topic() | string(),
Level :: logger:level() | all,
LogFilePath :: string()) -> ok | {error, term()}.
install(Name, Type, Filter, Level, LogFile) ->
Who = #{type => Type, filter => ensure_bin(Filter), name => ensure_bin(Name)},
install(Who, Level, LogFile).
-spec install(Type :: clientid | topic | ip_address,
Filter ::emqx_types:clientid() | emqx_types:topic() | string(),
Level :: logger:level() | all,
LogFilePath :: string()) -> ok | {error, term()}.
install(Type, Filter, Level, LogFile) ->
install(Filter, Type, Filter, Level, LogFile).
-spec install(tracer(), logger:level() | all, string()) -> ok | {error, term()}.
install(Who, all, LogFile) ->
install(Who, debug, LogFile);
install(Who, Level, LogFile) ->
PrimaryLevel = emqx_logger:get_primary_log_level(),
try logger:compare_levels(Level, PrimaryLevel) of
lt ->
{error,
io_lib:format(
"Cannot trace at a log level (~s) "
"lower than the primary log level (~s)",
[Level, PrimaryLevel]
)};
_GtOrEq ->
install_handler(Who, Level, LogFile)
catch
error:badarg ->
{error, {invalid_log_level, Level}}
end.
-spec uninstall(Type :: clientid | topic | ip_address,
Name :: binary() | list()) -> ok | {error, term()}.
uninstall(Type, Name) ->
HandlerId = handler_id(ensure_bin(Name), Type),
uninstall(HandlerId).
-spec uninstall(HandlerId :: atom()) -> ok | {error, term()}.
uninstall(HandlerId) ->
Res = logger:remove_handler(HandlerId),
show_prompts(Res, HandlerId, "Stop trace"),
Res.
%% @doc Return all running trace handlers information.
-spec running() ->
[
#{
name => binary(),
type => topic | clientid | ip_address,
id => atom(),
filter => emqx_types:topic() | emqx_types:clientid() | emqx_trace:ip_address(),
level => logger:level(),
dst => file:filename() | console | unknown
}
].
running() ->
lists:foldl(fun filter_traces/2, [], emqx_logger:get_log_handlers(started)).
-spec filter_clientid(logger:log_event(), {string(), atom()}) -> logger:log_event() | ignore.
filter_clientid(#{meta := #{clientid := ClientId}} = Log, {ClientId, _Name}) -> Log;
filter_clientid(_Log, _ExpectId) -> ignore.
-spec filter_topic(logger:log_event(), {string(), atom()}) -> logger:log_event() | ignore.
filter_topic(#{meta := #{topic := Topic}} = Log, {TopicFilter, _Name}) ->
case emqx_topic:match(Topic, TopicFilter) of
true -> Log;
false -> ignore
end;
filter_topic(_Log, _ExpectId) -> ignore.
-spec filter_ip_address(logger:log_event(), {string(), atom()}) -> logger:log_event() | ignore.
filter_ip_address(#{meta := #{peername := Peername}} = Log, {IP, _Name}) ->
case lists:prefix(IP, Peername) of
true -> Log;
false -> ignore
end;
filter_ip_address(_Log, _ExpectId) -> ignore.
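%% Editor's note: an illustrative sketch, not part of this commit. A handler
%% installed with a topic filter of <<"t/#">> and a (hypothetical) trace name
%% <<"n">> keeps matching publishes and drops everything else:
%%   filter_topic(#{meta => #{topic => <<"t/1">>}}, {<<"t/#">>, <<"n">>}) -> the log event
%%   filter_topic(#{meta => #{topic => <<"x/1">>}}, {<<"t/#">>, <<"n">>}) -> ignore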
install_handler(Who = #{name := Name, type := Type}, Level, LogFile) ->
HandlerId = handler_id(Name, Type),
Config = #{ level => Level,
formatter => ?FORMAT,
filter_default => stop,
filters => filters(Who),
config => ?CONFIG(LogFile)
},
Res = logger:add_handler(HandlerId, logger_disk_log_h, Config),
show_prompts(Res, Who, "Start trace"),
Res.
filters(#{type := clientid, filter := Filter, name := Name}) ->
[{clientid, {fun ?MODULE:filter_clientid/2, {ensure_list(Filter), Name}}}];
filters(#{type := topic, filter := Filter, name := Name}) ->
[{topic, {fun ?MODULE:filter_topic/2, {ensure_bin(Filter), Name}}}];
filters(#{type := ip_address, filter := Filter, name := Name}) ->
[{ip_address, {fun ?MODULE:filter_ip_address/2, {ensure_list(Filter), Name}}}].
filter_traces(#{id := Id, level := Level, dst := Dst, filters := Filters}, Acc) ->
Init = #{id => Id, level => Level, dst => Dst},
case Filters of
[{Type, {_FilterFun, {Filter, Name}}}] when
Type =:= topic orelse
Type =:= clientid orelse
Type =:= ip_address ->
[Init#{type => Type, filter => Filter, name => Name} | Acc];
_ ->
Acc
end.
handler_id(Name, Type) ->
try
do_handler_id(Name, Type)
catch
_ : _ ->
Hash = emqx_misc:bin2hexstr_a_f(crypto:hash(md5, Name)),
do_handler_id(Hash, Type)
end.
%% Handler ID must be an atom.
do_handler_id(Name, Type) ->
TypeStr = atom_to_list(Type),
NameStr = unicode:characters_to_list(Name, utf8),
FullNameStr = "trace_" ++ TypeStr ++ "_" ++ NameStr,
true = io_lib:printable_unicode_list(FullNameStr),
FullNameBin = unicode:characters_to_binary(FullNameStr, utf8),
binary_to_atom(FullNameBin, utf8).
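%% Editor's note: an illustrative sketch, not part of this commit. A printable
%% trace name maps directly onto an atom handler id; non-printable names fall
%% back to the md5 hex digest via handler_id/2 above:
%%   do_handler_id(<<"conn1">>, clientid) -> 'trace_clientid_conn1'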
ensure_bin(List) when is_list(List) -> iolist_to_binary(List);
ensure_bin(Bin) when is_binary(Bin) -> Bin.
ensure_list(Bin) when is_binary(Bin) -> binary_to_list(Bin);
ensure_list(List) when is_list(List) -> List.
show_prompts(ok, Who, Msg) ->
?LOG(info, Msg ++ " ~p " ++ "successfully~n", [Who]);
show_prompts({error, Reason}, Who, Msg) ->
?LOG(error, Msg ++ " ~p " ++ "failed with ~p~n", [Who, Reason]).

View File

@ -1,167 +0,0 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2018-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_tracer).
-include("emqx.hrl").
-include("logger.hrl").
%% APIs
-export([ trace/2
, start_trace/3
, lookup_traces/0
, stop_trace/1
]).
-type(trace_who() :: {clientid | topic, binary()}).
-define(TRACER, ?MODULE).
-define(FORMAT, {logger_formatter,
#{template =>
[time, " [", level, "] ",
{clientid,
[{peername,
[clientid, "@", peername, " "],
[clientid, " "]}],
[{peername,
[peername, " "],
[]}]},
msg, "\n"],
single_line => false
}}).
-define(TOPIC_TRACE_ID(T), "trace_topic_"++T).
-define(CLIENT_TRACE_ID(C), "trace_clientid_"++C).
-define(TOPIC_TRACE(T), {topic, T}).
-define(CLIENT_TRACE(C), {clientid, C}).
-define(IS_LOG_LEVEL(L),
L =:= emergency orelse
L =:= alert orelse
L =:= critical orelse
L =:= error orelse
L =:= warning orelse
L =:= notice orelse
L =:= info orelse
L =:= debug).
-dialyzer({nowarn_function, [install_trace_handler/3]}).
%%------------------------------------------------------------------------------
%% APIs
%%------------------------------------------------------------------------------
trace(publish, #message{topic = <<"$SYS/", _/binary>>}) ->
%% Do not trace '$SYS' publish
ignore;
trace(publish, #message{from = From, topic = Topic, payload = Payload})
when is_binary(From); is_atom(From) ->
emqx_logger:info(#{topic => Topic,
mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY} },
"PUBLISH to ~ts: ~0p", [Topic, Payload]).
%% @doc Start to trace clientid or topic.
-spec(start_trace(trace_who(), logger:level() | all, string()) -> ok | {error, term()}).
start_trace(Who, all, LogFile) ->
start_trace(Who, debug, LogFile);
start_trace(Who, Level, LogFile) ->
case ?IS_LOG_LEVEL(Level) of
true ->
#{level := PrimaryLevel} = logger:get_primary_config(),
try logger:compare_levels(Level, PrimaryLevel) of
lt ->
{error,
io_lib:format("Cannot trace at a log level (~ts) "
"lower than the primary log level (~ts)",
[Level, PrimaryLevel])};
_GtOrEq ->
install_trace_handler(Who, Level, LogFile)
catch
_:Error ->
{error, Error}
end;
false -> {error, {invalid_log_level, Level}}
end.
%% @doc Stop tracing clientid or topic.
-spec(stop_trace(trace_who()) -> ok | {error, term()}).
stop_trace(Who) ->
uninstall_trance_handler(Who).
%% @doc Lookup all traces
-spec(lookup_traces() -> [{Who :: trace_who(), LogFile :: string()}]).
lookup_traces() ->
lists:foldl(fun filter_traces/2, [], emqx_logger:get_log_handlers(started)).
install_trace_handler(Who, Level, LogFile) ->
case logger:add_handler(handler_id(Who), logger_disk_log_h,
#{level => Level,
formatter => ?FORMAT,
config => #{type => halt, file => LogFile},
filter_default => stop,
filters => [{meta_key_filter,
{fun filter_by_meta_key/2, Who}}]})
of
ok ->
?SLOG(info, #{msg => "start_trace", who => Who});
{error, Reason} ->
?SLOG(error, #{msg => "failed_to_trace", who => Who, reason => Reason}),
{error, Reason}
end.
uninstall_trance_handler(Who) ->
case logger:remove_handler(handler_id(Who)) of
ok ->
?SLOG(info, #{msg => "stop_trace", who => Who});
{error, Reason} ->
?SLOG(error, #{msg => "failed_to_stop_trace", who => Who, reason => Reason}),
{error, Reason}
end.
filter_traces(#{id := Id, level := Level, dst := Dst}, Acc) ->
case atom_to_list(Id) of
?TOPIC_TRACE_ID(T)->
[{?TOPIC_TRACE(T), {Level, Dst}} | Acc];
?CLIENT_TRACE_ID(C) ->
[{?CLIENT_TRACE(C), {Level, Dst}} | Acc];
_ -> Acc
end.
handler_id(?TOPIC_TRACE(Topic)) ->
list_to_atom(?TOPIC_TRACE_ID(handler_name(Topic)));
handler_id(?CLIENT_TRACE(ClientId)) ->
list_to_atom(?CLIENT_TRACE_ID(handler_name(ClientId))).
filter_by_meta_key(#{meta := Meta} = Log, {Key, Value}) ->
case is_meta_match(Key, Value, Meta) of
true -> Log;
false -> ignore
end.
is_meta_match(clientid, ClientId, #{clientid := ClientIdStr}) ->
ClientId =:= iolist_to_binary(ClientIdStr);
is_meta_match(topic, TopicFilter, #{topic := TopicMeta}) ->
emqx_topic:match(TopicMeta, TopicFilter);
is_meta_match(_, _, _) ->
false.
handler_name(Bin) ->
case byte_size(Bin) of
Size when Size =< 200 -> binary_to_list(Bin);
_ -> hashstr(Bin)
end.
hashstr(Bin) ->
binary_to_list(emqx_misc:bin2hexstr_A_F(Bin)).

View File

@ -62,12 +62,23 @@ handle_info({timeout, _Timer, check}, State) ->
ProcessCount = erlang:system_info(process_count), ProcessCount = erlang:system_info(process_count),
case ProcessCount / erlang:system_info(process_limit) of case ProcessCount / erlang:system_info(process_limit) of
Percent when Percent >= ProcHighWatermark -> Percent when Percent >= ProcHighWatermark ->
emqx_alarm:activate(too_many_processes, #{ Usage = io_lib:format("~p%", [Percent*100]),
usage => io_lib:format("~p%", [Percent*100]), Message = [Usage, " process usage"],
high_watermark => ProcHighWatermark, emqx_alarm:activate(too_many_processes,
low_watermark => ProcLowWatermark}); #{
usage => Usage,
high_watermark => ProcHighWatermark,
low_watermark => ProcLowWatermark},
Message);
Percent when Percent < ProcLowWatermark -> Percent when Percent < ProcLowWatermark ->
emqx_alarm:deactivate(too_many_processes); Usage = io_lib:format("~p%", [Percent*100]),
Message = [Usage, " process usage"],
emqx_alarm:deactivate(too_many_processes,
#{
usage => Usage,
high_watermark => ProcHighWatermark,
low_watermark => ProcLowWatermark},
Message);
_Precent -> _Precent ->
ok ok
end, end,

View File

@ -32,16 +32,12 @@ init_per_testcase(t_size_limit, Config) ->
<<"size_limit">> => 2 <<"size_limit">> => 2
}), }),
Config; Config;
init_per_testcase(t_validity_period, Config) -> init_per_testcase(_, Config) ->
emqx_common_test_helpers:boot_modules(all), emqx_common_test_helpers:boot_modules(all),
emqx_common_test_helpers:start_apps([]), emqx_common_test_helpers:start_apps([]),
{ok, _} = emqx:update_config([alarm], #{ {ok, _} = emqx:update_config([alarm], #{
<<"validity_period">> => <<"1s">> <<"validity_period">> => <<"1s">>
}), }),
Config;
init_per_testcase(_, Config) ->
emqx_common_test_helpers:boot_modules(all),
emqx_common_test_helpers:start_apps([]),
Config. Config.
end_per_testcase(_, _Config) -> end_per_testcase(_, _Config) ->
@ -86,17 +82,77 @@ t_size_limit(_) ->
?assertEqual({error, not_found}, get_alarm(a, emqx_alarm:get_alarms(deactivated))), ?assertEqual({error, not_found}, get_alarm(a, emqx_alarm:get_alarms(deactivated))),
emqx_alarm:delete_all_deactivated_alarms(). emqx_alarm:delete_all_deactivated_alarms().
t_validity_period(_) -> t_validity_period(_Config) ->
ok = emqx_alarm:activate(a), ok = emqx_alarm:activate(a, #{msg => "Request frequency is too high"}, <<"Reach Rate Limit">>),
ok = emqx_alarm:deactivate(a), ok = emqx_alarm:deactivate(a, #{msg => "Request frequency returns to normal"}),
?assertNotEqual({error, not_found}, get_alarm(a, emqx_alarm:get_alarms(deactivated))), ?assertNotEqual({error, not_found}, get_alarm(a, emqx_alarm:get_alarms(deactivated))),
%% call with unknown msg
?assertEqual(ignored, gen_server:call(emqx_alarm, unknown_alarm)),
ct:sleep(3000), ct:sleep(3000),
?assertEqual({error, not_found}, get_alarm(a, emqx_alarm:get_alarms(deactivated))). ?assertEqual({error, not_found}, get_alarm(a, emqx_alarm:get_alarms(deactivated))).
t_validity_period_1(_Config) ->
ok = emqx_alarm:activate(a, #{msg => "Request frequency is too high"}, <<"Reach Rate Limit">>),
ok = emqx_alarm:deactivate(a, #{msg => "Request frequency returns to normal"}),
?assertNotEqual({error, not_found}, get_alarm(a, emqx_alarm:get_alarms(deactivated))),
%% info with unknown msg
erlang:send(emqx_alarm, unknown_alarm),
ct:sleep(3000),
?assertEqual({error, not_found}, get_alarm(a, emqx_alarm:get_alarms(deactivated))).
t_validity_period_2(_Config) ->
ok = emqx_alarm:activate(a, #{msg => "Request frequency is too high"}, <<"Reach Rate Limit">>),
ok = emqx_alarm:deactivate(a, #{msg => "Request frequency returns to normal"}),
?assertNotEqual({error, not_found}, get_alarm(a, emqx_alarm:get_alarms(deactivated))),
%% cast with unknown msg
gen_server:cast(emqx_alarm, unknown_alarm),
ct:sleep(3000),
?assertEqual({error, not_found}, get_alarm(a, emqx_alarm:get_alarms(deactivated))).
-record(activated_alarm, {
name :: binary() | atom(),
details :: map() | list(),
message :: binary(),
activate_at :: integer()
}).
-record(deactivated_alarm, {
activate_at :: integer(),
name :: binary() | atom(),
details :: map() | list(),
message :: binary(),
deactivate_at :: integer() | infinity
}).
t_format(_Config) ->
Name = test_alarm,
Message = "test_msg",
At = erlang:system_time(microsecond),
Details = "test_details",
Node = node(),
Activate = #activated_alarm{name = Name, message = Message, activate_at = At, details = Details},
#{
node := Node,
name := Name,
message := Message,
duration := 0,
details := Details
} = emqx_alarm:format(Activate),
Deactivate = #deactivated_alarm{name = Name, message = Message, activate_at = At, details = Details,
deactivate_at = At},
#{
node := Node,
name := Name,
message := Message,
duration := 0,
details := Details
} = emqx_alarm:format(Deactivate),
ok.
get_alarm(Name, [Alarm = #{name := Name} | _More]) -> get_alarm(Name, [Alarm = #{name := Name} | _More]) ->
Alarm; Alarm;
get_alarm(Name, [_Alarm | More]) -> get_alarm(Name, [_Alarm | More]) ->
get_alarm(Name, More); get_alarm(Name, More);
get_alarm(_Name, []) -> get_alarm(_Name, []) ->
{error, not_found}. {error, not_found}.

View File

@ -28,7 +28,7 @@
-export([ roots/0, fields/1 ]). -export([ roots/0, fields/1 ]).
-export([ create/1 -export([ create/2
, update/2 , update/2
, authenticate/2 , authenticate/2
, destroy/1 , destroy/1
@ -70,7 +70,7 @@ check_config(C) ->
#{atom_key => true}), #{atom_key => true}),
R. R.
create(_Config) -> create(_AuthenticatorID, _Config) ->
{ok, #{mark => 1}}. {ok, #{mark => 1}}.
update(_Config, _State) -> update(_Config, _State) ->
@ -103,22 +103,28 @@ end_per_testcase(Case, Config) ->
_ = ?MODULE:Case({'end', Config}), _ = ?MODULE:Case({'end', Config}),
ok. ok.
t_chain({_, Config}) -> Config; t_chain({_, Config}) -> Config;
t_chain(Config) when is_list(Config) -> t_chain(Config) when is_list(Config) ->
% CRUD of authentication chain % CRUD of authentication chain
ChainName = 'test', ChainName = 'test',
?assertMatch({ok, []}, ?AUTHN:list_chains()), ?assertMatch({ok, []}, ?AUTHN:list_chains()),
?assertMatch({ok, []}, ?AUTHN:list_chain_names()),
?assertMatch({ok, #{name := ChainName, authenticators := []}}, ?AUTHN:create_chain(ChainName)), ?assertMatch({ok, #{name := ChainName, authenticators := []}}, ?AUTHN:create_chain(ChainName)),
?assertEqual({error, {already_exists, {chain, ChainName}}}, ?AUTHN:create_chain(ChainName)), ?assertEqual({error, {already_exists, {chain, ChainName}}}, ?AUTHN:create_chain(ChainName)),
?assertMatch({ok, #{name := ChainName, authenticators := []}}, ?AUTHN:lookup_chain(ChainName)), ?assertMatch({ok, #{name := ChainName, authenticators := []}}, ?AUTHN:lookup_chain(ChainName)),
?assertMatch({ok, [#{name := ChainName}]}, ?AUTHN:list_chains()), ?assertMatch({ok, [#{name := ChainName}]}, ?AUTHN:list_chains()),
?assertEqual({ok, [ChainName]}, ?AUTHN:list_chain_names()),
?assertEqual(ok, ?AUTHN:delete_chain(ChainName)), ?assertEqual(ok, ?AUTHN:delete_chain(ChainName)),
?assertMatch({error, {not_found, {chain, ChainName}}}, ?AUTHN:lookup_chain(ChainName)), ?assertMatch({error, {not_found, {chain, ChainName}}}, ?AUTHN:lookup_chain(ChainName)),
ok. ok.
t_authenticator({'init', Config}) -> t_authenticator({'init', Config}) ->
[{"auth1", {'password-based', 'built-in-database'}}, [{"auth1", {'password-based', 'built-in-database'}},
{"auth2", {'password-based', mysql}} | Config]; {"auth2", {'password-based', mysql}} | Config];
t_authenticator(Config) when is_list(Config) -> t_authenticator(Config) when is_list(Config) ->
ChainName = 'test', ChainName = 'test',
AuthenticatorConfig1 = #{mechanism => 'password-based', AuthenticatorConfig1 = #{mechanism => 'password-based',
@ -126,23 +132,43 @@ t_authenticator(Config) when is_list(Config) ->
enable => true}, enable => true},
% Create an authenticator when the authentication chain does not exist % Create an authenticator when the authentication chain does not exist
?assertEqual({error, {not_found, {chain, ChainName}}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)), ?assertEqual(
{error, {not_found, {chain, ChainName}}},
?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
?AUTHN:create_chain(ChainName), ?AUTHN:create_chain(ChainName),
% Create an authenticator when the provider does not exist % Create an authenticator when the provider does not exist
?assertEqual({error, no_available_provider}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
?assertEqual(
{error, no_available_provider},
?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
AuthNType1 = ?config("auth1"), AuthNType1 = ?config("auth1"),
register_provider(AuthNType1, ?MODULE), register_provider(AuthNType1, ?MODULE),
ID1 = <<"password-based:built-in-database">>, ID1 = <<"password-based:built-in-database">>,
% CRUD of authenticator % CRUD of authenticator
?assertMatch({ok, #{id := ID1, state := #{mark := 1}}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)), ?assertMatch(
{ok, #{id := ID1, state := #{mark := 1}}},
?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
?assertMatch({ok, #{id := ID1}}, ?AUTHN:lookup_authenticator(ChainName, ID1)), ?assertMatch({ok, #{id := ID1}}, ?AUTHN:lookup_authenticator(ChainName, ID1)),
?assertMatch({ok, [#{id := ID1}]}, ?AUTHN:list_authenticators(ChainName)), ?assertMatch({ok, [#{id := ID1}]}, ?AUTHN:list_authenticators(ChainName)),
?assertEqual({error, {already_exists, {authenticator, ID1}}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
?assertMatch({ok, #{id := ID1, state := #{mark := 2}}}, ?AUTHN:update_authenticator(ChainName, ID1, AuthenticatorConfig1)), ?assertEqual(
{error, {already_exists, {authenticator, ID1}}},
?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
?assertMatch(
{ok, #{id := ID1, state := #{mark := 2}}},
?AUTHN:update_authenticator(ChainName, ID1, AuthenticatorConfig1)),
?assertEqual(ok, ?AUTHN:delete_authenticator(ChainName, ID1)), ?assertEqual(ok, ?AUTHN:delete_authenticator(ChainName, ID1)),
?assertEqual({error, {not_found, {authenticator, ID1}}}, ?AUTHN:update_authenticator(ChainName, ID1, AuthenticatorConfig1)),
?assertEqual(
{error, {not_found, {authenticator, ID1}}},
?AUTHN:update_authenticator(ChainName, ID1, AuthenticatorConfig1)),
?assertMatch({ok, []}, ?AUTHN:list_authenticators(ChainName)), ?assertMatch({ok, []}, ?AUTHN:list_authenticators(ChainName)),
% Multiple authenticators exist at the same time % Multiple authenticators exist at the same time
@ -152,25 +178,37 @@ t_authenticator(Config) when is_list(Config) ->
AuthenticatorConfig2 = #{mechanism => 'password-based', AuthenticatorConfig2 = #{mechanism => 'password-based',
backend => mysql, backend => mysql,
enable => true}, enable => true},
?assertMatch({ok, #{id := ID1}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
?assertMatch({ok, #{id := ID2}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig2)), ?assertMatch(
{ok, #{id := ID1}},
?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
?assertMatch(
{ok, #{id := ID2}},
?AUTHN:create_authenticator(ChainName, AuthenticatorConfig2)),
% Move authenticator % Move authenticator
?assertMatch({ok, [#{id := ID1}, #{id := ID2}]}, ?AUTHN:list_authenticators(ChainName)), ?assertMatch({ok, [#{id := ID1}, #{id := ID2}]}, ?AUTHN:list_authenticators(ChainName)),
?assertEqual(ok, ?AUTHN:move_authenticator(ChainName, ID2, top)), ?assertEqual(ok, ?AUTHN:move_authenticator(ChainName, ID2, top)),
?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(ChainName)), ?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(ChainName)),
?assertEqual(ok, ?AUTHN:move_authenticator(ChainName, ID2, bottom)), ?assertEqual(ok, ?AUTHN:move_authenticator(ChainName, ID2, bottom)),
?assertMatch({ok, [#{id := ID1}, #{id := ID2}]}, ?AUTHN:list_authenticators(ChainName)), ?assertMatch({ok, [#{id := ID1}, #{id := ID2}]}, ?AUTHN:list_authenticators(ChainName)),
?assertEqual(ok, ?AUTHN:move_authenticator(ChainName, ID2, {before, ID1})), ?assertEqual(ok, ?AUTHN:move_authenticator(ChainName, ID2, {before, ID1})),
?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(ChainName)); ?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(ChainName));
t_authenticator({'end', Config}) -> t_authenticator({'end', Config}) ->
?AUTHN:delete_chain(test), ?AUTHN:delete_chain(test),
?AUTHN:deregister_providers([?config("auth1"), ?config("auth2")]), ?AUTHN:deregister_providers([?config("auth1"), ?config("auth2")]),
ok. ok.
t_authenticate({init, Config}) -> t_authenticate({init, Config}) ->
[{listener_id, 'tcp:default'}, [{listener_id, 'tcp:default'},
{authn_type, {'password-based', 'built-in-database'}} | Config]; {authn_type, {'password-based', 'built-in-database'}} | Config];
t_authenticate(Config) when is_list(Config) -> t_authenticate(Config) when is_list(Config) ->
ListenerID = ?config(listener_id), ListenerID = ?config(listener_id),
AuthNType = ?config(authn_type), AuthNType = ?config(authn_type),
@ -188,13 +226,21 @@ t_authenticate(Config) when is_list(Config) ->
enable => true}, enable => true},
?AUTHN:create_chain(ListenerID), ?AUTHN:create_chain(ListenerID),
?assertMatch({ok, _}, ?AUTHN:create_authenticator(ListenerID, AuthenticatorConfig)), ?assertMatch({ok, _}, ?AUTHN:create_authenticator(ListenerID, AuthenticatorConfig)),
?assertEqual({ok, #{is_superuser => true}}, emqx_access_control:authenticate(ClientInfo)),
?assertEqual({error, bad_username_or_password}, emqx_access_control:authenticate(ClientInfo#{username => <<"bad">>})); ?assertEqual(
{ok, #{is_superuser => true}},
emqx_access_control:authenticate(ClientInfo)),
?assertEqual(
{error, bad_username_or_password},
emqx_access_control:authenticate(ClientInfo#{username => <<"bad">>}));
t_authenticate({'end', Config}) -> t_authenticate({'end', Config}) ->
?AUTHN:delete_chain(?config(listener_id)), ?AUTHN:delete_chain(?config(listener_id)),
?AUTHN:deregister_provider(?config(authn_type)), ?AUTHN:deregister_provider(?config(authn_type)),
ok. ok.
t_update_config({init, Config}) -> t_update_config({init, Config}) ->
Global = 'mqtt:global', Global = 'mqtt:global',
AuthNType1 = {'password-based', 'built-in-database'}, AuthNType1 = {'password-based', 'built-in-database'},
@ -202,6 +248,7 @@ t_update_config({init, Config}) ->
[{global, Global}, [{global, Global},
{"auth1", AuthNType1}, {"auth1", AuthNType1},
{"auth2", AuthNType2} | Config]; {"auth2", AuthNType2} | Config];
t_update_config(Config) when is_list(Config) -> t_update_config(Config) when is_list(Config) ->
emqx_config_handler:add_handler([authentication], emqx_authentication), emqx_config_handler:add_handler([authentication], emqx_authentication),
ok = register_provider(?config("auth1"), ?MODULE), ok = register_provider(?config("auth1"), ?MODULE),
@ -217,46 +264,131 @@ t_update_config(Config) when is_list(Config) ->
ID2 = <<"password-based:mysql">>, ID2 = <<"password-based:mysql">>,
?assertMatch({ok, []}, ?AUTHN:list_chains()), ?assertMatch({ok, []}, ?AUTHN:list_chains()),
?assertMatch({ok, _}, update_config([authentication], {create_authenticator, Global, AuthenticatorConfig1})),
?assertMatch({ok, #{id := ID1, state := #{mark := 1}}}, ?AUTHN:lookup_authenticator(Global, ID1)),
?assertMatch({ok, _}, update_config([authentication], {create_authenticator, Global, AuthenticatorConfig2})), ?assertMatch(
?assertMatch({ok, #{id := ID2, state := #{mark := 1}}}, ?AUTHN:lookup_authenticator(Global, ID2)), {ok, _},
update_config([authentication], {create_authenticator, Global, AuthenticatorConfig1})),
?assertMatch({ok, _}, update_config([authentication], {update_authenticator, Global, ID1, AuthenticatorConfig1#{<<"enable">> => false}})), ?assertMatch(
?assertMatch({ok, #{id := ID1, state := #{mark := 2}}}, ?AUTHN:lookup_authenticator(Global, ID1)), {ok, #{id := ID1, state := #{mark := 1}}},
?AUTHN:lookup_authenticator(Global, ID1)),
?assertMatch(
{ok, _},
update_config([authentication], {create_authenticator, Global, AuthenticatorConfig2})),
?assertMatch(
{ok, #{id := ID2, state := #{mark := 1}}},
?AUTHN:lookup_authenticator(Global, ID2)),
?assertMatch(
{ok, _},
update_config([authentication],
{update_authenticator,
Global,
ID1,
AuthenticatorConfig1#{<<"enable">> => false}
})),
?assertMatch(
{ok, #{id := ID1, state := #{mark := 2}}},
?AUTHN:lookup_authenticator(Global, ID1)),
?assertMatch(
{ok, _},
update_config([authentication], {move_authenticator, Global, ID2, top})),
?assertMatch({ok, _}, update_config([authentication], {move_authenticator, Global, ID2, top})),
?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(Global)), ?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(Global)),
?assertMatch({ok, _}, update_config([authentication], {delete_authenticator, Global, ID1})), ?assertMatch({ok, _}, update_config([authentication], {delete_authenticator, Global, ID1})),
?assertEqual({error, {not_found, {authenticator, ID1}}}, ?AUTHN:lookup_authenticator(Global, ID1)), ?assertEqual(
{error, {not_found, {authenticator, ID1}}},
?AUTHN:lookup_authenticator(Global, ID1)),
?assertMatch({ok, _}, update_config([authentication], {delete_authenticator, Global, ID2})), ?assertMatch(
?assertEqual({error, {not_found, {authenticator, ID2}}}, ?AUTHN:lookup_authenticator(Global, ID2)), {ok, _},
update_config([authentication], {delete_authenticator, Global, ID2})),
?assertEqual(
{error, {not_found, {authenticator, ID2}}},
?AUTHN:lookup_authenticator(Global, ID2)),
ListenerID = 'tcp:default', ListenerID = 'tcp:default',
ConfKeyPath = [listeners, tcp, default, authentication], ConfKeyPath = [listeners, tcp, default, authentication],
?assertMatch({ok, _}, update_config(ConfKeyPath, {create_authenticator, ListenerID, AuthenticatorConfig1})),
?assertMatch({ok, #{id := ID1, state := #{mark := 1}}}, ?AUTHN:lookup_authenticator(ListenerID, ID1)),
?assertMatch({ok, _}, update_config(ConfKeyPath, {create_authenticator, ListenerID, AuthenticatorConfig2})), ?assertMatch(
?assertMatch({ok, #{id := ID2, state := #{mark := 1}}}, ?AUTHN:lookup_authenticator(ListenerID, ID2)), {ok, _},
update_config(ConfKeyPath,
{create_authenticator, ListenerID, AuthenticatorConfig1})),
?assertMatch({ok, _}, update_config(ConfKeyPath, {update_authenticator, ListenerID, ID1, AuthenticatorConfig1#{<<"enable">> => false}})), ?assertMatch(
?assertMatch({ok, #{id := ID1, state := #{mark := 2}}}, ?AUTHN:lookup_authenticator(ListenerID, ID1)), {ok, #{id := ID1, state := #{mark := 1}}},
?AUTHN:lookup_authenticator(ListenerID, ID1)),
?assertMatch({ok, _}, update_config(ConfKeyPath, {move_authenticator, ListenerID, ID2, top})), ?assertMatch(
?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(ListenerID)), {ok, _},
update_config(ConfKeyPath,
{create_authenticator, ListenerID, AuthenticatorConfig2})),
?assertMatch(
{ok, #{id := ID2, state := #{mark := 1}}},
?AUTHN:lookup_authenticator(ListenerID, ID2)),
?assertMatch(
{ok, _},
update_config(ConfKeyPath,
{update_authenticator,
ListenerID,
ID1,
AuthenticatorConfig1#{<<"enable">> => false}
})),
?assertMatch(
{ok, #{id := ID1, state := #{mark := 2}}},
?AUTHN:lookup_authenticator(ListenerID, ID1)),
?assertMatch(
{ok, _},
update_config(ConfKeyPath, {move_authenticator, ListenerID, ID2, top})),
?assertMatch(
{ok, [#{id := ID2}, #{id := ID1}]},
?AUTHN:list_authenticators(ListenerID)),
?assertMatch(
{ok, _},
update_config(ConfKeyPath, {delete_authenticator, ListenerID, ID1})),
?assertEqual(
{error, {not_found, {authenticator, ID1}}},
?AUTHN:lookup_authenticator(ListenerID, ID1));
?assertMatch({ok, _}, update_config(ConfKeyPath, {delete_authenticator, ListenerID, ID1})),
?assertEqual({error, {not_found, {authenticator, ID1}}}, ?AUTHN:lookup_authenticator(ListenerID, ID1));
t_update_config({'end', Config}) -> t_update_config({'end', Config}) ->
?AUTHN:delete_chain(?config(global)), ?AUTHN:delete_chain(?config(global)),
?AUTHN:deregister_providers([?config("auth1"), ?config("auth2")]), ?AUTHN:deregister_providers([?config("auth1"), ?config("auth2")]),
ok. ok.
t_restart({'init', Config}) -> Config;
t_restart(Config) when is_list(Config) ->
?assertEqual({ok, []}, ?AUTHN:list_chain_names()),
?AUTHN:create_chain(test_chain),
?assertEqual({ok, [test_chain]}, ?AUTHN:list_chain_names()),
ok = supervisor:terminate_child(emqx_authentication_sup, ?AUTHN),
{ok, _} = supervisor:restart_child(emqx_authentication_sup, ?AUTHN),
?assertEqual({ok, [test_chain]}, ?AUTHN:list_chain_names());
t_restart({'end', _Config}) ->
?AUTHN:delete_chain(test_chain),
ok.
t_convert_certs({_, Config}) -> Config; t_convert_certs({_, Config}) -> Config;
t_convert_certs(Config) when is_list(Config) -> t_convert_certs(Config) when is_list(Config) ->
Global = <<"mqtt:global">>, Global = <<"mqtt:global">>,
Certs = certs([ {<<"keyfile">>, "key.pem"} Certs = certs([ {<<"keyfile">>, "key.pem"}
@ -270,7 +402,11 @@ t_convert_certs(Config) when is_list(Config) ->
Certs2 = certs([ {<<"keyfile">>, "key.pem"} Certs2 = certs([ {<<"keyfile">>, "key.pem"}
, {<<"certfile">>, "cert.pem"} , {<<"certfile">>, "cert.pem"}
]), ]),
#{<<"ssl">> := NCerts2} = convert_certs(CertsDir, #{<<"ssl">> => Certs2}, #{<<"ssl">> => NCerts}),
#{<<"ssl">> := NCerts2} = convert_certs(
CertsDir,
#{<<"ssl">> => Certs2}, #{<<"ssl">> => NCerts}),
?assertEqual(maps:get(<<"keyfile">>, NCerts), maps:get(<<"keyfile">>, NCerts2)), ?assertEqual(maps:get(<<"keyfile">>, NCerts), maps:get(<<"keyfile">>, NCerts2)),
?assertEqual(maps:get(<<"certfile">>, NCerts), maps:get(<<"certfile">>, NCerts2)), ?assertEqual(maps:get(<<"certfile">>, NCerts), maps:get(<<"certfile">>, NCerts2)),
@ -278,7 +414,11 @@ t_convert_certs(Config) when is_list(Config) ->
, {<<"certfile">>, "client-cert.pem"} , {<<"certfile">>, "client-cert.pem"}
, {<<"cacertfile">>, "cacert.pem"} , {<<"cacertfile">>, "cacert.pem"}
]), ]),
#{<<"ssl">> := NCerts3} = convert_certs(CertsDir, #{<<"ssl">> => Certs3}, #{<<"ssl">> => NCerts2}),
#{<<"ssl">> := NCerts3} = convert_certs(
CertsDir,
#{<<"ssl">> => Certs3}, #{<<"ssl">> => NCerts2}),
?assertNotEqual(maps:get(<<"keyfile">>, NCerts2), maps:get(<<"keyfile">>, NCerts3)), ?assertNotEqual(maps:get(<<"keyfile">>, NCerts2), maps:get(<<"keyfile">>, NCerts3)),
?assertNotEqual(maps:get(<<"certfile">>, NCerts2), maps:get(<<"certfile">>, NCerts3)), ?assertNotEqual(maps:get(<<"certfile">>, NCerts2), maps:get(<<"certfile">>, NCerts3)),

View File

@ -41,16 +41,16 @@ t_add_delete(_) ->
at = erlang:system_time(second), at = erlang:system_time(second),
until = erlang:system_time(second) + 1000 until = erlang:system_time(second) + 1000
}, },
ok = emqx_banned:create(Banned), {ok, _} = emqx_banned:create(Banned),
?assertEqual(1, emqx_banned:info(size)), ?assertEqual(1, emqx_banned:info(size)),
ok = emqx_banned:delete({clientid, <<"TestClient">>}), ok = emqx_banned:delete({clientid, <<"TestClient">>}),
?assertEqual(0, emqx_banned:info(size)). ?assertEqual(0, emqx_banned:info(size)).
t_check(_) -> t_check(_) ->
ok = emqx_banned:create(#banned{who = {clientid, <<"BannedClient">>}}), {ok, _} = emqx_banned:create(#banned{who = {clientid, <<"BannedClient">>}}),
ok = emqx_banned:create(#banned{who = {username, <<"BannedUser">>}}), {ok, _} = emqx_banned:create(#banned{who = {username, <<"BannedUser">>}}),
ok = emqx_banned:create(#banned{who = {peerhost, {192,168,0,1}}}), {ok, _} = emqx_banned:create(#banned{who = {peerhost, {192,168,0,1}}}),
?assertEqual(3, emqx_banned:info(size)), ?assertEqual(3, emqx_banned:info(size)),
ClientInfo1 = #{clientid => <<"BannedClient">>, ClientInfo1 = #{clientid => <<"BannedClient">>,
username => <<"user">>, username => <<"user">>,
@ -83,7 +83,7 @@ t_check(_) ->
t_unused(_) -> t_unused(_) ->
{ok, Banned} = emqx_banned:start_link(), {ok, Banned} = emqx_banned:start_link(),
ok = emqx_banned:create(#banned{who = {clientid, <<"BannedClient">>}, {ok, _} = emqx_banned:create(#banned{who = {clientid, <<"BannedClient">>},
until = erlang:system_time(second)}), until = erlang:system_time(second)}),
?assertEqual(ignored, gen_server:call(Banned, unexpected_req)), ?assertEqual(ignored, gen_server:call(Banned, unexpected_req)),
?assertEqual(ok, gen_server:cast(Banned, unexpected_msg)), ?assertEqual(ok, gen_server:cast(Banned, unexpected_msg)),

View File

@ -23,20 +23,71 @@
-include_lib("eunit/include/eunit.hrl"). -include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl"). -include_lib("common_test/include/ct.hrl").
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
-include_lib("emqx/include/emqx.hrl"). -include_lib("emqx/include/emqx.hrl").
-include_lib("emqx/include/emqx_mqtt.hrl"). -include_lib("emqx/include/emqx_mqtt.hrl").
all() -> emqx_common_test_helpers:all(?MODULE). all() ->
[ {group, all_cases}
, {group, connected_client_count_group}
].
init_per_suite(Config) -> groups() ->
TCs = emqx_common_test_helpers:all(?MODULE),
ConnClientTCs = [ t_connected_client_count_persistent
, t_connected_client_count_anonymous
, t_connected_client_stats
],
OtherTCs = TCs -- ConnClientTCs,
[ {all_cases, [], OtherTCs}
, {connected_client_count_group, [ {group, tcp}
, {group, ws}
, {group, quic}
]}
, {tcp, [], ConnClientTCs}
, {ws, [], ConnClientTCs}
, {quic, [], ConnClientTCs}
].
init_per_group(connected_client_count_group, Config) ->
Config;
init_per_group(tcp, Config) ->
emqx_common_test_helpers:boot_modules(all),
emqx_common_test_helpers:start_apps([]),
[{conn_fun, connect} | Config];
init_per_group(ws, Config) ->
emqx_common_test_helpers:boot_modules(all),
emqx_common_test_helpers:start_apps([]),
[ {ssl, false}
, {enable_websocket, true}
, {conn_fun, ws_connect}
, {port, 8083}
, {host, "localhost"}
| Config
];
init_per_group(quic, Config) ->
emqx_common_test_helpers:boot_modules(all),
emqx_common_test_helpers:start_apps([]),
[ {conn_fun, quic_connect}
, {port, 14567}
| Config];
init_per_group(_Group, Config) ->
emqx_common_test_helpers:boot_modules(all), emqx_common_test_helpers:boot_modules(all),
emqx_common_test_helpers:start_apps([]), emqx_common_test_helpers:start_apps([]),
Config. Config.
end_per_suite(_Config) -> end_per_group(connected_client_count_group, _Config) ->
ok;
end_per_group(_Group, _Config) ->
emqx_common_test_helpers:stop_apps([]). emqx_common_test_helpers:stop_apps([]).
init_per_suite(Config) ->
Config.
end_per_suite(_Config) ->
ok.
init_per_testcase(Case, Config) -> init_per_testcase(Case, Config) ->
?MODULE:Case({init, Config}). ?MODULE:Case({init, Config}).
@ -277,6 +328,240 @@ t_shard({'end', _Config}) ->
emqx_broker:unsubscribe(<<"topic">>), emqx_broker:unsubscribe(<<"topic">>),
ok = meck:unload(emqx_broker_helper). ok = meck:unload(emqx_broker_helper).
%% persistent sessions, when gone, do not contribute to connected
%% client count
t_connected_client_count_persistent({init, Config}) ->
ok = snabbkaffe:start_trace(),
process_flag(trap_exit, true),
Config;
t_connected_client_count_persistent(Config) when is_list(Config) ->
ConnFun = ?config(conn_fun, Config),
ClientID = <<"clientid">>,
?assertEqual(0, emqx_cm:get_connected_client_count()),
{ok, ConnPid0} = emqtt:start_link([ {clean_start, false}
, {clientid, ClientID}
| Config]),
{{ok, _}, {ok, [_]}} = wait_for_events(
fun() -> emqtt:ConnFun(ConnPid0) end,
[emqx_cm_connected_client_count_inc]
),
timer:sleep(10),
?assertEqual(1, emqx_cm:get_connected_client_count()),
{ok, {ok, [_]}} = wait_for_events(
fun() -> emqtt:disconnect(ConnPid0) end,
[emqx_cm_connected_client_count_dec]
),
timer:sleep(10),
?assertEqual(0, emqx_cm:get_connected_client_count()),
%% reconnecting
{ok, ConnPid1} = emqtt:start_link([ {clean_start, false}
, {clientid, ClientID}
| Config
]),
{{ok, _}, {ok, [_]}} = wait_for_events(
fun() -> emqtt:ConnFun(ConnPid1) end,
[emqx_cm_connected_client_count_inc]
),
?assertEqual(1, emqx_cm:get_connected_client_count()),
%% taking over
{ok, ConnPid2} = emqtt:start_link([ {clean_start, false}
, {clientid, ClientID}
| Config
]),
{{ok, _}, {ok, [_, _]}} = wait_for_events(
fun() -> emqtt:ConnFun(ConnPid2) end,
[ emqx_cm_connected_client_count_inc
, emqx_cm_connected_client_count_dec
],
500
),
?assertEqual(1, emqx_cm:get_connected_client_count()),
%% abnormal exit of channel process
ChanPids = emqx_cm:all_channels(),
{ok, {ok, [_, _]}} = wait_for_events(
fun() ->
lists:foreach(
fun(ChanPid) -> exit(ChanPid, kill) end,
ChanPids)
end,
[ emqx_cm_connected_client_count_dec
, emqx_cm_process_down
]
),
?assertEqual(0, emqx_cm:get_connected_client_count()),
ok;
t_connected_client_count_persistent({'end', _Config}) ->
snabbkaffe:stop(),
ok.
%% connections without client_id also contribute to connected client
%% count
t_connected_client_count_anonymous({init, Config}) ->
ok = snabbkaffe:start_trace(),
process_flag(trap_exit, true),
Config;
t_connected_client_count_anonymous(Config) when is_list(Config) ->
ConnFun = ?config(conn_fun, Config),
?assertEqual(0, emqx_cm:get_connected_client_count()),
%% first client
{ok, ConnPid0} = emqtt:start_link([ {clean_start, true}
| Config]),
{{ok, _}, {ok, [_]}} = wait_for_events(
fun() -> emqtt:ConnFun(ConnPid0) end,
[emqx_cm_connected_client_count_inc]
),
?assertEqual(1, emqx_cm:get_connected_client_count()),
%% second client
{ok, ConnPid1} = emqtt:start_link([ {clean_start, true}
| Config]),
{{ok, _}, {ok, [_]}} = wait_for_events(
fun() -> emqtt:ConnFun(ConnPid1) end,
[emqx_cm_connected_client_count_inc]
),
?assertEqual(2, emqx_cm:get_connected_client_count()),
%% when the first client disconnects, it shouldn't affect the second
{ok, {ok, [_, _]}} = wait_for_events(
fun() -> emqtt:disconnect(ConnPid0) end,
[ emqx_cm_connected_client_count_dec
, emqx_cm_process_down
]
),
?assertEqual(1, emqx_cm:get_connected_client_count()),
%% reconnecting
{ok, ConnPid2} = emqtt:start_link([ {clean_start, true}
| Config
]),
{{ok, _}, {ok, [_]}} = wait_for_events(
fun() -> emqtt:ConnFun(ConnPid2) end,
[emqx_cm_connected_client_count_inc]
),
?assertEqual(2, emqx_cm:get_connected_client_count()),
{ok, {ok, [_, _]}} = wait_for_events(
fun() -> emqtt:disconnect(ConnPid1) end,
[ emqx_cm_connected_client_count_dec
, emqx_cm_process_down
]
),
?assertEqual(1, emqx_cm:get_connected_client_count()),
%% abnormal exit of channel process
Chans = emqx_cm:all_channels(),
{ok, {ok, [_, _]}} = wait_for_events(
fun() ->
lists:foreach(
fun(ChanPid) -> exit(ChanPid, kill) end,
Chans)
end,
[ emqx_cm_connected_client_count_dec
, emqx_cm_process_down
]
),
?assertEqual(0, emqx_cm:get_connected_client_count()),
ok;
t_connected_client_count_anonymous({'end', _Config}) ->
snabbkaffe:stop(),
ok.
t_connected_client_stats({init, Config}) ->
ok = supervisor:terminate_child(emqx_kernel_sup, emqx_stats),
{ok, _} = supervisor:restart_child(emqx_kernel_sup, emqx_stats),
ok = snabbkaffe:start_trace(),
Config;
t_connected_client_stats(Config) when is_list(Config) ->
ConnFun = ?config(conn_fun, Config),
?assertEqual(0, emqx_cm:get_connected_client_count()),
?assertEqual(0, emqx_stats:getstat('live_connections.count')),
?assertEqual(0, emqx_stats:getstat('live_connections.max')),
{ok, ConnPid} = emqtt:start_link([ {clean_start, true}
, {clientid, <<"clientid">>}
| Config
]),
{{ok, _}, {ok, [_]}} = wait_for_events(
fun() -> emqtt:ConnFun(ConnPid) end,
[emqx_cm_connected_client_count_inc]
),
timer:sleep(20),
%% ensure stats are synchronized
{_, {ok, [_]}} = wait_for_stats(
fun emqx_cm:stats_fun/0,
[#{count_stat => 'live_connections.count',
max_stat => 'live_connections.max'}]
),
?assertEqual(1, emqx_stats:getstat('live_connections.count')),
?assertEqual(1, emqx_stats:getstat('live_connections.max')),
{ok, {ok, [_]}} = wait_for_events(
fun() -> emqtt:disconnect(ConnPid) end,
[emqx_cm_connected_client_count_dec]
),
timer:sleep(20),
%% ensure stats are synchronized
{_, {ok, [_]}} = wait_for_stats(
fun emqx_cm:stats_fun/0,
[#{count_stat => 'live_connections.count',
max_stat => 'live_connections.max'}]
),
?assertEqual(0, emqx_stats:getstat('live_connections.count')),
?assertEqual(1, emqx_stats:getstat('live_connections.max')),
ok;
t_connected_client_stats({'end', _Config}) ->
ok = snabbkaffe:stop(),
ok = supervisor:terminate_child(emqx_kernel_sup, emqx_stats),
{ok, _} = supervisor:restart_child(emqx_kernel_sup, emqx_stats),
ok.
%% the count must always be non-negative
t_connect_client_never_negative({init, Config}) ->
Config;
t_connect_client_never_negative(Config) when is_list(Config) ->
?assertEqual(0, emqx_cm:get_connected_client_count()),
%% would go to -1
ChanPid = list_to_pid("<0.0.1>"),
emqx_cm:mark_channel_disconnected(ChanPid),
?assertEqual(0, emqx_cm:get_connected_client_count()),
%% would now be 0 if the count had really gone to -1
emqx_cm:mark_channel_connected(ChanPid),
?assertEqual(1, emqx_cm:get_connected_client_count()),
ok;
t_connect_client_never_negative({'end', _Config}) ->
ok.
wait_for_events(Action, Kinds) ->
wait_for_events(Action, Kinds, 500).
wait_for_events(Action, Kinds, Timeout) ->
Predicate = fun(#{?snk_kind := K}) ->
lists:member(K, Kinds)
end,
N = length(Kinds),
{ok, Sub} = snabbkaffe_collector:subscribe(Predicate, N, Timeout, 0),
Res = Action(),
case snabbkaffe_collector:receive_events(Sub) of
{timeout, _} ->
{Res, timeout};
{ok, Events} ->
{Res, {ok, Events}}
end.
wait_for_stats(Action, Stats) ->
Predicate = fun(Event = #{?snk_kind := emqx_stats_setstat}) ->
Stat = maps:with(
[ count_stat
, max_stat
], Event),
lists:member(Stat, Stats);
(_) ->
false
end,
N = length(Stats),
Timeout = 500,
{ok, Sub} = snabbkaffe_collector:subscribe(Predicate, N, Timeout, 0),
Res = Action(),
case snabbkaffe_collector:receive_events(Sub) of
{timeout, _} ->
{Res, timeout};
{ok, Events} ->
{Res, {ok, Events}}
end.
recv_msgs(Count) -> recv_msgs(Count) ->
recv_msgs(Count, []). recv_msgs(Count, []).

View File

@ -41,7 +41,7 @@ t_lookup_subpid(_) ->
emqx_broker_helper:register_sub(self(), <<"clientid">>), emqx_broker_helper:register_sub(self(), <<"clientid">>),
ct:sleep(10), ct:sleep(10),
?assertEqual(self(), emqx_broker_helper:lookup_subpid(<<"clientid">>)). ?assertEqual(self(), emqx_broker_helper:lookup_subpid(<<"clientid">>)).
t_register_sub(_) -> t_register_sub(_) ->
ok = emqx_broker_helper:register_sub(self(), <<"clientid">>), ok = emqx_broker_helper:register_sub(self(), <<"clientid">>),
ct:sleep(10), ct:sleep(10),
@ -62,7 +62,7 @@ t_shard_seq(_) ->
t_shards_num(_) -> t_shards_num(_) ->
?assertEqual(emqx_vm:schedulers() * 32, emqx_broker_helper:shards_num()). ?assertEqual(emqx_vm:schedulers() * 32, emqx_broker_helper:shards_num()).
t_get_sub_shard(_) -> t_get_sub_shard(_) ->
?assertEqual(0, emqx_broker_helper:get_sub_shard(self(), <<"topic">>)). ?assertEqual(0, emqx_broker_helper:get_sub_shard(self(), <<"topic">>)).
@ -72,4 +72,4 @@ t_terminate(_) ->
t_uncovered_func(_) -> t_uncovered_func(_) ->
gen_server:call(emqx_broker_helper, test), gen_server:call(emqx_broker_helper, test),
gen_server:cast(emqx_broker_helper, test), gen_server:cast(emqx_broker_helper, test),
emqx_broker_helper ! test. emqx_broker_helper ! test.

View File

@ -144,6 +144,8 @@ set_test_listenser_confs() ->
init_per_suite(Config) -> init_per_suite(Config) ->
%% CM Meck %% CM Meck
ok = meck:new(emqx_cm, [passthrough, no_history, no_link]), ok = meck:new(emqx_cm, [passthrough, no_history, no_link]),
ok = meck:expect(emqx_cm, mark_channel_connected, fun(_) -> ok end),
ok = meck:expect(emqx_cm, mark_channel_disconnected, fun(_) -> ok end),
%% Access Control Meck %% Access Control Meck
ok = meck:new(emqx_access_control, [passthrough, no_history, no_link]), ok = meck:new(emqx_access_control, [passthrough, no_history, no_link]),
ok = meck:expect(emqx_access_control, authenticate, ok = meck:expect(emqx_access_control, authenticate,

View File

@ -32,6 +32,12 @@
conn_mod => emqx_connection, conn_mod => emqx_connection,
receive_maximum => 100}}). receive_maximum => 100}}).
-define(WAIT(PATTERN, TIMEOUT, RET),
fun() ->
receive PATTERN -> RET
after TIMEOUT -> error({timeout, ?LINE}) end
end()).
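A brief usage note on the macro above (Pid is a placeholder variable): it blocks until a message matching the pattern arrives and evaluates the third argument with the pattern's bindings, or fails the test with the current line number on timeout.
%% Usage sketch -- hypothetical Pid:
%% Reason = ?WAIT({'DOWN', _, process, Pid, R}, 2_000, R),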
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% CT callbacks %% CT callbacks
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -179,28 +185,100 @@ t_open_session_race_condition(_) ->
exit(Winner, kill), exit(Winner, kill),
receive {'DOWN', _, process, Winner, _} -> ok end, receive {'DOWN', _, process, Winner, _} -> ok end,
ignored = gen_server:call(emqx_cm, ignore, infinity), %% sync ignored = gen_server:call(?CM, ignore, infinity), %% sync
ok = emqx_pool:flush_async_tasks(),
?assertEqual([], emqx_cm:lookup_channels(ClientId)). ?assertEqual([], emqx_cm:lookup_channels(ClientId)).
t_discard_session(_) -> t_kick_session_discard_normal(_) ->
test_kick_session(discard, normal).
t_kick_session_discard_shutdown(_) ->
test_kick_session(discard, shutdown).
t_kick_session_discard_shutdown_with_reason(_) ->
test_kick_session(discard, {shutdown, discard}).
t_kick_session_discard_timeout(_) ->
test_kick_session(discard, timeout).
t_kick_session_discard_noproc(_) ->
test_kick_session(discard, noproc).
t_kick_session_kick_normal(_) ->
test_kick_session(kick, normal).
t_kick_session_kick_shutdown(_) ->
test_kick_session(kick, shutdown).
t_kick_session_kick_shutdown_with_reason(_) ->
test_kick_session(kick, {shutdown, kick}).
t_kick_session_kick_timeout(_) ->
test_kick_session(kick, timeout).
t_kick_session_kick_noproc(_) ->
test_kick_session(kick, noproc).
test_kick_session(Action, Reason) ->
ClientId = rand_client_id(), ClientId = rand_client_id(),
#{conninfo := ConnInfo} = ?ChanInfo, #{conninfo := ConnInfo} = ?ChanInfo,
ok = emqx_cm:register_channel(ClientId, self(), ConnInfo), FakeSessionFun =
fun Loop() ->
receive
{'$gen_call', From, A} when A =:= kick orelse
A =:= discard ->
case Reason of
normal ->
gen_server:reply(From, ok);
timeout ->
%% no response to the call
Loop();
_ ->
exit(Reason)
end;
Msg ->
ct:pal("(~p) fake_session_discarded ~p", [Action, Msg]),
Loop()
end
end,
{Pid1, _} = spawn_monitor(FakeSessionFun),
{Pid2, _} = spawn_monitor(FakeSessionFun),
ok = emqx_cm:register_channel(ClientId, Pid1, ConnInfo),
ok = emqx_cm:register_channel(ClientId, Pid1, ConnInfo),
ok = emqx_cm:register_channel(ClientId, Pid2, ConnInfo),
?assertEqual([Pid1, Pid2], lists:sort(emqx_cm:lookup_channels(ClientId))),
case Reason of
noproc -> exit(Pid1, kill), exit(Pid2, kill);
_ -> ok
end,
ok = case Action of
kick -> emqx_cm:kick_session(ClientId);
discard -> emqx_cm:discard_session(ClientId)
end,
case Reason =:= timeout orelse Reason =:= noproc of
true ->
?assertEqual(killed, ?WAIT({'DOWN', _, process, Pid1, R}, 2_000, R)),
?assertEqual(killed, ?WAIT({'DOWN', _, process, Pid2, R}, 2_000, R));
false ->
?assertEqual(Reason, ?WAIT({'DOWN', _, process, Pid1, R}, 2_000, R)),
?assertEqual(Reason, ?WAIT({'DOWN', _, process, Pid2, R}, 2_000, R))
end,
ignored = gen_server:call(?CM, ignore, infinity), % sync
ok = flush_emqx_pool(),
?assertEqual([], emqx_cm:lookup_channels(ClientId)).
ok = meck:new(emqx_connection, [passthrough, no_history]), %% Channel deregistration is delegated to emqx_pool as a sync task.
ok = meck:expect(emqx_connection, call, fun(_, _) -> ok end), %% The emqx_pool is a pool of workers, and there is no way to know
ok = meck:expect(emqx_connection, call, fun(_, _, _) -> ok end), %% which worker was picked for the last deregistration task.
ok = emqx_cm:discard_session(ClientId), %% This helper function creates a large enough number of async tasks
ok = emqx_cm:register_channel(ClientId, self(), ConnInfo), %% to sync with the pool workers.
ok = emqx_cm:discard_session(ClientId), %% The number of tasks should be large enough to ensure all workers have
ok = emqx_cm:unregister_channel(ClientId), %% the chance to work on at least one of the tasks.
ok = emqx_cm:register_channel(ClientId, self(), ConnInfo), flush_emqx_pool() ->
ok = emqx_cm:discard_session(ClientId), Self = self(),
ok = meck:expect(emqx_connection, call, fun(_, _) -> error(testing) end), L = lists:seq(1, 1000),
ok = meck:expect(emqx_connection, call, fun(_, _, _) -> error(testing) end), lists:foreach(fun(I) -> emqx_pool:async_submit(fun() -> Self ! {done, I} end, []) end, L),
ok = emqx_cm:discard_session(ClientId), lists:foreach(fun(I) -> receive {done, I} -> ok end end, L).
ok = emqx_cm:unregister_channel(ClientId),
ok = meck:unload(emqx_connection).
t_discard_session_race(_) -> t_discard_session_race(_) ->
ClientId = rand_client_id(), ClientId = rand_client_id(),
@ -222,37 +300,55 @@ t_discard_session_race(_) ->
t_takeover_session(_) -> t_takeover_session(_) ->
#{conninfo := ConnInfo} = ?ChanInfo, #{conninfo := ConnInfo} = ?ChanInfo,
none = emqx_cm:takeover_session(<<"clientid">>), none = emqx_cm:takeover_session(<<"clientid">>),
Parent = self(),
erlang:spawn_link(fun() -> erlang:spawn_link(fun() ->
ok = emqx_cm:register_channel(<<"clientid">>, self(), ConnInfo), ok = emqx_cm:register_channel(<<"clientid">>, self(), ConnInfo),
Parent ! registered,
receive receive
{'$gen_call', From, {takeover, 'begin'}} -> {'$gen_call', From, {takeover, 'begin'}} ->
gen_server:reply(From, test), ok gen_server:reply(From, test), ok
end end
end), end),
timer:sleep(100), receive registered -> ok end,
{living, emqx_connection, _, test} = emqx_cm:takeover_session(<<"clientid">>), {living, emqx_connection, _, test} = emqx_cm:takeover_session(<<"clientid">>),
emqx_cm:unregister_channel(<<"clientid">>). emqx_cm:unregister_channel(<<"clientid">>).
t_kick_session(_) -> t_takeover_session_process_gone(_) ->
Info = #{conninfo := ConnInfo} = ?ChanInfo, #{conninfo := ConnInfo} = ?ChanInfo,
ok = meck:new(emqx_connection, [passthrough, no_history]), ClientIDTcp = <<"clientidTCP">>,
ok = meck:expect(emqx_connection, call, fun(_, _) -> test end), ClientIDWs = <<"clientidWs">>,
ok = meck:expect(emqx_connection, call, fun(_, _, _) -> test end), ClientIDRpc = <<"clientidRPC">>,
{error, not_found} = emqx_cm:kick_session(<<"clientid">>), none = emqx_cm:takeover_session(ClientIDTcp),
ok = emqx_cm:register_channel(<<"clientid">>, self(), ConnInfo), none = emqx_cm:takeover_session(ClientIDWs),
ok = emqx_cm:insert_channel_info(<<"clientid">>, Info, []), meck:new(emqx_connection, [passthrough, no_history]),
test = emqx_cm:kick_session(<<"clientid">>), meck:expect(emqx_connection, call,
erlang:spawn_link( fun(Pid, {takeover, 'begin'}, _) ->
fun() -> exit({noproc, {gen_server,call,[Pid, takeover_session]}});
ok = emqx_cm:register_channel(<<"clientid">>, self(), ConnInfo), (Pid, What, Args) ->
ok = emqx_cm:insert_channel_info(<<"clientid">>, Info, []), meck:passthrough([Pid, What, Args])
end),
timer:sleep(1000) ok = emqx_cm:register_channel(ClientIDTcp, self(), ConnInfo),
end), none = emqx_cm:takeover_session(ClientIDTcp),
ct:sleep(100), meck:expect(emqx_connection, call,
test = emqx_cm:kick_session(<<"clientid">>), fun(_Pid, {takeover, 'begin'}, _) ->
ok = emqx_cm:unregister_channel(<<"clientid">>), exit(noproc);
ok = meck:unload(emqx_connection). (Pid, What, Args) ->
meck:passthrough([Pid, What, Args])
end),
ok = emqx_cm:register_channel(ClientIDWs, self(), ConnInfo),
none = emqx_cm:takeover_session(ClientIDWs),
meck:expect(emqx_connection, call,
fun(Pid, {takeover, 'begin'}, _) ->
exit({'EXIT', {noproc, {gen_server,call,[Pid, takeover_session]}}});
(Pid, What, Args) ->
meck:passthrough([Pid, What, Args])
end),
ok = emqx_cm:register_channel(ClientIDRpc, self(), ConnInfo),
none = emqx_cm:takeover_session(ClientIDRpc),
emqx_cm:unregister_channel(ClientIDTcp),
emqx_cm:unregister_channel(ClientIDWs),
emqx_cm:unregister_channel(ClientIDRpc),
meck:unload(emqx_connection).
t_all_channels(_) -> t_all_channels(_) ->
?assertEqual(true, is_list(emqx_cm:all_channels())). ?assertEqual(true, is_list(emqx_cm:all_channels())).

View File

@ -19,14 +19,14 @@
-include_lib("common_test/include/ct.hrl"). -include_lib("common_test/include/ct.hrl").
-export([ request_api/3 -export([ request_api/3
, request_api/4 , request_api/4
, request_api/5 , request_api/5
, get_http_data/1 , get_http_data/1
, create_default_app/0 , create_default_app/0
, delete_default_app/0 , delete_default_app/0
, default_auth_header/0 , default_auth_header/0
, auth_header/2 , auth_header/2
]). ]).
request_api(Method, Url, Auth) -> request_api(Method, Url, Auth) ->
request_api(Method, Url, [], Auth, []). request_api(Method, Url, [], Auth, []).
@ -57,15 +57,14 @@ do_request_api(Method, Request, HttpOpts) ->
case httpc:request(Method, Request, HttpOpts, [{body_format, binary}]) of case httpc:request(Method, Request, HttpOpts, [{body_format, binary}]) of
{error, socket_closed_remotely} -> {error, socket_closed_remotely} ->
{error, socket_closed_remotely}; {error, socket_closed_remotely};
{ok, {{"HTTP/1.1", Code, _}, _Headers, Return} } {ok, {{"HTTP/1.1", Code, _}, _Headers, Return} } ->
when Code =:= 200 orelse Code =:= 201 -> {ok, Code, Return};
{ok, Return};
{ok, {Reason, _, _}} -> {ok, {Reason, _, _}} ->
{error, Reason} {error, Reason}
end. end.
get_http_data(ResponseBody) -> get_http_data(ResponseBody) ->
maps:get(<<"data">>, emqx_json:decode(ResponseBody, [return_maps])). emqx_json:decode(ResponseBody, [return_maps]).
auth_header(User, Pass) -> auth_header(User, Pass) ->
Encoded = base64:encode_to_string(lists:append([User,":",Pass])), Encoded = base64:encode_to_string(lists:append([User,":",Pass])),

View File

@ -36,6 +36,8 @@ init_per_suite(Config) ->
ok = meck:new(emqx_channel, [passthrough, no_history, no_link]), ok = meck:new(emqx_channel, [passthrough, no_history, no_link]),
%% Meck Cm %% Meck Cm
ok = meck:new(emqx_cm, [passthrough, no_history, no_link]), ok = meck:new(emqx_cm, [passthrough, no_history, no_link]),
ok = meck:expect(emqx_cm, mark_channel_connected, fun(_) -> ok end),
ok = meck:expect(emqx_cm, mark_channel_disconnected, fun(_) -> ok end),
%% Meck Limiter %% Meck Limiter
ok = meck:new(emqx_limiter, [passthrough, no_history, no_link]), ok = meck:new(emqx_limiter, [passthrough, no_history, no_link]),
%% Meck Pd %% Meck Pd
@ -113,7 +115,7 @@ t_ws_pingreq_before_connected(_) ->
t_info(_) -> t_info(_) ->
CPid = spawn(fun() -> CPid = spawn(fun() ->
receive receive
{'$gen_call', From, info} -> {'$gen_call', From, info} ->
gen_server:reply(From, emqx_connection:info(st())) gen_server:reply(From, emqx_connection:info(st()))
after after
@ -132,7 +134,7 @@ t_info_limiter(_) ->
t_stats(_) -> t_stats(_) ->
CPid = spawn(fun() -> CPid = spawn(fun() ->
receive receive
{'$gen_call', From, stats} -> {'$gen_call', From, stats} ->
gen_server:reply(From, emqx_connection:stats(st())) gen_server:reply(From, emqx_connection:stats(st()))
after after
@ -147,10 +149,10 @@ t_stats(_) ->
{send_pend,0}| _] , Stats). {send_pend,0}| _] , Stats).
t_process_msg(_) -> t_process_msg(_) ->
with_conn(fun(CPid) -> with_conn(fun(CPid) ->
ok = meck:expect(emqx_channel, handle_in, ok = meck:expect(emqx_channel, handle_in,
fun(_Packet, Channel) -> fun(_Packet, Channel) ->
{ok, Channel} {ok, Channel}
end), end),
CPid ! {incoming, ?PACKET(?PINGREQ)}, CPid ! {incoming, ?PACKET(?PINGREQ)},
CPid ! {incoming, undefined}, CPid ! {incoming, undefined},
@ -320,7 +322,7 @@ t_with_channel(_) ->
t_handle_outgoing(_) -> t_handle_outgoing(_) ->
?assertEqual(ok, emqx_connection:handle_outgoing(?PACKET(?PINGRESP), st())), ?assertEqual(ok, emqx_connection:handle_outgoing(?PACKET(?PINGRESP), st())),
?assertEqual(ok, emqx_connection:handle_outgoing([?PACKET(?PINGRESP)], st())). ?assertEqual(ok, emqx_connection:handle_outgoing([?PACKET(?PINGRESP)], st())).
t_handle_info(_) -> t_handle_info(_) ->
?assertMatch({ok, {event,running}, _NState}, ?assertMatch({ok, {event,running}, _NState},
emqx_connection:handle_info(activate_socket, st())), emqx_connection:handle_info(activate_socket, st())),
@ -347,7 +349,7 @@ t_activate_socket(_) ->
State = st(), State = st(),
{ok, NStats} = emqx_connection:activate_socket(State), {ok, NStats} = emqx_connection:activate_socket(State),
?assertEqual(running, emqx_connection:info(sockstate, NStats)), ?assertEqual(running, emqx_connection:info(sockstate, NStats)),
State1 = st(#{sockstate => blocked}), State1 = st(#{sockstate => blocked}),
?assertEqual({ok, State1}, emqx_connection:activate_socket(State1)), ?assertEqual({ok, State1}, emqx_connection:activate_socket(State1)),

View File

@ -55,12 +55,12 @@ t_unmount(_) ->
t_replvar(_) -> t_replvar(_) ->
?assertEqual(undefined, replvar(undefined, #{})), ?assertEqual(undefined, replvar(undefined, #{})),
?assertEqual(<<"mount/user/clientid/">>, ?assertEqual(<<"mount/user/clientid/">>,
replvar(<<"mount/%u/%c/">>, replvar(<<"mount/${username}/${clientid}/">>,
#{clientid => <<"clientid">>, #{clientid => <<"clientid">>,
username => <<"user">> username => <<"user">>
})), })),
?assertEqual(<<"mount/%u/clientid/">>, ?assertEqual(<<"mount/${username}/clientid/">>,
replvar(<<"mount/%u/%c/">>, replvar(<<"mount/${username}/${clientid}/">>,
#{clientid => <<"clientid">>, #{clientid => <<"clientid">>,
username => undefined username => undefined
})). })).

View File

@ -62,79 +62,104 @@ t_conn_stats(_) ->
t_tcp_sock_passive(_) -> t_tcp_sock_passive(_) ->
with_client(fun(CPid) -> CPid ! {tcp_passive, sock} end, []). with_client(fun(CPid) -> CPid ! {tcp_passive, sock} end, []).
t_message_expiry_interval_1(_) -> t_message_expiry_interval(_) ->
ClientA = message_expiry_interval_init(), {CPublish, CControl} = message_expiry_interval_init(),
[message_expiry_interval_exipred(ClientA, QoS) || QoS <- [0,1,2]], [message_expiry_interval_exipred(CPublish, CControl, QoS) || QoS <- [0,1,2]],
emqtt:stop(ClientA). emqtt:stop(CPublish),
emqtt:stop(CControl).
t_message_expiry_interval_2(_) -> t_message_not_expiry_interval(_) ->
ClientA = message_expiry_interval_init(), {CPublish, CControl} = message_expiry_interval_init(),
[message_expiry_interval_not_exipred(ClientA, QoS) || QoS <- [0,1,2]], [message_expiry_interval_not_exipred(CPublish, CControl, QoS) || QoS <- [0,1,2]],
emqtt:stop(ClientA). emqtt:stop(CPublish),
emqtt:stop(CControl).
message_expiry_interval_init() -> message_expiry_interval_init() ->
{ok, ClientA} = emqtt:start_link([{proto_ver,v5}, {ok, CPublish} = emqtt:start_link([{proto_ver,v5},
{clientid, <<"client-a">>}, {clientid, <<"Client-Publish">>},
{clean_start, false}, {clean_start, false},
{properties, #{'Session-Expiry-Interval' => 360}}]), {properties, #{'Session-Expiry-Interval' => 360}}]),
{ok, ClientB} = emqtt:start_link([{proto_ver,v5}, {ok, CVerify} = emqtt:start_link([{proto_ver,v5},
{clientid, <<"client-b">>}, {clientid, <<"Client-Verify">>},
{clean_start, false}, {clean_start, false},
{properties, #{'Session-Expiry-Interval' => 360}}]), {properties, #{'Session-Expiry-Interval' => 360}}]),
{ok, _} = emqtt:connect(ClientA), {ok, CControl} = emqtt:start_link([{proto_ver,v5},
{ok, _} = emqtt:connect(ClientB), {clientid, <<"Client-Control">>},
%% subscribe and disconnect client-b {clean_start, false},
emqtt:subscribe(ClientB, <<"t/a">>, 1), {properties, #{'Session-Expiry-Interval' => 360}}]),
emqtt:stop(ClientB), {ok, _} = emqtt:connect(CPublish),
ClientA. {ok, _} = emqtt:connect(CVerify),
{ok, _} = emqtt:connect(CControl),
%% subscribe and disconnect Client-verify
emqtt:subscribe(CControl, <<"t/a">>, 1),
emqtt:subscribe(CVerify, <<"t/a">>, 1),
emqtt:stop(CVerify),
{CPublish, CControl}.
message_expiry_interval_exipred(ClientA, QoS) -> message_expiry_interval_exipred(CPublish, CControl, QoS) ->
ct:pal("~p ~p", [?FUNCTION_NAME, QoS]), ct:pal("~p ~p", [?FUNCTION_NAME, QoS]),
%% publish to t/a and wait for the message to expire %% publish to t/a and wait for the message to expire
emqtt:publish(ClientA, <<"t/a">>, #{'Message-Expiry-Interval' => 1}, <<"this will be purged in 1s">>, [{qos, QoS}]), emqtt:publish(CPublish, <<"t/a">>, #{'Message-Expiry-Interval' => 1},
ct:sleep(1500), <<"this will be purged in 1s">>, [{qos, QoS}]),
%% CControl makes sure the publish has already been stored by the broker.
receive
{publish,#{client_pid := CControl, topic := <<"t/a">>}} ->
ok
after 1000 ->
ct:fail(should_receive_publish)
end,
ct:sleep(1100),
%% resume the session for client-b %% resume the session for Client-Verify
{ok, ClientB1} = emqtt:start_link([{proto_ver,v5}, {ok, CVerify} = emqtt:start_link([{proto_ver,v5},
{clientid, <<"client-b">>}, {clientid, <<"Client-Verify">>},
{clean_start, false}, {clean_start, false},
{properties, #{'Session-Expiry-Interval' => 360}}]), {properties, #{'Session-Expiry-Interval' => 360}}]),
{ok, _} = emqtt:connect(ClientB1), {ok, _} = emqtt:connect(CVerify),
%% verify client-b could not receive the publish message %% verify Client-Verify could not receive the publish message
receive receive
{publish,#{client_pid := ClientB1, topic := <<"t/a">>}} -> {publish,#{client_pid := CVerify, topic := <<"t/a">>}} ->
ct:fail(should_have_expired) ct:fail(should_have_expired)
after 300 -> after 300 ->
ok ok
end, end,
emqtt:stop(ClientB1). emqtt:stop(CVerify).
message_expiry_interval_not_exipred(ClientA, QoS) -> message_expiry_interval_not_exipred(CPublish, CControl, QoS) ->
ct:pal("~p ~p", [?FUNCTION_NAME, QoS]), ct:pal("~p ~p", [?FUNCTION_NAME, QoS]),
%% publish to t/a %% publish to t/a
emqtt:publish(ClientA, <<"t/a">>, #{'Message-Expiry-Interval' => 20}, <<"this will be purged in 1s">>, [{qos, QoS}]), emqtt:publish(CPublish, <<"t/a">>, #{'Message-Expiry-Interval' => 20},
<<"this will be purged in 20s">>, [{qos, QoS}]),
%% wait for 1s and then resume the session for client-b, the message should not expires %% CControl makes sure the publish has already been stored in the broker.
receive
{publish,#{client_pid := CControl, topic := <<"t/a">>}} ->
ok
after 1000 ->
ct:fail(should_receive_publish)
end,
%% wait for 1.2s and then resume the session for Client-Verify, the message should not expire
%% as Message-Expiry-Interval = 20s %% as Message-Expiry-Interval = 20s
ct:sleep(1000), ct:sleep(1200),
{ok, ClientB1} = emqtt:start_link([{proto_ver,v5}, {ok, CVerify} = emqtt:start_link([{proto_ver,v5},
{clientid, <<"client-b">>}, {clientid, <<"Client-Verify">>},
{clean_start, false}, {clean_start, false},
{properties, #{'Session-Expiry-Interval' => 360}}]), {properties, #{'Session-Expiry-Interval' => 360}}]),
{ok, _} = emqtt:connect(ClientB1), {ok, _} = emqtt:connect(CVerify),
%% verify client-b could receive the publish message and the Message-Expiry-Interval is set %% verify Client-Verify could receive the publish message and the Message-Expiry-Interval is set
receive receive
{publish,#{client_pid := ClientB1, topic := <<"t/a">>, {publish,#{client_pid := CVerify, topic := <<"t/a">>,
properties := #{'Message-Expiry-Interval' := MsgExpItvl}}} properties := #{'Message-Expiry-Interval' := MsgExpItvl}}}
when MsgExpItvl < 20 -> ok; when MsgExpItvl =< 20 -> ok;
{publish, _} = Msg -> {publish, _} = Msg ->
ct:fail({incorrect_publish, Msg}) ct:fail({incorrect_publish, Msg})
after 300 -> after 300 ->
ct:fail(no_publish_received) ct:fail(no_publish_received)
end, end,
emqtt:stop(ClientB1). emqtt:stop(CVerify).
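Both helpers above exercise the MQTT v5 Message-Expiry-Interval publish property: the control subscriber confirms the message reached the broker, and the verify subscriber resumes its session either after or before the interval elapses. A minimal standalone sketch of setting the property with emqtt follows; the client id, topic and 5-second interval are illustrative and not part of the suite.

%% Sketch only, assuming a broker listening on the default port.
publish_with_expiry_demo() ->
    {ok, C} = emqtt:start_link([{proto_ver, v5}, {clientid, <<"expiry-demo">>}]),
    {ok, _} = emqtt:connect(C),
    %% Undelivered copies are dropped by the broker once 5 seconds have passed.
    {ok, _} = emqtt:publish(C, <<"demo/expiry">>,
                            #{'Message-Expiry-Interval' => 5},
                            <<"expires in 5s">>, [{qos, 1}]),
    emqtt:stop(C).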
with_client(TestFun, _Options) -> with_client(TestFun, _Options) ->
ClientId = <<"t_conn">>, ClientId = <<"t_conn">>,

View File

@ -113,6 +113,9 @@ init_per_group(snabbkaffe, Config) ->
[ {kill_connection_process, true} | Config]; [ {kill_connection_process, true} | Config];
init_per_group(gc_tests, Config) -> init_per_group(gc_tests, Config) ->
%% We need to make sure the system does not interfere with this test group. %% We need to make sure the system does not interfere with this test group.
lists:foreach(fun(ClientId) ->
maybe_kill_connection_process(ClientId, [{kill_connection_process, true}])
end, emqx_cm:all_client_ids()),
emqx_common_test_helpers:stop_apps([]), emqx_common_test_helpers:stop_apps([]),
SessionMsgEts = gc_tests_session_store, SessionMsgEts = gc_tests_session_store,
MsgEts = gc_tests_msg_store, MsgEts = gc_tests_msg_store,
@ -230,50 +233,92 @@ receive_messages(Count, Msgs) ->
maybe_kill_connection_process(ClientId, Config) -> maybe_kill_connection_process(ClientId, Config) ->
case ?config(kill_connection_process, Config) of case ?config(kill_connection_process, Config) of
true -> true ->
[ConnectionPid] = emqx_cm:lookup_channels(ClientId), case emqx_cm:lookup_channels(ClientId) of
?assert(is_pid(ConnectionPid)), [] ->
Ref = monitor(process, ConnectionPid), ok;
ConnectionPid ! die_if_test, [ConnectionPid] ->
receive {'DOWN', Ref, process, ConnectionPid, normal} -> ok ?assert(is_pid(ConnectionPid)),
after 3000 -> error(process_did_not_die) Ref = monitor(process, ConnectionPid),
ConnectionPid ! die_if_test,
receive {'DOWN', Ref, process, ConnectionPid, normal} -> ok
after 3000 -> error(process_did_not_die)
end,
wait_for_cm_unregister(ClientId)
end; end;
false -> false ->
ok ok
end. end.
snabbkaffe_sync_publish(Topic, Payloads, Config) -> wait_for_cm_unregister(ClientId) ->
Fun = fun(Client, Payload) -> wait_for_cm_unregister(ClientId, 10).
?wait_async_action( {ok, _} = emqtt:publish(Client, Topic, Payload, 2)
, #{?snk_kind := ps_persist_msg, payload := Payload}
)
end,
do_publish(Payloads, Fun, Config).
publish(Topic, Payloads, Config) -> wait_for_cm_unregister(_ClientId, 0) ->
error(cm_did_not_unregister);
wait_for_cm_unregister(ClientId, N) ->
case emqx_cm:lookup_channels(ClientId) of
[] -> ok;
[_] -> timer:sleep(100), wait_for_cm_unregister(ClientId, N - 1)
end.
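wait_for_cm_unregister/2 is a bounded polling loop: rather than sleeping a fixed time, it re-checks emqx_cm until the channel is gone or the retry budget runs out. A generalized sketch of the same idiom, using a hypothetical wait_until/3 helper that is not part of the suite:

%% Sketch only; predicate, retry count and sleep interval are illustrative.
wait_until(_Pred, 0, _SleepMs) ->
    error(condition_not_met);
wait_until(Pred, Retries, SleepMs) ->
    case Pred() of
        true  -> ok;
        false -> timer:sleep(SleepMs), wait_until(Pred, Retries - 1, SleepMs)
    end.

With it, wait_for_cm_unregister(ClientId) could be expressed as wait_until(fun() -> emqx_cm:lookup_channels(ClientId) =:= [] end, 10, 100).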
snabbkaffe_sync_publish(Topic, Payloads) ->
Fun = fun(Client, Payload) ->
?check_trace(
begin
?wait_async_action( {ok, _} = emqtt:publish(Client, Topic, Payload, 2)
, #{?snk_kind := ps_persist_msg, payload := Payload}
)
end,
fun(_, _Trace) -> ok end)
end,
do_publish(Payloads, Fun, true).
publish(Topic, Payloads) ->
publish(Topic, Payloads, false).
publish(Topic, Payloads, WaitForUnregister) ->
Fun = fun(Client, Payload) -> Fun = fun(Client, Payload) ->
{ok, _} = emqtt:publish(Client, Topic, Payload, 2) {ok, _} = emqtt:publish(Client, Topic, Payload, 2)
end, end,
do_publish(Payloads, Fun, Config). do_publish(Payloads, Fun, WaitForUnregister).
do_publish(Payloads = [_|_], PublishFun, Config) -> do_publish(Payloads = [_|_], PublishFun, WaitForUnregister) ->
%% Publish from another process to avoid connection confusion. %% Publish from another process to avoid connection confusion.
{Pid, Ref} = {Pid, Ref} =
spawn_monitor( spawn_monitor(
fun() -> fun() ->
%% For convenience, always publish using tcp. %% For convenience, always publish using tcp.
%% The publish path is not what we are testing. %% The publish path is not what we are testing.
ClientID = <<"ps_SUITE_publisher">>,
{ok, Client} = emqtt:start_link([ {proto_ver, v5} {ok, Client} = emqtt:start_link([ {proto_ver, v5}
, {clientid, ClientID}
, {port, 1883} ]), , {port, 1883} ]),
{ok, _} = emqtt:connect(Client), {ok, _} = emqtt:connect(Client),
lists:foreach(fun(Payload) -> PublishFun(Client, Payload) end, Payloads), lists:foreach(fun(Payload) -> PublishFun(Client, Payload) end, Payloads),
ok = emqtt:disconnect(Client) ok = emqtt:disconnect(Client),
%% Snabbkaffe sometimes fails unless all processes are gone.
case WaitForUnregister of
false ->
ok;
true ->
case emqx_cm:lookup_channels(ClientID) of
[] ->
ok;
[ConnectionPid] ->
?assert(is_pid(ConnectionPid)),
Ref1 = monitor(process, ConnectionPid),
receive {'DOWN', Ref1, process, ConnectionPid, _} -> ok
after 3000 -> error(process_did_not_die)
end,
wait_for_cm_unregister(ClientID)
end
end
end), end),
receive receive
{'DOWN', Ref, process, Pid, normal} -> ok; {'DOWN', Ref, process, Pid, normal} -> ok;
{'DOWN', Ref, process, Pid, What} -> error({failed_publish, What}) {'DOWN', Ref, process, Pid, What} -> error({failed_publish, What})
end; end;
do_publish(Payload, PublishFun, Config) -> do_publish(Payload, PublishFun, WaitForUnregister) ->
do_publish([Payload], PublishFun, Config). do_publish([Payload], PublishFun, WaitForUnregister).
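do_publish/3 runs the publisher in a separate, monitored process so the publishing connection cannot be confused with the session under test, and so a crash in the publisher surfaces as a test failure. The underlying spawn_monitor pattern in isolation, as a hypothetical helper not present in the suite:

%% Sketch only.
run_isolated(Fun) ->
    {Pid, Ref} = spawn_monitor(Fun),
    receive
        {'DOWN', Ref, process, Pid, normal} -> ok;
        {'DOWN', Ref, process, Pid, Reason} -> error({task_failed, Reason})
    end.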
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% Test Cases %% Test Cases
@ -297,7 +342,7 @@ t_connect_session_expiry_interval(Config) ->
maybe_kill_connection_process(ClientId, Config), maybe_kill_connection_process(ClientId, Config),
publish(Topic, Payload, Config), publish(Topic, Payload),
{ok, Client2} = emqtt:start_link([ {clientid, ClientId}, {ok, Client2} = emqtt:start_link([ {clientid, ClientId},
{proto_ver, v5}, {proto_ver, v5},
@ -356,6 +401,8 @@ t_cancel_on_disconnect(Config) ->
{ok, _} = emqtt:ConnFun(Client1), {ok, _} = emqtt:ConnFun(Client1),
ok = emqtt:disconnect(Client1, 0, #{'Session-Expiry-Interval' => 0}), ok = emqtt:disconnect(Client1, 0, #{'Session-Expiry-Interval' => 0}),
wait_for_cm_unregister(ClientId),
{ok, Client2} = emqtt:start_link([ {clientid, ClientId}, {ok, Client2} = emqtt:start_link([ {clientid, ClientId},
{proto_ver, v5}, {proto_ver, v5},
{clean_start, false}, {clean_start, false},
@ -382,6 +429,8 @@ t_persist_on_disconnect(Config) ->
%% Strangely enough, the disconnect is reported as successful by emqtt. %% Strangely enough, the disconnect is reported as successful by emqtt.
ok = emqtt:disconnect(Client1, 0, #{'Session-Expiry-Interval' => 30}), ok = emqtt:disconnect(Client1, 0, #{'Session-Expiry-Interval' => 30}),
wait_for_cm_unregister(ClientId),
{ok, Client2} = emqtt:start_link([ {clientid, ClientId}, {ok, Client2} = emqtt:start_link([ {clientid, ClientId},
{proto_ver, v5}, {proto_ver, v5},
{clean_start, false}, {clean_start, false},
@ -424,7 +473,7 @@ t_process_dies_session_expires(Config) ->
maybe_kill_connection_process(ClientId, Config), maybe_kill_connection_process(ClientId, Config),
ok = publish(Topic, [Payload], Config), ok = publish(Topic, [Payload]),
SessionId = SessionId =
case ?config(persistent_store_enabled, Config) of case ?config(persistent_store_enabled, Config) of
@ -467,7 +516,8 @@ t_process_dies_session_expires(Config) ->
%% The session should be a fresh one %% The session should be a fresh one
{persistent, NewSession} = emqx_persistent_session:lookup(ClientId), {persistent, NewSession} = emqx_persistent_session:lookup(ClientId),
?assertNotEqual(SessionId, emqx_session:info(id, NewSession)), ?assertNotEqual(SessionId, emqx_session:info(id, NewSession)),
%% The old session should now either be marked as abandoned or already be garbage collected. %% The old session should now either
%% be marked as abandoned or already be garbage collected.
?assertMatch([], emqx_persistent_session:pending(SessionId)); ?assertMatch([], emqx_persistent_session:pending(SessionId));
false -> false ->
skip skip
@ -498,7 +548,7 @@ t_publish_while_client_is_gone(Config) ->
ok = emqtt:disconnect(Client1), ok = emqtt:disconnect(Client1),
maybe_kill_connection_process(ClientId, Config), maybe_kill_connection_process(ClientId, Config),
ok = publish(Topic, [Payload1, Payload2], Config), ok = publish(Topic, [Payload1, Payload2]),
{ok, Client2} = emqtt:start_link([ {proto_ver, v5}, {ok, Client2} = emqtt:start_link([ {proto_ver, v5},
{clientid, ClientId}, {clientid, ClientId},
@ -506,8 +556,9 @@ t_publish_while_client_is_gone(Config) ->
{clean_start, false} {clean_start, false}
| Config]), | Config]),
{ok, _} = emqtt:ConnFun(Client2), {ok, _} = emqtt:ConnFun(Client2),
[Msg1] = receive_messages(1), Msgs = receive_messages(2),
[Msg2] = receive_messages(1), ?assertEqual(length(Msgs), 2),
[Msg2, Msg1] = Msgs,
?assertEqual({ok, iolist_to_binary(Payload1)}, maps:find(payload, Msg1)), ?assertEqual({ok, iolist_to_binary(Payload1)}, maps:find(payload, Msg1)),
?assertEqual({ok, 2}, maps:find(qos, Msg1)), ?assertEqual({ok, 2}, maps:find(qos, Msg1)),
?assertEqual({ok, iolist_to_binary(Payload2)}, maps:find(payload, Msg2)), ?assertEqual({ok, iolist_to_binary(Payload2)}, maps:find(payload, Msg2)),
@ -544,7 +595,7 @@ t_clean_start_drops_subscriptions(Config) ->
maybe_kill_connection_process(ClientId, Config), maybe_kill_connection_process(ClientId, Config),
%% 2. %% 2.
ok = publish(Topic, Payload1, Config), ok = publish(Topic, Payload1),
%% 3. %% 3.
{ok, Client2} = emqtt:start_link([ {proto_ver, v5}, {ok, Client2} = emqtt:start_link([ {proto_ver, v5},
@ -556,7 +607,7 @@ t_clean_start_drops_subscriptions(Config) ->
?assertEqual(0, client_info(session_present, Client2)), ?assertEqual(0, client_info(session_present, Client2)),
{ok, _, [2]} = emqtt:subscribe(Client2, STopic, qos2), {ok, _, [2]} = emqtt:subscribe(Client2, STopic, qos2),
ok = publish(Topic, Payload2, Config), ok = publish(Topic, Payload2),
[Msg1] = receive_messages(1), [Msg1] = receive_messages(1),
?assertEqual({ok, iolist_to_binary(Payload2)}, maps:find(payload, Msg1)), ?assertEqual({ok, iolist_to_binary(Payload2)}, maps:find(payload, Msg1)),
@ -571,7 +622,7 @@ t_clean_start_drops_subscriptions(Config) ->
| Config]), | Config]),
{ok, _} = emqtt:ConnFun(Client3), {ok, _} = emqtt:ConnFun(Client3),
ok = publish(Topic, Payload3, Config), ok = publish(Topic, Payload3),
[Msg2] = receive_messages(1), [Msg2] = receive_messages(1),
?assertEqual({ok, iolist_to_binary(Payload3)}, maps:find(payload, Msg2)), ?assertEqual({ok, iolist_to_binary(Payload3)}, maps:find(payload, Msg2)),
@ -625,7 +676,7 @@ t_multiple_subscription_matches(Config) ->
maybe_kill_connection_process(ClientId, Config), maybe_kill_connection_process(ClientId, Config),
publish(Topic, Payload, Config), publish(Topic, Payload),
{ok, Client2} = emqtt:start_link([ {clientid, ClientId}, {ok, Client2} = emqtt:start_link([ {clientid, ClientId},
{proto_ver, v5}, {proto_ver, v5},
@ -675,9 +726,9 @@ t_lost_messages_because_of_gc(Config) ->
{ok, _, [2]} = emqtt:subscribe(Client1, STopic, qos2), {ok, _, [2]} = emqtt:subscribe(Client1, STopic, qos2),
emqtt:disconnect(Client1), emqtt:disconnect(Client1),
maybe_kill_connection_process(ClientId, Config), maybe_kill_connection_process(ClientId, Config),
publish(Topic, Payload1, Config), publish(Topic, Payload1),
timer:sleep(2 * Retain), timer:sleep(2 * Retain),
publish(Topic, Payload2, Config), publish(Topic, Payload2),
emqx_persistent_session_gc:message_gc_worker(), emqx_persistent_session_gc:message_gc_worker(),
{ok, Client2} = emqtt:start_link([ {clientid, ClientId}, {ok, Client2} = emqtt:start_link([ {clientid, ClientId},
{clean_start, false}, {clean_start, false},
@ -747,7 +798,6 @@ check_snabbkaffe_vanilla(Trace) ->
t_snabbkaffe_vanilla_stages(Config) -> t_snabbkaffe_vanilla_stages(Config) ->
%% Test that all stages of session resume works ok in the simplest case %% Test that all stages of session resume works ok in the simplest case
process_flag(trap_exit, true),
ConnFun = ?config(conn_fun, Config), ConnFun = ?config(conn_fun, Config),
ClientId = ?config(client_id, Config), ClientId = ?config(client_id, Config),
EmqttOpts = [ {proto_ver, v5}, EmqttOpts = [ {proto_ver, v5},
@ -772,7 +822,6 @@ t_snabbkaffe_vanilla_stages(Config) ->
t_snabbkaffe_pending_messages(Config) -> t_snabbkaffe_pending_messages(Config) ->
%% Make sure pending messages are fetched during the init stage. %% Make sure pending messages are fetched during the init stage.
process_flag(trap_exit, true),
ConnFun = ?config(conn_fun, Config), ConnFun = ?config(conn_fun, Config),
ClientId = ?config(client_id, Config), ClientId = ?config(client_id, Config),
Topic = ?config(topic, Config), Topic = ?config(topic, Config),
@ -790,7 +839,7 @@ t_snabbkaffe_pending_messages(Config) ->
?check_trace( ?check_trace(
begin begin
snabbkaffe_sync_publish(Topic, Payloads, Config), snabbkaffe_sync_publish(Topic, Payloads),
{ok, Client2} = emqtt:start_link([{clean_start, false} | EmqttOpts]), {ok, Client2} = emqtt:start_link([{clean_start, false} | EmqttOpts]),
{ok, _} = emqtt:ConnFun(Client2), {ok, _} = emqtt:ConnFun(Client2),
Msgs = receive_messages(length(Payloads)), Msgs = receive_messages(length(Payloads)),
@ -812,7 +861,6 @@ t_snabbkaffe_pending_messages(Config) ->
t_snabbkaffe_buffered_messages(Config) -> t_snabbkaffe_buffered_messages(Config) ->
%% Make sure to buffer messages during startup. %% Make sure to buffer messages during startup.
process_flag(trap_exit, true),
ConnFun = ?config(conn_fun, Config), ConnFun = ?config(conn_fun, Config),
ClientId = ?config(client_id, Config), ClientId = ?config(client_id, Config),
Topic = ?config(topic, Config), Topic = ?config(topic, Config),
@ -829,7 +877,7 @@ t_snabbkaffe_buffered_messages(Config) ->
ok = emqtt:disconnect(Client1), ok = emqtt:disconnect(Client1),
maybe_kill_connection_process(ClientId, Config), maybe_kill_connection_process(ClientId, Config),
publish(Topic, Payloads1, Config), publish(Topic, Payloads1),
?check_trace( ?check_trace(
begin begin
@ -837,8 +885,8 @@ t_snabbkaffe_buffered_messages(Config) ->
?force_ordering( #{ ?snk_kind := ps_worker_deliver }, ?force_ordering( #{ ?snk_kind := ps_worker_deliver },
#{ ?snk_kind := ps_resume_end }), #{ ?snk_kind := ps_resume_end }),
spawn_link(fun() -> spawn_link(fun() ->
?block_until(#{ ?snk_kind := ps_marker_pendings_msgs }, infinity, 5000), ?block_until(#{?snk_kind := ps_marker_pendings_msgs}, infinity, 5000),
publish(Topic, Payloads2, Config) publish(Topic, Payloads2, true)
end), end),
{ok, Client2} = emqtt:start_link([{clean_start, false} | EmqttOpts]), {ok, Client2} = emqtt:start_link([{clean_start, false} | EmqttOpts]),
{ok, _} = emqtt:ConnFun(Client2), {ok, _} = emqtt:ConnFun(Client2),

View File

@ -112,7 +112,8 @@ t_out(_) ->
t_out_2(_) -> t_out_2(_) ->
{empty, {pqueue, [{-1, {queue, [a], [], 1}}]}} = ?PQ:out(0, ?PQ:from_list([{1, a}])), {empty, {pqueue, [{-1, {queue, [a], [], 1}}]}} = ?PQ:out(0, ?PQ:from_list([{1, a}])),
{{value, a}, {queue, [], [], 0}} = ?PQ:out(1, ?PQ:from_list([{1, a}])), {{value, a}, {queue, [], [], 0}} = ?PQ:out(1, ?PQ:from_list([{1, a}])),
{{value, a}, {pqueue, [{-1, {queue, [], [b], 1}}]}} = ?PQ:out(1, ?PQ:from_list([{1, a}, {1, b}])), {{value, a}, {pqueue, [{-1, {queue, [], [b], 1}}]}} =
?PQ:out(1, ?PQ:from_list([{1, a}, {1, b}])),
{{value, a}, {queue, [b], [], 1}} = ?PQ:out(1, ?PQ:from_list([{1, a}, {0, b}])). {{value, a}, {queue, [b], [], 1}} = ?PQ:out(1, ?PQ:from_list([{1, a}, {0, b}])).
t_out_p(_) -> t_out_p(_) ->
@ -174,4 +175,4 @@ t_filter(_) ->
t_highest(_) -> t_highest(_) ->
empty = ?PQ:highest(?PQ:new()), empty = ?PQ:highest(?PQ:new()),
0 = ?PQ:highest(?PQ:from_list([{0, a}, {0, b}])), 0 = ?PQ:highest(?PQ:from_list([{0, a}, {0, b}])),
2 = ?PQ:highest(?PQ:from_list([{0, a}, {0, b}, {1, c}, {2, d}, {2, e}])). 2 = ?PQ:highest(?PQ:from_list([{0, a}, {0, b}, {1, c}, {2, d}, {2, e}])).

View File

@ -36,4 +36,4 @@ t_child(_) ->
?assertMatch({error, not_found}, emqx_sup:stop_child(undef)), ?assertMatch({error, not_found}, emqx_sup:stop_child(undef)),
?assertMatch({error, _}, emqx_sup:start_child(emqx_broker_sup, supervisor)), ?assertMatch({error, _}, emqx_sup:start_child(emqx_broker_sup, supervisor)),
?assertEqual(ok, emqx_sup:stop_child(emqx_broker_sup)), ?assertEqual(ok, emqx_sup:stop_child(emqx_broker_sup)),
?assertMatch({ok, _}, emqx_sup:start_child(emqx_broker_sup, supervisor)). ?assertMatch({ok, _}, emqx_sup:start_child(emqx_broker_sup, supervisor)).

View File

@ -0,0 +1,318 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_trace_SUITE).
%% API
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("emqx/include/emqx.hrl").
-record(emqx_trace, {name, type, filter, enable = true, start_at, end_at}).
%%--------------------------------------------------------------------
%% Setups
%%--------------------------------------------------------------------
all() ->
emqx_common_test_helpers:all(?MODULE).
init_per_suite(Config) ->
application:load(emqx_plugin_libs),
emqx_common_test_helpers:start_apps([]),
Config.
end_per_suite(_Config) ->
emqx_common_test_helpers:stop_apps([]).
t_base_create_delete(_Config) ->
ok = emqx_trace:clear(),
Now = erlang:system_time(second),
Start = to_rfc3339(Now),
End = to_rfc3339(Now + 30 * 60),
Name = <<"name1">>,
ClientId = <<"test-device">>,
Trace = #{
name => Name,
type => <<"clientid">>,
clientid => ClientId,
start_at => Start,
end_at => End
},
AnotherTrace = Trace#{name => <<"anotherTrace">>},
ok = emqx_trace:create(Trace),
?assertEqual({error, {already_existed, Name}}, emqx_trace:create(Trace)),
?assertEqual({error, {duplicate_condition, Name}}, emqx_trace:create(AnotherTrace)),
[TraceRec] = emqx_trace:list(),
Expect = #emqx_trace{
name = Name,
type = clientid,
filter = ClientId,
start_at = Now,
end_at = Now + 30 * 60
},
?assertEqual(Expect, TraceRec),
ExpectFormat = [
#{
filter => <<"test-device">>,
enable => true,
type => clientid,
name => <<"name1">>,
start_at => Now,
end_at => Now + 30 * 60
}
],
?assertEqual(ExpectFormat, emqx_trace:format([TraceRec])),
?assertEqual(ok, emqx_trace:delete(Name)),
?assertEqual({error, not_found}, emqx_trace:delete(Name)),
?assertEqual([], emqx_trace:list()),
ok.
t_create_size_max(_Config) ->
emqx_trace:clear(),
lists:map(fun(Seq) ->
Name = list_to_binary("name" ++ integer_to_list(Seq)),
Trace = [{name, Name}, {type, <<"topic">>},
{topic, list_to_binary("/x/y/" ++ integer_to_list(Seq))}],
ok = emqx_trace:create(Trace)
end, lists:seq(1, 30)),
Trace31 = [{<<"name">>, <<"name31">>},
{<<"type">>, <<"topic">>}, {<<"topic">>, <<"/x/y/31">>}],
{error, _} = emqx_trace:create(Trace31),
ok = emqx_trace:delete(<<"name30">>),
ok = emqx_trace:create(Trace31),
?assertEqual(30, erlang:length(emqx_trace:list())),
ok.
t_create_failed(_Config) ->
ok = emqx_trace:clear(),
UnknownField = [{<<"unknown">>, 12}],
{error, Reason1} = emqx_trace:create(UnknownField),
?assertEqual(<<"unknown field: {unknown,12}">>, iolist_to_binary(Reason1)),
InvalidTopic = [{<<"topic">>, "#/#//"}],
{error, Reason2} = emqx_trace:create(InvalidTopic),
?assertEqual(<<"topic: #/#// invalid by function_clause">>, iolist_to_binary(Reason2)),
InvalidStart = [{<<"start_at">>, <<"2021-12-3:12">>}],
{error, Reason3} = emqx_trace:create(InvalidStart),
?assertEqual(<<"The rfc3339 specification not satisfied: 2021-12-3:12">>,
iolist_to_binary(Reason3)),
InvalidEnd = [{<<"end_at">>, <<"2021-12-3:12">>}],
{error, Reason4} = emqx_trace:create(InvalidEnd),
?assertEqual(<<"The rfc3339 specification not satisfied: 2021-12-3:12">>,
iolist_to_binary(Reason4)),
{error, Reason7} = emqx_trace:create([{<<"name">>, <<"test">>}, {<<"type">>, <<"clientid">>}]),
?assertEqual(<<"topic/clientid/ip_address filter required">>, iolist_to_binary(Reason7)),
InvalidPackets4 = [{<<"name">>, <<"/test">>}, {<<"clientid">>, <<"t">>},
{<<"type">>, <<"clientid">>}],
{error, Reason9} = emqx_trace:create(InvalidPackets4),
?assertEqual(<<"name cannot contain /">>, iolist_to_binary(Reason9)),
?assertEqual({error, "type=[topic,clientid,ip_address] required"},
emqx_trace:create([{<<"name">>, <<"test-name">>}, {<<"clientid">>, <<"good">>}])),
?assertEqual({error, "incorrect type: only support clientid/topic/ip_address"},
emqx_trace:create([{<<"name">>, <<"test-name">>},
{<<"clientid">>, <<"good">>}, {<<"type">>, <<"typeerror">> }])),
?assertEqual({error, "ip address: einval"},
emqx_trace:create([{<<"ip_address">>, <<"test-name">>}])),
ok.
t_create_default(_Config) ->
ok = emqx_trace:clear(),
{error, "name required"} = emqx_trace:create([]),
ok = emqx_trace:create([{<<"name">>, <<"test-name">>},
{<<"type">>, <<"clientid">>}, {<<"clientid">>, <<"good">>}]),
[#emqx_trace{name = <<"test-name">>}] = emqx_trace:list(),
ok = emqx_trace:clear(),
Trace = [
{<<"name">>, <<"test-name">>},
{<<"type">>, <<"topic">>},
{<<"topic">>, <<"/x/y/z">>},
{<<"start_at">>, <<"2021-10-28T10:54:47+08:00">>},
{<<"end_at">>, <<"2021-10-27T10:54:47+08:00">>}
],
{error, "end_at time has already passed"} = emqx_trace:create(Trace),
Now = erlang:system_time(second),
Trace2 = [
{<<"name">>, <<"test-name">>},
{<<"type">>, <<"topic">>},
{<<"topic">>, <<"/x/y/z">>},
{<<"start_at">>, to_rfc3339(Now + 10)},
{<<"end_at">>, to_rfc3339(Now + 3)}
],
{error, "failed by start_at >= end_at"} = emqx_trace:create(Trace2),
ok = emqx_trace:create([{<<"name">>, <<"test-name">>},
{<<"type">>, <<"topic">>}, {<<"topic">>, <<"/x/y/z">>}]),
[#emqx_trace{start_at = Start, end_at = End}] = emqx_trace:list(),
?assertEqual(10 * 60, End - Start),
?assertEqual(true, Start - erlang:system_time(second) < 5),
ok.
t_update_enable(_Config) ->
ok = emqx_trace:clear(),
Name = <<"test-name">>,
Now = erlang:system_time(second),
End = list_to_binary(calendar:system_time_to_rfc3339(Now + 2)),
ok = emqx_trace:create([{<<"name">>, Name}, {<<"type">>, <<"topic">>},
{<<"topic">>, <<"/x/y/z">>}, {<<"end_at">>, End}]),
[#emqx_trace{enable = Enable}] = emqx_trace:list(),
?assertEqual(Enable, true),
ok = emqx_trace:update(Name, false),
[#emqx_trace{enable = false}] = emqx_trace:list(),
ok = emqx_trace:update(Name, false),
[#emqx_trace{enable = false}] = emqx_trace:list(),
ok = emqx_trace:update(Name, true),
[#emqx_trace{enable = true}] = emqx_trace:list(),
ok = emqx_trace:update(Name, false),
[#emqx_trace{enable = false}] = emqx_trace:list(),
?assertEqual({error, not_found}, emqx_trace:update(<<"Name not found">>, true)),
ct:sleep(2100),
?assertEqual({error, finished}, emqx_trace:update(Name, true)),
ok.
t_load_state(_Config) ->
emqx_trace:clear(),
load(),
Now = erlang:system_time(second),
Running = [{<<"name">>, <<"Running">>}, {<<"type">>, <<"topic">>},
{<<"topic">>, <<"/x/y/1">>}, {<<"start_at">>, to_rfc3339(Now - 1)},
{<<"end_at">>, to_rfc3339(Now + 2)}],
Waiting = [{<<"name">>, <<"Waiting">>}, {<<"type">>, <<"topic">>},
{<<"topic">>, <<"/x/y/2">>}, {<<"start_at">>, to_rfc3339(Now + 3)},
{<<"end_at">>, to_rfc3339(Now + 8)}],
Finished = [{<<"name">>, <<"Finished">>}, {<<"type">>, <<"topic">>},
{<<"topic">>, <<"/x/y/3">>}, {<<"start_at">>, to_rfc3339(Now - 5)},
{<<"end_at">>, to_rfc3339(Now)}],
ok = emqx_trace:create(Running),
ok = emqx_trace:create(Waiting),
{error, "end_at time has already passed"} = emqx_trace:create(Finished),
Traces = emqx_trace:format(emqx_trace:list()),
?assertEqual(2, erlang:length(Traces)),
Enables = lists:map(fun(#{name := Name, enable := Enable}) -> {Name, Enable} end, Traces),
ExpectEnables = [{<<"Running">>, true}, {<<"Waiting">>, true}],
?assertEqual(ExpectEnables, lists:sort(Enables)),
ct:sleep(3500),
Traces2 = emqx_trace:format(emqx_trace:list()),
?assertEqual(2, erlang:length(Traces2)),
Enables2 = lists:map(fun(#{name := Name, enable := Enable}) -> {Name, Enable} end, Traces2),
ExpectEnables2 = [{<<"Running">>, false}, {<<"Waiting">>, true}],
?assertEqual(ExpectEnables2, lists:sort(Enables2)),
unload(),
ok.
t_client_event(_Config) ->
application:set_env(emqx, allow_anonymous, true),
emqx_trace:clear(),
ClientId = <<"client-test">>,
load(),
Now = erlang:system_time(second),
Start = to_rfc3339(Now),
Name = <<"test_client_id_event">>,
ok = emqx_trace:create([{<<"name">>, Name},
{<<"type">>, <<"clientid">>}, {<<"clientid">>, ClientId}, {<<"start_at">>, Start}]),
ct:sleep(200),
{ok, Client} = emqtt:start_link([{clean_start, true}, {clientid, ClientId}]),
{ok, _} = emqtt:connect(Client),
emqtt:ping(Client),
ok = emqtt:publish(Client, <<"/test">>, #{}, <<"1">>, [{qos, 0}]),
ok = emqtt:publish(Client, <<"/test">>, #{}, <<"2">>, [{qos, 0}]),
ct:sleep(200),
ok = emqx_trace:create([{<<"name">>, <<"test_topic">>},
{<<"type">>, <<"topic">>}, {<<"topic">>, <<"/test">>}, {<<"start_at">>, Start}]),
ct:sleep(200),
{ok, Bin} = file:read_file(emqx_trace:log_file(Name, Now)),
ok = emqtt:publish(Client, <<"/test">>, #{}, <<"3">>, [{qos, 0}]),
ok = emqtt:publish(Client, <<"/test">>, #{}, <<"4">>, [{qos, 0}]),
ok = emqtt:disconnect(Client),
ct:sleep(200),
{ok, Bin2} = file:read_file(emqx_trace:log_file(Name, Now)),
{ok, Bin3} = file:read_file(emqx_trace:log_file(<<"test_topic">>, Now)),
ct:pal("Bin ~p Bin2 ~p Bin3 ~p", [byte_size(Bin), byte_size(Bin2), byte_size(Bin3)]),
?assert(erlang:byte_size(Bin) > 0),
?assert(erlang:byte_size(Bin) < erlang:byte_size(Bin2)),
?assert(erlang:byte_size(Bin3) > 0),
unload(),
ok.
t_get_log_filename(_Config) ->
ok = emqx_trace:clear(),
load(),
Now = erlang:system_time(second),
Start = calendar:system_time_to_rfc3339(Now),
End = calendar:system_time_to_rfc3339(Now + 2),
Name = <<"name1">>,
Trace = [
{<<"name">>, Name},
{<<"type">>, <<"ip_address">>},
{<<"ip_address">>, <<"127.0.0.1">>},
{<<"start_at">>, list_to_binary(Start)},
{<<"end_at">>, list_to_binary(End)}
],
ok = emqx_trace:create(Trace),
?assertEqual({error, not_found}, emqx_trace:get_trace_filename(<<"test">>)),
?assertEqual(ok, element(1, emqx_trace:get_trace_filename(Name))),
ct:sleep(3000),
?assertEqual(ok, element(1, emqx_trace:get_trace_filename(Name))),
unload(),
ok.
t_trace_file(_Config) ->
FileName = "test.log",
Content = <<"test \n test">>,
TraceDir = emqx_trace:trace_dir(),
File = filename:join(TraceDir, FileName),
ok = file:write_file(File, Content),
{ok, Node, Bin} = emqx_trace:trace_file(FileName),
?assertEqual(Node, atom_to_list(node())),
?assertEqual(Content, Bin),
ok = file:delete(File),
ok.
t_download_log(_Config) ->
emqx_trace:clear(),
load(),
ClientId = <<"client-test">>,
Now = erlang:system_time(second),
Start = to_rfc3339(Now),
Name = <<"test_client_id">>,
ok = emqx_trace:create([{<<"name">>, Name},
{<<"type">>, <<"clientid">>}, {<<"clientid">>, ClientId}, {<<"start_at">>, Start}]),
{ok, Client} = emqtt:start_link([{clean_start, true}, {clientid, ClientId}]),
{ok, _} = emqtt:connect(Client),
[begin _ = emqtt:ping(Client) end || _ <- lists:seq(1, 5)],
ct:sleep(100),
{ok, ZipFile} = emqx_trace_api:download_zip_log(#{name => Name}, []),
?assert(filelib:file_size(ZipFile) > 0),
ok = emqtt:disconnect(Client),
unload(),
ok.
to_rfc3339(Second) ->
list_to_binary(calendar:system_time_to_rfc3339(Second)).
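to_rfc3339/1 wraps OTP's calendar:system_time_to_rfc3339/1, which formats a second-resolution timestamp using the host's UTC offset. An illustrative use, building an end_at value 30 minutes from now (the printed value depends on the local offset):

%% Sketch only.
rfc3339_demo() ->
    Now = erlang:system_time(second),
    %% e.g. <<"2021-10-28T10:54:47+08:00">> on a UTC+8 host
    list_to_binary(calendar:system_time_to_rfc3339(Now + 30 * 60)).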
load() ->
emqx_trace:start_link().
unload() ->
gen_server:stop(emqx_trace).

View File

@ -0,0 +1,191 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2019-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_trace_handler_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
-define(CLIENT, [{host, "localhost"},
{clientid, <<"client">>},
{username, <<"testuser">>},
{password, <<"pass">>}
]).
all() -> [t_trace_clientid, t_trace_topic, t_trace_ip_address].
init_per_suite(Config) ->
emqx_common_test_helpers:boot_modules(all),
emqx_common_test_helpers:start_apps([]),
Config.
end_per_suite(_Config) ->
emqx_common_test_helpers:stop_apps([]).
init_per_testcase(t_trace_clientid, Config) ->
Config;
init_per_testcase(_Case, Config) ->
ok = emqx_logger:set_log_level(debug),
_ = [logger:remove_handler(Id) || #{id := Id} <- emqx_trace_handler:running()],
Config.
end_per_testcase(_Case, _Config) ->
ok = emqx_logger:set_log_level(warning),
ok.
t_trace_clientid(_Config) ->
%% Start tracing
emqx_logger:set_log_level(error),
{error, _} = emqx_trace_handler:install(clientid, <<"client">>, debug, "tmp/client.log"),
emqx_logger:set_log_level(debug),
%% the clientid filter may also be given as a list (string)
ok = emqx_trace_handler:install(clientid, "client", debug, "tmp/client.log"),
ok = emqx_trace_handler:install(clientid, <<"client2">>, all, "tmp/client2.log"),
ok = emqx_trace_handler:install(clientid, <<"client3">>, all, "tmp/client3.log"),
{error, {invalid_log_level, bad_level}} =
emqx_trace_handler:install(clientid, <<"client4">>, bad_level, "tmp/client4.log"),
{error, {handler_not_added, {file_error, ".", eisdir}}} =
emqx_trace_handler:install(clientid, <<"client5">>, debug, "."),
ct:sleep(100),
%% Verify the tracing file exists
?assert(filelib:is_regular("tmp/client.log")),
?assert(filelib:is_regular("tmp/client2.log")),
?assert(filelib:is_regular("tmp/client3.log")),
%% Get current traces
?assertMatch([#{type := clientid, filter := "client", name := <<"client">>,
level := debug, dst := "tmp/client.log"},
#{type := clientid, filter := "client2", name := <<"client2">>
, level := debug, dst := "tmp/client2.log"},
#{type := clientid, filter := "client3", name := <<"client3">>,
level := debug, dst := "tmp/client3.log"}
], emqx_trace_handler:running()),
%% Client with clientid = "client" publishes a "hi" message to "a/b/c".
{ok, T} = emqtt:start_link(?CLIENT),
emqtt:connect(T),
emqtt:publish(T, <<"a/b/c">>, <<"hi">>),
emqtt:ping(T),
ct:sleep(200),
%% Verify messages are logged to "tmp/client.log" but not "tmp/client2.log".
{ok, Bin} = file:read_file("tmp/client.log"),
?assertNotEqual(nomatch, binary:match(Bin, [<<"CONNECT">>])),
?assertNotEqual(nomatch, binary:match(Bin, [<<"CONNACK">>])),
?assertNotEqual(nomatch, binary:match(Bin, [<<"PUBLISH">>])),
?assertNotEqual(nomatch, binary:match(Bin, [<<"PINGREQ">>])),
?assert(filelib:file_size("tmp/client2.log") == 0),
%% Stop tracing
ok = emqx_trace_handler:uninstall(clientid, <<"client">>),
ok = emqx_trace_handler:uninstall(clientid, <<"client2">>),
ok = emqx_trace_handler:uninstall(clientid, <<"client3">>),
emqtt:disconnect(T),
?assertEqual([], emqx_trace_handler:running()).
t_trace_topic(_Config) ->
{ok, T} = emqtt:start_link(?CLIENT),
emqtt:connect(T),
%% Start tracing
emqx_logger:set_log_level(debug),
ok = emqx_trace_handler:install(topic, <<"x/#">>, all, "tmp/topic_trace_x.log"),
ok = emqx_trace_handler:install(topic, <<"y/#">>, all, "tmp/topic_trace_y.log"),
ct:sleep(100),
%% Verify the tracing file exists
?assert(filelib:is_regular("tmp/topic_trace_x.log")),
?assert(filelib:is_regular("tmp/topic_trace_y.log")),
%% Get current traces
?assertMatch([#{type := topic, filter := <<"x/#">>,
level := debug, dst := "tmp/topic_trace_x.log", name := <<"x/#">>},
#{type := topic, filter := <<"y/#">>,
name := <<"y/#">>, level := debug, dst := "tmp/topic_trace_y.log"}
],
emqx_trace_handler:running()),
%% Client with clientid = "client" publishes a "hi" message to "x/y/z".
emqtt:publish(T, <<"x/y/z">>, <<"hi1">>),
emqtt:publish(T, <<"x/y/z">>, <<"hi2">>),
emqtt:subscribe(T, <<"x/y/z">>),
emqtt:unsubscribe(T, <<"x/y/z">>),
ct:sleep(200),
{ok, Bin} = file:read_file("tmp/topic_trace_x.log"),
?assertNotEqual(nomatch, binary:match(Bin, [<<"hi1">>])),
?assertNotEqual(nomatch, binary:match(Bin, [<<"hi2">>])),
?assertNotEqual(nomatch, binary:match(Bin, [<<"PUBLISH">>])),
?assertNotEqual(nomatch, binary:match(Bin, [<<"SUBSCRIBE">>])),
?assertNotEqual(nomatch, binary:match(Bin, [<<"UNSUBSCRIBE">>])),
?assert(filelib:file_size("tmp/topic_trace_y.log") =:= 0),
%% Stop tracing
ok = emqx_trace_handler:uninstall(topic, <<"x/#">>),
ok = emqx_trace_handler:uninstall(topic, <<"y/#">>),
{error, _Reason} = emqx_trace_handler:uninstall(topic, <<"z/#">>),
?assertEqual([], emqx_trace_handler:running()),
emqtt:disconnect(T).
t_trace_ip_address(_Config) ->
{ok, T} = emqtt:start_link(?CLIENT),
emqtt:connect(T),
%% Start tracing
ok = emqx_trace_handler:install(ip_address, "127.0.0.1", all, "tmp/ip_trace_x.log"),
ok = emqx_trace_handler:install(ip_address, "192.168.1.1", all, "tmp/ip_trace_y.log"),
ct:sleep(100),
%% Verify the tracing file exists
?assert(filelib:is_regular("tmp/ip_trace_x.log")),
?assert(filelib:is_regular("tmp/ip_trace_y.log")),
%% Get current traces
?assertMatch([#{type := ip_address, filter := "127.0.0.1",
name := <<"127.0.0.1">>,
level := debug, dst := "tmp/ip_trace_x.log"},
#{type := ip_address, filter := "192.168.1.1",
name := <<"192.168.1.1">>,
level := debug, dst := "tmp/ip_trace_y.log"}
],
emqx_trace_handler:running()),
%% Client with clientid = "client" publishes a "hi" message to "x/y/z".
emqtt:publish(T, <<"x/y/z">>, <<"hi1">>),
emqtt:publish(T, <<"x/y/z">>, <<"hi2">>),
emqtt:subscribe(T, <<"x/y/z">>),
emqtt:unsubscribe(T, <<"x/y/z">>),
ct:sleep(200),
{ok, Bin} = file:read_file("tmp/ip_trace_x.log"),
?assertNotEqual(nomatch, binary:match(Bin, [<<"hi1">>])),
?assertNotEqual(nomatch, binary:match(Bin, [<<"hi2">>])),
?assertNotEqual(nomatch, binary:match(Bin, [<<"PUBLISH">>])),
?assertNotEqual(nomatch, binary:match(Bin, [<<"SUBSCRIBE">>])),
?assertNotEqual(nomatch, binary:match(Bin, [<<"UNSUBSCRIBE">>])),
?assert(filelib:file_size("tmp/ip_trace_y.log") =:= 0),
%% Stop tracing
ok = emqx_trace_handler:uninstall(ip_address, <<"127.0.0.1">>),
ok = emqx_trace_handler:uninstall(ip_address, <<"192.168.1.1">>),
{error, _Reason} = emqx_trace_handler:uninstall(ip_address, <<"127.0.0.2">>),
emqtt:disconnect(T),
?assertEqual([], emqx_trace_handler:running()).
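The suite above drives the new emqx_trace_handler API end to end. Reduced to its essentials, installing and removing a topic trace looks like the sketch below; the topic filter and log path are illustrative:

%% Sketch only; requires a running emqx node.
trace_topic_demo() ->
    ok = emqx_trace_handler:install(topic, <<"demo/#">>, debug, "tmp/demo_topic.log"),
    %% packets on topics matching demo/# are now appended to the log file
    ok = emqx_trace_handler:uninstall(topic, <<"demo/#">>).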

View File

@ -1,120 +0,0 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2019-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_tracer_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
all() -> [t_trace_clientid, t_trace_topic].
init_per_suite(Config) ->
emqx_common_test_helpers:boot_modules(all),
emqx_common_test_helpers:start_apps([]),
Config.
end_per_suite(_Config) ->
emqx_common_test_helpers:stop_apps([]).
t_trace_clientid(_Config) ->
{ok, T} = emqtt:start_link([{host, "localhost"},
{clientid, <<"client">>},
{username, <<"testuser">>},
{password, <<"pass">>}
]),
emqtt:connect(T),
%% Start tracing
emqx_logger:set_log_level(error),
{error, _} = emqx_tracer:start_trace({clientid, <<"client">>}, debug, "tmp/client.log"),
emqx_logger:set_log_level(debug),
ok = emqx_tracer:start_trace({clientid, <<"client">>}, debug, "tmp/client.log"),
ok = emqx_tracer:start_trace({clientid, <<"client2">>}, all, "tmp/client2.log"),
ok = emqx_tracer:start_trace({clientid, <<"client3">>}, all, "tmp/client3.log"),
{error, {invalid_log_level, bad_level}} = emqx_tracer:start_trace({clientid, <<"client4">>}, bad_level, "tmp/client4.log"),
{error, {handler_not_added, {file_error,".",eisdir}}} = emqx_tracer:start_trace({clientid, <<"client5">>}, debug, "."),
ct:sleep(100),
%% Verify the tracing file exists
?assert(filelib:is_regular("tmp/client.log")),
?assert(filelib:is_regular("tmp/client2.log")),
%% Get current traces
?assertEqual([{{clientid,"client"},{debug,"tmp/client.log"}},
{{clientid,"client2"},{debug,"tmp/client2.log"}},
{{clientid,"client3"},{debug,"tmp/client3.log"}}
], emqx_tracer:lookup_traces()),
%% set the overall log level to debug
emqx_logger:set_log_level(debug),
%% Client with clientid = "client" publishes a "hi" message to "a/b/c".
emqtt:publish(T, <<"a/b/c">>, <<"hi">>),
ct:sleep(200),
%% Verify messages are logged to "tmp/client.log" but not "tmp/client2.log".
?assert(filelib:file_size("tmp/client.log") > 0),
?assert(filelib:file_size("tmp/client2.log") == 0),
%% Stop tracing
ok = emqx_tracer:stop_trace({clientid, <<"client">>}),
ok = emqx_tracer:stop_trace({clientid, <<"client2">>}),
ok = emqx_tracer:stop_trace({clientid, <<"client3">>}),
emqtt:disconnect(T),
emqx_logger:set_log_level(warning).
t_trace_topic(_Config) ->
{ok, T} = emqtt:start_link([{host, "localhost"},
{clientid, <<"client">>},
{username, <<"testuser">>},
{password, <<"pass">>}
]),
emqtt:connect(T),
%% Start tracing
emqx_logger:set_log_level(debug),
ok = emqx_tracer:start_trace({topic, <<"x/#">>}, all, "tmp/topic_trace.log"),
ok = emqx_tracer:start_trace({topic, <<"y/#">>}, all, "tmp/topic_trace.log"),
ct:sleep(100),
%% Verify the tracing file exists
?assert(filelib:is_regular("tmp/topic_trace.log")),
%% Get current traces
?assertEqual([{{topic,"x/#"},{debug,"tmp/topic_trace.log"}},
{{topic,"y/#"},{debug,"tmp/topic_trace.log"}}], emqx_tracer:lookup_traces()),
%% set the overall log level to debug
emqx_logger:set_log_level(debug),
%% Client with clientid = "client" publishes a "hi" message to "x/y/z".
emqtt:publish(T, <<"x/y/z">>, <<"hi">>),
ct:sleep(200),
?assert(filelib:file_size("tmp/topic_trace.log") > 0),
%% Stop tracing
ok = emqx_tracer:stop_trace({topic, <<"x/#">>}),
ok = emqx_tracer:stop_trace({topic, <<"y/#">>}),
{error, _Reason} = emqx_tracer:stop_trace({topic, <<"z/#">>}),
emqtt:disconnect(T),
emqx_logger:set_log_level(warning).

View File

@ -48,7 +48,10 @@ init_per_testcase(TestCase, Config) when
TestCase =/= t_ws_pingreq_before_connected, TestCase =/= t_ws_pingreq_before_connected,
TestCase =/= t_ws_non_check_origin TestCase =/= t_ws_non_check_origin
-> ->
emqx_channel_SUITE:set_test_listenser_confs(), %% Mock emqx_cm
ok = meck:new(emqx_cm, [passthrough, no_history, no_link]),
ok = meck:expect(emqx_cm, mark_channel_connected, fun(_) -> ok end),
ok = meck:expect(emqx_cm, mark_channel_disconnected, fun(_) -> ok end),
%% Mock cowboy_req %% Mock cowboy_req
ok = meck:new(cowboy_req, [passthrough, no_history, no_link]), ok = meck:new(cowboy_req, [passthrough, no_history, no_link]),
ok = meck:expect(cowboy_req, header, fun(_, _, _) -> <<>> end), ok = meck:expect(cowboy_req, header, fun(_, _, _) -> <<>> end),
@ -90,7 +93,8 @@ end_per_testcase(TestCase, _Config) when
TestCase =/= t_ws_pingreq_before_connected TestCase =/= t_ws_pingreq_before_connected
-> ->
lists:foreach(fun meck:unload/1, lists:foreach(fun meck:unload/1,
[cowboy_req, [emqx_cm,
cowboy_req,
emqx_access_control, emqx_access_control,
emqx_broker, emqx_broker,
emqx_hooks, emqx_hooks,
@ -363,14 +367,12 @@ t_handle_info_close(_) ->
{[{close, _}], _St} = ?ws_conn:handle_info({close, protocol_error}, st()). {[{close, _}], _St} = ?ws_conn:handle_info({close, protocol_error}, st()).
t_handle_info_event(_) -> t_handle_info_event(_) ->
ok = meck:new(emqx_cm, [passthrough, no_history]),
ok = meck:expect(emqx_cm, register_channel, fun(_,_,_) -> ok end), ok = meck:expect(emqx_cm, register_channel, fun(_,_,_) -> ok end),
ok = meck:expect(emqx_cm, insert_channel_info, fun(_,_,_) -> ok end), ok = meck:expect(emqx_cm, insert_channel_info, fun(_,_,_) -> ok end),
ok = meck:expect(emqx_cm, connection_closed, fun(_) -> true end), ok = meck:expect(emqx_cm, connection_closed, fun(_) -> true end),
{ok, _} = ?ws_conn:handle_info({event, connected}, st()), {ok, _} = ?ws_conn:handle_info({event, connected}, st()),
{ok, _} = ?ws_conn:handle_info({event, disconnected}, st()), {ok, _} = ?ws_conn:handle_info({event, disconnected}, st()),
{ok, _} = ?ws_conn:handle_info({event, updated}, st()), {ok, _} = ?ws_conn:handle_info({event, updated}, st()).
ok = meck:unload(emqx_cm).
t_handle_timeout_idle_timeout(_) -> t_handle_timeout_idle_timeout(_) ->
TRef = make_ref(), TRef = make_ref(),

File diff suppressed because it is too large

View File

@ -34,12 +34,11 @@
start(_StartType, _StartArgs) -> start(_StartType, _StartArgs) ->
ok = mria_rlog:wait_for_shards([?AUTH_SHARD], infinity), ok = mria_rlog:wait_for_shards([?AUTH_SHARD], infinity),
{ok, Sup} = emqx_authn_sup:start_link(), {ok, Sup} = emqx_authn_sup:start_link(),
ok = ?AUTHN:register_providers(emqx_authn:providers()),
ok = initialize(), ok = initialize(),
{ok, Sup}. {ok, Sup}.
stop(_State) -> stop(_State) ->
ok = ?AUTHN:deregister_providers(provider_types()), ok = deinitialize(),
ok. ok.
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
@ -47,12 +46,38 @@ stop(_State) ->
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
initialize() -> initialize() ->
RawConfigs = emqx:get_raw_config([authentication], []), ok = ?AUTHN:register_providers(emqx_authn:providers()),
Config = emqx_authn:check_configs(RawConfigs),
?AUTHN:initialize_authentication(?GLOBAL, Config), lists:foreach(
lists:foreach(fun({ListenerID, ListenerConfig}) -> fun({ChainName, RawAuthConfigs}) ->
?AUTHN:initialize_authentication(ListenerID, maps:get(authentication, ListenerConfig, [])) AuthConfig = emqx_authn:check_configs(RawAuthConfigs),
end, emqx_listeners:list()). ?AUTHN:initialize_authentication(
ChainName,
AuthConfig)
end,
chain_configs()).
deinitialize() ->
ok = ?AUTHN:deregister_providers(provider_types()),
ok = emqx_authn_utils:cleanup_resources().
chain_configs() ->
[global_chain_config() | listener_chain_configs()].
global_chain_config() ->
{?GLOBAL, emqx:get_raw_config([<<"authentication">>], [])}.
listener_chain_configs() ->
lists:map(
fun({ListenerID, _}) ->
{ListenerID, emqx:get_raw_config(auth_config_path(ListenerID), [])}
end,
emqx_listeners:list()).
auth_config_path(ListenerID) ->
[<<"listeners">>]
++ binary:split(atom_to_binary(ListenerID), <<":">>)
++ [<<"authentication">>].
provider_types() -> provider_types() ->
lists:map(fun({Type, _Module}) -> Type end, emqx_authn:providers()). lists:map(fun({Type, _Module}) -> Type end, emqx_authn:providers()).

View File

@ -21,12 +21,11 @@
-export([ common_fields/0 -export([ common_fields/0
, roots/0 , roots/0
, fields/1 , fields/1
, authenticator_type/0
]). ]).
%% only for doc generation %% only for doc generation
roots() -> [{authenticator_config, roots() -> [{authenticator_config, hoconsc:mk(authenticator_type())}].
#{type => hoconsc:union(config_refs([Module || {_AuthnType, Module} <- emqx_authn:providers()]))
}}].
fields(_) -> []. fields(_) -> [].
@ -38,5 +37,8 @@ enable(type) -> boolean();
enable(default) -> true; enable(default) -> true;
enable(_) -> undefined. enable(_) -> undefined.
authenticator_type() ->
hoconsc:union(config_refs([Module || {_AuthnType, Module} <- emqx_authn:providers()])).
config_refs(Modules) -> config_refs(Modules) ->
lists:append([Module:refs() || Module <- Modules]). lists:append([Module:refs() || Module <- Modules]).

View File

@ -16,6 +16,8 @@
-module(emqx_authn_utils). -module(emqx_authn_utils).
-include_lib("emqx/include/emqx_placeholder.hrl").
-export([ replace_placeholders/2 -export([ replace_placeholders/2
, replace_placeholder/2 , replace_placeholder/2
, check_password/3 , check_password/3
@ -23,8 +25,13 @@
, hash/4 , hash/4
, gen_salt/0 , gen_salt/0
, bin/1 , bin/1
, ensure_apps_started/1
, cleanup_resources/0
, make_resource_id/1
]). ]).
-define(RESOURCE_GROUP, <<"emqx_authn">>).
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
%% APIs %% APIs
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
@ -42,17 +49,17 @@ replace_placeholders([Placeholder | More], Credential, Acc) ->
replace_placeholders(More, Credential, [convert_to_sql_param(V) | Acc]) replace_placeholders(More, Credential, [convert_to_sql_param(V) | Acc])
end. end.
replace_placeholder(<<"${mqtt-username}">>, Credential) -> replace_placeholder(?PH_USERNAME, Credential) ->
maps:get(username, Credential, undefined); maps:get(username, Credential, undefined);
replace_placeholder(<<"${mqtt-clientid}">>, Credential) -> replace_placeholder(?PH_CLIENTID, Credential) ->
maps:get(clientid, Credential, undefined); maps:get(clientid, Credential, undefined);
replace_placeholder(<<"${mqtt-password}">>, Credential) -> replace_placeholder(?PH_PASSWORD, Credential) ->
maps:get(password, Credential, undefined); maps:get(password, Credential, undefined);
replace_placeholder(<<"${ip-address}">>, Credential) -> replace_placeholder(?PH_PEERHOST, Credential) ->
maps:get(peerhost, Credential, undefined); maps:get(peerhost, Credential, undefined);
replace_placeholder(<<"${cert-subject}">>, Credential) -> replace_placeholder(?PH_CERT_SUBJECT, Credential) ->
maps:get(dn, Credential, undefined); maps:get(dn, Credential, undefined);
replace_placeholder(<<"${cert-common-name}">>, Credential) -> replace_placeholder(?PH_CERT_CN_NAME, Credential) ->
maps:get(cn, Credential, undefined); maps:get(cn, Credential, undefined);
replace_placeholder(Constant, _) -> replace_placeholder(Constant, _) ->
Constant. Constant.
@ -62,22 +69,42 @@ check_password(undefined, _Selected, _State) ->
check_password(Password, check_password(Password,
#{<<"password_hash">> := Hash}, #{<<"password_hash">> := Hash},
#{password_hash_algorithm := bcrypt}) -> #{password_hash_algorithm := bcrypt}) ->
case {ok, Hash} =:= bcrypt:hashpw(Password, Hash) of case emqx_passwd:hash(bcrypt, {Hash, Password}) of
true -> ok; Hash -> ok;
false -> {error, bad_username_or_password} _ ->
{error, bad_username_or_password}
end; end;
check_password(Password, check_password(Password,
#{<<"password_hash">> := Hash} = Selected, #{<<"password_hash">> := Hash} = Selected,
#{password_hash_algorithm := Algorithm, #{password_hash_algorithm := Algorithm,
salt_position := SaltPosition}) -> salt_position := SaltPosition}) ->
Salt = maps:get(<<"salt">>, Selected, <<>>), Salt = maps:get(<<"salt">>, Selected, <<>>),
case Hash =:= hash(Algorithm, Password, Salt, SaltPosition) of case hash(Algorithm, Password, Salt, SaltPosition) of
true -> ok; Hash -> ok;
false -> {error, bad_username_or_password} _ ->
{error, bad_username_or_password}
end. end.
is_superuser(Selected) -> is_superuser(#{<<"is_superuser">> := <<"">>}) ->
#{is_superuser => maps:get(<<"is_superuser">>, Selected, false)}. #{is_superuser => false};
is_superuser(#{<<"is_superuser">> := <<"0">>}) ->
#{is_superuser => false};
is_superuser(#{<<"is_superuser">> := 0}) ->
#{is_superuser => false};
is_superuser(#{<<"is_superuser">> := null}) ->
#{is_superuser => false};
is_superuser(#{<<"is_superuser">> := false}) ->
#{is_superuser => false};
is_superuser(#{<<"is_superuser">> := _}) ->
#{is_superuser => true};
is_superuser(#{}) ->
#{is_superuser => false}.
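The new is_superuser/1 clauses normalise the differently typed values a backend may return (empty string, "0", 0, null, false, or a missing key) to a boolean flag; any other present value is treated as a superuser. Expected results for a few representative inputs, written as a sketch that would sit in this module:

%% Sketch only; mirrors the clause order above.
is_superuser_demo() ->
    #{is_superuser := false} = is_superuser(#{<<"is_superuser">> => <<"0">>}),
    #{is_superuser := false} = is_superuser(#{<<"is_superuser">> => null}),
    #{is_superuser := true}  = is_superuser(#{<<"is_superuser">> => 1}),
    #{is_superuser := false} = is_superuser(#{}),
    ok.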
ensure_apps_started(bcrypt) ->
{ok, _} = application:ensure_all_started(bcrypt),
ok;
ensure_apps_started(_) ->
ok.
hash(Algorithm, Password, Salt, prefix) -> hash(Algorithm, Password, Salt, prefix) ->
emqx_passwd:hash(Algorithm, <<Salt/binary, Password/binary>>); emqx_passwd:hash(Algorithm, <<Salt/binary, Password/binary>>);
@ -92,6 +119,15 @@ bin(A) when is_atom(A) -> atom_to_binary(A, utf8);
bin(L) when is_list(L) -> list_to_binary(L); bin(L) when is_list(L) -> list_to_binary(L);
bin(X) -> X. bin(X) -> X.
cleanup_resources() ->
lists:foreach(
fun emqx_resource:remove_local/1,
emqx_resource:list_group_instances(?RESOURCE_GROUP)).
make_resource_id(Name) ->
NameBin = bin(Name),
emqx_resource:generate_id(?RESOURCE_GROUP, NameBin).
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
%% Internal functions %% Internal functions
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------

View File

@ -17,6 +17,7 @@
-module(emqx_enhanced_authn_scram_mnesia). -module(emqx_enhanced_authn_scram_mnesia).
-include("emqx_authn.hrl"). -include("emqx_authn.hrl").
-include_lib("stdlib/include/ms_transform.hrl").
-include_lib("typerefl/include/types.hrl"). -include_lib("typerefl/include/types.hrl").
-behaviour(hocon_schema). -behaviour(hocon_schema).
@ -28,7 +29,7 @@
]). ]).
-export([ refs/0 -export([ refs/0
, create/1 , create/2
, update/2 , update/2
, authenticate/2 , authenticate/2
, destroy/1 , destroy/1
@ -46,6 +47,8 @@
-define(TAB, ?MODULE). -define(TAB, ?MODULE).
-define(FORMAT_FUN, {?MODULE, format_user_info}). -define(FORMAT_FUN, {?MODULE, format_user_info}).
-type(user_group() :: binary()).
-export([mnesia/1]). -export([mnesia/1]).
-boot_mnesia({mnesia, [boot]}). -boot_mnesia({mnesia, [boot]}).
@ -58,6 +61,8 @@
, is_superuser , is_superuser
}). }).
-reflect_type([user_group/0]).
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
%% Mnesia bootstrap %% Mnesia bootstrap
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
@ -102,17 +107,17 @@ iteration_count(_) -> undefined.
refs() -> refs() ->
[hoconsc:ref(?MODULE, config)]. [hoconsc:ref(?MODULE, config)].
create(#{ algorithm := Algorithm create(AuthenticatorID,
, iteration_count := IterationCount #{algorithm := Algorithm,
, '_unique' := Unique iteration_count := IterationCount}) ->
}) -> State = #{user_group => AuthenticatorID,
State = #{user_group => Unique,
algorithm => Algorithm, algorithm => Algorithm,
iteration_count => IterationCount}, iteration_count => IterationCount},
{ok, State}. {ok, State}.
update(Config, #{user_group := Unique}) ->
create(Config#{'_unique' => Unique}). update(Config, #{user_group := ID}) ->
create(ID, Config).
authenticate(#{auth_method := AuthMethod, authenticate(#{auth_method := AuthMethod,
auth_data := AuthData, auth_data := AuthData,
@ -132,9 +137,12 @@ authenticate(_Credential, _State) ->
ignore. ignore.
destroy(#{user_group := UserGroup}) -> destroy(#{user_group := UserGroup}) ->
MatchSpec = ets:fun2ms(
fun(#user_info{user_id = {Group, _}} = User) when Group =:= UserGroup ->
User
end),
trans( trans(
fun() -> fun() ->
MatchSpec = [{{user_info, {UserGroup, '_'}, '_', '_', '_', '_'}, [], ['$_']}],
ok = lists:foreach(fun(UserInfo) -> ok = lists:foreach(fun(UserInfo) ->
mnesia:delete_object(?TAB, UserInfo, write) mnesia:delete_object(?TAB, UserInfo, write)
end, mnesia:select(?TAB, MatchSpec, write)) end, mnesia:select(?TAB, MatchSpec, write))

View File

@ -30,7 +30,7 @@
]). ]).
-export([ refs/0 -export([ refs/0
, create/1 , create/2
, update/2 , update/2
, authenticate/2 , authenticate/2
, destroy/1 , destroy/1
@ -113,24 +113,25 @@ refs() ->
, hoconsc:ref(?MODULE, post) , hoconsc:ref(?MODULE, post)
]. ].
create(#{ method := Method create(_AuthenticatorID, Config) ->
, url := URL create(Config).
, headers := Headers
, body := Body create(#{method := Method,
, request_timeout := RequestTimeout url := URL,
, '_unique' := Unique headers := Headers,
} = Config) -> body := Body,
request_timeout := RequestTimeout} = Config) ->
#{path := Path, #{path := Path,
query := Query} = URIMap = parse_url(URL), query := Query} = URIMap = parse_url(URL),
State = #{ method => Method ResourceId = emqx_authn_utils:make_resource_id(?MODULE),
, path => Path State = #{method => Method,
, base_query => cow_qs:parse_qs(list_to_binary(Query)) path => Path,
, headers => maps:to_list(Headers) base_query => cow_qs:parse_qs(list_to_binary(Query)),
, body => maps:to_list(Body) headers => maps:to_list(Headers),
, request_timeout => RequestTimeout body => maps:to_list(Body),
, '_unique' => Unique request_timeout => RequestTimeout,
}, resource_id => ResourceId},
case emqx_resource:create_local(Unique, case emqx_resource:create_local(ResourceId,
emqx_connector_http, emqx_connector_http,
Config#{base_url => maps:remove(query, URIMap), Config#{base_url => maps:remove(query, URIMap),
pool_type => random}) of pool_type => random}) of
@ -153,11 +154,11 @@ update(Config, State) ->
authenticate(#{auth_method := _}, _) -> authenticate(#{auth_method := _}, _) ->
ignore; ignore;
authenticate(Credential, #{'_unique' := Unique, authenticate(Credential, #{resource_id := ResourceId,
method := Method, method := Method,
request_timeout := RequestTimeout} = State) -> request_timeout := RequestTimeout} = State) ->
Request = generate_request(Credential, State), Request = generate_request(Credential, State),
case emqx_resource:query(Unique, {Method, Request, RequestTimeout}) of case emqx_resource:query(ResourceId, {Method, Request, RequestTimeout}) of
{ok, 204, _Headers} -> {ok, #{is_superuser => false}}; {ok, 204, _Headers} -> {ok, #{is_superuser => false}};
{ok, 200, Headers, Body} -> {ok, 200, Headers, Body} ->
ContentType = proplists:get_value(<<"content-type">>, Headers, <<"application/json">>), ContentType = proplists:get_value(<<"content-type">>, Headers, <<"application/json">>),
@ -165,19 +166,35 @@ authenticate(Credential, #{'_unique' := Unique,
{ok, NBody} -> {ok, NBody} ->
%% TODO: Return by user property %% TODO: Return by user property
{ok, #{is_superuser => maps:get(<<"is_superuser">>, NBody, false), {ok, #{is_superuser => maps:get(<<"is_superuser">>, NBody, false),
user_property => NBody}}; user_property => maps:remove(<<"is_superuser">>, NBody)}};
{error, _Reason} -> {error, _Reason} ->
{ok, #{is_superuser => false}} {ok, #{is_superuser => false}}
end; end;
{error, Reason} -> {error, Reason} ->
?SLOG(error, #{msg => "http_server_query_failed", ?SLOG(error, #{msg => "http_server_query_failed",
resource => Unique, resource => ResourceId,
reason => Reason}), reason => Reason}),
ignore ignore;
Other ->
Output = may_append_body(#{resource => ResourceId}, Other),
case erlang:element(2, Other) of
Code5xx when Code5xx >= 500 andalso Code5xx < 600 ->
?SLOG(error, Output#{msg => "http_server_error",
code => Code5xx}),
ignore;
Code4xx when Code4xx >= 400 andalso Code4xx < 500 ->
?SLOG(warning, Output#{msg => "refused_by_http_server",
code => Code4xx}),
{error, not_authorized};
OtherCode ->
?SLOG(error, Output#{msg => "undesired_response_code",
code => OtherCode}),
ignore
end
end. end.
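Spelled out, the response handling above now maps as follows (a restatement of the branches shown, not new behaviour):

%% {ok, 204, _Headers}                          -> {ok, #{is_superuser => false}}
%% {ok, 200, _Headers, Body}, body parses       -> {ok, #{is_superuser => ..., user_property => ...}}
%% {ok, 200, _Headers, Body}, body unparsable   -> {ok, #{is_superuser => false}}
%% {ok, Code, ...} with 400 =< Code < 500       -> {error, not_authorized}
%% {ok, Code, ...} with 500 =< Code < 600       -> ignore
%% any other status code                        -> ignore
%% {error, Reason}                              -> ignore (logged as http_server_query_failed)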
destroy(#{'_unique' := Unique}) -> destroy(#{resource_id := ResourceId}) ->
_ = emqx_resource:remove_local(Unique), _ = emqx_resource:remove_local(ResourceId),
ok. ok.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
@ -305,6 +322,11 @@ parse_body(<<"application/x-www-form-urlencoded">>, Body) ->
parse_body(ContentType, _) -> parse_body(ContentType, _) ->
{error, {unsupported_content_type, ContentType}}. {error, {unsupported_content_type, ContentType}}.
may_append_body(Output, {ok, _, _, Body}) ->
Output#{body => Body};
may_append_body(Output, {ok, _, _}) ->
Output.
to_list(A) when is_atom(A) -> to_list(A) when is_atom(A) ->
atom_to_list(A); atom_to_list(A);
to_list(B) when is_binary(B) -> to_list(B) when is_binary(B) ->

@ -82,10 +82,10 @@ handle_info({refresh_jwks, _TRef, refresh}, #{request_id := RequestID} = State)
_ -> _ ->
ok = httpc:cancel_request(RequestID), ok = httpc:cancel_request(RequestID),
receive receive
{http, _} -> ok {http, _} -> ok
after 0 -> after 0 ->
ok ok
end end
end, end,
{noreply, refresh_jwks(State)}; {noreply, refresh_jwks(State)};
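The refresh branch above cancels an in-flight async httpc request and then drains a reply that may already sit in the mailbox, using a zero-timeout receive so it never blocks. A minimal standalone sketch of the same pattern (the function name is illustrative, not part of the module):

%% Cancel an async httpc request and drop a reply that may already
%% have arrived, without blocking.
cancel_and_flush(RequestId) ->
    ok = httpc:cancel_request(RequestId),
    receive
        {http, {RequestId, _Result}} -> ok
    after 0 ->
        ok
    end.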

@ -27,7 +27,7 @@
]). ]).
-export([ refs/0 -export([ refs/0
, create/1 , create/2
, update/2 , update/2
, authenticate/2 , authenticate/2
, destroy/1 , destroy/1
@ -139,18 +139,23 @@ refs() ->
, hoconsc:ref(?MODULE, 'jwks') , hoconsc:ref(?MODULE, 'jwks')
]. ].
create(_AuthenticatorID, Config) ->
create(Config).
create(#{verify_claims := VerifyClaims} = Config) -> create(#{verify_claims := VerifyClaims} = Config) ->
create2(Config#{verify_claims => handle_verify_claims(VerifyClaims)}). create2(Config#{verify_claims => handle_verify_claims(VerifyClaims)}).
update(#{use_jwks := false} = Config, #{jwk := Connector}) update(#{use_jwks := false} = Config,
#{jwk := Connector})
when is_pid(Connector) -> when is_pid(Connector) ->
_ = emqx_authn_jwks_connector:stop(Connector), _ = emqx_authn_jwks_connector:stop(Connector),
create(Config); create(Config);
update(#{use_jwks := false} = Config, _) -> update(#{use_jwks := false} = Config, _State) ->
create(Config); create(Config);
update(#{use_jwks := true} = Config, #{jwk := Connector} = State) update(#{use_jwks := true} = Config,
#{jwk := Connector} = State)
when is_pid(Connector) -> when is_pid(Connector) ->
ok = emqx_authn_jwks_connector:update(Connector, Config), ok = emqx_authn_jwks_connector:update(Connector, Config),
case maps:get(verify_cliams, Config, undefined) of case maps:get(verify_cliams, Config, undefined) of
@ -160,7 +165,7 @@ update(#{use_jwks := true} = Config, #{jwk := Connector} = State)
{ok, State#{verify_claims => handle_verify_claims(VerifyClaims)}} {ok, State#{verify_claims => handle_verify_claims(VerifyClaims)}}
end; end;
update(#{use_jwks := true} = Config, _) -> update(#{use_jwks := true} = Config, _State) ->
create(Config). create(Config).
authenticate(#{auth_method := _}, _) -> authenticate(#{auth_method := _}, _) ->
@ -340,7 +345,7 @@ handle_placeholder(Placeholder0) ->
Placeholder0 Placeholder0
end. end.
validate_placeholder(<<"mqtt-clientid">>) -> validate_placeholder(<<"clientid">>) ->
clientid; clientid;
validate_placeholder(<<"mqtt-username">>) -> validate_placeholder(<<"username">>) ->
username. username.
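validate_placeholder/1 now accepts the bare credential names instead of the "mqtt-" prefixed ones, so templates written against the old names need the shorter form. An illustrative before/after (the "${...}" wrapping and the claim key are assumptions for the example; only the two accepted names are certain from the code above):

%% before: verify_claims => #{<<"sub">> => <<"${mqtt-username}">>}
%% after:  verify_claims => #{<<"sub">> => <<"${username}">>}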

@ -17,6 +17,7 @@
-module(emqx_authn_mnesia). -module(emqx_authn_mnesia).
-include("emqx_authn.hrl"). -include("emqx_authn.hrl").
-include_lib("stdlib/include/ms_transform.hrl").
-include_lib("typerefl/include/types.hrl"). -include_lib("typerefl/include/types.hrl").
-behaviour(hocon_schema). -behaviour(hocon_schema).
@ -28,7 +29,7 @@
]). ]).
-export([ refs/0 -export([ refs/0
, create/1 , create/2
, update/2 , update/2
, authenticate/2 , authenticate/2
, destroy/1 , destroy/1
@ -45,8 +46,7 @@
-export([format_user_info/1]). -export([format_user_info/1]).
-type user_id_type() :: clientid | username. -type user_id_type() :: clientid | username.
-type user_group() :: binary().
-type user_group() :: {binary(), binary()}.
-type user_id() :: binary(). -type user_id() :: binary().
-record(user_info, -record(user_info,
@ -56,7 +56,7 @@
, is_superuser :: boolean() , is_superuser :: boolean()
}). }).
-reflect_type([ user_id_type/0 ]). -reflect_type([user_id_type/0]).
-export([mnesia/1]). -export([mnesia/1]).
@ -123,29 +123,28 @@ salt_rounds(_) -> undefined.
refs() -> refs() ->
[hoconsc:ref(?MODULE, config)]. [hoconsc:ref(?MODULE, config)].
create(#{ user_id_type := Type create(AuthenticatorID,
, password_hash_algorithm := #{name := bcrypt, #{user_id_type := Type,
salt_rounds := SaltRounds} password_hash_algorithm := #{name := bcrypt,
, '_unique' := Unique salt_rounds := SaltRounds}}) ->
}) -> ok = emqx_authn_utils:ensure_apps_started(bcrypt),
{ok, _} = application:ensure_all_started(bcrypt), State = #{user_group => AuthenticatorID,
State = #{user_group => Unique,
user_id_type => Type, user_id_type => Type,
password_hash_algorithm => bcrypt, password_hash_algorithm => bcrypt,
salt_rounds => SaltRounds}, salt_rounds => SaltRounds},
{ok, State}; {ok, State};
create(#{ user_id_type := Type create(AuthenticatorID,
, password_hash_algorithm := #{name := Name} #{user_id_type := Type,
, '_unique' := Unique password_hash_algorithm := #{name := Name}}) ->
}) -> ok = emqx_authn_utils:ensure_apps_started(Name),
State = #{user_group => Unique, State = #{user_group => AuthenticatorID,
user_id_type => Type, user_id_type => Type,
password_hash_algorithm => Name}, password_hash_algorithm => Name},
{ok, State}. {ok, State}.
update(Config, #{user_group := Unique}) -> update(Config, #{user_group := ID}) ->
create(Config#{'_unique' => Unique}). create(ID, Config).
authenticate(#{auth_method := _}, _) -> authenticate(#{auth_method := _}, _) ->
ignore; ignore;
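The create/2 clauses above replace the direct application:ensure_all_started(bcrypt) call with emqx_authn_utils:ensure_apps_started/1, keyed on the configured hash algorithm, and use the authenticator id as the user group. The helper itself is not shown in this diff; a hypothetical sketch consistent with the code it replaces:

%% Only bcrypt needs an extra OTP application running; the other hash
%% algorithms are served by crypto, which is already started.
ensure_apps_started(bcrypt) ->
    {ok, _} = application:ensure_all_started(bcrypt),
    ok;
ensure_apps_started(_Algorithm) ->
    ok.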
@ -170,10 +169,14 @@ authenticate(#{password := Password} = Credential,
destroy(#{user_group := UserGroup}) -> destroy(#{user_group := UserGroup}) ->
trans( trans(
fun() -> fun() ->
MatchSpec = [{{user_info, {UserGroup, '_'}, '_', '_', '_'}, [], ['$_']}], ok = lists:foreach(
ok = lists:foreach(fun delete_user2/1, mnesia:select(?TAB, MatchSpec, write)) fun(User) ->
end). mnesia:delete_object(?TAB, User, write)
end,
mnesia:select(?TAB, group_match_spec(UserGroup), write))
end).
import_users(Filename0, State) -> import_users(Filename0, State) ->
Filename = to_binary(Filename0), Filename = to_binary(Filename0),
@ -246,8 +249,7 @@ lookup_user(UserID, #{user_group := UserGroup}) ->
end. end.
list_users(PageParams, #{user_group := UserGroup}) -> list_users(PageParams, #{user_group := UserGroup}) ->
MatchSpec = [{{user_info, {UserGroup, '_'}, '_', '_', '_'}, [], ['$_']}], {ok, emqx_mgmt_api:paginate(?TAB, group_match_spec(UserGroup), PageParams, ?FORMAT_FUN)}.
{ok, emqx_mgmt_api:paginate(?TAB, MatchSpec, PageParams, ?FORMAT_FUN)}.
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
%% Internal functions %% Internal functions
@ -374,9 +376,6 @@ insert_user(UserGroup, UserID, PasswordHash, Salt, IsSuperuser) ->
is_superuser = IsSuperuser}, is_superuser = IsSuperuser},
mnesia:write(?TAB, UserInfo, write). mnesia:write(?TAB, UserInfo, write).
delete_user2(UserInfo) ->
mnesia:delete_object(?TAB, UserInfo, write).
%% TODO: Support other type %% TODO: Support other type
get_user_identity(#{username := Username}, username) -> get_user_identity(#{username := Username}, username) ->
Username; Username;
@ -401,3 +400,9 @@ to_binary(L) when is_list(L) ->
format_user_info(#user_info{user_id = {_, UserID}, is_superuser = IsSuperuser}) -> format_user_info(#user_info{user_id = {_, UserID}, is_superuser = IsSuperuser}) ->
#{user_id => UserID, is_superuser => IsSuperuser}. #{user_id => UserID, is_superuser => IsSuperuser}.
group_match_spec(UserGroup) ->
ets:fun2ms(
fun(#user_info{user_id = {Group, _}} = User) when Group =:= UserGroup ->
User
end).
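destroy/1 and list_users/2 above both reuse this single group_match_spec/1. A small usage sketch (the user group is whatever binary the authenticator id provides; ?TAB is the module's Mnesia table macro):

%% Count the users belonging to one authenticator's group using the
%% shared match spec.
count_users(UserGroup) ->
    length(mnesia:dirty_select(?TAB, group_match_spec(UserGroup))).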

@ -29,7 +29,7 @@
]). ]).
-export([ refs/0 -export([ refs/0
, create/1 , create/2
, update/2 , update/2
, authenticate/2 , authenticate/2
, destroy/1 , destroy/1
@ -102,19 +102,24 @@ refs() ->
, hoconsc:ref(?MODULE, 'sharded-cluster') , hoconsc:ref(?MODULE, 'sharded-cluster')
]. ].
create(#{ selector := Selector create(_AuthenticatorID, Config) ->
, '_unique' := Unique create(Config).
} = Config) ->
create(#{selector := Selector} = Config) ->
NSelector = parse_selector(Selector), NSelector = parse_selector(Selector),
State = maps:with([ collection State = maps:with(
, password_hash_field [collection,
, salt_field password_hash_field,
, is_superuser_field salt_field,
, password_hash_algorithm is_superuser_field,
, salt_position password_hash_algorithm,
, '_unique'], Config), salt_position],
NState = State#{selector => NSelector}, Config),
case emqx_resource:create_local(Unique, emqx_connector_mongo, Config) of ResourceId = emqx_authn_utils:make_resource_id(?MODULE),
NState = State#{
selector => NSelector,
resource_id => ResourceId},
case emqx_resource:create_local(ResourceId, emqx_connector_mongo, Config) of
{ok, already_created} -> {ok, already_created} ->
{ok, NState}; {ok, NState};
{ok, _} -> {ok, _} ->
@ -135,17 +140,16 @@ update(Config, State) ->
authenticate(#{auth_method := _}, _) -> authenticate(#{auth_method := _}, _) ->
ignore; ignore;
authenticate(#{password := Password} = Credential, authenticate(#{password := Password} = Credential,
#{ collection := Collection #{collection := Collection,
, selector := Selector0 selector := Selector0,
, '_unique' := Unique resource_id := ResourceId} = State) ->
} = State) ->
Selector1 = replace_placeholders(Selector0, Credential), Selector1 = replace_placeholders(Selector0, Credential),
Selector2 = normalize_selector(Selector1), Selector2 = normalize_selector(Selector1),
case emqx_resource:query(Unique, {find_one, Collection, Selector2, #{}}) of case emqx_resource:query(ResourceId, {find_one, Collection, Selector2, #{}}) of
undefined -> ignore; undefined -> ignore;
{error, Reason} -> {error, Reason} ->
?SLOG(error, #{msg => "mongodb_query_failed", ?SLOG(error, #{msg => "mongodb_query_failed",
resource => Unique, resource => ResourceId,
reason => Reason}), reason => Reason}),
ignore; ignore;
Doc -> Doc ->
@ -154,7 +158,7 @@ authenticate(#{password := Password} = Credential,
{ok, #{is_superuser => is_superuser(Doc, State)}}; {ok, #{is_superuser => is_superuser(Doc, State)}};
{error, {cannot_find_password_hash_field, PasswordHashField}} -> {error, {cannot_find_password_hash_field, PasswordHashField}} ->
?SLOG(error, #{msg => "cannot_find_password_hash_field", ?SLOG(error, #{msg => "cannot_find_password_hash_field",
resource => Unique, resource => ResourceId,
password_hash_field => PasswordHashField}), password_hash_field => PasswordHashField}),
ignore; ignore;
{error, Reason} -> {error, Reason} ->
@ -162,8 +166,8 @@ authenticate(#{password := Password} = Credential,
end end
end. end.
destroy(#{'_unique' := Unique}) -> destroy(#{resource_id := ResourceId}) ->
_ = emqx_resource:remove_local(Unique), _ = emqx_resource:remove_local(ResourceId),
ok. ok.
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
@ -205,7 +209,7 @@ check_password(Password,
undefined -> undefined ->
{error, {cannot_find_password_hash_field, PasswordHashField}}; {error, {cannot_find_password_hash_field, PasswordHashField}};
Hash -> Hash ->
case {ok, Hash} =:= bcrypt:hashpw(Password, Hash) of case {ok, to_list(Hash)} =:= bcrypt:hashpw(Password, Hash) of
true -> ok; true -> ok;
false -> {error, bad_username_or_password} false -> {error, bad_username_or_password}
end end
@ -238,3 +242,7 @@ hash(Algorithm, Password, Salt, prefix) ->
emqx_passwd:hash(Algorithm, <<Salt/binary, Password/binary>>); emqx_passwd:hash(Algorithm, <<Salt/binary, Password/binary>>);
hash(Algorithm, Password, Salt, suffix) -> hash(Algorithm, Password, Salt, suffix) ->
emqx_passwd:hash(Algorithm, <<Password/binary, Salt/binary>>). emqx_passwd:hash(Algorithm, <<Password/binary, Salt/binary>>).
to_list(L) when is_list(L) -> L;
to_list(L) when is_binary(L) -> binary_to_list(L);
to_list(X) -> X.
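The to_list/1 clauses added above support the changed bcrypt comparison in check_password: the hash read from MongoDB is a binary, while bcrypt:hashpw/2 returns {ok, Hash} with Hash as a charlist, so one side is normalised before comparing (the library's return type is an assumption inferred from the way the comparison is written). A minimal sketch of the same check:

%% StoredHash comes back from MongoDB as a binary; bcrypt re-derives the
%% hash using the salt embedded in StoredHash and returns a list.
bcrypt_matches(Password, StoredHash) when is_binary(StoredHash) ->
    {ok, binary_to_list(StoredHash)} =:= bcrypt:hashpw(Password, StoredHash).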

@ -29,7 +29,7 @@
]). ]).
-export([ refs/0 -export([ refs/0
, create/1 , create/2
, update/2 , update/2
, authenticate/2 , authenticate/2
, destroy/1 , destroy/1
@ -76,20 +76,23 @@ query_timeout(_) -> undefined.
refs() -> refs() ->
[hoconsc:ref(?MODULE, config)]. [hoconsc:ref(?MODULE, config)].
create(#{ password_hash_algorithm := Algorithm create(_AuthenticatorID, Config) ->
, salt_position := SaltPosition create(Config).
, query := Query0
, query_timeout := QueryTimeout create(#{password_hash_algorithm := Algorithm,
, '_unique' := Unique salt_position := SaltPosition,
query := Query0,
query_timeout := QueryTimeout
} = Config) -> } = Config) ->
{Query, PlaceHolders} = parse_query(Query0), {Query, PlaceHolders} = parse_query(Query0),
ResourceId = emqx_authn_utils:make_resource_id(?MODULE),
State = #{password_hash_algorithm => Algorithm, State = #{password_hash_algorithm => Algorithm,
salt_position => SaltPosition, salt_position => SaltPosition,
query => Query, query => Query,
placeholders => PlaceHolders, placeholders => PlaceHolders,
query_timeout => QueryTimeout, query_timeout => QueryTimeout,
'_unique' => Unique}, resource_id => ResourceId},
case emqx_resource:create_local(Unique, emqx_connector_mysql, Config) of case emqx_resource:create_local(ResourceId, emqx_connector_mysql, Config) of
{ok, already_created} -> {ok, already_created} ->
{ok, State}; {ok, State};
{ok, _} -> {ok, _} ->
@ -113,12 +116,12 @@ authenticate(#{password := Password} = Credential,
#{placeholders := PlaceHolders, #{placeholders := PlaceHolders,
query := Query, query := Query,
query_timeout := Timeout, query_timeout := Timeout,
'_unique' := Unique} = State) -> resource_id := ResourceId} = State) ->
Params = emqx_authn_utils:replace_placeholders(PlaceHolders, Credential), Params = emqx_authn_utils:replace_placeholders(PlaceHolders, Credential),
case emqx_resource:query(Unique, {sql, Query, Params, Timeout}) of case emqx_resource:query(ResourceId, {sql, Query, Params, Timeout}) of
{ok, _Columns, []} -> ignore; {ok, _Columns, []} -> ignore;
{ok, Columns, Rows} -> {ok, Columns, [Row | _]} ->
Selected = maps:from_list(lists:zip(Columns, Rows)), Selected = maps:from_list(lists:zip(Columns, Row)),
case emqx_authn_utils:check_password(Password, Selected, State) of case emqx_authn_utils:check_password(Password, Selected, State) of
ok -> ok ->
{ok, emqx_authn_utils:is_superuser(Selected)}; {ok, emqx_authn_utils:is_superuser(Selected)};
@ -127,13 +130,13 @@ authenticate(#{password := Password} = Credential,
end; end;
{error, Reason} -> {error, Reason} ->
?SLOG(error, #{msg => "mysql_query_failed", ?SLOG(error, #{msg => "mysql_query_failed",
resource => Unique, resource => ResourceId,
reason => Reason}), reason => Reason}),
ignore ignore
end. end.
destroy(#{'_unique' := Unique}) -> destroy(#{resource_id := ResourceId}) ->
_ = emqx_resource:remove_local(Unique), _ = emqx_resource:remove_local(ResourceId),
ok. ok.
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
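The authenticate/2 change above fixes how a successful lookup is turned into the credentials map: the MySQL driver returns the column names together with a list of rows, and the map has to be built from a single row, not from the row list. Shape of the values involved (illustrative data, assumed column set):

%% emqx_resource:query/2 result for one matching user:
%%   {ok, [<<"password_hash">>, <<"salt">>, <<"is_superuser">>],
%%        [[<<"...">>, <<"...">>, 1]]}
%% With the fix, Selected is built from the first (and only expected) row:
%%   #{<<"password_hash">> => <<"...">>, <<"salt">> => <<"...">>, <<"is_superuser">> => 1}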

@ -30,12 +30,17 @@
]). ]).
-export([ refs/0 -export([ refs/0
, create/1 , create/2
, update/2 , update/2
, authenticate/2 , authenticate/2
, destroy/1 , destroy/1
]). ]).
-ifdef(TEST).
-compile(export_all).
-compile(nowarn_export_all).
-endif.
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
%% Hocon Schema %% Hocon Schema
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
@ -48,7 +53,7 @@ fields(config) ->
[ {mechanism, {enum, ['password-based']}} [ {mechanism, {enum, ['password-based']}}
, {backend, {enum, [postgresql]}} , {backend, {enum, [postgresql]}}
, {password_hash_algorithm, fun password_hash_algorithm/1} , {password_hash_algorithm, fun password_hash_algorithm/1}
, {salt_position, {enum, [prefix, suffix]}} , {salt_position, fun salt_position/1}
, {query, fun query/1} , {query, fun query/1}
] ++ emqx_authn_schema:common_fields() ] ++ emqx_authn_schema:common_fields()
++ emqx_connector_schema_lib:relational_db_fields() ++ emqx_connector_schema_lib:relational_db_fields()
@ -58,6 +63,10 @@ password_hash_algorithm(type) -> {enum, [plain, md5, sha, sha256, sha512, bcrypt
password_hash_algorithm(default) -> sha256; password_hash_algorithm(default) -> sha256;
password_hash_algorithm(_) -> undefined. password_hash_algorithm(_) -> undefined.
salt_position(type) -> {enum, [prefix, suffix]};
salt_position(default) -> prefix;
salt_position(_) -> undefined.
query(type) -> string(); query(type) -> string();
query(_) -> undefined. query(_) -> undefined.
@ -68,18 +77,20 @@ query(_) -> undefined.
refs() -> refs() ->
[hoconsc:ref(?MODULE, config)]. [hoconsc:ref(?MODULE, config)].
create(#{ query := Query0 create(_AuthenticatorID, Config) ->
, password_hash_algorithm := Algorithm create(Config).
, salt_position := SaltPosition
, '_unique' := Unique create(#{query := Query0,
} = Config) -> password_hash_algorithm := Algorithm,
salt_position := SaltPosition} = Config) ->
{Query, PlaceHolders} = parse_query(Query0), {Query, PlaceHolders} = parse_query(Query0),
ResourceId = emqx_authn_utils:make_resource_id(?MODULE),
State = #{query => Query, State = #{query => Query,
placeholders => PlaceHolders, placeholders => PlaceHolders,
password_hash_algorithm => Algorithm, password_hash_algorithm => Algorithm,
salt_position => SaltPosition, salt_position => SaltPosition,
'_unique' => Unique}, resource_id => ResourceId},
case emqx_resource:create_local(Unique, emqx_connector_pgsql, Config) of case emqx_resource:create_local(ResourceId, emqx_connector_pgsql, Config) of
{ok, already_created} -> {ok, already_created} ->
{ok, State}; {ok, State};
{ok, _} -> {ok, _} ->
@ -102,14 +113,13 @@ authenticate(#{auth_method := _}, _) ->
authenticate(#{password := Password} = Credential, authenticate(#{password := Password} = Credential,
#{query := Query, #{query := Query,
placeholders := PlaceHolders, placeholders := PlaceHolders,
'_unique' := Unique} = State) -> resource_id := ResourceId} = State) ->
Params = emqx_authn_utils:replace_placeholders(PlaceHolders, Credential), Params = emqx_authn_utils:replace_placeholders(PlaceHolders, Credential),
case emqx_resource:query(Unique, {sql, Query, Params}) of case emqx_resource:query(ResourceId, {sql, Query, Params}) of
{ok, _Columns, []} -> ignore; {ok, _Columns, []} -> ignore;
{ok, Columns, Rows} -> {ok, Columns, [Row | _]} ->
NColumns = [Name || #column{name = Name} <- Columns], NColumns = [Name || #column{name = Name} <- Columns],
NRows = [erlang:element(1, Row) || Row <- Rows], Selected = maps:from_list(lists:zip(NColumns, erlang:tuple_to_list(Row))),
Selected = maps:from_list(lists:zip(NColumns, NRows)),
case emqx_authn_utils:check_password(Password, Selected, State) of case emqx_authn_utils:check_password(Password, Selected, State) of
ok -> ok ->
{ok, emqx_authn_utils:is_superuser(Selected)}; {ok, emqx_authn_utils:is_superuser(Selected)};
@ -118,13 +128,13 @@ authenticate(#{password := Password} = Credential,
end; end;
{error, Reason} -> {error, Reason} ->
?SLOG(error, #{msg => "postgresql_query_failed", ?SLOG(error, #{msg => "postgresql_query_failed",
resource => Unique, resource => ResourceId,
reason => Reason}), reason => Reason}),
ignore ignore
end. end.
destroy(#{'_unique' := Unique}) -> destroy(#{resource_id := ResourceId}) ->
_ = emqx_resource:remove_local(Unique), _ = emqx_resource:remove_local(ResourceId),
ok. ok.
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
@ -138,7 +148,7 @@ parse_query(Query) ->
PlaceHolders = [PlaceHolder || [PlaceHolder] <- Captured], PlaceHolders = [PlaceHolder || [PlaceHolder] <- Captured],
Replacements = ["$" ++ integer_to_list(I) || I <- lists:seq(1, length(Captured))], Replacements = ["$" ++ integer_to_list(I) || I <- lists:seq(1, length(Captured))],
NQuery = lists:foldl(fun({PlaceHolder, Replacement}, Query0) -> NQuery = lists:foldl(fun({PlaceHolder, Replacement}, Query0) ->
re:replace(Query0, PlaceHolder, Replacement, [{return, binary}]) re:replace(Query0, "\\" ++ PlaceHolder, Replacement, [{return, binary}])
end, Query, lists:zip(PlaceHolders, Replacements)), end, Query, lists:zip(PlaceHolders, Replacements)),
{NQuery, PlaceHolders}; {NQuery, PlaceHolders};
nomatch -> nomatch ->
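The extra "\\" prepended to the captured placeholder before re:replace/4 escapes its leading "$", which the regex engine would otherwise treat as an end-of-subject anchor; the unescaped pattern never matches, so the placeholder would be left in the query. A small illustration (query text and the "${...}" placeholder syntax are assumptions for the example):

%% Escaped pattern matches the literal placeholder, so it is rewritten
%% into the numbered PostgreSQL parameter:
%%   re:replace(<<"... WHERE username = ${username}">>,
%%              "\\${username}", "$1", [{return, binary}])
%%     -> <<"... WHERE username = $1">>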
