Merge remote-tracking branch 'origin/master' into build-with-mix

commit f6d0159371
Author: Thales Macedo Garitezi
Date: 2021-11-23 16:19:11 -03:00
Signature: GPG Key ID DD279F8152A9B6DD (no known key found for this signature in database)
307 changed files with 16532 additions and 8788 deletions

View File

@ -53,7 +53,7 @@ emqx_test(){
exit 1
fi
IDLE_TIME=0
while ! curl http://localhost:18083/api/v5/status >/dev/null 2>&1; do
while ! curl http://127.0.0.1:18083/api/v5/status >/dev/null 2>&1; do
if [ $IDLE_TIME -gt 10 ]
then
echo "emqx running error"
@ -155,7 +155,7 @@ EOF
exit 1
fi
IDLE_TIME=0
while ! curl http://localhost:18083/api/v5/status >/dev/null 2>&1; do
while ! curl http://127.0.0.1:18083/api/v5/status >/dev/null 2>&1; do
if [ $IDLE_TIME -gt 10 ]
then
echo "emqx running error"
@ -184,7 +184,7 @@ EOF
exit 1
fi
IDLE_TIME=0
while ! curl http://localhost:18083/api/v5/status >/dev/null 2>&1; do
while ! curl http://127.0.0.1:18083/api/v5/status >/dev/null 2>&1; do
if [ $IDLE_TIME -gt 10 ]
then
echo "emqx service error"

View File

@ -0,0 +1,49 @@
.PHONY: help up down ct ct-all bash run
define usage
make -f .ci/docker-compose-file/Makefile.local up
make -f .ci/docker-compose-file/Makefile.local ct CONTAINER=erlang24 SUITE=apps/emqx_authn/test/emqx_authn_mnesia_SUITE.erl
make -f .ci/docker-compose-file/Makefile.local down
endef
export usage
help:
@echo "$$usage"
up:
env \
MYSQL_TAG=8 \
REDIS_TAG=6 \
MONGO_TAG=4 \
PGSQL_TAG=13 \
LDAP_TAG=2.4.50 \
docker-compose \
-f .ci/docker-compose-file/docker-compose.yaml \
-f .ci/docker-compose-file/docker-compose-ldap-tcp.yaml \
-f .ci/docker-compose-file/docker-compose-mongo-single-tcp.yaml \
-f .ci/docker-compose-file/docker-compose-mysql-tcp.yaml \
-f .ci/docker-compose-file/docker-compose-pgsql-tcp.yaml \
-f .ci/docker-compose-file/docker-compose-redis-single-tcp.yaml \
up -d --build
down:
docker-compose \
-f .ci/docker-compose-file/docker-compose.yaml \
-f .ci/docker-compose-file/docker-compose-ldap-tcp.yaml \
-f .ci/docker-compose-file/docker-compose-mongo-single-tcp.yaml \
-f .ci/docker-compose-file/docker-compose-mysql-tcp.yaml \
-f .ci/docker-compose-file/docker-compose-pgsql-tcp.yaml \
-f .ci/docker-compose-file/docker-compose-redis-single-tcp.yaml \
down
ct:
docker exec -i "$(CONTAINER)" bash -c "rebar3 ct --name 'test@127.0.0.1' -v --suite $(SUITE)"
ct-all:
docker exec -i "$(CONTAINER)" bash -c "make ct"
bash:
docker exec -it "$(CONTAINER)" bash
run:
docker exec -it "$(CONTAINER)" bash -c "make run";

View File

@ -0,0 +1,36 @@
x-default-emqx: &default-emqx
env_file:
- conf.cluster.env
healthcheck:
test: ["CMD", "/opt/emqx/bin/emqx_ctl", "status"]
interval: 5s
timeout: 25s
retries: 5
services:
emqx1:
<<: *default-emqx
container_name: node1.emqx.io
restart: on-failure
environment:
- "EMQX_HOST=node1.emqx.io"
- "EMQX_DB__BACKEND=rlog"
- "EMQX_DB__ROLE=core"
- "EMQX_CLUSTER__STATIC__SEEDS=[emqx@node1.emqx.io]"
- "EMQX_LISTENERS__TCP__DEFAULT__PROXY_PROTOCOL=false"
- "EMQX_LISTENERS__WS__DEFAULT__PROXY_PROTOCOL=false"
emqx2:
<<: *default-emqx
container_name: node2.emqx.io
depends_on:
- emqx1
restart: on-failure
environment:
- "EMQX_HOST=node2.emqx.io"
- "EMQX_DB__BACKEND=rlog"
- "EMQX_DB__ROLE=replicant"
- "EMQX_DB__CORE_NODES=emqx@node1.emqx.io"
- "EMQX_CLUSTER__STATIC__SEEDS=[emqx@node1.emqx.io]"
- "EMQX_LISTENERS__TCP__DEFAULT__PROXY_PROTOCOL=false"
- "EMQX_LISTENERS__WS__DEFAULT__PROXY_PROTOCOL=false"

View File

@ -1,5 +1,15 @@
version: '3.9'
x-default-emqx: &default-emqx
image: ${_EMQX_DOCKER_IMAGE_TAG}
env_file:
- conf.cluster.env
healthcheck:
test: ["CMD", "/opt/emqx/bin/emqx_ctl", "status"]
interval: 5s
timeout: 25s
retries: 5
services:
haproxy:
container_name: haproxy
@ -28,34 +38,20 @@ services:
haproxy -f /usr/local/etc/haproxy/haproxy.cfg
emqx1:
<<: *default-emqx
container_name: node1.emqx.io
image: $TARGET:$EMQX_TAG
env_file:
- conf.cluster.env
environment:
- "EMQX_HOST=node1.emqx.io"
healthcheck:
test: ["CMD", "/opt/emqx/bin/emqx_ctl", "status"]
interval: 5s
timeout: 25s
retries: 5
networks:
emqx_bridge:
aliases:
- node1.emqx.io
emqx2:
<<: *default-emqx
container_name: node2.emqx.io
image: $TARGET:$EMQX_TAG
env_file:
- conf.cluster.env
environment:
- "EMQX_HOST=node2.emqx.io"
healthcheck:
test: ["CMD", "/opt/emqx/bin/emqx", "ping"]
interval: 5s
timeout: 25s
retries: 5
networks:
emqx_bridge:
aliases:

View File

@ -5,6 +5,8 @@ services:
container_name: mysql
image: mysql:${MYSQL_TAG}
restart: always
ports:
- "3306:3306"
environment:
MYSQL_ROOT_PASSWORD: public
MYSQL_DATABASE: mqtt

View File

@ -3,7 +3,7 @@ version: '3.9'
services:
erlang23:
container_name: erlang23
image: ghcr.io/emqx/emqx-builder/5.0:23.2.7.2-emqx-2-ubuntu20.04
image: ghcr.io/emqx/emqx-builder/5.0-2:23.3.4.9-3-ubuntu20.04
env_file:
- conf.env
environment:
@ -23,7 +23,7 @@ services:
erlang24:
container_name: erlang24
image: ghcr.io/emqx/emqx-builder/5.0:24.1.1-emqx-1-ubuntu20.04
image: ghcr.io/emqx/emqx-builder/5.0-2:24.1.5-2-ubuntu20.04
env_file:
- conf.env
environment:

View File

@ -54,7 +54,6 @@ backend emqx_dashboard_back
server emqx-1 node1.emqx.io:18083
server emqx-2 node2.emqx.io:18083
##----------------------------------------------------------------
## public
##----------------------------------------------------------------

View File

@ -10,9 +10,9 @@ RUN wget ftp://ftp.openldap.org/pub/OpenLDAP/openldap-release/openldap-${LDAP_TA
&& cd .. && rm -rf openldap-${LDAP_TAG}
COPY .ci/docker-compose-file/openldap/slapd.conf /usr/local/etc/openldap/slapd.conf
COPY apps/emqx_auth_ldap/emqx.io.ldif /usr/local/etc/openldap/schema/emqx.io.ldif
COPY apps/emqx_auth_ldap/emqx.schema /usr/local/etc/openldap/schema/emqx.schema
COPY apps/emqx_auth_ldap/test/certs/*.pem /usr/local/etc/openldap/
COPY apps/emqx_authn/test/data/emqx.io.ldif /usr/local/etc/openldap/schema/emqx.io.ldif
COPY apps/emqx_authn/test/data/emqx.schema /usr/local/etc/openldap/schema/emqx.schema
COPY apps/emqx_authn/test/data/certs/*.pem /usr/local/etc/openldap/
RUN mkdir -p /usr/local/etc/openldap/data \
&& slapadd -l /usr/local/etc/openldap/schema/emqx.io.ldif -f /usr/local/etc/openldap/slapd.conf

View File

@ -1,21 +1,30 @@
#!/bin/sh
## This script is to run emqx cluster smoke tests (fvt) in github action
## This script is executed in pacho_client
## This script is executed in paho_client
set -x
set +e
LB="haproxy"
EMQX_TEST_DB_BACKEND=$1
if [ "$EMQX_TEST_DB_BACKEND" = "rlog" ]
then
# TODO: target only replica to avoid replication races
# see: https://github.com/emqx/emqx/issues/6094
TARGET_HOST="node2.emqx.io"
else
# use loadbalancer
TARGET_HOST="haproxy"
fi
apk update && apk add git curl
git clone -b develop-4.0 https://github.com/emqx/paho.mqtt.testing.git /paho.mqtt.testing
pip install pytest
pytest -v /paho.mqtt.testing/interoperability/test_client/V5/test_connect.py -k test_basic --host "$LB"
pytest -v /paho.mqtt.testing/interoperability/test_client/V5/test_connect.py -k test_basic --host "$TARGET_HOST"
RESULT=$?
pytest -v /paho.mqtt.testing/interoperability/test_client --host "$LB"
pytest -v /paho.mqtt.testing/interoperability/test_client --host "$TARGET_HOST"
RESULT=$(( RESULT + $? ))
# pytest -v /paho.mqtt.testing/interoperability/test_cluster --host1 "node1.emqx.io" --host2 "node2.emqx.io"

View File

@ -0,0 +1,51 @@
#!/bin/bash
set -euxo pipefail
# _EMQX_DOCKER_IMAGE_TAG is shared with docker-compose file
export _EMQX_DOCKER_IMAGE_TAG="$1"
_EMQX_TEST_DB_BACKEND="${2:-${_EMQX_TEST_DB_BACKEND:-mnesia}}"
if [ "$_EMQX_TEST_DB_BACKEND" = "rlog" ]
then
CLUSTER_OVERRIDES="-f .ci/docker-compose-file/docker-compose-emqx-cluster-rlog.override.yaml"
else
CLUSTER_OVERRIDES=""
fi
{
echo "HOCON_ENV_OVERRIDE_PREFIX=EMQX_"
echo "EMQX_ZONES__DEFAULT__MQTT__RETRY_INTERVAL=2s"
echo "EMQX_ZONES__DEFAULT__MQTT__MAX_TOPIC_ALIAS=10"
} >> .ci/docker-compose-file/conf.cluster.env
is_node_up() {
local node
node="$1"
docker exec -i "$node" \
bash -c "emqx eval \"['emqx@node1.emqx.io','emqx@node2.emqx.io'] = maps:get(running_nodes, ekka_cluster:info()).\"" > /dev/null 2>&1
}
is_node_listening() {
local node
node="$1"
docker exec -i "$node" \
emqx eval "ok = case gen_tcp:connect(\"localhost\", 1883, []) of {ok, P} -> gen_tcp:close(P), ok; _ -> exit(1) end." > /dev/null 2>&1
}
is_cluster_up() {
is_node_up node1.emqx.io && \
is_node_up node2.emqx.io && \
is_node_listening node1.emqx.io && \
is_node_listening node2.emqx.io
}
docker-compose \
-f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml \
$CLUSTER_OVERRIDES \
-f .ci/docker-compose-file/docker-compose-python.yaml \
up -d
while ! is_cluster_up; do
echo "['$(date -u +"%Y-%m-%dT%H:%M:%SZ")']:waiting emqx";
sleep 5;
done
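The run_fvt_tests workflow further down invokes this script as ./.ci/docker-compose-file/scripts/run-emqx.sh $IMAGE ${{ matrix.cluster_db_backend }}, so it appears to live at that path. A hedged usage sketch with a placeholder image tag:

IMAGE="emqx/emqx:$(./pkg-vsn.sh)"    # image built by `make emqx-docker` in the same job
./.ci/docker-compose-file/scripts/run-emqx.sh "$IMAGE" rlog    # second argument defaults to mnesia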

View File

@ -9,6 +9,9 @@ labels: "Support, needs-triage"
<!-- Note, lacking of information will delay the handling of issue
See our github issue handling flow here:
https://github.com/emqx/emqx/blob/master/.github/ISSUE_TEMPLATE/assets/issue-handling.png
For support in Chinese, please visit https://askemq.com/
中文支持 请访问: https://askemq.com/
-->
### Subject of the support

View File

@ -11,92 +11,69 @@ on:
types:
- published
workflow_dispatch:
inputs:
which_branch:
required: false
jobs:
prepare:
strategy:
matrix:
otp:
- "23.2.7.2-emqx-2"
- "24.1.1-emqx-1"
runs-on: ubuntu-20.04
container: "ghcr.io/emqx/emqx-builder/5.0:${{ matrix.otp }}-ubuntu20.04"
# prepare source with any OTP version, no need for a matrix
container: "ghcr.io/emqx/emqx-builder/5.0-2:24.1.5-2-ubuntu20.04"
outputs:
profiles: ${{ steps.set_profile.outputs.profiles }}
old_vsns: ${{ steps.set_profile.outputs.old_vsns }}
old_vsns: ${{ steps.find_old_versons.outputs.old_vsns }}
steps:
- uses: actions/checkout@v2
with:
ref: ${{ github.event.inputs.which_branch }}
path: source
fetch-depth: 0
- name: set profile
id: set_profile
- name: find old versions
id: find_old_versons
shell: bash
working-directory: source
run: |
vsn="$(./pkg-vsn.sh)"
pre_vsn="$(echo $vsn | grep -oE '^[0-9]+.[0-9]')"
if make emqx-ee --dry-run > /dev/null 2>&1; then
old_vsns="$(git tag -l "e$pre_vsn.[0-9]" | xargs echo -n | sed "s/e$vsn//")"
echo "::set-output name=old_vsns::$old_vsns"
echo "::set-output name=profiles::[\"emqx-ee\"]"
else
old_vsns="$(git tag -l "v$pre_vsn.[0-9]" | xargs echo -n | sed "s/v$vsn//")"
echo "::set-output name=old_vsns::$old_vsns"
echo "::set-output name=profiles::[\"emqx\", \"emqx-edge\"]"
fi
- name: get otp version
id: get_otp_version
- name: get_all_deps
run: |
otp="$(erl -eval '{ok, Version} = file:read_file(filename:join([code:root_dir(), "releases", erlang:system_info(otp_release), "OTP_VERSION"])), io:fwrite(Version), halt().' -noshell)"
echo "::set-output name=otp::$otp"
- name: set get token
if: endsWith(github.repository, 'enterprise')
run: |
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
git config --global credential.helper store
- name: get deps
working-directory: source
run: |
make ensure-rebar3
./rebar3 as default get-deps
rm -rf rebar.lock
- name: gen zip file
run: zip -ryq source-${{ steps.get_otp_version.outputs.otp }}.zip source/* source/.[^.]*
make -C source deps-all
zip -ryq source.zip source/* source/.[^.]*
- uses: actions/upload-artifact@v2
with:
name: source-${{ steps.get_otp_version.outputs.otp }}
path: source-${{ steps.get_otp_version.outputs.otp }}.zip
name: source
path: source.zip
windows:
runs-on: windows-2019
needs: prepare
if: endsWith(github.repository, 'emqx')
strategy:
fail-fast: false
matrix:
profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
exclude:
- profile: emqx-edge
profile: # only CE for windows
- emqx
otp:
- 23.2
steps:
- uses: actions/download-artifact@v2
with:
name: source-23.2.7.2-emqx-2
name: source
path: .
- name: unzip source code
run: Expand-Archive -Path source-23.2.7.2-emqx-2.zip -DestinationPath ./
run: Expand-Archive -Path source.zip -DestinationPath ./
- uses: ilammy/msvc-dev-cmd@v1
- uses: gleam-lang/setup-erlang@v1.1.2
id: install_erlang
## gleam-lang/setup-erlang does not yet support the installation of otp24 on windows
with:
otp-version: 23.2
otp-version: ${{ matrix.otp }}
- name: build
env:
PYTHON: python
@ -108,10 +85,10 @@ jobs:
$version = $( "${{ github.ref }}" -replace "^(.*)/(.*)/" )
if ($version -match "^v[0-9]+\.[0-9]+(\.[0-9]+)?") {
$regex = "[0-9]+\.[0-9]+(-alpha|-beta|-rc)?\.[0-9]+"
$pkg_name = "${{ matrix.profile }}-windows-$([regex]::matches($version, $regex).value).zip"
$pkg_name = "${{ matrix.profile }}-$([regex]::matches($version, $regex).value)-otp${{ matrix.otp }}-windows-amd64.zip"
}
else {
$pkg_name = "${{ matrix.profile }}-windows-$($version -replace '/').zip"
$pkg_name = "${{ matrix.profile }}-$($version -replace '/')-otp${{ matrix.otp }}-windows-amd64.zip"
}
## We do not build/release bcrypt and quic for windows package
Remove-Item -Recurse -Force -Path _build/default/lib/bcrypt/
@ -140,34 +117,32 @@ jobs:
- uses: actions/upload-artifact@v1
if: startsWith(github.ref, 'refs/tags/')
with:
name: ${{ matrix.profile }}-23.2.7.2-emqx-2
name: ${{ matrix.profile }}
path: source/_packages/${{ matrix.profile }}/.
mac:
needs: prepare
strategy:
fail-fast: false
matrix:
profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
profile: # no EDGE for mac
- emqx
- emqx-ee
otp:
- 24.1.5-2
macos:
- macos-11
- macos-10.15
otp:
- 24.1.1-emqx-1
exclude:
- profile: emqx-edge
runs-on: ${{ matrix.macos }}
steps:
- uses: actions/download-artifact@v2
with:
name: source-${{ matrix.otp }}
name: source
path: .
- name: unzip source code
run: unzip -q source-${{ matrix.otp }}.zip
run: unzip -q source.zip
- name: prepare
run: |
brew update
@ -182,8 +157,12 @@ jobs:
- name: build erlang
if: steps.cache.outputs.cache-hit != 'true'
timeout-minutes: 60
env:
KERL_BUILD_BACKEND: git
OTP_GITHUB_URL: https://github.com/emqx/otp
run: |
kerl build git https://github.com/emqx/otp.git OTP-${{ matrix.otp }} ${{ matrix.otp }}
kerl update releases
kerl build ${{ matrix.otp }}
kerl install ${{ matrix.otp }} $HOME/.kerl/${{ matrix.otp }}
- name: build
working-directory: source
@ -191,11 +170,12 @@ jobs:
. $HOME/.kerl/${{ matrix.otp }}/activate
make ensure-rebar3
sudo cp rebar3 /usr/local/bin/rebar3
rm -rf _build/${{ matrix.profile }}/lib
make ${{ matrix.profile }}-zip
- name: test
working-directory: source
run: |
pkg_name=$(find _packages/${{ matrix.profile }} -mindepth 1 -maxdepth 1 -iname \*.zip | head)
pkg_name=$(find _packages/${{ matrix.profile }} -mindepth 1 -maxdepth 1 -iname \*.zip)
unzip -q $pkg_name
# gsed -i '/emqx_telemetry/d' ./emqx/data/loaded_plugins
./emqx/bin/emqx start || cat emqx/log/erlang.log.1
@ -230,7 +210,15 @@ jobs:
strategy:
fail-fast: false
matrix:
profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
profile: ## all editions for linux
- emqx-edge
- emqx
- emqx-ee
otp:
- 24.1.5-2 # we test with OTP 23, but only build package on OTP 24 versions
arch:
- amd64
- arm64
os:
- ubuntu20.04
- ubuntu18.04
@ -240,18 +228,9 @@ jobs:
# - opensuse
- centos8
- centos7
- centos6
- raspbian10
# - raspbian9
arch:
- amd64
- arm64
otp:
- 23.2.7.2-emqx-2
- 24.1.1-emqx-1
exclude:
- os: centos6
arch: arm64
- os: raspbian9
arch: amd64
- os: raspbian10
@ -277,12 +256,13 @@ jobs:
platforms: all
- uses: actions/download-artifact@v2
with:
name: source-${{ matrix.otp }}
name: source
path: .
- name: unzip source code
run: unzip -q source-${{ matrix.otp }}.zip
run: unzip -q source.zip
- name: downloads old emqx zip packages
env:
OTP_VSN: ${{ matrix.otp }}
PROFILE: ${{ matrix.profile }}
ARCH: ${{ matrix.arch }}
SYSTEM: ${{ matrix.os }}
@ -302,10 +282,11 @@ jobs:
cd _upgrade_base
old_vsns=($(echo $OLD_VSNS | tr ' ' ' '))
for tag in ${old_vsns[@]}; do
if [ ! -z "$(echo $(curl -I -m 10 -o /dev/null -s -w %{http_code} https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip) | grep -oE "^[23]+")" ];then
wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip
wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip.sha256
echo "$(cat $PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip.sha256) $PROFILE-$SYSTEM-${tag#[e|v]}-$ARCH.zip" | sha256sum -c || exit 1
package_name="${PROFILE}-${tag#[e|v]}-otp${OTP_VSN}-${SYSTEM}-${ARCH}"
if [ ! -z "$(echo $(curl -I -m 10 -o /dev/null -s -w %{http_code} https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$package_name.zip) | grep -oE "^[23]+")" ]; then
wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$package_name.zip
wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$broker/$tag/$package_name.zip.sha256
echo "$(cat $package_name.zip.sha256) $package_name.zip" | sha256sum -c || exit 1
fi
done
- name: build emqx packages
@ -320,7 +301,7 @@ jobs:
-v $(pwd):/emqx \
--workdir /emqx \
--platform linux/$ARCH \
ghcr.io/emqx/emqx-builder/5.0:$OTP-$SYSTEM \
ghcr.io/emqx/emqx-builder/5.0-2:$OTP-$SYSTEM \
bash -euc "make $PROFILE-zip || cat rebar3.crashdump; \
make $PROFILE-pkg || cat rebar3.crashdump; \
EMQX_NAME=$PROFILE && .ci/build_packages/tests.sh"
@ -349,17 +330,21 @@ jobs:
strategy:
fail-fast: false
matrix:
profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
profile: # all editions for docker
- emqx-edge
- emqx
- emqx-ee
# NOTE: for docker, only support latest otp version, not a matrix
otp:
- 24.1.1-emqx-1
- 24.1.5-2 # update to latest
steps:
- uses: actions/download-artifact@v2
with:
name: source-${{ matrix.otp }}
name: source
path: .
- name: unzip source code
run: unzip -q source-${{ matrix.otp }}.zip
run: unzip -q source.zip
- uses: docker/setup-buildx-action@v1
- uses: docker/setup-qemu-action@v1
with:
@ -376,7 +361,8 @@ jobs:
type=ref,event=pr
type=ref,event=tag
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
labels:
org.opencontainers.image.otp.version=${{ matrix.otp }}
- uses: docker/login-action@v1
if: github.event_name == 'release'
with:
@ -384,32 +370,26 @@ jobs:
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- uses: docker/build-push-action@v2
with:
push: ${{ github.event_name == 'release' }}
push: ${{ github.event_name == 'release' && !github.event.release.prerelease }}
pull: true
no-cache: true
platforms: linux/amd64,linux/arm64
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
build-args: |
BUILD_FROM=ghcr.io/emqx/emqx-builder/5.0:${{ matrix.otp }}-alpine3.14
BUILD_FROM=ghcr.io/emqx/emqx-builder/5.0-2:${{ matrix.otp }}-alpine3.14
RUN_FROM=alpine:3.14
EMQX_NAME=${{ matrix.profile }}
file: source/deploy/docker/Dockerfile
context: source
delete-artifact:
runs-on: ubuntu-20.04
strategy:
matrix:
otp:
- 23.2.7.2-emqx-2
- 24.1.1-emqx-1
needs: [prepare, mac, linux, docker]
steps:
- uses: geekyeggo/delete-artifact@v1
with:
name: source-${{ matrix.otp }}
name: source
upload:
runs-on: ubuntu-20.04
@ -420,9 +400,12 @@ jobs:
strategy:
matrix:
profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
profile:
- emqx-edge
- emqx
- emqx-ee
otp:
- 24.1.1-emqx-1
- 24.1.5-2
steps:
- uses: actions/checkout@v2
@ -461,17 +444,11 @@ jobs:
aws s3 cp --recursive _packages/${{ matrix.profile }} s3://${{ secrets.AWS_S3_BUCKET }}/$broker/${{ env.version }}
aws cloudfront create-invalidation --distribution-id ${{ secrets.AWS_CLOUDFRONT_ID }} --paths "/$broker/${{ env.version }}/*"
- uses: Rory-Z/upload-release-asset@v1
if: github.event_name == 'release' && matrix.profile != 'emqx-ee'
if: github.event_name == 'release'
with:
repo: emqx
path: "_packages/${{ matrix.profile }}/emqx-*"
token: ${{ github.token }}
- uses: Rory-Z/upload-release-asset@v1
if: github.event_name == 'release' && matrix.profile == 'emqx-ee'
with:
repo: emqx-enterprise
path: "_packages/${{ matrix.profile }}/emqx-*"
token: ${{ github.token }}
- name: update to emqx.io
if: github.event_name == 'release'
run: |
@ -484,32 +461,28 @@ jobs:
-d "{\"repo\":\"emqx/emqx\", \"tag\": \"${{ env.version }}\" }" \
${{ secrets.EMQX_IO_RELEASE_API }}
- name: update repo.emqx.io
if: github.event_name == 'release' && endsWith(github.repository, 'enterprise') && matrix.profile == 'emqx-ee'
if: github.event_name == 'release'
run: |
if [ "${{ matrix. profile }}" = 'emqx-ee' ]; then
BOOL_FLAG_NAME="emqx_ee"
else
BOOL_FLAG_NAME="emqx_ce"
fi
curl --silent --show-error \
-H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \
-H "Accept: application/vnd.github.v3+json" \
-X POST \
-d "{\"ref\":\"v1.0.3\",\"inputs\":{\"version\": \"${{ env.version }}\", \"emqx_ee\": \"true\"}}" \
"https://api.github.com/repos/emqx/emqx-ci-helper/actions/workflows/update_emqx_repos.yaml/dispatches"
- name: update repo.emqx.io
if: github.event_name == 'release' && endsWith(github.repository, 'emqx') && matrix.profile == 'emqx'
run: |
curl --silent --show-error \
-H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \
-H "Accept: application/vnd.github.v3+json" \
-X POST \
-d "{\"ref\":\"v1.0.3\",\"inputs\":{\"version\": \"${{ env.version }}\", \"emqx_ce\": \"true\"}}" \
-d "{\"ref\":\"v1.0.4\",\"inputs\":{\"version\": \"${{ env.version }}\", \"${BOOL_FLAG_NAME}\": \"true\"}}" \
"https://api.github.com/repos/emqx/emqx-ci-helper/actions/workflows/update_emqx_repos.yaml/dispatches"
- name: update homebrew packages
if: github.event_name == 'release' && endsWith(github.repository, 'emqx') && matrix.profile == 'emqx'
if: github.event_name == 'release' && matrix.profile == 'emqx'
run: |
if [ -z $(echo $version | grep -oE "(alpha|beta|rc)\.[0-9]") ]; then
curl --silent --show-error \
-H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \
-H "Accept: application/vnd.github.v3+json" \
-X POST \
-d "{\"ref\":\"v1.0.3\",\"inputs\":{\"version\": \"${{ env.version }}\"}}" \
-d "{\"ref\":\"v1.0.4\",\"inputs\":{\"version\": \"${{ env.version }}\"}}" \
"https://api.github.com/repos/emqx/emqx-ci-helper/actions/workflows/update_emqx_homebrew.yaml/dispatches"
fi
- uses: geekyeggo/delete-artifact@v1

View File

@ -4,7 +4,6 @@ concurrency:
group: slim-${{ github.event_name }}-${{ github.ref }}
cancel-in-progress: true
on:
push:
tags:
@ -14,48 +13,37 @@ on:
workflow_dispatch:
jobs:
build:
linux:
runs-on: ubuntu-20.04
strategy:
fail-fast: false
matrix:
profile:
- emqx-edge
- emqx
- emqx-ee
otp:
- 24.1.1-emqx-1
- 24.1.5-2
os:
- ubuntu20.04
- centos7
container: "ghcr.io/emqx/emqx-builder/5.0:${{ matrix.otp }}-${{ matrix.os }}"
container: "ghcr.io/emqx/emqx-builder/5.0-2:${{ matrix.otp }}-${{ matrix.os }}"
steps:
- uses: actions/checkout@v1
- name: prepare
run: |
if make emqx-ee --dry-run > /dev/null 2>&1; then
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
git config --global credential.helper store
echo "${{ secrets.CI_GIT_TOKEN }}" >> ./scripts/git-token
echo "EMQX_NAME=emqx-ee" >> $GITHUB_ENV
else
echo "EMQX_NAME=emqx" >> $GITHUB_ENV
fi
- name: build zip packages
run: make ${EMQX_NAME}-zip
- name: build zip package
run: make ${{ matrix.profile }}-zip
- name: build deb/rpm packages
run: make ${EMQX_NAME}-pkg
- uses: actions/upload-artifact@v1
if: failure()
with:
name: rebar3.crashdump
path: ./rebar3.crashdump
run: make ${{ matrix.profile }}-pkg
- name: packages test
run: |
export CODE_PATH=$GITHUB_WORKSPACE
.ci/build_packages/tests.sh
EMQX_NAME=${{ matrix.profile }} .ci/build_packages/tests.sh
- uses: actions/upload-artifact@v2
with:
name: ${{ matrix.os }}
name: ${{ matrix.profile}}-${{ matrix.otp }}-${{ matrix.os }}
path: _packages/**/*.zip
mac:
@ -63,32 +51,25 @@ jobs:
strategy:
fail-fast: false
matrix:
profile:
- emqx
- emqx-ee
otp:
- 24.1.5-2
macos:
- macos-11
- macos-10.15
otp:
- 24.1.1-emqx-1
runs-on: ${{ matrix.macos }}
steps:
- uses: actions/checkout@v1
- name: prepare
run: |
if make emqx-ee --dry-run > /dev/null 2>&1; then
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
git config --global credential.helper store
echo "${{ secrets.CI_GIT_TOKEN }}" >> ./scripts/git-token
echo "EMQX_NAME=emqx-ee" >> $GITHUB_ENV
else
echo "EMQX_NAME=emqx" >> $GITHUB_ENV
fi
- uses: actions/checkout@v2
- name: prepare
run: |
brew update
brew install curl zip unzip gnu-sed kerl unixodbc freetds
echo "/usr/local/bin" >> $GITHUB_PATH
git config --global credential.helper store
echo "EMQX_NAME=${{ matrix.profile }}" >> $GITHUB_ENV
- uses: actions/cache@v2
id: cache
with:
@ -97,23 +78,23 @@ jobs:
- name: build erlang
if: steps.cache.outputs.cache-hit != 'true'
timeout-minutes: 60
env:
KERL_BUILD_BACKEND: git
OTP_GITHUB_URL: https://github.com/emqx/otp
run: |
kerl build git https://github.com/emqx/otp.git OTP-${{ matrix.otp }} ${{ matrix.otp }}
kerl update releases
kerl build ${{ matrix.otp }}
kerl install ${{ matrix.otp }} $HOME/.kerl/${{ matrix.otp }}
- name: build
- name: build ${{ matrix.profile }}
run: |
. $HOME/.kerl/${{ matrix.otp }}/activate
make ensure-rebar3
sudo cp rebar3 /usr/local/bin/rebar3
make ${EMQX_NAME}-zip
- uses: actions/upload-artifact@v1
if: failure()
with:
name: rebar3.crashdump
path: ./rebar3.crashdump
make ${{ matrix.profile }}-zip
- name: test
run: |
unzip -q $(find _packages/${EMQX_NAME} -mindepth 1 -maxdepth 1 -iname \*.zip | head)
pkg_name=$(find _packages/${{ matrix.profile }} -mindepth 1 -maxdepth 1 -iname \*.zip)
unzip -q $pkg_name
# gsed -i '/emqx_telemetry/d' ./emqx/data/loaded_plugins
./emqx/bin/emqx start || cat emqx/log/erlang.log.1
ready='no'

View File

@ -5,7 +5,7 @@ on: [pull_request]
jobs:
check_deps_integrity:
runs-on: ubuntu-20.04
container: "ghcr.io/emqx/emqx-builder/5.0:24.1.1-emqx-1-ubuntu20.04"
container: "ghcr.io/emqx/emqx-builder/5.0-2:24.1.5-2-ubuntu20.04"
steps:
- uses: actions/checkout@v2

.github/workflows/code_style_check.yaml (new file)
View File

@ -0,0 +1,17 @@
name: Code style check
on: [pull_request]
jobs:
build:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 1000
- name: Run elvis check
run: |
./scripts/elvis-check.sh $GITHUB_BASE_REF
- name: Check line-break at EOF
run: |
./scripts/check-nl-at-eof.sh

View File

@ -1,16 +0,0 @@
name: Elvis Linter
on: [pull_request]
jobs:
build:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v2
- name: Set git token
if: endsWith(github.repository, 'enterprise')
run: |
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
git config --global credential.helper store
- run: |
./scripts/elvis-check.sh $GITHUB_BASE_REF

View File

@ -24,11 +24,7 @@ jobs:
id: create_pull_request
run: |
set -euo pipefail
if [ "$GITHUB_REF" = "refs/heads/master" ]; then
EE_REF="refs/heads/enterprise"
else
EE_REF="${GITHUB_REF}-enterprise"
fi
R=$(curl --silent --show-error \
-H "Accept: application/vnd.github.v3+json" \
-H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \

View File

@ -61,7 +61,7 @@ jobs:
- uses: actions/checkout@v2
with:
repository: emqx/emqx-fvt
ref: v1.3.0
ref: 1.0.2-dev1
path: .
- uses: actions/setup-java@v1
with:

View File

@ -12,11 +12,11 @@ jobs:
strategy:
matrix:
otp:
- "23.2.7.2-emqx-2"
- "24.1.1-emqx-1"
- 23.3.4.9-3
- 24.1.5-2
runs-on: ubuntu-20.04
container: "ghcr.io/emqx/emqx-builder/5.0:${{ matrix.otp }}-ubuntu20.04"
container: "ghcr.io/emqx/emqx-builder/5.0-2:${{ matrix.otp }}-ubuntu20.04"
steps:
- uses: actions/checkout@v2

View File

@ -8,58 +8,27 @@ on:
push:
tags:
- v*
- e*
pull_request:
jobs:
prepare:
strategy:
matrix:
otp:
- "23.2.7.2-emqx-2"
- "24.1.1-emqx-1"
runs-on: ubuntu-20.04
container: "ghcr.io/emqx/emqx-builder/5.0:${{ matrix.otp }}-ubuntu20.04"
outputs:
profile: ${{ steps.profile.outputs.profile }}
# prepare source with any OTP version, no need for a matrix
container: ghcr.io/emqx/emqx-builder/5.0-2:24.1.5-2-alpine3.14
steps:
- name: get otp version
id: get_otp_version
run: |
otp="$(erl -eval '{ok, Version} = file:read_file(filename:join([code:root_dir(), "releases", erlang:system_info(otp_release), "OTP_VERSION"])), io:fwrite(Version), halt().' -noshell)"
echo "::set-output name=otp::$otp"
- uses: actions/checkout@v2
with:
path: source
fetch-depth: 0
- name: set profile
id: profile
shell: bash
working-directory: source
run: |
vsn="$(./pkg-vsn.sh)"
if make emqx-ee --dry-run > /dev/null 2>&1; then
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
git config --global credential.helper store
echo "::set-output name=profile::emqx-ee"
else
echo "::set-output name=profile::emqx"
fi
- name: get deps
working-directory: source
run: |
make ensure-rebar3
./rebar3 as default get-deps
rm -rf rebar.lock
- name: gen zip file
run: zip -ryq source-${{ steps.get_otp_version.outputs.otp }}.zip source/* source/.[^.]*
make -C source deps-all
zip -ryq source.zip source/* source/.[^.]*
- uses: actions/upload-artifact@v2
with:
name: source-${{ steps.get_otp_version.outputs.otp }}
path: source-${{ steps.get_otp_version.outputs.otp }}.zip
name: source
path: source.zip
docker_test:
runs-on: ubuntu-20.04
@ -68,47 +37,43 @@ jobs:
strategy:
fail-fast: false
matrix:
otp:
- 23.2.7.2-emqx-2
- 24.1.1-emqx-1
profile:
- emqx-edge
- emqx
- emqx-ee
cluster_db_backend:
- mnesia
- rlog
steps:
- uses: actions/download-artifact@v2
with:
name: source-${{ matrix.otp }}
name: source
path: .
- name: unzip source code
run: unzip -q source-${{ matrix.otp }}.zip
run: unzip -q source.zip
- name: make docker image
working-directory: source
env:
OTP: ${{ matrix.otp }}
EMQX_BUILDER: ghcr.io/emqx/emqx-builder/5.0-2:24.1.5-2-alpine3.14
run: |
make ${{ needs.prepare.outputs.profile }}-docker
echo "TARGET=emqx/${{ needs.prepare.outputs.profile }}" >> $GITHUB_ENV
echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV
make ${{ matrix.profile }}-docker
- name: run emqx
timeout-minutes: 5
working-directory: source
run: |
set -e -u -x
echo "HOCON_ENV_OVERRIDE_PREFIX=EMQX_" >> .ci/docker-compose-file/conf.cluster.env
echo "EMQX_ZONES__DEFAULT__MQTT__RETRY_INTERVAL=2s" >> .ci/docker-compose-file/conf.cluster.env
echo "EMQX_ZONES__DEFAULT__MQTT__MAX_TOPIC_ALIAS=10" >> .ci/docker-compose-file/conf.cluster.env
docker-compose \
-f .ci/docker-compose-file/docker-compose-emqx-cluster.yaml \
-f .ci/docker-compose-file/docker-compose-python.yaml \
up -d
while ! docker exec -i node1.emqx.io bash -c "emqx eval \"['emqx@node1.emqx.io','emqx@node2.emqx.io'] = maps:get(running_nodes, ekka_cluster:info()).\"" > /dev/null 2>&1; do
echo "['$(date -u +"%Y-%m-%dT%H:%M:%SZ")']:waiting emqx";
sleep 5;
done
set -x
IMAGE=emqx/${{ matrix.profile }}:$(./pkg-vsn.sh)
./.ci/docker-compose-file/scripts/run-emqx.sh $IMAGE ${{ matrix.cluster_db_backend }}
- name: make paho tests
run: |
if ! docker exec -i python /scripts/pytest.sh; then
if ! docker exec -i python /scripts/pytest.sh "${{ matrix.cluster_db_backend }}"; then
echo "DUMP_CONTAINER_LOGS_BGN"
echo "============== haproxy =============="
docker logs haproxy
echo "============== node1 =============="
docker logs node1.emqx.io
echo "============== node2 =============="
docker logs node2.emqx.io
echo "DUMP_CONTAINER_LOGS_END"
exit 1
@ -121,24 +86,24 @@ jobs:
strategy:
fail-fast: false
matrix:
otp:
- 23.2.7.2-emqx-2
- 24.1.1-emqx-1
profile:
- emqx
# - emqx-ee # TODO test enterprise
steps:
- uses: actions/download-artifact@v2
with:
name: source-${{ matrix.otp }}
name: source
path: .
- name: unzip source code
run: unzip -q source-${{ matrix.otp }}.zip
run: unzip -q source.zip
- name: make docker image
working-directory: source
env:
OTP: ${{ matrix.otp }}
EMQX_BUILDER: ghcr.io/emqx/emqx-builder/5.0-2:24.1.5-2-alpine3.14
run: |
make ${{ needs.prepare.outputs.profile }}-docker
echo "TARGET=emqx/${{ needs.prepare.outputs.profile }}" >> $GITHUB_ENV
make ${{ matrix.profile }}-docker
echo "TARGET=emqx/${{ matrix.profile }}" >> $GITHUB_ENV
echo "EMQX_TAG=$(./pkg-vsn.sh)" >> $GITHUB_ENV
- run: minikube start
- name: run emqx on chart

View File

@ -15,12 +15,14 @@ jobs:
relup_test:
strategy:
matrix:
otp:
- "23.2.7.2-emqx-2"
- "24.1.1-emqx-1"
profile:
- emqx
- emqx-ee
otp_vsn:
- 24.1.5-2
runs-on: ubuntu-20.04
container: "ghcr.io/emqx/emqx-builder/5.0:${{ matrix.otp }}-ubuntu20.04"
container: "ghcr.io/emqx/emqx-builder/5.0-2:${{ matrix.otp_vsn }}-ubuntu20.04"
defaults:
run:
@ -55,26 +57,18 @@ jobs:
repository: ${{ github.repository }}
path: emqx
fetch-depth: 0
- name: prepare
run: |
if make -C emqx emqx-ee --dry-run > /dev/null 2>&1; then
echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
git config --global credential.helper store
echo "${{ secrets.CI_GIT_TOKEN }}" >> emqx/scripts/git-token
echo "PROFILE=emqx-ee" >> $GITHUB_ENV
else
echo "PROFILE=emqx" >> $GITHUB_ENV
fi
- name: get version
run: |
set -e -x -u
cd emqx
export PROFILE=${{ matrix.profile }}
export OTP_VSN=${{ matrix.otp_vsn }}
echo "PROFILE=$PROFILE" >> $GITHUB_ENV
echo "OTP_VSN=$OTP_VSN" >> $GITHUB_ENV
if [ $PROFILE = "emqx" ];then
broker="emqx-ce"
edition='opensource'
else
broker="emqx-ee"
edition='enterprise'
fi
echo "BROKER=$broker" >> $GITHUB_ENV
@ -82,11 +76,7 @@ jobs:
echo "VSN=$vsn" >> $GITHUB_ENV
pre_vsn="$(echo $vsn | grep -oE '^[0-9]+.[0-9]')"
if [ $PROFILE = "emqx" ]; then
old_vsns="$(git tag -l "v$pre_vsn.[0-9]" | xargs echo -n | sed "s/v$vsn//")"
else
old_vsns="$(git tag -l "e$pre_vsn.[0-9]" | xargs echo -n | sed "s/e$vsn//")"
fi
echo "OLD_VSNS=$old_vsns" >> $GITHUB_ENV
- name: download emqx
run: |
@ -95,7 +85,7 @@ jobs:
cd emqx/_upgrade_base
old_vsns=($(echo $OLD_VSNS | tr ' ' ' '))
for old_vsn in ${old_vsns[@]}; do
wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$BROKER/$old_vsn/$PROFILE-ubuntu20.04-${old_vsn#[e|v]}-amd64.zip
wget --no-verbose https://s3-us-west-2.amazonaws.com/packages.emqx/$BROKER/$old_vsn/$PROFILE-${old_vsn#[e|v]}-otp${OTP_VSN}-ubuntu20.04-amd64.zip
done
- name: build emqx
run: make -C emqx ${PROFILE}-zip

View File

@ -15,12 +15,11 @@ jobs:
run_static_analysis:
strategy:
matrix:
otp:
- "23.2.7.2-emqx-2"
- "24.1.1-emqx-1"
emqx_builder:
- 5.0-2:24.1.5-2 # run dialyzer on latest OTP
runs-on: ubuntu-20.04
container: "ghcr.io/emqx/emqx-builder/5.0:${{ matrix.otp }}-ubuntu20.04"
container: "ghcr.io/emqx/emqx-builder/${{ matrix.emqx_builder }}-ubuntu20.04"
steps:
- uses: actions/checkout@v2
@ -38,12 +37,11 @@ jobs:
run_proper_test:
strategy:
matrix:
otp:
- "23.2.7.2-emqx-2"
- "24.1.1-emqx-1"
emqx_builder:
- 5.0-2:24.1.5-2
runs-on: ubuntu-20.04
container: "ghcr.io/emqx/emqx-builder/5.0:${{ matrix.otp }}-ubuntu20.04"
container: "ghcr.io/emqx/emqx-builder/${{ matrix.emqx_builder }}-ubuntu20.04"
steps:
- uses: actions/checkout@v2
@ -67,32 +65,19 @@ jobs:
steps:
- uses: actions/checkout@v2
- name: set edition
id: set_edition
run: |
if make emqx-ee --dry-run > /dev/null 2>&1; then
echo "EDITION=enterprise" >> $GITHUB_ENV
else
echo "EDITION=opensource" >> $GITHUB_ENV
fi
- name: docker compose up
if: env.EDITION == 'opensource'
env:
MYSQL_TAG: 8
PGSQL_TAG: 13
REDIS_TAG: 6
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
docker-compose \
-f .ci/docker-compose-file/docker-compose-mysql-tcp.yaml \
-f .ci/docker-compose-file/docker-compose-pgsql-tcp.yaml \
-f .ci/docker-compose-file/docker-compose-redis-single-tcp.yaml \
-f .ci/docker-compose-file/docker-compose.yaml \
up -d --build
- name: docker compose up
if: env.EDITION == 'enterprise'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 20
run: |
docker-compose \
-f .ci/docker-compose-file/docker-compose.yaml \
-f .ci/docker-compose-file/docker-compose-enterprise.yaml \
up -d --build
- name: run eunit
run: |
docker exec -i ${{ matrix.otp_release }} bash -c "make eunit"

.gitignore
View File

@ -58,3 +58,5 @@ erlang_ls.config
# Emacs temporary files
.#*
*#
# For direnv
.envrc

View File

@ -1 +1 @@
erlang 24.0.5-emqx-1
erlang 24.1.5-2

View File

@ -3,16 +3,20 @@ REBAR_VERSION = 3.16.1-emqx-1
REBAR = $(CURDIR)/rebar3
BUILD = $(CURDIR)/build
SCRIPTS = $(CURDIR)/scripts
export EMQX_DEFAULT_BUILDER = ghcr.io/emqx/emqx-builder/4.4-2:23.3.4.9-3-alpine3.14
export EMQX_DEFAULT_RUNNER = alpine:3.14
export OTP_VSN ?= $(shell $(CURDIR)/scripts/get-otp-vsn.sh)
export PKG_VSN ?= $(shell $(CURDIR)/pkg-vsn.sh)
export EMQX_DESC ?= EMQ X
export EMQX_DASHBOARD_VERSION ?= v5.0.0-beta.18
export DOCKERFILE := deploy/docker/Dockerfile
export DOCKERFILE_TESTING := deploy/docker/Dockerfile.testing
ifeq ($(OS),Windows_NT)
export REBAR_COLOR=none
endif
PROFILE ?= emqx
REL_PROFILES := emqx emqx-edge
PKG_PROFILES := emqx-pkg emqx-edge-pkg
REL_PROFILES := emqx emqx-edge emqx-ee
PKG_PROFILES := emqx-pkg emqx-edge-pkg emqx-ee-pkg
PROFILES := $(REL_PROFILES) $(PKG_PROFILES) default
CT_NODE_NAME ?= 'test@127.0.0.1'
@ -85,7 +89,6 @@ coveralls: $(REBAR)
@ENABLE_COVER_COMPILE=1 $(REBAR) as test coveralls send
.PHONY: $(REL_PROFILES)
$(REL_PROFILES:%=%): $(REBAR) get-dashboard conf-segs
@$(REBAR) as $(@) do compile,release
@ -98,8 +101,10 @@ $(REL_PROFILES:%=%): $(REBAR) get-dashboard conf-segs
clean: $(PROFILES:%=clean-%)
$(PROFILES:%=clean-%):
@if [ -d _build/$(@:clean-%=%) ]; then \
rm rebar.lock \
rm -rf _build/$(@:clean-%=%)/rel; \
find _build/$(@:clean-%=%) -name '*.beam' -o -name '*.so' -o -name '*.app' -o -name '*.appup' -o -name '*.o' -o -name '*.d' -type f | xargs rm -f; \
find _build/$(@:clean-%=%) -type l -delete; \
fi
.PHONY: clean-all
@ -109,6 +114,7 @@ clean-all:
.PHONY: deps-all
deps-all: $(REBAR) $(PROFILES:%=deps-%)
@make clean # ensure clean at the end
## deps-<profile> is used in CI scripts to download deps and the
## share downloads between CI steps and/or copied into containers
@ -116,6 +122,7 @@ deps-all: $(REBAR) $(PROFILES:%=deps-%)
.PHONY: $(PROFILES:%=deps-%)
$(PROFILES:%=deps-%): $(REBAR) get-dashboard
@$(REBAR) as $(@:deps-%=%) get-deps
@rm -f rebar.lock
.PHONY: xref
xref: $(REBAR)
@ -174,5 +181,17 @@ endef
ALL_ZIPS = $(REL_PROFILES)
$(foreach zt,$(ALL_ZIPS),$(eval $(call gen-docker-target,$(zt))))
## emqx-docker-testing
## emqx-ee-docker-testing
## is to directly copy a unzipped zip-package to a
## base image such as ubuntu20.04. Mostly for testing
.PHONY: $(REL_PROFILES:%=%-docker-testing)
define gen-docker-target-testing
$1-docker-testing: $(COMMON_DEPS)
@$(BUILD) $1 docker-testing
endef
ALL_ZIPS = $(REL_PROFILES)
$(foreach zt,$(ALL_ZIPS),$(eval $(call gen-docker-target-testing,$(zt))))
conf-segs:
@scripts/merge-config.escript
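Taken together with the workflow changes above, deps-all pre-fetches dependencies before the source tree is zipped, and the *-docker-testing targets copy an unzipped package into a plain base image for testing. A rough sketch of the CI sequence, assuming the emqx profile:

make -C source deps-all                       # download deps for all profiles, then clean
zip -ryq source.zip source/* source/.[^.]*    # archive shared between jobs
make emqx-zip                                 # later, inside the builder container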

View File

@ -86,8 +86,8 @@ listeners.tcp.default {
## Set to "" to disable the feature.
##
## Variables in mountpoint string:
## - %c: clientid
## - %u: username
## - ${clientid}: clientid
## - ${username}: username
##
## @doc listeners.tcp.<name>.mountpoint
## ValueType: String
@ -185,8 +185,8 @@ listeners.ssl.default {
## Set to "" to disable the feature.
##
## Variables in mountpoint string:
## - %c: clientid
## - %u: username
## - ${clientid}: clientid
## - ${username}: username
##
## @doc listeners.ssl.<name>.mountpoint
## ValueType: String
@ -278,8 +278,8 @@ listeners.quic.default {
## Set to "" to disable the feature.
##
## Variables in mountpoint string:
## - %c: clientid
## - %u: username
## - ${clientid}: clientid
## - ${username}: username
##
## @doc listeners.quic.<name>.mountpoint
## ValueType: String
@ -372,8 +372,8 @@ listeners.ws.default {
## Set to "" to disable the feature.
##
## Variables in mountpoint string:
## - %c: clientid
## - %u: username
## - ${clientid}: clientid
## - ${username}: username
##
## @doc listeners.ws.<name>.mountpoint
## ValueType: String
@ -475,8 +475,8 @@ listeners.wss.default {
## Set to "" to disable the feature.
##
## Variables in mountpoint string:
## - %c: clientid
## - %u: username
## - ${clientid}: clientid
## - ${username}: username
##
## @doc listeners.wss.<name>.mountpoint
## ValueType: String
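These hunks replace the old %c/%u mountpoint substitutions with the ${clientid}/${username} placeholder syntax introduced by the placeholder header (EMQ_X_PLACEHOLDER_HRL) further down. A hedged sketch of setting a mountpoint through the HOCON environment override used by the CI compose files (the exact variable name is assumed from the listeners.tcp.default path):

# assumes HOCON_ENV_OVERRIDE_PREFIX=EMQX_ as in conf.cluster.env
export EMQX_LISTENERS__TCP__DEFAULT__MOUNTPOINT='${clientid}/'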

View File

@ -116,3 +116,7 @@
## patches dir
-pa {{ platform_data_dir }}/patches
## Mnesia thresholds
-mnesia dump_log_write_threshold 5000
-mnesia dump_log_time_threshold 60000

View File

@ -114,3 +114,7 @@
## patches dir
-pa {{ platform_data_dir }}/patches
## Mnesia thresholds
-mnesia dump_log_write_threshold 5000
-mnesia dump_log_time_threshold 60000

View File

@ -48,6 +48,12 @@
%% Queue topic
-define(QUEUE, <<"$queue/">>).
%%--------------------------------------------------------------------
%% alarms
%%--------------------------------------------------------------------
-define(ACTIVATED_ALARM, emqx_activated_alarm).
-define(DEACTIVATED_ALARM, emqx_deactivated_alarm).
%%--------------------------------------------------------------------
%% Message and Delivery
%%--------------------------------------------------------------------

View File

@ -0,0 +1,115 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2017-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-ifndef(EMQ_X_PLACEHOLDER_HRL).
-define(EMQ_X_PLACEHOLDER_HRL, true).
-define(PH(Type), <<"${", Type/binary, "}">> ).
%% action: publish/subscribe/all
-define(PH_ACTION, <<"${action}">> ).
%% cert
-define(PH_CERT_SUBJECT, <<"${cert_subject}">> ).
-define(PH_CERT_CN_NAME, <<"${cert_common_name}">> ).
%% MQTT
-define(PH_PASSWORD, <<"${password}">> ).
-define(PH_CLIENTID, <<"${clientid}">> ).
-define(PH_FROM_CLIENTID, <<"${from_clientid}">> ).
-define(PH_USERNAME, <<"${username}">> ).
-define(PH_FROM_USERNAME, <<"${from_username}">> ).
-define(PH_TOPIC, <<"${topic}">> ).
%% MQTT payload
-define(PH_PAYLOAD, <<"${payload}">> ).
%% client IPAddress
-define(PH_PEERHOST, <<"${peerhost}">> ).
%% ip & port
-define(PH_HOST, <<"${host}">> ).
-define(PH_PORT, <<"${port}">> ).
%% Enumeration of message QoS 0,1,2
-define(PH_QOS, <<"${qos}">> ).
-define(PH_FLAGS, <<"${flags}">> ).
%% Additional data related to process within the MQTT message
-define(PH_HEADERS, <<"${headers}">> ).
%% protocol name
-define(PH_PROTONAME, <<"${proto_name}">> ).
%% protocol version
-define(PH_PROTOVER, <<"${proto_ver}">> ).
%% MQTT keepalive interval
-define(PH_KEEPALIVE, <<"${keepalive}">> ).
%% MQTT clean_start
-define(PH_CLEAR_START, <<"${clean_start}">> ).
%% MQTT Session Expiration time
-define(PH_EXPIRY_INTERVAL, <<"${expiry_interval}">> ).
%% Time when PUBLISH message reaches Broker (ms)
-define(PH_PUBLISH_RECEIVED_AT, <<"${publish_received_at}">>).
%% Mountpoint for bridging messages
-define(PH_MOUNTPOINT, <<"${mountpoint}">> ).
%% IPAddress and Port of terminal
-define(PH_PEERNAME, <<"${peername}">> ).
%% IPAddress and Port listened by emqx
-define(PH_SOCKNAME, <<"${sockname}">> ).
%% whether it is MQTT bridge connection
-define(PH_IS_BRIDGE, <<"${is_bridge}">> ).
%% Terminal connection completion time (s)
-define(PH_CONNECTED_AT, <<"${connected_at}">> ).
%% Event trigger time(millisecond)
-define(PH_TIMESTAMP, <<"${timestamp}">> ).
%% Terminal disconnection completion time (s)
-define(PH_DISCONNECTED_AT, <<"${disconnected_at}">> ).
-define(PH_NODE, <<"${node}">> ).
-define(PH_REASON, <<"${reason}">> ).
-define(PH_ENDPOINT_NAME, <<"${endpoint_name}">> ).
%% sync change these place holder with binary def.
-define(PH_S_ACTION, "${action}" ).
-define(PH_S_CERT_SUBJECT, "${cert_subject}" ).
-define(PH_S_CERT_CN_NAME, "${cert_common_name}" ).
-define(PH_S_PASSWORD, "${password}" ).
-define(PH_S_CLIENTID, "${clientid}" ).
-define(PH_S_FROM_CLIENTID, "${from_clientid}" ).
-define(PH_S_USERNAME, "${username}" ).
-define(PH_S_FROM_USERNAME, "${from_username}" ).
-define(PH_S_TOPIC, "${topic}" ).
-define(PH_S_PAYLOAD, "${payload}" ).
-define(PH_S_PEERHOST, "${peerhost}" ).
-define(PH_S_HOST, "${host}" ).
-define(PH_S_PORT, "${port}" ).
-define(PH_S_QOS, "${qos}" ).
-define(PH_S_FLAGS, "${flags}" ).
-define(PH_S_HEADERS, "${headers}" ).
-define(PH_S_PROTONAME, "${proto_name}" ).
-define(PH_S_PROTOVER, "${proto_ver}" ).
-define(PH_S_KEEPALIVE, "${keepalive}" ).
-define(PH_S_CLEAR_START, "${clean_start}" ).
-define(PH_S_EXPIRY_INTERVAL, "${expiry_interval}" ).
-define(PH_S_PUBLISH_RECEIVED_AT, "${publish_received_at}" ).
-define(PH_S_MOUNTPOINT, "${mountpoint}" ).
-define(PH_S_PEERNAME, "${peername}" ).
-define(PH_S_SOCKNAME, "${sockname}" ).
-define(PH_S_IS_BRIDGE, "${is_bridge}" ).
-define(PH_S_CONNECTED_AT, "${connected_at}" ).
-define(PH_S_TIMESTAMP, "${timestamp}" ).
-define(PH_S_DISCONNECTED_AT, "${disconnected_at}" ).
-define(PH_S_NODE, "${node}" ).
-define(PH_S_REASON, "${reason}" ).
-define(PH_S_ENDPOINT_NAME, "${endpoint_name}" ).
-endif.

View File

@ -14,9 +14,6 @@
%% limitations under the License.
%%--------------------------------------------------------------------
-ifndef(EMQX_RELEASE_HRL).
-define(EMQX_RELEASE_HRL, true).
%% NOTE: this is the release version which is not always the same
%% as the emqx app version defined in emqx.app.src
%% App (plugin) versions are bumped independently.
@ -27,13 +24,4 @@
%% NOTE: This version number should be manually bumped for each release
-ifndef(EMQX_ENTERPRISE).
-define(EMQX_RELEASE, {opensource, "5.0-beta.1"}).
-else.
-endif.
-endif.
-define(EMQX_RELEASE, "5.0-beta.2").

View File

@ -17,10 +17,10 @@
, {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.0"}}}
, {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.11.1"}}}
, {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.5.1"}}}
, {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.20.5"}}}
, {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.20.6"}}}
, {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}}
, {recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}}
, {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "0.14.1"}}}
, {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "0.15.0"}}}
]}.
{plugins, [{rebar3_proper, "0.12.1"}]}.

View File

@ -20,6 +20,7 @@
-include("logger.hrl").
-include("types.hrl").
-elvis([{elvis_style, god_modules, disable}]).
%% Start/Stop the application
-export([ start/0
@ -51,10 +52,6 @@
, run_fold_hook/3
]).
%% Troubleshooting
-export([ set_debug_secret/1
]).
%% Configs APIs
-export([ get_config/1
, get_config/2
@ -71,29 +68,6 @@
-define(APP, ?MODULE).
%% @hidden Path to the file which has debug_info encryption secret in it.
%% Evaluate this function if there is a need to access encrypted debug_info.
%% NOTE: Do not change the API to accept the secret text because it may
%% get logged everywhere.
set_debug_secret(PathToSecretFile) ->
SecretText =
case file:read_file(PathToSecretFile) of
{ok, Secret} ->
try string:trim(binary_to_list(Secret))
catch _ : _ -> error({badfile, PathToSecretFile})
end;
{error, Reason} ->
?ULOG("Failed to read debug_info encryption key file ~ts: ~p~n",
[PathToSecretFile, Reason]),
error(Reason)
end,
F = fun(init) -> ok;
(clear) -> ok;
({debug_info, _Mode, _Module, _Filename}) -> SecretText
end,
_ = beam_lib:clear_crypto_key_fun(),
ok = beam_lib:crypto_key_fun(F).
%%--------------------------------------------------------------------
%% Bootstrap, is_running...
%%--------------------------------------------------------------------

View File

@ -17,7 +17,6 @@
-module(emqx_alarm).
-behaviour(gen_server).
-behaviour(emqx_config_handler).
-include("emqx.hrl").
-include("logger.hrl").
@ -27,22 +26,19 @@
-boot_mnesia({mnesia, [boot]}).
-export([post_config_update/4]).
-export([ start_link/0
, stop/0
-export([start_link/0
]).
-export([format/1]).
%% API
-export([ activate/1
, activate/2
, activate/3
, deactivate/1
, deactivate/2
, deactivate/3
, delete_all_deactivated_alarms/0
, get_alarms/0
, get_alarms/1
, format/1
]).
%% gen_server callbacks
@ -56,34 +52,19 @@
-record(activated_alarm, {
name :: binary() | atom(),
details :: map() | list(),
message :: binary(),
activate_at :: integer()
}).
-record(deactivated_alarm, {
activate_at :: integer(),
name :: binary() | atom(),
details :: map() | list(),
message :: binary(),
deactivate_at :: integer() | infinity
}).
-record(state, {
timer :: reference()
}).
-define(ACTIVATED_ALARM, emqx_activated_alarm).
-define(DEACTIVATED_ALARM, emqx_deactivated_alarm).
-ifdef(TEST).
-compile(export_all).
-compile(nowarn_export_all).
@ -114,20 +95,23 @@ mnesia(boot) ->
start_link() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
stop() ->
gen_server:stop(?MODULE).
activate(Name) ->
activate(Name, #{}).
activate(Name, Details) ->
gen_server:call(?MODULE, {activate_alarm, Name, Details}).
activate(Name, Details, <<"">>).
activate(Name, Details, Message) ->
gen_server:call(?MODULE, {activate_alarm, Name, Details, Message}).
deactivate(Name) ->
gen_server:call(?MODULE, {deactivate_alarm, Name, no_details}).
deactivate(Name, no_details, <<"">>).
deactivate(Name, Details) ->
gen_server:call(?MODULE, {deactivate_alarm, Name, Details}).
deactivate(Name, Details, <<"">>).
deactivate(Name, Details, Message) ->
gen_server:call(?MODULE, {deactivate_alarm, Name, Details, Message}).
delete_all_deactivated_alarms() ->
gen_server:call(?MODULE, delete_all_deactivated_alarms).
@ -144,12 +128,10 @@ get_alarms(activated) ->
get_alarms(deactivated) ->
gen_server:call(?MODULE, {get_alarms, deactivated}).
post_config_update(_, #{validity_period := Period0}, _OldConf, _AppEnv) ->
?MODULE ! {update_timer, Period0},
ok.
format(#activated_alarm{name = Name, message = Message, activate_at = At, details = Details}) ->
Now = erlang:system_time(microsecond),
%% mnesia db stored microsecond for high frequency alarm
%% format for dashboard using millisecond
#{
node => node(),
name => Name,
@ -164,15 +146,14 @@ format(#deactivated_alarm{name = Name, message = Message, activate_at = At, deta
node => node(),
name => Name,
message => Message,
duration => DAt - At,
duration => (DAt - At) div 1000, %% to millisecond
activate_at => to_rfc3339(At),
deactivate_at => to_rfc3339(DAt),
details => Details
};
format(_) ->
{error, unknow_alarm}.
}.
to_rfc3339(Timestamp) ->
%% rfc3339 accuracy to millisecond
list_to_binary(calendar:system_time_to_rfc3339(Timestamp div 1000, [{unit, millisecond}])).
%%--------------------------------------------------------------------
@ -180,85 +161,72 @@ to_rfc3339(Timestamp) ->
%%--------------------------------------------------------------------
init([]) ->
_ = mria:wait_for_tables([?ACTIVATED_ALARM, ?DEACTIVATED_ALARM]),
ok = mria:wait_for_tables([?ACTIVATED_ALARM, ?DEACTIVATED_ALARM]),
deactivate_all_alarms(),
ok = emqx_config_handler:add_handler([alarm], ?MODULE),
{ok, #state{timer = ensure_timer(undefined, get_validity_period())}}.
{ok, #{}, get_validity_period()}.
%% suppress dialyzer warning due to dirty read/write race condition.
%% TODO: change from dirty_read/write to transactional.
%% TODO: handle mnesia write errors.
-dialyzer([{nowarn_function, [handle_call/3]}]).
handle_call({activate_alarm, Name, Details}, _From, State) ->
case mnesia:dirty_read(?ACTIVATED_ALARM, Name) of
[#activated_alarm{name = Name}] ->
{reply, {error, already_existed}, State};
[] ->
Alarm = #activated_alarm{name = Name,
details = Details,
message = normalize_message(Name, Details),
activate_at = erlang:system_time(microsecond)},
mria:dirty_write(?ACTIVATED_ALARM, Alarm),
handle_call({activate_alarm, Name, Details, Message}, _From, State) ->
Res = mria:transaction(mria:local_content_shard(),
fun create_activate_alarm/3,
[Name, Details, Message]),
case Res of
{atomic, Alarm} ->
do_actions(activate, Alarm, emqx:get_config([alarm, actions])),
{reply, ok, State}
{reply, ok, State, get_validity_period()};
{aborted, Reason} ->
{reply, Reason, State, get_validity_period()}
end;
handle_call({deactivate_alarm, Name, Details}, _From, State) ->
handle_call({deactivate_alarm, Name, Details, Message}, _From, State) ->
case mnesia:dirty_read(?ACTIVATED_ALARM, Name) of
[] ->
{reply, {error, not_found}, State};
[Alarm] ->
deactivate_alarm(Details, Alarm),
{reply, ok, State}
deactivate_alarm(Alarm, Details, Message),
{reply, ok, State, get_validity_period()}
end;
handle_call(delete_all_deactivated_alarms, _From, State) ->
clear_table(?DEACTIVATED_ALARM),
{reply, ok, State};
{reply, ok, State, get_validity_period()};
handle_call({get_alarms, all}, _From, State) ->
{atomic, Alarms} =
mria:ro_transaction(
?COMMON_SHARD,
mria:local_content_shard(),
fun() ->
[normalize(Alarm) ||
Alarm <- ets:tab2list(?ACTIVATED_ALARM)
++ ets:tab2list(?DEACTIVATED_ALARM)]
end),
{reply, Alarms, State};
{reply, Alarms, State, get_validity_period()};
handle_call({get_alarms, activated}, _From, State) ->
Alarms = [normalize(Alarm) || Alarm <- ets:tab2list(?ACTIVATED_ALARM)],
{reply, Alarms, State};
{reply, Alarms, State, get_validity_period()};
handle_call({get_alarms, deactivated}, _From, State) ->
Alarms = [normalize(Alarm) || Alarm <- ets:tab2list(?DEACTIVATED_ALARM)],
{reply, Alarms, State};
{reply, Alarms, State, get_validity_period()};
handle_call(Req, _From, State) ->
?SLOG(error, #{msg => "unexpected_call", call => Req}),
{reply, ignored, State}.
handle_call(Req, From, State) ->
?SLOG(error, #{msg => "unexpected_call", call_req => Req, from => From}),
{reply, ignored, State, get_validity_period()}.
handle_cast(Msg, State) ->
?SLOG(error, #{msg => "unexpected_cast", cast => Msg}),
{noreply, State}.
?SLOG(error, #{msg => "unexpected_cast", cast_req => Msg}),
{noreply, State, get_validity_period()}.
handle_info({timeout, _TRef, delete_expired_deactivated_alarm},
#state{timer = TRef} = State) ->
handle_info(timeout, State) ->
Period = get_validity_period(),
delete_expired_deactivated_alarms(erlang:system_time(microsecond) - Period * 1000),
{noreply, State#state{timer = ensure_timer(TRef, Period)}};
handle_info({update_timer, Period}, #state{timer = TRef} = State) ->
?SLOG(warning, #{msg => "validity_timer_updated", period => Period}),
{noreply, State#state{timer = ensure_timer(TRef, Period)}};
{noreply, State, Period};
handle_info(Info, State) ->
?SLOG(error, #{msg => "unexpected_info", info => Info}),
{noreply, State}.
?SLOG(error, #{msg => "unexpected_info", info_req => Info}),
{noreply, State, get_validity_period()}.
terminate(_Reason, _State) ->
ok = emqx_config_handler:remove_handler([alarm]),
ok.
code_change(_OldVsn, State, _Extra) ->
@ -271,8 +239,21 @@ code_change(_OldVsn, State, _Extra) ->
get_validity_period() ->
emqx:get_config([alarm, validity_period]).
deactivate_alarm(Details, #activated_alarm{activate_at = ActivateAt, name = Name,
details = Details0, message = Msg0}) ->
create_activate_alarm(Name, Details, Message) ->
case mnesia:read(?ACTIVATED_ALARM, Name) of
[#activated_alarm{name = Name}] ->
mnesia:abort({error, already_existed});
[] ->
Alarm = #activated_alarm{name = Name,
details = Details,
message = normalize_message(Name, iolist_to_binary(Message)),
activate_at = erlang:system_time(microsecond)},
ok = mnesia:write(?ACTIVATED_ALARM, Alarm, write),
Alarm
end.
deactivate_alarm(#activated_alarm{activate_at = ActivateAt, name = Name,
details = Details0, message = Msg0}, Details, Message) ->
SizeLimit = emqx:get_config([alarm, size_limit]),
case SizeLimit > 0 andalso (mnesia:table_info(?DEACTIVATED_ALARM, size) >= SizeLimit) of
true ->
@ -286,7 +267,7 @@ deactivate_alarm(Details, #activated_alarm{activate_at = ActivateAt, name = Name
HistoryAlarm = make_deactivated_alarm(ActivateAt, Name, Details0, Msg0,
erlang:system_time(microsecond)),
DeActAlarm = make_deactivated_alarm(ActivateAt, Name, Details,
normalize_message(Name, Details),
normalize_message(Name, iolist_to_binary(Message)),
erlang:system_time(microsecond)),
mria:dirty_write(?DEACTIVATED_ALARM, HistoryAlarm),
mria:dirty_delete(?ACTIVATED_ALARM, Name),
@ -329,13 +310,6 @@ clear_table(TableName) ->
ok
end.
ensure_timer(OldTRef, Period) ->
_ = case is_reference(OldTRef) of
true -> erlang:cancel_timer(OldTRef);
false -> ok
end,
emqx_misc:start_timer(Period, delete_expired_deactivated_alarm).
delete_expired_deactivated_alarms(Checkpoint) ->
delete_expired_deactivated_alarms(mnesia:dirty_first(?DEACTIVATED_ALARM), Checkpoint).
@ -368,16 +342,12 @@ do_actions(deactivate, Alarm = #deactivated_alarm{name = Name}, [log | More]) ->
do_actions(deactivate, Alarm, More);
do_actions(Operation, Alarm, [publish | More]) ->
Topic = topic(Operation),
{ok, Payload} = encode_to_json(Alarm),
{ok, Payload} = emqx_json:safe_encode(normalize(Alarm)),
Message = emqx_message:make(?MODULE, 0, Topic, Payload, #{sys => true},
#{properties => #{'Content-Type' => <<"application/json">>}}),
%% TODO log failed publishes
_ = emqx_broker:safe_publish(Message),
do_actions(Operation, Alarm, More).
encode_to_json(Alarm) ->
emqx_json:safe_encode(normalize(Alarm)).
topic(activate) ->
emqx_topic:systop(<<"alarms/activate">>);
topic(deactivate) ->
@ -405,25 +375,6 @@ normalize(#deactivated_alarm{activate_at = ActivateAt,
deactivate_at => DeactivateAt,
activated => false}.
normalize_message(Name, no_details) ->
normalize_message(Name, <<"">>) ->
list_to_binary(io_lib:format("~p", [Name]));
normalize_message(runq_overload, #{node := Node, runq_length := Len}) ->
list_to_binary(io_lib:format("VM is overloaded on node: ~p: ~p", [Node, Len]));
normalize_message(high_system_memory_usage, #{high_watermark := HighWatermark}) ->
list_to_binary(io_lib:format("System memory usage is higher than ~p%", [HighWatermark]));
normalize_message(high_process_memory_usage, #{high_watermark := HighWatermark}) ->
list_to_binary(io_lib:format("Process memory usage is higher than ~p%", [HighWatermark]));
normalize_message(high_cpu_usage, #{usage := Usage}) ->
list_to_binary(io_lib:format("~ts cpu usage", [Usage]));
normalize_message(too_many_processes, #{usage := Usage}) ->
list_to_binary(io_lib:format("~ts process usage", [Usage]));
normalize_message(cluster_rpc_apply_failed, #{tnx_id := TnxId}) ->
list_to_binary(io_lib:format("cluster_rpc_apply_failed:~w", [TnxId]));
normalize_message(partition, #{occurred := Node}) ->
list_to_binary(io_lib:format("Partition occurs at node ~ts", [Node]));
normalize_message(<<"resource", _/binary>>, #{type := Type, id := ID}) ->
list_to_binary(io_lib:format("Resource ~ts(~ts) is down", [Type, ID]));
normalize_message(<<"conn_congestion/", Info/binary>>, _) ->
list_to_binary(io_lib:format("connection congested: ~ts", [Info]));
normalize_message(_Name, _UnknownDetails) ->
<<"Unknown alarm">>.
normalize_message(_Name, Message) -> Message.
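%% Illustrative sketch, not part of this commit: message formatting now happens
%% at the call sites, so a caller raises and clears an alarm roughly as below.
%% The alarm name, details and messages are made-up examples, and the return
%% values are intentionally ignored here.
example_alarm_roundtrip() ->
    Details = #{type => http, id => <<"bridge1">>},
    Msg = iolist_to_binary(io_lib:format("Resource ~ts(~ts) is down", [http, "bridge1"])),
    _ = emqx_alarm:activate(<<"resource/http_bridge">>, Details, Msg),
    _ = emqx_alarm:deactivate(<<"resource/http_bridge">>, Details, <<"Resource recovered">>),
    ok.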

View File

@ -56,14 +56,18 @@ init(_) ->
{ok, []}.
handle_event({set_alarm, {system_memory_high_watermark, []}}, State) ->
HighWatermark = emqx_os_mon:get_sysmem_high_watermark(),
Message = to_bin("System memory usage is higher than ~p%", [HighWatermark]),
emqx_alarm:activate(high_system_memory_usage,
#{high_watermark => emqx_os_mon:get_sysmem_high_watermark()}),
#{high_watermark => HighWatermark}, Message),
{ok, State};
handle_event({set_alarm, {process_memory_high_watermark, Pid}}, State) ->
HighWatermark = emqx_os_mon:get_procmem_high_watermark(),
Message = to_bin("Process memory usage is higher than ~p%", [HighWatermark]),
emqx_alarm:activate(high_process_memory_usage,
#{pid => list_to_binary(pid_to_list(Pid)),
high_watermark => emqx_os_mon:get_procmem_high_watermark()}),
high_watermark => HighWatermark}, Message),
{ok, State};
handle_event({clear_alarm, system_memory_high_watermark}, State) ->
@ -75,7 +79,9 @@ handle_event({clear_alarm, process_memory_high_watermark}, State) ->
{ok, State};
handle_event({set_alarm, {?LC_ALARM_ID_RUNQ, Info}}, State) ->
emqx_alarm:activate(runq_overload, Info),
#{node := Node, runq_length := Len} = Info,
Message = to_bin("VM is overloaded on node: ~p: ~p", [Node, Len]),
emqx_alarm:activate(runq_overload, Info, Message),
{ok, State};
handle_event({clear_alarm, ?LC_ALARM_ID_RUNQ}, State) ->
@ -95,3 +101,6 @@ terminate(swap, _State) ->
{emqx_alarm_handler, []};
terminate(_, _) ->
ok.
to_bin(Format, Args) ->
iolist_to_binary(io_lib:format(Format, Args)).

View File

@ -30,7 +30,6 @@
]).
-include("emqx.hrl").
-include("emqx_release.hrl").
-include("logger.hrl").
-define(APP, emqx).
@ -40,6 +39,7 @@
%%--------------------------------------------------------------------
start(_Type, _Args) ->
ok = emqx_release:put_edition(),
ok = maybe_load_config(),
ok = emqx_persistent_session:init_db_backend(),
ok = maybe_start_quicer(),
@ -107,30 +107,7 @@ is_quicer_app_present() ->
is_quic_listener_configured() ->
emqx_listeners:has_enabled_listener_conf_by_type(quic).
get_description() ->
{ok, Descr0} = application:get_key(?APP, description),
case os:getenv("EMQX_DESCRIPTION") of
false -> Descr0;
"" -> Descr0;
Str -> string:strip(Str, both, $\n)
end.
get_description() -> emqx_release:description().
get_release() ->
case lists:keyfind(emqx_vsn, 1, ?MODULE:module_info(compile)) of
false -> %% For TEST build or dependency build.
release_in_macro();
{_, Vsn} -> %% For emqx release build
VsnStr = release_in_macro(),
case string:str(Vsn, VsnStr) of
1 -> ok;
_ ->
erlang:error(#{ reason => version_mismatch
, source => VsnStr
, built_for => Vsn
})
end,
Vsn
end.
release_in_macro() ->
element(2, ?EMQX_RELEASE).
emqx_release:version().

View File

@ -25,6 +25,8 @@
-include("emqx.hrl").
-include("logger.hrl").
-include_lib("stdlib/include/ms_transform.hrl").
%% The authentication entrypoint.
-export([ authenticate/2
]).
@ -45,6 +47,7 @@
, delete_chain/1
, lookup_chain/1
, list_chains/0
, list_chain_names/0
, create_authenticator/2
, delete_authenticator/2
, update_authenticator/3
@ -76,8 +79,8 @@
]).
%% proxy callback
-export([ pre_config_update/2
, post_config_update/4
-export([ pre_config_update/3
, post_config_update/5
]).
-export_type([ authenticator_id/0
@ -92,9 +95,6 @@
-define(CHAINS_TAB, emqx_authn_chains).
-define(VER_1, <<"1">>).
-define(VER_2, <<"2">>).
-type chain_name() :: atom().
-type authenticator_id() :: binary().
-type position() :: top | bottom | {before, authenticator_id()}.
@ -120,10 +120,10 @@
%% parse and validate it, and return the parsed result.
-callback check_config(config()) -> config().
-callback create(Config)
-callback create(AuthenticatorID, Config)
-> {ok, State}
| {error, term()}
when Config::config(), State::state().
when AuthenticatorID::authenticator_id(), Config::config(), State::state().
-callback update(Config, State)
-> {ok, NewState}
@ -192,29 +192,6 @@ authenticate(#{listener := Listener, protocol := Protocol} = Credential, _AuthRe
NAuthenticators -> do_authenticate(NAuthenticators, Credential)
end.
do_authenticate([], _) ->
{stop, {error, not_authorized}};
do_authenticate([#authenticator{id = ID, provider = Provider, state = State} | More], Credential) ->
try Provider:authenticate(Credential, State) of
ignore ->
do_authenticate(More, Credential);
Result ->
%% {ok, Extra}
%% {ok, Extra, AuthData}
%% {continue, AuthCache}
%% {continue, AuthData, AuthCache}
%% {error, Reason}
{stop, Result}
catch
Class:Reason:Stacktrace ->
?SLOG(warning, #{msg => "unexpected_error_in_authentication",
exception => Class,
reason => Reason,
stacktrace => Stacktrace,
authenticator => ID}),
do_authenticate(More, Credential)
end.
get_authenticators(Listener, Global) ->
case ets:lookup(?CHAINS_TAB, Listener) of
[#chain{authenticators = Authenticators}] ->
@ -235,11 +212,11 @@ get_enabled(Authenticators) ->
%% APIs
%%------------------------------------------------------------------------------
pre_config_update(UpdateReq, OldConfig) ->
emqx_authentication_config:pre_config_update(UpdateReq, OldConfig).
pre_config_update(Path, UpdateReq, OldConfig) ->
emqx_authentication_config:pre_config_update(Path, UpdateReq, OldConfig).
post_config_update(UpdateReq, NewConfig, OldConfig, AppEnvs) ->
emqx_authentication_config:post_config_update(UpdateReq, NewConfig, OldConfig, AppEnvs).
post_config_update(Path, UpdateReq, NewConfig, OldConfig, AppEnvs) ->
emqx_authentication_config:post_config_update(Path, UpdateReq, NewConfig, OldConfig, AppEnvs).
%% @doc Get all registered authentication providers.
get_providers() ->
@ -274,6 +251,9 @@ initialize_authentication(ChainName, AuthenticatorsConfig) ->
-spec start_link() -> {ok, pid()} | ignore | {error, term()}.
start_link() ->
%% Create chains ETS table here so that it belongs to the supervisor
%% and survives `emqx_authentication` crashes.
ok = create_chain_table(),
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
-spec stop() -> ok.
@ -312,13 +292,24 @@ delete_chain(Name) ->
-spec lookup_chain(chain_name()) -> {ok, chain()} | {error, term()}.
lookup_chain(Name) ->
call({lookup_chain, Name}).
case ets:lookup(?CHAINS_TAB, Name) of
[] ->
{error, {not_found, {chain, Name}}};
[Chain] ->
{ok, serialize_chain(Chain)}
end.
-spec list_chains() -> {ok, [chain()]}.
list_chains() ->
Chains = ets:tab2list(?CHAINS_TAB),
{ok, [serialize_chain(Chain) || Chain <- Chains]}.
-spec list_chain_names() -> {ok, [atom()]}.
list_chain_names() ->
Select = ets:fun2ms(fun(#chain{name = Name}) -> Name end),
ChainNames = ets:select(?CHAINS_TAB, Select),
{ok, ChainNames}.
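%% Illustrative sketch, not part of this commit: lookup_chain/1, list_chains/0
%% and list_chain_names/0 now read the chains ETS table directly instead of
%% going through the gen_server. A caller could walk every chain like this
%% (function name and bindings are illustrative only):
list_all_chains_example() ->
    {ok, Names} = list_chain_names(),
    [begin
         {ok, Chain} = lookup_chain(Name),
         Chain
     end || Name <- Names].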
-spec create_authenticator(chain_name(), config()) -> {ok, authenticator()} | {error, term()}.
create_authenticator(ChainName, Config) ->
call({create_authenticator, ChainName, Config}).
@ -327,11 +318,13 @@ create_authenticator(ChainName, Config) ->
delete_authenticator(ChainName, AuthenticatorID) ->
call({delete_authenticator, ChainName, AuthenticatorID}).
-spec update_authenticator(chain_name(), authenticator_id(), config()) -> {ok, authenticator()} | {error, term()}.
-spec update_authenticator(chain_name(), authenticator_id(), config()) ->
{ok, authenticator()} | {error, term()}.
update_authenticator(ChainName, AuthenticatorID, Config) ->
call({update_authenticator, ChainName, AuthenticatorID, Config}).
-spec lookup_authenticator(chain_name(), authenticator_id()) -> {ok, authenticator()} | {error, term()}.
-spec lookup_authenticator(chain_name(), authenticator_id()) ->
{ok, authenticator()} | {error, term()}.
lookup_authenticator(ChainName, AuthenticatorID) ->
case ets:lookup(?CHAINS_TAB, ChainName) of
[] ->
@ -362,7 +355,8 @@ move_authenticator(ChainName, AuthenticatorID, Position) ->
import_users(ChainName, AuthenticatorID, Filename) ->
call({import_users, ChainName, AuthenticatorID, Filename}).
-spec add_user(chain_name(), authenticator_id(), user_info()) -> {ok, user_info()} | {error, term()}.
-spec add_user(chain_name(), authenticator_id(), user_info()) ->
{ok, user_info()} | {error, term()}.
add_user(ChainName, AuthenticatorID, UserInfo) ->
call({add_user, ChainName, AuthenticatorID, UserInfo}).
@ -370,11 +364,13 @@ add_user(ChainName, AuthenticatorID, UserInfo) ->
delete_user(ChainName, AuthenticatorID, UserID) ->
call({delete_user, ChainName, AuthenticatorID, UserID}).
-spec update_user(chain_name(), authenticator_id(), binary(), map()) -> {ok, user_info()} | {error, term()}.
-spec update_user(chain_name(), authenticator_id(), binary(), map()) ->
{ok, user_info()} | {error, term()}.
update_user(ChainName, AuthenticatorID, UserID, NewUserInfo) ->
call({update_user, ChainName, AuthenticatorID, UserID, NewUserInfo}).
-spec lookup_user(chain_name(), authenticator_id(), binary()) -> {ok, user_info()} | {error, term()}.
-spec lookup_user(chain_name(), authenticator_id(), binary()) ->
{ok, user_info()} | {error, term()}.
lookup_user(ChainName, AuthenticatorID, UserID) ->
call({lookup_user, ChainName, AuthenticatorID, UserID}).
@ -387,9 +383,6 @@ list_users(ChainName, AuthenticatorID, Params) ->
%%--------------------------------------------------------------------
init(_Opts) ->
_ = ets:new(?CHAINS_TAB, [ named_table, set, public
, {keypos, #chain.name}
, {read_concurrency, true}]),
ok = emqx_config_handler:add_handler([authentication], ?MODULE),
ok = emqx_config_handler:add_handler([listeners, '?', '?', authentication], ?MODULE),
{ok, #{hooked => false, providers => #{}}}.
@ -427,94 +420,35 @@ handle_call({delete_chain, Name}, _From, State) ->
[] ->
reply({error, {not_found, {chain, Name}}}, State);
[#chain{authenticators = Authenticators}] ->
_ = [do_delete_authenticator(Authenticator) || Authenticator <- Authenticators],
_ = [do_destroy_authenticator(Authenticator) || Authenticator <- Authenticators],
true = ets:delete(?CHAINS_TAB, Name),
reply(ok, maybe_unhook(State))
end;
handle_call({lookup_chain, Name}, _From, State) ->
case ets:lookup(?CHAINS_TAB, Name) of
[] ->
reply({error, {not_found, {chain, Name}}}, State);
[Chain] ->
reply({ok, serialize_chain(Chain)}, State)
end;
handle_call({create_authenticator, ChainName, Config}, _From, #{providers := Providers} = State) ->
UpdateFun =
fun(#chain{authenticators = Authenticators} = Chain) ->
AuthenticatorID = authenticator_id(Config),
case lists:keymember(AuthenticatorID, #authenticator.id, Authenticators) of
true ->
{error, {already_exists, {authenticator, AuthenticatorID}}};
false ->
case do_create_authenticator(ChainName, AuthenticatorID, Config, Providers) of
{ok, Authenticator} ->
NAuthenticators = Authenticators ++ [Authenticator#authenticator{enable = maps:get(enable, Config)}],
true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = NAuthenticators}),
{ok, serialize_authenticator(Authenticator)};
{error, Reason} ->
{error, Reason}
end
end
UpdateFun = fun(Chain) ->
handle_create_authenticator(Chain, Config, Providers)
end,
Reply = update_chain(ChainName, UpdateFun),
reply(Reply, maybe_hook(State));
handle_call({delete_authenticator, ChainName, AuthenticatorID}, _From, State) ->
UpdateFun =
fun(#chain{authenticators = Authenticators} = Chain) ->
case lists:keytake(AuthenticatorID, #authenticator.id, Authenticators) of
false ->
{error, {not_found, {authenticator, AuthenticatorID}}};
{value, Authenticator, NAuthenticators} ->
_ = do_delete_authenticator(Authenticator),
true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = NAuthenticators}),
ok
end
UpdateFun = fun(Chain) ->
handle_delete_authenticator(Chain, AuthenticatorID)
end,
Reply = update_chain(ChainName, UpdateFun),
reply(Reply, maybe_unhook(State));
handle_call({update_authenticator, ChainName, AuthenticatorID, Config}, _From, State) ->
UpdateFun =
fun(#chain{authenticators = Authenticators} = Chain) ->
case lists:keyfind(AuthenticatorID, #authenticator.id, Authenticators) of
false ->
{error, {not_found, {authenticator, AuthenticatorID}}};
#authenticator{provider = Provider,
state = #{version := Version} = ST} = Authenticator ->
case AuthenticatorID =:= authenticator_id(Config) of
true ->
Unique = unique(ChainName, AuthenticatorID, Version),
case Provider:update(Config#{'_unique' => Unique}, ST) of
{ok, NewST} ->
NewAuthenticator = Authenticator#authenticator{state = switch_version(NewST),
enable = maps:get(enable, Config)},
NewAuthenticators = replace_authenticator(AuthenticatorID, NewAuthenticator, Authenticators),
true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = NewAuthenticators}),
{ok, serialize_authenticator(NewAuthenticator)};
{error, Reason} ->
{error, Reason}
end;
false ->
{error, change_of_authentication_type_is_not_allowed}
end
end
UpdateFun = fun(Chain) ->
handle_update_authenticator(Chain, AuthenticatorID, Config)
end,
Reply = update_chain(ChainName, UpdateFun),
reply(Reply, State);
handle_call({move_authenticator, ChainName, AuthenticatorID, Position}, _From, State) ->
UpdateFun =
fun(#chain{authenticators = Authenticators} = Chain) ->
case do_move_authenticator(AuthenticatorID, Authenticators, Position) of
{ok, NAuthenticators} ->
true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = NAuthenticators}),
ok;
{error, Reason} ->
{error, Reason}
end
UpdateFun = fun(Chain) ->
handle_move_authenticator(Chain, AuthenticatorID, Position)
end,
Reply = update_chain(ChainName, UpdateFun),
reply(Reply, State);
@ -569,9 +503,115 @@ terminate(Reason, _State) ->
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%------------------------------------------------------------------------------
%% Private functions
%%------------------------------------------------------------------------------
handle_update_authenticator(Chain, AuthenticatorID, Config) ->
#chain{authenticators = Authenticators} = Chain,
case lists:keyfind(AuthenticatorID, #authenticator.id, Authenticators) of
false ->
{error, {not_found, {authenticator, AuthenticatorID}}};
#authenticator{provider = Provider, state = ST} = Authenticator ->
case AuthenticatorID =:= authenticator_id(Config) of
true ->
case Provider:update(Config, ST) of
{ok, NewST} ->
NewAuthenticator = Authenticator#authenticator{
state = NewST,
enable = maps:get(enable, Config)},
NewAuthenticators = replace_authenticator(
AuthenticatorID,
NewAuthenticator,
Authenticators),
true = ets:insert(
?CHAINS_TAB,
Chain#chain{authenticators = NewAuthenticators}),
{ok, serialize_authenticator(NewAuthenticator)};
{error, Reason} ->
{error, Reason}
end;
false ->
{error, change_of_authentication_type_is_not_allowed}
end
end.
handle_delete_authenticator(Chain, AuthenticatorID) ->
MatchFun = fun(#authenticator{id = ID}) ->
ID =:= AuthenticatorID
end,
case do_delete_authenticators(MatchFun, Chain) of
[] -> {error, {not_found, {authenticator, AuthenticatorID}}};
[AuthenticatorID] -> ok
end.
handle_move_authenticator(Chain, AuthenticatorID, Position) ->
#chain{authenticators = Authenticators} = Chain,
case do_move_authenticator(AuthenticatorID, Authenticators, Position) of
{ok, NAuthenticators} ->
true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = NAuthenticators}),
ok;
{error, Reason} ->
{error, Reason}
end.
handle_create_authenticator(Chain, Config, Providers) ->
#chain{authenticators = Authenticators} = Chain,
AuthenticatorID = authenticator_id(Config),
case lists:keymember(AuthenticatorID, #authenticator.id, Authenticators) of
true ->
{error, {already_exists, {authenticator, AuthenticatorID}}};
false ->
case do_create_authenticator(AuthenticatorID, Config, Providers) of
{ok, Authenticator} ->
NAuthenticators =
Authenticators ++
[Authenticator#authenticator{enable = maps:get(enable, Config)}],
true = ets:insert(?CHAINS_TAB,
Chain#chain{authenticators = NAuthenticators}),
{ok, serialize_authenticator(Authenticator)};
{error, Reason} ->
{error, Reason}
end
end.
do_authenticate([], _) ->
{stop, {error, not_authorized}};
do_authenticate([#authenticator{id = ID, provider = Provider, state = State} | More], Credential) ->
try Provider:authenticate(Credential, State) of
ignore ->
do_authenticate(More, Credential);
Result ->
%% {ok, Extra}
%% {ok, Extra, AuthData}
%% {continue, AuthCache}
%% {continue, AuthData, AuthCache}
%% {error, Reason}
{stop, Result}
catch
Class:Reason:Stacktrace ->
?SLOG(warning, #{msg => "unexpected_error_in_authentication",
exception => Class,
reason => Reason,
stacktrace => Stacktrace,
authenticator => ID}),
do_authenticate(More, Credential)
end.
reply(Reply, State) ->
{reply, Reply, State}.
create_chain_table() ->
try
_ = ets:new(?CHAINS_TAB, [named_table, set, public,
{keypos, #chain.name},
{read_concurrency, true}]),
ok
catch
error:badarg -> ok
end.
global_chain(mqtt) ->
'mqtt:global';
global_chain('mqtt-sn') ->
@ -611,25 +651,35 @@ maybe_unhook(#{hooked := true} = State) ->
maybe_unhook(State) ->
State.
do_create_authenticator(ChainName, AuthenticatorID, #{enable := Enable} = Config, Providers) ->
do_create_authenticator(AuthenticatorID, #{enable := Enable} = Config, Providers) ->
case maps:get(authn_type(Config), Providers, undefined) of
undefined ->
{error, no_available_provider};
Provider ->
Unique = unique(ChainName, AuthenticatorID, ?VER_1),
case Provider:create(Config#{'_unique' => Unique}) of
case Provider:create(AuthenticatorID, Config) of
{ok, State} ->
Authenticator = #authenticator{id = AuthenticatorID,
provider = Provider,
enable = Enable,
state = switch_version(State)},
state = State},
{ok, Authenticator};
{error, Reason} ->
{error, Reason}
end
end.
do_delete_authenticator(#authenticator{provider = Provider, state = State}) ->
do_delete_authenticators(MatchFun, #chain{authenticators = Authenticators} = Chain) ->
{Matching, Others} = lists:partition(MatchFun, Authenticators),
MatchingIDs = lists:map(
fun(#authenticator{id = ID}) -> ID end,
Matching),
ok = lists:foreach(fun do_destroy_authenticator/1, Matching),
true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = Others}),
MatchingIDs.
do_destroy_authenticator(#authenticator{provider = Provider, state = State}) ->
_ = Provider:destroy(State),
ok.
@ -702,17 +752,6 @@ serialize_authenticator(#authenticator{id = ID,
, state => State
}.
unique(ChainName, AuthenticatorID, Version) ->
NChainName = atom_to_binary(ChainName),
<<NChainName/binary, "/", AuthenticatorID/binary, ":", Version/binary>>.
switch_version(State = #{version := ?VER_1}) ->
State#{version := ?VER_2};
switch_version(State = #{version := ?VER_2}) ->
State#{version := ?VER_1};
switch_version(State) ->
State#{version => ?VER_2}.
authn_type(#{mechanism := Mechanism, backend := Backend}) ->
{Mechanism, Backend};
authn_type(#{mechanism := Mechanism}) ->

View File

@ -19,8 +19,8 @@
-behaviour(emqx_config_handler).
-export([ pre_config_update/2
, post_config_update/4
-export([ pre_config_update/3
, post_config_update/5
]).
-export([ authenticator_id/1
@ -53,9 +53,9 @@
%% Callbacks of config handler
%%------------------------------------------------------------------------------
-spec pre_config_update(update_request(), emqx_config:raw_config())
-spec pre_config_update(list(atom()), update_request(), emqx_config:raw_config())
-> {ok, map() | list()} | {error, term()}.
pre_config_update(UpdateReq, OldConfig) ->
pre_config_update(_, UpdateReq, OldConfig) ->
try do_pre_config_update(UpdateReq, to_list(OldConfig)) of
{error, Reason} -> {error, Reason};
{ok, NewConfig} -> {ok, return_map(NewConfig)}
@ -102,34 +102,34 @@ do_pre_config_update({move_authenticator, _ChainName, AuthenticatorID, Position}
end
end.
-spec post_config_update(update_request(), map() | list(), emqx_config:raw_config(), emqx_config:app_envs())
-spec post_config_update(list(atom()), update_request(), map() | list(), emqx_config:raw_config(), emqx_config:app_envs())
-> ok | {ok, map()} | {error, term()}.
post_config_update(UpdateReq, NewConfig, OldConfig, AppEnvs) ->
post_config_update(_, UpdateReq, NewConfig, OldConfig, AppEnvs) ->
do_post_config_update(UpdateReq, check_configs(to_list(NewConfig)), OldConfig, AppEnvs).
do_post_config_update({create_authenticator, ChainName, Config}, _NewConfig, _OldConfig, _AppEnvs) ->
NConfig = check_config(Config),
do_post_config_update({create_authenticator, ChainName, Config}, NewConfig, _OldConfig, _AppEnvs) ->
NConfig = get_authenticator_config(authenticator_id(Config), NewConfig),
_ = emqx_authentication:create_chain(ChainName),
emqx_authentication:create_authenticator(ChainName, NConfig);
do_post_config_update({delete_authenticator, ChainName, AuthenticatorID}, _NewConfig, OldConfig, _AppEnvs) ->
case emqx_authentication:delete_authenticator(ChainName, AuthenticatorID) of
ok ->
[Config] = [Config0 || Config0 <- to_list(OldConfig), AuthenticatorID == authenticator_id(Config0)],
Config = get_authenticator_config(AuthenticatorID, to_list(OldConfig)),
CertsDir = certs_dir(ChainName, AuthenticatorID),
ok = clear_certs(CertsDir, Config);
{error, Reason} ->
{error, Reason}
end;
do_post_config_update({update_authenticator, ChainName, AuthenticatorID, Config}, _NewConfig, _OldConfig, _AppEnvs) ->
NConfig = check_config(Config),
emqx_authentication:update_authenticator(ChainName, AuthenticatorID, NConfig);
do_post_config_update({update_authenticator, ChainName, AuthenticatorID, Config}, NewConfig, _OldConfig, _AppEnvs) ->
case get_authenticator_config(authenticator_id(Config), NewConfig) of
{error, not_found} ->
{error, {not_found, {authenticator, AuthenticatorID}}};
NConfig ->
emqx_authentication:update_authenticator(ChainName, AuthenticatorID, NConfig)
end;
do_post_config_update({move_authenticator, ChainName, AuthenticatorID, Position}, _NewConfig, _OldConfig, _AppEnvs) ->
emqx_authentication:move_authenticator(ChainName, AuthenticatorID, Position).
check_config(Config) ->
[Checked] = check_configs([Config]),
Checked.
check_configs(Configs) ->
Providers = emqx_authentication:get_providers(),
lists:map(fun(C) -> do_check_conifg(C, Providers) end, Configs).
@ -208,6 +208,12 @@ clear_certs(CertsDir, Config) ->
OldSSL = maps:get(<<"ssl">>, Config, undefined),
ok = emqx_tls_lib:delete_ssl_files(CertsDir, undefined, OldSSL).
get_authenticator_config(AuthenticatorID, AuthenticatorsConfig) ->
case [C0 || C0 <- AuthenticatorsConfig, AuthenticatorID == authenticator_id(C0)] of
[C | _] -> C;
[] -> {error, not_found}
end.
split_by_id(ID, AuthenticatorsConfig) ->
case lists:foldl(
fun(C, {P1, P2, F0}) ->
@ -268,4 +274,3 @@ dir(ChainName, ID) when is_binary(ID) ->
binary:replace(iolist_to_binary([to_bin(ChainName), "-", ID]), <<":">>, <<"-">>);
dir(ChainName, Config) when is_map(Config) ->
dir(ChainName, authenticator_id(Config)).

View File

@ -37,6 +37,7 @@
, info/1
, format/1
, parse/1
, to_timestamp/1
]).
%% gen_server callbacks
@ -108,8 +109,8 @@ parse(Params) ->
Who = pares_who(Params),
By = maps:get(<<"by">>, Params, <<"mgmt_api">>),
Reason = maps:get(<<"reason">>, Params, <<"">>),
At = pares_time(maps:get(<<"at">>, Params, undefined), erlang:system_time(second)),
Until = pares_time(maps:get(<<"until">>, Params, undefined), At + 5 * 60),
At = parse_time(maps:get(<<"at">>, Params, undefined), erlang:system_time(second)),
Until = parse_time(maps:get(<<"until">>, Params, undefined), At + 5 * 60),
#banned{
who = Who,
by = By,
@ -120,15 +121,15 @@ parse(Params) ->
pares_who(#{as := As, who := Who}) ->
pares_who(#{<<"as">> => As, <<"who">> => Who});
pares_who(#{<<"as">> := <<"peerhost">>, <<"who">> := Peerhost0}) ->
pares_who(#{<<"as">> := peerhost, <<"who">> := Peerhost0}) ->
{ok, Peerhost} = inet:parse_address(binary_to_list(Peerhost0)),
{peerhost, Peerhost};
pares_who(#{<<"as">> := As, <<"who">> := Who}) ->
{binary_to_atom(As, utf8), Who}.
{As, Who}.
pares_time(undefined, Default) ->
parse_time(undefined, Default) ->
Default;
pares_time(Rfc3339, _Default) ->
parse_time(Rfc3339, _Default) ->
to_timestamp(Rfc3339).
maybe_format_host({peerhost, Host}) ->
@ -145,19 +146,36 @@ to_timestamp(Rfc3339) when is_binary(Rfc3339) ->
to_timestamp(Rfc3339) ->
calendar:rfc3339_to_system_time(Rfc3339, [{unit, second}]).
-spec(create(emqx_types:banned() | map()) -> ok).
-spec(create(emqx_types:banned() | map()) ->
{ok, emqx_types:banned()} | {error, {already_exist, emqx_types:banned()}}).
create(#{who := Who,
by := By,
reason := Reason,
at := At,
until := Until}) ->
mria:dirty_write(?BANNED_TAB, #banned{who = Who,
Banned = #banned{
who = Who,
by = By,
reason = Reason,
at = At,
until = Until});
create(Banned) when is_record(Banned, banned) ->
mria:dirty_write(?BANNED_TAB, Banned).
until = Until
},
create(Banned);
create(Banned = #banned{who = Who}) ->
case look_up(Who) of
[] ->
mria:dirty_write(?BANNED_TAB, Banned),
{ok, Banned};
[OldBanned = #banned{until = Until}] ->
case Until > erlang:system_time(second) of
true ->
{error, {already_exist, OldBanned}};
false ->
mria:dirty_write(?BANNED_TAB, Banned),
{ok, Banned}
end
end.
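%% Illustrative sketch, not part of this commit: an API handler could combine
%% parse/1 with the new create/1 return values roughly as below. The params map
%% and the {200, _} / {400, _} result tuples are made-up examples; note that the
%% "as" value is expected to be an atom already at this point.
ban_example() ->
    Params = #{<<"as">> => clientid,
               <<"who">> => <<"bad-client">>,
               <<"by">> => <<"example">>,
               <<"reason">> => <<"abuse">>,
               <<"until">> => <<"2030-01-01T00:00:00+00:00">>},
    case create(parse(Params)) of
        {ok, Banned} -> {200, format(Banned)};
        {error, {already_exist, Old}} -> {400, format(Old)}
    end.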
look_up(Who) when is_map(Who) ->
look_up(pares_who(Who));

View File

@ -81,7 +81,7 @@
-define(SUBSCRIPTION, emqx_subscription).
%% Guards
-define(is_subid(Id), (is_binary(Id) orelse is_atom(Id))).
-define(IS_SUBID(Id), (is_binary(Id) orelse is_atom(Id))).
-spec(start_link(atom(), pos_integer()) -> startlink_ret()).
start_link(Pool, Id) ->
@ -117,15 +117,17 @@ subscribe(Topic) when is_binary(Topic) ->
subscribe(Topic, undefined).
-spec(subscribe(emqx_types:topic(), emqx_types:subid() | emqx_types:subopts()) -> ok).
subscribe(Topic, SubId) when is_binary(Topic), ?is_subid(SubId) ->
subscribe(Topic, SubId) when is_binary(Topic), ?IS_SUBID(SubId) ->
subscribe(Topic, SubId, ?DEFAULT_SUBOPTS);
subscribe(Topic, SubOpts) when is_binary(Topic), is_map(SubOpts) ->
subscribe(Topic, undefined, SubOpts).
-spec(subscribe(emqx_types:topic(), emqx_types:subid(), emqx_types:subopts()) -> ok).
subscribe(Topic, SubId, SubOpts0) when is_binary(Topic), ?is_subid(SubId), is_map(SubOpts0) ->
subscribe(Topic, SubId, SubOpts0) when is_binary(Topic), ?IS_SUBID(SubId), is_map(SubOpts0) ->
SubOpts = maps:merge(?DEFAULT_SUBOPTS, SubOpts0),
case ets:member(?SUBOPTION, {SubPid = self(), Topic}) of
_ = emqx_trace:subscribe(Topic, SubId, SubOpts),
SubPid = self(),
case ets:member(?SUBOPTION, {SubPid, Topic}) of
false -> %% New
ok = emqx_broker_helper:register_sub(SubPid, SubId),
do_subscribe(Topic, SubPid, with_subid(SubId, SubOpts));
@ -171,6 +173,7 @@ unsubscribe(Topic) when is_binary(Topic) ->
case ets:lookup(?SUBOPTION, {SubPid, Topic}) of
[{_, SubOpts}] ->
_ = emqx_broker_helper:reclaim_seq(Topic),
_ = emqx_trace:unsubscribe(Topic, SubOpts),
do_unsubscribe(Topic, SubPid, SubOpts);
[] -> ok
end.
@ -198,7 +201,7 @@ do_unsubscribe(Group, Topic, SubPid, _SubOpts) ->
-spec(publish(emqx_types:message()) -> emqx_types:publish_result()).
publish(Msg) when is_record(Msg, message) ->
_ = emqx_tracer:trace(publish, Msg),
_ = emqx_trace:publish(Msg),
emqx_message:is_sys(Msg) orelse emqx_metrics:inc('messages.publish'),
case emqx_hooks:run_fold('message.publish', [], emqx_message:clean_dup(Msg)) of
#message{headers = #{allow_publish := false}} ->
@ -267,7 +270,7 @@ aggre(Routes) ->
end, [], Routes).
%% @doc Forward message to another node.
-spec(forward(node(), emqx_types:topic(), emqx_types:delivery(), RpcMode::sync|async)
-spec(forward(node(), emqx_types:topic(), emqx_types:delivery(), RpcMode::sync | async)
-> emqx_types:deliver_result()).
forward(Node, To, Delivery, async) ->
case emqx_rpc:cast(To, Node, ?BROKER, dispatch, [To, Delivery]) of
@ -380,14 +383,14 @@ subscriptions(SubId) ->
-spec(subscribed(pid() | emqx_types:subid(), emqx_types:topic()) -> boolean()).
subscribed(SubPid, Topic) when is_pid(SubPid) ->
ets:member(?SUBOPTION, {SubPid, Topic});
subscribed(SubId, Topic) when ?is_subid(SubId) ->
subscribed(SubId, Topic) when ?IS_SUBID(SubId) ->
SubPid = emqx_broker_helper:lookup_subpid(SubId),
ets:member(?SUBOPTION, {SubPid, Topic}).
-spec(get_subopts(pid(), emqx_types:topic()) -> maybe(emqx_types:subopts())).
get_subopts(SubPid, Topic) when is_pid(SubPid), is_binary(Topic) ->
lookup_value(?SUBOPTION, {SubPid, Topic});
get_subopts(SubId, Topic) when ?is_subid(SubId) ->
get_subopts(SubId, Topic) when ?IS_SUBID(SubId) ->
case emqx_broker_helper:lookup_subpid(SubId) of
SubPid when is_pid(SubPid) ->
get_subopts(SubPid, Topic);
@ -455,7 +458,8 @@ handle_call({subscribe, Topic}, _From, State) ->
{reply, Ok, State};
handle_call({subscribe, Topic, I}, _From, State) ->
Ok = case get(Shard = {Topic, I}) of
Shard = {Topic, I},
Ok = case get(Shard) of
undefined ->
_ = put(Shard, true),
true = ets:insert(?SUBSCRIBER, {Topic, {shard, I}}),
@ -512,4 +516,3 @@ code_change(_OldVsn, State, _Extra) ->
%%--------------------------------------------------------------------
%% Internal functions
%%--------------------------------------------------------------------

View File

@ -103,7 +103,7 @@
-type(reply() :: {outgoing, emqx_types:packet()}
| {outgoing, [emqx_types:packet()]}
| {event, conn_state()|updated}
| {event, conn_state() | updated}
| {close, Reason :: atom()}).
-type(replies() :: emqx_types:packet() | reply() | [reply()]).
@ -132,7 +132,7 @@
info(Channel) ->
maps:from_list(info(?INFO_KEYS, Channel)).
-spec(info(list(atom())|atom(), channel()) -> term()).
-spec(info(list(atom()) | atom(), channel()) -> term()).
info(Keys, Channel) when is_list(Keys) ->
[{Key, info(Key, Channel)} || Key <- Keys];
info(conninfo, #channel{conninfo = ConnInfo}) ->
@ -287,7 +287,7 @@ handle_in(?CONNECT_PACKET(), Channel = #channel{conn_state = ConnState})
handle_in(?CONNECT_PACKET(), Channel = #channel{conn_state = connecting}) ->
handle_out(connack, ?RC_PROTOCOL_ERROR, Channel);
handle_in(?CONNECT_PACKET(ConnPkt), Channel) ->
handle_in(?CONNECT_PACKET(ConnPkt) = Packet, Channel) ->
case pipeline([fun overload_protection/2,
fun enrich_conninfo/2,
fun run_conn_hooks/2,
@ -297,6 +297,7 @@ handle_in(?CONNECT_PACKET(ConnPkt), Channel) ->
fun check_banned/2
], ConnPkt, Channel#channel{conn_state = connecting}) of
{ok, NConnPkt, NChannel = #channel{clientinfo = ClientInfo}} ->
?LOG(debug, "RECV ~s", [emqx_packet:format(Packet)]),
NChannel1 = NChannel#channel{
will_msg = emqx_packet:will_msg(NConnPkt),
alias_maximum = init_alias_maximum(NConnPkt, ClientInfo)
@ -328,10 +329,16 @@ handle_in(Packet = ?AUTH_PACKET(ReasonCode, _Properties),
connecting ->
process_connect(NProperties, ensure_connected(NChannel));
_ ->
handle_out(auth, {?RC_SUCCESS, NProperties}, NChannel#channel{conn_state = connected})
handle_out( auth
, {?RC_SUCCESS, NProperties}
, NChannel#channel{conn_state = connected}
)
end;
{continue, NProperties, NChannel} ->
handle_out(auth, {?RC_CONTINUE_AUTHENTICATION, NProperties}, NChannel#channel{conn_state = reauthenticating});
handle_out( auth
, {?RC_CONTINUE_AUTHENTICATION, NProperties}
, NChannel#channel{conn_state = reauthenticating}
);
{error, NReasonCode} ->
case ConnState of
connecting ->
@ -632,7 +639,7 @@ do_publish(PacketId, Msg = #message{qos = ?QOS_2},
?SLOG(warning, #{
msg => "dropped_qos2_packet",
reason => emqx_reason_codes:name(RC),
packetId => PacketId
packet_id => PacketId
}),
ok = emqx_metrics:inc('packets.publish.dropped'),
handle_out(pubrec, {PacketId, RC}, Channel)
@ -655,7 +662,7 @@ ensure_quota(PubRes, Channel = #channel{quota = Limiter}) ->
-compile({inline, [puback_reason_code/1]}).
puback_reason_code([]) -> ?RC_NO_MATCHING_SUBSCRIBERS;
puback_reason_code([_|_]) -> ?RC_SUCCESS.
puback_reason_code([_ | _]) -> ?RC_SUCCESS.
-compile({inline, [after_message_acked/3]}).
after_message_acked(ClientInfo, Msg, PubAckProps) ->
@ -674,7 +681,7 @@ process_subscribe(TopicFilters, SubProps, Channel) ->
process_subscribe([], _SubProps, Channel, Acc) ->
{lists:reverse(Acc), Channel};
process_subscribe([Topic = {TopicFilter, SubOpts}|More], SubProps, Channel, Acc) ->
process_subscribe([Topic = {TopicFilter, SubOpts} | More], SubProps, Channel, Acc) ->
case check_sub_caps(TopicFilter, SubOpts, Channel) of
ok ->
{ReasonCode, NChannel} = do_subscribe(TopicFilter,
@ -716,9 +723,9 @@ process_unsubscribe(TopicFilters, UnSubProps, Channel) ->
process_unsubscribe([], _UnSubProps, Channel, Acc) ->
{lists:reverse(Acc), Channel};
process_unsubscribe([{TopicFilter, SubOpts}|More], UnSubProps, Channel, Acc) ->
process_unsubscribe([{TopicFilter, SubOpts} | More], UnSubProps, Channel, Acc) ->
{RC, NChannel} = do_unsubscribe(TopicFilter, SubOpts#{unsub_props => UnSubProps}, Channel),
process_unsubscribe(More, UnSubProps, NChannel, [RC|Acc]).
process_unsubscribe(More, UnSubProps, NChannel, [RC | Acc]).
do_unsubscribe(TopicFilter, SubOpts, Channel =
#channel{clientinfo = ClientInfo = #{mountpoint := MountPoint},
@ -790,7 +797,9 @@ handle_deliver(Delivers, Channel = #channel{takeover = true,
pendings = Pendings,
session = Session,
clientinfo = #{clientid := ClientId}}) ->
NPendings = lists:append(Pendings, emqx_session:ignore_local(maybe_nack(Delivers), ClientId, Session)),
NPendings = lists:append(
Pendings,
emqx_session:ignore_local(maybe_nack(Delivers), ClientId, Session)),
{ok, Channel#channel{pendings = NPendings}};
handle_deliver(Delivers, Channel = #channel{session = Session,
@ -995,6 +1004,17 @@ handle_call({quota, Policy}, Channel) ->
Quota = emqx_limiter:init(Zone, Policy),
reply(ok, Channel#channel{quota = Quota});
handle_call({keepalive, Interval}, Channel = #channel{keepalive = KeepAlive,
conninfo = ConnInfo}) ->
ClientId = info(clientid, Channel),
NKeepalive = emqx_keepalive:set(interval, Interval * 1000, KeepAlive),
NConnInfo = maps:put(keepalive, Interval, ConnInfo),
NChannel = Channel#channel{keepalive = NKeepalive, conninfo = NConnInfo},
SockInfo = maps:get(sockinfo, emqx_cm:get_chan_info(ClientId), #{}),
ChanInfo1 = info(NChannel),
emqx_cm:set_chan_info(ClientId, ChanInfo1#{sockinfo => SockInfo}),
reply(ok, reset_timer(alive_timer, NChannel));
handle_call(Req, Channel) ->
?SLOG(error, #{msg => "unexpected_call", call => Req}),
reply(ignored, Channel).
@ -1045,7 +1065,7 @@ handle_info(clean_authz_cache, Channel) ->
handle_info(die_if_test = Info, Channel) ->
die_if_test_compiled(),
?LOG(error, "Unexpected info: ~p", [Info]),
?SLOG(error, #{msg => "unexpected_info", info => Info}),
{ok, Channel};
handle_info(Info, Channel) ->
@ -1125,7 +1145,7 @@ handle_timeout(_TRef, expire_quota_limit, Channel) ->
{ok, clean_timer(quota_timer, Channel)};
handle_timeout(_TRef, Msg, Channel) ->
?SLOG(error, #{msg => "unexpected_timeout", timeout_message => Msg}),
?SLOG(error, #{msg => "unexpected_timeout", timeout_msg => Msg}),
{ok, Channel}.
%%--------------------------------------------------------------------
@ -1179,19 +1199,25 @@ terminate(_, #channel{conn_state = idle}) -> ok;
terminate(normal, Channel) ->
run_terminate_hook(normal, Channel);
terminate({shutdown, kicked}, Channel) ->
_ = emqx_persistent_session:persist(Channel#channel.clientinfo,
Channel#channel.conninfo,
Channel#channel.session),
run_terminate_hook(kicked, Channel);
terminate({shutdown, Reason}, Channel) when Reason =:= discarded;
Reason =:= takeovered ->
run_terminate_hook(Reason, Channel);
terminate(Reason, Channel = #channel{will_msg = WillMsg}) ->
(WillMsg =/= undefined) andalso publish_will_msg(WillMsg),
(Reason =:= expired) andalso persist_if_session(Channel),
run_terminate_hook(Reason, Channel).
persist_if_session(#channel{session = Session} = Channel) ->
case emqx_session:is_session(Session) of
true ->
_ = emqx_persistent_session:persist(Channel#channel.clientinfo,
Channel#channel.conninfo,
Channel#channel.session),
run_terminate_hook(Reason, Channel).
ok;
false ->
ok
end.
run_terminate_hook(_Reason, #channel{session = undefined}) -> ok;
run_terminate_hook(Reason, #channel{clientinfo = ClientInfo, session = Session}) ->
@ -1359,16 +1385,19 @@ authenticate(?AUTH_PACKET(_, #{'Authentication-Method' := AuthMethod} = Properti
{error, ?RC_BAD_AUTHENTICATION_METHOD}
end.
do_authenticate(#{auth_method := AuthMethod} = Credential, #channel{clientinfo = ClientInfo} = Channel) ->
do_authenticate(#{auth_method := AuthMethod} = Credential,
#channel{clientinfo = ClientInfo} = Channel) ->
Properties = #{'Authentication-Method' => AuthMethod},
case emqx_access_control:authenticate(Credential) of
{ok, Result} ->
{ok, Properties,
Channel#channel{clientinfo = ClientInfo#{is_superuser => maps:get(is_superuser, Result, false)},
Channel#channel{
clientinfo = ClientInfo#{is_superuser => maps:get(is_superuser, Result, false)},
auth_cache = #{}}};
{ok, Result, AuthData} ->
{ok, Properties#{'Authentication-Data' => AuthData},
Channel#channel{clientinfo = ClientInfo#{is_superuser => maps:get(is_superuser, Result, false)},
Channel#channel{
clientinfo = ClientInfo#{is_superuser => maps:get(is_superuser, Result, false)},
auth_cache = #{}}};
{continue, AuthCache} ->
{continue, Properties, Channel#channel{auth_cache = AuthCache}};
@ -1606,6 +1635,8 @@ ensure_connected(Channel = #channel{conninfo = ConnInfo,
clientinfo = ClientInfo}) ->
NConnInfo = ConnInfo#{connected_at => erlang:system_time(millisecond)},
ok = run_hooks('client.connected', [ClientInfo, NConnInfo]),
ChanPid = self(),
emqx_cm:mark_channel_connected(ChanPid),
Channel#channel{conninfo = NConnInfo,
conn_state = connected
}.
@ -1691,6 +1722,8 @@ ensure_disconnected(Reason, Channel = #channel{conninfo = ConnInfo,
clientinfo = ClientInfo}) ->
NConnInfo = ConnInfo#{disconnected_at => erlang:system_time(millisecond)},
ok = run_hooks('client.disconnected', [ClientInfo, Reason, NConnInfo]),
ChanPid = self(),
emqx_cm:mark_channel_disconnected(ChanPid),
Channel#channel{conninfo = NConnInfo, conn_state = disconnected}.
%%--------------------------------------------------------------------

View File

@ -58,7 +58,10 @@
, lookup_channels/2
]).
-export([all_channels/0]).
%% Test/debug interface
-export([ all_channels/0
, all_client_ids/0
]).
%% gen_server callbacks
-export([ init/1
@ -70,7 +73,12 @@
]).
%% Internal export
-export([stats_fun/0]).
-export([ stats_fun/0
, clean_down/1
, mark_channel_connected/1
, mark_channel_disconnected/1
, get_connected_client_count/0
]).
-type(chan_pid() :: pid()).
@ -78,11 +86,13 @@
-define(CHAN_TAB, emqx_channel).
-define(CHAN_CONN_TAB, emqx_channel_conn).
-define(CHAN_INFO_TAB, emqx_channel_info).
-define(CHAN_LIVE_TAB, emqx_channel_live).
-define(CHAN_STATS,
[{?CHAN_TAB, 'channels.count', 'channels.max'},
{?CHAN_TAB, 'sessions.count', 'sessions.max'},
{?CHAN_CONN_TAB, 'connections.count', 'connections.max'}
{?CHAN_CONN_TAB, 'connections.count', 'connections.max'},
{?CHAN_LIVE_TAB, 'live_connections.count', 'live_connections.max'}
]).
%% Batch drain
@ -91,7 +101,14 @@
%% Server name
-define(CM, ?MODULE).
-define(T_TAKEOVER, 15000).
-define(T_KICK, 5_000).
-define(T_GET_INFO, 5_000).
-define(T_TAKEOVER, 15_000).
%% linting overrides
-elvis([ {elvis_style, invalid_dynamic_call, #{ignore => [emqx_cm]}}
, {elvis_style, god_modules, #{ignore => [emqx_cm]}}
]).
%% @doc Start the channel manager.
-spec(start_link() -> startlink_ret()).
@ -162,7 +179,7 @@ get_chan_info(ClientId, ChanPid) when node(ChanPid) == node() ->
error:badarg -> undefined
end;
get_chan_info(ClientId, ChanPid) ->
rpc_call(node(ChanPid), get_chan_info, [ClientId, ChanPid]).
rpc_call(node(ChanPid), get_chan_info, [ClientId, ChanPid], ?T_GET_INFO).
%% @doc Update infos of the channel.
-spec(set_chan_info(emqx_types:clientid(), emqx_types:attrs()) -> boolean()).
@ -187,7 +204,7 @@ get_chan_stats(ClientId, ChanPid) when node(ChanPid) == node() ->
error:badarg -> undefined
end;
get_chan_stats(ClientId, ChanPid) ->
rpc_call(node(ChanPid), get_chan_stats, [ClientId, ChanPid]).
rpc_call(node(ChanPid), get_chan_stats, [ClientId, ChanPid], ?T_GET_INFO).
%% @doc Set channel's stats.
-spec(set_chan_stats(emqx_types:clientid(), emqx_types:stats()) -> boolean()).
@ -236,7 +253,10 @@ open_session(false, ClientInfo = #{clientid := ClientId}, ConnInfo) ->
pendings => Pendings}};
{living, ConnMod, ChanPid, Session} ->
ok = emqx_session:resume(ClientInfo, Session),
Session1 = emqx_persistent_session:persist(ClientInfo, ConnInfo, Session),
Session1 = emqx_persistent_session:persist( ClientInfo
, ConnInfo
, Session
),
Pendings = ConnMod:call(ChanPid, {takeover, 'end'}, ?T_TAKEOVER),
register_channel(ClientId, Self, ConnInfo),
{ok, #{session => Session1,
@ -245,12 +265,18 @@ open_session(false, ClientInfo = #{clientid := ClientId}, ConnInfo) ->
{expired, OldSession} ->
_ = emqx_persistent_session:discard(ClientId, OldSession),
Session = create_session(ClientInfo, ConnInfo),
Session1 = emqx_persistent_session:persist(ClientInfo, ConnInfo, Session),
Session1 = emqx_persistent_session:persist( ClientInfo
, ConnInfo
, Session
),
register_channel(ClientId, Self, ConnInfo),
{ok, #{session => Session1, present => false}};
none ->
Session = create_session(ClientInfo, ConnInfo),
Session1 = emqx_persistent_session:persist(ClientInfo, ConnInfo, Session),
Session1 = emqx_persistent_session:persist( ClientInfo
, ConnInfo
, Session
),
register_channel(ClientId, Self, ConnInfo),
{ok, #{session => Session1, present => false}}
end
@ -300,7 +326,7 @@ takeover_session(ClientId) ->
[ChanPid] ->
takeover_session(ClientId, ChanPid);
ChanPids ->
[ChanPid|StalePids] = lists:reverse(ChanPids),
[ChanPid | StalePids] = lists:reverse(ChanPids),
?SLOG(warning, #{msg => "more_than_one_channel_found", chan_pids => ChanPids}),
lists:foreach(fun(StalePid) ->
catch discard_session(ClientId, StalePid)
@ -308,82 +334,136 @@ takeover_session(ClientId) ->
takeover_session(ClientId, ChanPid)
end.
takeover_session(ClientId, ChanPid) when node(ChanPid) == node() ->
takeover_session(ClientId, Pid) ->
try do_takeover_session(ClientId, Pid)
catch
_ : noproc -> % emqx_ws_connection: call
emqx_persistent_session:lookup(ClientId);
_ : {noproc, _} -> % emqx_connection: gen_server:call
emqx_persistent_session:lookup(ClientId);
_ : {'EXIT', {noproc, _}} -> % rpc_call/3
emqx_persistent_session:lookup(ClientId)
end.
do_takeover_session(ClientId, ChanPid) when node(ChanPid) == node() ->
case get_chann_conn_mod(ClientId, ChanPid) of
undefined ->
emqx_persistent_session:lookup(ClientId);
ConnMod when is_atom(ConnMod) ->
%% TODO: if takeover times out, maybe kill the old?
Session = ConnMod:call(ChanPid, {takeover, 'begin'}, ?T_TAKEOVER),
{living, ConnMod, ChanPid, Session}
end;
takeover_session(ClientId, ChanPid) ->
rpc_call(node(ChanPid), takeover_session, [ClientId, ChanPid]).
do_takeover_session(ClientId, ChanPid) ->
rpc_call(node(ChanPid), takeover_session, [ClientId, ChanPid], ?T_TAKEOVER).
%% @doc Discard all the sessions identified by the ClientId.
-spec(discard_session(emqx_types:clientid()) -> ok).
discard_session(ClientId) when is_binary(ClientId) ->
case lookup_channels(ClientId) of
[] -> ok;
ChanPids -> lists:foreach(fun(Pid) -> do_discard_session(ClientId, Pid) end, ChanPids)
ChanPids -> lists:foreach(fun(Pid) -> discard_session(ClientId, Pid) end, ChanPids)
end.
do_discard_session(ClientId, Pid) ->
%% @private Kick a local stale session to force it to step down.
%% If the kick fails (e.g. times out), force a kill.
%% Keeping the stale pid around, returning an error, or raising an exception
%% benefits nobody.
-spec kick_or_kill(kick | discard, module(), pid()) -> ok.
kick_or_kill(Action, ConnMod, Pid) ->
try
discard_session(ClientId, Pid)
%% This is essentially a gen_server:call implemented in emqx_connection
%% and emqx_ws_connection; the corresponding handle_call is implemented
%% in emqx_channel.
ok = apply(ConnMod, call, [Pid, Action, ?T_KICK])
catch
_ : noproc -> % emqx_ws_connection: call
?tp(debug, "session_already_gone", #{pid => Pid}),
ok;
ok = ?tp(debug, "session_already_gone", #{pid => Pid, action => Action});
_ : {noproc, _} -> % emqx_connection: gen_server:call
?tp(debug, "session_already_gone", #{pid => Pid}),
ok;
_ : {'EXIT', {noproc, _}} -> % rpc_call/3
?tp(debug, "session_already_gone", #{pid => Pid}),
ok;
ok = ?tp(debug, "session_already_gone", #{pid => Pid, action => Action});
_ : {shutdown, _} ->
ok = ?tp(debug, "session_already_shutdown", #{pid => Pid, action => Action});
_ : {{shutdown, _}, _} ->
?tp(debug, "session_already_shutdown", #{pid => Pid}),
ok;
ok = ?tp(debug, "session_already_shutdown", #{pid => Pid, action => Action});
_ : {timeout, {gen_server, call, _}} ->
?tp(warning, "session_kick_timeout",
#{pid => Pid,
action => Action,
stale_channel => stale_channel_info(Pid)
}),
ok = force_kill(Pid);
_ : Error : St ->
?tp(error, "failed_to_discard_session",
#{pid => Pid, reason => Error, stacktrace=>St})
?tp(error, "session_kick_exception",
#{pid => Pid,
action => Action,
reason => Error,
stacktrace => St,
stale_channel => stale_channel_info(Pid)
}),
ok = force_kill(Pid)
end.
discard_session(ClientId, ChanPid) when node(ChanPid) == node() ->
case get_chann_conn_mod(ClientId, ChanPid) of
undefined -> ok;
ConnMod when is_atom(ConnMod) ->
ConnMod:call(ChanPid, discard, ?T_TAKEOVER)
end;
force_kill(Pid) ->
exit(Pid, kill),
ok.
stale_channel_info(Pid) ->
process_info(Pid, [status, message_queue_len, current_stacktrace]).
discard_session(ClientId, ChanPid) ->
rpc_call(node(ChanPid), discard_session, [ClientId, ChanPid]).
kick_session(discard, ClientId, ChanPid).
kick_session(ClientId, ChanPid) ->
kick_session(kick, ClientId, ChanPid).
%% @private This function is shared for session 'kick' and 'discard' (as the first arg Action).
kick_session(Action, ClientId, ChanPid) when node(ChanPid) == node() ->
case get_chann_conn_mod(ClientId, ChanPid) of
undefined ->
%% already deregistered
ok;
ConnMod when is_atom(ConnMod) ->
ok = kick_or_kill(Action, ConnMod, ChanPid)
end;
kick_session(Action, ClientId, ChanPid) ->
%% Call the remote node via the old APIs because we do not know whether
%% it has been upgraded to have kick_session/3.
Function = case Action of
discard -> discard_session;
kick -> kick_session
end,
try
rpc_call(node(ChanPid), Function, [ClientId, ChanPid], ?T_KICK)
catch
Error : Reason ->
%% This should mostly be RPC failures.
%% However, if the node is still running old-version code
%% (prior to emqx app 4.3.10), some of the RPC handler
%% exceptions may get propagated to a new-version node.
?SLOG(error, #{ msg => "failed_to_kick_session_on_remote_node"
, node => node(ChanPid)
, action => Action
, error => Error
, reason => Reason
})
end.
kick_session(ClientId) ->
case lookup_channels(ClientId) of
[] -> {error, not_found};
[ChanPid] ->
kick_session(ClientId, ChanPid);
[] ->
?SLOG(warning, #{msg => "kicked_an_unknown_session",
clientid => ClientId}),
ok;
ChanPids ->
[ChanPid|StalePids] = lists:reverse(ChanPids),
?SLOG(warning, #{msg => "more_than_one_channel_found", chan_pids => ChanPids}),
lists:foreach(fun(StalePid) ->
catch discard_session(ClientId, StalePid)
end, StalePids),
kick_session(ClientId, ChanPid)
case length(ChanPids) > 1 of
true ->
?SLOG(warning, #{msg => "more_than_one_channel_found",
chan_pids => ChanPids});
false -> ok
end,
lists:foreach(fun(Pid) -> kick_session(ClientId, Pid) end, ChanPids)
end.
kick_session(ClientId, ChanPid) when node(ChanPid) == node() ->
case get_chan_info(ClientId, ChanPid) of
#{conninfo := #{conn_mod := ConnMod}} ->
ConnMod:call(ChanPid, kick, ?T_TAKEOVER);
undefined ->
{error, not_found}
end;
kick_session(ClientId, ChanPid) ->
rpc_call(node(ChanPid), kick_session, [ClientId, ChanPid]).
%% @doc Is clean start?
% is_clean_start(#{clean_start := false}) -> false;
% is_clean_start(_Attrs) -> true.
@ -395,11 +475,17 @@ with_channel(ClientId, Fun) ->
Pids -> Fun(lists:last(Pids))
end.
%% @doc Get all channels registered.
%% @doc Get all registered channel pids. Debug/test interface
all_channels() ->
Pat = [{{'_', '$1'}, [], ['$1']}],
ets:select(?CHAN_TAB, Pat).
%% @doc Get all registered client IDs. Debug/test interface
all_client_ids() ->
Pat = [{{'$1', '_'}, [], ['$1']}],
ets:select(?CHAN_TAB, Pat).
%% @doc Lookup channels.
-spec(lookup_channels(emqx_types:clientid()) -> list(chan_pid())).
lookup_channels(ClientId) ->
@ -419,10 +505,16 @@ lookup_channels(local, ClientId) ->
[ChanPid || {_, ChanPid} <- ets:lookup(?CHAN_TAB, ClientId)].
%% @private
rpc_call(Node, Fun, Args) ->
case rpc:call(Node, ?MODULE, Fun, Args) of
{badrpc, Reason} -> error(Reason);
Res -> Res
rpc_call(Node, Fun, Args, Timeout) ->
case rpc:call(Node, ?MODULE, Fun, Args, 2 * Timeout) of
{badrpc, Reason} ->
%% Since emqx app 4.3.10, the 'kick' and 'discard' call handlers
%% should catch all exceptions and always return 'ok'.
%% This leaves 'badrpc' possible only when there is a problem
%% calling the remote node.
error({badrpc, Reason});
Res ->
Res
end.
%% @private
@ -437,8 +529,10 @@ init([]) ->
ok = emqx_tables:new(?CHAN_TAB, [bag, {read_concurrency, true} | TabOpts]),
ok = emqx_tables:new(?CHAN_CONN_TAB, [bag | TabOpts]),
ok = emqx_tables:new(?CHAN_INFO_TAB, [set, compressed | TabOpts]),
ok = emqx_tables:new(?CHAN_LIVE_TAB, [set, {write_concurrency, true} | TabOpts]),
ok = emqx_stats:update_interval(chan_stats, fun ?MODULE:stats_fun/0),
{ok, #{chan_pmon => emqx_pmon:new()}}.
State = #{chan_pmon => emqx_pmon:new()},
{ok, State}.
handle_call(Req, _From, State) ->
?SLOG(error, #{msg => "unexpected_call", call => Req}),
@ -447,17 +541,21 @@ handle_call(Req, _From, State) ->
handle_cast({registered, {ClientId, ChanPid}}, State = #{chan_pmon := PMon}) ->
PMon1 = emqx_pmon:monitor(ChanPid, ClientId, PMon),
{noreply, State#{chan_pmon := PMon1}};
handle_cast(Msg, State) ->
?SLOG(error, #{msg => "unexpected_cast", cast => Msg}),
{noreply, State}.
handle_info({'DOWN', _MRef, process, Pid, _Reason}, State = #{chan_pmon := PMon}) ->
?tp(emqx_cm_process_down, #{pid => Pid, reason => _Reason}),
ChanPids = [Pid | emqx_misc:drain_down(?BATCH_SIZE)],
{Items, PMon1} = emqx_pmon:erase_all(ChanPids, PMon),
ok = emqx_pool:async_submit(fun lists:foreach/2, [fun clean_down/1, Items]),
lists:foreach(
fun({ChanPid, _ClientID}) ->
mark_channel_disconnected(ChanPid)
end,
Items),
ok = emqx_pool:async_submit(fun lists:foreach/2, [fun ?MODULE:clean_down/1, Items]),
{noreply, State#{chan_pmon := PMon1}};
handle_info(Info, State) ->
?SLOG(error, #{msg => "unexpected_info", info => Info}),
@ -492,5 +590,20 @@ get_chann_conn_mod(ClientId, ChanPid) when node(ChanPid) == node() ->
error:badarg -> undefined
end;
get_chann_conn_mod(ClientId, ChanPid) ->
rpc_call(node(ChanPid), get_chann_conn_mod, [ClientId, ChanPid]).
rpc_call(node(ChanPid), get_chann_conn_mod, [ClientId, ChanPid], ?T_GET_INFO).
mark_channel_connected(ChanPid) ->
?tp(emqx_cm_connected_client_count_inc, #{}),
ets:insert_new(?CHAN_LIVE_TAB, {ChanPid, true}),
ok.
mark_channel_disconnected(ChanPid) ->
?tp(emqx_cm_connected_client_count_dec, #{}),
ets:delete(?CHAN_LIVE_TAB, ChanPid),
ok.
get_connected_client_count() ->
case ets:info(?CHAN_LIVE_TAB, size) of
undefined -> 0;
Size -> Size
end.
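%% Illustrative sketch, not part of this commit: channels insert/delete their
%% own pid in the live table from emqx_channel:ensure_connected/1 and
%% ensure_disconnected/2, so the gauge is just the ETS table size. ChanPid
%% below stands for any channel process pid.
live_count_example(ChanPid) ->
    ok = mark_channel_connected(ChanPid),      %% inserts {ChanPid, true}
    Count = get_connected_client_count(),      %% reads ets:info(?CHAN_LIVE_TAB, size)
    ok = mark_channel_disconnected(ChanPid),   %% removes the entry again
    Count.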

View File

@ -16,6 +16,7 @@
-module(emqx_config).
-compile({no_auto_import, [get/0, get/1, put/2, erase/1]}).
-elvis([{elvis_style, god_modules, disable}]).
-export([ init_load/1
, init_load/2
@ -138,10 +139,9 @@ get(KeyPath, Default) -> do_get(?CONF, KeyPath, Default).
{ok, term()} | {not_found, emqx_map_lib:config_key_path(), term()}.
find([]) ->
Ref = make_ref(),
Res = do_get(?CONF, [], Ref),
case Res =:= Ref of
true -> {not_found, []};
false -> {ok, Res}
case do_get(?CONF, [], Ref) of
Ref -> {not_found, []};
Res -> {ok, Res}
end;
find(KeyPath) ->
?ATOM_CONF_PATH(KeyPath, emqx_map_lib:deep_find(AtomKeyPath, get_root(KeyPath)),
@ -151,10 +151,9 @@ find(KeyPath) ->
{ok, term()} | {not_found, emqx_map_lib:config_key_path(), term()}.
find_raw([]) ->
Ref = make_ref(),
Res = do_get(?RAW_CONF, [], Ref),
case Res =:= Ref of
true -> {not_found, []};
false -> {ok, Res}
case do_get(?RAW_CONF, [], Ref) of
Ref -> {not_found, []};
Res -> {ok, Res}
end;
find_raw(KeyPath) ->
emqx_map_lib:deep_find([bin(Key) || Key <- KeyPath], get_root_raw(KeyPath)).
@ -288,8 +287,7 @@ check_config(SchemaMod, RawConf) ->
},
{AppEnvs, CheckedConf} =
hocon_schema:map_translate(SchemaMod, RawConf, Opts),
Conf = maps:with(maps:keys(RawConf), CheckedConf),
{AppEnvs, emqx_map_lib:unsafe_atom_key_map(Conf)}.
{AppEnvs, emqx_map_lib:unsafe_atom_key_map(CheckedConf)}.
-spec fill_defaults(raw_config()) -> map().
fill_defaults(RawConf) ->
@ -349,7 +347,8 @@ get_root_names() ->
get_atom_root_names() ->
[atom(N) || N <- get_root_names()].
-spec save_configs(app_envs(), config(), raw_config(), raw_config(), update_opts()) -> ok | {error, term()}.
-spec save_configs(app_envs(), config(), raw_config(), raw_config(), update_opts()) ->
ok | {error, term()}.
save_configs(_AppEnvs, Conf, RawConf, OverrideConf, Opts) ->
%% We may also need to support hot config updates for the apps that use application envs.
%% If that is the case, uncomment the following line to update the configs to the app envs.

View File

@ -45,14 +45,14 @@
-type handler_name() :: module().
-type handlers() :: #{emqx_config:config_key() => handlers(), ?MOD => handler_name()}.
-optional_callbacks([ pre_config_update/2
, post_config_update/4
-optional_callbacks([ pre_config_update/3
, post_config_update/5
]).
-callback pre_config_update(emqx_config:update_request(), emqx_config:raw_config()) ->
-callback pre_config_update([atom()], emqx_config:update_request(), emqx_config:raw_config()) ->
{ok, emqx_config:update_request()} | {error, term()}.
-callback post_config_update(emqx_config:update_request(), emqx_config:config(),
-callback post_config_update([atom()], emqx_config:update_request(), emqx_config:config(),
emqx_config:config(), emqx_config:app_envs()) ->
ok | {ok, Result::any()} | {error, Reason::term()}.
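%% Illustrative sketch, not part of this commit: a handler adapted to the new
%% arities receives the config key path as an extra first argument in both
%% callbacks. The module name and root key below are made up:
%%
%%   -module(my_feature_config).
%%   -behaviour(emqx_config_handler).
%%   -export([pre_config_update/3, post_config_update/5]).
%%
%%   pre_config_update(_Path, UpdateReq, _OldRawConf) ->
%%       {ok, UpdateReq}.
%%
%%   post_config_update([my_feature | _], _UpdateReq, _NewConf, _OldConf, _AppEnvs) ->
%%       ok.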
@ -181,14 +181,20 @@ process_update_request(ConfKeyPath, Handlers, {{update, UpdateReq}, Opts}) ->
Error -> Error
end.
do_update_config([], Handlers, OldRawConf, UpdateReq) ->
call_pre_config_update(Handlers, OldRawConf, UpdateReq);
do_update_config([ConfKey | ConfKeyPath], Handlers, OldRawConf, UpdateReq) ->
do_update_config(ConfKeyPath, Handlers, OldRawConf, UpdateReq) ->
do_update_config(ConfKeyPath, Handlers, OldRawConf, UpdateReq, []).
do_update_config([], Handlers, OldRawConf, UpdateReq, ConfKeyPath) ->
call_pre_config_update(Handlers, OldRawConf, UpdateReq, ConfKeyPath);
do_update_config([ConfKey | SubConfKeyPath], Handlers, OldRawConf,
UpdateReq, ConfKeyPath0) ->
ConfKeyPath = ConfKeyPath0 ++ [ConfKey],
SubOldRawConf = get_sub_config(bin(ConfKey), OldRawConf),
SubHandlers = get_sub_handlers(ConfKey, Handlers),
case do_update_config(ConfKeyPath, SubHandlers, SubOldRawConf, UpdateReq) of
case do_update_config(SubConfKeyPath, SubHandlers, SubOldRawConf, UpdateReq, ConfKeyPath) of
{ok, NewUpdateReq} ->
call_pre_config_update(Handlers, OldRawConf, #{bin(ConfKey) => NewUpdateReq});
call_pre_config_update(Handlers, OldRawConf, #{bin(ConfKey) => NewUpdateReq},
ConfKeyPath);
Error ->
Error
end.
@ -211,18 +217,25 @@ check_and_save_configs(SchemaModule, ConfKeyPath, Handlers, NewRawConf, Override
Error -> Error
end.
do_post_config_update([], Handlers, OldConf, NewConf, AppEnvs, UpdateArgs, Result) ->
call_post_config_update(Handlers, OldConf, NewConf, AppEnvs, up_req(UpdateArgs), Result);
do_post_config_update([ConfKey | ConfKeyPath], Handlers, OldConf, NewConf, AppEnvs, UpdateArgs,
Result) ->
do_post_config_update(ConfKeyPath, Handlers, OldConf, NewConf, AppEnvs, UpdateArgs, Result) ->
do_post_config_update(ConfKeyPath, Handlers, OldConf, NewConf, AppEnvs, UpdateArgs,
Result, []).
do_post_config_update([], Handlers, OldConf, NewConf, AppEnvs, UpdateArgs, Result,
ConfKeyPath) ->
call_post_config_update(Handlers, OldConf, NewConf, AppEnvs, up_req(UpdateArgs),
Result, ConfKeyPath);
do_post_config_update([ConfKey | SubConfKeyPath], Handlers, OldConf, NewConf, AppEnvs,
UpdateArgs, Result, ConfKeyPath0) ->
ConfKeyPath = ConfKeyPath0 ++ [ConfKey],
SubOldConf = get_sub_config(ConfKey, OldConf),
SubNewConf = get_sub_config(ConfKey, NewConf),
SubHandlers = get_sub_handlers(ConfKey, Handlers),
case do_post_config_update(ConfKeyPath, SubHandlers, SubOldConf, SubNewConf, AppEnvs,
UpdateArgs, Result) of
case do_post_config_update(SubConfKeyPath, SubHandlers, SubOldConf, SubNewConf, AppEnvs,
UpdateArgs, Result, ConfKeyPath) of
{ok, Result1} ->
call_post_config_update(Handlers, OldConf, NewConf, AppEnvs, up_req(UpdateArgs),
Result1);
Result1, ConfKeyPath);
Error -> Error
end.
@ -237,22 +250,23 @@ get_sub_config(ConfKey, Conf) when is_map(Conf) ->
get_sub_config(_, _Conf) -> %% the Conf is a primitive
undefined.
call_pre_config_update(Handlers, OldRawConf, UpdateReq) ->
call_pre_config_update(Handlers, OldRawConf, UpdateReq, ConfKeyPath) ->
HandlerName = maps:get(?MOD, Handlers, undefined),
case erlang:function_exported(HandlerName, pre_config_update, 2) of
case erlang:function_exported(HandlerName, pre_config_update, 3) of
true ->
case HandlerName:pre_config_update(UpdateReq, OldRawConf) of
case HandlerName:pre_config_update(ConfKeyPath, UpdateReq, OldRawConf) of
{ok, NewUpdateReq} -> {ok, NewUpdateReq};
{error, Reason} -> {error, {pre_config_update, HandlerName, Reason}}
end;
false -> merge_to_old_config(UpdateReq, OldRawConf)
end.
call_post_config_update(Handlers, OldConf, NewConf, AppEnvs, UpdateReq, Result) ->
call_post_config_update(Handlers, OldConf, NewConf, AppEnvs, UpdateReq, Result, ConfKeyPath) ->
HandlerName = maps:get(?MOD, Handlers, undefined),
case erlang:function_exported(HandlerName, post_config_update, 4) of
case erlang:function_exported(HandlerName, post_config_update, 5) of
true ->
case HandlerName:post_config_update(UpdateReq, NewConf, OldConf, AppEnvs) of
case HandlerName:post_config_update(ConfKeyPath, UpdateReq, NewConf, OldConf,
AppEnvs) of
ok -> {ok, Result};
{ok, Result1} ->
{ok, Result#{HandlerName => Result1}};

View File

@ -78,13 +78,15 @@ cancel_alarm_congestion(Socket, Transport, Channel, Reason) ->
do_alarm_congestion(Socket, Transport, Channel, Reason) ->
ok = update_alarm_sent_at(Reason),
AlarmDetails = tcp_congestion_alarm_details(Socket, Transport, Channel),
emqx_alarm:activate(?ALARM_CONN_CONGEST(Channel, Reason), AlarmDetails),
Message = io_lib:format("connection congested: ~ts", [AlarmDetails]),
emqx_alarm:activate(?ALARM_CONN_CONGEST(Channel, Reason), AlarmDetails, Message),
ok.
do_cancel_alarm_congestion(Socket, Transport, Channel, Reason) ->
ok = remove_alarm_sent_at(Reason),
AlarmDetails = tcp_congestion_alarm_details(Socket, Transport, Channel),
emqx_alarm:deactivate(?ALARM_CONN_CONGEST(Channel, Reason), AlarmDetails),
Message = io_lib:format("connection congested: ~ts", [AlarmDetails]),
emqx_alarm:deactivate(?ALARM_CONN_CONGEST(Channel, Reason), AlarmDetails, Message),
ok.
is_tcp_congested(Socket, Transport) ->

View File

@ -149,7 +149,7 @@ start_link(Transport, Socket, Options) ->
%%--------------------------------------------------------------------
%% @doc Get infos of the connection/channel.
-spec(info(pid()|state()) -> emqx_types:infos()).
-spec(info(pid() | state()) -> emqx_types:infos()).
info(CPid) when is_pid(CPid) ->
call(CPid, info);
info(State = #state{channel = Channel}) ->
@ -176,7 +176,7 @@ info(limiter, #state{limiter = Limiter}) ->
maybe_apply(fun emqx_limiter:info/1, Limiter).
%% @doc Get stats of the connection/channel.
-spec(stats(pid()|state()) -> emqx_types:stats()).
-spec(stats(pid() | state()) -> emqx_types:stats()).
stats(CPid) when is_pid(CPid) ->
call(CPid, stats);
stats(#state{transport = Transport,
@ -373,7 +373,7 @@ cancel_stats_timer(State) -> State.
process_msg([], State) ->
{ok, State};
process_msg([Msg|More], State) ->
process_msg([Msg | More], State) ->
try
case handle_msg(Msg, State) of
ok ->
@ -475,7 +475,7 @@ handle_msg({Passive, _Sock}, State)
handle_msg(Deliver = {deliver, _Topic, _Msg}, #state{
listener = {Type, Listener}} = State) ->
ActiveN = get_active_n(Type, Listener),
Delivers = [Deliver|emqx_misc:drain_deliver(ActiveN)],
Delivers = [Deliver | emqx_misc:drain_deliver(ActiveN)],
with_channel(handle_deliver, [Delivers], State);
%% Something sent
@ -540,7 +540,7 @@ terminate(Reason, State = #state{channel = Channel, transport = Transport,
?tp(warning, unclean_terminate, #{exception => E, context => C, stacktrace => S})
end,
?tp(info, terminate, #{reason => Reason}),
maybe_raise_excption(Reason).
maybe_raise_exception(Reason).
%% close socket, discard new state, always return ok.
close_socket_ok(State) ->
@ -548,12 +548,12 @@ close_socket_ok(State) ->
ok.
%% tell truth about the original exception
maybe_raise_excption(#{exception := Exception,
maybe_raise_exception(#{exception := Exception,
context := Context,
stacktrace := Stacktrace
}) ->
erlang:raise(Exception, Context, Stacktrace);
maybe_raise_excption(Reason) ->
maybe_raise_exception(Reason) ->
exit(Reason).
%%--------------------------------------------------------------------
@ -649,7 +649,7 @@ parse_incoming(Data, Packets, State = #state{parse_state = ParseState}) ->
{Packets, State#state{parse_state = NParseState}};
{ok, Packet, Rest, NParseState} ->
NState = State#state{parse_state = NParseState},
parse_incoming(Rest, [Packet|Packets], NState)
parse_incoming(Rest, [Packet | Packets], NState)
catch
throw : ?FRAME_PARSE_ERROR(Reason) ->
?SLOG(info, #{ reason => Reason
@ -679,7 +679,7 @@ next_incoming_msgs(Packets) ->
handle_incoming(Packet, State) when is_record(Packet, mqtt_packet) ->
ok = inc_incoming_stats(Packet),
?SLOG(debug, #{msg => "RECV_packet", packet => Packet}),
?SLOG(debug, #{msg => "RECV_packet", packet => emqx_packet:format(Packet)}),
with_channel(handle_in, [Packet], State);
handle_incoming(FrameError, State) ->
@ -752,7 +752,7 @@ send(IoData, #state{transport = Transport, socket = Socket, channel = Channel})
ok = emqx_metrics:inc('bytes.sent', Oct),
inc_counter(outgoing_bytes, Oct),
emqx_congestion:maybe_alarm_conn_congestion(Socket, Transport, Channel),
case Transport:async_send(Socket, IoData, [nosuspend]) of
case Transport:async_send(Socket, IoData, []) of
ok -> ok;
Error = {error, _Reason} ->
%% Send an inet_reply to postpone handling the error

View File

@ -129,7 +129,8 @@ handle_cast({detected, #flapping{clientid = ClientId,
reason = <<"flapping is detected">>,
at = Now,
until = Now + (Interval div 1000)},
emqx_banned:create(Banned);
{ok, _} = emqx_banned:create(Banned),
ok;
false ->
?SLOG(warning, #{
msg => "client_disconnected",

View File

@ -77,6 +77,8 @@
priority :: integer()
}).
-type(callback() :: #callback{}).
-record(hook, {
name :: hookpoint(),
callbacks :: list(#callback{})
@ -112,7 +114,7 @@ callback_priority(#callback{priority= P}) -> P.
%%--------------------------------------------------------------------
%% @doc Register a callback
-spec(add(hookpoint(), action() | #callback{}) -> ok_or_error(already_exists)).
-spec(add(hookpoint(), action() | callback()) -> ok_or_error(already_exists)).
add(HookPoint, Callback) when is_record(Callback, callback) ->
gen_server:call(?SERVER, {add, HookPoint, Callback}, infinity);
add(HookPoint, Action) when is_function(Action); is_tuple(Action) ->
@ -131,7 +133,7 @@ add(HookPoint, Action, Filter, Priority) when is_integer(Priority) ->
add(HookPoint, #callback{action = Action, filter = Filter, priority = Priority}).
%% @doc Like add/2, it registers a callback, discarding the 'already_exists' error.
-spec(put(hookpoint(), action() | #callback{}) -> ok).
-spec(put(hookpoint(), action() | callback()) -> ok).
put(HookPoint, Callback) when is_record(Callback, callback) ->
case add(HookPoint, Callback) of
ok -> ok;
@ -211,7 +213,7 @@ safe_execute({M, F, A}, Args) ->
exception => Error,
reason => Reason,
stacktrace => Stacktrace,
failed_call => {M, F, A}
failed_call => {M, F, Args ++ A}
})
end.
@ -220,7 +222,7 @@ execute({M, F, A}, Args) ->
erlang:apply(M, F, Args ++ A).
%% @doc Lookup callbacks.
-spec(lookup(hookpoint()) -> [#callback{}]).
-spec(lookup(hookpoint()) -> [callback()]).
lookup(HookPoint) ->
case ets:lookup(?TAB, HookPoint) of
[#hook{callbacks = Callbacks}] ->
@ -292,10 +294,10 @@ add_callback(C, Callbacks) ->
add_callback(C, Callbacks, []).
add_callback(C, [], Acc) ->
lists:reverse([C|Acc]);
add_callback(C1 = #callback{priority = P1}, [C2 = #callback{priority = P2}|More], Acc)
lists:reverse([C | Acc]);
add_callback(C1 = #callback{priority = P1}, [C2 = #callback{priority = P2} | More], Acc)
when P1 =< P2 ->
add_callback(C1, More, [C2|Acc]);
add_callback(C1, More, [C2 | Acc]);
add_callback(C1, More, Acc) ->
lists:append(lists:reverse(Acc), [C1 | More]).
@ -310,4 +312,3 @@ del_callback(Action = {M, F}, [#callback{action = {M, F, _A}} | Callbacks], Acc)
del_callback(Action, Callbacks, Acc);
del_callback(Action, [Callback | Callbacks], Acc) ->
del_callback(Action, Callbacks, [Callback | Acc]).

View File

@ -20,8 +20,11 @@
, info/1
, info/2
, check/2
, set/3
]).
-elvis([{elvis_style, no_if_expression, disable}]).
-export_type([keepalive/0]).
-record(keepalive, {
@ -49,7 +52,7 @@ info(#keepalive{interval = Interval,
repeat => Repeat
}.
-spec(info(interval|statval|repeat, keepalive())
-spec(info(interval | statval | repeat, keepalive())
-> non_neg_integer()).
info(interval, #keepalive{interval = Interval}) ->
Interval;
@ -71,3 +74,7 @@ check(NewVal, KeepAlive = #keepalive{statval = OldVal,
true -> {error, timeout}
end.
%% @doc Update keepalive's interval
-spec(set(interval, non_neg_integer(), keepalive()) -> keepalive()).
set(interval, Interval, KeepAlive) ->
KeepAlive#keepalive{interval = Interval}.
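A hedged usage sketch of the new setter; the variable names and value are illustrative:

%% update the interval of an existing keepalive record
KeepAlive1 = emqx_keepalive:set(interval, 30000, KeepAlive0).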

View File

@ -17,6 +17,8 @@
%% @doc Start/Stop MQTT listeners.
-module(emqx_listeners).
-elvis([{elvis_style, dont_repeat_yourself, #{min_complexity => 10000}}]).
-include("emqx_mqtt.hrl").
-include("logger.hrl").
@ -28,6 +30,7 @@
, is_running/1
, current_conns/2
, max_conns/2
, id_example/0
]).
-export([ start_listener/1
@ -43,11 +46,23 @@
, parse_listener_id/1
]).
-export([post_config_update/4]).
-export([post_config_update/5]).
-define(CONF_KEY_PATH, [listeners]).
-define(TYPES_STRING, ["tcp","ssl","ws","wss","quic"]).
-spec(id_example() -> atom()).
id_example() ->
id_example(list()).
id_example([]) ->
{ID, _} = hd(list()),
ID;
id_example([{'tcp:default', _} | _]) ->
'tcp:default';
id_example([_ | Listeners]) ->
id_example(Listeners).
%% @doc List configured listeners.
-spec(list() -> [{ListenerId :: atom(), ListenerConf :: map()}]).
list() ->
@ -235,10 +250,10 @@ do_start_listener(quic, ListenerName, #{bind := ListenOn} = Opts) ->
, {key, maps:get(keyfile, Opts)}
, {alpn, ["mqtt"]}
, {conn_acceptors, lists:max([DefAcceptors, maps:get(acceptors, Opts, 0)])}
, {idle_timeout_ms, lists:max([
emqx_config:get_zone_conf(zone(Opts), [mqtt, idle_timeout]) * 3
, timer:seconds(maps:get(idle_timeout, Opts))]
)}
, {idle_timeout_ms,
lists:max([
emqx_config:get_zone_conf(zone(Opts), [mqtt, idle_timeout]) * 3,
timer:seconds(maps:get(idle_timeout, Opts))])}
],
ConnectionOpts = #{ conn_callback => emqx_quic_connection
, peer_unidi_stream_count => 1
@ -257,7 +272,7 @@ delete_authentication(Type, ListenerName, _Conf) ->
emqx_authentication:delete_chain(listener_id(Type, ListenerName)).
%% Update the listeners at runtime
post_config_update(_Req, NewListeners, OldListeners, _AppEnvs) ->
post_config_update(_, _Req, NewListeners, OldListeners, _AppEnvs) ->
#{added := Added, removed := Removed, changed := Updated}
= diff_listeners(NewListeners, OldListeners),
perform_listener_changes(fun stop_listener/3, Removed),
@ -281,7 +296,8 @@ flatten_listeners(Conf0) ->
|| {Type, Conf} <- maps:to_list(Conf0)])).
do_flatten_listeners(Type, Conf0) ->
[{listener_id(Type, Name), maps:remove(authentication, Conf)} || {Name, Conf} <- maps:to_list(Conf0)].
[{listener_id(Type, Name), maps:remove(authentication, Conf)} ||
{Name, Conf} <- maps:to_list(Conf0)].
esockd_opts(Type, Opts0) ->
Opts1 = maps:with([acceptors, max_connections, proxy_protocol, proxy_protocol_timeout], Opts0),
@ -352,10 +368,13 @@ listener_id(Type, ListenerName) ->
list_to_atom(lists:append([str(Type), ":", str(ListenerName)])).
parse_listener_id(Id) ->
[Type, Name] = string:split(str(Id), ":", leading),
case string:split(str(Id), ":", leading) of
[Type, Name] ->
case lists:member(Type, ?TYPES_STRING) of
true -> {list_to_existing_atom(Type), list_to_atom(Name)};
false -> {error, {invalid_listener_id, Id}}
end;
_ -> {error, {invalid_listener_id, Id}}
end.
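A quick sketch of the expected behaviour, assuming the listener types listed in ?TYPES_STRING:

%% parse_listener_id(<<"tcp:default">>)  -> {tcp, default}
%% parse_listener_id(<<"nope:default">>) -> {error, {invalid_listener_id, <<"nope:default">>}}
%% parse_listener_id(<<"tcpdefault">>)   -> {error, {invalid_listener_id, <<"tcpdefault">>}}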
zone(Opts) ->

View File

@ -20,6 +20,7 @@
-behaviour(gen_server).
-behaviour(emqx_config_handler).
-elvis([{elvis_style, god_modules, disable}]).
%% gen_server callbacks
-export([ start_link/0
@ -70,7 +71,7 @@
, stop_log_handler/1
]).
-export([post_config_update/4]).
-export([post_config_update/5]).
-type(peername_str() :: list()).
-type(logger_dst() :: file:filename() | console | unknown).
@ -78,10 +79,11 @@
id := logger:handler_id(),
level := logger:level(),
dst := logger_dst(),
filters := [{logger:filter_id(), logger:filter()}],
status := started | stopped
}).
-define(stopped_handlers, {?MODULE, stopped_handlers}).
-define(STOPPED_HANDLERS, {?MODULE, stopped_handlers}).
-define(CONF_PATH, [log]).
start_link() ->
@ -123,7 +125,7 @@ code_change(_OldVsn, State, _Extra) ->
%%--------------------------------------------------------------------
%% emqx_config_handler callbacks
%%--------------------------------------------------------------------
post_config_update(_Req, _NewConf, _OldConf, AppEnvs) ->
post_config_update(_, _Req, _NewConf, _OldConf, AppEnvs) ->
gen_server:call(?MODULE, {update_config, AppEnvs}, 5000).
%%--------------------------------------------------------------------
@ -238,19 +240,19 @@ get_log_handlers() ->
-spec(get_log_handlers(started | stopped) -> [logger_handler_info()]).
get_log_handlers(started) ->
[log_hanlder_info(Conf, started) || Conf <- logger:get_handler_config()];
[log_handler_info(Conf, started) || Conf <- logger:get_handler_config()];
get_log_handlers(stopped) ->
[log_hanlder_info(Conf, stopped) || Conf <- list_stopped_handler_config()].
[log_handler_info(Conf, stopped) || Conf <- list_stopped_handler_config()].
-spec(get_log_handler(logger:handler_id()) -> logger_handler_info()).
get_log_handler(HandlerId) ->
case logger:get_handler_config(HandlerId) of
{ok, Conf} ->
log_hanlder_info(Conf, started);
log_handler_info(Conf, started);
{error, _} ->
case read_stopped_handler_config(HandlerId) of
error -> {error, {not_found, HandlerId}};
{ok, Conf} -> log_hanlder_info(Conf, stopped)
{ok, Conf} -> log_handler_info(Conf, stopped)
end
end.
@ -305,21 +307,21 @@ set_log_level(Level) ->
%% Internal Functions
%%--------------------------------------------------------------------
log_hanlder_info(#{id := Id, level := Level, module := logger_std_h,
config := #{type := Type}}, Status) when
log_handler_info(#{id := Id, level := Level, module := logger_std_h,
filters := Filters, config := #{type := Type}}, Status) when
Type =:= standard_io;
Type =:= standard_error ->
#{id => Id, level => Level, dst => console, status => Status};
log_hanlder_info(#{id := Id, level := Level, module := logger_std_h,
config := Config = #{type := file}}, Status) ->
#{id => Id, level => Level, status => Status,
#{id => Id, level => Level, dst => console, status => Status, filters => Filters};
log_handler_info(#{id := Id, level := Level, module := logger_std_h,
filters := Filters, config := Config = #{type := file}}, Status) ->
#{id => Id, level => Level, status => Status, filters => Filters,
dst => maps:get(file, Config, atom_to_list(Id))};
log_hanlder_info(#{id := Id, level := Level, module := logger_disk_log_h,
config := #{file := Filename}}, Status) ->
#{id => Id, level => Level, dst => Filename, status => Status};
log_hanlder_info(#{id := Id, level := Level, module := _OtherModule}, Status) ->
#{id => Id, level => Level, dst => unknown, status => Status}.
log_handler_info(#{id := Id, level := Level, module := logger_disk_log_h,
filters := Filters, config := #{file := Filename}}, Status) ->
#{id => Id, level => Level, dst => Filename, status => Status, filters => Filters};
log_handler_info(#{id := Id, level := Level, filters := Filters}, Status) ->
#{id => Id, level => Level, dst => unknown, status => Status, filters => Filters}.
%% set level for all log handlers in one command
set_all_log_handlers_level(Level) ->
@ -341,29 +343,29 @@ rollback([{ID, Level} | List]) ->
rollback([]) -> ok.
save_stopped_handler_config(HandlerId, Config) ->
case persistent_term:get(?stopped_handlers, undefined) of
case persistent_term:get(?STOPPED_HANDLERS, undefined) of
undefined ->
persistent_term:put(?stopped_handlers, #{HandlerId => Config});
persistent_term:put(?STOPPED_HANDLERS, #{HandlerId => Config});
ConfList ->
persistent_term:put(?stopped_handlers, ConfList#{HandlerId => Config})
persistent_term:put(?STOPPED_HANDLERS, ConfList#{HandlerId => Config})
end.
read_stopped_handler_config(HandlerId) ->
case persistent_term:get(?stopped_handlers, undefined) of
case persistent_term:get(?STOPPED_HANDLERS, undefined) of
undefined -> error;
ConfList -> maps:find(HandlerId, ConfList)
end.
remove_stopped_handler_config(HandlerId) ->
case persistent_term:get(?stopped_handlers, undefined) of
case persistent_term:get(?STOPPED_HANDLERS, undefined) of
undefined -> ok;
ConfList ->
case maps:find(HandlerId, ConfList) of
error -> ok;
{ok, _} ->
persistent_term:put(?stopped_handlers, maps:remove(HandlerId, ConfList))
persistent_term:put(?STOPPED_HANDLERS, maps:remove(HandlerId, ConfList))
end
end.
list_stopped_handler_config() ->
case persistent_term:get(?stopped_handlers, undefined) of
case persistent_term:get(?STOPPED_HANDLERS, undefined) of
undefined -> [];
ConfList -> maps:values(ConfList)
end.

View File

@ -17,6 +17,7 @@
-module(emqx_misc).
-compile(inline).
-elvis([{elvis_style, god_modules, disable}]).
-include("types.hrl").
-include("logger.hrl").
@ -65,21 +66,13 @@ maybe_parse_ip(Host) ->
end.
%% @doc Add `ipv6_probe' socket option if it's supported.
%% gen_tcp:ipv6_probe/0 is only defined in EMQ's OTP forks, where it returns true.
ipv6_probe(Opts) ->
case persistent_term:get({?MODULE, ipv6_probe_supported}, unknown) of
unknown ->
%% e.g. 23.2.7.1-emqx-2-x86_64-unknown-linux-gnu-64
OtpVsn = emqx_vm:get_otp_version(),
Bool = (match =:= re:run(OtpVsn, "emqx", [{capture, none}])),
_ = persistent_term:put({?MODULE, ipv6_probe_supported}, Bool),
ipv6_probe(Bool, Opts);
Bool ->
ipv6_probe(Bool, Opts)
case erlang:function_exported(gen_tcp, ipv6_probe, 0) of
true -> [{ipv6_probe, true} | Opts];
false -> Opts
end.
ipv6_probe(false, Opts) -> Opts;
ipv6_probe(true, Opts) -> [{ipv6_probe, true} | Opts].
%% @doc Merge options
-spec(merge_opts(Opts, Opts) -> Opts when Opts :: proplists:proplist()).
merge_opts(Defaults, Options) ->
@ -100,9 +93,9 @@ maybe_apply(Fun, Arg) when is_function(Fun) ->
-spec(compose(list(F)) -> G
when F :: fun((any()) -> any()),
G :: fun((any()) -> any())).
compose([F|More]) -> compose(F, More).
compose([F | More]) -> compose(F, More).
-spec(compose(F, G|[Gs]) -> C
-spec(compose(F, G | [Gs]) -> C
when F :: fun((X1) -> X2),
G :: fun((X2) -> X3),
Gs :: [fun((Xn) -> Xn1)],
@ -110,19 +103,19 @@ compose([F|More]) -> compose(F, More).
X3 :: any(), Xn :: any(), Xn1 :: any(), Xm :: any()).
compose(F, G) when is_function(G) -> fun(X) -> G(F(X)) end;
compose(F, [G]) -> compose(F, G);
compose(F, [G|More]) -> compose(compose(F, G), More).
compose(F, [G | More]) -> compose(compose(F, G), More).
%% @doc RunFold
run_fold([], Acc, _State) ->
Acc;
run_fold([Fun|More], Acc, State) ->
run_fold([Fun | More], Acc, State) ->
run_fold(More, Fun(Acc, State), State).
%% @doc Pipeline
pipeline([], Input, State) ->
{ok, Input, State};
pipeline([Fun|More], Input, State) ->
pipeline([Fun | More], Input, State) ->
case apply_fun(Fun, Input, State) of
ok -> pipeline(More, Input, State);
{ok, NState} ->
@ -171,7 +164,7 @@ drain_deliver(0, Acc) ->
drain_deliver(N, Acc) ->
receive
Deliver = {deliver, _Topic, _Msg} ->
drain_deliver(N-1, [Deliver|Acc])
drain_deliver(N-1, [Deliver | Acc])
after 0 ->
lists:reverse(Acc)
end.
@ -186,7 +179,7 @@ drain_down(0, Acc) ->
drain_down(Cnt, Acc) ->
receive
{'DOWN', _MRef, process, Pid, _Reason} ->
drain_down(Cnt-1, [Pid|Acc])
drain_down(Cnt-1, [Pid | Acc])
after 0 ->
lists:reverse(Acc)
end.
@ -213,7 +206,7 @@ check_oom(Pid, #{max_message_queue_len := MaxQLen,
end.
do_check_oom([]) -> ok;
do_check_oom([{Val, Max, Reason}|Rest]) ->
do_check_oom([{Val, Max, Reason} | Rest]) ->
case is_integer(Max) andalso (0 < Max) andalso (Max < Val) of
true -> {shutdown, Reason};
false -> do_check_oom(Rest)
@ -256,8 +249,8 @@ proc_stats(Pid) ->
reductions,
memory]) of
undefined -> [];
[{message_queue_len, Len}|ProcStats] ->
[{mailbox_len, Len}|ProcStats]
[{message_queue_len, Len} | ProcStats] ->
[{mailbox_len, Len} | ProcStats]
end.
rand_seed() ->
@ -277,9 +270,9 @@ index_of(E, L) ->
index_of(_E, _I, []) ->
error(badarg);
index_of(E, I, [E|_]) ->
index_of(E, I, [E | _]) ->
I;
index_of(E, I, [_|L]) ->
index_of(E, I, [_ | L]) ->
index_of(E, I+1, L).
-spec(bin2hexstr_A_F(binary()) -> binary()).
@ -339,6 +332,12 @@ pad(L, Count) ->
-include_lib("eunit/include/eunit.hrl").
ipv6_probe_test() ->
?assertEqual([{ipv6_probe, true}], ipv6_probe([])).
try gen_tcp:ipv6_probe() of
true ->
?assertEqual([{ipv6_probe, true}], ipv6_probe([]))
catch
_ : _ ->
ok
end.
-endif.

View File

@ -17,6 +17,7 @@
-module(emqx_mountpoint).
-include("emqx.hrl").
-include("emqx_placeholder.hrl").
-include("types.hrl").
-export([ mount/2
@ -66,14 +67,17 @@ unmount(MountPoint, Msg = #message{topic = Topic}) ->
-spec(replvar(maybe(mountpoint()), map()) -> maybe(mountpoint())).
replvar(undefined, _Vars) ->
undefined;
replvar(MountPoint, #{clientid := ClientId, username := Username}) ->
lists:foldl(fun feed_var/2, MountPoint,
[{<<"%c">>, ClientId}, {<<"%u">>, Username}]).
replvar(MountPoint, Vars) ->
ClientID = maps:get(clientid, Vars, undefined),
UserName = maps:get(username, Vars, undefined),
EndpointName = maps:get(endpoint_name, Vars, undefined),
List = [ {?PH_CLIENTID, ClientID}
, {?PH_USERNAME, UserName}
, {?PH_ENDPOINT_NAME, EndpointName}
],
lists:foldl(fun feed_var/2, MountPoint, List).
feed_var({<<"%c">>, ClientId}, MountPoint) ->
emqx_topic:feed_var(<<"%c">>, ClientId, MountPoint);
feed_var({<<"%u">>, undefined}, MountPoint) ->
feed_var({_PlaceHolder, undefined}, MountPoint) ->
MountPoint;
feed_var({<<"%u">>, Username}, MountPoint) ->
emqx_topic:feed_var(<<"%u">>, Username, MountPoint).
feed_var({PlaceHolder, Value}, MountPoint) ->
emqx_topic:feed_var(PlaceHolder, Value, MountPoint).

View File

@ -96,12 +96,26 @@ handle_info({timeout, _Timer, check}, State) ->
_ = case emqx_vm:cpu_util() of %% TODO: should be improved?
0 -> ok;
Busy when Busy >= CPUHighWatermark ->
emqx_alarm:activate(high_cpu_usage, #{usage => io_lib:format("~p%", [Busy]),
Usage = io_lib:format("~p%", [Busy]),
Message = [Usage, " cpu usage"],
emqx_alarm:activate(high_cpu_usage,
#{
usage => Usage,
high_watermark => CPUHighWatermark,
low_watermark => CPULowWatermark}),
low_watermark => CPULowWatermark
},
Message),
start_check_timer();
Busy when Busy =< CPULowWatermark ->
emqx_alarm:deactivate(high_cpu_usage),
Usage = io_lib:format("~p%", [Busy]),
Message = [Usage, " cpu usage"],
emqx_alarm:deactivate(high_cpu_usage,
#{
usage => Usage,
high_watermark => CPUHighWatermark,
low_watermark => CPULowWatermark
},
Message),
start_check_timer();
_Busy ->
start_check_timer()

View File

@ -22,7 +22,7 @@
-include("logger.hrl").
-type(hash_type() :: plain | md5 | sha | sha256 | pbkdf2 | bcrypt).
-type(hash_type() :: plain | md5 | sha | sha256 | sha512 | pbkdf2 | bcrypt).
-export_type([hash_type/0]).
@ -95,4 +95,3 @@ hexstring(<<X:256/big-unsigned-integer>>) ->
iolist_to_binary(io_lib:format("~64.16.0b", [X]));
hexstring(<<X:512/big-unsigned-integer>>) ->
iolist_to_binary(io_lib:format("~128.16.0b", [X])).

View File

@ -179,6 +179,10 @@ timestamp_from_conninfo(ConnInfo) ->
end.
lookup(ClientID) when is_binary(ClientID) ->
case is_store_enabled() of
false ->
none;
true ->
case lookup_session_store(ClientID) of
none -> none;
{value, #session_store{session = S} = SS} ->
@ -186,6 +190,7 @@ lookup(ClientID) when is_binary(ClientID) ->
expired -> {expired, S};
persistent -> {persistent, S}
end
end
end.
-spec discard_if_present(binary()) -> 'ok'.

View File

@ -32,7 +32,7 @@
]).
-ifdef(TEST).
-export([worker/0]).
-export([worker/0, flush_async_tasks/0]).
-endif.
%% gen_server callbacks
@ -139,3 +139,15 @@ run({F, A}) when is_function(F), is_list(A) ->
run(Fun) when is_function(Fun) ->
Fun().
-ifdef(TEST).
%% This helper function creates a large enough number of async tasks
%% to force flush the pool workers.
%% The number of tasks should be large enough to ensure all workers have
%% the chance to work on at least one of the tasks.
flush_async_tasks() ->
Ref = make_ref(),
Self = self(),
L = lists:seq(1, 997),
lists:foreach(fun(I) -> emqx_pool:async_submit(fun() -> Self ! {done, Ref, I} end, []) end, L),
lists:foreach(fun(I) -> receive {done, Ref, I} -> ok end end, L).
-endif.
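A hedged sketch of how a test suite might use the new helper (only available in TEST builds); the test name and do_something/0 are hypothetical:

t_some_async_side_effect(_Config) ->
    ok = emqx_pool:async_submit(fun() -> do_something() end, []),
    %% block until every pool worker has drained its queue,
    %% so the side effect above is observable before asserting
    ok = emqx_pool:flush_async_tasks().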

View File

@ -0,0 +1,86 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_release).
-export([ edition/0
, put_edition/0
, put_edition/1
, description/0
, version/0
]).
-include("emqx_release.hrl").
%% @doc Return EMQ X description.
description() ->
case os:getenv("EMQX_DESCRIPTION") of
false -> "EMQ X Community Edition";
"" -> "EMQ X Community Edition";
Str -> string:strip(Str, both, $\n)
end.
%% @doc Return EMQ X edition info.
%% Read info from persistent_term at runtime.
%% Or meck this function to run tests for another edition.
-spec edition() -> ce | ee | edge.
edition() ->
try persistent_term:get(emqx_edition)
catch error : badarg -> get_edition() end.
%% @private initiate EMQ X edition info in persistent_term.
put_edition() ->
ok = put_edition(get_edition()).
%% @hidden This function is mostly for testing.
%% Switch to another edition at runtime to run edition-specific tests.
-spec put_edition(ce | ee | edge) -> ok.
put_edition(Which) ->
persistent_term:put(emqx_edition, Which),
ok.
-spec get_edition() -> ce | ee | edge.
get_edition() ->
edition(description()).
edition(Desc) ->
case re:run(Desc, "enterprise", [caseless]) of
{match, _} ->
ee;
_ ->
case re:run(Desc, "edge", [caseless]) of
{match, _} -> edge;
_ -> ce
end
end.
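In other words, the edition is inferred from the description string; a sketch of the mapping (the inputs below are illustrative):

%% edition("EMQ X Enterprise Edition") -> ee
%% edition("EMQ X Edge Edition")       -> edge
%% edition("EMQ X Community Edition")  -> ce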
%% @doc Return the release version.
version() ->
case lists:keyfind(emqx_vsn, 1, ?MODULE:module_info(compile)) of
false -> %% For TEST build or dependency build.
?EMQX_RELEASE;
{_, Vsn} -> %% For emqx release build
VsnStr = ?EMQX_RELEASE,
case string:str(Vsn, VsnStr) of
1 -> ok;
_ ->
erlang:error(#{ reason => version_mismatch
, source => VsnStr
, built_for => Vsn
})
end,
Vsn
end.

View File

@ -51,6 +51,7 @@
-export([ validate_heap_size/1
, parse_user_lookup_fun/1
, validate_alarm_actions/1
]).
% workaround: prevent being recognized as unused functions
@ -74,6 +75,8 @@
-export([server_ssl_opts_schema/2, client_ssl_opts_schema/1, ciphers_schema/1, default_ciphers/1]).
-export([sc/2, map/2]).
-elvis([{elvis_style, god_modules, disable}]).
namespace() -> undefined.
roots() ->
@ -889,17 +892,36 @@ fields("sysmon_os") ->
fields("alarm") ->
[ {"actions",
sc(hoconsc:array(atom()),
#{ default => [log, publish]
#{ default => [log, publish],
validator => fun ?MODULE:validate_alarm_actions/1,
example => [log, publish],
desc =>
"""The actions triggered when the alarm is activated.<\br>
Currently supports two actions, 'log' and 'publish'.
'log' is to write the alarm to log (console or file).
'publish' is to publish the alarm as an MQTT message to the system topics:
<code>$SYS/brokers/emqx@xx.xx.xx.x/alarms/activate</code> and
<code>$SYS/brokers/emqx@xx.xx.xx.x/alarms/deactivate</code>"""
})
}
, {"size_limit",
sc(integer(),
#{ default => 1000
sc(range(1, 3000),
#{ default => 1000,
example => 1000,
desc =>
"""The maximum total number of deactivated alarms to keep as history.<br>
When this limit is exceeded, the oldest deactivated alarms are deleted to cap the total number.
"""
})
}
, {"validity_period",
sc(duration(),
#{ default => "24h"
#{ default => "24h",
example => "24h",
desc =>
"""Retention time of deactivated alarms. Alarms are not deleted immediately
when deactivated, but after the retention time.
"""
})
}
].
@ -1141,7 +1163,7 @@ client_ssl_opts_schema(Defaults) ->
common_ssl_opts_schema(Defaults) ++
[ { "server_name_indication",
sc(hoconsc:union([disable, string()]),
#{ default => disable
#{ nullable => true
, desc =>
"""Specify the host name to be used in TLS Server Name Indication extension.<br>
For instance, when connecting to \"server.example.net\", the genuine server
@ -1163,7 +1185,8 @@ default_tls_vsns(dtls_all_available) ->
default_tls_vsns(tls_all_available) ->
emqx_tls_lib:default_versions().
-spec ciphers_schema(quic | dtls_all_available | tls_all_available | undefined) -> hocon_schema:field_schema().
-spec ciphers_schema(quic | dtls_all_available | tls_all_available | undefined)
-> hocon_schema:field_schema().
ciphers_schema(Default) ->
sc(hoconsc:array(string()),
#{ default => default_ciphers(Default)
@ -1303,7 +1326,7 @@ to_bar_separated_list(Str) ->
{ok, string:tokens(Str, "| ")}.
to_ip_port(Str) ->
case string:tokens(Str, ":") of
case string:tokens(Str, ": ") of
[Ip, Port] ->
PortVal = list_to_integer(Port),
case inet:parse_address(Ip) of
@ -1345,8 +1368,16 @@ validate_heap_size(Siz) ->
true -> error(io_lib:format("force_shutdown_policy: heap-size ~ts is too large", [Siz]));
false -> ok
end.
validate_alarm_actions(Actions) ->
UnSupported = lists:filter(fun(Action) -> Action =/= log andalso Action =/= publish end, Actions),
case UnSupported of
[] -> ok;
Error -> {error, Error}
end.
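A sketch of what the validator accepts and rejects:

%% validate_alarm_actions([log, publish]) -> ok
%% validate_alarm_actions([log, email])   -> {error, [email]}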
parse_user_lookup_fun(StrConf) ->
[ModStr, FunStr] = string:tokens(str(StrConf), ":"),
[ModStr, FunStr] = string:tokens(str(StrConf), ": "),
Mod = list_to_atom(ModStr),
Fun = list_to_atom(FunStr),
{fun Mod:Fun/3, undefined}.
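A hedged example of the parsed form; the module and function names are hypothetical:

%% parse_user_lookup_fun("my_psk_mod:lookup") -> {fun my_psk_mod:lookup/3, undefined}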

View File

@ -58,6 +58,7 @@
-export([ info/1
, info/2
, is_session/1
, stats/1
]).
@ -202,6 +203,9 @@ init(Opts) ->
%% Info, Stats
%%--------------------------------------------------------------------
is_session(#session{}) -> true;
is_session(_) -> false.
%% @doc Get infos of the session.
-spec(info(session()) -> emqx_types:infos()).
info(Session) ->

View File

@ -215,7 +215,7 @@ handle_call({pending, SessionID, MarkerIDs}, _From, State) ->
Res = emqx_persistent_session:pending_messages_in_db(SessionID, MarkerIDs),
{reply, Res, State};
handle_call(Req, _From, State) ->
?LOG(error, "Unexpected call: ~p", [Req]),
?SLOG(error, #{msg => "unexpected_call", req => Req}),
{reply, ignored, State}.
handle_cast({delete_routes, SessionID, Subscriptions}, State) ->
@ -233,11 +233,11 @@ handle_cast({resume_end, SessionID, Pid}, State) ->
_ = emqx_session_router_worker_sup:abort_worker(Pid),
{noreply, State#{ pmon => Pmon }};
handle_cast(Msg, State) ->
?LOG(error, "Unexpected cast: ~p", [Msg]),
?SLOG(error, #{msg => "unexpected_cast", cast => Msg}),
{noreply, State}.
handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]),
?SLOG(error, #{msg => "unexpected_info", info => Info}),
{noreply, State}.
terminate(_Reason, #{pool := Pool, id := Id}) ->

View File

@ -292,6 +292,7 @@ subscribers(Group, Topic) ->
%%--------------------------------------------------------------------
init([]) ->
ok = mria:wait_for_tables([?TAB]),
{ok, _} = mnesia:subscribe({table, ?TAB, simple}),
{atomic, PMon} = mria:transaction(?SHARED_SUB_SHARD, fun init_monitors/0),
ok = emqx_tables:new(?SHARED_SUBS, [protected, bag]),

View File

@ -21,6 +21,7 @@
-include("emqx.hrl").
-include("logger.hrl").
-include("types.hrl").
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
%% APIs
@ -66,8 +67,10 @@
%% Connection stats
-define(CONNECTION_STATS,
['connections.count', %% Count of Concurrent Connections
'connections.max' %% Maximum Number of Concurrent Connections
[ 'connections.count' %% Count of Concurrent Connections
, 'connections.max' %% Maximum Number of Concurrent Connections
, 'live_connections.count' %% Count of connected clients
, 'live_connections.max' %% Maximum number of connected clients
]).
%% Channel stats
@ -215,6 +218,11 @@ handle_cast({setstat, Stat, MaxStat, Val}, State) ->
ets:insert(?TAB, {MaxStat, Val})
end,
safe_update_element(Stat, Val),
?tp(emqx_stats_setstat,
#{ count_stat => Stat
, max_stat => MaxStat
, value => Val
}),
{noreply, State};
handle_cast({update_interval, Update = #update{name = Name}},
@ -225,7 +233,7 @@ handle_cast({update_interval, Update = #update{name = Name}},
name => Name
}),
State;
false -> State#state{updates = [Update|Updates]}
false -> State#state{updates = [Update | Updates]}
end,
{noreply, NState};

View File

@ -170,9 +170,11 @@ code_change(_OldVsn, State, _Extra) ->
%%--------------------------------------------------------------------
handle_partition_event({partition, {occurred, Node}}) ->
emqx_alarm:activate(partition, #{occurred => Node});
handle_partition_event({partition, {healed, _Node}}) ->
emqx_alarm:deactivate(partition).
Message = io_lib:format("Partition occurs at node ~ts", [Node]),
emqx_alarm:activate(partition, #{occurred => Node}, Message);
handle_partition_event({partition, {healed, Node}}) ->
Message = io_lib:format("Partition healed at node ~ts", [Node]),
emqx_alarm:deactivate(partition, no_details, Message).
suppress(Key, SuccFun, State = #{events := Events}) ->
case lists:member(Key, Events) of

View File

@ -0,0 +1,486 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_trace).
-behaviour(gen_server).
-include_lib("emqx/include/emqx.hrl").
-include_lib("emqx/include/logger.hrl").
%% Mnesia bootstrap
-export([mnesia/1]).
-boot_mnesia({mnesia, [boot]}).
-export([ publish/1
, subscribe/3
, unsubscribe/2
]).
-export([ start_link/0
, list/0
, list/1
, get_trace_filename/1
, create/1
, delete/1
, clear/0
, update/2
]).
-export([ format/1
, zip_dir/0
, filename/2
, trace_dir/0
, trace_file/1
, delete_files_after_send/2
]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
-define(TRACE, ?MODULE).
-define(MAX_SIZE, 30).
-ifdef(TEST).
-export([log_file/2]).
-endif.
-export_type([ip_address/0]).
-type ip_address() :: string().
-record(?TRACE,
{ name :: binary() | undefined | '_'
, type :: clientid | topic | ip_address | undefined | '_'
, filter :: emqx_types:topic() | emqx_types:clientid() | ip_address() | undefined | '_'
, enable = true :: boolean() | '_'
, start_at :: integer() | undefined | '_'
, end_at :: integer() | undefined | '_'
}).
mnesia(boot) ->
ok = mria:create_table(?TRACE, [
{type, set},
{rlog_shard, ?COMMON_SHARD},
{storage, disc_copies},
{record_name, ?TRACE},
{attributes, record_info(fields, ?TRACE)}]).
publish(#message{topic = <<"$SYS/", _/binary>>}) -> ignore;
publish(#message{from = From, topic = Topic, payload = Payload}) when
is_binary(From); is_atom(From) ->
emqx_logger:info(
#{topic => Topic, mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY}},
"PUBLISH to ~s: ~0p",
[Topic, Payload]
).
subscribe(<<"$SYS/", _/binary>>, _SubId, _SubOpts) -> ignore;
subscribe(Topic, SubId, SubOpts) ->
emqx_logger:info(
#{topic => Topic, mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY}},
"~ts SUBSCRIBE ~ts: Options: ~0p",
[SubId, Topic, SubOpts]
).
unsubscribe(<<"$SYS/", _/binary>>, _SubOpts) -> ignore;
unsubscribe(Topic, SubOpts) ->
emqx_logger:info(
#{topic => Topic, mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY}},
"~ts UNSUBSCRIBE ~ts: Options: ~0p",
[maps:get(subid, SubOpts, ""), Topic, SubOpts]
).
-spec(start_link() -> emqx_types:startlink_ret()).
start_link() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
-spec list() -> [tuple()].
list() ->
ets:match_object(?TRACE, #?TRACE{_ = '_'}).
-spec list(boolean()) -> [tuple()].
list(Enable) ->
ets:match_object(?TRACE, #?TRACE{enable = Enable, _ = '_'}).
-spec create([{Key :: binary(), Value :: binary()}] | #{atom() => binary()}) ->
ok | {error, {duplicate_condition, iodata()} | {already_existed, iodata()} | iodata()}.
create(Trace) ->
case mnesia:table_info(?TRACE, size) < ?MAX_SIZE of
true ->
case to_trace(Trace) of
{ok, TraceRec} -> insert_new_trace(TraceRec);
{error, Reason} -> {error, Reason}
end;
false ->
{error, "The number of traces created has reache the maximum"
" please delete the useless ones first"}
end.
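As a hedged illustration of the accepted input shape (all values below are made up), a trace could be created with a map such as:

ok = emqx_trace:create(#{ name     => <<"debug_c1">>
                        , type     => <<"clientid">>
                        , clientid => <<"c1">>
                        , end_at   => <<"2021-12-31T00:00:00+08:00">>
                        }).

When omitted, start_at defaults to the current time and end_at to ten minutes later, as fill_default/1 below shows.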
-spec delete(Name :: binary()) -> ok | {error, not_found}.
delete(Name) ->
Tran = fun() ->
case mnesia:read(?TRACE, Name) of
[_] -> mnesia:delete(?TRACE, Name, write);
[] -> mnesia:abort(not_found)
end
end,
transaction(Tran).
-spec clear() -> ok | {error, Reason :: term()}.
clear() ->
case mria:clear_table(?TRACE) of
{atomic, ok} -> ok;
{aborted, Reason} -> {error, Reason}
end.
-spec update(Name :: binary(), Enable :: boolean()) ->
ok | {error, not_found | finished}.
update(Name, Enable) ->
Tran = fun() ->
case mnesia:read(?TRACE, Name) of
[] -> mnesia:abort(not_found);
[#?TRACE{enable = Enable}] -> ok;
[Rec] ->
case erlang:system_time(second) >= Rec#?TRACE.end_at of
false -> mnesia:write(?TRACE, Rec#?TRACE{enable = Enable}, write);
true -> mnesia:abort(finished)
end
end
end,
transaction(Tran).
-spec get_trace_filename(Name :: binary()) ->
{ok, FileName :: string()} | {error, not_found}.
get_trace_filename(Name) ->
Tran = fun() ->
case mnesia:read(?TRACE, Name, read) of
[] -> mnesia:abort(not_found);
[#?TRACE{start_at = Start}] -> {ok, filename(Name, Start)}
end end,
transaction(Tran).
-spec trace_file(File :: list()) ->
{ok, Node :: list(), Binary :: binary()} |
{error, Node :: list(), Reason :: term()}.
trace_file(File) ->
FileName = filename:join(trace_dir(), File),
Node = atom_to_list(node()),
case file:read_file(FileName) of
{ok, Bin} -> {ok, Node, Bin};
{error, Reason} -> {error, Node, Reason}
end.
delete_files_after_send(TraceLog, Zips) ->
gen_server:cast(?MODULE, {delete_tag, self(), [TraceLog | Zips]}).
-spec format(list(#?TRACE{})) -> list(map()).
format(Traces) ->
Fields = record_info(fields, ?TRACE),
lists:map(fun(Trace0 = #?TRACE{}) ->
[_ | Values] = tuple_to_list(Trace0),
maps:from_list(lists:zip(Fields, Values))
end, Traces).
init([]) ->
erlang:process_flag(trap_exit, true),
OriginLogLevel = emqx_logger:get_primary_log_level(),
ok = filelib:ensure_dir(trace_dir()),
ok = filelib:ensure_dir(zip_dir()),
{ok, _} = mnesia:subscribe({table, ?TRACE, simple}),
Traces = get_enable_trace(),
ok = update_log_primary_level(Traces, OriginLogLevel),
TRef = update_trace(Traces),
{ok, #{timer => TRef, monitors => #{}, primary_log_level => OriginLogLevel}}.
handle_call(Req, _From, State) ->
?LOG(error, "Unexpected call: ~p", [Req]),
{reply, ok, State}.
handle_cast({delete_tag, Pid, Files}, State = #{monitors := Monitors}) ->
erlang:monitor(process, Pid),
{noreply, State#{monitors => Monitors#{Pid => Files}}};
handle_cast(Msg, State) ->
?LOG(error, "Unexpected cast: ~p", [Msg]),
{noreply, State}.
handle_info({'DOWN', _Ref, process, Pid, _Reason}, State = #{monitors := Monitors}) ->
case maps:take(Pid, Monitors) of
error -> {noreply, State};
{Files, NewMonitors} ->
lists:foreach(fun file:delete/1, Files),
{noreply, State#{monitors => NewMonitors}}
end;
handle_info({timeout, TRef, update_trace},
#{timer := TRef, primary_log_level := OriginLogLevel} = State) ->
Traces = get_enable_trace(),
ok = update_log_primary_level(Traces, OriginLogLevel),
NextTRef = update_trace(Traces),
{noreply, State#{timer => NextTRef}};
handle_info({mnesia_table_event, _Events}, State = #{timer := TRef}) ->
emqx_misc:cancel_timer(TRef),
handle_info({timeout, TRef, update_trace}, State);
handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]),
{noreply, State}.
terminate(_Reason, #{timer := TRef, primary_log_level := OriginLogLevel}) ->
ok = set_log_primary_level(OriginLogLevel),
_ = mnesia:unsubscribe({table, ?TRACE, simple}),
emqx_misc:cancel_timer(TRef),
stop_all_trace_handler(),
_ = file:del_dir_r(zip_dir()),
ok.
code_change(_, State, _Extra) ->
{ok, State}.
insert_new_trace(Trace) ->
Tran = fun() ->
case mnesia:read(?TRACE, Trace#?TRACE.name) of
[] ->
#?TRACE{start_at = StartAt, type = Type, filter = Filter} = Trace,
Match = #?TRACE{_ = '_', start_at = StartAt, type = Type, filter = Filter},
case mnesia:match_object(?TRACE, Match, read) of
[] -> mnesia:write(?TRACE, Trace, write);
[#?TRACE{name = Name}] -> mnesia:abort({duplicate_condition, Name})
end;
[#?TRACE{name = Name}] -> mnesia:abort({already_existed, Name})
end
end,
transaction(Tran).
update_trace(Traces) ->
Now = erlang:system_time(second),
{_Waiting, Running, Finished} = classify_by_time(Traces, Now),
disable_finished(Finished),
Started = emqx_trace_handler:running(),
{NeedRunning, AllStarted} = start_trace(Running, Started),
NeedStop = AllStarted -- NeedRunning,
ok = stop_trace(NeedStop, Started),
clean_stale_trace_files(),
NextTime = find_closest_time(Traces, Now),
emqx_misc:start_timer(NextTime, update_trace).
stop_all_trace_handler() ->
lists:foreach(fun(#{id := Id}) -> emqx_trace_handler:uninstall(Id) end,
emqx_trace_handler:running()).
get_enable_trace() ->
{atomic, Traces} =
mria:transaction(?COMMON_SHARD, fun() ->
mnesia:match_object(?TRACE, #?TRACE{enable = true, _ = '_'}, read)
end),
Traces.
find_closest_time(Traces, Now) ->
Sec =
lists:foldl(
fun(#?TRACE{start_at = Start, end_at = End}, Closest)
when Start >= Now andalso Now < End -> %% running
min(End - Now, Closest);
(#?TRACE{start_at = Start}, Closest) when Start < Now -> %% waiting
min(Now - Start, Closest);
(_, Closest) -> Closest %% finished
end, 60 * 15, Traces),
timer:seconds(Sec).
disable_finished([]) -> ok;
disable_finished(Traces) ->
transaction(fun() ->
lists:map(fun(#?TRACE{name = Name}) ->
case mnesia:read(?TRACE, Name, write) of
[] -> ok;
[Trace] -> mnesia:write(?TRACE, Trace#?TRACE{enable = false}, write)
end end, Traces)
end).
start_trace(Traces, Started0) ->
Started = lists:map(fun(#{name := Name}) -> Name end, Started0),
lists:foldl(fun(#?TRACE{name = Name} = Trace, {Running, StartedAcc}) ->
case lists:member(Name, StartedAcc) of
true ->
{[Name | Running], StartedAcc};
false ->
case start_trace(Trace) of
ok -> {[Name | Running], [Name | StartedAcc]};
{error, _Reason} -> {[Name | Running], StartedAcc}
end
end
end, {[], Started}, Traces).
start_trace(Trace) ->
#?TRACE{name = Name
, type = Type
, filter = Filter
, start_at = Start
} = Trace,
Who = #{name => Name, type => Type, filter => Filter},
emqx_trace_handler:install(Who, debug, log_file(Name, Start)).
stop_trace(Finished, Started) ->
lists:foreach(fun(#{name := Name, type := Type}) ->
case lists:member(Name, Finished) of
true -> emqx_trace_handler:uninstall(Type, Name);
false -> ok
end
end, Started).
clean_stale_trace_files() ->
TraceDir = trace_dir(),
case file:list_dir(TraceDir) of
{ok, AllFiles} when AllFiles =/= ["zip"] ->
FileFun = fun(#?TRACE{name = Name, start_at = StartAt}) -> filename(Name, StartAt) end,
KeepFiles = lists:map(FileFun, list()),
case AllFiles -- ["zip" | KeepFiles] of
[] -> ok;
DeleteFiles ->
DelFun = fun(F) -> file:delete(filename:join(TraceDir, F)) end,
lists:foreach(DelFun, DeleteFiles)
end;
_ -> ok
end.
classify_by_time(Traces, Now) ->
classify_by_time(Traces, Now, [], [], []).
classify_by_time([], _Now, Wait, Run, Finish) -> {Wait, Run, Finish};
classify_by_time([Trace = #?TRACE{start_at = Start} | Traces],
Now, Wait, Run, Finish) when Start > Now ->
classify_by_time(Traces, Now, [Trace | Wait], Run, Finish);
classify_by_time([Trace = #?TRACE{end_at = End} | Traces],
Now, Wait, Run, Finish) when End =< Now ->
classify_by_time(Traces, Now, Wait, Run, [Trace | Finish]);
classify_by_time([Trace | Traces], Now, Wait, Run, Finish) ->
classify_by_time(Traces, Now, Wait, [Trace | Run], Finish).
to_trace(TraceParam) ->
case to_trace(ensure_proplists(TraceParam), #?TRACE{}) of
{error, Reason} -> {error, Reason};
{ok, #?TRACE{name = undefined}} ->
{error, "name required"};
{ok, #?TRACE{type = undefined}} ->
{error, "type=[topic,clientid,ip_address] required"};
{ok, #?TRACE{filter = undefined}} ->
{error, "topic/clientid/ip_address filter required"};
{ok, TraceRec0} ->
case fill_default(TraceRec0) of
#?TRACE{start_at = Start, end_at = End} when End =< Start ->
{error, "failed by start_at >= end_at"};
TraceRec -> {ok, TraceRec}
end
end.
ensure_proplists(#{} = Trace) -> maps:to_list(Trace);
ensure_proplists(Trace) when is_list(Trace) ->
lists:foldl(
fun({K, V}, Acc) when is_binary(K) -> [{binary_to_existing_atom(K), V} | Acc];
({K, V}, Acc) when is_atom(K) -> [{K, V} | Acc];
(_, Acc) -> Acc
end, [], Trace).
fill_default(Trace = #?TRACE{start_at = undefined}) ->
fill_default(Trace#?TRACE{start_at = erlang:system_time(second)});
fill_default(Trace = #?TRACE{end_at = undefined, start_at = StartAt}) ->
fill_default(Trace#?TRACE{end_at = StartAt + 10 * 60});
fill_default(Trace) -> Trace.
to_trace([], Rec) -> {ok, Rec};
to_trace([{name, Name} | Trace], Rec) ->
case io_lib:printable_unicode_list(unicode:characters_to_list(Name, utf8)) of
true ->
case binary:match(Name, [<<"/">>], []) of
nomatch -> to_trace(Trace, Rec#?TRACE{name = Name});
_ -> {error, "name cannot contain /"}
end;
false -> {error, "name must printable unicode"}
end;
to_trace([{type, Type} | Trace], Rec) ->
case lists:member(Type, [<<"clientid">>, <<"topic">>, <<"ip_address">>]) of
true -> to_trace(Trace, Rec#?TRACE{type = binary_to_existing_atom(Type)});
false -> {error, "incorrect type: only support clientid/topic/ip_address"}
end;
to_trace([{topic, Topic} | Trace], Rec) ->
case validate_topic(Topic) of
ok -> to_trace(Trace, Rec#?TRACE{filter = Topic});
{error, Reason} -> {error, Reason}
end;
to_trace([{clientid, ClientId} | Trace], Rec) ->
to_trace(Trace, Rec#?TRACE{filter = ClientId});
to_trace([{ip_address, IP} | Trace], Rec) ->
case inet:parse_address(binary_to_list(IP)) of
{ok, _} -> to_trace(Trace, Rec#?TRACE{filter = binary_to_list(IP)});
{error, Reason} -> {error, lists:flatten(io_lib:format("ip address: ~p", [Reason]))}
end;
to_trace([{start_at, StartAt} | Trace], Rec) ->
case to_system_second(StartAt) of
{ok, Sec} -> to_trace(Trace, Rec#?TRACE{start_at = Sec});
{error, Reason} -> {error, Reason}
end;
to_trace([{end_at, EndAt} | Trace], Rec) ->
Now = erlang:system_time(second),
case to_system_second(EndAt) of
{ok, Sec} when Sec > Now ->
to_trace(Trace, Rec#?TRACE{end_at = Sec});
{ok, _Sec} ->
{error, "end_at time has already passed"};
{error, Reason} ->
{error, Reason}
end;
to_trace([Unknown | _Trace], _Rec) -> {error, io_lib:format("unknown field: ~p", [Unknown])}.
validate_topic(TopicName) ->
try emqx_topic:validate(filter, TopicName) of
true -> ok
catch
error:Error ->
{error, io_lib:format("topic: ~s invalid by ~p", [TopicName, Error])}
end.
to_system_second(At) ->
try
Sec = calendar:rfc3339_to_system_time(binary_to_list(At), [{unit, second}]),
{ok, Sec}
catch error: {badmatch, _} ->
{error, ["The rfc3339 specification not satisfied: ", At]}
end.
zip_dir() ->
trace_dir() ++ "zip/".
trace_dir() ->
filename:join(emqx:data_dir(), "trace") ++ "/".
log_file(Name, Start) ->
filename:join(trace_dir(), filename(Name, Start)).
filename(Name, Start) ->
[Time, _] = string:split(calendar:system_time_to_rfc3339(Start), "T", leading),
lists:flatten(["trace_", binary_to_list(Name), "_", Time, ".log"]).
transaction(Tran) ->
case mria:transaction(?COMMON_SHARD, Tran) of
{atomic, Res} -> Res;
{aborted, Reason} -> {error, Reason}
end.
update_log_primary_level([], OriginLevel) -> set_log_primary_level(OriginLevel);
update_log_primary_level(_, _) -> set_log_primary_level(debug).
set_log_primary_level(NewLevel) ->
case NewLevel =/= emqx_logger:get_primary_log_level() of
true -> emqx_logger:set_primary_log_level(NewLevel);
false -> ok
end.

View File

@ -0,0 +1,210 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_trace_api).
-include_lib("emqx/include/logger.hrl").
-include_lib("kernel/include/file.hrl").
%% API
-export([ list_trace/2
, create_trace/2
, update_trace/2
, delete_trace/2
, clear_traces/2
, download_zip_log/2
, stream_log_file/2
]).
-export([ read_trace_file/3
, get_trace_size/0
]).
-define(TO_BIN(_B_), iolist_to_binary(_B_)).
-define(NOT_FOUND(N), {error, 'NOT_FOUND', ?TO_BIN([N, " NOT FOUND"])}).
list_trace(_, _Params) ->
case emqx_trace:list() of
[] -> {ok, []};
List0 ->
List = lists:sort(fun(#{start_at := A}, #{start_at := B}) -> A > B end, List0),
Nodes = mria_mnesia:running_nodes(),
TraceSize = cluster_call(?MODULE, get_trace_size, [], 30000),
AllFileSize = lists:foldl(fun(F, Acc) -> maps:merge(Acc, F) end, #{}, TraceSize),
Now = erlang:system_time(second),
Traces =
lists:map(fun(Trace = #{name := Name, start_at := Start,
end_at := End, enable := Enable, type := Type, filter := Filter}) ->
FileName = emqx_trace:filename(Name, Start),
LogSize = collect_file_size(Nodes, FileName, AllFileSize),
Trace0 = maps:without([enable, filter], Trace),
Trace0#{ log_size => LogSize
, Type => Filter
, start_at => list_to_binary(calendar:system_time_to_rfc3339(Start))
, end_at => list_to_binary(calendar:system_time_to_rfc3339(End))
, status => status(Enable, Start, End, Now)
}
end, emqx_trace:format(List)),
{ok, Traces}
end.
create_trace(_, Param) ->
case emqx_trace:create(Param) of
ok -> ok;
{error, {already_existed, Name}} ->
{error, 'ALREADY_EXISTED', ?TO_BIN([Name, " already exists"])};
{error, {duplicate_condition, Name}} ->
{error, 'DUPLICATE_CONDITION', ?TO_BIN([Name, " duplicate condition"])};
{error, Reason} ->
{error, 'INCORRECT_PARAMS', ?TO_BIN(Reason)}
end.
delete_trace(#{name := Name}, _Param) ->
case emqx_trace:delete(Name) of
ok -> ok;
{error, not_found} -> ?NOT_FOUND(Name)
end.
clear_traces(_, _) ->
emqx_trace:clear().
update_trace(#{name := Name, operation := Operation}, _Param) ->
Enable = case Operation of disable -> false; enable -> true end,
case emqx_trace:update(Name, Enable) of
ok -> {ok, #{enable => Enable, name => Name}};
{error, not_found} -> ?NOT_FOUND(Name)
end.
%% if HTTP request headers include accept-encoding: gzip and file size > 300 bytes.
%% cowboy_compress_h will auto encode gzip format.
download_zip_log(#{name := Name}, _Param) ->
case emqx_trace:get_trace_filename(Name) of
{ok, TraceLog} ->
TraceFiles = collect_trace_file(TraceLog),
ZipDir = emqx_trace:zip_dir(),
Zips = group_trace_file(ZipDir, TraceLog, TraceFiles),
ZipFileName = ZipDir ++ binary_to_list(Name) ++ ".zip",
{ok, ZipFile} = zip:zip(ZipFileName, Zips, [{cwd, ZipDir}]),
emqx_trace:delete_files_after_send(ZipFileName, Zips),
{ok, ZipFile};
{error, Reason} ->
{error, Reason}
end.
group_trace_file(ZipDir, TraceLog, TraceFiles) ->
lists:foldl(fun(Res, Acc) ->
case Res of
{ok, Node, Bin} ->
ZipName = ZipDir ++ Node ++ "-" ++ TraceLog,
ok = file:write_file(ZipName, Bin),
[Node ++ "-" ++ TraceLog | Acc];
{error, Node, Reason} ->
?LOG(error, "download trace log error:~p", [{Node, TraceLog, Reason}]),
Acc
end
end, [], TraceFiles).
collect_trace_file(TraceLog) ->
cluster_call(emqx_trace, trace_file, [TraceLog], 60000).
cluster_call(Mod, Fun, Args, Timeout) ->
Nodes = mria_mnesia:running_nodes(),
{GoodRes, BadNodes} = rpc:multicall(Nodes, Mod, Fun, Args, Timeout),
BadNodes =/= [] andalso ?LOG(error, "rpc call failed on ~p ~p", [BadNodes, {Mod, Fun, Args}]),
GoodRes.
stream_log_file(#{name := Name}, Params) ->
Node0 = proplists:get_value(<<"node">>, Params, atom_to_binary(node())),
Position0 = proplists:get_value(<<"position">>, Params, <<"0">>),
Bytes0 = proplists:get_value(<<"bytes">>, Params, <<"1000">>),
case to_node(Node0) of
{ok, Node} ->
Position = binary_to_integer(Position0),
Bytes = binary_to_integer(Bytes0),
case rpc:call(Node, ?MODULE, read_trace_file, [Name, Position, Bytes]) of
{ok, Bin} ->
Meta = #{<<"position">> => Position + byte_size(Bin), <<"bytes">> => Bytes},
{ok, #{meta => Meta, items => Bin}};
{eof, Size} ->
Meta = #{<<"position">> => Size, <<"bytes">> => Bytes},
{ok, #{meta => Meta, items => <<"">>}};
{error, Reason} ->
logger:log(error, "read_file_failed by ~p", [{Name, Reason, Position, Bytes}]),
{error, Reason};
{badrpc, nodedown} ->
{error, "BadRpc node down"}
end;
{error, Reason} -> {error, Reason}
end.
get_trace_size() ->
TraceDir = emqx_trace:trace_dir(),
Node = node(),
case file:list_dir(TraceDir) of
{ok, AllFiles} ->
lists:foldl(fun(File, Acc) ->
FullFileName = filename:join(TraceDir, File),
Acc#{{Node, File} => filelib:file_size(FullFileName)}
end, #{}, lists:delete("zip", AllFiles));
_ -> #{}
end.
%% this is an rpc call for stream_log_file/2
read_trace_file(Name, Position, Limit) ->
TraceDir = emqx_trace:trace_dir(),
{ok, AllFiles} = file:list_dir(TraceDir),
TracePrefix = "trace_" ++ binary_to_list(Name) ++ "_",
Filter = fun(FileName) -> nomatch =/= string:prefix(FileName, TracePrefix) end,
case lists:filter(Filter, AllFiles) of
[TraceFile] ->
TracePath = filename:join([TraceDir, TraceFile]),
read_file(TracePath, Position, Limit);
[] -> {error, not_found}
end.
read_file(Path, Offset, Bytes) ->
{ok, IoDevice} = file:open(Path, [read, raw, binary]),
try
_ = case Offset of
0 -> ok;
_ -> file:position(IoDevice, {bof, Offset})
end,
case file:read(IoDevice, Bytes) of
{ok, Bin} -> {ok, Bin};
{error, Reason} -> {error, Reason};
eof ->
{ok, #file_info{size = Size}} = file:read_file_info(IoDevice),
{eof, Size}
end
after
file:close(IoDevice)
end.
to_node(Node) ->
try {ok, binary_to_existing_atom(Node)}
catch _:_ ->
{error, "node not found"}
end.
collect_file_size(Nodes, FileName, AllFiles) ->
lists:foldl(fun(Node, Acc) ->
Size = maps:get({Node, FileName}, AllFiles, 0),
Acc#{Node => Size}
end, #{}, Nodes).
%% status(false, _Start, End, Now) when End > Now -> <<"stopped">>;
status(false, _Start, _End, _Now) -> <<"stopped">>;
status(true, Start, _End, Now) when Now < Start -> <<"waiting">>;
status(true, _Start, End, Now) when Now >= End -> <<"stopped">>;
status(true, _Start, _End, _Now) -> <<"running">>.

View File

@ -0,0 +1,218 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2018-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_trace_handler).
-include("emqx.hrl").
-include("logger.hrl").
-logger_header("[Tracer]").
%% APIs
-export([ running/0
, install/3
, install/4
, uninstall/1
, uninstall/2
]).
%% For logger handler filters callbacks
-export([ filter_clientid/2
, filter_topic/2
, filter_ip_address/2
]).
-export([handler_id/2]).
-type tracer() :: #{
name := binary(),
type := clientid | topic | ip_address,
filter := emqx_types:clientid() | emqx_types:topic() | emqx_trace:ip_address()
}.
-define(FORMAT,
{logger_formatter, #{
template => [
time, " [", level, "] ",
{clientid,
[{peername, [clientid, "@", peername, " "], [clientid, " "]}],
[{peername, [peername, " "], []}]
},
msg, "\n"
],
single_line => false,
max_size => unlimited,
depth => unlimited
}}
).
-define(CONFIG(_LogFile_), #{
type => halt,
file => _LogFile_,
max_no_bytes => 512 * 1024 * 1024,
overload_kill_enable => true,
overload_kill_mem_size => 50 * 1024 * 1024,
overload_kill_qlen => 20000,
%% disable restart
overload_kill_restart_after => infinity
}).
%%------------------------------------------------------------------------------
%% APIs
%%------------------------------------------------------------------------------
-spec install(Name :: binary() | list(),
Type :: clientid | topic | ip_address,
Filter :: emqx_types:clientid() | emqx_types:topic() | string(),
Level :: logger:level() | all,
LogFilePath :: string()) -> ok | {error, term()}.
install(Name, Type, Filter, Level, LogFile) ->
Who = #{type => Type, filter => ensure_bin(Filter), name => ensure_bin(Name)},
install(Who, Level, LogFile).
-spec install(Type :: clientid | topic | ip_address,
Filter :: emqx_types:clientid() | emqx_types:topic() | string(),
Level :: logger:level() | all,
LogFilePath :: string()) -> ok | {error, term()}.
install(Type, Filter, Level, LogFile) ->
install(Filter, Type, Filter, Level, LogFile).
-spec install(tracer(), logger:level() | all, string()) -> ok | {error, term()}.
install(Who, all, LogFile) ->
install(Who, debug, LogFile);
install(Who, Level, LogFile) ->
PrimaryLevel = emqx_logger:get_primary_log_level(),
try logger:compare_levels(Level, PrimaryLevel) of
lt ->
{error,
io_lib:format(
"Cannot trace at a log level (~s) "
"lower than the primary log level (~s)",
[Level, PrimaryLevel]
)};
_GtOrEq ->
install_handler(Who, Level, LogFile)
catch
error:badarg ->
{error, {invalid_log_level, Level}}
end.
-spec uninstall(Type :: clientid | topic | ip_address,
Name :: binary() | list()) -> ok | {error, term()}.
uninstall(Type, Name) ->
HandlerId = handler_id(ensure_bin(Name), Type),
uninstall(HandlerId).
-spec uninstall(HandlerId :: atom()) -> ok | {error, term()}.
uninstall(HandlerId) ->
Res = logger:remove_handler(HandlerId),
show_prompts(Res, HandlerId, "Stop trace"),
Res.
%% @doc Return information about all running trace handlers.
-spec running() ->
[
#{
name => binary(),
type => topic | clientid | ip_address,
id => atom(),
filter => emqx_types:topic() | emqx_types:clientid() | emqx_trace:ip_address(),
level => logger:level(),
dst => file:filename() | console | unknown
}
].
running() ->
lists:foldl(fun filter_traces/2, [], emqx_logger:get_log_handlers(started)).
-spec filter_clientid(logger:log_event(), {string(), atom()}) -> logger:log_event() | ignore.
filter_clientid(#{meta := #{clientid := ClientId}} = Log, {ClientId, _Name}) -> Log;
filter_clientid(_Log, _ExpectId) -> ignore.
-spec filter_topic(logger:log_event(), {string(), atom()}) -> logger:log_event() | ignore.
filter_topic(#{meta := #{topic := Topic}} = Log, {TopicFilter, _Name}) ->
case emqx_topic:match(Topic, TopicFilter) of
true -> Log;
false -> ignore
end;
filter_topic(_Log, _ExpectId) -> ignore.
-spec filter_ip_address(logger:log_event(), {string(), atom()}) -> logger:log_event() | ignore.
filter_ip_address(#{meta := #{peername := Peername}} = Log, {IP, _Name}) ->
case lists:prefix(IP, Peername) of
true -> Log;
false -> ignore
end;
filter_ip_address(_Log, _ExpectId) -> ignore.
install_handler(Who = #{name := Name, type := Type}, Level, LogFile) ->
HandlerId = handler_id(Name, Type),
Config = #{ level => Level,
formatter => ?FORMAT,
filter_default => stop,
filters => filters(Who),
config => ?CONFIG(LogFile)
},
Res = logger:add_handler(HandlerId, logger_disk_log_h, Config),
show_prompts(Res, Who, "Start trace"),
Res.
filters(#{type := clientid, filter := Filter, name := Name}) ->
[{clientid, {fun ?MODULE:filter_clientid/2, {ensure_list(Filter), Name}}}];
filters(#{type := topic, filter := Filter, name := Name}) ->
[{topic, {fun ?MODULE:filter_topic/2, {ensure_bin(Filter), Name}}}];
filters(#{type := ip_address, filter := Filter, name := Name}) ->
[{ip_address, {fun ?MODULE:filter_ip_address/2, {ensure_list(Filter), Name}}}].
filter_traces(#{id := Id, level := Level, dst := Dst, filters := Filters}, Acc) ->
Init = #{id => Id, level => Level, dst => Dst},
case Filters of
[{Type, {_FilterFun, {Filter, Name}}}] when
Type =:= topic orelse
Type =:= clientid orelse
Type =:= ip_address ->
[Init#{type => Type, filter => Filter, name => Name} | Acc];
_ ->
Acc
end.
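%% Build the logger handler id from the trace name and type; fall back
%% to an MD5 hex digest when the name cannot form a printable id.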
handler_id(Name, Type) ->
try
do_handler_id(Name, Type)
catch
_ : _ ->
Hash = emqx_misc:bin2hexstr_a_f(crypto:hash(md5, Name)),
do_handler_id(Hash, Type)
end.
%% Handler ID must be an atom.
do_handler_id(Name, Type) ->
TypeStr = atom_to_list(Type),
NameStr = unicode:characters_to_list(Name, utf8),
FullNameStr = "trace_" ++ TypeStr ++ "_" ++ NameStr,
true = io_lib:printable_unicode_list(FullNameStr),
FullNameBin = unicode:characters_to_binary(FullNameStr, utf8),
binary_to_atom(FullNameBin, utf8).
ensure_bin(List) when is_list(List) -> iolist_to_binary(List);
ensure_bin(Bin) when is_binary(Bin) -> Bin.
ensure_list(Bin) when is_binary(Bin) -> binary_to_list(Bin);
ensure_list(List) when is_list(List) -> List.
show_prompts(ok, Who, Msg) ->
?LOG(info, Msg ++ " ~p " ++ "successfully~n", [Who]);
show_prompts({error, Reason}, Who, Msg) ->
?LOG(error, Msg ++ " ~p " ++ "failed with ~p~n", [Who, Reason]).

View File

@ -1,167 +0,0 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2018-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_tracer).
-include("emqx.hrl").
-include("logger.hrl").
%% APIs
-export([ trace/2
, start_trace/3
, lookup_traces/0
, stop_trace/1
]).
-type(trace_who() :: {clientid | topic, binary()}).
-define(TRACER, ?MODULE).
-define(FORMAT, {logger_formatter,
#{template =>
[time, " [", level, "] ",
{clientid,
[{peername,
[clientid, "@", peername, " "],
[clientid, " "]}],
[{peername,
[peername, " "],
[]}]},
msg, "\n"],
single_line => false
}}).
-define(TOPIC_TRACE_ID(T), "trace_topic_"++T).
-define(CLIENT_TRACE_ID(C), "trace_clientid_"++C).
-define(TOPIC_TRACE(T), {topic, T}).
-define(CLIENT_TRACE(C), {clientid, C}).
-define(IS_LOG_LEVEL(L),
L =:= emergency orelse
L =:= alert orelse
L =:= critical orelse
L =:= error orelse
L =:= warning orelse
L =:= notice orelse
L =:= info orelse
L =:= debug).
-dialyzer({nowarn_function, [install_trace_handler/3]}).
%%------------------------------------------------------------------------------
%% APIs
%%------------------------------------------------------------------------------
trace(publish, #message{topic = <<"$SYS/", _/binary>>}) ->
%% Do not trace '$SYS' publish
ignore;
trace(publish, #message{from = From, topic = Topic, payload = Payload})
when is_binary(From); is_atom(From) ->
emqx_logger:info(#{topic => Topic,
mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY} },
"PUBLISH to ~ts: ~0p", [Topic, Payload]).
%% @doc Start to trace clientid or topic.
-spec(start_trace(trace_who(), logger:level() | all, string()) -> ok | {error, term()}).
start_trace(Who, all, LogFile) ->
start_trace(Who, debug, LogFile);
start_trace(Who, Level, LogFile) ->
case ?IS_LOG_LEVEL(Level) of
true ->
#{level := PrimaryLevel} = logger:get_primary_config(),
try logger:compare_levels(Level, PrimaryLevel) of
lt ->
{error,
io_lib:format("Cannot trace at a log level (~ts) "
"lower than the primary log level (~ts)",
[Level, PrimaryLevel])};
_GtOrEq ->
install_trace_handler(Who, Level, LogFile)
catch
_:Error ->
{error, Error}
end;
false -> {error, {invalid_log_level, Level}}
end.
%% @doc Stop tracing clientid or topic.
-spec(stop_trace(trace_who()) -> ok | {error, term()}).
stop_trace(Who) ->
uninstall_trance_handler(Who).
%% @doc Lookup all traces
-spec(lookup_traces() -> [{Who :: trace_who(), LogFile :: string()}]).
lookup_traces() ->
lists:foldl(fun filter_traces/2, [], emqx_logger:get_log_handlers(started)).
install_trace_handler(Who, Level, LogFile) ->
case logger:add_handler(handler_id(Who), logger_disk_log_h,
#{level => Level,
formatter => ?FORMAT,
config => #{type => halt, file => LogFile},
filter_default => stop,
filters => [{meta_key_filter,
{fun filter_by_meta_key/2, Who}}]})
of
ok ->
?SLOG(info, #{msg => "start_trace", who => Who});
{error, Reason} ->
?SLOG(error, #{msg => "failed_to_trace", who => Who, reason => Reason}),
{error, Reason}
end.
uninstall_trance_handler(Who) ->
case logger:remove_handler(handler_id(Who)) of
ok ->
?SLOG(info, #{msg => "stop_trace", who => Who});
{error, Reason} ->
?SLOG(error, #{msg => "failed_to_stop_trace", who => Who, reason => Reason}),
{error, Reason}
end.
filter_traces(#{id := Id, level := Level, dst := Dst}, Acc) ->
case atom_to_list(Id) of
?TOPIC_TRACE_ID(T)->
[{?TOPIC_TRACE(T), {Level, Dst}} | Acc];
?CLIENT_TRACE_ID(C) ->
[{?CLIENT_TRACE(C), {Level, Dst}} | Acc];
_ -> Acc
end.
handler_id(?TOPIC_TRACE(Topic)) ->
list_to_atom(?TOPIC_TRACE_ID(handler_name(Topic)));
handler_id(?CLIENT_TRACE(ClientId)) ->
list_to_atom(?CLIENT_TRACE_ID(handler_name(ClientId))).
filter_by_meta_key(#{meta := Meta} = Log, {Key, Value}) ->
case is_meta_match(Key, Value, Meta) of
true -> Log;
false -> ignore
end.
is_meta_match(clientid, ClientId, #{clientid := ClientIdStr}) ->
ClientId =:= iolist_to_binary(ClientIdStr);
is_meta_match(topic, TopicFilter, #{topic := TopicMeta}) ->
emqx_topic:match(TopicMeta, TopicFilter);
is_meta_match(_, _, _) ->
false.
handler_name(Bin) ->
case byte_size(Bin) of
Size when Size =< 200 -> binary_to_list(Bin);
_ -> hashstr(Bin)
end.
hashstr(Bin) ->
binary_to_list(emqx_misc:bin2hexstr_A_F(Bin)).

View File

@ -62,12 +62,23 @@ handle_info({timeout, _Timer, check}, State) ->
ProcessCount = erlang:system_info(process_count),
case ProcessCount / erlang:system_info(process_limit) of
Percent when Percent >= ProcHighWatermark ->
emqx_alarm:activate(too_many_processes, #{
usage => io_lib:format("~p%", [Percent*100]),
Usage = io_lib:format("~p%", [Percent*100]),
Message = [Usage, " process usage"],
emqx_alarm:activate(too_many_processes,
#{
usage => Usage,
high_watermark => ProcHighWatermark,
low_watermark => ProcLowWatermark});
low_watermark => ProcLowWatermark},
Message);
Percent when Percent < ProcLowWatermark ->
emqx_alarm:deactivate(too_many_processes);
Usage = io_lib:format("~p%", [Percent*100]),
Message = [Usage, " process usage"],
emqx_alarm:deactivate(too_many_processes,
#{
usage => Usage,
high_watermark => ProcHighWatermark,
low_watermark => ProcLowWatermark},
Message);
_Precent ->
ok
end,

View File

@ -32,16 +32,12 @@ init_per_testcase(t_size_limit, Config) ->
<<"size_limit">> => 2
}),
Config;
init_per_testcase(t_validity_period, Config) ->
init_per_testcase(_, Config) ->
emqx_common_test_helpers:boot_modules(all),
emqx_common_test_helpers:start_apps([]),
{ok, _} = emqx:update_config([alarm], #{
<<"validity_period">> => <<"1s">>
}),
Config;
init_per_testcase(_, Config) ->
emqx_common_test_helpers:boot_modules(all),
emqx_common_test_helpers:start_apps([]),
Config.
end_per_testcase(_, _Config) ->
@ -86,17 +82,77 @@ t_size_limit(_) ->
?assertEqual({error, not_found}, get_alarm(a, emqx_alarm:get_alarms(deactivated))),
emqx_alarm:delete_all_deactivated_alarms().
t_validity_period(_) ->
ok = emqx_alarm:activate(a),
ok = emqx_alarm:deactivate(a),
t_validity_period(_Config) ->
ok = emqx_alarm:activate(a, #{msg => "Request frequency is too high"}, <<"Reach Rate Limit">>),
ok = emqx_alarm:deactivate(a, #{msg => "Request frequency returns to normal"}),
?assertNotEqual({error, not_found}, get_alarm(a, emqx_alarm:get_alarms(deactivated))),
%% call with unknown msg
?assertEqual(ignored, gen_server:call(emqx_alarm, unknown_alarm)),
ct:sleep(3000),
?assertEqual({error, not_found}, get_alarm(a, emqx_alarm:get_alarms(deactivated))).
t_validity_period_1(_Config) ->
ok = emqx_alarm:activate(a, #{msg => "Request frequency is too high"}, <<"Reach Rate Limit">>),
ok = emqx_alarm:deactivate(a, #{msg => "Request frequency returns to normal"}),
?assertNotEqual({error, not_found}, get_alarm(a, emqx_alarm:get_alarms(deactivated))),
%% info with unknown msg
erlang:send(emqx_alarm, unknown_alarm),
ct:sleep(3000),
?assertEqual({error, not_found}, get_alarm(a, emqx_alarm:get_alarms(deactivated))).
t_validity_period_2(_Config) ->
ok = emqx_alarm:activate(a, #{msg => "Request frequency is too high"}, <<"Reach Rate Limit">>),
ok = emqx_alarm:deactivate(a, #{msg => "Request frequency returns to normal"}),
?assertNotEqual({error, not_found}, get_alarm(a, emqx_alarm:get_alarms(deactivated))),
%% cast with unknown msg
gen_server:cast(emqx_alarm, unknown_alarm),
ct:sleep(3000),
?assertEqual({error, not_found}, get_alarm(a, emqx_alarm:get_alarms(deactivated))).
-record(activated_alarm, {
name :: binary() | atom(),
details :: map() | list(),
message :: binary(),
activate_at :: integer()
}).
-record(deactivated_alarm, {
activate_at :: integer(),
name :: binary() | atom(),
details :: map() | list(),
message :: binary(),
deactivate_at :: integer() | infinity
}).
t_format(_Config) ->
Name = test_alarm,
Message = "test_msg",
At = erlang:system_time(microsecond),
Details = "test_details",
Node = node(),
Activate = #activated_alarm{name = Name, message = Message, activate_at = At, details = Details},
#{
node := Node,
name := Name,
message := Message,
duration := 0,
details := Details
} = emqx_alarm:format(Activate),
Deactivate = #deactivated_alarm{name = Name, message = Message, activate_at = At, details = Details,
deactivate_at = At},
#{
node := Node,
name := Name,
message := Message,
duration := 0,
details := Details
} = emqx_alarm:format(Deactivate),
ok.
get_alarm(Name, [Alarm = #{name := Name} | _More]) ->
Alarm;
get_alarm(Name, [_Alarm | More]) ->
get_alarm(Name, More);
get_alarm(_Name, []) ->
{error, not_found}.

View File

@ -28,7 +28,7 @@
-export([ roots/0, fields/1 ]).
-export([ create/1
-export([ create/2
, update/2
, authenticate/2
, destroy/1
@ -70,7 +70,7 @@ check_config(C) ->
#{atom_key => true}),
R.
create(_Config) ->
create(_AuthenticatorID, _Config) ->
{ok, #{mark => 1}}.
update(_Config, _State) ->
@ -103,22 +103,28 @@ end_per_testcase(Case, Config) ->
_ = ?MODULE:Case({'end', Config}),
ok.
t_chain({_, Config}) -> Config;
t_chain(Config) when is_list(Config) ->
% CRUD of authentication chain
ChainName = 'test',
?assertMatch({ok, []}, ?AUTHN:list_chains()),
?assertMatch({ok, []}, ?AUTHN:list_chain_names()),
?assertMatch({ok, #{name := ChainName, authenticators := []}}, ?AUTHN:create_chain(ChainName)),
?assertEqual({error, {already_exists, {chain, ChainName}}}, ?AUTHN:create_chain(ChainName)),
?assertMatch({ok, #{name := ChainName, authenticators := []}}, ?AUTHN:lookup_chain(ChainName)),
?assertMatch({ok, [#{name := ChainName}]}, ?AUTHN:list_chains()),
?assertEqual({ok, [ChainName]}, ?AUTHN:list_chain_names()),
?assertEqual(ok, ?AUTHN:delete_chain(ChainName)),
?assertMatch({error, {not_found, {chain, ChainName}}}, ?AUTHN:lookup_chain(ChainName)),
ok.
t_authenticator({'init', Config}) ->
[{"auth1", {'password-based', 'built-in-database'}},
{"auth2", {'password-based', mysql}} | Config];
t_authenticator(Config) when is_list(Config) ->
ChainName = 'test',
AuthenticatorConfig1 = #{mechanism => 'password-based',
@ -126,23 +132,43 @@ t_authenticator(Config) when is_list(Config) ->
enable => true},
% Create an authenticator when the authentication chain does not exist
?assertEqual({error, {not_found, {chain, ChainName}}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
?assertEqual(
{error, {not_found, {chain, ChainName}}},
?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
?AUTHN:create_chain(ChainName),
% Create an authenticator when the provider does not exist
?assertEqual({error, no_available_provider}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
?assertEqual(
{error, no_available_provider},
?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
AuthNType1 = ?config("auth1"),
register_provider(AuthNType1, ?MODULE),
ID1 = <<"password-based:built-in-database">>,
% CRUD of authenticators
?assertMatch({ok, #{id := ID1, state := #{mark := 1}}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
?assertMatch(
{ok, #{id := ID1, state := #{mark := 1}}},
?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
?assertMatch({ok, #{id := ID1}}, ?AUTHN:lookup_authenticator(ChainName, ID1)),
?assertMatch({ok, [#{id := ID1}]}, ?AUTHN:list_authenticators(ChainName)),
?assertEqual({error, {already_exists, {authenticator, ID1}}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
?assertMatch({ok, #{id := ID1, state := #{mark := 2}}}, ?AUTHN:update_authenticator(ChainName, ID1, AuthenticatorConfig1)),
?assertEqual(
{error, {already_exists, {authenticator, ID1}}},
?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
?assertMatch(
{ok, #{id := ID1, state := #{mark := 2}}},
?AUTHN:update_authenticator(ChainName, ID1, AuthenticatorConfig1)),
?assertEqual(ok, ?AUTHN:delete_authenticator(ChainName, ID1)),
?assertEqual({error, {not_found, {authenticator, ID1}}}, ?AUTHN:update_authenticator(ChainName, ID1, AuthenticatorConfig1)),
?assertEqual(
{error, {not_found, {authenticator, ID1}}},
?AUTHN:update_authenticator(ChainName, ID1, AuthenticatorConfig1)),
?assertMatch({ok, []}, ?AUTHN:list_authenticators(ChainName)),
% Multiple authenticators exist at the same time
@ -152,25 +178,37 @@ t_authenticator(Config) when is_list(Config) ->
AuthenticatorConfig2 = #{mechanism => 'password-based',
backend => mysql,
enable => true},
?assertMatch({ok, #{id := ID1}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
?assertMatch({ok, #{id := ID2}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig2)),
?assertMatch(
{ok, #{id := ID1}},
?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
?assertMatch(
{ok, #{id := ID2}},
?AUTHN:create_authenticator(ChainName, AuthenticatorConfig2)),
% Move authenticator
?assertMatch({ok, [#{id := ID1}, #{id := ID2}]}, ?AUTHN:list_authenticators(ChainName)),
?assertEqual(ok, ?AUTHN:move_authenticator(ChainName, ID2, top)),
?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(ChainName)),
?assertEqual(ok, ?AUTHN:move_authenticator(ChainName, ID2, bottom)),
?assertMatch({ok, [#{id := ID1}, #{id := ID2}]}, ?AUTHN:list_authenticators(ChainName)),
?assertEqual(ok, ?AUTHN:move_authenticator(ChainName, ID2, {before, ID1})),
?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(ChainName));
t_authenticator({'end', Config}) ->
?AUTHN:delete_chain(test),
?AUTHN:deregister_providers([?config("auth1"), ?config("auth2")]),
ok.
t_authenticate({init, Config}) ->
[{listener_id, 'tcp:default'},
{authn_type, {'password-based', 'built-in-database'}} | Config];
t_authenticate(Config) when is_list(Config) ->
ListenerID = ?config(listener_id),
AuthNType = ?config(authn_type),
@ -188,13 +226,21 @@ t_authenticate(Config) when is_list(Config) ->
enable => true},
?AUTHN:create_chain(ListenerID),
?assertMatch({ok, _}, ?AUTHN:create_authenticator(ListenerID, AuthenticatorConfig)),
?assertEqual({ok, #{is_superuser => true}}, emqx_access_control:authenticate(ClientInfo)),
?assertEqual({error, bad_username_or_password}, emqx_access_control:authenticate(ClientInfo#{username => <<"bad">>}));
?assertEqual(
{ok, #{is_superuser => true}},
emqx_access_control:authenticate(ClientInfo)),
?assertEqual(
{error, bad_username_or_password},
emqx_access_control:authenticate(ClientInfo#{username => <<"bad">>}));
t_authenticate({'end', Config}) ->
?AUTHN:delete_chain(?config(listener_id)),
?AUTHN:deregister_provider(?config(authn_type)),
ok.
t_update_config({init, Config}) ->
Global = 'mqtt:global',
AuthNType1 = {'password-based', 'built-in-database'},
@ -202,6 +248,7 @@ t_update_config({init, Config}) ->
[{global, Global},
{"auth1", AuthNType1},
{"auth2", AuthNType2} | Config];
t_update_config(Config) when is_list(Config) ->
emqx_config_handler:add_handler([authentication], emqx_authentication),
ok = register_provider(?config("auth1"), ?MODULE),
@ -217,46 +264,131 @@ t_update_config(Config) when is_list(Config) ->
ID2 = <<"password-based:mysql">>,
?assertMatch({ok, []}, ?AUTHN:list_chains()),
?assertMatch({ok, _}, update_config([authentication], {create_authenticator, Global, AuthenticatorConfig1})),
?assertMatch({ok, #{id := ID1, state := #{mark := 1}}}, ?AUTHN:lookup_authenticator(Global, ID1)),
?assertMatch({ok, _}, update_config([authentication], {create_authenticator, Global, AuthenticatorConfig2})),
?assertMatch({ok, #{id := ID2, state := #{mark := 1}}}, ?AUTHN:lookup_authenticator(Global, ID2)),
?assertMatch(
{ok, _},
update_config([authentication], {create_authenticator, Global, AuthenticatorConfig1})),
?assertMatch({ok, _}, update_config([authentication], {update_authenticator, Global, ID1, AuthenticatorConfig1#{<<"enable">> => false}})),
?assertMatch({ok, #{id := ID1, state := #{mark := 2}}}, ?AUTHN:lookup_authenticator(Global, ID1)),
?assertMatch(
{ok, #{id := ID1, state := #{mark := 1}}},
?AUTHN:lookup_authenticator(Global, ID1)),
?assertMatch(
{ok, _},
update_config([authentication], {create_authenticator, Global, AuthenticatorConfig2})),
?assertMatch(
{ok, #{id := ID2, state := #{mark := 1}}},
?AUTHN:lookup_authenticator(Global, ID2)),
?assertMatch(
{ok, _},
update_config([authentication],
{update_authenticator,
Global,
ID1,
AuthenticatorConfig1#{<<"enable">> => false}
})),
?assertMatch(
{ok, #{id := ID1, state := #{mark := 2}}},
?AUTHN:lookup_authenticator(Global, ID1)),
?assertMatch(
{ok, _},
update_config([authentication], {move_authenticator, Global, ID2, top})),
?assertMatch({ok, _}, update_config([authentication], {move_authenticator, Global, ID2, top})),
?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(Global)),
?assertMatch({ok, _}, update_config([authentication], {delete_authenticator, Global, ID1})),
?assertEqual({error, {not_found, {authenticator, ID1}}}, ?AUTHN:lookup_authenticator(Global, ID1)),
?assertEqual(
{error, {not_found, {authenticator, ID1}}},
?AUTHN:lookup_authenticator(Global, ID1)),
?assertMatch({ok, _}, update_config([authentication], {delete_authenticator, Global, ID2})),
?assertEqual({error, {not_found, {authenticator, ID2}}}, ?AUTHN:lookup_authenticator(Global, ID2)),
?assertMatch(
{ok, _},
update_config([authentication], {delete_authenticator, Global, ID2})),
?assertEqual(
{error, {not_found, {authenticator, ID2}}},
?AUTHN:lookup_authenticator(Global, ID2)),
ListenerID = 'tcp:default',
ConfKeyPath = [listeners, tcp, default, authentication],
?assertMatch({ok, _}, update_config(ConfKeyPath, {create_authenticator, ListenerID, AuthenticatorConfig1})),
?assertMatch({ok, #{id := ID1, state := #{mark := 1}}}, ?AUTHN:lookup_authenticator(ListenerID, ID1)),
?assertMatch({ok, _}, update_config(ConfKeyPath, {create_authenticator, ListenerID, AuthenticatorConfig2})),
?assertMatch({ok, #{id := ID2, state := #{mark := 1}}}, ?AUTHN:lookup_authenticator(ListenerID, ID2)),
?assertMatch(
{ok, _},
update_config(ConfKeyPath,
{create_authenticator, ListenerID, AuthenticatorConfig1})),
?assertMatch({ok, _}, update_config(ConfKeyPath, {update_authenticator, ListenerID, ID1, AuthenticatorConfig1#{<<"enable">> => false}})),
?assertMatch({ok, #{id := ID1, state := #{mark := 2}}}, ?AUTHN:lookup_authenticator(ListenerID, ID1)),
?assertMatch(
{ok, #{id := ID1, state := #{mark := 1}}},
?AUTHN:lookup_authenticator(ListenerID, ID1)),
?assertMatch({ok, _}, update_config(ConfKeyPath, {move_authenticator, ListenerID, ID2, top})),
?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(ListenerID)),
?assertMatch(
{ok, _},
update_config(ConfKeyPath,
{create_authenticator, ListenerID, AuthenticatorConfig2})),
?assertMatch(
{ok, #{id := ID2, state := #{mark := 1}}},
?AUTHN:lookup_authenticator(ListenerID, ID2)),
?assertMatch(
{ok, _},
update_config(ConfKeyPath,
{update_authenticator,
ListenerID,
ID1,
AuthenticatorConfig1#{<<"enable">> => false}
})),
?assertMatch(
{ok, #{id := ID1, state := #{mark := 2}}},
?AUTHN:lookup_authenticator(ListenerID, ID1)),
?assertMatch(
{ok, _},
update_config(ConfKeyPath, {move_authenticator, ListenerID, ID2, top})),
?assertMatch(
{ok, [#{id := ID2}, #{id := ID1}]},
?AUTHN:list_authenticators(ListenerID)),
?assertMatch(
{ok, _},
update_config(ConfKeyPath, {delete_authenticator, ListenerID, ID1})),
?assertEqual(
{error, {not_found, {authenticator, ID1}}},
?AUTHN:lookup_authenticator(ListenerID, ID1));
?assertMatch({ok, _}, update_config(ConfKeyPath, {delete_authenticator, ListenerID, ID1})),
?assertEqual({error, {not_found, {authenticator, ID1}}}, ?AUTHN:lookup_authenticator(ListenerID, ID1));
t_update_config({'end', Config}) ->
?AUTHN:delete_chain(?config(global)),
?AUTHN:deregister_providers([?config("auth1"), ?config("auth2")]),
ok.
t_restart({'init', Config}) -> Config;
t_restart(Config) when is_list(Config) ->
?assertEqual({ok, []}, ?AUTHN:list_chain_names()),
?AUTHN:create_chain(test_chain),
?assertEqual({ok, [test_chain]}, ?AUTHN:list_chain_names()),
ok = supervisor:terminate_child(emqx_authentication_sup, ?AUTHN),
{ok, _} = supervisor:restart_child(emqx_authentication_sup, ?AUTHN),
?assertEqual({ok, [test_chain]}, ?AUTHN:list_chain_names());
t_restart({'end', _Config}) ->
?AUTHN:delete_chain(test_chain),
ok.
t_convert_certs({_, Config}) -> Config;
t_convert_certs(Config) when is_list(Config) ->
Global = <<"mqtt:global">>,
Certs = certs([ {<<"keyfile">>, "key.pem"}
@ -270,7 +402,11 @@ t_convert_certs(Config) when is_list(Config) ->
Certs2 = certs([ {<<"keyfile">>, "key.pem"}
, {<<"certfile">>, "cert.pem"}
]),
#{<<"ssl">> := NCerts2} = convert_certs(CertsDir, #{<<"ssl">> => Certs2}, #{<<"ssl">> => NCerts}),
#{<<"ssl">> := NCerts2} = convert_certs(
CertsDir,
#{<<"ssl">> => Certs2}, #{<<"ssl">> => NCerts}),
?assertEqual(maps:get(<<"keyfile">>, NCerts), maps:get(<<"keyfile">>, NCerts2)),
?assertEqual(maps:get(<<"certfile">>, NCerts), maps:get(<<"certfile">>, NCerts2)),
@ -278,7 +414,11 @@ t_convert_certs(Config) when is_list(Config) ->
, {<<"certfile">>, "client-cert.pem"}
, {<<"cacertfile">>, "cacert.pem"}
]),
#{<<"ssl">> := NCerts3} = convert_certs(CertsDir, #{<<"ssl">> => Certs3}, #{<<"ssl">> => NCerts2}),
#{<<"ssl">> := NCerts3} = convert_certs(
CertsDir,
#{<<"ssl">> => Certs3}, #{<<"ssl">> => NCerts2}),
?assertNotEqual(maps:get(<<"keyfile">>, NCerts2), maps:get(<<"keyfile">>, NCerts3)),
?assertNotEqual(maps:get(<<"certfile">>, NCerts2), maps:get(<<"certfile">>, NCerts3)),

View File

@ -41,16 +41,16 @@ t_add_delete(_) ->
at = erlang:system_time(second),
until = erlang:system_time(second) + 1000
},
ok = emqx_banned:create(Banned),
{ok, _} = emqx_banned:create(Banned),
?assertEqual(1, emqx_banned:info(size)),
ok = emqx_banned:delete({clientid, <<"TestClient">>}),
?assertEqual(0, emqx_banned:info(size)).
t_check(_) ->
ok = emqx_banned:create(#banned{who = {clientid, <<"BannedClient">>}}),
ok = emqx_banned:create(#banned{who = {username, <<"BannedUser">>}}),
ok = emqx_banned:create(#banned{who = {peerhost, {192,168,0,1}}}),
{ok, _} = emqx_banned:create(#banned{who = {clientid, <<"BannedClient">>}}),
{ok, _} = emqx_banned:create(#banned{who = {username, <<"BannedUser">>}}),
{ok, _} = emqx_banned:create(#banned{who = {peerhost, {192,168,0,1}}}),
?assertEqual(3, emqx_banned:info(size)),
ClientInfo1 = #{clientid => <<"BannedClient">>,
username => <<"user">>,
@ -83,7 +83,7 @@ t_check(_) ->
t_unused(_) ->
{ok, Banned} = emqx_banned:start_link(),
ok = emqx_banned:create(#banned{who = {clientid, <<"BannedClient">>},
{ok, _} = emqx_banned:create(#banned{who = {clientid, <<"BannedClient">>},
until = erlang:system_time(second)}),
?assertEqual(ignored, gen_server:call(Banned, unexpected_req)),
?assertEqual(ok, gen_server:cast(Banned, unexpected_msg)),

View File

@ -23,20 +23,71 @@
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
-include_lib("emqx/include/emqx.hrl").
-include_lib("emqx/include/emqx_mqtt.hrl").
all() -> emqx_common_test_helpers:all(?MODULE).
all() ->
[ {group, all_cases}
, {group, connected_client_count_group}
].
init_per_suite(Config) ->
groups() ->
TCs = emqx_common_test_helpers:all(?MODULE),
ConnClientTCs = [ t_connected_client_count_persistent
, t_connected_client_count_anonymous
, t_connected_client_stats
],
OtherTCs = TCs -- ConnClientTCs,
[ {all_cases, [], OtherTCs}
, {connected_client_count_group, [ {group, tcp}
, {group, ws}
, {group, quic}
]}
, {tcp, [], ConnClientTCs}
, {ws, [], ConnClientTCs}
, {quic, [], ConnClientTCs}
].
init_per_group(connected_client_count_group, Config) ->
Config;
init_per_group(tcp, Config) ->
emqx_common_test_helpers:boot_modules(all),
emqx_common_test_helpers:start_apps([]),
[{conn_fun, connect} | Config];
init_per_group(ws, Config) ->
emqx_common_test_helpers:boot_modules(all),
emqx_common_test_helpers:start_apps([]),
[ {ssl, false}
, {enable_websocket, true}
, {conn_fun, ws_connect}
, {port, 8083}
, {host, "localhost"}
| Config
];
init_per_group(quic, Config) ->
emqx_common_test_helpers:boot_modules(all),
emqx_common_test_helpers:start_apps([]),
[ {conn_fun, quic_connect}
, {port, 14567}
| Config];
init_per_group(_Group, Config) ->
emqx_common_test_helpers:boot_modules(all),
emqx_common_test_helpers:start_apps([]),
Config.
end_per_suite(_Config) ->
end_per_group(connected_client_count_group, _Config) ->
ok;
end_per_group(_Group, _Config) ->
emqx_common_test_helpers:stop_apps([]).
init_per_suite(Config) ->
Config.
end_per_suite(_Config) ->
ok.
init_per_testcase(Case, Config) ->
?MODULE:Case({init, Config}).
@ -277,6 +328,240 @@ t_shard({'end', _Config}) ->
emqx_broker:unsubscribe(<<"topic">>),
ok = meck:unload(emqx_broker_helper).
%% persistent sessions, when gone, do not contribute to connected
%% client count
t_connected_client_count_persistent({init, Config}) ->
ok = snabbkaffe:start_trace(),
process_flag(trap_exit, true),
Config;
t_connected_client_count_persistent(Config) when is_list(Config) ->
ConnFun = ?config(conn_fun, Config),
ClientID = <<"clientid">>,
?assertEqual(0, emqx_cm:get_connected_client_count()),
{ok, ConnPid0} = emqtt:start_link([ {clean_start, false}
, {clientid, ClientID}
| Config]),
{{ok, _}, {ok, [_]}} = wait_for_events(
fun() -> emqtt:ConnFun(ConnPid0) end,
[emqx_cm_connected_client_count_inc]
),
timer:sleep(10),
?assertEqual(1, emqx_cm:get_connected_client_count()),
{ok, {ok, [_]}} = wait_for_events(
fun() -> emqtt:disconnect(ConnPid0) end,
[emqx_cm_connected_client_count_dec]
),
timer:sleep(10),
?assertEqual(0, emqx_cm:get_connected_client_count()),
%% reconnecting
{ok, ConnPid1} = emqtt:start_link([ {clean_start, false}
, {clientid, ClientID}
| Config
]),
{{ok, _}, {ok, [_]}} = wait_for_events(
fun() -> emqtt:ConnFun(ConnPid1) end,
[emqx_cm_connected_client_count_inc]
),
?assertEqual(1, emqx_cm:get_connected_client_count()),
%% taking over
{ok, ConnPid2} = emqtt:start_link([ {clean_start, false}
, {clientid, ClientID}
| Config
]),
{{ok, _}, {ok, [_, _]}} = wait_for_events(
fun() -> emqtt:ConnFun(ConnPid2) end,
[ emqx_cm_connected_client_count_inc
, emqx_cm_connected_client_count_dec
],
500
),
?assertEqual(1, emqx_cm:get_connected_client_count()),
%% abnormal exit of channel process
ChanPids = emqx_cm:all_channels(),
{ok, {ok, [_, _]}} = wait_for_events(
fun() ->
lists:foreach(
fun(ChanPid) -> exit(ChanPid, kill) end,
ChanPids)
end,
[ emqx_cm_connected_client_count_dec
, emqx_cm_process_down
]
),
?assertEqual(0, emqx_cm:get_connected_client_count()),
ok;
t_connected_client_count_persistent({'end', _Config}) ->
snabbkaffe:stop(),
ok.
%% connections without client_id also contribute to connected client
%% count
t_connected_client_count_anonymous({init, Config}) ->
ok = snabbkaffe:start_trace(),
process_flag(trap_exit, true),
Config;
t_connected_client_count_anonymous(Config) when is_list(Config) ->
ConnFun = ?config(conn_fun, Config),
?assertEqual(0, emqx_cm:get_connected_client_count()),
%% first client
{ok, ConnPid0} = emqtt:start_link([ {clean_start, true}
| Config]),
{{ok, _}, {ok, [_]}} = wait_for_events(
fun() -> emqtt:ConnFun(ConnPid0) end,
[emqx_cm_connected_client_count_inc]
),
?assertEqual(1, emqx_cm:get_connected_client_count()),
%% second client
{ok, ConnPid1} = emqtt:start_link([ {clean_start, true}
| Config]),
{{ok, _}, {ok, [_]}} = wait_for_events(
fun() -> emqtt:ConnFun(ConnPid1) end,
[emqx_cm_connected_client_count_inc]
),
?assertEqual(2, emqx_cm:get_connected_client_count()),
%% when first client disconnects, shouldn't affect the second
{ok, {ok, [_, _]}} = wait_for_events(
fun() -> emqtt:disconnect(ConnPid0) end,
[ emqx_cm_connected_client_count_dec
, emqx_cm_process_down
]
),
?assertEqual(1, emqx_cm:get_connected_client_count()),
%% reconnecting
{ok, ConnPid2} = emqtt:start_link([ {clean_start, true}
| Config
]),
{{ok, _}, {ok, [_]}} = wait_for_events(
fun() -> emqtt:ConnFun(ConnPid2) end,
[emqx_cm_connected_client_count_inc]
),
?assertEqual(2, emqx_cm:get_connected_client_count()),
{ok, {ok, [_, _]}} = wait_for_events(
fun() -> emqtt:disconnect(ConnPid1) end,
[ emqx_cm_connected_client_count_dec
, emqx_cm_process_down
]
),
?assertEqual(1, emqx_cm:get_connected_client_count()),
%% abnormal exit of channel process
Chans = emqx_cm:all_channels(),
{ok, {ok, [_, _]}} = wait_for_events(
fun() ->
lists:foreach(
fun(ChanPid) -> exit(ChanPid, kill) end,
Chans)
end,
[ emqx_cm_connected_client_count_dec
, emqx_cm_process_down
]
),
?assertEqual(0, emqx_cm:get_connected_client_count()),
ok;
t_connected_client_count_anonymous({'end', _Config}) ->
snabbkaffe:stop(),
ok.
t_connected_client_stats({init, Config}) ->
ok = supervisor:terminate_child(emqx_kernel_sup, emqx_stats),
{ok, _} = supervisor:restart_child(emqx_kernel_sup, emqx_stats),
ok = snabbkaffe:start_trace(),
Config;
t_connected_client_stats(Config) when is_list(Config) ->
ConnFun = ?config(conn_fun, Config),
?assertEqual(0, emqx_cm:get_connected_client_count()),
?assertEqual(0, emqx_stats:getstat('live_connections.count')),
?assertEqual(0, emqx_stats:getstat('live_connections.max')),
{ok, ConnPid} = emqtt:start_link([ {clean_start, true}
, {clientid, <<"clientid">>}
| Config
]),
{{ok, _}, {ok, [_]}} = wait_for_events(
fun() -> emqtt:ConnFun(ConnPid) end,
[emqx_cm_connected_client_count_inc]
),
timer:sleep(20),
%% ensure stats are synchronized
{_, {ok, [_]}} = wait_for_stats(
fun emqx_cm:stats_fun/0,
[#{count_stat => 'live_connections.count',
max_stat => 'live_connections.max'}]
),
?assertEqual(1, emqx_stats:getstat('live_connections.count')),
?assertEqual(1, emqx_stats:getstat('live_connections.max')),
{ok, {ok, [_]}} = wait_for_events(
fun() -> emqtt:disconnect(ConnPid) end,
[emqx_cm_connected_client_count_dec]
),
timer:sleep(20),
%% ensure stats are synchronized
{_, {ok, [_]}} = wait_for_stats(
fun emqx_cm:stats_fun/0,
[#{count_stat => 'live_connections.count',
max_stat => 'live_connections.max'}]
),
?assertEqual(0, emqx_stats:getstat('live_connections.count')),
?assertEqual(1, emqx_stats:getstat('live_connections.max')),
ok;
t_connected_client_stats({'end', _Config}) ->
ok = snabbkaffe:stop(),
ok = supervisor:terminate_child(emqx_kernel_sup, emqx_stats),
{ok, _} = supervisor:restart_child(emqx_kernel_sup, emqx_stats),
ok.
%% the count must always be non-negative
t_connect_client_never_negative({init, Config}) ->
Config;
t_connect_client_never_negative(Config) when is_list(Config) ->
?assertEqual(0, emqx_cm:get_connected_client_count()),
%% would go to -1
ChanPid = list_to_pid("<0.0.1>"),
emqx_cm:mark_channel_disconnected(ChanPid),
?assertEqual(0, emqx_cm:get_connected_client_count()),
%% would be 0 if the count had really gone to -1
emqx_cm:mark_channel_connected(ChanPid),
?assertEqual(1, emqx_cm:get_connected_client_count()),
ok;
t_connect_client_never_negative({'end', _Config}) ->
ok.
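%% Run Action, then wait until length(Kinds) snabbkaffe events whose kind
%% is listed in Kinds have been observed, or the timeout (500 ms by default) hits.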
wait_for_events(Action, Kinds) ->
wait_for_events(Action, Kinds, 500).
wait_for_events(Action, Kinds, Timeout) ->
Predicate = fun(#{?snk_kind := K}) ->
lists:member(K, Kinds)
end,
N = length(Kinds),
{ok, Sub} = snabbkaffe_collector:subscribe(Predicate, N, Timeout, 0),
Res = Action(),
case snabbkaffe_collector:receive_events(Sub) of
{timeout, _} ->
{Res, timeout};
{ok, Events} ->
{Res, {ok, Events}}
end.
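%% Like wait_for_events/2, but matches emqx_stats_setstat events
%% carrying the given count_stat/max_stat pairs.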
wait_for_stats(Action, Stats) ->
Predicate = fun(Event = #{?snk_kind := emqx_stats_setstat}) ->
Stat = maps:with(
[ count_stat
, max_stat
], Event),
lists:member(Stat, Stats);
(_) ->
false
end,
N = length(Stats),
Timeout = 500,
{ok, Sub} = snabbkaffe_collector:subscribe(Predicate, N, Timeout, 0),
Res = Action(),
case snabbkaffe_collector:receive_events(Sub) of
{timeout, _} ->
{Res, timeout};
{ok, Events} ->
{Res, {ok, Events}}
end.
recv_msgs(Count) ->
recv_msgs(Count, []).

View File

@ -144,6 +144,8 @@ set_test_listenser_confs() ->
init_per_suite(Config) ->
%% CM Meck
ok = meck:new(emqx_cm, [passthrough, no_history, no_link]),
ok = meck:expect(emqx_cm, mark_channel_connected, fun(_) -> ok end),
ok = meck:expect(emqx_cm, mark_channel_disconnected, fun(_) -> ok end),
%% Access Control Meck
ok = meck:new(emqx_access_control, [passthrough, no_history, no_link]),
ok = meck:expect(emqx_access_control, authenticate,

View File

@ -32,6 +32,12 @@
conn_mod => emqx_connection,
receive_maximum => 100}}).
-define(WAIT(PATTERN, TIMEOUT, RET),
fun() ->
receive PATTERN -> RET
after TIMEOUT -> error({timeout, ?LINE}) end
end()).
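%% ?WAIT(Pattern, Timeout, Ret): receive a message matching Pattern and
%% evaluate Ret, or fail the test with a timeout error after Timeout.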
%%--------------------------------------------------------------------
%% CT callbacks
%%--------------------------------------------------------------------
@ -179,28 +185,100 @@ t_open_session_race_condition(_) ->
exit(Winner, kill),
receive {'DOWN', _, process, Winner, _} -> ok end,
ignored = gen_server:call(emqx_cm, ignore, infinity), %% sync
ignored = gen_server:call(?CM, ignore, infinity), %% sync
ok = emqx_pool:flush_async_tasks(),
?assertEqual([], emqx_cm:lookup_channels(ClientId)).
t_discard_session(_) ->
t_kick_session_discard_normal(_) ->
test_kick_session(discard, normal).
t_kick_session_discard_shutdown(_) ->
test_kick_session(discard, shutdown).
t_kick_session_discard_shutdown_with_reason(_) ->
test_kick_session(discard, {shutdown, discard}).
t_kick_session_discard_timeout(_) ->
test_kick_session(discard, timeout).
t_kick_session_discard_noproc(_) ->
test_kick_session(discard, noproc).
t_kick_session_kick_normal(_) ->
test_kick_session(kick, normal).
t_kick_session_kick_shutdown(_) ->
test_kick_session(kick, shutdown).
t_kick_session_kick_shutdown_with_reason(_) ->
test_kick_session(kick, {shutdown, discard}).
t_kick_session_kick_timeout(_) ->
test_kick_session(kick, timeout).
t_kick_session_kick_noproc(_) ->
test_kick_session(kick, noproc).
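%% Register two fake channel processes under the same client id, run the
%% kick or discard action, and check that both terminate with the expected
%% reason and that the registry ends up empty.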
test_kick_session(Action, Reason) ->
ClientId = rand_client_id(),
#{conninfo := ConnInfo} = ?ChanInfo,
ok = emqx_cm:register_channel(ClientId, self(), ConnInfo),
FakeSessionFun =
fun Loop() ->
receive
{'$gen_call', From, A} when A =:= kick orelse
A =:= discard ->
case Reason of
normal ->
gen_server:reply(From, ok);
timeout ->
%% no response to the call
Loop();
_ ->
exit(Reason)
end;
Msg ->
ct:pal("(~p) fake_session_discarded ~p", [Action, Msg]),
Loop()
end
end,
{Pid1, _} = spawn_monitor(FakeSessionFun),
{Pid2, _} = spawn_monitor(FakeSessionFun),
ok = emqx_cm:register_channel(ClientId, Pid1, ConnInfo),
ok = emqx_cm:register_channel(ClientId, Pid1, ConnInfo),
ok = emqx_cm:register_channel(ClientId, Pid2, ConnInfo),
?assertEqual([Pid1, Pid2], lists:sort(emqx_cm:lookup_channels(ClientId))),
case Reason of
noproc -> exit(Pid1, kill), exit(Pid2, kill);
_ -> ok
end,
ok = case Action of
kick -> emqx_cm:kick_session(ClientId);
discard -> emqx_cm:discard_session(ClientId)
end,
case Reason =:= timeout orelse Reason =:= noproc of
true ->
?assertEqual(killed, ?WAIT({'DOWN', _, process, Pid1, R}, 2_000, R)),
?assertEqual(killed, ?WAIT({'DOWN', _, process, Pid2, R}, 2_000, R));
false ->
?assertEqual(Reason, ?WAIT({'DOWN', _, process, Pid1, R}, 2_000, R)),
?assertEqual(Reason, ?WAIT({'DOWN', _, process, Pid2, R}, 2_000, R))
end,
ignored = gen_server:call(?CM, ignore, infinity), % sync
ok = flush_emqx_pool(),
?assertEqual([], emqx_cm:lookup_channels(ClientId)).
ok = meck:new(emqx_connection, [passthrough, no_history]),
ok = meck:expect(emqx_connection, call, fun(_, _) -> ok end),
ok = meck:expect(emqx_connection, call, fun(_, _, _) -> ok end),
ok = emqx_cm:discard_session(ClientId),
ok = emqx_cm:register_channel(ClientId, self(), ConnInfo),
ok = emqx_cm:discard_session(ClientId),
ok = emqx_cm:unregister_channel(ClientId),
ok = emqx_cm:register_channel(ClientId, self(), ConnInfo),
ok = emqx_cm:discard_session(ClientId),
ok = meck:expect(emqx_connection, call, fun(_, _) -> error(testing) end),
ok = meck:expect(emqx_connection, call, fun(_, _, _) -> error(testing) end),
ok = emqx_cm:discard_session(ClientId),
ok = emqx_cm:unregister_channel(ClientId),
ok = meck:unload(emqx_connection).
%% Channel deregistration is delegated to emqx_pool as sync tasks.
%% The emqx_pool is a pool of workers, and there is no way to know
%% which worker was picked for the last deregistration task.
%% This helper function creates a large enough number of async tasks
%% to sync with the pool workers.
%% The number of tasks should be large enough to ensure all workers have
%% the chance to work on at least one of the tasks.
flush_emqx_pool() ->
Self = self(),
L = lists:seq(1, 1000),
lists:foreach(fun(I) -> emqx_pool:async_submit(fun() -> Self ! {done, I} end, []) end, L),
lists:foreach(fun(I) -> receive {done, I} -> ok end end, L).
t_discard_session_race(_) ->
ClientId = rand_client_id(),
@ -222,37 +300,55 @@ t_discard_session_race(_) ->
t_takeover_session(_) ->
#{conninfo := ConnInfo} = ?ChanInfo,
none = emqx_cm:takeover_session(<<"clientid">>),
Parent = self(),
erlang:spawn_link(fun() ->
ok = emqx_cm:register_channel(<<"clientid">>, self(), ConnInfo),
Parent ! registered,
receive
{'$gen_call', From, {takeover, 'begin'}} ->
gen_server:reply(From, test), ok
end
end),
timer:sleep(100),
receive registered -> ok end,
{living, emqx_connection, _, test} = emqx_cm:takeover_session(<<"clientid">>),
emqx_cm:unregister_channel(<<"clientid">>).
t_kick_session(_) ->
Info = #{conninfo := ConnInfo} = ?ChanInfo,
ok = meck:new(emqx_connection, [passthrough, no_history]),
ok = meck:expect(emqx_connection, call, fun(_, _) -> test end),
ok = meck:expect(emqx_connection, call, fun(_, _, _) -> test end),
{error, not_found} = emqx_cm:kick_session(<<"clientid">>),
ok = emqx_cm:register_channel(<<"clientid">>, self(), ConnInfo),
ok = emqx_cm:insert_channel_info(<<"clientid">>, Info, []),
test = emqx_cm:kick_session(<<"clientid">>),
erlang:spawn_link(
fun() ->
ok = emqx_cm:register_channel(<<"clientid">>, self(), ConnInfo),
ok = emqx_cm:insert_channel_info(<<"clientid">>, Info, []),
timer:sleep(1000)
t_takeover_session_process_gone(_) ->
#{conninfo := ConnInfo} = ?ChanInfo,
ClientIDTcp = <<"clientidTCP">>,
ClientIDWs = <<"clientidWs">>,
ClientIDRpc = <<"clientidRPC">>,
none = emqx_cm:takeover_session(ClientIDTcp),
none = emqx_cm:takeover_session(ClientIDWs),
meck:new(emqx_connection, [passthrough, no_history]),
meck:expect(emqx_connection, call,
fun(Pid, {takeover, 'begin'}, _) ->
exit({noproc, {gen_server,call,[Pid, takeover_session]}});
(Pid, What, Args) ->
meck:passthrough([Pid, What, Args])
end),
ct:sleep(100),
test = emqx_cm:kick_session(<<"clientid">>),
ok = emqx_cm:unregister_channel(<<"clientid">>),
ok = meck:unload(emqx_connection).
ok = emqx_cm:register_channel(ClientIDTcp, self(), ConnInfo),
none = emqx_cm:takeover_session(ClientIDTcp),
meck:expect(emqx_connection, call,
fun(_Pid, {takeover, 'begin'}, _) ->
exit(noproc);
(Pid, What, Args) ->
meck:passthrough([Pid, What, Args])
end),
ok = emqx_cm:register_channel(ClientIDWs, self(), ConnInfo),
none = emqx_cm:takeover_session(ClientIDWs),
meck:expect(emqx_connection, call,
fun(Pid, {takeover, 'begin'}, _) ->
exit({'EXIT', {noproc, {gen_server,call,[Pid, takeover_session]}}});
(Pid, What, Args) ->
meck:passthrough([Pid, What, Args])
end),
ok = emqx_cm:register_channel(ClientIDRpc, self(), ConnInfo),
none = emqx_cm:takeover_session(ClientIDRpc),
emqx_cm:unregister_channel(ClientIDTcp),
emqx_cm:unregister_channel(ClientIDWs),
emqx_cm:unregister_channel(ClientIDRpc),
meck:unload(emqx_connection).
t_all_channels(_) ->
?assertEqual(true, is_list(emqx_cm:all_channels())).

View File

@ -26,7 +26,7 @@
, delete_default_app/0
, default_auth_header/0
, auth_header/2
]).
]).
request_api(Method, Url, Auth) ->
request_api(Method, Url, [], Auth, []).
@ -57,15 +57,14 @@ do_request_api(Method, Request, HttpOpts) ->
case httpc:request(Method, Request, HttpOpts, [{body_format, binary}]) of
{error, socket_closed_remotely} ->
{error, socket_closed_remotely};
{ok, {{"HTTP/1.1", Code, _}, _Headers, Return} }
when Code =:= 200 orelse Code =:= 201 ->
{ok, Return};
{ok, {{"HTTP/1.1", Code, _}, _Headers, Return} } ->
{ok, Code, Return};
{ok, {Reason, _, _}} ->
{error, Reason}
end.
get_http_data(ResponseBody) ->
maps:get(<<"data">>, emqx_json:decode(ResponseBody, [return_maps])).
emqx_json:decode(ResponseBody, [return_maps]).
auth_header(User, Pass) ->
Encoded = base64:encode_to_string(lists:append([User,":",Pass])),

View File

@ -36,6 +36,8 @@ init_per_suite(Config) ->
ok = meck:new(emqx_channel, [passthrough, no_history, no_link]),
%% Meck Cm
ok = meck:new(emqx_cm, [passthrough, no_history, no_link]),
ok = meck:expect(emqx_cm, mark_channel_connected, fun(_) -> ok end),
ok = meck:expect(emqx_cm, mark_channel_disconnected, fun(_) -> ok end),
%% Meck Limiter
ok = meck:new(emqx_limiter, [passthrough, no_history, no_link]),
%% Meck Pd

View File

@ -55,12 +55,12 @@ t_unmount(_) ->
t_replvar(_) ->
?assertEqual(undefined, replvar(undefined, #{})),
?assertEqual(<<"mount/user/clientid/">>,
replvar(<<"mount/%u/%c/">>,
replvar(<<"mount/${username}/${clientid}/">>,
#{clientid => <<"clientid">>,
username => <<"user">>
})),
?assertEqual(<<"mount/%u/clientid/">>,
replvar(<<"mount/%u/%c/">>,
?assertEqual(<<"mount/${username}/clientid/">>,
replvar(<<"mount/${username}/${clientid}/">>,
#{clientid => <<"clientid">>,
username => undefined
})).

View File

@ -62,79 +62,104 @@ t_conn_stats(_) ->
t_tcp_sock_passive(_) ->
with_client(fun(CPid) -> CPid ! {tcp_passive, sock} end, []).
t_message_expiry_interval_1(_) ->
ClientA = message_expiry_interval_init(),
[message_expiry_interval_expired(ClientA, QoS) || QoS <- [0,1,2]],
emqtt:stop(ClientA).
t_message_expiry_interval(_) ->
{CPublish, CControl} = message_expiry_interval_init(),
[message_expiry_interval_expired(CPublish, CControl, QoS) || QoS <- [0,1,2]],
emqtt:stop(CPublish),
emqtt:stop(CControl).
t_message_expiry_interval_2(_) ->
ClientA = message_expiry_interval_init(),
[message_expiry_interval_not_expired(ClientA, QoS) || QoS <- [0,1,2]],
emqtt:stop(ClientA).
t_message_not_expiry_interval(_) ->
{CPublish, CControl} = message_expiry_interval_init(),
[message_expiry_interval_not_expired(CPublish, CControl, QoS) || QoS <- [0,1,2]],
emqtt:stop(CPublish),
emqtt:stop(CControl).
message_expiry_interval_init() ->
{ok, ClientA} = emqtt:start_link([{proto_ver,v5},
{clientid, <<"client-a">>},
{ok, CPublish} = emqtt:start_link([{proto_ver,v5},
{clientid, <<"Client-Publish">>},
{clean_start, false},
{properties, #{'Session-Expiry-Interval' => 360}}]),
{ok, ClientB} = emqtt:start_link([{proto_ver,v5},
{clientid, <<"client-b">>},
{ok, CVerify} = emqtt:start_link([{proto_ver,v5},
{clientid, <<"Client-Verify">>},
{clean_start, false},
{properties, #{'Session-Expiry-Interval' => 360}}]),
{ok, _} = emqtt:connect(ClientA),
{ok, _} = emqtt:connect(ClientB),
%% subscribe and disconnect client-b
emqtt:subscribe(ClientB, <<"t/a">>, 1),
emqtt:stop(ClientB),
ClientA.
{ok, CControl} = emqtt:start_link([{proto_ver,v5},
{clientid, <<"Client-Control">>},
{clean_start, false},
{properties, #{'Session-Expiry-Interval' => 360}}]),
{ok, _} = emqtt:connect(CPublish),
{ok, _} = emqtt:connect(CVerify),
{ok, _} = emqtt:connect(CControl),
%% subscribe with both clients, then disconnect Client-Verify
emqtt:subscribe(CControl, <<"t/a">>, 1),
emqtt:subscribe(CVerify, <<"t/a">>, 1),
emqtt:stop(CVerify),
{CPublish, CControl}.
message_expiry_interval_expired(ClientA, QoS) ->
message_expiry_interval_expired(CPublish, CControl, QoS) ->
ct:pal("~p ~p", [?FUNCTION_NAME, QoS]),
%% publish to t/a and wait for the message to expire
emqtt:publish(ClientA, <<"t/a">>, #{'Message-Expiry-Interval' => 1}, <<"this will be purged in 1s">>, [{qos, QoS}]),
ct:sleep(1500),
emqtt:publish(CPublish, <<"t/a">>, #{'Message-Expiry-Interval' => 1},
<<"this will be purged in 1s">>, [{qos, QoS}]),
%% CControl confirms the publish has already been stored by the broker.
receive
{publish,#{client_pid := CControl, topic := <<"t/a">>}} ->
ok
after 1000 ->
ct:fail(should_receive_publish)
end,
ct:sleep(1100),
%% resume the session for client-b
{ok, ClientB1} = emqtt:start_link([{proto_ver,v5},
{clientid, <<"client-b">>},
%% resume the session for Client-Verify
{ok, CVerify} = emqtt:start_link([{proto_ver,v5},
{clientid, <<"Client-Verify">>},
{clean_start, false},
{properties, #{'Session-Expiry-Interval' => 360}}]),
{ok, _} = emqtt:connect(ClientB1),
{ok, _} = emqtt:connect(CVerify),
%% verify client-b could not receive the publish message
%% verify Client-Verify could not receive the publish message
receive
{publish,#{client_pid := ClientB1, topic := <<"t/a">>}} ->
{publish,#{client_pid := CVerify, topic := <<"t/a">>}} ->
ct:fail(should_have_expired)
after 300 ->
ok
end,
emqtt:stop(ClientB1).
emqtt:stop(CVerify).
message_expiry_interval_not_expired(ClientA, QoS) ->
message_expiry_interval_not_expired(CPublish, CControl, QoS) ->
ct:pal("~p ~p", [?FUNCTION_NAME, QoS]),
%% publish to t/a
emqtt:publish(ClientA, <<"t/a">>, #{'Message-Expiry-Interval' => 20}, <<"this will be purged in 1s">>, [{qos, QoS}]),
emqtt:publish(CPublish, <<"t/a">>, #{'Message-Expiry-Interval' => 20},
<<"this will be purged in 20s">>, [{qos, QoS}]),
%% wait for 1s and then resume the session for client-b; the message should not expire
%% CControl confirms the publish has already been stored by the broker.
receive
{publish,#{client_pid := CControl, topic := <<"t/a">>}} ->
ok
after 1000 ->
ct:fail(should_receive_publish)
end,
%% wait for 1.2s and then resume the session for Client-Verify; the message should not expire
%% as Message-Expiry-Interval = 20s
ct:sleep(1000),
{ok, ClientB1} = emqtt:start_link([{proto_ver,v5},
{clientid, <<"client-b">>},
ct:sleep(1200),
{ok, CVerify} = emqtt:start_link([{proto_ver,v5},
{clientid, <<"Client-Verify">>},
{clean_start, false},
{properties, #{'Session-Expiry-Interval' => 360}}]),
{ok, _} = emqtt:connect(ClientB1),
{ok, _} = emqtt:connect(CVerify),
%% verify client-b could receive the publish message and the Message-Expiry-Interval is set
%% verify Client-Verify could receive the publish message and the Message-Expiry-Interval is set
receive
{publish,#{client_pid := ClientB1, topic := <<"t/a">>,
{publish,#{client_pid := CVerify, topic := <<"t/a">>,
properties := #{'Message-Expiry-Interval' := MsgExpItvl}}}
when MsgExpItvl < 20 -> ok;
when MsgExpItvl =< 20 -> ok;
{publish, _} = Msg ->
ct:fail({incorrect_publish, Msg})
after 300 ->
ct:fail(no_publish_received)
end,
emqtt:stop(ClientB1).
emqtt:stop(CVerify).
with_client(TestFun, _Options) ->
ClientId = <<"t_conn">>,

View File

@ -113,6 +113,9 @@ init_per_group(snabbkaffe, Config) ->
[ {kill_connection_process, true} | Config];
init_per_group(gc_tests, Config) ->
%% We need to make sure the system does not interfere with this test group.
lists:foreach(fun(ClientId) ->
maybe_kill_connection_process(ClientId, [{kill_connection_process, true}])
end, emqx_cm:all_client_ids()),
emqx_common_test_helpers:stop_apps([]),
SessionMsgEts = gc_tests_session_store,
MsgEts = gc_tests_msg_store,
@ -230,50 +233,92 @@ receive_messages(Count, Msgs) ->
maybe_kill_connection_process(ClientId, Config) ->
case ?config(kill_connection_process, Config) of
true ->
[ConnectionPid] = emqx_cm:lookup_channels(ClientId),
case emqx_cm:lookup_channels(ClientId) of
[] ->
ok;
[ConnectionPid] ->
?assert(is_pid(ConnectionPid)),
Ref = monitor(process, ConnectionPid),
ConnectionPid ! die_if_test,
receive {'DOWN', Ref, process, ConnectionPid, normal} -> ok
after 3000 -> error(process_did_not_die)
end,
wait_for_cm_unregister(ClientId)
end;
false ->
ok
end.
snabbkaffe_sync_publish(Topic, Payloads, Config) ->
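%% Poll the channel registry until ClientId is gone,
%% retrying up to 10 times (about one second).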
wait_for_cm_unregister(ClientId) ->
wait_for_cm_unregister(ClientId, 10).
wait_for_cm_unregister(_ClientId, 0) ->
error(cm_did_not_unregister);
wait_for_cm_unregister(ClientId, N) ->
case emqx_cm:lookup_channels(ClientId) of
[] -> ok;
[_] -> timer:sleep(100), wait_for_cm_unregister(ClientId, N - 1)
end.
snabbkaffe_sync_publish(Topic, Payloads) ->
Fun = fun(Client, Payload) ->
?check_trace(
begin
?wait_async_action( {ok, _} = emqtt:publish(Client, Topic, Payload, 2)
, #{?snk_kind := ps_persist_msg, payload := Payload}
)
end,
do_publish(Payloads, Fun, Config).
fun(_, _Trace) -> ok end)
end,
do_publish(Payloads, Fun, true).
publish(Topic, Payloads, Config) ->
publish(Topic, Payloads) ->
publish(Topic, Payloads, false).
publish(Topic, Payloads, WaitForUnregister) ->
Fun = fun(Client, Payload) ->
{ok, _} = emqtt:publish(Client, Topic, Payload, 2)
end,
do_publish(Payloads, Fun, Config).
do_publish(Payloads, Fun, WaitForUnregister).
do_publish(Payloads = [_|_], PublishFun, Config) ->
do_publish(Payloads = [_|_], PublishFun, WaitForUnregister) ->
%% Publish from another process to avoid connection confusion.
{Pid, Ref} =
spawn_monitor(
fun() ->
%% For convenience, always publish using tcp.
%% The publish path is not what we are testing.
ClientID = <<"ps_SUITE_publisher">>,
{ok, Client} = emqtt:start_link([ {proto_ver, v5}
, {clientid, ClientID}
, {port, 1883} ]),
{ok, _} = emqtt:connect(Client),
lists:foreach(fun(Payload) -> PublishFun(Client, Payload) end, Payloads),
ok = emqtt:disconnect(Client)
ok = emqtt:disconnect(Client),
%% Snabbkaffe sometimes fails unless all processes are gone.
case WaitForUnregister of
false ->
ok;
true ->
case emqx_cm:lookup_channels(ClientID) of
[] ->
ok;
[ConnectionPid] ->
?assert(is_pid(ConnectionPid)),
Ref1 = monitor(process, ConnectionPid),
receive {'DOWN', Ref1, process, ConnectionPid, _} -> ok
after 3000 -> error(process_did_not_die)
end,
wait_for_cm_unregister(ClientID)
end
end
end),
receive
{'DOWN', Ref, process, Pid, normal} -> ok;
{'DOWN', Ref, process, Pid, What} -> error({failed_publish, What})
end;
do_publish(Payload, PublishFun, Config) ->
do_publish([Payload], PublishFun, Config).
do_publish(Payload, PublishFun, WaitForUnregister) ->
do_publish([Payload], PublishFun, WaitForUnregister).
%%--------------------------------------------------------------------
%% Test Cases
@ -297,7 +342,7 @@ t_connect_session_expiry_interval(Config) ->
maybe_kill_connection_process(ClientId, Config),
publish(Topic, Payload, Config),
publish(Topic, Payload),
{ok, Client2} = emqtt:start_link([ {clientid, ClientId},
{proto_ver, v5},
@ -356,6 +401,8 @@ t_cancel_on_disconnect(Config) ->
{ok, _} = emqtt:ConnFun(Client1),
ok = emqtt:disconnect(Client1, 0, #{'Session-Expiry-Interval' => 0}),
wait_for_cm_unregister(ClientId),
{ok, Client2} = emqtt:start_link([ {clientid, ClientId},
{proto_ver, v5},
{clean_start, false},
@ -382,6 +429,8 @@ t_persist_on_disconnect(Config) ->
%% Strangely enough, the disconnect is reported as successful by emqtt.
ok = emqtt:disconnect(Client1, 0, #{'Session-Expiry-Interval' => 30}),
wait_for_cm_unregister(ClientId),
{ok, Client2} = emqtt:start_link([ {clientid, ClientId},
{proto_ver, v5},
{clean_start, false},
@ -424,7 +473,7 @@ t_process_dies_session_expires(Config) ->
maybe_kill_connection_process(ClientId, Config),
ok = publish(Topic, [Payload], Config),
ok = publish(Topic, [Payload]),
SessionId =
case ?config(persistent_store_enabled, Config) of
@ -467,7 +516,8 @@ t_process_dies_session_expires(Config) ->
%% The session should be a fresh one
{persistent, NewSession} = emqx_persistent_session:lookup(ClientId),
?assertNotEqual(SessionId, emqx_session:info(id, NewSession)),
%% The old session should now either be marked as abandoned or already be garbage collected.
%% The old session should now either
%% be marked as abandoned or already be garbage collected.
?assertMatch([], emqx_persistent_session:pending(SessionId));
false ->
skip
@ -498,7 +548,7 @@ t_publish_while_client_is_gone(Config) ->
ok = emqtt:disconnect(Client1),
maybe_kill_connection_process(ClientId, Config),
ok = publish(Topic, [Payload1, Payload2], Config),
ok = publish(Topic, [Payload1, Payload2]),
{ok, Client2} = emqtt:start_link([ {proto_ver, v5},
{clientid, ClientId},
@ -506,8 +556,9 @@ t_publish_while_client_is_gone(Config) ->
{clean_start, false}
| Config]),
{ok, _} = emqtt:ConnFun(Client2),
[Msg1] = receive_messages(1),
[Msg2] = receive_messages(1),
Msgs = receive_messages(2),
?assertEqual(length(Msgs), 2),
[Msg2, Msg1] = Msgs,
?assertEqual({ok, iolist_to_binary(Payload1)}, maps:find(payload, Msg1)),
?assertEqual({ok, 2}, maps:find(qos, Msg1)),
?assertEqual({ok, iolist_to_binary(Payload2)}, maps:find(payload, Msg2)),
@ -544,7 +595,7 @@ t_clean_start_drops_subscriptions(Config) ->
maybe_kill_connection_process(ClientId, Config),
%% 2.
ok = publish(Topic, Payload1, Config),
ok = publish(Topic, Payload1),
%% 3.
{ok, Client2} = emqtt:start_link([ {proto_ver, v5},
@ -556,7 +607,7 @@ t_clean_start_drops_subscriptions(Config) ->
?assertEqual(0, client_info(session_present, Client2)),
{ok, _, [2]} = emqtt:subscribe(Client2, STopic, qos2),
ok = publish(Topic, Payload2, Config),
ok = publish(Topic, Payload2),
[Msg1] = receive_messages(1),
?assertEqual({ok, iolist_to_binary(Payload2)}, maps:find(payload, Msg1)),
@ -571,7 +622,7 @@ t_clean_start_drops_subscriptions(Config) ->
| Config]),
{ok, _} = emqtt:ConnFun(Client3),
ok = publish(Topic, Payload3, Config),
ok = publish(Topic, Payload3),
[Msg2] = receive_messages(1),
?assertEqual({ok, iolist_to_binary(Payload3)}, maps:find(payload, Msg2)),
@ -625,7 +676,7 @@ t_multiple_subscription_matches(Config) ->
maybe_kill_connection_process(ClientId, Config),
publish(Topic, Payload, Config),
publish(Topic, Payload),
{ok, Client2} = emqtt:start_link([ {clientid, ClientId},
{proto_ver, v5},
@ -675,9 +726,9 @@ t_lost_messages_because_of_gc(Config) ->
{ok, _, [2]} = emqtt:subscribe(Client1, STopic, qos2),
emqtt:disconnect(Client1),
maybe_kill_connection_process(ClientId, Config),
publish(Topic, Payload1, Config),
publish(Topic, Payload1),
timer:sleep(2 * Retain),
publish(Topic, Payload2, Config),
publish(Topic, Payload2),
emqx_persistent_session_gc:message_gc_worker(),
{ok, Client2} = emqtt:start_link([ {clientid, ClientId},
{clean_start, false},
@ -747,7 +798,6 @@ check_snabbkaffe_vanilla(Trace) ->
t_snabbkaffe_vanilla_stages(Config) ->
%% Test that all stages of session resume work in the simplest case
process_flag(trap_exit, true),
ConnFun = ?config(conn_fun, Config),
ClientId = ?config(client_id, Config),
EmqttOpts = [ {proto_ver, v5},
@ -772,7 +822,6 @@ t_snabbkaffe_vanilla_stages(Config) ->
t_snabbkaffe_pending_messages(Config) ->
%% Make sure pending messages are fetched during the init stage.
process_flag(trap_exit, true),
ConnFun = ?config(conn_fun, Config),
ClientId = ?config(client_id, Config),
Topic = ?config(topic, Config),
@ -790,7 +839,7 @@ t_snabbkaffe_pending_messages(Config) ->
?check_trace(
begin
snabbkaffe_sync_publish(Topic, Payloads, Config),
snabbkaffe_sync_publish(Topic, Payloads),
{ok, Client2} = emqtt:start_link([{clean_start, false} | EmqttOpts]),
{ok, _} = emqtt:ConnFun(Client2),
Msgs = receive_messages(length(Payloads)),
@ -812,7 +861,6 @@ t_snabbkaffe_pending_messages(Config) ->
t_snabbkaffe_buffered_messages(Config) ->
%% Make sure to buffer messages during startup.
process_flag(trap_exit, true),
ConnFun = ?config(conn_fun, Config),
ClientId = ?config(client_id, Config),
Topic = ?config(topic, Config),
@ -829,7 +877,7 @@ t_snabbkaffe_buffered_messages(Config) ->
ok = emqtt:disconnect(Client1),
maybe_kill_connection_process(ClientId, Config),
publish(Topic, Payloads1, Config),
publish(Topic, Payloads1),
?check_trace(
begin
@ -837,8 +885,8 @@ t_snabbkaffe_buffered_messages(Config) ->
?force_ordering( #{ ?snk_kind := ps_worker_deliver },
#{ ?snk_kind := ps_resume_end }),
spawn_link(fun() ->
?block_until(#{ ?snk_kind := ps_marker_pendings_msgs }, infinity, 5000),
publish(Topic, Payloads2, Config)
?block_until(#{?snk_kind := ps_marker_pendings_msgs}, infinity, 5000),
publish(Topic, Payloads2, true)
end),
{ok, Client2} = emqtt:start_link([{clean_start, false} | EmqttOpts]),
{ok, _} = emqtt:ConnFun(Client2),


@ -112,7 +112,8 @@ t_out(_) ->
t_out_2(_) ->
{empty, {pqueue, [{-1, {queue, [a], [], 1}}]}} = ?PQ:out(0, ?PQ:from_list([{1, a}])),
{{value, a}, {queue, [], [], 0}} = ?PQ:out(1, ?PQ:from_list([{1, a}])),
{{value, a}, {pqueue, [{-1, {queue, [], [b], 1}}]}} = ?PQ:out(1, ?PQ:from_list([{1, a}, {1, b}])),
{{value, a}, {pqueue, [{-1, {queue, [], [b], 1}}]}} =
?PQ:out(1, ?PQ:from_list([{1, a}, {1, b}])),
{{value, a}, {queue, [b], [], 1}} = ?PQ:out(1, ?PQ:from_list([{1, a}, {0, b}])).
t_out_p(_) ->


@ -0,0 +1,318 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_trace_SUITE).
%% API
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("emqx/include/emqx.hrl").
-record(emqx_trace, {name, type, filter, enable = true, start_at, end_at}).
%%--------------------------------------------------------------------
%% Setups
%%--------------------------------------------------------------------
all() ->
emqx_common_test_helpers:all(?MODULE).
init_per_suite(Config) ->
application:load(emqx_plugin_libs),
emqx_common_test_helpers:start_apps([]),
Config.
end_per_suite(_Config) ->
emqx_common_test_helpers:stop_apps([]).
t_base_create_delete(_Config) ->
ok = emqx_trace:clear(),
Now = erlang:system_time(second),
Start = to_rfc3339(Now),
End = to_rfc3339(Now + 30 * 60),
Name = <<"name1">>,
ClientId = <<"test-device">>,
Trace = #{
name => Name,
type => <<"clientid">>,
clientid => ClientId,
start_at => Start,
end_at => End
},
AnotherTrace = Trace#{name => <<"anotherTrace">>},
ok = emqx_trace:create(Trace),
?assertEqual({error, {already_existed, Name}}, emqx_trace:create(Trace)),
?assertEqual({error, {duplicate_condition, Name}}, emqx_trace:create(AnotherTrace)),
[TraceRec] = emqx_trace:list(),
Expect = #emqx_trace{
name = Name,
type = clientid,
filter = ClientId,
start_at = Now,
end_at = Now + 30 * 60
},
?assertEqual(Expect, TraceRec),
ExpectFormat = [
#{
filter => <<"test-device">>,
enable => true,
type => clientid,
name => <<"name1">>,
start_at => Now,
end_at => Now + 30 * 60
}
],
?assertEqual(ExpectFormat, emqx_trace:format([TraceRec])),
?assertEqual(ok, emqx_trace:delete(Name)),
?assertEqual({error, not_found}, emqx_trace:delete(Name)),
?assertEqual([], emqx_trace:list()),
ok.
t_create_size_max(_Config) ->
emqx_trace:clear(),
lists:map(fun(Seq) ->
Name = list_to_binary("name" ++ integer_to_list(Seq)),
Trace = [{name, Name}, {type, <<"topic">>},
{topic, list_to_binary("/x/y/" ++ integer_to_list(Seq))}],
ok = emqx_trace:create(Trace)
end, lists:seq(1, 30)),
Trace31 = [{<<"name">>, <<"name31">>},
{<<"type">>, <<"topic">>}, {<<"topic">>, <<"/x/y/31">>}],
{error, _} = emqx_trace:create(Trace31),
ok = emqx_trace:delete(<<"name30">>),
ok = emqx_trace:create(Trace31),
?assertEqual(30, erlang:length(emqx_trace:list())),
ok.
t_create_failed(_Config) ->
ok = emqx_trace:clear(),
UnknownField = [{<<"unknown">>, 12}],
{error, Reason1} = emqx_trace:create(UnknownField),
?assertEqual(<<"unknown field: {unknown,12}">>, iolist_to_binary(Reason1)),
InvalidTopic = [{<<"topic">>, "#/#//"}],
{error, Reason2} = emqx_trace:create(InvalidTopic),
?assertEqual(<<"topic: #/#// invalid by function_clause">>, iolist_to_binary(Reason2)),
InvalidStart = [{<<"start_at">>, <<"2021-12-3:12">>}],
{error, Reason3} = emqx_trace:create(InvalidStart),
?assertEqual(<<"The rfc3339 specification not satisfied: 2021-12-3:12">>,
iolist_to_binary(Reason3)),
InvalidEnd = [{<<"end_at">>, <<"2021-12-3:12">>}],
{error, Reason4} = emqx_trace:create(InvalidEnd),
?assertEqual(<<"The rfc3339 specification not satisfied: 2021-12-3:12">>,
iolist_to_binary(Reason4)),
{error, Reason7} = emqx_trace:create([{<<"name">>, <<"test">>}, {<<"type">>, <<"clientid">>}]),
?assertEqual(<<"topic/clientid/ip_address filter required">>, iolist_to_binary(Reason7)),
InvalidPackets4 = [{<<"name">>, <<"/test">>}, {<<"clientid">>, <<"t">>},
{<<"type">>, <<"clientid">>}],
{error, Reason9} = emqx_trace:create(InvalidPackets4),
?assertEqual(<<"name cannot contain /">>, iolist_to_binary(Reason9)),
?assertEqual({error, "type=[topic,clientid,ip_address] required"},
emqx_trace:create([{<<"name">>, <<"test-name">>}, {<<"clientid">>, <<"good">>}])),
?assertEqual({error, "incorrect type: only support clientid/topic/ip_address"},
emqx_trace:create([{<<"name">>, <<"test-name">>},
{<<"clientid">>, <<"good">>}, {<<"type">>, <<"typeerror">> }])),
?assertEqual({error, "ip address: einval"},
emqx_trace:create([{<<"ip_address">>, <<"test-name">>}])),
ok.
t_create_default(_Config) ->
ok = emqx_trace:clear(),
{error, "name required"} = emqx_trace:create([]),
ok = emqx_trace:create([{<<"name">>, <<"test-name">>},
{<<"type">>, <<"clientid">>}, {<<"clientid">>, <<"good">>}]),
[#emqx_trace{name = <<"test-name">>}] = emqx_trace:list(),
ok = emqx_trace:clear(),
Trace = [
{<<"name">>, <<"test-name">>},
{<<"type">>, <<"topic">>},
{<<"topic">>, <<"/x/y/z">>},
{<<"start_at">>, <<"2021-10-28T10:54:47+08:00">>},
{<<"end_at">>, <<"2021-10-27T10:54:47+08:00">>}
],
{error, "end_at time has already passed"} = emqx_trace:create(Trace),
Now = erlang:system_time(second),
Trace2 = [
{<<"name">>, <<"test-name">>},
{<<"type">>, <<"topic">>},
{<<"topic">>, <<"/x/y/z">>},
{<<"start_at">>, to_rfc3339(Now + 10)},
{<<"end_at">>, to_rfc3339(Now + 3)}
],
{error, "failed by start_at >= end_at"} = emqx_trace:create(Trace2),
ok = emqx_trace:create([{<<"name">>, <<"test-name">>},
{<<"type">>, <<"topic">>}, {<<"topic">>, <<"/x/y/z">>}]),
[#emqx_trace{start_at = Start, end_at = End}] = emqx_trace:list(),
?assertEqual(10 * 60, End - Start),
?assertEqual(true, Start - erlang:system_time(second) < 5),
ok.
t_update_enable(_Config) ->
ok = emqx_trace:clear(),
Name = <<"test-name">>,
Now = erlang:system_time(second),
End = list_to_binary(calendar:system_time_to_rfc3339(Now + 2)),
ok = emqx_trace:create([{<<"name">>, Name}, {<<"type">>, <<"topic">>},
{<<"topic">>, <<"/x/y/z">>}, {<<"end_at">>, End}]),
[#emqx_trace{enable = Enable}] = emqx_trace:list(),
?assertEqual(Enable, true),
ok = emqx_trace:update(Name, false),
[#emqx_trace{enable = false}] = emqx_trace:list(),
ok = emqx_trace:update(Name, false),
[#emqx_trace{enable = false}] = emqx_trace:list(),
ok = emqx_trace:update(Name, true),
[#emqx_trace{enable = true}] = emqx_trace:list(),
ok = emqx_trace:update(Name, false),
[#emqx_trace{enable = false}] = emqx_trace:list(),
?assertEqual({error, not_found}, emqx_trace:update(<<"Name not found">>, true)),
ct:sleep(2100),
?assertEqual({error, finished}, emqx_trace:update(Name, true)),
ok.
t_load_state(_Config) ->
emqx_trace:clear(),
load(),
Now = erlang:system_time(second),
Running = [{<<"name">>, <<"Running">>}, {<<"type">>, <<"topic">>},
{<<"topic">>, <<"/x/y/1">>}, {<<"start_at">>, to_rfc3339(Now - 1)},
{<<"end_at">>, to_rfc3339(Now + 2)}],
Waiting = [{<<"name">>, <<"Waiting">>}, {<<"type">>, <<"topic">>},
{<<"topic">>, <<"/x/y/2">>}, {<<"start_at">>, to_rfc3339(Now + 3)},
{<<"end_at">>, to_rfc3339(Now + 8)}],
Finished = [{<<"name">>, <<"Finished">>}, {<<"type">>, <<"topic">>},
{<<"topic">>, <<"/x/y/3">>}, {<<"start_at">>, to_rfc3339(Now - 5)},
{<<"end_at">>, to_rfc3339(Now)}],
ok = emqx_trace:create(Running),
ok = emqx_trace:create(Waiting),
{error, "end_at time has already passed"} = emqx_trace:create(Finished),
Traces = emqx_trace:format(emqx_trace:list()),
?assertEqual(2, erlang:length(Traces)),
Enables = lists:map(fun(#{name := Name, enable := Enable}) -> {Name, Enable} end, Traces),
ExpectEnables = [{<<"Running">>, true}, {<<"Waiting">>, true}],
?assertEqual(ExpectEnables, lists:sort(Enables)),
ct:sleep(3500),
Traces2 = emqx_trace:format(emqx_trace:list()),
?assertEqual(2, erlang:length(Traces2)),
Enables2 = lists:map(fun(#{name := Name, enable := Enable}) -> {Name, Enable} end, Traces2),
ExpectEnables2 = [{<<"Running">>, false}, {<<"Waiting">>, true}],
?assertEqual(ExpectEnables2, lists:sort(Enables2)),
unload(),
ok.
t_client_event(_Config) ->
application:set_env(emqx, allow_anonymous, true),
emqx_trace:clear(),
ClientId = <<"client-test">>,
load(),
Now = erlang:system_time(second),
Start = to_rfc3339(Now),
Name = <<"test_client_id_event">>,
ok = emqx_trace:create([{<<"name">>, Name},
{<<"type">>, <<"clientid">>}, {<<"clientid">>, ClientId}, {<<"start_at">>, Start}]),
ct:sleep(200),
{ok, Client} = emqtt:start_link([{clean_start, true}, {clientid, ClientId}]),
{ok, _} = emqtt:connect(Client),
emqtt:ping(Client),
ok = emqtt:publish(Client, <<"/test">>, #{}, <<"1">>, [{qos, 0}]),
ok = emqtt:publish(Client, <<"/test">>, #{}, <<"2">>, [{qos, 0}]),
ct:sleep(200),
ok = emqx_trace:create([{<<"name">>, <<"test_topic">>},
{<<"type">>, <<"topic">>}, {<<"topic">>, <<"/test">>}, {<<"start_at">>, Start}]),
ct:sleep(200),
{ok, Bin} = file:read_file(emqx_trace:log_file(Name, Now)),
ok = emqtt:publish(Client, <<"/test">>, #{}, <<"3">>, [{qos, 0}]),
ok = emqtt:publish(Client, <<"/test">>, #{}, <<"4">>, [{qos, 0}]),
ok = emqtt:disconnect(Client),
ct:sleep(200),
{ok, Bin2} = file:read_file(emqx_trace:log_file(Name, Now)),
{ok, Bin3} = file:read_file(emqx_trace:log_file(<<"test_topic">>, Now)),
ct:pal("Bin ~p Bin2 ~p Bin3 ~p", [byte_size(Bin), byte_size(Bin2), byte_size(Bin3)]),
?assert(erlang:byte_size(Bin) > 0),
?assert(erlang:byte_size(Bin) < erlang:byte_size(Bin2)),
?assert(erlang:byte_size(Bin3) > 0),
unload(),
ok.
t_get_log_filename(_Config) ->
ok = emqx_trace:clear(),
load(),
Now = erlang:system_time(second),
Start = calendar:system_time_to_rfc3339(Now),
End = calendar:system_time_to_rfc3339(Now + 2),
Name = <<"name1">>,
Trace = [
{<<"name">>, Name},
{<<"type">>, <<"ip_address">>},
{<<"ip_address">>, <<"127.0.0.1">>},
{<<"start_at">>, list_to_binary(Start)},
{<<"end_at">>, list_to_binary(End)}
],
ok = emqx_trace:create(Trace),
?assertEqual({error, not_found}, emqx_trace:get_trace_filename(<<"test">>)),
?assertEqual(ok, element(1, emqx_trace:get_trace_filename(Name))),
ct:sleep(3000),
?assertEqual(ok, element(1, emqx_trace:get_trace_filename(Name))),
unload(),
ok.
t_trace_file(_Config) ->
FileName = "test.log",
Content = <<"test \n test">>,
TraceDir = emqx_trace:trace_dir(),
File = filename:join(TraceDir, FileName),
ok = file:write_file(File, Content),
{ok, Node, Bin} = emqx_trace:trace_file(FileName),
?assertEqual(Node, atom_to_list(node())),
?assertEqual(Content, Bin),
ok = file:delete(File),
ok.
t_download_log(_Config) ->
emqx_trace:clear(),
load(),
ClientId = <<"client-test">>,
Now = erlang:system_time(second),
Start = to_rfc3339(Now),
Name = <<"test_client_id">>,
ok = emqx_trace:create([{<<"name">>, Name},
{<<"type">>, <<"clientid">>}, {<<"clientid">>, ClientId}, {<<"start_at">>, Start}]),
{ok, Client} = emqtt:start_link([{clean_start, true}, {clientid, ClientId}]),
{ok, _} = emqtt:connect(Client),
[begin _ = emqtt:ping(Client) end ||_ <- lists:seq(1, 5)],
ct:sleep(100),
{ok, ZipFile} = emqx_trace_api:download_zip_log(#{name => Name}, []),
?assert(filelib:file_size(ZipFile) > 0),
ok = emqtt:disconnect(Client),
unload(),
ok.
to_rfc3339(Second) ->
list_to_binary(calendar:system_time_to_rfc3339(Second)).
load() ->
emqx_trace:start_link().
unload() ->
gen_server:stop(emqx_trace).


@ -0,0 +1,191 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2019-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_trace_handler_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
-define(CLIENT, [{host, "localhost"},
{clientid, <<"client">>},
{username, <<"testuser">>},
{password, <<"pass">>}
]).
all() -> [t_trace_clientid, t_trace_topic, t_trace_ip_address].
init_per_suite(Config) ->
emqx_common_test_helpers:boot_modules(all),
emqx_common_test_helpers:start_apps([]),
Config.
end_per_suite(_Config) ->
emqx_common_test_helpers:stop_apps([]).
init_per_testcase(t_trace_clientid, Config) ->
Config;
init_per_testcase(_Case, Config) ->
ok = emqx_logger:set_log_level(debug),
_ = [logger:remove_handler(Id) ||#{id := Id} <- emqx_trace_handler:running()],
Config.
end_per_testcase(_Case, _Config) ->
ok = emqx_logger:set_log_level(warning),
ok.
t_trace_clientid(_Config) ->
%% Start tracing
emqx_logger:set_log_level(error),
{error, _} = emqx_trace_handler:install(clientid, <<"client">>, debug, "tmp/client.log"),
emqx_logger:set_log_level(debug),
%% Also install a handler for a clientid given as a list (string)
ok = emqx_trace_handler:install(clientid, "client", debug, "tmp/client.log"),
ok = emqx_trace_handler:install(clientid, <<"client2">>, all, "tmp/client2.log"),
ok = emqx_trace_handler:install(clientid, <<"client3">>, all, "tmp/client3.log"),
{error, {invalid_log_level, bad_level}} =
emqx_trace_handler:install(clientid, <<"client4">>, bad_level, "tmp/client4.log"),
{error, {handler_not_added, {file_error, ".", eisdir}}} =
emqx_trace_handler:install(clientid, <<"client5">>, debug, "."),
ct:sleep(100),
%% Verify the tracing files exist
?assert(filelib:is_regular("tmp/client.log")),
?assert(filelib:is_regular("tmp/client2.log")),
?assert(filelib:is_regular("tmp/client3.log")),
%% Get current traces
?assertMatch([#{type := clientid, filter := "client", name := <<"client">>,
level := debug, dst := "tmp/client.log"},
#{type := clientid, filter := "client2", name := <<"client2">>
, level := debug, dst := "tmp/client2.log"},
#{type := clientid, filter := "client3", name := <<"client3">>,
level := debug, dst := "tmp/client3.log"}
], emqx_trace_handler:running()),
%% Client with clientid = "client" publishes a "hi" message to "a/b/c".
{ok, T} = emqtt:start_link(?CLIENT),
emqtt:connect(T),
emqtt:publish(T, <<"a/b/c">>, <<"hi">>),
emqtt:ping(T),
ct:sleep(200),
%% Verify messages are logged to "tmp/client.log" but not "tmp/client2.log".
{ok, Bin} = file:read_file("tmp/client.log"),
?assertNotEqual(nomatch, binary:match(Bin, [<<"CONNECT">>])),
?assertNotEqual(nomatch, binary:match(Bin, [<<"CONNACK">>])),
?assertNotEqual(nomatch, binary:match(Bin, [<<"PUBLISH">>])),
?assertNotEqual(nomatch, binary:match(Bin, [<<"PINGREQ">>])),
?assert(filelib:file_size("tmp/client2.log") == 0),
%% Stop tracing
ok = emqx_trace_handler:uninstall(clientid, <<"client">>),
ok = emqx_trace_handler:uninstall(clientid, <<"client2">>),
ok = emqx_trace_handler:uninstall(clientid, <<"client3">>),
emqtt:disconnect(T),
?assertEqual([], emqx_trace_handler:running()).
t_trace_topic(_Config) ->
{ok, T} = emqtt:start_link(?CLIENT),
emqtt:connect(T),
%% Start tracing
emqx_logger:set_log_level(debug),
ok = emqx_trace_handler:install(topic, <<"x/#">>, all, "tmp/topic_trace_x.log"),
ok = emqx_trace_handler:install(topic, <<"y/#">>, all, "tmp/topic_trace_y.log"),
ct:sleep(100),
%% Verify the tracing files exist
?assert(filelib:is_regular("tmp/topic_trace_x.log")),
?assert(filelib:is_regular("tmp/topic_trace_y.log")),
%% Get current traces
?assertMatch([#{type := topic, filter := <<"x/#">>,
level := debug, dst := "tmp/topic_trace_x.log", name := <<"x/#">>},
#{type := topic, filter := <<"y/#">>,
name := <<"y/#">>, level := debug, dst := "tmp/topic_trace_y.log"}
],
emqx_trace_handler:running()),
%% Client with clientid = "client" publishes "hi1" and "hi2" messages to "x/y/z".
emqtt:publish(T, <<"x/y/z">>, <<"hi1">>),
emqtt:publish(T, <<"x/y/z">>, <<"hi2">>),
emqtt:subscribe(T, <<"x/y/z">>),
emqtt:unsubscribe(T, <<"x/y/z">>),
ct:sleep(200),
{ok, Bin} = file:read_file("tmp/topic_trace_x.log"),
?assertNotEqual(nomatch, binary:match(Bin, [<<"hi1">>])),
?assertNotEqual(nomatch, binary:match(Bin, [<<"hi2">>])),
?assertNotEqual(nomatch, binary:match(Bin, [<<"PUBLISH">>])),
?assertNotEqual(nomatch, binary:match(Bin, [<<"SUBSCRIBE">>])),
?assertNotEqual(nomatch, binary:match(Bin, [<<"UNSUBSCRIBE">>])),
?assert(filelib:file_size("tmp/topic_trace_y.log") =:= 0),
%% Stop tracing
ok = emqx_trace_handler:uninstall(topic, <<"x/#">>),
ok = emqx_trace_handler:uninstall(topic, <<"y/#">>),
{error, _Reason} = emqx_trace_handler:uninstall(topic, <<"z/#">>),
?assertEqual([], emqx_trace_handler:running()),
emqtt:disconnect(T).
t_trace_ip_address(_Config) ->
{ok, T} = emqtt:start_link(?CLIENT),
emqtt:connect(T),
%% Start tracing
ok = emqx_trace_handler:install(ip_address, "127.0.0.1", all, "tmp/ip_trace_x.log"),
ok = emqx_trace_handler:install(ip_address, "192.168.1.1", all, "tmp/ip_trace_y.log"),
ct:sleep(100),
%% Verify the tracing files exist
?assert(filelib:is_regular("tmp/ip_trace_x.log")),
?assert(filelib:is_regular("tmp/ip_trace_y.log")),
%% Get current traces
?assertMatch([#{type := ip_address, filter := "127.0.0.1",
name := <<"127.0.0.1">>,
level := debug, dst := "tmp/ip_trace_x.log"},
#{type := ip_address, filter := "192.168.1.1",
name := <<"192.168.1.1">>,
level := debug, dst := "tmp/ip_trace_y.log"}
],
emqx_trace_handler:running()),
%% Client with clientid = "client" publishes "hi1" and "hi2" messages to "x/y/z".
emqtt:publish(T, <<"x/y/z">>, <<"hi1">>),
emqtt:publish(T, <<"x/y/z">>, <<"hi2">>),
emqtt:subscribe(T, <<"x/y/z">>),
emqtt:unsubscribe(T, <<"x/y/z">>),
ct:sleep(200),
{ok, Bin} = file:read_file("tmp/ip_trace_x.log"),
?assertNotEqual(nomatch, binary:match(Bin, [<<"hi1">>])),
?assertNotEqual(nomatch, binary:match(Bin, [<<"hi2">>])),
?assertNotEqual(nomatch, binary:match(Bin, [<<"PUBLISH">>])),
?assertNotEqual(nomatch, binary:match(Bin, [<<"SUBSCRIBE">>])),
?assertNotEqual(nomatch, binary:match(Bin, [<<"UNSUBSCRIBE">>])),
?assert(filelib:file_size("tmp/ip_trace_y.log") =:= 0),
%% Stop tracing
ok = emqx_trace_handler:uninstall(ip_address, <<"127.0.0.1">>),
ok = emqx_trace_handler:uninstall(ip_address, <<"192.168.1.1">>),
{error, _Reason} = emqx_trace_handler:uninstall(ip_address, <<"127.0.0.2">>),
emqtt:disconnect(T),
?assertEqual([], emqx_trace_handler:running()).


@ -1,120 +0,0 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2019-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_tracer_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
all() -> [t_trace_clientid, t_trace_topic].
init_per_suite(Config) ->
emqx_common_test_helpers:boot_modules(all),
emqx_common_test_helpers:start_apps([]),
Config.
end_per_suite(_Config) ->
emqx_common_test_helpers:stop_apps([]).
t_trace_clientid(_Config) ->
{ok, T} = emqtt:start_link([{host, "localhost"},
{clientid, <<"client">>},
{username, <<"testuser">>},
{password, <<"pass">>}
]),
emqtt:connect(T),
%% Start tracing
emqx_logger:set_log_level(error),
{error, _} = emqx_tracer:start_trace({clientid, <<"client">>}, debug, "tmp/client.log"),
emqx_logger:set_log_level(debug),
ok = emqx_tracer:start_trace({clientid, <<"client">>}, debug, "tmp/client.log"),
ok = emqx_tracer:start_trace({clientid, <<"client2">>}, all, "tmp/client2.log"),
ok = emqx_tracer:start_trace({clientid, <<"client3">>}, all, "tmp/client3.log"),
{error, {invalid_log_level, bad_level}} = emqx_tracer:start_trace({clientid, <<"client4">>}, bad_level, "tmp/client4.log"),
{error, {handler_not_added, {file_error,".",eisdir}}} = emqx_tracer:start_trace({clientid, <<"client5">>}, debug, "."),
ct:sleep(100),
%% Verify the tracing files exist
?assert(filelib:is_regular("tmp/client.log")),
?assert(filelib:is_regular("tmp/client2.log")),
%% Get current traces
?assertEqual([{{clientid,"client"},{debug,"tmp/client.log"}},
{{clientid,"client2"},{debug,"tmp/client2.log"}},
{{clientid,"client3"},{debug,"tmp/client3.log"}}
], emqx_tracer:lookup_traces()),
%% set the overall log level to debug
emqx_logger:set_log_level(debug),
%% Client with clientid = "client" publishes a "hi" message to "a/b/c".
emqtt:publish(T, <<"a/b/c">>, <<"hi">>),
ct:sleep(200),
%% Verify messages are logged to "tmp/client.log" but not "tmp/client2.log".
?assert(filelib:file_size("tmp/client.log") > 0),
?assert(filelib:file_size("tmp/client2.log") == 0),
%% Stop tracing
ok = emqx_tracer:stop_trace({clientid, <<"client">>}),
ok = emqx_tracer:stop_trace({clientid, <<"client2">>}),
ok = emqx_tracer:stop_trace({clientid, <<"client3">>}),
emqtt:disconnect(T),
emqx_logger:set_log_level(warning).
t_trace_topic(_Config) ->
{ok, T} = emqtt:start_link([{host, "localhost"},
{clientid, <<"client">>},
{username, <<"testuser">>},
{password, <<"pass">>}
]),
emqtt:connect(T),
%% Start tracing
emqx_logger:set_log_level(debug),
ok = emqx_tracer:start_trace({topic, <<"x/#">>}, all, "tmp/topic_trace.log"),
ok = emqx_tracer:start_trace({topic, <<"y/#">>}, all, "tmp/topic_trace.log"),
ct:sleep(100),
%% Verify the tracing file exists
?assert(filelib:is_regular("tmp/topic_trace.log")),
%% Get current traces
?assertEqual([{{topic,"x/#"},{debug,"tmp/topic_trace.log"}},
{{topic,"y/#"},{debug,"tmp/topic_trace.log"}}], emqx_tracer:lookup_traces()),
%% set the overall log level to debug
emqx_logger:set_log_level(debug),
%% Client with clientid = "client" publishes a "hi" message to "x/y/z".
emqtt:publish(T, <<"x/y/z">>, <<"hi">>),
ct:sleep(200),
?assert(filelib:file_size("tmp/topic_trace.log") > 0),
%% Stop tracing
ok = emqx_tracer:stop_trace({topic, <<"x/#">>}),
ok = emqx_tracer:stop_trace({topic, <<"y/#">>}),
{error, _Reason} = emqx_tracer:stop_trace({topic, <<"z/#">>}),
emqtt:disconnect(T),
emqx_logger:set_log_level(warning).


@ -48,7 +48,10 @@ init_per_testcase(TestCase, Config) when
TestCase =/= t_ws_pingreq_before_connected,
TestCase =/= t_ws_non_check_origin
->
emqx_channel_SUITE:set_test_listenser_confs(),
%% Mock emqx_cm
ok = meck:new(emqx_cm, [passthrough, no_history, no_link]),
ok = meck:expect(emqx_cm, mark_channel_connected, fun(_) -> ok end),
ok = meck:expect(emqx_cm, mark_channel_disconnected, fun(_) -> ok end),
%% Mock cowboy_req
ok = meck:new(cowboy_req, [passthrough, no_history, no_link]),
ok = meck:expect(cowboy_req, header, fun(_, _, _) -> <<>> end),
@ -90,7 +93,8 @@ end_per_testcase(TestCase, _Config) when
TestCase =/= t_ws_pingreq_before_connected
->
lists:foreach(fun meck:unload/1,
[cowboy_req,
[emqx_cm,
cowboy_req,
emqx_access_control,
emqx_broker,
emqx_hooks,
@ -363,14 +367,12 @@ t_handle_info_close(_) ->
{[{close, _}], _St} = ?ws_conn:handle_info({close, protocol_error}, st()).
t_handle_info_event(_) ->
ok = meck:new(emqx_cm, [passthrough, no_history]),
ok = meck:expect(emqx_cm, register_channel, fun(_,_,_) -> ok end),
ok = meck:expect(emqx_cm, insert_channel_info, fun(_,_,_) -> ok end),
ok = meck:expect(emqx_cm, connection_closed, fun(_) -> true end),
{ok, _} = ?ws_conn:handle_info({event, connected}, st()),
{ok, _} = ?ws_conn:handle_info({event, disconnected}, st()),
{ok, _} = ?ws_conn:handle_info({event, updated}, st()),
ok = meck:unload(emqx_cm).
{ok, _} = ?ws_conn:handle_info({event, updated}, st()).
t_handle_timeout_idle_timeout(_) ->
TRef = make_ref(),

File diff suppressed because it is too large.


@ -34,12 +34,11 @@
start(_StartType, _StartArgs) ->
ok = mria_rlog:wait_for_shards([?AUTH_SHARD], infinity),
{ok, Sup} = emqx_authn_sup:start_link(),
ok = ?AUTHN:register_providers(emqx_authn:providers()),
ok = initialize(),
{ok, Sup}.
stop(_State) ->
ok = ?AUTHN:deregister_providers(provider_types()),
ok = deinitialize(),
ok.
%%------------------------------------------------------------------------------
@ -47,12 +46,38 @@ stop(_State) ->
%%------------------------------------------------------------------------------
initialize() ->
RawConfigs = emqx:get_raw_config([authentication], []),
Config = emqx_authn:check_configs(RawConfigs),
?AUTHN:initialize_authentication(?GLOBAL, Config),
lists:foreach(fun({ListenerID, ListenerConfig}) ->
?AUTHN:initialize_authentication(ListenerID, maps:get(authentication, ListenerConfig, []))
end, emqx_listeners:list()).
ok = ?AUTHN:register_providers(emqx_authn:providers()),
lists:foreach(
fun({ChainName, RawAuthConfigs}) ->
AuthConfig = emqx_authn:check_configs(RawAuthConfigs),
?AUTHN:initialize_authentication(
ChainName,
AuthConfig)
end,
chain_configs()).
deinitialize() ->
ok = ?AUTHN:deregister_providers(provider_types()),
ok = emqx_authn_utils:cleanup_resources().
chain_configs() ->
[global_chain_config() | listener_chain_configs()].
global_chain_config() ->
{?GLOBAL, emqx:get_raw_config([<<"authentication">>], [])}.
listener_chain_configs() ->
lists:map(
fun({ListenerID, _}) ->
{ListenerID, emqx:get_raw_config(auth_config_path(ListenerID), [])}
end,
emqx_listeners:list()).
auth_config_path(ListenerID) ->
[<<"listeners">>]
++ binary:split(atom_to_binary(ListenerID), <<":">>)
++ [<<"authentication">>].
provider_types() ->
lists:map(fun({Type, _Module}) -> Type end, emqx_authn:providers()).


@ -21,12 +21,11 @@
-export([ common_fields/0
, roots/0
, fields/1
, authenticator_type/0
]).
%% only for doc generation
roots() -> [{authenticator_config,
#{type => hoconsc:union(config_refs([Module || {_AuthnType, Module} <- emqx_authn:providers()]))
}}].
roots() -> [{authenticator_config, hoconsc:mk(authenticator_type())}].
fields(_) -> [].
@ -38,5 +37,8 @@ enable(type) -> boolean();
enable(default) -> true;
enable(_) -> undefined.
authenticator_type() ->
hoconsc:union(config_refs([Module || {_AuthnType, Module} <- emqx_authn:providers()])).
config_refs(Modules) ->
lists:append([Module:refs() || Module <- Modules]).


@ -16,6 +16,8 @@
-module(emqx_authn_utils).
-include_lib("emqx/include/emqx_placeholder.hrl").
-export([ replace_placeholders/2
, replace_placeholder/2
, check_password/3
@ -23,8 +25,13 @@
, hash/4
, gen_salt/0
, bin/1
, ensure_apps_started/1
, cleanup_resources/0
, make_resource_id/1
]).
-define(RESOURCE_GROUP, <<"emqx_authn">>).
%%------------------------------------------------------------------------------
%% APIs
%%------------------------------------------------------------------------------
@ -42,17 +49,17 @@ replace_placeholders([Placeholder | More], Credential, Acc) ->
replace_placeholders(More, Credential, [convert_to_sql_param(V) | Acc])
end.
replace_placeholder(<<"${mqtt-username}">>, Credential) ->
replace_placeholder(?PH_USERNAME, Credential) ->
maps:get(username, Credential, undefined);
replace_placeholder(<<"${mqtt-clientid}">>, Credential) ->
replace_placeholder(?PH_CLIENTID, Credential) ->
maps:get(clientid, Credential, undefined);
replace_placeholder(<<"${mqtt-password}">>, Credential) ->
replace_placeholder(?PH_PASSWORD, Credential) ->
maps:get(password, Credential, undefined);
replace_placeholder(<<"${ip-address}">>, Credential) ->
replace_placeholder(?PH_PEERHOST, Credential) ->
maps:get(peerhost, Credential, undefined);
replace_placeholder(<<"${cert-subject}">>, Credential) ->
replace_placeholder(?PH_CERT_SUBJECT, Credential) ->
maps:get(dn, Credential, undefined);
replace_placeholder(<<"${cert-common-name}">>, Credential) ->
replace_placeholder(?PH_CERT_CN_NAME, Credential) ->
maps:get(cn, Credential, undefined);
replace_placeholder(Constant, _) ->
Constant.
@ -62,22 +69,42 @@ check_password(undefined, _Selected, _State) ->
check_password(Password,
#{<<"password_hash">> := Hash},
#{password_hash_algorithm := bcrypt}) ->
case {ok, Hash} =:= bcrypt:hashpw(Password, Hash) of
true -> ok;
false -> {error, bad_username_or_password}
case emqx_passwd:hash(bcrypt, {Hash, Password}) of
Hash -> ok;
_ ->
{error, bad_username_or_password}
end;
check_password(Password,
#{<<"password_hash">> := Hash} = Selected,
#{password_hash_algorithm := Algorithm,
salt_position := SaltPosition}) ->
Salt = maps:get(<<"salt">>, Selected, <<>>),
case Hash =:= hash(Algorithm, Password, Salt, SaltPosition) of
true -> ok;
false -> {error, bad_username_or_password}
case hash(Algorithm, Password, Salt, SaltPosition) of
Hash -> ok;
_ ->
{error, bad_username_or_password}
end.
is_superuser(Selected) ->
#{is_superuser => maps:get(<<"is_superuser">>, Selected, false)}.
is_superuser(#{<<"is_superuser">> := <<"">>}) ->
#{is_superuser => false};
is_superuser(#{<<"is_superuser">> := <<"0">>}) ->
#{is_superuser => false};
is_superuser(#{<<"is_superuser">> := 0}) ->
#{is_superuser => false};
is_superuser(#{<<"is_superuser">> := null}) ->
#{is_superuser => false};
is_superuser(#{<<"is_superuser">> := false}) ->
#{is_superuser => false};
is_superuser(#{<<"is_superuser">> := _}) ->
#{is_superuser => true};
is_superuser(#{}) ->
#{is_superuser => false}.
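%% Illustrative mappings implied by the clauses above (input values assumed):
%%   is_superuser(#{<<"is_superuser">> => <<"0">>}) -> #{is_superuser => false}
%%   is_superuser(#{<<"is_superuser">> => 1})       -> #{is_superuser => true}
%%   is_superuser(#{})                              -> #{is_superuser => false}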
ensure_apps_started(bcrypt) ->
{ok, _} = application:ensure_all_started(bcrypt),
ok;
ensure_apps_started(_) ->
ok.
hash(Algorithm, Password, Salt, prefix) ->
emqx_passwd:hash(Algorithm, <<Salt/binary, Password/binary>>);
@ -92,6 +119,15 @@ bin(A) when is_atom(A) -> atom_to_binary(A, utf8);
bin(L) when is_list(L) -> list_to_binary(L);
bin(X) -> X.
cleanup_resources() ->
lists:foreach(
fun emqx_resource:remove_local/1,
emqx_resource:list_group_instances(?RESOURCE_GROUP)).
make_resource_id(Name) ->
NameBin = bin(Name),
emqx_resource:generate_id(?RESOURCE_GROUP, NameBin).
%%------------------------------------------------------------------------------
%% Internal functions
%%------------------------------------------------------------------------------


@ -17,6 +17,7 @@
-module(emqx_enhanced_authn_scram_mnesia).
-include("emqx_authn.hrl").
-include_lib("stdlib/include/ms_transform.hrl").
-include_lib("typerefl/include/types.hrl").
-behaviour(hocon_schema).
@ -28,7 +29,7 @@
]).
-export([ refs/0
, create/1
, create/2
, update/2
, authenticate/2
, destroy/1
@ -46,6 +47,8 @@
-define(TAB, ?MODULE).
-define(FORMAT_FUN, {?MODULE, format_user_info}).
-type(user_group() :: binary()).
-export([mnesia/1]).
-boot_mnesia({mnesia, [boot]}).
@ -58,6 +61,8 @@
, is_superuser
}).
-reflect_type([user_group/0]).
%%------------------------------------------------------------------------------
%% Mnesia bootstrap
%%------------------------------------------------------------------------------
@ -102,17 +107,17 @@ iteration_count(_) -> undefined.
refs() ->
[hoconsc:ref(?MODULE, config)].
create(#{ algorithm := Algorithm
, iteration_count := IterationCount
, '_unique' := Unique
}) ->
State = #{user_group => Unique,
create(AuthenticatorID,
#{algorithm := Algorithm,
iteration_count := IterationCount}) ->
State = #{user_group => AuthenticatorID,
algorithm => Algorithm,
iteration_count => IterationCount},
{ok, State}.
update(Config, #{user_group := Unique}) ->
create(Config#{'_unique' => Unique}).
update(Config, #{user_group := ID}) ->
create(ID, Config).
authenticate(#{auth_method := AuthMethod,
auth_data := AuthData,
@ -132,9 +137,12 @@ authenticate(_Credential, _State) ->
ignore.
destroy(#{user_group := UserGroup}) ->
MatchSpec = ets:fun2ms(
fun(#user_info{user_id = {Group, _}} = User) when Group =:= UserGroup ->
User
end),
trans(
fun() ->
MatchSpec = [{{user_info, {UserGroup, '_'}, '_', '_', '_', '_'}, [], ['$_']}],
ok = lists:foreach(fun(UserInfo) ->
mnesia:delete_object(?TAB, UserInfo, write)
end, mnesia:select(?TAB, MatchSpec, write))


@ -30,7 +30,7 @@
]).
-export([ refs/0
, create/1
, create/2
, update/2
, authenticate/2
, destroy/1
@ -113,24 +113,25 @@ refs() ->
, hoconsc:ref(?MODULE, post)
].
create(#{ method := Method
, url := URL
, headers := Headers
, body := Body
, request_timeout := RequestTimeout
, '_unique' := Unique
} = Config) ->
create(_AuthenticatorID, Config) ->
create(Config).
create(#{method := Method,
url := URL,
headers := Headers,
body := Body,
request_timeout := RequestTimeout} = Config) ->
#{path := Path,
query := Query} = URIMap = parse_url(URL),
State = #{ method => Method
, path => Path
, base_query => cow_qs:parse_qs(list_to_binary(Query))
, headers => maps:to_list(Headers)
, body => maps:to_list(Body)
, request_timeout => RequestTimeout
, '_unique' => Unique
},
case emqx_resource:create_local(Unique,
ResourceId = emqx_authn_utils:make_resource_id(?MODULE),
State = #{method => Method,
path => Path,
base_query => cow_qs:parse_qs(list_to_binary(Query)),
headers => maps:to_list(Headers),
body => maps:to_list(Body),
request_timeout => RequestTimeout,
resource_id => ResourceId},
case emqx_resource:create_local(ResourceId,
emqx_connector_http,
Config#{base_url => maps:remove(query, URIMap),
pool_type => random}) of
@ -153,11 +154,11 @@ update(Config, State) ->
authenticate(#{auth_method := _}, _) ->
ignore;
authenticate(Credential, #{'_unique' := Unique,
authenticate(Credential, #{resource_id := ResourceId,
method := Method,
request_timeout := RequestTimeout} = State) ->
Request = generate_request(Credential, State),
case emqx_resource:query(Unique, {Method, Request, RequestTimeout}) of
case emqx_resource:query(ResourceId, {Method, Request, RequestTimeout}) of
{ok, 204, _Headers} -> {ok, #{is_superuser => false}};
{ok, 200, Headers, Body} ->
ContentType = proplists:get_value(<<"content-type">>, Headers, <<"application/json">>),
@ -165,19 +166,35 @@ authenticate(Credential, #{'_unique' := Unique,
{ok, NBody} ->
%% TODO: Return by user property
{ok, #{is_superuser => maps:get(<<"is_superuser">>, NBody, false),
user_property => NBody}};
user_property => maps:remove(<<"is_superuser">>, NBody)}};
{error, _Reason} ->
{ok, #{is_superuser => false}}
end;
{error, Reason} ->
?SLOG(error, #{msg => "http_server_query_failed",
resource => Unique,
resource => ResourceId,
reason => Reason}),
ignore;
Other ->
Output = may_append_body(#{resource => ResourceId}, Other),
case erlang:element(2, Other) of
Code5xx when Code5xx >= 500 andalso Code5xx < 600 ->
?SLOG(error, Output#{msg => "http_server_error",
code => Code5xx}),
ignore;
Code4xx when Code4xx >= 400 andalso Code4xx < 500 ->
?SLOG(warning, Output#{msg => "refused_by_http_server",
code => Code4xx}),
{error, not_authorized};
OtherCode ->
?SLOG(error, Output#{msg => "undesired_response_code",
code => OtherCode}),
ignore
end
end.
destroy(#{'_unique' := Unique}) ->
_ = emqx_resource:remove_local(Unique),
destroy(#{resource_id := ResourceId}) ->
_ = emqx_resource:remove_local(ResourceId),
ok.
%%--------------------------------------------------------------------
@ -305,6 +322,11 @@ parse_body(<<"application/x-www-form-urlencoded">>, Body) ->
parse_body(ContentType, _) ->
{error, {unsupported_content_type, ContentType}}.
may_append_body(Output, {ok, _, _, Body}) ->
Output#{body => Body};
may_append_body(Output, {ok, _, _}) ->
Output.
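%% For example (response values assumed): a 4-tuple reply such as
%% {ok, 503, Headers, <<"busy">>} yields Output#{body => <<"busy">>},
%% while a 3-tuple reply leaves Output unchanged.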
to_list(A) when is_atom(A) ->
atom_to_list(A);
to_list(B) when is_binary(B) ->


@ -27,7 +27,7 @@
]).
-export([ refs/0
, create/1
, create/2
, update/2
, authenticate/2
, destroy/1
@ -139,18 +139,23 @@ refs() ->
, hoconsc:ref(?MODULE, 'jwks')
].
create(_AuthenticatorID, Config) ->
create(Config).
create(#{verify_claims := VerifyClaims} = Config) ->
create2(Config#{verify_claims => handle_verify_claims(VerifyClaims)}).
update(#{use_jwks := false} = Config, #{jwk := Connector})
update(#{use_jwks := false} = Config,
#{jwk := Connector})
when is_pid(Connector) ->
_ = emqx_authn_jwks_connector:stop(Connector),
create(Config);
update(#{use_jwks := false} = Config, _) ->
update(#{use_jwks := false} = Config, _State) ->
create(Config);
update(#{use_jwks := true} = Config, #{jwk := Connector} = State)
update(#{use_jwks := true} = Config,
#{jwk := Connector} = State)
when is_pid(Connector) ->
ok = emqx_authn_jwks_connector:update(Connector, Config),
case maps:get(verify_cliams, Config, undefined) of
@ -160,7 +165,7 @@ update(#{use_jwks := true} = Config, #{jwk := Connector} = State)
{ok, State#{verify_claims => handle_verify_claims(VerifyClaims)}}
end;
update(#{use_jwks := true} = Config, _) ->
update(#{use_jwks := true} = Config, _State) ->
create(Config).
authenticate(#{auth_method := _}, _) ->
@ -340,7 +345,7 @@ handle_placeholder(Placeholder0) ->
Placeholder0
end.
validate_placeholder(<<"mqtt-clientid">>) ->
validate_placeholder(<<"clientid">>) ->
clientid;
validate_placeholder(<<"mqtt-username">>) ->
validate_placeholder(<<"username">>) ->
username.


@ -17,6 +17,7 @@
-module(emqx_authn_mnesia).
-include("emqx_authn.hrl").
-include_lib("stdlib/include/ms_transform.hrl").
-include_lib("typerefl/include/types.hrl").
-behaviour(hocon_schema).
@ -28,7 +29,7 @@
]).
-export([ refs/0
, create/1
, create/2
, update/2
, authenticate/2
, destroy/1
@ -45,8 +46,7 @@
-export([format_user_info/1]).
-type user_id_type() :: clientid | username.
-type user_group() :: {binary(), binary()}.
-type user_group() :: binary().
-type user_id() :: binary().
-record(user_info,
@ -56,7 +56,7 @@
, is_superuser :: boolean()
}).
-reflect_type([ user_id_type/0 ]).
-reflect_type([user_id_type/0]).
-export([mnesia/1]).
@ -123,29 +123,28 @@ salt_rounds(_) -> undefined.
refs() ->
[hoconsc:ref(?MODULE, config)].
create(#{ user_id_type := Type
, password_hash_algorithm := #{name := bcrypt,
salt_rounds := SaltRounds}
, '_unique' := Unique
}) ->
{ok, _} = application:ensure_all_started(bcrypt),
State = #{user_group => Unique,
create(AuthenticatorID,
#{user_id_type := Type,
password_hash_algorithm := #{name := bcrypt,
salt_rounds := SaltRounds}}) ->
ok = emqx_authn_utils:ensure_apps_started(bcrypt),
State = #{user_group => AuthenticatorID,
user_id_type => Type,
password_hash_algorithm => bcrypt,
salt_rounds => SaltRounds},
{ok, State};
create(#{ user_id_type := Type
, password_hash_algorithm := #{name := Name}
, '_unique' := Unique
}) ->
State = #{user_group => Unique,
create(AuthenticatorID,
#{user_id_type := Type,
password_hash_algorithm := #{name := Name}}) ->
ok = emqx_authn_utils:ensure_apps_started(Name),
State = #{user_group => AuthenticatorID,
user_id_type => Type,
password_hash_algorithm => Name},
{ok, State}.
update(Config, #{user_group := Unique}) ->
create(Config#{'_unique' => Unique}).
update(Config, #{user_group := ID}) ->
create(ID, Config).
authenticate(#{auth_method := _}, _) ->
ignore;
@ -171,10 +170,14 @@ authenticate(#{password := Password} = Credential,
destroy(#{user_group := UserGroup}) ->
trans(
fun() ->
MatchSpec = [{{user_info, {UserGroup, '_'}, '_', '_', '_'}, [], ['$_']}],
ok = lists:foreach(fun delete_user2/1, mnesia:select(?TAB, MatchSpec, write))
ok = lists:foreach(
fun(User) ->
mnesia:delete_object(?TAB, User, write)
end,
mnesia:select(?TAB, group_match_spec(UserGroup), write))
end).
import_users(Filename0, State) ->
Filename = to_binary(Filename0),
case filename:extension(Filename) of
@ -246,8 +249,7 @@ lookup_user(UserID, #{user_group := UserGroup}) ->
end.
list_users(PageParams, #{user_group := UserGroup}) ->
MatchSpec = [{{user_info, {UserGroup, '_'}, '_', '_', '_'}, [], ['$_']}],
{ok, emqx_mgmt_api:paginate(?TAB, MatchSpec, PageParams, ?FORMAT_FUN)}.
{ok, emqx_mgmt_api:paginate(?TAB, group_match_spec(UserGroup), PageParams, ?FORMAT_FUN)}.
%%------------------------------------------------------------------------------
%% Internal functions
@ -374,9 +376,6 @@ insert_user(UserGroup, UserID, PasswordHash, Salt, IsSuperuser) ->
is_superuser = IsSuperuser},
mnesia:write(?TAB, UserInfo, write).
delete_user2(UserInfo) ->
mnesia:delete_object(?TAB, UserInfo, write).
%% TODO: Support other types
get_user_identity(#{username := Username}, username) ->
Username;
@ -401,3 +400,9 @@ to_binary(L) when is_list(L) ->
format_user_info(#user_info{user_id = {_, UserID}, is_superuser = IsSuperuser}) ->
#{user_id => UserID, is_superuser => IsSuperuser}.
group_match_spec(UserGroup) ->
ets:fun2ms(
fun(#user_info{user_id = {Group, _}} = User) when Group =:= UserGroup ->
User
end).
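%% e.g. with UserGroup = <<"g1">> (illustrative), the generated match spec
%% selects every #user_info{} whose user_id has the form {<<"g1">>, _UserID}.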


@ -29,7 +29,7 @@
]).
-export([ refs/0
, create/1
, create/2
, update/2
, authenticate/2
, destroy/1
@ -102,19 +102,24 @@ refs() ->
, hoconsc:ref(?MODULE, 'sharded-cluster')
].
create(#{ selector := Selector
, '_unique' := Unique
} = Config) ->
create(_AuthenticatorID, Config) ->
create(Config).
create(#{selector := Selector} = Config) ->
NSelector = parse_selector(Selector),
State = maps:with([ collection
, password_hash_field
, salt_field
, is_superuser_field
, password_hash_algorithm
, salt_position
, '_unique'], Config),
NState = State#{selector => NSelector},
case emqx_resource:create_local(Unique, emqx_connector_mongo, Config) of
State = maps:with(
[collection,
password_hash_field,
salt_field,
is_superuser_field,
password_hash_algorithm,
salt_position],
Config),
ResourceId = emqx_authn_utils:make_resource_id(?MODULE),
NState = State#{
selector => NSelector,
resource_id => ResourceId},
case emqx_resource:create_local(ResourceId, emqx_connector_mongo, Config) of
{ok, already_created} ->
{ok, NState};
{ok, _} ->
@ -135,17 +140,16 @@ update(Config, State) ->
authenticate(#{auth_method := _}, _) ->
ignore;
authenticate(#{password := Password} = Credential,
#{ collection := Collection
, selector := Selector0
, '_unique' := Unique
} = State) ->
#{collection := Collection,
selector := Selector0,
resource_id := ResourceId} = State) ->
Selector1 = replace_placeholders(Selector0, Credential),
Selector2 = normalize_selector(Selector1),
case emqx_resource:query(Unique, {find_one, Collection, Selector2, #{}}) of
case emqx_resource:query(ResourceId, {find_one, Collection, Selector2, #{}}) of
undefined -> ignore;
{error, Reason} ->
?SLOG(error, #{msg => "mongodb_query_failed",
resource => Unique,
resource => ResourceId,
reason => Reason}),
ignore;
Doc ->
@ -154,7 +158,7 @@ authenticate(#{password := Password} = Credential,
{ok, #{is_superuser => is_superuser(Doc, State)}};
{error, {cannot_find_password_hash_field, PasswordHashField}} ->
?SLOG(error, #{msg => "cannot_find_password_hash_field",
resource => Unique,
resource => ResourceId,
password_hash_field => PasswordHashField}),
ignore;
{error, Reason} ->
@ -162,8 +166,8 @@ authenticate(#{password := Password} = Credential,
end
end.
destroy(#{'_unique' := Unique}) ->
_ = emqx_resource:remove_local(Unique),
destroy(#{resource_id := ResourceId}) ->
_ = emqx_resource:remove_local(ResourceId),
ok.
%%------------------------------------------------------------------------------
@ -205,7 +209,7 @@ check_password(Password,
undefined ->
{error, {cannot_find_password_hash_field, PasswordHashField}};
Hash ->
case {ok, Hash} =:= bcrypt:hashpw(Password, Hash) of
case {ok, to_list(Hash)} =:= bcrypt:hashpw(Password, Hash) of
true -> ok;
false -> {error, bad_username_or_password}
end
@ -238,3 +242,7 @@ hash(Algorithm, Password, Salt, prefix) ->
emqx_passwd:hash(Algorithm, <<Salt/binary, Password/binary>>);
hash(Algorithm, Password, Salt, suffix) ->
emqx_passwd:hash(Algorithm, <<Password/binary, Salt/binary>>).
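%% e.g. (values assumed) hash(sha256, <<"pass">>, <<"salt">>, prefix) hashes
%% <<"saltpass">>, whereas the suffix variant hashes <<"passsalt">>.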
to_list(L) when is_list(L) -> L;
to_list(L) when is_binary(L) -> binary_to_list(L);
to_list(X) -> X.


@ -29,7 +29,7 @@
]).
-export([ refs/0
, create/1
, create/2
, update/2
, authenticate/2
, destroy/1
@ -76,20 +76,23 @@ query_timeout(_) -> undefined.
refs() ->
[hoconsc:ref(?MODULE, config)].
create(#{ password_hash_algorithm := Algorithm
, salt_position := SaltPosition
, query := Query0
, query_timeout := QueryTimeout
, '_unique' := Unique
create(_AuthenticatorID, Config) ->
create(Config).
create(#{password_hash_algorithm := Algorithm,
salt_position := SaltPosition,
query := Query0,
query_timeout := QueryTimeout
} = Config) ->
{Query, PlaceHolders} = parse_query(Query0),
ResourceId = emqx_authn_utils:make_resource_id(?MODULE),
State = #{password_hash_algorithm => Algorithm,
salt_position => SaltPosition,
query => Query,
placeholders => PlaceHolders,
query_timeout => QueryTimeout,
'_unique' => Unique},
case emqx_resource:create_local(Unique, emqx_connector_mysql, Config) of
resource_id => ResourceId},
case emqx_resource:create_local(ResourceId, emqx_connector_mysql, Config) of
{ok, already_created} ->
{ok, State};
{ok, _} ->
@ -113,12 +116,12 @@ authenticate(#{password := Password} = Credential,
#{placeholders := PlaceHolders,
query := Query,
query_timeout := Timeout,
'_unique' := Unique} = State) ->
resource_id := ResourceId} = State) ->
Params = emqx_authn_utils:replace_placeholders(PlaceHolders, Credential),
case emqx_resource:query(Unique, {sql, Query, Params, Timeout}) of
case emqx_resource:query(ResourceId, {sql, Query, Params, Timeout}) of
{ok, _Columns, []} -> ignore;
{ok, Columns, Rows} ->
Selected = maps:from_list(lists:zip(Columns, Rows)),
{ok, Columns, [Row | _]} ->
Selected = maps:from_list(lists:zip(Columns, Row)),
case emqx_authn_utils:check_password(Password, Selected, State) of
ok ->
{ok, emqx_authn_utils:is_superuser(Selected)};
@ -127,13 +130,13 @@ authenticate(#{password := Password} = Credential,
end;
{error, Reason} ->
?SLOG(error, #{msg => "mysql_query_failed",
resource => Unique,
resource => ResourceId,
reason => Reason}),
ignore
end.
destroy(#{'_unique' := Unique}) ->
_ = emqx_resource:remove_local(Unique),
destroy(#{resource_id := ResourceId}) ->
_ = emqx_resource:remove_local(ResourceId),
ok.
%%------------------------------------------------------------------------------


@ -30,12 +30,17 @@
]).
-export([ refs/0
, create/1
, create/2
, update/2
, authenticate/2
, destroy/1
]).
-ifdef(TEST).
-compile(export_all).
-compile(nowarn_export_all).
-endif.
%%------------------------------------------------------------------------------
%% Hocon Schema
%%------------------------------------------------------------------------------
@ -48,7 +53,7 @@ fields(config) ->
[ {mechanism, {enum, ['password-based']}}
, {backend, {enum, [postgresql]}}
, {password_hash_algorithm, fun password_hash_algorithm/1}
, {salt_position, {enum, [prefix, suffix]}}
, {salt_position, fun salt_position/1}
, {query, fun query/1}
] ++ emqx_authn_schema:common_fields()
++ emqx_connector_schema_lib:relational_db_fields()
@ -58,6 +63,10 @@ password_hash_algorithm(type) -> {enum, [plain, md5, sha, sha256, sha512, bcrypt
password_hash_algorithm(default) -> sha256;
password_hash_algorithm(_) -> undefined.
salt_position(type) -> {enum, [prefix, suffix]};
salt_position(default) -> prefix;
salt_position(_) -> undefined.
query(type) -> string();
query(_) -> undefined.
@ -68,18 +77,20 @@ query(_) -> undefined.
refs() ->
[hoconsc:ref(?MODULE, config)].
create(#{ query := Query0
, password_hash_algorithm := Algorithm
, salt_position := SaltPosition
, '_unique' := Unique
} = Config) ->
create(_AuthenticatorID, Config) ->
create(Config).
create(#{query := Query0,
password_hash_algorithm := Algorithm,
salt_position := SaltPosition} = Config) ->
{Query, PlaceHolders} = parse_query(Query0),
ResourceId = emqx_authn_utils:make_resource_id(?MODULE),
State = #{query => Query,
placeholders => PlaceHolders,
password_hash_algorithm => Algorithm,
salt_position => SaltPosition,
'_unique' => Unique},
case emqx_resource:create_local(Unique, emqx_connector_pgsql, Config) of
resource_id => ResourceId},
case emqx_resource:create_local(ResourceId, emqx_connector_pgsql, Config) of
{ok, already_created} ->
{ok, State};
{ok, _} ->
@ -102,14 +113,13 @@ authenticate(#{auth_method := _}, _) ->
authenticate(#{password := Password} = Credential,
#{query := Query,
placeholders := PlaceHolders,
'_unique' := Unique} = State) ->
resource_id := ResourceId} = State) ->
Params = emqx_authn_utils:replace_placeholders(PlaceHolders, Credential),
case emqx_resource:query(Unique, {sql, Query, Params}) of
case emqx_resource:query(ResourceId, {sql, Query, Params}) of
{ok, _Columns, []} -> ignore;
{ok, Columns, Rows} ->
{ok, Columns, [Row | _]} ->
NColumns = [Name || #column{name = Name} <- Columns],
NRows = [erlang:element(1, Row) || Row <- Rows],
Selected = maps:from_list(lists:zip(NColumns, NRows)),
Selected = maps:from_list(lists:zip(NColumns, erlang:tuple_to_list(Row))),
case emqx_authn_utils:check_password(Password, Selected, State) of
ok ->
{ok, emqx_authn_utils:is_superuser(Selected)};
@ -118,13 +128,13 @@ authenticate(#{password := Password} = Credential,
end;
{error, Reason} ->
?SLOG(error, #{msg => "postgresql_query_failed",
resource => Unique,
resource => ResourceId,
reason => Reason}),
ignore
end.
destroy(#{'_unique' := Unique}) ->
_ = emqx_resource:remove_local(Unique),
destroy(#{resource_id := ResourceId}) ->
_ = emqx_resource:remove_local(ResourceId),
ok.
%%------------------------------------------------------------------------------
@ -138,7 +148,7 @@ parse_query(Query) ->
PlaceHolders = [PlaceHolder || [PlaceHolder] <- Captured],
Replacements = ["$" ++ integer_to_list(I) || I <- lists:seq(1, length(Captured))],
NQuery = lists:foldl(fun({PlaceHolder, Replacement}, Query0) ->
re:replace(Query0, PlaceHolder, Replacement, [{return, binary}])
re:replace(Query0, "\\" ++ PlaceHolder, Replacement, [{return, binary}])
end, Query, lists:zip(PlaceHolders, Replacements)),
{NQuery, PlaceHolders};
nomatch ->


@ -29,7 +29,7 @@
]).
-export([ refs/0
, create/1
, create/2
, update/2
, authenticate/2
, destroy/1
@ -56,11 +56,11 @@ fields(sentinel) ->
common_fields() ++ emqx_connector_redis:fields(sentinel).
common_fields() ->
[ {mechanism, {enum, ['password-based']}}
, {backend, {enum, [redis]}}
, {query, fun query/1}
, {password_hash_algorithm, fun password_hash_algorithm/1}
, {salt_position, fun salt_position/1}
[{mechanism, {enum, ['password-based']}},
{backend, {enum, [redis]}},
{query, fun query/1},
{password_hash_algorithm, fun password_hash_algorithm/1},
{salt_position, fun salt_position/1}
] ++ emqx_authn_schema:common_fields().
query(type) -> string();
@ -84,16 +84,22 @@ refs() ->
, hoconsc:ref(?MODULE, sentinel)
].
create(#{ query := Query
, '_unique' := Unique
} = Config) ->
create(_AuthenticatorID, Config) ->
create(Config).
create(#{query := Query,
password_hash_algorithm := Algorithm} = Config) ->
try
NQuery = parse_query(Query),
State = maps:with([ password_hash_algorithm
, salt_position
, '_unique'], Config),
NState = State#{query => NQuery},
case emqx_resource:create_local(Unique, emqx_connector_redis, Config) of
ok = emqx_authn_utils:ensure_apps_started(Algorithm),
State = maps:with(
[password_hash_algorithm, salt_position],
Config),
ResourceId = emqx_authn_utils:make_resource_id(?MODULE),
NState = State#{
query => NQuery,
resource_id => ResourceId},
case emqx_resource:create_local(ResourceId, emqx_connector_redis, Config) of
{ok, already_created} ->
{ok, NState};
{ok, _} ->
@ -102,12 +108,12 @@ create(#{ query := Query
{error, Reason}
end
catch
error:{unsupported_query, Query} ->
error:{unsupported_query, _Query} ->
{error, {unsupported_query, Query}};
error:missing_password_hash ->
{error, missing_password_hash};
error:{unsupported_field, Field} ->
{error, {unsupported_field, Field}}
error:{unsupported_fields, Fields} ->
{error, {unsupported_fields, Fields}}
end.
update(Config, State) ->
@ -122,11 +128,10 @@ update(Config, State) ->
authenticate(#{auth_method := _}, _) ->
ignore;
authenticate(#{password := Password} = Credential,
#{ query := {Command, Key, Fields}
, '_unique' := Unique
} = State) ->
#{query := {Command, Key, Fields},
resource_id := ResourceId} = State) ->
NKey = binary_to_list(iolist_to_binary(replace_placeholders(Key, Credential))),
case emqx_resource:query(Unique, {cmd, [Command, NKey | Fields]}) of
case emqx_resource:query(ResourceId, {cmd, [Command, NKey | Fields]}) of
{ok, Values} ->
case merge(Fields, Values) of
#{<<"password_hash">> := _} = Selected ->
@ -138,18 +143,18 @@ authenticate(#{password := Password} = Credential,
end;
_ ->
?SLOG(error, #{msg => "cannot_find_password_hash_field",
resource => Unique}),
resource => ResourceId}),
ignore
end;
{error, Reason} ->
?SLOG(error, #{msg => "redis_query_failed",
resource => Unique,
resource => ResourceId,
reason => Reason}),
ignore
end.
destroy(#{'_unique' := Unique}) ->
_ = emqx_resource:remove_local(Unique),
destroy(#{resource_id := ResourceId}) ->
_ = emqx_resource:remove_local(ResourceId),
ok.
%%------------------------------------------------------------------------------
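For a configured query such as HMGET mqtt_user:${username} password_hash salt, State holds {Command, Key, Fields}; the credential is spliced into the key template and the resulting command is sent to the connector under the stored ResourceId. An illustrative trace, plus a plausible merge/2 (merge/2 is not shown in this hunk, so its exact shape is an assumption):

%% Parsed query (illustrative values):
%%   Command = "HMGET"
%%   Key     = [<<"mqtt_user:">>, <<"${username}">>]      %% from parse_key/1
%%   Fields  = ["password_hash", "salt"]
%% With Credential = #{username => <<"emqx">>}, NKey becomes "mqtt_user:emqx" and
%% the resource is queried with {cmd, ["HMGET", "mqtt_user:emqx", "password_hash", "salt"]}.
%%
%% A plausible merge/2: pair the requested fields with the returned values and
%% drop fields Redis reported as missing, producing a map with binary keys so
%% the #{<<"password_hash">> := _} match above can succeed.
merge(Fields, Values) ->
    maps:from_list(
      [{list_to_binary(Field), Value}
       || {Field, Value} <- lists:zip(Fields, Values), Value =/= undefined]).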
@ -169,20 +174,15 @@ parse_query(Query) ->
end.
check_fields(Fields) ->
check_fields(Fields, false).
HasPassHash = lists:member("password_hash", Fields),
KnownFields = ["password_hash", "salt", "is_superuser"],
UnknownFields = [F || F <- Fields, not lists:member(F, KnownFields)],
check_fields([], false) ->
error(missing_password_hash);
check_fields([], true) ->
ok;
check_fields(["password_hash" | More], false) ->
check_fields(More, true);
check_fields(["salt" | More], HasPassHash) ->
check_fields(More, HasPassHash);
check_fields(["is_superuser" | More], HasPassHash) ->
check_fields(More, HasPassHash);
check_fields([Field | _], _) ->
error({unsupported_field, Field}).
case {HasPassHash, UnknownFields} of
{true, []} -> ok;
{true, _} -> error({unsupported_fields, UnknownFields});
{false, _} -> error(missing_password_hash)
end.
parse_key(Key) ->
Tokens = re:split(Key, "(" ++ ?RE_PLACEHOLDER ++ ")", [{return, binary}, group, trim]),
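The rewritten check_fields/1 collapses the old accumulator recursion into two membership checks and reports every unknown field at once rather than failing on the first. A self-contained eunit sketch of the three outcomes (module and test names are illustrative; the function body is copied from the hunk above):

-module(check_fields_example).
-include_lib("eunit/include/eunit.hrl").

check_fields(Fields) ->
    HasPassHash = lists:member("password_hash", Fields),
    KnownFields = ["password_hash", "salt", "is_superuser"],
    UnknownFields = [F || F <- Fields, not lists:member(F, KnownFields)],
    case {HasPassHash, UnknownFields} of
        {true, []} -> ok;
        {true, _} -> error({unsupported_fields, UnknownFields});
        {false, _} -> error(missing_password_hash)
    end.

check_fields_test() ->
    ok = check_fields(["password_hash", "salt"]),
    ?assertError({unsupported_fields, ["nickname"]},
                 check_fields(["password_hash", "nickname"])),
    ?assertError(missing_password_hash,
                 check_fields(["salt", "is_superuser"])).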

View File

@ -0,0 +1,20 @@
-----BEGIN CERTIFICATE-----
MIIDUTCCAjmgAwIBAgIJAPPYCjTmxdt/MA0GCSqGSIb3DQEBCwUAMD8xCzAJBgNV
BAYTAkNOMREwDwYDVQQIDAhoYW5nemhvdTEMMAoGA1UECgwDRU1RMQ8wDQYDVQQD
DAZSb290Q0EwHhcNMjAwNTA4MDgwNjUyWhcNMzAwNTA2MDgwNjUyWjA/MQswCQYD
VQQGEwJDTjERMA8GA1UECAwIaGFuZ3pob3UxDDAKBgNVBAoMA0VNUTEPMA0GA1UE
AwwGUm9vdENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzcgVLex1
EZ9ON64EX8v+wcSjzOZpiEOsAOuSXOEN3wb8FKUxCdsGrsJYB7a5VM/Jot25Mod2
juS3OBMg6r85k2TWjdxUoUs+HiUB/pP/ARaaW6VntpAEokpij/przWMPgJnBF3Ur
MjtbLayH9hGmpQrI5c2vmHQ2reRZnSFbY+2b8SXZ+3lZZgz9+BaQYWdQWfaUWEHZ
uDaNiViVO0OT8DRjCuiDp3yYDj3iLWbTA/gDL6Tf5XuHuEwcOQUrd+h0hyIphO8D
tsrsHZ14j4AWYLk1CPA6pq1HIUvEl2rANx2lVUNv+nt64K/Mr3RnVQd9s8bK+TXQ
KGHd2Lv/PALYuwIDAQABo1AwTjAdBgNVHQ4EFgQUGBmW+iDzxctWAWxmhgdlE8Pj
EbQwHwYDVR0jBBgwFoAUGBmW+iDzxctWAWxmhgdlE8PjEbQwDAYDVR0TBAUwAwEB
/zANBgkqhkiG9w0BAQsFAAOCAQEAGbhRUjpIred4cFAFJ7bbYD9hKu/yzWPWkMRa
ErlCKHmuYsYk+5d16JQhJaFy6MGXfLgo3KV2itl0d+OWNH0U9ULXcglTxy6+njo5
CFqdUBPwN1jxhzo9yteDMKF4+AHIxbvCAJa17qcwUKR5MKNvv09C6pvQDJLzid7y
E2dkgSuggik3oa0427KvctFf8uhOV94RvEDyqvT5+pgNYZ2Yfga9pD/jjpoHEUlo
88IGU8/wJCx3Ds2yc8+oBg/ynxG8f/HmCC1ET6EHHoe2jlo8FpU/SgGtghS1YL30
IWxNsPrUP+XsZpBJy/mvOhE5QXo6Y35zDqqj8tI7AGmAWu22jg==
-----END CERTIFICATE-----

View File

@ -0,0 +1,19 @@
-----BEGIN CERTIFICATE-----
MIIDEzCCAfugAwIBAgIBAjANBgkqhkiG9w0BAQsFADA/MQswCQYDVQQGEwJDTjER
MA8GA1UECAwIaGFuZ3pob3UxDDAKBgNVBAoMA0VNUTEPMA0GA1UEAwwGUm9vdENB
MB4XDTIwMDUwODA4MDcwNVoXDTMwMDUwNjA4MDcwNVowPzELMAkGA1UEBhMCQ04x
ETAPBgNVBAgMCGhhbmd6aG91MQwwCgYDVQQKDANFTVExDzANBgNVBAMMBlNlcnZl
cjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALNeWT3pE+QFfiRJzKmn
AMUrWo3K2j/Tm3+Xnl6WLz67/0rcYrJbbKvS3uyRP/stXyXEKw9CepyQ1ViBVFkW
Aoy8qQEOWFDsZc/5UzhXUnb6LXr3qTkFEjNmhj+7uzv/lbBxlUG1NlYzSeOB6/RT
8zH/lhOeKhLnWYPXdXKsa1FL6ij4X8DeDO1kY7fvAGmBn/THh1uTpDizM4YmeI+7
4dmayA5xXvARte5h4Vu5SIze7iC057N+vymToMk2Jgk+ZZFpyXrnq+yo6RaD3ANc
lrc4FbeUQZ5a5s5Sxgs9a0Y3WMG+7c5VnVXcbjBRz/aq2NtOnQQjikKKQA8GF080
BQkCAwEAAaMaMBgwCQYDVR0TBAIwADALBgNVHQ8EBAMCBeAwDQYJKoZIhvcNAQEL
BQADggEBAJefnMZpaRDHQSNUIEL3iwGXE9c6PmIsQVE2ustr+CakBp3TZ4l0enLt
iGMfEVFju69cO4oyokWv+hl5eCMkHBf14Kv51vj448jowYnF1zmzn7SEzm5Uzlsa
sqjtAprnLyof69WtLU1j5rYWBuFX86yOTwRAFNjm9fvhAcrEONBsQtqipBWkMROp
iUYMkRqbKcQMdwxov+lHBYKq9zbWRoqLROAn54SRqgQk6c15JdEfgOOjShbsOkIH
UhqcwRkQic7n1zwHVGVDgNIZVgmJ2IdIWBlPEC7oLrRrBD/X1iEEXtKab6p5o22n
KB5mN+iQaE+Oe2cpGKZJiJRdM+IqDDQ=
-----END CERTIFICATE-----

View File

@ -0,0 +1,19 @@
-----BEGIN CERTIFICATE-----
MIIDEzCCAfugAwIBAgIBATANBgkqhkiG9w0BAQsFADA/MQswCQYDVQQGEwJDTjER
MA8GA1UECAwIaGFuZ3pob3UxDDAKBgNVBAoMA0VNUTEPMA0GA1UEAwwGUm9vdENB
MB4XDTIwMDUwODA4MDY1N1oXDTMwMDUwNjA4MDY1N1owPzELMAkGA1UEBhMCQ04x
ETAPBgNVBAgMCGhhbmd6aG91MQwwCgYDVQQKDANFTVExDzANBgNVBAMMBkNsaWVu
dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMy4hoksKcZBDbY680u6
TS25U51nuB1FBcGMlF9B/t057wPOlxF/OcmbxY5MwepS41JDGPgulE1V7fpsXkiW
1LUimYV/tsqBfymIe0mlY7oORahKji7zKQ2UBIVFhdlvQxunlIDnw6F9popUgyHt
dMhtlgZK8oqRwHxO5dbfoukYd6J/r+etS5q26sgVkf3C6dt0Td7B25H9qW+f7oLV
PbcHYCa+i73u9670nrpXsC+Qc7Mygwa2Kq/jwU+ftyLQnOeW07DuzOwsziC/fQZa
nbxR+8U9FNftgRcC3uP/JMKYUqsiRAuaDokARZxVTV5hUElfpO6z6/NItSDvvh3i
eikCAwEAAaMaMBgwCQYDVR0TBAIwADALBgNVHQ8EBAMCBeAwDQYJKoZIhvcNAQEL
BQADggEBABchYxKo0YMma7g1qDswJXsR5s56Czx/I+B41YcpMBMTrRqpUC0nHtLk
M7/tZp592u/tT8gzEnQjZLKBAhFeZaR3aaKyknLqwiPqJIgg0pgsBGITrAK3Pv4z
5/YvAJJKgTe5UdeTz6U4lvNEux/4juZ4pmqH4qSFJTOzQS7LmgSmNIdd072rwXBd
UzcSHzsJgEMb88u/LDLjj1pQ7AtZ4Tta8JZTvcgBFmjB0QUi6fgkHY6oGat/W4kR
jSRUBlMUbM/drr2PVzRc2dwbFIl3X+ZE6n5Sl3ZwRAC/s92JU6CPMRW02muVu6xl
goraNgPISnrbpR6KjxLZkVembXzjNNc=
-----END CERTIFICATE-----

Some files were not shown because too many files have changed in this diff.