Merge branch 'master' into EMQX-782

This commit is contained in:
x1001100011 2021-10-11 23:44:44 -07:00
commit f20d5b5395
225 changed files with 8254 additions and 9124 deletions

View File

@ -36,9 +36,14 @@ emqx_test(){
"zip")
packagename=$(basename "${PACKAGE_PATH}/${EMQX_NAME}"-*.zip)
unzip -q "${PACKAGE_PATH}/${packagename}"
export EMQX_ZONES__DEFAULT__MQTT__SERVER_KEEPALIVE=60 \
EMQX_MQTT__MAX_TOPIC_ALIAS=10
[[ $(arch) == *arm* || $(arch) == aarch64 ]] && export EMQX_LISTENERS__QUIC__DEFAULT__ENABLED=false
export EMQX_ZONES__DEFAULT__MQTT__SERVER_KEEPALIVE=60
export EMQX_MQTT__MAX_TOPIC_ALIAS=10
export EMQX_LOG__CONSOLE_HANDLER__LEVEL=debug
export EMQX_LOG__FILE_HANDLERS__DEFAULT__LEVEL=debug
if [[ $(arch) == *arm* || $(arch) == aarch64 ]]; then
export EMQX_LISTENERS__QUIC__DEFAULT__ENABLED=false
export WAIT_FOR_ERLANG_STOP=120
fi
# sed -i '/emqx_telemetry/d' "${PACKAGE_PATH}"/emqx/data/loaded_plugins
echo "running ${packagename} start"
@ -58,7 +63,11 @@ emqx_test(){
IDLE_TIME=$((IDLE_TIME+1))
done
pytest -v /paho-mqtt-testing/interoperability/test_client/V5/test_connect.py::test_basic
"${PACKAGE_PATH}"/emqx/bin/emqx stop
if ! "${PACKAGE_PATH}"/emqx/bin/emqx stop; then
cat "${PACKAGE_PATH}"/emqx/log/erlang.log.1 || true
cat "${PACKAGE_PATH}"/emqx/log/emqx.log.1 || true
exit 1
fi
echo "running ${packagename} stop"
rm -rf "${PACKAGE_PATH}"/emqx
;;
@ -133,6 +142,7 @@ EOF
## for ARM, due to CI env issue, skip start of quic listener for the moment
[[ $(arch) == *arm* || $(arch) == aarch64 ]] && tee -a "$emqx_env_vars" <<EOF
export EMQX_LISTENERS__QUIC__DEFAULT__ENABLED=false
export WAIT_FOR_ERLANG_STOP=120
EOF
else
echo "Error: cannot locate emqx_vars"

1
.gitattributes vendored
View File

@ -1,5 +1,6 @@
* text=auto
*.* text eol=lf
*.cmd text eol=crlf
*.jpg -text
*.png -text
*.pdf -text

View File

@ -140,7 +140,6 @@ jobs:
path: source/_packages/${{ matrix.profile }}/.
mac:
runs-on: macos-10.15
needs: prepare
@ -148,11 +147,16 @@ jobs:
fail-fast: false
matrix:
profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
macos:
- macos-11
- macos-10.15
otp:
- 24.0.5-emqx-1
exclude:
- profile: emqx-edge
runs-on: ${{ matrix.macos }}
steps:
- uses: actions/download-artifact@v2
with:
@ -170,16 +174,12 @@ jobs:
id: cache
with:
path: ~/.kerl
key: erl${{ matrix.otp }}-macos10.15
key: otp-${{ matrix.otp }}-${{ matrix.macos }}
- name: build erlang
if: steps.cache.outputs.cache-hit != 'true'
timeout-minutes: 60
env:
KERL_BUILD_BACKEND: git
OTP_GITHUB_URL: https://github.com/emqx/otp
run: |
kerl update releases
kerl build ${{ matrix.otp }}
kerl build git https://github.com/emqx/otp.git OTP-${{ matrix.otp }} ${{ matrix.otp }}
kerl install ${{ matrix.otp }} $HOME/.kerl/${{ matrix.otp }}
- name: build
working-directory: source
@ -191,8 +191,8 @@ jobs:
- name: test
working-directory: source
run: |
pkg_name=$(basename _packages/${{ matrix.profile }}/${{ matrix.profile }}-*.zip)
unzip -q _packages/${{ matrix.profile }}/$pkg_name
pkg_name=$(find _packages/${{ matrix.profile }} -mindepth 1 -maxdepth 1 -iname \*.zip | head)
unzip -q $pkg_name
# gsed -i '/emqx_telemetry/d' ./emqx/data/loaded_plugins
./emqx/bin/emqx start || cat emqx/log/erlang.log.1
ready='no'
@ -211,7 +211,7 @@ jobs:
./emqx/bin/emqx_ctl status
./emqx/bin/emqx stop
rm -rf emqx
openssl dgst -sha256 ./_packages/${{ matrix.profile }}/$pkg_name | awk '{print $2}' > ./_packages/${{ matrix.profile }}/$pkg_name.sha256
openssl dgst -sha256 $pkg_name | awk '{print $2}' > $pkg_name.sha256
- uses: actions/upload-artifact@v1
if: startsWith(github.ref, 'refs/tags/')
with:
@ -386,7 +386,7 @@ jobs:
username: ${{ secrets.DOCKER_HUB_USER }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}
- uses: docker/build-push-action@v2
if: github.event_name == 'release'
if: github.event_name == 'release' && github.event.release.prerelease
with:
push: true
pull: true
@ -400,6 +400,23 @@ jobs:
EMQX_NAME=${{ matrix.profile }}
file: source/deploy/docker/Dockerfile
context: source
- uses: docker/build-push-action@v2
if: github.event_name == 'release' && !github.event.release.prerelease
with:
push: true
pull: true
no-cache: true
platforms: linux/amd64,linux/arm64
tags: |
emqx/${{ matrix.profile }}:latest
emqx/${{ matrix.profile }}:${{ steps.version.outputs.version }}
build-args: |
PKG_VSN=${{ steps.version.outputs.version }}
BUILD_FROM=ghcr.io/emqx/emqx-builder-helper/5.0:${{ matrix.otp }}-alpine3.14
RUN_FROM=alpine:3.14
EMQX_NAME=${{ matrix.profile }}
file: source/deploy/docker/Dockerfile
context: source
delete-artifact:
@ -487,15 +504,6 @@ jobs:
-X POST \
-d "{\"repo\":\"emqx/emqx\", \"tag\": \"${{ env.version }}\" }" \
${{ secrets.EMQX_IO_RELEASE_API }}
- name: push docker image to docker hub
if: github.event_name == 'release'
run: |
set -e -x -u
sudo make docker-prepare
cd _packages/${{ matrix.profile }} && for var in $(ls |grep docker |grep -v sha256); do unzip $var; sudo docker load < ${var%.*}; rm -f ${var%.*}; done && cd -
echo ${{ secrets.DOCKER_HUB_TOKEN }} |sudo docker login -u ${{ secrets.DOCKER_HUB_USER }} --password-stdin
sudo TARGET=emqx/${{ matrix.profile }} make docker-push
sudo TARGET=emqx/${{ matrix.profile }} make docker-manifest-list
- name: update repo.emqx.io
if: github.event_name == 'release' && endsWith(github.repository, 'enterprise') && matrix.profile == 'emqx-ee'
run: |

View File

@ -13,6 +13,7 @@ jobs:
runs-on: ubuntu-20.04
strategy:
fail-fast: false
matrix:
otp:
- 24.0.5-emqx-1
@ -53,13 +54,18 @@ jobs:
path: _packages/**/*.zip
mac:
runs-on: macos-10.15
strategy:
fail-fast: false
matrix:
macos:
- macos-11
- macos-10.15
otp:
- 24.0.5-emqx-1
runs-on: ${{ matrix.macos }}
steps:
- uses: actions/checkout@v1
- name: prepare
@ -82,16 +88,12 @@ jobs:
id: cache
with:
path: ~/.kerl
key: erl${{ matrix.otp }}-macos10.15
key: otp-${{ matrix.otp }}-${{ matrix.macos }}
- name: build erlang
if: steps.cache.outputs.cache-hit != 'true'
timeout-minutes: 60
env:
KERL_BUILD_BACKEND: git
OTP_GITHUB_URL: https://github.com/emqx/otp
run: |
kerl update releases
kerl build ${{ matrix.otp }}
kerl build git https://github.com/emqx/otp.git OTP-${{ matrix.otp }} ${{ matrix.otp }}
kerl install ${{ matrix.otp }} $HOME/.kerl/${{ matrix.otp }}
- name: build
run: |
@ -106,8 +108,7 @@ jobs:
path: ./rebar3.crashdump
- name: test
run: |
pkg_name=$(basename _packages/${EMQX_NAME}/emqx-*.zip)
unzip -q _packages/${EMQX_NAME}/$pkg_name
unzip -q $(find _packages/${EMQX_NAME} -mindepth 1 -maxdepth 1 -iname \*.zip | head)
# gsed -i '/emqx_telemetry/d' ./emqx/data/loaded_plugins
./emqx/bin/emqx start || cat emqx/log/erlang.log.1
ready='no'

View File

@ -45,10 +45,19 @@ jobs:
- api_login
- api_banned
- api_alarms
- api_nodes
- api_topic_metrics
- api_retainer
- api_auto_subscribe
- api_delayed_publish
- api_topic_rewrite
- api_event_message
- api_stats
steps:
- uses: actions/checkout@v2
with:
repository: emqx/emqx-fvt
ref: v1.2.0
path: .
- uses: actions/setup-java@v1
with:
@ -74,7 +83,7 @@ jobs:
cd /tmp && tar -xvf apache-jmeter.tgz
echo "jmeter.save.saveservice.output_format=xml" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties
echo "jmeter.save.saveservice.response_data.on_error=true" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties
wget --no-verbose -O /tmp/apache-jmeter-$JMETER_VERSION/lib/ext/mqtt-xmeter-2.0.2-jar-with-dependencies.jar https://raw.githubusercontent.com/xmeter-net/mqtt-jmeter/master/Download/v2.0.2/mqtt-xmeter-2.0.2-jar-with-dependencies.jar
wget --no-verbose -O /tmp/apache-jmeter-$JMETER_VERSION/lib/ext/mqtt-xmeter-fuse-2.0.2-jar-with-dependencies.jar https://raw.githubusercontent.com/xmeter-net/mqtt-jmeter/master/Download/v2.0.2/mqtt-xmeter-fuse-2.0.2-jar-with-dependencies.jar
ln -s /tmp/apache-jmeter-$JMETER_VERSION /opt/jmeter
- name: run ${{ matrix.script_name }}
run: |

2
.gitignore vendored
View File

@ -50,3 +50,5 @@ _upgrade_base/
TAGS
erlang_ls.config
.els_cache/
.vs/
.vscode/

View File

@ -5,7 +5,7 @@ BUILD = $(CURDIR)/build
SCRIPTS = $(CURDIR)/scripts
export PKG_VSN ?= $(shell $(CURDIR)/pkg-vsn.sh)
export EMQX_DESC ?= EMQ X
export EMQX_DASHBOARD_VERSION ?= v5.0.0-beta.13
export EMQX_DASHBOARD_VERSION ?= v5.0.0-beta.16
ifeq ($(OS),Windows_NT)
export REBAR_COLOR=none
endif

View File

@ -14,7 +14,7 @@ English | [简体中文](./README-CN.md) | [日本語](./README-JP.md) | [рус
*EMQ X* broker is a fully open source, highly scalable, highly available distributed MQTT messaging broker for IoT, M2M and Mobile applications that can handle tens of millions of concurrent clients.
Starting from 3.0 release, *EMQ X* broker fully supports MQTT V5.0 protocol specifications and backward compatible with MQTT V3.1 and V3.1.1, as well as other communication protocols such as MQTT-SN, CoAP, LwM2M, WebSocket and STOMP. The 3.0 release of the *EMQ X* broker can scaled to 10+ million concurrent MQTT connections on one cluster.
Starting from 3.0 release, *EMQ X* broker fully supports MQTT V5.0 protocol specifications and backward compatible with MQTT V3.1 and V3.1.1, as well as other communication protocols such as MQTT-SN, CoAP, LwM2M, WebSocket and STOMP. The 3.0 release of the *EMQ X* broker can scale to 10+ million concurrent MQTT connections on one cluster.
- For full list of new features, please read [EMQ X Release Notes](https://github.com/emqx/emqx/releases).
- For more information, please visit [EMQ X homepage](https://www.emqx.io/).

View File

@ -194,12 +194,17 @@ listeners.ssl.default {
mountpoint = ""
## SSL options
## See ${example_common_ssl_options} for more information
ssl.versions = ["tlsv1.3", "tlsv1.2", "tlsv1.1", "tlsv1"]
ssl.keyfile = "{{ platform_etc_dir }}/certs/key.pem"
ssl.certfile = "{{ platform_etc_dir }}/certs/cert.pem"
ssl.cacertfile = "{{ platform_etc_dir }}/certs/cacert.pem"
# ssl.versions = ["tlsv1.3", "tlsv1.2", "tlsv1.1", "tlsv1"]
# TLS 1.3: "TLS_AES_256_GCM_SHA384,TLS_AES_128_GCM_SHA256,TLS_CHACHA20_POLY1305_SHA256,TLS_AES_128_CCM_SHA256,TLS_AES_128_CCM_8_SHA256"
# TLS 1-1.2 "ECDHE-ECDSA-AES256-GCM-SHA384,ECDHE-RSA-AES256-GCM-SHA384,ECDHE-ECDSA-AES256-SHA384,ECDHE-RSA-AES256-SHA384,ECDHE-ECDSA-DES-CBC3-SHA,ECDH-ECDSA-AES256-GCM-SHA384,ECDH-RSA-AES256-GCM-SHA384,ECDH-ECDSA-AES256-SHA384,ECDH-RSA-AES256-SHA384,DHE-DSS-AES256-GCM-SHA384,DHE-DSS-AES256-SHA256,AES256-GCM-SHA384,AES256-SHA256,ECDHE-ECDSA-AES128-GCM-SHA256,ECDHE-RSA-AES128-GCM-SHA256,ECDHE-ECDSA-AES128-SHA256,ECDHE-RSA-AES128-SHA256,ECDH-ECDSA-AES128-GCM-SHA256,ECDH-RSA-AES128-GCM-SHA256,ECDH-ECDSA-AES128-SHA256,ECDH-RSA-AES128-SHA256,DHE-DSS-AES128-GCM-SHA256,DHE-DSS-AES128-SHA256,AES128-GCM-SHA256,AES128-SHA256,ECDHE-ECDSA-AES256-SHA,ECDHE-RSA-AES256-SHA,DHE-DSS-AES256-SHA,ECDH-ECDSA-AES256-SHA,ECDH-RSA-AES256-SHA,AES256-SHA,ECDHE-ECDSA-AES128-SHA,ECDHE-RSA-AES128-SHA,DHE-DSS-AES128-SHA,ECDH-ECDSA-AES128-SHA,ECDH-RSA-AES128-SHA,AES128-SHA"
# PSK: "PSK-AES128-CBC-SHA,PSK-AES256-CBC-SHA,PSK-3DES-EDE-CBC-SHA,PSK-RC4-SHA"
# NOTE: If PSK cipher-suites are intended, tlsv1.3 should not be enabled in 'versions' config
# ssl.ciphers = ""
## TCP options
## See ${example_common_tcp_options} for more information
tcp.backlog = 1024
@ -1345,12 +1350,13 @@ example_common_ssl_options {
## Default: true
ssl.honor_cipher_order = true
## TLS versions only to protect from POODLE attack.
##
## @doc listeners.<name>.ssl.versions
## ValueType: Array<TLSVersion>
## Default: ["tlsv1.3", "tlsv1.2", "tlsv1.1", "tlsv1"]
ssl.versions = ["tlsv1.3", "tlsv1.2", "tlsv1.1", "tlsv1"]
# ssl.versions = ["tlsv1.3", "tlsv1.2", "tlsv1.1", "tlsv1"]
# TLS 1.3: "TLS_AES_256_GCM_SHA384,TLS_AES_128_GCM_SHA256,TLS_CHACHA20_POLY1305_SHA256,TLS_AES_128_CCM_SHA256,TLS_AES_128_CCM_8_SHA256"
# TLS 1-1.2 "ECDHE-ECDSA-AES256-GCM-SHA384,ECDHE-RSA-AES256-GCM-SHA384,ECDHE-ECDSA-AES256-SHA384,ECDHE-RSA-AES256-SHA384,ECDHE-ECDSA-DES-CBC3-SHA,ECDH-ECDSA-AES256-GCM-SHA384,ECDH-RSA-AES256-GCM-SHA384,ECDH-ECDSA-AES256-SHA384,ECDH-RSA-AES256-SHA384,DHE-DSS-AES256-GCM-SHA384,DHE-DSS-AES256-SHA256,AES256-GCM-SHA384,AES256-SHA256,ECDHE-ECDSA-AES128-GCM-SHA256,ECDHE-RSA-AES128-GCM-SHA256,ECDHE-ECDSA-AES128-SHA256,ECDHE-RSA-AES128-SHA256,ECDH-ECDSA-AES128-GCM-SHA256,ECDH-RSA-AES128-GCM-SHA256,ECDH-ECDSA-AES128-SHA256,ECDH-RSA-AES128-SHA256,DHE-DSS-AES128-GCM-SHA256,DHE-DSS-AES128-SHA256,AES128-GCM-SHA256,AES128-SHA256,ECDHE-ECDSA-AES256-SHA,ECDHE-RSA-AES256-SHA,DHE-DSS-AES256-SHA,ECDH-ECDSA-AES256-SHA,ECDH-RSA-AES256-SHA,AES256-SHA,ECDHE-ECDSA-AES128-SHA,ECDHE-RSA-AES128-SHA,DHE-DSS-AES128-SHA,ECDH-ECDSA-AES128-SHA,ECDH-RSA-AES128-SHA,AES128-SHA"
# PSK: "PSK-AES128-CBC-SHA,PSK-AES256-CBC-SHA,PSK-3DES-EDE-CBC-SHA,PSK-RC4-SHA"
# NOTE: If PSK cipher-suites are intended, tlsv1.3 should not be enabled in 'versions' config
# NOTE: by default, ALL ciphers are enabled
# ssl.ciphers = ""
## TLS Handshake timeout.
##
@ -1446,27 +1452,6 @@ example_common_ssl_options {
## Default: true
ssl.fail_if_no_peer_cert = false
## This is the single most important configuration option of an Erlang SSL
## application. Ciphers (and their ordering) define the way the client and
## server encrypt information over the wire, from the initial Diffie-Helman
## key exchange, the session key encryption ## algorithm and the message
## digest algorithm. Selecting a good cipher suite is critical for the
## applications data security, confidentiality and performance.
##
## The cipher list above offers:
##
## A good balance between compatibility with older browsers.
## It can get stricter for Machine-To-Machine scenarios.
## Perfect Forward Secrecy.
## No old/insecure encryption and HMAC algorithms
##
## Most of it was copied from Mozillas Server Side TLS article
##
## @doc listeners.<name>.ssl.ciphers
## ValueType: Array<Cipher>
## Default: [ECDHE-ECDSA-AES256-GCM-SHA384,ECDHE-RSA-AES256-GCM-SHA384,ECDHE-ECDSA-AES256-SHA384,ECDHE-RSA-AES256-SHA384,ECDHE-ECDSA-DES-CBC3-SHA,ECDH-ECDSA-AES256-GCM-SHA384,ECDH-RSA-AES256-GCM-SHA384,ECDH-ECDSA-AES256-SHA384,ECDH-RSA-AES256-SHA384,DHE-DSS-AES256-GCM-SHA384,DHE-DSS-AES256-SHA256,AES256-GCM-SHA384,AES256-SHA256,ECDHE-ECDSA-AES128-GCM-SHA256,ECDHE-RSA-AES128-GCM-SHA256,ECDHE-ECDSA-AES128-SHA256,ECDHE-RSA-AES128-SHA256,ECDH-ECDSA-AES128-GCM-SHA256,ECDH-RSA-AES128-GCM-SHA256,ECDH-ECDSA-AES128-SHA256,ECDH-RSA-AES128-SHA256,DHE-DSS-AES128-GCM-SHA256,DHE-DSS-AES128-SHA256,AES128-GCM-SHA256,AES128-SHA256,ECDHE-ECDSA-AES256-SHA,ECDHE-RSA-AES256-SHA,DHE-DSS-AES256-SHA,ECDH-ECDSA-AES256-SHA,ECDH-RSA-AES256-SHA,AES256-SHA,ECDHE-ECDSA-AES128-SHA,ECDHE-RSA-AES128-SHA,DHE-DSS-AES128-SHA,ECDH-ECDSA-AES128-SHA,ECDH-RSA-AES128-SHA,AES128-SHA,PSK-AES128-CBC-SHA,PSK-AES256-CBC-SHA,PSK-3DES-EDE-CBC-SHA,PSK-RC4-SHA]
ssl.ciphers = [ECDHE-ECDSA-AES256-GCM-SHA384,ECDHE-RSA-AES256-GCM-SHA384,ECDHE-ECDSA-AES256-SHA384,ECDHE-RSA-AES256-SHA384,ECDHE-ECDSA-DES-CBC3-SHA,ECDH-ECDSA-AES256-GCM-SHA384,ECDH-RSA-AES256-GCM-SHA384,ECDH-ECDSA-AES256-SHA384,ECDH-RSA-AES256-SHA384,DHE-DSS-AES256-GCM-SHA384,DHE-DSS-AES256-SHA256,AES256-GCM-SHA384,AES256-SHA256,ECDHE-ECDSA-AES128-GCM-SHA256,ECDHE-RSA-AES128-GCM-SHA256,ECDHE-ECDSA-AES128-SHA256,ECDHE-RSA-AES128-SHA256,ECDH-ECDSA-AES128-GCM-SHA256,ECDH-RSA-AES128-GCM-SHA256,ECDH-ECDSA-AES128-SHA256,ECDH-RSA-AES128-SHA256,DHE-DSS-AES128-GCM-SHA256,DHE-DSS-AES128-SHA256,AES128-GCM-SHA256,AES128-SHA256,ECDHE-ECDSA-AES256-SHA,ECDHE-RSA-AES256-SHA,DHE-DSS-AES256-SHA,ECDH-ECDSA-AES256-SHA,ECDH-RSA-AES256-SHA,AES256-SHA,ECDHE-ECDSA-AES128-SHA,ECDHE-RSA-AES128-SHA,DHE-DSS-AES128-SHA,ECDH-ECDSA-AES128-SHA,ECDH-RSA-AES128-SHA,AES128-SHA,PSK-AES128-CBC-SHA,PSK-AES256-CBC-SHA,PSK-3DES-EDE-CBC-SHA,PSK-RC4-SHA]
}
## Socket options for websocket connections

View File

@ -17,18 +17,18 @@
-ifndef(EMQ_X_HRL).
-define(EMQ_X_HRL, true).
%% Shard
%%--------------------------------------------------------------------
%% Common
%%--------------------------------------------------------------------
-define(Otherwise, true).
-define(COMMON_SHARD, emqx_common_shard).
-define(SHARED_SUB_SHARD, emqx_shared_sub_shard).
-define(MOD_DELAYED_SHARD, emqx_delayed_shard).
-define(CM_SHARD, emqx_cm_shard).
-define(ROUTE_SHARD, route_shard).
-define(BOOT_SHARDS, [ ?ROUTE_SHARD
, ?COMMON_SHARD
, ?SHARED_SUB_SHARD
]).
%%--------------------------------------------------------------------
%% Banner
%%--------------------------------------------------------------------
@ -71,7 +71,9 @@
%% Message Payload
payload :: emqx_types:payload(),
%% Timestamp (Unit: millisecond)
timestamp :: integer()
timestamp :: integer(),
%% not used so far, for future extension
extra = [] :: term()
}).
-record(delivery, {
@ -83,11 +85,6 @@
%% Route
%%--------------------------------------------------------------------
-define(ROUTE_SHARD, route_shard).
-define(RULE_ENGINE_SHARD, emqx_rule_engine_shard).
-record(route, {
topic :: binary(),
dest :: node() | {binary(), node()}
@ -133,8 +130,6 @@
until :: integer()
}).
-endif.
%%--------------------------------------------------------------------
%% Authentication
%%--------------------------------------------------------------------
@ -150,3 +145,5 @@
{ name :: atom()
, authenticators :: [#authenticator{}]
}).
-endif.

View File

@ -542,4 +542,9 @@
-define(SHARE(Group, Topic), emqx_topic:join([<<?SHARE>>, Group, Topic])).
-define(IS_SHARE(Topic), case Topic of <<?SHARE, _/binary>> -> true; _ -> false end).
-define(FRAME_PARSE_ERROR(Reason), {frame_parse_error, Reason}).
-define(FRAME_SERIALIZE_ERROR(Reason), {frame_serialize_error, Reason}).
-define(THROW_FRAME_ERROR(Reason), erlang:throw(?FRAME_PARSE_ERROR(Reason))).
-define(THROW_SERIALIZE_ERROR(Reason), erlang:throw(?FRAME_SERIALIZE_ERROR(Reason))).
-endif.

View File

@ -29,7 +29,7 @@
-ifndef(EMQX_ENTERPRISE).
-define(EMQX_RELEASE, {opensource, "5.0-alpha.6"}).
-define(EMQX_RELEASE, {opensource, "5.0-beta.1"}).
-else.

View File

@ -41,6 +41,7 @@
-define(LOG(Level, Format), ?LOG(Level, Format, [])).
%% deprecated
-define(LOG(Level, Format, Args, Meta),
%% check 'allow' here so we do not have to pass an anonymous function
%% down to logger which may cause `badfun` exception during upgrade
@ -58,8 +59,15 @@
%% structured logging
-define(SLOG(Level, Data),
logger:log(Level, Data, #{ mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY}
, line => ?LINE})).
%% check 'allow' here, only evaluate Data when necessary
case logger:allow(Level, ?MODULE) of
true ->
logger:log(Level, (Data), #{ mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY}
, line => ?LINE
});
false ->
ok
end).
%% print to 'user' group leader
-define(ULOG(Fmt, Args), io:format(user, Fmt, Args)).

View File

@ -10,13 +10,13 @@
%% `git_subdir` dependency in other projects.
{deps,
[ {gproc, {git, "https://github.com/uwiger/gproc", {tag, "0.8.0"}}}
, {typerefl, {git, "https://github.com/k32/typerefl", {tag, "0.8.4"}}}
, {typerefl, {git, "https://github.com/k32/typerefl", {tag, "0.8.5"}}}
, {jiffy, {git, "https://github.com/emqx/jiffy", {tag, "1.0.5"}}}
, {cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.8.3"}}}
, {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.8.2"}}}
, {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.8.3"}}}
, {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.10.8"}}}
, {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.5.1"}}}
, {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.19.0"}}}
, {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.19.5"}}}
, {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}}
, {recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}}
, {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "0.14.1"}}}

View File

@ -239,11 +239,11 @@ handle_call({get_alarms, deactivated}, _From, State) ->
{reply, Alarms, State};
handle_call(Req, _From, State) ->
?LOG(error, "Unexpected call: ~p", [Req]),
?SLOG(error, #{msg => "unexpected_call", call => Req}),
{reply, ignored, State}.
handle_cast(Msg, State) ->
?LOG(error, "Unexpected msg: ~p", [Msg]),
?SLOG(error, #{msg => "unexpected_cast", cast => Msg}),
{noreply, State}.
handle_info({timeout, _TRef, delete_expired_deactivated_alarm},
@ -253,11 +253,11 @@ handle_info({timeout, _TRef, delete_expired_deactivated_alarm},
{noreply, State#state{timer = ensure_timer(TRef, Period)}};
handle_info({update_timer, Period}, #state{timer = TRef} = State) ->
?LOG(warning, "update the 'validity_period' timer to ~p", [Period]),
?SLOG(warning, #{msg => "validity_timer_updated", period => Period}),
{noreply, State#state{timer = ensure_timer(TRef, Period)}};
handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]),
?SLOG(error, #{msg => "unexpected_info", info => Info}),
{noreply, State}.
terminate(_Reason, _State) ->
@ -323,8 +323,11 @@ deactivate_all_alarms() ->
clear_table(TableName) ->
case ekka_mnesia:clear_table(TableName) of
{aborted, Reason} ->
?LOG(warning, "Faile to clear table ~p reason: ~p",
[TableName, Reason]);
?SLOG(warning, #{
msg => "fail_to_clear_table",
table_name => TableName,
reason => Reason
});
{atomic, ok} ->
ok
end.
@ -354,10 +357,17 @@ delete_expired_deactivated_alarms(ActivatedAt, Checkpoint) ->
do_actions(_, _, []) ->
ok;
do_actions(activate, Alarm = #activated_alarm{name = Name, message = Message}, [log | More]) ->
?LOG(warning, "Alarm ~s is activated, ~s", [Name, Message]),
?SLOG(warning, #{
msg => "alarm_is_activated",
name => Name,
message => Message
}),
do_actions(activate, Alarm, More);
do_actions(deactivate, Alarm = #deactivated_alarm{name = Name}, [log | More]) ->
?LOG(warning, "Alarm ~s is deactivated", [Name]),
?SLOG(warning, #{
msg => "alarm_is_deactivated",
name => Name
}),
do_actions(deactivate, Alarm, More);
do_actions(Operation, Alarm, [publish | More]) ->
Topic = topic(Operation),

View File

@ -24,6 +24,7 @@
, get_description/0
, get_release/0
, set_init_config_load_done/0
, get_init_config_load_done/0
, set_override_conf_file/1
]).
@ -33,25 +34,18 @@
-define(APP, emqx).
-define(EMQX_SHARDS, [ ?ROUTE_SHARD
, ?COMMON_SHARD
, ?SHARED_SUB_SHARD
, ?RULE_ENGINE_SHARD
, ?MOD_DELAYED_SHARD
]).
%%--------------------------------------------------------------------
%% Application callbacks
%%--------------------------------------------------------------------
start(_Type, _Args) ->
ok = maybe_load_config(),
%% Load application first for ekka_mnesia scanner
mnesia:change_table_copy_type(schema, node(), disc_copies),
ekka:start(),
ok = ekka_rlog:wait_for_shards(?EMQX_SHARDS, infinity),
%% mnesia:change_table_copy_type(schema, node(), disc_copies),
ok = maybe_start_quicer(),
ensure_ekka_started(),
{ok, Sup} = emqx_sup:start_link(),
ok = maybe_start_listeners(),
ok = emqx_alarm_handler:load(),
@ -65,21 +59,28 @@ prep_stop(_State) ->
stop(_State) -> ok.
ensure_ekka_started() ->
ekka:start(),
ok = ekka_rlog:wait_for_shards(?BOOT_SHARDS, infinity).
%% @doc Call this function to make emqx boot without loading config,
%% in case we want to delegate the config load to a higher level app
%% which manages emqx app.
set_init_config_load_done() ->
application:set_env(emqx, init_config_load_done, true).
get_init_config_load_done() ->
application:get_env(emqx, init_config_load_done, false).
%% @doc This API is mostly for testing.
%% The override config file is typically located in the 'data' dir when
%% it is a emqx release, but emqx app should not have to konw where the
%% it is a emqx release, but emqx app should not have to know where the
%% 'data' dir is located.
set_override_conf_file(File) ->
application:set_env(emqx, override_conf_file, File).
maybe_load_config() ->
case application:get_env(emqx, init_config_load_done, false) of
case get_init_config_load_done() of
true ->
ok;
false ->

View File

@ -289,22 +289,16 @@ check_config(Config) ->
%%------------------------------------------------------------------------------
authenticate(#{listener := Listener, protocol := Protocol} = Credential, _AuthResult) ->
case ets:lookup(?CHAINS_TAB, Listener) of
[#chain{authenticators = Authenticators}] when Authenticators =/= [] ->
do_authenticate(Authenticators, Credential);
_ ->
case ets:lookup(?CHAINS_TAB, global_chain(Protocol)) of
[#chain{authenticators = Authenticators}] when Authenticators =/= [] ->
do_authenticate(Authenticators, Credential);
_ ->
ignore
end
Authenticators = get_authenticators(Listener, global_chain(Protocol)),
case get_enabled(Authenticators) of
[] -> ignore;
NAuthenticators -> do_authenticate(NAuthenticators, Credential)
end.
do_authenticate([], _) ->
{stop, {error, not_authorized}};
do_authenticate([#authenticator{provider = Provider, state = State} | More], Credential) ->
case Provider:authenticate(Credential, State) of
do_authenticate([#authenticator{id = ID, provider = Provider, state = State} | More], Credential) ->
try Provider:authenticate(Credential, State) of
ignore ->
do_authenticate(More, Credential);
Result ->
@ -314,8 +308,32 @@ do_authenticate([#authenticator{provider = Provider, state = State} | More], Cre
%% {continue, AuthData, AuthCache}
%% {error, Reason}
{stop, Result}
catch
Class:Reason:Stacktrace ->
?SLOG(warning, #{msg => "unexpected_error_in_authentication",
exception => Class,
reason => Reason,
stacktrace => Stacktrace,
authenticator => ID}),
do_authenticate(More, Credential)
end.
get_authenticators(Listener, Global) ->
case ets:lookup(?CHAINS_TAB, Listener) of
[#chain{authenticators = Authenticators}] ->
Authenticators;
_ ->
case ets:lookup(?CHAINS_TAB, Global) of
[#chain{authenticators = Authenticators}] ->
Authenticators;
_ ->
[]
end
end.
get_enabled(Authenticators) ->
[Authenticator || Authenticator <- Authenticators, Authenticator#authenticator.enable =:= true].
%%------------------------------------------------------------------------------
%% APIs
%%------------------------------------------------------------------------------
@ -331,7 +349,11 @@ initialize_authentication(ChainName, AuthenticatorsConfig) ->
{ok, _} ->
ok;
{error, Reason} ->
?LOG(error, "Failed to create authenticator '~s': ~p", [generate_id(AuthenticatorConfig), Reason])
?SLOG(error, #{
msg => "failed_to_create_authenticator",
authenticator => generate_id(AuthenticatorConfig),
reason => Reason
})
end
end, CheckedConfig).
@ -536,7 +558,7 @@ handle_call({create_authenticator, ChainName, Config}, _From, #{providers := Pro
false ->
case do_create_authenticator(ChainName, AuthenticatorID, Config, Providers) of
{ok, Authenticator} ->
NAuthenticators = Authenticators ++ [Authenticator],
NAuthenticators = Authenticators ++ [Authenticator#authenticator{enable = maps:get(enable, Config)}],
true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = NAuthenticators}),
{ok, serialize_authenticator(Authenticator)};
{error, Reason} ->
@ -575,7 +597,8 @@ handle_call({update_authenticator, ChainName, AuthenticatorID, Config}, _From, S
Unique = unique(ChainName, AuthenticatorID, Version),
case Provider:update(Config#{'_unique' => Unique}, ST) of
{ok, NewST} ->
NewAuthenticator = Authenticator#authenticator{state = switch_version(NewST)},
NewAuthenticator = Authenticator#authenticator{state = switch_version(NewST),
enable = maps:get(enable, Config)},
NewAuthenticators = replace_authenticator(AuthenticatorID, NewAuthenticator, Authenticators),
true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = NewAuthenticators}),
{ok, serialize_authenticator(NewAuthenticator)};
@ -629,15 +652,15 @@ handle_call({list_users, ChainName, AuthenticatorID}, _From, State) ->
reply(Reply, State);
handle_call(Req, _From, State) ->
?LOG(error, "Unexpected call: ~p", [Req]),
?SLOG(error, #{msg => "unexpected_call", call => Req}),
{reply, ignored, State}.
handle_cast(Req, State) ->
?LOG(error, "Unexpected case: ~p", [Req]),
?SLOG(error, #{msg => "unexpected_cast", cast => Req}),
{noreply, State}.
handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]),
?SLOG(error, #{msg => "unexpected_info", info => Info}),
{noreply, State}.
terminate(_Reason, _State) ->

View File

@ -187,11 +187,11 @@ init([]) ->
{ok, ensure_expiry_timer(#{expiry_timer => undefined})}.
handle_call(Req, _From, State) ->
?LOG(error, "unexpected call: ~p", [Req]),
?SLOG(error, #{msg => "unexpected_call", call => Req}),
{reply, ignored, State}.
handle_cast(Msg, State) ->
?LOG(error, "unexpected msg: ~p", [Msg]),
?SLOG(error, #{msg => "unexpected_msg", cast => Msg}),
{noreply, State}.
handle_info({timeout, TRef, expire}, State = #{expiry_timer := TRef}) ->
@ -199,7 +199,7 @@ handle_info({timeout, TRef, expire}, State = #{expiry_timer := TRef}) ->
{noreply, ensure_expiry_timer(State), hibernate};
handle_info(Info, State) ->
?LOG(error, "unexpected info: ~p", [Info]),
?SLOG(error, #{msg => "unexpected_info", info => Info}),
{noreply, State}.
terminate(_Reason, #{expiry_timer := TRef}) ->

View File

@ -202,7 +202,8 @@ publish(Msg) when is_record(Msg, message) ->
emqx_message:is_sys(Msg) orelse emqx_metrics:inc('messages.publish'),
case emqx_hooks:run_fold('message.publish', [], emqx_message:clean_dup(Msg)) of
#message{headers = #{allow_publish := false}} ->
?LOG(notice, "Stop publishing: ~s", [emqx_message:format(Msg)]),
?SLOG(debug, #{msg => "message_not_published",
payload => emqx_message:to_log_map(Msg)}),
[];
Msg1 = #message{topic = Topic} ->
route(aggre(emqx_router:match_routes(Topic)), delivery(Msg1))
@ -214,9 +215,14 @@ safe_publish(Msg) when is_record(Msg, message) ->
try
publish(Msg)
catch
_:Error:Stk->
?LOG(error, "Publish error: ~0p~n~s~n~0p",
[Error, emqx_message:format(Msg), Stk]),
Error : Reason : Stk->
?SLOG(error,#{
msg => "publishing_error",
exception => Error,
reason => Reason,
payload => emqx_message:to_log_map(Msg),
stacktrace => Stk
}),
[]
end.
@ -266,14 +272,22 @@ forward(Node, To, Delivery, async) ->
case emqx_rpc:cast(To, Node, ?BROKER, dispatch, [To, Delivery]) of
true -> emqx_metrics:inc('messages.forward');
{badrpc, Reason} ->
?LOG(error, "Ansync forward msg to ~s failed due to ~p", [Node, Reason]),
?SLOG(error, #{
msg => "async_forward_msg_to_node_failed",
node => Node,
reason => Reason
}),
{error, badrpc}
end;
forward(Node, To, Delivery, sync) ->
case emqx_rpc:call(To, Node, ?BROKER, dispatch, [To, Delivery]) of
{badrpc, Reason} ->
?LOG(error, "Sync forward msg to ~s failed due to ~p", [Node, Reason]),
?SLOG(error, #{
msg => "sync_forward_msg_to_node_failed",
node => Node,
reason => Reason
}),
{error, badrpc};
Result ->
emqx_metrics:inc('messages.forward'), Result
@ -450,14 +464,14 @@ handle_call({subscribe, Topic, I}, _From, State) ->
{reply, Ok, State};
handle_call(Req, _From, State) ->
?LOG(error, "Unexpected call: ~p", [Req]),
?SLOG(error, #{msg => "unexpected_call", call => Req}),
{reply, ignored, State}.
handle_cast({subscribe, Topic}, State) ->
case emqx_router:do_add_route(Topic) of
ok -> ok;
{error, Reason} ->
?LOG(error, "Failed to add route: ~p", [Reason])
?SLOG(error, #{msg => "failed_to_add_route", reason => Reason})
end,
{noreply, State};
@ -481,11 +495,11 @@ handle_cast({unsubscribed, Topic, I}, State) ->
{noreply, State};
handle_cast(Msg, State) ->
?LOG(error, "Unexpected cast: ~p", [Msg]),
?SLOG(error, #{msg => "unexpected_cast", cast => Msg}),
{noreply, State}.
handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]),
?SLOG(error, #{msg => "unexpected_info", info => Info}),
{noreply, State}.
terminate(_Reason, #{pool := Pool, id := Id}) ->

View File

@ -118,7 +118,7 @@ init([]) ->
{ok, #{pmon => emqx_pmon:new()}}.
handle_call(Req, _From, State) ->
?LOG(error, "Unexpected call: ~p", [Req]),
?SLOG(error, #{msg => "unexpected_call", call => Req}),
{reply, ignored, State}.
handle_cast({register_sub, SubPid, SubId}, State = #{pmon := PMon}) ->
@ -127,7 +127,7 @@ handle_cast({register_sub, SubPid, SubId}, State = #{pmon := PMon}) ->
{noreply, State#{pmon := emqx_pmon:monitor(SubPid, PMon)}};
handle_cast(Msg, State) ->
?LOG(error, "Unexpected cast: ~p", [Msg]),
?SLOG(error, #{msg => "unexpected_cast", cast => Msg}),
{noreply, State}.
handle_info({'DOWN', _MRef, process, SubPid, _Reason}, State = #{pmon := PMon}) ->
@ -138,7 +138,7 @@ handle_info({'DOWN', _MRef, process, SubPid, _Reason}, State = #{pmon := PMon})
{noreply, State#{pmon := PMon1}};
handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]),
?SLOG(error, #{msg => "unexpected_info", info => Info}),
{noreply, State}.
terminate(_Reason, _State) ->

View File

@ -373,11 +373,11 @@ handle_in(?PUBACK_PACKET(PacketId, _ReasonCode, Properties), Channel
ok = after_message_acked(ClientInfo, Msg, Properties),
handle_out(publish, Publishes, Channel#channel{session = NSession});
{error, ?RC_PACKET_IDENTIFIER_IN_USE} ->
?LOG(warning, "The PUBACK PacketId ~w is inuse.", [PacketId]),
?SLOG(warning, #{msg => "puback_packetId_inuse", packetId => PacketId}),
ok = emqx_metrics:inc('packets.puback.inuse'),
{ok, Channel};
{error, ?RC_PACKET_IDENTIFIER_NOT_FOUND} ->
?LOG(warning, "The PUBACK PacketId ~w is not found.", [PacketId]),
?SLOG(warning, #{msg => "puback_packetId_not_found", packetId => PacketId}),
ok = emqx_metrics:inc('packets.puback.missed'),
{ok, Channel}
end;
@ -390,11 +390,11 @@ handle_in(?PUBREC_PACKET(PacketId, _ReasonCode, Properties), Channel
NChannel = Channel#channel{session = NSession},
handle_out(pubrel, {PacketId, ?RC_SUCCESS}, NChannel);
{error, RC = ?RC_PACKET_IDENTIFIER_IN_USE} ->
?LOG(warning, "The PUBREC PacketId ~w is inuse.", [PacketId]),
?SLOG(warning, #{msg => "pubrec_packetId_inuse", packetId => PacketId}),
ok = emqx_metrics:inc('packets.pubrec.inuse'),
handle_out(pubrel, {PacketId, RC}, Channel);
{error, RC = ?RC_PACKET_IDENTIFIER_NOT_FOUND} ->
?LOG(warning, "The PUBREC ~w is not found.", [PacketId]),
?SLOG(warning, #{msg => "pubrec_packetId_not_found", packetId => PacketId}),
ok = emqx_metrics:inc('packets.pubrec.missed'),
handle_out(pubrel, {PacketId, RC}, Channel)
end;
@ -405,7 +405,7 @@ handle_in(?PUBREL_PACKET(PacketId, _ReasonCode), Channel = #channel{session = Se
NChannel = Channel#channel{session = NSession},
handle_out(pubcomp, {PacketId, ?RC_SUCCESS}, NChannel);
{error, RC = ?RC_PACKET_IDENTIFIER_NOT_FOUND} ->
?LOG(warning, "The PUBREL PacketId ~w is not found.", [PacketId]),
?SLOG(warning, #{msg => "pubrec_packetId_not_found", packetId => PacketId}),
ok = emqx_metrics:inc('packets.pubrel.missed'),
handle_out(pubcomp, {PacketId, RC}, Channel)
end;
@ -420,7 +420,7 @@ handle_in(?PUBCOMP_PACKET(PacketId, _ReasonCode), Channel = #channel{session = S
ok = emqx_metrics:inc('packets.pubcomp.inuse'),
{ok, Channel};
{error, ?RC_PACKET_IDENTIFIER_NOT_FOUND} ->
?LOG(warning, "The PUBCOMP PacketId ~w is not found", [PacketId]),
?SLOG(warning, #{msg => "pubcomp_packetId_not_found", packetId => PacketId}),
ok = emqx_metrics:inc('packets.pubcomp.missed'),
{ok, Channel}
end;
@ -501,11 +501,11 @@ handle_in({frame_error, Reason}, Channel = #channel{conn_state = ConnState})
handle_out(disconnect, {?RC_MALFORMED_PACKET, Reason}, Channel);
handle_in({frame_error, Reason}, Channel = #channel{conn_state = disconnected}) ->
?LOG(error, "Unexpected frame error: ~p", [Reason]),
?SLOG(error, #{msg => "malformed_mqtt_message", reason => Reason}),
{ok, Channel};
handle_in(Packet, Channel) ->
?LOG(error, "Unexpected incoming: ~p", [Packet]),
?SLOG(error, #{msg => "disconnecting_due_to_unexpected_message", packet => Packet}),
handle_out(disconnect, ?RC_PROTOCOL_ERROR, Channel).
%%--------------------------------------------------------------------
@ -529,7 +529,7 @@ process_connect(AckProps, Channel = #channel{conninfo = ConnInfo,
{error, client_id_unavailable} ->
handle_out(connack, ?RC_CLIENT_IDENTIFIER_NOT_VALID, Channel);
{error, Reason} ->
?LOG(error, "Failed to open session due to ~p", [Reason]),
?SLOG(error, #{msg => "failed_to_open_session", reason => Reason}),
handle_out(connack, ?RC_UNSPECIFIED_ERROR, Channel)
end.
@ -548,8 +548,11 @@ process_publish(Packet = ?PUBLISH_PACKET(QoS, Topic, PacketId), Channel) ->
Msg = packet_to_message(NPacket, NChannel),
do_publish(PacketId, Msg, NChannel);
{error, Rc = ?RC_NOT_AUTHORIZED, NChannel} ->
?LOG(warning, "Cannot publish message to ~s due to ~s.",
[Topic, emqx_reason_codes:text(Rc)]),
?SLOG(warning, #{
msg => "cannot_publish_to_topic",
topic => Topic,
reason => emqx_reason_codes:name(Rc)
}),
case emqx:get_config([authorization, deny_action], ignore) of
ignore ->
case QoS of
@ -563,8 +566,11 @@ process_publish(Packet = ?PUBLISH_PACKET(QoS, Topic, PacketId), Channel) ->
handle_out(disconnect, Rc, NChannel)
end;
{error, Rc = ?RC_QUOTA_EXCEEDED, NChannel} ->
?LOG(warning, "Cannot publish messages to ~s due to ~s.",
[Topic, emqx_reason_codes:text(Rc)]),
?SLOG(warning, #{
msg => "cannot_publish_to_topic",
topic => Topic,
reason => emqx_reason_codes:name(Rc)
}),
case QoS of
?QOS_0 ->
ok = emqx_metrics:inc('packets.publish.dropped'),
@ -575,8 +581,11 @@ process_publish(Packet = ?PUBLISH_PACKET(QoS, Topic, PacketId), Channel) ->
handle_out(pubrec, {PacketId, Rc}, NChannel)
end;
{error, Rc, NChannel} ->
?LOG(warning, "Cannot publish message to ~s due to ~s.",
[Topic, emqx_reason_codes:text(Rc)]),
?SLOG(warning, #{
msg => "cannot_publish_to_topic",
topic => Topic,
reason => emqx_reason_codes:name(Rc)
}),
handle_out(disconnect, Rc, NChannel)
end.
@ -621,8 +630,11 @@ do_publish(PacketId, Msg = #message{qos = ?QOS_2},
ok = emqx_metrics:inc('packets.publish.inuse'),
handle_out(pubrec, {PacketId, RC}, Channel);
{error, RC = ?RC_RECEIVE_MAXIMUM_EXCEEDED} ->
?LOG(warning, "Dropped the qos2 packet ~w "
"due to awaiting_rel is full.", [PacketId]),
?SLOG(warning, #{
msg => "dropped_qos2_packet",
reason => emqx_reason_codes:name(RC),
packetId => PacketId
}),
ok = emqx_metrics:inc('packets.publish.dropped'),
handle_out(pubrec, {PacketId, RC}, Channel)
end.
@ -671,8 +683,10 @@ process_subscribe([Topic = {TopicFilter, SubOpts}|More], SubProps, Channel, Acc)
Channel),
process_subscribe(More, SubProps, NChannel, [{Topic, ReasonCode} | Acc]);
{error, ReasonCode} ->
?LOG(warning, "Cannot subscribe ~s due to ~s.",
[TopicFilter, emqx_reason_codes:text(ReasonCode)]),
?SLOG(warning, #{
msg => "cannot_subscribe_topic_filter",
reason => emqx_reason_codes:name(ReasonCode)
}),
process_subscribe(More, SubProps, Channel, [{Topic, ReasonCode} | Acc])
end.
@ -685,8 +699,10 @@ do_subscribe(TopicFilter, SubOpts = #{qos := QoS}, Channel =
{ok, NSession} ->
{QoS, Channel#channel{session = NSession}};
{error, RC} ->
?LOG(warning, "Cannot subscribe ~s due to ~s.",
[TopicFilter, emqx_reason_codes:text(RC)]),
?SLOG(warning, #{
msg => "cannot_subscribe_topic_filter",
reason => emqx_reason_codes:text(RC)
}),
{RC, Channel}
end.
@ -869,7 +885,7 @@ handle_out(auth, {ReasonCode, Properties}, Channel) ->
{ok, ?AUTH_PACKET(ReasonCode, Properties), Channel};
handle_out(Type, Data, Channel) ->
?LOG(error, "Unexpected outgoing: ~s, ~p", [Type, Data]),
?SLOG(error, #{msg => "unexpected_outgoing", type => Type, data => Data}),
{ok, Channel}.
%%--------------------------------------------------------------------
@ -964,7 +980,7 @@ handle_call({quota, Policy}, Channel) ->
reply(ok, Channel#channel{quota = Quota});
handle_call(Req, Channel) ->
?LOG(error, "Unexpected call: ~p", [Req]),
?SLOG(error, #{msg => "unexpected_call", call => Req}),
reply(ignored, Channel).
%%--------------------------------------------------------------------
@ -1004,7 +1020,7 @@ handle_info({sock_closed, Reason}, Channel =
end;
handle_info({sock_closed, Reason}, Channel = #channel{conn_state = disconnected}) ->
?LOG(error, "Unexpected sock_closed: ~p", [Reason]),
?SLOG(error, #{msg => "unexpected_sock_close", reason => Reason}),
{ok, Channel};
handle_info(clean_authz_cache, Channel) ->
@ -1012,7 +1028,7 @@ handle_info(clean_authz_cache, Channel) ->
{ok, Channel};
handle_info(Info, Channel) ->
?LOG(error, "Unexpected info: ~p", [Info]),
?SLOG(error, #{msg => "unexpected_info", info => Info}),
{ok, Channel}.
%%--------------------------------------------------------------------
@ -1075,7 +1091,7 @@ handle_timeout(_TRef, expire_quota_limit, Channel) ->
{ok, clean_timer(quota_timer, Channel)};
handle_timeout(_TRef, Msg, Channel) ->
?LOG(error, "Unexpected timeout: ~p~n", [Msg]),
?SLOG(error, #{msg => "unexpected_timeout", timeout_message => Msg}),
{ok, Channel}.
%%--------------------------------------------------------------------

View File

@ -266,9 +266,8 @@ get_mqtt_conf(Zone, Key) ->
emqx_config:get_zone_conf(Zone, [mqtt, Key]).
%% @doc Try to takeover a session.
-spec(takeover_session(emqx_types:clientid())
-> {error, term()}
| {ok, atom(), pid(), emqx_session:session()}).
-spec(takeover_session(emqx_types:clientid()) ->
{error, term()} | {ok, atom(), pid(), emqx_session:session()}).
takeover_session(ClientId) ->
case lookup_channels(ClientId) of
[] -> {error, not_found};
@ -276,7 +275,7 @@ takeover_session(ClientId) ->
takeover_session(ClientId, ChanPid);
ChanPids ->
[ChanPid|StalePids] = lists:reverse(ChanPids),
?LOG(error, "More than one channel found: ~p", [ChanPids]),
?SLOG(warning, #{msg => "more_than_one_channel_found", chan_pids => ChanPids}),
lists:foreach(fun(StalePid) ->
catch discard_session(ClientId, StalePid)
end, StalePids),
@ -341,7 +340,7 @@ kick_session(ClientId) ->
kick_session(ClientId, ChanPid);
ChanPids ->
[ChanPid|StalePids] = lists:reverse(ChanPids),
?LOG(error, "More than one channel found: ~p", [ChanPids]),
?SLOG(warning, #{msg => "more_than_one_channel_found", chan_pids => ChanPids}),
lists:foreach(fun(StalePid) ->
catch discard_session(ClientId, StalePid)
end, StalePids),
@ -416,7 +415,7 @@ init([]) ->
{ok, #{chan_pmon => emqx_pmon:new()}}.
handle_call(Req, _From, State) ->
?LOG(error, "Unexpected call: ~p", [Req]),
?SLOG(error, #{msg => "unexpected_call", call => Req}),
{reply, ignored, State}.
handle_cast({registered, {ClientId, ChanPid}}, State = #{chan_pmon := PMon}) ->
@ -424,7 +423,7 @@ handle_cast({registered, {ClientId, ChanPid}}, State = #{chan_pmon := PMon}) ->
{noreply, State#{chan_pmon := PMon1}};
handle_cast(Msg, State) ->
?LOG(error, "Unexpected cast: ~p", [Msg]),
?SLOG(error, #{msg => "unexpected_cast", cast => Msg}),
{noreply, State}.
handle_info({'DOWN', _MRef, process, Pid, _Reason}, State = #{chan_pmon := PMon}) ->
@ -434,7 +433,8 @@ handle_info({'DOWN', _MRef, process, Pid, _Reason}, State = #{chan_pmon := PMon}
{noreply, State#{chan_pmon := PMon1}};
handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]),
?SLOG(error, #{msg => "unexpected_info", info => Info}),
{noreply, State}.
terminate(_Reason, _State) ->

View File

@ -114,11 +114,11 @@ init([]) ->
{ok, #{}}.
handle_call(Req, _From, State) ->
?LOG(error, "Unexpected call: ~p", [Req]),
?SLOG(error, #{msg => "unexpected_call", call => Req}),
{reply, ignored, State}.
handle_cast(Msg, State) ->
?LOG(error, "Unexpected cast: ~p", [Msg]),
?SLOG(error, #{msg => "unexpected_cast", cast => Msg}),
{noreply, State}.
handle_info({membership, {mnesia, down, Node}}, State) ->
@ -132,7 +132,7 @@ handle_info({membership, _Event}, State) ->
{noreply, State};
handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]),
?SLOG(error, #{msg => "unexpected_info", info => Info}),
{noreply, State}.
terminate(_Reason, _State) ->

View File

@ -66,6 +66,8 @@
, find_listener_conf/3
]).
-include("logger.hrl").
-define(CONF, conf).
-define(RAW_CONF, raw_conf).
-define(PERSIS_SCHEMA_MODS, {?MODULE, schema_mods}).
@ -250,7 +252,7 @@ init_load(SchemaMod, Conf) when is_list(Conf) orelse is_binary(Conf) ->
{ok, RawRichConf} ->
init_load(SchemaMod, RawRichConf);
{error, Reason} ->
logger:error(#{msg => failed_to_load_hocon_conf,
?SLOG(error, #{msg => failed_to_load_hocon_conf,
reason => Reason
}),
error(failed_to_load_hocon_conf)
@ -294,7 +296,8 @@ fill_defaults(RawConf) ->
-spec fill_defaults(module(), raw_config()) -> map().
fill_defaults(SchemaMod, RawConf) ->
hocon_schema:check_plain(SchemaMod, RawConf,
#{nullable => true, no_conversion => true}, root_names_from_conf(RawConf)).
#{nullable => true, only_fill_defaults => true},
root_names_from_conf(RawConf)).
-spec read_override_conf() -> raw_config().
read_override_conf() ->
@ -358,7 +361,9 @@ save_to_override_conf(RawConf) ->
case file:write_file(FileName, hocon_pp:do(RawConf, #{})) of
ok -> ok;
{error, Reason} ->
logger:error("write to ~s failed, ~p", [FileName, Reason]),
?SLOG(error, #{msg => failed_to_write_override_file,
filename => FileName,
reason => Reason}),
{error, Reason}
end
end.

View File

@ -77,7 +77,7 @@ stop() ->
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
update_config(SchemaModule, ConfKeyPath, UpdateArgs) ->
?ATOM_CONF_PATH(ConfKeyPath, gen_server:call(?MODULE, {change_config, SchemaModule,
AtomKeyPath, UpdateArgs}), {error, ConfKeyPath}).
AtomKeyPath, UpdateArgs}), {error, {not_found, ConfKeyPath}}).
-spec add_handler(emqx_config:config_key_path(), handler_name()) -> ok.
add_handler(ConfKeyPath, HandlerName) ->
@ -117,7 +117,12 @@ handle_call({change_config, SchemaModule, ConfKeyPath, UpdateArgs}, _From,
{error, Result}
end
catch Error:Reason:ST ->
?LOG(error, "change_config failed: ~p", [{Error, Reason, ST}]),
?SLOG(error, #{
msg => "change_config_failed",
exception => Error,
reason => Reason,
stacktrace => ST
}),
{error, Reason}
end,
{reply, Reply, State};

View File

@ -417,14 +417,14 @@ handle_msg({'$gen_cast', Req}, State) ->
{ok, NewState};
handle_msg({Inet, _Sock, Data}, State) when Inet == tcp; Inet == ssl ->
?LOG(debug, "RECV ~0p", [Data]),
?SLOG(debug, #{msg => "RECV_data", data => Data, transport => Inet}),
Oct = iolist_size(Data),
inc_counter(incoming_bytes, Oct),
ok = emqx_metrics:inc('bytes.received', Oct),
parse_incoming(Data, State);
handle_msg({quic, Data, _Sock, _, _, _}, State) ->
?LOG(debug, "RECV ~0p", [Data]),
?SLOG(debug, #{msg => "RECV_data", data => Data, transport => quic}),
Oct = iolist_size(Data),
inc_counter(incoming_bytes, Oct),
ok = emqx_metrics:inc('bytes.received', Oct),
@ -489,7 +489,7 @@ handle_msg({connack, ConnAck}, State) ->
handle_outgoing(ConnAck, State);
handle_msg({close, Reason}, State) ->
?LOG(debug, "Force to close the socket due to ~p", [Reason]),
?SLOG(debug, #{msg => "force_socket_close", reason => Reason}),
handle_info({sock_closed, Reason}, close_socket(State));
handle_msg({event, connected}, State = #state{channel = Channel}) ->
@ -644,9 +644,20 @@ parse_incoming(Data, Packets, State = #state{parse_state = ParseState}) ->
NState = State#state{parse_state = NParseState},
parse_incoming(Rest, [Packet|Packets], NState)
catch
error:Reason:Stk ->
?LOG(error, "~nParse failed for ~0p~n~0p~nFrame data:~0p",
[Reason, Stk, Data]),
throw : ?FRAME_PARSE_ERROR(Reason) ->
?SLOG(info, #{ reason => Reason
, at_state => emqx_frame:describe_state(ParseState)
, input_bytes => Data
, parsed_packets => Packets
}),
{[{frame_error, Reason} | Packets], State};
error : Reason : Stacktrace ->
?SLOG(error, #{ at_state => emqx_frame:describe_state(ParseState)
, input_bytes => Data
, parsed_packets => Packets
, reason => Reason
, stacktrace => Stacktrace
}),
{[{frame_error, Reason} | Packets], State}
end.
@ -661,7 +672,7 @@ next_incoming_msgs(Packets) ->
handle_incoming(Packet, State) when is_record(Packet, mqtt_packet) ->
ok = inc_incoming_stats(Packet),
?LOG(debug, "RECV ~s", [emqx_packet:format(Packet)]),
?SLOG(debug, #{msg => "RECV_packet", packet => Packet}),
with_channel(handle_in, [Packet], State);
handle_incoming(FrameError, State) ->
@ -696,15 +707,32 @@ handle_outgoing(Packet, State) ->
serialize_and_inc_stats_fun(#state{serialize = Serialize}) ->
fun(Packet) ->
case emqx_frame:serialize_pkt(Packet, Serialize) of
<<>> -> ?LOG(warning, "~s is discarded due to the frame is too large!",
[emqx_packet:format(Packet)]),
try emqx_frame:serialize_pkt(Packet, Serialize) of
<<>> -> ?SLOG(warning, #{
msg => "packet_is_discarded",
reason => "frame_is_too_large",
packet => emqx_packet:format(Packet)
}),
ok = emqx_metrics:inc('delivery.dropped.too_large'),
ok = emqx_metrics:inc('delivery.dropped'),
<<>>;
Data -> ?LOG(debug, "SEND ~s", [emqx_packet:format(Packet)]),
Data -> ?SLOG(debug, #{
msg => "SEND_packet",
packet => emqx_packet:format(Packet)
}),
ok = inc_outgoing_stats(Packet),
Data
catch
%% Maybe Never happen.
throw : ?FRAME_SERIALIZE_ERROR(Reason) ->
?SLOG(info, #{ reason => Reason
, input_packet => Packet}),
erlang:error(?FRAME_SERIALIZE_ERROR(Reason));
error : Reason : Stacktrace ->
?SLOG(error, #{ input_packet => Packet
, exception => Reason
, stacktrace => Stacktrace}),
erlang:error(frame_serialize_error)
end
end.
@ -741,7 +769,7 @@ handle_info(activate_socket, State = #state{sockstate = OldSst}) ->
handle_info({sock_error, Reason}, State) ->
case Reason =/= closed andalso Reason =/= einval of
true -> ?LOG(warning, "socket_error: ~p", [Reason]);
true -> ?SLOG(warning, #{msg => "socket_error", reason => Reason});
false -> ok
end,
handle_info({sock_closed, Reason}, close_socket(State));
@ -783,7 +811,7 @@ ensure_rate_limit(Stats, State = #state{limiter = Limiter}) ->
{ok, Limiter1} ->
State#state{limiter = Limiter1};
{pause, Time, Limiter1} ->
?LOG(warning, "Pause ~pms due to rate limit", [Time]),
?SLOG(warning, #{msg => "pause_time_due_to_rate_limit", time_in_ms => Time}),
TRef = start_timer(Time, limit_timeout),
State#state{sockstate = blocked,
limiter = Limiter1,

View File

@ -185,13 +185,13 @@ handle_call({register_command, Cmd, MF, Opts}, _From, State = #state{seq = Seq})
case ets:match(?CMD_TAB, {{'$1', Cmd}, '_', '_'}) of
[] -> ets:insert(?CMD_TAB, {{Seq, Cmd}, MF, Opts});
[[OriginSeq] | _] ->
?LOG(warning, "CMD ~s is overidden by ~p", [Cmd, MF]),
?SLOG(warning, #{msg => "CMD_overidden", cmd => Cmd, mf => MF}),
true = ets:insert(?CMD_TAB, {{OriginSeq, Cmd}, MF, Opts})
end,
{reply, ok, next_seq(State)};
handle_call(Req, _From, State) ->
?LOG(error, "Unexpected call: ~p", [Req]),
?SLOG(error, #{msg => "unexpected_call", call => Req}),
{reply, ignored, State}.
handle_cast({unregister_command, Cmd}, State) ->
@ -199,11 +199,11 @@ handle_cast({unregister_command, Cmd}, State) ->
noreply(State);
handle_cast(Msg, State) ->
?LOG(error, "Unexpected cast: ~p", [Msg]),
?SLOG(error, #{msg => "unexpected_cast", cast => Msg}),
noreply(State).
handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]),
?SLOG(error, #{msg => "unexpected_info", info => Info}),
noreply(State).
terminate(_Reason, _State) ->

View File

@ -106,7 +106,7 @@ init([]) ->
{ok, #{}, hibernate}.
handle_call(Req, _From, State) ->
?LOG(error, "Unexpected call: ~p", [Req]),
?SLOG(error, #{msg => "unexpected_call", call => Req}),
{reply, ignored, State}.
handle_cast({detected, #flapping{clientid = ClientId,
@ -116,8 +116,13 @@ handle_cast({detected, #flapping{clientid = ClientId,
#{window_time := WindTime, ban_time := Interval}}, State) ->
case now_diff(StartedAt) < WindTime of
true -> %% Flapping happened:(
?LOG(error, "Flapping detected: ~s(~s) disconnected ~w times in ~wms",
[ClientId, inet:ntoa(PeerHost), DetectCnt, WindTime]),
?SLOG(warning, #{
msg => "flapping_detected",
client_id => ClientId,
peer_host => fmt_host(PeerHost),
detect_cnt => DetectCnt,
wind_time_in_ms => WindTime
}),
Now = erlang:system_time(second),
Banned = #banned{who = {clientid, ClientId},
by = <<"flapping detector">>,
@ -126,13 +131,18 @@ handle_cast({detected, #flapping{clientid = ClientId,
until = Now + (Interval div 1000)},
emqx_banned:create(Banned);
false ->
?LOG(warning, "~s(~s) disconnected ~w times in ~wms",
[ClientId, inet:ntoa(PeerHost), DetectCnt, Interval])
?SLOG(warning, #{
msg => "client_disconnected",
client_id => ClientId,
peer_host => fmt_host(PeerHost),
detect_cnt => DetectCnt,
interval => Interval
})
end,
{noreply, State};
handle_cast(Msg, State) ->
?LOG(error, "Unexpected cast: ~p", [Msg]),
?SLOG(error, #{msg => "unexpected_cast", cast => Msg}),
{noreply, State}.
handle_info({timeout, _TRef, {garbage_collect, Zone}}, State) ->
@ -144,7 +154,7 @@ handle_info({timeout, _TRef, {garbage_collect, Zone}}, State) ->
{noreply, State, hibernate};
handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]),
?SLOG(error, #{msg => "unexpected_info", info => Info}),
{noreply, State}.
terminate(_Reason, _State) ->
@ -161,3 +171,8 @@ start_timers() ->
lists:foreach(fun({Zone, _ZoneConf}) ->
start_timer(Zone)
end, maps:to_list(emqx:get_config([zones], #{}))).
fmt_host(PeerHost) ->
try inet:ntoa(PeerHost)
catch _:_ -> PeerHost
end.

View File

@ -34,6 +34,10 @@
, serialize/2
]).
-export([ describe_state/1
]).
-export_type([ options/0
, parse_state/0
, parse_result/0
@ -47,7 +51,9 @@
version => emqx_types:proto_ver()
}).
-type(parse_state() :: {none, options()} | {cont_state(), options()}).
-define(NONE(Options), {none, Options}).
-type(parse_state() :: ?NONE(options()) | {cont_state(), options()}).
-type(parse_result() :: {more, parse_state()}
| {ok, emqx_types:packet(), binary(), parse_state()}).
@ -61,27 +67,45 @@
-type(serialize_opts() :: options()).
-define(none(Options), {none, Options}).
-define(DEFAULT_OPTIONS,
#{strict_mode => false,
max_size => ?MAX_PACKET_SIZE,
version => ?MQTT_PROTO_V4
}).
-define(PARSE_ERR(Reason), ?THROW_FRAME_ERROR(Reason)).
-define(SERIALIZE_ERR(Reason), ?THROW_SERIALIZE_ERROR(Reason)).
-define(MULTIPLIER_MAX, 16#200000).
-dialyzer({no_match, [serialize_utf8_string/2]}).
%% @doc Describe state for logging.
describe_state(?NONE(_Opts)) -> <<"clean">>;
describe_state({{len, _}, _Opts}) -> <<"parsing_varint_length">>;
describe_state({{body, State}, _Opts}) ->
#{ hdr := Hdr
, len := Len
} = State,
Desc = #{ parsed_header => Hdr
, expected_bytes => Len
},
case maps:get(rest, State, undefined) of
undefined -> Desc;
Body -> Desc#{received_bytes => body_bytes(Body)}
end.
%%--------------------------------------------------------------------
%% Init Parse State
%%--------------------------------------------------------------------
-spec(initial_parse_state() -> {none, options()}).
-spec(initial_parse_state() -> ?NONE(options())).
initial_parse_state() ->
initial_parse_state(#{}).
-spec(initial_parse_state(options()) -> {none, options()}).
-spec(initial_parse_state(options()) -> ?NONE(options())).
initial_parse_state(Options) when is_map(Options) ->
?none(maps:merge(?DEFAULT_OPTIONS, Options)).
?NONE(maps:merge(?DEFAULT_OPTIONS, Options)).
%%--------------------------------------------------------------------
%% Parse MQTT Frame
@ -92,10 +116,10 @@ parse(Bin) ->
parse(Bin, initial_parse_state()).
-spec(parse(binary(), parse_state()) -> parse_result()).
parse(<<>>, {none, Options}) ->
{more, {none, Options}};
parse(<<>>, ?NONE(Options)) ->
{more, ?NONE(Options)};
parse(<<Type:4, Dup:1, QoS:2, Retain:1, Rest/binary>>,
{none, Options = #{strict_mode := StrictMode}}) ->
?NONE(Options = #{strict_mode := StrictMode})) ->
%% Validate header if strict mode.
StrictMode andalso validate_header(Type, Dup, QoS, Retain),
Header = #mqtt_packet_header{type = Type,
@ -123,14 +147,14 @@ parse_remaining_len(Rest, Header, Options) ->
parse_remaining_len(_Bin, _Header, _Multiplier, Length, #{max_size := MaxSize})
when Length > MaxSize ->
error(frame_too_large);
?PARSE_ERR(frame_too_large);
parse_remaining_len(<<>>, Header, Multiplier, Length, Options) ->
{more, {{len, #{hdr => Header, len => {Multiplier, Length}}}, Options}};
%% Match DISCONNECT without payload
parse_remaining_len(<<0:8, Rest/binary>>,
Header = #mqtt_packet_header{type = ?DISCONNECT}, 1, 0, Options) ->
Packet = packet(Header, #mqtt_packet_disconnect{reason_code = ?RC_SUCCESS}),
{ok, Packet, Rest, ?none(Options)};
{ok, Packet, Rest, ?NONE(Options)};
%% Match PINGREQ.
parse_remaining_len(<<0:8, Rest/binary>>, Header, 1, 0, Options) ->
parse_frame(Rest, Header, 0, Options);
@ -138,21 +162,22 @@ parse_remaining_len(<<0:8, Rest/binary>>, Header, 1, 0, Options) ->
parse_remaining_len(<<0:1, 2:7, Rest/binary>>, Header, 1, 0, Options) ->
parse_frame(Rest, Header, 2, Options);
parse_remaining_len(<<1:1, _Len:7, _Rest/binary>>, _Header, Multiplier, _Value, _Options)
when Multiplier > 2097152 ->
error(malformed_variable_byte_integer);
when Multiplier > ?MULTIPLIER_MAX ->
?PARSE_ERR(malformed_variable_byte_integer);
parse_remaining_len(<<1:1, Len:7, Rest/binary>>, Header, Multiplier, Value, Options) ->
parse_remaining_len(Rest, Header, Multiplier * ?HIGHBIT, Value + Len * Multiplier, Options);
parse_remaining_len(<<0:1, Len:7, Rest/binary>>, Header, Multiplier, Value,
Options = #{max_size := MaxSize}) ->
FrameLen = Value + Len * Multiplier,
case FrameLen > MaxSize of
true -> error(frame_too_large);
true -> ?PARSE_ERR(frame_too_large);
false -> parse_frame(Rest, Header, FrameLen, Options)
end.
body_bytes(B) when is_binary(B) -> size(B);
body_bytes(?Q(Bytes, _)) -> Bytes.
append_body(H, <<>>) -> H;
append_body(H, T) when is_binary(H) andalso size(H) < 1024 ->
<<H/binary, T/binary>>;
append_body(H, T) when is_binary(H) ->
@ -165,18 +190,18 @@ flatten_body(Body) when is_binary(Body) -> Body;
flatten_body(?Q(_, Q)) -> iolist_to_binary(queue:to_list(Q)).
parse_frame(Body, Header, 0, Options) ->
{ok, packet(Header), flatten_body(Body), ?none(Options)};
{ok, packet(Header), flatten_body(Body), ?NONE(Options)};
parse_frame(Body, Header, Length, Options) ->
case body_bytes(Body) >= Length of
true ->
<<FrameBin:Length/binary, Rest/binary>> = flatten_body(Body),
case parse_packet(Header, FrameBin, Options) of
{Variable, Payload} ->
{ok, packet(Header, Variable, Payload), Rest, ?none(Options)};
{ok, packet(Header, Variable, Payload), Rest, ?NONE(Options)};
Variable = #mqtt_packet_connect{proto_ver = Ver} ->
{ok, packet(Header, Variable), Rest, ?none(Options#{version := Ver})};
{ok, packet(Header, Variable), Rest, ?NONE(Options#{version := Ver})};
Variable ->
{ok, packet(Header, Variable), Rest, ?none(Options)}
{ok, packet(Header, Variable), Rest, ?NONE(Options)}
end;
false ->
{more, {{body, #{hdr => Header,
@ -420,10 +445,16 @@ parse_property(<<16#28, Val, Bin/binary>>, Props) ->
parse_property(<<16#29, Val, Bin/binary>>, Props) ->
parse_property(Bin, Props#{'Subscription-Identifier-Available' => Val});
parse_property(<<16#2A, Val, Bin/binary>>, Props) ->
parse_property(Bin, Props#{'Shared-Subscription-Available' => Val}).
parse_property(Bin, Props#{'Shared-Subscription-Available' => Val});
parse_property(<<Property:8, _Rest/binary>>, _Props) ->
?PARSE_ERR(#{invalid_property_code => Property}).
%% TODO: invalid property in specific packet.
parse_variable_byte_integer(Bin) ->
parse_variable_byte_integer(Bin, 1, 0).
parse_variable_byte_integer(<<1:1, _Len:7, _Rest/binary>>, Multiplier, _Value)
when Multiplier > ?MULTIPLIER_MAX ->
?PARSE_ERR(malformed_variable_byte_integer);
parse_variable_byte_integer(<<1:1, Len:7, Rest/binary>>, Multiplier, Value) ->
parse_variable_byte_integer(Rest, Multiplier * ?HIGHBIT, Value + Len * Multiplier);
parse_variable_byte_integer(<<0:1, Len:7, Rest/binary>>, Multiplier, Value) ->
@ -441,7 +472,23 @@ parse_reason_codes(Bin) ->
parse_utf8_pair(<<Len1:16/big, Key:Len1/binary,
Len2:16/big, Val:Len2/binary, Rest/binary>>) ->
{{Key, Val}, Rest}.
{{Key, Val}, Rest};
parse_utf8_pair(<<LenK:16/big, Rest/binary>>)
when LenK > byte_size(Rest) ->
?PARSE_ERR(#{ hint => user_property_not_enough_bytes
, parsed_key_length => LenK
, remaining_bytes_length => byte_size(Rest)});
parse_utf8_pair(<<LenK:16/big, _Key:LenK/binary, %% key maybe malformed
LenV:16/big, Rest/binary>>)
when LenV > byte_size(Rest) ->
?PARSE_ERR(#{ hint => malformed_user_property_value
, parsed_key_length => LenK
, parsed_value_length => LenV
, remaining_bytes_length => byte_size(Rest)});
parse_utf8_pair(Bin)
when 4 > byte_size(Bin) ->
?PARSE_ERR(#{ hint => user_property_not_enough_bytes
, total_bytes => byte_size(Bin)}).
parse_utf8_string(Bin, false) ->
{undefined, Bin};
@ -449,10 +496,26 @@ parse_utf8_string(Bin, true) ->
parse_utf8_string(Bin).
parse_utf8_string(<<Len:16/big, Str:Len/binary, Rest/binary>>) ->
{Str, Rest}.
{Str, Rest};
parse_utf8_string(<<Len:16/big, Rest/binary>>)
when Len > byte_size(Rest) ->
?PARSE_ERR(#{ hint => malformed_utf8_string
, parsed_length => Len
, remaining_bytes_length => byte_size(Rest)});
parse_utf8_string(Bin)
when 2 > byte_size(Bin) ->
?PARSE_ERR(malformed_utf8_string_length).
parse_binary_data(<<Len:16/big, Data:Len/binary, Rest/binary>>) ->
{Data, Rest}.
{Data, Rest};
parse_binary_data(<<Len:16/big, Rest/binary>>)
when Len > byte_size(Rest) ->
?PARSE_ERR(#{ hint => malformed_binary_data
, parsed_length => Len
, remaining_bytes_length => byte_size(Rest)});
parse_binary_data(Bin)
when 2 > byte_size(Bin) ->
?PARSE_ERR(malformed_binary_data_length).
%%--------------------------------------------------------------------
%% Serialize MQTT Packet
@ -719,7 +782,7 @@ serialize_binary_data(Bin) ->
[<<(byte_size(Bin)):16/big-unsigned-integer>>, Bin].
serialize_utf8_string(undefined, false) ->
error(utf8_string_undefined);
?SERIALIZE_ERR(utf8_string_undefined);
serialize_utf8_string(undefined, true) ->
<<>>;
serialize_utf8_string(String, _AllowNull) ->
@ -767,13 +830,13 @@ validate_header(?PINGREQ, 0, 0, 0) -> ok;
validate_header(?PINGRESP, 0, 0, 0) -> ok;
validate_header(?DISCONNECT, 0, 0, 0) -> ok;
validate_header(?AUTH, 0, 0, 0) -> ok;
validate_header(_Type, _Dup, _QoS, _Rt) -> error(bad_frame_header).
validate_header(_Type, _Dup, _QoS, _Rt) -> ?PARSE_ERR(bad_frame_header).
-compile({inline, [validate_packet_id/1]}).
validate_packet_id(0) -> error(bad_packet_id);
validate_packet_id(0) -> ?PARSE_ERR(bad_packet_id);
validate_packet_id(_) -> ok.
validate_subqos([3|_]) -> error(bad_subqos);
validate_subqos([3|_]) -> ?PARSE_ERR(bad_subqos);
validate_subqos([_|T]) -> validate_subqos(T);
validate_subqos([]) -> ok.

View File

@ -67,7 +67,7 @@
%% - The execution order is the adding order of callbacks if they have
%% equal priority values.
-type(hookpoint() :: atom()).
-type(hookpoint() :: atom() | binary()).
-type(action() :: {module(), atom(), [term()] | undefined}).
-type(filter() :: {module(), atom(), [term()] | undefined}).
@ -158,12 +158,12 @@ del(HookPoint, Action) ->
gen_server:cast(?SERVER, {del, HookPoint, Action}).
%% @doc Run hooks.
-spec(run(atom(), list(Arg::term())) -> ok).
-spec(run(hookpoint(), list(Arg::term())) -> ok).
run(HookPoint, Args) ->
do_run(lookup(HookPoint), Args).
%% @doc Run hooks with Accumulator.
-spec(run_fold(atom(), list(Arg::term()), Acc::term()) -> Acc::term()).
-spec(run_fold(hookpoint(), list(Arg::term()), Acc::term()) -> Acc::term()).
run_fold(HookPoint, Args, Acc) ->
do_run_fold(lookup(HookPoint), Args, Acc).
@ -206,8 +206,13 @@ safe_execute({M, F, A}, Args) ->
Result -> Result
catch
Error:Reason:Stacktrace ->
?LOG(error, "Failed to execute ~0p: ~0p", [{M, F, A}, {Error, Reason, Stacktrace}]),
ok
?SLOG(error, #{
msg => "failed_to_execute",
exception => Error,
reason => Reason,
stacktrace => Stacktrace,
failed_call => {M, F, A}
})
end.
%% @doc execute a function.
@ -246,7 +251,7 @@ handle_call({put, HookPoint, Callback = #callback{action = {M, F, _}}}, _From, S
{reply, Reply, State};
handle_call(Req, _From, State) ->
?LOG(error, "Unexpected call: ~p", [Req]),
?SLOG(error, #{msg => "unexpected_call", req => Req}),
{reply, ignored, State}.
handle_cast({del, HookPoint, Action}, State) ->
@ -259,11 +264,11 @@ handle_cast({del, HookPoint, Action}, State) ->
{noreply, State};
handle_cast(Msg, State) ->
?LOG(error, "Unexpected msg: ~p", [Msg]),
?SLOG(error, #{msg => "unexpected_cast", req => Msg}),
{noreply, State}.
handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]),
?SLOG(error, #{msg => "unexpected_info", info => Info}),
{noreply, State}.
terminate(_Reason, _State) ->

View File

@ -46,6 +46,7 @@
-export([post_config_update/4]).
-define(CONF_KEY_PATH, [listeners]).
-define(TYPES_STRING, ["tcp","ssl","ws","wss","quic"]).
%% @doc List configured listeners.
-spec(list() -> [{ListenerId :: atom(), ListenerConf :: map()}]).
@ -349,11 +350,10 @@ listener_id(Type, ListenerName) ->
list_to_atom(lists:append([str(Type), ":", str(ListenerName)])).
parse_listener_id(Id) ->
try
[Type, Name] = string:split(str(Id), ":", leading),
{list_to_existing_atom(Type), list_to_atom(Name)}
catch
_ : _ -> error({invalid_listener_id, Id})
case lists:member(Type, ?TYPES_STRING) of
true -> {list_to_existing_atom(Type), list_to_atom(Name)};
false -> {error, {invalid_listener_id, Id}}
end.
zone(Opts) ->

View File

@ -66,6 +66,7 @@
-export([ to_packet/2
, to_map/1
, to_log_map/1
, to_list/1
, from_map/1
]).
@ -79,11 +80,10 @@
headers := emqx_types:headers(),
topic := emqx_types:topic(),
payload := emqx_types:payload(),
timestamp := integer()}
timestamp := integer(),
extra := _}
).
-export([format/1]).
-elvis([{elvis_style, god_modules, disable}]).
-spec(make(emqx_types:topic(), emqx_types:payload()) -> emqx_types:message()).
@ -292,7 +292,8 @@ to_map(#message{
headers = Headers,
topic = Topic,
payload = Payload,
timestamp = Timestamp
timestamp = Timestamp,
extra = Extra
}) ->
#{id => Id,
qos => QoS,
@ -301,9 +302,13 @@ to_map(#message{
headers => Headers,
topic => Topic,
payload => Payload,
timestamp => Timestamp
timestamp => Timestamp,
extra => Extra
}.
%% @doc To map for logging, with payload dropped.
to_log_map(Msg) -> maps:without([payload], to_map(Msg)).
%% @doc Message to tuple list
-spec(to_list(emqx_types:message()) -> list()).
to_list(Msg) ->
@ -318,7 +323,8 @@ from_map(#{id := Id,
headers := Headers,
topic := Topic,
payload := Payload,
timestamp := Timestamp
timestamp := Timestamp,
extra := Extra
}) ->
#message{
id = Id,
@ -328,24 +334,10 @@ from_map(#{id := Id,
headers = Headers,
topic = Topic,
payload = Payload,
timestamp = Timestamp
timestamp = Timestamp,
extra = Extra
}.
%% MilliSeconds
elapsed(Since) ->
max(0, erlang:system_time(millisecond) - Since).
format(#message{id = Id,
qos = QoS,
topic = Topic,
from = From,
flags = Flags,
headers = Headers}) ->
io_lib:format("Message(Id=~s, QoS=~w, Topic=~s, From=~p, Flags=~s, Headers=~s)",
[Id, QoS, Topic, From, format(flags, Flags), format(headers, Headers)]).
format(flags, Flags) ->
io_lib:format("~p", [[Flag || {Flag, true} <- maps:to_list(Flags)]]);
format(headers, Headers) ->
io_lib:format("~p", [Headers]).

View File

@ -442,13 +442,17 @@ init([]) ->
{ok, #state{next_idx = ?RESERVED_IDX + 1}, hibernate}.
handle_call({create, Type, Name}, _From, State = #state{next_idx = ?MAX_SIZE}) ->
?LOG(error, "Failed to create ~s:~s for index exceeded.", [Type, Name]),
?SLOG(error, #{
msg => "failed_to_create_type_name_for_index_exceeded",
type => Type,
name => Name
}),
{reply, {error, metric_index_exceeded}, State};
handle_call({create, Type, Name}, _From, State = #state{next_idx = NextIdx}) ->
case ets:lookup(?TAB, Name) of
[#metric{idx = Idx}] ->
?LOG(info, "~s already exists.", [Name]),
?SLOG(info, #{msg => "name_already_exists", name => Name}),
{reply, {ok, Idx}, State};
[] ->
Metric = #metric{name = Name, type = Type, idx = NextIdx},
@ -464,15 +468,15 @@ handle_call({set_type_to_counter, Keys}, _From, State) ->
{reply, ok, State};
handle_call(Req, _From, State) ->
?LOG(error, "Unexpected call: ~p", [Req]),
?SLOG(error, #{msg => "unexpected_call", req => Req}),
{reply, ignored, State}.
handle_cast(Msg, State) ->
?LOG(error, "Unexpected cast: ~p", [Msg]),
?SLOG(error, #{msg => "unexpected_cast", req => Msg}),
{noreply, State}.
handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]),
?SLOG(error, #{msg => "unexpected_info", info => Info}),
{noreply, State}.
terminate(_Reason, _State) ->

View File

@ -87,7 +87,7 @@ handle_call(Req, _From, State) ->
{reply, {error, {unexpected_call, Req}}, State}.
handle_cast(Msg, State) ->
?LOG(error, "unexpected_cast_discarded: ~p", [Msg]),
?SLOG(error, #{msg => "unexpected_cast", cast=> Msg}),
{noreply, State}.
handle_info({timeout, _Timer, check}, State) ->
@ -109,7 +109,7 @@ handle_info({timeout, _Timer, check}, State) ->
{noreply, State};
handle_info(Info, State) ->
?LOG(info, "unexpected_info_discarded: ~p", [Info]),
?SLOG(error, #{msg => "unexpected_info", info => Info}),
{noreply, State}.
terminate(_Reason, _State) ->

View File

@ -20,6 +20,8 @@
, check_pass/2
]).
-include("logger.hrl").
-type(hash_type() :: plain | md5 | sha | sha256 | pbkdf2 | bcrypt).
-export_type([hash_type/0]).
@ -67,8 +69,8 @@ hash(pbkdf2, {Salt, Password, Macfun, Iterations, Dklen}) ->
case pbkdf2:pbkdf2(Macfun, Password, Salt, Iterations, Dklen) of
{ok, Hexstring} ->
pbkdf2:to_hex(Hexstring);
{error, Error} ->
error_logger:error_msg("pbkdf2 hash error:~p", [Error]),
{error, Reason} ->
?SLOG(error, #{msg => "pbkdf2_hash_error", reason => Reason}),
<<>>
end;
hash(bcrypt, {Salt, Password}) ->
@ -76,8 +78,8 @@ hash(bcrypt, {Salt, Password}) ->
case bcrypt:hashpw(Password, Salt) of
{ok, HashPasswd} ->
list_to_binary(HashPasswd);
{error, Error}->
error_logger:error_msg("bcrypt hash error:~p", [Error]),
{error, Reason}->
?SLOG(error, #{msg => "bcrypt_hash_error", reason => Reason}),
<<>>
end.

View File

@ -29,8 +29,6 @@
, find_plugin/1
]).
-export([funlog/2]).
-ifdef(TEST).
-compile(export_all).
-compile(nowarn_export_all).
@ -50,10 +48,14 @@ load() ->
load(PluginName) when is_atom(PluginName) ->
case {lists:member(PluginName, names(plugin)), lists:member(PluginName, names(started_app))} of
{false, _} ->
?LOG(alert, "Plugin ~s not found, cannot load it", [PluginName]),
?SLOG(alert, #{msg => "failed_to_load_plugin",
plugin_name => PluginName,
reason => not_found}),
{error, not_found};
{_, true} ->
?LOG(notice, "Plugin ~s is already started", [PluginName]),
?SLOG(notice, #{msg => "plugin_already_loaded",
plugin_name => PluginName,
reason => already_loaded}),
{error, already_started};
{_, false} ->
load_plugin(PluginName)
@ -69,10 +71,14 @@ unload() ->
unload(PluginName) when is_atom(PluginName) ->
case {lists:member(PluginName, names(plugin)), lists:member(PluginName, names(started_app))} of
{false, _} ->
?LOG(error, "Plugin ~s is not found, cannot unload it", [PluginName]),
?SLOG(error, #{msg => "fialed_to_unload_plugin",
plugin_name => PluginName,
reason => not_found}),
{error, not_found};
{_, false} ->
?LOG(error, "Plugin ~s is not started", [PluginName]),
?SLOG(error, #{msg => "failed_to_unload_plugin",
plugin_name => PluginName,
reason => not_loaded}),
{error, not_started};
{_, _} ->
unload_plugin(PluginName)
@ -81,7 +87,9 @@ unload(PluginName) when is_atom(PluginName) ->
reload(PluginName) when is_atom(PluginName)->
case {lists:member(PluginName, names(plugin)), lists:member(PluginName, names(started_app))} of
{false, _} ->
?LOG(error, "Plugin ~s is not found, cannot reload it", [PluginName]),
?SLOG(error, #{msg => "failed_to_reload_plugin",
plugin_name => PluginName,
reason => not_found}),
{error, not_found};
{_, false} ->
load(PluginName);
@ -127,14 +135,14 @@ load_ext_plugins(Dir) ->
end, filelib:wildcard("*", Dir)).
load_ext_plugin(PluginDir) ->
?LOG(debug, "loading_extra_plugin: ~s", [PluginDir]),
?SLOG(debug, #{msg => "loading_extra_plugin", plugin_dir => PluginDir}),
Ebin = filename:join([PluginDir, "ebin"]),
AppFile = filename:join([Ebin, "*.app"]),
AppName = case filelib:wildcard(AppFile) of
[App] ->
list_to_atom(filename:basename(App, ".app"));
[] ->
?LOG(alert, "plugin_app_file_not_found: ~s", [AppFile]),
?SLOG(alert, #{msg => "plugin_app_file_not_found", app_file => AppFile}),
error({plugin_app_file_not_found, AppFile})
end,
ok = load_plugin_app(AppName, Ebin).
@ -184,8 +192,14 @@ load_plugin(Name) ->
{error, Error0} ->
{error, Error0}
end
catch _ : Error : Stacktrace ->
?LOG(alert, "Plugin ~s load failed with ~p", [Name, {Error, Stacktrace}]),
catch Error : Reason : Stacktrace ->
?SLOG(alert, #{
msg => "plugin_load_failed",
name => Name,
exception => Error,
reason => Reason,
stacktrace => Stacktrace
}),
{error, parse_config_file_failed}
end.
@ -202,12 +216,19 @@ load_app(App) ->
start_app(App) ->
case application:ensure_all_started(App) of
{ok, Started} ->
?LOG(info, "Started plugins: ~p", [Started]),
?LOG(info, "Load plugin ~s successfully", [App]),
case Started =/= [] of
true -> ?SLOG(info, #{msg => "started_plugin_dependency_apps", apps => Started});
false -> ok
end,
?SLOG(info, #{msg => "started_plugin_app", app => App}),
ok;
{error, {ErrApp, Reason}} ->
?LOG(error, "Load plugin ~s failed, cannot start plugin ~s for ~0p", [App, ErrApp, Reason]),
{error, {ErrApp, Reason}}
?SLOG(error, #{msg => failed_to_start_plugin_app,
app => App,
err_app => ErrApp,
reason => Reason
}),
{error, failed_to_start_plugin_app}
end.
unload_plugin(App) ->
@ -221,11 +242,17 @@ unload_plugin(App) ->
stop_app(App) ->
case application:stop(App) of
ok ->
?LOG(info, "Stop plugin ~s successfully", [App]), ok;
?SLOG(info, #{msg => "stop_plugin_successfully", app => App}),
ok;
{error, {not_started, App}} ->
?LOG(error, "Plugin ~s is not started", [App]), ok;
?SLOG(info, #{msg => "plugin_not_started", app => App}),
ok;
{error, Reason} ->
?LOG(error, "Stop plugin ~s error: ~p", [App]), {error, Reason}
?SLOG(error, #{msg => "failed_to_stop_plugin_app",
app => App,
error => Reason
}),
{error, Reason}
end.
names(plugin) ->
@ -236,6 +263,3 @@ names(started_app) ->
names(Plugins) ->
[Name || #plugin{name = Name} <- Plugins].
funlog(Key, Value) ->
?LOG(info, "~s = ~p", [string:join(Key, "."), Value]).

View File

@ -100,22 +100,26 @@ handle_call({submit, Task}, _From, State) ->
{reply, catch run(Task), State};
handle_call(Req, _From, State) ->
?LOG(error, "Unexpected call: ~p", [Req]),
?SLOG(error, #{msg => "unexpected_call", call => Req}),
{reply, ignored, State}.
handle_cast({async_submit, Task}, State) ->
try run(Task)
catch _:Error:Stacktrace ->
?LOG(error, "Error: ~0p, ~0p", [Error, Stacktrace])
catch Error:Reason:Stacktrace ->
?SLOG(error, #{msg => "async_submit_error",
exception => Error,
reason => Reason,
stacktrace => Stacktrace
})
end,
{noreply, State};
handle_cast(Msg, State) ->
?LOG(error, "Unexpected cast: ~p", [Msg]),
?SLOG(error, #{msg => "unexpected_cast", cast => Msg}),
{noreply, State}.
handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]),
?SLOG(error, #{msg => "unexpected_info", info => Info}),
{noreply, State}.
terminate(_Reason, #{pool := Pool, id := Id}) ->

View File

@ -203,15 +203,15 @@ handle_call({delete_route, Topic, Dest}, _From, State) ->
{reply, Ok, State};
handle_call(Req, _From, State) ->
?LOG(error, "Unexpected call: ~p", [Req]),
?SLOG(error, #{msg => "unexpected_call", call => Req}),
{reply, ignored, State}.
handle_cast(Msg, State) ->
?LOG(error, "Unexpected cast: ~p", [Msg]),
?SLOG(error, #{msg => "unexpected_cast", cast => Msg}),
{noreply, State}.
handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]),
?SLOG(error, #{msg => "unexpected_info", info => Info}),
{noreply, State}.
terminate(_Reason, #{pool := Pool, id := Id}) ->

View File

@ -109,11 +109,11 @@ init([]) ->
{ok, #{nodes => Nodes}, hibernate}.
handle_call(Req, _From, State) ->
?LOG(error, "Unexpected call: ~p", [Req]),
?SLOG(error, #{msg => "unexpected_call", call => Req}),
{reply, ignored, State}.
handle_cast(Msg, State) ->
?LOG(error, "Unexpected cast: ~p", [Msg]),
?SLOG(error, #{msg => "unexpected_cast", cast => Msg}),
{noreply, State}.
handle_info({mnesia_table_event, {write, {?ROUTING_NODE, Node, _}, _}},
@ -130,7 +130,7 @@ handle_info({mnesia_table_event, {delete, {?ROUTING_NODE, _Node}, _}}, State) ->
{noreply, State};
handle_info({mnesia_table_event, Event}, State) ->
?LOG(error, "Unexpected mnesia_table_event: ~p", [Event]),
?SLOG(error,#{msg => "unexpected_mnesia_table_event", event => Event}),
{noreply, State};
handle_info({nodedown, Node}, State = #{nodes := Nodes}) ->
@ -148,7 +148,7 @@ handle_info({membership, _Event}, State) ->
{noreply, State};
handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]),
?SLOG(error, #{msg => "unexpected_info", info => Info}),
{noreply, State}.
terminate(_Reason, _State) ->

View File

@ -1,66 +0,0 @@
-module(emqx_rule_actions_trans).
-include_lib("syntax_tools/include/merl.hrl").
-export([parse_transform/2]).
parse_transform(Forms, _Options) ->
trans(Forms, []).
trans([], ResAST) ->
lists:reverse(ResAST);
trans([{eof, L} | AST], ResAST) ->
lists:reverse([{eof, L} | ResAST]) ++ AST;
trans([{function, LineNo, FuncName, Arity, Clauses} | AST], ResAST) ->
NewClauses = trans_func_clauses(atom_to_list(FuncName), Clauses),
trans(AST, [{function, LineNo, FuncName, Arity, NewClauses} | ResAST]);
trans([Form | AST], ResAST) ->
trans(AST, [Form | ResAST]).
trans_func_clauses("on_action_create_" ++ _ = _FuncName , Clauses) ->
NewClauses = [
begin
Bindings = lists:flatten(get_vars(Args) ++ get_vars(Body, lefth)),
Body2 = append_to_result(Bindings, Body),
{clause, LineNo, Args, Guards, Body2}
end || {clause, LineNo, Args, Guards, Body} <- Clauses],
NewClauses;
trans_func_clauses(_FuncName, Clauses) ->
Clauses.
get_vars(Exprs) ->
get_vars(Exprs, all).
get_vars(Exprs, Type) ->
do_get_vars(Exprs, [], Type).
do_get_vars([], Vars, _Type) -> Vars;
do_get_vars([Line | Expr], Vars, all) ->
do_get_vars(Expr, [syntax_vars(erl_syntax:form_list([Line])) | Vars], all);
do_get_vars([Line | Expr], Vars, lefth) ->
do_get_vars(Expr,
case (Line) of
?Q("_@LeftV = _@@_") -> Vars ++ syntax_vars(LeftV);
_ -> Vars
end, lefth).
syntax_vars(Line) ->
sets:to_list(erl_syntax_lib:variables(Line)).
%% append bindings to the return value as the first tuple element.
%% e.g. if the original result is R, then the new result will be {[binding()], R}.
append_to_result(Bindings, Exprs) ->
erl_syntax:revert_forms(do_append_to_result(to_keyword(Bindings), Exprs, [])).
do_append_to_result(KeyWordVars, [Line], Res) ->
case Line of
?Q("_@LeftV = _@RightV") ->
lists:reverse([?Q("{[_@KeyWordVars], _@LeftV}"), Line | Res]);
_ ->
lists:reverse([?Q("{[_@KeyWordVars], _@Line}") | Res])
end;
do_append_to_result(KeyWordVars, [Line | Exprs], Res) ->
do_append_to_result(KeyWordVars, Exprs, [Line | Res]).
to_keyword(Vars) ->
[erl_syntax:tuple([erl_syntax:atom(Var), merl:var(Var)])
|| Var <- Vars].

View File

@ -55,7 +55,7 @@
% workaround: prevent being recognized as unused functions
-export([to_duration/1, to_duration_s/1, to_duration_ms/1,
to_bytesize/1, to_wordsize/1,
mk_duration/2, to_bytesize/1, to_wordsize/1,
to_percent/1, to_comma_separated_list/1,
to_bar_separated_list/1, to_ip_port/1,
to_erl_cipher_suite/1,
@ -71,7 +71,7 @@
-export([namespace/0, roots/0, roots/1, fields/1]).
-export([conf_get/2, conf_get/3, keys/2, filter/1]).
-export([ssl/1]).
-export([server_ssl_opts_schema/2, client_ssl_opts_schema/1, ciphers_schema/1, default_ciphers/1]).
namespace() -> undefined.
@ -87,23 +87,26 @@ roots(high) ->
}
, {"zones",
sc(map("name", ref("zone")),
#{ desc => "A zone is a set of configs grouped by the zone <code>name</code>. <br>"
"For flexible configuration mapping, the <code>name</code> "
"can be set to a listener's <code>zone</code> config.<br>"
"NOTE: A builtin zone named <code>default</code> is auto created "
"and can not be deleted."
#{ desc =>
"""A zone is a set of configs grouped by the zone <code>name</code>.<br>
For flexible configuration mapping, the <code>name</code>
can be set to a listener's <code>zone</code> config.<br>
NOTE: A builtin zone named <code>default</code> is auto created
and can not be deleted."""
})}
, {"mqtt",
sc(ref("mqtt"),
#{ desc => "Global MQTT configuration.<br>"
"The configs here work as default values which can be overriden "
"in <code>zone</code> configs"
#{ desc =>
"""Global MQTT configuration.<br>
The configs here work as default values which can be overriden
in <code>zone</code> configs"""
})}
, {"authentication",
sc(hoconsc:lazy(hoconsc:array(map())),
#{ desc => "Default authentication configs for all MQTT listeners.<br>"
"For per-listener overrides see <code>authentication</code> "
"in listener configs"
#{ desc =>
"""Default authentication configs for all MQTT listeners.<br>
For per-listener overrides see <code>authentication</code>
in listener configs"""
})}
, {"authorization",
sc(ref("authorization"),
@ -156,11 +159,11 @@ fields("stats") ->
fields("authorization") ->
[ {"no_match",
sc(hoconsc:union([allow, deny]),
sc(hoconsc:enum([allow, deny]),
#{ default => allow
})}
, {"deny_action",
sc(hoconsc:union([ignore, disconnect]),
sc(hoconsc:enum([ignore, disconnect]),
#{ default => ignore
})}
, {"cache",
@ -294,7 +297,7 @@ fields("mqtt") ->
})
}
, {"mqueue_default_priority",
sc(union(highest, lowest),
sc(hoconsc:enum([highest, lowest]),
#{ default => lowest
})
}
@ -309,11 +312,11 @@ fields("mqtt") ->
})
}
, {"peer_cert_as_username",
sc(hoconsc:union([disabled, cn, dn, crt, pem, md5]),
sc(hoconsc:enum([disabled, cn, dn, crt, pem, md5]),
#{ default => disabled
})}
, {"peer_cert_as_clientid",
sc(hoconsc:union([disabled, cn, dn, crt, pem, md5]),
sc(hoconsc:enum([disabled, cn, dn, crt, pem, md5]),
#{ default => disabled
})}
];
@ -483,7 +486,7 @@ fields("mqtt_wss_listener") ->
#{})
}
, {"ssl",
sc(ref("listener_ssl_opts"),
sc(ref("listener_wss_opts"),
#{})
}
, {"websocket",
@ -498,6 +501,7 @@ fields("mqtt_quic_listener") ->
#{ default => true
})
}
%% TODO: ensure cacertfile is configurable
, {"certfile",
sc(string(),
#{})
@ -506,11 +510,7 @@ fields("mqtt_quic_listener") ->
sc(string(),
#{})
}
, {"ciphers",
sc(comma_separated_list(),
#{ default => "TLS_AES_256_GCM_SHA384,TLS_AES_128_GCM_SHA256,"
"TLS_CHACHA20_POLY1305_SHA256"
})}
, {"ciphers", ciphers_schema(quic)}
, {"idle_timeout",
sc(duration(),
#{ default => "15s"
@ -525,7 +525,7 @@ fields("ws_opts") ->
})
}
, {"mqtt_piggyback",
sc(hoconsc:union([single, multiple]),
sc(hoconsc:enum([single, multiple]),
#{ default => multiple
})
}
@ -634,16 +634,26 @@ fields("tcp_opts") ->
];
fields("listener_ssl_opts") ->
ssl(#{handshake_timeout => "15s"
, depth => 10
server_ssl_opts_schema(
#{ depth => 10
, reuse_sessions => true
, versions => default_tls_vsns()
, ciphers => default_ciphers()
});
, versions => tls_all_available
, ciphers => tls_all_available
}, false);
fields("listener_wss_opts") ->
server_ssl_opts_schema(
#{ depth => 10
, reuse_sessions => true
, versions => tls_all_available
, ciphers => tls_all_available
}, true);
fields(ssl_client_opts) ->
client_ssl_opts_schema(#{});
fields("deflate_opts") ->
[ {"level",
sc(hoconsc:union([none, default, best_compression, best_speed]),
sc(hoconsc:enum([none, default, best_compression, best_speed]),
#{})
}
, {"mem_level",
@ -652,15 +662,15 @@ fields("deflate_opts") ->
})
}
, {"strategy",
sc(hoconsc:union([default, filtered, huffman_only, rle]),
sc(hoconsc:enum([default, filtered, huffman_only, rle]),
#{})
}
, {"server_context_takeover",
sc(hoconsc:union([takeover, no_takeover]),
sc(hoconsc:enum([takeover, no_takeover]),
#{})
}
, {"client_context_takeover",
sc(hoconsc:union([takeover, no_takeover]),
sc(hoconsc:enum([takeover, no_takeover]),
#{})
}
, {"server_max_window_bits",
@ -699,12 +709,12 @@ fields("broker") ->
})
}
, {"session_locking_strategy",
sc(hoconsc:union([local, leader, quorum, all]),
sc(hoconsc:enum([local, leader, quorum, all]),
#{ default => quorum
})
}
, {"shared_subscription_strategy",
sc(hoconsc:union([random, round_robin]),
sc(hoconsc:enum([random, round_robin]),
#{ default => round_robin
})
}
@ -726,7 +736,7 @@ fields("broker") ->
fields("broker_perf") ->
[ {"route_lock_type",
sc(hoconsc:union([key, tab, global]),
sc(hoconsc:enum([key, tab, global]),
#{ default => key
})}
, {"trie_compaction",
@ -902,7 +912,10 @@ conf_get(Key, Conf, Default) ->
filter(Opts) ->
[{K, V} || {K, V} <- Opts, V =/= undefined].
ssl(Defaults) ->
%% @private This function defines the SSL opts which are commonly used by
%% SSL listener and client.
-spec common_ssl_opts_schema(map()) -> hocon_schema:field_schema().
common_ssl_opts_schema(Defaults) ->
D = fun (Field) -> maps:get(to_atom(Field), Defaults, undefined) end,
Df = fun (Field, Default) -> maps:get(to_atom(Field), Defaults, Default) end,
[ {"enable",
@ -913,26 +926,82 @@ ssl(Defaults) ->
, {"cacertfile",
sc(string(),
#{ default => D("cacertfile")
, nullable => true
, desc =>
"""Trusted PEM format CA certificates bundle file.<br>
The certificates in this file are used to verify the TLS peer's certificates.
Append new certificates to the file if new CAs are to be trusted.
There is no need to restart EMQ X to have the updated file loaded, because
the system regularly checks if file has been updated (and reload).<br>
NOTE: invalidating (deleting) a certificate from the file will not affect
already established connections.
"""
})
}
, {"certfile",
sc(string(),
#{ default => D("certfile")
, nullable => true
, desc =>
"""PEM format certificates chain file.<br>
The certificates in this file should be in reversed order of the certificate
issue chain. That is, the host's certificate should be placed in the beginning
of the file, followed by the immediate issuer certificate and so on.
Although the root CA certificate is optional, it should placed at the end of
the file if it is to be added.
"""
})
}
, {"keyfile",
sc(string(),
#{ default => D("keyfile")
, nullable => true
, desc =>
"""PEM format private key file.<br>
"""
})
}
, {"verify",
sc(hoconsc:union([verify_peer, verify_none]),
sc(hoconsc:enum([verify_peer, verify_none]),
#{ default => Df("verify", verify_none)
})
}
, {"fail_if_no_peer_cert",
, {"reuse_sessions",
sc(boolean(),
#{ default => Df("fail_if_no_peer_cert", false)
#{ default => Df("reuse_sessions", true)
})
}
, {"depth",
sc(integer(),
#{default => Df("depth", 10)
})
}
, {"password",
sc(string(),
#{ sensitive => true
, nullable => true
, desc =>
"""String containing the user's password. Only used if the private
keyfile is password-protected."""
})
}
, {"versions",
sc(hoconsc:array(typerefl:atom()),
#{ default => default_tls_vsns(maps:get(versions, Defaults, tls_all_available))
, desc =>
"""All TLS/DTLS versions to be supported.<br>
NOTE: PSK ciphers are suppresed by 'tlsv1.3' version config<br>
In case PSK cipher suites are intended, make sure to configured
<code>['tlsv1.2', 'tlsv1.1']</code> here.
"""
, validator => fun validate_tls_versions/1
})
}
, {"ciphers", ciphers_schema(D("ciphers"))}
, {user_lookup_fun,
sc(typerefl:alias("string", any()),
#{ default => "emqx_tls_psk:lookup"
, converter => fun ?MODULE:parse_user_lookup_fun/1
})
}
, {"secure_renegotiate",
@ -946,6 +1015,45 @@ you drop support for the insecure renegotiation, prone to MitM attacks.
"""
})
}
].
%% @doc Make schema for SSL listener options.
%% When it's for ranch listener, an extra field `handshake_timeout' is added.
-spec server_ssl_opts_schema(map(), boolean()) -> hocon_schema:field_schema().
server_ssl_opts_schema(Defaults, IsRanchListener) ->
D = fun (Field) -> maps:get(to_atom(Field), Defaults, undefined) end,
Df = fun (Field, Default) -> maps:get(to_atom(Field), Defaults, Default) end,
common_ssl_opts_schema(Defaults) ++
[ {"dhfile",
sc(string(),
#{ default => D("dhfile")
, nullable => true
, desc =>
"""Path to a file containing PEM-encoded Diffie Hellman parameters
to be used by the server if a cipher suite using Diffie Hellman
key exchange is negotiated. If not specified, default parameters
are used.<br>
NOTE: The dhfile option is not supported by TLS 1.3."""
})
}
, {"fail_if_no_peer_cert",
sc(boolean(),
#{ default => Df("fail_if_no_peer_cert", false)
, desc =>
"""
Used together with {verify, verify_peer} by an TLS/DTLS server.
If set to true, the server fails if the client does not have a
certificate to send, that is, sends an empty certificate.
If set to false, it fails only if the client sends an invalid
certificate (an empty certificate is considered valid).
"""
})
}
, {"honor_cipher_order",
sc(boolean(),
#{ default => Df("honor_cipher_order", true)
})
}
, {"client_renegotiation",
sc(boolean(),
#{ default => Df("client_renegotiation", true)
@ -961,96 +1069,105 @@ the number of messages the underlying cipher suite can encipher.
"""
})
}
, {"reuse_sessions",
sc(boolean(),
#{ default => Df("reuse_sessions", true)
})
}
, {"honor_cipher_order",
sc(boolean(),
#{ default => Df("honor_cipher_order", true)
})
}
, {"handshake_timeout",
| [ {"handshake_timeout",
sc(duration(),
#{ default => Df("handshake_timeout", "15s")
})
}
, {"depth",
sc(integer(),
#{default => Df("depth", 10)
})
}
, {"password",
sc(string(),
#{ default => D("key_password")
, sensitive => true
})
}
, {"dhfile",
sc(string(),
#{ default => D("dhfile")
})
}
, {"server_name_indication",
, desc => "Maximum time duration allowed for the handshake to complete"
})}
|| IsRanchListener]
].
%% @doc Make schema for SSL client.
-spec client_ssl_opts_schema(map()) -> hocon_schema:field_schema().
client_ssl_opts_schema(Defaults) ->
common_ssl_opts_schema(Defaults) ++
[ { "server_name_indication",
sc(hoconsc:union([disable, string()]),
#{ default => D("server_name_indication")
})
}
, {"versions",
sc(typerefl:alias("string", list(atom())),
#{ default => maps:get(versions, Defaults, default_tls_vsns())
, converter => fun (Vsns) -> [tls_vsn(iolist_to_binary(V)) || V <- Vsns] end
})
}
, {"ciphers",
#{ default => disable
, desc =>
"""Specify the host name to be used in TLS Server Name Indication extension.<br>
For instance, when connecting to \"server.example.net\", the genuine server
which accedpts the connection and performs TLS handshake may differ from the
host the TLS client initially connects to, e.g. when connecting to an IP address
or when the host has multiple resolvable DNS records <br>
If not specified, it will default to the host name string which is used
to establish the connection, unless it is IP addressed used.<br>
The host name is then also used in the host name verification of the peer
certificate.<br> The special value 'disable' prevents the Server Name
Indication extension from being sent and disables the hostname
verification check."""
})}
].
default_tls_vsns(dtls_all_available) ->
proplists:get_value(available_dtls, ssl:versions());
default_tls_vsns(tls_all_available) ->
emqx_tls_lib:default_versions().
-spec ciphers_schema(quic | dtls_all_available | tls_all_available | undefined) -> hocon_schema:field_schema().
ciphers_schema(Default) ->
sc(hoconsc:array(string()),
#{ default => D("ciphers")
})
}
, {"user_lookup_fun",
sc(typerefl:alias("string", any()),
#{ default => "emqx_psk:lookup"
, converter => fun ?MODULE:parse_user_lookup_fun/1
})
}
].
#{ default => default_ciphers(Default)
, converter => fun(Ciphers) when is_binary(Ciphers) ->
binary:split(Ciphers, <<",">>, [global]);
(Ciphers) when is_list(Ciphers) ->
Ciphers
end
, validator => case Default =:= quic of
true -> undefined; %% quic has openssl statically linked
false -> fun validate_ciphers/1
end
, desc =>
"""TLS cipher suite names separated by comma, or as an array of strings
<code>\"TLS_AES_256_GCM_SHA384,TLS_AES_128_GCM_SHA256\"</code> or
<code>[\"TLS_AES_256_GCM_SHA384\",\"TLS_AES_128_GCM_SHA256\"]</code].
<br>
Ciphers (and their ordering) define the way in which the
client and server encrypts information over the wire.
Selecting a good cipher suite is critical for the
application's data security, confidentiality and performance.
The names should be in OpenSSL sting format (not RFC format).
Default values and examples proveded by EMQ X config
documentation are all in OpenSSL format.<br>
%% on erl23.2.7.2-emqx-2, sufficient_crypto_support('tlsv1.3') -> false
default_tls_vsns() -> [<<"tlsv1.2">>, <<"tlsv1.1">>, <<"tlsv1">>].
NOTE: Certain cipher suites are only compatible with
specific TLS <code>versions</code> ('tlsv1.1', 'tlsv1.2' or 'tlsv1.3')
incompatible cipher suites will be silently dropped.
For instance, if only 'tlsv1.3' is given in the <code>versions</code>,
configuring cipher suites for other versions will have no effect.
<br>
tls_vsn(<<"tlsv1.3">>) -> 'tlsv1.3';
tls_vsn(<<"tlsv1.2">>) -> 'tlsv1.2';
tls_vsn(<<"tlsv1.1">>) -> 'tlsv1.1';
tls_vsn(<<"tlsv1">>) -> 'tlsv1'.
NOTE: PSK ciphers are suppresed by 'tlsv1.3' version config<br>
If PSK cipher suites are intended, 'tlsv1.3' should be disabled from <code>versions</code>.<br>
PSK cipher suites: <code>\"RSA-PSK-AES256-GCM-SHA384,RSA-PSK-AES256-CBC-SHA384,
RSA-PSK-AES128-GCM-SHA256,RSA-PSK-AES128-CBC-SHA256,
RSA-PSK-AES256-CBC-SHA,RSA-PSK-AES128-CBC-SHA,
RSA-PSK-DES-CBC3-SHA,RSA-PSK-RC4-SHA\"</code><br>
""" ++ case Default of
quic -> "NOTE: QUIC listener supports only 'tlsv1.3' ciphers<br>";
_ -> ""
end}).
default_ciphers() -> [
"TLS_AES_256_GCM_SHA384", "TLS_AES_128_GCM_SHA256", "TLS_CHACHA20_POLY1305_SHA256",
"TLS_AES_128_CCM_SHA256", "TLS_AES_128_CCM_8_SHA256", "ECDHE-ECDSA-AES256-GCM-SHA384",
"ECDHE-RSA-AES256-GCM-SHA384", "ECDHE-ECDSA-AES256-SHA384", "ECDHE-RSA-AES256-SHA384",
"ECDHE-ECDSA-DES-CBC3-SHA", "ECDH-ECDSA-AES256-GCM-SHA384", "ECDH-RSA-AES256-GCM-SHA384",
"ECDH-ECDSA-AES256-SHA384", "ECDH-RSA-AES256-SHA384", "DHE-DSS-AES256-GCM-SHA384",
"DHE-DSS-AES256-SHA256", "AES256-GCM-SHA384", "AES256-SHA256",
"ECDHE-ECDSA-AES128-GCM-SHA256", "ECDHE-RSA-AES128-GCM-SHA256",
"ECDHE-ECDSA-AES128-SHA256", "ECDHE-RSA-AES128-SHA256", "ECDH-ECDSA-AES128-GCM-SHA256",
"ECDH-RSA-AES128-GCM-SHA256", "ECDH-ECDSA-AES128-SHA256", "ECDH-RSA-AES128-SHA256",
"DHE-DSS-AES128-GCM-SHA256", "DHE-DSS-AES128-SHA256", "AES128-GCM-SHA256", "AES128-SHA256",
"ECDHE-ECDSA-AES256-SHA", "ECDHE-RSA-AES256-SHA", "DHE-DSS-AES256-SHA",
"ECDH-ECDSA-AES256-SHA", "ECDH-RSA-AES256-SHA", "AES256-SHA", "ECDHE-ECDSA-AES128-SHA",
"ECDHE-RSA-AES128-SHA", "DHE-DSS-AES128-SHA", "ECDH-ECDSA-AES128-SHA",
"ECDH-RSA-AES128-SHA", "AES128-SHA"
] ++ psk_ciphers().
psk_ciphers() -> [
"PSK-AES128-CBC-SHA", "PSK-AES256-CBC-SHA", "PSK-3DES-EDE-CBC-SHA", "PSK-RC4-SHA"
].
default_ciphers(undefined) ->
default_ciphers(tls_all_available);
default_ciphers(quic) -> [
"TLS_AES_256_GCM_SHA384",
"TLS_AES_128_GCM_SHA256",
"TLS_CHACHA20_POLY1305_SHA256"
];
default_ciphers(dtls_all_available) ->
%% as of now, dtls does not support tlsv1.3 ciphers
emqx_tls_lib:selected_ciphers(['dtlsv1.2', 'dtlsv1']);
default_ciphers(tls_all_available) ->
emqx_tls_lib:default_ciphers().
%% @private return a list of keys in a parent field
-spec(keys(string(), hocon:config()) -> [string()]).
keys(Parent, Conf) ->
[binary_to_list(B) || B <- maps:keys(conf_get(Parent, Conf, #{}))].
-spec ceiling(float()) -> integer().
-spec ceiling(number()) -> integer().
ceiling(X) ->
T = erlang:trunc(X),
case (X - T) of
@ -1069,6 +1186,15 @@ ref(Field) -> hoconsc:ref(?MODULE, Field).
ref(Module, Field) -> hoconsc:ref(Module, Field).
mk_duration(Desc, OverrideMeta) ->
DefaultMeta = #{desc => Desc ++ " Time span. A text string with number followed by time units:
`ms` for milli-seconds,
`s` for seconds,
`m` for minutes,
`h` for hours;
or combined representation like `1h5m0s`"},
hoconsc:mk(typerefl:alias("string", duration()), maps:merge(DefaultMeta, OverrideMeta)).
to_duration(Str) ->
case hocon_postprocess:duration(Str) of
I when is_integer(I) -> {ok, I};
@ -1077,13 +1203,15 @@ to_duration(Str) ->
to_duration_s(Str) ->
case hocon_postprocess:duration(Str) of
I when is_integer(I) -> {ok, ceiling(I / 1000)};
I when is_number(I) -> {ok, ceiling(I / 1000)};
_ -> {error, Str}
end.
-spec to_duration_ms(Input) -> {ok, integer()} | {error, Input}
when Input :: string() | binary().
to_duration_ms(Str) ->
case hocon_postprocess:duration(Str) of
I when is_integer(I) -> {ok, ceiling(I)};
I when is_number(I) -> {ok, ceiling(I)};
_ -> {error, Str}
end.
@ -1159,4 +1287,19 @@ parse_user_lookup_fun(StrConf) ->
[ModStr, FunStr] = string:tokens(StrConf, ":"),
Mod = list_to_atom(ModStr),
Fun = list_to_atom(FunStr),
{fun Mod:Fun/3, <<>>}.
{fun Mod:Fun/3, undefined}.
validate_ciphers(Ciphers) ->
All = emqx_tls_lib:all_ciphers(),
case lists:filter(fun(Cipher) -> not lists:member(Cipher, All) end, Ciphers) of
[] -> ok;
Bad -> {error, {bad_ciphers, Bad}}
end.
validate_tls_versions(Versions) ->
AvailableVersions = proplists:get_value(available, ssl:versions()) ++
proplists:get_value(available_dtls, ssl:versions()),
case lists:filter(fun(V) -> not lists:member(V, AvailableVersions) end, Versions) of
[] -> ok;
Vs -> {error, {unsupported_ssl_versions, Vs}}
end.

View File

@ -479,11 +479,12 @@ log_dropped(Msg = #message{qos = QoS}, #session{mqueue = Q}) ->
case (QoS == ?QOS_0) andalso (not emqx_mqueue:info(store_qos0, Q)) of
true ->
ok = emqx_metrics:inc('delivery.dropped.qos0_msg'),
?LOG(warning, "Dropped qos0 msg: ~s", [emqx_message:format(Msg)]);
?SLOG(warning, #{msg => "dropped_qos0_msg",
payload => emqx_message:to_log_map(Msg)});
false ->
ok = emqx_metrics:inc('delivery.dropped.queue_full'),
?LOG(warning, "Dropped msg due to mqueue is full: ~s",
[emqx_message:format(Msg)])
?SLOG(warning, #{msg => "dropped_msg_due_to_mqueue_is_full",
payload => emqx_message:to_log_map(Msg)})
end.
enrich_fun(Session = #session{subscriptions = Subs}) ->

View File

@ -325,11 +325,11 @@ handle_call({unsubscribe, Group, Topic, SubPid}, _From, State) ->
{reply, ok, State};
handle_call(Req, _From, State) ->
?LOG(error, "Unexpected call: ~p", [Req]),
?SLOG(error, #{msg => "unexpected_call", req => Req}),
{reply, ignored, State}.
handle_cast(Msg, State) ->
?LOG(error, "Unexpected cast: ~p", [Msg]),
?SLOG(error, #{msg => "unexpected_cast", req => Msg}),
{noreply, State}.
handle_info({mnesia_table_event, {write, NewRecord, _}}, State = #state{pmon = PMon}) ->
@ -347,8 +347,8 @@ handle_info({mnesia_table_event, {write, NewRecord, _}}, State = #state{pmon = P
handle_info({mnesia_table_event, _Event}, State) ->
{noreply, State};
handle_info({'DOWN', _MRef, process, SubPid, _Reason}, State = #state{pmon = PMon}) ->
?LOG(info, "Shared subscriber down: ~p", [SubPid]),
handle_info({'DOWN', _MRef, process, SubPid, Reason}, State = #state{pmon = PMon}) ->
?SLOG(info, #{msg => "shared_subscriber_down", sub_pid => SubPid, reason => Reason}),
cleanup_down(SubPid),
{noreply, update_stats(State#state{pmon = emqx_pmon:erase(SubPid, PMon)})};

View File

@ -202,7 +202,7 @@ handle_call(stop, _From, State) ->
{stop, normal, ok, State};
handle_call(Req, _From, State) ->
?LOG(error, "Unexpected call: ~p", [Req]),
?SLOG(error, #{msg => "unexpected_call", call => Req}),
{reply, ignored, State}.
handle_cast({setstat, Stat, MaxStat, Val}, State) ->
@ -221,7 +221,9 @@ handle_cast({update_interval, Update = #update{name = Name}},
State = #state{updates = Updates}) ->
NState = case lists:keyfind(Name, #update.name, Updates) of
#update{} ->
?LOG(warning, "Duplicated update: ~s", [Name]),
?SLOG(warning, #{msg => "duplicated_update",
name => Name
}),
State;
false -> State#state{updates = [Update|Updates]}
end,
@ -232,7 +234,7 @@ handle_cast({cancel_update, Name}, State = #state{updates = Updates}) ->
{noreply, State#state{updates = Updates1}};
handle_cast(Msg, State) ->
?LOG(error, "Unexpected cast: ~p", [Msg]),
?SLOG(error, #{msg => "unexpected_cast", cast => Msg}),
{noreply, State}.
handle_info({timeout, TRef, tick}, State = #state{timer = TRef, updates = Updates}) ->
@ -241,8 +243,13 @@ handle_info({timeout, TRef, tick}, State = #state{timer = TRef, updates = Update
func = UpFun}, Acc) when C =< 0 ->
try UpFun()
catch
_:Error ->
?LOG(error, "Update ~s failed: ~0p", [Name, Error])
Error : Reason : Stacktrace ->
?SLOG(error, #{msg => "update_name_failed",
name => Name,
exception => Error,
reason => Reason,
stacktrace => Stacktrace
})
end,
[Update#update{countdown = I} | Acc];
(Update = #update{countdown = C}, Acc) ->
@ -251,7 +258,7 @@ handle_info({timeout, TRef, tick}, State = #state{timer = TRef, updates = Update
{noreply, start_timer(State#state{updates = Updates1}), hibernate};
handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]),
?SLOG(error, #{msg => "unexpected_info", info => Info}),
{noreply, State}.
terminate(_Reason, #state{timer = TRef}) ->
@ -271,6 +278,9 @@ safe_update_element(Key, Val) ->
true -> true
catch
error:badarg ->
?LOG(warning, "Failed to update ~0p to ~0p", [Key, Val])
?SLOG(warning, #{
msg => "failed_to_update",
key => Key,
val => Val
})
end.

View File

@ -134,11 +134,11 @@ handle_call(uptime, _From, State) ->
{reply, uptime(State), State};
handle_call(Req, _From, State) ->
?LOG(error, "Unexpected call: ~p", [Req]),
?SLOG(error, #{msg => "unexpected_call", call => Req}),
{reply, ignored, State}.
handle_cast(Msg, State) ->
?LOG(error, "Unexpected cast: ~p", [Msg]),
?SLOG(error, #{msg => "unexpected_cast", cast => Msg}),
{noreply, State}.
handle_info({timeout, TRef, heartbeat}, State = #state{heartbeat = TRef}) ->
@ -156,7 +156,7 @@ handle_info({timeout, TRef, tick},
{noreply, tick(State), hibernate};
handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]),
?SLOG(error, #{msg => "unexpected_info", info => Info}),
{noreply, State}.
terminate(_Reason, #state{heartbeat = TRef1, ticker = TRef2}) ->

View File

@ -83,42 +83,51 @@ sysm_opts([_Opt|Opts], Acc) ->
sysm_opts(Opts, Acc).
handle_call(Req, _From, State) ->
?LOG(error, "Unexpected call: ~p", [Req]),
?SLOG(error, #{msg => "unexpected_call", req => Req}),
{reply, ignored, State}.
handle_cast(Msg, State) ->
?LOG(error, "Unexpected cast: ~p", [Msg]),
?SLOG(error, #{msg => "unexpected_cast", req => Msg}),
{noreply, State}.
handle_info({monitor, Pid, long_gc, Info}, State) ->
suppress({long_gc, Pid},
fun() ->
WarnMsg = io_lib:format("long_gc warning: pid = ~p, info: ~p", [Pid, Info]),
?LOG(warning, "~s~n~p", [WarnMsg, procinfo(Pid)]),
WarnMsg = io_lib:format("long_gc warning: pid = ~p", [Pid]),
?SLOG(warning, #{msg => long_gc,
info => Info,
porcinfo => procinfo(Pid)
}),
safe_publish(long_gc, WarnMsg)
end, State);
handle_info({monitor, Pid, long_schedule, Info}, State) when is_pid(Pid) ->
suppress({long_schedule, Pid},
fun() ->
WarnMsg = io_lib:format("long_schedule warning: pid = ~p, info: ~p", [Pid, Info]),
?LOG(warning, "~s~n~p", [WarnMsg, procinfo(Pid)]),
WarnMsg = io_lib:format("long_schedule warning: pid = ~p", [Pid]),
?SLOG(warning, #{msg => long_schedule,
info => Info,
procinfo => procinfo(Pid)}),
safe_publish(long_schedule, WarnMsg)
end, State);
handle_info({monitor, Port, long_schedule, Info}, State) when is_port(Port) ->
suppress({long_schedule, Port},
fun() ->
WarnMsg = io_lib:format("long_schedule warning: port = ~p, info: ~p", [Port, Info]),
?LOG(warning, "~s~n~p", [WarnMsg, erlang:port_info(Port)]),
WarnMsg = io_lib:format("long_schedule warning: port = ~p", [Port]),
?SLOG(warning, #{msg => long_schedule,
info => Info,
portinfo => portinfo(Port)}),
safe_publish(long_schedule, WarnMsg)
end, State);
handle_info({monitor, Pid, large_heap, Info}, State) ->
suppress({large_heap, Pid},
fun() ->
WarnMsg = io_lib:format("large_heap warning: pid = ~p, info: ~p", [Pid, Info]),
?LOG(warning, "~s~n~p", [WarnMsg, procinfo(Pid)]),
WarnMsg = io_lib:format("large_heap warning: pid = ~p", [Pid]),
?SLOG(warning, #{msg => large_heap,
info => Info,
procinfo => procinfo(Pid)}),
safe_publish(large_heap, WarnMsg)
end, State);
@ -126,7 +135,10 @@ handle_info({monitor, SusPid, busy_port, Port}, State) ->
suppress({busy_port, Port},
fun() ->
WarnMsg = io_lib:format("busy_port warning: suspid = ~p, port = ~p", [SusPid, Port]),
?LOG(warning, "~s~n~p~n~p", [WarnMsg, procinfo(SusPid), erlang:port_info(Port)]),
?SLOG(warning, #{msg => busy_port,
portinfo => portinfo(Port),
procinfo => procinfo(SusPid)
}),
safe_publish(busy_port, WarnMsg)
end, State);
@ -134,7 +146,9 @@ handle_info({monitor, SusPid, busy_dist_port, Port}, State) ->
suppress({busy_dist_port, Port},
fun() ->
WarnMsg = io_lib:format("busy_dist_port warning: suspid = ~p, port = ~p", [SusPid, Port]),
?LOG(warning, "~s~n~p~n~p", [WarnMsg, procinfo(SusPid), erlang:port_info(Port)]),
?SLOG(warning, #{msg => busy_dist_port,
portinfo => portinfo(Port),
procinfo => procinfo(SusPid)}),
safe_publish(busy_dist_port, WarnMsg)
end, State);
@ -142,7 +156,7 @@ handle_info({timeout, _Ref, reset}, State) ->
{noreply, State#{events := []}, hibernate};
handle_info(Info, State) ->
?LOG(error, "Unexpected Info: ~p", [Info]),
?SLOG(error, #{msg => "unexpected_info", info => Info}),
{noreply, State}.
terminate(_Reason, #{timer := TRef}) ->
@ -170,11 +184,14 @@ suppress(Key, SuccFun, State = #{events := Events}) ->
end.
procinfo(Pid) ->
case {emqx_vm:get_process_info(Pid), emqx_vm:get_process_gc_info(Pid)} of
{undefined, _} -> undefined;
{_, undefined} -> undefined;
{Info, GcInfo} -> Info ++ GcInfo
end.
[{pid, Pid} | procinfo_l(emqx_vm:get_process_gc_info(Pid))] ++
procinfo_l(emqx_vm:get_process_info(Pid)).
%% Normalize a process-info lookup result: `undefined' (the process has
%% already exited) becomes the empty list so callers can always append.
procinfo_l(undefined) -> [];
procinfo_l(Info) -> Info.
%% Return the port's info proplist tagged with the port id itself.
%% NOTE(review): erlang:port_info/1 returns `undefined' for a closed port,
%% which would make this an improper list — presumably only called with
%% live ports; confirm at the call sites.
portinfo(Port) ->
    [{port, Port} | erlang:port_info(Port)].
safe_publish(Event, WarnMsg) ->
Topic = emqx_topic:systop(lists:concat(['sysmon/', Event])),

View File

@ -19,9 +19,10 @@
-export([ default_versions/0
, integral_versions/1
, default_ciphers/0
, default_ciphers/1
, selected_ciphers/1
, integral_ciphers/2
, drop_tls13_for_old_otp/1
, all_ciphers/0
]).
%% non-empty string
@ -31,9 +32,7 @@
%% @doc Returns the default supported tls versions.
-spec default_versions() -> [atom()].
default_versions() ->
OtpRelease = list_to_integer(erlang:system_info(otp_release)),
integral_versions(default_versions(OtpRelease)).
default_versions() -> available_versions().
%% @doc Validate a given list of desired tls versions.
%% raise an error exception if non of them are available.
@ -51,7 +50,7 @@ integral_versions(Desired) when ?IS_STRING(Desired) ->
integral_versions(Desired) when is_binary(Desired) ->
integral_versions(parse_versions(Desired));
integral_versions(Desired) ->
{_, Available} = lists:keyfind(available, 1, ssl:versions()),
Available = available_versions(),
case lists:filter(fun(V) -> lists:member(V, Available) end, Desired) of
[] -> erlang:error(#{ reason => no_available_tls_version
, desired => Desired
@ -61,27 +60,63 @@ integral_versions(Desired) ->
Filtered
end.
%% @doc Return a list of default (openssl string format) cipher suites.
-spec default_ciphers() -> [string()].
default_ciphers() -> default_ciphers(default_versions()).
%% @doc Return a list of all supported ciphers.
all_ciphers() -> all_ciphers(default_versions()).
%% @doc Return a list of (openssl string format) cipher suites.
-spec default_ciphers([ssl:tls_version()]) -> [string()].
default_ciphers(['tlsv1.3']) ->
-spec all_ciphers([ssl:tls_version()]) -> [string()].
all_ciphers(['tlsv1.3']) ->
%% When it's only tlsv1.3 wanted, use 'exclusive' here
%% because 'all' returns legacy cipher suites too,
%% which does not make sense since tlsv1.3 can not use
%% legacy cipher suites.
ssl:cipher_suites(exclusive, 'tlsv1.3', openssl);
default_ciphers(Versions) ->
all_ciphers(Versions) ->
%% assert non-empty
[_ | _] = dedup(lists:append([ssl:cipher_suites(all, V, openssl) || V <- Versions])).
%% @doc Return the pre-selected cipher suites for all TLS versions
%% available in this Erlang/OTP runtime.
default_ciphers() ->
    selected_ciphers(available_versions()).
%% @doc Return the pre-selected cipher suites for the given TLS versions,
%% restricted to those the runtime actually supports (intersection with
%% all_ciphers/1) and de-duplicated without re-ordering.
selected_ciphers(Vsns) ->
    All = all_ciphers(Vsns),
    dedup(lists:filter(fun(Cipher) -> lists:member(Cipher, All) end,
        lists:flatmap(fun do_selected_ciphers/1, Vsns))).
%% Pre-selected cipher names (openssl string format) per TLS version.
%% For tlsv1.3: include the exclusive tlsv1.3 suites only when the runtime
%% offers tlsv1.3, then fall through to the tlsv1.2 selection because a
%% tlsv1.3 listener may still negotiate down.
do_selected_ciphers('tlsv1.3') ->
    case lists:member('tlsv1.3', proplists:get_value(available, ssl:versions())) of
        true -> ssl:cipher_suites(exclusive, 'tlsv1.3', openssl);
        false -> []
    end ++ do_selected_ciphers('tlsv1.2');
%% Hand-picked suite list shared by all pre-1.3 versions; callers filter it
%% against all_ciphers/1, so unsupported entries are dropped at runtime.
do_selected_ciphers(_) ->
    [ "ECDHE-ECDSA-AES256-GCM-SHA384",
    "ECDHE-RSA-AES256-GCM-SHA384", "ECDHE-ECDSA-AES256-SHA384", "ECDHE-RSA-AES256-SHA384",
    "ECDH-ECDSA-AES256-GCM-SHA384", "ECDH-RSA-AES256-GCM-SHA384",
    "ECDH-ECDSA-AES256-SHA384", "ECDH-RSA-AES256-SHA384", "DHE-DSS-AES256-GCM-SHA384",
    "DHE-DSS-AES256-SHA256", "AES256-GCM-SHA384", "AES256-SHA256",
    "ECDHE-ECDSA-AES128-GCM-SHA256", "ECDHE-RSA-AES128-GCM-SHA256",
    "ECDHE-ECDSA-AES128-SHA256", "ECDHE-RSA-AES128-SHA256", "ECDH-ECDSA-AES128-GCM-SHA256",
    "ECDH-RSA-AES128-GCM-SHA256", "ECDH-ECDSA-AES128-SHA256", "ECDH-RSA-AES128-SHA256",
    "DHE-DSS-AES128-GCM-SHA256", "DHE-DSS-AES128-SHA256", "AES128-GCM-SHA256", "AES128-SHA256",
    "ECDHE-ECDSA-AES256-SHA", "ECDHE-RSA-AES256-SHA", "DHE-DSS-AES256-SHA",
    "ECDH-ECDSA-AES256-SHA", "ECDH-RSA-AES256-SHA", "ECDHE-ECDSA-AES128-SHA",
    "ECDHE-RSA-AES128-SHA", "DHE-DSS-AES128-SHA", "ECDH-ECDSA-AES128-SHA",
    "ECDH-RSA-AES128-SHA",
    %% psk
    "RSA-PSK-AES256-GCM-SHA384","RSA-PSK-AES256-CBC-SHA384",
    "RSA-PSK-AES128-GCM-SHA256","RSA-PSK-AES128-CBC-SHA256",
    "RSA-PSK-AES256-CBC-SHA","RSA-PSK-AES128-CBC-SHA"
    ].
%% @doc Ensure version & cipher-suites integrity.
-spec integral_ciphers([ssl:tls_version()], binary() | string() | [string()]) -> [string()].
integral_ciphers(Versions, Ciphers) when Ciphers =:= [] orelse Ciphers =:= undefined ->
%% not configured
integral_ciphers(Versions, default_ciphers(Versions));
integral_ciphers(Versions, selected_ciphers(Versions));
integral_ciphers(Versions, Ciphers) when ?IS_STRING_LIST(Ciphers) ->
%% ensure tlsv1.3 ciphers if none of them is found in Ciphers
dedup(ensure_tls13_cipher(lists:member('tlsv1.3', Versions), Ciphers));
@ -95,7 +130,7 @@ integral_ciphers(Versions, Ciphers) ->
%% In case tlsv1.3 is present, ensure tlsv1.3 cipher is added if user
%% did not provide it from config --- which is a common mistake
ensure_tls13_cipher(true, Ciphers) ->
Tls13Ciphers = default_ciphers(['tlsv1.3']),
Tls13Ciphers = selected_ciphers(['tlsv1.3']),
case lists:any(fun(C) -> lists:member(C, Tls13Ciphers) end, Ciphers) of
true -> Ciphers;
false -> Tls13Ciphers ++ Ciphers
@ -103,11 +138,17 @@ ensure_tls13_cipher(true, Ciphers) ->
ensure_tls13_cipher(false, Ciphers) ->
Ciphers.
%% @doc Default SSL/TLS versions, derived from what this OTP release makes
%% available (dispatches on the major OTP release number).
-spec available_versions() -> [atom()].
available_versions() ->
    %% erlang:system_info(otp_release) is e.g. "24".
    OtpRelease = list_to_integer(erlang:system_info(otp_release)),
    default_versions(OtpRelease).
%% tlsv1.3 is available from OTP-22 but we do not want to use until 23.
default_versions(OtpRelease) when OtpRelease >= 23 ->
['tlsv1.3' | default_versions(22)];
proplists:get_value(available, ssl:versions());
default_versions(_) ->
['tlsv1.2', 'tlsv1.1', tlsv1].
lists:delete('tlsv1.3', proplists:get_value(available, ssl:versions())).
%% Deduplicate a list without re-ordering the elements.
dedup([]) -> [];
@ -178,7 +219,7 @@ drop_tls13(SslOpts0) ->
drop_tls13_test() ->
Versions = default_versions(),
?assert(lists:member('tlsv1.3', Versions)),
Ciphers = default_ciphers(),
Ciphers = all_ciphers(),
?assert(has_tlsv13_cipher(Ciphers)),
Opts0 = #{versions => Versions, ciphers => Ciphers, other => true},
Opts = drop_tls13(Opts0),

View File

@ -14,11 +14,10 @@
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_psk).
-module(emqx_tls_psk).
-include("logger.hrl").
%% SSL PSK Callbacks
-export([lookup/3]).
@ -26,14 +25,24 @@
-type psk_user_state() :: term().
-spec lookup(psk, psk_identity(), psk_user_state()) -> {ok, SharedSecret :: binary()} | error.
lookup(psk, ClientPSKID, _UserState) ->
try emqx_hooks:run_fold('tls_handshake.psk_lookup', [ClientPSKID], not_found) of
SharedSecret when is_binary(SharedSecret) -> {ok, SharedSecret};
Error ->
?LOG(error, "Look PSK for PSKID ~p error: ~p", [ClientPSKID, Error]),
lookup(psk, PSKIdentity, _UserState) ->
try emqx_hooks:run_fold('tls_handshake.psk_lookup', [PSKIdentity], normal) of
{ok, SharedSecret} when is_binary(SharedSecret) ->
{ok, SharedSecret};
normal ->
?SLOG(info, #{msg => "psk_identity_not_found",
psk_identity => PSKIdentity}),
error;
{error, Reason} ->
?SLOG(warning, #{msg => "psk_identity_not_found",
psk_identity => PSKIdentity,
reason => Reason}),
error
catch
Except:Error:Stacktrace ->
?LOG(error, "Lookup PSK failed, ~0p: ~0p", [{Except,Error}, Stacktrace]),
Class:Reason:Stacktrace ->
?SLOG(error, #{msg => "lookup_psk_failed",
class => Class,
reason => Reason,
stacktrace => Stacktrace}),
error
end.

View File

@ -115,18 +115,18 @@ install_trace_handler(Who, Level, LogFile) ->
{fun filter_by_meta_key/2, Who}}]})
of
ok ->
?LOG(info, "Start trace for ~p", [Who]);
?SLOG(info, #{msg => "start_trace", who => Who});
{error, Reason} ->
?LOG(error, "Start trace for ~p failed, error: ~p", [Who, Reason]),
?SLOG(error, #{msg => "failed_to_trace", who => Who, reason => Reason}),
{error, Reason}
end.
uninstall_trance_handler(Who) ->
case logger:remove_handler(handler_id(Who)) of
ok ->
?LOG(info, "Stop trace for ~p", [Who]);
?SLOG(info, #{msg => "stop_trace", who => Who});
{error, Reason} ->
?LOG(error, "Stop trace for ~p failed, error: ~p", [Who, Reason]),
?SLOG(error, #{msg => "failed_to_stop_trace", who => Who, reason => Reason}),
{error, Reason}
end.

View File

@ -49,11 +49,11 @@ init([]) ->
{ok, #{}}.
handle_call(Req, _From, State) ->
?LOG(error, "[VM_MON] Unexpected call: ~p", [Req]),
?SLOG(error, #{msg => "unexpected_call", call => Req}),
{reply, ignored, State}.
handle_cast(Msg, State) ->
?LOG(error, "[VM_MON] Unexpected cast: ~p", [Msg]),
?SLOG(error, #{msg => "unexpected_cast", cast => Msg}),
{noreply, State}.
handle_info({timeout, _Timer, check}, State) ->
@ -75,7 +75,7 @@ handle_info({timeout, _Timer, check}, State) ->
{noreply, State};
handle_info(Info, State) ->
?LOG(error, "[VM_MON] Unexpected info: ~p", [Info]),
?SLOG(error, #{msg => "unexpected_info", info => Info}),
{noreply, State}.
terminate(_Reason, _State) ->

View File

@ -181,10 +181,11 @@ init(Req, #{listener := {Type, Listener}} = Opts) ->
idle_timeout => get_ws_opts(Type, Listener, idle_timeout)
},
case check_origin_header(Req, Opts) of
{error, Message} ->
?LOG(error, "Invalid Origin Header ~p~n", [Message]),
{error, Reason} ->
?SLOG(error, #{msg => "invalid_origin_header", reason => Reason}),
{ok, cowboy_req:reply(403, Req), WsOpts};
ok -> parse_sec_websocket_protocol(Req, Opts, WsOpts)
ok ->
parse_sec_websocket_protocol(Req, Opts, WsOpts)
end.
parse_sec_websocket_protocol(Req, #{listener := {Type, Listener}} = Opts, WsOpts) ->
@ -231,7 +232,7 @@ parse_header_fun_origin(Req, #{listener := {Type, Listener}}) ->
Value ->
case lists:member(Value, get_ws_opts(Type, Listener, check_origins)) of
true -> ok;
false -> {origin_not_allowed, Value}
false -> {error, #{bad_origin => Value}}
end
end.
@ -263,11 +264,12 @@ websocket_init([Req, #{zone := Zone, listener := {Type, Listener}} = Opts]) ->
WsCookie = try cowboy_req:parse_cookies(Req)
catch
error:badarg ->
?LOG(error, "Illegal cookie"),
?SLOG(error, #{msg => "bad_cookie"}),
undefined;
Error:Reason ->
?LOG(error, "Failed to parse cookie, Error: ~0p, Reason ~0p",
[Error, Reason]),
?SLOG(error, #{msg => "failed_to_parse_cookie",
exception => Error,
reason => Reason}),
undefined
end,
ConnInfo = #{socktype => ws,
@ -324,7 +326,7 @@ websocket_handle({binary, Data}, State) when is_list(Data) ->
websocket_handle({binary, iolist_to_binary(Data)}, State);
websocket_handle({binary, Data}, State) ->
?LOG(debug, "RECV ~0p", [Data]),
?SLOG(debug, #{msg => "RECV_data", data => Data, transport => websocket}),
ok = inc_recv_stats(1, iolist_size(Data)),
NState = ensure_stats_timer(State),
return(parse_incoming(Data, NState));
@ -339,7 +341,7 @@ websocket_handle({Frame, _}, State) when Frame =:= ping; Frame =:= pong ->
websocket_handle({Frame, _}, State) ->
%% TODO: should not close the ws connection
?LOG(error, "Unexpected frame - ~p", [Frame]),
?SLOG(error, #{msg => "unexpected_frame", frame => Frame}),
shutdown(unexpected_ws_frame, State).
websocket_info({call, From, Req}, State) ->
@ -397,11 +399,11 @@ websocket_info(Info, State) ->
websocket_close({_, ReasonCode, _Payload}, State) when is_integer(ReasonCode) ->
websocket_close(ReasonCode, State);
websocket_close(Reason, State) ->
?LOG(debug, "Websocket closed due to ~p~n", [Reason]),
?SLOG(debug, #{msg => "websocket_closed", reason => Reason}),
handle_info({sock_closed, Reason}, State).
terminate(Reason, _Req, #state{channel = Channel}) ->
?LOG(debug, "Terminated due to ~p", [Reason]),
?SLOG(debug, #{msg => "terminated", reason => Reason}),
emqx_channel:terminate(Reason, Channel);
terminate(_Reason, _Req, _UnExpectedState) ->
@ -446,7 +448,7 @@ handle_info({connack, ConnAck}, State) ->
return(enqueue(ConnAck, State));
handle_info({close, Reason}, State) ->
?LOG(debug, "Force to close the socket due to ~p", [Reason]),
?SLOG(debug, #{msg => "force_socket_close", reason => Reason}),
return(enqueue({close, Reason}, State));
handle_info({event, connected}, State = #state{channel = Channel}) ->
@ -499,7 +501,7 @@ ensure_rate_limit(Stats, State = #state{limiter = Limiter}) ->
{ok, Limiter1} ->
State#state{limiter = Limiter1};
{pause, Time, Limiter1} ->
?LOG(warning, "Pause ~pms due to rate limit", [Time]),
?SLOG(warning, #{msg => "pause_due_to_rate_limit", time => Time}),
TRef = start_timer(Time, limit_timeout),
NState = State#state{sockstate = blocked,
limiter = Limiter1,
@ -547,9 +549,19 @@ parse_incoming(Data, State = #state{parse_state = ParseState}) ->
NState = State#state{parse_state = NParseState},
parse_incoming(Rest, postpone({incoming, Packet}, NState))
catch
error:Reason:Stk ->
?LOG(error, "~nParse failed for ~0p~n~0p~nFrame data: ~0p",
[Reason, Stk, Data]),
throw : ?FRAME_PARSE_ERROR(Reason) ->
?SLOG(info, #{ reason => Reason
, at_state => emqx_frame:describe_state(ParseState)
, input_bytes => Data
}),
FrameError = {frame_error, Reason},
postpone({incoming, FrameError}, State);
error : Reason : Stacktrace ->
?SLOG(error, #{ at_state => emqx_frame:describe_state(ParseState)
, input_bytes => Data
, exception => Reason
, stacktrace => Stacktrace
}),
FrameError = {frame_error, Reason},
postpone({incoming, FrameError}, State)
end.
@ -560,7 +572,7 @@ parse_incoming(Data, State = #state{parse_state = ParseState}) ->
handle_incoming(Packet, State = #state{listener = {Type, Listener}})
when is_record(Packet, mqtt_packet) ->
?LOG(debug, "RECV ~s", [emqx_packet:format(Packet)]),
?SLOG(debug, #{msg => "RECV", packet => emqx_packet:format(Packet)}),
ok = inc_incoming_stats(Packet),
NState = case emqx_pd:get_counter(incoming_pubs) >
get_active_n(Type, Listener) of
@ -617,15 +629,27 @@ handle_outgoing(Packets, State = #state{mqtt_piggyback = MQTTPiggyback,
serialize_and_inc_stats_fun(#state{serialize = Serialize}) ->
fun(Packet) ->
case emqx_frame:serialize_pkt(Packet, Serialize) of
<<>> -> ?LOG(warning, "~s is discarded due to the frame is too large.",
[emqx_packet:format(Packet)]),
try emqx_frame:serialize_pkt(Packet, Serialize) of
<<>> -> ?SLOG(warning, #{msg => "packet_discarded",
reason => "frame_too_large",
packet => emqx_packet:format(Packet)}),
ok = emqx_metrics:inc('delivery.dropped.too_large'),
ok = emqx_metrics:inc('delivery.dropped'),
<<>>;
Data -> ?LOG(debug, "SEND ~s", [emqx_packet:format(Packet)]),
Data -> ?SLOG(debug, #{msg => "SEND", packet => Packet}),
ok = inc_outgoing_stats(Packet),
Data
catch
%% Maybe Never happen.
throw : ?FRAME_SERIALIZE_ERROR(Reason) ->
?SLOG(info, #{ reason => Reason
, input_packet => Packet}),
erlang:error(?FRAME_SERIALIZE_ERROR(Reason));
error : Reason : Stacktrace ->
?SLOG(error, #{ input_packet => Packet
, exception => Reason
, stacktrace => Stacktrace}),
erlang:error(frame_serialize_error)
end
end.

View File

@ -90,7 +90,7 @@ t_validity_period(_) ->
ok = emqx_alarm:activate(a),
ok = emqx_alarm:deactivate(a),
?assertNotEqual({error, not_found}, get_alarm(a, emqx_alarm:get_alarms(deactivated))),
ct:sleep(2000),
ct:sleep(3000),
?assertEqual({error, not_found}, get_alarm(a, emqx_alarm:get_alarms(deactivated))).
get_alarm(Name, [Alarm = #{name := Name} | _More]) ->

View File

@ -236,6 +236,9 @@ t_update_config(Config) when is_list(Config) ->
?assertMatch({ok, _}, update_config([authentication], {delete_authenticator, Global, ID1})),
?assertEqual({error, {not_found, {authenticator, ID1}}}, ?AUTHN:lookup_authenticator(Global, ID1)),
?assertMatch({ok, _}, update_config([authentication], {delete_authenticator, Global, ID2})),
?assertEqual({error, {not_found, {authenticator, ID2}}}, ?AUTHN:lookup_authenticator(Global, ID2)),
ListenerID = 'tcp:default',
ConfKeyPath = [listeners, tcp, default, authentication],
?assertMatch({ok, _}, update_config(ConfKeyPath, {create_authenticator, ListenerID, AuthenticatorConfig1})),

View File

@ -37,38 +37,82 @@ init_per_suite(Config) ->
end_per_suite(_Config) ->
emqx_ct_helpers:stop_apps([]).
init_per_testcase(Case, Config) ->
?MODULE:Case({init, Config}).
end_per_testcase(Case, Config) ->
?MODULE:Case({'end', Config}).
%%--------------------------------------------------------------------
%% PubSub Test
%%--------------------------------------------------------------------
t_stats_fun(_) ->
Subscribers = emqx_stats:getstat('subscribers.count'),
Subscriptions = emqx_stats:getstat('subscriptions.count'),
Subopts = emqx_stats:getstat('suboptions.count'),
t_stats_fun({init, Config}) ->
Parent = self(),
F = fun Loop() ->
N1 = emqx_stats:getstat('subscribers.count'),
N2 = emqx_stats:getstat('subscriptions.count'),
N3 = emqx_stats:getstat('suboptions.count'),
case N1 + N2 + N3 =:= 0 of
true ->
Parent ! {ready, self()},
exit(normal);
false ->
receive
stop ->
exit(normal)
after
100 ->
Loop()
end
end
end,
Pid = spawn_link(F),
receive
{ready, P} when P =:= Pid->
Config
after
5000 ->
Pid ! stop,
ct:fail("timedout_waiting_for_sub_stats_to_reach_zero")
end;
t_stats_fun(Config) when is_list(Config) ->
ok = emqx_broker:subscribe(<<"topic">>, <<"clientid">>),
ok = emqx_broker:subscribe(<<"topic2">>, <<"clientid">>),
%% ensure stats refreshed
emqx_broker:stats_fun(),
ct:sleep(10),
?assertEqual(Subscribers + 2, emqx_stats:getstat('subscribers.count')),
?assertEqual(Subscribers + 2, emqx_stats:getstat('subscribers.max')),
?assertEqual(Subscriptions + 2, emqx_stats:getstat('subscriptions.count')),
?assertEqual(Subscriptions + 2, emqx_stats:getstat('subscriptions.max')),
?assertEqual(Subopts + 2, emqx_stats:getstat('suboptions.count')),
?assertEqual(Subopts + 2, emqx_stats:getstat('suboptions.max')).
%% emqx_stats:set_stat is a gen_server cast
%% make a synced call sync
ignored = gen_server:call(emqx_stats, call, infinity),
?assertEqual(2, emqx_stats:getstat('subscribers.count')),
?assertEqual(2, emqx_stats:getstat('subscribers.max')),
?assertEqual(2, emqx_stats:getstat('subscriptions.count')),
?assertEqual(2, emqx_stats:getstat('subscriptions.max')),
?assertEqual(2, emqx_stats:getstat('suboptions.count')),
?assertEqual(2, emqx_stats:getstat('suboptions.max'));
t_stats_fun({'end', _Config}) ->
ok = emqx_broker:unsubscribe(<<"topic">>),
ok = emqx_broker:unsubscribe(<<"topic2">>).
t_subscribed(_) ->
t_subscribed({init, Config}) ->
emqx_broker:subscribe(<<"topic">>),
Config;
t_subscribed(Config) when is_list(Config) ->
?assertEqual(false, emqx_broker:subscribed(undefined, <<"topic">>)),
?assertEqual(true, emqx_broker:subscribed(self(), <<"topic">>)),
?assertEqual(true, emqx_broker:subscribed(self(), <<"topic">>));
t_subscribed({'end', _Config}) ->
emqx_broker:unsubscribe(<<"topic">>).
t_subscribed_2(_) ->
t_subscribed_2({init, Config}) ->
emqx_broker:subscribe(<<"topic">>, <<"clientid">>),
%?assertEqual(true, emqx_broker:subscribed(<<"clientid">>, <<"topic">>)),
?assertEqual(true, emqx_broker:subscribed(self(), <<"topic">>)),
Config;
t_subscribed_2(Config) when is_list(Config) ->
?assertEqual(true, emqx_broker:subscribed(self(), <<"topic">>));
t_subscribed_2({'end', _Config}) ->
emqx_broker:unsubscribe(<<"topic">>).
t_subopts(_) ->
t_subopts({init, Config}) -> Config;
t_subopts(Config) when is_list(Config) ->
?assertEqual(false, emqx_broker:set_subopts(<<"topic">>, #{qos => 1})),
?assertEqual(undefined, emqx_broker:get_subopts(self(), <<"topic">>)),
?assertEqual(undefined, emqx_broker:get_subopts(<<"clientid">>, <<"topic">>)),
@ -85,42 +129,54 @@ t_subopts(_) ->
?assertEqual(true, emqx_broker:set_subopts(<<"topic">>, #{qos => 0})),
?assertEqual(#{nl => 0, qos => 0, rap => 0, rh => 0, subid => <<"clientid">>},
emqx_broker:get_subopts(self(), <<"topic">>)),
emqx_broker:get_subopts(self(), <<"topic">>));
t_subopts({'end', _Config}) ->
emqx_broker:unsubscribe(<<"topic">>).
t_topics(_) ->
t_topics({init, Config}) ->
Topics = [<<"topic">>, <<"topic/1">>, <<"topic/2">>],
ok = emqx_broker:subscribe(lists:nth(1, Topics), <<"clientId">>),
ok = emqx_broker:subscribe(lists:nth(2, Topics), <<"clientId">>),
ok = emqx_broker:subscribe(lists:nth(3, Topics), <<"clientId">>),
[{topics, Topics} | Config];
t_topics(Config) when is_list(Config) ->
Topics = [T1, T2, T3] = proplists:get_value(topics, Config),
ok = emqx_broker:subscribe(T1, <<"clientId">>),
ok = emqx_broker:subscribe(T2, <<"clientId">>),
ok = emqx_broker:subscribe(T3, <<"clientId">>),
Topics1 = emqx_broker:topics(),
?assertEqual(true, lists:foldl(fun(Topic, Acc) ->
case lists:member(Topic, Topics1) of
true -> Acc;
false -> false
end
end, true, Topics)),
emqx_broker:unsubscribe(lists:nth(1, Topics)),
emqx_broker:unsubscribe(lists:nth(2, Topics)),
emqx_broker:unsubscribe(lists:nth(3, Topics)).
end, true, Topics));
t_topics({'end', Config}) ->
Topics = proplists:get_value(topics, Config),
lists:foreach(fun(T) -> emqx_broker:unsubscribe(T) end, Topics).
t_subscribers(_) ->
t_subscribers({init, Config}) ->
emqx_broker:subscribe(<<"topic">>, <<"clientid">>),
?assertEqual([self()], emqx_broker:subscribers(<<"topic">>)),
Config;
t_subscribers(Config) when is_list(Config) ->
?assertEqual([self()], emqx_broker:subscribers(<<"topic">>));
t_subscribers({'end', _Config}) ->
emqx_broker:unsubscribe(<<"topic">>).
t_subscriptions(_) ->
t_subscriptions({init, Config}) ->
emqx_broker:subscribe(<<"topic">>, <<"clientid">>, #{qos => 1}),
ok = timer:sleep(100),
Config;
t_subscriptions(Config) when is_list(Config) ->
ct:sleep(100),
?assertEqual(#{nl => 0, qos => 1, rap => 0, rh => 0, subid => <<"clientid">>},
proplists:get_value(<<"topic">>, emqx_broker:subscriptions(self()))),
?assertEqual(#{nl => 0, qos => 1, rap => 0, rh => 0, subid => <<"clientid">>},
proplists:get_value(<<"topic">>, emqx_broker:subscriptions(<<"clientid">>))),
proplists:get_value(<<"topic">>, emqx_broker:subscriptions(<<"clientid">>)));
t_subscriptions({'end', _Config}) ->
emqx_broker:unsubscribe(<<"topic">>).
t_sub_pub(_) ->
t_sub_pub({init, Config}) ->
ok = emqx_broker:subscribe(<<"topic">>),
ct:sleep(10),
Config;
t_sub_pub(Config) when is_list(Config) ->
ct:sleep(100),
emqx_broker:safe_publish(emqx_message:make(ct, <<"topic">>, <<"hello">>)),
?assert(
receive
@ -130,16 +186,22 @@ t_sub_pub(_) ->
false
after 100 ->
false
end).
end);
t_sub_pub({'end', _Config}) ->
ok = emqx_broker:unsubscribe(<<"topic">>).
t_nosub_pub(_) ->
t_nosub_pub({init, Config}) -> Config;
t_nosub_pub({'end', _Config}) -> ok;
t_nosub_pub(Config) when is_list(Config) ->
?assertEqual(0, emqx_metrics:val('messages.dropped')),
emqx_broker:publish(emqx_message:make(ct, <<"topic">>, <<"hello">>)),
?assertEqual(1, emqx_metrics:val('messages.dropped')).
t_shared_subscribe(_) ->
t_shared_subscribe({init, Config}) ->
emqx_broker:subscribe(<<"topic">>, <<"clientid">>, #{share => <<"group">>}),
ct:sleep(10),
ct:sleep(100),
Config;
t_shared_subscribe(Config) when is_list(Config) ->
emqx_broker:safe_publish(emqx_message:make(ct, <<"topic">>, <<"hello">>)),
?assert(receive
{deliver, <<"topic">>, #message{payload = <<"hello">>}} ->
@ -149,9 +211,12 @@ t_shared_subscribe(_) ->
false
after 100 ->
false
end),
end);
t_shared_subscribe({'end', _Config}) ->
emqx_broker:unsubscribe(<<"$share/group/topic">>).
t_shared_subscribe_2({init, Config}) -> Config;
t_shared_subscribe_2({'end', _Config}) -> ok;
t_shared_subscribe_2(_) ->
{ok, ConnPid} = emqtt:start_link([{clean_start, true}, {clientid, <<"clientid">>}]),
{ok, _} = emqtt:connect(ConnPid),
@ -173,6 +238,8 @@ t_shared_subscribe_2(_) ->
emqtt:disconnect(ConnPid),
emqtt:disconnect(ConnPid2).
t_shared_subscribe_3({init, Config}) -> Config;
t_shared_subscribe_3({'end', _Config}) -> ok;
t_shared_subscribe_3(_) ->
{ok, ConnPid} = emqtt:start_link([{clean_start, true}, {clientid, <<"clientid">>}]),
{ok, _} = emqtt:connect(ConnPid),
@ -189,11 +256,13 @@ t_shared_subscribe_3(_) ->
emqtt:disconnect(ConnPid),
emqtt:disconnect(ConnPid2).
t_shard(_) ->
t_shard({init, Config}) ->
ok = meck:new(emqx_broker_helper, [passthrough, no_history]),
ok = meck:expect(emqx_broker_helper, get_sub_shard, fun(_, _) -> 1 end),
emqx_broker:subscribe(<<"topic">>, <<"clientid">>),
ct:sleep(10),
Config;
t_shard(Config) when is_list(Config) ->
ct:sleep(100),
emqx_broker:safe_publish(emqx_message:make(ct, <<"topic">>, <<"hello">>)),
?assert(
receive
@ -203,7 +272,9 @@ t_shard(_) ->
false
after 100 ->
false
end),
end);
t_shard({'end', _Config}) ->
emqx_broker:unsubscribe(<<"topic">>),
ok = meck:unload(emqx_broker_helper).
recv_msgs(Count) ->

View File

@ -141,11 +141,8 @@ t_open_session_race_condition(_) ->
end
end,
N = 1000,
[spawn(
fun() ->
spawn(OpenASession),
spawn(OpenASession)
end) || _ <- lists:seq(1, N)],
Pids = lists:flatten([[spawn_monitor(OpenASession), spawn_monitor(OpenASession)] ||
_ <- lists:seq(1, N)]),
WaitingRecv = fun _Wr(N1, N2, 0) ->
{N1, N2};
@ -158,14 +155,29 @@ t_open_session_race_condition(_) ->
{Succeeded, Failed} = WaitingRecv(0, 0, 2 * N),
ct:pal("Race condition status: succeeded=~p failed=~p~n", [Succeeded, Failed]),
?assertEqual(2 * N, length(Pids)),
WaitForDowns =
fun _Wd([{Pid, _Ref}]) -> Pid;
_Wd(Pids0) ->
receive
{'DOWN', DownRef, process, DownPid, _} ->
?assert(lists:member({DownPid, DownRef}, Pids0)),
_Wd(lists:delete({DownPid, DownRef}, Pids0))
after
10000 ->
exit(timeout)
end
end,
Winner = WaitForDowns(Pids),
?assertMatch([_], ets:lookup(emqx_channel, ClientId)),
[Pid] = emqx_cm:lookup_channels(ClientId),
?assertMatch([_], ets:lookup(emqx_channel_conn, {ClientId, Pid})),
?assertEqual([Winner], emqx_cm:lookup_channels(ClientId)),
?assertMatch([_], ets:lookup(emqx_channel_conn, {ClientId, Winner})),
?assertMatch([_], ets:lookup(emqx_channel_registry, ClientId)),
exit(Pid, kill),
timer:sleep(100), %% TODO deterministic
exit(Winner, kill),
receive {'DOWN', _, process, Winner, _} -> ok end,
ignored = gen_server:call(emqx_cm, ignore, infinity), %% sync
?assertEqual([], emqx_cm:lookup_channels(ClientId)).
t_discard_session(_) ->

View File

@ -22,7 +22,9 @@
-include_lib("emqx/include/emqx_mqtt.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
-include_lib("emqx_ct_helpers/include/emqx_ct.hrl").
-define(ASSERT_FRAME_THROW(Reason, Expr),
?assertThrow(?FRAME_PARSE_ERROR(Reason), Expr)).
all() ->
[{group, parse},
@ -127,14 +129,14 @@ t_parse_cont(_) ->
t_parse_frame_too_large(_) ->
Packet = ?PUBLISH_PACKET(?QOS_1, <<"t">>, 1, payload(1000)),
?catch_error(frame_too_large, parse_serialize(Packet, #{max_size => 256})),
?catch_error(frame_too_large, parse_serialize(Packet, #{max_size => 512})),
?ASSERT_FRAME_THROW(frame_too_large, parse_serialize(Packet, #{max_size => 256})),
?ASSERT_FRAME_THROW(frame_too_large, parse_serialize(Packet, #{max_size => 512})),
?assertEqual(Packet, parse_serialize(Packet, #{max_size => 2048, version => ?MQTT_PROTO_V4})).
t_parse_frame_malformed_variable_byte_integer(_) ->
MalformedPayload = << <<16#80>> || _ <- lists:seq(1, 4) >>,
MalformedPayload = << <<16#80>> || _ <- lists:seq(1, 6) >>,
ParseState = emqx_frame:initial_parse_state(#{}),
?catch_error(malformed_variable_byte_integer,
?ASSERT_FRAME_THROW(malformed_variable_byte_integer,
emqx_frame:parse(MalformedPayload, ParseState)).
t_serialize_parse_v3_connect(_) ->
@ -329,7 +331,7 @@ t_serialize_parse_qos1_publish(_) ->
?assertEqual(Bin, serialize_to_binary(Packet)),
?assertMatch(Packet, parse_to_packet(Bin, #{strict_mode => true})),
%% strict_mode = true
?catch_error(bad_packet_id, parse_serialize(?PUBLISH_PACKET(?QOS_1, <<"Topic">>, 0, <<>>))),
?ASSERT_FRAME_THROW(bad_packet_id, parse_serialize(?PUBLISH_PACKET(?QOS_1, <<"Topic">>, 0, <<>>))),
%% strict_mode = false
_ = parse_serialize(?PUBLISH_PACKET(?QOS_1, <<"Topic">>, 0, <<>>), #{strict_mode => false}).
@ -340,7 +342,7 @@ t_serialize_parse_qos2_publish(_) ->
?assertEqual(Bin, serialize_to_binary(Packet)),
?assertMatch(Packet, parse_to_packet(Bin, #{strict_mode => true})),
%% strict_mode = true
?catch_error(bad_packet_id, parse_serialize(?PUBLISH_PACKET(?QOS_2, <<"Topic">>, 0, <<>>))),
?ASSERT_FRAME_THROW(bad_packet_id, parse_serialize(?PUBLISH_PACKET(?QOS_2, <<"Topic">>, 0, <<>>))),
%% strict_mode = false
_ = parse_serialize(?PUBLISH_PACKET(?QOS_2, <<"Topic">>, 0, <<>>), #{strict_mode => false}).
@ -360,7 +362,7 @@ t_serialize_parse_puback(_) ->
?assertEqual(<<64,2,0,1>>, serialize_to_binary(Packet)),
?assertEqual(Packet, parse_serialize(Packet)),
%% strict_mode = true
?catch_error(bad_packet_id, parse_serialize(?PUBACK_PACKET(0))),
?ASSERT_FRAME_THROW(bad_packet_id, parse_serialize(?PUBACK_PACKET(0))),
%% strict_mode = false
?PUBACK_PACKET(0) = parse_serialize(?PUBACK_PACKET(0), #{strict_mode => false}).
@ -381,7 +383,7 @@ t_serialize_parse_pubrec(_) ->
?assertEqual(<<5:4,0:4,2,0,1>>, serialize_to_binary(Packet)),
?assertEqual(Packet, parse_serialize(Packet)),
%% strict_mode = true
?catch_error(bad_packet_id, parse_serialize(?PUBREC_PACKET(0))),
?ASSERT_FRAME_THROW(bad_packet_id, parse_serialize(?PUBREC_PACKET(0))),
%% strict_mode = false
?PUBREC_PACKET(0) = parse_serialize(?PUBREC_PACKET(0), #{strict_mode => false}).
@ -397,11 +399,11 @@ t_serialize_parse_pubrel(_) ->
%% PUBREL with bad qos 0
Bin0 = <<6:4,0:4,2,0,1>>,
?assertMatch(Packet, parse_to_packet(Bin0, #{strict_mode => false})),
?catch_error(bad_frame_header, parse_to_packet(Bin0, #{strict_mode => true})),
?ASSERT_FRAME_THROW(bad_frame_header, parse_to_packet(Bin0, #{strict_mode => true})),
%% strict_mode = false
?PUBREL_PACKET(0) = parse_serialize(?PUBREL_PACKET(0), #{strict_mode => false}),
%% strict_mode = true
?catch_error(bad_packet_id, parse_serialize(?PUBREL_PACKET(0))).
?ASSERT_FRAME_THROW(bad_packet_id, parse_serialize(?PUBREL_PACKET(0))).
t_serialize_parse_pubrel_v5(_) ->
Packet = ?PUBREL_PACKET(16, ?RC_SUCCESS, #{'Reason-String' => <<"success">>}),
@ -415,7 +417,7 @@ t_serialize_parse_pubcomp(_) ->
%% strict_mode = false
?PUBCOMP_PACKET(0) = parse_serialize(?PUBCOMP_PACKET(0), #{strict_mode => false}),
%% strict_mode = true
?catch_error(bad_packet_id, parse_serialize(?PUBCOMP_PACKET(0))).
?ASSERT_FRAME_THROW(bad_packet_id, parse_serialize(?PUBCOMP_PACKET(0))).
t_serialize_parse_pubcomp_v5(_) ->
Packet = ?PUBCOMP_PACKET(16, ?RC_SUCCESS, #{'Reason-String' => <<"success">>}),
@ -434,12 +436,12 @@ t_serialize_parse_subscribe(_) ->
?assertMatch(Packet, parse_to_packet(Bin0, #{strict_mode => false})),
%% strict_mode = false
_ = parse_to_packet(Bin0, #{strict_mode => false}),
?catch_error(bad_frame_header, parse_to_packet(Bin0, #{strict_mode => true})),
?ASSERT_FRAME_THROW(bad_frame_header, parse_to_packet(Bin0, #{strict_mode => true})),
%% strict_mode = false
_ = parse_serialize(?SUBSCRIBE_PACKET(0, TopicFilters), #{strict_mode => false}),
%% strict_mode = true
?catch_error(bad_packet_id, parse_serialize(?SUBSCRIBE_PACKET(0, TopicFilters))),
?catch_error(bad_subqos, parse_serialize(?SUBSCRIBE_PACKET(1, [{<<"t">>, #{qos => 3}}]))).
?ASSERT_FRAME_THROW(bad_packet_id, parse_serialize(?SUBSCRIBE_PACKET(0, TopicFilters))),
?ASSERT_FRAME_THROW(bad_subqos, parse_serialize(?SUBSCRIBE_PACKET(1, [{<<"t">>, #{qos => 3}}]))).
t_serialize_parse_subscribe_v5(_) ->
TopicFilters = [{<<"TopicQos0">>, #{rh => 1, qos => ?QOS_2, rap => 0, nl => 0}},
@ -453,7 +455,7 @@ t_serialize_parse_suback(_) ->
%% strict_mode = false
_ = parse_serialize(?SUBACK_PACKET(0, [?QOS_0]), #{strict_mode => false}),
%% strict_mode = true
?catch_error(bad_packet_id, parse_serialize(?SUBACK_PACKET(0, [?QOS_0]))).
?ASSERT_FRAME_THROW(bad_packet_id, parse_serialize(?SUBACK_PACKET(0, [?QOS_0]))).
t_serialize_parse_suback_v5(_) ->
Packet = ?SUBACK_PACKET(1, #{'Reason-String' => <<"success">>,
@ -471,11 +473,11 @@ t_serialize_parse_unsubscribe(_) ->
%% UNSUBSCRIBE(Q1, R0, D0, PacketId=2, TopicTable=[<<"TopicA">>])
Bin0 = <<?UNSUBSCRIBE:4,0:4,10,0,2,0,6,84,111,112,105,99,65>>,
?assertMatch(Packet, parse_to_packet(Bin0, #{strict_mode => false})),
?catch_error(bad_frame_header, parse_to_packet(Bin0, #{strict_mode => true})),
?ASSERT_FRAME_THROW(bad_frame_header, parse_to_packet(Bin0, #{strict_mode => true})),
%% strict_mode = false
_ = parse_serialize(?UNSUBSCRIBE_PACKET(0, [<<"TopicA">>]), #{strict_mode => false}),
%% strict_mode = true
?catch_error(bad_packet_id, parse_serialize(?UNSUBSCRIBE_PACKET(0, [<<"TopicA">>]))).
?ASSERT_FRAME_THROW(bad_packet_id, parse_serialize(?UNSUBSCRIBE_PACKET(0, [<<"TopicA">>]))).
t_serialize_parse_unsubscribe_v5(_) ->
Props = #{'User-Property' => [{<<"key">>, <<"val">>}]},
@ -550,4 +552,3 @@ parse_to_packet(Bin, Opts) ->
Packet.
payload(Len) -> iolist_to_binary(lists:duplicate(Len, 1)).

View File

@ -20,7 +20,6 @@
-compile(nowarn_export_all).
-include_lib("eunit/include/eunit.hrl").
-include_lib("emqx_ct_helpers/include/emqx_ct.hrl").
all() -> emqx_ct:all(?MODULE).
@ -41,12 +40,12 @@ t_insert(_) ->
?assertEqual(2, emqx_inflight:size(Inflight)),
?assertEqual({value, 1}, emqx_inflight:lookup(a, Inflight)),
?assertEqual({value, 2}, emqx_inflight:lookup(b, Inflight)),
?catch_error({key_exists, a}, emqx_inflight:insert(a, 1, Inflight)).
?assertError({key_exists, a}, emqx_inflight:insert(a, 1, Inflight)).
t_update(_) ->
Inflight = emqx_inflight:insert(k, v, emqx_inflight:new()),
?assertEqual(Inflight, emqx_inflight:update(k, v, Inflight)),
?catch_error(function_clause, emqx_inflight:update(badkey, v, Inflight)).
?assertError(function_clause, emqx_inflight:update(badkey, v, Inflight)).
t_resize(_) ->
Inflight = emqx_inflight:insert(k, v, emqx_inflight:new(2)),

View File

@ -141,13 +141,6 @@ t_undefined_headers(_) ->
Msg2 = emqx_message:set_header(c, 3, Msg),
?assertEqual(3, emqx_message:get_header(c, Msg2)).
t_format(_) ->
Msg = emqx_message:make(<<"clientid">>, <<"topic">>, <<"payload">>),
io:format("~s~n", [emqx_message:format(Msg)]),
Msg1 = emqx_message:set_header(properties, #{'Subscription-Identifier' => 1},
emqx_message:set_flag(dup, Msg)),
io:format("~s~n", [emqx_message:format(Msg1)]).
t_is_expired(_) ->
Msg = emqx_message:make(<<"clientid">>, <<"topic">>, <<"payload">>),
?assertNot(emqx_message:is_expired(Msg)),
@ -206,7 +199,9 @@ t_to_map(_) ->
{headers, #{}},
{topic, <<"topic">>},
{payload, <<"payload">>},
{timestamp, emqx_message:timestamp(Msg)}],
{timestamp, emqx_message:timestamp(Msg)},
{extra, []}
],
?assertEqual(List, emqx_message:to_list(Msg)),
?assertEqual(maps:from_list(List), emqx_message:to_map(Msg)).
@ -219,6 +214,8 @@ t_from_map(_) ->
headers => #{},
topic => <<"topic">>,
payload => <<"payload">>,
timestamp => emqx_message:timestamp(Msg)},
timestamp => emqx_message:timestamp(Msg),
extra => []
},
?assertEqual(Map, emqx_message:to_map(Msg)),
?assertEqual(Msg, emqx_message:from_map(emqx_message:to_map(Msg))).

View File

@ -21,7 +21,6 @@
-include_lib("emqx/include/emqx_mqtt.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("emqx_ct_helpers/include/emqx_ct.hrl").
all() -> emqx_ct:all(?MODULE).
@ -30,14 +29,14 @@ t_id(_) ->
fun({Id, Prop}) ->
?assertEqual(Id, emqx_mqtt_props:id(element(1, Prop)))
end),
?catch_error({bad_property, 'Bad-Property'}, emqx_mqtt_props:id('Bad-Property')).
?assertError({bad_property, 'Bad-Property'}, emqx_mqtt_props:id('Bad-Property')).
t_name(_) ->
foreach_prop(
fun({Id, Prop}) ->
?assertEqual(emqx_mqtt_props:name(Id), element(1, Prop))
end),
?catch_error({unsupported_property, 16#FF}, emqx_mqtt_props:name(16#FF)).
?assertError({unsupported_property, 16#FF}, emqx_mqtt_props:name(16#FF)).
t_filter(_) ->
ConnProps = #{'Session-Expiry-Interval' => 1,
@ -60,7 +59,7 @@ t_validate(_) ->
},
ok = emqx_mqtt_props:validate(ConnProps),
BadProps = #{'Unknown-Property' => 10},
?catch_error({bad_property,'Unknown-Property'},
?assertError({bad_property,'Unknown-Property'},
emqx_mqtt_props:validate(BadProps)).
t_validate_value(_) ->
@ -68,11 +67,11 @@ t_validate_value(_) ->
ok = emqx_mqtt_props:validate(#{'Reason-String' => <<"Unknown Reason">>}),
ok = emqx_mqtt_props:validate(#{'User-Property' => {<<"Prop">>, <<"Val">>}}),
ok = emqx_mqtt_props:validate(#{'User-Property' => [{<<"Prop">>, <<"Val">>}]}),
?catch_error({bad_property_value, {'Payload-Format-Indicator', 16#FFFF}},
?assertError({bad_property_value, {'Payload-Format-Indicator', 16#FFFF}},
emqx_mqtt_props:validate(#{'Payload-Format-Indicator' => 16#FFFF})),
?catch_error({bad_property_value, {'Server-Keep-Alive', 16#FFFFFF}},
?assertError({bad_property_value, {'Server-Keep-Alive', 16#FFFFFF}},
emqx_mqtt_props:validate(#{'Server-Keep-Alive' => 16#FFFFFF})),
?catch_error({bad_property_value, {'Will-Delay-Interval', -16#FF}},
?assertError({bad_property_value, {'Will-Delay-Interval', -16#FF}},
emqx_mqtt_props:validate(#{'Will-Delay-Interval' => -16#FF})).
foreach_prop(Fun) ->

View File

@ -0,0 +1,483 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2019 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
%% The proper types extension for EMQ X
-module(emqx_proper_types).
-include_lib("proper/include/proper.hrl").
-include("emqx.hrl").
%% High level Types
-export([ conninfo/0
, clientinfo/0
, sessioninfo/0
, connack_return_code/0
, message/0
, topictab/0
, topic/0
, systopic/0
, subopts/0
, nodename/0
, normal_topic/0
, normal_topic_filter/0
]).
%% Basic Types
-export([ url/0
, ip/0
, port/0
, limited_atom/0
, limited_latin_atom/0
]).
%% Iterators
-export([ nof/1
]).
%%--------------------------------------------------------------------
%% Types High level
%%--------------------------------------------------------------------
%% Type defined emqx_types.erl - conninfo()
%% Generator for emqx_types:conninfo(): the fixed well-known connection
%% keys, plus a random map of extra atom keys merged on top.
%% NOTE: maps:merge/2 gives precedence to the second map, so a colliding
%% random key would override a fixed one; limited_atom() cannot produce
%% any of the fixed keys listed below.
conninfo() ->
Keys = [{socktype, socktype()},
{sockname, peername()},
{peername, peername()},
{peercert, peercert()},
{conn_mod, conn_mod()},
{proto_name, proto_name()},
{proto_ver, non_neg_integer()},
{clean_start, boolean()},
{clientid, clientid()},
{username, username()},
{conn_props, properties()},
{connected, boolean()},
{connected_at, timestamp()},
{keepalive, range(0, 16#ffff)},
{receive_maximum, non_neg_integer()},
{expiry_interval, non_neg_integer()}],
?LET({Ks, M}, {Keys, map(limited_atom(), limited_any_term())},
begin
maps:merge(maps:from_list(Ks), M)
end).
%% Generator for emqx_types:clientinfo(): the fixed well-known client
%% keys, plus a random map of extra atom keys merged on top
%% (limited_atom() cannot collide with any fixed key, so the merge
%% never overrides them).
clientinfo() ->
    Keys = [{zone, zone()},
            {protocol, protocol()},
            {peerhost, ip()},
            {sockport, port()},
            {clientid, clientid()},
            {username, username()},
            {is_bridge, boolean()},
            %% FIX: key was misspelled `is_supuser'; emqx_types:clientinfo()
            %% defines the field as `is_superuser', so generated maps with
            %% the typo never exercise the real field.
            {is_superuser, boolean()},
            {mountpoint, maybe(utf8())},
            {ws_cookie, maybe(list())}
            % password,
            % auth_result,
            % anonymous,
            % cn,
            % dn,
           ],
    ?LET({Ks, M}, {Keys, map(limited_atom(), limited_any_term())},
         maps:merge(maps:from_list(Ks), M)).
%% See emqx_session:session() type define
%% Generator for session info maps.  Builds a raw tuple shaped like the
%% #session{} record (field order MUST match the record definition in
%% emqx_session.erl) and converts it with emqx_session:info/1.
sessioninfo() ->
?LET(Session, {session,
subscriptions(), % subscriptions
non_neg_integer(), % max_subscriptions
boolean(), % upgrade_qos
inflight(), % emqx_inflight:inflight()
mqueue(), % emqx_mqueue:mqueue()
packet_id(), % next_pkt_id
safty_timeout(), % retry_interval
awaiting_rel(), % awaiting_rel
non_neg_integer(), % max_awaiting_rel
safty_timeout(), % await_rel_timeout
timestamp() % created_at
},
emqx_session:info(Session)).
%% Subscription table: map of topic() => subopts().
subscriptions() ->
?LET(L, list({topic(), subopts()}), maps:from_list(L)).
%% Generator for an emqx_inflight window bounded by a random MaxLen.
%% Insert failures (duplicate packet id, window full) are deliberately
%% swallowed so generation never aborts.
inflight() ->
?LET(MaxLen, non_neg_integer(),
begin
?LET(Msgs, limited_list(MaxLen, {packet_id(), message(), timestamp()}),
begin
lists:foldl(fun({PktId, Msg, Ts}, Ift) ->
try
emqx_inflight:insert(PktId, {Msg, Ts}, Ift)
catch _:_ ->
Ift
end
end, emqx_inflight:new(MaxLen), Msgs)
end)
end).
%% Generator for an emqx_mqueue: folds random messages through
%% emqx_mqueue:in/2, so overflowing messages are dropped according to
%% the queue's own max_len/store_qos0 policy.
mqueue() ->
?LET({MaxLen, IsStoreQos0}, {non_neg_integer(), boolean()},
begin
?LET(Msgs, limited_list(MaxLen, message()),
begin
Q = emqx_mqueue:init(#{max_len => MaxLen, store_qos0 => IsStoreQos0}),
lists:foldl(fun(Msg, Acc) ->
{_Dropped, NQ} = emqx_mqueue:in(Msg, Acc),
NQ
end, Q, Msgs)
end)
end).
%% Generator for a #message{} record (defined in emqx.hrl).  The id is a
%% real emqx_guid, headers is a random atom->term map, and `extra' is
%% fixed to [] to match the record default.
message() ->
#message{
id = emqx_guid:gen(),
qos = qos(),
from = from(),
flags = flags(),
headers = map(limited_latin_atom(), limited_any_term()), %% headers
topic = topic(),
payload = payload(),
timestamp = timestamp(),
extra = []
}.
%% @private
%% dup/retain flag map for #message.flags.
flags() ->
?LET({Dup, Retain}, {boolean(), boolean()}, #{dup => Dup, retain => Retain}).
%% Valid (non-zero) MQTT packet identifier.
packet_id() ->
range(1, 16#ffff).
%% QoS2 awaiting-release tracking: map of packet_id() => timestamp().
awaiting_rel() ->
?LET(L, list({packet_id(), timestamp()}), maps:from_list(L)).
%% One of the CONNACK return-code atoms used across the code base
%% (mixes MQTT v3 and v5 style reason atoms).
connack_return_code() ->
oneof([ success
, protocol_error
, client_identifier_not_valid
, bad_username_or_password
, bad_clientid_or_password
, username_or_password_undefined
, password_error
, not_authorized
, server_unavailable
, server_busy
, banned
, bad_authentication_method
]).
%% Non-empty list of {topic, subopts} pairs (a subscription table).
topictab() ->
non_empty(list({topic(), subopts()})).
%% Any kind of topic: plain names, wildcard filters, $SYS topics and
%% shared-subscription topics.
topic() ->
oneof([normal_topic(),
normal_topic_filter(),
systopic_broker(), systopic_present(), systopic_stats(),
systopic_metrics(), systopic_alarms(), systopic_mon(),
sharetopic()]).
%% MQTT v5 subscription options: no-local, qos, retain-as-published,
%% retain-handling.
subopts() ->
?LET({Nl, Qos, Rap, Rh},
{range(0, 1), qos(),
range(0, 1), range(0, 1)},
#{nl => Nl, qos => Qos, rap => Rap, rh => Rh}).
%% QoS level 0..2.
qos() ->
range(0, 2).
%% Message sender (atom client id).
from() ->
oneof([limited_latin_atom()]).
payload() ->
binary().
%% Non-negative timeout in milliseconds ("safty" sic — kept for
%% compatibility with existing callers).
safty_timeout() ->
non_neg_integer().
%% Random Erlang node name of the form name@ip.
%% NOTE(review): creates atoms dynamically (never GC'd) — acceptable in
%% test-only generator code, never do this on untrusted input.
nodename() ->
?LET({Name, Ip}, {non_empty(list(latin_char())), ip()},
begin
binary_to_atom(iolist_to_binary([Name, "@", inet:ntoa(Ip)]), utf8)
end).
%% Any $SYS topic from the families below.
systopic() ->
oneof(
[systopic_broker(), systopic_present(), systopic_stats(),
systopic_metrics(), systopic_alarms(), systopic_mon()]).
%% $SYS/brokers[...] topics; the empty suffix maps to the bare
%% "$SYS/brokers" topic.
systopic_broker() ->
Topics = [<<"">>, <<"version">>, <<"uptime">>, <<"datetime">>, <<"sysdescr">>],
?LET({Nodename, T},
{nodename(), oneof(Topics)},
begin
case byte_size(T) of
0 -> <<"$SYS/brokers">>;
_ ->
<<"$SYS/brokers/", (ensure_bin(Nodename))/binary, "/", T/binary>>
end
end).
%% Client presence topics: $SYS/brokers/<node>/clients/<id>/(connected|disconnected).
systopic_present() ->
?LET({Nodename, ClientId, T},
{nodename(), clientid(), oneof([<<"connected">>, <<"disconnected">>])},
begin
<<"$SYS/brokers/", (ensure_bin(Nodename))/binary, "/clients/", (ensure_bin(ClientId))/binary, "/", T/binary>>
end).
%% Broker statistics topics: $SYS/brokers/<node>/stats/<stat>.
systopic_stats() ->
Topics = [<<"connections/max">>, <<"connections/count">>,
<<"suboptions/max">>, <<"suboptions/count">>,
<<"subscribers/max">>, <<"subscribers/count">>,
<<"subscriptions/max">>, <<"subscriptions/count">>,
<<"subscriptions/shared/max">>, <<"subscriptions/shared/count">>,
<<"topics/max">>, <<"topics/count">>,
<<"routes/max">>, <<"routes/count">>
],
?LET({Nodename, T},
{nodename(), oneof(Topics)},
<<"$SYS/brokers/", (ensure_bin(Nodename))/binary, "/stats/", T/binary>>).
%% Broker metrics topics: $SYS/brokers/<node>/metrics/<metric>, covering
%% bytes/packets/messages counters.
systopic_metrics() ->
Topics = [<<"bytes/received">>, <<"bytes/sent">>,
<<"packets/received">>, <<"packets/sent">>,
<<"packets/connect/received">>, <<"packets/connack/sent">>,
<<"packets/publish/received">>, <<"packets/publish/sent">>,
<<"packets/publish/error">>, <<"packets/publish/auth_error">>,
<<"packets/publish/dropped">>,
<<"packets/puback/received">>, <<"packets/puback/sent">>,
<<"packets/puback/inuse">>, <<"packets/puback/missed">>,
<<"packets/pubrec/received">>, <<"packets/pubrec/sent">>,
<<"packets/pubrec/inuse">>, <<"packets/pubrec/missed">>,
<<"packets/pubrel/received">>, <<"packets/pubrel/sent">>,
<<"packets/pubrel/missed">>,
<<"packets/pubcomp/received">>, <<"packets/pubcomp/sent">>,
<<"packets/pubcomp/inuse">>, <<"packets/pubcomp/missed">>,
<<"packets/subscribe/received">>, <<"packets/subscribe/error">>,
<<"packets/subscribe/auth_error">>, <<"packets/suback/sent">>,
<<"packets/unsubscribe/received">>, <<"packets/unsuback/sent">>,
<<"packets/pingreq/received">>, <<"packets/pingresp/sent">>,
<<"packets/disconnect/received">>, <<"packets/disconnect/sent">>,
<<"packets/auth/received">>, <<"packets/auth/sent">>,
<<"messages/received">>, <<"messages/sent">>,
<<"messages/qos0/received">>, <<"messages/qos0/sent">>,
<<"messages/qos1/received">>, <<"messages/qos1/sent">>,
<<"messages/qos2/received">>, <<"messages/qos2/sent">>,
<<"messages/publish">>, <<"messages/dropped">>,
<<"messages/dropped/expired">>, <<"messages/dropped/no_subscribers">>,
<<"messages/forward">>, <<"messages/retained">>,
<<"messages/delayed">>, <<"messages/delivered">>,
<<"messages/acked">>],
?LET({Nodename, T},
{nodename(), oneof(Topics)},
<<"$SYS/brokers/", (ensure_bin(Nodename))/binary, "/metrics/", T/binary>>).
%% Alarm topics: $SYS/brokers/<node>/alarms/(alert|clear).
systopic_alarms() ->
?LET({Nodename, T},
{nodename(), oneof([<<"alert">>, <<"clear">>])},
<<"$SYS/brokers/", (ensure_bin(Nodename))/binary, "/alarms/", T/binary>>).
%% VM system-monitor topics: $SYS/brokers/<node>/sysmon/<event>.
systopic_mon() ->
Topics = [<<"long_gc">>, <<"long_schedule">>,
<<"large_heap">>, <<"busy_port">>, <<"busy_dist_port">>],
?LET({Nodename, T},
{nodename(), oneof(Topics)},
<<"$SYS/brokers/", (ensure_bin(Nodename))/binary, "/sysmon/", T/binary>>).
%% Shared-subscription topic: ($queue|$share)/<group>/<topic>.
sharetopic() ->
?LET({Type, Grp, T},
{oneof([<<"$queue">>, <<"$share">>]), list(latin_char()), normal_topic()},
<<Type/binary, "/", (iolist_to_binary(Grp))/binary, "/", T/binary>>).
%% Plain topic name: latin chars with occasional '/' level separators
%% (3:1 weighting in favor of characters).
normal_topic() ->
?LET(L, list(frequency([{3, latin_char()}, {1, $/}])),
list_to_binary(L)).
%% Topic filter containing exactly one wildcard: either a trailing '#'
%% or one level replaced by '+'.
%% NOTE(review): uses rand:uniform/1 inside the ?LET body, so the value
%% is not purely determined by the generated inputs — shrinking may be
%% non-deterministic; acceptable for these suites, but worth knowing.
normal_topic_filter() ->
?LET({L, Wild}, {list(list(latin_char())), oneof(['#', '+'])},
begin
case Wild of
'#' ->
case L of
[] -> <<"#">>;
_ -> iolist_to_binary([lists:join("/", L), "/#"])
end;
'+' ->
case L of
[] -> <<"+">>;
_ ->
L1 = [case rand:uniform(3) == 1 of
true -> "+";
_ -> E
end || E <- L],
iolist_to_binary(lists:join("/", L1))
end
end
end).
%%--------------------------------------------------------------------
%% Basic Types
%%--------------------------------------------------------------------
%% Wrap a generator so it may also yield 'undefined' (optional value).
maybe(T) ->
oneof([undefined, T]).
%% Transport type of a listener socket.
socktype() ->
oneof([tcp, udp, ssl, proxy]).
%% {ip, port} peer/sock name pair.
peername() ->
{ip(), port()}.
peercert() ->
%% TODO: cert?
oneof([nossl, undefined]).
%% Connection-module names seen in conninfo (incl. gateway modules).
conn_mod() ->
oneof([emqx_connection, emqx_ws_connection, emqx_coap_mqtt_adapter,
emqx_sn_gateway, emqx_lwm2m_protocol, emqx_gbt32960_conn,
emqx_jt808_connection, emqx_tcp_connection]).
%% Protocol name binary; also allows arbitrary utf8 to fuzz consumers.
proto_name() ->
oneof([<<"MQTT">>, <<"MQTT-SN">>, <<"CoAP">>, <<"LwM2M">>, utf8()]).
clientid() ->
utf8().
%% Optional username (undefined = anonymous).
username() ->
maybe(utf8()).
%% MQTT properties map (atom key => binary value).
properties() ->
map(limited_latin_atom(), binary()).
%% millisecond
%% Millisecond timestamp within +/-12h of the current system time.
timestamp() ->
%% 12h <- Now -> 12h
?LET(Offset, range(-43200, 43200), erlang:system_time(millisecond) + Offset).
%% Listener zone name.
zone() ->
oneof([external, internal, limited_latin_atom()]).
%% Access protocol of the client.
protocol() ->
oneof([mqtt, 'mqtt-sn', coap, lwm2m, limited_latin_atom()]).
%% Random http(s) URL string; IPv6 hosts (8-tuples) are bracketed per
%% RFC 3986.
url() ->
?LET({Schema, IP, Port, Path}, {oneof(["http://", "https://"]), ip(), port(), http_path()},
begin
IP1 = case tuple_size(IP) == 8 of
true -> "[" ++ inet:ntoa(IP) ++ "]";
false -> inet:ntoa(IP)
end,
lists:concat([Schema, IP1, ":", integer_to_list(Port), "/", Path])
end).
%% Any IP address: plain v4, v6, or a v4-mapped v6 tuple.
ip() ->
oneof([ipv4(), ipv6(), ipv6_from_v4()]).
%% IPv4 4-tuple; first octet >= 1 so the address is never 0.x.x.x.
ipv4() ->
?LET(IP, {range(1, 16#ff), range(0, 16#ff),
range(0, 16#ff), range(0, 16#ff)}, IP).
%% IPv6 8-tuple.
%% NOTE(review): segments are capped at 16#ff although v6 segments are
%% 16-bit (0..16#ffff) — coverage is intentionally narrow, not a bug.
ipv6() ->
?LET(IP, {range(0, 16#ff), range(0, 16#ff),
range(0, 16#ff), range(0, 16#ff),
range(0, 16#ff), range(0, 16#ff),
range(0, 16#ff), range(0, 16#ff)}, IP).
%% v4-mapped v6 address (::ffff:a.b.c.d) built from a random v4 tuple.
ipv6_from_v4() ->
?LET(IP, {range(1, 16#ff), range(0, 16#ff),
range(0, 16#ff), range(0, 16#ff)},
inet:ipv4_mapped_ipv6_address(IP)).
%% Valid (non-zero) TCP/UDP port.
port() ->
?LET(Port, range(1, 16#ffff), Port).
%% URL path: latin chars with occasional '/' separators (3:1 weighting).
http_path() ->
list(frequency([{3, latin_char()},
{1, $/}])).
%% Single alphanumeric latin character code point.
latin_char() ->
oneof([integer($0, $9), integer($A, $Z), integer($a, $z)]).
%% Small fixed pool of latin atoms (keeps the atom table bounded).
limited_latin_atom() ->
oneof([ 'abc_atom'
, '0123456789'
, 'ABC-ATOM'
, 'abc123ABC'
]).
%% Avoid generating a lot of atom and causing atom table overflows
%% Fixed pool of "weird" atoms (unicode, symbols, max-length 255 chars)
%% — enumerated up front so generation never grows the atom table.
limited_atom() ->
oneof([ 'a_normal_atom'
, '10123_num_prefixed_atom'
, '___dash_prefixed_atom'
, '123'
, binary_to_atom(<<"你好_utf8_atom"/utf8>>)
, '_', ' ', '""', '#$%^&*'
%% The longest atom with 255 chars
, list_to_atom(
lists:append([ "so"
, [ $o || _ <- lists:seq(1, 243)]
, "-long-atom"]
)
)
]).
%% Arbitrary simple term (binary, number or string) for map values.
limited_any_term() ->
oneof([binary(), number(), string()]).
%%--------------------------------------------------------------------
%% Iterators
%%--------------------------------------------------------------------
%% Random sub-list of Ls: picks 0..length(Ls) distinct elements, in the
%% (random) order produced by rand_nl/3.
nof(Ls) when is_list(Ls) ->
Len = length(Ls),
?LET(N, range(0, Len),
begin
Ns = rand_nl(N, Len, []),
[lists:nth(I, Ls) || I <- Ns]
end).
%% List of generator T with at most N elements.
%% N = 0 means UNBOUNDED (plain list(T)) — mirrors emqx's convention
%% that max_len = 0 disables the limit.
limited_list(0, T) ->
list(T);
limited_list(N, T) ->
?LET(N2, range(0, N),
begin
[T || _ <- lists:seq(1, N2)]
end).
%%--------------------------------------------------------------------
%% Internal funcs
%%--------------------------------------------------------------------
-compile({inline, rand_nl/3}).
%% @private
%% Draw `N' distinct random integers from 1..`L' by rejection sampling:
%% a candidate already present in the accumulator is re-rolled.
%% Precondition (guaranteed by nof/1, which passes N =< L): terminates
%% only when at least N distinct values exist in 1..L.
rand_nl(0, _L, Picked) ->
    Picked;
rand_nl(N, L, Picked) ->
    Candidate = rand:uniform(L),
    case lists:member(Candidate, Picked) of
        false -> rand_nl(N - 1, L, [Candidate | Picked]);
        true  -> rand_nl(N, L, Picked)
    end.
%% Coerce an atom or a binary into a binary; atoms are UTF-8 encoded.
ensure_bin(Atom) when is_atom(Atom) ->
    atom_to_binary(Atom, utf8);
ensure_bin(Bin) when is_binary(Bin) ->
    Bin.

View File

@ -0,0 +1,99 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2017-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_schema_tests).
-include_lib("eunit/include/eunit.hrl").
%% DTLS server options: both dtls versions are accepted and the default
%% cipher list starts with the expected ECDSA suite.
ssl_opts_dtls_test() ->
Sc = emqx_schema:server_ssl_opts_schema(#{versions => dtls_all_available,
ciphers => dtls_all_available}, false),
Checked = validate(Sc, #{<<"versions">> => [<<"dtlsv1.2">>, <<"dtlsv1">>]}),
?assertMatch(#{versions := ['dtlsv1.2', 'dtlsv1'],
ciphers := ["ECDHE-ECDSA-AES256-GCM-SHA384" | _]
}, Checked).
%% TLS-1.3-only options (ranch flag = false): handshake_timeout is a
%% ranch-specific field and must be absent from the checked config.
ssl_opts_tls_1_3_test() ->
Sc = emqx_schema:server_ssl_opts_schema(#{}, false),
Checked = validate(Sc, #{<<"versions">> => [<<"tlsv1.3">>]}),
?assertNot(maps:is_key(handshake_timeout, Checked)),
?assertMatch(#{versions := ['tlsv1.3'],
ciphers := [_ | _]
}, Checked).
%% Same as the non-ranch case, but with ranch flag = true the
%% handshake_timeout field must be present in the checked config.
ssl_opts_tls_for_ranch_test() ->
Sc = emqx_schema:server_ssl_opts_schema(#{}, true),
Checked = validate(Sc, #{<<"versions">> => [<<"tlsv1.3">>]}),
?assertMatch(#{versions := ['tlsv1.3'],
ciphers := [_ | _],
handshake_timeout := _
}, Checked).
%% Ciphers given as an array of binaries are converted to a list of
%% strings, preserving order.
ssl_opts_cipher_array_test() ->
Sc = emqx_schema:server_ssl_opts_schema(#{}, false),
Checked = validate(Sc, #{<<"versions">> => [<<"tlsv1.3">>],
<<"ciphers">> => [<<"TLS_AES_256_GCM_SHA384">>,
<<"ECDHE-ECDSA-AES256-GCM-SHA384">>]}),
?assertMatch(#{versions := ['tlsv1.3'],
ciphers := ["TLS_AES_256_GCM_SHA384", "ECDHE-ECDSA-AES256-GCM-SHA384"]
}, Checked).
%% Ciphers given as one comma-separated binary are split into the same
%% list-of-strings form as the array input.
ssl_opts_cipher_comma_separated_string_test() ->
Sc = emqx_schema:server_ssl_opts_schema(#{}, false),
Checked = validate(Sc, #{<<"versions">> => [<<"tlsv1.3">>],
<<"ciphers">> => <<"TLS_AES_256_GCM_SHA384,ECDHE-ECDSA-AES256-GCM-SHA384">>}),
?assertMatch(#{versions := ['tlsv1.3'],
ciphers := ["TLS_AES_256_GCM_SHA384", "ECDHE-ECDSA-AES256-GCM-SHA384"]
}, Checked).
%% TLS-1.2 (PSK-capable) server options pass schema validation and the
%% requested protocol version is preserved in the checked config.
ssl_opts_tls_psk_test() ->
    Schema = emqx_schema:server_ssl_opts_schema(#{}, false),
    ?assertMatch(#{versions := ['tlsv1.2']},
                 validate(Schema, #{<<"versions">> => [<<"tlsv1.2">>]})).
%% An unknown cipher suite name must be rejected with a
%% {bad_ciphers, [...]} validation error thrown by the schema check.
bad_cipher_test() ->
    Schema = emqx_schema:server_ssl_opts_schema(#{}, false),
    Conf = #{<<"versions">> => [<<"tlsv1.2">>],
             <<"ciphers">> => [<<"foo">>]},
    ?assertThrow({_, [{validation_error, #{reason := {bad_ciphers, ["foo"]}}}]},
                 validate(Schema, Conf)),
    ok.
%% Test helper: wrap the ssl-opts Schema in a minimal root, inject dummy
%% cert/key file paths (required fields), run hocon_schema:check_plain/3
%% with atom keys, and return the checked ssl_opts map.
%% Throws the hocon validation error on failure — asserted by callers.
validate(Schema, Data0) ->
Sc = #{ roots => [ssl_opts]
, fields => #{ssl_opts => Schema}
},
Data = Data0#{ cacertfile => <<"cacertfile">>
, certfile => <<"certfile">>
, keyfile => <<"keyfile">>
},
#{ssl_opts := Checked} =
hocon_schema:check_plain(Sc, #{<<"ssl_opts">> => Data},
#{atom_key => true}),
Checked.
%% The standalone ciphers schema rejects unknown suite names given as a
%% comma-separated string.
%% NOTE(review): function name misspells `ciphers'; eunit still discovers
%% it via the _test suffix, so it is left as-is to keep the interface.
ciperhs_schema_test() ->
Sc = emqx_schema:ciphers_schema(undefined),
WSc = #{roots => [{ciphers, Sc}]},
?assertThrow({_, [{validation_error, _}]},
hocon_schema:check_plain(WSc, #{<<"ciphers">> => <<"foo,bar">>})).
%% An unsupported TLS version string must be rejected with an
%% {unsupported_ssl_versions, [...]} validation error.
bad_tls_version_test() ->
Sc = emqx_schema:server_ssl_opts_schema(#{}, false),
Reason = {unsupported_ssl_versions, [foo]},
?assertThrow({_Sc, [{validation_error, #{reason := Reason}}]},
validate(Sc, #{<<"versions">> => [<<"foo">>]})),
ok.

View File

@ -24,21 +24,22 @@
-define(SYSMON, emqx_sys_mon).
-define(FAKE_PORT, hd(erlang:ports())).
-define(FAKE_INFO, [{timeout, 100}, {in, foo}, {out, {?MODULE, bar, 1}}]).
-define(INPUTINFO, [{self(), long_gc,
concat_str("long_gc warning: pid = ~p, info: ~p", self(), "hello"), "hello"},
fmt("long_gc warning: pid = ~p", [self()]), ?FAKE_INFO},
{self(), long_schedule,
concat_str("long_schedule warning: pid = ~p, info: ~p", self(), "hello"), "hello"},
fmt("long_schedule warning: pid = ~p", [self()]), ?FAKE_INFO},
{self(), large_heap,
concat_str("large_heap warning: pid = ~p, info: ~p", self(), "hello"), "hello"},
fmt("large_heap warning: pid = ~p", [self()]), ?FAKE_INFO},
{self(), busy_port,
concat_str("busy_port warning: suspid = ~p, port = ~p",
self(), list_to_port("#Port<0.4>")), list_to_port("#Port<0.4>")},
fmt("busy_port warning: suspid = ~p, port = ~p",
[self(), ?FAKE_PORT]), ?FAKE_PORT},
{self(), busy_dist_port,
concat_str("busy_dist_port warning: suspid = ~p, port = ~p",
self(), list_to_port("#Port<0.4>")),list_to_port("#Port<0.4>")},
{list_to_port("#Port<0.4>"), long_schedule,
concat_str("long_schedule warning: port = ~p, info: ~p",
list_to_port("#Port<0.4>"), "hello"), "hello"}
fmt("busy_dist_port warning: suspid = ~p, port = ~p",
[self(), ?FAKE_PORT]), ?FAKE_PORT},
{?FAKE_PORT, long_schedule,
fmt("long_schedule warning: port = ~p", [?FAKE_PORT]), ?FAKE_INFO}
]).
all() -> emqx_ct:all(?MODULE).
@ -82,10 +83,10 @@ t_procinfo(_) ->
ok = meck:new(emqx_vm, [passthrough, no_history]),
ok = meck:expect(emqx_vm, get_process_info, fun(_) -> [] end),
ok = meck:expect(emqx_vm, get_process_gc_info, fun(_) -> [] end),
?assertEqual([], emqx_sys_mon:procinfo([])),
ok = meck:expect(emqx_vm, get_process_info, fun(_) -> ok end),
?assertEqual([{pid, undefined}], emqx_sys_mon:procinfo(undefined)),
ok = meck:expect(emqx_vm, get_process_info, fun(_) -> [] end),
ok = meck:expect(emqx_vm, get_process_gc_info, fun(_) -> undefined end),
?assertEqual(undefined, emqx_sys_mon:procinfo([])),
?assertEqual([{pid, self()}], emqx_sys_mon:procinfo(self())),
ok = meck:unload(emqx_vm).
t_sys_mon(_Config) ->
@ -117,6 +118,4 @@ validate_sys_mon_info(PidOrPort, SysMonName,ValidateInfo, InfoOrPort) ->
end,
emqtt:stop(C).
concat_str(ValidateInfo, InfoOrPort, Info) ->
WarnInfo = io_lib:format(ValidateInfo, [InfoOrPort, Info]),
lists:flatten(WarnInfo).
fmt(Fmt, Args) -> lists:flatten(io_lib:format(Fmt, Args)).

View File

@ -20,7 +20,6 @@
-compile(nowarn_export_all).
-include_lib("eunit/include/eunit.hrl").
-include_lib("emqx_ct_helpers/include/emqx_ct.hrl").
-import(emqx_topic,
[ wildcard/1
@ -127,20 +126,20 @@ t_validate(_) ->
true = validate({filter, <<"x">>}),
true = validate({name, <<"x//y">>}),
true = validate({filter, <<"sport/tennis/#">>}),
ok = ?catch_error(empty_topic, validate({name, <<>>})),
ok = ?catch_error(topic_name_error, validate({name, <<"abc/#">>})),
ok = ?catch_error(topic_too_long, validate({name, long_topic()})),
ok = ?catch_error('topic_invalid_#', validate({filter, <<"abc/#/1">>})),
ok = ?catch_error(topic_invalid_char, validate({filter, <<"abc/#xzy/+">>})),
ok = ?catch_error(topic_invalid_char, validate({filter, <<"abc/xzy/+9827">>})),
ok = ?catch_error(topic_invalid_char, validate({filter, <<"sport/tennis#">>})),
ok = ?catch_error('topic_invalid_#', validate({filter, <<"sport/tennis/#/ranking">>})).
?assertError(empty_topic, validate({name, <<>>})),
?assertError(topic_name_error, validate({name, <<"abc/#">>})),
?assertError(topic_too_long, validate({name, long_topic()})),
?assertError('topic_invalid_#', validate({filter, <<"abc/#/1">>})),
?assertError(topic_invalid_char, validate({filter, <<"abc/#xzy/+">>})),
?assertError(topic_invalid_char, validate({filter, <<"abc/xzy/+9827">>})),
?assertError(topic_invalid_char, validate({filter, <<"sport/tennis#">>})),
?assertError('topic_invalid_#', validate({filter, <<"sport/tennis/#/ranking">>})).
t_sigle_level_validate(_) ->
true = validate({filter, <<"+">>}),
true = validate({filter, <<"+/tennis/#">>}),
true = validate({filter, <<"sport/+/player1">>}),
ok = ?catch_error(topic_invalid_char, validate({filter, <<"sport+">>})).
?assertError(topic_invalid_char, validate({filter, <<"sport+">>})).
t_prepend(_) ->
?assertEqual(<<"ab">>, prepend(undefined, <<"ab">>)),
@ -192,13 +191,13 @@ long_topic() ->
iolist_to_binary([[integer_to_list(I), "/"] || I <- lists:seq(0, 66666)]).
t_parse(_) ->
ok = ?catch_error({invalid_topic_filter, <<"$queue/t">>},
?assertError({invalid_topic_filter, <<"$queue/t">>},
parse(<<"$queue/t">>, #{share => <<"g">>})),
ok = ?catch_error({invalid_topic_filter, <<"$share/g/t">>},
?assertError({invalid_topic_filter, <<"$share/g/t">>},
parse(<<"$share/g/t">>, #{share => <<"g">>})),
ok = ?catch_error({invalid_topic_filter, <<"$share/t">>},
?assertError({invalid_topic_filter, <<"$share/t">>},
parse(<<"$share/t">>)),
ok = ?catch_error({invalid_topic_filter, <<"$share/+/t">>},
?assertError({invalid_topic_filter, <<"$share/+/t">>},
parse(<<"$share/+/t">>)),
?assertEqual({<<"a/b/+/#">>, #{}}, parse(<<"a/b/+/#">>)),
?assertEqual({<<"a/b/+/#">>, #{qos => 1}}, parse({<<"a/b/+/#">>, #{qos => 1}})),

View File

@ -229,7 +229,7 @@ t_ws_check_origin(_) ->
?assertMatch({gun_upgrade, _},
start_ws_client(#{protocols => [<<"mqtt">>],
headers => [{<<"origin">>, <<"http://localhost:18083">>}]})),
?assertMatch({gun_response, {_, 500, _}},
?assertMatch({gun_response, {_, 403, _}},
start_ws_client(#{protocols => [<<"mqtt">>],
headers => [{<<"origin">>, <<"http://localhost:18080">>}]})).

View File

@ -135,7 +135,7 @@ json_basic() ->
oneof([true, false, null, number(), json_string()]).
latin_atom() ->
emqx_ct_proper_types:limited_latin_atom().
emqx_proper_types:limited_latin_atom().
json_string() -> utf8().

View File

@ -33,7 +33,7 @@ prop_lookup() ->
?ALL({ClientPSKID, UserState},
{client_pskid(), user_state()},
begin
case emqx_psk:lookup(psk, ClientPSKID, UserState) of
case emqx_tls_psk:lookup(psk, ClientPSKID, UserState) of
{ok, _Result} -> true;
error -> true;
_Other -> false

View File

@ -91,7 +91,7 @@
enable => true})).
-define(INSTANCE_EXAMPLE_2, maps:merge(?EXAMPLE_2, #{id => <<"password-based:http-server">>,
connect_timeout => 5000,
connect_timeout => "5s",
enable_pipelining => true,
headers => #{
<<"accept">> => <<"application/json">>,
@ -102,8 +102,8 @@
},
max_retries => 5,
pool_size => 8,
request_timeout => 5000,
retry_interval => 1000,
request_timeout => "5s",
retry_interval => "1s",
enable => true})).
-define(INSTANCE_EXAMPLE_3, maps:merge(?EXAMPLE_3, #{id => <<"jwt">>,
@ -1259,9 +1259,9 @@ definitions() ->
example => <<"SELECT password_hash FROM mqtt_user WHERE username = ${mqtt-username}">>
},
query_timeout => #{
type => integer,
description => <<"Query timeout, Unit: Milliseconds">>,
default => 5000
type => string,
description => <<"Query timeout">>,
default => "5s"
}
}
},
@ -1528,16 +1528,16 @@ definitions() ->
type => object
},
connect_timeout => #{
type => integer,
default => 5000
type => string,
default => <<"5s">>
},
max_retries => #{
type => integer,
default => 5
},
retry_interval => #{
type => integer,
default => 1000
type => string,
default => <<"1s">>
},
request_timout => #{
type => integer,
@ -1970,8 +1970,9 @@ find_config(AuthenticatorID, AuthenticatorsConfig) ->
end.
fill_defaults(Config) ->
#{<<"authentication">> := CheckedConfig} = hocon_schema:check_plain(
?AUTHN, #{<<"authentication">> => Config}, #{no_conversion => true}),
#{<<"authentication">> := CheckedConfig} =
hocon_schema:check_plain(?AUTHN, #{<<"authentication">> => Config},
#{only_fill_defaults => true}),
CheckedConfig.
convert_certs(#{<<"ssl">> := SSLOpts} = Config) ->

View File

@ -18,6 +18,8 @@
-export([ replace_placeholders/2
, replace_placeholder/2
, check_password/3
, is_superuser/1
, hash/4
, gen_salt/0
, bin/1
@ -55,6 +57,28 @@ replace_placeholder(<<"${cert-common-name}">>, Credential) ->
replace_placeholder(Constant, _) ->
Constant.
check_password(undefined, _Selected, _State) ->
{error, bad_username_or_password};
check_password(Password,
#{<<"password_hash">> := Hash},
#{password_hash_algorithm := bcrypt}) ->
case {ok, Hash} =:= bcrypt:hashpw(Password, Hash) of
true -> ok;
false -> {error, bad_username_or_password}
end;
check_password(Password,
#{<<"password_hash">> := Hash} = Selected,
#{password_hash_algorithm := Algorithm,
salt_position := SaltPosition}) ->
Salt = maps:get(<<"salt">>, Selected, <<>>),
case Hash =:= hash(Algorithm, Password, Salt, SaltPosition) of
true -> ok;
false -> {error, bad_username_or_password}
end.
is_superuser(Selected) ->
#{is_superuser => maps:get(<<"is_superuser">>, Selected, false)}.
hash(Algorithm, Password, Salt, prefix) ->
emqx_passwd:hash(Algorithm, <<Salt/binary, Password/binary>>);
hash(Algorithm, Password, Salt, suffix) ->

View File

@ -100,8 +100,8 @@ body(type) -> map();
body(validator) -> [fun check_body/1];
body(_) -> undefined.
request_timeout(type) -> non_neg_integer();
request_timeout(default) -> 5000;
request_timeout(type) -> emqx_schema:duration_ms();
request_timeout(default) -> "5s";
request_timeout(_) -> undefined.
%%------------------------------------------------------------------------------
@ -156,7 +156,6 @@ authenticate(#{auth_method := _}, _) ->
authenticate(Credential, #{'_unique' := Unique,
method := Method,
request_timeout := RequestTimeout} = State) ->
try
Request = generate_request(Credential, State),
case emqx_resource:query(Unique, {Method, Request, RequestTimeout}) of
{ok, 204, _Headers} -> {ok, #{is_superuser => false}};
@ -170,12 +169,10 @@ authenticate(Credential, #{'_unique' := Unique,
{error, _Reason} ->
{ok, #{is_superuser => false}}
end;
{error, _Reason} ->
ignore
end
catch
error:Reason ->
?LOG(warning, "The following error occurred in '~s' during authentication: ~p", [Unique, Reason]),
{error, Reason} ->
?SLOG(error, #{msg => "http_server_query_failed",
resource => Unique,
reason => Reason}),
ignore
end.
@ -194,9 +191,9 @@ check_url(URL) ->
end.
check_body(Body) ->
lists:any(fun({_, V}) ->
not is_binary(V)
end, maps:to_list(Body)).
maps:fold(fun(_K, _V, false) -> false;
(_K, V, true) -> is_binary(V)
end, true, Body).
default_headers() ->
maps:put(<<"content-type">>,

View File

@ -94,7 +94,9 @@ handle_info({http, {RequestID, Result}},
State1 = State0#{request_id := undefined},
case Result of
{error, Reason} ->
?LOG(error, "Failed to request jwks endpoint(~s): ~p", [Endpoint, Reason]),
?SLOG(warning, #{msg => "failed_to_request_jwks_endpoint",
endpoint => Endpoint,
reason => Reason}),
State1;
{_StatusLine, _Headers, Body} ->
try
@ -102,7 +104,9 @@ handle_info({http, {RequestID, Result}},
{_, JWKs} = JWKS#jose_jwk.keys,
State1#{jwks := JWKs}
catch _:_ ->
?LOG(error, "Invalid jwks returned from jwks endpoint(~s): ~p~n", [Endpoint, Body]),
?SLOG(warning, #{msg => "invalid_jwks_returned",
endpoint => Endpoint,
body => Body}),
State1
end
end;
@ -136,11 +140,16 @@ handle_options(#{endpoint := Endpoint,
refresh_jwks(#{endpoint := Endpoint,
ssl_opts := SSLOpts} = State) ->
HTTPOpts = [{timeout, 5000}, {connect_timeout, 5000}, {ssl, SSLOpts}],
HTTPOpts = [ {timeout, 5000}
, {connect_timeout, 5000}
, {ssl, SSLOpts}
],
NState = case httpc:request(get, {Endpoint, [{"Accept", "application/json"}]}, HTTPOpts,
[{body_format, binary}, {sync, false}, {receiver, self()}]) of
{error, Reason} ->
?LOG(error, "Failed to request jwks endpoint(~s): ~p", [Endpoint, Reason]),
?SLOG(warning, #{msg => "failed_to_request_jwks_endpoint",
endpoint => Endpoint,
reason => Reason}),
State;
{ok, RequestID} ->
State#{request_id := RequestID}

View File

@ -141,29 +141,27 @@ authenticate(#{password := Password} = Credential,
, selector := Selector0
, '_unique' := Unique
} = State) ->
try
Selector1 = replace_placeholders(Selector0, Credential),
Selector2 = normalize_selector(Selector1),
case emqx_resource:query(Unique, {find_one, Collection, Selector2, #{}}) of
undefined -> ignore;
{error, Reason} ->
?LOG(error, "['~s'] Query failed: ~p", [Unique, Reason]),
?SLOG(error, #{msg => "mongodb_query_failed",
resource => Unique,
reason => Reason}),
ignore;
Doc ->
case check_password(Password, Doc, State) of
ok ->
{ok, #{is_superuser => is_superuser(Doc, State)}};
{error, {cannot_find_password_hash_field, PasswordHashField}} ->
?LOG(error, "['~s'] Can't find password hash field: ~s", [Unique, PasswordHashField]),
{error, bad_username_or_password};
?SLOG(error, #{msg => "cannot_find_password_hash_field",
resource => Unique,
password_hash_field => PasswordHashField}),
ignore;
{error, Reason} ->
{error, Reason}
end
end
catch
error:Error ->
?LOG(warning, "The following error occurred in '~s' during authentication: ~p", [Unique, Error]),
ignore
end.
destroy(#{'_unique' := Unique}) ->

View File

@ -65,8 +65,8 @@ salt_position(_) -> undefined.
query(type) -> string();
query(_) -> undefined.
query_timeout(type) -> integer();
query_timeout(default) -> 5000;
query_timeout(type) -> emqx_schema:duration_ms();
query_timeout(default) -> "5s";
query_timeout(_) -> undefined.
%%------------------------------------------------------------------------------
@ -114,24 +114,21 @@ authenticate(#{password := Password} = Credential,
query := Query,
query_timeout := Timeout,
'_unique' := Unique} = State) ->
try
Params = emqx_authn_utils:replace_placeholders(PlaceHolders, Credential),
case emqx_resource:query(Unique, {sql, Query, Params, Timeout}) of
{ok, _Columns, []} -> ignore;
{ok, Columns, Rows} ->
Selected = maps:from_list(lists:zip(Columns, Rows)),
case check_password(Password, Selected, State) of
case emqx_authn_utils:check_password(Password, Selected, State) of
ok ->
{ok, #{is_superuser => maps:get(<<"is_superuser">>, Selected, false)}};
{ok, emqx_authn_utils:is_superuser(Selected)};
{error, Reason} ->
{error, Reason}
end;
{error, _Reason} ->
ignore
end
catch
error:Error ->
?LOG(warning, "The following error occurred in '~s' during authentication: ~p", [Unique, Error]),
{error, Reason} ->
?SLOG(error, #{msg => "mysql_query_failed",
resource => Unique,
reason => Reason}),
ignore
end.
@ -143,25 +140,6 @@ destroy(#{'_unique' := Unique}) ->
%% Internal functions
%%------------------------------------------------------------------------------
check_password(undefined, _Selected, _State) ->
{error, bad_username_or_password};
check_password(Password,
#{<<"password_hash">> := Hash},
#{password_hash_algorithm := bcrypt}) ->
case {ok, Hash} =:= bcrypt:hashpw(Password, Hash) of
true -> ok;
false -> {error, bad_username_or_password}
end;
check_password(Password,
#{<<"password_hash">> := Hash} = Selected,
#{password_hash_algorithm := Algorithm,
salt_position := SaltPosition}) ->
Salt = maps:get(<<"salt">>, Selected, <<>>),
case Hash =:= emqx_authn_utils:hash(Algorithm, Password, Salt, SaltPosition) of
true -> ok;
false -> {error, bad_username_or_password}
end.
%% TODO: Support prepare
parse_query(Query) ->
case re:run(Query, ?RE_PLACEHOLDER, [global, {capture, all, binary}]) of

View File

@ -103,25 +103,22 @@ authenticate(#{password := Password} = Credential,
#{query := Query,
placeholders := PlaceHolders,
'_unique' := Unique} = State) ->
try
Params = emqx_authn_utils:replace_placeholders(PlaceHolders, Credential),
case emqx_resource:query(Unique, {sql, Query, Params}) of
{ok, _Columns, []} -> ignore;
{ok, Columns, Rows} ->
NColumns = [Name || #column{name = Name} <- Columns],
Selected = maps:from_list(lists:zip(NColumns, Rows)),
case check_password(Password, Selected, State) of
case emqx_authn_utils:check_password(Password, Selected, State) of
ok ->
{ok, #{is_superuser => maps:get(<<"is_superuser">>, Selected, false)}};
{ok, emqx_authn_utils:is_superuser(Selected)};
{error, Reason} ->
{error, Reason}
end;
{error, _Reason} ->
ignore
end
catch
error:Error ->
?LOG(warning, "The following error occurred in '~s' during authentication: ~p", [Unique, Error]),
{error, Reason} ->
?SLOG(error, #{msg => "postgresql_query_failed",
resource => Unique,
reason => Reason}),
ignore
end.
@ -133,30 +130,11 @@ destroy(#{'_unique' := Unique}) ->
%% Internal functions
%%------------------------------------------------------------------------------
check_password(undefined, _Selected, _State) ->
{error, bad_username_or_password};
check_password(Password,
#{<<"password_hash">> := Hash},
#{password_hash_algorithm := bcrypt}) ->
case {ok, Hash} =:= bcrypt:hashpw(Password, Hash) of
true -> ok;
false -> {error, bad_username_or_password}
end;
check_password(Password,
#{<<"password_hash">> := Hash} = Selected,
#{password_hash_algorithm := Algorithm,
salt_position := SaltPosition}) ->
Salt = maps:get(<<"salt">>, Selected, <<>>),
case Hash =:= emqx_authn_utils:hash(Algorithm, Password, Salt, SaltPosition) of
true -> ok;
false -> {error, bad_username_or_password}
end.
%% TODO: Support prepare
parse_query(Query) ->
case re:run(Query, ?RE_PLACEHOLDER, [global, {capture, all, binary}]) of
{match, Captured} ->
PlaceHolders = [PlaceHolder || PlaceHolder <- Captured],
PlaceHolders = [PlaceHolder || [PlaceHolder] <- Captured],
Replacements = ["$" ++ integer_to_list(I) || I <- lists:seq(1, length(Captured))],
NQuery = lists:foldl(fun({PlaceHolder, Replacement}, Query0) ->
re:replace(Query0, <<"'\\", PlaceHolder/binary, "'">>, Replacement, [{return, binary}])

View File

@ -127,24 +127,26 @@ authenticate(#{password := Password} = Credential,
#{ query := {Command, Key, Fields}
, '_unique' := Unique
} = State) ->
try
NKey = binary_to_list(iolist_to_binary(replace_placeholders(Key, Credential))),
case emqx_resource:query(Unique, {cmd, [Command, NKey | Fields]}) of
{ok, Values} ->
Selected = merge(Fields, Values),
case check_password(Password, Selected, State) of
case merge(Fields, Values) of
#{<<"password_hash">> := _} = Selected ->
case emqx_authn_utils:check_password(Password, Selected, State) of
ok ->
{ok, #{is_superuser => maps:get("is_superuser", Selected, false)}};
{ok, emqx_authn_utils:is_superuser(Selected)};
{error, Reason} ->
{error, Reason}
end;
{error, Reason} ->
?LOG(error, "['~s'] Query failed: ~p", [Unique, Reason]),
_ ->
?SLOG(error, #{msg => "cannot_find_password_hash_field",
resource => Unique}),
ignore
end
catch
error:{cannot_get_variable, Placeholder} ->
?LOG(warning, "The following error occurred in '~s' during authentication: ~p", [Unique, {cannot_get_variable, Placeholder}]),
end;
{error, Reason} ->
?SLOG(error, #{msg => "redis_query_failed",
resource => Unique,
reason => Reason}),
ignore
end.
@ -209,27 +211,5 @@ merge(Fields, Value) when not is_list(Value) ->
merge(Fields, [Value]);
merge(Fields, Values) ->
maps:from_list(
lists:filter(fun({_, V}) ->
V =/= undefined
end, lists:zip(Fields, Values))).
check_password(undefined, _Selected, _State) ->
{error, bad_username_or_password};
check_password(Password,
#{"password_hash" := PasswordHash},
#{password_hash_algorithm := bcrypt}) ->
case {ok, PasswordHash} =:= bcrypt:hashpw(Password, PasswordHash) of
true -> ok;
false -> {error, bad_username_or_password}
end;
check_password(Password,
#{"password_hash" := PasswordHash} = Selected,
#{password_hash_algorithm := Algorithm,
salt_position := SaltPosition}) ->
Salt = maps:get("salt", Selected, <<>>),
case PasswordHash =:= emqx_authn_utils:hash(Algorithm, Password, Salt, SaltPosition) of
true -> ok;
false -> {error, bad_username_or_password}
end;
check_password(_Password, _Selected, _State) ->
ignore.
[{list_to_binary(K), V}
|| {K, V} <- lists:zip(Fields, Values), V =/= undefined]).

View File

@ -3,9 +3,9 @@
%%
%% -type(ipaddrs() :: {ipaddrs, string()}).
%%
%% -type(username() :: {username, regex()}).
%% -type(username() :: {user | username, string()} | {user | username, {re, regex()}}).
%%
%% -type(clientid() :: {clientid, regex()}).
%% -type(clientid() :: {client | clientid, string()} | {client | clientid, {re, regex()}}).
%%
%% -type(who() :: ipaddr() | ipaddrs() |username() | clientid() |
%% {'and', [ipaddr() | ipaddrs()| username() | clientid()]} |
@ -20,7 +20,7 @@
%%
%% -type(permission() :: allow | deny).
%%
%% -type(rule() :: {permission(), who(), access(), topics()}).
%% -type(rule() :: {permission(), who(), access(), topics()} | {permission(), all}).
%%--------------------------------------------------------------------
{allow, {username, "^dashboard?"}, subscribe, ["$SYS/#"]}.

View File

@ -1,63 +1,67 @@
authorization {
# sources = [
# # {
# # type: http
# # url: "https://emqx.com"
# # headers: {
# # Accept: "application/json"
# # Content-Type: "application/json"
# # }
# # },
# # {
# # type: mysql
# # server: "127.0.0.1:3306"
# # database: mqtt
# # pool_size: 1
# # username: root
# # password: public
# # auto_reconnect: true
# # ssl: {
# # enable: true
# # cacertfile: "{{ platform_etc_dir }}/certs/cacert.pem"
# # certfile: "{{ platform_etc_dir }}/certs/client-cert.pem"
# # keyfile: "{{ platform_etc_dir }}/certs/client-key.pem"
# # }
# # query: "select ipaddress, username, clientid, action, permission, topic from mqtt_authz where ipaddr = '%a' or username = '%u' or clientid = '%c'"
# # },
# # {
# # type: postgresql
# # server: "127.0.0.1:5432"
# # database: mqtt
# # pool_size: 1
# # username: root
# # password: public
# # auto_reconnect: true
# # ssl: {enable: false}
# # query: "select ipaddress, username, clientid, action, permission, topic from mqtt_authz where ipaddr = '%a' or username = '%u' or username = '$all' or clientid = '%c'"
# # },
# # {
# # type: redis
# # server: "127.0.0.1:6379"
# # database: 0
# # pool_size: 1
# # password: public
# # auto_reconnect: true
# # ssl: {enable: false}
# # cmd: "HGETALL mqtt_authz:%u"
# # },
# # {
# # type: mongodb
# # mongo_type: single
# # server: "127.0.0.1:27017"
# # pool_size: 1
# # database: mqtt
# # ssl: {enable: false}
# # collection: mqtt_authz
# # selector: { "$or": [ { "username": "%u" }, { "clientid": "%c" } ] }
# # },
sources = [
# {
# type: file
# path: "{{ platform_etc_dir }}/acl.conf"
# type: http
# url: "https://emqx.com"
# headers: {
# Accept: "application/json"
# Content-Type: "application/json"
# }
# ]
# },
# {
# type: mysql
# server: "127.0.0.1:3306"
# database: mqtt
# pool_size: 1
# username: root
# password: public
# auto_reconnect: true
# ssl: {
# enable: true
# cacertfile: "{{ platform_etc_dir }}/certs/cacert.pem"
# certfile: "{{ platform_etc_dir }}/certs/client-cert.pem"
# keyfile: "{{ platform_etc_dir }}/certs/client-key.pem"
# }
# query: "select ipaddress, username, clientid, action, permission, topic from mqtt_authz where ipaddr = '%a' or username = '%u' or clientid = '%c'"
# },
# {
# type: postgresql
# server: "127.0.0.1:5432"
# database: mqtt
# pool_size: 1
# username: root
# password: public
# auto_reconnect: true
# ssl: {enable: false}
# query: "select ipaddress, username, clientid, action, permission, topic from mqtt_authz where ipaddr = '%a' or username = '%u' or username = '$all' or clientid = '%c'"
# },
# {
# type: redis
# server: "127.0.0.1:6379"
# database: 0
# pool_size: 1
# password: public
# auto_reconnect: true
# ssl: {enable: false}
# cmd: "HGETALL mqtt_authz:%u"
# },
# {
# type: mongodb
# mongo_type: single
# server: "127.0.0.1:27017"
# pool_size: 1
# database: mqtt
# ssl: {enable: false}
# collection: mqtt_authz
# selector: { "$or": [ { "username": "%u" }, { "clientid": "%c" } ] }
# },
{
type: built-in-database
},
{
type: file
# file is loaded into cache
path: "{{ platform_etc_dir }}/acl.conf"
}
]
}

View File

@ -29,12 +29,37 @@
(A =:= all) orelse (A =:= <<"all">>)
)).
-define(ACL_SHARDED, emqx_acl_sharded).
-define(ACL_TABLE, emqx_acl).
%% To save some space, use an integer for label, 0 for 'all', {1, Username} and {2, ClientId}.
-define(ACL_TABLE_ALL, 0).
-define(ACL_TABLE_USERNAME, 1).
-define(ACL_TABLE_CLIENTID, 2).
-record(emqx_acl, {
who :: ?ACL_TABLE_ALL| {?ACL_TABLE_USERNAME, binary()} | {?ACL_TABLE_CLIENTID, binary()},
rules :: [ {permission(), action(), emqx_topic:topic()} ]
}).
-record(authz_metrics, {
allow = 'client.authorize.allow',
deny = 'client.authorize.deny',
ignore = 'client.authorize.ignore'
}).
-define(CMD_REPLCAE, replace).
-define(CMD_DELETE, delete).
-define(CMD_PREPEND, prepend).
-define(CMD_APPEND, append).
-define(CMD_MOVE, move).
-define(CMD_MOVE_TOP, <<"top">>).
-define(CMD_MOVE_BOTTOM, <<"bottom">>).
-define(CMD_MOVE_BEFORE(Before), {<<"before">>, Before}).
-define(CMD_MOVE_AFTER(After), {<<"after">>, After}).
-define(METRICS(Type), tl(tuple_to_list(#Type{}))).
-define(METRICS(Type, K), #Type{}#Type.K).

View File

@ -39,7 +39,6 @@
-export([post_config_update/4, pre_config_update/2]).
-define(CONF_KEY_PATH, [authorization, sources]).
-define(SOURCE_TYPES, [file, http, mongodb, mysql, postgresql, redis]).
-spec(register_metrics() -> ok).
register_metrics() ->
@ -50,228 +49,153 @@ init() ->
emqx_config_handler:add_handler(?CONF_KEY_PATH, ?MODULE),
Sources = emqx:get_config(?CONF_KEY_PATH, []),
ok = check_dup_types(Sources),
NSources = [init_source(Source) || Source <- Sources],
NSources = init_sources(Sources),
ok = emqx_hooks:add('client.authorize', {?MODULE, authorize, [NSources]}, -1).
lookup() ->
{_M, _F, [A]}= find_action_in_hooks(),
A.
lookup(Type) ->
try find_source_by_type(atom(Type), lookup()) of
{_, Source} -> Source
catch
error:Reason -> {error, Reason}
end.
{Source, _Front, _Rear} = take(Type),
Source.
move(Type, Cmd) ->
move(Type, Cmd, #{}).
move(Type, #{<<"before">> := Before}, Opts) ->
emqx:update_config(?CONF_KEY_PATH, {move, atom(Type), #{<<"before">> => atom(Before)}}, Opts);
emqx:update_config(?CONF_KEY_PATH, {?CMD_MOVE, type(Type), ?CMD_MOVE_BEFORE(type(Before))}, Opts);
move(Type, #{<<"after">> := After}, Opts) ->
emqx:update_config(?CONF_KEY_PATH, {move, atom(Type), #{<<"after">> => atom(After)}}, Opts);
emqx:update_config(?CONF_KEY_PATH, {?CMD_MOVE, type(Type), ?CMD_MOVE_AFTER(type(After))}, Opts);
move(Type, Position, Opts) ->
emqx:update_config(?CONF_KEY_PATH, {move, atom(Type), Position}, Opts).
emqx:update_config(?CONF_KEY_PATH, {?CMD_MOVE, type(Type), Position}, Opts).
update(Cmd, Sources) ->
update(Cmd, Sources, #{}).
update({replace_once, Type}, Sources, Opts) ->
emqx:update_config(?CONF_KEY_PATH, {{replace_once, atom(Type)}, Sources}, Opts);
update({delete_once, Type}, Sources, Opts) ->
emqx:update_config(?CONF_KEY_PATH, {{delete_once, atom(Type)}, Sources}, Opts);
update({replace, Type}, Sources, Opts) ->
emqx:update_config(?CONF_KEY_PATH, {{replace, type(Type)}, Sources}, Opts);
update({delete, Type}, Sources, Opts) ->
emqx:update_config(?CONF_KEY_PATH, {{delete, type(Type)}, Sources}, Opts);
update(Cmd, Sources, Opts) ->
emqx:update_config(?CONF_KEY_PATH, {Cmd, Sources}, Opts).
pre_config_update({move, Type, <<"top">>}, Conf) when is_list(Conf) ->
{Index, _} = find_source_by_type(Type),
{List1, List2} = lists:split(Index, Conf),
NConf = [lists:nth(Index, Conf)] ++ lists:droplast(List1) ++ List2,
case check_dup_types(NConf) of
ok -> {ok, NConf};
Error -> Error
end;
pre_config_update({move, Type, <<"bottom">>}, Conf) when is_list(Conf) ->
{Index, _} = find_source_by_type(Type),
{List1, List2} = lists:split(Index, Conf),
NConf = lists:droplast(List1) ++ List2 ++ [lists:nth(Index, Conf)],
case check_dup_types(NConf) of
ok -> {ok, NConf};
Error -> Error
end;
pre_config_update({move, Type, #{<<"before">> := Before}}, Conf) when is_list(Conf) ->
{Index1, _} = find_source_by_type(Type),
Conf1 = lists:nth(Index1, Conf),
{Index2, _} = find_source_by_type(Before),
Conf2 = lists:nth(Index2, Conf),
{List1, List2} = lists:split(Index2, Conf),
NConf = lists:delete(Conf1, lists:droplast(List1))
++ [Conf1] ++ [Conf2]
++ lists:delete(Conf1, List2),
case check_dup_types(NConf) of
ok -> {ok, NConf};
Error -> Error
end;
pre_config_update({move, Type, #{<<"after">> := After}}, Conf) when is_list(Conf) ->
{Index1, _} = find_source_by_type(Type),
Conf1 = lists:nth(Index1, Conf),
{Index2, _} = find_source_by_type(After),
{List1, List2} = lists:split(Index2, Conf),
NConf = lists:delete(Conf1, List1)
++ [Conf1]
++ lists:delete(Conf1, List2),
case check_dup_types(NConf) of
ok -> {ok, NConf};
Error -> Error
end;
pre_config_update({head, Sources}, Conf) when is_list(Sources), is_list(Conf) ->
do_update({?CMD_MOVE, Type, ?CMD_MOVE_TOP}, Conf) when is_list(Conf) ->
{Source, Front, Rear} = take(Type, Conf),
[Source | Front] ++ Rear;
do_update({?CMD_MOVE, Type, ?CMD_MOVE_BOTTOM}, Conf) when is_list(Conf) ->
{Source, Front, Rear} = take(Type, Conf),
Front ++ Rear ++ [Source];
do_update({?CMD_MOVE, Type, ?CMD_MOVE_BEFORE(Before)}, Conf) when is_list(Conf) ->
{S1, Front1, Rear1} = take(Type, Conf),
{S2, Front2, Rear2} = take(Before, Front1 ++ Rear1),
Front2 ++ [S1, S2] ++ Rear2;
do_update({?CMD_MOVE, Type, ?CMD_MOVE_AFTER(After)}, Conf) when is_list(Conf) ->
{S1, Front1, Rear1} = take(Type, Conf),
{S2, Front2, Rear2} = take(After, Front1 ++ Rear1),
Front2 ++ [S2, S1] ++ Rear2;
do_update({?CMD_PREPEND, Sources}, Conf) when is_list(Sources), is_list(Conf) ->
NConf = Sources ++ Conf,
case check_dup_types(NConf) of
ok -> {ok, Sources ++ Conf};
Error -> Error
end;
pre_config_update({tail, Sources}, Conf) when is_list(Sources), is_list(Conf) ->
ok = check_dup_types(NConf),
NConf;
do_update({?CMD_APPEND, Sources}, Conf) when is_list(Sources), is_list(Conf) ->
NConf = Conf ++ Sources,
case check_dup_types(NConf) of
ok -> {ok, Conf ++ Sources};
Error -> Error
end;
pre_config_update({{replace_once, Type}, Source}, Conf) when is_map(Source), is_list(Conf) ->
{Index, _} = find_source_by_type(Type),
{List1, List2} = lists:split(Index, Conf),
NConf = lists:droplast(List1) ++ [Source] ++ List2,
case check_dup_types(NConf) of
ok -> {ok, NConf};
Error -> Error
end;
pre_config_update({{delete_once, Type}, _Source}, Conf) when is_list(Conf) ->
{Index, _} = find_source_by_type(Type),
{List1, List2} = lists:split(Index, Conf),
NConf = lists:droplast(List1) ++ List2,
case check_dup_types(NConf) of
ok -> {ok, NConf};
Error -> Error
end;
pre_config_update({_, Sources}, _Conf) when is_list(Sources)->
ok = check_dup_types(NConf),
NConf;
do_update({{replace, Type}, Source}, Conf) when is_map(Source), is_list(Conf) ->
{_Old, Front, Rear} = take(Type, Conf),
NConf = Front ++ [Source | Rear],
ok = check_dup_types(NConf),
NConf;
do_update({{delete, Type}, _Source}, Conf) when is_list(Conf) ->
{_Old, Front, Rear} = take(Type, Conf),
NConf = Front ++ Rear,
NConf;
do_update({_, Sources}, _Conf) when is_list(Sources)->
%% overwrite the entire config!
{ok, Sources}.
Sources.
pre_config_update(Cmd, Conf) ->
{ok, do_update(Cmd, Conf)}.
post_config_update(_, undefined, _Conf, _AppEnvs) ->
ok;
post_config_update({move, Type, <<"top">>}, _NewSources, _OldSources, _AppEnvs) ->
InitedSources = lookup(),
{Index, Source} = find_source_by_type(Type, InitedSources),
{Sources1, Sources2 } = lists:split(Index, InitedSources),
Sources3 = [Source] ++ lists:droplast(Sources1) ++ Sources2,
ok = emqx_hooks:put('client.authorize', {?MODULE, authorize, [Sources3]}, -1),
ok = emqx_authz_cache:drain_cache();
post_config_update({move, Type, <<"bottom">>}, _NewSources, _OldSources, _AppEnvs) ->
InitedSources = lookup(),
{Index, Source} = find_source_by_type(Type, InitedSources),
{Sources1, Sources2 } = lists:split(Index, InitedSources),
Sources3 = lists:droplast(Sources1) ++ Sources2 ++ [Source],
ok = emqx_hooks:put('client.authorize', {?MODULE, authorize, [Sources3]}, -1),
ok = emqx_authz_cache:drain_cache();
post_config_update({move, Type, #{<<"before">> := Before}}, _NewSources, _OldSources, _AppEnvs) ->
InitedSources = lookup(),
{_, Source0} = find_source_by_type(Type, InitedSources),
{Index, Source1} = find_source_by_type(Before, InitedSources),
{Sources1, Sources2} = lists:split(Index, InitedSources),
Sources3 = lists:delete(Source0, lists:droplast(Sources1))
++ [Source0] ++ [Source1]
++ lists:delete(Source0, Sources2),
ok = emqx_hooks:put('client.authorize', {?MODULE, authorize, [Sources3]}, -1),
ok = emqx_authz_cache:drain_cache();
post_config_update({move, Type, #{<<"after">> := After}}, _NewSources, _OldSources, _AppEnvs) ->
InitedSources = lookup(),
{_, Source} = find_source_by_type(Type, InitedSources),
{Index, _} = find_source_by_type(After, InitedSources),
{Sources1, Sources2} = lists:split(Index, InitedSources),
Sources3 = lists:delete(Source, Sources1)
++ [Source]
++ lists:delete(Source, Sources2),
ok = emqx_hooks:put('client.authorize', {?MODULE, authorize, [Sources3]}, -1),
ok = emqx_authz_cache:drain_cache();
post_config_update({head, Sources}, _NewSources, _OldConf, _AppEnvs) ->
InitedSources = [init_source(R) || R <- check_sources(Sources)],
ok = emqx_hooks:put('client.authorize', {?MODULE, authorize, [InitedSources ++ lookup()]}, -1),
ok = emqx_authz_cache:drain_cache();
post_config_update({tail, Sources}, _NewSources, _OldConf, _AppEnvs) ->
InitedSources = [init_source(R) || R <- check_sources(Sources)],
emqx_hooks:put('client.authorize', {?MODULE, authorize, [lookup() ++ InitedSources]}, -1),
ok = emqx_authz_cache:drain_cache();
post_config_update({{replace_once, Type}, #{type := Type} = Source}, _NewSources, _OldConf, _AppEnvs) when is_map(Source) ->
OldInitedSources = lookup(),
{Index, OldSource} = find_source_by_type(Type, OldInitedSources),
case maps:get(type, OldSource, undefined) of
undefined -> ok;
file -> ok;
_ ->
#{annotations := #{id := Id}} = OldSource,
ok = emqx_resource:remove(Id)
end,
{OldSources1, OldSources2 } = lists:split(Index, OldInitedSources),
InitedSources = [init_source(R) || R <- check_sources([Source])],
ok = emqx_hooks:put('client.authorize', {?MODULE, authorize, [lists:droplast(OldSources1) ++ InitedSources ++ OldSources2]}, -1),
ok = emqx_authz_cache:drain_cache();
post_config_update({{delete_once, Type}, _Source}, _NewSources, _OldConf, _AppEnvs) ->
OldInitedSources = lookup(),
{_, OldSource} = find_source_by_type(Type, OldInitedSources),
case OldSource of
#{annotations := #{id := Id}} ->
ok = emqx_resource:remove(Id);
_ -> ok
end,
ok = emqx_hooks:put('client.authorize', {?MODULE, authorize, [lists:delete(OldSource, OldInitedSources)]}, -1),
ok = emqx_authz_cache:drain_cache();
post_config_update(_, NewSources, _OldConf, _AppEnvs) ->
%% overwrite the entire config!
OldInitedSources = lookup(),
InitedSources = [init_source(Source) || Source <- NewSources],
ok = emqx_hooks:put('client.authorize', {?MODULE, authorize, [InitedSources]}, -1),
lists:foreach(fun (#{type := _Type, enable := true, annotations := #{id := Id}}) ->
ok = emqx_resource:remove(Id);
(_) -> ok
end, OldInitedSources),
post_config_update(Cmd, NewSources, _OldSource, _AppEnvs) ->
ok = do_post_update(Cmd, NewSources),
ok = emqx_authz_cache:drain_cache().
%%--------------------------------------------------------------------
%% Initialize source
%%--------------------------------------------------------------------
do_post_update({?CMD_MOVE, _Type, _Where} = Cmd, _NewSources) ->
InitedSources = lookup(),
MovedSources = do_update(Cmd, InitedSources),
ok = emqx_hooks:put('client.authorize', {?MODULE, authorize, [MovedSources]}, -1),
ok = emqx_authz_cache:drain_cache();
do_post_update({?CMD_PREPEND, Sources}, _NewSources) ->
InitedSources = init_sources(check_sources(Sources)),
ok = emqx_hooks:put('client.authorize', {?MODULE, authorize, [InitedSources ++ lookup()]}, -1),
ok = emqx_authz_cache:drain_cache();
do_post_update({?CMD_APPEND, Sources}, _NewSources) ->
InitedSources = init_sources(check_sources(Sources)),
emqx_hooks:put('client.authorize', {?MODULE, authorize, [lookup() ++ InitedSources]}, -1),
ok = emqx_authz_cache:drain_cache();
do_post_update({{replace, Type}, #{type := Type} = Source}, _NewSources) when is_map(Source) ->
OldInitedSources = lookup(),
{OldSource, Front, Rear} = take(Type, OldInitedSources),
ok = ensure_resource_deleted(OldSource),
InitedSources = init_sources(check_sources([Source])),
ok = emqx_hooks:put('client.authorize', {?MODULE, authorize, [Front ++ InitedSources ++ Rear]}, -1),
ok = emqx_authz_cache:drain_cache();
do_post_update({{delete, Type}, _Source}, _NewSources) ->
OldInitedSources = lookup(),
{OldSource, Front, Rear} = take(Type, OldInitedSources),
ok = ensure_resource_deleted(OldSource),
ok = emqx_hooks:put('client.authorize', {?MODULE, authorize, [Front ++ Rear]}, -1),
ok = emqx_authz_cache:drain_cache();
do_post_update(_, NewSources) ->
%% overwrite the entire config!
OldInitedSources = lookup(),
InitedSources = init_sources(NewSources),
ok = emqx_hooks:put('client.authorize', {?MODULE, authorize, [InitedSources]}, -1),
lists:foreach(fun ensure_resource_deleted/1, OldInitedSources),
ok = emqx_authz_cache:drain_cache().
ensure_resource_deleted(#{enable := false}) -> ok;
ensure_resource_deleted(#{type := file}) -> ok;
ensure_resource_deleted(#{type := 'built-in-database'}) -> ok;
ensure_resource_deleted(#{annotations := #{id := Id}}) -> ok = emqx_resource:remove(Id).
check_dup_types(Sources) ->
check_dup_types(Sources, ?SOURCE_TYPES).
check_dup_types(_Sources, []) -> ok;
check_dup_types(Sources, [T0 | Tail]) ->
case lists:foldl(fun (#{type := T1}, AccIn) ->
case T0 =:= T1 of
true -> AccIn + 1;
false -> AccIn
end;
(#{<<"type">> := T1}, AccIn) ->
case T0 =:= atom(T1) of
true -> AccIn + 1;
false -> AccIn
end
end, 0, Sources) > 1 of
check_dup_types(Sources, []).
check_dup_types([], _Checked) -> ok;
check_dup_types([Source | Sources], Checked) ->
%% the input might be raw or type-checked result, so lookup both 'type' and <<"type">>
%% TODO: check: really?
Type = case maps:get(<<"type">>, Source, maps:get(type, Source, undefined)) of
undefined ->
%% this should never happen if the value is type checked by honcon schema
error({bad_source_input, Source});
Type0 ->
type(Type0)
end,
case lists:member(Type, Checked) of
true ->
?LOG(error, "The type is duplicated in the Authorization source"),
{error, 'The type is duplicated in the Authorization source'};
false -> check_dup_types(Sources, Tail)
%% we have made it clear not to support more than one authz instance for each type
error({duplicated_authz_source_type, Type});
false ->
check_dup_types(Sources, [Type | Checked])
end.
init_source(#{enable := true,
type := file,
init_sources(Sources) ->
{_Enabled, Disabled} = lists:partition(fun(#{enable := Enable}) -> Enable end, Sources),
case Disabled =/= [] of
true -> ?SLOG(info, #{msg => "disabled_sources_ignored", sources => Disabled});
false -> ok
end,
lists:map(fun init_source/1, Sources).
init_source(#{enable := false} = Source) -> Source;
init_source(#{type := file,
path := Path
} = Source) ->
Rules = case file:consult(Path) of
@ -288,8 +212,7 @@ init_source(#{enable := true,
error(Reason)
end,
Source#{annotations => #{rules => Rules}};
init_source(#{enable := true,
type := http,
init_source(#{type := http,
url := Url
} = Source) ->
NSource= maps:put(base_url, maps:remove(query, Url), Source),
@ -297,16 +220,17 @@ init_source(#{enable := true,
{error, Reason} -> error({load_config_error, Reason});
Id -> Source#{annotations => #{id => Id}}
end;
init_source(#{enable := true,
type := DB
init_source(#{type := 'built-in-database'
} = Source) ->
Source;
init_source(#{type := DB
} = Source) when DB =:= redis;
DB =:= mongodb ->
case create_resource(Source) of
{error, Reason} -> error({load_config_error, Reason});
Id -> Source#{annotations => #{id => Id}}
end;
init_source(#{enable := true,
type := DB,
init_source(#{type := DB,
query := SQL
} = Source) when DB =:= mysql;
DB =:= postgresql ->
@ -318,8 +242,7 @@ init_source(#{enable := true,
query => Mod:parse_query(SQL)
}
}
end;
init_source(#{enable := false} = Source) ->Source.
end.
%%--------------------------------------------------------------------
%% AuthZ callbacks
@ -373,13 +296,17 @@ check_sources(RawSources) ->
#{sources := Sources} = hocon_schema:check_plain(Schema, Conf, #{atom_key => true}),
Sources.
find_source_by_type(Type) -> find_source_by_type(Type, lookup()).
find_source_by_type(Type, Sources) -> find_source_by_type(Type, Sources, 1).
find_source_by_type(_, [], _N) -> error(not_found_source);
find_source_by_type(Type, [ Source = #{type := T} | Tail], N) ->
case Type =:= T of
true -> {N, Source};
false -> find_source_by_type(Type, Tail, N + 1)
take(Type) -> take(Type, lookup()).
%% Take the source of give type, the sources list is split into two parts
%% front part and rear part.
take(Type, Sources) ->
{Front, Rear} = lists:splitwith(fun(T) -> type(T) =/= type(Type) end, Sources),
case Rear =:= [] of
true ->
error({authz_source_of_type_not_found, Type});
_ ->
{hd(Rear), Front, tl(Rear)}
end.
find_action_in_hooks() ->
@ -404,6 +331,8 @@ create_resource(#{type := DB} = Source) ->
{error, Reason} -> {error, Reason}
end.
authz_module('built-in-database') ->
emqx_authz_mnesia;
authz_module(Type) ->
list_to_existing_atom("emqx_authz_" ++ atom_to_list(Type)).
@ -414,9 +343,20 @@ connector_module(postgresql) ->
connector_module(Type) ->
list_to_existing_atom("emqx_connector_" ++ atom_to_list(Type)).
%% @doc Coerce a binary (or atom) into an atom.
%% Prefers `binary_to_existing_atom/2' to avoid growing the atom table for
%% values we have already seen; falls back to creating the atom otherwise.
%% NOTE: creating atoms from external input is unbounded -- acceptable here
%% because inputs are hocon-validated source types, not arbitrary user data.
atom(B) when is_binary(B) ->
    try
        binary_to_existing_atom(B, utf8)
    catch
        %% BUGFIX: a bare `catch Pat ->' clause only matches class `throw',
        %% but binary_to_existing_atom/2 raises `error:badarg' for unknown
        %% atoms, so the fallback was unreachable. Match the error class.
        error:badarg -> binary_to_atom(B, utf8)
    end;
atom(A) when is_atom(A) -> A.
%% @doc Normalize an authz source (a config map, an atom, or a binary tag)
%% to its canonical type atom. Unknown input crashes with
%% {unknown_authz_source_type, Input}; unreachable for hocon-checked config.
type(#{type := Type}) -> type(Type);
type(#{<<"type">> := Type}) -> type(Type);
type(Type) when Type =:= file;
                Type =:= http;
                Type =:= mongodb;
                Type =:= mysql;
                Type =:= redis;
                Type =:= postgresql;
                Type =:= 'built-in-database' ->
    %% Already a known canonical atom -- pass through unchanged.
    Type;
type(<<"file">>) -> file;
type(<<"http">>) -> http;
type(<<"mongodb">>) -> mongodb;
type(<<"mysql">>) -> mysql;
type(<<"redis">>) -> redis;
type(<<"postgresql">>) -> postgresql;
type(<<"built-in-database">>) -> 'built-in-database';
type(Unknown) -> error({unknown_authz_source_type, Unknown}).

View File

@ -0,0 +1,659 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_authz_api_mnesia).
-behavior(minirest_api).
-include("emqx_authz.hrl").
-include_lib("emqx/include/logger.hrl").
-include_lib("stdlib/include/ms_transform.hrl").
-define(EXAMPLE_USERNAME, #{username => user1,
rules => [ #{topic => <<"test/toopic/1">>,
permission => <<"allow">>,
action => <<"publish">>
}
, #{topic => <<"test/toopic/2">>,
permission => <<"allow">>,
action => <<"subscribe">>
}
, #{topic => <<"eq test/#">>,
permission => <<"deny">>,
action => <<"all">>
}
]
}).
-define(EXAMPLE_CLIENTID, #{clientid => client1,
rules => [ #{topic => <<"test/toopic/1">>,
permission => <<"allow">>,
action => <<"publish">>
}
, #{topic => <<"test/toopic/2">>,
permission => <<"allow">>,
action => <<"subscribe">>
}
, #{topic => <<"eq test/#">>,
permission => <<"deny">>,
action => <<"all">>
}
]
}).
-define(EXAMPLE_ALL , #{rules => [ #{topic => <<"test/toopic/1">>,
permission => <<"allow">>,
action => <<"publish">>
}
, #{topic => <<"test/toopic/2">>,
permission => <<"allow">>,
action => <<"subscribe">>
}
, #{topic => <<"eq test/#">>,
permission => <<"deny">>,
action => <<"all">>
}
]
}).
-export([ api_spec/0
, purge/2
, users/2
, user/2
, clients/2
, client/2
, all/2
]).
%% @doc minirest callback: return {Endpoints, SchemaDefinitions} for the
%% built-in-database authorization REST API.
api_spec() ->
    Endpoints = [purge_api(),
                 users_api(),
                 user_api(),
                 clients_api(),
                 client_api(),
                 all_api()],
    {Endpoints, definitions()}.
definitions() ->
Rules = #{
type => array,
items => #{
type => object,
required => [topic, permission, action],
properties => #{
topic => #{
type => string,
example => <<"test/topic/1">>
},
permission => #{
type => string,
enum => [<<"allow">>, <<"deny">>],
example => <<"allow">>
},
action => #{
type => string,
enum => [<<"publish">>, <<"subscribe">>, <<"all">>],
example => <<"publish">>
}
}
}
},
Username = #{
type => object,
required => [username, rules],
properties => #{
username => #{
type => string,
example => <<"username">>
},
rules => minirest:ref(<<"rules">>)
}
},
Clientid = #{
type => object,
required => [clientid, rules],
properties => #{
clientid => #{
type => string,
example => <<"clientid">>
},
rules => minirest:ref(<<"rules">>)
}
},
ALL = #{
type => object,
required => [rules],
properties => #{
rules => minirest:ref(<<"rules">>)
}
},
[ #{<<"rules">> => Rules}
, #{<<"username">> => Username}
, #{<<"clientid">> => Clientid}
, #{<<"all">> => ALL}
].
users_api() ->
Metadata = #{
get => #{
description => "Show the list of record for username",
parameters => [
#{
name => page,
in => query,
required => false,
description => <<"Page Index">>,
schema => #{type => integer}
},
#{
name => limit,
in => query,
required => false,
description => <<"Page limit">>,
schema => #{type => integer}
}
],
responses => #{
<<"200">> => #{
description => <<"OK">>,
content => #{
'application/json' => #{
schema => #{
type => array,
items => minirest:ref(<<"username">>)
},
examples => #{
username => #{
summary => <<"Username">>,
value => jsx:encode([?EXAMPLE_USERNAME])
}
}
}
}
}
}
},
post => #{
description => "Add new records for username",
requestBody => #{
content => #{
'application/json' => #{
schema => #{
type => array,
items => #{
oneOf => [ minirest:ref(<<"username">>)
]
}
},
examples => #{
username => #{
summary => <<"Username">>,
value => jsx:encode([?EXAMPLE_USERNAME])
}
}
}
}
},
responses => #{
<<"204">> => #{description => <<"Created">>},
<<"400">> => emqx_mgmt_util:bad_request()
}
}
},
{"/authorization/sources/built-in-database/username", Metadata, users}.
clients_api() ->
Metadata = #{
get => #{
description => "Show the list of record for clientid",
parameters => [
#{
name => page,
in => query,
required => false,
description => <<"Page Index">>,
schema => #{type => integer}
},
#{
name => limit,
in => query,
required => false,
description => <<"Page limit">>,
schema => #{type => integer}
}
],
responses => #{
<<"200">> => #{
description => <<"OK">>,
content => #{
'application/json' => #{
schema => #{
type => array,
items => minirest:ref(<<"clientid">>)
},
examples => #{
clientid => #{
summary => <<"Clientid">>,
value => jsx:encode([?EXAMPLE_CLIENTID])
}
}
}
}
}
}
},
post => #{
description => "Add new records for clientid",
requestBody => #{
content => #{
'application/json' => #{
schema => #{
type => array,
items => #{
oneOf => [ minirest:ref(<<"clientid">>)
]
}
},
examples => #{
clientid => #{
summary => <<"Clientid">>,
value => jsx:encode([?EXAMPLE_CLIENTID])
}
}
}
}
},
responses => #{
<<"204">> => #{description => <<"Created">>},
<<"400">> => emqx_mgmt_util:bad_request()
}
}
},
{"/authorization/sources/built-in-database/clientid", Metadata, clients}.
user_api() ->
Metadata = #{
get => #{
description => "Get record info for username",
parameters => [
#{
name => username,
in => path,
schema => #{
type => string
},
required => true
}
],
responses => #{
<<"200">> => #{
description => <<"OK">>,
content => #{
'application/json' => #{
schema => minirest:ref(<<"username">>),
examples => #{
username => #{
summary => <<"Username">>,
value => jsx:encode(?EXAMPLE_USERNAME)
}
}
}
}
},
<<"404">> => emqx_mgmt_util:bad_request(<<"Not Found">>)
}
},
put => #{
description => "Set record for username",
parameters => [
#{
name => username,
in => path,
schema => #{
type => string
},
required => true
}
],
requestBody => #{
content => #{
'application/json' => #{
schema => minirest:ref(<<"username">>),
examples => #{
username => #{
summary => <<"Username">>,
value => jsx:encode(?EXAMPLE_USERNAME)
}
}
}
}
},
responses => #{
<<"204">> => #{description => <<"Updated">>},
<<"400">> => emqx_mgmt_util:bad_request()
}
},
delete => #{
description => "Delete one record for username",
parameters => [
#{
name => username,
in => path,
schema => #{
type => string
},
required => true
}
],
responses => #{
<<"204">> => #{description => <<"No Content">>},
<<"400">> => emqx_mgmt_util:bad_request()
}
}
},
{"/authorization/sources/built-in-database/username/:username", Metadata, user}.
client_api() ->
Metadata = #{
get => #{
description => "Get record info for clientid",
parameters => [
#{
name => clientid,
in => path,
schema => #{
type => string
},
required => true
}
],
responses => #{
<<"200">> => #{
description => <<"OK">>,
content => #{
'application/json' => #{
schema => minirest:ref(<<"clientid">>),
examples => #{
clientid => #{
summary => <<"Clientid">>,
value => jsx:encode(?EXAMPLE_CLIENTID)
}
}
}
}
},
<<"404">> => emqx_mgmt_util:bad_request(<<"Not Found">>)
}
},
put => #{
description => "Set record for clientid",
parameters => [
#{
name => clientid,
in => path,
schema => #{
type => string
},
required => true
}
],
requestBody => #{
content => #{
'application/json' => #{
schema => minirest:ref(<<"clientid">>),
examples => #{
clientid => #{
summary => <<"Clientid">>,
value => jsx:encode(?EXAMPLE_CLIENTID)
}
}
}
}
},
responses => #{
<<"204">> => #{description => <<"Updated">>},
<<"400">> => emqx_mgmt_util:bad_request()
}
},
delete => #{
description => "Delete one record for clientid",
parameters => [
#{
name => clientid,
in => path,
schema => #{
type => string
},
required => true
}
],
responses => #{
<<"204">> => #{description => <<"No Content">>},
<<"400">> => emqx_mgmt_util:bad_request()
}
}
},
{"/authorization/sources/built-in-database/clientid/:clientid", Metadata, client}.
all_api() ->
Metadata = #{
get => #{
description => "Show the list of rules for all",
responses => #{
<<"200">> => #{
description => <<"OK">>,
content => #{
'application/json' => #{
schema => minirest:ref(<<"clientid">>),
examples => #{
clientid => #{
summary => <<"All">>,
value => jsx:encode(?EXAMPLE_ALL)
}
}
}
}
}
}
},
put => #{
description => "Set the list of rules for all",
requestBody => #{
content => #{
'application/json' => #{
schema => minirest:ref(<<"all">>),
examples => #{
all => #{
summary => <<"All">>,
value => jsx:encode(?EXAMPLE_ALL)
}
}
}
}
},
responses => #{
<<"204">> => #{description => <<"Created">>},
<<"400">> => emqx_mgmt_util:bad_request()
}
}
},
{"/authorization/sources/built-in-database/all", Metadata, all}.
%% @doc OpenAPI metadata for DELETE /authorization/sources/built-in-database/purge-all.
purge_api() ->
    Responses = #{<<"204">> => #{description => <<"No Content">>},
                  <<"400">> => emqx_mgmt_util:bad_request()},
    Metadata = #{delete => #{description => "Purge all records",
                             responses => Responses}},
    {"/authorization/sources/built-in-database/purge-all", Metadata, purge}.
%% @doc REST handler for /authorization/sources/built-in-database/username.
%% GET: list username-keyed ACL records, with optional page/limit pagination.
%% POST: bulk-insert username ACL records (one dirty mnesia write per entry).
users(get, #{query_string := Qs}) ->
    %% Match head selects only rows keyed {?ACL_TABLE_USERNAME, _}; clientid
    %% and catch-all rows are excluded by the tuple shape.
    MatchSpec = ets:fun2ms(
                    fun({?ACL_TABLE, {?ACL_TABLE_USERNAME, Username}, Rules}) ->
                        [{username, Username}, {rules, Rules}]
                    end),
    %% Render internal {Permission, Action, Topic} tuples as JSON-ready maps.
    Format = fun ([{username, Username}, {rules, Rules}]) ->
                 #{username => Username,
                   rules => [ #{topic => Topic,
                                action => Action,
                                permission => Permission
                               } || {Permission, Action, Topic} <- Rules]
                  }
             end,
    case Qs of
        #{<<"limit">> := _, <<"page">> := _} = Page ->
            %% Both page and limit given: delegate pagination to mgmt API.
            {200, emqx_mgmt_api:paginate(?ACL_TABLE, MatchSpec, Page, Format)};
        #{<<"limit">> := Limit} ->
            %% Limit only: first chunk of the select continuation.
            case ets:select(?ACL_TABLE, MatchSpec, binary_to_integer(Limit)) of
                {Rows, _Continuation} -> {200, [Format(Row) || Row <- Rows ]};
                '$end_of_table' -> {404, #{code => <<"NOT_FOUND">>, message => <<"Not Found">>}}
            end;
        _ ->
            %% No pagination parameters: return everything.
            {200, [Format(Row) || Row <- ets:select(?ACL_TABLE, MatchSpec)]}
    end;
users(post, #{body := Body}) when is_list(Body) ->
    %% NOTE(review): dirty writes, no transaction -- concurrent posts for the
    %% same username last-write-wins; presumably acceptable here, confirm.
    lists:foreach(fun(#{<<"username">> := Username, <<"rules">> := Rules}) ->
                      ekka_mnesia:dirty_write(#emqx_acl{
                                                  who = {?ACL_TABLE_USERNAME, Username},
                                                  rules = format_rules(Rules)
                                                 })
                  end, Body),
    {204}.
%% @doc REST handler for /authorization/sources/built-in-database/clientid.
%% GET: list clientid-keyed ACL records, with optional page/limit pagination.
%% POST: bulk-insert clientid ACL records (one dirty mnesia write per entry).
clients(get, #{query_string := Qs}) ->
    %% Match head selects only rows keyed {?ACL_TABLE_CLIENTID, _}.
    MatchSpec = ets:fun2ms(
                    fun({?ACL_TABLE, {?ACL_TABLE_CLIENTID, Clientid}, Rules}) ->
                        [{clientid, Clientid}, {rules, Rules}]
                    end),
    %% Render internal {Permission, Action, Topic} tuples as JSON-ready maps.
    Format = fun ([{clientid, Clientid}, {rules, Rules}]) ->
                 #{clientid => Clientid,
                   rules => [ #{topic => Topic,
                                action => Action,
                                permission => Permission
                               } || {Permission, Action, Topic} <- Rules]
                  }
             end,
    case Qs of
        #{<<"limit">> := _, <<"page">> := _} = Page ->
            %% Both page and limit given: delegate pagination to mgmt API.
            {200, emqx_mgmt_api:paginate(?ACL_TABLE, MatchSpec, Page, Format)};
        #{<<"limit">> := Limit} ->
            %% Limit only: first chunk of the select continuation.
            case ets:select(?ACL_TABLE, MatchSpec, binary_to_integer(Limit)) of
                {Rows, _Continuation} -> {200, [Format(Row) || Row <- Rows ]};
                '$end_of_table' -> {404, #{code => <<"NOT_FOUND">>, message => <<"Not Found">>}}
            end;
        _ ->
            %% No pagination parameters: return everything.
            {200, [Format(Row) || Row <- ets:select(?ACL_TABLE, MatchSpec)]}
    end;
clients(post, #{body := Body}) when is_list(Body) ->
    %% Same write semantics as users/2: dirty write per posted record.
    lists:foreach(fun(#{<<"clientid">> := Clientid, <<"rules">> := Rules}) ->
                      ekka_mnesia:dirty_write(#emqx_acl{
                                                  who = {?ACL_TABLE_CLIENTID, Clientid},
                                                  rules = format_rules(Rules)
                                                 })
                  end, Body),
    {204}.
%% @doc REST handler for a single username record
%% (/authorization/sources/built-in-database/username/:username).
user(get, #{bindings := #{username := Username}}) ->
    case mnesia:dirty_read(?ACL_TABLE, {?ACL_TABLE_USERNAME, Username}) of
        [] -> {404, #{code => <<"NOT_FOUND">>, message => <<"Not Found">>}};
        [#emqx_acl{who = {?ACL_TABLE_USERNAME, Username}, rules = Rules}] ->
            {200, #{username => Username,
                    rules => [ #{topic => Topic,
                                 action => Action,
                                 permission => Permission
                                } || {Permission, Action, Topic} <- Rules]}
            }
    end;
%% PUT: the same Username variable is bound in both the path bindings and the
%% request body, so a body whose username differs from the path fails to
%% match this clause (and the request is rejected upstream).
user(put, #{bindings := #{username := Username},
            body := #{<<"username">> := Username, <<"rules">> := Rules}}) ->
    ekka_mnesia:dirty_write(#emqx_acl{
                                who = {?ACL_TABLE_USERNAME, Username},
                                rules = format_rules(Rules)
                               }),
    {204};
user(delete, #{bindings := #{username := Username}}) ->
    %% Delete is idempotent: removing an absent key still returns 204.
    ekka_mnesia:dirty_delete({?ACL_TABLE, {?ACL_TABLE_USERNAME, Username}}),
    {204}.
%% @doc REST handler for a single clientid record
%% (/authorization/sources/built-in-database/clientid/:clientid).
client(get, #{bindings := #{clientid := Clientid}}) ->
    case mnesia:dirty_read(?ACL_TABLE, {?ACL_TABLE_CLIENTID, Clientid}) of
        [] -> {404, #{code => <<"NOT_FOUND">>, message => <<"Not Found">>}};
        [#emqx_acl{who = {?ACL_TABLE_CLIENTID, Clientid}, rules = Rules}] ->
            {200, #{clientid => Clientid,
                    rules => [ #{topic => Topic,
                                 action => Action,
                                 permission => Permission
                                } || {Permission, Action, Topic} <- Rules]}
            }
    end;
%% PUT: Clientid is bound in both path and body, enforcing that they agree
%% (a mismatching body fails to match this clause).
client(put, #{bindings := #{clientid := Clientid},
              body := #{<<"clientid">> := Clientid, <<"rules">> := Rules}}) ->
    ekka_mnesia:dirty_write(#emqx_acl{
                                who = {?ACL_TABLE_CLIENTID, Clientid},
                                rules = format_rules(Rules)
                               }),
    {204};
client(delete, #{bindings := #{clientid := Clientid}}) ->
    %% Delete is idempotent: removing an absent key still returns 204.
    ekka_mnesia:dirty_delete({?ACL_TABLE, {?ACL_TABLE_CLIENTID, Clientid}}),
    {204}.
%% @doc REST handler for the catch-all rule set that applies to every client
%% (/authorization/sources/built-in-database/all).
all(get, _) ->
    case mnesia:dirty_read(?ACL_TABLE, ?ACL_TABLE_ALL) of
        [] ->
            %% No record yet: report an empty rule list rather than 404,
            %% since the "all" resource conceptually always exists.
            {200, #{rules => []}};
        [#emqx_acl{who = ?ACL_TABLE_ALL, rules = Rules}] ->
            {200, #{rules => [ #{topic => Topic,
                                 action => Action,
                                 permission => Permission
                                } || {Permission, Action, Topic} <- Rules]}
            }
    end;
all(put, #{body := #{<<"rules">> := Rules}}) ->
    %% Replaces the whole catch-all rule list in one dirty write.
    ekka_mnesia:dirty_write(#emqx_acl{
                                who = ?ACL_TABLE_ALL,
                                rules = format_rules(Rules)
                               }),
    {204}.
%% @doc Delete every record in the ACL table. Only allowed while the
%% built-in-database source is disabled, so no live authorizer reads the
%% table mid-purge.
%% NOTE(review): get_raw_source/1 elsewhere returns maps matched with binary
%% keys (<<"type">>), but this clause matches the atom key `enable' -- if raw
%% config keeps binary keys, the purge branch may never match. Verify.
purge(delete, _) ->
    case emqx_authz_api_sources:get_raw_source(<<"built-in-database">>) of
        [#{enable := false}] ->
            ok = lists:foreach(fun(Key) ->
                                   ok = ekka_mnesia:dirty_delete(?ACL_TABLE, Key)
                               end, mnesia:dirty_all_keys(?ACL_TABLE)),
            {204};
        _ ->
            {400, #{code => <<"BAD_REQUEST">>,
                    message => <<"'built-in-database' type source must be disabled before purge.">>}}
    end.
%% @doc Convert JSON-decoded rule maps (binary keys/values) into the internal
%% {Permission, Action, Topic} tuple form stored in #emqx_acl{}.
%% Order is preserved. A rule with an invalid action/permission makes the
%% fun clause fail (function_clause), surfacing as a 500 -- intentional:
%% input is expected to be schema-validated upstream.
format_rules(Rules) when is_list(Rules) ->
    %% Was a lists:foldl/3 appending with `Acc ++ [X]' (O(n^2)); this is a
    %% plain 1:1 mapping, so lists:map/2 is both clearer and O(n).
    lists:map(fun(#{<<"topic">> := Topic,
                    <<"action">> := Action,
                    <<"permission">> := Permission
                   }) when ?PUBSUB(Action)
                           andalso ?ALLOW_DENY(Permission) ->
                  {atom(Permission), atom(Action), Topic}
              end, Rules).
%% @doc Coerce a binary (or atom) into an atom, preferring existing atoms to
%% limit atom-table growth.
%% NOTE: the fallback creates new atoms from request data; callers guard the
%% values with ?PUBSUB/?ALLOW_DENY first, which bounds the input set.
atom(B) when is_binary(B) ->
    try
        binary_to_existing_atom(B, utf8)
    catch
        %% BUGFIX: a bare `catch Pat ->' clause only matches class `throw',
        %% but binary_to_existing_atom/2 raises `error:badarg', so the
        %% fallback was never reached. Match the error class explicitly.
        error:badarg -> binary_to_atom(B, utf8)
    end;
atom(A) when is_atom(A) -> A.

View File

@ -19,29 +19,9 @@
-export([definitions/0]).
definitions() ->
RetruenedSources = #{
allOf => [ #{type => object,
properties => #{
annotations => #{
type => object,
required => [status],
properties => #{
id => #{
type => string
},
status => #{
type => string,
example => <<"healthy">>
}
}
}
}
}
, minirest:ref(<<"sources">>)
]
},
Sources = #{
oneOf => [ minirest:ref(<<"http">>)
, minirest:ref(<<"built-in-database">>)
, minirest:ref(<<"mongo_single">>)
, minirest:ref(<<"mongo_rs">>)
, minirest:ref(<<"mongo_sharded">>)
@ -100,9 +80,9 @@ definitions() ->
},
headers => #{type => object},
body => #{type => object},
connect_timeout => #{type => integer},
connect_timeout => #{type => string},
max_retries => #{type => integer},
retry_interval => #{type => integer},
retry_interval => #{type => string},
pool_type => #{
type => string,
enum => [<<"random">>, <<"hash">>],
@ -154,8 +134,8 @@ definitions() ->
properties => #{
pool_size => #{type => integer},
max_overflow => #{type => integer},
overflow_ttl => #{type => integer},
overflow_check_period => #{type => integer},
overflow_ttl => #{type => string},
overflow_check_period => #{type => string},
local_threshold_ms => #{type => integer},
connect_timeout_ms => #{type => integer},
socket_timeout_ms => #{type => integer},
@ -212,8 +192,8 @@ definitions() ->
properties => #{
pool_size => #{type => integer},
max_overflow => #{type => integer},
overflow_ttl => #{type => integer},
overflow_check_period => #{type => integer},
overflow_ttl => #{type => string},
overflow_check_period => #{type => string},
local_threshold_ms => #{type => integer},
connect_timeout_ms => #{type => integer},
socket_timeout_ms => #{type => integer},
@ -268,8 +248,8 @@ definitions() ->
properties => #{
pool_size => #{type => integer},
max_overflow => #{type => integer},
overflow_ttl => #{type => integer},
overflow_check_period => #{type => integer},
overflow_ttl => #{type => string},
overflow_check_period => #{type => string},
local_threshold_ms => #{type => integer},
connect_timeout_ms => #{type => integer},
socket_timeout_ms => #{type => integer},
@ -467,6 +447,21 @@ definitions() ->
ssl => minirest:ref(<<"ssl">>)
}
},
Mnesia = #{
type => object,
required => [type, enable],
properties => #{
type => #{
type => string,
enum => [<<"redis">>],
example => <<"redis">>
},
enable => #{
type => boolean,
example => true
}
}
},
File = #{
type => object,
required => [type, enable, rules],
@ -493,10 +488,10 @@ definitions() ->
}
}
},
[ #{<<"returned_sources">> => RetruenedSources}
, #{<<"sources">> => Sources}
[ #{<<"sources">> => Sources}
, #{<<"ssl">> => SSL}
, #{<<"http">> => HTTP}
, #{<<"built-in-database">> => Mnesia}
, #{<<"mongo_single">> => MongoSingle}
, #{<<"mongo_rs">> => MongoRs}
, #{<<"mongo_sharded">> => MongoSharded}

View File

@ -35,19 +35,16 @@
rules => <<"{allow,{username,\"^dashboard?\"},subscribe,[\"$SYS/#\"]}.\n{allow,{ipaddr,\"127.0.0.1\"},all,[\"$SYS/#\",\"#\"]}.">>
}).
-define(EXAMPLE_RETURNED_REDIS,
maps:put(annotations, #{status => healthy}, ?EXAMPLE_REDIS)
).
-define(EXAMPLE_RETURNED_FILE,
maps:put(annotations, #{status => healthy}, ?EXAMPLE_FILE)
).
-define(EXAMPLE_RETURNED,
#{sources => [ ?EXAMPLE_RETURNED_REDIS
, ?EXAMPLE_RETURNED_FILE
#{sources => [ ?EXAMPLE_REDIS
, ?EXAMPLE_FILE
]
}).
-export([ get_raw_sources/0
, get_raw_source/1
]).
-export([ api_spec/0
, sources/2
, source/2
@ -76,7 +73,7 @@ sources_api() ->
required => [sources],
properties => #{sources => #{
type => array,
items => minirest:ref(<<"returned_sources">>)
items => minirest:ref(<<"sources">>)
}
}
},
@ -122,7 +119,7 @@ sources_api() ->
'application/json' => #{
schema => #{
type => array,
items => minirest:ref(<<"returned_sources">>)
items => minirest:ref(<<"sources">>)
},
examples => #{
redis => #{
@ -154,7 +151,15 @@ source_api() ->
name => type,
in => path,
schema => #{
type => string
type => string,
enum => [ <<"file">>
, <<"http">>
, <<"mongodb">>
, <<"mysql">>
, <<"postgresql">>
, <<"redis">>
, <<"built-in-database">>
]
},
required => true
}
@ -164,15 +169,15 @@ source_api() ->
description => <<"OK">>,
content => #{
'application/json' => #{
schema => minirest:ref(<<"returned_sources">>),
schema => minirest:ref(<<"sources">>),
examples => #{
redis => #{
summary => <<"Redis">>,
value => jsx:encode(?EXAMPLE_RETURNED_REDIS)
value => jsx:encode(?EXAMPLE_REDIS)
},
file => #{
summary => <<"File">>,
value => jsx:encode(?EXAMPLE_RETURNED_FILE)
value => jsx:encode(?EXAMPLE_FILE)
}
}
}
@ -188,7 +193,15 @@ source_api() ->
name => type,
in => path,
schema => #{
type => string
type => string,
enum => [ <<"file">>
, <<"http">>
, <<"mongodb">>
, <<"mysql">>
, <<"postgresql">>
, <<"redis">>
, <<"built-in-database">>
]
},
required => true
}
@ -223,7 +236,15 @@ source_api() ->
name => type,
in => path,
schema => #{
type => string
type => string,
enum => [ <<"file">>
, <<"http">>
, <<"mongodb">>
, <<"mysql">>
, <<"postgresql">>
, <<"redis">>
, <<"built-in-database">>
]
},
required => true
}
@ -245,7 +266,15 @@ move_source_api() ->
name => type,
in => path,
schema => #{
type => string
type => string,
enum => [ <<"file">>
, <<"http">>
, <<"mongodb">>
, <<"mysql">>
, <<"postgresql">>
, <<"redis">>
, <<"built-in-database">>
]
},
required => true
}
@ -297,109 +326,69 @@ move_source_api() ->
{"/authorization/sources/:type/move", Metadata, move_source}.
sources(get, _) ->
Sources = lists:foldl(fun (#{type := file, enable := Enable, path := Path}, AccIn) ->
Sources = lists:foldl(fun (#{<<"type">> := <<"file">>, <<"enable">> := Enable, <<"path">> := Path}, AccIn) ->
case file:read_file(Path) of
{ok, Rules} ->
lists:append(AccIn, [#{type => file,
enable => Enable,
rules => Rules,
annotations => #{status => healthy}
rules => Rules
}]);
{error, _} ->
lists:append(AccIn, [#{type => file,
enable => Enable,
rules => <<"">>,
annotations => #{status => unhealthy}
rules => <<"">>
}])
end;
(#{enable := false} = Source, AccIn) ->
lists:append(AccIn, [Source#{annotations => #{status => unhealthy}}]);
(#{type := _Type, annotations := #{id := Id}} = Source, AccIn) ->
NSource0 = case maps:get(server, Source, undefined) of
undefined -> Source;
Server ->
Source#{server => emqx_connector_schema_lib:ip_port_to_string(Server)}
end,
NSource1 = case maps:get(servers, Source, undefined) of
undefined -> NSource0;
Servers ->
NSource0#{servers => [emqx_connector_schema_lib:ip_port_to_string(Server) || Server <- Servers]}
end,
NSource2 = case emqx_resource:health_check(Id) of
ok ->
NSource1#{annotations => #{status => healthy}};
_ ->
NSource1#{annotations => #{status => unhealthy}}
end,
lists:append(AccIn, [read_cert(NSource2)]);
(Source, AccIn) ->
lists:append(AccIn, [Source#{annotations => #{status => healthy}}])
end, [], emqx_authz:lookup()),
lists:append(AccIn, [read_cert(Source)])
end, [], get_raw_sources()),
{200, #{sources => Sources}};
sources(post, #{body := #{<<"type">> := <<"file">>, <<"rules">> := Rules}}) ->
{ok, Filename} = write_file(filename:join([emqx:get_config([node, data_dir]), "acl.conf"]), Rules),
update_config(head, [#{type => file, enable => true, path => Filename}]);
update_config(?CMD_PREPEND, [#{<<"type">> => <<"file">>, <<"enable">> => true, <<"path">> => Filename}]);
sources(post, #{body := Body}) when is_map(Body) ->
update_config(head, [write_cert(Body)]);
update_config(?CMD_PREPEND, [write_cert(Body)]);
sources(put, #{body := Body}) when is_list(Body) ->
NBody = [ begin
case Source of
#{<<"type">> := <<"file">>, <<"rules">> := Rules, <<"enable">> := Enable} ->
{ok, Filename} = write_file(filename:join([emqx:get_config([node, data_dir]), "acl.conf"]), Rules),
#{type => file, enable => Enable, path => Filename};
#{<<"type">> => <<"file">>, <<"enable">> => Enable, <<"path">> => Filename};
_ -> write_cert(Source)
end
end || Source <- Body],
update_config(replace, NBody).
update_config(?CMD_REPLCAE, NBody).
source(get, #{bindings := #{type := Type}}) ->
case emqx_authz:lookup(Type) of
{error, Reason} -> {404, #{message => atom_to_binary(Reason)}};
#{type := file, enable := Enable, path := Path}->
case get_raw_source(Type) of
[] -> {404, #{message => <<"Not found ", Type/binary>>}};
[#{<<"type">> := <<"file">>, <<"enable">> := Enable, <<"path">> := Path}] ->
case file:read_file(Path) of
{ok, Rules} ->
{200, #{type => file,
enable => Enable,
rules => Rules,
annotations => #{status => healthy}
rules => Rules
}
};
{error, Reason} ->
{400, #{code => <<"BAD_REQUEST">>,
message => atom_to_binary(Reason)}}
message => bin(Reason)}}
end;
#{enable := false} = Source -> {200, Source#{annotations => #{status => unhealthy}}};
#{annotations := #{id := Id}} = Source ->
NSource0 = case maps:get(server, Source, undefined) of
undefined -> Source;
Server ->
Source#{server => emqx_connector_schema_lib:ip_port_to_string(Server)}
end,
NSource1 = case maps:get(servers, Source, undefined) of
undefined -> NSource0;
Servers ->
NSource0#{servers => [emqx_connector_schema_lib:ip_port_to_string(Server) || Server <- Servers]}
end,
NSource2 = case emqx_resource:health_check(Id) of
ok ->
NSource1#{annotations => #{status => healthy}};
_ ->
NSource1#{annotations => #{status => unhealthy}}
end,
{200, read_cert(NSource2)}
[Source] ->
{200, read_cert(Source)}
end;
source(put, #{bindings := #{type := <<"file">>}, body := #{<<"type">> := <<"file">>, <<"rules">> := Rules, <<"enable">> := Enable}}) ->
{ok, Filename} = write_file(maps:get(path, emqx_authz:lookup(file), ""), Rules),
case emqx_authz:update({replace_once, file}, #{type => file, enable => Enable, path => Filename}) of
case emqx_authz:update({?CMD_REPLCAE, file}, #{<<"type">> => file, <<"enable">> => Enable, <<"path">> => Filename}) of
{ok, _} -> {204};
{error, Reason} ->
{400, #{code => <<"BAD_REQUEST">>,
message => atom_to_binary(Reason)}}
message => bin(Reason)}}
end;
source(put, #{bindings := #{type := Type}, body := Body}) when is_map(Body) ->
update_config({replace_once, Type}, write_cert(Body));
update_config({?CMD_REPLCAE, Type}, write_cert(Body));
source(delete, #{bindings := #{type := Type}}) ->
update_config({delete_once, Type}, #{}).
update_config({?CMD_DELETE, Type}, #{}).
move_source(post, #{bindings := #{type := Type}, body := #{<<"position">> := Position}}) ->
case emqx_authz:move(Type, Position) of
@ -409,39 +398,51 @@ move_source(post, #{bindings := #{type := Type}, body := #{<<"position">> := Pos
message => <<"source ", Type/binary, " not found">>}};
{error, Reason} ->
{400, #{code => <<"BAD_REQUEST">>,
message => atom_to_binary(Reason)}}
message => bin(Reason)}}
end.
get_raw_sources() ->
RawSources = emqx:get_raw_config([authorization, sources]),
Schema = #{roots => emqx_authz_schema:fields("authorization"), fields => #{}},
Conf = #{<<"sources">> => RawSources},
#{<<"sources">> := Sources} = hocon_schema:check_plain(Schema, Conf, #{only_fill_defaults => true}),
Sources.
get_raw_source(Type) ->
lists:filter(fun (#{<<"type">> := T}) ->
T =:= Type
end, get_raw_sources()).
update_config(Cmd, Sources) ->
case emqx_authz:update(Cmd, Sources) of
{ok, _} -> {204};
{error, {pre_config_update, emqx_authz, Reason}} ->
{400, #{code => <<"BAD_REQUEST">>,
message => atom_to_binary(Reason)}};
message => bin(Reason)}};
{error, {post_config_update, emqx_authz, Reason}} ->
{400, #{code => <<"BAD_REQUEST">>,
message => atom_to_binary(Reason)}};
message => bin(Reason)}};
{error, Reason} ->
{400, #{code => <<"BAD_REQUEST">>,
message => atom_to_binary(Reason)}}
message => bin(Reason)}}
end.
read_cert(#{ssl := #{enable := true} = SSL} = Source) ->
CaCert = case file:read_file(maps:get(cacertfile, SSL, "")) of
read_cert(#{<<"ssl">> := #{<<"enable">> := true} = SSL} = Source) ->
CaCert = case file:read_file(maps:get(<<"cacertfile">>, SSL, "")) of
{ok, CaCert0} -> CaCert0;
_ -> ""
end,
Cert = case file:read_file(maps:get(certfile, SSL, "")) of
Cert = case file:read_file(maps:get(<<"certfile">>, SSL, "")) of
{ok, Cert0} -> Cert0;
_ -> ""
end,
Key = case file:read_file(maps:get(keyfile, SSL, "")) of
Key = case file:read_file(maps:get(<<"keyfile">>, SSL, "")) of
{ok, Key0} -> Key0;
_ -> ""
end,
Source#{ssl => SSL#{cacertfile => CaCert,
certfile => Cert,
keyfile => Key
Source#{<<"ssl">> => SSL#{<<"cacertfile">> => CaCert,
<<"certfile">> => Cert,
<<"keyfile">> => Key
}
};
read_cert(Source) -> Source.
@ -494,3 +495,6 @@ do_write_file(Filename, Bytes) ->
?LOG(error, "Write File ~p Error: ~p", [Filename, Reason]),
error(Reason)
end.
bin(Term) ->
erlang:iolist_to_binary(io_lib:format("~p", [Term])).

View File

@ -7,9 +7,12 @@
-behaviour(application).
-include("emqx_authz.hrl").
-export([start/2, stop/1]).
%% @doc Application entry point. Blocks until the ACL mnesia shard is
%% available locally (infinite wait -- startup is gated on replication),
%% then starts the supervisor and registers the authz hooks via
%% emqx_authz:init/0.
start(_StartType, _StartArgs) ->
    ok = ekka_rlog:wait_for_shards([?ACL_SHARDED], infinity),
    {ok, Sup} = emqx_authz_sup:start_link(),
    ok = emqx_authz:init(),
    {ok, Sup}.

View File

@ -0,0 +1,76 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_authz_mnesia).
-include("emqx_authz.hrl").
-include_lib("emqx/include/emqx.hrl").
-include_lib("emqx/include/logger.hrl").
%% AuthZ Callbacks
-export([ mnesia/1
, authorize/4
, description/0
]).
-ifdef(TEST).
-compile(export_all).
-compile(nowarn_export_all).
-endif.
-boot_mnesia({mnesia, [boot]}).
-copy_mnesia({mnesia, [copy]}).
-spec(mnesia(boot | copy) -> ok).
%% @doc Mnesia lifecycle callback (invoked via -boot_mnesia/-copy_mnesia).
%% boot: create the ACL table -- ordered_set keyed by the `who' term,
%% disc-backed on this node, with read_concurrency enabled since authorize
%% calls vastly outnumber writes.
%% copy: replicate the table to this node as disc_copies.
mnesia(boot) ->
    ok = ekka_mnesia:create_table(?ACL_TABLE, [
            {type, ordered_set},
            {rlog_shard, ?ACL_SHARDED},
            {disc_copies, [node()]},
            {attributes, record_info(fields, ?ACL_TABLE)},
            {storage_properties, [{ets, [{read_concurrency, true}]}]}]);
mnesia(copy) ->
    ok = ekka_mnesia:copy_table(?ACL_TABLE, disc_copies).
%% @doc Human-readable name of this authz backend, shown in source listings.
description() ->
    "AuthZ with Mnesia".
%% @doc AuthZ callback for the 'built-in-database' source.
%% Builds one rule list by concatenating, in precedence order:
%% clientid-specific rules, then username-specific rules, then the
%% catch-all rules -- and evaluates them first-match-wins.
authorize(#{username := Username,
            clientid := Clientid
           } = Client, PubSub, Topic, #{type := 'built-in-database'}) ->
    %% Each dirty_read yields [] or a single record (the key is unique);
    %% the is_list guard protects against a malformed rules field.
    Rules = case mnesia:dirty_read(?ACL_TABLE, {?ACL_TABLE_CLIENTID, Clientid}) of
                [] -> [];
                [#emqx_acl{rules = Rules0}] when is_list(Rules0) -> Rules0
            end
         ++ case mnesia:dirty_read(?ACL_TABLE, {?ACL_TABLE_USERNAME, Username}) of
                [] -> [];
                [#emqx_acl{rules = Rules1}] when is_list(Rules1) -> Rules1
            end
         ++ case mnesia:dirty_read(?ACL_TABLE, ?ACL_TABLE_ALL) of
                [] -> [];
                [#emqx_acl{rules = Rules2}] when is_list(Rules2) -> Rules2
            end,
    do_authorize(Client, PubSub, Topic, Rules).
%% Walk the rule list in order; the first matching rule decides the outcome,
%% and an exhausted list means no opinion (nomatch).
do_authorize(_Client, _PubSub, _Topic, []) ->
    nomatch;
do_authorize(Client, PubSub, Topic, [{Permission, Action, TopicFilter} | Rest]) ->
    %% Stored rules have no `who' part (the table key already scoped them),
    %% so compile with `all' as the subject.
    Compiled = emqx_authz_rule:compile({Permission, all, Action, [TopicFilter]}),
    case emqx_authz_rule:match(Client, PubSub, Topic, Compiled) of
        {matched, Permission} = Matched -> Matched;
        nomatch -> do_authorize(Client, PubSub, Topic, Rest)
    end.

View File

@ -69,7 +69,6 @@ do_authorize(Client, PubSub, Topic, Columns, [Row | Tail]) ->
nomatch -> do_authorize(Client, PubSub, Topic, Columns, Tail)
end.
format_result(Columns, Row) ->
Permission = lists:nth(index(<<"permission">>, Columns), Row),
Action = lists:nth(index(<<"action">>, Columns), Row),

View File

@ -32,16 +32,21 @@
-export_type([rule/0]).
compile({Permission, all}) when ?ALLOW_DENY(Permission) -> {Permission, all, all, [compile_topic(<<"#">>)]};
compile({Permission, Who, Action, TopicFilters}) when ?ALLOW_DENY(Permission), ?PUBSUB(Action), is_list(TopicFilters) ->
{atom(Permission), compile_who(Who), atom(Action), [compile_topic(Topic) || Topic <- TopicFilters]}.
compile_who(all) -> all;
compile_who({username, Username}) ->
compile_who({user, Username}) -> compile_who({username, Username});
compile_who({username, {re, Username}}) ->
{ok, MP} = re:compile(bin(Username)),
{username, MP};
compile_who({clientid, Clientid}) ->
compile_who({username, Username}) -> {username, {eq, bin(Username)}};
compile_who({client, Clientid}) -> compile_who({clientid, Clientid});
compile_who({clientid, {re, Clientid}}) ->
{ok, MP} = re:compile(bin(Clientid)),
{clientid, MP};
compile_who({clientid, Clientid}) -> {clientid, {eq, bin(Clientid)}};
compile_who({ipaddr, CIDR}) ->
{ipaddr, esockd_cidr:parse(CIDR, true)};
compile_who({ipaddrs, CIDRs}) ->
@ -102,14 +107,16 @@ match_action(_, all) -> true;
match_action(_, _) -> false.
match_who(_, all) -> true;
match_who(#{username := undefined}, {username, _MP}) ->
match_who(#{username := undefined}, {username, _}) ->
false;
match_who(#{username := Username}, {username, MP}) ->
match_who(#{username := Username}, {username, {eq, Username}}) -> true;
match_who(#{username := Username}, {username, {re_pattern, _, _, _, _} = MP}) ->
case re:run(Username, MP) of
{match, _} -> true;
_ -> false
end;
match_who(#{clientid := Clientid}, {clientid, MP}) ->
match_who(#{clientid := Clientid}, {clientid, {eq, Clientid}}) -> true;
match_who(#{clientid := Clientid}, {clientid, {re_pattern, _, _, _, _} = MP}) ->
case re:run(Clientid, MP) of
{match, _} -> true;
_ -> false

View File

@ -18,6 +18,8 @@
, fields/1
]).
-import(emqx_schema, [mk_duration/2]).
namespace() -> authz.
%% @doc authorization schema is not exported
@ -29,6 +31,7 @@ fields("authorization") ->
[ hoconsc:ref(?MODULE, file)
, hoconsc:ref(?MODULE, http_get)
, hoconsc:ref(?MODULE, http_post)
, hoconsc:ref(?MODULE, mnesia)
, hoconsc:ref(?MODULE, mongo_single)
, hoconsc:ref(?MODULE, mongo_rs)
, hoconsc:ref(?MODULE, mongo_sharded)
@ -45,11 +48,7 @@ fields(file) ->
, {enable, #{type => boolean(),
default => true}}
, {path, #{type => string(),
validator => fun(S) -> case filelib:is_file(S) of
true -> ok;
_ -> {error, "File does not exist"}
end
end
desc => "Path to the file which contains the ACL rules."
}}
];
fields(http_get) ->
@ -77,7 +76,7 @@ fields(http_get) ->
end
}
}
, {request_timeout, #{type => timeout(), default => 30000 }}
, {request_timeout, mk_duration("request timeout", #{default => "30s"})}
] ++ proplists:delete(base_url, emqx_connector_http:fields(config));
fields(http_post) ->
[ {type, #{type => http}}
@ -107,12 +106,17 @@ fields(http_post) ->
end
}
}
, {request_timeout, #{type => timeout(), default => 30000 }}
, {request_timeout, mk_duration("request timeout", #{default => "30s"})}
, {body, #{type => map(),
nullable => true
}
}
] ++ proplists:delete(base_url, emqx_connector_http:fields(config));
fields(mnesia) ->
[ {type, #{type => 'built-in-database'}}
, {enable, #{type => boolean(),
default => true}}
];
fields(mongo_single) ->
[ {collection, #{type => atom()}}
, {selector, #{type => map()}}

View File

@ -50,14 +50,14 @@ init_per_suite(Config) ->
Config.
end_per_suite(_Config) ->
{ok, _} = emqx_authz:update(replace, []),
{ok, _} = emqx_authz:update(?CMD_REPLCAE, []),
emqx_ct_helpers:stop_apps([emqx_authz, emqx_resource]),
meck:unload(emqx_resource),
meck:unload(emqx_schema),
ok.
init_per_testcase(_, Config) ->
{ok, _} = emqx_authz:update(replace, []),
{ok, _} = emqx_authz:update(?CMD_REPLCAE, []),
Config.
-define(SOURCE1, #{<<"type">> => <<"http">>,
@ -120,12 +120,12 @@ init_per_testcase(_, Config) ->
%%------------------------------------------------------------------------------
t_update_source(_) ->
{ok, _} = emqx_authz:update(replace, [?SOURCE3]),
{ok, _} = emqx_authz:update(head, [?SOURCE2]),
{ok, _} = emqx_authz:update(head, [?SOURCE1]),
{ok, _} = emqx_authz:update(tail, [?SOURCE4]),
{ok, _} = emqx_authz:update(tail, [?SOURCE5]),
{ok, _} = emqx_authz:update(tail, [?SOURCE6]),
{ok, _} = emqx_authz:update(?CMD_REPLCAE, [?SOURCE3]),
{ok, _} = emqx_authz:update(?CMD_PREPEND, [?SOURCE2]),
{ok, _} = emqx_authz:update(?CMD_PREPEND, [?SOURCE1]),
{ok, _} = emqx_authz:update(?CMD_APPEND, [?SOURCE4]),
{ok, _} = emqx_authz:update(?CMD_APPEND, [?SOURCE5]),
{ok, _} = emqx_authz:update(?CMD_APPEND, [?SOURCE6]),
?assertMatch([ #{type := http, enable := true}
, #{type := mongodb, enable := true}
@ -135,12 +135,12 @@ t_update_source(_) ->
, #{type := file, enable := true}
], emqx:get_config([authorization, sources], [])),
{ok, _} = emqx_authz:update({replace_once, http}, ?SOURCE1#{<<"enable">> := false}),
{ok, _} = emqx_authz:update({replace_once, mongodb}, ?SOURCE2#{<<"enable">> := false}),
{ok, _} = emqx_authz:update({replace_once, mysql}, ?SOURCE3#{<<"enable">> := false}),
{ok, _} = emqx_authz:update({replace_once, postgresql}, ?SOURCE4#{<<"enable">> := false}),
{ok, _} = emqx_authz:update({replace_once, redis}, ?SOURCE5#{<<"enable">> := false}),
{ok, _} = emqx_authz:update({replace_once, file}, ?SOURCE6#{<<"enable">> := false}),
{ok, _} = emqx_authz:update({?CMD_REPLCAE, http}, ?SOURCE1#{<<"enable">> := false}),
{ok, _} = emqx_authz:update({?CMD_REPLCAE, mongodb}, ?SOURCE2#{<<"enable">> := false}),
{ok, _} = emqx_authz:update({?CMD_REPLCAE, mysql}, ?SOURCE3#{<<"enable">> := false}),
{ok, _} = emqx_authz:update({?CMD_REPLCAE, postgresql}, ?SOURCE4#{<<"enable">> := false}),
{ok, _} = emqx_authz:update({?CMD_REPLCAE, redis}, ?SOURCE5#{<<"enable">> := false}),
{ok, _} = emqx_authz:update({?CMD_REPLCAE, file}, ?SOURCE6#{<<"enable">> := false}),
?assertMatch([ #{type := http, enable := false}
, #{type := mongodb, enable := false}
@ -150,10 +150,10 @@ t_update_source(_) ->
, #{type := file, enable := false}
], emqx:get_config([authorization, sources], [])),
{ok, _} = emqx_authz:update(replace, []).
{ok, _} = emqx_authz:update(?CMD_REPLCAE, []).
t_move_source(_) ->
{ok, _} = emqx_authz:update(replace, [?SOURCE1, ?SOURCE2, ?SOURCE3, ?SOURCE4, ?SOURCE5, ?SOURCE6]),
{ok, _} = emqx_authz:update(?CMD_REPLCAE, [?SOURCE1, ?SOURCE2, ?SOURCE3, ?SOURCE4, ?SOURCE5, ?SOURCE6]),
?assertMatch([ #{type := http}
, #{type := mongodb}
, #{type := mysql}

View File

@ -0,0 +1,224 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_authz_api_mnesia_SUITE).
-compile(nowarn_export_all).
-compile(export_all).
-include("emqx_authz.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
-define(CONF_DEFAULT, <<"authorization: {sources: []}">>).
-import(emqx_ct_http, [ request_api/3
, request_api/5
, get_http_data/1
, create_default_app/0
, delete_default_app/0
, default_auth_header/0
, auth_header/2
]).
-define(HOST, "http://127.0.0.1:18083/").
-define(API_VERSION, "v5").
-define(BASE_PATH, "api").
-define(EXAMPLE_USERNAME, #{username => user1,
rules => [ #{topic => <<"test/toopic/1">>,
permission => <<"allow">>,
action => <<"publish">>
}
, #{topic => <<"test/toopic/2">>,
permission => <<"allow">>,
action => <<"subscribe">>
}
, #{topic => <<"eq test/#">>,
permission => <<"deny">>,
action => <<"all">>
}
]
}).
-define(EXAMPLE_CLIENTID, #{clientid => client1,
rules => [ #{topic => <<"test/toopic/1">>,
permission => <<"allow">>,
action => <<"publish">>
}
, #{topic => <<"test/toopic/2">>,
permission => <<"allow">>,
action => <<"subscribe">>
}
, #{topic => <<"eq test/#">>,
permission => <<"deny">>,
action => <<"all">>
}
]
}).
-define(EXAMPLE_ALL , #{rules => [ #{topic => <<"test/toopic/1">>,
permission => <<"allow">>,
action => <<"publish">>
}
, #{topic => <<"test/toopic/2">>,
permission => <<"allow">>,
action => <<"subscribe">>
}
, #{topic => <<"eq test/#">>,
permission => <<"deny">>,
action => <<"all">>
}
]
}).
%% All test cases in this suite.
%% NOTE(review): the suite is currently disabled (empty list) pending the
%% config_not_found fix mentioned below; re-enable via emqx_ct:all/1.
all() ->
    []. %% Todo: Waiting for @terry-xiaoyu to fix the config_not_found error
% emqx_ct:all(?MODULE).
%% No test groups are defined for this suite.
groups() ->
    [].
%% Suite setup: splice the authz fields into the core "authorization"
%% schema via a meck passthrough, load a minimal config, start the apps
%% under test, and disable the authorization cache so every request hits
%% the configured sources.
init_per_suite(Config) ->
    meck:new(emqx_schema, [non_strict, passthrough, no_history, no_link]),
    %% Only the "authorization" root is extended; every other schema lookup
    %% passes through untouched.
    meck:expect(emqx_schema, fields, fun("authorization") ->
                                             meck:passthrough(["authorization"]) ++
                                             emqx_authz_schema:fields("authorization");
                                        (F) -> meck:passthrough([F])
                                     end),
    ok = emqx_config:init_load(emqx_authz_schema, ?CONF_DEFAULT),
    ok = emqx_ct_helpers:start_apps([emqx_authz, emqx_dashboard], fun set_special_configs/1),
    {ok, _} = emqx:update_config([authorization, cache, enable], false),
    %% Deny by default so only explicit rules grant access.
    {ok, _} = emqx:update_config([authorization, no_match], deny),
    Config.
%% Suite teardown: drop all authz sources, stop the apps, remove the mock.
%% Uses ?CMD_REPLCAE (from emqx_authz.hrl) instead of the raw 'replace'
%% atom, for consistency with emqx_authz_SUITE which was migrated to the
%% macro in this same change set.
end_per_suite(_Config) ->
    {ok, _} = emqx_authz:update(?CMD_REPLCAE, []),
    emqx_ct_helpers:stop_apps([emqx_authz, emqx_dashboard]),
    meck:unload(emqx_schema),
    ok.
%% Per-application configuration installed before each app starts.
set_special_configs(emqx_dashboard) ->
    %% Default admin credentials and a plain-HTTP listener on 18083,
    %% matching the ?HOST used by the test requests.
    emqx_config:put([emqx_dashboard],
                    #{default_username => <<"admin">>,
                      default_password => <<"public">>,
                      listeners => [#{protocol => http,
                                      port => 18083}]}),
    ok;
set_special_configs(emqx_authz) ->
    %% Single authz source: the built-in mnesia database.
    Sources = [#{type => 'built-in-database', enable => true}],
    emqx_config:put([authorization], #{sources => Sources}),
    ok;
set_special_configs(_App) ->
    ok.
%%------------------------------------------------------------------------------
%% Testcases
%%------------------------------------------------------------------------------
%% End-to-end CRUD test of the built-in-database authz REST API:
%% username rules, clientid rules, the catch-all "all" rules, pagination
%% over bulk inserts, and the purge-all operation.
t_api(_) ->
    %% --- username rules: create, list, fetch, update, delete ---
    {ok, 204, _} = request(post, uri(["authorization", "sources", "built-in-database", "username"]), [?EXAMPLE_USERNAME]),
    {ok, 200, Request1} = request(get, uri(["authorization", "sources", "built-in-database", "username"]), []),
    {ok, 200, Request2} = request(get, uri(["authorization", "sources", "built-in-database", "username", "user1"]), []),
    %% list and single fetch must agree on the stored rules
    [#{<<"username">> := <<"user1">>, <<"rules">> := Rules1}] = jsx:decode(Request1),
    #{<<"username">> := <<"user1">>, <<"rules">> := Rules1} = jsx:decode(Request2),
    ?assertEqual(3, length(Rules1)),
    %% updating with an empty rule list clears the rules
    {ok, 204, _} = request(put, uri(["authorization", "sources", "built-in-database", "username", "user1"]), ?EXAMPLE_USERNAME#{rules => []}),
    {ok, 200, Request3} = request(get, uri(["authorization", "sources", "built-in-database", "username", "user1"]), []),
    #{<<"username">> := <<"user1">>, <<"rules">> := Rules2} = jsx:decode(Request3),
    ?assertEqual(0, length(Rules2)),
    {ok, 204, _} = request(delete, uri(["authorization", "sources", "built-in-database", "username", "user1"]), []),
    %% deleted record is gone
    {ok, 404, _} = request(get, uri(["authorization", "sources", "built-in-database", "username", "user1"]), []),
    %% --- clientid rules: the same CRUD cycle ---
    {ok, 204, _} = request(post, uri(["authorization", "sources", "built-in-database", "clientid"]), [?EXAMPLE_CLIENTID]),
    {ok, 200, Request4} = request(get, uri(["authorization", "sources", "built-in-database", "clientid"]), []),
    {ok, 200, Request5} = request(get, uri(["authorization", "sources", "built-in-database", "clientid", "client1"]), []),
    [#{<<"clientid">> := <<"client1">>, <<"rules">> := Rules3}] = jsx:decode(Request4),
    #{<<"clientid">> := <<"client1">>, <<"rules">> := Rules3} = jsx:decode(Request5),
    ?assertEqual(3, length(Rules3)),
    {ok, 204, _} = request(put, uri(["authorization", "sources", "built-in-database", "clientid", "client1"]), ?EXAMPLE_CLIENTID#{rules => []}),
    {ok, 200, Request6} = request(get, uri(["authorization", "sources", "built-in-database", "clientid", "client1"]), []),
    #{<<"clientid">> := <<"client1">>, <<"rules">> := Rules4} = jsx:decode(Request6),
    ?assertEqual(0, length(Rules4)),
    {ok, 204, _} = request(delete, uri(["authorization", "sources", "built-in-database", "clientid", "client1"]), []),
    {ok, 404, _} = request(get, uri(["authorization", "sources", "built-in-database", "clientid", "client1"]), []),
    %% --- catch-all rules applying to every client ---
    {ok, 204, _} = request(put, uri(["authorization", "sources", "built-in-database", "all"]), ?EXAMPLE_ALL),
    {ok, 200, Request7} = request(get, uri(["authorization", "sources", "built-in-database", "all"]), []),
    [#{<<"rules">> := Rules5}] = jsx:decode(Request7),
    ?assertEqual(3, length(Rules5)),
    {ok, 204, _} = request(put, uri(["authorization", "sources", "built-in-database", "all"]), ?EXAMPLE_ALL#{rules => []}),
    {ok, 200, Request8} = request(get, uri(["authorization", "sources", "built-in-database", "all"]), []),
    [#{<<"rules">> := Rules6}] = jsx:decode(Request8),
    ?assertEqual(0, length(Rules6)),
    %% --- pagination over 20 bulk-created records ---
    {ok, 204, _} = request(post, uri(["authorization", "sources", "built-in-database", "username"]), [ #{username => N, rules => []} || N <- lists:seq(1, 20) ]),
    {ok, 200, Request9} = request(get, uri(["authorization", "sources", "built-in-database", "username?page=2&limit=5"]), []),
    #{<<"data">> := Data1} = jsx:decode(Request9),
    ?assertEqual(5, length(Data1)),
    {ok, 204, _} = request(post, uri(["authorization", "sources", "built-in-database", "clientid"]), [ #{clientid => N, rules => []} || N <- lists:seq(1, 20) ]),
    {ok, 200, Request10} = request(get, uri(["authorization", "sources", "built-in-database", "clientid?limit=5"]), []),
    ?assertEqual(5, length(jsx:decode(Request10))),
    %% --- purge-all: rejected (400) while the source is enabled, allowed
    %% once it is disabled; afterwards the ACL table must be empty ---
    {ok, 400, _} = request(delete, uri(["authorization", "sources", "built-in-database", "purge-all"]), []),
    {ok, 204, _} = request(put, uri(["authorization", "sources", "built-in-database"]), #{<<"enable">> => false}),
    {ok, 204, _} = request(delete, uri(["authorization", "sources", "built-in-database", "purge-all"]), []),
    ?assertEqual([], mnesia:dirty_all_keys(?ACL_TABLE)),
    ok.
%%--------------------------------------------------------------------
%% HTTP Request
%%--------------------------------------------------------------------
%% Issue an HTTP request against the dashboard API. An empty Body sends no
%% payload; otherwise Body is JSON-encoded. Returns {ok, Code, RespBody} or
%% {error, Reason}.
request(Method, Url, Body) ->
    Req = case Body of
              [] -> {Url, [auth_header_()]};
              _ -> {Url, [auth_header_()], "application/json", jsx:encode(Body)}
          end,
    ct:pal("Method: ~p, Request: ~p", [Method, Req]),
    case httpc:request(Method, Req, [], [{body_format, binary}]) of
        {error, socket_closed_remotely} = Error ->
            Error;
        {ok, {{"HTTP/1.1", Code, _}, _Headers, Return}} ->
            {ok, Code, Return};
        {ok, {Reason, _, _}} ->
            {error, Reason}
    end.
uri() -> uri([]).

%% Build a full API URL from path segments, e.g.
%% uri(["a", "b"]) -> "http://127.0.0.1:18083/api/v5/a/b".
%% The identity comprehension previously here ([E || E <- Parts]) was a
%% no-op and has been removed.
uri(Parts) when is_list(Parts) ->
    ?HOST ++ filename:join([?BASE_PATH, ?API_VERSION | Parts]).
%% Decode a JSON response body into Erlang terms.
get_sources(Result) -> jsx:decode(Result).
%% Build a Bearer-token Authorization header for the default dashboard
%% admin account configured in set_special_configs/1.
auth_header_() ->
    {ok, Token} = emqx_dashboard_admin:sign_token(<<"admin">>, <<"public">>),
    {"Authorization", "Bearer " ++ binary_to_list(Token)}.

View File

@ -42,7 +42,7 @@
<<"url">> => <<"https://fake.com:443/">>,
<<"headers">> => #{},
<<"method">> => <<"get">>,
<<"request_timeout">> => 5000
<<"request_timeout">> => <<"5s">>
}).
-define(SOURCE2, #{<<"type">> => <<"mongodb">>,
<<"enable">> => true,
@ -96,7 +96,8 @@
}).
all() ->
emqx_ct:all(?MODULE).
[]. %% Todo: Waiting for @terry-xiaoyu to fix the config_not_found error
% emqx_ct:all(?MODULE).
groups() ->
[].

View File

@ -0,0 +1,109 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_authz_mnesia_SUITE).
-compile(nowarn_export_all).
-compile(export_all).
-include("emqx_authz.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
-define(CONF_DEFAULT, <<"authorization: {sources: []}">>).
%% Run every t_* test case in this module.
all() ->
    emqx_ct:all(?MODULE).
%% No test groups are defined for this suite.
groups() ->
    [].
%% Suite setup: mock emqx_schema to expose the authz fields, load a minimal
%% config, start emqx_authz, disable the authorization cache, set the
%% default verdict to deny, and install the built-in database as the only
%% authz source.
init_per_suite(Config) ->
    meck:new(emqx_schema, [non_strict, passthrough, no_history, no_link]),
    %% Extend only the "authorization" root; pass everything else through.
    meck:expect(emqx_schema, fields, fun("authorization") ->
                                             meck:passthrough(["authorization"]) ++
                                             emqx_authz_schema:fields("authorization");
                                        (F) -> meck:passthrough([F])
                                     end),
    ok = emqx_config:init_load(emqx_authz_schema, ?CONF_DEFAULT),
    ok = emqx_ct_helpers:start_apps([emqx_authz]),
    {ok, _} = emqx:update_config([authorization, cache, enable], false),
    {ok, _} = emqx:update_config([authorization, no_match], deny),
    Rules = [#{<<"type">> => <<"built-in-database">>}],
    {ok, _} = emqx_authz:update(replace, Rules),
    Config.
%% Suite teardown: clear the authz sources, stop the app, remove the mock.
%% NOTE(review): sibling suites in this change set use the ?CMD_REPLCAE
%% macro instead of the raw 'replace' atom — confirm and align.
end_per_suite(_Config) ->
    {ok, _} = emqx_authz:update(replace, []),
    emqx_ct_helpers:stop_apps([emqx_authz]),
    meck:unload(emqx_schema),
    ok.
%% Seed the ACL table for t_authz with one record per key kind:
%% a per-username record, a per-clientid record, and the catch-all record.
%% NOTE(review): passing ekka_mnesia:dirty_write/1 to mnesia:transaction/2
%% runs a dirty write inside a transaction, which is unusual — confirm
%% intent before relying on transactional guarantees here.
init_per_testcase(t_authz, Config) ->
    mnesia:transaction(fun ekka_mnesia:dirty_write/1, [#emqx_acl{who = {?ACL_TABLE_USERNAME, <<"test_username">>},
                                                                 rules = [{allow, publish, <<"test/%u">>},
                                                                          {allow, subscribe, <<"eq #">>}
                                                                         ]
                                                                }]),
    mnesia:transaction(fun ekka_mnesia:dirty_write/1, [#emqx_acl{who = {?ACL_TABLE_CLIENTID, <<"test_clientid">>},
                                                                 rules = [{allow, publish, <<"test/%c">>},
                                                                          {deny, subscribe, <<"eq #">>}
                                                                         ]
                                                                }]),
    mnesia:transaction(fun ekka_mnesia:dirty_write/1, [#emqx_acl{who = ?ACL_TABLE_ALL,
                                                                 rules = [{deny, all, <<"#">>}]
                                                                }]),
    Config;
init_per_testcase(_, Config) -> Config.
%% Remove every ACL record written for t_authz; other cases write nothing.
end_per_testcase(t_authz, Config) ->
    lists:foreach(fun(Key) -> ekka_mnesia:dirty_delete(?ACL_TABLE, Key) end,
                  mnesia:dirty_all_keys(?ACL_TABLE)),
    Config;
end_per_testcase(_, Config) -> Config.
%%------------------------------------------------------------------------------
%% Testcases
%%------------------------------------------------------------------------------
%% Exercise authorization against the three ACL records seeded by
%% init_per_testcase/2 (per-username, per-clientid, and catch-all).
t_authz(_) ->
    %% Matches neither stored username nor clientid: only the catch-all
    %% {deny, all, <<"#">>} record applies.
    ClientInfo1 = #{clientid => <<"test">>,
                    username => <<"test">>,
                    peerhost => {127,0,0,1},
                    listener => {tcp, default}
                   },
    %% Matches the per-username record only.
    ClientInfo2 = #{clientid => <<"fake_clientid">>,
                    username => <<"test_username">>,
                    peerhost => {127,0,0,1},
                    listener => {tcp, default}
                   },
    %% Matches the per-clientid record only.
    ClientInfo3 = #{clientid => <<"test_clientid">>,
                    username => <<"fake_username">>,
                    peerhost => {127,0,0,1},
                    listener => {tcp, default}
                   },
    ?assertEqual(deny, emqx_access_control:authorize(ClientInfo1, subscribe, <<"#">>)),
    ?assertEqual(deny, emqx_access_control:authorize(ClientInfo1, publish, <<"#">>)),
    %% %u in the stored topic is substituted with the username
    ?assertEqual(allow, emqx_access_control:authorize(ClientInfo2, publish, <<"test/test_username">>)),
    ?assertEqual(allow, emqx_access_control:authorize(ClientInfo2, subscribe, <<"#">>)),
    %% %c in the stored topic is substituted with the clientid
    ?assertEqual(allow, emqx_access_control:authorize(ClientInfo3, publish, <<"test/test_clientid">>)),
    ?assertEqual(deny, emqx_access_control:authorize(ClientInfo3, subscribe, <<"#">>)),
    ok.

View File

@ -22,11 +22,11 @@
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
-define(SOURCE1, {deny, all, all, ["#"]}).
-define(SOURCE1, {deny, all}).
-define(SOURCE2, {allow, {ipaddr, "127.0.0.1"}, all, [{eq, "#"}, {eq, "+"}]}).
-define(SOURCE3, {allow, {ipaddrs, ["127.0.0.1", "192.168.1.0/24"]}, subscribe, ["%c"]}).
-define(SOURCE4, {allow, {'and', [{clientid, "^test?"}, {username, "^test?"}]}, publish, ["topic/test"]}).
-define(SOURCE5, {allow, {'or', [{username, "^test"}, {clientid, "test?"}]}, publish, ["%u", "%c"]}).
-define(SOURCE4, {allow, {'and', [{client, "test"}, {user, "test"}]}, publish, ["topic/test"]}).
-define(SOURCE5, {allow, {'or', [{username, {re, "^test"}}, {clientid, {re, "test?"}}]}, publish, ["%u", "%c"]}).
all() ->
emqx_ct:all(?MODULE).
@ -52,7 +52,7 @@ t_compile(_) ->
}, emqx_authz_rule:compile(?SOURCE3)),
?assertMatch({allow,
{'and', [{clientid, {re_pattern, _, _, _, _}}, {username, {re_pattern, _, _, _, _}}]},
{'and', [{clientid, {eq, <<"test">>}}, {username, {eq, <<"test">>}}]},
publish,
[[<<"topic">>, <<"test">>]]
}, emqx_authz_rule:compile(?SOURCE4)),

View File

@ -45,3 +45,30 @@
# retain = false
# }
#}
#
#bridges.http.my_http_bridge {
# base_url: "http://localhost:9901"
# connect_timeout: "30s"
# max_retries: 3
# retry_interval = "10s"
# pool_type = "random"
# pool_size = 4
# enable_pipelining = true
# ssl {
# enable = false
# keyfile = "{{ platform_etc_dir }}/certs/client-key.pem"
# certfile = "{{ platform_etc_dir }}/certs/client-cert.pem"
# cacertfile = "{{ platform_etc_dir }}/certs/cacert.pem"
# }
# egress_channels.post_messages {
# subscribe_local_topic = "emqx_http/#"
# request_timeout: "30s"
# ## following config entries can use placeholder variables
# method = post
# path = "/messages/${topic}"
# body = "${payload}"
# headers {
# "content-type": "application/json"
# }
# }
#}

View File

@ -15,9 +15,15 @@
%%--------------------------------------------------------------------
-module(emqx_bridge).
-behaviour(emqx_config_handler).
-include_lib("emqx/include/emqx.hrl").
-include_lib("emqx/include/logger.hrl").
-export([post_config_update/4]).
-export([reload_hook/0, unload_hook/0]).
-export([on_message_publish/1]).
-export([ load_bridges/0
, get_bridge/2
, get_bridge/3
@ -28,6 +34,7 @@
, start_bridge/2
, stop_bridge/2
, restart_bridge/2
, send_message/2
]).
-export([ config_key_path/0
@ -38,24 +45,57 @@
, resource_id/1
, resource_id/2
, parse_bridge_id/1
, channel_id/4
, parse_channel_id/1
]).
%% Re-install the 'message.publish' hook from the current bridge
%% configuration: drop any existing hook, then inspect every configured
%% bridge of every type.
reload_hook() ->
    unload_hook(),
    Bridges = emqx:get_config([bridges], #{}),
    lists:foreach(
        fun({_Type, NamedBridges}) ->
            lists:foreach(fun({_Name, BridgeConf}) -> load_hook(BridgeConf) end,
                          maps:to_list(NamedBridges))
        end, maps:to_list(Bridges)).
%% Install the 'message.publish' hook when at least one egress channel of
%% this bridge declares a subscribe_local_topic (i.e. it forwards locally
%% published messages); otherwise do nothing.
%%
%% BUG FIX: the branches were inverted — the hook was installed only when
%% NO channel had a subscribe_local_topic, so on_message_publish/1 never
%% ran for the bridges that actually needed it.
load_hook(#{egress_channels := Channels}) ->
    case has_subscribe_local_topic(Channels) of
        true -> emqx_hooks:put('message.publish', {?MODULE, on_message_publish, []});
        false -> ok
    end;
load_hook(_Conf) -> ok.
%% Remove the 'message.publish' hook installed by load_hook/1.
unload_hook() ->
    ok = emqx_hooks:del('message.publish', {?MODULE, on_message_publish}).
%% 'message.publish' hook: forward every non-system message to each egress
%% channel whose subscribe_local_topic filter matches the message topic.
%% Always passes the message through unchanged.
on_message_publish(Message = #message{topic = Topic, flags = Flags}) ->
    case maps:get(sys, Flags, false) of
        true ->
            %% system messages are never bridged
            ok;
        false ->
            lists:foreach(
                fun(ChannelId) ->
                    send_message(ChannelId, emqx_message:to_map(Message))
                end, get_matched_channels(Topic))
    end,
    {ok, Message}.
%% TODO: remove this clause, treat mqtt bridges the same as other bridges
%% Resolve the owning bridge resource from the channel id and forward the
%% message to it.
send_message(ChannelId, Message) ->
    {BridgeType, BridgeName, _ChanType, _ChanName} = parse_channel_id(ChannelId),
    ResId = emqx_bridge:resource_id(BridgeType, BridgeName),
    do_send_message(ResId, ChannelId, Message).

%% Hand the message to the resource layer, tagged with its channel.
do_send_message(ResId, ChannelId, Message) ->
    emqx_resource:query(ResId, {send_message, ChannelId, Message}).
%% Root config path under which all bridges are configured.
config_key_path() ->
    [bridges].
%% Map a bridge type to the connector module implementing it.
%% (Fixed diff residue: the source showed both a terminal period on the
%% ldap clause and a following http clause, which is invalid syntax; the
%% ldap clause must end with ';' so http belongs to the same function.)
resource_type(mqtt) -> emqx_connector_mqtt;
resource_type(mysql) -> emqx_connector_mysql;
resource_type(pgsql) -> emqx_connector_pgsql;
resource_type(mongo) -> emqx_connector_mongo;
resource_type(redis) -> emqx_connector_redis;
resource_type(ldap) -> emqx_connector_ldap;
resource_type(http) -> emqx_connector_http.

%% Inverse of resource_type/1: connector module back to bridge type.
bridge_type(emqx_connector_mqtt) -> mqtt;
bridge_type(emqx_connector_mysql) -> mysql;
bridge_type(emqx_connector_pgsql) -> pgsql;
bridge_type(emqx_connector_mongo) -> mongo;
bridge_type(emqx_connector_redis) -> redis;
bridge_type(emqx_connector_ldap) -> ldap;
bridge_type(emqx_connector_http) -> http.
post_config_update(_Req, NewConf, OldConf, _AppEnv) ->
#{added := Added, removed := Removed, changed := Updated}
@ -100,11 +140,23 @@ bridge_id(BridgeType, BridgeName) ->
<<Type/binary, ":", Name/binary>>.
%% Parse "<type>:<name>" into {Type :: atom(), Name :: atom()}; raises
%% {invalid_bridge_id, Id} on any other shape.
%% (Fixed diff residue: the old try/catch body and the new case body were
%% interleaved in the source; this keeps the new, case-based version.)
%% NOTE(review): binary_to_atom/2 on uncontrolled input can exhaust the
%% atom table — confirm bridge ids only ever come from validated config.
parse_bridge_id(BridgeId) ->
    case string:split(bin(BridgeId), ":", all) of
        [Type, Name] -> {binary_to_atom(Type, utf8), binary_to_atom(Name, utf8)};
        _ -> error({invalid_bridge_id, BridgeId})
    end.
%% Compose a channel id binary "<btype>:<bname>:<ctype>:<cname>".
channel_id(BridgeType, BridgeName, ChannelType, ChannelName) ->
    Segments = [bin(BridgeType), bin(BridgeName), bin(ChannelType), bin(ChannelName)],
    iolist_to_binary(lists:join(":", Segments)).
%% Split a channel id into its four components, as binaries (unlike
%% parse_bridge_id/1, no atom conversion is done here).
%% NOTE(review): the error reason reuses the invalid_bridge_id tag —
%% possibly it should be invalid_channel_id; callers may match on it, so
%% confirm before changing.
parse_channel_id(ChannelId) ->
    case string:split(bin(ChannelId), ":", all) of
        [BridgeType, BridgeName, ChannelType, ChannelName] ->
            {BridgeType, BridgeName, ChannelType, ChannelName};
        _ -> error({invalid_bridge_id, ChannelId})
    end.
list_bridges() ->
@ -137,7 +189,8 @@ restart_bridge(Type, Name) ->
emqx_resource:restart(resource_id(Type, Name)).
create_bridge(Type, Name, Conf) ->
logger:info("create ~p bridge ~p use config: ~p", [Type, Name, Conf]),
?SLOG(info, #{msg => "create bridge", type => Type, name => Name,
config => Conf}),
ResId = resource_id(Type, Name),
case emqx_resource:create(ResId,
emqx_bridge:resource_type(Type), Conf) of
@ -158,12 +211,13 @@ update_bridge(Type, Name, {_OldConf, Conf}) ->
%% `egress_channels` are changed, then we should not restart the bridge, we only restart/start
%% the channels.
%%
logger:info("update ~p bridge ~p use config: ~p", [Type, Name, Conf]),
?SLOG(info, #{msg => "update bridge", type => Type, name => Name,
config => Conf}),
emqx_resource:recreate(resource_id(Type, Name),
emqx_bridge:resource_type(Type), Conf, []).
remove_bridge(Type, Name, _Conf) ->
logger:info("remove ~p bridge ~p", [Type, Name]),
?SLOG(info, #{msg => "remove bridge", type => Type, name => Name}),
case emqx_resource:remove(resource_id(Type, Name)) of
ok -> ok;
{error, not_found} -> ok;
@ -184,13 +238,35 @@ flatten_confs(Conf0) ->
do_flatten_confs(Type, Conf0) ->
[{{Type, Name}, Conf} || {Name, Conf} <- maps:to_list(Conf0)].
%% True if any egress channel in Channels declares a subscribe_local_topic.
%%
%% BUG FIX: maps:to_list/1 yields {Name, Conf} tuples, but the predicate
%% matched the map pattern #{subscribe_local_topic := _} directly against
%% each tuple, so it could never return true. Test the channel conf values
%% instead.
has_subscribe_local_topic(Channels) ->
    lists:any(fun(#{subscribe_local_topic := _}) -> true;
                 (_) -> false
              end, maps:values(Channels)).
%% Collect the channel ids of every egress channel — across all configured
%% non-mqtt bridges — whose subscribe_local_topic filter matches Topic.
get_matched_channels(Topic) ->
    Bridges = emqx:get_config([bridges], #{}),
    %% Outer fold: bridge types; inner fold: named bridges of that type.
    maps:fold(fun
            %% TODO: also trigger 'message.publish' for mqtt bridges.
            (mqtt, _Conf, Acc0) -> Acc0;
            (BType, Conf, Acc0) ->
                maps:fold(fun
                    (BName, #{egress_channels := Channels}, Acc1) ->
                        do_get_matched_channels(Topic, Channels, BType, BName, egress_channels)
                            ++ Acc1;
                    %% bridges without egress channels contribute nothing
                    (_Name, _BridgeConf, Acc1) -> Acc1
                end, Acc0, Conf)
        end, [], Bridges).
%% Channel ids within one bridge whose subscribe_local_topic filter
%% matches Topic; channels without such a filter are skipped.
do_get_matched_channels(Topic, Channels, BType, BName, CType) ->
    Matcher =
        fun(ChannName, #{subscribe_local_topic := Filter}, Acc) ->
               case emqx_topic:match(Topic, Filter) of
                   true -> [channel_id(BType, BName, CType, ChannName) | Acc];
                   false -> Acc
               end;
           (_ChannName, _ChannConf, Acc) ->
               Acc
        end,
    maps:fold(Matcher, [], Channels).
%% Coerce an atom, string, or binary to a binary.
bin(A) when is_atom(A) -> atom_to_binary(A, utf8);
bin(L) when is_list(L) -> list_to_binary(L);
bin(B) when is_binary(B) -> B.
%% Coerce an atom, binary, or string to a character list.
str(S) when is_list(S) -> S;
str(B) when is_binary(B) -> binary_to_list(B);
str(A) when is_atom(A) -> atom_to_list(A).

View File

@ -22,10 +22,12 @@
%% Application start callback: start the supervisor, create resources for
%% all configured bridges, install the publish hook, and register the
%% config-change handler for the [bridges] root.
start(_StartType, _StartArgs) ->
    {ok, Sup} = emqx_bridge_sup:start_link(),
    ok = emqx_bridge:load_bridges(),
    ok = emqx_bridge:reload_hook(),
    emqx_config_handler:add_handler(emqx_bridge:config_key_path(), emqx_bridge),
    {ok, Sup}.
%% Application stop callback: remove the publish hook installed at start.
stop(_State) ->
    ok = emqx_bridge:unload_hook(),
    ok.
%% internal functions

View File

@ -1,5 +1,7 @@
-module(emqx_bridge_schema).
-include_lib("typerefl/include/types.hrl").
-export([roots/0, fields/1]).
%%======================================================================================
@ -8,7 +10,16 @@
%% Top-level config roots contributed by this schema.
roots() -> [bridges].
%% Schema fields per struct name.
%% (Fixed diff residue: the pre-change single-clause version and the new
%% multi-clause version were interleaved in the source; this keeps the new
%% version, which adds the http bridge struct.)
fields(bridges) ->
    [ {mqtt, hoconsc:mk(hoconsc:map(name, hoconsc:ref(?MODULE, "mqtt_bridge")))}
    , {http, hoconsc:mk(hoconsc:map(name, hoconsc:ref(?MODULE, "http_bridge")))}
    ];
fields("mqtt_bridge") ->
    emqx_connector_mqtt:fields("config");
fields("http_bridge") ->
    %% http bridges reuse the connector config plus egress channel structs
    emqx_connector_http:fields(config) ++ http_channels().
%% The egress_channels map field for http bridges; each value is an
%% emqx_connector_http "http_request" struct keyed by channel id.
http_channels() ->
    [{egress_channels, hoconsc:mk(hoconsc:map(id,
        hoconsc:ref(emqx_connector_http, "http_request")))}].

View File

@ -21,6 +21,8 @@
-include_lib("typerefl/include/types.hrl").
-include_lib("emqx_resource/include/emqx_resource_behaviour.hrl").
-include_lib("emqx/include/logger.hrl").
%% callbacks of behaviour emqx_resource
-export([ on_start/2
, on_stop/2
@ -38,7 +40,7 @@
-export([ check_ssl_opts/2 ]).
-type connect_timeout() :: non_neg_integer() | infinity.
-type connect_timeout() :: emqx_schema:duration() | infinity.
-type pool_type() :: random | hash.
-reflect_type([ connect_timeout/0
@ -50,6 +52,22 @@
roots() ->
[{config, #{type => hoconsc:ref(?MODULE, config)}}].
%% Schema for one egress channel ("http_request"): which local topic it
%% forwards, and how the outgoing HTTP request is built.
fields("http_request") ->
    [ {subscribe_local_topic, hoconsc:mk(binary())}
    , {method, hoconsc:mk(method(), #{default => post})}
    , {path, hoconsc:mk(binary(), #{default => <<"">>})}
      %% default headers for a JSON request; user config overrides them
    , {headers, hoconsc:mk(map(),
        #{default => #{
            <<"accept">> => <<"application/json">>,
            <<"cache-control">> => <<"no-cache">>,
            <<"connection">> => <<"keep-alive">>,
            <<"content-type">> => <<"application/json">>,
            <<"keep-alive">> => <<"timeout=5">>}})
      }
      %% body template; ${payload} is substituted per message
    , {body, hoconsc:mk(binary(), #{default => <<"${payload}">>})}
    , {request_timeout, hoconsc:mk(emqx_schema:duration_ms(), #{default => <<"30s">>})}
    ];
fields(config) ->
[ {base_url, fun base_url/1}
, {connect_timeout, fun connect_timeout/1}
@ -60,6 +78,13 @@ fields(config) ->
, {enable_pipelining, fun enable_pipelining/1}
] ++ emqx_connector_schema_lib:ssl_fields().
%% Allowed HTTP methods for an egress channel request.
method() ->
    Methods = [post, put, get, delete],
    hoconsc:union([typerefl:atom(M) || M <- Methods]).
%% Struct-level validations applied after individual field checks.
validations() ->
    [ {check_ssl_opts, fun check_ssl_opts/1} ].
@ -71,16 +96,16 @@ base_url(validator) -> fun(#{query := _Query}) ->
end;
base_url(_) -> undefined.
connect_timeout(type) -> connect_timeout();
connect_timeout(default) -> 5000;
connect_timeout(type) -> emqx_schema:duration_ms();
connect_timeout(default) -> "5s";
connect_timeout(_) -> undefined.
%% Schema accessor for max_retries: retry attempts per request, default 5.
max_retries(type) -> non_neg_integer();
max_retries(default) -> 5;
max_retries(_) -> undefined.
retry_interval(type) -> non_neg_integer();
retry_interval(default) -> 1000;
retry_interval(type) -> emqx_schema:duration();
retry_interval(default) -> "1s";
retry_interval(_) -> undefined.
pool_type(type) -> pool_type();
@ -105,13 +130,14 @@ on_start(InstId, #{base_url := #{scheme := Scheme,
retry_interval := RetryInterval,
pool_type := PoolType,
pool_size := PoolSize} = Config) ->
logger:info("starting http connector: ~p, config: ~p", [InstId, Config]),
?SLOG(info, #{msg => "starting http connector",
connector => InstId, config => Config}),
{Transport, TransportOpts} = case Scheme of
http ->
{tcp, []};
https ->
SSLOpts = emqx_plugin_libs_ssl:save_files_return_opts(
maps:get(ssl_opts, Config), "connectors", InstId),
maps:get(ssl, Config), "connectors", InstId),
{tls, SSLOpts}
end,
NTransportOpts = emqx_misc:ipv6_probe(TransportOpts),
@ -126,30 +152,51 @@ on_start(InstId, #{base_url := #{scheme := Scheme,
, {transport, Transport}
, {transport_opts, NTransportOpts}],
PoolName = emqx_plugin_libs_pool:pool_name(InstId),
{ok, _} = ehttpc_sup:start_pool(PoolName, PoolOpts),
{ok, #{pool_name => PoolName,
State = #{
pool_name => PoolName,
host => Host,
port => Port,
base_path => BasePath}}.
base_path => BasePath,
channels => preproc_channels(InstId, Config)
},
case ehttpc_sup:start_pool(PoolName, PoolOpts) of
{ok, _} -> {ok, State};
{error, {already_started, _}} -> {ok, State};
{error, Reason} ->
{error, Reason}
end.
on_stop(InstId, #{pool_name := PoolName}) ->
logger:info("stopping http connector: ~p", [InstId]),
?SLOG(info, #{msg => "stopping http connector",
connector => InstId}),
ehttpc_sup:stop_pool(PoolName).
on_query(InstId, {send_message, ChannelId, Msg}, AfterQuery, #{channels := Channels} = State) ->
case maps:find(ChannelId, Channels) of
error -> ?SLOG(error, #{msg => "channel not found", channel_id => ChannelId});
{ok, ChannConf} ->
#{method := Method, path := Path, body := Body, headers := Headers,
request_timeout := Timeout} = proc_channel_conf(ChannConf, Msg),
on_query(InstId, {Method, {Path, Headers, Body}, Timeout}, AfterQuery, State)
end;
on_query(InstId, {Method, Request}, AfterQuery, State) ->
on_query(InstId, {undefined, Method, Request, 5000}, AfterQuery, State);
on_query(InstId, {Method, Request, Timeout}, AfterQuery, State) ->
on_query(InstId, {undefined, Method, Request, Timeout}, AfterQuery, State);
on_query(InstId, {KeyOrNum, Method, Request, Timeout}, AfterQuery, #{pool_name := PoolName,
base_path := BasePath} = State) ->
logger:debug("http connector ~p received request: ~p, at state: ~p", [InstId, Request, State]),
on_query(InstId, {KeyOrNum, Method, Request, Timeout}, AfterQuery,
#{pool_name := PoolName, base_path := BasePath} = State) ->
?SLOG(debug, #{msg => "http connector received request",
request => Request, connector => InstId,
state => State}),
NRequest = update_path(BasePath, Request),
case Result = ehttpc:request(case KeyOrNum of
undefined -> PoolName;
_ -> {PoolName, KeyOrNum}
end, Method, NRequest, Timeout) of
{error, Reason} ->
logger:debug("http connector ~p do reqeust failed, sql: ~p, reason: ~p", [InstId, NRequest, Reason]),
?SLOG(error, #{msg => "http connector do reqeust failed",
request => NRequest, reason => Reason,
connector => InstId}),
emqx_resource:query_failed(AfterQuery);
_ ->
emqx_resource:query_success(AfterQuery)
@ -169,6 +216,54 @@ on_health_check(_InstId, #{host := Host, port := Port} = State) ->
%% Internal functions
%%--------------------------------------------------------------------
preproc_channels(<<"bridge:", BridgeId/binary>>, Config) ->
{BridgeType, BridgeName} = emqx_bridge:parse_bridge_id(BridgeId),
maps:fold(fun(ChannName, ChannConf, Acc) ->
Acc#{emqx_bridge:channel_id(BridgeType, BridgeName, egress_channels, ChannName) =>
preproc_channel_conf(ChannConf)}
end, #{}, maps:get(egress_channels, Config, #{}));
preproc_channels(_InstId, _Config) ->
#{}.
%% Pre-compile the request templates (method/path/body/headers) of one
%% egress channel so they can be rendered cheaply per message later.
preproc_channel_conf(#{method := Method, path := Path,
                       body := Body, headers := Headers} = Conf) ->
    Compile = fun(Tmpl) -> emqx_plugin_libs_rule:preproc_tmpl(Tmpl) end,
    Conf#{method => Compile(bin(Method)),
          path => Compile(Path),
          body => Compile(Body),
          headers => preproc_headers(Headers)}.
%% Pre-compile every header name and value into template token lists.
preproc_headers(Headers) ->
    maps:from_list(
        [{emqx_plugin_libs_rule:preproc_tmpl(bin(K)),
          emqx_plugin_libs_rule:preproc_tmpl(bin(V))}
         || {K, V} <- maps:to_list(Headers)]).
%% Render a pre-compiled channel config against a message: fills the
%% method/path/body/headers templates with values taken from Msg.
%% Headers are flattened to a proplist as expected by the http client.
proc_channel_conf(#{method := MethodTks, path := PathTks,
                    body := BodyTks, headers := HeadersTks} = Conf, Msg) ->
    Render = fun(Tks) -> emqx_plugin_libs_rule:proc_tmpl(Tks, Msg) end,
    Conf#{method => make_method(Render(MethodTks)),
          path => Render(PathTks),
          body => Render(BodyTks),
          headers => maps:to_list(proc_headers(HeadersTks, Msg))}.
%% Render header-name and header-value templates against a message,
%% producing a map of concrete header binaries.
proc_headers(HeaderTks, Msg) ->
    Render = fun(Tks) -> emqx_plugin_libs_rule:proc_tmpl(Tks, Msg) end,
    maps:from_list([{Render(K), Render(V)} || {K, V} <- maps:to_list(HeaderTks)]).
make_method(M) when M == <<"POST">>; M == <<"post">> -> post;
make_method(M) when M == <<"PUT">>; M == <<"put">> -> put;
make_method(M) when M == <<"GET">>; M == <<"get">> -> get;
make_method(M) when M == <<"DELETE">>; M == <<"delete">> -> delete.
%% Validate the ssl options of a config against its URL scheme.
%% Delegates to check_ssl_opts/2 with the name of the URL field.
check_ssl_opts(Conf) ->
    check_ssl_opts("base_url", Conf).
@ -185,3 +280,10 @@ update_path(BasePath, {Path, Headers}) ->
{filename:join(BasePath, Path), Headers};
update_path(BasePath, {Path, Headers, Body}) ->
{filename:join(BasePath, Path), Headers, Body}.
%% Coerce an atom, charlist or binary into a binary.
%% Atoms are encoded as UTF-8; binaries pass through untouched.
bin(A) when is_atom(A) ->
    atom_to_binary(A, utf8);
bin(S) when is_list(S) ->
    list_to_binary(S);
bin(B) when is_binary(B) ->
    B.

View File

@ -18,6 +18,7 @@
-include("emqx_connector.hrl").
-include_lib("typerefl/include/types.hrl").
-include_lib("emqx_resource/include/emqx_resource_behaviour.hrl").
-include_lib("emqx/include/logger.hrl").
-export([roots/0, fields/1]).
@ -53,7 +54,8 @@ on_start(InstId, #{servers := Servers0,
pool_size := PoolSize,
auto_reconnect := AutoReconn,
ssl := SSL} = Config) ->
logger:info("starting ldap connector: ~p, config: ~p", [InstId, Config]),
?SLOG(info, #{msg => "starting ldap connector",
connector => InstId, config => Config}),
Servers = [begin proplists:get_value(host, S) end || S <- Servers0],
SslOpts = case maps:get(enable, SSL) of
true ->
@ -75,14 +77,20 @@ on_start(InstId, #{servers := Servers0,
{ok, #{poolname => PoolName}}.
on_stop(InstId, #{poolname := PoolName}) ->
logger:info("stopping ldap connector: ~p", [InstId]),
?SLOG(info, #{msg => "stopping ldap connector",
connector => InstId}),
emqx_plugin_libs_pool:stop_pool(PoolName).
on_query(InstId, {search, Base, Filter, Attributes}, AfterQuery, #{poolname := PoolName} = State) ->
logger:debug("ldap connector ~p received request: ~p, at state: ~p", [InstId, {Base, Filter, Attributes}, State]),
Request = {Base, Filter, Attributes},
?SLOG(debug, #{msg => "ldap connector received request",
request => Request, connector => InstId,
state => State}),
case Result = ecpool:pick_and_do(PoolName, {?MODULE, search, [Base, Filter, Attributes]}, no_handover) of
{error, Reason} ->
logger:debug("ldap connector ~p do request failed, request: ~p, reason: ~p", [InstId, {Base, Filter, Attributes}, Reason]),
?SLOG(error, #{msg => "ldap connector do request failed",
request => Request, connector => InstId,
reason => Reason}),
emqx_resource:query_failed(AfterQuery);
_ ->
emqx_resource:query_success(AfterQuery)

View File

@ -18,6 +18,7 @@
-include("emqx_connector.hrl").
-include_lib("typerefl/include/types.hrl").
-include_lib("emqx_resource/include/emqx_resource_behaviour.hrl").
-include_lib("emqx/include/logger.hrl").
-type server() :: emqx_schema:ip_port().
-reflect_type([server/0]).
@ -93,7 +94,8 @@ on_jsonify(Config) ->
%% ===================================================================
on_start(InstId, Config = #{server := Server,
mongo_type := single}) ->
logger:info("starting mongodb connector: ~p, config: ~p", [InstId, Config]),
?SLOG(info, #{msg => "starting mongodb single connector",
connector => InstId, config => Config}),
Opts = [{type, single},
{hosts, [emqx_connector_schema_lib:ip_port_to_string(Server)]}
],
@ -102,7 +104,8 @@ on_start(InstId, Config = #{server := Server,
on_start(InstId, Config = #{servers := Servers,
mongo_type := rs,
replica_set_name := RsName}) ->
logger:info("starting mongodb connector: ~p, config: ~p", [InstId, Config]),
?SLOG(info, #{msg => "starting mongodb rs connector",
connector => InstId, config => Config}),
Opts = [{type, {rs, RsName}},
{hosts, [emqx_connector_schema_lib:ip_port_to_string(S)
|| S <- Servers]}
@ -111,7 +114,8 @@ on_start(InstId, Config = #{servers := Servers,
on_start(InstId, Config = #{servers := Servers,
mongo_type := sharded}) ->
logger:info("starting mongodb connector: ~p, config: ~p", [InstId, Config]),
?SLOG(info, #{msg => "starting mongodb sharded connector",
connector => InstId, config => Config}),
Opts = [{type, sharded},
{hosts, [emqx_connector_schema_lib:ip_port_to_string(S)
|| S <- Servers]}
@ -119,14 +123,20 @@ on_start(InstId, Config = #{servers := Servers,
do_start(InstId, Opts, Config).
on_stop(InstId, #{poolname := PoolName}) ->
logger:info("stopping mongodb connector: ~p", [InstId]),
?SLOG(info, #{msg => "stopping mongodb connector",
connector => InstId}),
emqx_plugin_libs_pool:stop_pool(PoolName).
on_query(InstId, {Action, Collection, Selector, Docs}, AfterQuery, #{poolname := PoolName} = State) ->
logger:debug("mongodb connector ~p received request: ~p, at state: ~p", [InstId, {Action, Collection, Selector, Docs}, State]),
Request = {Action, Collection, Selector, Docs},
?SLOG(debug, #{msg => "mongodb connector received request",
request => Request, connector => InstId,
state => State}),
case ecpool:pick_and_do(PoolName, {?MODULE, mongo_query, [Action, Collection, Selector, Docs]}, no_handover) of
{error, Reason} ->
logger:debug("mongodb connector ~p do sql query failed, request: ~p, reason: ~p", [InstId, {Action, Collection, Selector, Docs}, Reason]),
?SLOG(error, #{msg => "mongodb connector do query failed",
request => Request, reason => Reason,
connector => InstId}),
emqx_resource:query_failed(AfterQuery),
{error, Reason};
{ok, Cursor} when is_pid(Cursor) ->

View File

@ -17,6 +17,7 @@
-include_lib("typerefl/include/types.hrl").
-include_lib("emqx_resource/include/emqx_resource_behaviour.hrl").
-include_lib("emqx/include/logger.hrl").
-behaviour(supervisor).
@ -88,13 +89,15 @@ drop_bridge(Name) ->
%% ===================================================================
%% When use this bridge as a data source, ?MODULE:on_message_received/2 will be called
%% if the bridge received msgs from the remote broker.
on_message_received(Msg, ChannelName) ->
emqx:run_hook(ChannelName, [Msg]).
on_message_received(Msg, ChannId) ->
Name = atom_to_binary(ChannId, utf8),
emqx:run_hook(<<"$bridges/", Name/binary>>, [Msg]).
%% ===================================================================
on_start(InstId, Conf) ->
logger:info("starting mqtt connector: ~p, ~p", [InstId, Conf]),
NamePrefix = binary_to_list(InstId),
?SLOG(info, #{msg => "starting mqtt connector",
connector => InstId, config => Conf}),
"bridge:" ++ NamePrefix = binary_to_list(InstId),
BasicConf = basic_config(Conf),
InitRes = {ok, #{name_prefix => NamePrefix, baisc_conf => BasicConf, channels => []}},
InOutConfigs = taged_map_list(ingress_channels, maps:get(ingress_channels, Conf, #{}))
@ -110,7 +113,8 @@ on_start(InstId, Conf) ->
end, InitRes, InOutConfigs).
on_stop(InstId, #{channels := NameList}) ->
logger:info("stopping mqtt connector: ~p", [InstId]),
?SLOG(info, #{msg => "stopping mqtt connector",
connector => InstId}),
lists:foreach(fun(Name) ->
remove_channel(Name)
end, NameList).
@ -120,9 +124,10 @@ on_stop(InstId, #{channels := NameList}) ->
on_query(_InstId, {create_channel, Conf}, _AfterQuery, #{name_prefix := Prefix,
baisc_conf := BasicConf}) ->
create_channel(Conf, Prefix, BasicConf);
on_query(_InstId, {send_to_remote, ChannelName, Msg}, _AfterQuery, _State) ->
logger:debug("send msg to remote node on channel: ~p, msg: ~p", [ChannelName, Msg]),
emqx_connector_mqtt_worker:send_to_remote(ChannelName, Msg).
on_query(_InstId, {send_message, ChannelId, Msg}, _AfterQuery, _State) ->
?SLOG(debug, #{msg => "send msg to remote node", message => Msg,
channel_id => ChannelId}),
emqx_connector_mqtt_worker:send_to_remote(ChannelId, Msg).
on_health_check(_InstId, #{channels := NameList} = State) ->
Results = [{Name, emqx_connector_mqtt_worker:ping(Name)} || Name <- NameList],
@ -134,35 +139,43 @@ on_health_check(_InstId, #{channels := NameList} = State) ->
create_channel({{ingress_channels, Id}, #{subscribe_remote_topic := RemoteT} = Conf},
NamePrefix, BasicConf) ->
LocalT = maps:get(local_topic, Conf, undefined),
Name = ingress_channel_name(NamePrefix, Id),
logger:info("creating ingress channel ~p, remote ~s -> local ~s", [Name, RemoteT, LocalT]),
ChannId = ingress_channel_id(NamePrefix, Id),
?SLOG(info, #{msg => "creating ingress channel",
remote_topic => RemoteT,
local_topic => LocalT,
channel_id => ChannId}),
do_create_channel(BasicConf#{
name => Name,
clientid => clientid(Name),
name => ChannId,
clientid => clientid(ChannId),
subscriptions => Conf#{
local_topic => LocalT,
on_message_received => {fun ?MODULE:on_message_received/2, [Name]}
on_message_received => {fun ?MODULE:on_message_received/2, [ChannId]}
},
forwards => undefined});
create_channel({{egress_channels, Id}, #{remote_topic := RemoteT} = Conf},
NamePrefix, BasicConf) ->
LocalT = maps:get(subscribe_local_topic, Conf, undefined),
Name = egress_channel_name(NamePrefix, Id),
logger:info("creating egress channel ~p, local ~s -> remote ~s", [Name, LocalT, RemoteT]),
ChannId = egress_channel_id(NamePrefix, Id),
?SLOG(info, #{msg => "creating egress channel",
remote_topic => RemoteT,
local_topic => LocalT,
channel_id => ChannId}),
do_create_channel(BasicConf#{
name => Name,
clientid => clientid(Name),
name => ChannId,
clientid => clientid(ChannId),
subscriptions => undefined,
forwards => Conf#{subscribe_local_topic => LocalT}}).
remove_channel(ChannelName) ->
logger:info("removing channel ~p", [ChannelName]),
case ?MODULE:drop_bridge(ChannelName) of
remove_channel(ChannId) ->
?SLOG(info, #{msg => "removing channel",
channel_id => ChannId}),
case ?MODULE:drop_bridge(ChannId) of
ok -> ok;
{error, not_found} -> ok;
{error, Reason} ->
logger:error("stop channel ~p failed, error: ~p", [ChannelName, Reason])
?SLOG(error, #{msg => "stop channel failed",
channel_id => ChannId, reason => Reason})
end.
do_create_channel(#{name := Name} = Conf) ->
@ -215,9 +228,9 @@ basic_config(#{
%% Flatten a map into a list of {{Tag, Key}, Value} pairs, preserving
%% the (arbitrary) ordering produced by maps:to_list/1.
taged_map_list(Tag, Map) ->
    lists:map(fun({K, V}) -> {{Tag, K}, V} end, maps:to_list(Map)).
ingress_channel_name(Prefix, Id) ->
ingress_channel_id(Prefix, Id) ->
channel_name("ingress_channels", Prefix, Id).
egress_channel_name(Prefix, Id) ->
egress_channel_id(Prefix, Id) ->
channel_name("egress_channels", Prefix, Id).
channel_name(Type, Prefix, Id) ->

View File

@ -17,6 +17,7 @@
-include_lib("typerefl/include/types.hrl").
-include_lib("emqx_resource/include/emqx_resource_behaviour.hrl").
-include_lib("emqx/include/logger.hrl").
%% callbacks of behaviour emqx_resource
-export([ on_start/2
@ -54,7 +55,8 @@ on_start(InstId, #{server := {Host, Port},
auto_reconnect := AutoReconn,
pool_size := PoolSize,
ssl := SSL } = Config) ->
logger:info("starting mysql connector: ~p, config: ~p", [InstId, Config]),
?SLOG(info, #{msg => "starting mysql connector",
connector => InstId, config => Config}),
SslOpts = case maps:get(enable, SSL) of
true ->
[{ssl, [{server_name_indication, disable} |
@ -73,16 +75,21 @@ on_start(InstId, #{server := {Host, Port},
{ok, #{poolname => PoolName}}.
on_stop(InstId, #{poolname := PoolName}) ->
logger:info("stopping mysql connector: ~p", [InstId]),
?SLOG(info, #{msg => "stopping mysql connector",
connector => InstId}),
emqx_plugin_libs_pool:stop_pool(PoolName).
on_query(InstId, {sql, SQL}, AfterQuery, #{poolname := PoolName} = State) ->
on_query(InstId, {sql, SQL, []}, AfterQuery, #{poolname := PoolName} = State);
on_query(InstId, {sql, SQL, Params}, AfterQuery, #{poolname := PoolName} = State) ->
logger:debug("mysql connector ~p received sql query: ~p, at state: ~p", [InstId, SQL, State]),
case Result = ecpool:pick_and_do(PoolName, {mysql, query, [SQL, Params]}, no_handover) of
on_query(InstId, {sql, SQL}, AfterQuery, #{poolname := _PoolName} = State) ->
on_query(InstId, {sql, SQL, [], default_timeout}, AfterQuery, State);
on_query(InstId, {sql, SQL, Params}, AfterQuery, #{poolname := _PoolName} = State) ->
on_query(InstId, {sql, SQL, Params, default_timeout}, AfterQuery, State);
on_query(InstId, {sql, SQL, Params, Timeout}, AfterQuery, #{poolname := PoolName} = State) ->
?SLOG(debug, #{msg => "mysql connector received sql query",
connector => InstId, sql => SQL, state => State}),
case Result = ecpool:pick_and_do(PoolName, {mysql, query, [SQL, Params, Timeout]}, no_handover) of
{error, Reason} ->
logger:debug("mysql connector ~p do sql query failed, sql: ~p, reason: ~p", [InstId, SQL, Reason]),
?SLOG(error, #{msg => "mysql connector do sql query failed",
connector => InstId, sql => SQL, reason => Reason}),
emqx_resource:query_failed(AfterQuery);
_ ->
emqx_resource:query_success(AfterQuery)

Some files were not shown because too many files have changed in this diff Show More