Merge remote-tracking branch 'origin/master' into build-with-mix
commit 51bd361b16
@@ -175,26 +175,6 @@ EOF
 cat /var/log/emqx/emqx.log.1 || true
 exit 1
 fi
 
-if [ "$(sed -n '/^ID=/p' /etc/os-release | sed -r 's/ID=(.*)/\1/g' | sed 's/"//g')" = ubuntu ] \
-|| [ "$(sed -n '/^ID=/p' /etc/os-release | sed -r 's/ID=(.*)/\1/g' | sed 's/"//g')" = debian ] ;then
-if ! service emqx start; then
-cat /var/log/emqx/erlang.log.1 || true
-cat /var/log/emqx/emqx.log.1 || true
-exit 1
-fi
-IDLE_TIME=0
-while ! curl http://127.0.0.1:18083/api/v5/status >/dev/null 2>&1; do
-if [ $IDLE_TIME -gt 10 ]
-then
-echo "emqx service error"
-exit 1
-fi
-sleep 10
-IDLE_TIME=$((IDLE_TIME+1))
-done
-service emqx stop
-fi
 }
 
 relup_test(){
@@ -1,6 +1,6 @@
 MYSQL_TAG=8
 REDIS_TAG=6
-MONGO_TAG=4
+MONGO_TAG=5
 PGSQL_TAG=13
 LDAP_TAG=2.4.50
 
@@ -14,7 +14,7 @@ up:
 env \
 MYSQL_TAG=8 \
 REDIS_TAG=6 \
-MONGO_TAG=4 \
+MONGO_TAG=5 \
 PGSQL_TAG=13 \
 LDAP_TAG=2.4.50 \
 docker-compose \
@@ -2,11 +2,9 @@ version: '3.9'
 
 services:
 mongo_server:
 container_name: mongo
 image: mongo:${MONGO_TAG}
 restart: always
-environment:
-MONGO_INITDB_DATABASE: mqtt
 networks:
 - emqx_bridge
 ports:
@@ -3,7 +3,7 @@ version: '3.9'
 services:
 erlang23:
 container_name: erlang23
-image: ghcr.io/emqx/emqx-builder/5.0-2:23.3.4.9-3-ubuntu20.04
+image: ghcr.io/emqx/emqx-builder/5.0-3:23.3.4.9-3-ubuntu20.04
 env_file:
 - conf.env
 environment:
@@ -23,7 +23,7 @@ services:
 
 erlang24:
 container_name: erlang24
-image: ghcr.io/emqx/emqx-builder/5.0-2:24.1.5-2-ubuntu20.04
+image: ghcr.io/emqx/emqx-builder/5.0-3:24.1.5-3-ubuntu20.04
 env_file:
 - conf.env
 environment:
@@ -19,7 +19,7 @@ jobs:
 prepare:
 runs-on: ubuntu-20.04
 # prepare source with any OTP version, no need for a matrix
-container: "ghcr.io/emqx/emqx-builder/5.0-2:24.1.5-2-ubuntu20.04"
+container: "ghcr.io/emqx/emqx-builder/5.0-3:24.1.5-3-ubuntu20.04"
 
 outputs:
 old_vsns: ${{ steps.find_old_versons.outputs.old_vsns }}
@@ -129,7 +129,7 @@ jobs:
 - emqx
 - emqx-enterprise
 otp:
-- 24.1.5-2
+- 24.1.5-3
 macos:
 - macos-11
 - macos-10.15
@@ -215,7 +215,7 @@ jobs:
 - emqx
 - emqx-enterprise
 otp:
-- 24.1.5-2 # we test with OTP 23, but only build package on OTP 24 versions
+- 24.1.5-3 # we test with OTP 23, but only build package on OTP 24 versions
 arch:
 - amd64
 - arm64
@@ -301,7 +301,7 @@ jobs:
 -v $(pwd):/emqx \
 --workdir /emqx \
 --platform linux/$ARCH \
-ghcr.io/emqx/emqx-builder/5.0-2:$OTP-$SYSTEM \
+ghcr.io/emqx/emqx-builder/5.0-3:$OTP-$SYSTEM \
 bash -euc "make $PROFILE-zip || cat rebar3.crashdump; \
 make $PROFILE-pkg || cat rebar3.crashdump; \
 EMQX_NAME=$PROFILE && .ci/build_packages/tests.sh"
@@ -336,7 +336,7 @@ jobs:
 - emqx-enterprise
 # NOTE: for docker, only support latest otp version, not a matrix
 otp:
-- 24.1.5-2 # update to latest
+- 24.1.5-3 # update to latest
 
 steps:
 - uses: actions/download-artifact@v2
@@ -377,7 +377,7 @@ jobs:
 tags: ${{ steps.meta.outputs.tags }}
 labels: ${{ steps.meta.outputs.labels }}
 build-args: |
-BUILD_FROM=ghcr.io/emqx/emqx-builder/5.0-2:${{ matrix.otp }}-alpine3.14
+BUILD_FROM=ghcr.io/emqx/emqx-builder/5.0-3:${{ matrix.otp }}-alpine3.14
 RUN_FROM=alpine:3.14
 EMQX_NAME=${{ matrix.profile }}
 file: source/deploy/docker/Dockerfile
@@ -405,7 +405,7 @@ jobs:
 - emqx
 - emqx-enterprise
 otp:
-- 24.1.5-2
+- 24.1.5-3
 
 steps:
 - uses: actions/checkout@v2
@@ -24,12 +24,12 @@ jobs:
 - emqx
 - emqx-enterprise
 otp:
-- 24.1.5-2
+- 24.1.5-3
 os:
 - ubuntu20.04
 - centos7
 
-container: "ghcr.io/emqx/emqx-builder/5.0-2:${{ matrix.otp }}-${{ matrix.os }}"
+container: "ghcr.io/emqx/emqx-builder/5.0-3:${{ matrix.otp }}-${{ matrix.os }}"
 
 steps:
 - uses: actions/checkout@v1
@@ -55,7 +55,7 @@ jobs:
 - emqx
 - emqx-enterprise
 otp:
-- 24.1.5-2
+- 24.1.5-3
 macos:
 - macos-11
 - macos-10.15
@@ -5,7 +5,7 @@ on: [pull_request]
 jobs:
 check_deps_integrity:
 runs-on: ubuntu-20.04
-container: "ghcr.io/emqx/emqx-builder/5.0-2:24.1.5-2-ubuntu20.04"
+container: "ghcr.io/emqx/emqx-builder/5.0-3:24.1.5-3-ubuntu20.04"
 
 steps:
 - uses: actions/checkout@v2
@@ -13,10 +13,10 @@ jobs:
 matrix:
 otp:
 - 23.3.4.9-3
-- 24.1.5-2
+- 24.1.5-3
 
 runs-on: ubuntu-20.04
-container: "ghcr.io/emqx/emqx-builder/5.0-2:${{ matrix.otp }}-ubuntu20.04"
+container: "ghcr.io/emqx/emqx-builder/5.0-3:${{ matrix.otp }}-ubuntu20.04"
 
 steps:
 - uses: actions/checkout@v2
@@ -14,7 +14,7 @@ jobs:
 prepare:
 runs-on: ubuntu-20.04
 # prepare source with any OTP version, no need for a matrix
-container: ghcr.io/emqx/emqx-builder/5.0-2:24.1.5-2-alpine3.14
+container: ghcr.io/emqx/emqx-builder/5.0-3:24.1.5-3-alpine3.14
 
 steps:
 - uses: actions/checkout@v2
@@ -55,7 +55,7 @@ jobs:
 - name: make docker image
 working-directory: source
 env:
-EMQX_BUILDER: ghcr.io/emqx/emqx-builder/5.0-2:24.1.5-2-alpine3.14
+EMQX_BUILDER: ghcr.io/emqx/emqx-builder/5.0-3:24.1.5-3-alpine3.14
 run: |
 make ${{ matrix.profile }}-docker
 - name: run emqx
@@ -100,7 +100,7 @@ jobs:
 - name: make docker image
 working-directory: source
 env:
-EMQX_BUILDER: ghcr.io/emqx/emqx-builder/5.0-2:24.1.5-2-alpine3.14
+EMQX_BUILDER: ghcr.io/emqx/emqx-builder/5.0-3:24.1.5-3-alpine3.14
 run: |
 make ${{ matrix.profile }}-docker
 echo "TARGET=emqx/${{ matrix.profile }}" >> $GITHUB_ENV
@@ -19,10 +19,10 @@ jobs:
 - emqx
 - emqx-enterprise
 otp_vsn:
-- 24.1.5-2
+- 24.1.5-3
 
 runs-on: ubuntu-20.04
-container: "ghcr.io/emqx/emqx-builder/5.0-2:${{ matrix.otp_vsn }}-ubuntu20.04"
+container: "ghcr.io/emqx/emqx-builder/5.0-3:${{ matrix.otp_vsn }}-ubuntu20.04"
 
 defaults:
 run:
@@ -16,7 +16,7 @@ jobs:
 strategy:
 matrix:
 emqx_builder:
-- 5.0-2:24.1.5-2 # run dialyzer on latest OTP
+- 5.0-3:24.1.5-3 # run dialyzer on latest OTP
 
 runs-on: ubuntu-20.04
 container: "ghcr.io/emqx/emqx-builder/${{ matrix.emqx_builder }}-ubuntu20.04"
@@ -32,7 +32,7 @@ jobs:
 strategy:
 matrix:
 emqx_builder:
-- 5.0-2:24.1.5-2
+- 5.0-3:24.1.5-3
 
 runs-on: ubuntu-20.04
 container: "ghcr.io/emqx/emqx-builder/${{ matrix.emqx_builder }}-ubuntu20.04"
@@ -55,12 +55,14 @@ jobs:
 - uses: actions/checkout@v2
 - name: docker compose up
 env:
+MONGO_TAG: 5
 MYSQL_TAG: 8
 PGSQL_TAG: 13
 REDIS_TAG: 6
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 run: |
 docker-compose \
+-f .ci/docker-compose-file/docker-compose-mongo-single-tcp.yaml \
 -f .ci/docker-compose-file/docker-compose-mysql-tcp.yaml \
 -f .ci/docker-compose-file/docker-compose-pgsql-tcp.yaml \
 -f .ci/docker-compose-file/docker-compose-redis-single-tcp.yaml \
@@ -1 +1 @@
-erlang 24.1.5-2
+erlang 24.1.5-3
Makefile (2 lines changed)
@@ -57,7 +57,7 @@ APPS=$(shell $(CURDIR)/scripts/find-apps.sh)
 ## app/name-ct targets are intended for local tests hence cover is not enabled
 .PHONY: $(APPS:%=%-ct)
 define gen-app-ct-target
-$1-ct:
+$1-ct: conf-segs
 $(REBAR) ct --name $(CT_NODE_NAME) -v --suite $(shell $(CURDIR)/scripts/find-suites.sh $1)
 endef
 $(foreach app,$(APPS),$(eval $(call gen-app-ct-target,$(app))))
@@ -0,0 +1,31 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%--------------------------------------------------------------------
+
+-ifndef(EMQX_AUTHENTICATION_HRL).
+-define(EMQX_AUTHENTICATION_HRL, true).
+
+%% config root name all auth providers have to agree on.
+-define(EMQX_AUTHENTICATION_CONFIG_ROOT_NAME, "authentication").
+-define(EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_ATOM, authentication).
+-define(EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_BINARY, <<"authentication">>).
+
+%% key to a persistent term which stores a module name in order to inject
+%% schema module at run-time to keep emqx app's compile time purity.
+%% see emqx_schema.erl for more details
+%% and emqx_conf_schema for an examples
+-define(EMQX_AUTHENTICATION_SCHEMA_MODULE_PT_KEY, emqx_authentication_schema_module).
+
+-endif.
@@ -17,10 +17,10 @@
 , {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.0"}}}
 , {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.11.1"}}}
 , {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.5.1"}}}
-, {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.20.6"}}}
+, {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.22.0"}}}
 , {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}}
 , {recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}}
-, {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "0.15.0"}}}
+, {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "0.16.0"}}}
 ]}.
 
 {plugins, [{rebar3_proper, "0.12.1"}]}.
@@ -63,4 +63,5 @@ do_authorize(ClientInfo, PubSub, Topic) ->
 
 -compile({inline, [run_hooks/3]}).
 run_hooks(Name, Args, Acc) ->
-ok = emqx_metrics:inc(Name), emqx_hooks:run_fold(Name, Args, Acc).
+ok = emqx_metrics:inc(Name),
+emqx_hooks:run_fold(Name, Args, Acc).
@@ -24,9 +24,12 @@
 
 -include("emqx.hrl").
 -include("logger.hrl").
+-include("emqx_authentication.hrl").
 
 -include_lib("stdlib/include/ms_transform.hrl").
 
+-define(CONF_ROOT, ?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_ATOM).
 
 %% The authentication entrypoint.
 -export([ authenticate/2
 ]).
@@ -383,8 +386,8 @@ list_users(ChainName, AuthenticatorID, Params) ->
 %%--------------------------------------------------------------------
 
 init(_Opts) ->
-ok = emqx_config_handler:add_handler([authentication], ?MODULE),
-ok = emqx_config_handler:add_handler([listeners, '?', '?', authentication], ?MODULE),
+ok = emqx_config_handler:add_handler([?CONF_ROOT], ?MODULE),
+ok = emqx_config_handler:add_handler([listeners, '?', '?', ?CONF_ROOT], ?MODULE),
 {ok, #{hooked => false, providers => #{}}}.
 
 handle_call(get_providers, _From, #{providers := Providers} = State) ->
@@ -496,8 +499,8 @@ terminate(Reason, _State) ->
 Other -> ?SLOG(error, #{msg => "emqx_authentication_terminating",
 reason => Other})
 end,
-emqx_config_handler:remove_handler([authentication]),
-emqx_config_handler:remove_handler([listeners, '?', '?', authentication]),
+emqx_config_handler:remove_handler([?CONF_ROOT]),
+emqx_config_handler:remove_handler([listeners, '?', '?', ?CONF_ROOT]),
 ok.
 
 code_change(_OldVsn, State, _Extra) ->
@@ -34,6 +34,7 @@
 -export_type([config/0]).
 
 -include("logger.hrl").
+-include("emqx_authentication.hrl").
 
 -type parsed_config() :: #{mechanism := atom(),
 backend => atom(),
@@ -132,9 +133,9 @@ do_post_config_update({move_authenticator, ChainName, AuthenticatorID, Position}
 
 check_configs(Configs) ->
 Providers = emqx_authentication:get_providers(),
-lists:map(fun(C) -> do_check_conifg(C, Providers) end, Configs).
+lists:map(fun(C) -> do_check_config(C, Providers) end, Configs).
 
-do_check_conifg(Config, Providers) ->
+do_check_config(Config, Providers) ->
 Type = authn_type(Config),
 case maps:get(Type, Providers, false) of
 false ->
@@ -143,19 +144,20 @@ do_check_conifg(Config, Providers) ->
 providers => Providers}),
 throw({unknown_authn_type, Type});
 Module ->
-do_check_conifg(Type, Config, Module)
+do_check_config(Type, Config, Module)
 end.
 
-do_check_conifg(Type, Config, Module) ->
+do_check_config(Type, Config, Module) ->
 F = case erlang:function_exported(Module, check_config, 1) of
 true ->
 fun Module:check_config/1;
 false ->
 fun(C) ->
-#{config := R} =
-hocon_schema:check_plain(Module, #{<<"config">> => C},
+Key = list_to_binary(?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME),
+AtomKey = list_to_atom(?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME),
+R = hocon_schema:check_plain(Module, #{Key => C},
 #{atom_key => true}),
-R
+maps:get(AtomKey, R)
 end
 end,
 try
@@ -261,8 +263,8 @@ authn_type(#{mechanism := M}) -> atom(M);
 authn_type(#{<<"mechanism">> := M, <<"backend">> := B}) -> {atom(M), atom(B)};
 authn_type(#{<<"mechanism">> := M}) -> atom(M).
 
-atom(Bin) ->
-binary_to_existing_atom(Bin, utf8).
+atom(A) when is_atom(A) -> A;
+atom(Bin) -> binary_to_existing_atom(Bin, utf8).
 
 %% The relative dir for ssl files.
 certs_dir(ChainName, ConfigOrID) ->
@@ -268,23 +268,39 @@ init_load(SchemaMod, Conf) when is_list(Conf) orelse is_binary(Conf) ->
 }),
 error(failed_to_load_hocon_conf)
 end;
-init_load(SchemaMod, RawConf0) when is_map(RawConf0) ->
+init_load(SchemaMod, RawConf) when is_map(RawConf) ->
 ok = save_schema_mod_and_names(SchemaMod),
-%% check and save configs
-{_AppEnvs, CheckedConf} = check_config(SchemaMod, RawConf0),
+%% check configs agains the schema, with environment variables applied on top
+{_AppEnvs, CheckedConf} =
+check_config(SchemaMod, RawConf, #{apply_override_envs => true}),
+%% fill default values for raw config
+RawConfWithEnvs = merge_envs(SchemaMod, RawConf),
+RootNames = get_root_names(),
 ok = save_to_config_map(maps:with(get_atom_root_names(), CheckedConf),
-maps:with(get_root_names(), RawConf0)).
+maps:with(RootNames, RawConfWithEnvs)).
 
 include_dirs() ->
 [filename:join(emqx:data_dir(), "configs")].
 
+merge_envs(SchemaMod, RawConf) ->
+Opts = #{logger => fun(_, _) -> ok end, %% everything should have been logged already when check_config
+nullable => true, %% TODO: evil, remove, nullable should be declared in schema
+format => map,
+apply_override_envs => true
+},
+hocon_schema:merge_env_overrides(SchemaMod, RawConf, all, Opts).
 
 -spec check_config(module(), raw_config()) -> {AppEnvs, CheckedConf}
 when AppEnvs :: app_envs(), CheckedConf :: config().
 check_config(SchemaMod, RawConf) ->
-Opts = #{return_plain => true,
-nullable => true,
-format => map
-},
+check_config(SchemaMod, RawConf, #{}).
+
+check_config(SchemaMod, RawConf, Opts0) ->
+Opts1 = #{return_plain => true,
+nullable => true, %% TODO: evil, remove, nullable should be declared in schema
+format => map
+},
+Opts = maps:merge(Opts0, Opts1),
 {AppEnvs, CheckedConf} =
 hocon_schema:map_translate(SchemaMod, RawConf, Opts),
 {AppEnvs, emqx_map_lib:unsafe_atom_key_map(CheckedConf)}.
@@ -312,13 +328,15 @@ read_override_conf(#{} = Opts) ->
 File = override_conf_file(Opts),
 load_hocon_file(File, map).
 
-override_conf_file(Opts) ->
+override_conf_file(Opts) when is_map(Opts) ->
 Key =
 case maps:get(override_to, Opts, local) of
 local -> local_override_conf_file;
 cluster -> cluster_override_conf_file
 end,
-application:get_env(emqx, Key, undefined).
+application:get_env(emqx, Key, undefined);
+override_conf_file(Which) when is_atom(Which) ->
+application:get_env(emqx, Which, undefined).
 
 -spec save_schema_mod_and_names(module()) -> ok.
 save_schema_mod_and_names(SchemaMod) ->
@@ -248,8 +248,8 @@ parse_packet(#mqtt_packet_header{type = ?CONNECT}, FrameBin, _Options) ->
 },
 {ConnPacket1, Rest5} = parse_will_message(ConnPacket, Rest4),
 {Username, Rest6} = parse_utf8_string(Rest5, bool(UsernameFlag)),
-{Passsword, <<>>} = parse_utf8_string(Rest6, bool(PasswordFlag)),
-ConnPacket1#mqtt_packet_connect{username = Username, password = Passsword};
+{Password, <<>>} = parse_utf8_string(Rest6, bool(PasswordFlag)),
+ConnPacket1#mqtt_packet_connect{username = Username, password = Password};
 
 parse_packet(#mqtt_packet_header{type = ?CONNACK},
 <<AckFlags:8, ReasonCode:8, Rest/binary>>, #{version := Ver}) ->
@@ -111,7 +111,7 @@ current_conns(ID, ListenOn) ->
 {Type, Name} = parse_listener_id(ID),
 current_conns(Type, Name, ListenOn).
 
-current_conns(Type, Name, ListenOn) when Type == tcl; Type == ssl ->
+current_conns(Type, Name, ListenOn) when Type == tcp; Type == ssl ->
 esockd:get_current_connections({listener_id(Type, Name), ListenOn});
 current_conns(Type, Name, _ListenOn) when Type =:= ws; Type =:= wss ->
 proplists:get_value(all_connections, ranch:info(listener_id(Type, Name)));
@@ -122,7 +122,7 @@ max_conns(ID, ListenOn) ->
 {Type, Name} = parse_listener_id(ID),
 max_conns(Type, Name, ListenOn).
 
-max_conns(Type, Name, ListenOn) when Type == tcl; Type == ssl ->
+max_conns(Type, Name, ListenOn) when Type == tcp; Type == ssl ->
 esockd:get_max_connections({listener_id(Type, Name), ListenOn});
 max_conns(Type, Name, _ListenOn) when Type =:= ws; Type =:= wss ->
 proplists:get_value(max_connections, ranch:info(listener_id(Type, Name)));
@@ -96,8 +96,8 @@ handle_info({timeout, _Timer, check}, State) ->
 _ = case emqx_vm:cpu_util() of %% TODO: should be improved?
 0 -> ok;
 Busy when Busy >= CPUHighWatermark ->
-Usage = io_lib:format("~p%", [Busy]),
-Message = [Usage, " cpu usage"],
+Usage = list_to_binary(io_lib:format("~.2f%", [Busy])),
+Message = <<Usage/binary, " cpu usage">>,
 emqx_alarm:activate(high_cpu_usage,
 #{
 usage => Usage,
@@ -107,8 +107,8 @@ handle_info({timeout, _Timer, check}, State) ->
 Message),
 start_check_timer();
 Busy when Busy =< CPULowWatermark ->
-Usage = io_lib:format("~p%", [Busy]),
-Message = [Usage, " cpu usage"],
+Usage = list_to_binary(io_lib:format("~.2f%", [Busy])),
+Message = <<Usage/binary, " cpu usage">>,
 emqx_alarm:deactivate(high_cpu_usage,
 #{
 usage => Usage,
@@ -22,6 +22,7 @@
 -dialyzer(no_unused).
 -dialyzer(no_fail_call).
 
+-include("emqx_authentication.hrl").
 -include_lib("typerefl/include/types.hrl").
 
 -type duration() :: integer().
@@ -105,11 +106,29 @@ and can not be deleted."""
 The configs here work as default values which can be overriden
 in <code>zone</code> configs"""
 })}
-, {"authentication",
+, {?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME,
 authentication(
-"""Default authentication configs for all MQTT listeners.<br>
+"""Default authentication configs for all MQTT listeners.
+<br>
 For per-listener overrides see <code>authentication</code>
-in listener configs""")}
+in listener configs
+<br>
+<br>
+EMQ X can be configured with:
+<br>
+<ul>
+<li><code>[]</code>: The default value, it allows *ALL* logins</li>
+<li>one: For example <code>{enable:true,backend:\"built-in-database\",mechanism=\"password-based\"}</code></li>
+<li>chain: An array of structs.</li>
+</ul>
+<br>
+When a chain is configured, the login credentials are checked against the backends
+per the configured order, until an 'allow' or 'deny' decision can be made.
+<br>
+If there is no decision after a full chain exhaustion, the login is rejected.
+""")}
+%% NOTE: authorization schema here is only to keep emqx app prue
+%% the full schema for EMQ X node is injected in emqx_conf_schema.
 , {"authorization",
 sc(ref("authorization"),
 #{})}
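The value semantics spelled out in the new description, an empty list, a single authenticator, or a chain, can be illustrated with a config sketch. This is not part of the commit; the field names follow the examples that appear elsewhere in this diff (enable, mechanism, backend, server, database, cmd), everything else is an assumption:

authentication: [
  {
    enable: true
    mechanism: "password-based"
    backend: "built-in-database"
  },
  {
    enable: true
    mechanism: "password-based"
    backend: "redis"
    server: "127.0.0.1:6379"
    database: 0
    cmd: "HMGET ${username} password_hash salt"
  }
]

With such a chain, a login is checked against the built-in database first and then against Redis, in the configured order, until one authenticator reaches an allow or deny decision.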
@@ -972,7 +991,7 @@ mqtt_listener() ->
 sc(duration(),
 #{})
 }
-, {"authentication",
+, {?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME,
 authentication("Per-listener authentication override")
 }
 ].
@@ -1231,16 +1250,18 @@ ciphers_schema(Default) ->
 false -> fun validate_ciphers/1
 end
 , desc =>
-"""TLS cipher suite names separated by comma, or as an array of strings
+"""This config holds TLS cipher suite names separated by comma,
+or as an array of strings. e.g.
 <code>\"TLS_AES_256_GCM_SHA384,TLS_AES_128_GCM_SHA256\"</code> or
-<code>[\"TLS_AES_256_GCM_SHA384\",\"TLS_AES_128_GCM_SHA256\"]</code].
+<code>[\"TLS_AES_256_GCM_SHA384\",\"TLS_AES_128_GCM_SHA256\"]</code>.
 <br>
 Ciphers (and their ordering) define the way in which the
-client and server encrypts information over the wire.
+client and server encrypts information over the network connection.
 Selecting a good cipher suite is critical for the
 application's data security, confidentiality and performance.
-The names should be in OpenSSL sting format (not RFC format).
-Default values and examples proveded by EMQ X config
+
+The names should be in OpenSSL string format (not RFC format).
+All default values and examples proveded by EMQ X config
 documentation are all in OpenSSL format.<br>
 
 NOTE: Certain cipher suites are only compatible with
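As the reworded description says, the same cipher list can be written either as one comma-separated string or as an array of strings. A minimal sketch in emqx.conf syntax (illustrative only; where the option sits, for example under a listener's SSL options, is an assumption):

ciphers: "TLS_AES_256_GCM_SHA384,TLS_AES_128_GCM_SHA256"

is equivalent to

ciphers: ["TLS_AES_256_GCM_SHA384", "TLS_AES_128_GCM_SHA256"]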
@@ -1436,12 +1457,23 @@ str(S) when is_list(S) ->
 S.
 
 authentication(Desc) ->
-#{ type => hoconsc:lazy(hoconsc:union([typerefl:map(), hoconsc:array(typerefl:map())]))
-, desc => iolist_to_binary([Desc, "<br>", """
+%% authentication schemais lazy to make it more 'plugable'
+%% the type checks are done in emqx_auth application when it boots.
+%% and in emqx_authentication_config module for rutime changes.
+Default = hoconsc:lazy(hoconsc:union([typerefl:map(), hoconsc:array(typerefl:map())])),
+%% as the type is lazy, the runtime module injection from EMQX_AUTHENTICATION_SCHEMA_MODULE_PT_KEY
+%% is for now only affecting document generation.
+%% maybe in the future, we can find a more straightforward way to support
+%% * document generation (at compile time)
+%% * type checks before boot (in bin/emqx config generation)
+%% * type checks at runtime (when changing configs via management API)
+#{ type => case persistent_term:get(?EMQX_AUTHENTICATION_SCHEMA_MODULE_PT_KEY, undefined) of
+undefined -> Default;
+Module -> hoconsc:lazy(Module:root_type())
+end
+, desc => iolist_to_binary([Desc, """
 Authentication can be one single authenticator instance or a chain of authenticators as an array.
 When authenticating a login (username, client ID, etc.) the authenticators are checked
 in the configured order.<br>
-EMQ X comes with a set of pre-built autenticators, for more details, see
-<a href=\"#root-authenticator_config\">autenticator_config<a>
 """])
 }.
@@ -54,11 +54,11 @@ wildcard(Topic) when is_binary(Topic) ->
 wildcard(words(Topic));
 wildcard([]) ->
 false;
-wildcard(['#'|_]) ->
+wildcard(['#' | _]) ->
 true;
-wildcard(['+'|_]) ->
+wildcard(['+' | _]) ->
 true;
-wildcard([_H|T]) ->
+wildcard([_H | T]) ->
 wildcard(T).
 
 %% @doc Match Topic name with filter.
@@ -73,17 +73,17 @@ match(Name, Filter) when is_binary(Name), is_binary(Filter) ->
 match(words(Name), words(Filter));
 match([], []) ->
 true;
-match([H|T1], [H|T2]) ->
+match([H | T1], [H | T2]) ->
 match(T1, T2);
-match([_H|T1], ['+'|T2]) ->
+match([_H | T1], ['+' | T2]) ->
 match(T1, T2);
 match(_, ['#']) ->
 true;
-match([_H1|_], [_H2|_]) ->
+match([_H1 | _], [_H2 | _]) ->
 false;
-match([_H1|_], []) ->
+match([_H1 | _], []) ->
 false;
-match([], [_H|_T2]) ->
+match([], [_H | _T2]) ->
 false.
 
 %% @doc Validate topic name or filter
@@ -110,13 +110,13 @@ validate2([]) ->
 true;
 validate2(['#']) -> % end with '#'
 true;
-validate2(['#'|Words]) when length(Words) > 0 ->
+validate2(['#' | Words]) when length(Words) > 0 ->
 error('topic_invalid_#');
-validate2([''|Words]) ->
+validate2(['' | Words]) ->
 validate2(Words);
-validate2(['+'|Words]) ->
+validate2(['+' | Words]) ->
 validate2(Words);
-validate2([W|Words]) ->
+validate2([W | Words]) ->
 validate3(W) andalso validate2(Words).
 
 validate3(<<>>) ->
@@ -164,7 +164,7 @@ word(<<"#">>) -> '#';
 word(Bin) -> Bin.
 
 %% @doc '$SYS' Topic.
--spec(systop(atom()|string()|binary()) -> topic()).
+-spec(systop(atom() | string() | binary()) -> topic()).
 systop(Name) when is_atom(Name); is_list(Name) ->
 iolist_to_binary(lists:concat(["$SYS/brokers/", node(), "/", Name]));
 systop(Name) when is_binary(Name) ->
@@ -175,10 +175,10 @@ feed_var(Var, Val, Topic) ->
 feed_var(Var, Val, words(Topic), []).
 feed_var(_Var, _Val, [], Acc) ->
 join(lists:reverse(Acc));
-feed_var(Var, Val, [Var|Words], Acc) ->
-feed_var(Var, Val, Words, [Val|Acc]);
-feed_var(Var, Val, [W|Words], Acc) ->
-feed_var(Var, Val, Words, [W|Acc]).
+feed_var(Var, Val, [Var | Words], Acc) ->
+feed_var(Var, Val, Words, [Val | Acc]);
+feed_var(Var, Val, [W | Words], Acc) ->
+feed_var(Var, Val, Words, [W | Acc]).
 
 -spec(join(list(binary())) -> binary()).
 join([]) ->
@@ -218,4 +218,3 @@ parse(TopicFilter = <<"$share/", Rest/binary>>, Options) ->
 end;
 parse(TopicFilter, Options) ->
 {TopicFilter, Options}.
-
@@ -57,6 +57,7 @@
 sl_alloc,
 ll_alloc,
 fix_alloc,
+literal_alloc,
 std_alloc
 ]).
 
@@ -25,18 +25,11 @@
 -include_lib("common_test/include/ct.hrl").
 -include_lib("eunit/include/eunit.hrl").
 -include_lib("typerefl/include/types.hrl").
+-include("emqx_authentication.hrl").
--export([ roots/0, fields/1 ]).
-
--export([ create/2
-, update/2
-, authenticate/2
-, destroy/1
-, check_config/1
-]).
-
 -define(AUTHN, emqx_authentication).
 -define(config(KEY), (fun() -> {KEY, _V_} = lists:keyfind(KEY, 1, Config), _V_ end)()).
+-define(CONF_ROOT, ?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_ATOM).
 
 %%------------------------------------------------------------------------------
 %% Hocon Schema
@@ -250,7 +243,7 @@ t_update_config({init, Config}) ->
 {"auth2", AuthNType2} | Config];
 
 t_update_config(Config) when is_list(Config) ->
-emqx_config_handler:add_handler([authentication], emqx_authentication),
+emqx_config_handler:add_handler([?CONF_ROOT], emqx_authentication),
 ok = register_provider(?config("auth1"), ?MODULE),
 ok = register_provider(?config("auth2"), ?MODULE),
 Global = ?config(global),
|
@ -267,7 +260,7 @@ t_update_config(Config) when is_list(Config) ->
|
||||||
|
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
{ok, _},
|
{ok, _},
|
||||||
update_config([authentication], {create_authenticator, Global, AuthenticatorConfig1})),
|
update_config([?CONF_ROOT], {create_authenticator, Global, AuthenticatorConfig1})),
|
||||||
|
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
{ok, #{id := ID1, state := #{mark := 1}}},
|
{ok, #{id := ID1, state := #{mark := 1}}},
|
||||||
|
@@ -275,7 +268,7 @@ t_update_config(Config) when is_list(Config) ->
 
 ?assertMatch(
 {ok, _},
-update_config([authentication], {create_authenticator, Global, AuthenticatorConfig2})),
+update_config([?CONF_ROOT], {create_authenticator, Global, AuthenticatorConfig2})),
 
 ?assertMatch(
 {ok, #{id := ID2, state := #{mark := 1}}},
@@ -283,7 +276,7 @@ t_update_config(Config) when is_list(Config) ->
 
 ?assertMatch(
 {ok, _},
-update_config([authentication],
+update_config([?CONF_ROOT],
 {update_authenticator,
 Global,
 ID1,
@@ -296,25 +289,25 @@ t_update_config(Config) when is_list(Config) ->
 
 ?assertMatch(
 {ok, _},
-update_config([authentication], {move_authenticator, Global, ID2, top})),
+update_config([?CONF_ROOT], {move_authenticator, Global, ID2, top})),
 
 ?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(Global)),
 
-?assertMatch({ok, _}, update_config([authentication], {delete_authenticator, Global, ID1})),
+?assertMatch({ok, _}, update_config([?CONF_ROOT], {delete_authenticator, Global, ID1})),
 ?assertEqual(
 {error, {not_found, {authenticator, ID1}}},
 ?AUTHN:lookup_authenticator(Global, ID1)),
 
 ?assertMatch(
 {ok, _},
-update_config([authentication], {delete_authenticator, Global, ID2})),
+update_config([?CONF_ROOT], {delete_authenticator, Global, ID2})),
 
 ?assertEqual(
 {error, {not_found, {authenticator, ID2}}},
 ?AUTHN:lookup_authenticator(Global, ID2)),
 
 ListenerID = 'tcp:default',
-ConfKeyPath = [listeners, tcp, default, authentication],
+ConfKeyPath = [listeners, tcp, default, ?CONF_ROOT],
 
 ?assertMatch(
 {ok, _},
@@ -132,7 +132,7 @@ basic_conf() ->
 zones => zone_conf()
 }.
 
-set_test_listenser_confs() ->
+set_test_listener_confs() ->
 Conf = emqx_config:get([]),
 emqx_config:put(basic_conf()),
 Conf.
@@ -179,7 +179,7 @@ end_per_suite(_Config) ->
 ]).
 
 init_per_testcase(_TestCase, Config) ->
-NewConf = set_test_listenser_confs(),
+NewConf = set_test_listener_confs(),
 [{config, NewConf}|Config].
 
 end_per_testcase(_TestCase, Config) ->
@@ -59,7 +59,7 @@ init_per_suite(Config) ->
 ok = meck:expect(emqx_alarm, deactivate, fun(_) -> ok end),
 ok = meck:expect(emqx_alarm, deactivate, fun(_, _) -> ok end),
 
-emqx_channel_SUITE:set_test_listenser_confs(),
+emqx_channel_SUITE:set_test_listener_confs(),
 Config.
 
 end_per_suite(_Config) ->
@@ -22,6 +22,7 @@
 -include_lib("emqx/include/emqx.hrl").
 -include_lib("emqx/include/emqx_mqtt.hrl").
 -include_lib("eunit/include/eunit.hrl").
+-include_lib("common_test/include/ct.hrl").
 
 all() -> emqx_common_test_helpers:all(?MODULE).
 
@@ -37,10 +38,37 @@ end_per_suite(_Config) ->
 application:stop(esockd),
 application:stop(cowboy).
 
+init_per_testcase(Case, Config)
+when Case =:= t_max_conns_tcp; Case =:= t_current_conns_tcp ->
+{ok, _} = emqx_config_handler:start_link(),
+PrevListeners = emqx_config:get([listeners, tcp], #{}),
+PrevRateLimit = emqx_config:get([rate_limit], #{}),
+emqx_config:put([listeners, tcp], #{ listener_test =>
+#{ bind => {"127.0.0.1", 9999}
+, max_connections => 4321
+}
+}),
+emqx_config:put([rate_limit], #{max_conn_rate => 1000}),
+ListenerConf = #{ bind => {"127.0.0.1", 9999}
+},
+ok = emqx_listeners:start(),
+[ {listener_conf, ListenerConf}
+, {prev_listener_conf, PrevListeners}
+, {prev_rate_limit_conf, PrevRateLimit}
+| Config];
 init_per_testcase(_, Config) ->
 {ok, _} = emqx_config_handler:start_link(),
 Config.
 
+end_per_testcase(Case, Config)
+when Case =:= t_max_conns_tcp; Case =:= t_current_conns_tcp ->
+PrevListener = ?config(prev_listener_conf, Config),
+PrevRateLimit = ?config(prev_rate_limit_conf, Config),
+emqx_config:put([listeners, tcp], PrevListener),
+emqx_config:put([rate_limit], PrevRateLimit),
+emqx_listeners:stop(),
+_ = emqx_config_handler:stop(),
+ok;
 end_per_testcase(_, _Config) ->
 _ = emqx_config_handler:stop(),
 ok.
@@ -56,6 +84,14 @@ t_restart_listeners(_) ->
 ok = emqx_listeners:restart(),
 ok = emqx_listeners:stop().
 
+t_max_conns_tcp(_) ->
+%% Note: Using a string representation for the bind address like
+%% "127.0.0.1" does not work
+?assertEqual(4321, emqx_listeners:max_conns('tcp:listener_test', {{127,0,0,1}, 9999})).
+
+t_current_conns_tcp(_) ->
+?assertEqual(0, emqx_listeners:current_conns('tcp:listener_test', {{127,0,0,1}, 9999})).
+
 render_config_file() ->
 Path = local_path(["etc", "emqx.conf"]),
 {ok, Temp} = file:read_file(Path),
|
@ -101,4 +137,3 @@ get_base_dir(Module) ->
|
||||||
|
|
||||||
get_base_dir() ->
|
get_base_dir() ->
|
||||||
get_base_dir(?MODULE).
|
get_base_dir(?MODULE).
|
||||||
|
|
||||||
|
|
|
@@ -245,7 +245,7 @@ receive_messages(Count, Msgs) ->
 receive_messages(Count-1, [Msg|Msgs]);
 _Other ->
 receive_messages(Count, Msgs)
-after 1000 ->
+after 5000 ->
 Msgs
 end.
 
@@ -576,7 +576,7 @@ t_publish_while_client_is_gone(Config) ->
 | Config]),
 {ok, _} = emqtt:ConnFun(Client2),
 Msgs = receive_messages(2),
-?assertEqual(length(Msgs), 2),
+?assertMatch([_, _], Msgs),
 [Msg2, Msg1] = Msgs,
 ?assertEqual({ok, iolist_to_binary(Payload1)}, maps:find(payload, Msg1)),
 ?assertEqual({ok, 2}, maps:find(qos, Msg1)),
@@ -768,7 +768,7 @@ t_lost_messages_because_of_gc(Config) ->
 
 check_snabbkaffe_vanilla(Trace) ->
 ResumeTrace = [T || #{?snk_kind := K} = T <- Trace,
-re:run(atom_to_list(K), "^ps_") /= nomatch],
+re:run(to_list(K), "^ps_") /= nomatch],
 ?assertMatch([_|_], ResumeTrace),
 [_Sid] = lists:usort(?projection(sid, ResumeTrace)),
 %% Check internal flow of the emqx_cm resuming
@@ -811,6 +811,10 @@ check_snabbkaffe_vanilla(Trace) ->
 [Markers] = ?projection(markers, ?of_kind(ps_node_markers, Trace)),
 ?assertMatch([_], Markers).
 
+to_list(L) when is_list(L) -> L;
+to_list(A) when is_atom(A) -> atom_to_list(A);
+to_list(B) when is_binary(B) -> binary_to_list(B).
+
 %%--------------------------------------------------------------------
 %% Snabbkaffe tests
 %%--------------------------------------------------------------------
@@ -29,7 +29,7 @@ all() -> emqx_common_test_helpers:all(?MODULE).
 %%--------------------------------------------------------------------
 
 init_per_suite(Config) ->
-emqx_channel_SUITE:set_test_listenser_confs(),
+emqx_channel_SUITE:set_test_listener_confs(),
 ok = meck:new([emqx_hooks, emqx_metrics, emqx_broker],
 [passthrough, no_history, no_link]),
 ok = meck:expect(emqx_metrics, inc, fun(_) -> ok end),
@@ -20,6 +20,7 @@
 -compile(nowarn_export_all).
 
 -include_lib("eunit/include/eunit.hrl").
+-include_lib("emqx/include/emqx_placeholder.hrl").
 
 -import(emqx_topic,
 [ wildcard/1
@@ -183,9 +184,11 @@ t_feed_var(_) ->
 ?assertEqual(<<"$queue/client/clientId">>,
 feed_var(<<"$c">>, <<"clientId">>, <<"$queue/client/$c">>)),
 ?assertEqual(<<"username/test/client/x">>,
-feed_var(<<"%u">>, <<"test">>, <<"username/%u/client/x">>)),
+feed_var( ?PH_USERNAME, <<"test">>
+, <<"username/", ?PH_USERNAME/binary, "/client/x">>)),
 ?assertEqual(<<"username/test/client/clientId">>,
-feed_var(<<"%c">>, <<"clientId">>, <<"username/test/client/%c">>)).
+feed_var( ?PH_CLIENTID, <<"clientId">>
+, <<"username/test/client/", ?PH_CLIENTID/binary>>)).
 
 long_topic() ->
 iolist_to_binary([[integer_to_list(I), "/"] || I <- lists:seq(0, 66666)]).
@@ -199,6 +199,7 @@ t_trace_ip_address(_Config) ->
 ?assertEqual([], emqx_trace_handler:running()).
 
 filesync(Name, Type) ->
+ct:sleep(50),
 filesync(Name, Type, 3).
 
 %% sometime the handler process is not started yet.
@@ -1,6 +1 @@
-# authentication: {
-# mechanism: password-based
-# backend: built-in-database
-# user_id_type: clientid
-# }
-
+authentication: []
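The new default, authentication: [], allows all logins. To recover the behaviour sketched in the removed comment block, the same fields can be filled in as a single authenticator. This is an illustrative sketch only, reusing the keys from the old comment:

authentication: [
  {
    mechanism: password-based
    backend: built-in-database
    user_id_type: clientid
  }
]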
@@ -17,6 +17,8 @@
 -ifndef(EMQX_AUTHN_HRL).
 -define(EMQX_AUTHN_HRL, true).
 
+-include_lib("emqx/include/emqx_authentication.hrl").
+
 -define(APP, emqx_authn).
 
 -define(AUTHN, emqx_authentication).
@@ -27,4 +29,9 @@
 
 -define(AUTH_SHARD, emqx_authn_shard).
 
+%% has to be the same as the root field name defined in emqx_schema
+-define(CONF_NS, ?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME).
+-define(CONF_NS_ATOM, ?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_ATOM).
+-define(CONF_NS_BINARY, ?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_BINARY).
+
 -endif.
@@ -22,6 +22,8 @@
 , check_configs/1
 ]).
 
+-include("emqx_authn.hrl").
+
 providers() ->
 [ {{'password-based', 'built-in-database'}, emqx_authn_mnesia}
 , {{'password-based', mysql}, emqx_authn_mysql}
@@ -44,8 +46,8 @@ check_config(Config) ->
 
 check_config(Config, Opts) ->
 case do_check_config(Config, Opts) of
-#{config := Checked} -> Checked;
-#{<<"config">> := WithDefaults} -> WithDefaults
+#{?CONF_NS_ATOM := Checked} -> Checked;
+#{?CONF_NS_BINARY := WithDefaults} -> WithDefaults
 end.
 
 do_check_config(#{<<"mechanism">> := Mec} = Config, Opts) ->
@@ -56,10 +58,15 @@ do_check_config(#{<<"mechanism">> := Mec} = Config, Opts) ->
 case lists:keyfind(Key, 1, providers()) of
 false ->
 throw({unknown_handler, Key});
-{_, Provider} ->
-hocon_schema:check_plain(Provider, #{<<"config">> => Config},
+{_, ProviderModule} ->
+hocon_schema:check_plain(ProviderModule, #{?CONF_NS_BINARY => Config},
 Opts#{atom_key => true})
 end.
 
 atom(Bin) ->
-binary_to_existing_atom(Bin, utf8).
+try
+binary_to_existing_atom(Bin, utf8)
+catch
+_ : _ ->
+throw({unknown_auth_provider, Bin})
+end.
@@ -22,6 +22,7 @@
 -include("emqx_authn.hrl").
 -include_lib("emqx/include/emqx_placeholder.hrl").
 -include_lib("emqx/include/logger.hrl").
+-include_lib("emqx/include/emqx_authentication.hrl").
 
 -import(hoconsc, [mk/2, ref/1]).
 -import(emqx_dashboard_swagger, [error_codes/2]).
@ -32,8 +33,10 @@
|
||||||
|
|
||||||
% Swagger
|
% Swagger
|
||||||
|
|
||||||
-define(API_TAGS_GLOBAL, [<<"authentication">>, <<"authentication config(global)">>]).
|
-define(API_TAGS_GLOBAL, [?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_BINARY,
|
||||||
-define(API_TAGS_SINGLE, [<<"authentication">>, <<"authentication config(single listener)">>]).
|
<<"authentication config(global)">>]).
|
||||||
|
-define(API_TAGS_SINGLE, [?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_BINARY,
|
||||||
|
<<"authentication config(single listener)">>]).
|
||||||
|
|
||||||
-export([ api_spec/0
|
-export([ api_spec/0
|
||||||
, paths/0
|
, paths/0
|
||||||
|
@ -793,9 +796,10 @@ add_user(ChainName,
|
||||||
AuthenticatorID,
|
AuthenticatorID,
|
||||||
#{<<"user_id">> := UserID, <<"password">> := Password} = UserInfo) ->
|
#{<<"user_id">> := UserID, <<"password">> := Password} = UserInfo) ->
|
||||||
IsSuperuser = maps:get(<<"is_superuser">>, UserInfo, false),
|
IsSuperuser = maps:get(<<"is_superuser">>, UserInfo, false),
|
||||||
case emqx_authentication:add_user(ChainName, AuthenticatorID, #{ user_id => UserID
|
case emqx_authentication:add_user(ChainName, AuthenticatorID,
|
||||||
, password => Password
|
#{ user_id => UserID
|
||||||
, is_superuser => IsSuperuser}) of
|
, password => Password
|
||||||
|
, is_superuser => IsSuperuser}) of
|
||||||
{ok, User} ->
|
{ok, User} ->
|
||||||
{201, User};
|
{201, User};
|
||||||
{error, Reason} ->
|
{error, Reason} ->
|
||||||
|
@ -845,7 +849,8 @@ list_users(ChainName, AuthenticatorID, PageParams) ->
|
||||||
end.
|
end.
|
||||||
|
|
||||||
update_config(Path, ConfigRequest) ->
|
update_config(Path, ConfigRequest) ->
|
||||||
emqx:update_config(Path, ConfigRequest, #{rawconf_with_defaults => true}).
|
emqx_conf:update(Path, ConfigRequest, #{rawconf_with_defaults => true,
|
||||||
|
override_to => cluster}).
|
||||||
|
|
||||||
get_raw_config_with_defaults(ConfKeyPath) ->
|
get_raw_config_with_defaults(ConfKeyPath) ->
|
||||||
NConfKeyPath = [atom_to_binary(Key, utf8) || Key <- ConfKeyPath],
|
NConfKeyPath = [atom_to_binary(Key, utf8) || Key <- ConfKeyPath],
|
||||||
|
@ -1027,7 +1032,7 @@ authenticator_examples() ->
|
||||||
backend => <<"redis">>,
|
backend => <<"redis">>,
|
||||||
server => <<"127.0.0.1:6379">>,
|
server => <<"127.0.0.1:6379">>,
|
||||||
database => 0,
|
database => 0,
|
||||||
query => <<"HMGET ${username} password_hash salt">>,
|
cmd => <<"HMGET ${username} password_hash salt">>,
|
||||||
password_hash_algorithm => <<"sha256">>,
|
password_hash_algorithm => <<"sha256">>,
|
||||||
salt_position => <<"prefix">>
|
salt_position => <<"prefix">>
|
||||||
}
|
}
|
||||||
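A rough reading of the switch from emqx:update_config/3 to emqx_conf:update/3 (behaviour inferred from the option names, not verified against emqx_conf):

    %% hypothetical call site; ConfigRequest is whatever the API handler built
    emqx_conf:update([?CONF_NS_ATOM], ConfigRequest,
                     #{rawconf_with_defaults => true,   %% response carries schema defaults
                       override_to => cluster}),        %% persist to the cluster override, not just this node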
|
|
|
@ -25,6 +25,8 @@
|
||||||
, stop/1
|
, stop/1
|
||||||
]).
|
]).
|
||||||
|
|
||||||
|
-include_lib("emqx/include/emqx_authentication.hrl").
|
||||||
|
|
||||||
-dialyzer({nowarn_function, [start/2]}).
|
-dialyzer({nowarn_function, [start/2]}).
|
||||||
|
|
||||||
%%------------------------------------------------------------------------------
|
%%------------------------------------------------------------------------------
|
||||||
|
@ -65,7 +67,7 @@ chain_configs() ->
|
||||||
[global_chain_config() | listener_chain_configs()].
|
[global_chain_config() | listener_chain_configs()].
|
||||||
|
|
||||||
global_chain_config() ->
|
global_chain_config() ->
|
||||||
{?GLOBAL, emqx:get_raw_config([<<"authentication">>], [])}.
|
{?GLOBAL, emqx:get_raw_config([?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_BINARY], [])}.
|
||||||
|
|
||||||
listener_chain_configs() ->
|
listener_chain_configs() ->
|
||||||
lists:map(
|
lists:map(
|
||||||
|
@ -77,7 +79,7 @@ listener_chain_configs() ->
|
||||||
auth_config_path(ListenerID) ->
|
auth_config_path(ListenerID) ->
|
||||||
[<<"listeners">>]
|
[<<"listeners">>]
|
||||||
++ binary:split(atom_to_binary(ListenerID), <<":">>)
|
++ binary:split(atom_to_binary(ListenerID), <<":">>)
|
||||||
++ [<<"authentication">>].
|
++ [?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_BINARY].
|
||||||
|
|
||||||
provider_types() ->
|
provider_types() ->
|
||||||
lists:map(fun({Type, _Module}) -> Type end, emqx_authn:providers()).
|
lists:map(fun({Type, _Module}) -> Type end, emqx_authn:providers()).
|
||||||
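auth_config_path/1 now ends the path with the shared root-name macro instead of a hard-coded binary, so the result is unchanged from before. For a listener id such as 'tcp:default' (illustrative), it works out to:

    [<<"listeners">>, <<"tcp">>, <<"default">>, <<"authentication">>] =
        auth_config_path('tcp:default'),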
|
|
|
@ -22,10 +22,12 @@
|
||||||
, roots/0
|
, roots/0
|
||||||
, fields/1
|
, fields/1
|
||||||
, authenticator_type/0
|
, authenticator_type/0
|
||||||
|
, root_type/0
|
||||||
|
, mechanism/1
|
||||||
|
, backend/1
|
||||||
]).
|
]).
|
||||||
|
|
||||||
%% only for doc generation
|
roots() -> [].
|
||||||
roots() -> [{authenticator_config, hoconsc:mk(authenticator_type())}].
|
|
||||||
|
|
||||||
fields(_) -> [].
|
fields(_) -> [].
|
||||||
|
|
||||||
|
@ -35,6 +37,7 @@ common_fields() ->
|
||||||
|
|
||||||
enable(type) -> boolean();
|
enable(type) -> boolean();
|
||||||
enable(default) -> true;
|
enable(default) -> true;
|
||||||
|
enable(desc) -> "Set to <code>false</code> to disable this auth provider";
|
||||||
enable(_) -> undefined.
|
enable(_) -> undefined.
|
||||||
|
|
||||||
authenticator_type() ->
|
authenticator_type() ->
|
||||||
|
@ -42,3 +45,18 @@ authenticator_type() ->
|
||||||
|
|
||||||
config_refs(Modules) ->
|
config_refs(Modules) ->
|
||||||
lists:append([Module:refs() || Module <- Modules]).
|
lists:append([Module:refs() || Module <- Modules]).
|
||||||
|
|
||||||
|
%% authn is a core functionality, however it is implemented outside of the emqx app
|
||||||
|
%% in emqx_schema, 'authentication' is a map() type, which allows
|
||||||
|
%% EMQ X to be more pluggable.
|
||||||
|
root_type() ->
|
||||||
|
T = authenticator_type(),
|
||||||
|
hoconsc:union([T, hoconsc:array(T)]).
|
||||||
|
|
||||||
|
mechanism(Name) ->
|
||||||
|
hoconsc:mk(hoconsc:enum([Name]),
|
||||||
|
#{nullable => false}).
|
||||||
|
|
||||||
|
backend(Name) ->
|
||||||
|
hoconsc:mk(hoconsc:enum([Name]),
|
||||||
|
#{nullable => false}).
|
||||||
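root_type/0 being a union of one authenticator and an array of authenticators means both of the following raw values are acceptable under the authentication root (field names taken from the config examples earlier in this diff):

    Single = #{<<"mechanism">> => <<"password-based">>,
               <<"backend">>   => <<"built-in-database">>},
    Chain  = [Single#{<<"enable">> => false},
              #{<<"mechanism">> => <<"jwt">>}],
    %% either Single or Chain may be supplied for the root value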
|
|
|
@ -93,6 +93,8 @@ is_superuser(#{<<"is_superuser">> := 0}) ->
|
||||||
#{is_superuser => false};
|
#{is_superuser => false};
|
||||||
is_superuser(#{<<"is_superuser">> := null}) ->
|
is_superuser(#{<<"is_superuser">> := null}) ->
|
||||||
#{is_superuser => false};
|
#{is_superuser => false};
|
||||||
|
is_superuser(#{<<"is_superuser">> := undefined}) ->
|
||||||
|
#{is_superuser => false};
|
||||||
is_superuser(#{<<"is_superuser">> := false}) ->
|
is_superuser(#{<<"is_superuser">> := false}) ->
|
||||||
#{is_superuser => false};
|
#{is_superuser => false};
|
||||||
is_superuser(#{<<"is_superuser">> := _}) ->
|
is_superuser(#{<<"is_superuser">> := _}) ->
|
||||||
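A quick sketch of the normalisation above: falsy or missing database values all collapse to is_superuser => false, which is what lets the JWT and MongoDB call sites later in this diff reuse the same helper.

    #{is_superuser := false} = emqx_authn_utils:is_superuser(#{<<"is_superuser">> => 0}),
    #{is_superuser := false} = emqx_authn_utils:is_superuser(#{<<"is_superuser">> => null}),
    #{is_superuser := false} = emqx_authn_utils:is_superuser(#{<<"is_superuser">> => undefined}),
    #{is_superuser := false} = emqx_authn_utils:is_superuser(#{<<"is_superuser">> => false}),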
|
|
|
@ -83,11 +83,11 @@ mnesia(boot) ->
|
||||||
|
|
||||||
namespace() -> "authn-scram-builtin_db".
|
namespace() -> "authn-scram-builtin_db".
|
||||||
|
|
||||||
roots() -> [config].
|
roots() -> [?CONF_NS].
|
||||||
|
|
||||||
fields(config) ->
|
fields(?CONF_NS) ->
|
||||||
[ {mechanism, {enum, [scram]}}
|
[ {mechanism, emqx_authn_schema:mechanism('scram')}
|
||||||
, {backend, {enum, ['built-in-database']}}
|
, {backend, emqx_authn_schema:backend('built-in-database')}
|
||||||
, {algorithm, fun algorithm/1}
|
, {algorithm, fun algorithm/1}
|
||||||
, {iteration_count, fun iteration_count/1}
|
, {iteration_count, fun iteration_count/1}
|
||||||
] ++ emqx_authn_schema:common_fields().
|
] ++ emqx_authn_schema:common_fields().
|
||||||
|
@ -105,7 +105,7 @@ iteration_count(_) -> undefined.
|
||||||
%%------------------------------------------------------------------------------
|
%%------------------------------------------------------------------------------
|
||||||
|
|
||||||
refs() ->
|
refs() ->
|
||||||
[hoconsc:ref(?MODULE, config)].
|
[hoconsc:ref(?MODULE, ?CONF_NS)].
|
||||||
|
|
||||||
create(AuthenticatorID,
|
create(AuthenticatorID,
|
||||||
#{algorithm := Algorithm,
|
#{algorithm := Algorithm,
|
||||||
|
@ -137,10 +137,7 @@ authenticate(_Credential, _State) ->
|
||||||
ignore.
|
ignore.
|
||||||
|
|
||||||
destroy(#{user_group := UserGroup}) ->
|
destroy(#{user_group := UserGroup}) ->
|
||||||
MatchSpec = ets:fun2ms(
|
MatchSpec = group_match_spec(UserGroup),
|
||||||
fun(#user_info{user_id = {Group, _}} = User) when Group =:= UserGroup ->
|
|
||||||
User
|
|
||||||
end),
|
|
||||||
trans(
|
trans(
|
||||||
fun() ->
|
fun() ->
|
||||||
ok = lists:foreach(fun(UserInfo) ->
|
ok = lists:foreach(fun(UserInfo) ->
|
||||||
|
@ -205,16 +202,16 @@ lookup_user(UserID, #{user_group := UserGroup}) ->
|
||||||
end.
|
end.
|
||||||
|
|
||||||
list_users(PageParams, #{user_group := UserGroup}) ->
|
list_users(PageParams, #{user_group := UserGroup}) ->
|
||||||
MatchSpec = [{{user_info, {UserGroup, '_'}, '_', '_', '_', '_'}, [], ['$_']}],
|
MatchSpec = group_match_spec(UserGroup),
|
||||||
{ok, emqx_mgmt_api:paginate(?TAB, MatchSpec, PageParams, ?FORMAT_FUN)}.
|
{ok, emqx_mgmt_api:paginate(?TAB, MatchSpec, PageParams, ?FORMAT_FUN)}.
|
||||||
|
|
||||||
%%------------------------------------------------------------------------------
|
%%------------------------------------------------------------------------------
|
||||||
%% Internal functions
|
%% Internal functions
|
||||||
%%------------------------------------------------------------------------------
|
%%------------------------------------------------------------------------------
|
||||||
|
|
||||||
ensure_auth_method('SCRAM-SHA-256', #{algorithm := sha256}) ->
|
ensure_auth_method(<<"SCRAM-SHA-256">>, #{algorithm := sha256}) ->
|
||||||
true;
|
true;
|
||||||
ensure_auth_method('SCRAM-SHA-512', #{algorithm := sha512}) ->
|
ensure_auth_method(<<"SCRAM-SHA-512">>, #{algorithm := sha512}) ->
|
||||||
true;
|
true;
|
||||||
ensure_auth_method(_, _) ->
|
ensure_auth_method(_, _) ->
|
||||||
false.
|
false.
|
||||||
|
@ -228,8 +225,10 @@ check_client_first_message(Bin, _Cache, #{iteration_count := IterationCount} = S
|
||||||
#{iteration_count => IterationCount,
|
#{iteration_count => IterationCount,
|
||||||
retrieve => RetrieveFun}
|
retrieve => RetrieveFun}
|
||||||
) of
|
) of
|
||||||
{cotinue, ServerFirstMessage, Cache} ->
|
{continue, ServerFirstMessage, Cache} ->
|
||||||
{cotinue, ServerFirstMessage, Cache};
|
{continue, ServerFirstMessage, Cache};
|
||||||
|
ignore ->
|
||||||
|
ignore;
|
||||||
{error, _Reason} ->
|
{error, _Reason} ->
|
||||||
{error, not_authorized}
|
{error, not_authorized}
|
||||||
end.
|
end.
|
||||||
|
@ -280,3 +279,9 @@ trans(Fun, Args) ->
|
||||||
|
|
||||||
format_user_info(#user_info{user_id = {_, UserID}, is_superuser = IsSuperuser}) ->
|
format_user_info(#user_info{user_id = {_, UserID}, is_superuser = IsSuperuser}) ->
|
||||||
#{user_id => UserID, is_superuser => IsSuperuser}.
|
#{user_id => UserID, is_superuser => IsSuperuser}.
|
||||||
|
|
||||||
|
group_match_spec(UserGroup) ->
|
||||||
|
ets:fun2ms(
|
||||||
|
fun(#user_info{user_id = {Group, _}} = User) when Group =:= UserGroup ->
|
||||||
|
User
|
||||||
|
end).
|
||||||
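group_match_spec/1 lets destroy/1 and list_users/2 share one match spec. The ets:fun2ms form selects the same rows as the spec list_users/2 previously wrote out by hand; fun2ms moves the group comparison into the spec's guard rather than inlining the bound value, but the selection is identical.

    MatchSpec = group_match_spec(UserGroup),
    %% hand-written equivalent from the old list_users/2:
    %%   [{{user_info, {UserGroup, '_'}, '_', '_', '_', '_'}, [], ['$_']}]
    {ok, _Page} = emqx_mgmt_api:paginate(?TAB, MatchSpec, PageParams, ?FORMAT_FUN),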
|
|
|
@ -43,8 +43,9 @@
|
||||||
namespace() -> "authn-http".
|
namespace() -> "authn-http".
|
||||||
|
|
||||||
roots() ->
|
roots() ->
|
||||||
[ {config, hoconsc:mk(hoconsc:union(refs()),
|
[ {?CONF_NS,
|
||||||
#{})}
|
hoconsc:mk(hoconsc:union(refs()),
|
||||||
|
#{})}
|
||||||
].
|
].
|
||||||
|
|
||||||
fields(get) ->
|
fields(get) ->
|
||||||
|
@ -60,8 +61,8 @@ fields(post) ->
|
||||||
] ++ common_fields().
|
] ++ common_fields().
|
||||||
|
|
||||||
common_fields() ->
|
common_fields() ->
|
||||||
[ {mechanism, hoconsc:enum(['password-based'])}
|
[ {mechanism, emqx_authn_schema:mechanism('password-based')}
|
||||||
, {backend, hoconsc:enum(['http'])}
|
, {backend, emqx_authn_schema:backend(http)}
|
||||||
, {url, fun url/1}
|
, {url, fun url/1}
|
||||||
, {body, fun body/1}
|
, {body, fun body/1}
|
||||||
, {request_timeout, fun request_timeout/1}
|
, {request_timeout, fun request_timeout/1}
|
||||||
|
@ -233,9 +234,9 @@ transform_header_name(Headers) ->
|
||||||
end, #{}, Headers).
|
end, #{}, Headers).
|
||||||
|
|
||||||
check_ssl_opts(Conf) ->
|
check_ssl_opts(Conf) ->
|
||||||
case parse_url(hocon_schema:get_value("config.url", Conf)) of
|
case parse_url(get_conf_val("url", Conf)) of
|
||||||
#{scheme := https} ->
|
#{scheme := https} ->
|
||||||
case hocon_schema:get_value("config.ssl.enable", Conf) of
|
case get_conf_val("ssl.enable", Conf) of
|
||||||
true -> ok;
|
true -> ok;
|
||||||
false -> false
|
false -> false
|
||||||
end;
|
end;
|
||||||
|
@ -244,8 +245,8 @@ check_ssl_opts(Conf) ->
|
||||||
end.
|
end.
|
||||||
|
|
||||||
check_headers(Conf) ->
|
check_headers(Conf) ->
|
||||||
Method = to_bin(hocon_schema:get_value("config.method", Conf)),
|
Method = to_bin(get_conf_val("method", Conf)),
|
||||||
Headers = hocon_schema:get_value("config.headers", Conf),
|
Headers = get_conf_val("headers", Conf),
|
||||||
Method =:= <<"post">> orelse (not maps:is_key(<<"content-type">>, Headers)).
|
Method =:= <<"post">> orelse (not maps:is_key(<<"content-type">>, Headers)).
|
||||||
|
|
||||||
parse_url(URL) ->
|
parse_url(URL) ->
|
||||||
|
@ -284,7 +285,7 @@ replace_placeholders([{K, V0} | More], Credential, Acc) ->
|
||||||
undefined ->
|
undefined ->
|
||||||
error({cannot_get_variable, V0});
|
error({cannot_get_variable, V0});
|
||||||
V ->
|
V ->
|
||||||
replace_placeholders(More, Credential, [{K, emqx_authn_utils:bin(V)} | Acc])
|
replace_placeholders(More, Credential, [{K, to_bin(V)} | Acc])
|
||||||
end.
|
end.
|
||||||
|
|
||||||
append_query(Path, []) ->
|
append_query(Path, []) ->
|
||||||
|
@ -340,3 +341,6 @@ to_bin(B) when is_binary(B) ->
|
||||||
B;
|
B;
|
||||||
to_bin(L) when is_list(L) ->
|
to_bin(L) when is_list(L) ->
|
||||||
list_to_binary(L).
|
list_to_binary(L).
|
||||||
|
|
||||||
|
get_conf_val(Name, Conf) ->
|
||||||
|
hocon_schema:get_value(?CONF_NS ++ "." ++ Name, Conf).
|
||||||
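get_conf_val/2 simply prefixes the requested key with the config namespace, so the url/ssl validators read the same values as before, just under the new root. Assuming ?CONF_NS resolves to "authentication":

    %% get_conf_val("url", Conf)
    %%   == hocon_schema:get_value("authentication.url", Conf)
    %% get_conf_val("ssl.enable", Conf)
    %%   == hocon_schema:get_value("authentication.ssl.enable", Conf)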
|
|
|
@ -20,6 +20,8 @@
|
||||||
|
|
||||||
-include_lib("emqx/include/logger.hrl").
|
-include_lib("emqx/include/logger.hrl").
|
||||||
-include_lib("jose/include/jose_jwk.hrl").
|
-include_lib("jose/include/jose_jwk.hrl").
|
||||||
|
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
|
||||||
|
|
||||||
|
|
||||||
-export([ start_link/1
|
-export([ start_link/1
|
||||||
, stop/1
|
, stop/1
|
||||||
|
@ -66,9 +68,9 @@ init([Opts]) ->
|
||||||
handle_call(get_cached_jwks, _From, #{jwks := Jwks} = State) ->
|
handle_call(get_cached_jwks, _From, #{jwks := Jwks} = State) ->
|
||||||
{reply, {ok, Jwks}, State};
|
{reply, {ok, Jwks}, State};
|
||||||
|
|
||||||
handle_call({update, Opts}, _From, State) ->
|
handle_call({update, Opts}, _From, _State) ->
|
||||||
State = handle_options(Opts),
|
NewState = handle_options(Opts),
|
||||||
{reply, ok, refresh_jwks(State)};
|
{reply, ok, refresh_jwks(NewState)};
|
||||||
|
|
||||||
handle_call(_Req, _From, State) ->
|
handle_call(_Req, _From, State) ->
|
||||||
{reply, ok, State}.
|
{reply, ok, State}.
|
||||||
|
@ -91,25 +93,27 @@ handle_info({refresh_jwks, _TRef, refresh}, #{request_id := RequestID} = State)
|
||||||
|
|
||||||
handle_info({http, {RequestID, Result}},
|
handle_info({http, {RequestID, Result}},
|
||||||
#{request_id := RequestID, endpoint := Endpoint} = State0) ->
|
#{request_id := RequestID, endpoint := Endpoint} = State0) ->
|
||||||
|
?tp(debug, jwks_endpoint_response, #{request_id => RequestID}),
|
||||||
State1 = State0#{request_id := undefined},
|
State1 = State0#{request_id := undefined},
|
||||||
case Result of
|
NewState = case Result of
|
||||||
{error, Reason} ->
|
{error, Reason} ->
|
||||||
?SLOG(warning, #{msg => "failed_to_request_jwks_endpoint",
|
?SLOG(warning, #{msg => "failed_to_request_jwks_endpoint",
|
||||||
endpoint => Endpoint,
|
endpoint => Endpoint,
|
||||||
reason => Reason}),
|
reason => Reason}),
|
||||||
State1;
|
State1;
|
||||||
{_StatusLine, _Headers, Body} ->
|
{_StatusLine, _Headers, Body} ->
|
||||||
try
|
try
|
||||||
JWKS = jose_jwk:from(emqx_json:decode(Body, [return_maps])),
|
JWKS = jose_jwk:from(emqx_json:decode(Body, [return_maps])),
|
||||||
{_, JWKs} = JWKS#jose_jwk.keys,
|
{_, JWKs} = JWKS#jose_jwk.keys,
|
||||||
State1#{jwks := JWKs}
|
State1#{jwks := JWKs}
|
||||||
catch _:_ ->
|
catch _:_ ->
|
||||||
?SLOG(warning, #{msg => "invalid_jwks_returned",
|
?SLOG(warning, #{msg => "invalid_jwks_returned",
|
||||||
endpoint => Endpoint,
|
endpoint => Endpoint,
|
||||||
body => Body}),
|
body => Body}),
|
||||||
State1
|
State1
|
||||||
end
|
end
|
||||||
end;
|
end,
|
||||||
|
{noreply, NewState};
|
||||||
|
|
||||||
handle_info({http, {_, _}}, State) ->
|
handle_info({http, {_, _}}, State) ->
|
||||||
%% ignore
|
%% ignore
|
||||||
|
@ -147,17 +151,18 @@ refresh_jwks(#{endpoint := Endpoint,
|
||||||
NState = case httpc:request(get, {Endpoint, [{"Accept", "application/json"}]}, HTTPOpts,
|
NState = case httpc:request(get, {Endpoint, [{"Accept", "application/json"}]}, HTTPOpts,
|
||||||
[{body_format, binary}, {sync, false}, {receiver, self()}]) of
|
[{body_format, binary}, {sync, false}, {receiver, self()}]) of
|
||||||
{error, Reason} ->
|
{error, Reason} ->
|
||||||
?SLOG(warning, #{msg => "failed_to_request_jwks_endpoint",
|
?tp(warning, jwks_endpoint_request_fail, #{endpoint => Endpoint,
|
||||||
endpoint => Endpoint,
|
http_opts => HTTPOpts,
|
||||||
reason => Reason}),
|
reason => Reason}),
|
||||||
State;
|
State;
|
||||||
{ok, RequestID} ->
|
{ok, RequestID} ->
|
||||||
|
?tp(debug, jwks_endpoint_request_ok, #{request_id => RequestID}),
|
||||||
State#{request_id := RequestID}
|
State#{request_id := RequestID}
|
||||||
end,
|
end,
|
||||||
ensure_expiry_timer(NState).
|
ensure_expiry_timer(NState).
|
||||||
|
|
||||||
ensure_expiry_timer(State = #{refresh_interval := Interval}) ->
|
ensure_expiry_timer(State = #{refresh_interval := Interval}) ->
|
||||||
State#{refresh_timer := emqx_misc:start_timer(timer:seconds(Interval), refresh_jwks)}.
|
State#{refresh_timer => emqx_misc:start_timer(timer:seconds(Interval), refresh_jwks)}.
|
||||||
|
|
||||||
cancel_timer(State = #{refresh_timer := undefined}) ->
|
cancel_timer(State = #{refresh_timer := undefined}) ->
|
||||||
State;
|
State;
|
||||||
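The handle_call({update, ...}) fix is worth spelling out: the clause head had already bound State, so the old State = handle_options(Opts) line was a pattern match rather than a rebinding, and crashed with badmatch whenever the new options differed from the current state. A one-minute shell illustration of the underlying rule:

    1> State = #{refresh_interval => 300}.
    #{refresh_interval => 300}
    2> State = #{refresh_interval => 600}.   %% re-matching, not re-assigning
    ** exception error: no match of right hand side value #{refresh_interval => 600}

The ensure_expiry_timer/1 change from := to => is the map-level analogue: := only updates a key that already exists (raising badkey otherwise), while => also inserts it when the state carries no refresh_timer yet.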
|
|
|
@ -16,6 +16,7 @@
|
||||||
|
|
||||||
-module(emqx_authn_jwt).
|
-module(emqx_authn_jwt).
|
||||||
|
|
||||||
|
-include("emqx_authn.hrl").
|
||||||
-include_lib("typerefl/include/types.hrl").
|
-include_lib("typerefl/include/types.hrl").
|
||||||
|
|
||||||
-behaviour(hocon_schema).
|
-behaviour(hocon_schema).
|
||||||
|
@ -40,9 +41,9 @@
|
||||||
namespace() -> "authn-jwt".
|
namespace() -> "authn-jwt".
|
||||||
|
|
||||||
roots() ->
|
roots() ->
|
||||||
[ {config, hoconsc:mk(hoconsc:union(refs()),
|
[ {?CONF_NS,
|
||||||
#{}
|
hoconsc:mk(hoconsc:union(refs()),
|
||||||
)}
|
#{})}
|
||||||
].
|
].
|
||||||
|
|
||||||
fields('hmac-based') ->
|
fields('hmac-based') ->
|
||||||
|
@ -82,7 +83,7 @@ fields(ssl_disable) ->
|
||||||
[ {enable, #{type => false}} ].
|
[ {enable, #{type => false}} ].
|
||||||
|
|
||||||
common_fields() ->
|
common_fields() ->
|
||||||
[ {mechanism, {enum, [jwt]}}
|
[ {mechanism, emqx_authn_schema:mechanism('jwt')}
|
||||||
, {verify_claims, fun verify_claims/1}
|
, {verify_claims, fun verify_claims/1}
|
||||||
] ++ emqx_authn_schema:common_fields().
|
] ++ emqx_authn_schema:common_fields().
|
||||||
|
|
||||||
|
@ -157,7 +158,7 @@ update(#{use_jwks := false} = Config, _State) ->
|
||||||
update(#{use_jwks := true} = Config,
|
update(#{use_jwks := true} = Config,
|
||||||
#{jwk := Connector} = State)
|
#{jwk := Connector} = State)
|
||||||
when is_pid(Connector) ->
|
when is_pid(Connector) ->
|
||||||
ok = emqx_authn_jwks_connector:update(Connector, Config),
|
ok = emqx_authn_jwks_connector:update(Connector, connector_opts(Config)),
|
||||||
case maps:get(verify_claims, Config, undefined) of
|
case maps:get(verify_claims, Config, undefined) of
|
||||||
undefined ->
|
undefined ->
|
||||||
{ok, State};
|
{ok, State};
|
||||||
|
@ -208,7 +209,7 @@ create2(#{use_jwks := false,
|
||||||
JWK = jose_jwk:from_oct(Secret),
|
JWK = jose_jwk:from_oct(Secret),
|
||||||
{ok, #{jwk => JWK,
|
{ok, #{jwk => JWK,
|
||||||
verify_claims => VerifyClaims}}
|
verify_claims => VerifyClaims}}
|
||||||
end;
|
end;
|
||||||
|
|
||||||
create2(#{use_jwks := false,
|
create2(#{use_jwks := false,
|
||||||
algorithm := 'public-key',
|
algorithm := 'public-key',
|
||||||
|
@ -219,13 +220,8 @@ create2(#{use_jwks := false,
|
||||||
verify_claims => VerifyClaims}};
|
verify_claims => VerifyClaims}};
|
||||||
|
|
||||||
create2(#{use_jwks := true,
|
create2(#{use_jwks := true,
|
||||||
verify_claims := VerifyClaims,
|
verify_claims := VerifyClaims} = Config) ->
|
||||||
ssl := #{enable := Enable} = SSL} = Config) ->
|
case emqx_authn_jwks_connector:start_link(connector_opts(Config)) of
|
||||||
SSLOpts = case Enable of
|
|
||||||
true -> maps:without([enable], SSL);
|
|
||||||
false -> #{}
|
|
||||||
end,
|
|
||||||
case emqx_authn_jwks_connector:start_link(Config#{ssl_opts => SSLOpts}) of
|
|
||||||
{ok, Connector} ->
|
{ok, Connector} ->
|
||||||
{ok, #{jwk => Connector,
|
{ok, #{jwk => Connector,
|
||||||
verify_claims => VerifyClaims}};
|
verify_claims => VerifyClaims}};
|
||||||
|
@ -233,6 +229,14 @@ create2(#{use_jwks := true,
|
||||||
{error, Reason}
|
{error, Reason}
|
||||||
end.
|
end.
|
||||||
|
|
||||||
|
connector_opts(#{ssl := #{enable := Enable} = SSL} = Config) ->
|
||||||
|
SSLOpts = case Enable of
|
||||||
|
true -> maps:without([enable], SSL);
|
||||||
|
false -> #{}
|
||||||
|
end,
|
||||||
|
Config#{ssl_opts => SSLOpts}.
|
||||||
|
|
||||||
|
|
||||||
may_decode_secret(false, Secret) -> Secret;
|
may_decode_secret(false, Secret) -> Secret;
|
||||||
may_decode_secret(true, Secret) ->
|
may_decode_secret(true, Secret) ->
|
||||||
try base64:decode(Secret)
|
try base64:decode(Secret)
|
||||||
|
@ -260,7 +264,7 @@ verify(JWS, [JWK | More], VerifyClaims) ->
|
||||||
Claims = emqx_json:decode(Payload, [return_maps]),
|
Claims = emqx_json:decode(Payload, [return_maps]),
|
||||||
case verify_claims(Claims, VerifyClaims) of
|
case verify_claims(Claims, VerifyClaims) of
|
||||||
ok ->
|
ok ->
|
||||||
{ok, #{is_superuser => maps:get(<<"is_superuser">>, Claims, false)}};
|
{ok, emqx_authn_utils:is_superuser(Claims)};
|
||||||
{error, Reason} ->
|
{error, Reason} ->
|
||||||
{error, Reason}
|
{error, Reason}
|
||||||
end;
|
end;
|
||||||
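connector_opts/1 factors out the ssl handling that create2/1 used to do inline, so update/2 now feeds the JWKS connector the same normalised options as create. A small sketch (verify is just a stand-in for any real ssl option):

    #{ssl_opts := #{}} =
        connector_opts(#{ssl => #{enable => false}}),
    #{ssl_opts := #{verify := verify_peer}} =
        connector_opts(#{ssl => #{enable => true, verify => verify_peer}}),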
|
|
|
@ -85,11 +85,11 @@ mnesia(boot) ->
|
||||||
|
|
||||||
namespace() -> "authn-builtin_db".
|
namespace() -> "authn-builtin_db".
|
||||||
|
|
||||||
roots() -> [config].
|
roots() -> [?CONF_NS].
|
||||||
|
|
||||||
fields(config) ->
|
fields(?CONF_NS) ->
|
||||||
[ {mechanism, {enum, ['password-based']}}
|
[ {mechanism, emqx_authn_schema:mechanism('password-based')}
|
||||||
, {backend, {enum, ['built-in-database']}}
|
, {backend, emqx_authn_schema:backend('built-in-database')}
|
||||||
, {user_id_type, fun user_id_type/1}
|
, {user_id_type, fun user_id_type/1}
|
||||||
, {password_hash_algorithm, fun password_hash_algorithm/1}
|
, {password_hash_algorithm, fun password_hash_algorithm/1}
|
||||||
] ++ emqx_authn_schema:common_fields();
|
] ++ emqx_authn_schema:common_fields();
|
||||||
|
@ -104,7 +104,7 @@ fields(other_algorithms) ->
|
||||||
].
|
].
|
||||||
|
|
||||||
user_id_type(type) -> user_id_type();
|
user_id_type(type) -> user_id_type();
|
||||||
user_id_type(default) -> username;
|
user_id_type(default) -> <<"username">>;
|
||||||
user_id_type(_) -> undefined.
|
user_id_type(_) -> undefined.
|
||||||
|
|
||||||
password_hash_algorithm(type) -> hoconsc:union([hoconsc:ref(?MODULE, bcrypt),
|
password_hash_algorithm(type) -> hoconsc:union([hoconsc:ref(?MODULE, bcrypt),
|
||||||
|
@ -121,7 +121,7 @@ salt_rounds(_) -> undefined.
|
||||||
%%------------------------------------------------------------------------------
|
%%------------------------------------------------------------------------------
|
||||||
|
|
||||||
refs() ->
|
refs() ->
|
||||||
[hoconsc:ref(?MODULE, config)].
|
[hoconsc:ref(?MODULE, ?CONF_NS)].
|
||||||
|
|
||||||
create(AuthenticatorID,
|
create(AuthenticatorID,
|
||||||
#{user_id_type := Type,
|
#{user_id_type := Type,
|
||||||
|
|
|
@ -42,8 +42,8 @@
|
||||||
namespace() -> "authn-mongodb".
|
namespace() -> "authn-mongodb".
|
||||||
|
|
||||||
roots() ->
|
roots() ->
|
||||||
[ {config, hoconsc:mk(hoconsc:union(refs()),
|
[ {?CONF_NS, hoconsc:mk(hoconsc:union(refs()),
|
||||||
#{})}
|
#{})}
|
||||||
].
|
].
|
||||||
|
|
||||||
fields(standalone) ->
|
fields(standalone) ->
|
||||||
|
@ -56,8 +56,8 @@ fields('sharded-cluster') ->
|
||||||
common_fields() ++ emqx_connector_mongo:fields(sharded).
|
common_fields() ++ emqx_connector_mongo:fields(sharded).
|
||||||
|
|
||||||
common_fields() ->
|
common_fields() ->
|
||||||
[ {mechanism, {enum, ['password-based']}}
|
[ {mechanism, emqx_authn_schema:mechanism('password-based')}
|
||||||
, {backend, {enum, [mongodb]}}
|
, {backend, emqx_authn_schema:backend(mongodb)}
|
||||||
, {collection, fun collection/1}
|
, {collection, fun collection/1}
|
||||||
, {selector, fun selector/1}
|
, {selector, fun selector/1}
|
||||||
, {password_hash_field, fun password_hash_field/1}
|
, {password_hash_field, fun password_hash_field/1}
|
||||||
|
@ -115,6 +115,8 @@ create(#{selector := Selector} = Config) ->
|
||||||
password_hash_algorithm,
|
password_hash_algorithm,
|
||||||
salt_position],
|
salt_position],
|
||||||
Config),
|
Config),
|
||||||
|
#{password_hash_algorithm := Algorithm} = State,
|
||||||
|
ok = emqx_authn_utils:ensure_apps_started(Algorithm),
|
||||||
ResourceId = emqx_authn_utils:make_resource_id(?MODULE),
|
ResourceId = emqx_authn_utils:make_resource_id(?MODULE),
|
||||||
NState = State#{
|
NState = State#{
|
||||||
selector => NSelector,
|
selector => NSelector,
|
||||||
|
@ -155,7 +157,7 @@ authenticate(#{password := Password} = Credential,
|
||||||
Doc ->
|
Doc ->
|
||||||
case check_password(Password, Doc, State) of
|
case check_password(Password, Doc, State) of
|
||||||
ok ->
|
ok ->
|
||||||
{ok, #{is_superuser => is_superuser(Doc, State)}};
|
{ok, is_superuser(Doc, State)};
|
||||||
{error, {cannot_find_password_hash_field, PasswordHashField}} ->
|
{error, {cannot_find_password_hash_field, PasswordHashField}} ->
|
||||||
?SLOG(error, #{msg => "cannot_find_password_hash_field",
|
?SLOG(error, #{msg => "cannot_find_password_hash_field",
|
||||||
resource => ResourceId,
|
resource => ResourceId,
|
||||||
|
@ -234,9 +236,10 @@ check_password(Password,
|
||||||
end.
|
end.
|
||||||
|
|
||||||
is_superuser(Doc, #{is_superuser_field := IsSuperuserField}) ->
|
is_superuser(Doc, #{is_superuser_field := IsSuperuserField}) ->
|
||||||
maps:get(IsSuperuserField, Doc, false);
|
IsSuperuser = maps:get(IsSuperuserField, Doc, false),
|
||||||
|
emqx_authn_utils:is_superuser(#{<<"is_superuser">> => IsSuperuser});
|
||||||
is_superuser(_, _) ->
|
is_superuser(_, _) ->
|
||||||
false.
|
emqx_authn_utils:is_superuser(#{<<"is_superuser">> => false}).
|
||||||
|
|
||||||
hash(Algorithm, Password, Salt, prefix) ->
|
hash(Algorithm, Password, Salt, prefix) ->
|
||||||
emqx_passwd:hash(Algorithm, <<Salt/binary, Password/binary>>);
|
emqx_passwd:hash(Algorithm, <<Salt/binary, Password/binary>>);
|
||||||
|
|
|
@ -41,11 +41,11 @@
|
||||||
|
|
||||||
namespace() -> "authn-mysql".
|
namespace() -> "authn-mysql".
|
||||||
|
|
||||||
roots() -> [config].
|
roots() -> [?CONF_NS].
|
||||||
|
|
||||||
fields(config) ->
|
fields(?CONF_NS) ->
|
||||||
[ {mechanism, {enum, ['password-based']}}
|
[ {mechanism, emqx_authn_schema:mechanism('password-based')}
|
||||||
, {backend, {enum, [mysql]}}
|
, {backend, emqx_authn_schema:backend(mysql)}
|
||||||
, {password_hash_algorithm, fun password_hash_algorithm/1}
|
, {password_hash_algorithm, fun password_hash_algorithm/1}
|
||||||
, {salt_position, fun salt_position/1}
|
, {salt_position, fun salt_position/1}
|
||||||
, {query, fun query/1}
|
, {query, fun query/1}
|
||||||
|
@ -74,7 +74,7 @@ query_timeout(_) -> undefined.
|
||||||
%%------------------------------------------------------------------------------
|
%%------------------------------------------------------------------------------
|
||||||
|
|
||||||
refs() ->
|
refs() ->
|
||||||
[hoconsc:ref(?MODULE, config)].
|
[hoconsc:ref(?MODULE, ?CONF_NS)].
|
||||||
|
|
||||||
create(_AuthenticatorID, Config) ->
|
create(_AuthenticatorID, Config) ->
|
||||||
create(Config).
|
create(Config).
|
||||||
|
|
|
@ -47,11 +47,11 @@
|
||||||
|
|
||||||
namespace() -> "authn-postgresql".
|
namespace() -> "authn-postgresql".
|
||||||
|
|
||||||
roots() -> [config].
|
roots() -> [?CONF_NS].
|
||||||
|
|
||||||
fields(config) ->
|
fields(?CONF_NS) ->
|
||||||
[ {mechanism, {enum, ['password-based']}}
|
[ {mechanism, emqx_authn_schema:mechanism('password-based')}
|
||||||
, {backend, {enum, [postgresql]}}
|
, {backend, emqx_authn_schema:backend(postgresql)}
|
||||||
, {password_hash_algorithm, fun password_hash_algorithm/1}
|
, {password_hash_algorithm, fun password_hash_algorithm/1}
|
||||||
, {salt_position, fun salt_position/1}
|
, {salt_position, fun salt_position/1}
|
||||||
, {query, fun query/1}
|
, {query, fun query/1}
|
||||||
|
@ -75,7 +75,7 @@ query(_) -> undefined.
|
||||||
%%------------------------------------------------------------------------------
|
%%------------------------------------------------------------------------------
|
||||||
|
|
||||||
refs() ->
|
refs() ->
|
||||||
[hoconsc:ref(?MODULE, config)].
|
[hoconsc:ref(?MODULE, ?CONF_NS)].
|
||||||
|
|
||||||
create(_AuthenticatorID, Config) ->
|
create(_AuthenticatorID, Config) ->
|
||||||
create(Config).
|
create(Config).
|
||||||
|
|
|
@ -42,8 +42,8 @@
|
||||||
namespace() -> "authn-redis".
|
namespace() -> "authn-redis".
|
||||||
|
|
||||||
roots() ->
|
roots() ->
|
||||||
[ {config, hoconsc:mk(hoconsc:union(refs()),
|
[ {?CONF_NS, hoconsc:mk(hoconsc:union(refs()),
|
||||||
#{})}
|
#{})}
|
||||||
].
|
].
|
||||||
|
|
||||||
fields(standalone) ->
|
fields(standalone) ->
|
||||||
|
@ -56,15 +56,15 @@ fields(sentinel) ->
|
||||||
common_fields() ++ emqx_connector_redis:fields(sentinel).
|
common_fields() ++ emqx_connector_redis:fields(sentinel).
|
||||||
|
|
||||||
common_fields() ->
|
common_fields() ->
|
||||||
[{mechanism, {enum, ['password-based']}},
|
[ {mechanism, emqx_authn_schema:mechanism('password-based')}
|
||||||
{backend, {enum, [redis]}},
|
, {backend, emqx_authn_schema:backend(redis)}
|
||||||
{query, fun query/1},
|
, {cmd, fun cmd/1}
|
||||||
{password_hash_algorithm, fun password_hash_algorithm/1},
|
, {password_hash_algorithm, fun password_hash_algorithm/1}
|
||||||
{salt_position, fun salt_position/1}
|
, {salt_position, fun salt_position/1}
|
||||||
] ++ emqx_authn_schema:common_fields().
|
] ++ emqx_authn_schema:common_fields().
|
||||||
|
|
||||||
query(type) -> string();
|
cmd(type) -> string();
|
||||||
query(_) -> undefined.
|
cmd(_) -> undefined.
|
||||||
|
|
||||||
password_hash_algorithm(type) -> {enum, [plain, md5, sha, sha256, sha512, bcrypt]};
|
password_hash_algorithm(type) -> {enum, [plain, md5, sha, sha256, sha512, bcrypt]};
|
||||||
password_hash_algorithm(default) -> sha256;
|
password_hash_algorithm(default) -> sha256;
|
||||||
|
@ -87,17 +87,17 @@ refs() ->
|
||||||
create(_AuthenticatorID, Config) ->
|
create(_AuthenticatorID, Config) ->
|
||||||
create(Config).
|
create(Config).
|
||||||
|
|
||||||
create(#{query := Query,
|
create(#{cmd := Cmd,
|
||||||
password_hash_algorithm := Algorithm} = Config) ->
|
password_hash_algorithm := Algorithm} = Config) ->
|
||||||
try
|
try
|
||||||
NQuery = parse_query(Query),
|
NCmd = parse_cmd(Cmd),
|
||||||
ok = emqx_authn_utils:ensure_apps_started(Algorithm),
|
ok = emqx_authn_utils:ensure_apps_started(Algorithm),
|
||||||
State = maps:with(
|
State = maps:with(
|
||||||
[password_hash_algorithm, salt_position],
|
[password_hash_algorithm, salt_position],
|
||||||
Config),
|
Config),
|
||||||
ResourceId = emqx_authn_utils:make_resource_id(?MODULE),
|
ResourceId = emqx_authn_utils:make_resource_id(?MODULE),
|
||||||
NState = State#{
|
NState = State#{
|
||||||
query => NQuery,
|
cmd => NCmd,
|
||||||
resource_id => ResourceId},
|
resource_id => ResourceId},
|
||||||
case emqx_resource:create_local(ResourceId, emqx_connector_redis, Config) of
|
case emqx_resource:create_local(ResourceId, emqx_connector_redis, Config) of
|
||||||
{ok, already_created} ->
|
{ok, already_created} ->
|
||||||
|
@ -108,8 +108,8 @@ create(#{query := Query,
|
||||||
{error, Reason}
|
{error, Reason}
|
||||||
end
|
end
|
||||||
catch
|
catch
|
||||||
error:{unsupported_query, _Query} ->
|
error:{unsupported_cmd, _Cmd} ->
|
||||||
{error, {unsupported_query, Query}};
|
{error, {unsupported_cmd, Cmd}};
|
||||||
error:missing_password_hash ->
|
error:missing_password_hash ->
|
||||||
{error, missing_password_hash};
|
{error, missing_password_hash};
|
||||||
error:{unsupported_fields, Fields} ->
|
error:{unsupported_fields, Fields} ->
|
||||||
|
@ -128,7 +128,7 @@ update(Config, State) ->
|
||||||
authenticate(#{auth_method := _}, _) ->
|
authenticate(#{auth_method := _}, _) ->
|
||||||
ignore;
|
ignore;
|
||||||
authenticate(#{password := Password} = Credential,
|
authenticate(#{password := Password} = Credential,
|
||||||
#{query := {Command, Key, Fields},
|
#{cmd := {Command, Key, Fields},
|
||||||
resource_id := ResourceId} = State) ->
|
resource_id := ResourceId} = State) ->
|
||||||
NKey = binary_to_list(iolist_to_binary(replace_placeholders(Key, Credential))),
|
NKey = binary_to_list(iolist_to_binary(replace_placeholders(Key, Credential))),
|
||||||
case emqx_resource:query(ResourceId, {cmd, [Command, NKey | Fields]}) of
|
case emqx_resource:query(ResourceId, {cmd, [Command, NKey | Fields]}) of
|
||||||
|
@ -162,15 +162,15 @@ destroy(#{resource_id := ResourceId}) ->
|
||||||
%%------------------------------------------------------------------------------
|
%%------------------------------------------------------------------------------
|
||||||
|
|
||||||
%% Only support HGET and HMGET
|
%% Only support HGET and HMGET
|
||||||
parse_query(Query) ->
|
parse_cmd(Cmd) ->
|
||||||
case string:tokens(Query, " ") of
|
case string:tokens(Cmd, " ") of
|
||||||
[Command, Key, Field | Fields] when Command =:= "HGET" orelse Command =:= "HMGET" ->
|
[Command, Key, Field | Fields] when Command =:= "HGET" orelse Command =:= "HMGET" ->
|
||||||
NFields = [Field | Fields],
|
NFields = [Field | Fields],
|
||||||
check_fields(NFields),
|
check_fields(NFields),
|
||||||
NKey = parse_key(Key),
|
NKey = parse_key(Key),
|
||||||
{Command, NKey, NFields};
|
{Command, NKey, NFields};
|
||||||
_ ->
|
_ ->
|
||||||
error({unsupported_query, Query})
|
error({unsupported_cmd, Cmd})
|
||||||
end.
|
end.
|
||||||
|
|
||||||
check_fields(Fields) ->
|
check_fields(Fields) ->
|
||||||
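The query-to-cmd rename runs through the schema, the state map and the error tags; parse_cmd/1 still accepts only HGET/HMGET, with the key part free to carry placeholders that get filled in per credential. A sketch using the cmd string from the authenticator example earlier in this diff:

    {"HMGET", _ParsedKey, ["password_hash", "salt"]} =
        parse_cmd("HMGET ${username} password_hash salt"),
    %% anything else is rejected up front, e.g.
    %%   parse_cmd("GET ${username}")  %% error({unsupported_cmd, "GET ${username}"})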
|
|
|
@ -1,22 +0,0 @@
|
||||||
%%--------------------------------------------------------------------
|
|
||||||
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
|
|
||||||
%%
|
|
||||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
%% you may not use this file except in compliance with the License.
|
|
||||||
%% You may obtain a copy of the License at
|
|
||||||
%%
|
|
||||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
%%
|
|
||||||
%% Unless required by applicable law or agreed to in writing, software
|
|
||||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
%% See the License for the specific language governing permissions and
|
|
||||||
%% limitations under the License.
|
|
||||||
%%--------------------------------------------------------------------
|
|
||||||
|
|
||||||
-module(emqx_authn_SUITE).
|
|
||||||
|
|
||||||
-compile(export_all).
|
|
||||||
-compile(nowarn_export_all).
|
|
||||||
|
|
||||||
all() -> emqx_common_test_helpers:all(?MODULE).
|
|
|
@ -43,18 +43,20 @@ groups() ->
|
||||||
[].
|
[].
|
||||||
|
|
||||||
init_per_testcase(_, Config) ->
|
init_per_testcase(_, Config) ->
|
||||||
|
{ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000),
|
||||||
emqx_authn_test_lib:delete_authenticators(
|
emqx_authn_test_lib:delete_authenticators(
|
||||||
[authentication],
|
[?CONF_NS_ATOM],
|
||||||
?GLOBAL),
|
?GLOBAL),
|
||||||
|
|
||||||
emqx_authn_test_lib:delete_authenticators(
|
emqx_authn_test_lib:delete_authenticators(
|
||||||
[listeners, tcp, default, authentication],
|
[listeners, tcp, default, ?CONF_NS_ATOM],
|
||||||
?TCP_DEFAULT),
|
?TCP_DEFAULT),
|
||||||
|
|
||||||
{atomic, ok} = mria:clear_table(emqx_authn_mnesia),
|
{atomic, ok} = mria:clear_table(emqx_authn_mnesia),
|
||||||
Config.
|
Config.
|
||||||
|
|
||||||
init_per_suite(Config) ->
|
init_per_suite(Config) ->
|
||||||
|
_ = application:load(emqx_conf),
|
||||||
ok = emqx_common_test_helpers:start_apps(
|
ok = emqx_common_test_helpers:start_apps(
|
||||||
[emqx_authn, emqx_dashboard],
|
[emqx_authn, emqx_dashboard],
|
||||||
fun set_special_configs/1),
|
fun set_special_configs/1),
|
||||||
|
@ -87,8 +89,8 @@ set_special_configs(_App) ->
|
||||||
%%------------------------------------------------------------------------------
|
%%------------------------------------------------------------------------------
|
||||||
|
|
||||||
t_invalid_listener(_) ->
|
t_invalid_listener(_) ->
|
||||||
{ok, 404, _} = request(get, uri(["listeners", "invalid", "authentication"])),
|
{ok, 404, _} = request(get, uri(["listeners", "invalid", ?CONF_NS])),
|
||||||
{ok, 404, _} = request(get, uri(["listeners", "in:valid", "authentication"])).
|
{ok, 404, _} = request(get, uri(["listeners", "in:valid", ?CONF_NS])).
|
||||||
|
|
||||||
t_authenticators(_) ->
|
t_authenticators(_) ->
|
||||||
test_authenticators([]).
|
test_authenticators([]).
|
||||||
|
@ -131,86 +133,86 @@ test_authenticators(PathPrefix) ->
|
||||||
ValidConfig = emqx_authn_test_lib:http_example(),
|
ValidConfig = emqx_authn_test_lib:http_example(),
|
||||||
{ok, 200, _} = request(
|
{ok, 200, _} = request(
|
||||||
post,
|
post,
|
||||||
uri(PathPrefix ++ ["authentication"]),
|
uri(PathPrefix ++ [?CONF_NS]),
|
||||||
ValidConfig),
|
ValidConfig),
|
||||||
|
|
||||||
{ok, 409, _} = request(
|
{ok, 409, _} = request(
|
||||||
post,
|
post,
|
||||||
uri(PathPrefix ++ ["authentication"]),
|
uri(PathPrefix ++ [?CONF_NS]),
|
||||||
ValidConfig),
|
ValidConfig),
|
||||||
|
|
||||||
InvalidConfig0 = ValidConfig#{method => <<"delete">>},
|
InvalidConfig0 = ValidConfig#{method => <<"delete">>},
|
||||||
{ok, 400, _} = request(
|
{ok, 400, _} = request(
|
||||||
post,
|
post,
|
||||||
uri(PathPrefix ++ ["authentication"]),
|
uri(PathPrefix ++ [?CONF_NS]),
|
||||||
InvalidConfig0),
|
InvalidConfig0),
|
||||||
|
|
||||||
InvalidConfig1 = ValidConfig#{method => <<"get">>,
|
InvalidConfig1 = ValidConfig#{method => <<"get">>,
|
||||||
headers => #{<<"content-type">> => <<"application/json">>}},
|
headers => #{<<"content-type">> => <<"application/json">>}},
|
||||||
{ok, 400, _} = request(
|
{ok, 400, _} = request(
|
||||||
post,
|
post,
|
||||||
uri(PathPrefix ++ ["authentication"]),
|
uri(PathPrefix ++ [?CONF_NS]),
|
||||||
InvalidConfig1),
|
InvalidConfig1),
|
||||||
|
|
||||||
?assertAuthenticatorsMatch(
|
?assertAuthenticatorsMatch(
|
||||||
[#{<<"mechanism">> := <<"password-based">>, <<"backend">> := <<"http">>}],
|
[#{<<"mechanism">> := <<"password-based">>, <<"backend">> := <<"http">>}],
|
||||||
PathPrefix ++ ["authentication"]).
|
PathPrefix ++ [?CONF_NS]).
|
||||||
|
|
||||||
test_authenticator(PathPrefix) ->
|
test_authenticator(PathPrefix) ->
|
||||||
ValidConfig0 = emqx_authn_test_lib:http_example(),
|
ValidConfig0 = emqx_authn_test_lib:http_example(),
|
||||||
{ok, 200, _} = request(
|
{ok, 200, _} = request(
|
||||||
post,
|
post,
|
||||||
uri(PathPrefix ++ ["authentication"]),
|
uri(PathPrefix ++ [?CONF_NS]),
|
||||||
ValidConfig0),
|
ValidConfig0),
|
||||||
{ok, 200, _} = request(
|
{ok, 200, _} = request(
|
||||||
get,
|
get,
|
||||||
uri(PathPrefix ++ ["authentication", "password-based:http"])),
|
uri(PathPrefix ++ [?CONF_NS, "password-based:http"])),
|
||||||
|
|
||||||
{ok, 404, _} = request(
|
{ok, 404, _} = request(
|
||||||
get,
|
get,
|
||||||
uri(PathPrefix ++ ["authentication", "password-based:redis"])),
|
uri(PathPrefix ++ [?CONF_NS, "password-based:redis"])),
|
||||||
|
|
||||||
|
|
||||||
{ok, 404, _} = request(
|
{ok, 404, _} = request(
|
||||||
put,
|
put,
|
||||||
uri(PathPrefix ++ ["authentication", "password-based:built-in-database"]),
|
uri(PathPrefix ++ [?CONF_NS, "password-based:built-in-database"]),
|
||||||
emqx_authn_test_lib:built_in_database_example()),
|
emqx_authn_test_lib:built_in_database_example()),
|
||||||
|
|
||||||
InvalidConfig0 = ValidConfig0#{method => <<"delete">>},
|
InvalidConfig0 = ValidConfig0#{method => <<"delete">>},
|
||||||
{ok, 400, _} = request(
|
{ok, 400, _} = request(
|
||||||
put,
|
put,
|
||||||
uri(PathPrefix ++ ["authentication", "password-based:http"]),
|
uri(PathPrefix ++ [?CONF_NS, "password-based:http"]),
|
||||||
InvalidConfig0),
|
InvalidConfig0),
|
||||||
|
|
||||||
InvalidConfig1 = ValidConfig0#{method => <<"get">>,
|
InvalidConfig1 = ValidConfig0#{method => <<"get">>,
|
||||||
headers => #{<<"content-type">> => <<"application/json">>}},
|
headers => #{<<"content-type">> => <<"application/json">>}},
|
||||||
{ok, 400, _} = request(
|
{ok, 400, _} = request(
|
||||||
put,
|
put,
|
||||||
uri(PathPrefix ++ ["authentication", "password-based:http"]),
|
uri(PathPrefix ++ [?CONF_NS, "password-based:http"]),
|
||||||
InvalidConfig1),
|
InvalidConfig1),
|
||||||
|
|
||||||
ValidConfig1 = ValidConfig0#{pool_size => 9},
|
ValidConfig1 = ValidConfig0#{pool_size => 9},
|
||||||
{ok, 200, _} = request(
|
{ok, 200, _} = request(
|
||||||
put,
|
put,
|
||||||
uri(PathPrefix ++ ["authentication", "password-based:http"]),
|
uri(PathPrefix ++ [?CONF_NS, "password-based:http"]),
|
||||||
ValidConfig1),
|
ValidConfig1),
|
||||||
|
|
||||||
{ok, 404, _} = request(
|
{ok, 404, _} = request(
|
||||||
delete,
|
delete,
|
||||||
uri(PathPrefix ++ ["authentication", "password-based:redis"])),
|
uri(PathPrefix ++ [?CONF_NS, "password-based:redis"])),
|
||||||
|
|
||||||
{ok, 204, _} = request(
|
{ok, 204, _} = request(
|
||||||
delete,
|
delete,
|
||||||
uri(PathPrefix ++ ["authentication", "password-based:http"])),
|
uri(PathPrefix ++ [?CONF_NS, "password-based:http"])),
|
||||||
|
|
||||||
?assertAuthenticatorsMatch([], PathPrefix ++ ["authentication"]).
|
?assertAuthenticatorsMatch([], PathPrefix ++ [?CONF_NS]).
|
||||||
|
|
||||||
test_authenticator_users(PathPrefix) ->
|
test_authenticator_users(PathPrefix) ->
|
||||||
UsersUri = uri(PathPrefix ++ ["authentication", "password-based:built-in-database", "users"]),
|
UsersUri = uri(PathPrefix ++ [?CONF_NS, "password-based:built-in-database", "users"]),
|
||||||
|
|
||||||
{ok, 200, _} = request(
|
{ok, 200, _} = request(
|
||||||
post,
|
post,
|
||||||
uri(PathPrefix ++ ["authentication"]),
|
uri(PathPrefix ++ [?CONF_NS]),
|
||||||
emqx_authn_test_lib:built_in_database_example()),
|
emqx_authn_test_lib:built_in_database_example()),
|
||||||
|
|
||||||
InvalidUsers = [
|
InvalidUsers = [
|
||||||
|
@ -261,11 +263,11 @@ test_authenticator_users(PathPrefix) ->
|
||||||
lists:usort([ UserId || #{<<"user_id">> := UserId} <- Page1Users ++ Page2Users])).
|
lists:usort([ UserId || #{<<"user_id">> := UserId} <- Page1Users ++ Page2Users])).
|
||||||
|
|
||||||
test_authenticator_user(PathPrefix) ->
|
test_authenticator_user(PathPrefix) ->
|
||||||
UsersUri = uri(PathPrefix ++ ["authentication", "password-based:built-in-database", "users"]),
|
UsersUri = uri(PathPrefix ++ [?CONF_NS, "password-based:built-in-database", "users"]),
|
||||||
|
|
||||||
{ok, 200, _} = request(
|
{ok, 200, _} = request(
|
||||||
post,
|
post,
|
||||||
uri(PathPrefix ++ ["authentication"]),
|
uri(PathPrefix ++ [?CONF_NS]),
|
||||||
emqx_authn_test_lib:built_in_database_example()),
|
emqx_authn_test_lib:built_in_database_example()),
|
||||||
|
|
||||||
User = #{user_id => <<"u1">>, password => <<"p1">>},
|
User = #{user_id => <<"u1">>, password => <<"p1">>},
|
||||||
|
@ -309,7 +311,7 @@ test_authenticator_move(PathPrefix) ->
|
||||||
fun(Conf) ->
|
fun(Conf) ->
|
||||||
{ok, 200, _} = request(
|
{ok, 200, _} = request(
|
||||||
post,
|
post,
|
||||||
uri(PathPrefix ++ ["authentication"]),
|
uri(PathPrefix ++ [?CONF_NS]),
|
||||||
Conf)
|
Conf)
|
||||||
end,
|
end,
|
||||||
AuthenticatorConfs),
|
AuthenticatorConfs),
|
||||||
|
@ -320,40 +322,40 @@ test_authenticator_move(PathPrefix) ->
|
||||||
#{<<"mechanism">> := <<"jwt">>},
|
#{<<"mechanism">> := <<"jwt">>},
|
||||||
#{<<"mechanism">> := <<"password-based">>, <<"backend">> := <<"built-in-database">>}
|
#{<<"mechanism">> := <<"password-based">>, <<"backend">> := <<"built-in-database">>}
|
||||||
],
|
],
|
||||||
PathPrefix ++ ["authentication"]),
|
PathPrefix ++ [?CONF_NS]),
|
||||||
|
|
||||||
% Invalid moves
|
% Invalid moves
|
||||||
|
|
||||||
{ok, 400, _} = request(
|
{ok, 400, _} = request(
|
||||||
post,
|
post,
|
||||||
uri(PathPrefix ++ ["authentication", "jwt", "move"]),
|
uri(PathPrefix ++ [?CONF_NS, "jwt", "move"]),
|
||||||
#{position => <<"up">>}),
|
#{position => <<"up">>}),
|
||||||
|
|
||||||
{ok, 400, _} = request(
|
{ok, 400, _} = request(
|
||||||
post,
|
post,
|
||||||
uri(PathPrefix ++ ["authentication", "jwt", "move"]),
|
uri(PathPrefix ++ [?CONF_NS, "jwt", "move"]),
|
||||||
#{}),
|
#{}),
|
||||||
|
|
||||||
{ok, 404, _} = request(
|
{ok, 404, _} = request(
|
||||||
post,
|
post,
|
||||||
uri(PathPrefix ++ ["authentication", "jwt", "move"]),
|
uri(PathPrefix ++ [?CONF_NS, "jwt", "move"]),
|
||||||
#{position => <<"before:invalid">>}),
|
#{position => <<"before:invalid">>}),
|
||||||
|
|
||||||
{ok, 404, _} = request(
|
{ok, 404, _} = request(
|
||||||
post,
|
post,
|
||||||
uri(PathPrefix ++ ["authentication", "jwt", "move"]),
|
uri(PathPrefix ++ [?CONF_NS, "jwt", "move"]),
|
||||||
#{position => <<"before:password-based:redis">>}),
|
#{position => <<"before:password-based:redis">>}),
|
||||||
|
|
||||||
{ok, 404, _} = request(
|
{ok, 404, _} = request(
|
||||||
post,
|
post,
|
||||||
uri(PathPrefix ++ ["authentication", "jwt", "move"]),
|
uri(PathPrefix ++ [?CONF_NS, "jwt", "move"]),
|
||||||
#{position => <<"before:password-based:redis">>}),
|
#{position => <<"before:password-based:redis">>}),
|
||||||
|
|
||||||
% Valid moves
|
% Valid moves
|
||||||
|
|
||||||
{ok, 204, _} = request(
|
{ok, 204, _} = request(
|
||||||
post,
|
post,
|
||||||
uri(PathPrefix ++ ["authentication", "jwt", "move"]),
|
uri(PathPrefix ++ [?CONF_NS, "jwt", "move"]),
|
||||||
#{position => <<"top">>}),
|
#{position => <<"top">>}),
|
||||||
|
|
||||||
?assertAuthenticatorsMatch(
|
?assertAuthenticatorsMatch(
|
||||||
|
@ -362,11 +364,11 @@ test_authenticator_move(PathPrefix) ->
|
||||||
#{<<"mechanism">> := <<"password-based">>, <<"backend">> := <<"http">>},
|
#{<<"mechanism">> := <<"password-based">>, <<"backend">> := <<"http">>},
|
||||||
#{<<"mechanism">> := <<"password-based">>, <<"backend">> := <<"built-in-database">>}
|
#{<<"mechanism">> := <<"password-based">>, <<"backend">> := <<"built-in-database">>}
|
||||||
],
|
],
|
||||||
PathPrefix ++ ["authentication"]),
|
PathPrefix ++ [?CONF_NS]),
|
||||||
|
|
||||||
{ok, 204, _} = request(
|
{ok, 204, _} = request(
|
||||||
post,
|
post,
|
||||||
uri(PathPrefix ++ ["authentication", "jwt", "move"]),
|
uri(PathPrefix ++ [?CONF_NS, "jwt", "move"]),
|
||||||
#{position => <<"bottom">>}),
|
#{position => <<"bottom">>}),
|
||||||
|
|
||||||
?assertAuthenticatorsMatch(
|
?assertAuthenticatorsMatch(
|
||||||
|
@ -375,11 +377,11 @@ test_authenticator_move(PathPrefix) ->
|
||||||
#{<<"mechanism">> := <<"password-based">>, <<"backend">> := <<"built-in-database">>},
|
#{<<"mechanism">> := <<"password-based">>, <<"backend">> := <<"built-in-database">>},
|
||||||
#{<<"mechanism">> := <<"jwt">>}
|
#{<<"mechanism">> := <<"jwt">>}
|
||||||
],
|
],
|
||||||
PathPrefix ++ ["authentication"]),
|
PathPrefix ++ [?CONF_NS]),
|
||||||
|
|
||||||
{ok, 204, _} = request(
|
{ok, 204, _} = request(
|
||||||
post,
|
post,
|
||||||
uri(PathPrefix ++ ["authentication", "jwt", "move"]),
|
uri(PathPrefix ++ [?CONF_NS, "jwt", "move"]),
|
||||||
#{position => <<"before:password-based:built-in-database">>}),
|
#{position => <<"before:password-based:built-in-database">>}),
|
||||||
|
|
||||||
?assertAuthenticatorsMatch(
|
?assertAuthenticatorsMatch(
|
||||||
|
@ -388,17 +390,17 @@ test_authenticator_move(PathPrefix) ->
|
||||||
#{<<"mechanism">> := <<"jwt">>},
|
#{<<"mechanism">> := <<"jwt">>},
|
||||||
#{<<"mechanism">> := <<"password-based">>, <<"backend">> := <<"built-in-database">>}
|
#{<<"mechanism">> := <<"password-based">>, <<"backend">> := <<"built-in-database">>}
|
||||||
],
|
],
|
||||||
PathPrefix ++ ["authentication"]).
|
PathPrefix ++ [?CONF_NS]).
|
||||||
|
|
||||||
test_authenticator_import_users(PathPrefix) ->
|
test_authenticator_import_users(PathPrefix) ->
|
||||||
ImportUri = uri(
|
ImportUri = uri(
|
||||||
PathPrefix ++
|
PathPrefix ++
|
||||||
["authentication", "password-based:built-in-database", "import_users"]),
|
[?CONF_NS, "password-based:built-in-database", "import_users"]),
|
||||||
|
|
||||||
|
|
||||||
{ok, 200, _} = request(
|
{ok, 200, _} = request(
|
||||||
post,
|
post,
|
||||||
uri(PathPrefix ++ ["authentication"]),
|
uri(PathPrefix ++ [?CONF_NS]),
|
||||||
emqx_authn_test_lib:built_in_database_example()),
|
emqx_authn_test_lib:built_in_database_example()),
|
||||||
|
|
||||||
{ok, 400, _} = request(post, ImportUri, #{}),
|
{ok, 400, _} = request(post, ImportUri, #{}),
|
||||||
|
|
|
@ -24,7 +24,7 @@
|
||||||
-include_lib("common_test/include/ct.hrl").
|
-include_lib("common_test/include/ct.hrl").
|
||||||
-include_lib("emqx/include/emqx_placeholder.hrl").
|
-include_lib("emqx/include/emqx_placeholder.hrl").
|
||||||
|
|
||||||
-define(PATH, [authentication]).
|
-define(PATH, [?CONF_NS_ATOM]).
|
||||||
|
|
||||||
-define(HTTP_PORT, 33333).
|
-define(HTTP_PORT, 33333).
|
||||||
-define(HTTP_PATH, "/auth").
|
-define(HTTP_PATH, "/auth").
|
||||||
|
@ -39,6 +39,7 @@ all() ->
|
||||||
emqx_common_test_helpers:all(?MODULE).
|
emqx_common_test_helpers:all(?MODULE).
|
||||||
|
|
||||||
init_per_suite(Config) ->
|
init_per_suite(Config) ->
|
||||||
|
_ = application:load(emqx_conf),
|
||||||
emqx_common_test_helpers:start_apps([emqx_authn]),
|
emqx_common_test_helpers:start_apps([emqx_authn]),
|
||||||
application:ensure_all_started(cowboy),
|
application:ensure_all_started(cowboy),
|
||||||
Config.
|
Config.
|
||||||
|
@ -52,6 +53,7 @@ end_per_suite(_) ->
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
init_per_testcase(_Case, Config) ->
|
init_per_testcase(_Case, Config) ->
|
||||||
|
{ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000),
|
||||||
emqx_authn_test_lib:delete_authenticators(
|
emqx_authn_test_lib:delete_authenticators(
|
||||||
[authentication],
|
[authentication],
|
||||||
?GLOBAL),
|
?GLOBAL),
|
||||||
|
|
|
@ -21,15 +21,25 @@
|
||||||
|
|
||||||
-include_lib("common_test/include/ct.hrl").
|
-include_lib("common_test/include/ct.hrl").
|
||||||
-include_lib("eunit/include/eunit.hrl").
|
-include_lib("eunit/include/eunit.hrl").
|
||||||
|
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
|
||||||
|
|
||||||
-include("emqx_authn.hrl").
|
-include("emqx_authn.hrl").
|
||||||
|
|
||||||
-define(AUTHN_ID, <<"mechanism:jwt">>).
|
-define(AUTHN_ID, <<"mechanism:jwt">>).
|
||||||
|
|
||||||
|
-define(JWKS_PORT, 33333).
|
||||||
|
-define(JWKS_PATH, "/jwks.json").
|
||||||
|
|
||||||
|
|
||||||
all() ->
|
all() ->
|
||||||
emqx_common_test_helpers:all(?MODULE).
|
emqx_common_test_helpers:all(?MODULE).
|
||||||
|
|
||||||
|
init_per_testcase(_, Config) ->
|
||||||
|
{ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000),
|
||||||
|
Config.
|
||||||
|
|
||||||
init_per_suite(Config) ->
|
init_per_suite(Config) ->
|
||||||
|
_ = application:load(emqx_conf),
|
||||||
emqx_common_test_helpers:start_apps([emqx_authn]),
|
emqx_common_test_helpers:start_apps([emqx_authn]),
|
||||||
Config.
|
Config.
|
||||||
|
|
||||||
|
@ -37,7 +47,11 @@ end_per_suite(_) ->
|
||||||
emqx_common_test_helpers:stop_apps([emqx_authn]),
|
emqx_common_test_helpers:stop_apps([emqx_authn]),
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
t_jwt_authenticator(_) ->
|
%%------------------------------------------------------------------------------
|
||||||
|
%% Tests
|
||||||
|
%%------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
t_jwt_authenticator_hmac_based(_) ->
|
||||||
Secret = <<"abcdef">>,
|
Secret = <<"abcdef">>,
|
||||||
Config = #{mechanism => jwt,
|
Config = #{mechanism => jwt,
|
||||||
use_jwks => false,
|
use_jwks => false,
|
||||||
|
@ -121,10 +135,9 @@ t_jwt_authenticator(_) ->
|
||||||
?assertEqual(ok, emqx_authn_jwt:destroy(State3)),
|
?assertEqual(ok, emqx_authn_jwt:destroy(State3)),
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
t_jwt_authenticator2(_) ->
|
t_jwt_authenticator_public_key(_) ->
|
||||||
Dir = code:lib_dir(emqx_authn, test),
|
PublicKey = test_rsa_key(public),
|
||||||
PublicKey = list_to_binary(filename:join([Dir, "data/public_key.pem"])),
|
PrivateKey = test_rsa_key(private),
|
||||||
PrivateKey = list_to_binary(filename:join([Dir, "data/private_key.pem"])),
|
|
||||||
Config = #{mechanism => jwt,
|
Config = #{mechanism => jwt,
|
||||||
use_jwks => false,
|
use_jwks => false,
|
||||||
algorithm => 'public-key',
|
algorithm => 'public-key',
|
||||||
|
@ -142,6 +155,78 @@ t_jwt_authenticator2(_) ->
|
||||||
?assertEqual(ok, emqx_authn_jwt:destroy(State)),
|
?assertEqual(ok, emqx_authn_jwt:destroy(State)),
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
|
t_jwks_renewal(_Config) ->
|
||||||
|
ok = emqx_authn_http_test_server:start(?JWKS_PORT, ?JWKS_PATH),
|
||||||
|
ok = emqx_authn_http_test_server:set_handler(fun jwks_handler/2),
|
||||||
|
|
||||||
|
PrivateKey = test_rsa_key(private),
|
||||||
|
Payload = #{<<"username">> => <<"myuser">>},
|
||||||
|
JWS = generate_jws('public-key', Payload, PrivateKey),
|
||||||
|
Credential = #{username => <<"myuser">>,
|
||||||
|
password => JWS},
|
||||||
|
|
||||||
|
BadConfig = #{mechanism => jwt,
|
||||||
|
algorithm => 'public-key',
|
||||||
|
ssl => #{enable => false},
|
||||||
|
verify_claims => [],
|
||||||
|
|
||||||
|
use_jwks => true,
|
||||||
|
endpoint => "http://127.0.0.1:" ++ integer_to_list(?JWKS_PORT + 1) ++ ?JWKS_PATH,
|
||||||
|
refresh_interval => 1000
|
||||||
|
},
|
||||||
|
|
||||||
|
ok = snabbkaffe:start_trace(),
|
||||||
|
|
||||||
|
{{ok, State0}, _} = ?wait_async_action(
|
||||||
|
emqx_authn_jwt:create(?AUTHN_ID, BadConfig),
|
||||||
|
#{?snk_kind := jwks_endpoint_response},
|
||||||
|
1000),
|
||||||
|
|
||||||
|
ok = snabbkaffe:stop(),
|
||||||
|
|
||||||
|
?assertEqual(ignore, emqx_authn_jwt:authenticate(Credential, State0)),
|
||||||
|
?assertEqual(ignore, emqx_authn_jwt:authenticate(Credential#{password => <<"badpassword">>}, State0)),
|
||||||
|
|
||||||
|
GoodConfig = BadConfig#{endpoint =>
|
||||||
|
"http://127.0.0.1:" ++ integer_to_list(?JWKS_PORT) ++ ?JWKS_PATH},
|
||||||
|
|
||||||
|
ok = snabbkaffe:start_trace(),
|
||||||
|
|
||||||
|
{{ok, State1}, _} = ?wait_async_action(
|
||||||
|
emqx_authn_jwt:update(GoodConfig, State0),
|
||||||
|
#{?snk_kind := jwks_endpoint_response},
|
||||||
|
1000),
|
||||||
|
|
||||||
|
ok = snabbkaffe:stop(),
|
||||||
|
|
||||||
|
?assertEqual({ok, #{is_superuser => false}}, emqx_authn_jwt:authenticate(Credential, State1)),
|
||||||
|
?assertEqual(ignore, emqx_authn_jwt:authenticate(Credential#{password => <<"badpassword">>}, State1)),
|
||||||
|
|
||||||
|
?assertEqual(ok, emqx_authn_jwt:destroy(State1)),
|
||||||
|
ok = emqx_authn_http_test_server:stop().
|
||||||
|
|
||||||
|
%%------------------------------------------------------------------------------
|
||||||
|
%% Helpers
|
||||||
|
%%------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
jwks_handler(Req0, State) ->
|
||||||
|
JWK = jose_jwk:from_pem_file(test_rsa_key(public)),
|
||||||
|
JWKS = jose_jwk_set:to_map([JWK], #{}),
|
||||||
|
Req = cowboy_req:reply(
|
||||||
|
200,
|
||||||
|
#{<<"content-type">> => <<"application/json">>},
|
||||||
|
jiffy:encode(JWKS),
|
||||||
|
Req0),
|
||||||
|
{ok, Req, State}.
|
||||||
|
|
||||||
|
test_rsa_key(public) ->
|
||||||
|
Dir = code:lib_dir(emqx_authn, test),
|
||||||
|
list_to_binary(filename:join([Dir, "data/public_key.pem"]));
|
||||||
|
|
||||||
|
test_rsa_key(private) ->
|
||||||
|
Dir = code:lib_dir(emqx_authn, test),
|
||||||
|
list_to_binary(filename:join([Dir, "data/private_key.pem"])).
|
||||||
|
|
||||||
 generate_jws('hmac-based', Payload, Secret) ->
     JWK = jose_jwk:from_oct(Secret),
     Header = #{ <<"alg">> => <<"HS256">>
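The new jwks_handler/2 serves the test RSA key as a JWK set over HTTP. As a rough, hedged illustration of what the authenticator fetches from that endpoint, the sketch below pulls the document with httpc and decodes the first key back into a JWK with jose; the 3000 port and "/jwks" path are made-up stand-ins for ?JWKS_PORT and ?JWKS_PATH, which are defined elsewhere in the suite.

%% Minimal sketch (not part of the diff): fetch and decode the served JWK set.
%% 3000 and "/jwks" are assumed stand-ins for ?JWKS_PORT / ?JWKS_PATH.
fetch_jwks() ->
    {ok, _} = application:ensure_all_started(inets),
    {ok, {{_, 200, _}, _Headers, Body}} =
        httpc:request(get, {"http://127.0.0.1:3000/jwks", []}, [], []),
    #{<<"keys">> := [KeyMap | _]} =
        jiffy:decode(list_to_binary(Body), [return_maps]),
    jose_jwk:from_map(KeyMap).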
@ -29,6 +29,7 @@ all() ->
     emqx_common_test_helpers:all(?MODULE).

 init_per_suite(Config) ->
+    _ = application:load(emqx_conf),
     emqx_common_test_helpers:start_apps([emqx_authn]),
     Config.

@ -37,7 +38,8 @@ end_per_suite(_) ->
     ok.

 init_per_testcase(_Case, Config) ->
-    mnesia:clear_table(emqx_authn_mnesia),
+    {ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000),
+    mria:clear_table(emqx_authn_mnesia),
     Config.

 end_per_testcase(_Case, Config) ->
@ -47,6 +49,8 @@ end_per_testcase(_Case, Config) ->
 %% Tests
 %%------------------------------------------------------------------------------

+-define(CONF(Conf), #{?CONF_NS_BINARY => Conf}).
+
 t_check_schema(_Config) ->
     ConfigOk = #{
         <<"mechanism">> => <<"password-based">>,
@ -58,7 +62,7 @@ t_check_schema(_Config) ->
         }
     },

-    hocon_schema:check_plain(emqx_authn_mnesia, #{<<"config">> => ConfigOk}),
+    hocon_schema:check_plain(emqx_authn_mnesia, ?CONF(ConfigOk)),

     ConfigNotOk = #{
         <<"mechanism">> => <<"password-based">>,
@ -72,7 +76,7 @@ t_check_schema(_Config) ->
     ?assertException(
         throw,
         {emqx_authn_mnesia, _},
-        hocon_schema:check_plain(emqx_authn_mnesia, #{<<"config">> => ConfigNotOk})).
+        hocon_schema:check_plain(emqx_authn_mnesia, ?CONF(ConfigNotOk))).

 t_create(_) ->
     Config0 = config(),
@ -0,0 +1,409 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------

-module(emqx_authn_mongo_SUITE).

-compile(nowarn_export_all).
-compile(export_all).

-include("emqx_authn.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").

-define(MONGO_HOST, "mongo").
-define(MONGO_PORT, 27017).
-define(MONGO_CLIENT, 'emqx_authn_mongo_SUITE_client').

-define(PATH, [authentication]).

all() ->
    emqx_common_test_helpers:all(?MODULE).

init_per_testcase(_TestCase, Config) ->
    {ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000),
    emqx_authentication:initialize_authentication(?GLOBAL, []),
    emqx_authn_test_lib:delete_authenticators(
      [authentication],
      ?GLOBAL),
    {ok, _} = mc_worker_api:connect(mongo_config()),
    Config.

end_per_testcase(_TestCase, _Config) ->
    ok = mc_worker_api:disconnect(?MONGO_CLIENT).

init_per_suite(Config) ->
    _ = application:load(emqx_conf),
    case emqx_authn_test_lib:is_tcp_server_available(?MONGO_HOST, ?MONGO_PORT) of
        true ->
            ok = emqx_common_test_helpers:start_apps([emqx_authn]),
            ok = start_apps([emqx_resource, emqx_connector]),
            Config;
        false ->
            {skip, no_mongo}
    end.

end_per_suite(_Config) ->
    emqx_authn_test_lib:delete_authenticators(
      [authentication],
      ?GLOBAL),
    ok = stop_apps([emqx_resource, emqx_connector]),
    ok = emqx_common_test_helpers:stop_apps([emqx_authn]).

%%------------------------------------------------------------------------------
%% Tests
%%------------------------------------------------------------------------------

t_create(_Config) ->
    AuthConfig = raw_mongo_auth_config(),

    {ok, _} = emqx:update_config(
                ?PATH,
                {create_authenticator, ?GLOBAL, AuthConfig}),

    {ok, [#{provider := emqx_authn_mongodb}]} = emqx_authentication:list_authenticators(?GLOBAL).

t_create_invalid(_Config) ->
    AuthConfig = raw_mongo_auth_config(),

    InvalidConfigs =
        [
         AuthConfig#{mongo_type => <<"unknown">>},
         AuthConfig#{selector => <<"{ \"username\": \"${username}\" }">>}
        ],

    lists:foreach(
      fun(Config) ->
              {error, _} = emqx:update_config(
                             ?PATH,
                             {create_authenticator, ?GLOBAL, Config}),

              {ok, []} = emqx_authentication:list_authenticators(?GLOBAL)
      end,
      InvalidConfigs).

t_authenticate(_Config) ->
    ok = init_seeds(),
    ok = lists:foreach(
           fun(Sample) ->
                   ct:pal("test_user_auth sample: ~p", [Sample]),
                   test_user_auth(Sample)
           end,
           user_seeds()),
    ok = drop_seeds().

test_user_auth(#{credentials := Credentials0,
                 config_params := SpecificConfigParams,
                 result := Result}) ->
    AuthConfig = maps:merge(raw_mongo_auth_config(), SpecificConfigParams),

    {ok, _} = emqx:update_config(
                ?PATH,
                {create_authenticator, ?GLOBAL, AuthConfig}),

    Credentials = Credentials0#{
                    listener => 'tcp:default',
                    protocol => mqtt
                   },
    ?assertEqual(Result, emqx_access_control:authenticate(Credentials)),

    emqx_authn_test_lib:delete_authenticators(
      [authentication],
      ?GLOBAL).

t_destroy(_Config) ->
    ok = init_seeds(),
    AuthConfig = raw_mongo_auth_config(),

    {ok, _} = emqx:update_config(
                ?PATH,
                {create_authenticator, ?GLOBAL, AuthConfig}),

    {ok, [#{provider := emqx_authn_mongodb, state := State}]}
        = emqx_authentication:list_authenticators(?GLOBAL),

    {ok, _} = emqx_authn_mongodb:authenticate(
                #{username => <<"plain">>,
                  password => <<"plain">>
                 },
                State),

    emqx_authn_test_lib:delete_authenticators(
      [authentication],
      ?GLOBAL),

    % Authenticator should not be usable anymore
    ?assertException(
       error,
       _,
       emqx_authn_mongodb:authenticate(
         #{username => <<"plain">>,
           password => <<"plain">>
          },
         State)),

    ok = drop_seeds().

t_update(_Config) ->
    ok = init_seeds(),
    CorrectConfig = raw_mongo_auth_config(),
    IncorrectConfig =
        CorrectConfig#{selector => #{<<"wrongfield">> => <<"wrongvalue">>}},

    {ok, _} = emqx:update_config(
                ?PATH,
                {create_authenticator, ?GLOBAL, IncorrectConfig}),

    {error, not_authorized} = emqx_access_control:authenticate(
                                #{username => <<"plain">>,
                                  password => <<"plain">>,
                                  listener => 'tcp:default',
                                  protocol => mqtt
                                 }),

    % We update with config with correct selector, provider should update and work properly
    {ok, _} = emqx:update_config(
                ?PATH,
                {update_authenticator, ?GLOBAL, <<"password-based:mongodb">>, CorrectConfig}),

    {ok,_} = emqx_access_control:authenticate(
               #{username => <<"plain">>,
                 password => <<"plain">>,
                 listener => 'tcp:default',
                 protocol => mqtt
                }),
    ok = drop_seeds().

t_is_superuser(_Config) ->
    Config = raw_mongo_auth_config(),
    {ok, _} = emqx:update_config(
                ?PATH,
                {create_authenticator, ?GLOBAL, Config}),

    Checks = [
              {<<"0">>, false},
              {<<"">>, false},
              {null, false},
              {false, false},
              {0, false},

              {<<"1">>, true},
              {<<"val">>, true},
              {1, true},
              {123, true},
              {true, true}
             ],

    lists:foreach(fun test_is_superuser/1, Checks).

test_is_superuser({Value, ExpectedValue}) ->
    {true, _} = mc_worker_api:delete(?MONGO_CLIENT, <<"users">>, #{}),

    UserData = #{
                 username => <<"user">>,
                 password_hash => <<"plainsalt">>,
                 salt => <<"salt">>,
                 is_superuser => Value
                },

    {{true, _}, _} = mc_worker_api:insert(?MONGO_CLIENT, <<"users">>, [UserData]),

    Credentials = #{
                    listener => 'tcp:default',
                    protocol => mqtt,
                    username => <<"user">>,
                    password => <<"plain">>
                   },

    ?assertEqual(
       {ok, #{is_superuser => ExpectedValue}},
       emqx_access_control:authenticate(Credentials)).

%%------------------------------------------------------------------------------
%% Helpers
%%------------------------------------------------------------------------------

raw_mongo_auth_config() ->
    #{
      mechanism => <<"password-based">>,
      password_hash_algorithm => <<"plain">>,
      salt_position => <<"suffix">>,
      enable => <<"true">>,

      backend => <<"mongodb">>,
      mongo_type => <<"single">>,
      database => <<"mqtt">>,
      collection => <<"users">>,
      server => mongo_server(),

      selector => #{<<"username">> => <<"${username}">>},
      password_hash_field => <<"password_hash">>,
      salt_field => <<"salt">>,
      is_superuser_field => <<"is_superuser">>
     }.

user_seeds() ->
    [#{data => #{
         username => <<"plain">>,
         password_hash => <<"plainsalt">>,
         salt => <<"salt">>,
         is_superuser => <<"1">>
        },
       credentials => #{
         username => <<"plain">>,
         password => <<"plain">>
        },
       config_params => #{
        },
       result => {ok,#{is_superuser => true}}
      },

     #{data => #{
         username => <<"md5">>,
         password_hash => <<"9b4d0c43d206d48279e69b9ad7132e22">>,
         salt => <<"salt">>,
         is_superuser => <<"0">>
        },
       credentials => #{
         username => <<"md5">>,
         password => <<"md5">>
        },
       config_params => #{
         password_hash_algorithm => <<"md5">>,
         salt_position => <<"suffix">>
        },
       result => {ok,#{is_superuser => false}}
      },

     #{data => #{
         username => <<"sha256">>,
         password_hash => <<"ac63a624e7074776d677dd61a003b8c803eb11db004d0ec6ae032a5d7c9c5caf">>,
         salt => <<"salt">>,
         is_superuser => 1
        },
       credentials => #{
         clientid => <<"sha256">>,
         password => <<"sha256">>
        },
       config_params => #{
         selector => #{<<"username">> => <<"${clientid}">>},
         password_hash_algorithm => <<"sha256">>,
         salt_position => <<"prefix">>
        },
       result => {ok,#{is_superuser => true}}
      },

     #{data => #{
         username => <<"bcrypt">>,
         password_hash => <<"$2b$12$wtY3h20mUjjmeaClpqZVveDWGlHzCGsvuThMlneGHA7wVeFYyns2u">>,
         salt => <<"$2b$12$wtY3h20mUjjmeaClpqZVve">>,
         is_superuser => 0
        },
       credentials => #{
         username => <<"bcrypt">>,
         password => <<"bcrypt">>
        },
       config_params => #{
         password_hash_algorithm => <<"bcrypt">>,
         salt_position => <<"suffix">> % should be ignored
        },
       result => {ok,#{is_superuser => false}}
      },

     #{data => #{
         username => <<"bcrypt0">>,
         password_hash => <<"$2b$12$wtY3h20mUjjmeaClpqZVveDWGlHzCGsvuThMlneGHA7wVeFYyns2u">>,
         salt => <<"$2b$12$wtY3h20mUjjmeaClpqZVve">>,
         is_superuser => <<"0">>
        },
       credentials => #{
         username => <<"bcrypt0">>,
         password => <<"bcrypt">>
        },
       config_params => #{
         % clientid variable & username credentials
         selector => #{<<"username">> => <<"${clientid}">>},
         password_hash_algorithm => <<"bcrypt">>,
         salt_position => <<"suffix">>
        },
       result => {error,not_authorized}
      },

     #{data => #{
         username => <<"bcrypt1">>,
         password_hash => <<"$2b$12$wtY3h20mUjjmeaClpqZVveDWGlHzCGsvuThMlneGHA7wVeFYyns2u">>,
         salt => <<"$2b$12$wtY3h20mUjjmeaClpqZVve">>,
         is_superuser => <<"0">>
        },
       credentials => #{
         username => <<"bcrypt1">>,
         password => <<"bcrypt">>
        },
       config_params => #{
         selector => #{<<"userid">> => <<"${clientid}">>},
         password_hash_algorithm => <<"bcrypt">>,
         salt_position => <<"suffix">>
        },
       result => {error,not_authorized}
      },

     #{data => #{
         username => <<"bcrypt2">>,
         password_hash => <<"$2b$12$wtY3h20mUjjmeaClpqZVveDWGlHzCGsvuThMlneGHA7wVeFYyns2u">>,
         salt => <<"$2b$12$wtY3h20mUjjmeaClpqZVve">>,
         is_superuser => <<"0">>
        },
       credentials => #{
         username => <<"bcrypt2">>,
         % Wrong password
         password => <<"wrongpass">>
        },
       config_params => #{
         password_hash_algorithm => <<"bcrypt">>,
         salt_position => <<"suffix">>
        },
       result => {error,bad_username_or_password}
      }
    ].

init_seeds() ->
    Users = [Values || #{data := Values} <- user_seeds()],
    {{true, _}, _} = mc_worker_api:insert(?MONGO_CLIENT, <<"users">>, Users),
    ok.

drop_seeds() ->
    {true, _} = mc_worker_api:delete(?MONGO_CLIENT, <<"users">>, #{}),
    ok.

mongo_server() ->
    iolist_to_binary(
      io_lib:format(
        "~s:~b",
        [?MONGO_HOST, ?MONGO_PORT])).

mongo_config() ->
    [
     {database, <<"mqtt">>},
     {host, ?MONGO_HOST},
     {port, ?MONGO_PORT},
     {register, ?MONGO_CLIENT}
    ].

start_apps(Apps) ->
    lists:foreach(fun application:ensure_all_started/1, Apps).

stop_apps(Apps) ->
    lists:foreach(fun application:stop/1, Apps).
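The suite above drives MongoDB through mc_worker_api. For reference, here is a standalone sketch of seeding one record by hand with the same calls the suite itself uses; the <<"demo">> user and its field values are made up for illustration, only the connection options mirror mongo_config/0.

%% Standalone seeding sketch, reusing the mc_worker_api calls from the suite.
{ok, Conn} = mc_worker_api:connect([{database, <<"mqtt">>},
                                    {host, "mongo"},
                                    {port, 27017}]),
{{true, _}, _} = mc_worker_api:insert(Conn, <<"users">>,
                                      [#{username => <<"demo">>,
                                         password_hash => <<"demosalt">>,
                                         salt => <<"salt">>,
                                         is_superuser => false}]),
ok = mc_worker_api:disconnect(Conn).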
@ -0,0 +1,115 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------

-module(emqx_authn_mqtt_test_client).

-behaviour(gen_server).

-include_lib("emqx/include/emqx_mqtt.hrl").

%% API
-export([start_link/2,
         stop/1]).

-export([send/2]).

%% gen_server callbacks

-export([init/1,
         handle_call/3,
         handle_cast/2,
         handle_info/2,
         terminate/2]).

-define(TIMEOUT, 1000).
-define(TCP_OPTIONS, [binary, {packet, raw}, {active, once},
                      {nodelay, true}]).

-define(PARSE_OPTIONS,
        #{strict_mode => false,
          max_size => ?MAX_PACKET_SIZE,
          version => ?MQTT_PROTO_V5
         }).

%%--------------------------------------------------------------------
%% API
%%--------------------------------------------------------------------

start_link(Host, Port) ->
    gen_server:start_link(?MODULE, [Host, Port, self()], []).

stop(Pid) ->
    gen_server:call(Pid, stop).

send(Pid, Packet) ->
    gen_server:call(Pid, {send, Packet}).

%%--------------------------------------------------------------------
%% gen_server callbacks
%%--------------------------------------------------------------------

init([Host, Port, Owner]) ->
    {ok, Socket} = gen_tcp:connect(Host, Port, ?TCP_OPTIONS, ?TIMEOUT),
    {ok, #{owner => Owner,
           socket => Socket,
           parse_state => emqx_frame:initial_parse_state(?PARSE_OPTIONS)
          }}.

handle_info({tcp, _Sock, Data}, #{parse_state := PSt,
                                  owner := Owner,
                                  socket := Socket} = St) ->
    {NewPSt, Packets} = process_incoming(PSt, Data, []),
    ok = deliver(Owner, Packets),
    ok = run_sock(Socket),
    {noreply, St#{parse_state => NewPSt}};

handle_info({tcp_closed, _Sock}, St) ->
    {stop, normal, St}.

handle_call({send, Packet}, _From, #{socket := Socket} = St) ->
    ok = gen_tcp:send(Socket, emqx_frame:serialize(Packet, ?MQTT_PROTO_V5)),
    {reply, ok, St};

handle_call(stop, _From, #{socket := Socket} = St) ->
    ok = gen_tcp:close(Socket),
    {stop, normal, ok, St}.

handle_cast(_, St) ->
    {noreply, St}.

terminate(_Reason, _St) ->
    ok.

%%--------------------------------------------------------------------
%% internal functions
%%--------------------------------------------------------------------

process_incoming(PSt, Data, Packets) ->
    case emqx_frame:parse(Data, PSt) of
        {more, NewPSt} ->
            {NewPSt, lists:reverse(Packets)};
        {ok, Packet, Rest, NewPSt} ->
            process_incoming(NewPSt, Rest, [Packet | Packets])
    end.

deliver(_Owner, []) -> ok;
deliver(Owner, [Packet | Packets]) ->
    Owner ! {packet, Packet},
    deliver(Owner, Packets).

run_sock(Socket) ->
    inet:setopts(Socket, [{active, once}]).
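A quick usage sketch of this helper, along the lines of how the SCRAM suite further below drives it; it assumes a broker listening on 127.0.0.1:1883 and that the caller has emqx_mqtt.hrl included for the packet macros and records.

%% Usage sketch: open a connection, send a plain CONNECT, wait for the reply.
{ok, Client} = emqx_authn_mqtt_test_client:start_link("127.0.0.1", 1883),
ok = emqx_authn_mqtt_test_client:send(
       Client,
       ?CONNECT_PACKET(#mqtt_packet_connect{proto_ver = ?MQTT_PROTO_V5})),
receive
    {packet, Reply} -> ct:pal("broker replied: ~p", [Reply])
after 1000 ->
    ct:fail("no reply")
end,
ok = emqx_authn_mqtt_test_client:stop(Client).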
@ -38,6 +38,7 @@ groups() ->
     [{require_seeds, [], [t_authenticate, t_update, t_destroy]}].

 init_per_testcase(_, Config) ->
+    {ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000),
     emqx_authentication:initialize_authentication(?GLOBAL, []),
     emqx_authn_test_lib:delete_authenticators(
       [authentication],
@ -53,6 +54,7 @@ end_per_group(require_seeds, Config) ->
     Config.

 init_per_suite(Config) ->
+    _ = application:load(emqx_conf),
     case emqx_authn_test_lib:is_tcp_server_available(?MYSQL_HOST, ?MYSQL_PORT) of
         true ->
             ok = emqx_common_test_helpers:start_apps([emqx_authn]),
@ -117,9 +119,9 @@ t_authenticate(_Config) ->
       user_seeds()).

 test_user_auth(#{credentials := Credentials0,
-                 config_params := SpecificConfgParams,
+                 config_params := SpecificConfigParams,
                  result := Result}) ->
-    AuthConfig = maps:merge(raw_mysql_auth_config(), SpecificConfgParams),
+    AuthConfig = maps:merge(raw_mysql_auth_config(), SpecificConfigParams),

     {ok, _} = emqx:update_config(
                 ?PATH,
@ -38,6 +38,7 @@ groups() ->
     [{require_seeds, [], [t_authenticate, t_update, t_destroy, t_is_superuser]}].

 init_per_testcase(_, Config) ->
+    {ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000),
     emqx_authentication:initialize_authentication(?GLOBAL, []),
     emqx_authn_test_lib:delete_authenticators(
       [authentication],
@ -53,6 +54,7 @@ end_per_group(require_seeds, Config) ->
     Config.

 init_per_suite(Config) ->
+    _ = application:load(emqx_conf),
     case emqx_authn_test_lib:is_tcp_server_available(?PGSQL_HOST, ?PGSQL_PORT) of
         true ->
             ok = emqx_common_test_helpers:start_apps([emqx_authn]),
@ -117,9 +119,9 @@ t_authenticate(_Config) ->
       user_seeds()).

 test_user_auth(#{credentials := Credentials0,
-                 config_params := SpecificConfgParams,
+                 config_params := SpecificConfigParams,
                  result := Result}) ->
-    AuthConfig = maps:merge(raw_pgsql_auth_config(), SpecificConfgParams),
+    AuthConfig = maps:merge(raw_pgsql_auth_config(), SpecificConfigParams),

     {ok, _} = emqx:update_config(
                 ?PATH,
@ -23,12 +23,10 @@
 -include_lib("eunit/include/eunit.hrl").
 -include_lib("common_test/include/ct.hrl").

 -define(REDIS_HOST, "redis").
 -define(REDIS_PORT, 6379).
 -define(REDIS_RESOURCE, <<"emqx_authn_redis_SUITE">>).

 -define(PATH, [authentication]).

 all() ->
@ -38,6 +36,7 @@ groups() ->
     [{require_seeds, [], [t_authenticate, t_update, t_destroy]}].

 init_per_testcase(_, Config) ->
+    {ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000),
     emqx_authentication:initialize_authentication(?GLOBAL, []),
     emqx_authn_test_lib:delete_authenticators(
       [authentication],
@ -53,6 +52,7 @@ end_per_group(require_seeds, Config) ->
     Config.

 init_per_suite(Config) ->
+    _ = application:load(emqx_conf),
     case emqx_authn_test_lib:is_tcp_server_available(?REDIS_HOST, ?REDIS_PORT) of
         true ->
             ok = emqx_common_test_helpers:start_apps([emqx_authn]),
@ -98,11 +98,11 @@ t_create_invalid(_Config) ->
          AuthConfig#{password => <<"wrongpass">>},
          AuthConfig#{database => <<"5678">>},
          AuthConfig#{
-           query => <<"MGET password_hash:${username} salt:${username}">>},
+           cmd => <<"MGET password_hash:${username} salt:${username}">>},
          AuthConfig#{
-           query => <<"HMGET mqtt_user:${username} password_hash invalid_field">>},
+           cmd => <<"HMGET mqtt_user:${username} password_hash invalid_field">>},
          AuthConfig#{
-           query => <<"HMGET mqtt_user:${username} salt is_superuser">>}
+           cmd => <<"HMGET mqtt_user:${username} salt is_superuser">>}
         ],

     lists:foreach(
@ -124,9 +124,9 @@ t_authenticate(_Config) ->
       user_seeds()).

 test_user_auth(#{credentials := Credentials0,
-                 config_params := SpecificConfgParams,
+                 config_params := SpecificConfigParams,
                  result := Result}) ->
-    AuthConfig = maps:merge(raw_redis_auth_config(), SpecificConfgParams),
+    AuthConfig = maps:merge(raw_redis_auth_config(), SpecificConfigParams),

     {ok, _} = emqx:update_config(
                 ?PATH,
@ -177,7 +177,7 @@ t_update(_Config) ->
     CorrectConfig = raw_redis_auth_config(),
     IncorrectConfig =
         CorrectConfig#{
-          query => <<"HMGET invalid_key:${username} password_hash salt is_superuser">>},
+          cmd => <<"HMGET invalid_key:${username} password_hash salt is_superuser">>},

     {ok, _} = emqx:update_config(
                 ?PATH,
@ -214,7 +214,7 @@ raw_redis_auth_config() ->
       enable => <<"true">>,

       backend => <<"redis">>,
-      query => <<"HMGET mqtt_user:${username} password_hash salt is_superuser">>,
+      cmd => <<"HMGET mqtt_user:${username} password_hash salt is_superuser">>,
       database => <<"1">>,
       password => <<"public">>,
       server => redis_server()
@ -262,7 +262,7 @@ user_seeds() ->
        },
       key => "mqtt_user:sha256",
       config_params => #{
-        query => <<"HMGET mqtt_user:${clientid} password_hash salt is_superuser">>,
+        cmd => <<"HMGET mqtt_user:${clientid} password_hash salt is_superuser">>,
         password_hash_algorithm => <<"sha256">>,
         salt_position => <<"prefix">>
        },
@ -298,7 +298,7 @@ user_seeds() ->
       key => "mqtt_user:bcrypt0",
       config_params => #{
         % clientid variable & username credentials
-        query => <<"HMGET mqtt_client:${clientid} password_hash salt is_superuser">>,
+        cmd => <<"HMGET mqtt_client:${clientid} password_hash salt is_superuser">>,
         password_hash_algorithm => <<"bcrypt">>,
         salt_position => <<"suffix">>
        },
@ -316,8 +316,8 @@ user_seeds() ->
        },
       key => "mqtt_user:bcrypt1",
       config_params => #{
-        % Bad key in query
-        query => <<"HMGET badkey:${username} password_hash salt is_superuser">>,
+        % Bad key in cmd
+        cmd => <<"HMGET badkey:${username} password_hash salt is_superuser">>,
         password_hash_algorithm => <<"bcrypt">>,
         salt_position => <<"suffix">>
        },
@ -336,7 +336,7 @@ user_seeds() ->
        },
       key => "mqtt_user:bcrypt2",
       config_params => #{
-        query => <<"HMGET mqtt_user:${username} password_hash salt is_superuser">>,
+        cmd => <<"HMGET mqtt_user:${username} password_hash salt is_superuser">>,
         password_hash_algorithm => <<"bcrypt">>,
         salt_position => <<"suffix">>
        },
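The renamed `cmd` field is an ordinary Redis command. As a point of reference, the sketch below issues the same HMGET by hand; the "redis"/6379 connection parameters and the "mqtt_user:plain" key mirror the seeds above, while the use of the eredis client (and the exact shape of its replies) is an assumption, since the suite only exercises the command through the authenticator.

%% Hedged sketch: run the authenticator's HMGET manually with eredis.
{ok, C} = eredis:start_link("redis", 6379),
{ok, [PasswordHash, Salt, IsSuperuser]} =
    eredis:q(C, ["HMGET", "mqtt_user:plain",
                 "password_hash", "salt", "is_superuser"]),
ok = eredis:stop(C).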
@ -0,0 +1,375 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------

-module(emqx_enhanced_authn_scram_mnesia_SUITE).

-compile(export_all).
-compile(nowarn_export_all).

-include_lib("eunit/include/eunit.hrl").

-include_lib("emqx/include/emqx_mqtt.hrl").
-include("emqx_authn.hrl").

-define(PATH, [authentication]).

-define(USER_MAP, #{user_id := _,
                    is_superuser := _}).

all() ->
    emqx_common_test_helpers:all(?MODULE).

init_per_suite(Config) ->
    _ = application:load(emqx_conf),
    ok = emqx_common_test_helpers:start_apps([emqx_authn]),
    Config.

end_per_suite(_Config) ->
    ok = emqx_common_test_helpers:stop_apps([emqx_authn]).

init_per_testcase(_Case, Config) ->
    {ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000),
    mria:clear_table(emqx_enhanced_authn_scram_mnesia),
    emqx_authn_test_lib:delete_authenticators(
      [authentication],
      ?GLOBAL),
    Config.

end_per_testcase(_Case, Config) ->
    Config.

%%------------------------------------------------------------------------------
%% Tests
%%------------------------------------------------------------------------------

t_create(_Config) ->
    ValidConfig = #{
                    <<"mechanism">> => <<"scram">>,
                    <<"backend">> => <<"built-in-database">>,
                    <<"algorithm">> => <<"sha512">>,
                    <<"iteration_count">> => <<"4096">>
                   },

    {ok, _} = emqx:update_config(
                ?PATH,
                {create_authenticator, ?GLOBAL, ValidConfig}),

    {ok, [#{provider := emqx_enhanced_authn_scram_mnesia}]}
        = emqx_authentication:list_authenticators(?GLOBAL).

t_create_invalid(_Config) ->
    InvalidConfig = #{
                      <<"mechanism">> => <<"scram">>,
                      <<"backend">> => <<"built-in-database">>,
                      <<"algorithm">> => <<"sha271828">>,
                      <<"iteration_count">> => <<"4096">>
                     },

    {error, _} = emqx:update_config(
                   ?PATH,
                   {create_authenticator, ?GLOBAL, InvalidConfig}),

    {ok, []} = emqx_authentication:list_authenticators(?GLOBAL).

t_authenticate(_Config) ->
    Algorithm = sha512,
    Username = <<"u">>,
    Password = <<"p">>,

    init_auth(Username, Password, Algorithm),

    {ok, Pid} = emqx_authn_mqtt_test_client:start_link("127.0.0.1", 1883),

    ClientFirstMessage = esasl_scram:client_first_message(Username),

    ConnectPacket = ?CONNECT_PACKET(
                       #mqtt_packet_connect{
                          proto_ver = ?MQTT_PROTO_V5,
                          properties = #{
                            'Authentication-Method' => <<"SCRAM-SHA-512">>,
                            'Authentication-Data' => ClientFirstMessage
                           }
                         }),

    ok = emqx_authn_mqtt_test_client:send(Pid, ConnectPacket),

    ?AUTH_PACKET(
       ?RC_CONTINUE_AUTHENTICATION,
       #{'Authentication-Data' := ServerFirstMessage}) = receive_packet(),

    {continue, ClientFinalMessage, ClientCache} =
        esasl_scram:check_server_first_message(
          ServerFirstMessage,
          #{client_first_message => ClientFirstMessage,
            password => Password,
            algorithm => Algorithm}
         ),

    AuthContinuePacket = ?AUTH_PACKET(
                            ?RC_CONTINUE_AUTHENTICATION,
                            #{'Authentication-Method' => <<"SCRAM-SHA-512">>,
                              'Authentication-Data' => ClientFinalMessage}),

    ok = emqx_authn_mqtt_test_client:send(Pid, AuthContinuePacket),

    ?CONNACK_PACKET(
       ?RC_SUCCESS,
       _,
       #{'Authentication-Data' := ServerFinalMessage}) = receive_packet(),

    ok = esasl_scram:check_server_final_message(
           ServerFinalMessage, ClientCache#{algorithm => Algorithm}
          ).

t_authenticate_bad_username(_Config) ->
    Algorithm = sha512,
    Username = <<"u">>,
    Password = <<"p">>,

    init_auth(Username, Password, Algorithm),

    {ok, Pid} = emqx_authn_mqtt_test_client:start_link("127.0.0.1", 1883),

    ClientFirstMessage = esasl_scram:client_first_message(<<"badusername">>),

    ConnectPacket = ?CONNECT_PACKET(
                       #mqtt_packet_connect{
                          proto_ver = ?MQTT_PROTO_V5,
                          properties = #{
                            'Authentication-Method' => <<"SCRAM-SHA-512">>,
                            'Authentication-Data' => ClientFirstMessage
                           }
                         }),

    ok = emqx_authn_mqtt_test_client:send(Pid, ConnectPacket),

    ?CONNACK_PACKET(?RC_NOT_AUTHORIZED) = receive_packet().

t_authenticate_bad_password(_Config) ->
    Algorithm = sha512,
    Username = <<"u">>,
    Password = <<"p">>,

    init_auth(Username, Password, Algorithm),

    {ok, Pid} = emqx_authn_mqtt_test_client:start_link("127.0.0.1", 1883),

    ClientFirstMessage = esasl_scram:client_first_message(Username),

    ConnectPacket = ?CONNECT_PACKET(
                       #mqtt_packet_connect{
                          proto_ver = ?MQTT_PROTO_V5,
                          properties = #{
                            'Authentication-Method' => <<"SCRAM-SHA-512">>,
                            'Authentication-Data' => ClientFirstMessage
                           }
                         }),

    ok = emqx_authn_mqtt_test_client:send(Pid, ConnectPacket),

    ?AUTH_PACKET(
       ?RC_CONTINUE_AUTHENTICATION,
       #{'Authentication-Data' := ServerFirstMessage}) = receive_packet(),

    {continue, ClientFinalMessage, _ClientCache} =
        esasl_scram:check_server_first_message(
          ServerFirstMessage,
          #{client_first_message => ClientFirstMessage,
            password => <<"badpassword">>,
            algorithm => Algorithm}
         ),

    AuthContinuePacket = ?AUTH_PACKET(
                            ?RC_CONTINUE_AUTHENTICATION,
                            #{'Authentication-Method' => <<"SCRAM-SHA-512">>,
                              'Authentication-Data' => ClientFinalMessage}),

    ok = emqx_authn_mqtt_test_client:send(Pid, AuthContinuePacket),

    ?CONNACK_PACKET(?RC_NOT_AUTHORIZED) = receive_packet().

t_destroy(_) ->
    Config = config(),
    OtherId = list_to_binary([<<"id-other">>]),
    {ok, State0} = emqx_enhanced_authn_scram_mnesia:create(<<"id">>, Config),
    {ok, StateOther} = emqx_enhanced_authn_scram_mnesia:create(OtherId, Config),

    User = #{user_id => <<"u">>, password => <<"p">>},

    {ok, _} = emqx_enhanced_authn_scram_mnesia:add_user(User, State0),
    {ok, _} = emqx_enhanced_authn_scram_mnesia:add_user(User, StateOther),

    {ok, _} = emqx_enhanced_authn_scram_mnesia:lookup_user(<<"u">>, State0),
    {ok, _} = emqx_enhanced_authn_scram_mnesia:lookup_user(<<"u">>, StateOther),

    ok = emqx_enhanced_authn_scram_mnesia:destroy(State0),

    {ok, State1} = emqx_enhanced_authn_scram_mnesia:create(<<"id">>, Config),
    {error,not_found} = emqx_enhanced_authn_scram_mnesia:lookup_user(<<"u">>, State1),
    {ok, _} = emqx_enhanced_authn_scram_mnesia:lookup_user(<<"u">>, StateOther).

t_add_user(_) ->
    Config = config(),
    {ok, State} = emqx_enhanced_authn_scram_mnesia:create(<<"id">>, Config),

    User = #{user_id => <<"u">>, password => <<"p">>},
    {ok, _} = emqx_enhanced_authn_scram_mnesia:add_user(User, State),
    {error, already_exist} = emqx_enhanced_authn_scram_mnesia:add_user(User, State).

t_delete_user(_) ->
    Config = config(),
    {ok, State} = emqx_enhanced_authn_scram_mnesia:create(<<"id">>, Config),

    {error, not_found} = emqx_enhanced_authn_scram_mnesia:delete_user(<<"u">>, State),
    User = #{user_id => <<"u">>, password => <<"p">>},
    {ok, _} = emqx_enhanced_authn_scram_mnesia:add_user(User, State),

    ok = emqx_enhanced_authn_scram_mnesia:delete_user(<<"u">>, State),
    {error, not_found} = emqx_enhanced_authn_scram_mnesia:delete_user(<<"u">>, State).

t_update_user(_) ->
    Config = config(),
    {ok, State} = emqx_enhanced_authn_scram_mnesia:create(<<"id">>, Config),

    User = #{user_id => <<"u">>, password => <<"p">>},
    {ok, _} = emqx_enhanced_authn_scram_mnesia:add_user(User, State),
    {ok, #{is_superuser := false}} = emqx_enhanced_authn_scram_mnesia:lookup_user(<<"u">>, State),

    {ok,
     #{user_id := <<"u">>,
       is_superuser := true}} = emqx_enhanced_authn_scram_mnesia:update_user(
                                  <<"u">>,
                                  #{password => <<"p1">>, is_superuser => true},
                                  State),

    {ok, #{is_superuser := true}} = emqx_enhanced_authn_scram_mnesia:lookup_user(<<"u">>, State).

t_list_users(_) ->
    Config = config(),
    {ok, State} = emqx_enhanced_authn_scram_mnesia:create(<<"id">>, Config),

    Users = [#{user_id => <<"u1">>, password => <<"p">>},
             #{user_id => <<"u2">>, password => <<"p">>},
             #{user_id => <<"u3">>, password => <<"p">>}],

    lists:foreach(
      fun(U) -> {ok, _} = emqx_enhanced_authn_scram_mnesia:add_user(U, State) end,
      Users),

    {ok,
     #{data := [?USER_MAP, ?USER_MAP],
       meta := #{page := 1, limit := 2, count := 3}}} = emqx_enhanced_authn_scram_mnesia:list_users(
                                                          #{<<"page">> => 1, <<"limit">> => 2},
                                                          State),
    {ok,
     #{data := [?USER_MAP],
       meta := #{page := 2, limit := 2, count := 3}}} = emqx_enhanced_authn_scram_mnesia:list_users(
                                                          #{<<"page">> => 2, <<"limit">> => 2},
                                                          State).

t_is_superuser(_Config) ->
    ok = test_is_superuser(#{is_superuser => false}, false),
    ok = test_is_superuser(#{is_superuser => true}, true),
    ok = test_is_superuser(#{}, false).

test_is_superuser(UserInfo, ExpectedIsSuperuser) ->
    Config = config(),
    {ok, State} = emqx_enhanced_authn_scram_mnesia:create(<<"id">>, Config),

    Username = <<"u">>,
    Password = <<"p">>,

    UserInfo0 = UserInfo#{user_id => Username,
                          password => Password},

    {ok, _} = emqx_enhanced_authn_scram_mnesia:add_user(UserInfo0, State),

    ClientFirstMessage = esasl_scram:client_first_message(Username),

    {continue, ServerFirstMessage, ServerCache}
        = emqx_enhanced_authn_scram_mnesia:authenticate(
            #{auth_method => <<"SCRAM-SHA-512">>,
              auth_data => ClientFirstMessage,
              auth_cache => #{}
             },
            State),

    {continue, ClientFinalMessage, ClientCache} =
        esasl_scram:check_server_first_message(
          ServerFirstMessage,
          #{client_first_message => ClientFirstMessage,
            password => Password,
            algorithm => sha512}
         ),

    {ok, UserInfo1, ServerFinalMessage}
        = emqx_enhanced_authn_scram_mnesia:authenticate(
            #{auth_method => <<"SCRAM-SHA-512">>,
              auth_data => ClientFinalMessage,
              auth_cache => ServerCache
             },
            State),

    ok = esasl_scram:check_server_final_message(
           ServerFinalMessage, ClientCache#{algorithm => sha512}
          ),

    ?assertMatch(#{is_superuser := ExpectedIsSuperuser}, UserInfo1),

    ok = emqx_enhanced_authn_scram_mnesia:destroy(State).

%%------------------------------------------------------------------------------
%% Helpers
%%------------------------------------------------------------------------------

config() ->
    #{
      mechanism => <<"scram">>,
      backend => <<"built-in-database">>,
      algorithm => sha512,
      iteration_count => 4096
     }.

raw_config(Algorithm) ->
    #{
      <<"mechanism">> => <<"scram">>,
      <<"backend">> => <<"built-in-database">>,
      <<"algorithm">> => atom_to_binary(Algorithm),
      <<"iteration_count">> => <<"4096">>
     }.

init_auth(Username, Password, Algorithm) ->
    Config = raw_config(Algorithm),

    {ok, _} = emqx:update_config(
                ?PATH,
                {create_authenticator, ?GLOBAL, Config}),

    {ok, [#{state := State}]} = emqx_authentication:list_authenticators(?GLOBAL),

    emqx_enhanced_authn_scram_mnesia:add_user(
      #{user_id => Username, password => Password},
      State).

receive_packet() ->
    receive
        {packet, Packet} ->
            ct:pal("Delivered packet: ~p", [Packet]),
            Packet
    after 1000 ->
            ct:fail("Deliver timeout")
    end.
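The enhanced-auth tests above interleave the MQTT packet exchange with the SCRAM math. The sketch below condenses just the client-side SCRAM steps, using only the esasl_scram calls already exercised in this suite; ExchangeFun is a placeholder for any transport (it would wrap emqx_authn_mqtt_test_client here) and is an assumption, not an EMQX API.

%% Condensed client-side SCRAM flow, built from the esasl_scram calls above.
%% ExchangeFun(AuthData) must deliver AuthData to the server and return the
%% 'Authentication-Data' carried by the server's reply.
scram_client(Username, Password, Algorithm, ExchangeFun) ->
    ClientFirst = esasl_scram:client_first_message(Username),
    ServerFirst = ExchangeFun(ClientFirst),
    {continue, ClientFinal, Cache} =
        esasl_scram:check_server_first_message(
          ServerFirst,
          #{client_first_message => ClientFirst,
            password => Password,
            algorithm => Algorithm}),
    ServerFinal = ExchangeFun(ClientFinal),
    ok = esasl_scram:check_server_final_message(
           ServerFinal, Cache#{algorithm => Algorithm}).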
@ -23,7 +23,7 @@ authz:{
       keyfile: "etc/certs/client-key.pem"
     }
   }
-  sql: "select ipaddress, username, clientid, action, permission, topic from mqtt_authz where ipaddr = '%a' or username = '%u' or clientid = '%c'"
+  sql: "select ipaddress, username, clientid, action, permission, topic from mqtt_authz where ipaddr = ${peerhost} or username = ${username} or clientid = ${clientid}"
 },
 {
   type: postgresql
@ -36,7 +36,7 @@ authz:{
     auto_reconnect: true
     ssl: {enable: false}
   }
-  sql: "select ipaddress, username, clientid, action, permission, topic from mqtt_authz where ipaddr = '%a' or username = '%u' or username = '$all' or clientid = '%c'"
+  sql: "select ipaddress, username, clientid, action, permission, topic from mqtt_authz where ipaddr = ${peerhost} or username = ${username} or username = '$all' or clientid = ${clientid}"
 },
 {
   type: redis
@ -48,7 +48,7 @@ authz:{
     auto_reconnect: true
     ssl: {enable: false}
   }
-  cmd: "HGETALL mqtt_authz:%u"
+  cmd: "HGETALL mqtt_authz:${username}"
 },
 {
   principal: {username: "^admin?"}
@ -22,7 +22,7 @@ authorization {
 #     certfile: "{{ platform_etc_dir }}/certs/client-cert.pem"
 #     keyfile: "{{ platform_etc_dir }}/certs/client-key.pem"
 #   }
-#   query: "select ipaddress, username, clientid, action, permission, topic from mqtt_authz where ipaddr = '%a' or username = '%u' or clientid = '%c'"
+#   query: "select ipaddress, username, clientid, action, permission, topic from mqtt_authz where ipaddr = ${peerhost} or username = ${username} or clientid = ${clientid}"
 # },
 # {
 #   type: postgresql
@ -33,7 +33,7 @@ authorization {
 #   password: public
 #   auto_reconnect: true
 #   ssl: {enable: false}
-#   query: "select ipaddress, username, clientid, action, permission, topic from mqtt_authz where ipaddr = '%a' or username = '%u' or username = '$all' or clientid = '%c'"
+#   query: "select ipaddress, username, clientid, action, permission, topic from mqtt_authz where ipaddr = ${peerhost} or username = ${username} or username = '$all' or clientid = ${clientid}"
 # },
 # {
 #   type: redis
@ -43,7 +43,7 @@ authorization {
 #   password: public
 #   auto_reconnect: true
 #   ssl: {enable: false}
-#   cmd: "HGETALL mqtt_authz:%u"
+#   cmd: "HGETALL mqtt_authz:${username}"
 # },
 # {
 #   type: mongodb
@ -53,7 +53,7 @@ authorization {
 #   database: mqtt
 #   ssl: {enable: false}
 #   collection: mqtt_authz
-#   selector: { "$or": [ { "username": "%u" }, { "clientid": "%c" } ] }
+#   selector: { "$or": [ { "username": "${username}" }, { "clientid": "${clientid}" } ] }
 # },
 {
   type: built-in-database
@ -40,6 +40,8 @@
|
||||||
|
|
||||||
-export([acl_conf_file/0]).
|
-export([acl_conf_file/0]).
|
||||||
|
|
||||||
|
-export([ph_to_re/1]).
|
||||||
|
|
||||||
-spec(register_metrics() -> ok).
|
-spec(register_metrics() -> ok).
|
||||||
register_metrics() ->
|
register_metrics() ->
|
||||||
lists:foreach(fun emqx_metrics:ensure/1, ?AUTHZ_METRICS).
|
lists:foreach(fun emqx_metrics:ensure/1, ?AUTHZ_METRICS).
|
||||||
|
@ -64,11 +66,14 @@ move(Type, Cmd) ->
|
||||||
move(Type, Cmd, #{}).
|
move(Type, Cmd, #{}).
|
||||||
|
|
||||||
move(Type, #{<<"before">> := Before}, Opts) ->
|
move(Type, #{<<"before">> := Before}, Opts) ->
|
||||||
emqx:update_config(?CONF_KEY_PATH, {?CMD_MOVE, type(Type), ?CMD_MOVE_BEFORE(type(Before))}, Opts);
|
emqx:update_config( ?CONF_KEY_PATH
|
||||||
|
, {?CMD_MOVE, type(Type), ?CMD_MOVE_BEFORE(type(Before))}, Opts);
|
||||||
move(Type, #{<<"after">> := After}, Opts) ->
|
move(Type, #{<<"after">> := After}, Opts) ->
|
||||||
emqx:update_config(?CONF_KEY_PATH, {?CMD_MOVE, type(Type), ?CMD_MOVE_AFTER(type(After))}, Opts);
|
emqx:update_config( ?CONF_KEY_PATH
|
||||||
|
, {?CMD_MOVE, type(Type), ?CMD_MOVE_AFTER(type(After))}, Opts);
|
||||||
move(Type, Position, Opts) ->
|
move(Type, Position, Opts) ->
|
||||||
emqx:update_config(?CONF_KEY_PATH, {?CMD_MOVE, type(Type), Position}, Opts).
|
emqx:update_config( ?CONF_KEY_PATH
|
||||||
|
, {?CMD_MOVE, type(Type), Position}, Opts).
|
||||||
|
|
||||||
update(Cmd, Sources) ->
|
update(Cmd, Sources) ->
|
||||||
update(Cmd, Sources, #{}).
|
update(Cmd, Sources, #{}).
|
||||||
|
@ -155,7 +160,8 @@ do_post_update({{?CMD_REPLACE, Type}, Source}, _NewSources) when is_map(Source)
|
||||||
{OldSource, Front, Rear} = take(Type, OldInitedSources),
|
{OldSource, Front, Rear} = take(Type, OldInitedSources),
|
||||||
ok = ensure_resource_deleted(OldSource),
|
ok = ensure_resource_deleted(OldSource),
|
||||||
InitedSources = init_sources(check_sources([Source])),
|
InitedSources = init_sources(check_sources([Source])),
|
||||||
ok = emqx_hooks:put('client.authorize', {?MODULE, authorize, [Front ++ InitedSources ++ Rear]}, -1),
|
ok = emqx_hooks:put( 'client.authorize'
|
||||||
|
, {?MODULE, authorize, [Front ++ InitedSources ++ Rear]}, -1),
|
||||||
ok = emqx_authz_cache:drain_cache();
|
ok = emqx_authz_cache:drain_cache();
|
||||||
do_post_update({{?CMD_DELETE, Type}, _Source}, _NewSources) ->
|
do_post_update({{?CMD_DELETE, Type}, _Source}, _NewSources) ->
|
||||||
OldInitedSources = lookup(),
|
OldInitedSources = lookup(),
|
||||||
|
@ -201,7 +207,12 @@ check_dup_types([Source | Sources], Checked) ->
|
||||||
create_dry_run(T, Source) ->
|
create_dry_run(T, Source) ->
|
||||||
case is_connector_source(T) of
|
case is_connector_source(T) of
|
||||||
true ->
|
true ->
|
||||||
[NSource] = check_sources([Source]),
|
[CheckedSource] = check_sources([Source]),
|
||||||
|
case T of
|
||||||
|
http ->
|
||||||
|
URIMap = maps:get(url, CheckedSource),
|
||||||
|
NSource = maps:put(base_url, maps:remove(query, URIMap), CheckedSource)
|
||||||
|
end,
|
||||||
emqx_resource:create_dry_run(connector_module(T), NSource);
|
emqx_resource:create_dry_run(connector_module(T), NSource);
|
||||||
false ->
|
false ->
|
||||||
ok
|
ok
|
||||||
|
@ -267,7 +278,7 @@ init_source(#{type := DB,
|
||||||
{error, Reason} -> error({load_config_error, Reason});
|
{error, Reason} -> error({load_config_error, Reason});
|
||||||
Id -> Source#{annotations =>
|
Id -> Source#{annotations =>
|
||||||
#{id => Id,
|
#{id => Id,
|
||||||
query => Mod:parse_query(SQL)
|
query => erlang:apply(Mod, parse_query, [SQL])
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
end.
|
end.
|
||||||
|
@ -277,22 +288,36 @@ init_source(#{type := DB,
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
%% @doc Check AuthZ
|
%% @doc Check AuthZ
|
||||||
-spec(authorize(emqx_types:clientinfo(), emqx_types:all(), emqx_types:topic(), allow | deny, sources())
|
-spec(authorize( emqx_types:clientinfo()
|
||||||
|
, emqx_types:all()
|
||||||
|
, emqx_types:topic()
|
||||||
|
, allow | deny
|
||||||
|
, sources())
|
||||||
-> {stop, allow} | {ok, deny}).
|
-> {stop, allow} | {ok, deny}).
|
||||||
authorize(#{username := Username,
|
authorize(#{username := Username,
|
||||||
peerhost := IpAddress
|
peerhost := IpAddress
|
||||||
} = Client, PubSub, Topic, DefaultResult, Sources) ->
|
} = Client, PubSub, Topic, DefaultResult, Sources) ->
|
||||||
case do_authorize(Client, PubSub, Topic, Sources) of
|
case do_authorize(Client, PubSub, Topic, Sources) of
|
||||||
{matched, allow} ->
|
{matched, allow} ->
|
||||||
?SLOG(info, #{msg => "authorization_permission_allowed", username => Username, ipaddr => IpAddress, topic => Topic}),
|
?SLOG(info, #{msg => "authorization_permission_allowed",
|
||||||
|
username => Username,
|
||||||
|
ipaddr => IpAddress,
|
||||||
|
topic => Topic}),
|
||||||
emqx_metrics:inc(?AUTHZ_METRICS(allow)),
|
emqx_metrics:inc(?AUTHZ_METRICS(allow)),
|
||||||
{stop, allow};
|
{stop, allow};
|
||||||
{matched, deny} ->
|
{matched, deny} ->
|
||||||
?SLOG(info, #{msg => "authorization_permission_denied", username => Username, ipaddr => IpAddress, topic => Topic}),
|
?SLOG(info, #{msg => "authorization_permission_denied",
|
||||||
|
username => Username,
|
||||||
|
ipaddr => IpAddress,
|
||||||
|
topic => Topic}),
|
||||||
emqx_metrics:inc(?AUTHZ_METRICS(deny)),
|
emqx_metrics:inc(?AUTHZ_METRICS(deny)),
|
||||||
{stop, deny};
|
{stop, deny};
|
||||||
nomatch ->
|
nomatch ->
|
||||||
?SLOG(info, #{msg => "authorization_failed_nomatch", username => Username, ipaddr => IpAddress, topic => Topic, reason => "no-match rule"}),
|
?SLOG(info, #{msg => "authorization_failed_nomatch",
|
||||||
|
username => Username,
|
||||||
|
ipaddr => IpAddress,
|
||||||
|
topic => Topic,
|
||||||
|
reason => "no-match rule"}),
|
||||||
{stop, DefaultResult}
|
{stop, DefaultResult}
|
||||||
end.
|
end.
|
||||||
|
|
||||||
|
@ -309,7 +334,7 @@ do_authorize(Client, PubSub, Topic, [#{type := file} = F | Tail]) ->
|
||||||
do_authorize(Client, PubSub, Topic,
|
do_authorize(Client, PubSub, Topic,
|
||||||
[Connector = #{type := Type} | Tail] ) ->
|
[Connector = #{type := Type} | Tail] ) ->
|
||||||
Mod = authz_module(Type),
|
Mod = authz_module(Type),
|
||||||
case Mod:authorize(Client, PubSub, Topic, Connector) of
|
case erlang:apply(Mod, authorize, [Client, PubSub, Topic, Connector]) of
|
||||||
nomatch -> do_authorize(Client, PubSub, Topic, Tail);
|
nomatch -> do_authorize(Client, PubSub, Topic, Tail);
|
||||||
Matched -> Matched
|
Matched -> Matched
|
||||||
end.
|
end.
|
||||||
|
@ -381,8 +406,12 @@ type(postgresql) -> postgresql;
 type(<<"postgresql">>) -> postgresql;
 type('built-in-database') -> 'built-in-database';
 type(<<"built-in-database">>) -> 'built-in-database';
-type(Unknown) -> error({unknown_authz_source_type, Unknown}). % should never happend if the input is type-checked by hocon schema
+%% should never happen if the input is type-checked by hocon schema
+type(Unknown) -> error({unknown_authz_source_type, Unknown}).

 %% @doc where the acl.conf file is stored.
 acl_conf_file() ->
     filename:join([emqx:data_dir(), "authz", "acl.conf"]).
+
+ph_to_re(VarPH) ->
+    re:replace(VarPH, "[\\$\\{\\}]", "\\\\&", [global, {return, list}]).
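Why ph_to_re/1 is introduced here: placeholders such as ${clientid} are later fed to re:replace/4 as patterns, and $, { and } are regex metacharacters, so they get a backslash prefix first. A self-contained sketch of the round trip (module name and values are made up; only OTP's re module is used):

-module(ph_replace_demo).
-export([demo/0]).

%% Same escaping as ph_to_re/1 above: prefix $, { and } with a backslash so
%% the placeholder can be used as a literal re pattern.
ph_to_re(VarPH) ->
    re:replace(VarPH, "[\\$\\{\\}]", "\\\\&", [global, {return, list}]).

demo() ->
    Template = <<"/acl?clientid=${clientid}&topic=${topic}">>,
    Step1 = re:replace(Template, ph_to_re("${clientid}"),
                       <<"c1">>, [global, {return, binary}]),
    %% evaluates to <<"/acl?clientid=c1&topic=t%2F1">>
    re:replace(Step1, ph_to_re("${topic}"),
               <<"t%2F1">>, [global, {return, binary}]).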
@ -440,14 +440,15 @@ read_certs(#{<<"ssl">> := SSL} = Source) ->
         {error, Reason} ->
             ?SLOG(error, Reason#{msg => failed_to_readd_ssl_file}),
             throw(failed_to_readd_ssl_file);
-        NewSSL ->
+        {ok, NewSSL} ->
             Source#{<<"ssl">> => NewSSL}
     end;
 read_certs(Source) -> Source.

 maybe_write_certs(#{<<"ssl">> := #{<<"enable">> := true} = SSL} = Source) ->
     Type = maps:get(<<"type">>, Source),
-    emqx_tls_lib:ensure_ssl_files(filename:join(["authz", Type]), SSL);
+    {ok, Return} = emqx_tls_lib:ensure_ssl_files(filename:join(["authz", Type]), SSL),
+    maps:put(<<"ssl">>, Return, Source);
 maybe_write_certs(Source) -> Source.

 write_file(Filename, Bytes0) ->
@ -24,6 +24,7 @@
|
||||||
%% AuthZ Callbacks
|
%% AuthZ Callbacks
|
||||||
-export([ authorize/4
|
-export([ authorize/4
|
||||||
, description/0
|
, description/0
|
||||||
|
, parse_url/1
|
||||||
]).
|
]).
|
||||||
|
|
||||||
-ifdef(TEST).
|
-ifdef(TEST).
|
||||||
|
@ -36,7 +37,7 @@ description() ->
|
||||||
|
|
||||||
authorize(Client, PubSub, Topic,
|
authorize(Client, PubSub, Topic,
|
||||||
#{type := http,
|
#{type := http,
|
||||||
url := #{path := Path} = Url,
|
url := #{path := Path} = URL,
|
||||||
headers := Headers,
|
headers := Headers,
|
||||||
method := Method,
|
method := Method,
|
||||||
request_timeout := RequestTimeout,
|
request_timeout := RequestTimeout,
|
||||||
|
@ -44,7 +45,7 @@ authorize(Client, PubSub, Topic,
|
||||||
} = Source) ->
|
} = Source) ->
|
||||||
Request = case Method of
|
Request = case Method of
|
||||||
get ->
|
get ->
|
||||||
Query = maps:get(query, Url, ""),
|
Query = maps:get(query, URL, ""),
|
||||||
Path1 = replvar(Path ++ "?" ++ Query, PubSub, Topic, Client),
|
Path1 = replvar(Path ++ "?" ++ Query, PubSub, Topic, Client),
|
||||||
{Path1, maps:to_list(Headers)};
|
{Path1, maps:to_list(Headers)};
|
||||||
_ ->
|
_ ->
|
||||||
|
@ -56,10 +57,32 @@ authorize(Client, PubSub, Topic,
|
||||||
Path1 = replvar(Path, PubSub, Topic, Client),
|
Path1 = replvar(Path, PubSub, Topic, Client),
|
||||||
{Path1, maps:to_list(Headers), Body1}
|
{Path1, maps:to_list(Headers), Body1}
|
||||||
end,
|
end,
|
||||||
case emqx_resource:query(ResourceID, {Method, Request, RequestTimeout}) of
|
case emqx_resource:query(ResourceID, {Method, Request, RequestTimeout}) of
|
||||||
{ok, 204, _Headers} -> {matched, allow};
|
{ok, 200, _Headers} ->
|
||||||
{ok, 200, _Headers, _Body} -> {matched, allow};
|
{matched, allow};
|
||||||
_ -> nomatch
|
{ok, 204, _Headers} ->
|
||||||
|
{matched, allow};
|
||||||
|
{ok, 200, _Headers, _Body} ->
|
||||||
|
{matched, allow};
|
||||||
|
{ok, _Status, _Headers, _Body} ->
|
||||||
|
nomatch;
|
||||||
|
{error, Reason} ->
|
||||||
|
?SLOG(error, #{msg => "http_server_query_failed",
|
||||||
|
resource => ResourceID,
|
||||||
|
reason => Reason}),
|
||||||
|
ignore
|
||||||
|
end.
|
||||||
|
|
||||||
|
parse_url(URL)
|
||||||
|
when URL =:= undefined ->
|
||||||
|
#{};
|
||||||
|
parse_url(URL) ->
|
||||||
|
{ok, URIMap} = emqx_http_lib:uri_parse(URL),
|
||||||
|
case maps:get(query, URIMap, undefined) of
|
||||||
|
undefined ->
|
||||||
|
URIMap#{query => ""};
|
||||||
|
_ ->
|
||||||
|
URIMap
|
||||||
end.
|
end.
|
||||||
|
|
||||||
query_string(Body) ->
|
query_string(Body) ->
|
||||||
|
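The reworked response handling above boils down to a three-way mapping: a 200 or 204 reply allows, any other status is treated as no match, and a transport error is logged and then ignored. A minimal standalone sketch of just that mapping (the emqx_resource:query call and the ?SLOG side effect are assumed, not reproduced):

-module(authz_http_result_demo).
-export([to_authz_result/1]).

%% Input is the reply shape shown in the hunk above:
%% {ok, Status, Headers} | {ok, Status, Headers, Body} | {error, Reason}.
to_authz_result({ok, 200, _Headers})            -> {matched, allow};
to_authz_result({ok, 204, _Headers})            -> {matched, allow};
to_authz_result({ok, 200, _Headers, _Body})     -> {matched, allow};
to_authz_result({ok, _Status, _Headers, _Body}) -> nomatch;
to_authz_result({error, _Reason})               -> ignore.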
@ -87,19 +110,19 @@ replvar(Str0, PubSub, Topic,
|
||||||
}) when is_list(Str0);
|
}) when is_list(Str0);
|
||||||
is_binary(Str0) ->
|
is_binary(Str0) ->
|
||||||
NTopic = emqx_http_lib:uri_encode(Topic),
|
NTopic = emqx_http_lib:uri_encode(Topic),
|
||||||
Str1 = re:replace( Str0, ?PH_S_CLIENTID
|
Str1 = re:replace( Str0, emqx_authz:ph_to_re(?PH_S_CLIENTID)
|
||||||
, Clientid, [global, {return, binary}]),
|
, bin(Clientid), [global, {return, binary}]),
|
||||||
Str2 = re:replace( Str1, ?PH_S_USERNAME
|
Str2 = re:replace( Str1, emqx_authz:ph_to_re(?PH_S_USERNAME)
|
||||||
, bin(Username), [global, {return, binary}]),
|
, bin(Username), [global, {return, binary}]),
|
||||||
Str3 = re:replace( Str2, ?PH_S_HOST
|
Str3 = re:replace( Str2, emqx_authz:ph_to_re(?PH_S_HOST)
|
||||||
, inet_parse:ntoa(IpAddress), [global, {return, binary}]),
|
, inet_parse:ntoa(IpAddress), [global, {return, binary}]),
|
||||||
Str4 = re:replace( Str3, ?PH_S_PROTONAME
|
Str4 = re:replace( Str3, emqx_authz:ph_to_re(?PH_S_PROTONAME)
|
||||||
, bin(Protocol), [global, {return, binary}]),
|
, bin(Protocol), [global, {return, binary}]),
|
||||||
Str5 = re:replace( Str4, ?PH_S_MOUNTPOINT
|
Str5 = re:replace( Str4, emqx_authz:ph_to_re(?PH_S_MOUNTPOINT)
|
||||||
, Mountpoint, [global, {return, binary}]),
|
, bin(Mountpoint), [global, {return, binary}]),
|
||||||
Str6 = re:replace( Str5, ?PH_S_TOPIC
|
Str6 = re:replace( Str5, emqx_authz:ph_to_re(?PH_S_TOPIC)
|
||||||
, NTopic, [global, {return, binary}]),
|
, bin(NTopic), [global, {return, binary}]),
|
||||||
Str7 = re:replace( Str6, ?PH_S_ACTION
|
Str7 = re:replace( Str6, emqx_authz:ph_to_re(?PH_S_ACTION)
|
||||||
, bin(PubSub), [global, {return, binary}]),
|
, bin(PubSub), [global, {return, binary}]),
|
||||||
Str7.
|
Str7.
|
||||||
|
|
||||||
|
|
|
@ -76,11 +76,11 @@ replvar(Selector, #{clientid := Clientid,
|
||||||
end || M <- V],
|
end || M <- V],
|
||||||
AccIn);
|
AccIn);
|
||||||
InFun(K, V, AccIn) when is_binary(V) ->
|
InFun(K, V, AccIn) when is_binary(V) ->
|
||||||
V1 = re:replace( V, ?PH_S_CLIENTID
|
V1 = re:replace( V, emqx_authz:ph_to_re(?PH_S_CLIENTID)
|
||||||
, bin(Clientid), [global, {return, binary}]),
|
, bin(Clientid), [global, {return, binary}]),
|
||||||
V2 = re:replace( V1, ?PH_S_USERNAME
|
V2 = re:replace( V1, emqx_authz:ph_to_re(?PH_S_USERNAME)
|
||||||
, bin(Username), [global, {return, binary}]),
|
, bin(Username), [global, {return, binary}]),
|
||||||
V3 = re:replace( V2, ?PH_S_HOST
|
V3 = re:replace( V2, emqx_authz:ph_to_re(?PH_S_HOST)
|
||||||
, inet_parse:ntoa(IpAddress), [global, {return, binary}]),
|
, inet_parse:ntoa(IpAddress), [global, {return, binary}]),
|
||||||
maps:put(K, V3, AccIn);
|
maps:put(K, V3, AccIn);
|
||||||
InFun(K, V, AccIn) -> maps:put(K, V, AccIn)
|
InFun(K, V, AccIn) -> maps:put(K, V, AccIn)
|
||||||
|
|
|
@ -71,8 +71,9 @@ replvar(Cmd, Client = #{username := Username}) ->
 replvar(Cmd, _) ->
     Cmd.

-repl(S, _Var, undefined) ->
+repl(S, _VarPH, undefined) ->
     S;
-repl(S, Var, Val) ->
+repl(S, VarPH, Val) ->
     NVal = re:replace(Val, "&", "\\\\&", [global, {return, list}]),
-    re:replace(S, Var, NVal, [{return, list}]).
+    NVarPH = emqx_authz:ph_to_re(VarPH),
+    re:replace(S, NVarPH, NVal, [{return, list}]).
|
||||||
-export([ namespace/0
|
-export([ namespace/0
|
||||||
, roots/0
|
, roots/0
|
||||||
, fields/1
|
, fields/1
|
||||||
|
, validations/0
|
||||||
]).
|
]).
|
||||||
|
|
||||||
-import(emqx_schema, [mk_duration/2]).
|
-import(emqx_schema, [mk_duration/2]).
|
||||||
|
|
||||||
|
%%--------------------------------------------------------------------
|
||||||
|
%% Hocon Schema
|
||||||
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
namespace() -> authz.
|
namespace() -> authz.
|
||||||
|
|
||||||
%% @doc authorization schema is not exported
|
%% @doc authorization schema is not exported
|
||||||
|
@ -98,92 +103,24 @@ and the new rules will override all rules from the old config file.
|
||||||
}}
|
}}
|
||||||
];
|
];
|
||||||
fields(http_get) ->
|
fields(http_get) ->
|
||||||
[ {type, #{type => http}}
|
[ {method, #{type => get, default => post}}
|
||||||
, {enable, #{type => boolean(),
|
, {headers, fun headers_no_content_type/1}
|
||||||
default => true}}
|
] ++ http_common_fields();
|
||||||
, {url, #{type => url()}}
|
|
||||||
, {method, #{type => get, default => get }}
|
|
||||||
, {headers, #{type => map(),
|
|
||||||
default => #{ <<"accept">> => <<"application/json">>
|
|
||||||
, <<"cache-control">> => <<"no-cache">>
|
|
||||||
, <<"connection">> => <<"keep-alive">>
|
|
||||||
, <<"keep-alive">> => <<"timeout=5">>
|
|
||||||
},
|
|
||||||
converter => fun (Headers0) ->
|
|
||||||
Headers1 = maps:fold(fun(K0, V, AccIn) ->
|
|
||||||
K1 = iolist_to_binary(string:to_lower(to_list(K0))),
|
|
||||||
maps:put(K1, V, AccIn)
|
|
||||||
end, #{}, Headers0),
|
|
||||||
maps:merge(#{ <<"accept">> => <<"application/json">>
|
|
||||||
, <<"cache-control">> => <<"no-cache">>
|
|
||||||
, <<"connection">> => <<"keep-alive">>
|
|
||||||
, <<"keep-alive">> => <<"timeout=5">>
|
|
||||||
}, Headers1)
|
|
||||||
end
|
|
||||||
}
|
|
||||||
}
|
|
||||||
, {request_timeout, mk_duration("request timeout", #{default => "30s"})}
|
|
||||||
] ++ proplists:delete(base_url, emqx_connector_http:fields(config));
|
|
||||||
fields(http_post) ->
|
fields(http_post) ->
|
||||||
[ {type, #{type => http}}
|
[ {method, #{type => post, default => post}}
|
||||||
, {enable, #{type => boolean(),
|
, {headers, fun headers/1}
|
||||||
default => true}}
|
] ++ http_common_fields();
|
||||||
, {url, #{type => url()}}
|
|
||||||
, {method, #{type => post,
|
|
||||||
default => get}}
|
|
||||||
, {headers, #{type => map(),
|
|
||||||
default => #{ <<"accept">> => <<"application/json">>
|
|
||||||
, <<"cache-control">> => <<"no-cache">>
|
|
||||||
, <<"connection">> => <<"keep-alive">>
|
|
||||||
, <<"content-type">> => <<"application/json">>
|
|
||||||
, <<"keep-alive">> => <<"timeout=5">>
|
|
||||||
},
|
|
||||||
converter => fun (Headers0) ->
|
|
||||||
Headers1 = maps:fold(fun(K0, V, AccIn) ->
|
|
||||||
K1 = iolist_to_binary(string:to_lower(binary_to_list(K0))),
|
|
||||||
maps:put(K1, V, AccIn)
|
|
||||||
end, #{}, Headers0),
|
|
||||||
maps:merge(#{ <<"accept">> => <<"application/json">>
|
|
||||||
, <<"cache-control">> => <<"no-cache">>
|
|
||||||
, <<"connection">> => <<"keep-alive">>
|
|
||||||
, <<"content-type">> => <<"application/json">>
|
|
||||||
, <<"keep-alive">> => <<"timeout=5">>
|
|
||||||
}, Headers1)
|
|
||||||
end
|
|
||||||
}
|
|
||||||
}
|
|
||||||
, {request_timeout, mk_duration("request timeout", #{default => "30s"})}
|
|
||||||
, {body, #{type => map(),
|
|
||||||
nullable => true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
] ++ proplists:delete(base_url, emqx_connector_http:fields(config));
|
|
||||||
fields(mnesia) ->
|
fields(mnesia) ->
|
||||||
[ {type, #{type => 'built-in-database'}}
|
[ {type, #{type => 'built-in-database'}}
|
||||||
, {enable, #{type => boolean(),
|
, {enable, #{type => boolean(),
|
||||||
default => true}}
|
default => true}}
|
||||||
];
|
];
|
||||||
fields(mongo_single) ->
|
fields(mongo_single) ->
|
||||||
[ {collection, #{type => atom()}}
|
mongo_common_fields() ++ emqx_connector_mongo:fields(single);
|
||||||
, {selector, #{type => map()}}
|
|
||||||
, {type, #{type => mongodb}}
|
|
||||||
, {enable, #{type => boolean(),
|
|
||||||
default => true}}
|
|
||||||
] ++ emqx_connector_mongo:fields(single);
|
|
||||||
fields(mongo_rs) ->
|
fields(mongo_rs) ->
|
||||||
[ {collection, #{type => atom()}}
|
mongo_common_fields() ++ emqx_connector_mongo:fields(rs);
|
||||||
, {selector, #{type => map()}}
|
|
||||||
, {type, #{type => mongodb}}
|
|
||||||
, {enable, #{type => boolean(),
|
|
||||||
default => true}}
|
|
||||||
] ++ emqx_connector_mongo:fields(rs);
|
|
||||||
fields(mongo_sharded) ->
|
fields(mongo_sharded) ->
|
||||||
[ {collection, #{type => atom()}}
|
mongo_common_fields() ++ emqx_connector_mongo:fields(sharded);
|
||||||
, {selector, #{type => map()}}
|
|
||||||
, {type, #{type => mongodb}}
|
|
||||||
, {enable, #{type => boolean(),
|
|
||||||
default => true}}
|
|
||||||
] ++ emqx_connector_mongo:fields(sharded);
|
|
||||||
fields(mysql) ->
|
fields(mysql) ->
|
||||||
connector_fields(mysql) ++
|
connector_fields(mysql) ++
|
||||||
[ {query, query()} ];
|
[ {query, query()} ];
|
||||||
|
@ -203,10 +140,87 @@ fields(redis_cluster) ->
|
||||||
connector_fields(redis, cluster) ++
|
connector_fields(redis, cluster) ++
|
||||||
[ {cmd, query()} ].
|
[ {cmd, query()} ].
|
||||||
|
|
||||||
|
http_common_fields() ->
|
||||||
|
[ {type, #{type => http}}
|
||||||
|
, {enable, #{type => boolean(), default => true}}
|
||||||
|
, {url, #{type => url()}}
|
||||||
|
, {request_timeout, mk_duration("request timeout", #{default => "30s"})}
|
||||||
|
, {body, #{type => map(), nullable => true}}
|
||||||
|
] ++ proplists:delete(base_url, emqx_connector_http:fields(config)).
|
||||||
|
|
||||||
|
mongo_common_fields() ->
|
||||||
|
[ {collection, #{type => atom()}}
|
||||||
|
, {selector, #{type => map()}}
|
||||||
|
, {type, #{type => mongodb}}
|
||||||
|
, {enable, #{type => boolean(),
|
||||||
|
default => true}}
|
||||||
|
].
|
||||||
|
|
||||||
|
validations() ->
|
||||||
|
[ {check_ssl_opts, fun check_ssl_opts/1}
|
||||||
|
, {check_headers, fun check_headers/1}
|
||||||
|
].
|
||||||
|
|
||||||
|
headers(type) -> map();
|
||||||
|
headers(converter) ->
|
||||||
|
fun(Headers) ->
|
||||||
|
maps:merge(default_headers(), transform_header_name(Headers))
|
||||||
|
end;
|
||||||
|
headers(default) -> default_headers();
|
||||||
|
headers(_) -> undefined.
|
||||||
|
|
||||||
|
headers_no_content_type(type) -> map();
|
||||||
|
headers_no_content_type(converter) ->
|
||||||
|
fun(Headers) ->
|
||||||
|
maps:merge(default_headers_no_content_type(), transform_header_name(Headers))
|
||||||
|
end;
|
||||||
|
headers_no_content_type(default) -> default_headers_no_content_type();
|
||||||
|
headers_no_content_type(_) -> undefined.
|
||||||
|
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% Internal functions
|
%% Internal functions
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
|
default_headers() ->
|
||||||
|
maps:put(<<"content-type">>,
|
||||||
|
<<"application/json">>,
|
||||||
|
default_headers_no_content_type()).
|
||||||
|
|
||||||
|
default_headers_no_content_type() ->
|
||||||
|
#{ <<"accept">> => <<"application/json">>
|
||||||
|
, <<"cache-control">> => <<"no-cache">>
|
||||||
|
, <<"connection">> => <<"keep-alive">>
|
||||||
|
, <<"keep-alive">> => <<"timeout=5">>
|
||||||
|
}.
|
||||||
|
|
||||||
|
transform_header_name(Headers) ->
|
||||||
|
maps:fold(fun(K0, V, Acc) ->
|
||||||
|
K = list_to_binary(string:to_lower(to_list(K0))),
|
||||||
|
maps:put(K, V, Acc)
|
||||||
|
end, #{}, Headers).
|
||||||
|
|
||||||
|
check_ssl_opts(Conf)
|
||||||
|
when Conf =:= #{} ->
|
||||||
|
true;
|
||||||
|
check_ssl_opts(Conf) ->
|
||||||
|
case emqx_authz_http:parse_url(hocon_schema:get_value("config.url", Conf)) of
|
||||||
|
#{scheme := https} ->
|
||||||
|
case hocon_schema:get_value("config.ssl.enable", Conf) of
|
||||||
|
true -> ok;
|
||||||
|
false -> false
|
||||||
|
end;
|
||||||
|
#{scheme := http} ->
|
||||||
|
ok
|
||||||
|
end.
|
||||||
|
|
||||||
|
check_headers(Conf)
|
||||||
|
when Conf =:= #{} ->
|
||||||
|
true;
|
||||||
|
check_headers(Conf) ->
|
||||||
|
Method = to_bin(hocon_schema:get_value("config.method", Conf)),
|
||||||
|
Headers = hocon_schema:get_value("config.headers", Conf),
|
||||||
|
Method =:= <<"post">> orelse (not maps:is_key(<<"content-type">>, Headers)).
|
||||||
|
|
||||||
union_array(Item) when is_list(Item) ->
|
union_array(Item) when is_list(Item) ->
|
||||||
hoconsc:array(hoconsc:union(Item)).
|
hoconsc:array(hoconsc:union(Item)).
|
||||||
|
|
||||||
|
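The headers/1 and headers_no_content_type/1 schema fields above push every user-supplied header map through the same two steps: lower-case the header names, then merge over the defaults so explicit values win. A standalone sketch of that normalization (headers_demo is an illustrative module name, not part of emqx):

-module(headers_demo).
-export([normalize/1]).

normalize(Headers) ->
    maps:merge(default_headers(), transform_header_name(Headers)).

default_headers() ->
    #{ <<"accept">>        => <<"application/json">>
     , <<"cache-control">> => <<"no-cache">>
     , <<"connection">>    => <<"keep-alive">>
     , <<"keep-alive">>    => <<"timeout=5">>
     }.

%% Lower-case every header name so merging and lookups are case-insensitive.
transform_header_name(Headers) ->
    maps:fold(fun(K0, V, Acc) ->
                  K = list_to_binary(string:to_lower(to_list(K0))),
                  maps:put(K, V, Acc)
              end, #{}, Headers).

to_list(A) when is_atom(A)   -> atom_to_list(A);
to_list(B) when is_binary(B) -> binary_to_list(B);
to_list(L) when is_list(L)   -> L.

For instance, headers_demo:normalize(#{<<"Accept">> => <<"text/plain">>}) keeps the caller's accept value and fills in the remaining defaults.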
@ -229,15 +243,22 @@ connector_fields(DB, Fields) ->
|
||||||
catch
|
catch
|
||||||
error:badarg ->
|
error:badarg ->
|
||||||
list_to_atom(Mod0);
|
list_to_atom(Mod0);
|
||||||
Error ->
|
error:Reason ->
|
||||||
erlang:error(Error)
|
erlang:error(Reason)
|
||||||
end,
|
end,
|
||||||
[ {type, #{type => DB}}
|
[ {type, #{type => DB}}
|
||||||
, {enable, #{type => boolean(),
|
, {enable, #{type => boolean(),
|
||||||
default => true}}
|
default => true}}
|
||||||
] ++ Mod:fields(Fields).
|
] ++ erlang:apply(Mod, fields, [Fields]).
|
||||||
|
|
||||||
to_list(A) when is_atom(A) ->
|
to_list(A) when is_atom(A) ->
|
||||||
atom_to_list(A);
|
atom_to_list(A);
|
||||||
to_list(B) when is_binary(B) ->
|
to_list(B) when is_binary(B) ->
|
||||||
binary_to_list(B).
|
binary_to_list(B).
|
||||||
|
|
||||||
|
to_bin(A) when is_atom(A) ->
|
||||||
|
atom_to_binary(A);
|
||||||
|
to_bin(B) when is_binary(B) ->
|
||||||
|
B;
|
||||||
|
to_bin(L) when is_list(L) ->
|
||||||
|
list_to_binary(L).
|
||||||
|
|
|
@ -36,7 +36,8 @@ init_per_suite(Config) ->
|
||||||
meck:expect(emqx_resource, remove, fun(_) -> ok end ),
|
meck:expect(emqx_resource, remove, fun(_) -> ok end ),
|
||||||
|
|
||||||
ok = emqx_common_test_helpers:start_apps(
|
ok = emqx_common_test_helpers:start_apps(
|
||||||
[emqx_conf, emqx_authz], fun set_special_configs/1),
|
[emqx_connector, emqx_conf, emqx_authz],
|
||||||
|
fun set_special_configs/1),
|
||||||
Config.
|
Config.
|
||||||
|
|
||||||
end_per_suite(_Config) ->
|
end_per_suite(_Config) ->
|
||||||
|
|
|
@ -31,7 +31,7 @@ groups() ->
|
||||||
|
|
||||||
init_per_suite(Config) ->
|
init_per_suite(Config) ->
|
||||||
ok = emqx_common_test_helpers:start_apps(
|
ok = emqx_common_test_helpers:start_apps(
|
||||||
[emqx_conf, emqx_authz],
|
[emqx_connector, emqx_conf, emqx_authz],
|
||||||
fun set_special_configs/1
|
fun set_special_configs/1
|
||||||
),
|
),
|
||||||
Config.
|
Config.
|
||||||
|
|
|
@ -17,220 +17,266 @@
|
||||||
|
|
||||||
-behaviour(minirest_api).
|
-behaviour(minirest_api).
|
||||||
|
|
||||||
-export([api_spec/0]).
|
-include_lib("typerefl/include/types.hrl").
|
||||||
|
|
||||||
-export([ list_create_bridges_in_cluster/2
|
-import(hoconsc, [mk/2, array/1, enum/1]).
|
||||||
, list_local_bridges/1
|
|
||||||
, crud_bridges_in_cluster/2
|
%% Swagger specs from hocon schema
|
||||||
, manage_bridges/2
|
-export([api_spec/0, paths/0, schema/1, namespace/0]).
|
||||||
|
|
||||||
|
%% API callbacks
|
||||||
|
-export(['/bridges'/2, '/bridges/:id'/2,
|
||||||
|
'/nodes/:node/bridges/:id/operation/:operation'/2]).
|
||||||
|
|
||||||
|
-export([ list_local_bridges/1
|
||||||
, lookup_from_local_node/2
|
, lookup_from_local_node/2
|
||||||
]).
|
]).
|
||||||
|
|
||||||
-define(TYPES, [mqtt, http]).
|
-define(TYPES, [mqtt, http]).
|
||||||
|
|
||||||
|
-define(CONN_TYPES, [mqtt]).
|
||||||
|
|
||||||
-define(TRY_PARSE_ID(ID, EXPR),
|
-define(TRY_PARSE_ID(ID, EXPR),
|
||||||
try emqx_bridge:parse_bridge_id(Id) of
|
try emqx_bridge:parse_bridge_id(Id) of
|
||||||
{BridgeType, BridgeName} -> EXPR
|
{BridgeType, BridgeName} -> EXPR
|
||||||
catch
|
catch
|
||||||
error:{invalid_bridge_id, Id0} ->
|
error:{invalid_bridge_id, Id0} ->
|
||||||
{400, #{code => 'INVALID_ID', message => <<"invalid_bridge_id: ", Id0/binary,
|
{400, #{code => 'INVALID_ID', message => <<"invalid_bridge_id: ", Id0/binary,
|
||||||
". Bridge Ids must be of format <bridge_type>:<name>">>}}
|
". Bridge Ids must be of format {type}:{name}">>}}
|
||||||
end).
|
end).
|
||||||
|
|
||||||
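?TRY_PARSE_ID above delegates to emqx_bridge:parse_bridge_id/1 and rejects anything that does not look like {type}:{name}. A hypothetical stand-in that only splits at the first colon, to show the expected id shape (the real parser lives in emqx_bridge and may validate further):

-module(bridge_id_demo).
-export([parse/1]).

%% <<"http:my_http_bridge">> becomes {<<"http">>, <<"my_http_bridge">>};
%% ids without a non-empty type and name are rejected.
parse(Id) when is_binary(Id) ->
    case binary:split(Id, <<":">>) of
        [Type, Name] when Type =/= <<>>, Name =/= <<>> ->
            {Type, Name};
        _ ->
            error({invalid_bridge_id, Id})
    end.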
-define(METRICS(MATCH, SUCC, FAILED, RATE, RATE_5, RATE_MAX),
|
-define(METRICS(MATCH, SUCC, FAILED, RATE, RATE_5, RATE_MAX),
|
||||||
#{ matched => MATCH,
|
#{ matched => MATCH,
|
||||||
success => SUCC,
|
success => SUCC,
|
||||||
failed => FAILED,
|
failed => FAILED,
|
||||||
speed => RATE,
|
rate => RATE,
|
||||||
speed_last5m => RATE_5,
|
rate_last5m => RATE_5,
|
||||||
speed_max => RATE_MAX
|
rate_max => RATE_MAX
|
||||||
}).
|
}).
|
||||||
-define(metrics(MATCH, SUCC, FAILED, RATE, RATE_5, RATE_MAX),
|
-define(metrics(MATCH, SUCC, FAILED, RATE, RATE_5, RATE_MAX),
|
||||||
#{ matched := MATCH,
|
#{ matched := MATCH,
|
||||||
success := SUCC,
|
success := SUCC,
|
||||||
failed := FAILED,
|
failed := FAILED,
|
||||||
speed := RATE,
|
rate := RATE,
|
||||||
speed_last5m := RATE_5,
|
rate_last5m := RATE_5,
|
||||||
speed_max := RATE_MAX
|
rate_max := RATE_MAX
|
||||||
}).
|
}).
|
||||||
|
|
||||||
req_schema() ->
|
namespace() -> "bridge".
|
||||||
Schema = [
|
|
||||||
case maps:to_list(emqx:get_raw_config([bridges, T], #{})) of
|
|
||||||
%% the bridge is not configured, so we have no method to get the schema
|
|
||||||
[] -> #{};
|
|
||||||
[{_K, Conf} | _] ->
|
|
||||||
emqx_mgmt_api_configs:gen_schema(Conf)
|
|
||||||
end
|
|
||||||
|| T <- ?TYPES],
|
|
||||||
#{'oneOf' => Schema}.
|
|
||||||
|
|
||||||
node_schema() ->
|
|
||||||
#{type => string, example => "emqx@127.0.0.1"}.
|
|
||||||
|
|
||||||
status_schema() ->
|
|
||||||
#{type => string, enum => [connected, disconnected]}.
|
|
||||||
|
|
||||||
metrics_schema() ->
|
|
||||||
#{ type => object
|
|
||||||
, properties => #{
|
|
||||||
matched => #{type => integer, example => "0"},
|
|
||||||
success => #{type => integer, example => "0"},
|
|
||||||
failed => #{type => integer, example => "0"},
|
|
||||||
speed => #{type => number, format => float, example => "0.0"},
|
|
||||||
speed_last5m => #{type => number, format => float, example => "0.0"},
|
|
||||||
speed_max => #{type => number, format => float, example => "0.0"}
|
|
||||||
}
|
|
||||||
}.
|
|
||||||
|
|
||||||
per_node_schema(Key, Schema) ->
|
|
||||||
#{
|
|
||||||
type => array,
|
|
||||||
items => #{
|
|
||||||
type => object,
|
|
||||||
properties => #{
|
|
||||||
node => node_schema(),
|
|
||||||
Key => Schema
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}.
|
|
||||||
|
|
||||||
resp_schema() ->
|
|
||||||
AddMetadata = fun(Prop) ->
|
|
||||||
Prop#{status => status_schema(),
|
|
||||||
node_status => per_node_schema(status, status_schema()),
|
|
||||||
metrics => metrics_schema(),
|
|
||||||
node_metrics => per_node_schema(metrics, metrics_schema()),
|
|
||||||
id => #{type => string, example => "http:my_http_bridge"},
|
|
||||||
bridge_type => #{type => string, enum => ?TYPES},
|
|
||||||
node => node_schema()
|
|
||||||
}
|
|
||||||
end,
|
|
||||||
more_props_resp_schema(AddMetadata).
|
|
||||||
|
|
||||||
more_props_resp_schema(AddMetadata) ->
|
|
||||||
#{'oneOf' := Schema} = req_schema(),
|
|
||||||
Schema1 = [S#{properties => AddMetadata(Prop)}
|
|
||||||
|| S = #{properties := Prop} <- Schema],
|
|
||||||
#{'oneOf' => Schema1}.
|
|
||||||
|
|
||||||
api_spec() ->
|
api_spec() ->
|
||||||
{bridge_apis(), []}.
|
emqx_dashboard_swagger:spec(?MODULE, #{check_schema => false}).
|
||||||
|
|
||||||
bridge_apis() ->
|
paths() -> ["/bridges", "/bridges/:id", "/nodes/:node/bridges/:id/operation/:operation"].
|
||||||
[list_all_bridges_api(), crud_bridges_apis(), operation_apis()].
|
|
||||||
|
|
||||||
list_all_bridges_api() ->
|
error_schema(Code, Message) ->
|
||||||
ReqSchema = more_props_resp_schema(fun(Prop) ->
|
[ {code, mk(string(), #{example => Code})}
|
||||||
Prop#{id => #{type => string, required => true}}
|
, {message, mk(string(), #{example => Message})}
|
||||||
end),
|
].
|
||||||
RespSchema = resp_schema(),
|
|
||||||
Metadata = #{
|
get_response_body_schema() ->
|
||||||
|
emqx_dashboard_swagger:schema_with_examples(emqx_bridge_schema:get_response(),
|
||||||
|
bridge_info_examples(get)).
|
||||||
|
|
||||||
|
param_path_node() ->
|
||||||
|
path_param(node, binary(), atom_to_binary(node(), utf8)).
|
||||||
|
|
||||||
|
param_path_operation() ->
|
||||||
|
path_param(operation, enum([start, stop, restart]), <<"start">>).
|
||||||
|
|
||||||
|
param_path_id() ->
|
||||||
|
path_param(id, binary(), <<"http:my_http_bridge">>).
|
||||||
|
|
||||||
|
path_param(Name, Type, Example) ->
|
||||||
|
{Name, mk(Type,
|
||||||
|
#{ in => path
|
||||||
|
, required => true
|
||||||
|
, example => Example
|
||||||
|
})}.
|
||||||
|
|
||||||
|
bridge_info_array_example(Method) ->
|
||||||
|
[Config || #{value := Config} <- maps:values(bridge_info_examples(Method))].
|
||||||
|
|
||||||
|
bridge_info_examples(Method) ->
|
||||||
|
maps:merge(conn_bridge_examples(Method), #{
|
||||||
|
<<"http_bridge">> => #{
|
||||||
|
summary => <<"HTTP Bridge">>,
|
||||||
|
value => info_example(http, awesome, Method)
|
||||||
|
}
|
||||||
|
}).
|
||||||
|
|
||||||
|
conn_bridge_examples(Method) ->
|
||||||
|
lists:foldl(fun(Type, Acc) ->
|
||||||
|
SType = atom_to_list(Type),
|
||||||
|
KeyIngress = bin(SType ++ "_ingress"),
|
||||||
|
KeyEgress = bin(SType ++ "_egress"),
|
||||||
|
maps:merge(Acc, #{
|
||||||
|
KeyIngress => #{
|
||||||
|
summary => bin(string:uppercase(SType) ++ " Ingress Bridge"),
|
||||||
|
value => info_example(Type, ingress, Method)
|
||||||
|
},
|
||||||
|
KeyEgress => #{
|
||||||
|
summary => bin(string:uppercase(SType) ++ " Egress Bridge"),
|
||||||
|
value => info_example(Type, egress, Method)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
end, #{}, ?CONN_TYPES).
|
||||||
|
|
||||||
|
info_example(Type, Direction, Method) ->
|
||||||
|
maps:merge(info_example_basic(Type, Direction),
|
||||||
|
method_example(Type, Direction, Method)).
|
||||||
|
|
||||||
|
method_example(Type, Direction, get) ->
|
||||||
|
SType = atom_to_list(Type),
|
||||||
|
SDir = atom_to_list(Direction),
|
||||||
|
SName = "my_" ++ SDir ++ "_" ++ SType ++ "_bridge",
|
||||||
|
#{
|
||||||
|
id => bin(SType ++ ":" ++ SName),
|
||||||
|
type => bin(SType),
|
||||||
|
name => bin(SName)
|
||||||
|
};
|
||||||
|
method_example(Type, Direction, post) ->
|
||||||
|
SType = atom_to_list(Type),
|
||||||
|
SDir = atom_to_list(Direction),
|
||||||
|
SName = "my_" ++ SDir ++ "_" ++ SType ++ "_bridge",
|
||||||
|
#{
|
||||||
|
type => bin(SType),
|
||||||
|
name => bin(SName)
|
||||||
|
};
|
||||||
|
method_example(_Type, _Direction, put) ->
|
||||||
|
#{}.
|
||||||
|
|
||||||
|
info_example_basic(http, _) ->
|
||||||
|
#{
|
||||||
|
url => <<"http://localhost:9901/messages/${topic}">>,
|
||||||
|
request_timeout => <<"30s">>,
|
||||||
|
connect_timeout => <<"30s">>,
|
||||||
|
max_retries => 3,
|
||||||
|
retry_interval => <<"10s">>,
|
||||||
|
pool_type => <<"random">>,
|
||||||
|
pool_size => 4,
|
||||||
|
enable_pipelining => true,
|
||||||
|
ssl => #{enable => false},
|
||||||
|
from_local_topic => <<"emqx_http/#">>,
|
||||||
|
method => post,
|
||||||
|
body => <<"${payload}">>
|
||||||
|
};
|
||||||
|
info_example_basic(mqtt, ingress) ->
|
||||||
|
#{
|
||||||
|
connector => <<"mqtt:my_mqtt_connector">>,
|
||||||
|
direction => ingress,
|
||||||
|
from_remote_topic => <<"aws/#">>,
|
||||||
|
subscribe_qos => 1,
|
||||||
|
to_local_topic => <<"from_aws/${topic}">>,
|
||||||
|
payload => <<"${payload}">>,
|
||||||
|
qos => <<"${qos}">>,
|
||||||
|
retain => <<"${retain}">>
|
||||||
|
};
|
||||||
|
info_example_basic(mqtt, egress) ->
|
||||||
|
#{
|
||||||
|
connector => <<"mqtt:my_mqtt_connector">>,
|
||||||
|
direction => egress,
|
||||||
|
from_local_topic => <<"emqx/#">>,
|
||||||
|
to_remote_topic => <<"from_emqx/${topic}">>,
|
||||||
|
payload => <<"${payload}">>,
|
||||||
|
qos => 1,
|
||||||
|
retain => false
|
||||||
|
}.
|
||||||
|
|
||||||
|
schema("/bridges") ->
|
||||||
|
#{
|
||||||
|
operationId => '/bridges',
|
||||||
get => #{
|
get => #{
|
||||||
|
tags => [<<"bridges">>],
|
||||||
|
summary => <<"List Bridges">>,
|
||||||
description => <<"List all created bridges">>,
|
description => <<"List all created bridges">>,
|
||||||
responses => #{
|
responses => #{
|
||||||
<<"200">> => emqx_mgmt_util:array_schema(resp_schema(),
|
200 => emqx_dashboard_swagger:schema_with_example(
|
||||||
<<"A list of the bridges">>)
|
array(emqx_bridge_schema:get_response()),
|
||||||
|
bridge_info_array_example(get))
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
post => #{
|
post => #{
|
||||||
|
tags => [<<"bridges">>],
|
||||||
|
summary => <<"Create Bridge">>,
|
||||||
description => <<"Create a new bridge">>,
|
description => <<"Create a new bridge">>,
|
||||||
'requestBody' => emqx_mgmt_util:schema(ReqSchema),
|
requestBody => emqx_dashboard_swagger:schema_with_examples(
|
||||||
|
emqx_bridge_schema:post_request(),
|
||||||
|
bridge_info_examples(post)),
|
||||||
responses => #{
|
responses => #{
|
||||||
<<"201">> => emqx_mgmt_util:schema(RespSchema, <<"Bridge created">>),
|
201 => get_response_body_schema(),
|
||||||
<<"400">> => emqx_mgmt_util:error_schema(<<"Create bridge failed">>,
|
400 => error_schema('BAD_ARG', "Create bridge failed")
|
||||||
['UPDATE_FAILED'])
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
};
|
||||||
{"/bridges/", Metadata, list_create_bridges_in_cluster}.
|
|
||||||
|
|
||||||
crud_bridges_apis() ->
|
schema("/bridges/:id") ->
|
||||||
ReqSchema = req_schema(),
|
#{
|
||||||
RespSchema = resp_schema(),
|
operationId => '/bridges/:id',
|
||||||
Metadata = #{
|
|
||||||
get => #{
|
get => #{
|
||||||
|
tags => [<<"bridges">>],
|
||||||
|
summary => <<"Get Bridge">>,
|
||||||
description => <<"Get a bridge by Id">>,
|
description => <<"Get a bridge by Id">>,
|
||||||
parameters => [param_path_id()],
|
parameters => [param_path_id()],
|
||||||
responses => #{
|
responses => #{
|
||||||
<<"200">> => emqx_mgmt_util:array_schema(RespSchema,
|
200 => get_response_body_schema(),
|
||||||
<<"The details of the bridge">>),
|
404 => error_schema('NOT_FOUND', "Bridge not found")
|
||||||
<<"404">> => emqx_mgmt_util:error_schema(<<"Bridge not found">>, ['NOT_FOUND'])
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
put => #{
|
put => #{
|
||||||
|
tags => [<<"bridges">>],
|
||||||
|
summary => <<"Update Bridge">>,
|
||||||
description => <<"Update a bridge">>,
|
description => <<"Update a bridge">>,
|
||||||
parameters => [param_path_id()],
|
parameters => [param_path_id()],
|
||||||
'requestBody' => emqx_mgmt_util:schema(ReqSchema),
|
requestBody => emqx_dashboard_swagger:schema_with_examples(
|
||||||
|
emqx_bridge_schema:put_request(),
|
||||||
|
bridge_info_examples(put)),
|
||||||
responses => #{
|
responses => #{
|
||||||
<<"200">> => emqx_mgmt_util:array_schema(RespSchema, <<"Bridge updated">>),
|
200 => get_response_body_schema(),
|
||||||
<<"400">> => emqx_mgmt_util:error_schema(<<"Update bridge failed">>,
|
400 => error_schema('BAD_ARG', "Update bridge failed")
|
||||||
['UPDATE_FAILED'])
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
delete => #{
|
delete => #{
|
||||||
|
tags => [<<"bridges">>],
|
||||||
|
summary => <<"Delete Bridge">>,
|
||||||
description => <<"Delete a bridge">>,
|
description => <<"Delete a bridge">>,
|
||||||
parameters => [param_path_id()],
|
parameters => [param_path_id()],
|
||||||
responses => #{
|
responses => #{
|
||||||
<<"204">> => emqx_mgmt_util:schema(<<"Bridge deleted">>),
|
204 => <<"Bridge deleted">>
|
||||||
<<"404">> => emqx_mgmt_util:error_schema(<<"Bridge not found">>, ['NOT_FOUND'])
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
};
|
||||||
{"/bridges/:id", Metadata, crud_bridges_in_cluster}.
|
|
||||||
|
|
||||||
operation_apis() ->
|
schema("/nodes/:node/bridges/:id/operation/:operation") ->
|
||||||
Metadata = #{
|
#{
|
||||||
|
operationId => '/nodes/:node/bridges/:id/operation/:operation',
|
||||||
post => #{
|
post => #{
|
||||||
|
tags => [<<"bridges">>],
|
||||||
|
summary => <<"Start/Stop/Restart Bridge">>,
|
||||||
description => <<"Start/Stop/Restart bridges on a specific node">>,
|
description => <<"Start/Stop/Restart bridges on a specific node">>,
|
||||||
parameters => [
|
parameters => [
|
||||||
param_path_node(),
|
param_path_node(),
|
||||||
param_path_id(),
|
param_path_id(),
|
||||||
param_path_operation()],
|
param_path_operation()
|
||||||
|
],
|
||||||
responses => #{
|
responses => #{
|
||||||
<<"500">> => emqx_mgmt_util:error_schema(<<"Operation Failed">>,
|
500 => error_schema('INTERNAL_ERROR', "Operation Failed"),
|
||||||
['INTERNAL_ERROR']),
|
200 => <<"Operation success">>
|
||||||
<<"200">> => emqx_mgmt_util:schema(<<"Operation success">>)}}},
|
}
|
||||||
{"/nodes/:node/bridges/:id/operation/:operation", Metadata, manage_bridges}.
|
}
|
||||||
|
|
||||||
param_path_node() ->
|
|
||||||
#{
|
|
||||||
name => node,
|
|
||||||
in => path,
|
|
||||||
schema => #{type => string},
|
|
||||||
required => true,
|
|
||||||
example => node()
|
|
||||||
}.
|
}.
|
||||||
|
|
||||||
param_path_id() ->
|
'/bridges'(post, #{body := #{<<"type">> := BridgeType} = Conf}) ->
|
||||||
#{
|
BridgeName = maps:get(<<"name">>, Conf, emqx_misc:gen_id()),
|
||||||
name => id,
|
case emqx_bridge:lookup(BridgeType, BridgeName) of
|
||||||
in => path,
|
{ok, _} -> {400, #{code => 'ALREADY_EXISTS', message => <<"bridge already exists">>}};
|
||||||
schema => #{type => string},
|
{error, not_found} ->
|
||||||
required => true
|
case ensure_bridge_created(BridgeType, BridgeName, Conf) of
|
||||||
}.
|
ok -> lookup_from_all_nodes(BridgeType, BridgeName, 201);
|
||||||
|
{error, Error} -> {400, Error}
|
||||||
param_path_operation()->
|
end
|
||||||
#{
|
end;
|
||||||
name => operation,
|
'/bridges'(get, _Params) ->
|
||||||
in => path,
|
|
||||||
required => true,
|
|
||||||
schema => #{
|
|
||||||
type => string,
|
|
||||||
enum => [start, stop, restart]},
|
|
||||||
example => restart
|
|
||||||
}.
|
|
||||||
|
|
||||||
list_create_bridges_in_cluster(post, #{body := #{<<"id">> := Id} = Conf}) ->
|
|
||||||
?TRY_PARSE_ID(Id,
|
|
||||||
case emqx_bridge:lookup(BridgeType, BridgeName) of
|
|
||||||
{ok, _} -> {400, #{code => 'ALREADY_EXISTS', message => <<"bridge already exists">>}};
|
|
||||||
{error, not_found} ->
|
|
||||||
case ensure_bridge(BridgeType, BridgeName, maps:remove(<<"id">>, Conf)) of
|
|
||||||
ok -> lookup_from_all_nodes(Id, BridgeType, BridgeName, 201);
|
|
||||||
{error, Error} -> {400, Error}
|
|
||||||
end
|
|
||||||
end);
|
|
||||||
list_create_bridges_in_cluster(get, _Params) ->
|
|
||||||
{200, zip_bridges([list_local_bridges(Node) || Node <- mria_mnesia:running_nodes()])}.
|
{200, zip_bridges([list_local_bridges(Node) || Node <- mria_mnesia:running_nodes()])}.
|
||||||
|
|
||||||
list_local_bridges(Node) when Node =:= node() ->
|
list_local_bridges(Node) when Node =:= node() ->
|
||||||
|
@ -238,22 +284,22 @@ list_local_bridges(Node) when Node =:= node() ->
|
||||||
list_local_bridges(Node) ->
|
list_local_bridges(Node) ->
|
||||||
rpc_call(Node, list_local_bridges, [Node]).
|
rpc_call(Node, list_local_bridges, [Node]).
|
||||||
|
|
||||||
crud_bridges_in_cluster(get, #{bindings := #{id := Id}}) ->
|
'/bridges/:id'(get, #{bindings := #{id := Id}}) ->
|
||||||
?TRY_PARSE_ID(Id, lookup_from_all_nodes(Id, BridgeType, BridgeName, 200));
|
?TRY_PARSE_ID(Id, lookup_from_all_nodes(BridgeType, BridgeName, 200));
|
||||||
|
|
||||||
crud_bridges_in_cluster(put, #{bindings := #{id := Id}, body := Conf}) ->
|
'/bridges/:id'(put, #{bindings := #{id := Id}, body := Conf}) ->
|
||||||
?TRY_PARSE_ID(Id,
|
?TRY_PARSE_ID(Id,
|
||||||
case emqx_bridge:lookup(BridgeType, BridgeName) of
|
case emqx_bridge:lookup(BridgeType, BridgeName) of
|
||||||
{ok, _} ->
|
{ok, _} ->
|
||||||
case ensure_bridge(BridgeType, BridgeName, Conf) of
|
case ensure_bridge_created(BridgeType, BridgeName, Conf) of
|
||||||
ok -> lookup_from_all_nodes(Id, BridgeType, BridgeName, 200);
|
ok -> lookup_from_all_nodes(BridgeType, BridgeName, 200);
|
||||||
{error, Error} -> {400, Error}
|
{error, Error} -> {400, Error}
|
||||||
end;
|
end;
|
||||||
{error, not_found} ->
|
{error, not_found} ->
|
||||||
{404, #{code => 'NOT_FOUND', message => <<"bridge not found">>}}
|
{404, #{code => 'NOT_FOUND', message => <<"bridge not found">>}}
|
||||||
end);
|
end);
|
||||||
|
|
||||||
crud_bridges_in_cluster(delete, #{bindings := #{id := Id}}) ->
|
'/bridges/:id'(delete, #{bindings := #{id := Id}}) ->
|
||||||
?TRY_PARSE_ID(Id,
|
?TRY_PARSE_ID(Id,
|
||||||
case emqx_conf:remove(emqx_bridge:config_key_path() ++ [BridgeType, BridgeName],
|
case emqx_conf:remove(emqx_bridge:config_key_path() ++ [BridgeType, BridgeName],
|
||||||
#{override_to => cluster}) of
|
#{override_to => cluster}) of
|
||||||
|
@ -262,12 +308,12 @@ crud_bridges_in_cluster(delete, #{bindings := #{id := Id}}) ->
|
||||||
{500, #{code => 102, message => emqx_resource_api:stringify(Reason)}}
|
{500, #{code => 102, message => emqx_resource_api:stringify(Reason)}}
|
||||||
end).
|
end).
|
||||||
|
|
||||||
lookup_from_all_nodes(Id, BridgeType, BridgeName, SuccCode) ->
|
lookup_from_all_nodes(BridgeType, BridgeName, SuccCode) ->
|
||||||
case rpc_multicall(lookup_from_local_node, [BridgeType, BridgeName]) of
|
case rpc_multicall(lookup_from_local_node, [BridgeType, BridgeName]) of
|
||||||
{ok, [{ok, _} | _] = Results} ->
|
{ok, [{ok, _} | _] = Results} ->
|
||||||
{SuccCode, format_bridge_info([R || {ok, R} <- Results])};
|
{SuccCode, format_bridge_info([R || {ok, R} <- Results])};
|
||||||
{ok, [{error, not_found} | _]} ->
|
{ok, [{error, not_found} | _]} ->
|
||||||
{404, error_msg('NOT_FOUND', <<"not_found: ", Id/binary>>)};
|
{404, error_msg('NOT_FOUND', <<"not_found">>)};
|
||||||
{error, ErrL} ->
|
{error, ErrL} ->
|
||||||
{500, error_msg('UNKNOWN_ERROR', ErrL)}
|
{500, error_msg('UNKNOWN_ERROR', ErrL)}
|
||||||
end.
|
end.
|
||||||
|
@ -278,7 +324,8 @@ lookup_from_local_node(BridgeType, BridgeName) ->
|
||||||
Error -> Error
|
Error -> Error
|
||||||
end.
|
end.
|
||||||
|
|
||||||
manage_bridges(post, #{bindings := #{node := Node, id := Id, operation := Op}}) ->
|
'/nodes/:node/bridges/:id/operation/:operation'(post, #{bindings :=
|
||||||
|
#{node := Node, id := Id, operation := Op}}) ->
|
||||||
OperFun =
|
OperFun =
|
||||||
fun (<<"start">>) -> start;
|
fun (<<"start">>) -> start;
|
||||||
(<<"stop">>) -> stop;
|
(<<"stop">>) -> stop;
|
||||||
|
@ -292,9 +339,10 @@ manage_bridges(post, #{bindings := #{node := Node, id := Id, operation := Op}})
|
||||||
{500, #{code => 102, message => emqx_resource_api:stringify(Reason)}}
|
{500, #{code => 102, message => emqx_resource_api:stringify(Reason)}}
|
||||||
end).
|
end).
|
||||||
|
|
||||||
ensure_bridge(BridgeType, BridgeName, Conf) ->
|
ensure_bridge_created(BridgeType, BridgeName, Conf) ->
|
||||||
case emqx_conf:update(emqx_bridge:config_key_path() ++ [BridgeType, BridgeName], Conf,
|
Conf1 = maps:without([<<"type">>, <<"name">>], Conf),
|
||||||
#{override_to => cluster}) of
|
case emqx_conf:update(emqx_bridge:config_key_path() ++ [BridgeType, BridgeName],
|
||||||
|
Conf1, #{override_to => cluster}) of
|
||||||
{ok, _} -> ok;
|
{ok, _} -> ok;
|
||||||
{error, Reason} ->
|
{error, Reason} ->
|
||||||
{error, error_msg('BAD_ARG', Reason)}
|
{error, error_msg('BAD_ARG', Reason)}
|
||||||
|
@ -346,12 +394,14 @@ aggregate_metrics(AllMetrics) ->
|
||||||
end, InitMetrics, AllMetrics).
|
end, InitMetrics, AllMetrics).
|
||||||
|
|
||||||
format_resp(#{id := Id, raw_config := RawConf,
|
format_resp(#{id := Id, raw_config := RawConf,
|
||||||
resource_data := #{mod := Mod, status := Status, metrics := Metrics}}) ->
|
resource_data := #{status := Status, metrics := Metrics}}) ->
|
||||||
|
{Type, Name} = emqx_bridge:parse_bridge_id(Id),
|
||||||
IsConnected = fun(started) -> connected; (_) -> disconnected end,
|
IsConnected = fun(started) -> connected; (_) -> disconnected end,
|
||||||
RawConf#{
|
RawConf#{
|
||||||
id => Id,
|
id => Id,
|
||||||
|
type => Type,
|
||||||
|
name => Name,
|
||||||
node => node(),
|
node => node(),
|
||||||
bridge_type => emqx_bridge:bridge_type(Mod),
|
|
||||||
status => IsConnected(Status),
|
status => IsConnected(Status),
|
||||||
metrics => Metrics
|
metrics => Metrics
|
||||||
}.
|
}.
|
||||||
|
@ -378,4 +428,7 @@ rpc_call(Node, Mod, Fun, Args) ->
|
||||||
error_msg(Code, Msg) when is_binary(Msg) ->
|
error_msg(Code, Msg) when is_binary(Msg) ->
|
||||||
#{code => Code, message => Msg};
|
#{code => Code, message => Msg};
|
||||||
error_msg(Code, Msg) ->
|
error_msg(Code, Msg) ->
|
||||||
#{code => Code, message => list_to_binary(io_lib:format("~p", [Msg]))}.
|
#{code => Code, message => bin(io_lib:format("~p", [Msg]))}.
|
||||||
|
|
||||||
|
bin(S) when is_list(S) ->
|
||||||
|
list_to_binary(S).
|
||||||
|
|
|
@ -0,0 +1,95 @@
|
||||||
|
-module(emqx_bridge_http_schema).
|
||||||
|
|
||||||
|
-include_lib("typerefl/include/types.hrl").
|
||||||
|
|
||||||
|
-import(hoconsc, [mk/2, enum/1]).
|
||||||
|
|
||||||
|
-export([roots/0, fields/1]).
|
||||||
|
|
||||||
|
%%======================================================================================
|
||||||
|
%% Hocon Schema Definitions
|
||||||
|
roots() -> [].
|
||||||
|
|
||||||
|
fields("bridge") ->
|
||||||
|
basic_config() ++
|
||||||
|
[ {url, mk(binary(),
|
||||||
|
#{ nullable => false
|
||||||
|
, desc =>"""
|
||||||
|
The URL of the HTTP Bridge.<br>
|
||||||
|
Template with variables is allowed in the path, but variables cannot be used in the scheme, host,
|
||||||
|
or port part.<br>
|
||||||
|
For example, <code> http://localhost:9901/${topic} </code> is allowed, but
|
||||||
|
<code> http://${host}:9901/message </code> or <code> http://localhost:${port}/message </code>
|
||||||
|
is not allowed.
|
||||||
|
"""
|
||||||
|
})}
|
||||||
|
, {from_local_topic, mk(binary(),
|
||||||
|
#{ desc =>"""
|
||||||
|
The MQTT topic filter to be forwarded to the HTTP server. All MQTT PUBLISH messages whose
topic matches from_local_topic will be forwarded.<br>
NOTE: if this bridge is used as the output of a rule (emqx rule engine) and from_local_topic
is also configured, then both the data from the rule and the MQTT messages that match
from_local_topic will be forwarded.
|
||||||
|
"""
|
||||||
|
})}
|
||||||
|
, {method, mk(method(),
|
||||||
|
#{ default => post
|
||||||
|
, desc =>"""
|
||||||
|
The method of the HTTP request. All the available methods are: post, put, get, delete.<br>
|
||||||
|
Template with variables is allowed.<br>
|
||||||
|
"""
|
||||||
|
})}
|
||||||
|
, {headers, mk(map(),
|
||||||
|
#{ default => #{
|
||||||
|
<<"accept">> => <<"application/json">>,
|
||||||
|
<<"cache-control">> => <<"no-cache">>,
|
||||||
|
<<"connection">> => <<"keep-alive">>,
|
||||||
|
<<"content-type">> => <<"application/json">>,
|
||||||
|
<<"keep-alive">> => <<"timeout=5">>}
|
||||||
|
, desc =>"""
|
||||||
|
The headers of the HTTP request.<br>
|
||||||
|
Template with variables is allowed.
|
||||||
|
"""
|
||||||
|
})
|
||||||
|
}
|
||||||
|
, {body, mk(binary(),
|
||||||
|
#{ default => <<"${payload}">>
|
||||||
|
, desc =>"""
|
||||||
|
The body of the HTTP request.<br>
|
||||||
|
Template with variables is allowed.
|
||||||
|
"""
|
||||||
|
})}
|
||||||
|
, {request_timeout, mk(emqx_schema:duration_ms(),
|
||||||
|
#{ default => <<"30s">>
|
||||||
|
, desc =>"""
|
||||||
|
How long to wait before the HTTP request times out.
|
||||||
|
"""
|
||||||
|
})}
|
||||||
|
];
|
||||||
|
|
||||||
|
fields("post") ->
|
||||||
|
[ type_field()
|
||||||
|
, name_field()
|
||||||
|
] ++ fields("bridge");
|
||||||
|
|
||||||
|
fields("put") ->
|
||||||
|
fields("bridge");
|
||||||
|
|
||||||
|
fields("get") ->
|
||||||
|
[ id_field()
|
||||||
|
] ++ fields("post").
|
||||||
|
|
||||||
|
basic_config() ->
|
||||||
|
proplists:delete(base_url, emqx_connector_http:fields(config)).
|
||||||
|
|
||||||
|
%%======================================================================================
|
||||||
|
id_field() ->
|
||||||
|
{id, mk(binary(), #{desc => "The Bridge Id", example => "http:my_http_bridge"})}.
|
||||||
|
|
||||||
|
type_field() ->
|
||||||
|
{type, mk(http, #{desc => "The Bridge Type"})}.
|
||||||
|
|
||||||
|
name_field() ->
|
||||||
|
{name, mk(binary(), #{desc => "The Bridge Name"})}.
|
||||||
|
|
||||||
|
method() ->
|
||||||
|
enum([post, put, get, delete]).
|
|
@ -0,0 +1,62 @@
|
||||||
|
-module(emqx_bridge_mqtt_schema).
|
||||||
|
|
||||||
|
-include_lib("typerefl/include/types.hrl").
|
||||||
|
|
||||||
|
-import(hoconsc, [mk/2]).
|
||||||
|
|
||||||
|
-export([roots/0, fields/1]).
|
||||||
|
|
||||||
|
%%======================================================================================
|
||||||
|
%% Hocon Schema Definitions
|
||||||
|
roots() -> [].
|
||||||
|
|
||||||
|
fields("ingress") ->
|
||||||
|
[ direction(ingress, emqx_connector_mqtt_schema:ingress_desc())
|
||||||
|
, emqx_bridge_schema:connector_name()
|
||||||
|
] ++ proplists:delete(hookpoint, emqx_connector_mqtt_schema:fields("ingress"));
|
||||||
|
|
||||||
|
fields("egress") ->
|
||||||
|
[ direction(egress, emqx_connector_mqtt_schema:egress_desc())
|
||||||
|
, emqx_bridge_schema:connector_name()
|
||||||
|
] ++ emqx_connector_mqtt_schema:fields("egress");
|
||||||
|
|
||||||
|
fields("post_ingress") ->
|
||||||
|
[ type_field()
|
||||||
|
, name_field()
|
||||||
|
] ++ fields("ingress");
|
||||||
|
fields("post_egress") ->
|
||||||
|
[ type_field()
|
||||||
|
, name_field()
|
||||||
|
] ++ fields("egress");
|
||||||
|
|
||||||
|
fields("put_ingress") ->
|
||||||
|
fields("ingress");
|
||||||
|
fields("put_egress") ->
|
||||||
|
fields("egress");
|
||||||
|
|
||||||
|
fields("get_ingress") ->
|
||||||
|
[ id_field()
|
||||||
|
] ++ fields("post_ingress");
|
||||||
|
fields("get_egress") ->
|
||||||
|
[ id_field()
|
||||||
|
] ++ fields("post_egress").
|
||||||
|
|
||||||
|
%%======================================================================================
|
||||||
|
direction(Dir, Desc) ->
|
||||||
|
{direction, mk(Dir,
|
||||||
|
#{ nullable => false
|
||||||
|
, desc => "The direction of the bridge. Can be one of 'ingress' or 'egress'.<br>"
|
||||||
|
++ Desc
|
||||||
|
})}.
|
||||||
|
|
||||||
|
id_field() ->
|
||||||
|
{id, mk(binary(), #{desc => "The Bridge Id", example => "mqtt:my_mqtt_bridge"})}.
|
||||||
|
|
||||||
|
type_field() ->
|
||||||
|
{type, mk(mqtt, #{desc => "The Bridge Type"})}.
|
||||||
|
|
||||||
|
name_field() ->
|
||||||
|
{name, mk(binary(),
|
||||||
|
#{ desc => "The Bridge Name"
|
||||||
|
, example => "some_bridge_name"
|
||||||
|
})}.
|
|
@ -2,122 +2,63 @@
|
||||||
|
|
||||||
-include_lib("typerefl/include/types.hrl").
|
-include_lib("typerefl/include/types.hrl").
|
||||||
|
|
||||||
|
-import(hoconsc, [mk/2, ref/2]).
|
||||||
|
|
||||||
-export([roots/0, fields/1]).
|
-export([roots/0, fields/1]).
|
||||||
|
|
||||||
|
-export([ get_response/0
|
||||||
|
, put_request/0
|
||||||
|
, post_request/0
|
||||||
|
]).
|
||||||
|
|
||||||
|
-export([ connector_name/0
|
||||||
|
]).
|
||||||
|
|
||||||
%%======================================================================================
|
%%======================================================================================
|
||||||
%% Hocon Schema Definitions
|
%% Hocon Schema Definitions
|
||||||
|
|
||||||
roots() -> [bridges].
|
-define(CONN_TYPES, [mqtt]).
|
||||||
|
|
||||||
fields(bridges) ->
|
%%======================================================================================
|
||||||
[ {mqtt,
|
%% For HTTP APIs
|
||||||
sc(hoconsc:map(name, hoconsc:union([ ref("ingress_mqtt_bridge")
|
get_response() ->
|
||||||
, ref("egress_mqtt_bridge")
|
http_schema("get").
|
||||||
])),
|
|
||||||
#{ desc => "MQTT bridges"
|
|
||||||
})}
|
|
||||||
, {http,
|
|
||||||
sc(hoconsc:map(name, ref("http_bridge")),
|
|
||||||
#{ desc => "HTTP bridges"
|
|
||||||
})}
|
|
||||||
];
|
|
||||||
|
|
||||||
fields("ingress_mqtt_bridge") ->
|
|
||||||
[ direction(ingress, emqx_connector_mqtt_schema:ingress_desc())
|
|
||||||
, connector_name()
|
|
||||||
] ++ proplists:delete(hookpoint, emqx_connector_mqtt_schema:fields("ingress"));
|
|
||||||
|
|
||||||
fields("egress_mqtt_bridge") ->
|
|
||||||
[ direction(egress, emqx_connector_mqtt_schema:egress_desc())
|
|
||||||
, connector_name()
|
|
||||||
] ++ emqx_connector_mqtt_schema:fields("egress");
|
|
||||||
|
|
||||||
fields("http_bridge") ->
|
|
||||||
basic_config_http() ++
|
|
||||||
[ {url,
|
|
||||||
sc(binary(),
|
|
||||||
#{ nullable => false
|
|
||||||
, desc =>"""
|
|
||||||
The URL of the HTTP Bridge.<br>
|
|
||||||
Template with variables is allowed in the path, but variables cannot be used in the scheme, host,
|
|
||||||
or port part.<br>
|
|
||||||
For example, <code> http://localhost:9901/${topic} </code> is allowed, but
|
|
||||||
<code> http://${host}:9901/message </code> or <code> http://localhost:${port}/message </code>
|
|
||||||
is not allowed.
|
|
||||||
"""
|
|
||||||
})}
|
|
||||||
, {from_local_topic,
|
|
||||||
sc(binary(),
|
|
||||||
#{ desc =>"""
|
|
||||||
The MQTT topic filter to be forwarded to the HTTP server. All MQTT PUBLISH messages which topic
|
|
||||||
match the from_local_topic will be forwarded.<br>
|
|
||||||
NOTE: if this bridge is used as the output of a rule (emqx rule engine), and also from_local_topic is configured, then both the data got from the rule and the MQTT messages that matches
|
|
||||||
from_local_topic will be forwarded.
|
|
||||||
"""
|
|
||||||
})}
|
|
||||||
, {method,
|
|
||||||
sc(method(),
|
|
||||||
#{ default => post
|
|
||||||
, desc =>"""
|
|
||||||
The method of the HTTP request. All the available methods are: post, put, get, delete.<br>
|
|
||||||
Template with variables is allowed.<br>
|
|
||||||
"""
|
|
||||||
})}
|
|
||||||
, {headers,
|
|
||||||
sc(map(),
|
|
||||||
#{ default => #{
|
|
||||||
<<"accept">> => <<"application/json">>,
|
|
||||||
<<"cache-control">> => <<"no-cache">>,
|
|
||||||
<<"connection">> => <<"keep-alive">>,
|
|
||||||
<<"content-type">> => <<"application/json">>,
|
|
||||||
<<"keep-alive">> => <<"timeout=5">>}
|
|
||||||
, desc =>"""
|
|
||||||
The headers of the HTTP request.<br>
|
|
||||||
Template with variables is allowed.
|
|
||||||
"""
|
|
||||||
})
|
|
||||||
}
|
|
||||||
, {body,
|
|
||||||
sc(binary(),
|
|
||||||
#{ default => <<"${payload}">>
|
|
||||||
, desc =>"""
|
|
||||||
The body of the HTTP request.<br>
|
|
||||||
Template with variables is allowed.
|
|
||||||
"""
|
|
||||||
})}
|
|
||||||
, {request_timeout,
|
|
||||||
sc(emqx_schema:duration_ms(),
|
|
||||||
#{ default => <<"30s">>
|
|
||||||
, desc =>"""
|
|
||||||
How long will the HTTP request timeout.
|
|
||||||
"""
|
|
||||||
})}
|
|
||||||
].
|
|
||||||
|
|
||||||
direction(Dir, Desc) ->
|
|
||||||
{direction,
|
|
||||||
sc(Dir,
|
|
||||||
#{ nullable => false
|
|
||||||
, desc => "The direction of the bridge. Can be one of 'ingress' or 'egress'.<br>" ++
|
|
||||||
Desc
|
|
||||||
})}.
|
|
||||||
|
|
||||||
connector_name() ->
|
connector_name() ->
|
||||||
{connector,
|
{connector,
|
||||||
sc(binary(),
|
mk(binary(),
|
||||||
#{ nullable => false
|
#{ nullable => false
|
||||||
, desc =>"""
|
, desc =>"""
|
||||||
The connector name to be used for this bridge.
|
The connector name to be used for this bridge.
|
||||||
Connectors are configured by 'connectors.<type>.<name>
|
Connectors are configured as 'connectors.{type}.{name}',
|
||||||
|
for example 'connectors.http.mybridge'.
|
||||||
"""
|
"""
|
||||||
})}.
|
})}.
|
||||||
|
|
||||||
basic_config_http() ->
|
put_request() ->
|
||||||
proplists:delete(base_url, emqx_connector_http:fields(config)).
|
http_schema("put").
|
||||||
|
|
||||||
method() ->
|
post_request() ->
|
||||||
hoconsc:enum([post, put, get, delete]).
|
http_schema("post").
|
||||||
|
|
||||||
sc(Type, Meta) -> hoconsc:mk(Type, Meta).
|
http_schema(Method) ->
|
||||||
|
Schemas = lists:flatmap(fun(Type) ->
|
||||||
|
[ref(schema_mod(Type), Method ++ "_ingress"),
|
||||||
|
ref(schema_mod(Type), Method ++ "_egress")]
|
||||||
|
end, ?CONN_TYPES),
|
||||||
|
hoconsc:union([ref(emqx_bridge_http_schema, Method)
|
||||||
|
| Schemas]).
|
||||||
|
|
||||||
ref(Field) -> hoconsc:ref(?MODULE, Field).
|
%%======================================================================================
|
||||||
|
%% For config files
|
||||||
|
roots() -> [bridges].
|
||||||
|
|
||||||
|
fields(bridges) ->
|
||||||
|
[{http, mk(hoconsc:map(name, ref(emqx_bridge_http_schema, "bridge")), #{})}]
|
||||||
|
++ [{T, mk(hoconsc:map(name, hoconsc:union([
|
||||||
|
ref(schema_mod(T), "ingress"),
|
||||||
|
ref(schema_mod(T), "egress")
|
||||||
|
])), #{})} || T <- ?CONN_TYPES].
|
||||||
|
|
||||||
|
schema_mod(Type) ->
|
||||||
|
list_to_atom(lists:concat(["emqx_bridge_", Type, "_schema"])).
|
||||||
|
|
|
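schema_mod/1 above derives the per-type schema module purely by name, so a new bridge type only needs a matching emqx_bridge_<type>_schema module. A quick shell check of the name construction (nothing emqx-specific is involved):

1> lists:concat(["emqx_bridge_", mqtt, "_schema"]).
"emqx_bridge_mqtt_schema"
2> list_to_atom(lists:concat(["emqx_bridge_", mqtt, "_schema"])).
emqx_bridge_mqtt_schema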
@ -21,7 +21,9 @@
|
||||||
-include_lib("eunit/include/eunit.hrl").
|
-include_lib("eunit/include/eunit.hrl").
|
||||||
-include_lib("common_test/include/ct.hrl").
|
-include_lib("common_test/include/ct.hrl").
|
||||||
-define(CONF_DEFAULT, <<"bridges: {}">>).
|
-define(CONF_DEFAULT, <<"bridges: {}">>).
|
||||||
-define(TEST_ID, <<"http:test_bridge">>).
|
-define(BRIDGE_TYPE, <<"http">>).
|
||||||
|
-define(BRIDGE_NAME, <<"test_bridge">>).
|
||||||
|
-define(BRIDGE_ID, <<"http:test_bridge">>).
|
||||||
-define(URL(PORT, PATH), list_to_binary(
|
-define(URL(PORT, PATH), list_to_binary(
|
||||||
io_lib:format("http://localhost:~s/~s",
|
io_lib:format("http://localhost:~s/~s",
|
||||||
[integer_to_list(PORT), PATH]))).
|
[integer_to_list(PORT), PATH]))).
|
||||||
|
@ -134,11 +136,15 @@ t_http_crud_apis(_) ->
|
||||||
%% POST /bridges/ will create a bridge
|
%% POST /bridges/ will create a bridge
|
||||||
URL1 = ?URL(Port, "path1"),
|
URL1 = ?URL(Port, "path1"),
|
||||||
{ok, 201, Bridge} = request(post, uri(["bridges"]),
|
{ok, 201, Bridge} = request(post, uri(["bridges"]),
|
||||||
?HTTP_BRIDGE(URL1)#{<<"id">> => ?TEST_ID}),
|
?HTTP_BRIDGE(URL1)#{
|
||||||
|
<<"type">> => ?BRIDGE_TYPE,
|
||||||
|
<<"name">> => ?BRIDGE_NAME
|
||||||
|
}),
|
||||||
|
|
||||||
%ct:pal("---bridge: ~p", [Bridge]),
|
%ct:pal("---bridge: ~p", [Bridge]),
|
||||||
?assertMatch(#{ <<"id">> := ?TEST_ID
|
?assertMatch(#{ <<"id">> := ?BRIDGE_ID
|
||||||
, <<"bridge_type">> := <<"http">>
|
, <<"type">> := ?BRIDGE_TYPE
|
||||||
|
, <<"name">> := ?BRIDGE_NAME
|
||||||
, <<"status">> := _
|
, <<"status">> := _
|
||||||
, <<"node_status">> := [_|_]
|
, <<"node_status">> := [_|_]
|
||||||
, <<"metrics">> := _
|
, <<"metrics">> := _
|
||||||
|
@ -148,7 +154,10 @@ t_http_crud_apis(_) ->
|
||||||
|
|
||||||
%% creating it again returns an error
|
%% creating it again returns an error
|
||||||
{ok, 400, RetMsg} = request(post, uri(["bridges"]),
|
{ok, 400, RetMsg} = request(post, uri(["bridges"]),
|
||||||
?HTTP_BRIDGE(URL1)#{<<"id">> => ?TEST_ID}),
|
?HTTP_BRIDGE(URL1)#{
|
||||||
|
<<"type">> => ?BRIDGE_TYPE,
|
||||||
|
<<"name">> => ?BRIDGE_NAME
|
||||||
|
}),
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
#{ <<"code">> := _
|
#{ <<"code">> := _
|
||||||
, <<"message">> := <<"bridge already exists">>
|
, <<"message">> := <<"bridge already exists">>
|
||||||
|
@ -156,10 +165,11 @@ t_http_crud_apis(_) ->
|
||||||
|
|
||||||
%% update the request-path of the bridge
|
%% update the request-path of the bridge
|
||||||
URL2 = ?URL(Port, "path2"),
|
URL2 = ?URL(Port, "path2"),
|
||||||
{ok, 200, Bridge2} = request(put, uri(["bridges", ?TEST_ID]),
|
{ok, 200, Bridge2} = request(put, uri(["bridges", ?BRIDGE_ID]),
|
||||||
?HTTP_BRIDGE(URL2)),
|
?HTTP_BRIDGE(URL2)),
|
||||||
?assertMatch(#{ <<"id">> := ?TEST_ID
|
?assertMatch(#{ <<"id">> := ?BRIDGE_ID
|
||||||
, <<"bridge_type">> := <<"http">>
|
, <<"type">> := ?BRIDGE_TYPE
|
||||||
|
, <<"name">> := ?BRIDGE_NAME
|
||||||
, <<"status">> := _
|
, <<"status">> := _
|
||||||
, <<"node_status">> := [_|_]
|
, <<"node_status">> := [_|_]
|
||||||
, <<"metrics">> := _
|
, <<"metrics">> := _
|
||||||
|
@ -169,8 +179,9 @@ t_http_crud_apis(_) ->
|
||||||
|
|
||||||
%% list all bridges again, assert Bridge2 is in it
|
%% list all bridges again, assert Bridge2 is in it
|
||||||
{ok, 200, Bridge2Str} = request(get, uri(["bridges"]), []),
|
{ok, 200, Bridge2Str} = request(get, uri(["bridges"]), []),
|
||||||
?assertMatch([#{ <<"id">> := ?TEST_ID
|
?assertMatch([#{ <<"id">> := ?BRIDGE_ID
|
||||||
, <<"bridge_type">> := <<"http">>
|
, <<"type">> := ?BRIDGE_TYPE
|
||||||
|
, <<"name">> := ?BRIDGE_NAME
|
||||||
, <<"status">> := _
|
, <<"status">> := _
|
||||||
, <<"node_status">> := [_|_]
|
, <<"node_status">> := [_|_]
|
||||||
, <<"metrics">> := _
|
, <<"metrics">> := _
|
||||||
|
@ -179,9 +190,10 @@ t_http_crud_apis(_) ->
|
||||||
}], jsx:decode(Bridge2Str)),
|
}], jsx:decode(Bridge2Str)),
|
||||||
|
|
||||||
%% get the bridge by id
|
%% get the bridge by id
|
||||||
{ok, 200, Bridge3Str} = request(get, uri(["bridges", ?TEST_ID]), []),
|
{ok, 200, Bridge3Str} = request(get, uri(["bridges", ?BRIDGE_ID]), []),
|
||||||
?assertMatch(#{ <<"id">> := ?TEST_ID
|
?assertMatch(#{ <<"id">> := ?BRIDGE_ID
|
||||||
, <<"bridge_type">> := <<"http">>
|
, <<"type">> := ?BRIDGE_TYPE
|
||||||
|
, <<"name">> := ?BRIDGE_NAME
|
||||||
, <<"status">> := _
|
, <<"status">> := _
|
||||||
, <<"node_status">> := [_|_]
|
, <<"node_status">> := [_|_]
|
||||||
, <<"metrics">> := _
|
, <<"metrics">> := _
|
||||||
|
@ -190,11 +202,11 @@ t_http_crud_apis(_) ->
|
||||||
}, jsx:decode(Bridge3Str)),
|
}, jsx:decode(Bridge3Str)),
|
||||||
|
|
||||||
%% delete the bridge
|
%% delete the bridge
|
||||||
{ok, 204, <<>>} = request(delete, uri(["bridges", ?TEST_ID]), []),
|
{ok, 204, <<>>} = request(delete, uri(["bridges", ?BRIDGE_ID]), []),
|
||||||
{ok, 200, <<"[]">>} = request(get, uri(["bridges"]), []),
|
{ok, 200, <<"[]">>} = request(get, uri(["bridges"]), []),
|
||||||
|
|
||||||
%% update a deleted bridge returns an error
|
%% update a deleted bridge returns an error
|
||||||
{ok, 404, ErrMsg2} = request(put, uri(["bridges", ?TEST_ID]),
|
{ok, 404, ErrMsg2} = request(put, uri(["bridges", ?BRIDGE_ID]),
|
||||||
?HTTP_BRIDGE(URL2)),
|
?HTTP_BRIDGE(URL2)),
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
#{ <<"code">> := _
|
#{ <<"code">> := _
|
||||||
|
@ -206,11 +218,15 @@ t_start_stop_bridges(_) ->
|
||||||
Port = start_http_server(fun handle_fun_200_ok/1),
|
Port = start_http_server(fun handle_fun_200_ok/1),
|
||||||
URL1 = ?URL(Port, "abc"),
|
URL1 = ?URL(Port, "abc"),
|
||||||
{ok, 201, Bridge} = request(post, uri(["bridges"]),
|
{ok, 201, Bridge} = request(post, uri(["bridges"]),
|
||||||
?HTTP_BRIDGE(URL1)#{<<"id">> => ?TEST_ID}),
|
?HTTP_BRIDGE(URL1)#{
|
||||||
|
<<"type">> => ?BRIDGE_TYPE,
|
||||||
|
<<"name">> => ?BRIDGE_NAME
|
||||||
|
}),
|
||||||
%ct:pal("the bridge ==== ~p", [Bridge]),
|
%ct:pal("the bridge ==== ~p", [Bridge]),
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
#{ <<"id">> := ?TEST_ID
|
#{ <<"id">> := ?BRIDGE_ID
|
||||||
, <<"bridge_type">> := <<"http">>
|
, <<"type">> := ?BRIDGE_TYPE
|
||||||
|
, <<"name">> := ?BRIDGE_NAME
|
||||||
, <<"status">> := _
|
, <<"status">> := _
|
||||||
, <<"node_status">> := [_|_]
|
, <<"node_status">> := [_|_]
|
||||||
, <<"metrics">> := _
|
, <<"metrics">> := _
|
||||||
|
@ -219,42 +235,42 @@ t_start_stop_bridges(_) ->
|
||||||
}, jsx:decode(Bridge)),
|
}, jsx:decode(Bridge)),
|
||||||
%% stop it
|
%% stop it
|
||||||
{ok, 200, <<>>} = request(post,
|
{ok, 200, <<>>} = request(post,
|
||||||
uri(["nodes", node(), "bridges", ?TEST_ID, "operation", "stop"]),
|
uri(["nodes", node(), "bridges", ?BRIDGE_ID, "operation", "stop"]),
|
||||||
<<"">>),
|
<<"">>),
|
||||||
{ok, 200, Bridge2} = request(get, uri(["bridges", ?TEST_ID]), []),
|
{ok, 200, Bridge2} = request(get, uri(["bridges", ?BRIDGE_ID]), []),
|
||||||
?assertMatch(#{ <<"id">> := ?TEST_ID
|
?assertMatch(#{ <<"id">> := ?BRIDGE_ID
|
||||||
, <<"status">> := <<"disconnected">>
|
, <<"status">> := <<"disconnected">>
|
||||||
}, jsx:decode(Bridge2)),
|
}, jsx:decode(Bridge2)),
|
||||||
%% start again
|
%% start again
|
||||||
{ok, 200, <<>>} = request(post,
|
{ok, 200, <<>>} = request(post,
|
||||||
uri(["nodes", node(), "bridges", ?TEST_ID, "operation", "start"]),
|
uri(["nodes", node(), "bridges", ?BRIDGE_ID, "operation", "start"]),
|
||||||
<<"">>),
|
<<"">>),
|
||||||
{ok, 200, Bridge3} = request(get, uri(["bridges", ?TEST_ID]), []),
|
{ok, 200, Bridge3} = request(get, uri(["bridges", ?BRIDGE_ID]), []),
|
||||||
?assertMatch(#{ <<"id">> := ?TEST_ID
|
?assertMatch(#{ <<"id">> := ?BRIDGE_ID
|
||||||
, <<"status">> := <<"connected">>
|
, <<"status">> := <<"connected">>
|
||||||
}, jsx:decode(Bridge3)),
|
}, jsx:decode(Bridge3)),
|
||||||
%% restart an already started bridge
|
%% restart an already started bridge
|
||||||
{ok, 200, <<>>} = request(post,
|
{ok, 200, <<>>} = request(post,
|
||||||
uri(["nodes", node(), "bridges", ?TEST_ID, "operation", "restart"]),
|
uri(["nodes", node(), "bridges", ?BRIDGE_ID, "operation", "restart"]),
|
||||||
<<"">>),
|
<<"">>),
|
||||||
{ok, 200, Bridge3} = request(get, uri(["bridges", ?TEST_ID]), []),
|
{ok, 200, Bridge3} = request(get, uri(["bridges", ?BRIDGE_ID]), []),
|
||||||
?assertMatch(#{ <<"id">> := ?TEST_ID
|
?assertMatch(#{ <<"id">> := ?BRIDGE_ID
|
||||||
, <<"status">> := <<"connected">>
|
, <<"status">> := <<"connected">>
|
||||||
}, jsx:decode(Bridge3)),
|
}, jsx:decode(Bridge3)),
|
||||||
%% stop it again
|
%% stop it again
|
||||||
{ok, 200, <<>>} = request(post,
|
{ok, 200, <<>>} = request(post,
|
||||||
uri(["nodes", node(), "bridges", ?TEST_ID, "operation", "stop"]),
|
uri(["nodes", node(), "bridges", ?BRIDGE_ID, "operation", "stop"]),
|
||||||
<<"">>),
|
<<"">>),
|
||||||
%% restart a stopped bridge
|
%% restart a stopped bridge
|
||||||
{ok, 200, <<>>} = request(post,
|
{ok, 200, <<>>} = request(post,
|
||||||
uri(["nodes", node(), "bridges", ?TEST_ID, "operation", "restart"]),
|
uri(["nodes", node(), "bridges", ?BRIDGE_ID, "operation", "restart"]),
|
||||||
<<"">>),
|
<<"">>),
|
||||||
{ok, 200, Bridge4} = request(get, uri(["bridges", ?TEST_ID]), []),
|
{ok, 200, Bridge4} = request(get, uri(["bridges", ?BRIDGE_ID]), []),
|
||||||
?assertMatch(#{ <<"id">> := ?TEST_ID
|
?assertMatch(#{ <<"id">> := ?BRIDGE_ID
|
||||||
, <<"status">> := <<"connected">>
|
, <<"status">> := <<"connected">>
|
||||||
}, jsx:decode(Bridge4)),
|
}, jsx:decode(Bridge4)),
|
||||||
%% delete the bridge
|
%% delete the bridge
|
||||||
{ok, 204, <<>>} = request(delete, uri(["bridges", ?TEST_ID]), []),
|
{ok, 204, <<>>} = request(delete, uri(["bridges", ?BRIDGE_ID]), []),
|
||||||
{ok, 200, <<"[]">>} = request(get, uri(["bridges"]), []).
|
{ok, 200, <<"[]">>} = request(get, uri(["bridges"]), []).
|
||||||
|
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
|
|
|
@ -27,12 +27,27 @@ node {
|
||||||
## Default: "{{ platform_data_dir }}/"
|
## Default: "{{ platform_data_dir }}/"
|
||||||
data_dir = "{{ platform_data_dir }}/"
|
data_dir = "{{ platform_data_dir }}/"
|
||||||
|
|
||||||
## Dir of crash dump file.
|
## Location of crash dump file.
|
||||||
##
|
##
|
||||||
## @doc node.crash_dump_dir
|
## @doc node.crash_dump_file
|
||||||
## ValueType: Folder
|
## ValueType: File
|
||||||
## Default: "{{ platform_log_dir }}/"
|
## Default: "{{ platform_log_dir }}/erl_crash.dump"
|
||||||
crash_dump_dir = "{{ platform_log_dir }}/"
|
crash_dump_file = "{{ platform_log_dir }}/erl_crash.dump"
|
||||||
|
|
||||||
|
## The number of seconds that the broker is allowed to spend writing
|
||||||
|
## a crash dump
|
||||||
|
##
|
||||||
|
## @doc node.crash_dump_seconds
|
||||||
|
## ValueType: seconds
|
||||||
|
## Default: 30s
|
||||||
|
crash_dump_seconds = 30s
|
||||||
|
|
||||||
|
## The maximum size of a crash dump file in bytes.
|
||||||
|
##
|
||||||
|
## @doc node.crash_dump_bytes
|
||||||
|
## ValueType: bytes
|
||||||
|
## Default: 100MB
|
||||||
|
crash_dump_bytes = 100MB
|
||||||
|
|
||||||
## Global GC Interval.
|
## Global GC Interval.
|
||||||
##
|
##
|
||||||
|
|
|
@ -0,0 +1,192 @@
|
||||||
|
The EMQ X configuration file is in [HOCON](https://github.com/emqx/hocon) format.
|
||||||
|
HOCON, or Human-Optimized Config Object Notation, is a format for human-readable data,
|
||||||
|
and a superset of JSON.
|
||||||
|
|
||||||
|
## Syntax
|
||||||
|
|
||||||
|
In the config file, values can be written as JSON-like objects, such as
|
||||||
|
```
|
||||||
|
node {
|
||||||
|
name = "emqx@127.0.0.1"
|
||||||
|
cookie = "mysecret"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Another equivalent representation is flat, such as
|
||||||
|
|
||||||
|
```
|
||||||
|
node.name="127.0.0.1"
|
||||||
|
node.cookie="mysecret"
|
||||||
|
```
|
||||||
|
|
||||||
|
This flat format is almost backward compatible with EMQ X's config file format
|
||||||
|
in the 4.x series (the so-called 'cuttlefish' format).
|
||||||
|
|
||||||
|
It is 'almost' compatible because HOCON often requires strings to be quoted,
|
||||||
|
while cuttlefish treats all characters to the right of the `=` mark as the value.
|
||||||
|
|
||||||
|
e.g. cuttlefish: `node.name = emqx@127.0.0.1`, HOCON: `node.name = "emqx@127.0.0.1"`
|
||||||
|
|
||||||
|
Strings without special characters in them can be unquoted in HOCON too,
|
||||||
|
e.g. `foo`, `foo_bar`, `foo_bar_1`; a brief illustration follows.
|
||||||
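As a small illustration (a sketch only, reusing the `log.console_handler.level` and `node.name` fields that appear elsewhere in this document):

```
## unquoted and quoted forms are equivalent when no special characters are involved
log.console_handler.level = debug
log.console_handler.level = "debug"

## '@' is a special character, so the value must be quoted
node.name = "emqx@127.0.0.1"
```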
|
|
||||||
|
For more HOCON syntax, please refer to the [specification](https://github.com/lightbend/config/blob/main/HOCON.md)
|
||||||
|
|
||||||
|
## Schema
|
||||||
|
|
||||||
|
To make the HOCON objects type-safe, EMQ X introduced a schema for it.
|
||||||
|
The schema defines data types, field names, and metadata for config value validation
|
||||||
|
and more. In fact, this config document itself is generated from schema metadata.
|
||||||
|
|
||||||
|
### Complex Data Types
|
||||||
|
|
||||||
|
There are 4 complex data types in EMQ X's HOCON config:
|
||||||
|
|
||||||
|
1. Struct: Named using an unquoted string, followed by a pre-defined list of fields,
|
||||||
|
field names cannot start with a number, and are only allowed to use
|
||||||
|
lowercase letters and underscores as the word separator.
|
||||||
|
1. Map: Map is like Struct, except that the fields are not pre-defined.
|
||||||
|
1-based index numbers can also be used as map keys, as an alternative
|
||||||
|
representation of an Array.
|
||||||
|
1. Union: `MemberType1 | MemberType2 | ...`
|
||||||
|
1. Array: `[ElementType]`
|
||||||
|
|
||||||
|
### Primitive Data Types
|
||||||
|
|
||||||
|
Complex types define data 'boxes' which may contain other complex data
|
||||||
|
or primitive values.
|
||||||
|
There are many different primitive types; to name a few:
|
||||||
|
|
||||||
|
* `atom()`
|
||||||
|
* `boolean()`
|
||||||
|
* `string()`
|
||||||
|
* `integer()`
|
||||||
|
* `float()`
|
||||||
|
* `number()`
|
||||||
|
* `binary()` # another format of string()
|
||||||
|
* `emqx_schema:duration()` # time duration, another format of integer()
|
||||||
|
* ...
|
||||||
|
|
||||||
|
The primitive types are mostly self-describing; some are built-in, such
|
||||||
|
as `atom()`, and some are defined in EMQ X modules, such as `emqx_schema:duration()`.
|
||||||
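As a small sketch of how such values look in practice (the fields and defaults are copied from the `node` section of the `emqx.conf` template changed in this commit):

```
node {
  ## emqx_schema:duration_s() -- a time duration with a unit suffix
  crash_dump_seconds = 30s

  ## emqx_schema:bytesize() -- a byte size with a unit suffix
  crash_dump_bytes = 100MB
}
```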
|
|
||||||
|
### Config Paths
|
||||||
|
|
||||||
|
If we consider the whole EMQ X config as a tree,
|
||||||
|
to reference a primitive value, we can use a dot-separated string of names for
|
||||||
|
the path from the tree root (always a Struct) down to the primitive values at the tree leaves.
|
||||||
|
|
||||||
|
Each segment of the dotted string is a Struct field name or Map key.
|
||||||
|
For Array elements, 1-based index is used.
|
||||||
|
|
||||||
|
Below are some examples:
|
||||||
|
|
||||||
|
```
|
||||||
|
node.name="emqx.127.0.0.1"
|
||||||
|
zone.zone1.max_packet_size="10M"
|
||||||
|
authentication.1.enable=true
|
||||||
|
```
|
||||||
|
|
||||||
|
### Environment variables
|
||||||
|
|
||||||
|
Environment variables can be used to define or override config values.
|
||||||
|
|
||||||
|
Since dots (`.`) are not allowed in environment variable names, dots are
|
||||||
|
replaced with double-underscores (`__`).
|
||||||
|
|
||||||
|
The `EMQX_` prefix is used as the namespace.
|
||||||
|
|
||||||
|
For example, `node.name` can be represented as `EMQX_NODE__NAME`.
|
||||||
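A minimal sketch of setting it from a shell before starting the node; the node name is only a placeholder, and the inner HOCON quotes are kept because the value contains an `@` (per the quoting rule described earlier):

```
export EMQX_NODE__NAME='"emqx@127.0.0.1"'
```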
|
|
||||||
|
Environment variable values are parsed as HOCON values, which allows users
|
||||||
|
to set even complex values from environment variables.
|
||||||
|
|
||||||
|
For example, this environment variable sets an array value.
|
||||||
|
|
||||||
|
```
|
||||||
|
export EMQX_LISTENERS__SSL__L1__AUTHENTICATION__SSL__CIPHERS="[\"TLS_AES_256_GCM_SHA384\"]"
|
||||||
|
```
|
||||||
|
|
||||||
|
Unknown environment variables are logged at `warning` level, for example:
|
||||||
|
|
||||||
|
```
|
||||||
|
[warning] unknown_env_vars: ["EMQX_AUTHENTICATION__ENABLED"]
|
||||||
|
```
|
||||||
|
|
||||||
|
This is because the field name is `enable`, not `enabled`.
|
||||||
|
|
||||||
|
<strong>NOTE:</strong> Unknown root keys, however, are silently discarded.
|
||||||
|
|
||||||
|
### Config overlay
|
||||||
|
|
||||||
|
HOCON values are overlaid in layers; values defined earlier sit at layers closer to the bottom.
|
||||||
|
The overall layering order, from the bottom up, is as follows (a short example follows the list):
|
||||||
|
|
||||||
|
1. `emqx.conf`, the base config file
|
||||||
|
1. `EMQX_`-prefixed environment variables
|
||||||
|
1. Cluster override file, the path of which is configured as `cluster_override_conf_file` in the lower layers
|
||||||
|
1. Local override file, the path of which is configured as `local_override_conf_file` in the lower layers
|
||||||
|
|
||||||
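For instance, a sketch of just the two bottom layers (the field name is reused from the logging example later in this section); since the environment variable layer sits above `emqx.conf`, the effective level is `debug`:

```
## in emqx.conf (the bottom layer)
log.console_handler.level = error

## in the environment (a layer above emqx.conf), so this value wins
export EMQX_LOG__CONSOLE_HANDLER__LEVEL=debug
```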
|
Below are the rules for how config values are overlaid.
|
||||||
|
|
||||||
|
#### Struct Fields
|
||||||
|
|
||||||
|
Later config values overwrite earlier values.
|
||||||
|
For example, in the config below, the last line's `debug` overwrites `error` for the
|
||||||
|
console log handler's `level` config, but leaves `enable` unchanged.
|
||||||
|
```
|
||||||
|
log {
|
||||||
|
console_handler{
|
||||||
|
enable=true,
|
||||||
|
level=error
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
## ... more configs ...
|
||||||
|
|
||||||
|
log.console_handler.level=debug
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Map Values
|
||||||
|
|
||||||
|
Maps are like structs, except that the fields are user-defined rather than fixed by
|
||||||
|
the config schema. For instance, `zone1` in the example below.
|
||||||
|
|
||||||
|
```
|
||||||
|
zone {
|
||||||
|
zone1 {
|
||||||
|
mqtt.max_packet_size = 1M
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
## The maximum packet size can be defined as above,
|
||||||
|
## then overridden as below
|
||||||
|
|
||||||
|
zone.zone1.mqtt.max_packet_size = 10M
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Array Elements
|
||||||
|
|
||||||
|
Arrays in EMQ X config have two different representations
|
||||||
|
|
||||||
|
* list, such as: `[1, 2, 3]`
|
||||||
|
* indexed-map, such as: `{"1"=1, "2"=2, "3"=3}`
|
||||||
|
|
||||||
|
Dot-separated paths with numbers in them are parsed into indexed-maps,
|
||||||
|
e.g. `authentication.1={...}` is parsed as `authentication={"1": {...}}`
|
||||||
|
|
||||||
|
Indexed-map arrays can be used to override list arrays:
|
||||||
|
|
||||||
|
```
|
||||||
|
authentication=[{enable=true, backend="built-in-database", mechanism="password-based"}]
|
||||||
|
# we can disable this authentication provider with:
|
||||||
|
authentication.1.enable=false
|
||||||
|
```
|
||||||
|
However, list arrays do not get recursively merged into indexed-map arrays.
|
||||||
|
e.g.
|
||||||
|
|
||||||
|
```
|
||||||
|
authentication=[{enable=true, backend="built-in-database", mechanism="password-based"}]
|
||||||
|
## The value below replaces the whole array; it does not override just one field.
|
||||||
|
authentication=[{enable=true}]
|
||||||
|
```
|
|
@ -18,8 +18,9 @@
|
||||||
|
|
||||||
%% API
|
%% API
|
||||||
-export([start_link/0, mnesia/1]).
|
-export([start_link/0, mnesia/1]).
|
||||||
-export([multicall/3, multicall/5, query/1, reset/0, status/0, skip_failed_commit/1]).
|
-export([multicall/3, multicall/5, query/1, reset/0, status/0,
|
||||||
-export([get_node_tnx_id/1]).
|
skip_failed_commit/1, fast_forward_to_commit/2]).
|
||||||
|
-export([get_node_tnx_id/1, latest_tnx_id/0]).
|
||||||
|
|
||||||
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
|
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
|
||||||
handle_continue/2, code_change/3]).
|
handle_continue/2, code_change/3]).
|
||||||
|
@ -60,21 +61,28 @@ start_link() ->
|
||||||
start_link(Node, Name, RetryMs) ->
|
start_link(Node, Name, RetryMs) ->
|
||||||
gen_server:start_link({local, Name}, ?MODULE, [Node, RetryMs], []).
|
gen_server:start_link({local, Name}, ?MODULE, [Node, RetryMs], []).
|
||||||
|
|
||||||
-spec multicall(Module, Function, Args) -> {ok, TnxId, term()} | {error, Reason} when
|
%% @doc Returns {ok, TnxId, MFARes}, with the result of the first MFA, when all MFAs run ok.
|
||||||
|
%% Returns {error, MFARes} when the first MFA result is neither ok nor {ok, term()}.
|
||||||
|
%% Returns {retry, TnxId, MFARes, Nodes} when some nodes failed and some nodes succeeded.
|
||||||
|
-spec multicall(Module, Function, Args) ->
|
||||||
|
{ok, TnxId, term()} | {error, Reason} | {retry, TnxId, MFARes, node()} when
|
||||||
Module :: module(),
|
Module :: module(),
|
||||||
Function :: atom(),
|
Function :: atom(),
|
||||||
Args :: [term()],
|
Args :: [term()],
|
||||||
|
MFARes :: term(),
|
||||||
TnxId :: pos_integer(),
|
TnxId :: pos_integer(),
|
||||||
Reason :: string().
|
Reason :: string().
|
||||||
multicall(M, F, A) ->
|
multicall(M, F, A) ->
|
||||||
multicall(M, F, A, all, timer:minutes(2)).
|
multicall(M, F, A, all, timer:minutes(2)).
|
||||||
|
|
||||||
-spec multicall(Module, Function, Args, SucceedNum, Timeout) -> {ok, TnxId, term()} |{error, Reason} when
|
-spec multicall(Module, Function, Args, SucceedNum, Timeout) ->
|
||||||
|
{ok, TnxId, MFARes} | {error, Reason} | {retry, TnxId, MFARes, node()} when
|
||||||
Module :: module(),
|
Module :: module(),
|
||||||
Function :: atom(),
|
Function :: atom(),
|
||||||
Args :: [term()],
|
Args :: [term()],
|
||||||
SucceedNum :: pos_integer() | all,
|
SucceedNum :: pos_integer() | all,
|
||||||
TnxId :: pos_integer(),
|
TnxId :: pos_integer(),
|
||||||
|
MFARes :: term(),
|
||||||
Timeout :: timeout(),
|
Timeout :: timeout(),
|
||||||
Reason :: string().
|
Reason :: string().
|
||||||
multicall(M, F, A, RequireNum, Timeout) when RequireNum =:= all orelse RequireNum >= 1 ->
|
multicall(M, F, A, RequireNum, Timeout) when RequireNum =:= all orelse RequireNum >= 1 ->
|
||||||
|
@ -108,7 +116,10 @@ multicall(M, F, A, RequireNum, Timeout) when RequireNum =:= all orelse RequireNu
|
||||||
end,
|
end,
|
||||||
case OkOrFailed of
|
case OkOrFailed of
|
||||||
ok -> InitRes;
|
ok -> InitRes;
|
||||||
_ -> OkOrFailed
|
{error, Error0} -> {error, Error0};
|
||||||
|
{retry, Node0} ->
|
||||||
|
{ok, TnxId0, MFARes} = InitRes,
|
||||||
|
{retry, TnxId0, MFARes, Node0}
|
||||||
end.
|
end.
|
||||||
|
|
||||||
-spec query(pos_integer()) -> {'atomic', map()} | {'aborted', Reason :: term()}.
|
-spec query(pos_integer()) -> {'atomic', map()} | {'aborted', Reason :: term()}.
|
||||||
|
@ -122,6 +133,11 @@ reset() -> gen_server:call(?MODULE, reset).
|
||||||
status() ->
|
status() ->
|
||||||
transaction(fun trans_status/0, []).
|
transaction(fun trans_status/0, []).
|
||||||
|
|
||||||
|
-spec latest_tnx_id() -> pos_integer().
|
||||||
|
latest_tnx_id() ->
|
||||||
|
{atomic, TnxId} = transaction(fun get_latest_id/0, []),
|
||||||
|
TnxId.
|
||||||
|
|
||||||
-spec get_node_tnx_id(node()) -> integer().
|
-spec get_node_tnx_id(node()) -> integer().
|
||||||
get_node_tnx_id(Node) ->
|
get_node_tnx_id(Node) ->
|
||||||
case mnesia:wread({?CLUSTER_COMMIT, Node}) of
|
case mnesia:wread({?CLUSTER_COMMIT, Node}) of
|
||||||
|
@ -136,6 +152,13 @@ get_node_tnx_id(Node) ->
|
||||||
skip_failed_commit(Node) ->
|
skip_failed_commit(Node) ->
|
||||||
gen_server:call({?MODULE, Node}, skip_failed_commit).
|
gen_server:call({?MODULE, Node}, skip_failed_commit).
|
||||||
|
|
||||||
|
%% Regardless of what the MFA returns, consider it a success,
|
||||||
|
%% then skip to the specified TnxId.
|
||||||
|
%% If CurrTnxId >= TnxId, nothing happens.
|
||||||
|
%% If CurrTnxId < TnxId, CurrTnxId skips forward to TnxId.
|
||||||
|
-spec fast_forward_to_commit(node(), pos_integer()) -> pos_integer().
|
||||||
|
fast_forward_to_commit(Node, ToTnxId) ->
|
||||||
|
gen_server:call({?MODULE, Node}, {fast_forward_to_commit, ToTnxId}).
|
||||||
%%%===================================================================
|
%%%===================================================================
|
||||||
%%% gen_server callbacks
|
%%% gen_server callbacks
|
||||||
%%%===================================================================
|
%%%===================================================================
|
||||||
|
@ -165,8 +188,13 @@ handle_call({initiate, MFA}, _From, State = #{node := Node}) ->
|
||||||
{aborted, Reason} ->
|
{aborted, Reason} ->
|
||||||
{reply, {error, Reason}, State, {continue, ?CATCH_UP}}
|
{reply, {error, Reason}, State, {continue, ?CATCH_UP}}
|
||||||
end;
|
end;
|
||||||
handle_call(skip_failed_commit, _From, State) ->
|
handle_call(skip_failed_commit, _From, State = #{node := Node}) ->
|
||||||
{reply, ok, State, catch_up(State, true)};
|
Timeout = catch_up(State, true),
|
||||||
|
{atomic, LatestId} = transaction(fun get_node_tnx_id/1, [Node]),
|
||||||
|
{reply, LatestId, State, Timeout};
|
||||||
|
handle_call({fast_forward_to_commit, ToTnxId}, _From, State) ->
|
||||||
|
NodeId = do_fast_forward_to_commit(ToTnxId, State),
|
||||||
|
{reply, NodeId, State, catch_up(State)};
|
||||||
handle_call(_, _From, State) ->
|
handle_call(_, _From, State) ->
|
||||||
{reply, ok, State, catch_up(State)}.
|
{reply, ok, State, catch_up(State)}.
|
||||||
|
|
||||||
|
@ -245,7 +273,8 @@ do_catch_up(ToTnxId, Node) ->
|
||||||
{false, Error} -> mnesia:abort(Error)
|
{false, Error} -> mnesia:abort(Error)
|
||||||
end;
|
end;
|
||||||
[#cluster_rpc_commit{tnx_id = LastAppliedId}] ->
|
[#cluster_rpc_commit{tnx_id = LastAppliedId}] ->
|
||||||
Reason = lists:flatten(io_lib:format("~p catch up failed by LastAppliedId(~p) > ToTnxId(~p)",
|
Reason = lists:flatten(
|
||||||
|
io_lib:format("~p catch up failed by LastAppliedId(~p) > ToTnxId(~p)",
|
||||||
[Node, LastAppliedId, ToTnxId])),
|
[Node, LastAppliedId, ToTnxId])),
|
||||||
?SLOG(error, #{
|
?SLOG(error, #{
|
||||||
msg => "catch up failed!",
|
msg => "catch up failed!",
|
||||||
|
@ -258,6 +287,20 @@ do_catch_up(ToTnxId, Node) ->
|
||||||
commit(Node, TnxId) ->
|
commit(Node, TnxId) ->
|
||||||
ok = mnesia:write(?CLUSTER_COMMIT, #cluster_rpc_commit{node = Node, tnx_id = TnxId}, write).
|
ok = mnesia:write(?CLUSTER_COMMIT, #cluster_rpc_commit{node = Node, tnx_id = TnxId}, write).
|
||||||
|
|
||||||
|
do_fast_forward_to_commit(ToTnxId, State = #{node := Node}) ->
|
||||||
|
{atomic, NodeId} = transaction(fun get_node_tnx_id/1, [Node]),
|
||||||
|
case NodeId >= ToTnxId of
|
||||||
|
true -> NodeId;
|
||||||
|
false ->
|
||||||
|
{atomic, LatestId} = transaction(fun get_latest_id/0, []),
|
||||||
|
case LatestId =< NodeId of
|
||||||
|
true -> NodeId;
|
||||||
|
false ->
|
||||||
|
catch_up(State, true),
|
||||||
|
do_fast_forward_to_commit(ToTnxId, State)
|
||||||
|
end
|
||||||
|
end.
|
||||||
|
|
||||||
get_latest_id() ->
|
get_latest_id() ->
|
||||||
case mnesia:last(?CLUSTER_MFA) of
|
case mnesia:last(?CLUSTER_MFA) of
|
||||||
'$end_of_table' -> 0;
|
'$end_of_table' -> 0;
|
||||||
|
@ -269,7 +312,8 @@ init_mfa(Node, MFA) ->
|
||||||
LatestId = get_latest_id(),
|
LatestId = get_latest_id(),
|
||||||
ok = do_catch_up_in_one_trans(LatestId, Node),
|
ok = do_catch_up_in_one_trans(LatestId, Node),
|
||||||
TnxId = LatestId + 1,
|
TnxId = LatestId + 1,
|
||||||
MFARec = #cluster_rpc_mfa{tnx_id = TnxId, mfa = MFA, initiator = Node, created_at = erlang:localtime()},
|
MFARec = #cluster_rpc_mfa{tnx_id = TnxId, mfa = MFA,
|
||||||
|
initiator = Node, created_at = erlang:localtime()},
|
||||||
ok = mnesia:write(?CLUSTER_MFA, MFARec, write),
|
ok = mnesia:write(?CLUSTER_MFA, MFARec, write),
|
||||||
ok = commit(Node, TnxId),
|
ok = commit(Node, TnxId),
|
||||||
case apply_mfa(TnxId, MFA) of
|
case apply_mfa(TnxId, MFA) of
|
||||||
|
@ -344,7 +388,7 @@ wait_for_all_nodes_commit(TnxId, Delay, Remain) ->
|
||||||
ok = timer:sleep(Delay),
|
ok = timer:sleep(Delay),
|
||||||
wait_for_all_nodes_commit(TnxId, Delay, Remain - Delay);
|
wait_for_all_nodes_commit(TnxId, Delay, Remain - Delay);
|
||||||
[] -> ok;
|
[] -> ok;
|
||||||
Nodes -> {error, Nodes}
|
Nodes -> {retry, Nodes}
|
||||||
end.
|
end.
|
||||||
|
|
||||||
wait_for_nodes_commit(RequiredNum, TnxId, Delay, Remain) ->
|
wait_for_nodes_commit(RequiredNum, TnxId, Delay, Remain) ->
|
||||||
|
@ -356,7 +400,7 @@ wait_for_nodes_commit(RequiredNum, TnxId, Delay, Remain) ->
|
||||||
false ->
|
false ->
|
||||||
case lagging_node(TnxId) of
|
case lagging_node(TnxId) of
|
||||||
[] -> ok; %% All committed, but SucceedNum > length(nodes()).
|
[] -> ok; %% All committed, but SucceedNum > length(nodes()).
|
||||||
Nodes -> {error, Nodes}
|
Nodes -> {retry, Nodes}
|
||||||
end
|
end
|
||||||
end.
|
end.
|
||||||
|
|
||||||
|
|
|
@ -16,6 +16,7 @@
|
||||||
-module(emqx_conf).
|
-module(emqx_conf).
|
||||||
|
|
||||||
-compile({no_auto_import, [get/1, get/2]}).
|
-compile({no_auto_import, [get/1, get/2]}).
|
||||||
|
-include_lib("emqx/include/logger.hrl").
|
||||||
|
|
||||||
-export([add_handler/2, remove_handler/1]).
|
-export([add_handler/2, remove_handler/1]).
|
||||||
-export([get/1, get/2, get_raw/2, get_all/1]).
|
-export([get/1, get/2, get_raw/2, get_all/1]).
|
||||||
|
@ -23,6 +24,7 @@
|
||||||
-export([update/3, update/4]).
|
-export([update/3, update/4]).
|
||||||
-export([remove/2, remove/3]).
|
-export([remove/2, remove/3]).
|
||||||
-export([reset/2, reset/3]).
|
-export([reset/2, reset/3]).
|
||||||
|
-export([gen_doc/1]).
|
||||||
|
|
||||||
%% for rpc
|
%% for rpc
|
||||||
-export([get_node_and_config/1]).
|
-export([get_node_and_config/1]).
|
||||||
|
@ -122,14 +124,29 @@ reset(Node, KeyPath, Opts) when Node =:= node() ->
|
||||||
reset(Node, KeyPath, Opts) ->
|
reset(Node, KeyPath, Opts) ->
|
||||||
rpc:call(Node, ?MODULE, reset, [KeyPath, Opts]).
|
rpc:call(Node, ?MODULE, reset, [KeyPath, Opts]).
|
||||||
|
|
||||||
|
-spec gen_doc(file:name_all()) -> ok.
|
||||||
|
gen_doc(File) ->
|
||||||
|
Version = emqx_release:version(),
|
||||||
|
Title = "# EMQ X " ++ Version ++ " Configuration",
|
||||||
|
BodyFile = filename:join([code:lib_dir(emqx_conf), "etc", "emqx_conf.md"]),
|
||||||
|
{ok, Body} = file:read_file(BodyFile),
|
||||||
|
Doc = hocon_schema_doc:gen(emqx_conf_schema, #{title => Title,
|
||||||
|
body => Body}),
|
||||||
|
file:write_file(File, Doc).
|
||||||
|
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% Internal funcs
|
%% Internal functions
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
multicall(M, F, Args) ->
|
multicall(M, F, Args) ->
|
||||||
case emqx_cluster_rpc:multicall(M, F, Args) of
|
case emqx_cluster_rpc:multicall(M, F, Args) of
|
||||||
{ok, _TnxId, Res} ->
|
{ok, _TnxId, Res} -> Res;
|
||||||
|
{retry, TnxId, Res, Nodes} ->
|
||||||
|
%% The initial MFA returned ok, but other nodes failed.
|
||||||
|
%% We return ok and raise an alarm.
|
||||||
|
?SLOG(error, #{msg => "failed to update config in cluster", nodes => Nodes,
|
||||||
|
tnx_id => TnxId, mfa => {M, F, Args}}),
|
||||||
Res;
|
Res;
|
||||||
{error, Reason} ->
|
{error, Error} -> %% all MFA return not ok or {ok, term()}.
|
||||||
{error, Reason}
|
Error
|
||||||
end.
|
end.
|
||||||
|
|
|
@ -0,0 +1,92 @@
|
||||||
|
%%--------------------------------------------------------------------
|
||||||
|
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||||
|
%%
|
||||||
|
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
%% you may not use this file except in compliance with the License.
|
||||||
|
%% You may obtain a copy of the License at
|
||||||
|
%%
|
||||||
|
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
%%
|
||||||
|
%% Unless required by applicable law or agreed to in writing, software
|
||||||
|
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
%% See the License for the specific language governing permissions and
|
||||||
|
%% limitations under the License.
|
||||||
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
|
-module(emqx_conf_cli).
|
||||||
|
-export([ load/0
|
||||||
|
, admins/1
|
||||||
|
, unload/0
|
||||||
|
]).
|
||||||
|
|
||||||
|
-define(CMD, cluster_call).
|
||||||
|
|
||||||
|
load() ->
|
||||||
|
emqx_ctl:register_command(?CMD, {?MODULE, admins}, []).
|
||||||
|
|
||||||
|
unload() ->
|
||||||
|
emqx_ctl:unregister_command(?CMD).
|
||||||
|
|
||||||
|
admins(["status"]) -> status();
|
||||||
|
|
||||||
|
admins(["skip"]) ->
|
||||||
|
status(),
|
||||||
|
Nodes = mria_mnesia:running_nodes(),
|
||||||
|
lists:foreach(fun emqx_cluster_rpc:skip_failed_commit/1, Nodes),
|
||||||
|
status();
|
||||||
|
|
||||||
|
admins(["skip", Node0]) ->
|
||||||
|
status(),
|
||||||
|
Node = list_to_existing_atom(Node0),
|
||||||
|
emqx_cluster_rpc:skip_failed_commit(Node),
|
||||||
|
status();
|
||||||
|
|
||||||
|
admins(["tnxid", TnxId0]) ->
|
||||||
|
TnxId = list_to_integer(TnxId0),
|
||||||
|
emqx_ctl:print("~p~n", [emqx_cluster_rpc:query(TnxId)]);
|
||||||
|
|
||||||
|
admins(["fast_forward"]) ->
|
||||||
|
status(),
|
||||||
|
Nodes = mria_mnesia:running_nodes(),
|
||||||
|
TnxId = emqx_cluster_rpc:latest_tnx_id(),
|
||||||
|
lists:foreach(fun(N) -> emqx_cluster_rpc:fast_forward_to_commit(N, TnxId) end, Nodes),
|
||||||
|
status();
|
||||||
|
|
||||||
|
admins(["fast_forward", ToTnxId]) ->
|
||||||
|
status(),
|
||||||
|
Nodes = mria_mnesia:running_nodes(),
|
||||||
|
TnxId = list_to_integer(ToTnxId),
|
||||||
|
lists:foreach(fun(N) -> emqx_cluster_rpc:fast_forward_to_commit(N, TnxId) end, Nodes),
|
||||||
|
status();
|
||||||
|
|
||||||
|
admins(["fast_forward", Node0, ToTnxId]) ->
|
||||||
|
status(),
|
||||||
|
TnxId = list_to_integer(ToTnxId),
|
||||||
|
Node = list_to_existing_atom(Node0),
|
||||||
|
emqx_cluster_rpc:fast_forward_to_commit(Node, TnxId),
|
||||||
|
status();
|
||||||
|
|
||||||
|
admins(_) ->
|
||||||
|
emqx_ctl:usage(
|
||||||
|
[
|
||||||
|
{"cluster_call status", "status"},
|
||||||
|
{"cluster_call skip [node]", "increase one commit on specific node"},
|
||||||
|
{"cluster_call tnxid <TnxId>", "get detailed about TnxId"},
|
||||||
|
{"cluster_call fast_forward [node] [tnx_id]", "fast forwards to tnx_id" }
|
||||||
|
]).
|
||||||
|
|
||||||
|
status() ->
|
||||||
|
emqx_ctl:print("-----------------------------------------------\n"),
|
||||||
|
{atomic, Status} = emqx_cluster_rpc:status(),
|
||||||
|
lists:foreach(fun(S) ->
|
||||||
|
#{
|
||||||
|
node := Node,
|
||||||
|
tnx_id := TnxId,
|
||||||
|
mfa := {M, F, A},
|
||||||
|
created_at := CreatedAt
|
||||||
|
} = S,
|
||||||
|
emqx_ctl:print("~p:[~w] CreatedAt:~p ~p:~p/~w\n",
|
||||||
|
[Node, TnxId, CreatedAt, M, F, length(A)])
|
||||||
|
end, Status),
|
||||||
|
emqx_ctl:print("-----------------------------------------------\n").
|
|
@ -24,6 +24,7 @@
|
||||||
|
|
||||||
-include_lib("typerefl/include/types.hrl").
|
-include_lib("typerefl/include/types.hrl").
|
||||||
-include_lib("hocon/include/hoconsc.hrl").
|
-include_lib("hocon/include/hoconsc.hrl").
|
||||||
|
-include_lib("emqx/include/emqx_authentication.hrl").
|
||||||
|
|
||||||
-type log_level() :: debug | info | notice | warning | error | critical | alert | emergency | all.
|
-type log_level() :: debug | info | notice | warning | error | critical | alert | emergency | all.
|
||||||
-type file() :: string().
|
-type file() :: string().
|
||||||
|
@ -62,8 +63,12 @@
|
||||||
namespace() -> undefined.
|
namespace() -> undefined.
|
||||||
|
|
||||||
roots() ->
|
roots() ->
|
||||||
%% authorization configs are merged in THIS schema's "authorization" fields
|
PtKey = ?EMQX_AUTHENTICATION_SCHEMA_MODULE_PT_KEY,
|
||||||
lists:keydelete("authorization", 1, emqx_schema:roots(high)) ++
|
case persistent_term:get(PtKey, undefined) of
|
||||||
|
undefined -> persistent_term:put(PtKey, emqx_authn_schema);
|
||||||
|
_ -> ok
|
||||||
|
end,
|
||||||
|
emqx_schema_high_prio_roots() ++
|
||||||
[ {"node",
|
[ {"node",
|
||||||
sc(hoconsc:ref("node"),
|
sc(hoconsc:ref("node"),
|
||||||
#{ desc => "Node name, cookie, config & data directories "
|
#{ desc => "Node name, cookie, config & data directories "
|
||||||
|
@ -87,20 +92,6 @@ roots() ->
|
||||||
"should work, but in case you need to do performance "
|
"should work, but in case you need to do performance "
|
||||||
"fine-turning or experiment a bit, this is where to look."
|
"fine-turning or experiment a bit, this is where to look."
|
||||||
})}
|
})}
|
||||||
, {"authorization",
|
|
||||||
sc(hoconsc:ref("authorization"),
|
|
||||||
#{ desc => """
|
|
||||||
Authorization a.k.a ACL.<br>
|
|
||||||
In EMQ X, MQTT client access control is extremly flexible.<br>
|
|
||||||
An out of the box set of authorization data sources are supported.
|
|
||||||
For example,<br>
|
|
||||||
'file' source is to support concise and yet generic ACL rules in a file;<br>
|
|
||||||
'built-in-database' source can be used to store per-client customisable rule sets,
|
|
||||||
natively in the EMQ X node;<br>
|
|
||||||
'http' source to make EMQ X call an external HTTP API to make the decision;<br>
|
|
||||||
'postgresql' etc. to look up clients or rules from external databases;<br>
|
|
||||||
"""
|
|
||||||
})}
|
|
||||||
, {"db",
|
, {"db",
|
||||||
sc(ref("db"),
|
sc(ref("db"),
|
||||||
#{ desc => "Settings of the embedded database."
|
#{ desc => "Settings of the embedded database."
|
||||||
|
@ -251,14 +242,12 @@ fields("node") ->
|
||||||
[ {"name",
|
[ {"name",
|
||||||
sc(string(),
|
sc(string(),
|
||||||
#{ default => "emqx@127.0.0.1"
|
#{ default => "emqx@127.0.0.1"
|
||||||
, override_env => "EMQX_NODE_NAME"
|
|
||||||
})}
|
})}
|
||||||
, {"cookie",
|
, {"cookie",
|
||||||
sc(string(),
|
sc(string(),
|
||||||
#{ mapping => "vm_args.-setcookie",
|
#{ mapping => "vm_args.-setcookie",
|
||||||
default => "emqxsecretcookie",
|
default => "emqxsecretcookie",
|
||||||
sensitive => true,
|
sensitive => true
|
||||||
override_env => "EMQX_NODE_COOKIE"
|
|
||||||
})}
|
})}
|
||||||
, {"data_dir",
|
, {"data_dir",
|
||||||
sc(string(),
|
sc(string(),
|
||||||
|
@ -275,9 +264,25 @@ fields("node") ->
|
||||||
#{ mapping => "emqx_machine.global_gc_interval"
|
#{ mapping => "emqx_machine.global_gc_interval"
|
||||||
, default => "15m"
|
, default => "15m"
|
||||||
})}
|
})}
|
||||||
, {"crash_dump_dir",
|
, {"crash_dump_file",
|
||||||
sc(file(),
|
sc(file(),
|
||||||
#{ mapping => "vm_args.-env ERL_CRASH_DUMP"
|
#{ mapping => "vm_args.-env ERL_CRASH_DUMP"
|
||||||
|
, desc => "Location of the crash dump file"
|
||||||
|
})}
|
||||||
|
, {"crash_dump_seconds",
|
||||||
|
sc(emqx_schema:duration_s(),
|
||||||
|
#{ mapping => "vm_args.-env ERL_CRASH_DUMP_SECONDS"
|
||||||
|
, default => "30s"
|
||||||
|
, desc => """
|
||||||
|
The number of seconds that the broker is allowed to spend writing
|
||||||
|
a crash dump
|
||||||
|
"""
|
||||||
|
})}
|
||||||
|
, {"crash_dump_bytes",
|
||||||
|
sc(emqx_schema:bytesize(),
|
||||||
|
#{ mapping => "vm_args.-env ERL_CRASH_DUMP_BYTES"
|
||||||
|
, default => "100MB"
|
||||||
|
, desc => "The maximum size of a crash dump file in bytes."
|
||||||
})}
|
})}
|
||||||
, {"dist_net_ticktime",
|
, {"dist_net_ticktime",
|
||||||
sc(emqx_schema:duration(),
|
sc(emqx_schema:duration(),
|
||||||
|
@ -347,6 +352,23 @@ to <code>rlog</code>.
|
||||||
List of core nodes that the replicant will connect to.<br/>
|
List of core nodes that the replicant will connect to.<br/>
|
||||||
Note: this parameter only takes effect when the <code>backend</code> is set
|
Note: this parameter only takes effect when the <code>backend</code> is set
|
||||||
to <code>rlog</code> and the <code>role</code> is set to <code>replicant</code>.
|
to <code>rlog</code> and the <code>role</code> is set to <code>replicant</code>.
|
||||||
|
"""
|
||||||
|
})}
|
||||||
|
, {"rpc_module",
|
||||||
|
sc(hoconsc:enum([gen_rpc, rpc]),
|
||||||
|
#{ mapping => "mria.rlog_rpc_module"
|
||||||
|
, default => gen_rpc
|
||||||
|
, desc => """
|
||||||
|
Protocol used for pushing transaction logs to the replicant nodes.
|
||||||
|
"""
|
||||||
|
})}
|
||||||
|
, {"tlog_push_mode",
|
||||||
|
sc(hoconsc:enum([sync, async]),
|
||||||
|
#{ mapping => "mria.tlog_push_mode"
|
||||||
|
, default => async
|
||||||
|
, desc => """
|
||||||
|
In sync mode the core node waits for an ack from the replicant nodes before sending the next
|
||||||
|
transaction log entry.
|
||||||
"""
|
"""
|
||||||
})}
|
})}
|
||||||
];
|
];
|
||||||
|
@ -812,3 +834,22 @@ ensure_list(V) ->
|
||||||
|
|
||||||
roots(Module) ->
|
roots(Module) ->
|
||||||
lists:map(fun({_BinName, Root}) -> Root end, hocon_schema:roots(Module)).
|
lists:map(fun({_BinName, Root}) -> Root end, hocon_schema:roots(Module)).
|
||||||
|
|
||||||
|
%% Like authentication schema, authorization schema is incomplete in emqx_schema
|
||||||
|
%% module; this function replaces the root field "authorization" with a new schema.
|
||||||
|
emqx_schema_high_prio_roots() ->
|
||||||
|
Roots = emqx_schema:roots(high),
|
||||||
|
Authz = {"authorization",
|
||||||
|
sc(hoconsc:ref("authorization"),
|
||||||
|
#{ desc => """
|
||||||
|
Authorization a.k.a ACL.<br>
|
||||||
|
In EMQ X, MQTT client access control is extremely flexible.<br>
|
||||||
|
An out-of-the-box set of authorization data sources is supported.
|
||||||
|
For example,<br>
|
||||||
|
'file' source is to support concise and yet generic ACL rules in a file;<br>
|
||||||
|
'built-in-database' source can be used to store per-client customisable rule sets,
|
||||||
|
natively in the EMQ X node;<br>
|
||||||
|
'http' source to make EMQ X call an external HTTP API to make the decision;<br>
|
||||||
|
'postgresql' etc. to look up clients or rules from external databases;<br>
|
||||||
|
""" })},
|
||||||
|
lists:keyreplace("authorization", 1, Roots, Authz).
|
||||||
|
|
|
@ -33,7 +33,8 @@ all() -> [
|
||||||
t_commit_ok_but_apply_fail_on_other_node,
|
t_commit_ok_but_apply_fail_on_other_node,
|
||||||
t_commit_ok_apply_fail_on_other_node_then_recover,
|
t_commit_ok_apply_fail_on_other_node_then_recover,
|
||||||
t_del_stale_mfa,
|
t_del_stale_mfa,
|
||||||
t_skip_failed_commit
|
t_skip_failed_commit,
|
||||||
|
t_fast_forward_commit
|
||||||
].
|
].
|
||||||
suite() -> [{timetrap, {minutes, 3}}].
|
suite() -> [{timetrap, {minutes, 3}}].
|
||||||
groups() -> [].
|
groups() -> [].
|
||||||
|
@ -183,13 +184,37 @@ t_skip_failed_commit(_Config) ->
|
||||||
?assertEqual([{Node, 1}, {{Node, ?NODE2}, 1}, {{Node, ?NODE3}, 1}],
|
?assertEqual([{Node, 1}, {{Node, ?NODE2}, 1}, {{Node, ?NODE3}, 1}],
|
||||||
tnx_ids(List1)),
|
tnx_ids(List1)),
|
||||||
{M, F, A} = {?MODULE, failed_on_node, [erlang:whereis(?NODE1)]},
|
{M, F, A} = {?MODULE, failed_on_node, [erlang:whereis(?NODE1)]},
|
||||||
{ok, _, ok} = emqx_cluster_rpc:multicall(M, F, A, 1, 1000),
|
{ok, 2, ok} = emqx_cluster_rpc:multicall(M, F, A, 1, 1000),
|
||||||
ok = gen_server:call(?NODE2, skip_failed_commit, 5000),
|
2 = gen_server:call(?NODE2, skip_failed_commit, 5000),
|
||||||
{atomic, List2} = emqx_cluster_rpc:status(),
|
{atomic, List2} = emqx_cluster_rpc:status(),
|
||||||
?assertEqual([{Node, 2}, {{Node, ?NODE2}, 2}, {{Node, ?NODE3}, 1}],
|
?assertEqual([{Node, 2}, {{Node, ?NODE2}, 2}, {{Node, ?NODE3}, 1}],
|
||||||
tnx_ids(List2)),
|
tnx_ids(List2)),
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
|
t_fast_forward_commit(_Config) ->
|
||||||
|
emqx_cluster_rpc:reset(),
|
||||||
|
{atomic, []} = emqx_cluster_rpc:status(),
|
||||||
|
{ok, 1, ok} = emqx_cluster_rpc:multicall(io, format, ["test~n"], all, 1000),
|
||||||
|
ct:sleep(180),
|
||||||
|
{atomic, List1} = emqx_cluster_rpc:status(),
|
||||||
|
Node = node(),
|
||||||
|
?assertEqual([{Node, 1}, {{Node, ?NODE2}, 1}, {{Node, ?NODE3}, 1}],
|
||||||
|
tnx_ids(List1)),
|
||||||
|
{M, F, A} = {?MODULE, failed_on_node, [erlang:whereis(?NODE1)]},
|
||||||
|
{ok, 2, ok} = emqx_cluster_rpc:multicall(M, F, A, 1, 1000),
|
||||||
|
{ok, 3, ok} = emqx_cluster_rpc:multicall(M, F, A, 1, 1000),
|
||||||
|
{ok, 4, ok} = emqx_cluster_rpc:multicall(M, F, A, 1, 1000),
|
||||||
|
{ok, 5, ok} = emqx_cluster_rpc:multicall(M, F, A, 1, 1000),
|
||||||
|
{retry, 6, ok, _} = emqx_cluster_rpc:multicall(M, F, A, 2, 1000),
|
||||||
|
3 = gen_server:call(?NODE2, {fast_forward_to_commit, 3}, 5000),
|
||||||
|
4 = gen_server:call(?NODE2, {fast_forward_to_commit, 4}, 5000),
|
||||||
|
6 = gen_server:call(?NODE2, {fast_forward_to_commit, 7}, 5000),
|
||||||
|
2 = gen_server:call(?NODE3, {fast_forward_to_commit, 2}, 5000),
|
||||||
|
{atomic, List2} = emqx_cluster_rpc:status(),
|
||||||
|
?assertEqual([{Node, 6}, {{Node, ?NODE2}, 6}, {{Node, ?NODE3}, 2}],
|
||||||
|
tnx_ids(List2)),
|
||||||
|
ok.
|
||||||
|
|
||||||
tnx_ids(Status) ->
|
tnx_ids(Status) ->
|
||||||
lists:sort(lists:map(fun(#{tnx_id := TnxId, node := Node}) ->
|
lists:sort(lists:map(fun(#{tnx_id := TnxId, node := Node}) ->
|
||||||
{Node, TnxId} end, Status)).
|
{Node, TnxId} end, Status)).
|
||||||
|
|
|
@ -1,4 +1,5 @@
|
||||||
#connectors.mqtt.my_mqtt_connector {
|
#connectors.mqtt.my_mqtt_connector {
|
||||||
|
# mode = cluster_shareload
|
||||||
# server = "127.0.0.1:1883"
|
# server = "127.0.0.1:1883"
|
||||||
# proto_ver = "v4"
|
# proto_ver = "v4"
|
||||||
# username = "username1"
|
# username = "username1"
|
||||||
|
@ -8,7 +9,6 @@
|
||||||
# retry_interval = "30s"
|
# retry_interval = "30s"
|
||||||
# max_inflight = 32
|
# max_inflight = 32
|
||||||
# reconnect_interval = "30s"
|
# reconnect_interval = "30s"
|
||||||
# bridge_mode = true
|
|
||||||
# replayq {
|
# replayq {
|
||||||
# dir = "{{ platform_data_dir }}/replayq/bridge_mqtt/"
|
# dir = "{{ platform_data_dir }}/replayq/bridge_mqtt/"
|
||||||
# seg_bytes = "100MB"
|
# seg_bytes = "100MB"
|
||||||
|
|
|
@ -30,6 +30,8 @@
|
||||||
%% API callbacks
|
%% API callbacks
|
||||||
-export(['/connectors_test'/2, '/connectors'/2, '/connectors/:id'/2]).
|
-export(['/connectors_test'/2, '/connectors'/2, '/connectors/:id'/2]).
|
||||||
|
|
||||||
|
-define(CONN_TYPES, [mqtt]).
|
||||||
|
|
||||||
-define(TRY_PARSE_ID(ID, EXPR),
|
-define(TRY_PARSE_ID(ID, EXPR),
|
||||||
try emqx_connector:parse_connector_id(Id) of
|
try emqx_connector:parse_connector_id(Id) of
|
||||||
{ConnType, ConnName} ->
|
{ConnType, ConnName} ->
|
||||||
|
@ -38,7 +40,7 @@
|
||||||
catch
|
catch
|
||||||
error:{invalid_bridge_id, Id0} ->
|
error:{invalid_bridge_id, Id0} ->
|
||||||
{400, #{code => 'INVALID_ID', message => <<"invalid_bridge_id: ", Id0/binary,
|
{400, #{code => 'INVALID_ID', message => <<"invalid_bridge_id: ", Id0/binary,
|
||||||
". Bridge Ids must be of format <bridge_type>:<name>">>}}
|
". Bridge Ids must be of format {type}:{name}">>}}
|
||||||
end).
|
end).
|
||||||
|
|
||||||
namespace() -> "connector".
|
namespace() -> "connector".
|
||||||
|
@ -53,17 +55,71 @@ error_schema(Code, Message) ->
|
||||||
, {message, mk(string(), #{example => Message})}
|
, {message, mk(string(), #{example => Message})}
|
||||||
].
|
].
|
||||||
|
|
||||||
connector_info() ->
|
put_request_body_schema() ->
|
||||||
hoconsc:union([ ref(emqx_connector_schema, "mqtt_connector_info")
|
emqx_dashboard_swagger:schema_with_examples(
|
||||||
]).
|
emqx_connector_schema:put_request(), connector_info_examples(put)).
|
||||||
|
|
||||||
connector_test_info() ->
|
post_request_body_schema() ->
|
||||||
hoconsc:union([ ref(emqx_connector_schema, "mqtt_connector_test_info")
|
emqx_dashboard_swagger:schema_with_examples(
|
||||||
]).
|
emqx_connector_schema:post_request(), connector_info_examples(post)).
|
||||||
|
|
||||||
connector_req() ->
|
get_response_body_schema() ->
|
||||||
hoconsc:union([ ref(emqx_connector_schema, "mqtt_connector")
|
emqx_dashboard_swagger:schema_with_examples(
|
||||||
]).
|
emqx_connector_schema:get_response(), connector_info_examples(get)).
|
||||||
|
|
||||||
|
connector_info_array_example(Method) ->
|
||||||
|
[Config || #{value := Config} <- maps:values(connector_info_examples(Method))].
|
||||||
|
|
||||||
|
connector_info_examples(Method) ->
|
||||||
|
lists:foldl(fun(Type, Acc) ->
|
||||||
|
SType = atom_to_list(Type),
|
||||||
|
maps:merge(Acc, #{
|
||||||
|
Type => #{
|
||||||
|
summary => bin(string:uppercase(SType) ++ " Connector"),
|
||||||
|
value => info_example(Type, Method)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
end, #{}, ?CONN_TYPES).
|
||||||
|
|
||||||
|
info_example(Type, Method) ->
|
||||||
|
maps:merge(info_example_basic(Type),
|
||||||
|
method_example(Type, Method)).
|
||||||
|
|
||||||
|
method_example(Type, get) ->
|
||||||
|
SType = atom_to_list(Type),
|
||||||
|
SName = "my_" ++ SType ++ "_connector",
|
||||||
|
#{
|
||||||
|
id => bin(SType ++ ":" ++ SName),
|
||||||
|
type => bin(SType),
|
||||||
|
name => bin(SName)
|
||||||
|
};
|
||||||
|
method_example(Type, post) ->
|
||||||
|
SType = atom_to_list(Type),
|
||||||
|
SName = "my_" ++ SType ++ "_connector",
|
||||||
|
#{
|
||||||
|
type => bin(SType),
|
||||||
|
name => bin(SName)
|
||||||
|
};
|
||||||
|
method_example(_Type, put) ->
|
||||||
|
#{}.
|
||||||
|
|
||||||
|
info_example_basic(mqtt) ->
|
||||||
|
#{
|
||||||
|
mode => cluster_shareload,
|
||||||
|
server => <<"127.0.0.1:1883">>,
|
||||||
|
reconnect_interval => <<"30s">>,
|
||||||
|
proto_ver => <<"v4">>,
|
||||||
|
username => <<"foo">>,
|
||||||
|
password => <<"bar">>,
|
||||||
|
clientid => <<"foo">>,
|
||||||
|
clean_start => true,
|
||||||
|
keepalive => <<"300s">>,
|
||||||
|
retry_interval => <<"30s">>,
|
||||||
|
max_inflight => 100,
|
||||||
|
ssl => #{
|
||||||
|
enable => false
|
||||||
|
}
|
||||||
|
}.
|
||||||
|
|
||||||
param_path_id() ->
|
param_path_id() ->
|
||||||
[{id, mk(binary(), #{in => path, example => <<"mqtt:my_mqtt_connector">>})}].
|
[{id, mk(binary(), #{in => path, example => <<"mqtt:my_mqtt_connector">>})}].
|
||||||
|
@ -74,9 +130,9 @@ schema("/connectors_test") ->
|
||||||
post => #{
|
post => #{
|
||||||
tags => [<<"connectors">>],
|
tags => [<<"connectors">>],
|
||||||
description => <<"Test creating a new connector by given Id <br>"
|
description => <<"Test creating a new connector by given Id <br>"
|
||||||
"The Id must be of format <type>:<name>">>,
|
"The ID must be of format '{type}:{name}'">>,
|
||||||
summary => <<"Test creating connector">>,
|
summary => <<"Test creating connector">>,
|
||||||
requestBody => connector_test_info(),
|
requestBody => post_request_body_schema(),
|
||||||
responses => #{
|
responses => #{
|
||||||
200 => <<"Test connector OK">>,
|
200 => <<"Test connector OK">>,
|
||||||
400 => error_schema('TEST_FAILED', "connector test failed")
|
400 => error_schema('TEST_FAILED', "connector test failed")
|
||||||
|
@ -92,17 +148,19 @@ schema("/connectors") ->
|
||||||
description => <<"List all connectors">>,
|
description => <<"List all connectors">>,
|
||||||
summary => <<"List connectors">>,
|
summary => <<"List connectors">>,
|
||||||
responses => #{
|
responses => #{
|
||||||
200 => mk(array(connector_info()), #{desc => "List of connectors"})
|
200 => emqx_dashboard_swagger:schema_with_example(
|
||||||
|
array(emqx_connector_schema:get_response()),
|
||||||
|
connector_info_array_example(get))
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
post => #{
|
post => #{
|
||||||
tags => [<<"connectors">>],
|
tags => [<<"connectors">>],
|
||||||
description => <<"Create a new connector by given Id <br>"
|
description => <<"Create a new connector by given Id <br>"
|
||||||
"The Id must be of format <type>:<name>">>,
|
"The ID must be of format '{type}:{name}'">>,
|
||||||
summary => <<"Create connector">>,
|
summary => <<"Create connector">>,
|
||||||
requestBody => connector_info(),
|
requestBody => post_request_body_schema(),
|
||||||
responses => #{
|
responses => #{
|
||||||
201 => connector_info(),
|
201 => get_response_body_schema(),
|
||||||
400 => error_schema('ALREADY_EXISTS', "connector already exists")
|
400 => error_schema('ALREADY_EXISTS', "connector already exists")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -117,7 +175,7 @@ schema("/connectors/:id") ->
|
||||||
summary => <<"Get connector">>,
|
summary => <<"Get connector">>,
|
||||||
parameters => param_path_id(),
|
parameters => param_path_id(),
|
||||||
responses => #{
|
responses => #{
|
||||||
200 => connector_info(),
|
200 => get_response_body_schema(),
|
||||||
404 => error_schema('NOT_FOUND', "Connector not found")
|
404 => error_schema('NOT_FOUND', "Connector not found")
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
@ -126,9 +184,9 @@ schema("/connectors/:id") ->
|
||||||
description => <<"Update an existing connector by Id">>,
|
description => <<"Update an existing connector by Id">>,
|
||||||
summary => <<"Update connector">>,
|
summary => <<"Update connector">>,
|
||||||
parameters => param_path_id(),
|
parameters => param_path_id(),
|
||||||
requestBody => connector_req(),
|
requestBody => put_request_body_schema(),
|
||||||
responses => #{
|
responses => #{
|
||||||
200 => <<"Update connector successfully">>,
|
200 => get_response_body_schema(),
|
||||||
400 => error_schema('UPDATE_FAIL', "Update failed"),
|
400 => error_schema('UPDATE_FAIL', "Update failed"),
|
||||||
404 => error_schema('NOT_FOUND', "Connector not found")
|
404 => error_schema('NOT_FOUND', "Connector not found")
|
||||||
}},
|
}},
|
||||||
|
@ -143,8 +201,8 @@ schema("/connectors/:id") ->
|
||||||
}}
|
}}
|
||||||
}.
|
}.
|
||||||
|
|
||||||
'/connectors_test'(post, #{body := #{<<"bridge_type">> := ConnType} = Params}) ->
|
'/connectors_test'(post, #{body := #{<<"type">> := ConnType} = Params}) ->
|
||||||
case emqx_connector:create_dry_run(ConnType, maps:remove(<<"bridge_type">>, Params)) of
|
case emqx_connector:create_dry_run(ConnType, maps:remove(<<"type">>, Params)) of
|
||||||
ok -> {200};
|
ok -> {200};
|
||||||
{error, Error} ->
|
{error, Error} ->
|
||||||
{400, error_msg('BAD_ARG', Error)}
|
{400, error_msg('BAD_ARG', Error)}
|
||||||
|
@ -153,17 +211,20 @@ schema("/connectors/:id") ->
|
||||||
'/connectors'(get, _Request) ->
|
'/connectors'(get, _Request) ->
|
||||||
{200, emqx_connector:list()};
|
{200, emqx_connector:list()};
|
||||||
|
|
||||||
'/connectors'(post, #{body := #{<<"id">> := Id} = Params}) ->
|
'/connectors'(post, #{body := #{<<"type">> := ConnType} = Params}) ->
|
||||||
?TRY_PARSE_ID(Id,
|
ConnName = maps:get(<<"name">>, Params, emqx_misc:gen_id()),
|
||||||
case emqx_connector:lookup(ConnType, ConnName) of
|
case emqx_connector:lookup(ConnType, ConnName) of
|
||||||
{ok, _} ->
|
{ok, _} ->
|
||||||
{400, error_msg('ALREADY_EXISTS', <<"connector already exists">>)};
|
{400, error_msg('ALREADY_EXISTS', <<"connector already exists">>)};
|
||||||
{error, not_found} ->
|
{error, not_found} ->
|
||||||
case emqx_connector:update(ConnType, ConnName, maps:remove(<<"id">>, Params)) of
|
case emqx_connector:update(ConnType, ConnName,
|
||||||
{ok, #{raw_config := RawConf}} -> {201, RawConf#{<<"id">> => Id}};
|
maps:without([<<"type">>, <<"name">>], Params)) of
|
||||||
{error, Error} -> {400, error_msg('BAD_ARG', Error)}
|
{ok, #{raw_config := RawConf}} ->
|
||||||
end
|
{201, RawConf#{<<"id">> =>
|
||||||
end).
|
emqx_connector:connector_id(ConnType, ConnName)}};
|
||||||
|
{error, Error} -> {400, error_msg('BAD_ARG', Error)}
|
||||||
|
end
|
||||||
|
end.
|
||||||
|
|
||||||
'/connectors/:id'(get, #{bindings := #{id := Id}}) ->
|
'/connectors/:id'(get, #{bindings := #{id := Id}}) ->
|
||||||
?TRY_PARSE_ID(Id,
|
?TRY_PARSE_ID(Id,
|
||||||
|
@ -200,4 +261,7 @@ schema("/connectors/:id") ->
|
||||||
error_msg(Code, Msg) when is_binary(Msg) ->
|
error_msg(Code, Msg) when is_binary(Msg) ->
|
||||||
#{code => Code, message => Msg};
|
#{code => Code, message => Msg};
|
||||||
error_msg(Code, Msg) ->
|
error_msg(Code, Msg) ->
|
||||||
#{code => Code, message => list_to_binary(io_lib:format("~p", [Msg]))}.
|
#{code => Code, message => bin(io_lib:format("~p", [Msg]))}.
|
||||||
|
|
||||||
|
bin(S) when is_list(S) ->
|
||||||
|
list_to_binary(S).
|
||||||
|
|
|
@ -118,7 +118,7 @@ on_start(InstId, Config = #{mongo_type := Type,
|
||||||
false -> [{ssl, false}]
|
false -> [{ssl, false}]
|
||||||
end,
|
end,
|
||||||
Topology = maps:get(topology, NConfig, #{}),
|
Topology = maps:get(topology, NConfig, #{}),
|
||||||
Opts = [{type, init_type(NConfig)},
|
Opts = [{mongo_type, init_type(NConfig)},
|
||||||
{hosts, Hosts},
|
{hosts, Hosts},
|
||||||
{pool_size, PoolSize},
|
{pool_size, PoolSize},
|
||||||
{options, init_topology_options(maps:to_list(Topology), [])},
|
{options, init_topology_options(maps:to_list(Topology), [])},
|
||||||
|
@ -187,6 +187,7 @@ connect(Opts) ->
|
||||||
WorkerOptions = proplists:get_value(worker_options, Opts, []),
|
WorkerOptions = proplists:get_value(worker_options, Opts, []),
|
||||||
mongo_api:connect(Type, Hosts, Options, WorkerOptions).
|
mongo_api:connect(Type, Hosts, Options, WorkerOptions).
|
||||||
|
|
||||||
|
|
||||||
mongo_query(Conn, find, Collection, Selector, Projector) ->
|
mongo_query(Conn, find, Collection, Selector, Projector) ->
|
||||||
mongo_api:find(Conn, Collection, Selector, Projector);
|
mongo_api:find(Conn, Collection, Selector, Projector);
|
||||||
|
|
||||||
|
@ -268,7 +269,7 @@ srv_record(_) -> undefined.
|
||||||
parse_servers(Type, Servers) when is_binary(Servers) ->
|
parse_servers(Type, Servers) when is_binary(Servers) ->
|
||||||
parse_servers(Type, binary_to_list(Servers));
|
parse_servers(Type, binary_to_list(Servers));
|
||||||
parse_servers(Type, Servers) when is_list(Servers) ->
|
parse_servers(Type, Servers) when is_list(Servers) ->
|
||||||
case string:split(Servers, ",", trailing) of
|
case string:split(Servers, ",", all) of
|
||||||
[Host | _] when Type =:= single ->
|
[Host | _] when Type =:= single ->
|
||||||
[Host];
|
[Host];
|
||||||
Hosts ->
|
Hosts ->
|
||||||

@@ -40,6 +40,8 @@

 -behaviour(hocon_schema).

+-import(hoconsc, [mk/2]).

 -export([ roots/0
 , fields/1]).

@@ -49,7 +51,25 @@ roots() ->
 fields("config").

 fields("config") ->
-emqx_connector_mqtt_schema:fields("config").
+emqx_connector_mqtt_schema:fields("config");

+fields("get") ->
+[{id, mk(binary(),
+#{ desc => "The connector Id"
+, example => <<"mqtt:my_mqtt_connector">>
+})}]
+++ fields("post");

+fields("put") ->
+emqx_connector_mqtt_schema:fields("connector");

+fields("post") ->
+[ {type, mk(mqtt, #{desc => "The Connector Type"})}
+, {name, mk(binary(),
+#{ desc => "The Connector Name"
+, example => <<"my_mqtt_connector">>
+})}
+] ++ fields("put").

 %% ===================================================================
 %% supervisor APIs
@@ -100,7 +120,7 @@ on_start(InstId, Conf) ->
 BasicConf = basic_config(Conf),
 BridgeConf = BasicConf#{
 name => InstanceId,
-clientid => clientid(InstanceId),
+clientid => clientid(maps:get(clientid, Conf, InstId)),
 subscriptions => make_sub_confs(maps:get(ingress, Conf, undefined)),
 forwards => make_forward_confs(maps:get(egress, Conf, undefined))
 },
@@ -162,7 +182,6 @@ basic_config(#{
 server := Server,
 reconnect_interval := ReconnIntv,
 proto_ver := ProtoVer,
-bridge_mode := BridgeMod,
 username := User,
 password := Password,
 clean_start := CleanStart,
@@ -177,7 +196,7 @@ basic_config(#{
 server => Server,
 reconnect_interval => ReconnIntv,
 proto_ver => ProtoVer,
-bridge_mode => BridgeMod,
+bridge_mode => true,
 username => User,
 password => Password,
 clean_start => CleanStart,
@@ -190,4 +209,4 @@ basic_config(#{
 }.

 clientid(Id) ->
-list_to_binary(lists:concat([Id, ":", node()])).
+iolist_to_binary([Id, ":", atom_to_list(node())]).
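For context on the clientid change above: the connector id is suffixed with the local node name via iolist_to_binary/1 rather than lists:concat/1, which also lets the id be a binary without converting it to a list first. A rough shell sketch (the node name is made up for illustration):

1> iolist_to_binary([<<"mqtt:my_mqtt_connector">>, ":", atom_to_list('emqx@127.0.0.1')]).
<<"mqtt:my_mqtt_connector:emqx@127.0.0.1">>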

@@ -4,33 +4,47 @@

 -include_lib("typerefl/include/types.hrl").

+-import(hoconsc, [mk/2, ref/2]).

 -export([roots/0, fields/1]).

+-export([ get_response/0
+, put_request/0
+, post_request/0
+]).

+-define(CONN_TYPES, [mqtt]).

+%%======================================================================================
+%% For HTTP APIs

+get_response() ->
+http_schema("get").

+put_request() ->
+http_schema("put").

+post_request() ->
+http_schema("post").

+http_schema(Method) ->
+Schemas = [ref(schema_mod(Type), Method) || Type <- ?CONN_TYPES],
+hoconsc:union(Schemas).

 %%======================================================================================
 %% Hocon Schema Definitions

 roots() -> ["connectors"].

+fields(connectors) -> fields("connectors");
 fields("connectors") ->
 [ {mqtt,
-sc(hoconsc:map(name,
+mk(hoconsc:map(name,
-hoconsc:union([ ref("mqtt_connector")
+hoconsc:union([ ref(emqx_connector_mqtt_schema, "connector")
 ])),
 #{ desc => "MQTT bridges"
 })}
-];
+].

-fields("mqtt_connector") ->
+schema_mod(Type) ->
-emqx_connector_mqtt_schema:fields("connector");
+list_to_atom(lists:concat(["emqx_connector_", Type])).

-fields("mqtt_connector_info") ->
-[{id, sc(binary(), #{desc => "The connector Id"})}]
-++ fields("mqtt_connector");

-fields("mqtt_connector_test_info") ->
-[{bridge_type, sc(mqtt, #{desc => "The Bridge Type"})}]
-++ fields("mqtt_connector").

-sc(Type, Meta) -> hoconsc:mk(Type, Meta).

-ref(Field) -> hoconsc:ref(?MODULE, Field).

@@ -8,7 +8,7 @@
 %% http://www.apache.org/licenses/LICENSE-2.0
 %%
 %% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
+%% cluster_shareload under the License is cluster_shareload on an "AS IS" BASIS,
 %% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 %% See the License for the specific language governing permissions and
 %% limitations under the License.
@@ -38,7 +38,24 @@ fields("config") ->
 topic_mappings();

 fields("connector") ->
-[ {server,
+[ {mode,
+sc(hoconsc:enum([cluster_singleton, cluster_shareload]),
+#{ default => cluster_shareload
+, desc => """
+The mode of the MQTT Bridge. Can be one of 'cluster_singleton' or 'cluster_shareload'<br>

+- cluster_singleton: create an unique MQTT connection within the emqx cluster.<br>
+In 'cluster_singleton' node, all messages toward the remote broker go through the same
+MQTT connection.<br>
+- cluster_shareload: create an MQTT connection on each node in the emqx cluster.<br>
+In 'cluster_shareload' mode, the incomming load from the remote broker is shared by
+using shared subscription.<br>
+Note that the 'clientid' is suffixed by the node name, this is to avoid
+clientid conflicts between different nodes. And we can only use shared subscription
+topic filters for 'from_remote_topic'.
+"""
+})}
+, {server,
 sc(emqx_schema:ip_port(),
 #{ default => "127.0.0.1:1883"
 , desc => "The host and port of the remote MQTT broker"
@@ -49,11 +66,6 @@ fields("connector") ->
 #{ default => v4
 , desc => "The MQTT protocol version"
 })}
-, {bridge_mode,
-sc(boolean(),
-#{ default => true
-, desc => "The bridge mode of the MQTT protocol"
-})}
 , {username,
 sc(binary(),
 #{ default => "emqx"
@@ -66,8 +78,7 @@ fields("connector") ->
 })}
 , {clientid,
 sc(binary(),
-#{ default => "emqx_${nodename}"
+#{ desc => "The clientid of the MQTT protocol"
-, desc => "The clientid of the MQTT protocol"
 })}
 , {clean_start,
 sc(boolean(),

@@ -24,7 +24,11 @@

 -define(CONF_DEFAULT, <<"connectors: {}">>).
 -define(BRIDGE_CONF_DEFAULT, <<"bridges: {}">>).
+-define(CONNECTR_TYPE, <<"mqtt">>).
+-define(CONNECTR_NAME, <<"test_connector">>).
 -define(CONNECTR_ID, <<"mqtt:test_connector">>).
+-define(BRIDGE_NAME_INGRESS, <<"ingress_test_bridge">>).
+-define(BRIDGE_NAME_EGRESS, <<"egress_test_bridge">>).
 -define(BRIDGE_ID_INGRESS, <<"mqtt:ingress_test_bridge">>).
 -define(BRIDGE_ID_EGRESS, <<"mqtt:egress_test_bridge">>).
 -define(MQTT_CONNECOTR(Username),
@@ -63,8 +67,8 @@

 -define(metrics(MATCH, SUCC, FAILED, SPEED, SPEED5M, SPEEDMAX),
 #{<<"matched">> := MATCH, <<"success">> := SUCC,
-<<"failed">> := FAILED, <<"speed">> := SPEED,
+<<"failed">> := FAILED, <<"rate">> := SPEED,
-<<"speed_last5m">> := SPEED5M, <<"speed_max">> := SPEEDMAX}).
+<<"rate_last5m">> := SPEED5M, <<"rate_max">> := SPEEDMAX}).

 all() ->
 emqx_common_test_helpers:all(?MODULE).
@@ -115,7 +119,9 @@ t_mqtt_crud_apis(_) ->
 %% POST /connectors/ will create a connector
 User1 = <<"user1">>,
 {ok, 201, Connector} = request(post, uri(["connectors"]),
-?MQTT_CONNECOTR(User1)#{<<"id">> => ?CONNECTR_ID}),
+?MQTT_CONNECOTR(User1)#{ <<"type">> => ?CONNECTR_TYPE
+, <<"name">> => ?CONNECTR_NAME
+}),

 %ct:pal("---connector: ~p", [Connector]),
 ?assertMatch(#{ <<"id">> := ?CONNECTR_ID
@@ -128,7 +134,9 @@ t_mqtt_crud_apis(_) ->

 %% create a again returns an error
 {ok, 400, RetMsg} = request(post, uri(["connectors"]),
-?MQTT_CONNECOTR(User1)#{<<"id">> => ?CONNECTR_ID}),
+?MQTT_CONNECOTR(User1)#{ <<"type">> => ?CONNECTR_TYPE
+, <<"name">> => ?CONNECTR_NAME
+}),
 ?assertMatch(
 #{ <<"code">> := _
 , <<"message">> := <<"connector already exists">>
@@ -187,7 +195,9 @@ t_mqtt_conn_bridge_ingress(_) ->
 %% then we add a mqtt connector, using POST
 User1 = <<"user1">>,
 {ok, 201, Connector} = request(post, uri(["connectors"]),
-?MQTT_CONNECOTR(User1)#{<<"id">> => ?CONNECTR_ID}),
+?MQTT_CONNECOTR(User1)#{ <<"type">> => ?CONNECTR_TYPE
+, <<"name">> => ?CONNECTR_NAME
+}),

 %ct:pal("---connector: ~p", [Connector]),
 ?assertMatch(#{ <<"id">> := ?CONNECTR_ID
@@ -201,11 +211,14 @@ t_mqtt_conn_bridge_ingress(_) ->
 %% ... and a MQTT bridge, using POST
 %% we bind this bridge to the connector created just now
 {ok, 201, Bridge} = request(post, uri(["bridges"]),
-?MQTT_BRIDGE_INGRESS(?CONNECTR_ID)#{<<"id">> => ?BRIDGE_ID_INGRESS}),
+?MQTT_BRIDGE_INGRESS(?CONNECTR_ID)#{
+<<"type">> => ?CONNECTR_TYPE,
+<<"name">> => ?BRIDGE_NAME_INGRESS
+}),

 %ct:pal("---bridge: ~p", [Bridge]),
 ?assertMatch(#{ <<"id">> := ?BRIDGE_ID_INGRESS
-, <<"bridge_type">> := <<"mqtt">>
+, <<"type">> := <<"mqtt">>
 , <<"status">> := <<"connected">>
 , <<"connector">> := ?CONNECTR_ID
 }, jsx:decode(Bridge)),
@@ -250,7 +263,9 @@ t_mqtt_conn_bridge_egress(_) ->
 %% then we add a mqtt connector, using POST
 User1 = <<"user1">>,
 {ok, 201, Connector} = request(post, uri(["connectors"]),
-?MQTT_CONNECOTR(User1)#{<<"id">> => ?CONNECTR_ID}),
+?MQTT_CONNECOTR(User1)#{ <<"type">> => ?CONNECTR_TYPE
+, <<"name">> => ?CONNECTR_NAME
+}),

 %ct:pal("---connector: ~p", [Connector]),
 ?assertMatch(#{ <<"id">> := ?CONNECTR_ID
@@ -264,11 +279,15 @@ t_mqtt_conn_bridge_egress(_) ->
 %% ... and a MQTT bridge, using POST
 %% we bind this bridge to the connector created just now
 {ok, 201, Bridge} = request(post, uri(["bridges"]),
-?MQTT_BRIDGE_EGRESS(?CONNECTR_ID)#{<<"id">> => ?BRIDGE_ID_EGRESS}),
+?MQTT_BRIDGE_EGRESS(?CONNECTR_ID)#{
+<<"type">> => ?CONNECTR_TYPE,
+<<"name">> => ?BRIDGE_NAME_EGRESS
+}),

 %ct:pal("---bridge: ~p", [Bridge]),
 ?assertMatch(#{ <<"id">> := ?BRIDGE_ID_EGRESS
-, <<"bridge_type">> := <<"mqtt">>
+, <<"type">> := ?CONNECTR_TYPE
+, <<"name">> := ?BRIDGE_NAME_EGRESS
 , <<"status">> := <<"connected">>
 , <<"connector">> := ?CONNECTR_ID
 }, jsx:decode(Bridge)),
@@ -322,7 +341,10 @@ t_mqtt_conn_update(_) ->

 %% then we add a mqtt connector, using POST
 {ok, 201, Connector} = request(post, uri(["connectors"]),
-?MQTT_CONNECOTR2(<<"127.0.0.1:1883">>)#{<<"id">> => ?CONNECTR_ID}),
+?MQTT_CONNECOTR2(<<"127.0.0.1:1883">>)
+#{ <<"type">> => ?CONNECTR_TYPE
+, <<"name">> => ?CONNECTR_NAME
+}),

 %ct:pal("---connector: ~p", [Connector]),
 ?assertMatch(#{ <<"id">> := ?CONNECTR_ID
@@ -332,9 +354,13 @@ t_mqtt_conn_update(_) ->
 %% ... and a MQTT bridge, using POST
 %% we bind this bridge to the connector created just now
 {ok, 201, Bridge} = request(post, uri(["bridges"]),
-?MQTT_BRIDGE_EGRESS(?CONNECTR_ID)#{<<"id">> => ?BRIDGE_ID_EGRESS}),
+?MQTT_BRIDGE_EGRESS(?CONNECTR_ID)#{
+<<"type">> => ?CONNECTR_TYPE,
+<<"name">> => ?BRIDGE_NAME_EGRESS
+}),
 ?assertMatch(#{ <<"id">> := ?BRIDGE_ID_EGRESS
-, <<"bridge_type">> := <<"mqtt">>
+, <<"type">> := <<"mqtt">>
+, <<"name">> := ?BRIDGE_NAME_EGRESS
 , <<"status">> := <<"connected">>
 , <<"connector">> := ?CONNECTR_ID
 }, jsx:decode(Bridge)),
@@ -358,9 +384,15 @@ t_mqtt_conn_testing(_) ->
 %% APIs for testing the connectivity
 %% then we add a mqtt connector, using POST
 {ok, 200, <<>>} = request(post, uri(["connectors_test"]),
-?MQTT_CONNECOTR2(<<"127.0.0.1:1883">>)#{<<"bridge_type">> => <<"mqtt">>}),
+?MQTT_CONNECOTR2(<<"127.0.0.1:1883">>)#{
+<<"type">> => ?CONNECTR_TYPE,
+<<"name">> => ?BRIDGE_NAME_EGRESS
+}),
 {ok, 400, _} = request(post, uri(["connectors_test"]),
-?MQTT_CONNECOTR2(<<"127.0.0.1:2883">>)#{<<"bridge_type">> => <<"mqtt">>}).
+?MQTT_CONNECOTR2(<<"127.0.0.1:2883">>)#{
+<<"type">> => ?CONNECTR_TYPE,
+<<"name">> => ?BRIDGE_NAME_EGRESS
+}).

 %%--------------------------------------------------------------------
 %% HTTP Request

@@ -182,12 +182,12 @@ check_parameter([{Name, Type} | Spec], Bindings, QueryStr, Module, BindingsAcc,
 Schema = ?INIT_SCHEMA#{roots => [{Name, Type}]},
 case hocon_schema:field_schema(Type, in) of
 path ->
-Option = #{atom_key => true, override_env => false},
+Option = #{atom_key => true},
 NewBindings = hocon_schema:check_plain(Schema, Bindings, Option),
 NewBindingsAcc = maps:merge(BindingsAcc, NewBindings),
 check_parameter(Spec, Bindings, QueryStr, Module, NewBindingsAcc, QueryStrAcc);
 query ->
-Option = #{override_env => false},
+Option = #{},
 NewQueryStr = hocon_schema:check_plain(Schema, QueryStr, Option),
 NewQueryStrAcc = maps:merge(QueryStrAcc, NewQueryStr),
 check_parameter(Spec, Bindings, QueryStr, Module,BindingsAcc, NewQueryStrAcc)
@@ -201,7 +201,7 @@ check_request_body(#{body := Body}, Schema, Module, CheckFun, true) ->
 _ -> Type0
 end,
 NewSchema = ?INIT_SCHEMA#{roots => [{root, Type}]},
-Option = #{override_env => false, nullable => true},
+Option = #{nullable => true},
 #{<<"root">> := NewBody} = CheckFun(NewSchema, #{<<"root">> => Body}, Option),
 NewBody;
 %% TODO not support nest object check yet, please use ref!
@@ -214,7 +214,7 @@ check_request_body(#{body := Body}, Schema, Module, CheckFun, true) ->
 check_request_body(#{body := Body}, Spec, _Module, CheckFun, false) ->
 lists:foldl(fun({Name, Type}, Acc) ->
 Schema = ?INIT_SCHEMA#{roots => [{Name, Type}]},
-maps:merge(Acc, CheckFun(Schema, Body, #{override_env => false}))
+maps:merge(Acc, CheckFun(Schema, Body, #{}))
 end, #{}, Spec).

 %% tags, description, summary, security, deprecated
@@ -337,19 +337,28 @@ components(Refs) ->
 components([], SpecAcc, []) -> SpecAcc;
 components([], SpecAcc, SubRefAcc) -> components(SubRefAcc, SpecAcc, []);
 components([{Module, Field} | Refs], SpecAcc, SubRefsAcc) ->
-Props = apply(Module, fields, [Field]),
+Props = hocon_schema_fields(Module, Field),
 Namespace = namespace(Module),
 {Object, SubRefs} = parse_object(Props, Module),
 NewSpecAcc = SpecAcc#{?TO_REF(Namespace, Field) => Object},
 components(Refs, NewSpecAcc, SubRefs ++ SubRefsAcc);
 %% parameters in ref only have one value, not array
 components([{Module, Field, parameter} | Refs], SpecAcc, SubRefsAcc) ->
-Props = apply(Module, fields, [Field]),
+Props = hocon_schema_fields(Module, Field),
 {[Param], SubRefs} = parameters(Props, Module),
 Namespace = namespace(Module),
 NewSpecAcc = SpecAcc#{?TO_REF(Namespace, Field) => Param},
 components(Refs, NewSpecAcc, SubRefs ++ SubRefsAcc).

+hocon_schema_fields(Module, StructName) ->
+case apply(Module, fields, [StructName]) of
+#{fields := Fields, desc := _} ->
+%% evil here, as it's match hocon_schema's internal representation
+Fields; %% TODO: make use of desc ?
+Other ->
+Other
+end.

 %% Semantic error at components.schemas.xxx:xx:xx
 %% Component names can only contain the characters A-Z a-z 0-9 - . _
 %% So replace ':' by '-'.
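For context on the new hocon_schema_fields/2 helper above: a schema module's fields/1 callback may now return either a plain field list or a map wrapping that list together with a description, and the helper unwraps the latter. A minimal sketch of the two shapes, using a hypothetical module name:

-module(my_hypothetical_schema).
-include_lib("typerefl/include/types.hrl").
-export([fields/1]).

%% plain shape: just the field list
fields(plain) ->
    [{enable, hoconsc:mk(boolean(), #{default => false})}];
%% wrapped shape: field list plus a description, as matched by hocon_schema_fields/2
fields(wrapped) ->
    #{fields => fields(plain), desc => <<"fields plus a description">>}.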

@@ -7,7 +7,7 @@
 -export([paths/0, api_spec/0, schema/1, fields/1]).
 -export([t_object/1, t_nest_object/1, t_api_spec/1,
 t_local_ref/1, t_remote_ref/1, t_bad_ref/1, t_none_ref/1, t_nest_ref/1,
-t_ref_array_with_key/1, t_ref_array_without_key/1
+t_ref_array_with_key/1, t_ref_array_without_key/1, t_sub_fields/1
 ]).
 -export([
 t_object_trans/1, t_object_notrans/1, t_nest_object_trans/1, t_local_ref_trans/1,
@@ -154,6 +154,17 @@ t_none_ref(_Config) ->
 emqx_dashboard_swagger:parse_spec_ref(?MODULE, Path)),
 ok.

+t_sub_fields(_Config) ->
+Spec = #{
+post => #{parameters => [],
+requestBody => #{<<"content">> => #{<<"application/json">> =>
+#{<<"schema">> => #{<<"$ref">> =>
+<<"#/components/schemas/emqx_swagger_requestBody_SUITE.sub_fields">>}}}},
+responses => #{<<"200">> => #{description => <<"ok">>}}}},
+Refs = [{?MODULE, sub_fields}],
+validate("/fields/sub", Spec, Refs),
+ok.

 t_bad_ref(_Config) ->
 Path = "/ref/bad",
 Spec = #{
@@ -483,7 +494,7 @@ trans_requestBody(Path, Body, Filter) ->

 api_spec() -> emqx_dashboard_swagger:spec(?MODULE).
 paths() ->
-["/object", "/nest/object", "/ref/local", "/ref/nest/ref",
+["/object", "/nest/object", "/ref/local", "/ref/nest/ref", "/fields/sub",
 "/ref/array/with/key", "/ref/array/without/key"].

 schema("/object") ->
@@ -506,6 +517,8 @@ schema("/nest/object") ->
 ]);
 schema("/ref/local") ->
 to_schema(mk(hoconsc:ref(good_ref), #{}));
+schema("/fields/sub") ->
+to_schema(mk(hoconsc:ref(sub_fields), #{}));
 schema("/ref/remote") ->
 to_schema(mk(hoconsc:ref(emqx_swagger_remote_schema, "ref2"), #{}));
 schema("/ref/bad") ->
@@ -544,4 +557,20 @@ fields(bad_ref) -> %% don't support maps
 #{
 username => mk(string(), #{}),
 is_admin => mk(boolean(), #{})
-}.
+};
+fields(sub_fields) ->
+#{fields => [
+{enable, fun enable/1},
+{init_file, fun init_file/1}
+],
+desc => <<"test sub fields">>}.

+enable(type) -> boolean();
+enable(desc) -> <<"Whether to enable tls psk support">>;
+enable(default) -> false;
+enable(_) -> undefined.

+init_file(type) -> binary();
+init_file(desc) -> <<"test test desc">>;
+init_file(nullable) -> true;
+init_file(_) -> undefined.

@@ -14,7 +14,7 @@
 -export([paths/0, api_spec/0, schema/1, fields/1]).
 -export([t_simple_binary/1, t_object/1, t_nest_object/1, t_empty/1, t_error/1,
 t_raw_local_ref/1, t_raw_remote_ref/1, t_hocon_schema_function/1, t_complicated_type/1,
-t_local_ref/1, t_remote_ref/1, t_bad_ref/1, t_none_ref/1, t_nest_ref/1,
+t_local_ref/1, t_remote_ref/1, t_bad_ref/1, t_none_ref/1, t_nest_ref/1, t_sub_fields/1,
 t_ref_array_with_key/1, t_ref_array_without_key/1, t_api_spec/1]).

 all() -> [{group, spec}].
@@ -23,7 +23,7 @@ groups() -> [
 {spec, [parallel], [
 t_api_spec, t_simple_binary, t_object, t_nest_object, t_error, t_complicated_type,
 t_raw_local_ref, t_raw_remote_ref, t_empty, t_hocon_schema_function,
-t_local_ref, t_remote_ref, t_bad_ref, t_none_ref,
+t_local_ref, t_remote_ref, t_bad_ref, t_none_ref, t_sub_fields,
 t_ref_array_with_key, t_ref_array_without_key, t_nest_ref]}
 ].

@@ -163,6 +163,14 @@ t_nest_ref(_Config) ->
 validate(Path, Object, ExpectRefs),
 ok.

+t_sub_fields(_Config) ->
+Path = "/fields/sub",
+Object = #{<<"content">> => #{<<"application/json">> => #{<<"schema">> => #{
+<<"$ref">> => <<"#/components/schemas/emqx_swagger_response_SUITE.sub_fields">>}}}},
+ExpectRefs = [{?MODULE, sub_fields}],
+validate(Path, Object, ExpectRefs),
+ok.

 t_complicated_type(_Config) ->
 Path = "/ref/complicated_type",
 Object = #{<<"content">> => #{<<"application/json">> =>
@@ -366,7 +374,9 @@ schema("/ref/complicated_type") ->
 {fix_integer, hoconsc:mk(typerefl:integer(100), #{})}
 ]
 }}
-}.
+};
+schema("/fields/sub") ->
+to_schema(hoconsc:ref(sub_fields)).

 validate(Path, ExpectObject, ExpectRefs) ->
 {OperationId, Spec, Refs} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, Path),
@@ -400,4 +410,20 @@ fields(bad_ref) -> %% don't support maps
 #{
 username => mk(string(), #{}),
 is_admin => mk(boolean(), #{})
-}.
+};
+fields(sub_fields) ->
+#{fields => [
+{enable, fun enable/1},
+{init_file, fun init_file/1}
+],
+desc => <<"test sub fields">>}.

+enable(type) -> boolean();
+enable(desc) -> <<"Whether to enable tls psk support">>;
+enable(default) -> false;
+enable(_) -> undefined.

+init_file(type) -> binary();
+init_file(desc) -> <<"test test desc">>;
+init_file(nullable) -> true;
+init_file(_) -> undefined.

@@ -18,9 +18,6 @@

 -behaviour(emqx_gateway_channel).

--include_lib("emqx/include/logger.hrl").
--include("emqx_coap.hrl").

 %% API
 -export([ info/1
 , info/2
@@ -44,6 +41,12 @@

 -export_type([channel/0]).

+-include_lib("emqx/include/logger.hrl").
+-include_lib("emqx_gateway/src/coap/include/emqx_coap.hrl").
+-include_lib("emqx/include/emqx_authentication.hrl").

+-define(AUTHN, ?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_ATOM).

 -record(channel, {
 %% Context
 ctx :: emqx_gateway_ctx:context(),
@@ -98,10 +101,10 @@ info(ctx, #channel{ctx = Ctx}) ->
 stats(_) ->
 [].

-init(ConnInfo = #{peername := {PeerHost, _},
+init(ConnInfoT = #{peername := {PeerHost, _},
 sockname := {_, SockPort}},
 #{ctx := Ctx} = Config) ->
-Peercert = maps:get(peercert, ConnInfo, undefined),
+Peercert = maps:get(peercert, ConnInfoT, undefined),
 Mountpoint = maps:get(mountpoint, Config, <<>>),
 ListenerId = case maps:get(listener, Config, undefined) of
 undefined -> undefined;
@@ -123,6 +126,10 @@ init(ConnInfo = #{peername := {PeerHost, _},
 }
 ),

+%% because it is possible to disconnect after init, and then trigger the $event.disconnected hook
+%% and these two fields are required in the hook
+ConnInfo = ConnInfoT#{proto_name => <<"CoAP">>, proto_ver => <<"1">>},

 Heartbeat = ?GET_IDLE_TIME(Config),
 #channel{ ctx = Ctx
 , conninfo = ConnInfo
@@ -279,7 +286,7 @@ try_takeover(idle, DesireId, Msg, Channel) ->
 %% udp connection baseon the clientid
 call_session(handle_request, Msg, Channel);
 _ ->
-case emqx_conf:get([gateway, coap, authentication], undefined) of
+case emqx_conf:get([gateway, coap, ?AUTHN], undefined) of
 undefined ->
 call_session(handle_request, Msg, Channel);
 _ ->
@@ -349,8 +356,6 @@ ensure_connected(Channel = #channel{ctx = Ctx,
 conninfo = ConnInfo,
 clientinfo = ClientInfo}) ->
 NConnInfo = ConnInfo#{ connected_at => erlang:system_time(millisecond)
-, proto_name => <<"COAP">>
-, proto_ver => <<"1">>
 },
 ok = run_hooks(Ctx, 'client.connected', [ClientInfo, NConnInfo]),
 _ = run_hooks(Ctx, 'client.connack', [NConnInfo, connection_accepted, []]),

@@ -17,6 +17,7 @@
 -module(emqx_gateway_api).

 -include_lib("emqx/include/emqx_placeholder.hrl").
+-include_lib("emqx/include/emqx_authentication.hrl").

 -behaviour(minirest_api).

@@ -243,7 +244,7 @@ schema_gateway_overview_list() ->
 %%
 %% NOTE: It is a temporary measure to generate swagger-schema
 -define(COAP_GATEWAY_CONFS,
-#{<<"authentication">> =>
+#{?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_BINARY =>
 #{<<"mechanism">> => <<"password-based">>,
 <<"name">> => <<"authenticator1">>,
 <<"server_type">> => <<"built-in-database">>,
@@ -331,7 +332,7 @@ schema_gateway_overview_list() ->
 ).

 -define(STOMP_GATEWAY_CONFS,
-#{<<"authentication">> =>
+#{?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_BINARY =>
 #{<<"mechanism">> => <<"password-based">>,
 <<"name">> => <<"authenticator1">>,
 <<"server_type">> => <<"built-in-database">>,

@@ -17,8 +17,6 @@
 %% @doc The gateway configuration management module
 -module(emqx_gateway_conf).

--include_lib("emqx/include/logger.hrl").

 %% Load/Unload
 -export([ load/0
 , unload/0
@@ -56,6 +54,10 @@
 , post_config_update/5
 ]).

+-include_lib("emqx/include/logger.hrl").
+-include_lib("emqx/include/emqx_authentication.hrl").
+-define(AUTHN_BIN, ?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_BINARY).

 -type atom_or_bin() :: atom() | binary().
 -type ok_or_err() :: ok_or_err().
 -type listener_ref() :: {ListenerType :: atom_or_bin(),
@@ -106,8 +108,9 @@ maps_key_take([K | Ks], M, Acc) ->

 -spec update_gateway(atom_or_bin(), map()) -> ok_or_err().
 update_gateway(GwName, Conf0) ->
-Conf = maps:without([listeners, authentication,
+Exclude0 = [listeners, ?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_ATOM],
-<<"listeners">>, <<"authentication">>], Conf0),
+Exclude1 = [atom_to_binary(K, utf8) || K <- Exclude0],
+Conf = maps:without(Exclude0 ++ Exclude1, Conf0),
 update({?FUNCTION_NAME, bin(GwName), Conf}).

 %% FIXME: delete cert files ??
@@ -232,7 +235,7 @@ update(Req) ->
 res(emqx_conf:update([gateway], Req, #{override_to => cluster})).

 res({ok, _Result}) -> ok;
-res({error, {error, {pre_config_update,emqx_gateway_conf,Reason}}}) -> {error, Reason};
+res({error, {pre_config_update, emqx_gateway_conf, Reason}}) -> {error, Reason};
 res({error, Reason}) -> {error, Reason}.

 bin({LType, LName}) ->
@@ -263,8 +266,7 @@ pre_config_update(_, {update_gateway, GwName, Conf}, RawConf) ->
 undefined ->
 {error, not_found};
 _ ->
-NConf = maps:without([<<"listeners">>,
+NConf = maps:without([<<"listeners">>, ?AUTHN_BIN], Conf),
-<<"authentication">>], Conf),
 {ok, emqx_map_lib:deep_merge(RawConf, #{GwName => NConf})}
 end;
 pre_config_update(_, {unload_gateway, GwName}, RawConf) ->
@@ -311,11 +313,11 @@ pre_config_update(_, {remove_listener, GwName, {LType, LName}}, RawConf) ->

 pre_config_update(_, {add_authn, GwName, Conf}, RawConf) ->
 case emqx_map_lib:deep_get(
-[GwName, <<"authentication">>], RawConf, undefined) of
+[GwName, ?AUTHN_BIN], RawConf, undefined) of
 undefined ->
 {ok, emqx_map_lib:deep_merge(
 RawConf,
-#{GwName => #{<<"authentication">> => Conf}})};
+#{GwName => #{?AUTHN_BIN => Conf}})};
 _ ->
 {error, already_exist}
 end;
@@ -326,9 +328,9 @@ pre_config_update(_, {add_authn, GwName, {LType, LName}, Conf}, RawConf) ->
 undefined ->
 {error, not_found};
 Listener ->
-case maps:get(<<"authentication">>, Listener, undefined) of
+case maps:get(?AUTHN_BIN, Listener, undefined) of
 undefined ->
-NListener = maps:put(<<"authentication">>, Conf, Listener),
+NListener = maps:put(?AUTHN_BIN, Conf, Listener),
 NGateway = #{GwName =>
 #{<<"listeners">> =>
 #{LType => #{LName => NListener}}}},
@@ -339,13 +341,13 @@ pre_config_update(_, {add_authn, GwName, {LType, LName}, Conf}, RawConf) ->
 end;
 pre_config_update(_, {update_authn, GwName, Conf}, RawConf) ->
 case emqx_map_lib:deep_get(
-[GwName, <<"authentication">>], RawConf, undefined) of
+[GwName, ?AUTHN_BIN], RawConf, undefined) of
 undefined ->
 {error, not_found};
 _ ->
 {ok, emqx_map_lib:deep_merge(
 RawConf,
-#{GwName => #{<<"authentication">> => Conf}})}
+#{GwName => #{?AUTHN_BIN => Conf}})}
 end;
 pre_config_update(_, {update_authn, GwName, {LType, LName}, Conf}, RawConf) ->
 case emqx_map_lib:deep_get(
@@ -354,12 +356,12 @@ pre_config_update(_, {update_authn, GwName, {LType, LName}, Conf}, RawConf) ->
 undefined ->
 {error, not_found};
 Listener ->
-case maps:get(<<"authentication">>, Listener, undefined) of
+case maps:get(?AUTHN_BIN, Listener, undefined) of
 undefined ->
 {error, not_found};
 Auth ->
 NListener = maps:put(
-<<"authentication">>,
+?AUTHN_BIN,
 emqx_map_lib:deep_merge(Auth, Conf),
 Listener
 ),
@@ -371,9 +373,9 @@ pre_config_update(_, {update_authn, GwName, {LType, LName}, Conf}, RawConf) ->
 end;
 pre_config_update(_, {remove_authn, GwName}, RawConf) ->
 {ok, emqx_map_lib:deep_remove(
-[GwName, <<"authentication">>], RawConf)};
+[GwName, ?AUTHN_BIN], RawConf)};
 pre_config_update(_, {remove_authn, GwName, {LType, LName}}, RawConf) ->
-Path = [GwName, <<"listeners">>, LType, LName, <<"authentication">>],
+Path = [GwName, <<"listeners">>, LType, LName, ?AUTHN_BIN],
 {ok, emqx_map_lib:deep_remove(Path, RawConf)};

 pre_config_update(_, UnknownReq, _RawConf) ->

@@ -19,6 +19,9 @@

 -include("include/emqx_gateway.hrl").
 -include_lib("emqx/include/logger.hrl").
+-include_lib("emqx/include/emqx_authentication.hrl").

+-define(AUTHN, ?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_ATOM).

 %% Mgmt APIs - gateway
 -export([ gateways/1
@@ -166,7 +169,7 @@ remove_listener(ListenerId) ->
 -spec authn(gateway_name()) -> map().
 authn(GwName) ->
 %% XXX: Need append chain-nanme, authenticator-id?
-Path = [gateway, GwName, authentication],
+Path = [gateway, GwName, ?AUTHN],
 ChainName = emqx_gateway_utils:global_chain(GwName),
 wrap_chain_name(
 ChainName,
@@ -176,7 +179,7 @@ authn(GwName) ->
 -spec authn(gateway_name(), binary()) -> map().
 authn(GwName, ListenerId) ->
 {_, Type, Name} = emqx_gateway_utils:parse_listener_id(ListenerId),
-Path = [gateway, GwName, listeners, Type, Name, authentication],
+Path = [gateway, GwName, listeners, Type, Name, ?AUTHN],
 ChainName = emqx_gateway_utils:listener_chain(GwName, Type, Name),
 wrap_chain_name(
 ChainName,

@@ -24,6 +24,7 @@
 -dialyzer(no_unused).
 -dialyzer(no_fail_call).

+-include_lib("emqx/include/emqx_authentication.hrl").
 -include_lib("typerefl/include/types.hrl").

 -type ip_port() :: tuple().
@@ -144,7 +145,7 @@ The client just sends its PUBLISH messages to a GW"
 , desc =>
 "The Pre-defined topic ids and topic names.<br>
 A 'pre-defined' topic id is a topic id whose mapping to a topic name
-is known in advance by both the client’s application and the gateway"
+is known in advance by both the client's application and the gateway"
 })}
 , {listeners, sc(ref(udp_listeners))}
 ] ++ gateway_common_options();
@@ -407,30 +408,14 @@ fields(dtls_opts) ->
 , ciphers => dtls_all_available
 }, false).

-authentication() ->
+authentication_schema() ->
-sc(hoconsc:union(
+sc(emqx_authn_schema:authenticator_type(),
-[ hoconsc:ref(emqx_authn_mnesia, config)
+#{ nullable => {true, recursively}
-, hoconsc:ref(emqx_authn_mysql, config)
+, desc =>
-, hoconsc:ref(emqx_authn_pgsql, config)
-, hoconsc:ref(emqx_authn_mongodb, standalone)
-, hoconsc:ref(emqx_authn_mongodb, 'replica-set')
-, hoconsc:ref(emqx_authn_mongodb, 'sharded-cluster')
-, hoconsc:ref(emqx_authn_redis, standalone)
-, hoconsc:ref(emqx_authn_redis, cluster)
-, hoconsc:ref(emqx_authn_redis, sentinel)
-, hoconsc:ref(emqx_authn_http, get)
-, hoconsc:ref(emqx_authn_http, post)
-, hoconsc:ref(emqx_authn_jwt, 'hmac-based')
-, hoconsc:ref(emqx_authn_jwt, 'public-key')
-, hoconsc:ref(emqx_authn_jwt, 'jwks')
-, hoconsc:ref(emqx_enhanced_authn_scram_mnesia, config)
-]),
-#{ nullable => {true, recursively}
-, desc =>
 """Default authentication configs for all of the gateway listeners.<br>
 For per-listener overrides see <code>authentication</code>
 in listener configs"""
 }).

 gateway_common_options() ->
 [ {enable,
@@ -464,7 +449,7 @@ it has two purposes:
 sc(ref(clientinfo_override),
 #{ desc => ""
 })}
-, {authentication, authentication()}
+, {?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME, authentication_schema()}
 ].

 common_listener_opts() ->
@@ -483,7 +468,7 @@ common_listener_opts() ->
 sc(integer(),
 #{ default => 1000
 })}
-, {authentication, authentication()}
+, {?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME, authentication_schema()}
 , {mountpoint,
 sc(binary(),
 #{ default => undefined

@@ -93,10 +93,10 @@ info(ctx, #channel{ctx = Ctx}) ->
 stats(_) ->
 [].

-init(ConnInfo = #{peername := {PeerHost, _},
+init(ConnInfoT = #{peername := {PeerHost, _},
 sockname := {_, SockPort}},
 #{ctx := Ctx} = Config) ->
-Peercert = maps:get(peercert, ConnInfo, undefined),
+Peercert = maps:get(peercert, ConnInfoT, undefined),
 Mountpoint = maps:get(mountpoint, Config, undefined),
 ListenerId = case maps:get(listener, Config, undefined) of
 undefined -> undefined;
@@ -118,18 +118,20 @@ init(ConnInfo = #{peername := {PeerHost, _},
 }
 ),

+ConnInfo = ConnInfoT#{proto_name => <<"LwM2M">>, proto_ver => <<"0.0">>},

 #channel{ ctx = Ctx
 , conninfo = ConnInfo
 , clientinfo = ClientInfo
 , timers = #{}
 , session = emqx_lwm2m_session:new()
 %% FIXME: don't store anonymouse func
 , with_context = with_context(Ctx, ClientInfo)
 }.

 with_context(Ctx, ClientInfo) ->
 fun(Type, Topic) ->
 with_context(Type, Topic, Ctx, ClientInfo)
 end.

 lookup_cmd(Channel, Path, Action) ->
@@ -293,7 +295,6 @@ check_lwm2m_version(#coap_message{options = Opts},
 end,
 if IsValid ->
 NConnInfo = ConnInfo#{ connected_at => erlang:system_time(millisecond)
-, proto_name => <<"LwM2M">>
 , proto_ver => Ver
 },
 {ok, Channel#channel{conninfo = NConnInfo}};

@@ -28,6 +28,8 @@

 -include_lib("eunit/include/eunit.hrl").

+%% this parses to #{}, will not cause config cleanup
+%% so we will need call emqx_config:erase
 -define(CONF_DEFAULT, <<"
 gateway {}
 ">>).
@@ -39,6 +41,7 @@ gateway {}
 all() -> emqx_common_test_helpers:all(?MODULE).

 init_per_suite(Conf) ->
+emqx_config:erase(gateway),
 emqx_config:init_load(emqx_gateway_schema, ?CONF_DEFAULT),
 emqx_mgmt_api_test_util:init_suite([emqx_conf, emqx_authn, emqx_gateway]),
 Conf.