commit e97b69b047

Merge branch 'master' into doc/william/github-issue-flow
@@ -91,6 +91,12 @@ emqx_test(){
        ;;
    "rpm")
        packagename=$(basename "${PACKAGE_PATH}/${EMQX_NAME}"-*.rpm)

        if [[ "${ARCH}" == "amd64" && $(rpm -E '%{rhel}') == 7 ]] ;
        then
            # EMQX OTP requires openssl11 to have TLS1.3 support
            yum install -y openssl11;
        fi
        rpm -ivh "${PACKAGE_PATH}/${packagename}"
        if ! rpm -q emqx | grep -q emqx; then
            echo "package install error"
@@ -126,7 +132,7 @@ export EMQX_LOG__FILE_HANDLERS__DEFAULT__LEVEL=debug
EOF
    ## for ARM, due to CI env issue, skip start of quic listener for the moment
    [[ $(arch) == *arm* || $(arch) == aarch64 ]] && tee -a "$emqx_env_vars" <<EOF
- export EMQX_ZONES__DEFAULT__LISTENERS__MQTT_QUIC__ENABLED=false
+ export EMQX_LISTENERS__QUIC__DEFAULT__ENABLED=false
EOF
    else
        echo "Error: cannot locate emqx_vars"
@@ -0,0 +1,105 @@
name: API Test Suite

on:
  push:
    tags:
      - e*
      - v*
  pull_request:

jobs:
  build:
    runs-on: ubuntu-latest
    container: "emqx/build-env:erl23.2.7.2-emqx-2-ubuntu20.04"
    steps:
      - uses: actions/checkout@v2
      - name: zip emqx-broker
        if: endsWith(github.repository, 'emqx')
        run: |
          make emqx-zip
      - name: zip emqx-broker
        if: endsWith(github.repository, 'enterprise')
        run: |
          echo "https://ci%40emqx.io:${{ secrets.CI_GIT_TOKEN }}@github.com" > $HOME/.git-credentials
          git config --global credential.helper store
          make emqx-ee-zip
      - uses: actions/upload-artifact@v2
        with:
          name: emqx-broker
          path: _packages/**/*.zip
  api-test:
    needs: build
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        script_name:
          - api_metrics
          - api_subscriptions
          - api_clients
          - api_routes
          - api_publish
    steps:
      - uses: actions/checkout@v2
        with:
          repository: emqx/emqx-fvt
          path: .
      - uses: actions/setup-java@v1
        with:
          java-version: '8.0.282' # The JDK version to make available on the path.
          java-package: jdk # (jre, jdk, or jdk+fx) - defaults to jdk
          architecture: x64 # (x64 or x86) - defaults to x64
      - uses: actions/download-artifact@v2
        with:
          name: emqx-broker
          path: .
      - name: start emqx-broker
        env:
          EMQX_LISTENERS__WSS__DEFAULT__BIND: "0.0.0.0:8085"
        run: |
          unzip ./emqx/*.zip
          ./emqx/bin/emqx start
      - name: install jmeter
        timeout-minutes: 10
        env:
          JMETER_VERSION: 5.3
        run: |
          wget --no-verbose --no-check-certificate -O /tmp/apache-jmeter.tgz https://downloads.apache.org/jmeter/binaries/apache-jmeter-$JMETER_VERSION.tgz
          cd /tmp && tar -xvf apache-jmeter.tgz
          echo "jmeter.save.saveservice.output_format=xml" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties
          echo "jmeter.save.saveservice.response_data.on_error=true" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties
          wget --no-verbose -O /tmp/apache-jmeter-$JMETER_VERSION/lib/ext/mqtt-xmeter-2.0.2-jar-with-dependencies.jar https://raw.githubusercontent.com/xmeter-net/mqtt-jmeter/master/Download/v2.0.2/mqtt-xmeter-2.0.2-jar-with-dependencies.jar
          ln -s /tmp/apache-jmeter-$JMETER_VERSION /opt/jmeter
      - name: run ${{ matrix.script_name }}
        run: |
          /opt/jmeter/bin/jmeter.sh \
            -Jjmeter.save.saveservice.output_format=xml -n \
            -t .ci/api-test-suite/${{ matrix.script_name }}.jmx \
            -Demqx_ip="127.0.0.1" \
            -l jmeter_logs/${{ matrix.script_name }}.jtl \
            -j jmeter_logs/logs/${{ matrix.script_name }}.log
      - name: check test logs
        run: |
          if cat jmeter_logs/${{ matrix.script_name }}.jtl | grep -e '<failure>true</failure>' > /dev/null 2>&1; then
            grep -A 5 -B 3 '<failure>true</failure>' jmeter_logs/${{ matrix.script_name }}.jtl > jmeter_logs/${{ matrix.script_name }}_err_api.txt
            echo "check logs failed"
            exit 1
          fi
      - uses: actions/upload-artifact@v1
        if: failure()
        with:
          name: jmeter_logs
          path: ./jmeter_logs
      - uses: actions/upload-artifact@v1
        if: failure()
        with:
          name: jmeter_logs
          path: emqx/log
  delete-package:
    runs-on: ubuntu-20.04
    needs: api-test
    if: always()
    steps:
      - uses: geekyeggo/delete-artifact@v1
        with:
          name: emqx-broker
Makefile

@@ -5,7 +5,7 @@ BUILD = $(CURDIR)/build
SCRIPTS = $(CURDIR)/scripts
export PKG_VSN ?= $(shell $(CURDIR)/pkg-vsn.sh)
export EMQX_DESC ?= EMQ X
- export EMQX_DASHBOARD_VERSION ?= v5.0.0-beta.9
+ export EMQX_DASHBOARD_VERSION ?= v5.0.0-beta.13
ifeq ($(OS),Windows_NT)
    export REBAR_COLOR=none
endif
@@ -4,7 +4,7 @@
[](https://travis-ci.org/emqx/emqx)
[](https://coveralls.io/github/emqx/emqx)
[](https://hub.docker.com/r/emqx/emqx)
- [](https://slack-invite.emqx.io)
+ [](https://slack-invite.emqx.io/)
[](https://twitter.com/EMQTech)
[](https://askemq.com)
[](https://www.youtube.com/channel/UCir_r04HIsLjf2qqyZ4A8Cg)

@@ -90,7 +90,7 @@ make eunit ct
### Run common tests for selected applications

```bash
- make apps/emqx_bridge_mqtt-ct
+ make apps/emqx_retainer-ct
```

### Static analysis (Dialyzer)
@@ -4,7 +4,7 @@
[](https://travis-ci.org/emqx/emqx)
[](https://coveralls.io/github/emqx/emqx)
[](https://hub.docker.com/r/emqx/emqx)
- [](https://slack-invite.emqx.io)
+ [](https://slack-invite.emqx.io/)
[](https://twitter.com/EMQTech)
[](https://www.youtube.com/channel/UC5FjR77ErAxvZENEWzQaO5Q)

@@ -84,7 +84,7 @@ make eunit ct
### Run a subset of the common tests

```bash
- make apps/emqx_bridge_mqtt-ct
+ make apps/emqx_retainer-ct
```

### Dialyzer
@@ -4,7 +4,7 @@
[](https://travis-ci.org/emqx/emqx)
[](https://coveralls.io/github/emqx/emqx?branch=master)
[](https://hub.docker.com/r/emqx/emqx)
- [](https://slack-invite.emqx.io)
+ [](https://slack-invite.emqx.io/)
[](https://twitter.com/EMQTech)
[](https://github.com/emqx/emqx/discussions)
[](https://www.youtube.com/channel/UC5FjR77ErAxvZENEWzQaO5Q)

@@ -93,7 +93,7 @@ make eunit ct
Example:

```bash
- make apps/emqx_bridge_mqtt-ct
+ make apps/emqx_retainer-ct
```

### Dialyzer
@@ -4,7 +4,7 @@
[](https://travis-ci.org/emqx/emqx)
[](https://coveralls.io/github/emqx/emqx?branch=master)
[](https://hub.docker.com/r/emqx/emqx)
- [](https://slack-invite.emqx.io)
+ [](https://slack-invite.emqx.io/)
[](https://twitter.com/EMQTech)
[](https://www.youtube.com/channel/UC5FjR77ErAxvZENEWzQaO5Q)

@@ -92,7 +92,7 @@ make eunit ct
Examples

```bash
- make apps/emqx_bridge_mqtt-ct
+ make apps/emqx_retainer-ct
```

### Dialyzer
@@ -64,7 +64,7 @@ listeners.tcp.default {
    proxy_protocol = false

    ## Sets the timeout for proxy protocol. EMQ X will close the TCP connection
-     ## if no proxy protocol packet recevied within the timeout.
+     ## if no proxy protocol packet received within the timeout.
    ##
    ## @doc listeners.tcp.<name>.proxy_protocol_timeout
    ## ValueType: Duration

@@ -163,7 +163,7 @@ listeners.ssl.default {
    proxy_protocol = false

    ## Sets the timeout for proxy protocol. EMQ X will close the TCP connection
-     ## if no proxy protocol packet recevied within the timeout.
+     ## if no proxy protocol packet received within the timeout.
    ##
    ## @doc listeners.ssl.<name>.proxy_protocol_timeout
    ## ValueType: Duration

@@ -345,7 +345,7 @@ listeners.ws.default {
    proxy_protocol = false

    ## Sets the timeout for proxy protocol. EMQ X will close the TCP connection
-     ## if no proxy protocol packet recevied within the timeout.
+     ## if no proxy protocol packet received within the timeout.
    ##
    ## @doc listeners.ws.<name>.proxy_protocol_timeout
    ## ValueType: Duration

@@ -448,7 +448,7 @@ listeners.wss.default {
    proxy_protocol = false

    ## Sets the timeout for proxy protocol. EMQ X will close the TCP connection
-     ## if no proxy protocol packet recevied within the timeout.
+     ## if no proxy protocol packet received within the timeout.
    ##
    ## @doc listeners.wss.<name>.proxy_protocol_timeout
    ## ValueType: Duration
@@ -134,3 +134,19 @@
}).

-endif.

%%--------------------------------------------------------------------
%% Authentication
%%--------------------------------------------------------------------

-record(authenticator,
        { id :: binary()
        , provider :: module()
        , enable :: boolean()
        , state :: map()
        }).

-record(chain,
        { name :: atom()
        , authenticators :: [#authenticator{}]
        }).
@@ -29,7 +29,7 @@

-ifndef(EMQX_ENTERPRISE).

- -define(EMQX_RELEASE, {opensource, "5.0-alpha.5"}).
+ -define(EMQX_RELEASE, {opensource, "5.0-alpha.6"}).

-else.

@@ -15,7 +15,7 @@
, {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.8.2"}}}
, {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.10.8"}}}
, {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.5.1"}}}
- , {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.14.0"}}}
+ , {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.17.0"}}}
, {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}}
, {recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}}
, {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "0.14.1"}}}
@@ -29,9 +29,9 @@
-spec(authenticate(emqx_types:clientinfo()) ->
    {ok, map()} | {ok, map(), binary()} | {continue, map()} | {continue, binary(), map()} | {error, term()}).
authenticate(Credential) ->
-     case run_hooks('client.authenticate', [Credential], {ok, #{superuser => false}}) of
+     case run_hooks('client.authenticate', [Credential], {ok, #{is_superuser => false}}) of
        ok ->
-             {ok, #{superuser => false}};
+             {ok, #{is_superuser => false}};
        Other ->
            Other
    end.
@@ -408,6 +408,8 @@ normalize_message(high_cpu_usage, #{usage := Usage}) ->
    list_to_binary(io_lib:format("~s cpu usage", [Usage]));
normalize_message(too_many_processes, #{usage := Usage}) ->
    list_to_binary(io_lib:format("~s process usage", [Usage]));
normalize_message(cluster_rpc_apply_failed, #{tnx_id := TnxId}) ->
    list_to_binary(io_lib:format("cluster_rpc_apply_failed:~w", [TnxId]));
normalize_message(partition, #{occurred := Node}) ->
    list_to_binary(io_lib:format("Partition occurs at node ~s", [Node]));
normalize_message(<<"resource", _/binary>>, #{type := Type, id := ID}) ->
@@ -0,0 +1,779 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------

-module(emqx_authentication).

-behaviour(gen_server).
-behaviour(hocon_schema).
-behaviour(emqx_config_handler).

-include("emqx.hrl").
-include("logger.hrl").

-export([ roots/0
        , fields/1
        ]).

-export([ pre_config_update/2
        , post_config_update/4
        ]).

-export([ authenticate/2
        ]).

-export([ initialize_authentication/2 ]).

-export([ start_link/0
        , stop/0
        ]).

-export([ add_provider/2
        , remove_provider/1
        , create_chain/1
        , delete_chain/1
        , lookup_chain/1
        , list_chains/0
        , create_authenticator/2
        , delete_authenticator/2
        , update_authenticator/3
        , lookup_authenticator/2
        , list_authenticators/1
        , move_authenticator/3
        ]).

-export([ import_users/3
        , add_user/3
        , delete_user/3
        , update_user/4
        , lookup_user/3
        , list_users/2
        ]).

-export([ generate_id/1 ]).

%% gen_server callbacks
-export([ init/1
        , handle_call/3
        , handle_cast/2
        , handle_info/2
        , terminate/2
        , code_change/3
        ]).

-define(CHAINS_TAB, emqx_authn_chains).

-define(VER_1, <<"1">>).
-define(VER_2, <<"2">>).

-type chain_name() :: atom().
-type authenticator_id() :: binary().
-type position() :: top | bottom | {before, authenticator_id()}.
-type update_request() :: {create_authenticator, chain_name(), map()}
                        | {delete_authenticator, chain_name(), authenticator_id()}
                        | {update_authenticator, chain_name(), authenticator_id(), map()}
                        | {move_authenticator, chain_name(), authenticator_id(), position()}.
-type authn_type() :: atom() | {atom(), atom()}.
-type provider() :: module().

-type chain() :: #{name := chain_name(),
                   authenticators := [authenticator()]}.

-type authenticator() :: #{id := authenticator_id(),
                           provider := provider(),
                           enable := boolean(),
                           state := map()}.

-type config() :: #{atom() => term()}.
-type state() :: #{atom() => term()}.
-type extra() :: #{is_superuser := boolean(),
                   atom() => term()}.
-type user_info() :: #{user_id := binary(),
                       atom() => term()}.

-callback refs() -> [{ref, Module, Name}] when Module::module(), Name::atom().

-callback create(Config)
    -> {ok, State}
     | {error, term()}
    when Config::config(), State::state().

-callback update(Config, State)
    -> {ok, NewState}
     | {error, term()}
    when Config::config(), State::state(), NewState::state().

-callback authenticate(Credential, State)
    -> ignore
     | {ok, Extra}
     | {ok, Extra, AuthData}
     | {continue, AuthCache}
     | {continue, AuthData, AuthCache}
     | {error, term()}
    when Credential::map(), State::state(), Extra::extra(), AuthData::binary(), AuthCache::map().

-callback destroy(State)
    -> ok
    when State::state().

-callback import_users(Filename, State)
    -> ok
     | {error, term()}
    when Filename::binary(), State::state().

-callback add_user(UserInfo, State)
    -> {ok, User}
     | {error, term()}
    when UserInfo::user_info(), State::state(), User::user_info().

-callback delete_user(UserID, State)
    -> ok
     | {error, term()}
    when UserID::binary(), State::state().

-callback update_user(UserID, UserInfo, State)
    -> {ok, User}
     | {error, term()}
    when UserID::binary(), UserInfo::map(), State::state(), User::user_info().

-callback lookup_user(UserID, UserInfo, State)
    -> {ok, User}
     | {error, term()}
    when UserID::binary(), UserInfo::map(), State::state(), User::user_info().

-callback list_users(State)
    -> {ok, Users}
    when State::state(), Users::[user_info()].

-optional_callbacks([ import_users/2
                    , add_user/2
                    , delete_user/2
                    , update_user/3
                    , lookup_user/3
                    , list_users/1
                    ]).
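%% Illustration (not part of this commit): a provider module that satisfies the
%% mandatory callbacks above could look roughly like the following sketch.
%% The module name, config fields and error reason are made up for the example.
%%
%%     -module(my_dummy_authn_provider).
%%     -behaviour(emqx_authentication).
%%
%%     -export([refs/0, create/1, update/2, authenticate/2, destroy/1]).
%%
%%     refs() -> [{ref, ?MODULE, config}].
%%
%%     create(#{password := Password}) ->
%%         {ok, #{password => Password}}.
%%
%%     update(NewConfig, _State) ->
%%         create(NewConfig).
%%
%%     authenticate(#{password := Password}, #{password := Password}) ->
%%         {ok, #{is_superuser => false}};
%%     authenticate(_Credential, _State) ->
%%         {error, bad_password}.
%%
%%     destroy(_State) ->
%%         ok.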

%%------------------------------------------------------------------------------
%% Hocon Schema
%%------------------------------------------------------------------------------

roots() -> [{authentication, fun authentication/1}].

fields(_) -> [].

authentication(type) ->
    {ok, Refs} = get_refs(),
    hoconsc:union([hoconsc:array(hoconsc:union(Refs)) | Refs]);
authentication(default) -> [];
authentication(_) -> undefined.

%%------------------------------------------------------------------------------
%% Callbacks of config handler
%%------------------------------------------------------------------------------

-spec pre_config_update(update_request(), emqx_config:raw_config())
    -> {ok, map() | list()} | {error, term()}.
pre_config_update(UpdateReq, OldConfig) ->
    case do_pre_config_update(UpdateReq, to_list(OldConfig)) of
        {error, Reason} -> {error, Reason};
        {ok, NewConfig} -> {ok, may_to_map(NewConfig)}
    end.

do_pre_config_update({create_authenticator, _ChainName, Config}, OldConfig) ->
    {ok, OldConfig ++ [Config]};
do_pre_config_update({delete_authenticator, _ChainName, AuthenticatorID}, OldConfig) ->
    NewConfig = lists:filter(fun(OldConfig0) ->
                                 AuthenticatorID =/= generate_id(OldConfig0)
                             end, OldConfig),
    {ok, NewConfig};
do_pre_config_update({update_authenticator, _ChainName, AuthenticatorID, Config}, OldConfig) ->
    NewConfig = lists:map(fun(OldConfig0) ->
                              case AuthenticatorID =:= generate_id(OldConfig0) of
                                  true -> maps:merge(OldConfig0, Config);
                                  false -> OldConfig0
                              end
                          end, OldConfig),
    {ok, NewConfig};
do_pre_config_update({move_authenticator, _ChainName, AuthenticatorID, Position}, OldConfig) ->
    case split_by_id(AuthenticatorID, OldConfig) of
        {error, Reason} -> {error, Reason};
        {ok, Part1, [Found | Part2]} ->
            case Position of
                top ->
                    {ok, [Found | Part1] ++ Part2};
                bottom ->
                    {ok, Part1 ++ Part2 ++ [Found]};
                {before, Before} ->
                    case split_by_id(Before, Part1 ++ Part2) of
                        {error, Reason} ->
                            {error, Reason};
                        {ok, NPart1, [NFound | NPart2]} ->
                            {ok, NPart1 ++ [Found, NFound | NPart2]}
                    end
            end
    end.

-spec post_config_update(update_request(), map() | list(), emqx_config:raw_config(), emqx_config:app_envs())
    -> ok | {ok, map()} | {error, term()}.
post_config_update(UpdateReq, NewConfig, OldConfig, AppEnvs) ->
    do_post_config_update(UpdateReq, check_config(to_list(NewConfig)), OldConfig, AppEnvs).

do_post_config_update({create_authenticator, ChainName, Config}, _NewConfig, _OldConfig, _AppEnvs) ->
    NConfig = check_config(Config),
    _ = create_chain(ChainName),
    create_authenticator(ChainName, NConfig);

do_post_config_update({delete_authenticator, ChainName, AuthenticatorID}, _NewConfig, _OldConfig, _AppEnvs) ->
    delete_authenticator(ChainName, AuthenticatorID);

do_post_config_update({update_authenticator, ChainName, AuthenticatorID, _Config}, NewConfig, _OldConfig, _AppEnvs) ->
    [Config] = lists:filter(fun(NewConfig0) ->
                                AuthenticatorID =:= generate_id(NewConfig0)
                            end, NewConfig),
    NConfig = check_config(Config),
    update_authenticator(ChainName, AuthenticatorID, NConfig);

do_post_config_update({move_authenticator, ChainName, AuthenticatorID, Position}, _NewConfig, _OldConfig, _AppEnvs) ->
    move_authenticator(ChainName, AuthenticatorID, Position).

check_config(Config) ->
    #{authentication := CheckedConfig} = hocon_schema:check_plain(emqx_authentication,
        #{<<"authentication">> => Config}, #{nullable => true, atom_key => true}),
    CheckedConfig.

%%------------------------------------------------------------------------------
%% Authenticate
%%------------------------------------------------------------------------------

authenticate(#{listener := Listener, protocol := Protocol} = Credential, _AuthResult) ->
    case ets:lookup(?CHAINS_TAB, Listener) of
        [#chain{authenticators = Authenticators}] when Authenticators =/= [] ->
            do_authenticate(Authenticators, Credential);
        _ ->
            case ets:lookup(?CHAINS_TAB, global_chain(Protocol)) of
                [#chain{authenticators = Authenticators}] when Authenticators =/= [] ->
                    do_authenticate(Authenticators, Credential);
                _ ->
                    ignore
            end
    end.

do_authenticate([], _) ->
    {stop, {error, not_authorized}};
do_authenticate([#authenticator{provider = Provider, state = State} | More], Credential) ->
    case Provider:authenticate(Credential, State) of
        ignore ->
            do_authenticate(More, Credential);
        Result ->
            %% {ok, Extra}
            %% {ok, Extra, AuthData}
            %% {continue, AuthCache}
            %% {continue, AuthData, AuthCache}
            %% {error, Reason}
            {stop, Result}
    end.

%%------------------------------------------------------------------------------
%% APIs
%%------------------------------------------------------------------------------

-spec initialize_authentication(chain_name(), [#{binary() => term()}]) -> ok.
initialize_authentication(_, []) ->
    ok;
initialize_authentication(ChainName, AuthenticatorsConfig) ->
    _ = create_chain(ChainName),
    CheckedConfig = check_config(to_list(AuthenticatorsConfig)),
    lists:foreach(fun(AuthenticatorConfig) ->
                      case create_authenticator(ChainName, AuthenticatorConfig) of
                          {ok, _} ->
                              ok;
                          {error, Reason} ->
                              ?LOG(error, "Failed to create authenticator '~s': ~p", [generate_id(AuthenticatorConfig), Reason])
                      end
                  end, CheckedConfig).

-spec start_link() -> {ok, pid()} | ignore | {error, term()}.
start_link() ->
    gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).

-spec stop() -> ok.
stop() ->
    gen_server:stop(?MODULE).

-spec get_refs() -> {ok, Refs} when Refs :: [{authn_type(), module()}].
get_refs() ->
    gen_server:call(?MODULE, get_refs).

-spec add_provider(authn_type(), module()) -> ok.
add_provider(AuthNType, Provider) ->
    gen_server:call(?MODULE, {add_provider, AuthNType, Provider}).

-spec remove_provider(authn_type()) -> ok.
remove_provider(AuthNType) ->
    gen_server:call(?MODULE, {remove_provider, AuthNType}).

-spec create_chain(chain_name()) -> {ok, chain()} | {error, term()}.
create_chain(Name) ->
    gen_server:call(?MODULE, {create_chain, Name}).

-spec delete_chain(chain_name()) -> ok | {error, term()}.
delete_chain(Name) ->
    gen_server:call(?MODULE, {delete_chain, Name}).

-spec lookup_chain(chain_name()) -> {ok, chain()} | {error, term()}.
lookup_chain(Name) ->
    gen_server:call(?MODULE, {lookup_chain, Name}).

-spec list_chains() -> {ok, [chain()]}.
list_chains() ->
    Chains = ets:tab2list(?CHAINS_TAB),
    {ok, [serialize_chain(Chain) || Chain <- Chains]}.

-spec create_authenticator(chain_name(), config()) -> {ok, authenticator()} | {error, term()}.
create_authenticator(ChainName, Config) ->
    gen_server:call(?MODULE, {create_authenticator, ChainName, Config}).

-spec delete_authenticator(chain_name(), authenticator_id()) -> ok | {error, term()}.
delete_authenticator(ChainName, AuthenticatorID) ->
    gen_server:call(?MODULE, {delete_authenticator, ChainName, AuthenticatorID}).

-spec update_authenticator(chain_name(), authenticator_id(), config()) -> {ok, authenticator()} | {error, term()}.
update_authenticator(ChainName, AuthenticatorID, Config) ->
    gen_server:call(?MODULE, {update_authenticator, ChainName, AuthenticatorID, Config}).

-spec lookup_authenticator(chain_name(), authenticator_id()) -> {ok, authenticator()} | {error, term()}.
lookup_authenticator(ChainName, AuthenticatorID) ->
    case ets:lookup(?CHAINS_TAB, ChainName) of
        [] ->
            {error, {not_found, {chain, ChainName}}};
        [#chain{authenticators = Authenticators}] ->
            case lists:keyfind(AuthenticatorID, #authenticator.id, Authenticators) of
                false ->
                    {error, {not_found, {authenticator, AuthenticatorID}}};
                Authenticator ->
                    {ok, serialize_authenticator(Authenticator)}
            end
    end.

-spec list_authenticators(chain_name()) -> {ok, [authenticator()]} | {error, term()}.
list_authenticators(ChainName) ->
    case ets:lookup(?CHAINS_TAB, ChainName) of
        [] ->
            {error, {not_found, {chain, ChainName}}};
        [#chain{authenticators = Authenticators}] ->
            {ok, serialize_authenticators(Authenticators)}
    end.

-spec move_authenticator(chain_name(), authenticator_id(), position()) -> ok | {error, term()}.
move_authenticator(ChainName, AuthenticatorID, Position) ->
    gen_server:call(?MODULE, {move_authenticator, ChainName, AuthenticatorID, Position}).

-spec import_users(chain_name(), authenticator_id(), binary()) -> ok | {error, term()}.
import_users(ChainName, AuthenticatorID, Filename) ->
    gen_server:call(?MODULE, {import_users, ChainName, AuthenticatorID, Filename}).

-spec add_user(chain_name(), authenticator_id(), user_info()) -> {ok, user_info()} | {error, term()}.
add_user(ChainName, AuthenticatorID, UserInfo) ->
    gen_server:call(?MODULE, {add_user, ChainName, AuthenticatorID, UserInfo}).

-spec delete_user(chain_name(), authenticator_id(), binary()) -> ok | {error, term()}.
delete_user(ChainName, AuthenticatorID, UserID) ->
    gen_server:call(?MODULE, {delete_user, ChainName, AuthenticatorID, UserID}).

-spec update_user(chain_name(), authenticator_id(), binary(), map()) -> {ok, user_info()} | {error, term()}.
update_user(ChainName, AuthenticatorID, UserID, NewUserInfo) ->
    gen_server:call(?MODULE, {update_user, ChainName, AuthenticatorID, UserID, NewUserInfo}).

-spec lookup_user(chain_name(), authenticator_id(), binary()) -> {ok, user_info()} | {error, term()}.
lookup_user(ChainName, AuthenticatorID, UserID) ->
    gen_server:call(?MODULE, {lookup_user, ChainName, AuthenticatorID, UserID}).

%% TODO: Support pagination
-spec list_users(chain_name(), authenticator_id()) -> {ok, [user_info()]} | {error, term()}.
list_users(ChainName, AuthenticatorID) ->
    gen_server:call(?MODULE, {list_users, ChainName, AuthenticatorID}).

-spec generate_id(config()) -> authenticator_id().
generate_id(#{mechanism := Mechanism0, backend := Backend0}) ->
    Mechanism = atom_to_binary(Mechanism0),
    Backend = atom_to_binary(Backend0),
    <<Mechanism/binary, ":", Backend/binary>>;
generate_id(#{mechanism := Mechanism}) ->
    atom_to_binary(Mechanism);
generate_id(#{<<"mechanism">> := Mechanism, <<"backend">> := Backend}) ->
    <<Mechanism/binary, ":", Backend/binary>>;
generate_id(#{<<"mechanism">> := Mechanism}) ->
    Mechanism.
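%% For example (the mechanism/backend atoms below are made up), the generated
%% IDs follow a "<mechanism>:<backend>" convention for both checked (atom-keyed)
%% and raw (binary-keyed) configs:
%%
%%     <<"scram:mnesia">> = generate_id(#{mechanism => scram, backend => mnesia}),
%%     <<"jwt">>          = generate_id(#{mechanism => jwt}),
%%     <<"scram:mnesia">> = generate_id(#{<<"mechanism">> => <<"scram">>,
%%                                        <<"backend">>   => <<"mnesia">>}).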

%%--------------------------------------------------------------------
%% gen_server callbacks
%%--------------------------------------------------------------------

init(_Opts) ->
    _ = ets:new(?CHAINS_TAB, [ named_table, set, public
                             , {keypos, #chain.name}
                             , {read_concurrency, true}]),
    ok = emqx_config_handler:add_handler([authentication], ?MODULE),
    ok = emqx_config_handler:add_handler([listeners, '?', '?', authentication], ?MODULE),
    {ok, #{hooked => false, providers => #{}}}.

handle_call({add_provider, AuthNType, Provider}, _From, #{providers := Providers} = State) ->
    reply(ok, State#{providers := Providers#{AuthNType => Provider}});

handle_call({remove_provider, AuthNType}, _From, #{providers := Providers} = State) ->
    reply(ok, State#{providers := maps:remove(AuthNType, Providers)});

handle_call(get_refs, _From, #{providers := Providers} = State) ->
    Refs = lists:foldl(fun({_, Provider}, Acc) ->
                           Acc ++ Provider:refs()
                       end, [], maps:to_list(Providers)),
    reply({ok, Refs}, State);

handle_call({create_chain, Name}, _From, State) ->
    case ets:member(?CHAINS_TAB, Name) of
        true ->
            reply({error, {already_exists, {chain, Name}}}, State);
        false ->
            Chain = #chain{name = Name,
                           authenticators = []},
            true = ets:insert(?CHAINS_TAB, Chain),
            reply({ok, serialize_chain(Chain)}, State)
    end;

handle_call({delete_chain, Name}, _From, State) ->
    case ets:lookup(?CHAINS_TAB, Name) of
        [] ->
            reply({error, {not_found, {chain, Name}}}, State);
        [#chain{authenticators = Authenticators}] ->
            _ = [do_delete_authenticator(Authenticator) || Authenticator <- Authenticators],
            true = ets:delete(?CHAINS_TAB, Name),
            reply(ok, may_unhook(State))
    end;

handle_call({lookup_chain, Name}, _From, State) ->
    case ets:lookup(?CHAINS_TAB, Name) of
        [] ->
            reply({error, {not_found, {chain, Name}}}, State);
        [Chain] ->
            reply({ok, serialize_chain(Chain)}, State)
    end;

handle_call({create_authenticator, ChainName, Config}, _From, #{providers := Providers} = State) ->
    UpdateFun =
        fun(#chain{authenticators = Authenticators} = Chain) ->
            AuthenticatorID = generate_id(Config),
            case lists:keymember(AuthenticatorID, #authenticator.id, Authenticators) of
                true ->
                    {error, {already_exists, {authenticator, AuthenticatorID}}};
                false ->
                    case do_create_authenticator(ChainName, AuthenticatorID, Config, Providers) of
                        {ok, Authenticator} ->
                            NAuthenticators = Authenticators ++ [Authenticator],
                            true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = NAuthenticators}),
                            {ok, serialize_authenticator(Authenticator)};
                        {error, Reason} ->
                            {error, Reason}
                    end
            end
        end,
    Reply = update_chain(ChainName, UpdateFun),
    reply(Reply, may_hook(State));

handle_call({delete_authenticator, ChainName, AuthenticatorID}, _From, State) ->
    UpdateFun =
        fun(#chain{authenticators = Authenticators} = Chain) ->
            case lists:keytake(AuthenticatorID, #authenticator.id, Authenticators) of
                false ->
                    {error, {not_found, {authenticator, AuthenticatorID}}};
                {value, Authenticator, NAuthenticators} ->
                    _ = do_delete_authenticator(Authenticator),
                    true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = NAuthenticators}),
                    ok
            end
        end,
    Reply = update_chain(ChainName, UpdateFun),
    reply(Reply, may_unhook(State));

handle_call({update_authenticator, ChainName, AuthenticatorID, Config}, _From, State) ->
    UpdateFun =
        fun(#chain{authenticators = Authenticators} = Chain) ->
            case lists:keyfind(AuthenticatorID, #authenticator.id, Authenticators) of
                false ->
                    {error, {not_found, {authenticator, AuthenticatorID}}};
                #authenticator{provider = Provider,
                               state = #{version := Version} = ST} = Authenticator ->
                    case AuthenticatorID =:= generate_id(Config) of
                        true ->
                            Unique = unique(ChainName, AuthenticatorID, Version),
                            case Provider:update(Config#{'_unique' => Unique}, ST) of
                                {ok, NewST} ->
                                    NewAuthenticator = Authenticator#authenticator{state = switch_version(NewST)},
                                    NewAuthenticators = replace_authenticator(AuthenticatorID, NewAuthenticator, Authenticators),
                                    true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = NewAuthenticators}),
                                    {ok, serialize_authenticator(NewAuthenticator)};
                                {error, Reason} ->
                                    {error, Reason}
                            end;
                        false ->
                            {error, change_of_authentication_type_is_not_allowed}
                    end
            end
        end,
    Reply = update_chain(ChainName, UpdateFun),
    reply(Reply, State);

handle_call({move_authenticator, ChainName, AuthenticatorID, Position}, _From, State) ->
    UpdateFun =
        fun(#chain{authenticators = Authenticators} = Chain) ->
            case do_move_authenticator(AuthenticatorID, Authenticators, Position) of
                {ok, NAuthenticators} ->
                    true = ets:insert(?CHAINS_TAB, Chain#chain{authenticators = NAuthenticators}),
                    ok;
                {error, Reason} ->
                    {error, Reason}
            end
        end,
    Reply = update_chain(ChainName, UpdateFun),
    reply(Reply, State);

handle_call({import_users, ChainName, AuthenticatorID, Filename}, _From, State) ->
    Reply = call_authenticator(ChainName, AuthenticatorID, import_users, [Filename]),
    reply(Reply, State);

handle_call({add_user, ChainName, AuthenticatorID, UserInfo}, _From, State) ->
    Reply = call_authenticator(ChainName, AuthenticatorID, add_user, [UserInfo]),
    reply(Reply, State);

handle_call({delete_user, ChainName, AuthenticatorID, UserID}, _From, State) ->
    Reply = call_authenticator(ChainName, AuthenticatorID, delete_user, [UserID]),
    reply(Reply, State);

handle_call({update_user, ChainName, AuthenticatorID, UserID, NewUserInfo}, _From, State) ->
    Reply = call_authenticator(ChainName, AuthenticatorID, update_user, [UserID, NewUserInfo]),
    reply(Reply, State);

handle_call({lookup_user, ChainName, AuthenticatorID, UserID}, _From, State) ->
    Reply = call_authenticator(ChainName, AuthenticatorID, lookup_user, [UserID]),
    reply(Reply, State);

handle_call({list_users, ChainName, AuthenticatorID}, _From, State) ->
    Reply = call_authenticator(ChainName, AuthenticatorID, list_users, []),
    reply(Reply, State);

handle_call(Req, _From, State) ->
    ?LOG(error, "Unexpected call: ~p", [Req]),
    {reply, ignored, State}.

handle_cast(Req, State) ->
    ?LOG(error, "Unexpected case: ~p", [Req]),
    {noreply, State}.

handle_info(Info, State) ->
    ?LOG(error, "Unexpected info: ~p", [Info]),
    {noreply, State}.

terminate(_Reason, _State) ->
    emqx_config_handler:remove_handler([authentication]),
    emqx_config_handler:remove_handler([listeners, '?', '?', authentication]),
    ok.

code_change(_OldVsn, State, _Extra) ->
    {ok, State}.

reply(Reply, State) ->
    {reply, Reply, State}.

%%------------------------------------------------------------------------------
%% Internal functions
%%------------------------------------------------------------------------------

split_by_id(ID, AuthenticatorsConfig) ->
    case lists:foldl(
             fun(C, {P1, P2, F0}) ->
                 F = case ID =:= generate_id(C) of
                         true -> true;
                         false -> F0
                     end,
                 case F of
                     false -> {[C | P1], P2, F};
                     true -> {P1, [C | P2], F}
                 end
             end, {[], [], false}, AuthenticatorsConfig) of
        {_, _, false} ->
            {error, {not_found, {authenticator, ID}}};
        {Part1, Part2, true} ->
            {ok, lists:reverse(Part1), lists:reverse(Part2)}
    end.

global_chain(mqtt) ->
    'mqtt:global';
global_chain('mqtt-sn') ->
    'mqtt-sn:global';
global_chain(coap) ->
    'coap:global';
global_chain(lwm2m) ->
    'lwm2m:global';
global_chain(stomp) ->
    'stomp:global';
global_chain(_) ->
    'unknown:global'.

may_hook(#{hooked := false} = State) ->
    case lists:any(fun(#chain{authenticators = []}) -> false;
                      (_) -> true
                   end, ets:tab2list(?CHAINS_TAB)) of
        true ->
            _ = emqx:hook('client.authenticate', {emqx_authentication, authenticate, []}),
            State#{hooked => true};
        false ->
            State
    end;
may_hook(State) ->
    State.

may_unhook(#{hooked := true} = State) ->
    case lists:all(fun(#chain{authenticators = []}) -> true;
                      (_) -> false
                   end, ets:tab2list(?CHAINS_TAB)) of
        true ->
            _ = emqx:unhook('client.authenticate', {emqx_authentication, authenticate, []}),
            State#{hooked => false};
        false ->
            State
    end;
may_unhook(State) ->
    State.

do_create_authenticator(ChainName, AuthenticatorID, #{enable := Enable} = Config, Providers) ->
    case maps:get(authn_type(Config), Providers, undefined) of
        undefined ->
            {error, no_available_provider};
        Provider ->
            Unique = unique(ChainName, AuthenticatorID, ?VER_1),
            case Provider:create(Config#{'_unique' => Unique}) of
                {ok, State} ->
                    Authenticator = #authenticator{id = AuthenticatorID,
                                                   provider = Provider,
                                                   enable = Enable,
                                                   state = switch_version(State)},
                    {ok, Authenticator};
                {error, Reason} ->
                    {error, Reason}
            end
    end.

do_delete_authenticator(#authenticator{provider = Provider, state = State}) ->
    _ = Provider:destroy(State),
    ok.

replace_authenticator(ID, Authenticator, Authenticators) ->
    lists:keyreplace(ID, #authenticator.id, Authenticators, Authenticator).

do_move_authenticator(ID, Authenticators, Position) ->
    case lists:keytake(ID, #authenticator.id, Authenticators) of
        false ->
            {error, {not_found, {authenticator, ID}}};
        {value, Authenticator, NAuthenticators} ->
            case Position of
                top ->
                    {ok, [Authenticator | NAuthenticators]};
                bottom ->
                    {ok, NAuthenticators ++ [Authenticator]};
                {before, ID0} ->
                    insert(Authenticator, NAuthenticators, ID0, [])
            end
    end.

insert(_, [], ID, _) ->
    {error, {not_found, {authenticator, ID}}};
insert(Authenticator, [#authenticator{id = ID} | _] = Authenticators, ID, Acc) ->
    {ok, lists:reverse(Acc) ++ [Authenticator | Authenticators]};
insert(Authenticator, [Authenticator0 | More], ID, Acc) ->
    insert(Authenticator, More, ID, [Authenticator0 | Acc]).

update_chain(ChainName, UpdateFun) ->
    case ets:lookup(?CHAINS_TAB, ChainName) of
        [] ->
            {error, {not_found, {chain, ChainName}}};
        [Chain] ->
            UpdateFun(Chain)
    end.

call_authenticator(ChainName, AuthenticatorID, Func, Args) ->
    UpdateFun =
        fun(#chain{authenticators = Authenticators}) ->
            case lists:keyfind(AuthenticatorID, #authenticator.id, Authenticators) of
                false ->
                    {error, {not_found, {authenticator, AuthenticatorID}}};
                #authenticator{provider = Provider, state = State} ->
                    case erlang:function_exported(Provider, Func, length(Args) + 1) of
                        true ->
                            erlang:apply(Provider, Func, Args ++ [State]);
                        false ->
                            {error, unsupported_operation}
                    end
            end
        end,
    update_chain(ChainName, UpdateFun).

serialize_chain(#chain{name = Name,
                       authenticators = Authenticators}) ->
    #{ name => Name
     , authenticators => serialize_authenticators(Authenticators)
     }.

serialize_authenticators(Authenticators) ->
    [serialize_authenticator(Authenticator) || Authenticator <- Authenticators].

serialize_authenticator(#authenticator{id = ID,
                                       provider = Provider,
                                       enable = Enable,
                                       state = State}) ->
    #{ id => ID
     , provider => Provider
     , enable => Enable
     , state => State
     }.

unique(ChainName, AuthenticatorID, Version) ->
    NChainName = atom_to_binary(ChainName),
    <<NChainName/binary, "/", AuthenticatorID/binary, ":", Version/binary>>.

switch_version(State = #{version := ?VER_1}) ->
    State#{version := ?VER_2};
switch_version(State = #{version := ?VER_2}) ->
    State#{version := ?VER_1};
switch_version(State) ->
    State#{version => ?VER_1}.

authn_type(#{mechanism := Mechanism, backend := Backend}) ->
    {Mechanism, Backend};
authn_type(#{mechanism := Mechanism}) ->
    Mechanism.

may_to_map([L]) ->
    L;
may_to_map(L) ->
    L.

to_list(undefined) ->
    [];
to_list(M) when M =:= #{} ->
    [];
to_list(M) when is_map(M) ->
    [M];
to_list(L) when is_list(L) ->
    L.
@@ -37,6 +37,7 @@
        , delete/1
        , info/1
        , format/1
        , parse/1
        ]).

%% gen_server callbacks

@@ -107,6 +108,33 @@ format(#banned{who = Who0,
      until => to_rfc3339(Until)
     }.

parse(Params) ->
    Who = pares_who(Params),
    By = maps:get(<<"by">>, Params, <<"mgmt_api">>),
    Reason = maps:get(<<"reason">>, Params, <<"">>),
    At = pares_time(maps:get(<<"at">>, Params, undefined), erlang:system_time(second)),
    Until = pares_time(maps:get(<<"until">>, Params, undefined), At + 5 * 60),
    #banned{
        who = Who,
        by = By,
        reason = Reason,
        at = At,
        until = Until
    }.

pares_who(#{as := As, who := Who}) ->
    pares_who(#{<<"as">> => As, <<"who">> => Who});
pares_who(#{<<"as">> := <<"peerhost">>, <<"who">> := Peerhost0}) ->
    {ok, Peerhost} = inet:parse_address(binary_to_list(Peerhost0)),
    {peerhost, Peerhost};
pares_who(#{<<"as">> := As, <<"who">> := Who}) ->
    {binary_to_atom(As, utf8), Who}.

pares_time(undefined, Default) ->
    Default;
pares_time(Rfc3339, _Default) ->
    to_timestamp(Rfc3339).

maybe_format_host({peerhost, Host}) ->
    AddrBinary = list_to_binary(inet:ntoa(Host)),
    {peerhost, AddrBinary};
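A rough sketch of how the new parse/1 is intended to be driven from the management API (the module name emqx_banned and the field values are assumed for illustration; per the code above, by defaults to <<"mgmt_api">>, reason to <<"">>, at to the current time and until to at + 300 seconds):

    Banned = emqx_banned:parse(#{<<"as">>  => <<"clientid">>,
                                 <<"who">> => <<"bad-client">>}),
    ok = emqx_banned:create(Banned).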
@@ -116,6 +144,11 @@ maybe_format_host({As, Who}) ->
to_rfc3339(Timestamp) ->
    list_to_binary(calendar:system_time_to_rfc3339(Timestamp, [{unit, second}])).

to_timestamp(Rfc3339) when is_binary(Rfc3339) ->
    to_timestamp(binary_to_list(Rfc3339));
to_timestamp(Rfc3339) ->
    calendar:rfc3339_to_system_time(Rfc3339, [{unit, second}]).

-spec(create(emqx_types:banned() | map()) -> ok).
create(#{who := Who,
         by := By,

@@ -130,12 +163,16 @@ create(#{who := Who,
create(Banned) when is_record(Banned, banned) ->
    ekka_mnesia:dirty_write(?BANNED_TAB, Banned).

look_up(Who) when is_map(Who) ->
    look_up(pares_who(Who));
look_up(Who) ->
    mnesia:dirty_read(?BANNED_TAB, Who).

-spec(delete({clientid, emqx_types:clientid()}
           | {username, emqx_types:username()}
           | {peerhost, emqx_types:peerhost()}) -> ok).
delete(Who) when is_map(Who)->
    delete(pares_who(Who));
delete(Who) ->
    ekka_mnesia:dirty_delete(?BANNED_TAB, Who).

@@ -43,6 +43,14 @@ init([]) ->
                 type => worker,
                 modules => [emqx_shared_sub]},

    %% Authentication
    AuthN = #{id => authn,
              start => {emqx_authentication, start_link, []},
              restart => permanent,
              shutdown => 2000,
              type => worker,
              modules => [emqx_authentication]},

    %% Broker helper
    Helper = #{id => helper,
               start => {emqx_broker_helper, start_link, []},

@@ -51,5 +59,5 @@ init([]) ->
               type => worker,
               modules => [emqx_broker_helper]},

-     {ok, {{one_for_all, 0, 1}, [BrokerPool, SharedSub, Helper]}}.
+     {ok, {{one_for_all, 0, 1}, [BrokerPool, SharedSub, AuthN, Helper]}}.
@@ -214,7 +214,7 @@ init(ConnInfo = #{peername := {PeerHost, _Port},
    ClientInfo = set_peercert_infos(
                   Peercert,
                   #{zone => Zone,
-                     listener => Listener,
+                     listener => emqx_listeners:listener_id(Type, Listener),
                     protocol => Protocol,
                     peerhost => PeerHost,
                     sockport => SockPort,

@@ -223,7 +223,7 @@ init(ConnInfo = #{peername := {PeerHost, _Port},
                     mountpoint => MountPoint,
                     is_bridge => false,
                     is_superuser => false
-                    }, Zone, Listener),
+                    }, Zone),
    {NClientInfo, NConnInfo} = take_ws_cookie(ClientInfo, ConnInfo),
    #channel{conninfo = NConnInfo,
             clientinfo = NClientInfo,

@@ -244,12 +244,12 @@ quota_policy(RawPolicy) ->
      erlang:trunc(hocon_postprocess:duration(StrWind) / 1000)}}
     || {Name, [StrCount, StrWind]} <- maps:to_list(RawPolicy)].

- set_peercert_infos(NoSSL, ClientInfo, _, _)
+ set_peercert_infos(NoSSL, ClientInfo, _)
  when NoSSL =:= nossl;
       NoSSL =:= undefined ->
    ClientInfo#{username => undefined};

- set_peercert_infos(Peercert, ClientInfo, Zone, _Listener) ->
+ set_peercert_infos(Peercert, ClientInfo, Zone) ->
    {DN, CN} = {esockd_peercert:subject(Peercert),
                esockd_peercert:common_name(Peercert)},
    PeercetAs = fun(Key) ->

@@ -1303,11 +1303,11 @@ do_authenticate(#{auth_method := AuthMethod} = Credential, #channel{clientinfo =
    case emqx_access_control:authenticate(Credential) of
        {ok, Result} ->
            {ok, Properties,
-              Channel#channel{clientinfo = ClientInfo#{is_superuser => maps:get(superuser, Result, false)},
+              Channel#channel{clientinfo = ClientInfo#{is_superuser => maps:get(is_superuser, Result, false)},
                              auth_cache = #{}}};
        {ok, Result, AuthData} ->
            {ok, Properties#{'Authentication-Data' => AuthData},
-              Channel#channel{clientinfo = ClientInfo#{is_superuser => maps:get(superuser, Result, false)},
+              Channel#channel{clientinfo = ClientInfo#{is_superuser => maps:get(is_superuser, Result, false)},
                              auth_cache = #{}}};
        {continue, AuthCache} ->
            {continue, Properties, Channel#channel{auth_cache = AuthCache}};

@@ -1320,8 +1320,8 @@ do_authenticate(#{auth_method := AuthMethod} = Credential, #channel{clientinfo =

do_authenticate(Credential, #channel{clientinfo = ClientInfo} = Channel) ->
    case emqx_access_control:authenticate(Credential) of
-         {ok, #{superuser := Superuser}} ->
-             {ok, #{}, Channel#channel{clientinfo = ClientInfo#{is_superuser => Superuser}}};
+         {ok, #{is_superuser := IsSuperuser}} ->
+             {ok, #{}, Channel#channel{clientinfo = ClientInfo#{is_superuser => IsSuperuser}}};
        {error, Reason} ->
            {error, emqx_reason_codes:connack_error(Reason)}
    end.
@@ -22,49 +22,38 @@

-export([init/1]).

%%--------------------------------------------------------------------
%% API
%%--------------------------------------------------------------------

start_link() ->
    supervisor:start_link({local, ?MODULE}, ?MODULE, []).

%%--------------------------------------------------------------------
%% Supervisor callbacks
%%--------------------------------------------------------------------

init([]) ->
    Banned = #{id => banned,
               start => {emqx_banned, start_link, []},
               restart => permanent,
               shutdown => 1000,
               type => worker,
               modules => [emqx_banned]},
    Flapping = #{id => flapping,
                 start => {emqx_flapping, start_link, []},
                 restart => permanent,
                 shutdown => 1000,
                 type => worker,
                 modules => [emqx_flapping]},
    %% Channel locker
    Locker = #{id => locker,
               start => {emqx_cm_locker, start_link, []},
               restart => permanent,
               shutdown => 5000,
               type => worker,
               modules => [emqx_cm_locker]
              },
    %% Channel registry
    Registry = #{id => registry,
                 start => {emqx_cm_registry, start_link, []},
                 restart => permanent,
                 shutdown => 5000,
                 type => worker,
                 modules => [emqx_cm_registry]
                },
    %% Channel Manager
    Manager = #{id => manager,
                start => {emqx_cm, start_link, []},
                restart => permanent,
                shutdown => 5000,
                type => worker,
                modules => [emqx_cm]
               },
    SupFlags = #{strategy => one_for_one,
                 intensity => 100,
                 period => 10
                },
    Banned = child_spec(emqx_banned, 1000, worker),
    Flapping = child_spec(emqx_flapping, 1000, worker),
    Locker = child_spec(emqx_cm_locker, 5000, worker),
    Registry = child_spec(emqx_cm_registry, 5000, worker),
    Manager = child_spec(emqx_cm, 5000, worker),
    {ok, {SupFlags, [Banned, Flapping, Locker, Registry, Manager]}}.

%%--------------------------------------------------------------------
%% Internal functions
%%--------------------------------------------------------------------

child_spec(Mod, Shutdown, Type) ->
    #{id => Mod,
      start => {Mod, start_link, []},
      restart => permanent,
      shutdown => Shutdown,
      type => Type,
      modules => [Mod]
     }.
|
@ -87,8 +87,14 @@
|
|||
-type update_request() :: term().
|
||||
-type update_cmd() :: {update, update_request()} | remove.
|
||||
-type update_opts() :: #{
|
||||
%% fill the default values into the rawconf map
|
||||
rawconf_with_defaults => boolean()
|
||||
%% rawconf_with_defaults:
|
||||
%% fill the default values into the `raw_config` field of the return value
|
||||
%% defaults to `false`
|
||||
rawconf_with_defaults => boolean(),
|
||||
%% persistent:
|
||||
%% save the updated config to the emqx_override.conf file
|
||||
%% defaults to `true`
|
||||
persistent => boolean()
|
||||
}.
|
||||
-type update_args() :: {update_cmd(), Opts :: update_opts()}.
|
||||
-type update_stage() :: pre_config_update | post_config_update.
|
||||
|
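For illustration, these options travel as the second element of update_args(); in the sketch below the schema module, key path and payload are placeholders, and only the Opts map reflects the fields documented above:

    Opts = #{rawconf_with_defaults => true,
             persistent => false},
    emqx_config_handler:update_config(emqx_schema, [sysmon],
                                      {{update, #{<<"os">> => #{}}}, Opts}).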
@@ -235,7 +241,7 @@ put_raw(KeyPath, Config) -> do_put(?RAW_CONF, KeyPath, Config).
%% in the rear of the list overrides prior values.
-spec init_load(module(), [string()] | binary() | hocon:config()) -> ok.
init_load(SchemaMod, Conf) when is_list(Conf) orelse is_binary(Conf) ->
-     ParseOptions = #{format => richmap},
+     ParseOptions = #{format => map},
    Parser = case is_binary(Conf) of
                 true -> fun hocon:binary/2;
                 false -> fun hocon:files/2

@@ -249,19 +255,17 @@ init_load(SchemaMod, Conf) when is_list(Conf) orelse is_binary(Conf) ->
            }),
        error(failed_to_load_hocon_conf)
    end;
- init_load(SchemaMod, RawRichConf) when is_map(RawRichConf) ->
-     %% check with richmap for line numbers in error reports (future enhancement)
-     Opts = #{return_plain => true,
-              nullable => true
-             },
-     %% this call throws exception in case of check failure
-     {_AppEnvs, CheckedConf} = hocon_schema:map_translate(SchemaMod, RawRichConf, Opts),
+ init_load(SchemaMod, RawConf0) when is_map(RawConf0) ->
    ok = save_schema_mod_and_names(SchemaMod),
-     ok = save_to_config_map(emqx_map_lib:unsafe_atom_key_map(normalize_conf(CheckedConf)),
-                             normalize_conf(hocon_schema:richmap_to_map(RawRichConf))).
+     %% override part of the input conf using emqx_override.conf
+     RawConf = merge_with_override_conf(RawConf0),
+     %% check and save configs
+     {_AppEnvs, CheckedConf} = check_config(SchemaMod, RawConf),
+     ok = save_to_config_map(maps:with(get_atom_root_names(), CheckedConf),
+                             maps:with(get_root_names(), RawConf)).

- normalize_conf(Conf) ->
-     maps:with(get_root_names(), Conf).
+ merge_with_override_conf(RawConf) ->
+     maps:merge(RawConf, maps:with(maps:keys(RawConf), read_override_conf())).

-spec check_config(module(), raw_config()) -> {AppEnvs, CheckedConf}
    when AppEnvs :: app_envs(), CheckedConf :: config().

@@ -320,6 +324,9 @@ get_schema_mod(RootName) ->
get_root_names() ->
    maps:get(names, persistent_term:get(?PERSIS_SCHEMA_MODS, #{names => []})).

get_atom_root_names() ->
    [atom(N) || N <- get_root_names()].

-spec save_configs(app_envs(), config(), raw_config(), raw_config()) -> ok | {error, term()}.
save_configs(_AppEnvs, Conf, RawConf, OverrideConf) ->
    %% We may need also support hot config update for the apps that use application envs.

@@ -341,14 +348,19 @@ save_to_config_map(Conf, RawConf) ->
    ?MODULE:put_raw(RawConf).

-spec save_to_override_conf(raw_config()) -> ok | {error, term()}.
+ save_to_override_conf(undefined) ->
+     ok;
save_to_override_conf(RawConf) ->
-     FileName = emqx_override_conf_name(),
-     ok = filelib:ensure_dir(FileName),
-     case file:write_file(FileName, jsx:prettify(jsx:encode(RawConf))) of
-         ok -> ok;
-         {error, Reason} ->
-             logger:error("write to ~s failed, ~p", [FileName, Reason]),
-             {error, Reason}
+     case emqx_override_conf_name() of
+         undefined -> ok;
+         FileName ->
+             ok = filelib:ensure_dir(FileName),
+             case file:write_file(FileName, jsx:prettify(jsx:encode(RawConf))) of
+                 ok -> ok;
+                 {error, Reason} ->
+                     logger:error("write to ~s failed, ~p", [FileName, Reason]),
+                     {error, Reason}
+             end
    end.

load_hocon_file(FileName, LoadType) ->

@@ -360,7 +372,7 @@ load_hocon_file(FileName, LoadType) ->
    end.

emqx_override_conf_name() ->
-     application:get_env(emqx, override_conf_file, "emqx_override.conf").
+     application:get_env(emqx, override_conf_file, undefined).

do_get(Type, KeyPath) ->
    Ref = make_ref(),
@@ -39,6 +39,7 @@
                code_change/3]).

-define(MOD, {mod}).
-define(WKEY, '?').

-define(ATOM_CONF_PATH(PATH, EXP, EXP_ON_FAIL),
    try [safe_atom(Key) || Key <- PATH] of

@@ -80,11 +81,11 @@ update_config(SchemaModule, ConfKeyPath, UpdateArgs) ->

-spec add_handler(emqx_config:config_key_path(), handler_name()) -> ok.
add_handler(ConfKeyPath, HandlerName) ->
-     gen_server:call(?MODULE, {add_child, ConfKeyPath, HandlerName}).
+     gen_server:call(?MODULE, {add_handler, ConfKeyPath, HandlerName}).

-spec remove_handler(emqx_config:config_key_path()) -> ok.
remove_handler(ConfKeyPath) ->
-     gen_server:call(?MODULE, {remove_child, ConfKeyPath}).
+     gen_server:call(?MODULE, {remove_handler, ConfKeyPath}).

%%============================================================================

@@ -92,15 +93,18 @@ remove_handler(ConfKeyPath) ->
init(_) ->
    {ok, #{handlers => #{?MOD => ?MODULE}}}.

- handle_call({add_child, ConfKeyPath, HandlerName}, _From,
-             State = #{handlers := Handlers}) ->
-     {reply, ok, State#{handlers =>
-         emqx_map_lib:deep_put(ConfKeyPath, Handlers, #{?MOD => HandlerName})}};
+ handle_call({add_handler, ConfKeyPath, HandlerName}, _From, State = #{handlers := Handlers}) ->
+     case deep_put_handler(ConfKeyPath, Handlers, HandlerName) of
+         {ok, NewHandlers} ->
+             {reply, ok, State#{handlers => NewHandlers}};
+         Error ->
+             {reply, Error, State}
+     end;

- handle_call({remove_child, ConfKeyPath}, _From,
+ handle_call({remove_handler, ConfKeyPath}, _From,
            State = #{handlers := Handlers}) ->
    {reply, ok, State#{handlers =>
-         emqx_map_lib:deep_remove(ConfKeyPath, Handlers)}};
+         emqx_map_lib:deep_remove(ConfKeyPath ++ [?MOD], Handlers)}};

handle_call({change_config, SchemaModule, ConfKeyPath, UpdateArgs}, _From,
            #{handlers := Handlers} = State) ->
@ -134,17 +138,40 @@ terminate(_Reason, _State) ->
|
|||
code_change(_OldVsn, State, _Extra) ->
|
||||
{ok, State}.
|
||||
|
||||
process_update_request(ConfKeyPath, _Handlers, {remove, _Opts}) ->
|
||||
deep_put_handler([], Handlers, Mod) when is_map(Handlers) ->
|
||||
{ok, Handlers#{?MOD => Mod}};
|
||||
deep_put_handler([], _Handlers, Mod) ->
|
||||
{ok, #{?MOD => Mod}};
|
||||
deep_put_handler([?WKEY | KeyPath], Handlers, Mod) ->
|
||||
deep_put_handler2(?WKEY, KeyPath, Handlers, Mod);
|
||||
deep_put_handler([Key | KeyPath], Handlers, Mod) ->
|
||||
case maps:find(?WKEY, Handlers) of
|
||||
error ->
|
||||
deep_put_handler2(Key, KeyPath, Handlers, Mod);
|
||||
{ok, _SubHandlers} ->
|
||||
{error, {cannot_override_a_wildcard_path, [?WKEY | KeyPath]}}
|
||||
end.
|
||||
|
||||
deep_put_handler2(Key, KeyPath, Handlers, Mod) ->
|
||||
SubHandlers = maps:get(Key, Handlers, #{}),
|
||||
case deep_put_handler(KeyPath, SubHandlers, Mod) of
|
||||
{ok, SubHandlers1} ->
|
||||
{ok, Handlers#{Key => SubHandlers1}};
|
||||
Error ->
|
||||
Error
|
||||
end.
|
||||
|
||||
process_update_request(ConfKeyPath, _Handlers, {remove, Opts}) ->
|
||||
OldRawConf = emqx_config:get_root_raw(ConfKeyPath),
|
||||
BinKeyPath = bin_path(ConfKeyPath),
|
||||
NewRawConf = emqx_map_lib:deep_remove(BinKeyPath, OldRawConf),
|
||||
OverrideConf = emqx_map_lib:deep_remove(BinKeyPath, emqx_config:read_override_conf()),
|
||||
OverrideConf = remove_from_override_config(BinKeyPath, Opts),
|
||||
{ok, NewRawConf, OverrideConf};
|
||||
process_update_request(ConfKeyPath, Handlers, {{update, UpdateReq}, _Opts}) ->
|
||||
process_update_request(ConfKeyPath, Handlers, {{update, UpdateReq}, Opts}) ->
|
||||
OldRawConf = emqx_config:get_root_raw(ConfKeyPath),
|
||||
case do_update_config(ConfKeyPath, Handlers, OldRawConf, UpdateReq) of
|
||||
{ok, NewRawConf} ->
|
||||
OverrideConf = update_override_config(NewRawConf),
|
||||
OverrideConf = update_override_config(NewRawConf, Opts),
|
||||
{ok, NewRawConf, OverrideConf};
|
||||
Error -> Error
|
||||
end.
|
||||
|
@ -153,7 +180,7 @@ do_update_config([], Handlers, OldRawConf, UpdateReq) ->
|
|||
call_pre_config_update(Handlers, OldRawConf, UpdateReq);
|
||||
do_update_config([ConfKey | ConfKeyPath], Handlers, OldRawConf, UpdateReq) ->
|
||||
SubOldRawConf = get_sub_config(bin(ConfKey), OldRawConf),
|
||||
SubHandlers = maps:get(ConfKey, Handlers, #{}),
|
||||
SubHandlers = get_sub_handlers(ConfKey, Handlers),
|
||||
case do_update_config(ConfKeyPath, SubHandlers, SubOldRawConf, UpdateReq) of
|
||||
{ok, NewUpdateReq} ->
|
||||
call_pre_config_update(Handlers, OldRawConf, #{bin(ConfKey) => NewUpdateReq});
|
||||
|
@ -184,7 +211,7 @@ do_post_config_update([ConfKey | ConfKeyPath], Handlers, OldConf, NewConf, AppEn
|
|||
Result) ->
|
||||
SubOldConf = get_sub_config(ConfKey, OldConf),
|
||||
SubNewConf = get_sub_config(ConfKey, NewConf),
|
||||
SubHandlers = maps:get(ConfKey, Handlers, #{}),
|
||||
SubHandlers = get_sub_handlers(ConfKey, Handlers),
|
||||
case do_post_config_update(ConfKeyPath, SubHandlers, SubOldConf, SubNewConf, AppEnvs,
|
||||
UpdateArgs, Result) of
|
||||
{ok, Result1} ->
|
||||
|
@ -193,6 +220,12 @@ do_post_config_update([ConfKey | ConfKeyPath], Handlers, OldConf, NewConf, AppEn
|
|||
Error -> Error
|
||||
end.
|
||||
|
||||
get_sub_handlers(ConfKey, Handlers) ->
|
||||
case maps:find(ConfKey, Handlers) of
|
||||
error -> maps:get(?WKEY, Handlers, #{});
|
||||
{ok, SubHandlers} -> SubHandlers
|
||||
end.
|
||||
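A self-contained illustration of the wildcard fallback implemented by get_sub_handlers/2 above ('?' standing in for ?WKEY; the handler layout is a made-up example).

-module(sketch_wildcard_lookup).
-export([demo/0]).

get_sub_handlers(ConfKey, Handlers) ->
    case maps:find(ConfKey, Handlers) of
        error -> maps:get('?', Handlers, #{});
        {ok, SubHandlers} -> SubHandlers
    end.

demo() ->
    %% One handler registered under the wildcard listener name.
    Handlers = #{listeners => #{'?' => #{authentication => #{{mod} => my_handler}}}},
    Level1 = get_sub_handlers(listeners, Handlers),
    %% A concrete listener name is absent, so the lookup falls back to '?'.
    Level2 = get_sub_handlers(default, Level1),
    get_sub_handlers(authentication, Level2).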
|
||||
get_sub_config(ConfKey, Conf) when is_map(Conf) ->
|
||||
maps:get(ConfKey, Conf, undefined);
|
||||
get_sub_config(_, _Conf) -> %% the Conf is a primitive
|
||||
|
@ -237,7 +270,15 @@ merge_to_old_config(UpdateReq, RawConf) when is_map(UpdateReq), is_map(RawConf)
|
|||
merge_to_old_config(UpdateReq, _RawConf) ->
|
||||
{ok, UpdateReq}.
|
||||
|
||||
update_override_config(RawConf) ->
|
||||
remove_from_override_config(_BinKeyPath, #{persistent := false}) ->
|
||||
undefined;
|
||||
remove_from_override_config(BinKeyPath, _Opts) ->
|
||||
OldConf = emqx_config:read_override_conf(),
|
||||
emqx_map_lib:deep_remove(BinKeyPath, OldConf).
|
||||
|
||||
update_override_config(_RawConf, #{persistent := false}) ->
|
||||
undefined;
|
||||
update_override_config(RawConf, _Opts) ->
|
||||
OldConf = emqx_config:read_override_conf(),
|
||||
maps:merge(OldConf, RawConf).
|
||||
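A hedged usage sketch of the `persistent` option read by the two helpers above; it assumes the opts map given to emqx:update_config/3 (the entry point used by the test suite later in this diff) is the one threaded through to update_override_config/2, and the arguments are only examples.

-module(sketch_non_persistent_update).
-export([update_in_memory_only/2]).

%% With persistent => false the helpers above return `undefined`, so the change
%% takes effect at runtime but is never written into the override file.
update_in_memory_only(ConfKeyPath, NewRawValue) ->
    emqx:update_config(ConfKeyPath, NewRawValue, #{persistent => false}).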
|
||||
|
|
|
@ -100,14 +100,10 @@ parse(<<Type:4, Dup:1, QoS:2, Retain:1, Rest/binary>>,
|
|||
StrictMode andalso validate_header(Type, Dup, QoS, Retain),
|
||||
Header = #mqtt_packet_header{type = Type,
|
||||
dup = bool(Dup),
|
||||
qos = QoS,
|
||||
qos = fixqos(Type, QoS),
|
||||
retain = bool(Retain)
|
||||
},
|
||||
Header1 = case fixqos(Type, QoS) of
|
||||
QoS -> Header;
|
||||
FixedQoS -> Header#mqtt_packet_header{qos = FixedQoS}
|
||||
end,
|
||||
parse_remaining_len(Rest, Header1, Options);
|
||||
parse_remaining_len(Rest, Header, Options);
|
||||
|
||||
parse(Bin, {{len, #{hdr := Header,
|
||||
len := {Multiplier, Length}}
|
||||
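A hedged sketch of the idea behind fixqos/2, which the header construction above now applies inline: a few MQTT control packets must carry QoS 1 bits in their fixed header, so a non-compliant value is normalized instead of tripping strict validation. The real clauses in emqx_frame use the packet-type macros; atoms are used here only to keep the example self-contained.

-module(sketch_fixqos).
-export([fixqos/2]).

fixqos(Type, 0) when Type =:= pubrel; Type =:= subscribe; Type =:= unsubscribe -> 1;
fixqos(_Type, QoS) -> QoS.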
|
|
|
@ -26,6 +26,8 @@
|
|||
, restart/0
|
||||
, stop/0
|
||||
, is_running/1
|
||||
, current_conns/2
|
||||
, max_conns/2
|
||||
]).
|
||||
|
||||
-export([ start_listener/1
|
||||
|
@ -48,7 +50,7 @@
|
|||
%% @doc List configured listeners.
|
||||
-spec(list() -> [{ListenerId :: atom(), ListenerConf :: map()}]).
|
||||
list() ->
|
||||
[{listener_id(ZoneName, LName), LConf} || {ZoneName, LName, LConf} <- do_list()].
|
||||
[{listener_id(Type, LName), LConf} || {Type, LName, LConf} <- do_list()].
|
||||
|
||||
do_list() ->
|
||||
Listeners = maps:to_list(emqx:get_config([listeners], #{})),
|
||||
|
@ -62,7 +64,7 @@ list(Type, Conf) ->
|
|||
|
||||
-spec is_running(ListenerId :: atom()) -> boolean() | {error, no_found}.
|
||||
is_running(ListenerId) ->
|
||||
case lists:filtermap(fun({_Zone, Id, #{running := IsRunning}}) ->
|
||||
case lists:filtermap(fun({_Type, Id, #{running := IsRunning}}) ->
|
||||
Id =:= ListenerId andalso {true, IsRunning}
|
||||
end, do_list()) of
|
||||
[IsRunning] -> IsRunning;
|
||||
|
@ -89,6 +91,28 @@ is_running(quic, _ListenerId, _Conf)->
|
|||
%% TODO: quic support
|
||||
{error, no_found}.
|
||||
|
||||
current_conns(ID, ListenOn) ->
|
||||
{Type, Name} = parse_listener_id(ID),
|
||||
current_conns(Type, Name, ListenOn).
|
||||
|
||||
current_conns(Type, Name, ListenOn) when Type == tcp; Type == ssl ->
|
||||
esockd:get_current_connections({listener_id(Type, Name), ListenOn});
|
||||
current_conns(Type, Name, _ListenOn) when Type =:= ws; Type =:= wss ->
|
||||
proplists:get_value(all_connections, ranch:info(listener_id(Type, Name)));
|
||||
current_conns(_, _, _) ->
|
||||
{error, not_support}.
|
||||
|
||||
max_conns(ID, ListenOn) ->
|
||||
{Type, Name} = parse_listener_id(ID),
|
||||
max_conns(Type, Name, ListenOn).
|
||||
|
||||
max_conns(Type, Name, ListenOn) when Type == tcp; Type == ssl ->
|
||||
esockd:get_max_connections({listener_id(Type, Name), ListenOn});
|
||||
max_conns(Type, Name, _ListenOn) when Type =:= ws; Type =:= wss ->
|
||||
proplists:get_value(max_connections, ranch:info(listener_id(Type, Name)));
|
||||
max_conns(_, _, _) ->
|
||||
{error, not_support}.
|
||||
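A minimal sketch of the "type:name" listener id convention that current_conns/2 and max_conns/2 above rely on via parse_listener_id/1 (the exact implementation in emqx_listeners may differ; this is illustrative only).

-module(sketch_listener_id).
-export([listener_id/2, parse_listener_id/1]).

%% tcp + default -> 'tcp:default'
listener_id(Type, Name) ->
    list_to_atom(lists:concat([Type, ":", Name])).

%% 'ws:default' -> {ws, default}
parse_listener_id(Id) ->
    [Type, Name] = string:split(atom_to_list(Id), ":"),
    {list_to_atom(Type), list_to_atom(Name)}.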
|
||||
%% @doc Start all listeners.
|
||||
-spec(start() -> ok).
|
||||
start() ->
|
||||
|
@ -228,11 +252,15 @@ do_start_listener(quic, ListenerName, #{bind := ListenOn} = Opts) ->
|
|||
{ok, {skipped, quic_app_missing}}
|
||||
end.
|
||||
|
||||
delete_authentication(Type, ListenerName, _Conf) ->
|
||||
emqx_authentication:delete_chain(listener_id(Type, ListenerName)).
|
||||
|
||||
%% Update the listeners at runtime
|
||||
post_config_update(_Req, NewListeners, OldListeners, _AppEnvs) ->
|
||||
#{added := Added, removed := Removed, changed := Updated}
|
||||
= diff_listeners(NewListeners, OldListeners),
|
||||
perform_listener_changes(fun stop_listener/3, Removed),
|
||||
perform_listener_changes(fun delete_authentication/3, Removed),
|
||||
perform_listener_changes(fun start_listener/3, Added),
|
||||
perform_listener_changes(fun restart_listener/3, Updated).
|
||||
|
||||
|
@ -252,7 +280,7 @@ flatten_listeners(Conf0) ->
|
|||
|| {Type, Conf} <- maps:to_list(Conf0)])).
|
||||
|
||||
do_flatten_listeners(Type, Conf0) ->
|
||||
[{listener_id(Type, Name), Conf} || {Name, Conf} <- maps:to_list(Conf0)].
|
||||
[{listener_id(Type, Name), maps:remove(authentication, Conf)} || {Name, Conf} <- maps:to_list(Conf0)].
|
||||
|
||||
esockd_opts(Type, Opts0) ->
|
||||
Opts1 = maps:with([acceptors, max_connections, proxy_protocol, proxy_protocol_timeout], Opts0),
|
||||
|
|
|
@ -65,13 +65,11 @@ deep_find(_KeyPath, Data) ->
|
|||
{not_found, _KeyPath, Data}.
|
||||
|
||||
-spec deep_put(config_key_path(), map(), term()) -> map().
|
||||
deep_put([], Map, Data) when is_map(Map) ->
|
||||
Data;
|
||||
deep_put([], _Map, Data) -> %% not map, replace it
|
||||
deep_put([], _Map, Data) ->
|
||||
Data;
|
||||
deep_put([Key | KeyPath], Map, Data) ->
|
||||
SubMap = deep_put(KeyPath, maps:get(Key, Map, #{}), Data),
|
||||
Map#{Key => SubMap}.
|
||||
SubMap = maps:get(Key, Map, #{}),
|
||||
Map#{Key => deep_put(KeyPath, SubMap, Data)}.
|
||||
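A quick self-contained check of the corrected deep_put/3 semantics (the function body is copied from the new version above so the example runs on its own).

-module(sketch_deep_put).
-export([demo/0]).

deep_put([], _Map, Data) ->
    Data;
deep_put([Key | KeyPath], Map, Data) ->
    SubMap = maps:get(Key, Map, #{}),
    Map#{Key => deep_put(KeyPath, SubMap, Data)}.

%% Existing sibling keys are preserved while the nested value is replaced.
demo() ->
    #{a := #{b := 2, c := 1}} = deep_put([a, b], #{a => #{c => 1}}, 2),
    ok.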
|
||||
-spec deep_remove(config_key_path(), map()) -> map().
|
||||
deep_remove([], Map) ->
|
||||
|
|
|
@ -22,8 +22,6 @@
|
|||
-include("logger.hrl").
|
||||
-include("types.hrl").
|
||||
-include("emqx_mqtt.hrl").
|
||||
-include("emqx.hrl").
|
||||
|
||||
|
||||
-export([ start_link/0
|
||||
, stop/0
|
||||
|
|
File diff suppressed because it is too large
|
@ -13,13 +13,22 @@
|
|||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
%%
|
||||
-module(emqx_gateway_api_client).
|
||||
|
||||
-behaviour(minirest_api).
|
||||
-module(emqx_zone_schema).
|
||||
|
||||
%% minirest behaviour callbacks
|
||||
-export([api_spec/0]).
|
||||
-export([namespace/0, roots/0, fields/1]).
|
||||
|
||||
api_spec() ->
|
||||
{[], []}.
|
||||
namespace() -> zone.
|
||||
|
||||
roots() -> [].
|
||||
|
||||
%% zone schemas are clones of the root-level schemas with the same name,
|
||||
%% except that they are not allowed to have default values.
|
||||
fields(Name) ->
|
||||
[{N, no_default(Sc)} || {N, Sc} <- emqx_schema:fields(Name)].
|
||||
|
||||
%% no default values for zone settings
|
||||
no_default(Sc) ->
|
||||
fun(default) -> undefined;
|
||||
(Other) -> hocon_schema:field_schema(Sc, Other)
|
||||
end.
|
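An illustrative check of the no_default/1 wrapper above, assuming a fun-style field schema; the real helper also covers map-style schemas by delegating to hocon_schema:field_schema/2.

-module(sketch_no_default).
-export([demo/0]).

no_default(Sc) ->
    fun(default) -> undefined;
       (Other) -> Sc(Other)
    end.

demo() ->
    Orig = fun(type) -> integer; (default) -> 3; (_) -> undefined end,
    Wrapped = no_default(Orig),
    undefined = Wrapped(default),   %% the default is stripped
    integer   = Wrapped(type),      %% everything else passes through
    ok.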
|
@ -0,0 +1,238 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-module(emqx_authentication_SUITE).
|
||||
|
||||
-behaviour(hocon_schema).
|
||||
-behaviour(emqx_authentication).
|
||||
|
||||
-compile(export_all).
|
||||
-compile(nowarn_export_all).
|
||||
|
||||
-include_lib("common_test/include/ct.hrl").
|
||||
-include_lib("eunit/include/eunit.hrl").
|
||||
-include_lib("typerefl/include/types.hrl").
|
||||
|
||||
-export([ fields/1 ]).
|
||||
|
||||
-export([ refs/0
|
||||
, create/1
|
||||
, update/2
|
||||
, authenticate/2
|
||||
, destroy/1
|
||||
]).
|
||||
|
||||
-define(AUTHN, emqx_authentication).
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% Hocon Schema
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
fields(type1) ->
|
||||
[ {mechanism, {enum, ['password-based']}}
|
||||
, {backend, {enum, ['built-in-database']}}
|
||||
, {enable, fun enable/1}
|
||||
];
|
||||
|
||||
fields(type2) ->
|
||||
[ {mechanism, {enum, ['password-based']}}
|
||||
, {backend, {enum, ['mysql']}}
|
||||
, {enable, fun enable/1}
|
||||
].
|
||||
|
||||
enable(type) -> boolean();
|
||||
enable(default) -> true;
|
||||
enable(_) -> undefined.
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% Callbacks
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
refs() ->
|
||||
[ hoconsc:ref(?MODULE, type1)
|
||||
, hoconsc:ref(?MODULE, type2)
|
||||
].
|
||||
|
||||
create(_Config) ->
|
||||
{ok, #{mark => 1}}.
|
||||
|
||||
update(_Config, _State) ->
|
||||
{ok, #{mark => 2}}.
|
||||
|
||||
authenticate(#{username := <<"good">>}, _State) ->
|
||||
{ok, #{is_superuser => true}};
|
||||
authenticate(#{username := _}, _State) ->
|
||||
{error, bad_username_or_password}.
|
||||
|
||||
destroy(_State) ->
|
||||
ok.
|
||||
|
||||
all() ->
|
||||
emqx_ct:all(?MODULE).
|
||||
|
||||
init_per_suite(Config) ->
|
||||
application:set_env(ekka, strict_mode, true),
|
||||
emqx_ct_helpers:start_apps([]),
|
||||
Config.
|
||||
|
||||
end_per_suite(_) ->
|
||||
emqx_ct_helpers:stop_apps([]),
|
||||
ok.
|
||||
|
||||
t_chain(_) ->
|
||||
% CRUD of authentication chain
|
||||
ChainName = 'test',
|
||||
?assertMatch({ok, []}, ?AUTHN:list_chains()),
|
||||
?assertMatch({ok, #{name := ChainName, authenticators := []}}, ?AUTHN:create_chain(ChainName)),
|
||||
?assertEqual({error, {already_exists, {chain, ChainName}}}, ?AUTHN:create_chain(ChainName)),
|
||||
?assertMatch({ok, #{name := ChainName, authenticators := []}}, ?AUTHN:lookup_chain(ChainName)),
|
||||
?assertMatch({ok, [#{name := ChainName}]}, ?AUTHN:list_chains()),
|
||||
?assertEqual(ok, ?AUTHN:delete_chain(ChainName)),
|
||||
?assertMatch({error, {not_found, {chain, ChainName}}}, ?AUTHN:lookup_chain(ChainName)),
|
||||
ok.
|
||||
|
||||
t_authenticator(_) ->
|
||||
ChainName = 'test',
|
||||
AuthenticatorConfig1 = #{mechanism => 'password-based',
|
||||
backend => 'built-in-database',
|
||||
enable => true},
|
||||
|
||||
% Create an authenticator when the authentication chain does not exist
|
||||
?assertEqual({error, {not_found, {chain, ChainName}}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
|
||||
?AUTHN:create_chain(ChainName),
|
||||
% Create an authenticator when the provider does not exist
|
||||
?assertEqual({error, no_available_provider}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
|
||||
|
||||
AuthNType1 = {'password-based', 'built-in-database'},
|
||||
?AUTHN:add_provider(AuthNType1, ?MODULE),
|
||||
ID1 = <<"password-based:built-in-database">>,
|
||||
|
||||
% CRUD of authenticators
|
||||
?assertMatch({ok, #{id := ID1, state := #{mark := 1}}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
|
||||
?assertMatch({ok, #{id := ID1}}, ?AUTHN:lookup_authenticator(ChainName, ID1)),
|
||||
?assertMatch({ok, [#{id := ID1}]}, ?AUTHN:list_authenticators(ChainName)),
|
||||
?assertEqual({error, {already_exists, {authenticator, ID1}}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
|
||||
?assertMatch({ok, #{id := ID1, state := #{mark := 2}}}, ?AUTHN:update_authenticator(ChainName, ID1, AuthenticatorConfig1)),
|
||||
?assertEqual(ok, ?AUTHN:delete_authenticator(ChainName, ID1)),
|
||||
?assertEqual({error, {not_found, {authenticator, ID1}}}, ?AUTHN:update_authenticator(ChainName, ID1, AuthenticatorConfig1)),
|
||||
?assertMatch({ok, []}, ?AUTHN:list_authenticators(ChainName)),
|
||||
|
||||
% Multiple authenticators exist at the same time
|
||||
AuthNType2 = {'password-based', mysql},
|
||||
?AUTHN:add_provider(AuthNType2, ?MODULE),
|
||||
ID2 = <<"password-based:mysql">>,
|
||||
AuthenticatorConfig2 = #{mechanism => 'password-based',
|
||||
backend => mysql,
|
||||
enable => true},
|
||||
?assertMatch({ok, #{id := ID1}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig1)),
|
||||
?assertMatch({ok, #{id := ID2}}, ?AUTHN:create_authenticator(ChainName, AuthenticatorConfig2)),
|
||||
|
||||
% Move authenticator
|
||||
?assertMatch({ok, [#{id := ID1}, #{id := ID2}]}, ?AUTHN:list_authenticators(ChainName)),
|
||||
?assertEqual(ok, ?AUTHN:move_authenticator(ChainName, ID2, top)),
|
||||
?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(ChainName)),
|
||||
?assertEqual(ok, ?AUTHN:move_authenticator(ChainName, ID2, bottom)),
|
||||
?assertMatch({ok, [#{id := ID1}, #{id := ID2}]}, ?AUTHN:list_authenticators(ChainName)),
|
||||
?assertEqual(ok, ?AUTHN:move_authenticator(ChainName, ID2, {before, ID1})),
|
||||
?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(ChainName)),
|
||||
|
||||
?AUTHN:delete_chain(ChainName),
|
||||
?AUTHN:remove_provider(AuthNType1),
|
||||
?AUTHN:remove_provider(AuthNType2),
|
||||
ok.
|
||||
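A small helper sketch showing the "mechanism:backend" authenticator id convention the assertions above rely on (binary ids such as <<"password-based:built-in-database">>); the function name is hypothetical.

-module(sketch_authn_id).
-export([authenticator_id/1]).

authenticator_id(#{mechanism := Mechanism, backend := Backend}) ->
    iolist_to_binary([atom_to_list(Mechanism), ":", atom_to_list(Backend)]);
authenticator_id(#{mechanism := Mechanism}) ->
    atom_to_binary(Mechanism, utf8).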
|
||||
t_authenticate(_) ->
|
||||
ListenerID = 'tcp:default',
|
||||
ClientInfo = #{zone => default,
|
||||
listener => ListenerID,
|
||||
protocol => mqtt,
|
||||
username => <<"good">>,
|
||||
password => <<"any">>},
|
||||
?assertEqual({ok, #{is_superuser => false}}, emqx_access_control:authenticate(ClientInfo)),
|
||||
|
||||
AuthNType = {'password-based', 'built-in-database'},
|
||||
?AUTHN:add_provider(AuthNType, ?MODULE),
|
||||
|
||||
AuthenticatorConfig = #{mechanism => 'password-based',
|
||||
backend => 'built-in-database',
|
||||
enable => true},
|
||||
?AUTHN:create_chain(ListenerID),
|
||||
?assertMatch({ok, _}, ?AUTHN:create_authenticator(ListenerID, AuthenticatorConfig)),
|
||||
?assertEqual({ok, #{is_superuser => true}}, emqx_access_control:authenticate(ClientInfo)),
|
||||
?assertEqual({error, bad_username_or_password}, emqx_access_control:authenticate(ClientInfo#{username => <<"bad">>})),
|
||||
|
||||
?AUTHN:delete_chain(ListenerID),
|
||||
?AUTHN:remove_provider(AuthNType),
|
||||
ok.
|
||||
|
||||
t_update_config(_) ->
|
||||
emqx_config_handler:add_handler([authentication], emqx_authentication),
|
||||
|
||||
AuthNType1 = {'password-based', 'built-in-database'},
|
||||
AuthNType2 = {'password-based', mysql},
|
||||
?AUTHN:add_provider(AuthNType1, ?MODULE),
|
||||
?AUTHN:add_provider(AuthNType2, ?MODULE),
|
||||
|
||||
Global = 'mqtt:global',
|
||||
AuthenticatorConfig1 = #{mechanism => 'password-based',
|
||||
backend => 'built-in-database',
|
||||
enable => true},
|
||||
AuthenticatorConfig2 = #{mechanism => 'password-based',
|
||||
backend => mysql,
|
||||
enable => true},
|
||||
ID1 = <<"password-based:built-in-database">>,
|
||||
ID2 = <<"password-based:mysql">>,
|
||||
|
||||
?assertMatch({ok, []}, ?AUTHN:list_chains()),
|
||||
?assertMatch({ok, _}, update_config([authentication], {create_authenticator, Global, AuthenticatorConfig1})),
|
||||
?assertMatch({ok, #{id := ID1, state := #{mark := 1}}}, ?AUTHN:lookup_authenticator(Global, ID1)),
|
||||
|
||||
?assertMatch({ok, _}, update_config([authentication], {create_authenticator, Global, AuthenticatorConfig2})),
|
||||
?assertMatch({ok, #{id := ID2, state := #{mark := 1}}}, ?AUTHN:lookup_authenticator(Global, ID2)),
|
||||
|
||||
?assertMatch({ok, _}, update_config([authentication], {update_authenticator, Global, ID1, #{}})),
|
||||
?assertMatch({ok, #{id := ID1, state := #{mark := 2}}}, ?AUTHN:lookup_authenticator(Global, ID1)),
|
||||
|
||||
?assertMatch({ok, _}, update_config([authentication], {move_authenticator, Global, ID2, top})),
|
||||
?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(Global)),
|
||||
|
||||
?assertMatch({ok, _}, update_config([authentication], {delete_authenticator, Global, ID1})),
|
||||
?assertEqual({error, {not_found, {authenticator, ID1}}}, ?AUTHN:lookup_authenticator(Global, ID1)),
|
||||
|
||||
ListenerID = 'tcp:default',
|
||||
ConfKeyPath = [listeners, tcp, default, authentication],
|
||||
?assertMatch({ok, _}, update_config(ConfKeyPath, {create_authenticator, ListenerID, AuthenticatorConfig1})),
|
||||
?assertMatch({ok, #{id := ID1, state := #{mark := 1}}}, ?AUTHN:lookup_authenticator(ListenerID, ID1)),
|
||||
|
||||
?assertMatch({ok, _}, update_config(ConfKeyPath, {create_authenticator, ListenerID, AuthenticatorConfig2})),
|
||||
?assertMatch({ok, #{id := ID2, state := #{mark := 1}}}, ?AUTHN:lookup_authenticator(ListenerID, ID2)),
|
||||
|
||||
?assertMatch({ok, _}, update_config(ConfKeyPath, {update_authenticator, ListenerID, ID1, #{}})),
|
||||
?assertMatch({ok, #{id := ID1, state := #{mark := 2}}}, ?AUTHN:lookup_authenticator(ListenerID, ID1)),
|
||||
|
||||
?assertMatch({ok, _}, update_config(ConfKeyPath, {move_authenticator, ListenerID, ID2, top})),
|
||||
?assertMatch({ok, [#{id := ID2}, #{id := ID1}]}, ?AUTHN:list_authenticators(ListenerID)),
|
||||
|
||||
?assertMatch({ok, _}, update_config(ConfKeyPath, {delete_authenticator, ListenerID, ID1})),
|
||||
?assertEqual({error, {not_found, {authenticator, ID1}}}, ?AUTHN:lookup_authenticator(ListenerID, ID1)),
|
||||
|
||||
?AUTHN:delete_chain(Global),
|
||||
?AUTHN:remove_provider(AuthNType1),
|
||||
?AUTHN:remove_provider(AuthNType2),
|
||||
ok.
|
||||
|
||||
update_config(Path, ConfigRequest) ->
|
||||
emqx:update_config(Path, ConfigRequest, #{rawconf_with_defaults => true}).
|
|
@ -144,7 +144,7 @@ init_per_suite(Config) ->
|
|||
%% Access Control Meck
|
||||
ok = meck:new(emqx_access_control, [passthrough, no_history, no_link]),
|
||||
ok = meck:expect(emqx_access_control, authenticate,
|
||||
fun(_) -> {ok, #{superuser => false}} end),
|
||||
fun(_) -> {ok, #{is_superuser => false}} end),
|
||||
ok = meck:expect(emqx_access_control, authorize, fun(_, _, _) -> allow end),
|
||||
%% Broker Meck
|
||||
ok = meck:new(emqx_broker, [passthrough, no_history, no_link]),
|
||||
|
|
|
@ -114,8 +114,8 @@ t_cm(_) ->
|
|||
emqx_config:put_zone_conf(default, [mqtt, idle_timeout], 15000).
|
||||
|
||||
t_cm_registry(_) ->
|
||||
Info = supervisor:which_children(emqx_cm_sup),
|
||||
{_, Pid, _, _} = lists:keyfind(registry, 1, Info),
|
||||
Children = supervisor:which_children(emqx_cm_sup),
|
||||
{_, Pid, _, _} = lists:keyfind(emqx_cm_registry, 1, Children),
|
||||
ignored = gen_server:call(Pid, <<"Unexpected call">>),
|
||||
gen_server:cast(Pid, <<"Unexpected cast">>),
|
||||
Pid ! <<"Unexpected info">>.
|
||||
|
|
|
@ -55,8 +55,8 @@ t_detect_check(_) ->
|
|||
true = emqx_banned:check(ClientInfo),
|
||||
timer:sleep(3000),
|
||||
false = emqx_banned:check(ClientInfo),
|
||||
Childrens = supervisor:which_children(emqx_cm_sup),
|
||||
{flapping, Pid, _, _} = lists:keyfind(flapping, 1, Childrens),
|
||||
Children = supervisor:which_children(emqx_cm_sup),
|
||||
{emqx_flapping, Pid, _, _} = lists:keyfind(emqx_flapping, 1, Children),
|
||||
gen_server:call(Pid, unexpected_msg),
|
||||
gen_server:cast(Pid, unexpected_msg),
|
||||
Pid ! test,
|
||||
|
@ -72,4 +72,4 @@ t_expired_detecting(_) ->
|
|||
(_) -> false end, ets:tab2list(emqx_flapping))),
|
||||
timer:sleep(200),
|
||||
?assertEqual(true, lists:all(fun({flapping, <<"client008">>, _, _, _}) -> false;
|
||||
(_) -> true end, ets:tab2list(emqx_flapping))).
|
||||
(_) -> true end, ets:tab2list(emqx_flapping))).
|
||||
|
|
|
@ -1,3 +1,3 @@
|
|||
user_id,password_hash,salt,superuser
|
||||
user_id,password_hash,salt,is_superuser
|
||||
myuser3,b6c743545a7817ae8c8f624371d5f5f0373234bb0ff36b8ffbf19bce0e06ab75,de1024f462fb83910fd13151bd4bd235,true
|
||||
myuser4,ee68c985a69208b6eda8c6c9b4c7c2d2b15ee2352cdd64a903171710a99182e8,ad773b5be9dd0613fe6c2f4d8c403139,false
|
||||
|
|
|
|
@ -3,12 +3,12 @@
|
|||
"user_id":"myuser1",
|
||||
"password_hash":"c5e46903df45e5dc096dc74657610dbee8deaacae656df88a1788f1847390242",
|
||||
"salt": "e378187547bf2d6f0545a3f441aa4d8a",
|
||||
"superuser": true
|
||||
"is_superuser": true
|
||||
},
|
||||
{
|
||||
"user_id":"myuser2",
|
||||
"password_hash":"f4d17f300b11e522fd33f497c11b126ef1ea5149c74d2220f9a16dc876d4567b",
|
||||
"salt": "6d3f9bd5b54d94b98adbcfe10b6d181f",
|
||||
"superuser": false
|
||||
"is_superuser": false
|
||||
}
|
||||
]
|
||||
|
|
|
@ -1,37 +1,6 @@
|
|||
authentication {
|
||||
enable = false
|
||||
authenticators = [
|
||||
# {
|
||||
# name: "authenticator1"
|
||||
# mechanism: password-based
|
||||
# server_type: built-in-database
|
||||
# user_id_type: clientid
|
||||
# },
|
||||
# {
|
||||
# name: "authenticator2"
|
||||
# mechanism: password-based
|
||||
# server_type: mongodb
|
||||
# server: "127.0.0.1:27017"
|
||||
# database: mqtt
|
||||
# collection: users
|
||||
# selector: {
|
||||
# username: "${mqtt-username}"
|
||||
# }
|
||||
# password_hash_field: password_hash
|
||||
# salt_field: salt
|
||||
# password_hash_algorithm: sha256
|
||||
# salt_position: prefix
|
||||
# },
|
||||
# {
|
||||
# name: "authenticator 3"
|
||||
# mechanism: password-based
|
||||
# server_type: redis
|
||||
# server: "127.0.0.1:6379"
|
||||
# password: "public"
|
||||
# database: 0
|
||||
# query: "HMGET ${mqtt-username} password_hash salt"
|
||||
# password_hash_algorithm: sha256
|
||||
# salt_position: prefix
|
||||
# }
|
||||
]
|
||||
}
|
||||
# authentication: {
|
||||
# mechanism: password-based
|
||||
# backend: built-in-database
|
||||
# user_id_type: clientid
|
||||
# }
|
||||
|
||||
|
|
|
@ -15,24 +15,11 @@
|
|||
%%--------------------------------------------------------------------
|
||||
|
||||
-define(APP, emqx_authn).
|
||||
-define(CHAIN, <<"mqtt">>).
|
||||
|
||||
-define(VER_1, <<"1">>).
|
||||
-define(VER_2, <<"2">>).
|
||||
-define(AUTHN, emqx_authentication).
|
||||
|
||||
-define(GLOBAL, 'mqtt:global').
|
||||
|
||||
-define(RE_PLACEHOLDER, "\\$\\{[a-z0-9\\-]+\\}").
|
||||
|
||||
-record(authenticator,
|
||||
{ id :: binary()
|
||||
, name :: binary()
|
||||
, provider :: module()
|
||||
, state :: map()
|
||||
}).
|
||||
|
||||
-record(chain,
|
||||
{ id :: binary()
|
||||
, authenticators :: [{binary(), binary(), #authenticator{}}]
|
||||
, created_at :: integer()
|
||||
}).
|
||||
|
||||
-define(AUTH_SHARD, emqx_authn_shard).
|
||||
|
|
|
@ -1,6 +1,4 @@
|
|||
{deps, [
|
||||
{jose, {git, "https://github.com/potatosalad/erlang-jose", {tag, "1.11.1"}}}
|
||||
]}.
|
||||
{deps, []}.
|
||||
|
||||
{edoc_opts, [{preprocess, true}]}.
|
||||
{erl_opts, [warn_unused_vars,
|
||||
|
|
|
@ -15,640 +15,3 @@
|
|||
%%--------------------------------------------------------------------
|
||||
|
||||
-module(emqx_authn).
|
||||
|
||||
-behaviour(gen_server).
|
||||
|
||||
-behaviour(emqx_config_handler).
|
||||
|
||||
-include("emqx_authn.hrl").
|
||||
-include_lib("emqx/include/logger.hrl").
|
||||
|
||||
-export([ pre_config_update/2
|
||||
, post_config_update/4
|
||||
, update_config/2
|
||||
]).
|
||||
|
||||
-export([ enable/0
|
||||
, disable/0
|
||||
, is_enabled/0
|
||||
]).
|
||||
|
||||
-export([authenticate/2]).
|
||||
|
||||
-export([ start_link/0
|
||||
, stop/0
|
||||
]).
|
||||
|
||||
-export([ create_chain/1
|
||||
, delete_chain/1
|
||||
, lookup_chain/1
|
||||
, list_chains/0
|
||||
, create_authenticator/2
|
||||
, delete_authenticator/2
|
||||
, update_authenticator/3
|
||||
, update_or_create_authenticator/3
|
||||
, lookup_authenticator/2
|
||||
, list_authenticators/1
|
||||
, move_authenticator/3
|
||||
]).
|
||||
|
||||
-export([ import_users/3
|
||||
, add_user/3
|
||||
, delete_user/3
|
||||
, update_user/4
|
||||
, lookup_user/3
|
||||
, list_users/2
|
||||
]).
|
||||
|
||||
%% gen_server callbacks
|
||||
-export([ init/1
|
||||
, handle_call/3
|
||||
, handle_cast/2
|
||||
, handle_info/2
|
||||
, terminate/2
|
||||
, code_change/3
|
||||
]).
|
||||
|
||||
-define(CHAIN_TAB, emqx_authn_chain).
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% APIs
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
pre_config_update({enable, Enable}, _OldConfig) ->
|
||||
{ok, Enable};
|
||||
pre_config_update({create_authenticator, Config}, OldConfig) ->
|
||||
{ok, OldConfig ++ [Config]};
|
||||
pre_config_update({delete_authenticator, ID}, OldConfig) ->
|
||||
case lookup_authenticator(?CHAIN, ID) of
|
||||
{error, Reason} -> {error, Reason};
|
||||
{ok, #{name := Name}} ->
|
||||
NewConfig = lists:filter(fun(#{<<"name">> := N}) ->
|
||||
N =/= Name
|
||||
end, OldConfig),
|
||||
{ok, NewConfig}
|
||||
end;
|
||||
pre_config_update({update_authenticator, ID, Config}, OldConfig) ->
|
||||
case lookup_authenticator(?CHAIN, ID) of
|
||||
{error, Reason} -> {error, Reason};
|
||||
{ok, #{name := Name}} ->
|
||||
NewConfig = lists:map(fun(#{<<"name">> := N} = C) ->
|
||||
case N =:= Name of
|
||||
true -> Config;
|
||||
false -> C
|
||||
end
|
||||
end, OldConfig),
|
||||
{ok, NewConfig}
|
||||
end;
|
||||
pre_config_update({update_or_create_authenticator, ID, Config}, OldConfig) ->
|
||||
case lookup_authenticator(?CHAIN, ID) of
|
||||
{error, _Reason} -> OldConfig ++ [Config];
|
||||
{ok, #{name := Name}} ->
|
||||
NewConfig = lists:map(fun(#{<<"name">> := N} = C) ->
|
||||
case N =:= Name of
|
||||
true -> Config;
|
||||
false -> C
|
||||
end
|
||||
end, OldConfig),
|
||||
{ok, NewConfig}
|
||||
end;
|
||||
pre_config_update({move_authenticator, ID, Position}, OldConfig) ->
|
||||
case lookup_authenticator(?CHAIN, ID) of
|
||||
{error, Reason} -> {error, Reason};
|
||||
{ok, #{name := Name}} ->
|
||||
{ok, Found, Part1, Part2} = split_by_name(Name, OldConfig),
|
||||
case Position of
|
||||
<<"top">> ->
|
||||
{ok, [Found | Part1] ++ Part2};
|
||||
<<"bottom">> ->
|
||||
{ok, Part1 ++ Part2 ++ [Found]};
|
||||
Before ->
|
||||
case binary:split(Before, <<":">>, [global]) of
|
||||
[<<"before">>, ID0] ->
|
||||
case lookup_authenticator(?CHAIN, ID0) of
|
||||
{error, Reason} -> {error, Reason};
|
||||
{ok, #{name := Name1}} ->
|
||||
{ok, NFound, NPart1, NPart2} = split_by_name(Name1, Part1 ++ Part2),
|
||||
{ok, NPart1 ++ [Found, NFound | NPart2]}
|
||||
end;
|
||||
_ ->
|
||||
{error, {invalid_parameter, position}}
|
||||
end
|
||||
end
|
||||
end.
|
||||
|
||||
post_config_update({enable, true}, _NewConfig, _OldConfig, _AppEnvs) ->
|
||||
emqx_authn:enable();
|
||||
post_config_update({enable, false}, _NewConfig, _OldConfig, _AppEnvs) ->
|
||||
emqx_authn:disable();
|
||||
post_config_update({create_authenticator, #{<<"name">> := Name}}, NewConfig, _OldConfig, _AppEnvs) ->
|
||||
case lists:filter(
|
||||
fun(#{name := N}) ->
|
||||
N =:= Name
|
||||
end, NewConfig) of
|
||||
[Config] ->
|
||||
create_authenticator(?CHAIN, Config);
|
||||
[_Config | _] ->
|
||||
{error, name_has_be_used}
|
||||
end;
|
||||
post_config_update({delete_authenticator, ID}, _NewConfig, _OldConfig, _AppEnvs) ->
|
||||
case delete_authenticator(?CHAIN, ID) of
|
||||
ok -> ok;
|
||||
{error, Reason} -> throw(Reason)
|
||||
end;
|
||||
post_config_update({update_authenticator, ID, #{<<"name">> := Name}}, NewConfig, _OldConfig, _AppEnvs) ->
|
||||
case lists:filter(
|
||||
fun(#{name := N}) ->
|
||||
N =:= Name
|
||||
end, NewConfig) of
|
||||
[Config] ->
|
||||
update_authenticator(?CHAIN, ID, Config);
|
||||
[_Config | _] ->
|
||||
{error, name_has_be_used}
|
||||
end;
|
||||
post_config_update({update_or_create_authenticator, ID, #{<<"name">> := Name}}, NewConfig, _OldConfig, _AppEnvs) ->
|
||||
case lists:filter(
|
||||
fun(#{name := N}) ->
|
||||
N =:= Name
|
||||
end, NewConfig) of
|
||||
[Config] ->
|
||||
update_or_create_authenticator(?CHAIN, ID, Config);
|
||||
[_Config | _] ->
|
||||
{error, name_has_be_used}
|
||||
end;
|
||||
post_config_update({move_authenticator, ID, Position}, _NewConfig, _OldConfig, _AppEnvs) ->
|
||||
NPosition = case Position of
|
||||
<<"top">> -> top;
|
||||
<<"bottom">> -> bottom;
|
||||
Before ->
|
||||
case binary:split(Before, <<":">>, [global]) of
|
||||
[<<"before">>, ID0] ->
|
||||
{before, ID0};
|
||||
_ ->
|
||||
{error, {invalid_parameter, position}}
|
||||
end
|
||||
end,
|
||||
move_authenticator(?CHAIN, ID, NPosition).
|
||||
|
||||
update_config(Path, ConfigRequest) ->
|
||||
emqx:update_config(Path, ConfigRequest, #{rawconf_with_defaults => true}).
|
||||
|
||||
enable() ->
|
||||
case emqx:hook('client.authenticate', {?MODULE, authenticate, []}) of
|
||||
ok -> ok;
|
||||
{error, already_exists} -> ok
|
||||
end.
|
||||
|
||||
disable() ->
|
||||
emqx:unhook('client.authenticate', {?MODULE, authenticate, []}),
|
||||
ok.
|
||||
|
||||
is_enabled() ->
|
||||
Callbacks = emqx_hooks:lookup('client.authenticate'),
|
||||
lists:any(fun({callback, {?MODULE, authenticate, []}, _, _}) ->
|
||||
true;
|
||||
(_) ->
|
||||
false
|
||||
end, Callbacks).
|
||||
|
||||
authenticate(Credential, _AuthResult) ->
|
||||
case ets:lookup(?CHAIN_TAB, ?CHAIN) of
|
||||
[#chain{authenticators = Authenticators}] ->
|
||||
do_authenticate(Authenticators, Credential);
|
||||
[] ->
|
||||
{stop, {error, not_authorized}}
|
||||
end.
|
||||
|
||||
do_authenticate([], _) ->
|
||||
{stop, {error, not_authorized}};
|
||||
do_authenticate([{_, _, #authenticator{provider = Provider, state = State}} | More], Credential) ->
|
||||
case Provider:authenticate(Credential, State) of
|
||||
ignore ->
|
||||
do_authenticate(More, Credential);
|
||||
Result ->
|
||||
%% {ok, Extra}
|
||||
%% {ok, Extra, AuthData}
|
||||
%% {ok, MetaData}
|
||||
%% {continue, AuthCache}
|
||||
%% {continue, AuthData, AuthCache}
|
||||
%% {error, Reason}
|
||||
{stop, Result}
|
||||
end.
|
||||
|
||||
start_link() ->
|
||||
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
|
||||
|
||||
stop() ->
|
||||
gen_server:stop(?MODULE).
|
||||
|
||||
create_chain(#{id := ID}) ->
|
||||
gen_server:call(?MODULE, {create_chain, ID}).
|
||||
|
||||
delete_chain(ID) ->
|
||||
gen_server:call(?MODULE, {delete_chain, ID}).
|
||||
|
||||
lookup_chain(ID) ->
|
||||
gen_server:call(?MODULE, {lookup_chain, ID}).
|
||||
|
||||
list_chains() ->
|
||||
Chains = ets:tab2list(?CHAIN_TAB),
|
||||
{ok, [serialize_chain(Chain) || Chain <- Chains]}.
|
||||
|
||||
create_authenticator(ChainID, Config) ->
|
||||
gen_server:call(?MODULE, {create_authenticator, ChainID, Config}).
|
||||
|
||||
delete_authenticator(ChainID, AuthenticatorID) ->
|
||||
gen_server:call(?MODULE, {delete_authenticator, ChainID, AuthenticatorID}).
|
||||
|
||||
update_authenticator(ChainID, AuthenticatorID, Config) ->
|
||||
gen_server:call(?MODULE, {update_authenticator, ChainID, AuthenticatorID, Config}).
|
||||
|
||||
update_or_create_authenticator(ChainID, AuthenticatorID, Config) ->
|
||||
gen_server:call(?MODULE, {update_or_create_authenticator, ChainID, AuthenticatorID, Config}).
|
||||
|
||||
lookup_authenticator(ChainID, AuthenticatorID) ->
|
||||
case ets:lookup(?CHAIN_TAB, ChainID) of
|
||||
[] ->
|
||||
{error, {not_found, {chain, ChainID}}};
|
||||
[#chain{authenticators = Authenticators}] ->
|
||||
case lists:keyfind(AuthenticatorID, 1, Authenticators) of
|
||||
false ->
|
||||
{error, {not_found, {authenticator, AuthenticatorID}}};
|
||||
{_, _, Authenticator} ->
|
||||
{ok, serialize_authenticator(Authenticator)}
|
||||
end
|
||||
end.
|
||||
|
||||
list_authenticators(ChainID) ->
|
||||
case ets:lookup(?CHAIN_TAB, ChainID) of
|
||||
[] ->
|
||||
{error, {not_found, {chain, ChainID}}};
|
||||
[#chain{authenticators = Authenticators}] ->
|
||||
{ok, serialize_authenticators(Authenticators)}
|
||||
end.
|
||||
|
||||
move_authenticator(ChainID, AuthenticatorID, Position) ->
|
||||
gen_server:call(?MODULE, {move_authenticator, ChainID, AuthenticatorID, Position}).
|
||||
|
||||
import_users(ChainID, AuthenticatorID, Filename) ->
|
||||
gen_server:call(?MODULE, {import_users, ChainID, AuthenticatorID, Filename}).
|
||||
|
||||
add_user(ChainID, AuthenticatorID, UserInfo) ->
|
||||
gen_server:call(?MODULE, {add_user, ChainID, AuthenticatorID, UserInfo}).
|
||||
|
||||
delete_user(ChainID, AuthenticatorID, UserID) ->
|
||||
gen_server:call(?MODULE, {delete_user, ChainID, AuthenticatorID, UserID}).
|
||||
|
||||
update_user(ChainID, AuthenticatorID, UserID, NewUserInfo) ->
|
||||
gen_server:call(?MODULE, {update_user, ChainID, AuthenticatorID, UserID, NewUserInfo}).
|
||||
|
||||
lookup_user(ChainID, AuthenticatorID, UserID) ->
|
||||
gen_server:call(?MODULE, {lookup_user, ChainID, AuthenticatorID, UserID}).
|
||||
|
||||
%% TODO: Support pagination
|
||||
list_users(ChainID, AuthenticatorID) ->
|
||||
gen_server:call(?MODULE, {list_users, ChainID, AuthenticatorID}).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% gen_server callbacks
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
init(_Opts) ->
|
||||
_ = ets:new(?CHAIN_TAB, [ named_table, set, public
|
||||
, {keypos, #chain.id}
|
||||
, {read_concurrency, true}]),
|
||||
{ok, #{}}.
|
||||
|
||||
handle_call({create_chain, ID}, _From, State) ->
|
||||
case ets:member(?CHAIN_TAB, ID) of
|
||||
true ->
|
||||
reply({error, {already_exists, {chain, ID}}}, State);
|
||||
false ->
|
||||
Chain = #chain{id = ID,
|
||||
authenticators = [],
|
||||
created_at = erlang:system_time(millisecond)},
|
||||
true = ets:insert(?CHAIN_TAB, Chain),
|
||||
reply({ok, serialize_chain(Chain)}, State)
|
||||
end;
|
||||
|
||||
handle_call({delete_chain, ID}, _From, State) ->
|
||||
case ets:lookup(?CHAIN_TAB, ID) of
|
||||
[] ->
|
||||
reply({error, {not_found, {chain, ID}}}, State);
|
||||
[#chain{authenticators = Authenticators}] ->
|
||||
_ = [do_delete_authenticator(Authenticator) || {_, _, Authenticator} <- Authenticators],
|
||||
true = ets:delete(?CHAIN_TAB, ID),
|
||||
reply(ok, State)
|
||||
end;
|
||||
|
||||
handle_call({lookup_chain, ID}, _From, State) ->
|
||||
case ets:lookup(?CHAIN_TAB, ID) of
|
||||
[] ->
|
||||
reply({error, {not_found, {chain, ID}}}, State);
|
||||
[Chain] ->
|
||||
reply({ok, serialize_chain(Chain)}, State)
|
||||
end;
|
||||
|
||||
handle_call({create_authenticator, ChainID, #{name := Name} = Config}, _From, State) ->
|
||||
UpdateFun =
|
||||
fun(#chain{authenticators = Authenticators} = Chain) ->
|
||||
case lists:keymember(Name, 2, Authenticators) of
|
||||
true ->
|
||||
{error, name_has_be_used};
|
||||
false ->
|
||||
AlreadyExist = fun(ID) ->
|
||||
lists:keymember(ID, 1, Authenticators)
|
||||
end,
|
||||
AuthenticatorID = gen_id(AlreadyExist),
|
||||
case do_create_authenticator(ChainID, AuthenticatorID, Config) of
|
||||
{ok, Authenticator} ->
|
||||
NAuthenticators = Authenticators ++ [{AuthenticatorID, Name, Authenticator}],
|
||||
true = ets:insert(?CHAIN_TAB, Chain#chain{authenticators = NAuthenticators}),
|
||||
{ok, serialize_authenticator(Authenticator)};
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
end
|
||||
end
|
||||
end,
|
||||
Reply = update_chain(ChainID, UpdateFun),
|
||||
reply(Reply, State);
|
||||
|
||||
handle_call({delete_authenticator, ChainID, AuthenticatorID}, _From, State) ->
|
||||
UpdateFun =
|
||||
fun(#chain{authenticators = Authenticators} = Chain) ->
|
||||
case lists:keytake(AuthenticatorID, 1, Authenticators) of
|
||||
false ->
|
||||
{error, {not_found, {authenticator, AuthenticatorID}}};
|
||||
{value, {_, _, Authenticator}, NAuthenticators} ->
|
||||
_ = do_delete_authenticator(Authenticator),
|
||||
true = ets:insert(?CHAIN_TAB, Chain#chain{authenticators = NAuthenticators}),
|
||||
ok
|
||||
end
|
||||
end,
|
||||
Reply = update_chain(ChainID, UpdateFun),
|
||||
reply(Reply, State);
|
||||
|
||||
handle_call({update_authenticator, ChainID, AuthenticatorID, Config}, _From, State) ->
|
||||
Reply = update_or_create_authenticator(ChainID, AuthenticatorID, Config, false),
|
||||
reply(Reply, State);
|
||||
|
||||
handle_call({update_or_create_authenticator, ChainID, AuthenticatorID, Config}, _From, State) ->
|
||||
Reply = update_or_create_authenticator(ChainID, AuthenticatorID, Config, true),
|
||||
reply(Reply, State);
|
||||
|
||||
handle_call({move_authenticator, ChainID, AuthenticatorID, Position}, _From, State) ->
|
||||
UpdateFun =
|
||||
fun(#chain{authenticators = Authenticators} = Chain) ->
|
||||
case do_move_authenticator(AuthenticatorID, Authenticators, Position) of
|
||||
{ok, NAuthenticators} ->
|
||||
true = ets:insert(?CHAIN_TAB, Chain#chain{authenticators = NAuthenticators}),
|
||||
ok;
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
end
|
||||
end,
|
||||
Reply = update_chain(ChainID, UpdateFun),
|
||||
reply(Reply, State);
|
||||
|
||||
handle_call({import_users, ChainID, AuthenticatorID, Filename}, _From, State) ->
|
||||
Reply = call_authenticator(ChainID, AuthenticatorID, import_users, [Filename]),
|
||||
reply(Reply, State);
|
||||
|
||||
handle_call({add_user, ChainID, AuthenticatorID, UserInfo}, _From, State) ->
|
||||
Reply = call_authenticator(ChainID, AuthenticatorID, add_user, [UserInfo]),
|
||||
reply(Reply, State);
|
||||
|
||||
handle_call({delete_user, ChainID, AuthenticatorID, UserID}, _From, State) ->
|
||||
Reply = call_authenticator(ChainID, AuthenticatorID, delete_user, [UserID]),
|
||||
reply(Reply, State);
|
||||
|
||||
handle_call({update_user, ChainID, AuthenticatorID, UserID, NewUserInfo}, _From, State) ->
|
||||
Reply = call_authenticator(ChainID, AuthenticatorID, update_user, [UserID, NewUserInfo]),
|
||||
reply(Reply, State);
|
||||
|
||||
handle_call({lookup_user, ChainID, AuthenticatorID, UserID}, _From, State) ->
|
||||
Reply = call_authenticator(ChainID, AuthenticatorID, lookup_user, [UserID]),
|
||||
reply(Reply, State);
|
||||
|
||||
handle_call({list_users, ChainID, AuthenticatorID}, _From, State) ->
|
||||
Reply = call_authenticator(ChainID, AuthenticatorID, list_users, []),
|
||||
reply(Reply, State);
|
||||
|
||||
handle_call(Req, _From, State) ->
|
||||
?LOG(error, "Unexpected call: ~p", [Req]),
|
||||
{reply, ignored, State}.
|
||||
|
||||
handle_cast(Req, State) ->
|
||||
?LOG(error, "Unexpected case: ~p", [Req]),
|
||||
{noreply, State}.
|
||||
|
||||
handle_info(Info, State) ->
|
||||
?LOG(error, "Unexpected info: ~p", [Info]),
|
||||
{noreply, State}.
|
||||
|
||||
terminate(_Reason, _State) ->
|
||||
ok.
|
||||
|
||||
code_change(_OldVsn, State, _Extra) ->
|
||||
{ok, State}.
|
||||
|
||||
reply(Reply, State) ->
|
||||
{reply, Reply, State}.
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% Internal functions
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
authenticator_provider(#{mechanism := 'password-based', server_type := 'built-in-database'}) ->
|
||||
emqx_authn_mnesia;
|
||||
authenticator_provider(#{mechanism := 'password-based', server_type := 'mysql'}) ->
|
||||
emqx_authn_mysql;
|
||||
authenticator_provider(#{mechanism := 'password-based', server_type := 'pgsql'}) ->
|
||||
emqx_authn_pgsql;
|
||||
authenticator_provider(#{mechanism := 'password-based', server_type := 'mongodb'}) ->
|
||||
emqx_authn_mongodb;
|
||||
authenticator_provider(#{mechanism := 'password-based', server_type := 'redis'}) ->
|
||||
emqx_authn_redis;
|
||||
authenticator_provider(#{mechanism := 'password-based', server_type := 'http-server'}) ->
|
||||
emqx_authn_http;
|
||||
authenticator_provider(#{mechanism := jwt}) ->
|
||||
emqx_authn_jwt;
|
||||
authenticator_provider(#{mechanism := scram, server_type := 'built-in-database'}) ->
|
||||
emqx_enhanced_authn_scram_mnesia.
|
||||
|
||||
gen_id(AlreadyExist) ->
|
||||
ID = list_to_binary(emqx_rule_id:gen()),
|
||||
case AlreadyExist(ID) of
|
||||
true -> gen_id(AlreadyExist);
|
||||
false -> ID
|
||||
end.
|
||||
|
||||
switch_version(State = #{version := ?VER_1}) ->
|
||||
State#{version := ?VER_2};
|
||||
switch_version(State = #{version := ?VER_2}) ->
|
||||
State#{version := ?VER_1};
|
||||
switch_version(State) ->
|
||||
State#{version => ?VER_1}.
|
||||
|
||||
split_by_name(Name, Config) ->
|
||||
{Part1, Part2, true} = lists:foldl(
|
||||
fun(#{<<"name">> := N} = C, {P1, P2, F0}) ->
|
||||
F = case N =:= Name of
|
||||
true -> true;
|
||||
false -> F0
|
||||
end,
|
||||
case F of
|
||||
false -> {[C | P1], P2, F};
|
||||
true -> {P1, [C | P2], F}
|
||||
end
|
||||
end, {[], [], false}, Config),
|
||||
[Found | NPart2] = lists:reverse(Part2),
|
||||
{ok, Found, lists:reverse(Part1), NPart2}.
|
||||
|
||||
do_create_authenticator(ChainID, AuthenticatorID, #{name := Name} = Config) ->
|
||||
Provider = authenticator_provider(Config),
|
||||
Unique = <<ChainID/binary, "/", AuthenticatorID/binary, ":", ?VER_1/binary>>,
|
||||
case Provider:create(Config#{'_unique' => Unique}) of
|
||||
{ok, State} ->
|
||||
Authenticator = #authenticator{id = AuthenticatorID,
|
||||
name = Name,
|
||||
provider = Provider,
|
||||
state = switch_version(State)},
|
||||
{ok, Authenticator};
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
end.
|
||||
|
||||
do_delete_authenticator(#authenticator{provider = Provider, state = State}) ->
|
||||
_ = Provider:destroy(State),
|
||||
ok.
|
||||
|
||||
update_or_create_authenticator(ChainID, AuthenticatorID, #{name := NewName} = Config, CreateWhenNotFound) ->
|
||||
UpdateFun =
|
||||
fun(#chain{authenticators = Authenticators} = Chain) ->
|
||||
case lists:keytake(AuthenticatorID, 1, Authenticators) of
|
||||
false ->
|
||||
case CreateWhenNotFound of
|
||||
true ->
|
||||
case lists:keymember(NewName, 2, Authenticators) of
|
||||
true ->
|
||||
{error, name_has_be_used};
|
||||
false ->
|
||||
case do_create_authenticator(ChainID, AuthenticatorID, Config) of
|
||||
{ok, Authenticator} ->
|
||||
NAuthenticators = Authenticators ++ [{AuthenticatorID, NewName, Authenticator}],
|
||||
true = ets:insert(?CHAIN_TAB, Chain#chain{authenticators = NAuthenticators}),
|
||||
{ok, serialize_authenticator(Authenticator)};
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
end
|
||||
end;
|
||||
false ->
|
||||
{error, {not_found, {authenticator, AuthenticatorID}}}
|
||||
end;
|
||||
{value,
|
||||
{_, _, #authenticator{provider = Provider,
|
||||
state = #{version := Version} = State} = Authenticator},
|
||||
Others} ->
|
||||
case lists:keymember(NewName, 2, Others) of
|
||||
true ->
|
||||
{error, name_has_be_used};
|
||||
false ->
|
||||
case (NewProvider = authenticator_provider(Config)) =:= Provider of
|
||||
true ->
|
||||
Unique = <<ChainID/binary, "/", AuthenticatorID/binary, ":", Version/binary>>,
|
||||
case Provider:update(Config#{'_unique' => Unique}, State) of
|
||||
{ok, NewState} ->
|
||||
NewAuthenticator = Authenticator#authenticator{name = NewName,
|
||||
state = switch_version(NewState)},
|
||||
NewAuthenticators = replace_authenticator(AuthenticatorID, NewAuthenticator, Authenticators),
|
||||
true = ets:insert(?CHAIN_TAB, Chain#chain{authenticators = NewAuthenticators}),
|
||||
{ok, serialize_authenticator(NewAuthenticator)};
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
end;
|
||||
false ->
|
||||
Unique = <<ChainID/binary, "/", AuthenticatorID/binary, ":", Version/binary>>,
|
||||
case NewProvider:create(Config#{'_unique' => Unique}) of
|
||||
{ok, NewState} ->
|
||||
NewAuthenticator = Authenticator#authenticator{name = NewName,
|
||||
provider = NewProvider,
|
||||
state = switch_version(NewState)},
|
||||
NewAuthenticators = replace_authenticator(AuthenticatorID, NewAuthenticator, Authenticators),
|
||||
true = ets:insert(?CHAIN_TAB, Chain#chain{authenticators = NewAuthenticators}),
|
||||
_ = Provider:destroy(State),
|
||||
{ok, serialize_authenticator(NewAuthenticator)};
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
end,
|
||||
update_chain(ChainID, UpdateFun).
|
||||
|
||||
replace_authenticator(ID, #authenticator{name = Name} = Authenticator, Authenticators) ->
|
||||
lists:keyreplace(ID, 1, Authenticators, {ID, Name, Authenticator}).
|
||||
|
||||
do_move_authenticator(AuthenticatorID, Authenticators, Position) when is_binary(AuthenticatorID) ->
|
||||
case lists:keytake(AuthenticatorID, 1, Authenticators) of
|
||||
false ->
|
||||
{error, {not_found, {authenticator, AuthenticatorID}}};
|
||||
{value, Authenticator, NAuthenticators} ->
|
||||
do_move_authenticator(Authenticator, NAuthenticators, Position)
|
||||
end;
|
||||
|
||||
do_move_authenticator(Authenticator, Authenticators, top) ->
|
||||
{ok, [Authenticator | Authenticators]};
|
||||
do_move_authenticator(Authenticator, Authenticators, bottom) ->
|
||||
{ok, Authenticators ++ [Authenticator]};
|
||||
do_move_authenticator(Authenticator, Authenticators, {before, ID}) ->
|
||||
insert(Authenticator, Authenticators, ID, []).
|
||||
|
||||
insert(_, [], ID, _) ->
|
||||
{error, {not_found, {authenticator, ID}}};
|
||||
insert(Authenticator, [{ID, _, _} | _] = Authenticators, ID, Acc) ->
|
||||
{ok, lists:reverse(Acc) ++ [Authenticator | Authenticators]};
|
||||
insert(Authenticator, [{_, _, _} = Authenticator0 | More], ID, Acc) ->
|
||||
insert(Authenticator, More, ID, [Authenticator0 | Acc]).
|
||||
|
||||
update_chain(ChainID, UpdateFun) ->
|
||||
case ets:lookup(?CHAIN_TAB, ChainID) of
|
||||
[] ->
|
||||
{error, {not_found, {chain, ChainID}}};
|
||||
[Chain] ->
|
||||
UpdateFun(Chain)
|
||||
end.
|
||||
|
||||
call_authenticator(ChainID, AuthenticatorID, Func, Args) ->
|
||||
UpdateFun =
|
||||
fun(#chain{authenticators = Authenticators}) ->
|
||||
case lists:keyfind(AuthenticatorID, 1, Authenticators) of
|
||||
false ->
|
||||
{error, {not_found, {authenticator, AuthenticatorID}}};
|
||||
{_, _, #authenticator{provider = Provider, state = State}} ->
|
||||
case erlang:function_exported(Provider, Func, length(Args) + 1) of
|
||||
true ->
|
||||
erlang:apply(Provider, Func, Args ++ [State]);
|
||||
false ->
|
||||
{error, unsupported_feature}
|
||||
end
|
||||
end
|
||||
end,
|
||||
update_chain(ChainID, UpdateFun).
|
||||
|
||||
serialize_chain(#chain{id = ID,
|
||||
authenticators = Authenticators,
|
||||
created_at = CreatedAt}) ->
|
||||
#{id => ID,
|
||||
authenticators => serialize_authenticators(Authenticators),
|
||||
created_at => CreatedAt}.
|
||||
|
||||
serialize_authenticators(Authenticators) ->
|
||||
[serialize_authenticator(Authenticator) || {_, _, Authenticator} <- Authenticators].
|
||||
|
||||
serialize_authenticator(#authenticator{id = ID,
|
||||
name = Name,
|
||||
provider = Provider,
|
||||
state = State}) ->
|
||||
#{id => ID, name => Name, provider => Provider, state => State}.
|
||||
|
|
File diff suppressed because it is too large
|
@ -17,7 +17,6 @@
|
|||
-module(emqx_authn_app).
|
||||
|
||||
-include("emqx_authn.hrl").
|
||||
-include_lib("emqx/include/logger.hrl").
|
||||
|
||||
-behaviour(application).
|
||||
|
||||
|
@ -26,33 +25,45 @@
|
|||
, stop/1
|
||||
]).
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% APIs
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
start(_StartType, _StartArgs) ->
|
||||
ok = ekka_rlog:wait_for_shards([?AUTH_SHARD], infinity),
|
||||
{ok, Sup} = emqx_authn_sup:start_link(),
|
||||
emqx_config_handler:add_handler([authentication, authenticators], emqx_authn),
|
||||
initialize(),
|
||||
ok = add_providers(),
|
||||
ok = initialize(),
|
||||
{ok, Sup}.
|
||||
|
||||
stop(_State) ->
|
||||
ok = remove_providers(),
|
||||
ok.
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% Internal functions
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
add_providers() ->
|
||||
_ = [?AUTHN:add_provider(AuthNType, Provider) || {AuthNType, Provider} <- providers()], ok.
|
||||
|
||||
remove_providers() ->
|
||||
_ = [?AUTHN:remove_provider(AuthNType) || {AuthNType, _} <- providers()], ok.
|
||||
|
||||
initialize() ->
|
||||
AuthNConfig = emqx:get_config([authentication], #{enable => false,
|
||||
authenticators => []}),
|
||||
initialize(AuthNConfig).
|
||||
|
||||
initialize(#{enable := Enable, authenticators := AuthenticatorsConfig}) ->
|
||||
{ok, _} = emqx_authn:create_chain(#{id => ?CHAIN}),
|
||||
initialize_authenticators(AuthenticatorsConfig),
|
||||
Enable =:= true andalso emqx_authn:enable(),
|
||||
?AUTHN:initialize_authentication(?GLOBAL, emqx:get_raw_config([authentication], [])),
|
||||
lists:foreach(fun({ListenerID, ListenerConfig}) ->
|
||||
?AUTHN:initialize_authentication(ListenerID, maps:get(authentication, ListenerConfig, []))
|
||||
end, emqx_listeners:list()),
|
||||
ok.
|
||||
|
||||
initialize_authenticators([]) ->
|
||||
ok;
|
||||
initialize_authenticators([#{name := Name} = AuthenticatorConfig | More]) ->
|
||||
case emqx_authn:create_authenticator(?CHAIN, AuthenticatorConfig) of
|
||||
{ok, _} ->
|
||||
initialize_authenticators(More);
|
||||
{error, Reason} ->
|
||||
?LOG(error, "Failed to create authenticator '~s': ~p", [Name, Reason])
|
||||
end.
|
||||
providers() ->
|
||||
[ {{'password-based', 'built-in-database'}, emqx_authn_mnesia}
|
||||
, {{'password-based', mysql}, emqx_authn_mysql}
|
||||
, {{'password-based', postgresql}, emqx_authn_pgsql}
|
||||
, {{'password-based', mongodb}, emqx_authn_mongodb}
|
||||
, {{'password-based', redis}, emqx_authn_redis}
|
||||
, {{'password-based', 'http-server'}, emqx_authn_http}
|
||||
, {jwt, emqx_authn_jwt}
|
||||
, {{scram, 'built-in-database'}, emqx_enhanced_authn_scram_mnesia}
|
||||
].
|
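A hedged sketch of registering one more provider alongside the built-in list above; add_provider/2 and remove_provider/1 are the calls used throughout this diff, while the backend name and module here are hypothetical.

-module(sketch_custom_provider).
-export([install/0, uninstall/0]).

-define(TYPE, {'password-based', 'my-backend'}).

%% The provider module is expected to implement the emqx_authentication
%% callbacks shown in the test suite (refs/0, create/1, update/2,
%% authenticate/2, destroy/1).
install() ->
    emqx_authentication:add_provider(?TYPE, my_authn_backend).

uninstall() ->
    emqx_authentication:remove_provider(?TYPE).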
|
@ -16,53 +16,15 @@
|
|||
|
||||
-module(emqx_authn_schema).
|
||||
|
||||
-include("emqx_authn.hrl").
|
||||
-include_lib("typerefl/include/types.hrl").
|
||||
|
||||
-behaviour(hocon_schema).
|
||||
|
||||
-export([ roots/0
|
||||
, fields/1
|
||||
-export([ common_fields/0
|
||||
]).
|
||||
|
||||
-export([ authenticator_name/1
|
||||
]).
|
||||
|
||||
%% Export it for emqx_gateway_schema module
|
||||
-export([ authenticators/1
|
||||
]).
|
||||
|
||||
roots() -> [ "authentication" ].
|
||||
|
||||
fields("authentication") ->
|
||||
[ {enable, fun enable/1}
|
||||
, {authenticators, fun authenticators/1}
|
||||
common_fields() ->
|
||||
[ {enable, fun enable/1}
|
||||
].
|
||||
|
||||
authenticator_name(type) -> binary();
|
||||
authenticator_name(nullable) -> false;
|
||||
authenticator_name(_) -> undefined.
|
||||
|
||||
enable(type) -> boolean();
|
||||
enable(default) -> false;
|
||||
enable(default) -> true;
|
||||
enable(_) -> undefined.
|
||||
|
||||
authenticators(type) ->
|
||||
hoconsc:array({union, [ hoconsc:ref(emqx_authn_mnesia, config)
|
||||
, hoconsc:ref(emqx_authn_mysql, config)
|
||||
, hoconsc:ref(emqx_authn_pgsql, config)
|
||||
, hoconsc:ref(emqx_authn_mongodb, standalone)
|
||||
, hoconsc:ref(emqx_authn_mongodb, 'replica-set')
|
||||
, hoconsc:ref(emqx_authn_mongodb, 'sharded-cluster')
|
||||
, hoconsc:ref(emqx_authn_redis, standalone)
|
||||
, hoconsc:ref(emqx_authn_redis, cluster)
|
||||
, hoconsc:ref(emqx_authn_redis, sentinel)
|
||||
, hoconsc:ref(emqx_authn_http, get)
|
||||
, hoconsc:ref(emqx_authn_http, post)
|
||||
, hoconsc:ref(emqx_authn_jwt, 'hmac-based')
|
||||
, hoconsc:ref(emqx_authn_jwt, 'public-key')
|
||||
, hoconsc:ref(emqx_authn_jwt, 'jwks')
|
||||
, hoconsc:ref(emqx_enhanced_authn_scram_mnesia, config)
|
||||
]});
|
||||
authenticators(default) -> [];
|
||||
authenticators(_) -> undefined.
|
||||
|
|
|
@ -26,11 +26,5 @@ start_link() ->
|
|||
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
|
||||
|
||||
init([]) ->
|
||||
ChildSpecs = [
|
||||
#{id => emqx_authn,
|
||||
start => {emqx_authn, start_link, []},
|
||||
restart => permanent,
|
||||
type => worker,
|
||||
modules => [emqx_authn]}
|
||||
],
|
||||
ChildSpecs = [],
|
||||
{ok, {{one_for_one, 10, 10}, ChildSpecs}}.
|
||||
|
|
|
@ -20,12 +20,15 @@
|
|||
-include_lib("typerefl/include/types.hrl").
|
||||
|
||||
-behaviour(hocon_schema).
|
||||
-behaviour(emqx_authentication).
|
||||
|
||||
-export([ roots/0
|
||||
-export([ namespace/0
|
||||
, roots/0
|
||||
, fields/1
|
||||
]).
|
||||
|
||||
-export([ create/1
|
||||
-export([ refs/0
|
||||
, create/1
|
||||
, update/2
|
||||
, authenticate/2
|
||||
, destroy/1
|
||||
|
@ -50,7 +53,7 @@
|
|||
, stored_key
|
||||
, server_key
|
||||
, salt
|
||||
, superuser
|
||||
, is_superuser
|
||||
}).
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
|
@ -74,19 +77,16 @@ mnesia(copy) ->
%% Hocon Schema
%%------------------------------------------------------------------------------

namespace() -> "authn:scram:builtin-db".

roots() -> [config].

fields(config) ->
    [ {name, fun emqx_authn_schema:authenticator_name/1}
    , {mechanism, {enum, [scram]}}
    , {server_type, fun server_type/1}
    [ {mechanism, {enum, [scram]}}
    , {backend, {enum, ['built-in-database']}}
    , {algorithm, fun algorithm/1}
    , {iteration_count, fun iteration_count/1}
    ].

server_type(type) -> hoconsc:enum(['built-in-database']);
server_type(default) -> 'built-in-database';
server_type(_) -> undefined.
    ] ++ emqx_authn_schema:common_fields().

algorithm(type) -> hoconsc:enum([sha256, sha512]);
algorithm(default) -> sha256;

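%% Illustrative sketch, not part of this commit: a config map that the
%% fields(config) schema above would accept after the rename from
%% server_type to backend; the algorithm and iteration_count values are
%% example choices, not defaults taken from the code.
example_scram_config() ->
    #{ mechanism => scram
     , backend => 'built-in-database'
     , algorithm => sha256
     , iteration_count => 4096
     , enable => true
     }.
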
@ -100,6 +100,9 @@ iteration_count(_) -> undefined.
%% APIs
%%------------------------------------------------------------------------------

refs() ->
    [hoconsc:ref(?MODULE, config)].

create(#{ algorithm := Algorithm
        , iteration_count := IterationCount
        , '_unique' := Unique

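%% Illustrative sketch, not part of this commit: the new refs/0 callback
%% lets a parent schema build the union of provider config structs without
%% hard-coding struct names; provider_schema_modules/0 is a hypothetical
%% list of modules such as the one above.
authenticator_union() ->
    Refs = lists:append([Mod:refs() || Mod <- provider_schema_modules()]),
    hoconsc:union(Refs).
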
@ -144,9 +147,9 @@ add_user(#{user_id := UserID,
|
|||
fun() ->
|
||||
case mnesia:read(?TAB, {UserGroup, UserID}, write) of
|
||||
[] ->
|
||||
Superuser = maps:get(superuser, UserInfo, false),
|
||||
add_user(UserID, Password, Superuser, State),
|
||||
{ok, #{user_id => UserID, superuser => Superuser}};
|
||||
IsSuperuser = maps:get(is_superuser, UserInfo, false),
|
||||
add_user(UserID, Password, IsSuperuser, State),
|
||||
{ok, #{user_id => UserID, is_superuser => IsSuperuser}};
|
||||
[_] ->
|
||||
{error, already_exist}
|
||||
end
|
||||
|
@ -170,8 +173,8 @@ update_user(UserID, User,
|
|||
case mnesia:read(?TAB, {UserGroup, UserID}, write) of
|
||||
[] ->
|
||||
{error, not_found};
|
||||
[#user_info{superuser = Superuser} = UserInfo] ->
|
||||
UserInfo1 = UserInfo#user_info{superuser = maps:get(superuser, User, Superuser)},
|
||||
[#user_info{is_superuser = IsSuperuser} = UserInfo] ->
|
||||
UserInfo1 = UserInfo#user_info{is_superuser = maps:get(is_superuser, User, IsSuperuser)},
|
||||
UserInfo2 = case maps:get(password, User, undefined) of
|
||||
undefined ->
|
||||
UserInfo1;
|
||||
|
@ -226,36 +229,36 @@ check_client_first_message(Bin, _Cache, #{iteration_count := IterationCount} = S
|
|||
{error, not_authorized}
|
||||
end.
|
||||
|
||||
check_client_final_message(Bin, #{superuser := Superuser} = Cache, #{algorithm := Alg}) ->
|
||||
check_client_final_message(Bin, #{is_superuser := IsSuperuser} = Cache, #{algorithm := Alg}) ->
|
||||
case esasl_scram:check_client_final_message(
|
||||
Bin,
|
||||
Cache#{algorithm => Alg}
|
||||
) of
|
||||
{ok, ServerFinalMessage} ->
|
||||
{ok, #{superuser => Superuser}, ServerFinalMessage};
|
||||
{ok, #{is_superuser => IsSuperuser}, ServerFinalMessage};
|
||||
{error, _Reason} ->
|
||||
{error, not_authorized}
|
||||
end.
|
||||
|
||||
add_user(UserID, Password, Superuser, State) ->
|
||||
add_user(UserID, Password, IsSuperuser, State) ->
|
||||
{StoredKey, ServerKey, Salt} = esasl_scram:generate_authentication_info(Password, State),
|
||||
UserInfo = #user_info{user_id = UserID,
|
||||
stored_key = StoredKey,
|
||||
server_key = ServerKey,
|
||||
salt = Salt,
|
||||
superuser = Superuser},
|
||||
UserInfo = #user_info{user_id = UserID,
|
||||
stored_key = StoredKey,
|
||||
server_key = ServerKey,
|
||||
salt = Salt,
|
||||
is_superuser = IsSuperuser},
|
||||
mnesia:write(?TAB, UserInfo, write).
|
||||
|
||||
retrieve(UserID, #{user_group := UserGroup}) ->
|
||||
case mnesia:dirty_read(?TAB, {UserGroup, UserID}) of
|
||||
[#user_info{stored_key = StoredKey,
|
||||
server_key = ServerKey,
|
||||
salt = Salt,
|
||||
superuser = Superuser}] ->
|
||||
[#user_info{stored_key = StoredKey,
|
||||
server_key = ServerKey,
|
||||
salt = Salt,
|
||||
is_superuser = IsSuperuser}] ->
|
||||
{ok, #{stored_key => StoredKey,
|
||||
server_key => ServerKey,
|
||||
salt => Salt,
|
||||
superuser => Superuser}};
|
||||
is_superuser => IsSuperuser}};
|
||||
[] ->
|
||||
{error, not_found}
|
||||
end.
|
||||
|
@ -270,5 +273,5 @@ trans(Fun, Args) ->
|
|||
{aborted, Reason} -> {error, Reason}
|
||||
end.
|
||||
|
||||
serialize_user_info(#user_info{user_id = {_, UserID}, superuser = Superuser}) ->
|
||||
#{user_id => UserID, superuser => Superuser}.
|
||||
serialize_user_info(#user_info{user_id = {_, UserID}, is_superuser = IsSuperuser}) ->
|
||||
#{user_id => UserID, is_superuser => IsSuperuser}.
|
||||
|
|
|
@ -21,13 +21,16 @@
|
|||
-include_lib("typerefl/include/types.hrl").
|
||||
|
||||
-behaviour(hocon_schema).
|
||||
-behaviour(emqx_authentication).
|
||||
|
||||
-export([ roots/0
|
||||
-export([ namespace/0
|
||||
, roots/0
|
||||
, fields/1
|
||||
, validations/0
|
||||
]).
|
||||
|
||||
-export([ create/1
|
||||
-export([ refs/0
|
||||
, create/1
|
||||
, update/2
|
||||
, authenticate/2
|
||||
, destroy/1
|
||||
|
@ -37,9 +40,11 @@
|
|||
%% Hocon Schema
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
namespace() -> "authn:password-based:http-server".
|
||||
|
||||
roots() ->
|
||||
[ {config, {union, [ hoconsc:t(get)
|
||||
, hoconsc:t(post)
|
||||
[ {config, {union, [ hoconsc:ref(?MODULE, get)
|
||||
, hoconsc:ref(?MODULE, post)
|
||||
]}}
|
||||
].
|
||||
|
||||
|
@ -56,15 +61,15 @@ fields(post) ->
|
|||
] ++ common_fields().
|
||||
|
||||
common_fields() ->
|
||||
[ {name, fun emqx_authn_schema:authenticator_name/1}
|
||||
, {mechanism, {enum, ['password-based']}}
|
||||
, {server_type, {enum, ['http-server']}}
|
||||
[ {mechanism, {enum, ['password-based']}}
|
||||
, {backend, {enum, ['http-server']}}
|
||||
, {url, fun url/1}
|
||||
, {form_data, fun form_data/1}
|
||||
, {body, fun body/1}
|
||||
, {request_timeout, fun request_timeout/1}
|
||||
] ++ maps:to_list(maps:without([ base_url
|
||||
, pool_type],
|
||||
maps:from_list(emqx_connector_http:fields(config)))).
|
||||
] ++ emqx_authn_schema:common_fields()
|
||||
++ maps:to_list(maps:without([ base_url
|
||||
, pool_type],
|
||||
maps:from_list(emqx_connector_http:fields(config)))).
|
||||
|
||||
validations() ->
|
||||
[ {check_ssl_opts, fun check_ssl_opts/1}
|
||||
|
@ -92,11 +97,10 @@ headers_no_content_type(converter) ->
|
|||
headers_no_content_type(default) -> default_headers_no_content_type();
|
||||
headers_no_content_type(_) -> undefined.
|
||||
|
||||
%% TODO: Using map()
|
||||
form_data(type) -> map();
|
||||
form_data(nullable) -> false;
|
||||
form_data(validate) -> [fun check_form_data/1];
|
||||
form_data(_) -> undefined.
|
||||
body(type) -> map();
|
||||
body(nullable) -> false;
|
||||
body(validate) -> [fun check_body/1];
|
||||
body(_) -> undefined.
|
||||
|
||||
request_timeout(type) -> non_neg_integer();
|
||||
request_timeout(default) -> 5000;
|
||||
|
@ -106,10 +110,15 @@ request_timeout(_) -> undefined.
|
|||
%% APIs
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
refs() ->
|
||||
[ hoconsc:ref(?MODULE, get)
|
||||
, hoconsc:ref(?MODULE, post)
|
||||
].
|
||||
|
||||
create(#{ method := Method
|
||||
, url := URL
|
||||
, headers := Headers
|
||||
, form_data := FormData
|
||||
, body := Body
|
||||
, request_timeout := RequestTimeout
|
||||
, '_unique' := Unique
|
||||
} = Config) ->
|
||||
|
@ -118,8 +127,8 @@ create(#{ method := Method
|
|||
State = #{ method => Method
|
||||
, path => Path
|
||||
, base_query => cow_qs:parse_qs(list_to_binary(Query))
|
||||
, headers => normalize_headers(Headers)
|
||||
, form_data => maps:to_list(FormData)
|
||||
, headers => maps:to_list(Headers)
|
||||
, body => maps:to_list(Body)
|
||||
, request_timeout => RequestTimeout
|
||||
, '_unique' => Unique
|
||||
},
|
||||
|
@ -152,16 +161,16 @@ authenticate(Credential, #{'_unique' := Unique,
    try
        Request = generate_request(Credential, State),
        case emqx_resource:query(Unique, {Method, Request, RequestTimeout}) of
            {ok, 204, _Headers} -> {ok, #{superuser => false}};
            {ok, 204, _Headers} -> {ok, #{is_superuser => false}};
            {ok, 200, Headers, Body} ->
                ContentType = proplists:get_value(<<"content-type">>, Headers, <<"application/json">>),
                case safely_parse_body(ContentType, Body) of
                    {ok, NBody} ->
                        %% TODO: Return by user property
                        {ok, #{superuser => maps:get(<<"superuser">>, NBody, false),
                        {ok, #{is_superuser => maps:get(<<"is_superuser">>, NBody, false),
                               user_property => NBody}};
                    {error, _Reason} ->
                        {ok, #{superuser => false}}
                        {ok, #{is_superuser => false}}
                end;
            {error, _Reason} ->
                ignore
|
||||
|
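%% Illustrative sketch, not part of this commit: after this change the
%% HTTP authenticator reads `is_superuser` (not `superuser`) from a 200
%% response body, so a JSON reply decoding to the map below would mark
%% the client as a superuser; the extra property is only an example.
example_success_body() ->
    #{ <<"is_superuser">> => true
     , <<"some_user_property">> => <<"example">>
     }.
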
@ -186,10 +195,10 @@ check_url(URL) ->
|
|||
{error, _} -> false
|
||||
end.
|
||||
|
||||
check_form_data(FormData) ->
|
||||
check_body(Body) ->
|
||||
lists:any(fun({_, V}) ->
|
||||
not is_binary(V)
|
||||
end, maps:to_list(FormData)).
|
||||
end, maps:to_list(Body)).
|
||||
|
||||
default_headers() ->
|
||||
maps:put(<<"content-type">>,
|
||||
|
@ -205,7 +214,7 @@ default_headers_no_content_type() ->
|
|||
|
||||
transform_header_name(Headers) ->
|
||||
maps:fold(fun(K0, V, Acc) ->
|
||||
K = list_to_binary(string:to_lower(binary_to_list(K0))),
|
||||
K = list_to_binary(string:to_lower(to_list(K0))),
|
||||
maps:put(K, V, Acc)
|
||||
end, #{}, Headers).
|
||||
|
||||
|
@ -229,24 +238,21 @@ parse_url(URL) ->
|
|||
URIMap
|
||||
end.
|
||||
|
||||
normalize_headers(Headers) ->
|
||||
[{atom_to_binary(K), V} || {K, V} <- maps:to_list(Headers)].
|
||||
|
||||
generate_request(Credential, #{method := Method,
|
||||
path := Path,
|
||||
base_query := BaseQuery,
|
||||
headers := Headers,
|
||||
form_data := FormData0}) ->
|
||||
FormData = replace_placeholders(FormData0, Credential),
|
||||
body := Body0}) ->
|
||||
Body = replace_placeholders(Body0, Credential),
|
||||
case Method of
|
||||
get ->
|
||||
NPath = append_query(Path, BaseQuery ++ FormData),
|
||||
NPath = append_query(Path, BaseQuery ++ Body),
|
||||
{NPath, Headers};
|
||||
post ->
|
||||
NPath = append_query(Path, BaseQuery),
|
||||
ContentType = proplists:get_value(<<"content-type">>, Headers),
|
||||
Body = serialize_body(ContentType, FormData),
|
||||
{NPath, Headers, Body}
|
||||
NBody = serialize_body(ContentType, Body),
|
||||
{NPath, Headers, NBody}
|
||||
end.
|
||||
|
||||
replace_placeholders(KVs, Credential) ->
|
||||
|
@ -276,10 +282,10 @@ qs([], Acc) ->
|
|||
qs([{K, V} | More], Acc) ->
|
||||
qs(More, [["&", emqx_http_lib:uri_encode(K), "=", emqx_http_lib:uri_encode(V)] | Acc]).
|
||||
|
||||
serialize_body(<<"application/json">>, FormData) ->
|
||||
emqx_json:encode(FormData);
|
||||
serialize_body(<<"application/x-www-form-urlencoded">>, FormData) ->
|
||||
qs(FormData).
|
||||
serialize_body(<<"application/json">>, Body) ->
|
||||
emqx_json:encode(Body);
|
||||
serialize_body(<<"application/x-www-form-urlencoded">>, Body) ->
|
||||
qs(Body).
|
||||
|
||||
safely_parse_body(ContentType, Body) ->
|
||||
try parse_body(ContentType, Body) of
|
||||
|
@ -295,3 +301,8 @@ parse_body(<<"application/x-www-form-urlencoded">>, Body) ->
|
|||
{ok, maps:from_list(cow_qs:parse_qs(Body))};
|
||||
parse_body(ContentType, _) ->
|
||||
{error, {unsupported_content_type, ContentType}}.
|
||||
|
||||
to_list(A) when is_atom(A) ->
|
||||
atom_to_list(A);
|
||||
to_list(B) when is_binary(B) ->
|
||||
binary_to_list(B).
|
||||
|
|
|
@ -19,12 +19,15 @@
|
|||
-include_lib("typerefl/include/types.hrl").
|
||||
|
||||
-behaviour(hocon_schema).
|
||||
-behaviour(emqx_authentication).
|
||||
|
||||
-export([ roots/0
|
||||
-export([ namespace/0
|
||||
, roots/0
|
||||
, fields/1
|
||||
]).
|
||||
|
||||
-export([ create/1
|
||||
-export([ refs/0
|
||||
, create/1
|
||||
, update/2
|
||||
, authenticate/2
|
||||
, destroy/1
|
||||
|
@ -34,10 +37,12 @@
|
|||
%% Hocon Schema
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
namespace() -> "authn:jwt".
|
||||
|
||||
roots() ->
|
||||
[ {config, {union, [ hoconsc:t('hmac-based')
|
||||
, hoconsc:t('public-key')
|
||||
, hoconsc:t('jwks')
|
||||
[ {config, {union, [ hoconsc:mk('hmac-based')
|
||||
, hoconsc:mk('public-key')
|
||||
, hoconsc:mk('jwks')
|
||||
]}}
|
||||
].
|
||||
|
||||
|
@ -78,12 +83,11 @@ fields(ssl_disable) ->
|
|||
[ {enable, #{type => false}} ].
|
||||
|
||||
common_fields() ->
|
||||
[ {name, fun emqx_authn_schema:authenticator_name/1}
|
||||
, {mechanism, {enum, [jwt]}}
|
||||
[ {mechanism, {enum, [jwt]}}
|
||||
, {verify_claims, fun verify_claims/1}
|
||||
].
|
||||
] ++ emqx_authn_schema:common_fields().
|
||||
|
||||
secret(type) -> string();
|
||||
secret(type) -> binary();
|
||||
secret(_) -> undefined.
|
||||
|
||||
secret_base64_encoded(type) -> boolean();
|
||||
|
@ -130,6 +134,12 @@ verify_claims(_) -> undefined.
|
|||
%% APIs
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
refs() ->
|
||||
[ hoconsc:ref(?MODULE, 'hmac-based')
|
||||
, hoconsc:ref(?MODULE, 'public-key')
|
||||
, hoconsc:ref(?MODULE, 'jwks')
|
||||
].
|
||||
|
||||
create(#{verify_claims := VerifyClaims} = Config) ->
|
||||
create2(Config#{verify_claims => handle_verify_claims(VerifyClaims)}).
|
||||
|
||||
|
@ -239,7 +249,7 @@ verify(JWS, [JWK | More], VerifyClaims) ->
|
|||
Claims = emqx_json:decode(Payload, [return_maps]),
|
||||
case verify_claims(Claims, VerifyClaims) of
|
||||
ok ->
|
||||
{ok, #{superuser => maps:get(<<"superuser">>, Claims, false)}};
|
||||
{ok, #{is_superuser => maps:get(<<"is_superuser">>, Claims, false)}};
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
end;
|
||||
|
|
|
@ -20,10 +20,15 @@
|
|||
-include_lib("typerefl/include/types.hrl").
|
||||
|
||||
-behaviour(hocon_schema).
|
||||
-behaviour(emqx_authentication).
|
||||
|
||||
-export([ roots/0, fields/1 ]).
|
||||
-export([ namespace/0
|
||||
, roots/0
|
||||
, fields/1
|
||||
]).
|
||||
|
||||
-export([ create/1
|
||||
-export([ refs/0
|
||||
, create/1
|
||||
, update/2
|
||||
, authenticate/2
|
||||
, destroy/1
|
||||
|
@ -46,7 +51,7 @@
|
|||
{ user_id :: {user_group(), user_id()}
|
||||
, password_hash :: binary()
|
||||
, salt :: binary()
|
||||
, superuser :: boolean()
|
||||
, is_superuser :: boolean()
|
||||
}).
|
||||
|
||||
-reflect_type([ user_id_type/0 ]).
|
||||
|
@ -79,15 +84,16 @@ mnesia(copy) ->
|
|||
%% Hocon Schema
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
namespace() -> "authn:password-based:builtin-db".
|
||||
|
||||
roots() -> [config].
|
||||
|
||||
fields(config) ->
|
||||
[ {name, fun emqx_authn_schema:authenticator_name/1}
|
||||
, {mechanism, {enum, ['password-based']}}
|
||||
, {server_type, {enum, ['built-in-database']}}
|
||||
[ {mechanism, {enum, ['password-based']}}
|
||||
, {backend, {enum, ['built-in-database']}}
|
||||
, {user_id_type, fun user_id_type/1}
|
||||
, {password_hash_algorithm, fun password_hash_algorithm/1}
|
||||
];
|
||||
] ++ emqx_authn_schema:common_fields();
|
||||
|
||||
fields(bcrypt) ->
|
||||
[ {name, {enum, [bcrypt]}}
|
||||
|
@ -102,7 +108,8 @@ user_id_type(type) -> user_id_type();
|
|||
user_id_type(default) -> username;
|
||||
user_id_type(_) -> undefined.
|
||||
|
||||
password_hash_algorithm(type) -> {union, [hoconsc:ref(bcrypt), hoconsc:ref(other_algorithms)]};
|
||||
password_hash_algorithm(type) -> hoconsc:union([hoconsc:ref(?MODULE, bcrypt),
|
||||
hoconsc:ref(?MODULE, other_algorithms)]);
|
||||
password_hash_algorithm(default) -> #{<<"name">> => sha256};
|
||||
password_hash_algorithm(_) -> undefined.
|
||||
|
||||
|
@ -114,6 +121,9 @@ salt_rounds(_) -> undefined.
|
|||
%% APIs
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
refs() ->
|
||||
[hoconsc:ref(?MODULE, config)].
|
||||
|
||||
create(#{ user_id_type := Type
|
||||
, password_hash_algorithm := #{name := bcrypt,
|
||||
salt_rounds := SaltRounds}
|
||||
|
@ -148,13 +158,13 @@ authenticate(#{password := Password} = Credential,
|
|||
case mnesia:dirty_read(?TAB, {UserGroup, UserID}) of
|
||||
[] ->
|
||||
ignore;
|
||||
[#user_info{password_hash = PasswordHash, salt = Salt0, superuser = Superuser}] ->
|
||||
[#user_info{password_hash = PasswordHash, salt = Salt0, is_superuser = IsSuperuser}] ->
|
||||
Salt = case Algorithm of
|
||||
bcrypt -> PasswordHash;
|
||||
_ -> Salt0
|
||||
end,
|
||||
case PasswordHash =:= hash(Algorithm, Password, Salt) of
|
||||
true -> {ok, #{superuser => Superuser}};
|
||||
true -> {ok, #{is_superuser => IsSuperuser}};
|
||||
false -> {error, bad_username_or_password}
|
||||
end
|
||||
end.
|
||||
|
@ -187,9 +197,9 @@ add_user(#{user_id := UserID,
|
|||
case mnesia:read(?TAB, {UserGroup, UserID}, write) of
|
||||
[] ->
|
||||
{PasswordHash, Salt} = hash(Password, State),
|
||||
Superuser = maps:get(superuser, UserInfo, false),
|
||||
insert_user(UserGroup, UserID, PasswordHash, Salt, Superuser),
|
||||
{ok, #{user_id => UserID, superuser => Superuser}};
|
||||
IsSuperuser = maps:get(is_superuser, UserInfo, false),
|
||||
insert_user(UserGroup, UserID, PasswordHash, Salt, IsSuperuser),
|
||||
{ok, #{user_id => UserID, is_superuser => IsSuperuser}};
|
||||
[_] ->
|
||||
{error, already_exist}
|
||||
end
|
||||
|
@ -215,8 +225,8 @@ update_user(UserID, UserInfo,
|
|||
{error, not_found};
|
||||
[#user_info{ password_hash = PasswordHash
|
||||
, salt = Salt
|
||||
, superuser = Superuser}] ->
|
||||
NSuperuser = maps:get(superuser, UserInfo, Superuser),
|
||||
, is_superuser = IsSuperuser}] ->
|
||||
NSuperuser = maps:get(is_superuser, UserInfo, IsSuperuser),
|
||||
{NPasswordHash, NSalt} = case maps:get(password, UserInfo, undefined) of
|
||||
undefined ->
|
||||
{PasswordHash, Salt};
|
||||
|
@ -224,7 +234,7 @@ update_user(UserID, UserInfo,
|
|||
hash(Password, State)
|
||||
end,
|
||||
insert_user(UserGroup, UserID, NPasswordHash, NSalt, NSuperuser),
|
||||
{ok, #{user_id => UserID, superuser => NSuperuser}}
|
||||
{ok, #{user_id => UserID, is_superuser => NSuperuser}}
|
||||
end
|
||||
end).
|
||||
|
||||
|
@ -280,8 +290,8 @@ import(UserGroup, [#{<<"user_id">> := UserID,
|
|||
<<"password_hash">> := PasswordHash} = UserInfo | More])
|
||||
when is_binary(UserID) andalso is_binary(PasswordHash) ->
|
||||
Salt = maps:get(<<"salt">>, UserInfo, <<>>),
|
||||
Superuser = maps:get(<<"superuser">>, UserInfo, false),
|
||||
insert_user(UserGroup, UserID, PasswordHash, Salt, Superuser),
|
||||
IsSuperuser = maps:get(<<"is_superuser">>, UserInfo, false),
|
||||
insert_user(UserGroup, UserID, PasswordHash, Salt, IsSuperuser),
|
||||
import(UserGroup, More);
|
||||
import(_UserGroup, [_ | _More]) ->
|
||||
{error, bad_format}.
|
||||
|
@ -295,8 +305,8 @@ import(UserGroup, File, Seq) ->
|
|||
{ok, #{user_id := UserID,
|
||||
password_hash := PasswordHash} = UserInfo} ->
|
||||
Salt = maps:get(salt, UserInfo, <<>>),
|
||||
Superuser = maps:get(superuser, UserInfo, false),
|
||||
insert_user(UserGroup, UserID, PasswordHash, Salt, Superuser),
|
||||
IsSuperuser = maps:get(is_superuser, UserInfo, false),
|
||||
insert_user(UserGroup, UserID, PasswordHash, Salt, IsSuperuser),
|
||||
import(UserGroup, File, Seq);
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
|
@ -331,10 +341,10 @@ get_user_info_by_seq([PasswordHash | More1], [<<"password_hash">> | More2], Acc)
|
|||
get_user_info_by_seq(More1, More2, Acc#{password_hash => PasswordHash});
|
||||
get_user_info_by_seq([Salt | More1], [<<"salt">> | More2], Acc) ->
|
||||
get_user_info_by_seq(More1, More2, Acc#{salt => Salt});
|
||||
get_user_info_by_seq([<<"true">> | More1], [<<"superuser">> | More2], Acc) ->
|
||||
get_user_info_by_seq(More1, More2, Acc#{superuser => true});
|
||||
get_user_info_by_seq([<<"false">> | More1], [<<"superuser">> | More2], Acc) ->
|
||||
get_user_info_by_seq(More1, More2, Acc#{superuser => false});
|
||||
get_user_info_by_seq([<<"true">> | More1], [<<"is_superuser">> | More2], Acc) ->
|
||||
get_user_info_by_seq(More1, More2, Acc#{is_superuser => true});
|
||||
get_user_info_by_seq([<<"false">> | More1], [<<"is_superuser">> | More2], Acc) ->
|
||||
get_user_info_by_seq(More1, More2, Acc#{is_superuser => false});
|
||||
get_user_info_by_seq(_, _, _) ->
|
||||
{error, bad_format}.
|
||||
|
||||
|
@ -358,11 +368,11 @@ hash(Password, #{password_hash_algorithm := Algorithm} = State) ->
|
|||
PasswordHash = hash(Algorithm, Password, Salt),
|
||||
{PasswordHash, Salt}.
|
||||
|
||||
insert_user(UserGroup, UserID, PasswordHash, Salt, Superuser) ->
|
||||
insert_user(UserGroup, UserID, PasswordHash, Salt, IsSuperuser) ->
|
||||
UserInfo = #user_info{user_id = {UserGroup, UserID},
|
||||
password_hash = PasswordHash,
|
||||
salt = Salt,
|
||||
superuser = Superuser},
|
||||
is_superuser = IsSuperuser},
|
||||
mnesia:write(?TAB, UserInfo, write).
|
||||
|
||||
delete_user2(UserInfo) ->
|
||||
|
@ -390,5 +400,5 @@ to_binary(B) when is_binary(B) ->
|
|||
to_binary(L) when is_list(L) ->
|
||||
iolist_to_binary(L).
|
||||
|
||||
serialize_user_info(#user_info{user_id = {_, UserID}, superuser = Superuser}) ->
|
||||
#{user_id => UserID, superuser => Superuser}.
|
||||
serialize_user_info(#user_info{user_id = {_, UserID}, is_superuser = IsSuperuser}) ->
|
||||
#{user_id => UserID, is_superuser => IsSuperuser}.
|
||||
|
|
|
@ -21,12 +21,15 @@
|
|||
-include_lib("typerefl/include/types.hrl").
|
||||
|
||||
-behaviour(hocon_schema).
|
||||
-behaviour(emqx_authentication).
|
||||
|
||||
-export([ roots/0
|
||||
-export([ namespace/0
|
||||
, roots/0
|
||||
, fields/1
|
||||
]).
|
||||
|
||||
-export([ create/1
|
||||
-export([ refs/0
|
||||
, create/1
|
||||
, update/2
|
||||
, authenticate/2
|
||||
, destroy/1
|
||||
|
@ -36,10 +39,12 @@
|
|||
%% Hocon Schema
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
namespace() -> "authn:password-based:mongodb".
|
||||
|
||||
roots() ->
|
||||
[ {config, {union, [ hoconsc:t(standalone)
|
||||
, hoconsc:t('replica-set')
|
||||
, hoconsc:t('sharded-cluster')
|
||||
[ {config, {union, [ hoconsc:mk(standalone)
|
||||
, hoconsc:mk('replica-set')
|
||||
, hoconsc:mk('sharded-cluster')
|
||||
]}}
|
||||
].
|
||||
|
||||
|
@ -53,16 +58,16 @@ fields('sharded-cluster') ->
|
|||
common_fields() ++ emqx_connector_mongo:fields(sharded).
|
||||
|
||||
common_fields() ->
|
||||
[ {name, fun emqx_authn_schema:authenticator_name/1}
|
||||
, {mechanism, {enum, ['password-based']}}
|
||||
, {server_type, {enum, [mongodb]}}
|
||||
[ {mechanism, {enum, ['password-based']}}
|
||||
, {backend, {enum, [mongodb]}}
|
||||
, {collection, fun collection/1}
|
||||
, {selector, fun selector/1}
|
||||
, {password_hash_field, fun password_hash_field/1}
|
||||
, {salt_field, fun salt_field/1}
|
||||
, {is_superuser_field, fun is_superuser_field/1}
|
||||
, {password_hash_algorithm, fun password_hash_algorithm/1}
|
||||
, {salt_position, fun salt_position/1}
|
||||
].
|
||||
] ++ emqx_authn_schema:common_fields().
|
||||
|
||||
collection(type) -> binary();
|
||||
collection(nullable) -> false;
|
||||
|
@ -80,6 +85,10 @@ salt_field(type) -> binary();
|
|||
salt_field(nullable) -> true;
|
||||
salt_field(_) -> undefined.
|
||||
|
||||
is_superuser_field(type) -> binary();
|
||||
is_superuser_field(nullable) -> true;
|
||||
is_superuser_field(_) -> undefined.
|
||||
|
||||
password_hash_algorithm(type) -> {enum, [plain, md5, sha, sha256, sha512, bcrypt]};
|
||||
password_hash_algorithm(default) -> sha256;
|
||||
password_hash_algorithm(_) -> undefined.
|
||||
|
@ -92,6 +101,12 @@ salt_position(_) -> undefined.
|
|||
%% APIs
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
refs() ->
|
||||
[ hoconsc:ref(?MODULE, standalone)
|
||||
, hoconsc:ref(?MODULE, 'replica-set')
|
||||
, hoconsc:ref(?MODULE, 'sharded-cluster')
|
||||
].
|
||||
|
||||
create(#{ selector := Selector
|
||||
, '_unique' := Unique
|
||||
} = Config) ->
|
||||
|
@ -99,6 +114,7 @@ create(#{ selector := Selector
|
|||
State = maps:with([ collection
|
||||
, password_hash_field
|
||||
, salt_field
|
||||
, is_superuser_field
|
||||
, password_hash_algorithm
|
||||
, salt_position
|
||||
, '_unique'], Config),
|
||||
|
@ -139,7 +155,7 @@ authenticate(#{password := Password} = Credential,
|
|||
Doc ->
|
||||
case check_password(Password, Doc, State) of
|
||||
ok ->
|
||||
{ok, #{superuser => superuser(Doc, State)}};
|
||||
{ok, #{is_superuser => is_superuser(Doc, State)}};
|
||||
{error, {cannot_find_password_hash_field, PasswordHashField}} ->
|
||||
?LOG(error, "['~s'] Can't find password hash field: ~s", [Unique, PasswordHashField]),
|
||||
{error, bad_username_or_password};
|
||||
|
@ -220,9 +236,9 @@ check_password(Password,
|
|||
end
|
||||
end.
|
||||
|
||||
superuser(Doc, #{superuser_field := SuperuserField}) ->
|
||||
maps:get(SuperuserField, Doc, false);
|
||||
superuser(_, _) ->
|
||||
is_superuser(Doc, #{is_superuser_field := IsSuperuserField}) ->
|
||||
maps:get(IsSuperuserField, Doc, false);
|
||||
is_superuser(_, _) ->
|
||||
false.
|
||||
|
||||
hash(Algorithm, Password, Salt, prefix) ->
|
||||
|
|
|
@ -21,12 +21,15 @@
|
|||
-include_lib("typerefl/include/types.hrl").
|
||||
|
||||
-behaviour(hocon_schema).
|
||||
-behaviour(emqx_authentication).
|
||||
|
||||
-export([ roots/0
|
||||
-export([ namespace/0
|
||||
, roots/0
|
||||
, fields/1
|
||||
]).
|
||||
|
||||
-export([ create/1
|
||||
-export([ refs/0
|
||||
, create/1
|
||||
, update/2
|
||||
, authenticate/2
|
||||
, destroy/1
|
||||
|
@ -36,17 +39,19 @@
|
|||
%% Hocon Schema
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
namespace() -> "authn:password-based:mysql".
|
||||
|
||||
roots() -> [config].
|
||||
|
||||
fields(config) ->
|
||||
[ {name, fun emqx_authn_schema:authenticator_name/1}
|
||||
, {mechanism, {enum, ['password-based']}}
|
||||
, {server_type, {enum, [mysql]}}
|
||||
[ {mechanism, {enum, ['password-based']}}
|
||||
, {backend, {enum, [mysql]}}
|
||||
, {password_hash_algorithm, fun password_hash_algorithm/1}
|
||||
, {salt_position, fun salt_position/1}
|
||||
, {query, fun query/1}
|
||||
, {query_timeout, fun query_timeout/1}
|
||||
] ++ emqx_connector_schema_lib:relational_db_fields()
|
||||
] ++ emqx_authn_schema:common_fields()
|
||||
++ emqx_connector_schema_lib:relational_db_fields()
|
||||
++ emqx_connector_schema_lib:ssl_fields().
|
||||
|
||||
password_hash_algorithm(type) -> {enum, [plain, md5, sha, sha256, sha512, bcrypt]};
|
||||
|
@ -69,6 +74,9 @@ query_timeout(_) -> undefined.
|
|||
%% APIs
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
refs() ->
|
||||
[hoconsc:ref(?MODULE, config)].
|
||||
|
||||
create(#{ password_hash_algorithm := Algorithm
|
||||
, salt_position := SaltPosition
|
||||
, query := Query0
|
||||
|
@ -115,7 +123,7 @@ authenticate(#{password := Password} = Credential,
|
|||
Selected = maps:from_list(lists:zip(Columns, Rows)),
|
||||
case check_password(Password, Selected, State) of
|
||||
ok ->
|
||||
{ok, #{superuser => maps:get(<<"superuser">>, Selected, false)}};
|
||||
{ok, #{is_superuser => maps:get(<<"is_superuser">>, Selected, false)}};
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
end;
|
||||
|
|
|
@ -22,10 +22,15 @@
|
|||
-include_lib("typerefl/include/types.hrl").
|
||||
|
||||
-behaviour(hocon_schema).
|
||||
-behaviour(emqx_authentication).
|
||||
|
||||
-export([ roots/0, fields/1 ]).
|
||||
-export([ namespace/0
|
||||
, roots/0
|
||||
, fields/1
|
||||
]).
|
||||
|
||||
-export([ create/1
|
||||
-export([ refs/0
|
||||
, create/1
|
||||
, update/2
|
||||
, authenticate/2
|
||||
, destroy/1
|
||||
|
@ -35,16 +40,18 @@
|
|||
%% Hocon Schema
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
namespace() -> "authn:password-based:postgresql".
|
||||
|
||||
roots() -> [config].
|
||||
|
||||
fields(config) ->
|
||||
[ {name, fun emqx_authn_schema:authenticator_name/1}
|
||||
, {mechanism, {enum, ['password-based']}}
|
||||
, {server_type, {enum, [pgsql]}}
|
||||
[ {mechanism, {enum, ['password-based']}}
|
||||
, {backend, {enum, [postgresql]}}
|
||||
, {password_hash_algorithm, fun password_hash_algorithm/1}
|
||||
, {salt_position, {enum, [prefix, suffix]}}
|
||||
, {query, fun query/1}
|
||||
] ++ emqx_connector_schema_lib:relational_db_fields()
|
||||
] ++ emqx_authn_schema:common_fields()
|
||||
++ emqx_connector_schema_lib:relational_db_fields()
|
||||
++ emqx_connector_schema_lib:ssl_fields().
|
||||
|
||||
password_hash_algorithm(type) -> {enum, [plain, md5, sha, sha256, sha512, bcrypt]};
|
||||
|
@ -59,6 +66,9 @@ query(_) -> undefined.
|
|||
%% APIs
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
refs() ->
|
||||
[hoconsc:ref(?MODULE, config)].
|
||||
|
||||
create(#{ query := Query0
|
||||
, password_hash_algorithm := Algorithm
|
||||
, salt_position := SaltPosition
|
||||
|
@ -103,7 +113,7 @@ authenticate(#{password := Password} = Credential,
|
|||
Selected = maps:from_list(lists:zip(NColumns, Rows)),
|
||||
case check_password(Password, Selected, State) of
|
||||
ok ->
|
||||
{ok, #{superuser => maps:get(<<"superuser">>, Selected, false)}};
|
||||
{ok, #{is_superuser => maps:get(<<"is_superuser">>, Selected, false)}};
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
end;
|
||||
|
|
|
@ -21,12 +21,15 @@
|
|||
-include_lib("typerefl/include/types.hrl").
|
||||
|
||||
-behaviour(hocon_schema).
|
||||
-behaviour(emqx_authentication).
|
||||
|
||||
-export([ roots/0
|
||||
-export([ namespace/0
|
||||
, roots/0
|
||||
, fields/1
|
||||
]).
|
||||
|
||||
-export([ create/1
|
||||
-export([ refs/0
|
||||
, create/1
|
||||
, update/2
|
||||
, authenticate/2
|
||||
, destroy/1
|
||||
|
@ -36,10 +39,12 @@
|
|||
%% Hocon Schema
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
namespace() -> "authn:password-based:redis".
|
||||
|
||||
roots() ->
|
||||
[ {config, {union, [ hoconsc:t(standalone)
|
||||
, hoconsc:t(cluster)
|
||||
, hoconsc:t(sentinel)
|
||||
[ {config, {union, [ hoconsc:mk(standalone)
|
||||
, hoconsc:mk(cluster)
|
||||
, hoconsc:mk(sentinel)
|
||||
]}}
|
||||
].
|
||||
|
||||
|
@ -53,13 +58,12 @@ fields(sentinel) ->
|
|||
common_fields() ++ emqx_connector_redis:fields(sentinel).
|
||||
|
||||
common_fields() ->
|
||||
[ {name, fun emqx_authn_schema:authenticator_name/1}
|
||||
, {mechanism, {enum, ['password-based']}}
|
||||
, {server_type, {enum, [redis]}}
|
||||
[ {mechanism, {enum, ['password-based']}}
|
||||
, {backend, {enum, [redis]}}
|
||||
, {query, fun query/1}
|
||||
, {password_hash_algorithm, fun password_hash_algorithm/1}
|
||||
, {salt_position, fun salt_position/1}
|
||||
].
|
||||
] ++ emqx_authn_schema:common_fields().
|
||||
|
||||
query(type) -> string();
|
||||
query(nullable) -> false;
|
||||
|
@ -77,6 +81,12 @@ salt_position(_) -> undefined.
|
|||
%% APIs
|
||||
%%------------------------------------------------------------------------------
|
||||
|
||||
refs() ->
|
||||
[ hoconsc:ref(?MODULE, standalone)
|
||||
, hoconsc:ref(?MODULE, cluster)
|
||||
, hoconsc:ref(?MODULE, sentinel)
|
||||
].
|
||||
|
||||
create(#{ query := Query
|
||||
, '_unique' := Unique
|
||||
} = Config) ->
|
||||
|
@ -125,7 +135,7 @@ authenticate(#{password := Password} = Credential,
|
|||
Selected = merge(Fields, Values),
|
||||
case check_password(Password, Selected, State) of
|
||||
ok ->
|
||||
{ok, #{superuser => maps:get("superuser", Selected, false)}};
|
||||
{ok, #{is_superuser => maps:get("is_superuser", Selected, false)}};
|
||||
{error, Reason} ->
|
||||
{error, Reason}
|
||||
end;
|
||||
|
@ -170,7 +180,7 @@ check_fields(["password_hash" | More], false) ->
    check_fields(More, true);
check_fields(["salt" | More], HasPassHash) ->
    check_fields(More, HasPassHash);
check_fields(["superuser" | More], HasPassHash) ->
check_fields(["is_superuser" | More], HasPassHash) ->
    check_fields(More, HasPassHash);
check_fields([Field | _], _) ->
    error({unsupported_field, Field}).
|
||||
|
|
|
@ -1,3 +1,3 @@
user_id,password_hash,salt,superuser
user_id,password_hash,salt,is_superuser
myuser3,b6c743545a7817ae8c8f624371d5f5f0373234bb0ff36b8ffbf19bce0e06ab75,de1024f462fb83910fd13151bd4bd235,true
myuser4,ee68c985a69208b6eda8c6c9b4c7c2d2b15ee2352cdd64a903171710a99182e8,ad773b5be9dd0613fe6c2f4d8c403139,false
|
||||
|
|
|
|
@ -3,12 +3,12 @@
        "user_id":"myuser1",
        "password_hash":"c5e46903df45e5dc096dc74657610dbee8deaacae656df88a1788f1847390242",
        "salt": "e378187547bf2d6f0545a3f441aa4d8a",
        "superuser": true
        "is_superuser": true
    },
    {
        "user_id":"myuser2",
        "password_hash":"f4d17f300b11e522fd33f497c11b126ef1ea5149c74d2220f9a16dc876d4567b",
        "salt": "6d3f9bd5b54d94b98adbcfe10b6d181f",
        "superuser": false
        "is_superuser": false
    }
]
|
||||
|
|
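%% Illustrative sketch, not part of this commit: the bundled credential
%% files above (now using the is_superuser column/key) can be loaded via
%% the import API exercised by the test suite; ChainID and AuthenticatorID
%% are placeholders for values obtained when creating the authenticator.
import_sample_users(ChainID, AuthenticatorID) ->
    Dir = code:lib_dir(emqx_authn, test),
    ok = emqx_authn:import_users(ChainID, AuthenticatorID,
                                 filename:join([Dir, "data/user-credentials.json"])),
    ok = emqx_authn:import_users(ChainID, AuthenticatorID,
                                 filename:join([Dir, "data/user-credentials.csv"])).
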
|
@ -19,97 +19,4 @@
|
|||
-compile(export_all).
|
||||
-compile(nowarn_export_all).
|
||||
|
||||
-include_lib("common_test/include/ct.hrl").
|
||||
-include_lib("eunit/include/eunit.hrl").
|
||||
|
||||
-include("emqx_authn.hrl").
|
||||
|
||||
-define(AUTH, emqx_authn).
|
||||
|
||||
all() ->
|
||||
emqx_ct:all(?MODULE).
|
||||
|
||||
init_per_suite(Config) ->
|
||||
application:set_env(ekka, strict_mode, true),
|
||||
emqx_ct_helpers:start_apps([emqx_authn]),
|
||||
Config.
|
||||
|
||||
end_per_suite(_) ->
|
||||
emqx_ct_helpers:stop_apps([emqx_authn]),
|
||||
ok.
|
||||
|
||||
t_chain(_) ->
|
||||
?assertMatch({ok, #{id := ?CHAIN, authenticators := []}}, ?AUTH:lookup_chain(?CHAIN)),
|
||||
|
||||
ChainID = <<"mychain">>,
|
||||
Chain = #{id => ChainID},
|
||||
?assertMatch({ok, #{id := ChainID, authenticators := []}}, ?AUTH:create_chain(Chain)),
|
||||
?assertEqual({error, {already_exists, {chain, ChainID}}}, ?AUTH:create_chain(Chain)),
|
||||
?assertMatch({ok, #{id := ChainID, authenticators := []}}, ?AUTH:lookup_chain(ChainID)),
|
||||
?assertEqual(ok, ?AUTH:delete_chain(ChainID)),
|
||||
?assertMatch({error, {not_found, {chain, ChainID}}}, ?AUTH:lookup_chain(ChainID)),
|
||||
ok.
|
||||
|
||||
t_authenticator(_) ->
|
||||
AuthenticatorName1 = <<"myauthenticator1">>,
|
||||
AuthenticatorConfig1 = #{name => AuthenticatorName1,
|
||||
mechanism => 'password-based',
|
||||
server_type => 'built-in-database',
|
||||
user_id_type => username,
|
||||
password_hash_algorithm => #{
|
||||
name => sha256
|
||||
}},
|
||||
{ok, #{name := AuthenticatorName1, id := ID1}} = ?AUTH:create_authenticator(?CHAIN, AuthenticatorConfig1),
|
||||
?assertMatch({ok, #{name := AuthenticatorName1}}, ?AUTH:lookup_authenticator(?CHAIN, ID1)),
|
||||
?assertMatch({ok, [#{name := AuthenticatorName1}]}, ?AUTH:list_authenticators(?CHAIN)),
|
||||
?assertEqual({error, name_has_be_used}, ?AUTH:create_authenticator(?CHAIN, AuthenticatorConfig1)),
|
||||
|
||||
AuthenticatorConfig2 = #{name => AuthenticatorName1,
|
||||
mechanism => jwt,
|
||||
use_jwks => false,
|
||||
algorithm => 'hmac-based',
|
||||
secret => <<"abcdef">>,
|
||||
secret_base64_encoded => false,
|
||||
verify_claims => []},
|
||||
{ok, #{name := AuthenticatorName1, id := ID1}} = ?AUTH:update_authenticator(?CHAIN, ID1, AuthenticatorConfig2),
|
||||
|
||||
ID2 = <<"random">>,
|
||||
?assertEqual({error, {not_found, {authenticator, ID2}}}, ?AUTH:update_authenticator(?CHAIN, ID2, AuthenticatorConfig2)),
|
||||
?assertEqual({error, name_has_be_used}, ?AUTH:update_or_create_authenticator(?CHAIN, ID2, AuthenticatorConfig2)),
|
||||
|
||||
AuthenticatorName2 = <<"myauthenticator2">>,
|
||||
AuthenticatorConfig3 = AuthenticatorConfig2#{name => AuthenticatorName2},
|
||||
{ok, #{name := AuthenticatorName2, id := ID2}} = ?AUTH:update_or_create_authenticator(?CHAIN, ID2, AuthenticatorConfig3),
|
||||
?assertMatch({ok, #{name := AuthenticatorName2}}, ?AUTH:lookup_authenticator(?CHAIN, ID2)),
|
||||
{ok, #{name := AuthenticatorName2, id := ID2}} = ?AUTH:update_or_create_authenticator(?CHAIN, ID2, AuthenticatorConfig3#{secret := <<"fedcba">>}),
|
||||
|
||||
?assertMatch({ok, #{id := ?CHAIN, authenticators := [#{name := AuthenticatorName1}, #{name := AuthenticatorName2}]}}, ?AUTH:lookup_chain(?CHAIN)),
|
||||
?assertMatch({ok, [#{name := AuthenticatorName1}, #{name := AuthenticatorName2}]}, ?AUTH:list_authenticators(?CHAIN)),
|
||||
|
||||
?assertEqual(ok, ?AUTH:move_authenticator(?CHAIN, ID2, top)),
|
||||
?assertMatch({ok, [#{name := AuthenticatorName2}, #{name := AuthenticatorName1}]}, ?AUTH:list_authenticators(?CHAIN)),
|
||||
|
||||
?assertEqual(ok, ?AUTH:move_authenticator(?CHAIN, ID2, bottom)),
|
||||
?assertMatch({ok, [#{name := AuthenticatorName1}, #{name := AuthenticatorName2}]}, ?AUTH:list_authenticators(?CHAIN)),
|
||||
|
||||
?assertEqual(ok, ?AUTH:move_authenticator(?CHAIN, ID2, {before, ID1})),
|
||||
|
||||
?assertMatch({ok, [#{name := AuthenticatorName2}, #{name := AuthenticatorName1}]}, ?AUTH:list_authenticators(?CHAIN)),
|
||||
|
||||
?assertEqual({error, {not_found, {authenticator, <<"nonexistent">>}}}, ?AUTH:move_authenticator(?CHAIN, ID2, {before, <<"nonexistent">>})),
|
||||
|
||||
?assertEqual(ok, ?AUTH:delete_authenticator(?CHAIN, ID1)),
|
||||
?assertEqual(ok, ?AUTH:delete_authenticator(?CHAIN, ID2)),
|
||||
?assertEqual({ok, []}, ?AUTH:list_authenticators(?CHAIN)),
|
||||
ok.
|
||||
|
||||
t_authenticate(_) ->
|
||||
ClientInfo = #{zone => default,
|
||||
listener => {tcp, default},
|
||||
username => <<"myuser">>,
|
||||
password => <<"mypass">>},
|
||||
?assertEqual({ok, #{superuser => false}}, emqx_access_control:authenticate(ClientInfo)),
|
||||
?assertEqual(false, emqx_authn:is_enabled()),
|
||||
emqx_authn:enable(),
|
||||
?assertEqual(true, emqx_authn:is_enabled()),
|
||||
?assertEqual({error, not_authorized}, emqx_access_control:authenticate(ClientInfo)).
|
||||
all() -> emqx_ct:all(?MODULE).
|
|
@ -19,140 +19,140 @@
|
|||
-compile(export_all).
|
||||
-compile(nowarn_export_all).
|
||||
|
||||
-include_lib("common_test/include/ct.hrl").
|
||||
-include_lib("eunit/include/eunit.hrl").
|
||||
% -include_lib("common_test/include/ct.hrl").
|
||||
% -include_lib("eunit/include/eunit.hrl").
|
||||
|
||||
-include("emqx_authn.hrl").
|
||||
% -include("emqx_authn.hrl").
|
||||
|
||||
-define(AUTH, emqx_authn).
|
||||
% -define(AUTH, emqx_authn).
|
||||
|
||||
all() ->
|
||||
emqx_ct:all(?MODULE).
|
||||
|
||||
init_per_suite(Config) ->
|
||||
emqx_ct_helpers:start_apps([emqx_authn]),
|
||||
Config.
|
||||
% init_per_suite(Config) ->
|
||||
% emqx_ct_helpers:start_apps([emqx_authn]),
|
||||
% Config.
|
||||
|
||||
end_per_suite(_) ->
|
||||
emqx_ct_helpers:stop_apps([emqx_authn]),
|
||||
ok.
|
||||
% end_per_suite(_) ->
|
||||
% emqx_ct_helpers:stop_apps([emqx_authn]),
|
||||
% ok.
|
||||
|
||||
t_jwt_authenticator(_) ->
|
||||
AuthenticatorName = <<"myauthenticator">>,
|
||||
Config = #{name => AuthenticatorName,
|
||||
mechanism => jwt,
|
||||
use_jwks => false,
|
||||
algorithm => 'hmac-based',
|
||||
secret => <<"abcdef">>,
|
||||
secret_base64_encoded => false,
|
||||
verify_claims => []},
|
||||
{ok, #{name := AuthenticatorName, id := ID}} = ?AUTH:create_authenticator(?CHAIN, Config),
|
||||
% t_jwt_authenticator(_) ->
|
||||
% AuthenticatorName = <<"myauthenticator">>,
|
||||
% Config = #{name => AuthenticatorName,
|
||||
% mechanism => jwt,
|
||||
% use_jwks => false,
|
||||
% algorithm => 'hmac-based',
|
||||
% secret => <<"abcdef">>,
|
||||
% secret_base64_encoded => false,
|
||||
% verify_claims => []},
|
||||
% {ok, #{name := AuthenticatorName, id := ID}} = ?AUTH:create_authenticator(?CHAIN, Config),
|
||||
|
||||
Payload = #{<<"username">> => <<"myuser">>},
|
||||
JWS = generate_jws('hmac-based', Payload, <<"abcdef">>),
|
||||
ClientInfo = #{username => <<"myuser">>,
|
||||
password => JWS},
|
||||
?assertEqual({stop, {ok, #{superuser => false}}}, ?AUTH:authenticate(ClientInfo, ignored)),
|
||||
% Payload = #{<<"username">> => <<"myuser">>},
|
||||
% JWS = generate_jws('hmac-based', Payload, <<"abcdef">>),
|
||||
% ClientInfo = #{username => <<"myuser">>,
|
||||
% password => JWS},
|
||||
% ?assertEqual({stop, {ok, #{is_superuser => false}}}, ?AUTH:authenticate(ClientInfo, ignored)),
|
||||
|
||||
Payload1 = #{<<"username">> => <<"myuser">>, <<"superuser">> => true},
|
||||
JWS1 = generate_jws('hmac-based', Payload1, <<"abcdef">>),
|
||||
ClientInfo1 = #{username => <<"myuser">>,
|
||||
password => JWS1},
|
||||
?assertEqual({stop, {ok, #{superuser => true}}}, ?AUTH:authenticate(ClientInfo1, ignored)),
|
||||
% Payload1 = #{<<"username">> => <<"myuser">>, <<"is_superuser">> => true},
|
||||
% JWS1 = generate_jws('hmac-based', Payload1, <<"abcdef">>),
|
||||
% ClientInfo1 = #{username => <<"myuser">>,
|
||||
% password => JWS1},
|
||||
% ?assertEqual({stop, {ok, #{is_superuser => true}}}, ?AUTH:authenticate(ClientInfo1, ignored)),
|
||||
|
||||
BadJWS = generate_jws('hmac-based', Payload, <<"bad_secret">>),
|
||||
ClientInfo2 = ClientInfo#{password => BadJWS},
|
||||
?assertEqual({stop, {error, not_authorized}}, ?AUTH:authenticate(ClientInfo2, ignored)),
|
||||
% BadJWS = generate_jws('hmac-based', Payload, <<"bad_secret">>),
|
||||
% ClientInfo2 = ClientInfo#{password => BadJWS},
|
||||
% ?assertEqual({stop, {error, not_authorized}}, ?AUTH:authenticate(ClientInfo2, ignored)),
|
||||
|
||||
%% secret_base64_encoded
|
||||
Config2 = Config#{secret => base64:encode(<<"abcdef">>),
|
||||
secret_base64_encoded => true},
|
||||
?assertMatch({ok, _}, ?AUTH:update_authenticator(?CHAIN, ID, Config2)),
|
||||
?assertEqual({stop, {ok, #{superuser => false}}}, ?AUTH:authenticate(ClientInfo, ignored)),
|
||||
% %% secret_base64_encoded
|
||||
% Config2 = Config#{secret => base64:encode(<<"abcdef">>),
|
||||
% secret_base64_encoded => true},
|
||||
% ?assertMatch({ok, _}, ?AUTH:update_authenticator(?CHAIN, ID, Config2)),
|
||||
% ?assertEqual({stop, {ok, #{is_superuser => false}}}, ?AUTH:authenticate(ClientInfo, ignored)),
|
||||
|
||||
Config3 = Config#{verify_claims => [{<<"username">>, <<"${mqtt-username}">>}]},
|
||||
?assertMatch({ok, _}, ?AUTH:update_authenticator(?CHAIN, ID, Config3)),
|
||||
?assertEqual({stop, {ok, #{superuser => false}}}, ?AUTH:authenticate(ClientInfo, ignored)),
|
||||
?assertEqual({stop, {error, bad_username_or_password}}, ?AUTH:authenticate(ClientInfo#{username => <<"otheruser">>}, ok)),
|
||||
% Config3 = Config#{verify_claims => [{<<"username">>, <<"${mqtt-username}">>}]},
|
||||
% ?assertMatch({ok, _}, ?AUTH:update_authenticator(?CHAIN, ID, Config3)),
|
||||
% ?assertEqual({stop, {ok, #{is_superuser => false}}}, ?AUTH:authenticate(ClientInfo, ignored)),
|
||||
% ?assertEqual({stop, {error, bad_username_or_password}}, ?AUTH:authenticate(ClientInfo#{username => <<"otheruser">>}, ok)),
|
||||
|
||||
%% Expiration
|
||||
Payload3 = #{ <<"username">> => <<"myuser">>
|
||||
, <<"exp">> => erlang:system_time(second) - 60},
|
||||
JWS3 = generate_jws('hmac-based', Payload3, <<"abcdef">>),
|
||||
ClientInfo3 = ClientInfo#{password => JWS3},
|
||||
?assertEqual({stop, {error, bad_username_or_password}}, ?AUTH:authenticate(ClientInfo3, ignored)),
|
||||
% %% Expiration
|
||||
% Payload3 = #{ <<"username">> => <<"myuser">>
|
||||
% , <<"exp">> => erlang:system_time(second) - 60},
|
||||
% JWS3 = generate_jws('hmac-based', Payload3, <<"abcdef">>),
|
||||
% ClientInfo3 = ClientInfo#{password => JWS3},
|
||||
% ?assertEqual({stop, {error, bad_username_or_password}}, ?AUTH:authenticate(ClientInfo3, ignored)),
|
||||
|
||||
Payload4 = #{ <<"username">> => <<"myuser">>
|
||||
, <<"exp">> => erlang:system_time(second) + 60},
|
||||
JWS4 = generate_jws('hmac-based', Payload4, <<"abcdef">>),
|
||||
ClientInfo4 = ClientInfo#{password => JWS4},
|
||||
?assertEqual({stop, {ok, #{superuser => false}}}, ?AUTH:authenticate(ClientInfo4, ignored)),
|
||||
% Payload4 = #{ <<"username">> => <<"myuser">>
|
||||
% , <<"exp">> => erlang:system_time(second) + 60},
|
||||
% JWS4 = generate_jws('hmac-based', Payload4, <<"abcdef">>),
|
||||
% ClientInfo4 = ClientInfo#{password => JWS4},
|
||||
% ?assertEqual({stop, {ok, #{is_superuser => false}}}, ?AUTH:authenticate(ClientInfo4, ignored)),
|
||||
|
||||
%% Issued At
|
||||
Payload5 = #{ <<"username">> => <<"myuser">>
|
||||
, <<"iat">> => erlang:system_time(second) - 60},
|
||||
JWS5 = generate_jws('hmac-based', Payload5, <<"abcdef">>),
|
||||
ClientInfo5 = ClientInfo#{password => JWS5},
|
||||
?assertEqual({stop, {ok, #{superuser => false}}}, ?AUTH:authenticate(ClientInfo5, ignored)),
|
||||
% %% Issued At
|
||||
% Payload5 = #{ <<"username">> => <<"myuser">>
|
||||
% , <<"iat">> => erlang:system_time(second) - 60},
|
||||
% JWS5 = generate_jws('hmac-based', Payload5, <<"abcdef">>),
|
||||
% ClientInfo5 = ClientInfo#{password => JWS5},
|
||||
% ?assertEqual({stop, {ok, #{is_superuser => false}}}, ?AUTH:authenticate(ClientInfo5, ignored)),
|
||||
|
||||
Payload6 = #{ <<"username">> => <<"myuser">>
|
||||
, <<"iat">> => erlang:system_time(second) + 60},
|
||||
JWS6 = generate_jws('hmac-based', Payload6, <<"abcdef">>),
|
||||
ClientInfo6 = ClientInfo#{password => JWS6},
|
||||
?assertEqual({stop, {error, bad_username_or_password}}, ?AUTH:authenticate(ClientInfo6, ignored)),
|
||||
% Payload6 = #{ <<"username">> => <<"myuser">>
|
||||
% , <<"iat">> => erlang:system_time(second) + 60},
|
||||
% JWS6 = generate_jws('hmac-based', Payload6, <<"abcdef">>),
|
||||
% ClientInfo6 = ClientInfo#{password => JWS6},
|
||||
% ?assertEqual({stop, {error, bad_username_or_password}}, ?AUTH:authenticate(ClientInfo6, ignored)),
|
||||
|
||||
%% Not Before
|
||||
Payload7 = #{ <<"username">> => <<"myuser">>
|
||||
, <<"nbf">> => erlang:system_time(second) - 60},
|
||||
JWS7 = generate_jws('hmac-based', Payload7, <<"abcdef">>),
|
||||
ClientInfo7 = ClientInfo#{password => JWS7},
|
||||
?assertEqual({stop, {ok, #{superuser => false}}}, ?AUTH:authenticate(ClientInfo7, ignored)),
|
||||
% %% Not Before
|
||||
% Payload7 = #{ <<"username">> => <<"myuser">>
|
||||
% , <<"nbf">> => erlang:system_time(second) - 60},
|
||||
% JWS7 = generate_jws('hmac-based', Payload7, <<"abcdef">>),
|
||||
% ClientInfo7 = ClientInfo#{password => JWS7},
|
||||
% ?assertEqual({stop, {ok, #{is_superuser => false}}}, ?AUTH:authenticate(ClientInfo7, ignored)),
|
||||
|
||||
Payload8 = #{ <<"username">> => <<"myuser">>
|
||||
, <<"nbf">> => erlang:system_time(second) + 60},
|
||||
JWS8 = generate_jws('hmac-based', Payload8, <<"abcdef">>),
|
||||
ClientInfo8 = ClientInfo#{password => JWS8},
|
||||
?assertEqual({stop, {error, bad_username_or_password}}, ?AUTH:authenticate(ClientInfo8, ignored)),
|
||||
% Payload8 = #{ <<"username">> => <<"myuser">>
|
||||
% , <<"nbf">> => erlang:system_time(second) + 60},
|
||||
% JWS8 = generate_jws('hmac-based', Payload8, <<"abcdef">>),
|
||||
% ClientInfo8 = ClientInfo#{password => JWS8},
|
||||
% ?assertEqual({stop, {error, bad_username_or_password}}, ?AUTH:authenticate(ClientInfo8, ignored)),
|
||||
|
||||
?assertEqual(ok, ?AUTH:delete_authenticator(?CHAIN, ID)),
|
||||
ok.
|
||||
% ?assertEqual(ok, ?AUTH:delete_authenticator(?CHAIN, ID)),
|
||||
% ok.
|
||||
|
||||
t_jwt_authenticator2(_) ->
|
||||
Dir = code:lib_dir(emqx_authn, test),
|
||||
PublicKey = list_to_binary(filename:join([Dir, "data/public_key.pem"])),
|
||||
PrivateKey = list_to_binary(filename:join([Dir, "data/private_key.pem"])),
|
||||
AuthenticatorName = <<"myauthenticator">>,
|
||||
Config = #{name => AuthenticatorName,
|
||||
mechanism => jwt,
|
||||
use_jwks => false,
|
||||
algorithm => 'public-key',
|
||||
certificate => PublicKey,
|
||||
verify_claims => []},
|
||||
{ok, #{name := AuthenticatorName, id := ID}} = ?AUTH:create_authenticator(?CHAIN, Config),
|
||||
% t_jwt_authenticator2(_) ->
|
||||
% Dir = code:lib_dir(emqx_authn, test),
|
||||
% PublicKey = list_to_binary(filename:join([Dir, "data/public_key.pem"])),
|
||||
% PrivateKey = list_to_binary(filename:join([Dir, "data/private_key.pem"])),
|
||||
% AuthenticatorName = <<"myauthenticator">>,
|
||||
% Config = #{name => AuthenticatorName,
|
||||
% mechanism => jwt,
|
||||
% use_jwks => false,
|
||||
% algorithm => 'public-key',
|
||||
% certificate => PublicKey,
|
||||
% verify_claims => []},
|
||||
% {ok, #{name := AuthenticatorName, id := ID}} = ?AUTH:create_authenticator(?CHAIN, Config),
|
||||
|
||||
Payload = #{<<"username">> => <<"myuser">>},
|
||||
JWS = generate_jws('public-key', Payload, PrivateKey),
|
||||
ClientInfo = #{username => <<"myuser">>,
|
||||
password => JWS},
|
||||
?assertEqual({stop, {ok, #{superuser => false}}}, ?AUTH:authenticate(ClientInfo, ignored)),
|
||||
?assertEqual({stop, {error, not_authorized}}, ?AUTH:authenticate(ClientInfo#{password => <<"badpassword">>}, ignored)),
|
||||
% Payload = #{<<"username">> => <<"myuser">>},
|
||||
% JWS = generate_jws('public-key', Payload, PrivateKey),
|
||||
% ClientInfo = #{username => <<"myuser">>,
|
||||
% password => JWS},
|
||||
% ?assertEqual({stop, {ok, #{is_superuser => false}}}, ?AUTH:authenticate(ClientInfo, ignored)),
|
||||
% ?assertEqual({stop, {error, not_authorized}}, ?AUTH:authenticate(ClientInfo#{password => <<"badpassword">>}, ignored)),
|
||||
|
||||
?assertEqual(ok, ?AUTH:delete_authenticator(?CHAIN, ID)),
|
||||
ok.
|
||||
% ?assertEqual(ok, ?AUTH:delete_authenticator(?CHAIN, ID)),
|
||||
% ok.
|
||||
|
||||
generate_jws('hmac-based', Payload, Secret) ->
|
||||
JWK = jose_jwk:from_oct(Secret),
|
||||
Header = #{ <<"alg">> => <<"HS256">>
|
||||
, <<"typ">> => <<"JWT">>
|
||||
},
|
||||
Signed = jose_jwt:sign(JWK, Header, Payload),
|
||||
{_, JWS} = jose_jws:compact(Signed),
|
||||
JWS;
|
||||
generate_jws('public-key', Payload, PrivateKey) ->
|
||||
JWK = jose_jwk:from_pem_file(PrivateKey),
|
||||
Header = #{ <<"alg">> => <<"RS256">>
|
||||
, <<"typ">> => <<"JWT">>
|
||||
},
|
||||
Signed = jose_jwt:sign(JWK, Header, Payload),
|
||||
{_, JWS} = jose_jws:compact(Signed),
|
||||
JWS.
|
||||
% generate_jws('hmac-based', Payload, Secret) ->
|
||||
% JWK = jose_jwk:from_oct(Secret),
|
||||
% Header = #{ <<"alg">> => <<"HS256">>
|
||||
% , <<"typ">> => <<"JWT">>
|
||||
% },
|
||||
% Signed = jose_jwt:sign(JWK, Header, Payload),
|
||||
% {_, JWS} = jose_jws:compact(Signed),
|
||||
% JWS;
|
||||
% generate_jws('public-key', Payload, PrivateKey) ->
|
||||
% JWK = jose_jwk:from_pem_file(PrivateKey),
|
||||
% Header = #{ <<"alg">> => <<"RS256">>
|
||||
% , <<"typ">> => <<"JWT">>
|
||||
% },
|
||||
% Signed = jose_jwt:sign(JWK, Header, Payload),
|
||||
% {_, JWS} = jose_jws:compact(Signed),
|
||||
% JWS.
|
||||
|
|
|
@ -19,146 +19,146 @@
|
|||
-compile(export_all).
|
||||
-compile(nowarn_export_all).
|
||||
|
||||
-include_lib("common_test/include/ct.hrl").
|
||||
-include_lib("eunit/include/eunit.hrl").
|
||||
% -include_lib("common_test/include/ct.hrl").
|
||||
% -include_lib("eunit/include/eunit.hrl").
|
||||
|
||||
-include("emqx_authn.hrl").
|
||||
% -include("emqx_authn.hrl").
|
||||
|
||||
-define(AUTH, emqx_authn).
|
||||
% -define(AUTH, emqx_authn).
|
||||
|
||||
all() ->
|
||||
emqx_ct:all(?MODULE).
|
||||
|
||||
init_per_suite(Config) ->
|
||||
emqx_ct_helpers:start_apps([emqx_authn]),
|
||||
Config.
|
||||
% init_per_suite(Config) ->
|
||||
% emqx_ct_helpers:start_apps([emqx_authn]),
|
||||
% Config.
|
||||
|
||||
end_per_suite(_) ->
|
||||
emqx_ct_helpers:stop_apps([emqx_authn]),
|
||||
ok.
|
||||
% end_per_suite(_) ->
|
||||
% emqx_ct_helpers:stop_apps([emqx_authn]),
|
||||
% ok.
|
||||
|
||||
t_mnesia_authenticator(_) ->
|
||||
AuthenticatorName = <<"myauthenticator">>,
|
||||
AuthenticatorConfig = #{name => AuthenticatorName,
|
||||
mechanism => 'password-based',
|
||||
server_type => 'built-in-database',
|
||||
user_id_type => username,
|
||||
password_hash_algorithm => #{
|
||||
name => sha256
|
||||
}},
|
||||
{ok, #{name := AuthenticatorName, id := ID}} = ?AUTH:create_authenticator(?CHAIN, AuthenticatorConfig),
|
||||
% t_mnesia_authenticator(_) ->
|
||||
% AuthenticatorName = <<"myauthenticator">>,
|
||||
% AuthenticatorConfig = #{name => AuthenticatorName,
|
||||
% mechanism => 'password-based',
|
||||
% server_type => 'built-in-database',
|
||||
% user_id_type => username,
|
||||
% password_hash_algorithm => #{
|
||||
% name => sha256
|
||||
% }},
|
||||
% {ok, #{name := AuthenticatorName, id := ID}} = ?AUTH:create_authenticator(?CHAIN, AuthenticatorConfig),
|
||||
|
||||
UserInfo = #{user_id => <<"myuser">>,
|
||||
password => <<"mypass">>},
|
||||
?assertMatch({ok, #{user_id := <<"myuser">>}}, ?AUTH:add_user(?CHAIN, ID, UserInfo)),
|
||||
?assertMatch({ok, #{user_id := <<"myuser">>}}, ?AUTH:lookup_user(?CHAIN, ID, <<"myuser">>)),
|
||||
% UserInfo = #{user_id => <<"myuser">>,
|
||||
% password => <<"mypass">>},
|
||||
% ?assertMatch({ok, #{user_id := <<"myuser">>}}, ?AUTH:add_user(?CHAIN, ID, UserInfo)),
|
||||
% ?assertMatch({ok, #{user_id := <<"myuser">>}}, ?AUTH:lookup_user(?CHAIN, ID, <<"myuser">>)),
|
||||
|
||||
ClientInfo = #{zone => external,
|
||||
username => <<"myuser">>,
|
||||
password => <<"mypass">>},
|
||||
    ?assertEqual({stop, {ok, #{superuser => false}}}, ?AUTH:authenticate(ClientInfo, ignored)),
    ?AUTH:enable(),
    ?assertEqual({ok, #{superuser => false}}, emqx_access_control:authenticate(ClientInfo)),
    % ClientInfo = #{zone => external,
    %                username => <<"myuser">>,
    %                password => <<"mypass">>},
    % ?assertEqual({stop, {ok, #{is_superuser => false}}}, ?AUTH:authenticate(ClientInfo, ignored)),
    % ?AUTH:enable(),
    % ?assertEqual({ok, #{is_superuser => false}}, emqx_access_control:authenticate(ClientInfo)),

    ClientInfo2 = ClientInfo#{username => <<"baduser">>},
    ?assertEqual({stop, {error, not_authorized}}, ?AUTH:authenticate(ClientInfo2, ignored)),
    ?assertEqual({error, not_authorized}, emqx_access_control:authenticate(ClientInfo2)),
    % ClientInfo2 = ClientInfo#{username => <<"baduser">>},
    % ?assertEqual({stop, {error, not_authorized}}, ?AUTH:authenticate(ClientInfo2, ignored)),
    % ?assertEqual({error, not_authorized}, emqx_access_control:authenticate(ClientInfo2)),

    ClientInfo3 = ClientInfo#{password => <<"badpass">>},
    ?assertEqual({stop, {error, bad_username_or_password}}, ?AUTH:authenticate(ClientInfo3, ignored)),
    ?assertEqual({error, bad_username_or_password}, emqx_access_control:authenticate(ClientInfo3)),
    % ClientInfo3 = ClientInfo#{password => <<"badpass">>},
    % ?assertEqual({stop, {error, bad_username_or_password}}, ?AUTH:authenticate(ClientInfo3, ignored)),
    % ?assertEqual({error, bad_username_or_password}, emqx_access_control:authenticate(ClientInfo3)),

    UserInfo2 = UserInfo#{password => <<"mypass2">>},
    ?assertMatch({ok, #{user_id := <<"myuser">>}}, ?AUTH:update_user(?CHAIN, ID, <<"myuser">>, UserInfo2)),
    ClientInfo4 = ClientInfo#{password => <<"mypass2">>},
    ?assertEqual({stop, {ok, #{superuser => false}}}, ?AUTH:authenticate(ClientInfo4, ignored)),
    % UserInfo2 = UserInfo#{password => <<"mypass2">>},
    % ?assertMatch({ok, #{user_id := <<"myuser">>}}, ?AUTH:update_user(?CHAIN, ID, <<"myuser">>, UserInfo2)),
    % ClientInfo4 = ClientInfo#{password => <<"mypass2">>},
    % ?assertEqual({stop, {ok, #{is_superuser => false}}}, ?AUTH:authenticate(ClientInfo4, ignored)),

    ?assertMatch({ok, #{user_id := <<"myuser">>}}, ?AUTH:update_user(?CHAIN, ID, <<"myuser">>, #{superuser => true})),
    ?assertEqual({stop, {ok, #{superuser => true}}}, ?AUTH:authenticate(ClientInfo4, ignored)),
    % ?assertMatch({ok, #{user_id := <<"myuser">>}}, ?AUTH:update_user(?CHAIN, ID, <<"myuser">>, #{is_superuser => true})),
    % ?assertEqual({stop, {ok, #{is_superuser => true}}}, ?AUTH:authenticate(ClientInfo4, ignored)),

    ?assertEqual(ok, ?AUTH:delete_user(?CHAIN, ID, <<"myuser">>)),
    ?assertEqual({error, not_found}, ?AUTH:lookup_user(?CHAIN, ID, <<"myuser">>)),
    % ?assertEqual(ok, ?AUTH:delete_user(?CHAIN, ID, <<"myuser">>)),
    % ?assertEqual({error, not_found}, ?AUTH:lookup_user(?CHAIN, ID, <<"myuser">>)),

    ?assertMatch({ok, #{user_id := <<"myuser">>}}, ?AUTH:add_user(?CHAIN, ID, UserInfo)),
    ?assertMatch({ok, #{user_id := <<"myuser">>}}, ?AUTH:lookup_user(?CHAIN, ID, <<"myuser">>)),
    ?assertEqual(ok, ?AUTH:delete_authenticator(?CHAIN, ID)),
    % ?assertMatch({ok, #{user_id := <<"myuser">>}}, ?AUTH:add_user(?CHAIN, ID, UserInfo)),
    % ?assertMatch({ok, #{user_id := <<"myuser">>}}, ?AUTH:lookup_user(?CHAIN, ID, <<"myuser">>)),
    % ?assertEqual(ok, ?AUTH:delete_authenticator(?CHAIN, ID)),

    {ok, #{name := AuthenticatorName, id := ID1}} = ?AUTH:create_authenticator(?CHAIN, AuthenticatorConfig),
    ?assertMatch({error, not_found}, ?AUTH:lookup_user(?CHAIN, ID1, <<"myuser">>)),
    ?assertEqual(ok, ?AUTH:delete_authenticator(?CHAIN, ID1)),
    ok.
    % {ok, #{name := AuthenticatorName, id := ID1}} = ?AUTH:create_authenticator(?CHAIN, AuthenticatorConfig),
    % ?assertMatch({error, not_found}, ?AUTH:lookup_user(?CHAIN, ID1, <<"myuser">>)),
    % ?assertEqual(ok, ?AUTH:delete_authenticator(?CHAIN, ID1)),
    % ok.
|
||||
|
||||
t_import(_) ->
|
||||
AuthenticatorName = <<"myauthenticator">>,
|
||||
AuthenticatorConfig = #{name => AuthenticatorName,
|
||||
mechanism => 'password-based',
|
||||
server_type => 'built-in-database',
|
||||
user_id_type => username,
|
||||
password_hash_algorithm => #{
|
||||
name => sha256
|
||||
}},
|
||||
{ok, #{name := AuthenticatorName, id := ID}} = ?AUTH:create_authenticator(?CHAIN, AuthenticatorConfig),
|
||||
% t_import(_) ->
|
||||
% AuthenticatorName = <<"myauthenticator">>,
|
||||
% AuthenticatorConfig = #{name => AuthenticatorName,
|
||||
% mechanism => 'password-based',
|
||||
% server_type => 'built-in-database',
|
||||
% user_id_type => username,
|
||||
% password_hash_algorithm => #{
|
||||
% name => sha256
|
||||
% }},
|
||||
% {ok, #{name := AuthenticatorName, id := ID}} = ?AUTH:create_authenticator(?CHAIN, AuthenticatorConfig),
|
||||
|
||||
Dir = code:lib_dir(emqx_authn, test),
|
||||
?assertEqual(ok, ?AUTH:import_users(?CHAIN, ID, filename:join([Dir, "data/user-credentials.json"]))),
|
||||
?assertEqual(ok, ?AUTH:import_users(?CHAIN, ID, filename:join([Dir, "data/user-credentials.csv"]))),
|
||||
?assertMatch({ok, #{user_id := <<"myuser1">>}}, ?AUTH:lookup_user(?CHAIN, ID, <<"myuser1">>)),
|
||||
?assertMatch({ok, #{user_id := <<"myuser3">>}}, ?AUTH:lookup_user(?CHAIN, ID, <<"myuser3">>)),
|
||||
% Dir = code:lib_dir(emqx_authn, test),
|
||||
% ?assertEqual(ok, ?AUTH:import_users(?CHAIN, ID, filename:join([Dir, "data/user-credentials.json"]))),
|
||||
% ?assertEqual(ok, ?AUTH:import_users(?CHAIN, ID, filename:join([Dir, "data/user-credentials.csv"]))),
|
||||
% ?assertMatch({ok, #{user_id := <<"myuser1">>}}, ?AUTH:lookup_user(?CHAIN, ID, <<"myuser1">>)),
|
||||
% ?assertMatch({ok, #{user_id := <<"myuser3">>}}, ?AUTH:lookup_user(?CHAIN, ID, <<"myuser3">>)),
|
||||
|
||||
ClientInfo1 = #{username => <<"myuser1">>,
|
||||
password => <<"mypassword1">>},
|
||||
?assertEqual({stop, {ok, #{superuser => true}}}, ?AUTH:authenticate(ClientInfo1, ignored)),
|
||||
% ClientInfo1 = #{username => <<"myuser1">>,
|
||||
% password => <<"mypassword1">>},
|
||||
% ?assertEqual({stop, {ok, #{is_superuser => true}}}, ?AUTH:authenticate(ClientInfo1, ignored)),
|
||||
|
||||
ClientInfo2 = ClientInfo1#{username => <<"myuser2">>,
|
||||
password => <<"mypassword2">>},
|
||||
?assertEqual({stop, {ok, #{superuser => false}}}, ?AUTH:authenticate(ClientInfo2, ignored)),
|
||||
% ClientInfo2 = ClientInfo1#{username => <<"myuser2">>,
|
||||
% password => <<"mypassword2">>},
|
||||
% ?assertEqual({stop, {ok, #{is_superuser => false}}}, ?AUTH:authenticate(ClientInfo2, ignored)),
|
||||
|
||||
ClientInfo3 = ClientInfo1#{username => <<"myuser3">>,
|
||||
password => <<"mypassword3">>},
|
||||
?assertEqual({stop, {ok, #{superuser => true}}}, ?AUTH:authenticate(ClientInfo3, ignored)),
|
||||
% ClientInfo3 = ClientInfo1#{username => <<"myuser3">>,
|
||||
% password => <<"mypassword3">>},
|
||||
% ?assertEqual({stop, {ok, #{is_superuser => true}}}, ?AUTH:authenticate(ClientInfo3, ignored)),
|
||||
|
||||
?assertEqual(ok, ?AUTH:delete_authenticator(?CHAIN, ID)),
|
||||
ok.
|
||||
% ?assertEqual(ok, ?AUTH:delete_authenticator(?CHAIN, ID)),
|
||||
% ok.
|
||||
|
||||
t_multi_mnesia_authenticator(_) ->
|
||||
AuthenticatorName1 = <<"myauthenticator1">>,
|
||||
AuthenticatorConfig1 = #{name => AuthenticatorName1,
|
||||
mechanism => 'password-based',
|
||||
server_type => 'built-in-database',
|
||||
user_id_type => username,
|
||||
password_hash_algorithm => #{
|
||||
name => sha256
|
||||
}},
|
||||
AuthenticatorName2 = <<"myauthenticator2">>,
|
||||
AuthenticatorConfig2 = #{name => AuthenticatorName2,
|
||||
mechanism => 'password-based',
|
||||
server_type => 'built-in-database',
|
||||
user_id_type => clientid,
|
||||
password_hash_algorithm => #{
|
||||
name => sha256
|
||||
}},
|
||||
{ok, #{name := AuthenticatorName1, id := ID1}} = ?AUTH:create_authenticator(?CHAIN, AuthenticatorConfig1),
|
||||
{ok, #{name := AuthenticatorName2, id := ID2}} = ?AUTH:create_authenticator(?CHAIN, AuthenticatorConfig2),
|
||||
% t_multi_mnesia_authenticator(_) ->
|
||||
% AuthenticatorName1 = <<"myauthenticator1">>,
|
||||
% AuthenticatorConfig1 = #{name => AuthenticatorName1,
|
||||
% mechanism => 'password-based',
|
||||
% server_type => 'built-in-database',
|
||||
% user_id_type => username,
|
||||
% password_hash_algorithm => #{
|
||||
% name => sha256
|
||||
% }},
|
||||
% AuthenticatorName2 = <<"myauthenticator2">>,
|
||||
% AuthenticatorConfig2 = #{name => AuthenticatorName2,
|
||||
% mechanism => 'password-based',
|
||||
% server_type => 'built-in-database',
|
||||
% user_id_type => clientid,
|
||||
% password_hash_algorithm => #{
|
||||
% name => sha256
|
||||
% }},
|
||||
% {ok, #{name := AuthenticatorName1, id := ID1}} = ?AUTH:create_authenticator(?CHAIN, AuthenticatorConfig1),
|
||||
% {ok, #{name := AuthenticatorName2, id := ID2}} = ?AUTH:create_authenticator(?CHAIN, AuthenticatorConfig2),
|
||||
|
||||
?assertMatch({ok, #{user_id := <<"myuser">>}},
|
||||
?AUTH:add_user(?CHAIN, ID1,
|
||||
#{user_id => <<"myuser">>,
|
||||
password => <<"mypass1">>})),
|
||||
?assertMatch({ok, #{user_id := <<"myclient">>}},
|
||||
?AUTH:add_user(?CHAIN, ID2,
|
||||
#{user_id => <<"myclient">>,
|
||||
password => <<"mypass2">>})),
|
||||
% ?assertMatch({ok, #{user_id := <<"myuser">>}},
|
||||
% ?AUTH:add_user(?CHAIN, ID1,
|
||||
% #{user_id => <<"myuser">>,
|
||||
% password => <<"mypass1">>})),
|
||||
% ?assertMatch({ok, #{user_id := <<"myclient">>}},
|
||||
% ?AUTH:add_user(?CHAIN, ID2,
|
||||
% #{user_id => <<"myclient">>,
|
||||
% password => <<"mypass2">>})),
|
||||
|
||||
ClientInfo1 = #{username => <<"myuser">>,
|
||||
clientid => <<"myclient">>,
|
||||
password => <<"mypass1">>},
|
||||
?assertEqual({stop, {ok, #{superuser => false}}}, ?AUTH:authenticate(ClientInfo1, ignored)),
|
||||
?assertEqual(ok, ?AUTH:move_authenticator(?CHAIN, ID2, top)),
|
||||
% ClientInfo1 = #{username => <<"myuser">>,
|
||||
% clientid => <<"myclient">>,
|
||||
% password => <<"mypass1">>},
|
||||
% ?assertEqual({stop, {ok, #{is_superuser => false}}}, ?AUTH:authenticate(ClientInfo1, ignored)),
|
||||
% ?assertEqual(ok, ?AUTH:move_authenticator(?CHAIN, ID2, top)),
|
||||
|
||||
?assertEqual({stop, {error, bad_username_or_password}}, ?AUTH:authenticate(ClientInfo1, ignored)),
|
||||
ClientInfo2 = ClientInfo1#{password => <<"mypass2">>},
|
||||
?assertEqual({stop, {ok, #{superuser => false}}}, ?AUTH:authenticate(ClientInfo2, ignored)),
|
||||
% ?assertEqual({stop, {error, bad_username_or_password}}, ?AUTH:authenticate(ClientInfo1, ignored)),
|
||||
% ClientInfo2 = ClientInfo1#{password => <<"mypass2">>},
|
||||
% ?assertEqual({stop, {ok, #{is_superuser => false}}}, ?AUTH:authenticate(ClientInfo2, ignored)),
|
||||
|
||||
?assertEqual(ok, ?AUTH:delete_authenticator(?CHAIN, ID1)),
|
||||
?assertEqual(ok, ?AUTH:delete_authenticator(?CHAIN, ID2)),
|
||||
ok.
|
||||
% ?assertEqual(ok, ?AUTH:delete_authenticator(?CHAIN, ID1)),
|
||||
% ?assertEqual(ok, ?AUTH:delete_authenticator(?CHAIN, ID2)),
|
||||
% ok.
|
||||
|
|
|
@@ -2,72 +2,62 @@ authorization {
|
|||
sources = [
|
||||
# {
|
||||
# type: http
|
||||
# config: {
|
||||
# url: "https://emqx.com"
|
||||
# headers: {
|
||||
# Accept: "application/json"
|
||||
# Content-Type: "application/json"
|
||||
# }
|
||||
# url: "https://emqx.com"
|
||||
# headers: {
|
||||
# Accept: "application/json"
|
||||
# Content-Type: "application/json"
|
||||
# }
|
||||
# },
|
||||
# {
|
||||
# type: mysql
|
||||
# config: {
|
||||
# server: "127.0.0.1:3306"
|
||||
# database: mqtt
|
||||
# pool_size: 1
|
||||
# username: root
|
||||
# password: public
|
||||
# auto_reconnect: true
|
||||
# ssl: {
|
||||
# enable: true
|
||||
# cacertfile: "{{ platform_etc_dir }}/certs/cacert.pem"
|
||||
# certfile: "{{ platform_etc_dir }}/certs/client-cert.pem"
|
||||
# keyfile: "{{ platform_etc_dir }}/certs/client-key.pem"
|
||||
# }
|
||||
# server: "127.0.0.1:3306"
|
||||
# database: mqtt
|
||||
# pool_size: 1
|
||||
# username: root
|
||||
# password: public
|
||||
# auto_reconnect: true
|
||||
# ssl: {
|
||||
# enable: true
|
||||
# cacertfile: "{{ platform_etc_dir }}/certs/cacert.pem"
|
||||
# certfile: "{{ platform_etc_dir }}/certs/client-cert.pem"
|
||||
# keyfile: "{{ platform_etc_dir }}/certs/client-key.pem"
|
||||
# }
|
||||
# sql: "select ipaddress, username, clientid, action, permission, topic from mqtt_authz where ipaddr = '%a' or username = '%u' or clientid = '%c'"
|
||||
# query: "select ipaddress, username, clientid, action, permission, topic from mqtt_authz where ipaddr = '%a' or username = '%u' or clientid = '%c'"
|
||||
# },
|
||||
# {
|
||||
# type: pgsql
|
||||
# config: {
|
||||
# server: "127.0.0.1:5432"
|
||||
# database: mqtt
|
||||
# pool_size: 1
|
||||
# username: root
|
||||
# password: public
|
||||
# auto_reconnect: true
|
||||
# ssl: {enable: false}
|
||||
# }
|
||||
# sql: "select ipaddress, username, clientid, action, permission, topic from mqtt_authz where ipaddr = '%a' or username = '%u' or username = '$all' or clientid = '%c'"
|
||||
# server: "127.0.0.1:5432"
|
||||
# database: mqtt
|
||||
# pool_size: 1
|
||||
# username: root
|
||||
# password: public
|
||||
# auto_reconnect: true
|
||||
# ssl: {enable: false}
|
||||
# query: "select ipaddress, username, clientid, action, permission, topic from mqtt_authz where ipaddr = '%a' or username = '%u' or username = '$all' or clientid = '%c'"
|
||||
# },
|
||||
# {
|
||||
# type: redis
|
||||
# config: {
|
||||
# server: "127.0.0.1:6379"
|
||||
# database: 0
|
||||
# pool_size: 1
|
||||
# password: public
|
||||
# auto_reconnect: true
|
||||
# ssl: {enable: false}
|
||||
# }
|
||||
# server: "127.0.0.1:6379"
|
||||
# database: 0
|
||||
# pool_size: 1
|
||||
# password: public
|
||||
# auto_reconnect: true
|
||||
# ssl: {enable: false}
|
||||
# cmd: "HGETALL mqtt_authz:%u"
|
||||
# },
|
||||
# {
|
||||
# type: mongo
|
||||
# config: {
|
||||
# mongo_type: single
|
||||
# server: "127.0.0.1:27017"
|
||||
# pool_size: 1
|
||||
# database: mqtt
|
||||
# ssl: {enable: false}
|
||||
# }
|
||||
# mongo_type: single
|
||||
# server: "127.0.0.1:27017"
|
||||
# pool_size: 1
|
||||
# database: mqtt
|
||||
# ssl: {enable: false}
|
||||
# collection: mqtt_authz
|
||||
# find: { "$or": [ { "username": "%u" }, { "clientid": "%c" } ] }
|
||||
# selector: { "$or": [ { "username": "%u" }, { "clientid": "%c" } ] }
|
||||
# },
|
||||
{
|
||||
type: file
|
||||
path: "{{ platform_etc_dir }}/authorization_rules.conf"
|
||||
path: "{{ platform_etc_dir }}/acl.conf"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
|
@@ -30,13 +30,16 @@
|
|||
        , lookup/0
        , lookup/1
        , move/2
        , move/3
        , update/2
        , update/3
        , authorize/5
        ]).

-export([post_config_update/4, pre_config_update/2]).

-define(CONF_KEY_PATH, [authorization, sources]).
-define(SOURCE_TYPES, [file, http, mongo, mysql, pgsql, redis]).

-spec(register_metrics() -> ok).
register_metrics() ->
@@ -45,7 +48,9 @@ register_metrics() ->
init() ->
    ok = register_metrics(),
    emqx_config_handler:add_handler(?CONF_KEY_PATH, ?MODULE),
    NSources = [init_source(Source) || Source <- emqx:get_config(?CONF_KEY_PATH, [])],
    Sources = emqx:get_config(?CONF_KEY_PATH, []),
    ok = check_dup_types(Sources),
    NSources = [init_source(Source) || Source <- Sources],
    ok = emqx_hooks:add('client.authorize', {?MODULE, authorize, [NSources]}, -1).
|
||||
|
||||
lookup() ->
|
||||
|
@@ -58,29 +63,39 @@ lookup(Type) ->
|
|||
error:Reason -> {error, Reason}
|
||||
end.
|
||||
|
||||
move(Type, #{<<"before">> := Before}) ->
    emqx:update_config(?CONF_KEY_PATH, {move, atom(Type), #{<<"before">> => atom(Before)}});
move(Type, #{<<"after">> := After}) ->
    emqx:update_config(?CONF_KEY_PATH, {move, atom(Type), #{<<"after">> => atom(After)}});
move(Type, Position) ->
    emqx:update_config(?CONF_KEY_PATH, {move, atom(Type), Position}).
move(Type, Cmd) ->
    move(Type, Cmd, #{}).

move(Type, #{<<"before">> := Before}, Opts) ->
    emqx:update_config(?CONF_KEY_PATH, {move, atom(Type), #{<<"before">> => atom(Before)}}, Opts);
move(Type, #{<<"after">> := After}, Opts) ->
    emqx:update_config(?CONF_KEY_PATH, {move, atom(Type), #{<<"after">> => atom(After)}}, Opts);
move(Type, Position, Opts) ->
    emqx:update_config(?CONF_KEY_PATH, {move, atom(Type), Position}, Opts).

update({replace_once, Type}, Sources) ->
    emqx:update_config(?CONF_KEY_PATH, {{replace_once, atom(Type)}, Sources});
update({delete_once, Type}, Sources) ->
    emqx:update_config(?CONF_KEY_PATH, {{delete_once, atom(Type)}, Sources});
update(Cmd, Sources) ->
    emqx:update_config(?CONF_KEY_PATH, {Cmd, Sources}).
    update(Cmd, Sources, #{}).

update({replace_once, Type}, Sources, Opts) ->
    emqx:update_config(?CONF_KEY_PATH, {{replace_once, atom(Type)}, Sources}, Opts);
update({delete_once, Type}, Sources, Opts) ->
    emqx:update_config(?CONF_KEY_PATH, {{delete_once, atom(Type)}, Sources}, Opts);
update(Cmd, Sources, Opts) ->
    emqx:update_config(?CONF_KEY_PATH, {Cmd, Sources}, Opts).
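%% A minimal usage sketch of the new 3-arity API, assuming the Opts map is whatever
%% emqx:update_config/3 accepts (its keys are not defined in this diff); the 2-arity
%% forms above simply delegate with an empty map:
%%
%%     {ok, _} = emqx_authz:move(mysql, <<"top">>, #{}),
%%     {ok, _} = emqx_authz:update({replace_once, file}, #{<<"type">> => <<"file">>}, #{}).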
|
||||
|
||||
pre_config_update({move, Type, <<"top">>}, Conf) when is_list(Conf) ->
|
||||
{Index, _} = find_source_by_type(Type),
|
||||
{List1, List2} = lists:split(Index, Conf),
|
||||
{ok, [lists:nth(Index, Conf)] ++ lists:droplast(List1) ++ List2};
|
||||
NConf = [lists:nth(Index, Conf)] ++ lists:droplast(List1) ++ List2,
|
||||
ok = check_dup_types(NConf),
|
||||
{ok, NConf};
|
||||
|
||||
pre_config_update({move, Type, <<"bottom">>}, Conf) when is_list(Conf) ->
|
||||
{Index, _} = find_source_by_type(Type),
|
||||
{List1, List2} = lists:split(Index, Conf),
|
||||
{ok, lists:droplast(List1) ++ List2 ++ [lists:nth(Index, Conf)]};
|
||||
NConf = lists:droplast(List1) ++ List2 ++ [lists:nth(Index, Conf)],
|
||||
ok = check_dup_types(NConf),
|
||||
{ok, NConf};
|
||||
|
||||
pre_config_update({move, Type, #{<<"before">> := Before}}, Conf) when is_list(Conf) ->
|
||||
{Index1, _} = find_source_by_type(Type),
|
||||
|
@@ -89,9 +104,11 @@ pre_config_update({move, Type, #{<<"before">> := Before}}, Conf) when is_list(Co
|
|||
Conf2 = lists:nth(Index2, Conf),
|
||||
|
||||
{List1, List2} = lists:split(Index2, Conf),
|
||||
{ok, lists:delete(Conf1, lists:droplast(List1))
|
||||
++ [Conf1] ++ [Conf2]
|
||||
++ lists:delete(Conf1, List2)};
|
||||
NConf = lists:delete(Conf1, lists:droplast(List1))
|
||||
++ [Conf1] ++ [Conf2]
|
||||
++ lists:delete(Conf1, List2),
|
||||
ok = check_dup_types(NConf),
|
||||
{ok, NConf};
|
||||
|
||||
pre_config_update({move, Type, #{<<"after">> := After}}, Conf) when is_list(Conf) ->
|
||||
{Index1, _} = find_source_by_type(Type),
|
||||
|
@@ -99,21 +116,31 @@ pre_config_update({move, Type, #{<<"after">> := After}}, Conf) when is_list(Conf
|
|||
{Index2, _} = find_source_by_type(After),
|
||||
|
||||
{List1, List2} = lists:split(Index2, Conf),
|
||||
{ok, lists:delete(Conf1, List1)
|
||||
++ [Conf1]
|
||||
++ lists:delete(Conf1, List2)};
|
||||
NConf = lists:delete(Conf1, List1)
|
||||
++ [Conf1]
|
||||
++ lists:delete(Conf1, List2),
|
||||
ok = check_dup_types(NConf),
|
||||
{ok, NConf};
|
||||
|
||||
pre_config_update({head, Sources}, Conf) when is_list(Sources), is_list(Conf) ->
|
||||
NConf = Sources ++ Conf,
|
||||
ok = check_dup_types(NConf),
|
||||
{ok, Sources ++ Conf};
|
||||
pre_config_update({tail, Sources}, Conf) when is_list(Sources), is_list(Conf) ->
|
||||
NConf = Conf ++ Sources,
|
||||
ok = check_dup_types(NConf),
|
||||
{ok, Conf ++ Sources};
|
||||
pre_config_update({{replace_once, Type}, Source}, Conf) when is_map(Source), is_list(Conf) ->
|
||||
{Index, _} = find_source_by_type(Type),
|
||||
{List1, List2} = lists:split(Index, Conf),
|
||||
{ok, lists:droplast(List1) ++ [Source] ++ List2};
|
||||
NConf = lists:droplast(List1) ++ [Source] ++ List2,
|
||||
ok = check_dup_types(NConf),
|
||||
{ok, NConf};
|
||||
pre_config_update({{delete_once, Type}, _Source}, Conf) when is_list(Conf) ->
|
||||
{_, Source} = find_source_by_type(Type),
|
||||
{ok, lists:delete(Source, Conf)};
|
||||
NConf = lists:delete(Source, Conf),
|
||||
ok = check_dup_types(NConf),
|
||||
{ok, NConf};
|
||||
pre_config_update({_, Sources}, _Conf) when is_list(Sources)->
|
||||
%% overwrite the entire config!
|
||||
{ok, Sources}.
|
||||
|
@@ -171,6 +198,7 @@ post_config_update({{replace_once, Type}, #{type := Type} = Source}, _NewSources
|
|||
{Index, OldSource} = find_source_by_type(Type, OldInitedSources),
|
||||
case maps:get(type, OldSource, undefined) of
|
||||
undefined -> ok;
|
||||
file -> ok;
|
||||
_ ->
|
||||
#{annotations := #{id := Id}} = OldSource,
|
||||
ok = emqx_resource:remove(Id)
|
||||
|
@@ -204,6 +232,27 @@ post_config_update(_, NewSources, _OldConf, _AppEnvs) ->
|
|||
%% Initialize source
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
check_dup_types(Sources) ->
    check_dup_types(Sources, ?SOURCE_TYPES).
check_dup_types(_Sources, []) -> ok;
check_dup_types(Sources, [T0 | Tail]) ->
    case lists:foldl(fun (#{type := T1}, AccIn) ->
                             case T0 =:= T1 of
                                 true -> AccIn + 1;
                                 false -> AccIn
                             end;
                         (#{<<"type">> := T1}, AccIn) ->
                             case T0 =:= atom(T1) of
                                 true -> AccIn + 1;
                                 false -> AccIn
                             end
                     end, 0, Sources) > 1 of
        true ->
            ?LOG(error, "The type is duplicated in the Authorization source"),
            {error, authz_source_dup};
        false -> check_dup_types(Sources, Tail)
    end.
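%% A small sketch of how the callers above use this check (inputs are assumed):
%% distinct source types pass, while a repeated type makes the `ok = check_dup_types(NConf)`
%% match fail, so the config update is rejected.
%%
%%     ok = check_dup_types([#{type => file}, #{type => http}]),
%%     {error, authz_source_dup} = check_dup_types([#{type => file}, #{<<"type">> => <<"file">>}]).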
|
||||
|
||||
init_source(#{enable := true,
|
||||
type := file,
|
||||
path := Path
|
||||
|
@@ -224,10 +273,10 @@ init_source(#{enable := true,
|
|||
Source#{annotations => #{rules => Rules}};
|
||||
init_source(#{enable := true,
|
||||
type := http,
|
||||
config := #{url := Url} = Config
|
||||
url := Url
|
||||
} = Source) ->
|
||||
NConfig = maps:merge(Config, #{base_url => maps:remove(query, Url)}),
|
||||
case create_resource(Source#{config := NConfig}) of
|
||||
NSource= maps:put(base_url, maps:remove(query, Url), Source),
|
||||
case create_resource(NSource) of
|
||||
{error, Reason} -> error({load_config_error, Reason});
|
||||
Id -> Source#{annotations => #{id => Id}}
|
||||
end;
|
||||
|
@@ -241,7 +290,7 @@ init_source(#{enable := true,
|
|||
end;
|
||||
init_source(#{enable := true,
|
||||
type := DB,
|
||||
sql := SQL
|
||||
query := SQL
|
||||
} = Source) when DB =:= mysql;
|
||||
DB =:= pgsql ->
|
||||
Mod = authz_module(DB),
|
||||
|
@@ -249,7 +298,7 @@ init_source(#{enable := true,
|
|||
{error, Reason} -> error({load_config_error, Reason});
|
||||
Id -> Source#{annotations =>
|
||||
#{id => Id,
|
||||
sql => Mod:parse_query(SQL)
|
||||
query => Mod:parse_query(SQL)
|
||||
}
|
||||
}
|
||||
end;
|
||||
|
@@ -302,14 +351,14 @@ do_authorize(Client, PubSub, Topic,
|
|||
%%--------------------------------------------------------------------
|
||||
|
||||
check_sources(RawSources) ->
|
||||
{ok, Conf} = hocon:binary(jsx:encode(#{<<"authorization">> => #{<<"sources">> => RawSources}}), #{format => richmap}),
|
||||
CheckConf = hocon_schema:check(emqx_authz_schema, Conf, #{atom_key => true}),
|
||||
#{authorization:= #{sources := Sources}} = hocon_schema:richmap_to_map(CheckConf),
|
||||
Schema = #{roots => emqx_authz_schema:fields("authorization"), fields => #{}},
|
||||
Conf = #{<<"sources">> => RawSources},
|
||||
#{sources := Sources} = hocon_schema:check_plain(Schema, Conf, #{atom_key => true}),
|
||||
Sources.
|
||||
|
||||
find_source_by_type(Type) -> find_source_by_type(Type, lookup()).
|
||||
find_source_by_type(Type, Sources) -> find_source_by_type(Type, Sources, 1).
|
||||
find_source_by_type(_, [], _N) -> error(not_found_rule);
|
||||
find_source_by_type(_, [], _N) -> error(not_found_source);
|
||||
find_source_by_type(Type, [ Source = #{type := T} | Tail], N) ->
|
||||
case Type =:= T of
|
||||
true -> {N, Source};
|
||||
|
@@ -325,16 +374,14 @@ gen_id(Type) ->
|
|||
iolist_to_binary([io_lib:format("~s_~s",[?APP, Type])]).
|
||||
|
||||
create_resource(#{type := DB,
|
||||
config := Config,
|
||||
annotations := #{id := ResourceID}}) ->
|
||||
case emqx_resource:update(ResourceID, connector_module(DB), Config, []) of
|
||||
annotations := #{id := ResourceID}} = Source) ->
|
||||
case emqx_resource:update(ResourceID, connector_module(DB), Source, []) of
|
||||
{ok, _} -> ResourceID;
|
||||
{error, Reason} -> {error, Reason}
|
||||
end;
|
||||
create_resource(#{type := DB,
|
||||
config := Config}) ->
|
||||
create_resource(#{type := DB} = Source) ->
|
||||
ResourceID = gen_id(DB),
|
||||
case emqx_resource:create(ResourceID, connector_module(DB), Config) of
|
||||
case emqx_resource:create(ResourceID, connector_module(DB), Source) of
|
||||
{ok, already_created} -> ResourceID;
|
||||
{ok, _} -> ResourceID;
|
||||
{error, Reason} -> {error, Reason}
|
||||
|
|
|
@@ -26,6 +26,9 @@ definitions() ->
|
|||
type => object,
|
||||
required => [status],
|
||||
properties => #{
|
||||
id => #{
|
||||
type => string
|
||||
},
|
||||
status => #{
|
||||
type => string,
|
||||
example => <<"healthy">>
|
||||
|
@@ -38,12 +41,332 @@ definitions() ->
|
|||
]
|
||||
},
|
||||
Sources = #{
|
||||
oneOf => [ minirest:ref(<<"connector_redis">>)
|
||||
oneOf => [ minirest:ref(<<"http">>)
|
||||
, minirest:ref(<<"mongo_single">>)
|
||||
, minirest:ref(<<"mongo_rs">>)
|
||||
, minirest:ref(<<"mongo_sharded">>)
|
||||
, minirest:ref(<<"mysql">>)
|
||||
, minirest:ref(<<"pgsql">>)
|
||||
, minirest:ref(<<"redis_single">>)
|
||||
, minirest:ref(<<"redis_sentinel">>)
|
||||
, minirest:ref(<<"redis_cluster">>)
|
||||
, minirest:ref(<<"file">>)
|
||||
]
|
||||
},
|
||||
ConnectorRedis= #{
|
||||
SSL = #{
|
||||
type => object,
|
||||
required => [enable],
|
||||
properties => #{
|
||||
enable => #{type => boolean, example => true},
|
||||
cacertfile => #{type => string},
|
||||
keyfile => #{type => string},
|
||||
certfile => #{type => string},
|
||||
verify => #{type => boolean, example => false}
|
||||
}
|
||||
},
|
||||
HTTP = #{
|
||||
type => object,
|
||||
required => [type, enable, config, cmd],
|
||||
required => [ type
|
||||
, enable
|
||||
, method
|
||||
, headers
|
||||
, request_timeout
|
||||
, connect_timeout
|
||||
, max_retries
|
||||
, retry_interval
|
||||
, pool_type
|
||||
, pool_size
|
||||
, enable_pipelining
|
||||
, ssl
|
||||
],
|
||||
properties => #{
|
||||
type => #{
|
||||
type => string,
|
||||
enum => [<<"http">>],
|
||||
example => <<"http">>
|
||||
},
|
||||
enable => #{
|
||||
type => boolean,
|
||||
example => true
|
||||
},
|
||||
url => #{
|
||||
type => string,
|
||||
example => <<"https://emqx.com">>
|
||||
},
|
||||
method => #{
|
||||
type => string,
|
||||
enum => [<<"get">>, <<"post">>],
|
||||
example => <<"get">>
|
||||
},
|
||||
headers => #{type => object},
|
||||
body => #{type => object},
|
||||
connect_timeout => #{type => integer},
|
||||
max_retries => #{type => integer},
|
||||
retry_interval => #{type => integer},
|
||||
pool_type => #{
|
||||
type => string,
|
||||
enum => [<<"random">>, <<"hash">>],
|
||||
example => <<"random">>
|
||||
},
|
||||
pool_size => #{type => integer},
|
||||
enable_pipelining => #{type => boolean},
|
||||
ssl => minirest:ref(<<"ssl">>)
|
||||
}
|
||||
},
|
||||
MongoSingle= #{
|
||||
type => object,
|
||||
required => [ type
|
||||
, enable
|
||||
, collection
|
||||
, selector
|
||||
, mongo_type
|
||||
, server
|
||||
, pool_size
|
||||
, username
|
||||
, password
|
||||
, auth_source
|
||||
, database
|
||||
, topology
|
||||
, ssl
|
||||
],
|
||||
properties => #{
|
||||
type => #{
|
||||
type => string,
|
||||
enum => [<<"mongo">>],
|
||||
example => <<"mongo">>
|
||||
},
|
||||
enable => #{
|
||||
type => boolean,
|
||||
example => true
|
||||
},
|
||||
collection => #{type => string},
|
||||
selector => #{type => object},
|
||||
mongo_type => #{type => string,
|
||||
enum => [<<"single">>],
|
||||
example => <<"single">>},
|
||||
server => #{type => string, example => <<"127.0.0.1:27017">>},
|
||||
pool_size => #{type => integer},
|
||||
username => #{type => string},
|
||||
password => #{type => string},
|
||||
auth_source => #{type => string},
|
||||
database => #{type => string},
|
||||
topology => #{type => object,
|
||||
properties => #{
|
||||
pool_size => #{type => integer},
|
||||
max_overflow => #{type => integer},
|
||||
overflow_ttl => #{type => integer},
|
||||
overflow_check_period => #{type => integer},
|
||||
local_threshold_ms => #{type => integer},
|
||||
connect_timeout_ms => #{type => integer},
|
||||
socket_timeout_ms => #{type => integer},
|
||||
server_selection_timeout_ms => #{type => integer},
|
||||
wait_queue_timeout_ms => #{type => integer},
|
||||
heartbeat_frequency_ms => #{type => integer},
|
||||
min_heartbeat_frequency_ms => #{type => integer}
|
||||
}
|
||||
},
|
||||
ssl => minirest:ref(<<"ssl">>)
|
||||
}
|
||||
},
|
||||
MongoRs= #{
|
||||
type => object,
|
||||
required => [ type
|
||||
, enable
|
||||
, collection
|
||||
, selector
|
||||
, mongo_type
|
||||
, servers
|
||||
, replica_set_name
|
||||
, pool_size
|
||||
, username
|
||||
, password
|
||||
, auth_source
|
||||
, database
|
||||
, topology
|
||||
, ssl
|
||||
],
|
||||
properties => #{
|
||||
type => #{
|
||||
type => string,
|
||||
enum => [<<"mongo">>],
|
||||
example => <<"mongo">>
|
||||
},
|
||||
enable => #{
|
||||
type => boolean,
|
||||
example => true
|
||||
},
|
||||
collection => #{type => string},
|
||||
selector => #{type => object},
|
||||
mongo_type => #{type => string,
|
||||
enum => [<<"rs">>],
|
||||
example => <<"rs">>},
|
||||
servers => #{type => array,
|
||||
items => #{type => string,example => <<"127.0.0.1:27017">>}},
|
||||
replica_set_name => #{type => string},
|
||||
pool_size => #{type => integer},
|
||||
username => #{type => string},
|
||||
password => #{type => string},
|
||||
auth_source => #{type => string},
|
||||
database => #{type => string},
|
||||
topology => #{type => object,
|
||||
properties => #{
|
||||
pool_size => #{type => integer},
|
||||
max_overflow => #{type => integer},
|
||||
overflow_ttl => #{type => integer},
|
||||
overflow_check_period => #{type => integer},
|
||||
local_threshold_ms => #{type => integer},
|
||||
connect_timeout_ms => #{type => integer},
|
||||
socket_timeout_ms => #{type => integer},
|
||||
server_selection_timeout_ms => #{type => integer},
|
||||
wait_queue_timeout_ms => #{type => integer},
|
||||
heartbeat_frequency_ms => #{type => integer},
|
||||
min_heartbeat_frequency_ms => #{type => integer}
|
||||
}
|
||||
},
|
||||
ssl => minirest:ref(<<"ssl">>)
|
||||
}
|
||||
},
|
||||
MongoSharded = #{
|
||||
type => object,
|
||||
required => [ type
|
||||
, enable
|
||||
, collection
|
||||
, selector
|
||||
, mongo_type
|
||||
, servers
|
||||
, pool_size
|
||||
, username
|
||||
, password
|
||||
, auth_source
|
||||
, database
|
||||
, topology
|
||||
, ssl
|
||||
],
|
||||
properties => #{
|
||||
type => #{
|
||||
type => string,
|
||||
enum => [<<"mongo">>],
|
||||
example => <<"mongo">>
|
||||
},
|
||||
enable => #{
|
||||
type => boolean,
|
||||
example => true
|
||||
},
|
||||
collection => #{type => string},
|
||||
selector => #{type => object},
|
||||
mongo_type => #{type => string,
|
||||
enum => [<<"sharded">>],
|
||||
example => <<"sharded">>},
|
||||
servers => #{type => array,
|
||||
items => #{type => string,example => <<"127.0.0.1:27017">>}},
|
||||
pool_size => #{type => integer},
|
||||
username => #{type => string},
|
||||
password => #{type => string},
|
||||
auth_source => #{type => string},
|
||||
database => #{type => string},
|
||||
topology => #{type => object,
|
||||
properties => #{
|
||||
pool_size => #{type => integer},
|
||||
max_overflow => #{type => integer},
|
||||
overflow_ttl => #{type => integer},
|
||||
overflow_check_period => #{type => integer},
|
||||
local_threshold_ms => #{type => integer},
|
||||
connect_timeout_ms => #{type => integer},
|
||||
socket_timeout_ms => #{type => integer},
|
||||
server_selection_timeout_ms => #{type => integer},
|
||||
wait_queue_timeout_ms => #{type => integer},
|
||||
heartbeat_frequency_ms => #{type => integer},
|
||||
min_heartbeat_frequency_ms => #{type => integer}
|
||||
}
|
||||
},
|
||||
ssl => minirest:ref(<<"ssl">>)
|
||||
}
|
||||
},
|
||||
Mysql = #{
|
||||
type => object,
|
||||
required => [ type
|
||||
, enable
|
||||
, query
|
||||
, server
|
||||
, database
|
||||
, pool_size
|
||||
, username
|
||||
, password
|
||||
, auto_reconnect
|
||||
, ssl
|
||||
],
|
||||
properties => #{
|
||||
type => #{
|
||||
type => string,
|
||||
enum => [<<"mysql">>],
|
||||
example => <<"mysql">>
|
||||
},
|
||||
enable => #{
|
||||
type => boolean,
|
||||
example => true
|
||||
},
|
||||
query => #{type => string},
|
||||
server => #{type => string,
|
||||
example => <<"127.0.0.1:3306">>
|
||||
},
|
||||
database => #{type => string},
|
||||
pool_size => #{type => integer},
|
||||
username => #{type => string},
|
||||
password => #{type => string},
|
||||
auto_reconnect => #{type => boolean,
|
||||
example => true
|
||||
},
|
||||
ssl => minirest:ref(<<"ssl">>)
|
||||
}
|
||||
},
|
||||
Pgsql = #{
|
||||
type => object,
|
||||
required => [ type
|
||||
, enable
|
||||
, query
|
||||
, server
|
||||
, database
|
||||
, pool_size
|
||||
, username
|
||||
, password
|
||||
, auto_reconnect
|
||||
, ssl
|
||||
],
|
||||
properties => #{
|
||||
type => #{
|
||||
type => string,
|
||||
enum => [<<"pgsql">>],
|
||||
example => <<"pgsql">>
|
||||
},
|
||||
enable => #{
|
||||
type => boolean,
|
||||
example => true
|
||||
},
|
||||
query => #{type => string},
|
||||
server => #{type => string,
|
||||
example => <<"127.0.0.1:5432">>
|
||||
},
|
||||
database => #{type => string},
|
||||
pool_size => #{type => integer},
|
||||
username => #{type => string},
|
||||
password => #{type => string},
|
||||
auto_reconnect => #{type => boolean,
|
||||
example => true
|
||||
},
|
||||
ssl => minirest:ref(<<"ssl">>)
|
||||
}
|
||||
},
|
||||
RedisSingle = #{
|
||||
type => object,
|
||||
required => [ type
|
||||
, enable
|
||||
, cmd
|
||||
, server
|
||||
, redis_type
|
||||
, pool_size
|
||||
, auto_reconnect
|
||||
, ssl
|
||||
],
|
||||
properties => #{
|
||||
type => #{
|
||||
type => string,
|
||||
|
@@ -54,59 +377,133 @@ definitions() ->
|
|||
type => boolean,
|
||||
example => true
|
||||
},
|
||||
config => #{
|
||||
oneOf => [ #{type => object,
|
||||
required => [server, redis_type, pool_size, auto_reconnect],
|
||||
properties => #{
|
||||
server => #{type => string, example => <<"127.0.0.1:3306">>},
|
||||
redis_type => #{type => string,
|
||||
enum => [<<"single">>],
|
||||
example => <<"single">>},
|
||||
pool_size => #{type => integer},
|
||||
auto_reconnect => #{type => boolean, example => true},
|
||||
password => #{type => string},
|
||||
database => #{type => string, example => mqtt}
|
||||
}
|
||||
}
|
||||
, #{type => object,
|
||||
required => [servers, redis_type, sentinel, pool_size, auto_reconnect],
|
||||
properties => #{
|
||||
servers => #{type => array,
|
||||
items => #{type => string,example => <<"127.0.0.1:3306">>}},
|
||||
redis_type => #{type => string,
|
||||
enum => [<<"sentinel">>],
|
||||
example => <<"sentinel">>},
|
||||
sentinel => #{type => string},
|
||||
pool_size => #{type => integer},
|
||||
auto_reconnect => #{type => boolean, example => true},
|
||||
password => #{type => string},
|
||||
database => #{type => string, example => mqtt}
|
||||
}
|
||||
}
|
||||
, #{type => object,
|
||||
required => [servers, redis_type, pool_size, auto_reconnect],
|
||||
properties => #{
|
||||
servers => #{type => array,
|
||||
items => #{type => string, example => <<"127.0.0.1:3306">>}},
|
||||
redis_type => #{type => string,
|
||||
enum => [<<"cluster">>],
|
||||
example => <<"cluster">>},
|
||||
pool_size => #{type => integer},
|
||||
auto_reconnect => #{type => boolean, example => true},
|
||||
password => #{type => string},
|
||||
database => #{type => string, example => mqtt}
|
||||
}
|
||||
}
|
||||
],
|
||||
type => object
|
||||
cmd => #{
|
||||
type => string,
|
||||
example => <<"HGETALL mqtt_authz">>
|
||||
},
|
||||
server => #{type => string, example => <<"127.0.0.1:3306">>},
|
||||
redis_type => #{type => string,
|
||||
enum => [<<"single">>],
|
||||
example => <<"single">>},
|
||||
pool_size => #{type => integer},
|
||||
auto_reconnect => #{type => boolean, example => true},
|
||||
password => #{type => string},
|
||||
database => #{type => integer},
|
||||
ssl => minirest:ref(<<"ssl">>)
|
||||
}
|
||||
},
|
||||
RedisSentinel= #{
|
||||
type => object,
|
||||
required => [ type
|
||||
, enable
|
||||
, cmd
|
||||
, servers
|
||||
, redis_type
|
||||
, sentinel
|
||||
, pool_size
|
||||
, auto_reconnect
|
||||
, ssl
|
||||
],
|
||||
properties => #{
|
||||
type => #{
|
||||
type => string,
|
||||
enum => [<<"redis">>],
|
||||
example => <<"redis">>
|
||||
},
|
||||
enable => #{
|
||||
type => boolean,
|
||||
example => true
|
||||
},
|
||||
cmd => #{
|
||||
type => string,
|
||||
example => <<"HGETALL mqtt_authz">>
|
||||
},
|
||||
servers => #{type => array,
|
||||
items => #{type => string,example => <<"127.0.0.1:3306">>}},
|
||||
redis_type => #{type => string,
|
||||
enum => [<<"sentinel">>],
|
||||
example => <<"sentinel">>},
|
||||
sentinel => #{type => string},
|
||||
pool_size => #{type => integer},
|
||||
auto_reconnect => #{type => boolean, example => true},
|
||||
password => #{type => string},
|
||||
database => #{type => integer},
|
||||
ssl => minirest:ref(<<"ssl">>)
|
||||
}
|
||||
},
|
||||
RedisCluster= #{
|
||||
type => object,
|
||||
required => [ type
|
||||
, enable
|
||||
, cmd
|
||||
, servers
|
||||
, redis_type
|
||||
, pool_size
|
||||
, auto_reconnect
|
||||
, ssl],
|
||||
properties => #{
|
||||
type => #{
|
||||
type => string,
|
||||
enum => [<<"redis">>],
|
||||
example => <<"redis">>
|
||||
},
|
||||
enable => #{
|
||||
type => boolean,
|
||||
example => true
|
||||
},
|
||||
cmd => #{
|
||||
type => string,
|
||||
example => <<"HGETALL mqtt_authz">>
|
||||
},
|
||||
servers => #{type => array,
|
||||
items => #{type => string, example => <<"127.0.0.1:3306">>}},
|
||||
redis_type => #{type => string,
|
||||
enum => [<<"cluster">>],
|
||||
example => <<"cluster">>},
|
||||
pool_size => #{type => integer},
|
||||
auto_reconnect => #{type => boolean, example => true},
|
||||
password => #{type => string},
|
||||
database => #{type => integer},
|
||||
ssl => minirest:ref(<<"ssl">>)
|
||||
}
|
||||
},
|
||||
File = #{
|
||||
type => object,
|
||||
required => [type, enable, rules],
|
||||
properties => #{
|
||||
type => #{
|
||||
type => string,
|
||||
enum => [<<"redis">>],
|
||||
example => <<"redis">>
|
||||
},
|
||||
enable => #{
|
||||
type => boolean,
|
||||
example => true
|
||||
},
|
||||
rules => #{
|
||||
type => array,
|
||||
items => #{
|
||||
type => string,
|
||||
example => <<"{allow,{username,\"^dashboard?\"},subscribe,[\"$SYS/#\"]}.\n{allow,{ipaddr,\"127.0.0.1\"},all,[\"$SYS/#\",\"#\"]}.">>
|
||||
}
|
||||
},
|
||||
path => #{
|
||||
type => string,
|
||||
example => <<"/path/to/authorizaiton_rules.conf">>
|
||||
}
|
||||
}
|
||||
},
|
||||
[ #{<<"returned_sources">> => RetruenedSources}
|
||||
, #{<<"sources">> => Sources}
|
||||
, #{<<"connector_redis">> => ConnectorRedis}
|
||||
, #{<<"ssl">> => SSL}
|
||||
, #{<<"http">> => HTTP}
|
||||
, #{<<"mongo_single">> => MongoSingle}
|
||||
, #{<<"mongo_rs">> => MongoRs}
|
||||
, #{<<"mongo_sharded">> => MongoSharded}
|
||||
, #{<<"mysql">> => Mysql}
|
||||
, #{<<"pgsql">> => Pgsql}
|
||||
, #{<<"redis_single">> => RedisSingle}
|
||||
, #{<<"redis_sentinel">> => RedisSentinel}
|
||||
, #{<<"redis_cluster">> => RedisCluster}
|
||||
, #{<<"file">> => File}
|
||||
].
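%% Each name/definition pair in the list above is what minirest resolves when a schema
%% references it by name; for example (the reference name is taken from the list, the
%% usage site is assumed):
%%
%%     #{schema => minirest:ref(<<"redis_single">>)}.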
|
||||
|
|
|
@@ -19,21 +19,32 @@
|
|||
-behavior(minirest_api).
|
||||
|
||||
-include("emqx_authz.hrl").
|
||||
-include_lib("emqx/include/logger.hrl").
|
||||
|
||||
-define(EXAMPLE_REDIS,
        #{type=> redis,
          config => #{server => <<"127.0.0.1:3306">>,
                      redis_type => single,
                      pool_size => 1,
                      auto_reconnect => true
                     },
          enable => true,
          server => <<"127.0.0.1:3306">>,
          redis_type => single,
          pool_size => 1,
          auto_reconnect => true,
          cmd => <<"HGETALL mqtt_authz">>}).
-define(EXAMPLE_FILE,
        #{type=> file,
          enable => true,
          rules => <<"{allow,{username,\"^dashboard?\"},subscribe,[\"$SYS/#\"]}.\n{allow,{ipaddr,\"127.0.0.1\"},all,[\"$SYS/#\",\"#\"]}.">>
         }).
|
||||
|
||||
-define(EXAMPLE_RETURNED_REDIS,
|
||||
maps:put(annotations, #{status => healthy}, ?EXAMPLE_REDIS)
|
||||
).
|
||||
-define(EXAMPLE_RETURNED_FILE,
|
||||
maps:put(annotations, #{status => healthy}, ?EXAMPLE_FILE)
|
||||
).
|
||||
|
||||
-define(EXAMPLE_RETURNED_RULES,
|
||||
#{sources => [?EXAMPLE_RETURNED_REDIS
|
||||
-define(EXAMPLE_RETURNED,
|
||||
#{sources => [ ?EXAMPLE_RETURNED_REDIS
|
||||
, ?EXAMPLE_RETURNED_FILE
|
||||
]
|
||||
}).
|
||||
|
||||
|
@@ -55,24 +66,6 @@ sources_api() ->
|
|||
Metadata = #{
|
||||
get => #{
|
||||
description => "List authorization sources",
|
||||
parameters => [
|
||||
#{
|
||||
name => page,
|
||||
in => query,
|
||||
schema => #{
|
||||
type => integer
|
||||
},
|
||||
required => false
|
||||
},
|
||||
#{
|
||||
name => limit,
|
||||
in => query,
|
||||
schema => #{
|
||||
type => integer
|
||||
},
|
||||
required => false
|
||||
}
|
||||
],
|
||||
responses => #{
|
||||
<<"200">> => #{
|
||||
description => <<"OK">>,
|
||||
|
@@ -90,7 +83,7 @@ sources_api() ->
|
|||
examples => #{
|
||||
sources => #{
|
||||
summary => <<"Sources">>,
|
||||
value => jsx:encode(?EXAMPLE_RETURNED_RULES)
|
||||
value => jsx:encode(?EXAMPLE_RETURNED)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -108,6 +101,10 @@ sources_api() ->
|
|||
redis => #{
|
||||
summary => <<"Redis">>,
|
||||
value => jsx:encode(?EXAMPLE_REDIS)
|
||||
},
|
||||
file => #{
|
||||
summary => <<"File">>,
|
||||
value => jsx:encode(?EXAMPLE_FILE)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -130,7 +127,11 @@ sources_api() ->
|
|||
examples => #{
|
||||
redis => #{
|
||||
summary => <<"Redis">>,
|
||||
value => jsx:encode([?EXAMPLE_REDIS])
|
||||
value => jsx:encode(?EXAMPLE_REDIS)
|
||||
},
|
||||
file => #{
|
||||
summary => <<"File">>,
|
||||
value => jsx:encode(?EXAMPLE_FILE)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -165,9 +166,13 @@ source_api() ->
|
|||
'application/json' => #{
|
||||
schema => minirest:ref(<<"returned_sources">>),
|
||||
examples => #{
|
||||
sources => #{
|
||||
summary => <<"Sources">>,
|
||||
redis => #{
|
||||
summary => <<"Redis">>,
|
||||
value => jsx:encode(?EXAMPLE_RETURNED_REDIS)
|
||||
},
|
||||
file => #{
|
||||
summary => <<"File">>,
|
||||
value => jsx:encode(?EXAMPLE_RETURNED_FILE)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -196,6 +201,10 @@ source_api() ->
|
|||
redis => #{
|
||||
summary => <<"Redis">>,
|
||||
value => jsx:encode(?EXAMPLE_REDIS)
|
||||
},
|
||||
file => #{
|
||||
summary => <<"File">>,
|
||||
value => jsx:encode(?EXAMPLE_FILE)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -287,53 +296,65 @@ move_source_api() ->
|
|||
},
|
||||
{"/authorization/sources/:type/move", Metadata, move_source}.
|
||||
|
||||
sources(get, #{query_string := Query}) ->
|
||||
Sources = lists:foldl(fun (#{type := _Type, enable := true, config := #{server := Server} = Config, annotations := #{id := Id}} = Source, AccIn) ->
|
||||
NSource = case emqx_resource:health_check(Id) of
|
||||
ok ->
|
||||
Source#{config => Config#{server => emqx_connector_schema_lib:ip_port_to_string(Server)},
|
||||
annotations => #{id => Id,
|
||||
status => healthy}};
|
||||
_ ->
|
||||
Source#{config => Config#{server => emqx_connector_schema_lib:ip_port_to_string(Server)},
|
||||
annotations => #{id => Id,
|
||||
status => unhealthy}}
|
||||
end,
|
||||
lists:append(AccIn, [NSource]);
|
||||
(#{type := _Type, enable := true, annotations := #{id := Id}} = Source, AccIn) ->
|
||||
NSource = case emqx_resource:health_check(Id) of
|
||||
ok ->
|
||||
Source#{annotations => #{status => healthy}};
|
||||
_ ->
|
||||
Source#{annotations => #{status => unhealthy}}
|
||||
end,
|
||||
lists:append(AccIn, [NSource]);
|
||||
(Source, AccIn) ->
|
||||
lists:append(AccIn, [Source])
|
||||
sources(get, _) ->
|
||||
Sources = lists:foldl(fun (#{type := file, enable := Enable, path := Path}, AccIn) ->
|
||||
{ok, Rules} = file:consult(Path),
|
||||
lists:append(AccIn, [#{type => file,
|
||||
enable => Enable,
|
||||
rules => [ iolist_to_binary(io_lib:format("~p.", [R])) || R <- Rules],
|
||||
annotations => #{status => healthy}
|
||||
}]);
|
||||
(#{enable := false} = Source, AccIn) ->
|
||||
lists:append(AccIn, [Source#{annotations => #{status => unhealthy}}]);
|
||||
(#{type := _Type, annotations := #{id := Id}} = Source, AccIn) ->
|
||||
NSource0 = case maps:get(server, Source, undefined) of
|
||||
undefined -> Source;
|
||||
Server ->
|
||||
Source#{server => emqx_connector_schema_lib:ip_port_to_string(Server)}
|
||||
end,
|
||||
NSource1 = case maps:get(servers, Source, undefined) of
|
||||
undefined -> NSource0;
|
||||
Servers ->
|
||||
NSource0#{servers => [emqx_connector_schema_lib:ip_port_to_string(Server) || Server <- Servers]}
|
||||
end,
|
||||
NSource2 = case emqx_resource:health_check(Id) of
|
||||
ok ->
|
||||
NSource1#{annotations => #{status => healthy}};
|
||||
_ ->
|
||||
NSource1#{annotations => #{status => unhealthy}}
|
||||
end,
|
||||
lists:append(AccIn, [read_cert(NSource2)]);
|
||||
(Source, AccIn) ->
|
||||
lists:append(AccIn, [Source#{annotations => #{status => healthy}}])
|
||||
end, [], emqx_authz:lookup()),
|
||||
case maps:is_key(<<"page">>, Query) andalso maps:is_key(<<"limit">>, Query) of
|
||||
true ->
|
||||
Page = maps:get(<<"page">>, Query),
|
||||
Limit = maps:get(<<"limit">>, Query),
|
||||
Index = (binary_to_integer(Page) - 1) * binary_to_integer(Limit),
|
||||
{_, Sources1} = lists:split(Index, Sources),
|
||||
case binary_to_integer(Limit) < length(Sources1) of
|
||||
true ->
|
||||
{Sources2, _} = lists:split(binary_to_integer(Limit), Sources1),
|
||||
{200, #{sources => Sources2}};
|
||||
false -> {200, #{sources => Sources1}}
|
||||
end;
|
||||
false -> {200, #{sources => Sources}}
|
||||
end;
|
||||
sources(post, #{body := RawConfig}) ->
|
||||
case emqx_authz:update(head, [RawConfig]) of
|
||||
{200, #{sources => Sources}};
|
||||
sources(post, #{body := #{<<"type">> := <<"file">>, <<"rules">> := Rules, <<"enable">> := Enable}}) when is_list(Rules) ->
|
||||
{ok, Filename} = write_file(filename:join([emqx:get_config([node, data_dir]), "acl.conf"]),
|
||||
erlang:list_to_bitstring([<<Rule/binary, "\n">> || Rule <- Rules])
|
||||
),
|
||||
case emqx_authz:update(head, [#{type => file, enable => Enable, path => Filename}]) of
|
||||
{ok, _} -> {204};
|
||||
{error, Reason} ->
|
||||
{400, #{code => <<"BAD_REQUEST">>,
|
||||
messgae => atom_to_binary(Reason)}}
|
||||
end;
|
||||
sources(put, #{body := RawConfig}) ->
|
||||
case emqx_authz:update(replace, RawConfig) of
|
||||
sources(post, #{body := Body}) when is_map(Body) ->
|
||||
case emqx_authz:update(head, [write_cert(Body)]) of
|
||||
{ok, _} -> {204};
|
||||
{error, Reason} ->
|
||||
{400, #{code => <<"BAD_REQUEST">>,
|
||||
messgae => atom_to_binary(Reason)}}
|
||||
end;
|
||||
sources(put, #{body := Body}) when is_list(Body) ->
|
||||
NBody = [ begin
|
||||
case Source of
|
||||
#{<<"type">> := <<"file">>, <<"rules">> := Rules, <<"enable">> := Enable} ->
|
||||
{ok, Filename} = write_file(filename:join([emqx:get_config([node, data_dir]), "acl.conf"]), Rules),
|
||||
#{type => file, enable => Enable, path => Filename};
|
||||
_ -> write_cert(Source)
|
||||
end
|
||||
end || Source <- Body],
|
||||
case emqx_authz:update(replace, NBody) of
|
||||
{ok, _} -> {204};
|
||||
{error, Reason} ->
|
||||
{400, #{code => <<"BAD_REQUEST">>,
|
||||
|
@@ -343,29 +364,44 @@ sources(put, #{body := RawConfig}) ->
|
|||
source(get, #{bindings := #{type := Type}}) ->
|
||||
case emqx_authz:lookup(Type) of
|
||||
{error, Reason} -> {404, #{messgae => atom_to_binary(Reason)}};
|
||||
#{enable := false} = Source -> {200, Source};
|
||||
#{type := file} = Source -> {200, Source};
|
||||
#{config := #{server := Server,
|
||||
annotations := #{id := Id}
|
||||
} = Config} = Source ->
|
||||
case emqx_resource:health_check(Id) of
|
||||
#{type := file, enable := Enable, path := Path}->
|
||||
{ok, Rules} = file:consult(Path),
|
||||
{200, #{type => file,
|
||||
enable => Enable,
|
||||
rules => [ iolist_to_binary(io_lib:format("~p.", [R])) || R <- Rules],
|
||||
annotations => #{status => healthy}
|
||||
}
|
||||
};
|
||||
#{enable := false} = Source -> {200, Source#{annotations => #{status => unhealthy}}};
|
||||
#{annotations := #{id := Id}} = Source ->
|
||||
NSource0 = case maps:get(server, Source, undefined) of
|
||||
undefined -> Source;
|
||||
Server ->
|
||||
Source#{server => emqx_connector_schema_lib:ip_port_to_string(Server)}
|
||||
end,
|
||||
NSource1 = case maps:get(servers, Source, undefined) of
|
||||
undefined -> NSource0;
|
||||
Servers ->
|
||||
NSource0#{servers => [emqx_connector_schema_lib:ip_port_to_string(Server) || Server <- Servers]}
|
||||
end,
|
||||
NSource2 = case emqx_resource:health_check(Id) of
|
||||
ok ->
|
||||
{200, Source#{config => Config#{server => emqx_connector_schema_lib:ip_port_to_string(Server)},
|
||||
annotations => #{status => healthy}}};
|
||||
NSource1#{annotations => #{status => healthy}};
|
||||
_ ->
|
||||
{200, Source#{config => Config#{server => emqx_connector_schema_lib:ip_port_to_string(Server)},
|
||||
annotations => #{status => unhealthy}}}
|
||||
end;
|
||||
#{config := #{annotations := #{id := Id}}} = Source ->
|
||||
case emqx_resource:health_check(Id) of
|
||||
ok ->
|
||||
{200, Source#{annotations => #{status => healthy}}};
|
||||
_ ->
|
||||
{200, Source#{annotations => #{status => unhealthy}}}
|
||||
end
|
||||
NSource1#{annotations => #{status => unhealthy}}
|
||||
end,
|
||||
{200, read_cert(NSource2)}
|
||||
end;
|
||||
source(put, #{bindings := #{type := Type}, body := RawConfig}) ->
|
||||
case emqx_authz:update({replace_once, Type}, RawConfig) of
|
||||
source(put, #{bindings := #{type := <<"file">>}, body := #{<<"type">> := <<"file">>, <<"rules">> := Rules, <<"enable">> := Enable}}) ->
|
||||
{ok, Filename} = write_file(maps:get(path, emqx_authz:lookup(file), ""), Rules),
|
||||
case emqx_authz:update({replace_once, file}, #{type => file, enable => Enable, path => Filename}) of
|
||||
{ok, _} -> {204};
|
||||
{error, Reason} ->
|
||||
{400, #{code => <<"BAD_REQUEST">>,
|
||||
messgae => atom_to_binary(Reason)}}
|
||||
end;
|
||||
source(put, #{bindings := #{type := Type}, body := Body}) when is_map(Body) ->
|
||||
case emqx_authz:update({replace_once, Type}, write_cert(Body)) of
|
||||
{ok, _} -> {204};
|
||||
{error, not_found_source} ->
|
||||
{404, #{code => <<"NOT_FOUND">>,
|
||||
|
@@ -391,3 +427,62 @@ move_source(post, #{bindings := #{type := Type}, body := #{<<"position">> := Pos
|
|||
{400, #{code => <<"BAD_REQUEST">>,
|
||||
messgae => atom_to_binary(Reason)}}
|
||||
end.
|
||||
|
||||
read_cert(#{ssl := #{enable := true} = SSL} = Source) ->
|
||||
CaCert = case file:read_file(maps:get(cacertfile, SSL, "")) of
|
||||
{ok, CaCert0} -> CaCert0;
|
||||
_ -> ""
|
||||
end,
|
||||
Cert = case file:read_file(maps:get(certfile, SSL, "")) of
|
||||
{ok, Cert0} -> Cert0;
|
||||
_ -> ""
|
||||
end,
|
||||
Key = case file:read_file(maps:get(keyfile, SSL, "")) of
|
||||
{ok, Key0} -> Key0;
|
||||
_ -> ""
|
||||
end,
|
||||
Source#{ssl => SSL#{cacertfile => CaCert,
|
||||
certfile => Cert,
|
||||
keyfile => Key
|
||||
}
|
||||
};
|
||||
read_cert(Source) -> Source.
|
||||
|
||||
write_cert(#{<<"ssl">> := #{<<"enable">> := true} = SSL} = Source) ->
|
||||
CertPath = filename:join([emqx:get_config([node, data_dir]), "certs"]),
|
||||
CaCert = case maps:is_key(<<"cacertfile">>, SSL) of
|
||||
true ->
|
||||
{ok, CaCertFile} = write_file(filename:join([CertPath, "cacert-" ++ emqx_plugin_libs_id:gen() ++".pem"]),
|
||||
maps:get(<<"cacertfile">>, SSL)),
|
||||
CaCertFile;
|
||||
false -> ""
|
||||
end,
|
||||
Cert = case maps:is_key(<<"certfile">>, SSL) of
|
||||
true ->
|
||||
{ok, CertFile} = write_file(filename:join([CertPath, "cert-" ++ emqx_plugin_libs_id:gen() ++".pem"]),
|
||||
maps:get(<<"certfile">>, SSL)),
|
||||
CertFile;
|
||||
false -> ""
|
||||
end,
|
||||
Key = case maps:is_key(<<"keyfile">>, SSL) of
|
||||
true ->
|
||||
{ok, KeyFile} = write_file(filename:join([CertPath, "key-" ++ emqx_plugin_libs_id:gen() ++".pem"]),
|
||||
maps:get(<<"keyfile">>, SSL)),
|
||||
KeyFile;
|
||||
false -> ""
|
||||
end,
|
||||
Source#{<<"ssl">> => SSL#{<<"cacertfile">> => CaCert,
|
||||
<<"certfile">> => Cert,
|
||||
<<"keyfile">> => Key
|
||||
}
|
||||
};
|
||||
write_cert(Source) -> Source.
|
||||
|
||||
write_file(Filename, Bytes) ->
    ok = filelib:ensure_dir(Filename),
    case file:write_file(Filename, Bytes) of
       ok -> {ok, iolist_to_binary(Filename)};
       {error, Reason} ->
            ?LOG(error, "Write File ~p Error: ~p", [Filename, Reason]),
            error(Reason)
    end.
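%% How the helpers above compose, as a rough sketch (the path and PEM payload are
%% assumptions): write_cert/1 persists any inline certificate material under the node's
%% data_dir via write_file/2, and read_cert/1 inlines the file contents back when a
%% source is returned.
%%
%%     {ok, Path} = write_file(filename:join(["data", "certs", "cacert-example.pem"]),
%%                             <<"-----BEGIN CERTIFICATE-----...">>).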
|
||||
|
|
|
@@ -35,12 +35,12 @@ description() ->
|
|||
|
||||
authorize(Client, PubSub, Topic,
|
||||
#{type := http,
|
||||
config := #{url := #{path := Path} = Url,
|
||||
headers := Headers,
|
||||
method := Method,
|
||||
request_timeout := RequestTimeout} = Config,
|
||||
url := #{path := Path} = Url,
|
||||
headers := Headers,
|
||||
method := Method,
|
||||
request_timeout := RequestTimeout,
|
||||
annotations := #{id := ResourceID}
|
||||
}) ->
|
||||
} = Source) ->
|
||||
Request = case Method of
|
||||
get ->
|
||||
Query = maps:get(query, Url, ""),
|
||||
|
@@ -49,7 +49,7 @@ authorize(Client, PubSub, Topic,
|
|||
_ ->
|
||||
Body0 = serialize_body(
|
||||
maps:get('Accept', Headers, <<"application/json">>),
|
||||
maps:get(body, Config, #{})
|
||||
maps:get(body, Source, #{})
|
||||
),
|
||||
Body1 = replvar(Body0, PubSub, Topic, Client),
|
||||
Path1 = replvar(Path, PubSub, Topic, Client),
|
||||
|
|
|
@@ -35,10 +35,10 @@ description() ->
|
|||
|
||||
authorize(Client, PubSub, Topic,
|
||||
#{collection := Collection,
|
||||
find := Find,
|
||||
selector := Selector,
|
||||
annotations := #{id := ResourceID}
|
||||
}) ->
|
||||
case emqx_resource:query(ResourceID, {find, Collection, replvar(Find, Client), #{}}) of
|
||||
case emqx_resource:query(ResourceID, {find, Collection, replvar(Selector, Client), #{}}) of
|
||||
{error, Reason} ->
|
||||
?LOG(error, "[AuthZ] Query mongo error: ~p", [Reason]),
|
||||
nomatch;
|
||||
|
@@ -57,7 +57,7 @@ do_authorize(Client, PubSub, Topic, [Rule | Tail]) ->
|
|||
nomatch -> do_authorize(Client, PubSub, Topic, Tail)
|
||||
end.
|
||||
|
||||
replvar(Find, #{clientid := Clientid,
|
||||
replvar(Selector, #{clientid := Clientid,
|
||||
username := Username,
|
||||
peerhost := IpAddress
|
||||
}) ->
|
||||
|
@@ -76,7 +76,7 @@ replvar(Find, #{clientid := Clientid,
|
|||
maps:put(K, V3, AccIn);
|
||||
_Fun(K, V, AccIn) -> maps:put(K, V, AccIn)
|
||||
end,
|
||||
maps:fold(Fun, #{}, Find).
|
||||
maps:fold(Fun, #{}, Selector).
|
||||
|
||||
bin(A) when is_atom(A) -> atom_to_binary(A, utf8);
|
||||
bin(B) when is_binary(B) -> B;
|
||||
|
|
|
@@ -47,10 +47,10 @@ parse_query(Sql) ->
|
|||
|
||||
authorize(Client, PubSub, Topic,
|
||||
#{annotations := #{id := ResourceID,
|
||||
sql := {SQL, Params}
|
||||
query := {Query, Params}
|
||||
}
|
||||
}) ->
|
||||
case emqx_resource:query(ResourceID, {sql, SQL, replvar(Params, Client)}) of
|
||||
case emqx_resource:query(ResourceID, {sql, Query, replvar(Params, Client)}) of
|
||||
{ok, _Columns, []} -> nomatch;
|
||||
{ok, Columns, Rows} ->
|
||||
do_authorize(Client, PubSub, Topic, Columns, Rows);
|
||||
|
|
|
@@ -51,10 +51,10 @@ parse_query(Sql) ->
|
|||
|
||||
authorize(Client, PubSub, Topic,
|
||||
#{annotations := #{id := ResourceID,
|
||||
sql := {SQL, Params}
|
||||
query := {Query, Params}
|
||||
}
|
||||
}) ->
|
||||
case emqx_resource:query(ResourceID, {sql, SQL, replvar(Params, Client)}) of
|
||||
case emqx_resource:query(ResourceID, {sql, Query, replvar(Params, Client)}) of
|
||||
{ok, _Columns, []} -> nomatch;
|
||||
{ok, Columns, Rows} ->
|
||||
do_authorize(Client, PubSub, Topic, Columns, Rows);
|
||||
|
|
|
@@ -13,14 +13,32 @@
|
|||
-type permission() :: allow | deny.
|
||||
-type url() :: emqx_http_lib:uri_map().
|
||||
|
||||
-export([ roots/0
|
||||
-export([ namespace/0
|
||||
, roots/0
|
||||
, fields/1
|
||||
]).
|
||||
|
||||
roots() -> ["authorization"].
|
||||
namespace() -> authz.
|
||||
|
||||
%% @doc authorization schema is not exported
|
||||
%% but directly used by emqx_schema
|
||||
roots() -> [].
|
||||
|
||||
fields("authorization") ->
|
||||
[ {sources, sources()}
|
||||
[ {sources, #{type => union_array(
|
||||
[ hoconsc:ref(?MODULE, file)
|
||||
, hoconsc:ref(?MODULE, http_get)
|
||||
, hoconsc:ref(?MODULE, http_post)
|
||||
, hoconsc:ref(?MODULE, mongo_single)
|
||||
, hoconsc:ref(?MODULE, mongo_rs)
|
||||
, hoconsc:ref(?MODULE, mongo_sharded)
|
||||
, hoconsc:ref(?MODULE, mysql)
|
||||
, hoconsc:ref(?MODULE, pgsql)
|
||||
, hoconsc:ref(?MODULE, redis_single)
|
||||
, hoconsc:ref(?MODULE, redis_sentinel)
|
||||
, hoconsc:ref(?MODULE, redis_cluster)
|
||||
])}
|
||||
}
|
||||
];
|
||||
fields(file) ->
|
||||
[ {type, #{type => file}}
|
||||
|
@@ -34,17 +52,12 @@ fields(file) ->
|
|||
end
|
||||
}}
|
||||
];
|
||||
fields(http) ->
|
||||
fields(http_get) ->
|
||||
[ {type, #{type => http}}
|
||||
, {enable, #{type => boolean(),
|
||||
default => true}}
|
||||
, {config, #{type => hoconsc:union([ hoconsc:ref(?MODULE, http_get)
|
||||
, hoconsc:ref(?MODULE, http_post)
|
||||
])}
|
||||
}
|
||||
];
|
||||
fields(http_get) ->
|
||||
[ {url, #{type => url()}}
|
||||
, {url, #{type => url()}}
|
||||
, {method, #{type => get, default => get }}
|
||||
, {headers, #{type => map(),
|
||||
default => #{ <<"accept">> => <<"application/json">>
|
||||
, <<"cache-control">> => <<"no-cache">>
|
||||
|
@@ -53,7 +66,7 @@ fields(http_get) ->
|
|||
},
|
||||
converter => fun (Headers0) ->
|
||||
Headers1 = maps:fold(fun(K0, V, AccIn) ->
|
||||
K1 = iolist_to_binary(string:to_lower(binary_to_list(K0))),
|
||||
K1 = iolist_to_binary(string:to_lower(to_list(K0))),
|
||||
maps:put(K1, V, AccIn)
|
||||
end, #{}, Headers0),
|
||||
maps:merge(#{ <<"accept">> => <<"application/json">>
|
||||
|
@@ -64,11 +77,15 @@ fields(http_get) ->
|
|||
end
|
||||
}
|
||||
}
|
||||
, {method, #{type => get, default => get }}
|
||||
, {request_timeout, #{type => timeout(), default => 30000 }}
|
||||
] ++ proplists:delete(base_url, emqx_connector_http:fields(config));
|
||||
fields(http_post) ->
|
||||
[ {url, #{type => url()}}
|
||||
[ {type, #{type => http}}
|
||||
, {enable, #{type => boolean(),
|
||||
default => true}}
|
||||
, {url, #{type => url()}}
|
||||
, {method, #{type => post,
|
||||
default => get}}
|
||||
, {headers, #{type => map(),
|
||||
default => #{ <<"accept">> => <<"application/json">>
|
||||
, <<"cache-control">> => <<"no-cache">>
|
||||
|
@@ -90,54 +107,42 @@ fields(http_post) ->
|
|||
end
|
||||
}
|
||||
}
|
||||
, {method, #{type => hoconsc:enum([post, put]),
|
||||
default => get}}
|
||||
, {request_timeout, #{type => timeout(), default => 30000 }}
|
||||
, {body, #{type => map(),
|
||||
nullable => true
|
||||
}
|
||||
}
|
||||
] ++ proplists:delete(base_url, emqx_connector_http:fields(config));
|
||||
fields(mongo) ->
|
||||
connector_fields(mongo) ++
|
||||
fields(mongo_single) ->
|
||||
connector_fields(mongo, single) ++
|
||||
[ {collection, #{type => atom()}}
|
||||
, {find, #{type => map()}}
|
||||
, {selector, #{type => map()}}
|
||||
];
|
||||
fields(mongo_rs) ->
|
||||
connector_fields(mongo, rs) ++
|
||||
[ {collection, #{type => atom()}}
|
||||
, {selector, #{type => map()}}
|
||||
];
|
||||
fields(mongo_sharded) ->
|
||||
connector_fields(mongo, sharded) ++
|
||||
[ {collection, #{type => atom()}}
|
||||
, {selector, #{type => map()}}
|
||||
];
|
||||
fields(redis) ->
|
||||
connector_fields(redis) ++
|
||||
[ {cmd, query()} ];
|
||||
fields(mysql) ->
|
||||
connector_fields(mysql) ++
|
||||
[ {sql, query()} ];
|
||||
[ {query, query()} ];
|
||||
fields(pgsql) ->
|
||||
connector_fields(pgsql) ++
|
||||
[ {sql, query()} ];
|
||||
fields(username) ->
|
||||
[{username, #{type => binary()}}];
|
||||
fields(clientid) ->
|
||||
[{clientid, #{type => binary()}}];
|
||||
fields(ipaddress) ->
|
||||
[{ipaddress, #{type => string()}}];
|
||||
fields(andlist) ->
|
||||
[{'and', #{type => union_array(
|
||||
[ hoconsc:ref(?MODULE, username)
|
||||
, hoconsc:ref(?MODULE, clientid)
|
||||
, hoconsc:ref(?MODULE, ipaddress)
|
||||
])
|
||||
}
|
||||
}
|
||||
];
|
||||
fields(orlist) ->
|
||||
[{'or', #{type => union_array(
|
||||
[ hoconsc:ref(?MODULE, username)
|
||||
, hoconsc:ref(?MODULE, clientid)
|
||||
, hoconsc:ref(?MODULE, ipaddress)
|
||||
])
|
||||
}
|
||||
}
|
||||
];
|
||||
fields(eq_topic) ->
|
||||
[{eq, #{type => binary()}}].
|
||||
|
||||
[ {query, query()} ];
|
||||
fields(redis_single) ->
|
||||
connector_fields(redis, single) ++
|
||||
[ {cmd, query()} ];
|
||||
fields(redis_sentinel) ->
|
||||
connector_fields(redis, sentinel) ++
|
||||
[ {cmd, query()} ];
|
||||
fields(redis_cluster) ->
|
||||
connector_fields(redis, cluster) ++
|
||||
[ {cmd, query()} ].
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Internal functions
|
||||
|
@ -146,17 +151,6 @@ fields(eq_topic) ->
|
|||
union_array(Item) when is_list(Item) ->
|
||||
hoconsc:array(hoconsc:union(Item)).
|
||||
|
||||
sources() ->
|
||||
#{type => union_array(
|
||||
[ hoconsc:ref(?MODULE, file)
|
||||
, hoconsc:ref(?MODULE, http)
|
||||
, hoconsc:ref(?MODULE, mysql)
|
||||
, hoconsc:ref(?MODULE, pgsql)
|
||||
, hoconsc:ref(?MODULE, redis)
|
||||
, hoconsc:ref(?MODULE, mongo)
|
||||
])
|
||||
}.
|
||||
|
||||
query() ->
|
||||
#{type => binary(),
|
||||
validator => fun(S) ->
|
||||
|
@ -168,6 +162,8 @@ query() ->
|
|||
}.
|
||||
|
||||
connector_fields(DB) ->
|
||||
connector_fields(DB, config).
|
||||
connector_fields(DB, Fields) ->
|
||||
Mod0 = io_lib:format("~s_~s",[emqx_connector, DB]),
|
||||
Mod = try
|
||||
list_to_existing_atom(Mod0)
|
||||
|
@ -180,4 +176,9 @@ connector_fields(DB) ->
|
|||
[ {type, #{type => DB}}
|
||||
, {enable, #{type => boolean(),
|
||||
default => true}}
|
||||
] ++ Mod:roots().
|
||||
] ++ Mod:fields(Fields).
|
||||
|
||||
to_list(A) when is_atom(A) ->
|
||||
atom_to_list(A);
|
||||
to_list(B) when is_binary(B) ->
|
||||
binary_to_list(B).
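The header converter above lower-cases header names (via the new `to_list/1` helper) before merging in the defaults. A minimal standalone sketch of that normalization step, with a module name of our own choosing for illustration only:

```erlang
-module(header_norm_sketch).
-export([normalize/1]).

%% Accept atom or binary header names, as the schema converter above does.
to_list(A) when is_atom(A) -> atom_to_list(A);
to_list(B) when is_binary(B) -> binary_to_list(B).

%% Lower-case every header name into a binary key, keeping the values.
normalize(Headers) when is_map(Headers) ->
    maps:fold(
      fun(K0, V, Acc) ->
              K1 = iolist_to_binary(string:to_lower(to_list(K0))),
              maps:put(K1, V, Acc)
      end, #{}, Headers).
```

For example, `header_norm_sketch:normalize(#{'Cache-Control' => <<"no-cache">>})` yields `#{<<"cache-control">> => <<"no-cache">>}`.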
|
|
@ -62,58 +62,58 @@ init_per_testcase(_, Config) ->
|
|||
|
||||
-define(SOURCE1, #{<<"type">> => <<"http">>,
|
||||
<<"enable">> => true,
|
||||
<<"config">> => #{
|
||||
<<"url">> => <<"https://fake.com:443/">>,
|
||||
<<"headers">> => #{},
|
||||
<<"method">> => <<"get">>,
|
||||
<<"request_timeout">> => 5000}
|
||||
<<"url">> => <<"https://fake.com:443/">>,
|
||||
<<"headers">> => #{},
|
||||
<<"method">> => <<"get">>,
|
||||
<<"request_timeout">> => 5000
|
||||
}).
|
||||
-define(SOURCE2, #{<<"type">> => <<"mongo">>,
|
||||
<<"enable">> => true,
|
||||
<<"config">> => #{
|
||||
<<"mongo_type">> => <<"single">>,
|
||||
<<"server">> => <<"127.0.0.1:27017">>,
|
||||
<<"pool_size">> => 1,
|
||||
<<"database">> => <<"mqtt">>,
|
||||
<<"ssl">> => #{<<"enable">> => false}},
|
||||
<<"mongo_type">> => <<"single">>,
|
||||
<<"server">> => <<"127.0.0.1:27017">>,
|
||||
<<"pool_size">> => 1,
|
||||
<<"database">> => <<"mqtt">>,
|
||||
<<"ssl">> => #{<<"enable">> => false},
|
||||
<<"collection">> => <<"fake">>,
|
||||
<<"find">> => #{<<"a">> => <<"b">>}
|
||||
<<"selector">> => #{<<"a">> => <<"b">>}
|
||||
}).
|
||||
-define(SOURCE3, #{<<"type">> => <<"mysql">>,
|
||||
<<"enable">> => true,
|
||||
<<"config">> => #{
|
||||
<<"server">> => <<"127.0.0.1:27017">>,
|
||||
<<"pool_size">> => 1,
|
||||
<<"database">> => <<"mqtt">>,
|
||||
<<"username">> => <<"xx">>,
|
||||
<<"password">> => <<"ee">>,
|
||||
<<"auto_reconnect">> => true,
|
||||
<<"ssl">> => #{<<"enable">> => false}},
|
||||
<<"sql">> => <<"abcb">>
|
||||
<<"server">> => <<"127.0.0.1:27017">>,
|
||||
<<"pool_size">> => 1,
|
||||
<<"database">> => <<"mqtt">>,
|
||||
<<"username">> => <<"xx">>,
|
||||
<<"password">> => <<"ee">>,
|
||||
<<"auto_reconnect">> => true,
|
||||
<<"ssl">> => #{<<"enable">> => false},
|
||||
<<"query">> => <<"abcb">>
|
||||
}).
|
||||
-define(SOURCE4, #{<<"type">> => <<"pgsql">>,
|
||||
<<"enable">> => true,
|
||||
<<"config">> => #{
|
||||
<<"server">> => <<"127.0.0.1:27017">>,
|
||||
<<"pool_size">> => 1,
|
||||
<<"database">> => <<"mqtt">>,
|
||||
<<"username">> => <<"xx">>,
|
||||
<<"password">> => <<"ee">>,
|
||||
<<"auto_reconnect">> => true,
|
||||
<<"ssl">> => #{<<"enable">> => false}},
|
||||
<<"sql">> => <<"abcb">>
|
||||
<<"server">> => <<"127.0.0.1:27017">>,
|
||||
<<"pool_size">> => 1,
|
||||
<<"database">> => <<"mqtt">>,
|
||||
<<"username">> => <<"xx">>,
|
||||
<<"password">> => <<"ee">>,
|
||||
<<"auto_reconnect">> => true,
|
||||
<<"ssl">> => #{<<"enable">> => false},
|
||||
<<"query">> => <<"abcb">>
|
||||
}).
|
||||
-define(SOURCE5, #{<<"type">> => <<"redis">>,
|
||||
<<"enable">> => true,
|
||||
<<"config">> => #{
|
||||
<<"server">> => <<"127.0.0.1:27017">>,
|
||||
<<"pool_size">> => 1,
|
||||
<<"database">> => 0,
|
||||
<<"password">> => <<"ee">>,
|
||||
<<"auto_reconnect">> => true,
|
||||
<<"ssl">> => #{<<"enable">> => false}},
|
||||
<<"server">> => <<"127.0.0.1:27017">>,
|
||||
<<"pool_size">> => 1,
|
||||
<<"database">> => 0,
|
||||
<<"password">> => <<"ee">>,
|
||||
<<"auto_reconnect">> => true,
|
||||
<<"ssl">> => #{<<"enable">> => false},
|
||||
<<"cmd">> => <<"HGETALL mqtt_authz:%u">>
|
||||
}).
|
||||
-define(SOURCE6, #{<<"type">> => <<"file">>,
|
||||
<<"enable">> => true,
|
||||
<<"path">> => emqx_ct_helpers:deps_path(emqx_authz, "etc/acl.conf")
|
||||
}).
|
||||
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% Testcases
|
||||
|
@ -125,12 +125,14 @@ t_update_source(_) ->
|
|||
{ok, _} = emqx_authz:update(head, [?SOURCE1]),
|
||||
{ok, _} = emqx_authz:update(tail, [?SOURCE4]),
|
||||
{ok, _} = emqx_authz:update(tail, [?SOURCE5]),
|
||||
{ok, _} = emqx_authz:update(tail, [?SOURCE6]),
|
||||
|
||||
?assertMatch([ #{type := http, enable := true}
|
||||
, #{type := mongo, enable := true}
|
||||
, #{type := mysql, enable := true}
|
||||
, #{type := pgsql, enable := true}
|
||||
, #{type := redis, enable := true}
|
||||
, #{type := file, enable := true}
|
||||
], emqx:get_config([authorization, sources], [])),
|
||||
|
||||
{ok, _} = emqx_authz:update({replace_once, http}, ?SOURCE1#{<<"enable">> := false}),
|
||||
|
@ -138,23 +140,26 @@ t_update_source(_) ->
|
|||
{ok, _} = emqx_authz:update({replace_once, mysql}, ?SOURCE3#{<<"enable">> := false}),
|
||||
{ok, _} = emqx_authz:update({replace_once, pgsql}, ?SOURCE4#{<<"enable">> := false}),
|
||||
{ok, _} = emqx_authz:update({replace_once, redis}, ?SOURCE5#{<<"enable">> := false}),
|
||||
{ok, _} = emqx_authz:update({replace_once, file}, ?SOURCE6#{<<"enable">> := false}),
|
||||
|
||||
?assertMatch([ #{type := http, enable := false}
|
||||
, #{type := mongo, enable := false}
|
||||
, #{type := mysql, enable := false}
|
||||
, #{type := pgsql, enable := false}
|
||||
, #{type := redis, enable := false}
|
||||
, #{type := file, enable := false}
|
||||
], emqx:get_config([authorization, sources], [])),
|
||||
|
||||
{ok, _} = emqx_authz:update(replace, []).
|
||||
|
||||
t_move_source(_) ->
|
||||
{ok, _} = emqx_authz:update(replace, [?SOURCE1, ?SOURCE2, ?SOURCE3, ?SOURCE4, ?SOURCE5]),
|
||||
{ok, _} = emqx_authz:update(replace, [?SOURCE1, ?SOURCE2, ?SOURCE3, ?SOURCE4, ?SOURCE5, ?SOURCE6]),
|
||||
?assertMatch([ #{type := http}
|
||||
, #{type := mongo}
|
||||
, #{type := mysql}
|
||||
, #{type := pgsql}
|
||||
, #{type := redis}
|
||||
, #{type := file}
|
||||
], emqx_authz:lookup()),
|
||||
|
||||
{ok, _} = emqx_authz:move(pgsql, <<"top">>),
|
||||
|
@ -163,6 +168,7 @@ t_move_source(_) ->
|
|||
, #{type := mongo}
|
||||
, #{type := mysql}
|
||||
, #{type := redis}
|
||||
, #{type := file}
|
||||
], emqx_authz:lookup()),
|
||||
|
||||
{ok, _} = emqx_authz:move(http, <<"bottom">>),
|
||||
|
@ -170,6 +176,7 @@ t_move_source(_) ->
|
|||
, #{type := mongo}
|
||||
, #{type := mysql}
|
||||
, #{type := redis}
|
||||
, #{type := file}
|
||||
, #{type := http}
|
||||
], emqx_authz:lookup()),
|
||||
|
||||
|
@ -178,6 +185,7 @@ t_move_source(_) ->
|
|||
, #{type := pgsql}
|
||||
, #{type := mongo}
|
||||
, #{type := redis}
|
||||
, #{type := file}
|
||||
, #{type := http}
|
||||
], emqx_authz:lookup()),
|
||||
|
||||
|
@ -185,6 +193,7 @@ t_move_source(_) ->
|
|||
?assertMatch([ #{type := mysql}
|
||||
, #{type := pgsql}
|
||||
, #{type := redis}
|
||||
, #{type := file}
|
||||
, #{type := http}
|
||||
, #{type := mongo}
|
||||
], emqx_authz:lookup()),
|
||||
|
|
|
@ -39,58 +39,61 @@
|
|||
|
||||
-define(SOURCE1, #{<<"type">> => <<"http">>,
|
||||
<<"enable">> => true,
|
||||
<<"config">> => #{
|
||||
<<"url">> => <<"https://fake.com:443/">>,
|
||||
<<"headers">> => #{},
|
||||
<<"method">> => <<"get">>,
|
||||
<<"request_timeout">> => 5000}
|
||||
<<"url">> => <<"https://fake.com:443/">>,
|
||||
<<"headers">> => #{},
|
||||
<<"method">> => <<"get">>,
|
||||
<<"request_timeout">> => 5000
|
||||
}).
|
||||
-define(SOURCE2, #{<<"type">> => <<"mongo">>,
|
||||
<<"enable">> => true,
|
||||
<<"config">> => #{
|
||||
<<"mongo_type">> => <<"single">>,
|
||||
<<"server">> => <<"127.0.0.1:27017">>,
|
||||
<<"pool_size">> => 1,
|
||||
<<"database">> => <<"mqtt">>,
|
||||
<<"ssl">> => #{<<"enable">> => false}},
|
||||
<<"mongo_type">> => <<"sharded">>,
|
||||
<<"servers">> => [<<"127.0.0.1:27017">>,
|
||||
<<"192.168.0.1:27017">>
|
||||
],
|
||||
<<"pool_size">> => 1,
|
||||
<<"database">> => <<"mqtt">>,
|
||||
<<"ssl">> => #{<<"enable">> => false},
|
||||
<<"collection">> => <<"fake">>,
|
||||
<<"find">> => #{<<"a">> => <<"b">>}
|
||||
<<"selector">> => #{<<"a">> => <<"b">>}
|
||||
}).
|
||||
-define(SOURCE3, #{<<"type">> => <<"mysql">>,
|
||||
<<"enable">> => true,
|
||||
<<"config">> => #{
|
||||
<<"server">> => <<"127.0.0.1:27017">>,
|
||||
<<"pool_size">> => 1,
|
||||
<<"database">> => <<"mqtt">>,
|
||||
<<"username">> => <<"xx">>,
|
||||
<<"password">> => <<"ee">>,
|
||||
<<"auto_reconnect">> => true,
|
||||
<<"ssl">> => #{<<"enable">> => false}},
|
||||
<<"sql">> => <<"abcb">>
|
||||
<<"server">> => <<"127.0.0.1:3306">>,
|
||||
<<"pool_size">> => 1,
|
||||
<<"database">> => <<"mqtt">>,
|
||||
<<"username">> => <<"xx">>,
|
||||
<<"password">> => <<"ee">>,
|
||||
<<"auto_reconnect">> => true,
|
||||
<<"ssl">> => #{<<"enable">> => false},
|
||||
<<"query">> => <<"abcb">>
|
||||
}).
|
||||
-define(SOURCE4, #{<<"type">> => <<"pgsql">>,
|
||||
<<"enable">> => true,
|
||||
<<"config">> => #{
|
||||
<<"server">> => <<"127.0.0.1:27017">>,
|
||||
<<"pool_size">> => 1,
|
||||
<<"database">> => <<"mqtt">>,
|
||||
<<"username">> => <<"xx">>,
|
||||
<<"password">> => <<"ee">>,
|
||||
<<"auto_reconnect">> => true,
|
||||
<<"ssl">> => #{<<"enable">> => false}},
|
||||
<<"sql">> => <<"abcb">>
|
||||
<<"server">> => <<"127.0.0.1:5432">>,
|
||||
<<"pool_size">> => 1,
|
||||
<<"database">> => <<"mqtt">>,
|
||||
<<"username">> => <<"xx">>,
|
||||
<<"password">> => <<"ee">>,
|
||||
<<"auto_reconnect">> => true,
|
||||
<<"ssl">> => #{<<"enable">> => false},
|
||||
<<"query">> => <<"abcb">>
|
||||
}).
|
||||
-define(SOURCE5, #{<<"type">> => <<"redis">>,
|
||||
<<"enable">> => true,
|
||||
<<"config">> => #{
|
||||
<<"server">> => <<"127.0.0.1:27017">>,
|
||||
<<"pool_size">> => 1,
|
||||
<<"database">> => 0,
|
||||
<<"password">> => <<"ee">>,
|
||||
<<"auto_reconnect">> => true,
|
||||
<<"ssl">> => #{<<"enable">> => false}},
|
||||
<<"servers">> => [<<"127.0.0.1:6379">>,
|
||||
<<"127.0.0.1:6380">>
|
||||
],
|
||||
<<"pool_size">> => 1,
|
||||
<<"database">> => 0,
|
||||
<<"password">> => <<"ee">>,
|
||||
<<"auto_reconnect">> => true,
|
||||
<<"ssl">> => #{<<"enable">> => false},
|
||||
<<"cmd">> => <<"HGETALL mqtt_authz:%u">>
|
||||
}).
|
||||
-define(SOURCE6, #{<<"type">> => <<"file">>,
|
||||
<<"enable">> => true,
|
||||
<<"rules">> => <<"{allow,{username,\"^dashboard?\"},subscribe,[\"$SYS/#\"]}.\n{allow,{ipaddr,\"127.0.0.1\"},all,[\"$SYS/#\",\"#\"]}.">>
|
||||
}).
|
||||
|
||||
all() ->
|
||||
emqx_ct:all(?MODULE).
|
||||
|
@ -144,6 +147,26 @@ set_special_configs(emqx_authz) ->
|
|||
set_special_configs(_App) ->
|
||||
ok.
|
||||
|
||||
init_per_testcase(t_api, Config) ->
|
||||
meck:new(emqx_plugin_libs_id, [non_strict, passthrough, no_history, no_link]),
|
||||
meck:expect(emqx_plugin_libs_id, gen, fun() -> "fake" end),
|
||||
|
||||
meck:new(emqx, [non_strict, passthrough, no_history, no_link]),
|
||||
meck:expect(emqx, get_config, fun([node, data_dir]) ->
|
||||
% emqx_ct_helpers:deps_path(emqx_authz, "test");
|
||||
{data_dir, Data} = lists:keyfind(data_dir, 1, Config),
|
||||
Data;
|
||||
(C) -> meck:passthrough([C])
|
||||
end),
|
||||
Config;
|
||||
init_per_testcase(_, Config) -> Config.
|
||||
|
||||
end_per_testcase(t_api, _Config) ->
|
||||
meck:unload(emqx_plugin_libs_id),
|
||||
meck:unload(emqx),
|
||||
ok;
|
||||
end_per_testcase(_, _Config) -> ok.
|
||||
|
||||
%%------------------------------------------------------------------------------
|
||||
%% Testcases
|
||||
%%------------------------------------------------------------------------------
|
||||
|
@ -152,33 +175,44 @@ t_api(_) ->
|
|||
{ok, 200, Result1} = request(get, uri(["authorization", "sources"]), []),
|
||||
?assertEqual([], get_sources(Result1)),
|
||||
|
||||
lists:foreach(fun(_) ->
|
||||
{ok, 204, _} = request(post, uri(["authorization", "sources"]), ?SOURCE1)
|
||||
end, lists:seq(1, 20)),
|
||||
{ok, 204, _} = request(put, uri(["authorization", "sources"]), [?SOURCE2, ?SOURCE3, ?SOURCE4, ?SOURCE5, ?SOURCE6]),
|
||||
{ok, 204, _} = request(post, uri(["authorization", "sources"]), ?SOURCE1),
|
||||
|
||||
{ok, 200, Result2} = request(get, uri(["authorization", "sources"]), []),
|
||||
?assertEqual(20, length(get_sources(Result2))),
|
||||
|
||||
lists:foreach(fun(Page) ->
|
||||
Query = "?page=" ++ integer_to_list(Page) ++ "&&limit=10",
|
||||
Url = uri(["authorization/sources" ++ Query]),
|
||||
{ok, 200, Result} = request(get, Url, []),
|
||||
?assertEqual(10, length(get_sources(Result)))
|
||||
end, lists:seq(1, 2)),
|
||||
|
||||
{ok, 204, _} = request(put, uri(["authorization", "sources"]), [?SOURCE1, ?SOURCE2, ?SOURCE3, ?SOURCE4]),
|
||||
|
||||
{ok, 200, Result3} = request(get, uri(["authorization", "sources"]), []),
|
||||
Sources = get_sources(Result3),
|
||||
Sources = get_sources(Result2),
|
||||
?assertMatch([ #{<<"type">> := <<"http">>}
|
||||
, #{<<"type">> := <<"mongo">>}
|
||||
, #{<<"type">> := <<"mysql">>}
|
||||
, #{<<"type">> := <<"pgsql">>}
|
||||
, #{<<"type">> := <<"redis">>}
|
||||
, #{<<"type">> := <<"file">>}
|
||||
], Sources),
|
||||
?assert(filelib:is_file(filename:join([emqx:get_config([node, data_dir]), "acl.conf"]))),
|
||||
|
||||
{ok, 204, _} = request(put, uri(["authorization", "sources", "http"]), ?SOURCE1#{<<"enable">> := false}),
|
||||
{ok, 200, Result3} = request(get, uri(["authorization", "sources", "http"]), []),
|
||||
?assertMatch(#{<<"type">> := <<"http">>, <<"enable">> := false}, jsx:decode(Result3)),
|
||||
|
||||
{ok, 200, Result4} = request(get, uri(["authorization", "sources", "http"]), []),
|
||||
?assertMatch(#{<<"type">> := <<"http">>, <<"enable">> := false}, jsx:decode(Result4)),
|
||||
{ok, 204, _} = request(put, uri(["authorization", "sources", "mongo"]),
|
||||
?SOURCE2#{<<"ssl">> := #{
|
||||
<<"enable">> => true,
|
||||
<<"cacertfile">> => <<"fake cacert file">>,
|
||||
<<"certfile">> => <<"fake cert file">>,
|
||||
<<"keyfile">> => <<"fake key file">>,
|
||||
<<"verify">> => false
|
||||
}}),
|
||||
{ok, 200, Result4} = request(get, uri(["authorization", "sources", "mongo"]), []),
|
||||
?assertMatch(#{<<"type">> := <<"mongo">>,
|
||||
<<"ssl">> := #{<<"enable">> := true,
|
||||
<<"cacertfile">> := <<"fake cacert file">>,
|
||||
<<"certfile">> := <<"fake cert file">>,
|
||||
<<"keyfile">> := <<"fake key file">>,
|
||||
<<"verify">> := false
|
||||
}
|
||||
}, jsx:decode(Result4)),
|
||||
?assert(filelib:is_file(filename:join([emqx:get_config([node, data_dir]), "certs", "cacert-fake.pem"]))),
|
||||
?assert(filelib:is_file(filename:join([emqx:get_config([node, data_dir]), "certs", "cert-fake.pem"]))),
|
||||
?assert(filelib:is_file(filename:join([emqx:get_config([node, data_dir]), "certs", "key-fake.pem"]))),
|
||||
|
||||
lists:foreach(fun(#{<<"type">> := Type}) ->
|
||||
{ok, 204, _} = request(delete, uri(["authorization", "sources", binary_to_list(Type)]), [])
|
||||
|
|
|
@ -47,12 +47,11 @@ init_per_suite(Config) ->
|
|||
{ok, _} = emqx:update_config([authorization, cache, enable], false),
|
||||
{ok, _} = emqx:update_config([authorization, no_match], deny),
|
||||
Rules = [#{<<"type">> => <<"http">>,
|
||||
<<"config">> => #{
|
||||
<<"url">> => <<"https://fake.com:443/">>,
|
||||
<<"headers">> => #{},
|
||||
<<"method">> => <<"get">>,
|
||||
<<"request_timeout">> => 5000
|
||||
}}
|
||||
<<"url">> => <<"https://fake.com:443/">>,
|
||||
<<"headers">> => #{},
|
||||
<<"method">> => <<"get">>,
|
||||
<<"request_timeout">> => 5000
|
||||
}
|
||||
],
|
||||
{ok, _} = emqx_authz:update(replace, Rules),
|
||||
Config.
|
||||
|
|
|
@ -47,14 +47,13 @@ init_per_suite(Config) ->
|
|||
{ok, _} = emqx:update_config([authorization, cache, enable], false),
|
||||
{ok, _} = emqx:update_config([authorization, no_match], deny),
|
||||
Rules = [#{<<"type">> => <<"mongo">>,
|
||||
<<"config">> => #{
|
||||
<<"mongo_type">> => <<"single">>,
|
||||
<<"server">> => <<"127.0.0.1:27017">>,
|
||||
<<"pool_size">> => 1,
|
||||
<<"database">> => <<"mqtt">>,
|
||||
<<"ssl">> => #{<<"enable">> => false}},
|
||||
<<"mongo_type">> => <<"single">>,
|
||||
<<"server">> => <<"127.0.0.1:27017">>,
|
||||
<<"pool_size">> => 1,
|
||||
<<"database">> => <<"mqtt">>,
|
||||
<<"ssl">> => #{<<"enable">> => false},
|
||||
<<"collection">> => <<"fake">>,
|
||||
<<"find">> => #{<<"a">> => <<"b">>}
|
||||
<<"selector">> => #{<<"a">> => <<"b">>}
|
||||
}],
|
||||
{ok, _} = emqx_authz:update(replace, Rules),
|
||||
Config.
|
||||
|
|
|
@ -48,15 +48,14 @@ init_per_suite(Config) ->
|
|||
{ok, _} = emqx:update_config([authorization, cache, enable], false),
|
||||
{ok, _} = emqx:update_config([authorization, no_match], deny),
|
||||
Rules = [#{<<"type">> => <<"mysql">>,
|
||||
<<"config">> => #{
|
||||
<<"server">> => <<"127.0.0.1:27017">>,
|
||||
<<"pool_size">> => 1,
|
||||
<<"database">> => <<"mqtt">>,
|
||||
<<"username">> => <<"xx">>,
|
||||
<<"password">> => <<"ee">>,
|
||||
<<"auto_reconnect">> => true,
|
||||
<<"ssl">> => #{<<"enable">> => false}},
|
||||
<<"sql">> => <<"abcb">>
|
||||
<<"server">> => <<"127.0.0.1:27017">>,
|
||||
<<"pool_size">> => 1,
|
||||
<<"database">> => <<"mqtt">>,
|
||||
<<"username">> => <<"xx">>,
|
||||
<<"password">> => <<"ee">>,
|
||||
<<"auto_reconnect">> => true,
|
||||
<<"ssl">> => #{<<"enable">> => false},
|
||||
<<"query">> => <<"abcb">>
|
||||
}],
|
||||
{ok, _} = emqx_authz:update(replace, Rules),
|
||||
Config.
|
||||
|
|
|
@ -48,15 +48,14 @@ init_per_suite(Config) ->
|
|||
{ok, _} = emqx:update_config([authorization, cache, enable], false),
|
||||
{ok, _} = emqx:update_config([authorization, no_match], deny),
|
||||
Rules = [#{<<"type">> => <<"pgsql">>,
|
||||
<<"config">> => #{
|
||||
<<"server">> => <<"127.0.0.1:27017">>,
|
||||
<<"pool_size">> => 1,
|
||||
<<"database">> => <<"mqtt">>,
|
||||
<<"username">> => <<"xx">>,
|
||||
<<"password">> => <<"ee">>,
|
||||
<<"auto_reconnect">> => true,
|
||||
<<"ssl">> => #{<<"enable">> => false}},
|
||||
<<"sql">> => <<"abcb">>
|
||||
<<"server">> => <<"127.0.0.1:27017">>,
|
||||
<<"pool_size">> => 1,
|
||||
<<"database">> => <<"mqtt">>,
|
||||
<<"username">> => <<"xx">>,
|
||||
<<"password">> => <<"ee">>,
|
||||
<<"auto_reconnect">> => true,
|
||||
<<"ssl">> => #{<<"enable">> => false},
|
||||
<<"query">> => <<"abcb">>
|
||||
}],
|
||||
{ok, _} = emqx_authz:update(replace, Rules),
|
||||
Config.
|
||||
|
|
|
@ -47,13 +47,12 @@ init_per_suite(Config) ->
|
|||
{ok, _} = emqx:update_config([authorization, cache, enable], false),
|
||||
{ok, _} = emqx:update_config([authorization, no_match], deny),
|
||||
Rules = [#{<<"type">> => <<"redis">>,
|
||||
<<"config">> => #{
|
||||
<<"server">> => <<"127.0.0.1:27017">>,
|
||||
<<"pool_size">> => 1,
|
||||
<<"database">> => 0,
|
||||
<<"password">> => <<"ee">>,
|
||||
<<"auto_reconnect">> => true,
|
||||
<<"ssl">> => #{<<"enable">> => false}},
|
||||
<<"server">> => <<"127.0.0.1:27017">>,
|
||||
<<"pool_size">> => 1,
|
||||
<<"database">> => 0,
|
||||
<<"password">> => <<"ee">>,
|
||||
<<"auto_reconnect">> => true,
|
||||
<<"ssl">> => #{<<"enable">> => false},
|
||||
<<"cmd">> => <<"HGETALL mqtt_authz:%u">>
|
||||
}],
|
||||
{ok, _} = emqx_authz:update(replace, Rules),
|
||||
|
|
|
@ -19,25 +19,30 @@
|
|||
|
||||
-include_lib("typerefl/include/types.hrl").
|
||||
|
||||
-export([ roots/0
|
||||
-export([ namespace/0
|
||||
, roots/0
|
||||
, fields/1]).
|
||||
|
||||
namespace() -> "auto_subscribe".
|
||||
|
||||
roots() ->
|
||||
["auto_subscribe"].
|
||||
|
||||
fields("auto_subscribe") ->
|
||||
[ {topics, hoconsc:array(hoconsc:ref(?MODULE, "topic"))}];
|
||||
[ {topics, hoconsc:array(hoconsc:ref(?MODULE, "topic"))}
|
||||
];
|
||||
|
||||
fields("topic") ->
|
||||
[ {topic, emqx_schema:t(binary())}
|
||||
, {qos, t(hoconsc:union([0, 1, 2]), 0)}
|
||||
, {rh, t(hoconsc:union([0, 1, 2]), 0)}
|
||||
, {rap, t(hoconsc:union([0, 1]), 0)}
|
||||
, {nl, t(hoconsc:union([0, 1]), 0)}
|
||||
[ {topic, sc(binary(), #{})}
|
||||
, {qos, sc(typerefl:union([0, 1, 2]), #{default => 0})}
|
||||
, {rh, sc(typerefl:union([0, 1, 2]), #{default => 0})}
|
||||
, {rap, sc(typerefl:union([0, 1]), #{default => 0})}
|
||||
, {nl, sc(typerefl:union([0, 1]), #{default => 0})}
|
||||
].
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Internal functions
|
||||
%%--------------------------------------------------------------------
|
||||
t(Type, Default) ->
|
||||
hoconsc:t(Type, #{default => Default}).
|
||||
|
||||
sc(Type, Meta) ->
|
||||
hoconsc:mk(Type, Meta).
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# emqx_data_bridge
|
||||
# emqx_bridge
|
||||
|
||||
EMQ X Data Bridge is an application that manages the resources (see emqx_resource) used by the emqx
|
||||
rule engine.
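The rename from `emqx_data_bridge` to `emqx_bridge` keeps the same resource convention: every bridge is an emqx_resource instance whose id carries a `bridge:` prefix. A minimal sketch of that mapping, using only functions that appear in `emqx_bridge` further down in this diff (the bridge name is a made-up example):

```erlang
%% Hypothetical shell session; "my_mqtt_bridge_to_aws" is only an example name.
1> emqx_bridge:name_to_resource_id(<<"my_mqtt_bridge_to_aws">>).
<<"bridge:my_mqtt_bridge_to_aws">>
2> emqx_bridge:resource_id_to_name(<<"bridge:my_mqtt_bridge_to_aws">>).
<<"my_mqtt_bridge_to_aws">>
3> emqx_bridge:is_bridge(#{id => <<"bridge:my_mqtt_bridge_to_aws">>}).
true
```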
|
|
@ -0,0 +1,49 @@
|
|||
##--------------------------------------------------------------------
|
||||
## EMQ X Bridge
|
||||
##--------------------------------------------------------------------
|
||||
|
||||
#bridges.mqtt.my_mqtt_bridge_to_aws {
|
||||
# server = "127.0.0.1:1883"
|
||||
# proto_ver = "v4"
|
||||
# username = "username1"
|
||||
# password = ""
|
||||
# clean_start = true
|
||||
# keepalive = 300
|
||||
# retry_interval = "30s"
|
||||
# max_inflight = 32
|
||||
# reconnect_interval = "30s"
|
||||
# bridge_mode = true
|
||||
# replayq {
|
||||
# dir = "{{ platform_data_dir }}/replayq/bridge_mqtt/"
|
||||
# seg_bytes = "100MB"
|
||||
# offload = false
|
||||
# max_total_bytes = "1GB"
|
||||
# }
|
||||
# ssl {
|
||||
# enable = false
|
||||
# keyfile = "{{ platform_etc_dir }}/certs/client-key.pem"
|
||||
# certfile = "{{ platform_etc_dir }}/certs/client-cert.pem"
|
||||
# cacertfile = "{{ platform_etc_dir }}/certs/cacert.pem"
|
||||
# }
|
||||
# ## we will create one MQTT connection for each element of the `message_in`
|
||||
# message_in: [{
|
||||
# ## the `id` will be used as part of the clientid
|
||||
# id = "pull_msgs_from_aws"
|
||||
# subscribe_remote_topic = "aws/#"
|
||||
# subscribe_qos = 1
|
||||
# local_topic = "from_aws/${topic}"
|
||||
# payload = "${payload}"
|
||||
# qos = "${qos}"
|
||||
# retain = "${retain}"
|
||||
# }]
|
||||
# ## we will create one MQTT connection for each element of the `message_out`
|
||||
# message_out: [{
|
||||
# ## the `id` will be used as part of the clientid
|
||||
# id = "push_msgs_to_aws"
|
||||
# subscribe_local_topic = "emqx/#"
|
||||
# remote_topic = "from_emqx/${topic}"
|
||||
# payload = "${payload}"
|
||||
# qos = 1
|
||||
# retain = false
|
||||
# }]
|
||||
#}
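To relate this example file to the loader code further down in the diff: `emqx_bridge:load_bridges/0` fetches the whole `bridges` map and `emqx_bridge_monitor` walks it as nested maps of type, then bridge name, then config. A minimal standalone sketch of that traversal (the function name is ours, for illustration only):

```erlang
%% Walk a parsed `bridges` map such as
%% #{mqtt => #{my_mqtt_bridge_to_aws => Conf}} and call Fun(Name, Type, Conf)
%% once per named bridge, mirroring emqx_bridge_monitor:load_bridges/1.
walk_bridges(Bridges, Fun) when is_map(Bridges), is_function(Fun, 3) ->
    lists:foreach(
      fun({Type, NamedConf}) ->
              lists:foreach(
                fun({Name, Conf}) -> Fun(Name, Type, Conf) end,
                maps:to_list(NamedConf))
      end, maps:to_list(Bridges)).
```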
|
|
@ -3,5 +3,5 @@
|
|||
|
||||
{shell, [
|
||||
% {config, "config/sys.config"},
|
||||
{apps, [emqx_data_bridge]}
|
||||
{apps, [emqx_bridge]}
|
||||
]}.
|
|
@ -1,12 +1,13 @@
|
|||
{application, emqx_data_bridge,
|
||||
{application, emqx_bridge,
|
||||
[{description, "An OTP application"},
|
||||
{vsn, "0.1.0"},
|
||||
{registered, []},
|
||||
{mod, {emqx_data_bridge_app, []}},
|
||||
{mod, {emqx_bridge_app, []}},
|
||||
{applications,
|
||||
[kernel,
|
||||
stdlib,
|
||||
emqx
|
||||
emqx,
|
||||
emqx_connector
|
||||
]},
|
||||
{env,[]},
|
||||
{modules, []},
|
|
@ -13,7 +13,7 @@
|
|||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
-module(emqx_data_bridge).
|
||||
-module(emqx_bridge).
|
||||
|
||||
-export([ load_bridges/0
|
||||
, resource_type/1
|
||||
|
@ -27,15 +27,17 @@
|
|||
]).
|
||||
|
||||
load_bridges() ->
|
||||
Bridges = emqx:get_config([emqx_data_bridge, bridges], []),
|
||||
emqx_data_bridge_monitor:ensure_all_started(Bridges).
|
||||
Bridges = emqx:get_config([bridges], #{}),
|
||||
emqx_bridge_monitor:ensure_all_started(Bridges).
|
||||
|
||||
resource_type(mqtt) -> emqx_connector_mqtt;
|
||||
resource_type(mysql) -> emqx_connector_mysql;
|
||||
resource_type(pgsql) -> emqx_connector_pgsql;
|
||||
resource_type(mongo) -> emqx_connector_mongo;
|
||||
resource_type(redis) -> emqx_connector_redis;
|
||||
resource_type(ldap) -> emqx_connector_ldap.
|
||||
|
||||
bridge_type(emqx_connector_mqtt) -> mqtt;
|
||||
bridge_type(emqx_connector_mysql) -> mysql;
|
||||
bridge_type(emqx_connector_pgsql) -> pgsql;
|
||||
bridge_type(emqx_connector_mongo) -> mongo;
|
||||
|
@ -43,13 +45,14 @@ bridge_type(emqx_connector_redis) -> redis;
|
|||
bridge_type(emqx_connector_ldap) -> ldap.
|
||||
|
||||
name_to_resource_id(BridgeName) ->
|
||||
<<"bridge:", BridgeName/binary>>.
|
||||
Name = bin(BridgeName),
|
||||
<<"bridge:", Name/binary>>.
|
||||
|
||||
resource_id_to_name(<<"bridge:", BridgeName/binary>> = _ResourceId) ->
|
||||
BridgeName.
|
||||
|
||||
list_bridges() ->
|
||||
emqx_resource_api:list_instances(fun emqx_data_bridge:is_bridge/1).
|
||||
emqx_resource_api:list_instances(fun emqx_bridge:is_bridge/1).
|
||||
|
||||
is_bridge(#{id := <<"bridge:", _/binary>>}) ->
|
||||
true;
|
||||
|
@ -57,7 +60,11 @@ is_bridge(_Data) ->
|
|||
false.
|
||||
|
||||
config_key_path() ->
|
||||
[emqx_data_bridge, bridges].
|
||||
[emqx_bridge, bridges].
|
||||
|
||||
update_config(ConfigReq) ->
|
||||
emqx:update_config(config_key_path(), ConfigReq).
|
||||
|
||||
bin(Bin) when is_binary(Bin) -> Bin;
|
||||
bin(Str) when is_list(Str) -> list_to_binary(Str);
|
||||
bin(Atom) when is_atom(Atom) -> atom_to_binary(Atom, utf8).
|
|
@ -13,7 +13,7 @@
|
|||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
-module(emqx_data_bridge_api).
|
||||
-module(emqx_bridge_api).
|
||||
|
||||
-rest_api(#{ name => list_data_bridges
|
||||
, method => 'GET'
|
||||
|
@ -61,10 +61,10 @@
|
|||
|
||||
list_bridges(_Binding, _Params) ->
|
||||
{200, #{code => 0, data => [format_api_reply(Data) ||
|
||||
Data <- emqx_data_bridge:list_bridges()]}}.
|
||||
Data <- emqx_bridge:list_bridges()]}}.
|
||||
|
||||
get_bridge(#{name := Name}, _Params) ->
|
||||
case emqx_resource:get_instance(emqx_data_bridge:name_to_resource_id(Name)) of
|
||||
case emqx_resource:get_instance(emqx_bridge:name_to_resource_id(Name)) of
|
||||
{ok, Data} ->
|
||||
{200, #{code => 0, data => format_api_reply(emqx_resource_api:format_data(Data))}};
|
||||
{error, not_found} ->
|
||||
|
@ -75,8 +75,8 @@ create_bridge(#{name := Name}, Params) ->
|
|||
Config = proplists:get_value(<<"config">>, Params),
|
||||
BridgeType = proplists:get_value(<<"type">>, Params),
|
||||
case emqx_resource:check_and_create(
|
||||
emqx_data_bridge:name_to_resource_id(Name),
|
||||
emqx_data_bridge:resource_type(atom(BridgeType)), maps:from_list(Config)) of
|
||||
emqx_bridge:name_to_resource_id(Name),
|
||||
emqx_bridge:resource_type(atom(BridgeType)), maps:from_list(Config)) of
|
||||
{ok, already_created} ->
|
||||
{400, #{code => 102, message => <<"bridge already created: ", Name/binary>>}};
|
||||
{ok, Data} ->
|
||||
|
@ -91,8 +91,8 @@ update_bridge(#{name := Name}, Params) ->
|
|||
Config = proplists:get_value(<<"config">>, Params),
|
||||
BridgeType = proplists:get_value(<<"type">>, Params),
|
||||
case emqx_resource:check_and_update(
|
||||
emqx_data_bridge:name_to_resource_id(Name),
|
||||
emqx_data_bridge:resource_type(atom(BridgeType)), maps:from_list(Config), []) of
|
||||
emqx_bridge:name_to_resource_id(Name),
|
||||
emqx_bridge:resource_type(atom(BridgeType)), maps:from_list(Config), []) of
|
||||
{ok, Data} ->
|
||||
update_config_and_reply(Name, BridgeType, Config, Data);
|
||||
{error, not_found} ->
|
||||
|
@ -104,26 +104,26 @@ update_bridge(#{name := Name}, Params) ->
|
|||
end.
|
||||
|
||||
delete_bridge(#{name := Name}, _Params) ->
|
||||
case emqx_resource:remove(emqx_data_bridge:name_to_resource_id(Name)) of
|
||||
case emqx_resource:remove(emqx_bridge:name_to_resource_id(Name)) of
|
||||
ok -> delete_config_and_reply(Name);
|
||||
{error, Reason} ->
|
||||
{500, #{code => 102, message => emqx_resource_api:stringnify(Reason)}}
|
||||
end.
|
||||
|
||||
format_api_reply(#{resource_type := Type, id := Id, config := Conf, status := Status}) ->
|
||||
#{type => emqx_data_bridge:bridge_type(Type),
|
||||
name => emqx_data_bridge:resource_id_to_name(Id),
|
||||
#{type => emqx_bridge:bridge_type(Type),
|
||||
name => emqx_bridge:resource_id_to_name(Id),
|
||||
config => Conf, status => Status}.
|
||||
|
||||
% format_conf(#{resource_type := Type, id := Id, config := Conf}) ->
|
||||
% #{type => Type, name => emqx_data_bridge:resource_id_to_name(Id),
|
||||
% #{type => Type, name => emqx_bridge:resource_id_to_name(Id),
|
||||
% config => Conf}.
|
||||
|
||||
% get_all_configs() ->
|
||||
% [format_conf(Data) || Data <- emqx_data_bridge:list_bridges()].
|
||||
% [format_conf(Data) || Data <- emqx_bridge:list_bridges()].
|
||||
|
||||
update_config_and_reply(Name, BridgeType, Config, Data) ->
|
||||
case emqx_data_bridge:update_config({update, ?BRIDGE(Name, BridgeType, Config)}) of
|
||||
case emqx_bridge:update_config({update, ?BRIDGE(Name, BridgeType, Config)}) of
|
||||
{ok, _} ->
|
||||
{200, #{code => 0, data => format_api_reply(
|
||||
emqx_resource_api:format_data(Data))}};
|
||||
|
@ -132,7 +132,7 @@ update_config_and_reply(Name, BridgeType, Config, Data) ->
|
|||
end.
|
||||
|
||||
delete_config_and_reply(Name) ->
|
||||
case emqx_data_bridge:update_config({delete, Name}) of
|
||||
case emqx_bridge:update_config({delete, Name}) of
|
||||
{ok, _} -> {200, #{code => 0, data => #{}}};
|
||||
{error, Reason} ->
|
||||
{500, #{code => 102, message => emqx_resource_api:stringnify(Reason)}}
|
|
@ -13,7 +13,7 @@
|
|||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
-module(emqx_data_bridge_app).
|
||||
-module(emqx_bridge_app).
|
||||
|
||||
-behaviour(application).
|
||||
|
||||
|
@ -22,9 +22,9 @@
|
|||
-export([start/2, stop/1, pre_config_update/2]).
|
||||
|
||||
start(_StartType, _StartArgs) ->
|
||||
{ok, Sup} = emqx_data_bridge_sup:start_link(),
|
||||
ok = emqx_data_bridge:load_bridges(),
|
||||
emqx_config_handler:add_handler(emqx_data_bridge:config_key_path(), ?MODULE),
|
||||
{ok, Sup} = emqx_bridge_sup:start_link(),
|
||||
ok = emqx_bridge:load_bridges(),
|
||||
emqx_config_handler:add_handler(emqx_bridge:config_key_path(), ?MODULE),
|
||||
{ok, Sup}.
|
||||
|
||||
stop(_State) ->
|
|
@ -15,7 +15,7 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% This process monitors all the data bridges, and tries to restart a bridge
|
||||
%% when one of them stops.
|
||||
-module(emqx_data_bridge_monitor).
|
||||
-module(emqx_bridge_monitor).
|
||||
|
||||
-behaviour(gen_server).
|
||||
|
||||
|
@ -65,14 +65,18 @@ code_change(_OldVsn, State, _Extra) ->
|
|||
|
||||
%%============================================================================
|
||||
load_bridges(Configs) ->
|
||||
lists:foreach(fun load_bridge/1, Configs).
|
||||
lists:foreach(fun({Type, NamedConf}) ->
|
||||
lists:foreach(fun({Name, Conf}) ->
|
||||
load_bridge(Name, Type, Conf)
|
||||
end, maps:to_list(NamedConf))
|
||||
end, maps:to_list(Configs)).
|
||||
|
||||
%% TODO: move this monitor into emqx_resource
|
||||
%% emqx_resource:check_and_create_local(ResourceId, ResourceType, Config, #{keep_retry => true}).
|
||||
load_bridge(#{name := Name, type := Type, config := Config}) ->
|
||||
load_bridge(Name, Type, Config) ->
|
||||
case emqx_resource:create_local(
|
||||
emqx_data_bridge:name_to_resource_id(Name),
|
||||
emqx_data_bridge:resource_type(Type), Config) of
|
||||
emqx_bridge:name_to_resource_id(Name),
|
||||
emqx_bridge:resource_type(Type), Config) of
|
||||
{ok, already_created} -> ok;
|
||||
{ok, _} -> ok;
|
||||
{error, Reason} ->
|
|
@ -0,0 +1,17 @@
|
|||
-module(emqx_bridge_schema).
|
||||
|
||||
-export([roots/0, fields/1]).
|
||||
|
||||
%%======================================================================================
|
||||
%% Hocon Schema Definitions
|
||||
|
||||
roots() -> ["bridges"].
|
||||
|
||||
fields("bridges") ->
|
||||
[{mqtt, hoconsc:ref(?MODULE, "mqtt")}];
|
||||
|
||||
fields("mqtt") ->
|
||||
[{"$name", hoconsc:ref(?MODULE, "mqtt_bridge")}];
|
||||
|
||||
fields("mqtt_bridge") ->
|
||||
emqx_connector_mqtt:fields("config").
|
|
@ -13,7 +13,7 @@
|
|||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
-module(emqx_data_bridge_sup).
|
||||
-module(emqx_bridge_sup).
|
||||
|
||||
-behaviour(supervisor).
|
||||
|
||||
|
@ -31,11 +31,11 @@ init([]) ->
|
|||
intensity => 10,
|
||||
period => 10},
|
||||
ChildSpecs = [
|
||||
#{id => emqx_data_bridge_monitor,
|
||||
start => {emqx_data_bridge_monitor, start_link, []},
|
||||
#{id => emqx_bridge_monitor,
|
||||
start => {emqx_bridge_monitor, start_link, []},
|
||||
restart => permanent,
|
||||
type => worker,
|
||||
modules => [emqx_data_bridge_monitor]}
|
||||
modules => [emqx_bridge_monitor]}
|
||||
],
|
||||
{ok, {SupFlags, ChildSpecs}}.
|
||||
|
|
@ -1,21 +0,0 @@
|
|||
.eunit
|
||||
deps
|
||||
*.o
|
||||
*.beam
|
||||
*.plt
|
||||
erl_crash.dump
|
||||
ebin/*.beam
|
||||
rel
|
||||
_build
|
||||
.concrete/DEV_MODE
|
||||
.rebar
|
||||
.erlang.mk
|
||||
data
|
||||
ebin
|
||||
emqx_bridge_mqtt.d
|
||||
*.rendered
|
||||
.rebar3/
|
||||
*.coverdata
|
||||
rebar.lock
|
||||
.DS_Store
|
||||
Mnesia.*/
|
|
@ -1,265 +0,0 @@
|
|||
# EMQ Bridge MQTT
|
||||
|
||||
The concept of **Bridge** means that EMQ X supports forwarding messages
|
||||
of one of its own topics to another MQTT Broker in some way.
|
||||
|
||||
**Bridge** differs from **Cluster** in that the bridge does not
|
||||
replicate the topic trie and routing tables and only forwards MQTT
|
||||
messages based on bridging rules.
|
||||
|
||||
At present, the bridging methods supported by EMQ X are as follows:
|
||||
|
||||
- RPC bridge: RPC Bridge only supports message forwarding and does not
|
||||
support subscribing to the topic of remote nodes to synchronize
|
||||
data;
|
||||
- MQTT Bridge: MQTT Bridge supports both forwarding and data
|
||||
synchronization through subscription topic.
|
||||
|
||||
These concepts are shown below:
|
||||
|
||||

|
||||
|
||||
In addition, the EMQ X message broker supports multi-node bridge mode interconnection
|
||||
|
||||
```
|
||||
--------- --------- ---------
|
||||
Publisher --> | Node1 | --Bridge Forward--> | Node2 | --Bridge Forward--> | Node3 | --> Subscriber
|
||||
--------- --------- ---------
|
||||
```
|
||||
|
||||
In EMQ X, bridge is configured by modifying `etc/emqx.conf`. EMQ X distinguishes between different bridges based on different names. E.g
|
||||
|
||||
```
|
||||
## Bridge address: node name for local bridge, host:port for remote.
|
||||
bridge.mqtt.aws.address = 127.0.0.1:1883
|
||||
```
|
||||
|
||||
This configuration declares a bridge named `aws` and specifies that it is bridged to the MQTT broker of 127.0.0.1:1883 by MQTT mode.
|
||||
|
||||
In case of creating multiple bridges, it is convenient to replicate all configuration items of the first bridge, and modify the bridge name and other configuration items if necessary (such as bridge.$name.address, where $name refers to the name of bridge)
|
||||
|
||||
The next two sections describe how to create a bridge in RPC and MQTT mode respectively and create a forwarding rule that forwards the messages from sensors. Assuming that two EMQ X nodes are running on two hosts:
|
||||
|
||||
|
||||
| Name | Node | MQTT Port |
|
||||
|------|-------------------|-----------|
|
||||
| emqx1| emqx1@192.168.1.1.| 1883 |
|
||||
| emqx2| emqx2@192.168.1.2 | 1883 |
|
||||
|
||||
|
||||
## EMQ X RPC Bridge Configuration
|
||||
|
||||
The following is the basic configuration of RPC bridging. A simplest RPC bridging only requires the following three items
|
||||
|
||||
```
|
||||
## Bridge Address: Use node name (nodename@host) for rpc bridging, and host:port for mqtt connection
|
||||
bridge.mqtt.emqx2.address = "emqx2@192.168.1.2"
|
||||
|
||||
## Forwarding topics of the message
|
||||
bridge.mqtt.emqx2.forwards = "sensor1/#,sensor2/#"
|
||||
|
||||
## bridged mountpoint
|
||||
bridge.mqtt.emqx2.mountpoint = "bridge/emqx2/${node}/"
|
||||
```
|
||||
|
||||
If the messages received by the local node emqx1 match the topic `sensor1/#` or `sensor2/#`, these messages will be forwarded to the `sensor1/#` or `sensor2/#` topic of the remote node emqx2.
|
||||
|
||||
`forwards` is used to specify topics. Messages on the local node whose topics match the `forwards` entries are forwarded to the remote node.
|
||||
|
||||
`mountpoint` is used to add a topic prefix when forwarding a message. To use `mountpoint`, the `forwards` directive must be set. In the above example, a message with the topic `sensor1/hello` received by the local node will be forwarded to the remote node with the topic `bridge/emqx2/emqx1@192.168.1.1/sensor1/hello`.
|
||||
|
||||
Limitations of RPC bridging:
|
||||
|
||||
1. The RPC bridge of emqx can only forward local messages to the remote node, and cannot synchronize the messages of the remote node to the local node;
|
||||
|
||||
2. RPC bridge can only bridge two EMQ X brokers together and cannot bridge an EMQ X broker to other MQTT brokers.
|
||||
|
||||
## EMQ X MQTT Bridge Configuration
|
||||
|
||||
EMQ X 3.0 officially introduced MQTT bridge, so that EMQ X can bridge any MQTT broker. Because of the characteristics of the MQTT protocol, EMQ X can subscribe to the remote mqtt broker's topic through MQTT bridge, and then synchronize the remote MQTT broker's message to the local.
|
||||
|
||||
EMQ X MQTT bridging principle: Create an MQTT client on the EMQ X broker, and connect this MQTT client to the remote MQTT broker. Therefore, in the MQTT bridge configuration, the following fields may be set for EMQ X to connect to the remote broker as an MQTT client
|
||||
|
||||
```
|
||||
## Bridge Address: Use node name for rpc bridging, use host:port for mqtt connection
|
||||
bridge.mqtt.emqx2.address = "192.168.1.2:1883"
|
||||
|
||||
## Bridged Protocol Version
|
||||
## Enumeration value: mqttv3 | mqttv4 | mqttv5
|
||||
bridge.mqtt.emqx2.proto_ver = "mqttv4"
|
||||
|
||||
## mqtt client's clientid
|
||||
bridge.mqtt.emqx2.clientid = "bridge_emq"
|
||||
|
||||
## mqtt client's clean_start field
|
||||
## Note: Some MQTT Brokers need to set the clean_start value as `true`
|
||||
bridge.mqtt.emqx2.clean_start = true
|
||||
|
||||
## mqtt client's username field
|
||||
bridge.mqtt.emqx2.username = "user"
|
||||
|
||||
## mqtt client's password field
|
||||
bridge.mqtt.emqx2.password = "passwd"
|
||||
|
||||
## Whether the mqtt client uses ssl to connect to a remote server or not
|
||||
bridge.mqtt.emqx2.ssl = off
|
||||
|
||||
## CA Certificate of Client SSL Connection (PEM format)
|
||||
bridge.mqtt.emqx2.cacertfile = "etc/certs/cacert.pem"
|
||||
|
||||
## SSL certificate of Client SSL connection
|
||||
bridge.mqtt.emqx2.certfile = "etc/certs/client-cert.pem"
|
||||
|
||||
## Key file of Client SSL connection
|
||||
bridge.mqtt.emqx2.keyfile = "etc/certs/client-key.pem"
|
||||
|
||||
## SSL encryption
|
||||
bridge.mqtt.emqx2.ciphers = "ECDHE-ECDSA-AES256-GCM-SHA384,ECDHE-RSA-AES256-GCM-SHA384"
|
||||
|
||||
## TTLS PSK password
|
||||
## Note 'listener.ssl.external.ciphers' and 'listener.ssl.external.psk_ciphers' cannot be configured at the same time
|
||||
##
|
||||
## See 'https://tools.ietf.org/html/rfc4279#section-2'.
|
||||
## bridge.mqtt.emqx2.psk_ciphers = "PSK-AES128-CBC-SHA,PSK-AES256-CBC-SHA,PSK-3DES-EDE-CBC-SHA,PSK-RC4-SHA"
|
||||
|
||||
## Client's heartbeat interval
|
||||
bridge.mqtt.emqx2.keepalive = 60s
|
||||
|
||||
## Supported TLS version
|
||||
bridge.mqtt.emqx2.tls_versions = "tlsv1.3,tlsv1.2,tlsv1.1,tlsv1"
|
||||
|
||||
## Forwarding topics of the message
|
||||
bridge.mqtt.emqx2.forwards = "sensor1/#,sensor2/#"
|
||||
|
||||
## Bridged mountpoint
|
||||
bridge.mqtt.emqx2.mountpoint = "bridge/emqx2/${node}/"
|
||||
|
||||
## Subscription topic for bridging
|
||||
bridge.mqtt.emqx2.subscription.1.topic = "cmd/topic1"
|
||||
|
||||
## Subscription qos for bridging
|
||||
bridge.mqtt.emqx2.subscription.1.qos = 1
|
||||
|
||||
## Subscription topic for bridging
|
||||
bridge.mqtt.emqx2.subscription.2.topic = "cmd/topic2"
|
||||
|
||||
## Subscription qos for bridging
|
||||
bridge.mqtt.emqx2.subscription.2.qos = 1
|
||||
|
||||
## Bridging reconnection interval
|
||||
## Default: 30s
|
||||
bridge.mqtt.emqx2.reconnect_interval = 30s
|
||||
|
||||
## QoS1 message retransmission interval
|
||||
bridge.mqtt.emqx2.retry_interval = 20s
|
||||
|
||||
## Inflight Size.
|
||||
bridge.mqtt.emqx2.max_inflight_batches = 32
|
||||
```
|
||||
|
||||
## Bridge Cache Configuration
|
||||
|
||||
The bridge of EMQ X has a message caching mechanism. The caching mechanism is applicable to both RPC bridging and MQTT bridging. When the bridge is disconnected (such as when the network connection is unstable), the messages with a topic specified in `forwards` can be cached to the local message queue. Once the bridge is restored, these messages are re-forwarded to the remote node. The configuration of the cache queue is as follows
|
||||
|
||||
```
|
||||
## emqx_bridge internal number of messages used for batch
|
||||
bridge.mqtt.emqx2.queue.batch_count_limit = 32
|
||||
|
||||
## emqx_bridge internal number of message bytes used for batch
|
||||
bridge.mqtt.emqx2.queue.batch_bytes_limit = 1000MB
|
||||
|
||||
## The path for placing replayq queue. If it is not specified, then replayq will run in `mem-only` mode and messages will not be cached on disk.
|
||||
bridge.mqtt.emqx2.queue.replayq_dir = data/emqx_emqx2_bridge/
|
||||
|
||||
## Replayq data segment size
|
||||
bridge.mqtt.emqx2.queue.replayq_seg_bytes = 10MB
|
||||
```
|
||||
|
||||
`bridge.mqtt.emqx2.queue.replayq_dir` is a configuration parameter for specifying the path of the bridge storage queue.
|
||||
|
||||
`bridge.mqtt.emqx2.queue.replayq_seg_bytes` is used to specify the size of the largest single file of the message queue that is cached on disk. If the message queue size exceeds the specified value, a new file is created to store the message queue.
|
||||
|
||||
## CLI for EMQ X Bridge MQTT
|
||||
|
||||
CLI for EMQ X Bridge MQTT:
|
||||
|
||||
``` bash
|
||||
$ cd emqx1/ && ./bin/emqx_ctl bridges
|
||||
bridges list # List bridges
|
||||
bridges start <Name> # Start a bridge
|
||||
bridges stop <Name> # Stop a bridge
|
||||
bridges forwards <Name> # Show a bridge forward topic
|
||||
bridges add-forward <Name> <Topic> # Add bridge forward topic
|
||||
bridges del-forward <Name> <Topic> # Delete bridge forward topic
|
||||
bridges subscriptions <Name> # Show a bridge subscriptions topic
|
||||
bridges add-subscription <Name> <Topic> <Qos> # Add bridge subscriptions topic
|
||||
```
|
||||
|
||||
List all bridge states
|
||||
|
||||
``` bash
|
||||
$ ./bin/emqx_ctl bridges list
|
||||
name: emqx status: Stopped
|
||||
```
|
||||
|
||||
Start the specified bridge
|
||||
|
||||
``` bash
|
||||
$ ./bin/emqx_ctl bridges start emqx
|
||||
Start bridge successfully.
|
||||
```
|
||||
|
||||
Stop the specified bridge
|
||||
|
||||
``` bash
|
||||
$ ./bin/emqx_ctl bridges stop emqx
|
||||
Stop bridge successfully.
|
||||
```
|
||||
List the forwarding topics for the specified bridge
|
||||
|
||||
``` bash
|
||||
$ ./bin/emqx_ctl bridges forwards emqx
|
||||
topic: topic1/#
|
||||
topic: topic2/#
|
||||
```
|
||||
|
||||
Add a forwarding topic for the specified bridge
|
||||
|
||||
``` bash
|
||||
$ ./bin/emqx_ctl bridges add-forwards emqx topic3/#
|
||||
Add-forward topic successfully.
|
||||
```
|
||||
|
||||
Delete the forwarding topic for the specified bridge
|
||||
|
||||
|
||||
``` bash
|
||||
$ ./bin/emqx_ctl bridges del-forwards emqx topic3/#
|
||||
Del-forward topic successfully.
|
||||
```
|
||||
|
||||
List subscriptions for the specified bridge
|
||||
|
||||
``` bash
|
||||
$ ./bin/emqx_ctl bridges subscriptions emqx
|
||||
topic: cmd/topic1, qos: 1
|
||||
topic: cmd/topic2, qos: 1
|
||||
```
|
||||
|
||||
Add a subscription topic for the specified bridge
|
||||
|
||||
``` bash
|
||||
$ ./bin/emqx_ctl bridges add-subscription emqx cmd/topic3 1
|
||||
Add-subscription topic successfully.
|
||||
```
|
||||
|
||||
Delete the subscription topic for the specified bridge
|
||||
|
||||
``` bash
|
||||
$ ./bin/emqx_ctl bridges del-subscription emqx cmd/topic3
|
||||
Del-subscription topic successfully.
|
||||
```
|
||||
|
||||
Note: In case of creating multiple bridges, it is convenient to replicate all configuration items of the first bridge, and modify the bridge name and other configuration items if necessary.
|
||||
|
|
@ -1,286 +0,0 @@
|
|||
|
||||
EMQ Bridge MQTT
|
||||
===============
|
||||
|
||||
The concept of **Bridge** means that EMQ X supports forwarding messages
|
||||
of one of its own topics to another MQTT Broker in some way.
|
||||
|
||||
**Bridge** differs from **Cluster** in that the bridge does not
|
||||
replicate the topic trie and routing tables and only forwards MQTT
|
||||
messages based on bridging rules.
|
||||
|
||||
At present, the bridging methods supported by EMQ X are as follows:
|
||||
|
||||
|
||||
* RPC bridge: RPC Bridge only supports message forwarding and does not
|
||||
support subscribing to the topic of remote nodes to synchronize
|
||||
data;
|
||||
* MQTT Bridge: MQTT Bridge supports both forwarding and data
|
||||
synchronization through subscription topic.
|
||||
|
||||
These concepts are shown below:
|
||||
|
||||
|
||||
.. image:: images/bridge.png
|
||||
:target: images/bridge.png
|
||||
:alt: bridge
|
||||
|
||||
|
||||
In addition, the EMQ X message broker supports multi-node bridge mode interconnection
|
||||
|
||||
.. code-block::
|
||||
|
||||
--------- --------- ---------
|
||||
Publisher --> | Node1 | --Bridge Forward--> | Node2 | --Bridge Forward--> | Node3 | --> Subscriber
|
||||
--------- --------- ---------
|
||||
|
||||
In EMQ X, bridge is configured by modifying ``etc/emqx.conf``. EMQ X distinguishes between different bridges based on different names. E.g
|
||||
|
||||
.. code-block::
|
||||
|
||||
## Bridge address: node name for local bridge, host:port for remote.
|
||||
bridge.mqtt.aws.address = "127.0.0.1:1883"
|
||||
|
||||
This configuration declares a bridge named ``aws`` and specifies that it is bridged to the MQTT broker of 127.0.0.1:1883 by MQTT mode.
|
||||
|
||||
In case of creating multiple bridges, it is convenient to replicate all configuration items of the first bridge, and modify the bridge name and other configuration items if necessary (such as bridge.$name.address, where $name refers to the name of bridge)
|
||||
|
||||
The next two sections describe how to create a bridge in RPC and MQTT mode respectively and create a forwarding rule that forwards the messages from sensors. Assuming that two EMQ X nodes are running on two hosts:
|
||||
|
||||
.. list-table::
|
||||
:header-rows: 1
|
||||
|
||||
* - Name
|
||||
- Node
|
||||
- MQTT Port
|
||||
* - emqx1
|
||||
- emqx1@192.168.1.1.
|
||||
- 1883
|
||||
* - emqx2
|
||||
- emqx2@192.168.1.2
|
||||
- 1883
|
||||
|
||||
|
||||
EMQ X RPC Bridge Configuration
|
||||
------------------------------
|
||||
|
||||
The following is the basic configuration of RPC bridging. A simplest RPC bridging only requires the following three items
|
||||
|
||||
.. code-block::
|
||||
|
||||
## Bridge Address: Use node name (nodename@host) for rpc bridging, and host:port for mqtt connection
|
||||
bridge.mqtt.emqx2.address = "emqx2@192.168.1.2"
|
||||
|
||||
## Forwarding topics of the message
|
||||
bridge.mqtt.emqx2.forwards = "sensor1/#,sensor2/#"
|
||||
|
||||
## bridged mountpoint
|
||||
bridge.mqtt.emqx2.mountpoint = "bridge/emqx2/${node}/"
|
||||
|
||||
If the messages received by the local node emqx1 match the topic ``sensor1/#`` or ``sensor2/#``\ , these messages will be forwarded to the ``sensor1/#`` or ``sensor2/#`` topic of the remote node emqx2.
|
||||
|
||||
``forwards`` is used to specify topics. Messages on the local node whose topics match the ``forwards`` entries are forwarded to the remote node.
|
||||
|
||||
``mountpoint`` is used to add a topic prefix when forwarding a message. To use ``mountpoint``\ , the ``forwards`` directive must be set. In the above example, a message with the topic ``sensor1/hello`` received by the local node will be forwarded to the remote node with the topic ``bridge/emqx2/emqx1@192.168.1.1/sensor1/hello``.
|
||||
|
||||
Limitations of RPC bridging:
|
||||
|
||||
|
||||
#.
|
||||
The RPC bridge of emqx can only forward local messages to the remote node, and cannot synchronize the messages of the remote node to the local node;
|
||||
|
||||
#.
|
||||
RPC bridge can only bridge two EMQ X brokers together and cannot bridge an EMQ X broker to other MQTT brokers.
|
||||
|
||||
EMQ X MQTT Bridge Configuration
|
||||
-------------------------------
|
||||
|
||||
EMQ X 3.0 officially introduced MQTT bridge, so that EMQ X can bridge any MQTT broker. Because of the characteristics of the MQTT protocol, EMQ X can subscribe to the remote mqtt broker's topic through MQTT bridge, and then synchronize the remote MQTT broker's message to the local.
|
||||
|
||||
EMQ X MQTT bridging principle: Create an MQTT client on the EMQ X broker, and connect this MQTT client to the remote MQTT broker. Therefore, in the MQTT bridge configuration, the following fields may be set for EMQ X to connect to the remote broker as an MQTT client
|
||||
|
||||
.. code-block::
|
||||
|
||||
## Bridge Address: Use node name for rpc bridging, use host:port for mqtt connection
|
||||
bridge.mqtt.emqx2.address = "192.168.1.2:1883"
|
||||
|
||||
## Bridged Protocol Version
|
||||
## Enumeration value: mqttv3 | mqttv4 | mqttv5
|
||||
bridge.mqtt.emqx2.proto_ver = "mqttv4"
|
||||
|
||||
## mqtt client's clientid
|
||||
bridge.mqtt.emqx2.clientid = "bridge_emq"
|
||||
|
||||
## mqtt client's clean_start field
|
||||
## Note: Some MQTT Brokers need to set the clean_start value as `true`
|
||||
bridge.mqtt.emqx2.clean_start = true
|
||||
|
||||
## mqtt client's username field
|
||||
bridge.mqtt.emqx2.username = "user"
|
||||
|
||||
## mqtt client's password field
|
||||
bridge.mqtt.emqx2.password = "passwd"
|
||||
|
||||
## Whether the mqtt client uses ssl to connect to a remote server or not
|
||||
bridge.mqtt.emqx2.ssl = off
|
||||
|
||||
## CA Certificate of Client SSL Connection (PEM format)
|
||||
bridge.mqtt.emqx2.cacertfile = "etc/certs/cacert.pem"
|
||||
|
||||
## SSL certificate of Client SSL connection
|
||||
bridge.mqtt.emqx2.certfile = "etc/certs/client-cert.pem"
|
||||
|
||||
## Key file of Client SSL connection
|
||||
bridge.mqtt.emqx2.keyfile = "etc/certs/client-key.pem"
|
||||
|
||||
## TTLS PSK password
|
||||
## Note 'listener.ssl.external.ciphers' and 'listener.ssl.external.psk_ciphers' cannot be configured at the same time
|
||||
##
|
||||
## See 'https://tools.ietf.org/html/rfc4279#section-2'.
|
||||
## bridge.mqtt.emqx2.psk_ciphers = "PSK-AES128-CBC-SHA,PSK-AES256-CBC-SHA,PSK-3DES-EDE-CBC-SHA,PSK-RC4-SHA"
|
||||
|
||||
## Client's heartbeat interval
|
||||
bridge.mqtt.emqx2.keepalive = 60s
|
||||
|
||||
## Supported TLS version
|
||||
bridge.mqtt.emqx2.tls_versions = "tlsv1.2"
|
||||
|
||||
## SSL encryption
|
||||
bridge.mqtt.emqx2.ciphers = "ECDHE-ECDSA-AES256-GCM-SHA384,ECDHE-RSA-AES256-GCM-SHA384"
|
||||
|
||||
## Forwarding topics of the message
|
||||
bridge.mqtt.emqx2.forwards = "sensor1/#,sensor2/#"
|
||||
|
||||
## Bridged mountpoint
|
||||
bridge.mqtt.emqx2.mountpoint = "bridge/emqx2/${node}/"
|
||||
|
||||
## Subscription topic for bridging
|
||||
bridge.mqtt.emqx2.subscription.1.topic = "cmd/topic1"
|
||||
|
||||
## Subscription qos for bridging
|
||||
bridge.mqtt.emqx2.subscription.1.qos = 1
|
||||
|
||||
## Subscription topic for bridging
|
||||
bridge.mqtt.emqx2.subscription.2.topic = "cmd/topic2"
|
||||
|
||||
## Subscription qos for bridging
|
||||
bridge.mqtt.emqx2.subscription.2.qos = 1
|
||||
|
||||
## Bridging reconnection interval
|
||||
## Default: 30s
|
||||
bridge.mqtt.emqx2.reconnect_interval = 30s
|
||||
|
||||
## QoS1 message retransmission interval
|
||||
bridge.mqtt.emqx2.retry_interval = 20s
|
||||
|
||||
## Inflight Size.
|
||||
bridge.mqtt.emqx2.max_inflight_batches = 32
|
||||
|
||||
Bridge Cache Configuration
|
||||
--------------------------
|
||||
|
||||
The bridge of EMQ X has a message caching mechanism. The caching mechanism is applicable to both RPC bridging and MQTT bridging. When the bridge is disconnected (such as when the network connection is unstable), the messages with a topic specified in ``forwards`` can be cached to the local message queue. Once the bridge is restored, these messages are re-forwarded to the remote node. The configuration of the cache queue is as follows
|
||||
|
||||
.. code-block::
|
||||
|
||||
## emqx_bridge internal number of messages used for batch
|
||||
bridge.mqtt.emqx2.queue.batch_count_limit = 32
|
||||
|
||||
## emqx_bridge internal number of message bytes used for batch
|
||||
bridge.mqtt.emqx2.queue.batch_bytes_limit = 1000MB
|
||||
|
||||
## The path for placing replayq queue. If it is not specified, then replayq will run in `mem-only` mode and messages will not be cached on disk.
|
||||
bridge.mqtt.emqx2.queue.replayq_dir = "data/emqx_emqx2_bridge/"
|
||||
|
||||
## Replayq data segment size
|
||||
bridge.mqtt.emqx2.queue.replayq_seg_bytes = 10MB
|
||||
|
||||
``bridge.mqtt.emqx2.queue.replayq_dir`` is a configuration parameter for specifying the path of the bridge storage queue.
|
||||
|
||||
``bridge.mqtt.emqx2.queue.replayq_seg_bytes`` is used to specify the size of the largest single file of the message queue that is cached on disk. If the message queue size exceeds the specified value, a new file is created to store the message queue.
|
||||
|
||||
CLI for EMQ X Bridge MQTT
|
||||
-------------------------
|
||||
|
||||
CLI for EMQ X Bridge MQTT:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
$ cd emqx1/ && ./bin/emqx_ctl bridges
|
||||
bridges list # List bridges
|
||||
bridges start <Name> # Start a bridge
|
||||
bridges stop <Name> # Stop a bridge
|
||||
bridges forwards <Name> # Show a bridge forward topic
|
||||
bridges add-forward <Name> <Topic> # Add bridge forward topic
|
||||
bridges del-forward <Name> <Topic> # Delete bridge forward topic
|
||||
bridges subscriptions <Name> # Show a bridge subscriptions topic
|
||||
bridges add-subscription <Name> <Topic> <Qos> # Add bridge subscriptions topic
|
||||
|
||||
List all bridge states
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
$ ./bin/emqx_ctl bridges list
|
||||
name: emqx status: Stopped
|
||||
|
||||
Start the specified bridge
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
$ ./bin/emqx_ctl bridges start emqx
|
||||
Start bridge successfully.
|
||||
|
||||
Stop the specified bridge
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
$ ./bin/emqx_ctl bridges stop emqx
|
||||
Stop bridge successfully.
|
||||
|
||||
List the forwarding topics for the specified bridge
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
$ ./bin/emqx_ctl bridges forwards emqx
|
||||
topic: topic1/#
|
||||
topic: topic2/#
|
||||
|
||||
Add a forwarding topic for the specified bridge
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
$ ./bin/emqx_ctl bridges add-forwards emqx topic3/#
|
||||
Add-forward topic successfully.
|
||||
|
||||
Delete the forwarding topic for the specified bridge
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
$ ./bin/emqx_ctl bridges del-forwards emqx topic3/#
|
||||
Del-forward topic successfully.
|
||||
|
||||
List subscriptions for the specified bridge
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
$ ./bin/emqx_ctl bridges subscriptions emqx
|
||||
topic: cmd/topic1, qos: 1
|
||||
topic: cmd/topic2, qos: 1
|
||||
|
||||
Add a subscription topic for the specified bridge
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
$ ./bin/emqx_ctl bridges add-subscription emqx cmd/topic3 1
|
||||
Add-subscription topic successfully.
|
||||
|
||||
Delete the subscription topic for the specified bridge
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
$ ./bin/emqx_ctl bridges del-subscription emqx cmd/topic3
|
||||
Del-subscription topic successfully.
|
||||
|
||||
Note: In case of creating multiple bridges, it is convenient to replicate all configuration items of the first bridge, and modify the bridge name and other configuration items if necessary.
|
||||
|
Binary file not shown.
Before Width: | Height: | Size: 144 KiB |
|
@ -1,56 +0,0 @@
|
|||
##====================================================================
|
||||
## Configuration for EMQ X MQTT Broker Bridge
|
||||
##====================================================================
|
||||
|
||||
bridge_mqtt: [
|
||||
# {
|
||||
# name: "mqtt1"
|
||||
# start_type: auto
|
||||
# forwards: ["test/#"],
|
||||
# forward_mountpoint: ""
|
||||
# reconnect_interval: "30s"
|
||||
# batch_size: 100
|
||||
# queue {
|
||||
# replayq_dir: "{{ platform_data_dir }}/replayq/bridge_mqtt/"
|
||||
# replayq_seg_bytes: "100MB"
|
||||
# replayq_offload_mode: false
|
||||
# replayq_max_total_bytes: "1GB"
|
||||
# },
|
||||
# config {
|
||||
# conn_type: mqtt
|
||||
# address: "127.0.0.1:1883"
|
||||
# proto_ver: v4
|
||||
# bridge_mode: true
|
||||
# clientid: "client1"
|
||||
# clean_start: true
|
||||
# username: "username1"
|
||||
# password: ""
|
||||
# keepalive: 300
|
||||
# subscriptions: [{
|
||||
# topic: "t/#"
|
||||
# qos: 1
|
||||
# }]
|
||||
# receive_mountpoint: ""
|
||||
# retry_interval: "30s"
|
||||
# max_inflight: 32
|
||||
# }
|
||||
# },
|
||||
# {
|
||||
# name: "rpc1"
|
||||
# start_type: auto
|
||||
# forwards: ["test/#"],
|
||||
# forward_mountpoint: ""
|
||||
# reconnect_interval: "30s"
|
||||
# batch_size: 100
|
||||
# queue {
|
||||
# replayq_dir: "{{ platform_data_dir }}/replayq/bridge_mqtt/"
|
||||
# replayq_seg_bytes: "100MB"
|
||||
# replayq_offload_mode: false
|
||||
# replayq_max_total_bytes: "1GB"
|
||||
# },
|
||||
# config {
|
||||
# conn_type: rpc
|
||||
# node: "emqx@127.0.0.1"
|
||||
# }
|
||||
# }
|
||||
]
|
|
@ -1,18 +0,0 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-define(APP, emqx_bridge_mqtt).
|
||||
|
|
@ -1,19 +0,0 @@
|
|||
{deps, []}.
|
||||
{edoc_opts, [{preprocess, true}]}.
|
||||
{erl_opts, [warn_unused_vars,
|
||||
warn_shadow_vars,
|
||||
warn_unused_import,
|
||||
warn_obsolete_guard,
|
||||
debug_info]}.
|
||||
|
||||
{xref_checks, [undefined_function_calls, undefined_functions,
|
||||
locals_not_used, deprecated_function_calls,
|
||||
warnings_as_errors, deprecated_functions]}.
|
||||
{cover_enabled, true}.
|
||||
{cover_opts, [verbose]}.
|
||||
{cover_export_enabled, true}.
|
||||
|
||||
{shell, [
|
||||
% {config, "config/sys.config"},
|
||||
{apps, [emqx, emqx_bridge_mqtt]}
|
||||
]}.
|
|
@ -1,14 +0,0 @@
|
|||
{application, emqx_bridge_mqtt,
|
||||
[{description, "EMQ X Bridge to MQTT Broker"},
|
||||
{vsn, "5.0.0"}, % strict semver, bump manually!
|
||||
{modules, []},
|
||||
{registered, []},
|
||||
{applications, [kernel,stdlib,replayq,emqtt,emqx]},
|
||||
{mod, {emqx_bridge_mqtt_app, []}},
|
||||
{env, []},
|
||||
{licenses, ["Apache-2.0"]},
|
||||
{maintainers, ["EMQ X Team <contact@emqx.io>"]},
|
||||
{links, [{"Homepage", "https://emqx.io/"},
|
||||
{"Github", "https://github.com/emqx/emqx-bridge-mqtt"}
|
||||
]}
|
||||
]}.
|
|
@ -1,31 +0,0 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-module(emqx_bridge_mqtt_app).
|
||||
|
||||
-behaviour(application).
|
||||
|
||||
-export([start/2, stop/1]).
|
||||
|
||||
start(_StartType, _StartArgs) ->
|
||||
emqx_ctl:register_command(bridges, {emqx_bridge_mqtt_cli, cli}, []),
|
||||
emqx_bridge_worker:register_metrics(),
|
||||
emqx_bridge_mqtt_sup:start_link().
|
||||
|
||||
stop(_State) ->
|
||||
emqx_ctl:unregister_command(bridges),
|
||||
ok.
|
||||
|
|
@ -1,92 +0,0 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-module(emqx_bridge_mqtt_cli).
|
||||
|
||||
-include("emqx_bridge_mqtt.hrl").
|
||||
|
||||
-import(lists, [foreach/2]).
|
||||
|
||||
-export([cli/1]).
|
||||
|
||||
cli(["list"]) ->
|
||||
foreach(fun({Name, State0}) ->
|
||||
State = case State0 of
|
||||
connected -> <<"Running">>;
|
||||
_ -> <<"Stopped">>
|
||||
end,
|
||||
emqx_ctl:print("name: ~s status: ~s~n", [Name, State])
|
||||
end, emqx_bridge_mqtt_sup:bridges());
|
||||
|
||||
cli(["start", Name]) ->
|
||||
emqx_ctl:print("~s.~n", [try emqx_bridge_worker:ensure_started(Name) of
|
||||
ok -> <<"Start bridge successfully">>;
|
||||
connected -> <<"Bridge already started">>;
|
||||
_ -> <<"Start bridge failed">>
|
||||
catch
|
||||
_Error:_Reason ->
|
||||
<<"Start bridge failed">>
|
||||
end]);
|
||||
|
||||
cli(["stop", Name]) ->
|
||||
emqx_ctl:print("~s.~n", [try emqx_bridge_worker:ensure_stopped(Name) of
|
||||
ok -> <<"Stop bridge successfully">>;
|
||||
_ -> <<"Stop bridge failed">>
|
||||
catch
|
||||
_Error:_Reason ->
|
||||
<<"Stop bridge failed">>
|
||||
end]);
|
||||
|
||||
cli(["forwards", Name]) ->
|
||||
foreach(fun(Topic) ->
|
||||
emqx_ctl:print("topic: ~s~n", [Topic])
|
||||
end, emqx_bridge_worker:get_forwards(Name));
|
||||
|
||||
cli(["add-forward", Name, Topic]) ->
|
||||
ok = emqx_bridge_worker:ensure_forward_present(Name, iolist_to_binary(Topic)),
|
||||
emqx_ctl:print("Add-forward topic successfully.~n");
|
||||
|
||||
cli(["del-forward", Name, Topic]) ->
|
||||
ok = emqx_bridge_worker:ensure_forward_absent(Name, iolist_to_binary(Topic)),
|
||||
emqx_ctl:print("Del-forward topic successfully.~n");
|
||||
|
||||
cli(["subscriptions", Name]) ->
|
||||
foreach(fun({Topic, Qos}) ->
|
||||
emqx_ctl:print("topic: ~s, qos: ~p~n", [Topic, Qos])
|
||||
end, emqx_bridge_worker:get_subscriptions(Name));
|
||||
|
||||
cli(["add-subscription", Name, Topic, Qos]) ->
|
||||
case emqx_bridge_worker:ensure_subscription_present(Name, Topic, list_to_integer(Qos)) of
|
||||
ok -> emqx_ctl:print("Add-subscription topic successfully.~n");
|
||||
{error, Reason} -> emqx_ctl:print("Add-subscription failed reason: ~p.~n", [Reason])
|
||||
end;
|
||||
|
||||
cli(["del-subscription", Name, Topic]) ->
|
||||
ok = emqx_bridge_worker:ensure_subscription_absent(Name, Topic),
|
||||
emqx_ctl:print("Del-subscription topic successfully.~n");
|
||||
|
||||
cli(_) ->
|
||||
emqx_ctl:usage([{"bridges list", "List bridges"},
|
||||
{"bridges start <Name>", "Start a bridge"},
|
||||
{"bridges stop <Name>", "Stop a bridge"},
|
||||
{"bridges forwards <Name>", "Show a bridge forward topic"},
|
||||
{"bridges add-forward <Name> <Topic>", "Add bridge forward topic"},
|
||||
{"bridges del-forward <Name> <Topic>", "Delete bridge forward topic"},
|
||||
{"bridges subscriptions <Name>", "Show a bridge subscriptions topic"},
|
||||
{"bridges add-subscription <Name> <Topic> <Qos>", "Add bridge subscriptions topic"},
|
||||
{"bridges del-subscription <Name> <Topic>", "Delete bridge subscriptions topic"}]).
|
||||
|
||||
|
|
@ -1,87 +0,0 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-module(emqx_bridge_mqtt_schema).
|
||||
|
||||
-include_lib("typerefl/include/types.hrl").
|
||||
|
||||
-behaviour(hocon_schema).
|
||||
|
||||
-export([ roots/0
|
||||
, fields/1]).
|
||||
|
||||
roots() -> [array("bridge_mqtt")].
|
||||
|
||||
array(Name) -> {Name, hoconsc:array(hoconsc:ref(Name))}.
|
||||
|
||||
fields("bridge_mqtt") ->
|
||||
[ {name, emqx_schema:t(string(), undefined, true)}
|
||||
, {start_type, fun start_type/1}
|
||||
, {forwards, fun forwards/1}
|
||||
, {forward_mountpoint, emqx_schema:t(string())}
|
||||
, {reconnect_interval, emqx_schema:t(emqx_schema:duration_ms(), undefined, "30s")}
|
||||
, {batch_size, emqx_schema:t(integer(), undefined, 100)}
|
||||
, {queue, emqx_schema:t(hoconsc:ref(?MODULE, "queue"))}
|
||||
, {config, hoconsc:union([hoconsc:ref(?MODULE, "mqtt"), hoconsc:ref(?MODULE, "rpc")])}
|
||||
];
|
||||
|
||||
fields("mqtt") ->
|
||||
[ {conn_type, fun conn_type/1}
|
||||
, {address, emqx_schema:t(string(), undefined, "127.0.0.1:1883")}
|
||||
, {proto_ver, fun proto_ver/1}
|
||||
, {bridge_mode, emqx_schema:t(boolean(), undefined, true)}
|
||||
, {clientid, emqx_schema:t(string())}
|
||||
, {username, emqx_schema:t(string())}
|
||||
, {password, emqx_schema:t(string())}
|
||||
, {clean_start, emqx_schema:t(boolean(), undefined, true)}
|
||||
, {keepalive, emqx_schema:t(integer(), undefined, 300)}
|
||||
, {subscriptions, hoconsc:array("subscriptions")}
|
||||
, {receive_mountpoint, emqx_schema:t(string())}
|
||||
, {retry_interval, emqx_schema:t(emqx_schema:duration_ms(), undefined, "30s")}
|
||||
, {max_inflight, emqx_schema:t(integer(), undefined, 32)}
|
||||
];
|
||||
|
||||
fields("rpc") ->
|
||||
[ {conn_type, fun conn_type/1}
|
||||
, {node, emqx_schema:t(atom(), undefined, 'emqx@127.0.0.1')}
|
||||
];
|
||||
|
||||
fields("subscriptions") ->
|
||||
[ {topic, #{type => binary(), nullable => false}}
|
||||
, {qos, emqx_schema:t(integer(), undefined, 1)}
|
||||
];
|
||||
|
||||
fields("queue") ->
|
||||
[ {replayq_dir, hoconsc:union([boolean(), string()])}
|
||||
, {replayq_seg_bytes, emqx_schema:t(emqx_schema:bytesize(), undefined, "100MB")}
|
||||
, {replayq_offload_mode, emqx_schema:t(boolean(), undefined, false)}
|
||||
, {replayq_max_total_bytes, emqx_schema:t(emqx_schema:bytesize(), undefined, "1024MB")}
|
||||
].
|
||||
|
||||
conn_type(type) -> hoconsc:enum([mqtt, rpc]);
|
||||
conn_type(_) -> undefined.
|
||||
|
||||
proto_ver(type) -> hoconsc:enum([v3, v4, v5]);
|
||||
proto_ver(default) -> v4;
|
||||
proto_ver(_) -> undefined.
|
||||
|
||||
start_type(type) -> hoconsc:enum([auto, manual]);
|
||||
start_type(default) -> auto;
|
||||
start_type(_) -> undefined.
|
||||
|
||||
forwards(type) -> hoconsc:array(binary());
|
||||
forwards(default) -> [];
|
||||
forwards(_) -> undefined.
|
|
@ -1,72 +0,0 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-module(emqx_bridge_mqtt_sup).
|
||||
-behaviour(supervisor).
|
||||
|
||||
-include("emqx_bridge_mqtt.hrl").
|
||||
-include_lib("emqx/include/logger.hrl").
|
||||
|
||||
|
||||
%% APIs
|
||||
-export([ start_link/0
|
||||
]).
|
||||
|
||||
-export([ create_bridge/1
|
||||
, drop_bridge/1
|
||||
, bridges/0
|
||||
]).
|
||||
|
||||
%% supervisor callbacks
|
||||
-export([init/1]).
|
||||
|
||||
-define(WORKER_SUP, emqx_bridge_worker_sup).
|
||||
|
||||
start_link() ->
|
||||
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
|
||||
|
||||
init([]) ->
|
||||
BridgesConf = emqx:get_config([?APP, bridges], []),
|
||||
BridgeSpec = lists:map(fun bridge_spec/1, BridgesConf),
|
||||
SupFlag = #{strategy => one_for_one,
|
||||
intensity => 100,
|
||||
period => 10},
|
||||
{ok, {SupFlag, BridgeSpec}}.
|
||||
|
||||
bridge_spec(Config) ->
|
||||
Name = list_to_atom(maps:get(name, Config)),
|
||||
#{id => Name,
|
||||
start => {emqx_bridge_worker, start_link, [Config]},
|
||||
restart => permanent,
|
||||
shutdown => 5000,
|
||||
type => worker,
|
||||
modules => [emqx_bridge_worker]}.
|
||||
|
||||
-spec(bridges() -> [{node(), map()}]).
|
||||
bridges() ->
|
||||
[{Name, emqx_bridge_worker:status(Name)} || {Name, _Pid, _, _} <- supervisor:which_children(?MODULE)].
|
||||
|
||||
create_bridge(Config) ->
|
||||
supervisor:start_child(?MODULE, bridge_spec(Config)).
|
||||
|
||||
drop_bridge(Name) ->
|
||||
case supervisor:terminate_child(?MODULE, Name) of
|
||||
ok ->
|
||||
supervisor:delete_child(?MODULE, Name);
|
||||
{error, Error} ->
|
||||
?LOG(error, "Delete bridge failed, error : ~p", [Error]),
|
||||
{error, Error}
|
||||
end.
|
|
@ -1,99 +0,0 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-module(emqx_bridge_msg).
|
||||
|
||||
-export([ to_binary/1
|
||||
, from_binary/1
|
||||
, to_export/3
|
||||
, to_broker_msgs/1
|
||||
, to_broker_msg/1
|
||||
, to_broker_msg/2
|
||||
, estimate_size/1
|
||||
]).
|
||||
|
||||
-export_type([msg/0]).
|
||||
|
||||
-include_lib("emqx/include/emqx.hrl").
|
||||
|
||||
-include_lib("emqx_bridge_mqtt/include/emqx_bridge_mqtt.hrl").
|
||||
-include_lib("emqtt/include/emqtt.hrl").
|
||||
|
||||
|
||||
-type msg() :: emqx_types:message().
|
||||
-type exp_msg() :: emqx_types:message() | #mqtt_msg{}.
|
||||
|
||||
%% @doc Make export format:
|
||||
%% 1. Mount topic to a prefix
|
||||
%% 2. Fix QoS to 1
|
||||
%% @end
|
||||
%% Shame that we have to know the callback module here
|
||||
%% would be great if we can get rid of #mqtt_msg{} record
|
||||
%% and use #message{} in all places.
|
||||
-spec to_export(emqx_bridge_rpc | emqx_bridge_worker,
|
||||
undefined | binary(), msg()) -> exp_msg().
|
||||
to_export(emqx_bridge_mqtt, Mountpoint,
|
||||
#message{topic = Topic,
|
||||
payload = Payload,
|
||||
flags = Flags,
|
||||
qos = QoS
|
||||
}) ->
|
||||
Retain = maps:get(retain, Flags, false),
|
||||
#mqtt_msg{qos = QoS,
|
||||
retain = Retain,
|
||||
topic = topic(Mountpoint, Topic),
|
||||
props = #{},
|
||||
payload = Payload};
|
||||
to_export(_Module, Mountpoint,
|
||||
#message{topic = Topic} = Msg) ->
|
||||
Msg#message{topic = topic(Mountpoint, Topic)}.
|
||||
|
||||
%% @doc Make `binary()' in order to make iodata to be persisted on disk.
|
||||
-spec to_binary(msg()) -> binary().
|
||||
to_binary(Msg) -> term_to_binary(Msg).
|
||||
|
||||
%% @doc Unmarshal binary into `msg()'.
|
||||
-spec from_binary(binary()) -> msg().
|
||||
from_binary(Bin) -> binary_to_term(Bin).
|
||||
|
||||
%% @doc Estimate the size of a message.
|
||||
%% Count only the topic length + payload size
|
||||
-spec estimate_size(msg()) -> integer().
|
||||
estimate_size(#message{topic = Topic, payload = Payload}) ->
|
||||
size(Topic) + size(Payload).
|
||||
|
||||
%% @doc By message/batch receiver, transform received batch into
|
||||
%% messages to deliver to local brokers.
|
||||
to_broker_msgs(Batch) -> lists:map(fun to_broker_msg/1, Batch).
|
||||
|
||||
to_broker_msg(#message{} = Msg) ->
|
||||
%% internal format from another EMQX node via rpc
|
||||
Msg;
|
||||
to_broker_msg(Msg) ->
|
||||
to_broker_msg(Msg, undefined).
|
||||
to_broker_msg(#{qos := QoS, dup := Dup, retain := Retain, topic := Topic,
|
||||
properties := Props, payload := Payload}, Mountpoint) ->
|
||||
%% published from remote node over a MQTT connection
|
||||
set_headers(Props,
|
||||
emqx_message:set_flags(#{dup => Dup, retain => Retain},
|
||||
emqx_message:make(bridge, QoS, topic(Mountpoint, Topic), Payload))).
|
||||
|
||||
set_headers(undefined, Msg) ->
|
||||
Msg;
|
||||
set_headers(Val, Msg) ->
|
||||
emqx_message:set_headers(Val, Msg).
|
||||
topic(undefined, Topic) -> Topic;
|
||||
topic(Prefix, Topic) -> emqx_topic:prepend(Prefix, Topic).
|
|
@ -1,95 +0,0 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
%% @doc This module implements EMQX Bridge transport layer based on gen_rpc.
|
||||
|
||||
-module(emqx_bridge_rpc).
|
||||
|
||||
-export([ start/1
|
||||
, send/2
|
||||
, stop/1
|
||||
]).
|
||||
|
||||
%% Internal exports
|
||||
-export([ handle_send/1
|
||||
, heartbeat/2
|
||||
]).
|
||||
|
||||
-type ack_ref() :: emqx_bridge_worker:ack_ref().
|
||||
-type batch() :: emqx_bridge_worker:batch().
|
||||
-define(HEARTBEAT_INTERVAL, timer:seconds(1)).
|
||||
|
||||
-define(RPC, emqx_rpc).
|
||||
|
||||
start(#{node := RemoteNode}) ->
|
||||
case poke(RemoteNode) of
|
||||
ok ->
|
||||
Pid = proc_lib:spawn_link(?MODULE, heartbeat, [self(), RemoteNode]),
|
||||
{ok, #{client_pid => Pid, remote_node => RemoteNode}};
|
||||
Error ->
|
||||
Error
|
||||
end.
|
||||
|
||||
stop(#{client_pid := Pid}) when is_pid(Pid) ->
|
||||
Ref = erlang:monitor(process, Pid),
|
||||
unlink(Pid),
|
||||
Pid ! stop,
|
||||
receive
|
||||
{'DOWN', Ref, process, Pid, _Reason} ->
|
||||
ok
|
||||
after
|
||||
1000 ->
|
||||
exit(Pid, kill)
|
||||
end,
|
||||
ok.
|
||||
|
||||
%% @doc Callback for `emqx_bridge_connect' behaviour
|
||||
-spec send(#{remote_node := atom(), _ => _}, batch()) -> {ok, ack_ref()} | {error, any()}.
|
||||
send(#{remote_node := RemoteNode}, Batch) ->
|
||||
case ?RPC:call(RemoteNode, ?MODULE, handle_send, [Batch]) of
|
||||
ok ->
|
||||
Ref = make_ref(),
|
||||
self() ! {batch_ack, Ref},
|
||||
{ok, Ref};
|
||||
{badrpc, Reason} -> {error, Reason}
|
||||
end.
|
||||
|
||||
%% @doc Handle send on receiver side.
|
||||
-spec handle_send(batch()) -> ok.
|
||||
handle_send(Batch) ->
|
||||
lists:foreach(fun(Msg) -> emqx_broker:publish(Msg) end, Batch).
|
||||
|
||||
%% @hidden Heartbeat loop
|
||||
heartbeat(Parent, RemoteNode) ->
|
||||
Interval = ?HEARTBEAT_INTERVAL,
|
||||
receive
|
||||
stop -> exit(normal)
|
||||
after
|
||||
Interval ->
|
||||
case poke(RemoteNode) of
|
||||
ok ->
|
||||
?MODULE:heartbeat(Parent, RemoteNode);
|
||||
{error, Reason} ->
|
||||
Parent ! {disconnected, self(), Reason},
|
||||
exit(normal)
|
||||
end
|
||||
end.
|
||||
|
||||
poke(RemoteNode) ->
|
||||
case ?RPC:call(RemoteNode, erlang, node, []) of
|
||||
RemoteNode -> ok;
|
||||
{badrpc, Reason} -> {error, Reason}
|
||||
end.
|
|
@ -1,42 +0,0 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-module(emqx_bridge_rpc_tests).
|
||||
-include_lib("eunit/include/eunit.hrl").
|
||||
|
||||
send_and_ack_test() ->
|
||||
%% delegate from emqx_rpc to rpc for unit test
|
||||
meck:new(emqx_rpc, [passthrough, no_history]),
|
||||
meck:expect(emqx_rpc, call, 4,
|
||||
fun(Node, Module, Fun, Args) ->
|
||||
rpc:call(Node, Module, Fun, Args)
|
||||
end),
|
||||
meck:expect(emqx_rpc, cast, 4,
|
||||
fun(Node, Module, Fun, Args) ->
|
||||
rpc:cast(Node, Module, Fun, Args)
|
||||
end),
|
||||
meck:new(emqx_bridge_worker, [passthrough, no_history]),
|
||||
try
|
||||
{ok, #{client_pid := Pid, remote_node := Node}} = emqx_bridge_rpc:start(#{node => node()}),
|
||||
{ok, Ref} = emqx_bridge_rpc:send(#{remote_node => Node}, []),
|
||||
receive
|
||||
{batch_ack, Ref} ->
|
||||
ok
|
||||
end,
|
||||
ok = emqx_bridge_rpc:stop( #{client_pid => Pid})
|
||||
after
|
||||
meck:unload(emqx_rpc)
|
||||
end.
|
|
@ -1,38 +0,0 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-module(emqx_bridge_stub_conn).
|
||||
|
||||
-export([ start/1
|
||||
, send/2
|
||||
, stop/1
|
||||
]).
|
||||
|
||||
-type ack_ref() :: emqx_bridge_worker:ack_ref().
|
||||
-type batch() :: emqx_bridge_worker:batch().
|
||||
|
||||
start(#{client_pid := Pid} = Cfg) ->
|
||||
Pid ! {self(), ?MODULE, ready},
|
||||
{ok, Cfg}.
|
||||
|
||||
stop(_) -> ok.
|
||||
|
||||
%% @doc Callback for `emqx_bridge_connect' behaviour
|
||||
-spec send(_, batch()) -> {ok, ack_ref()} | {error, any()}.
|
||||
send(#{client_pid := Pid}, Batch) ->
|
||||
Ref = make_ref(),
|
||||
Pid ! {stub_message, self(), Ref, Batch},
|
||||
{ok, Ref}.
|
|
@ -1,372 +0,0 @@
|
|||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-module(emqx_bridge_worker_SUITE).
|
||||
|
||||
-compile(export_all).
|
||||
-compile(nowarn_export_all).
|
||||
|
||||
-include_lib("eunit/include/eunit.hrl").
|
||||
-include_lib("common_test/include/ct.hrl").
|
||||
-include_lib("emqx/include/emqx_mqtt.hrl").
|
||||
-include_lib("emqx/include/emqx.hrl").
|
||||
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
|
||||
|
||||
-define(wait(For, Timeout), emqx_ct_helpers:wait_for(?FUNCTION_NAME, ?LINE, fun() -> For end, Timeout)).
|
||||
|
||||
-define(SNK_WAIT(WHAT), ?assertMatch({ok, _}, ?block_until(#{?snk_kind := WHAT}, 2000, 1000))).
|
||||
|
||||
receive_messages(Count) ->
|
||||
receive_messages(Count, []).
|
||||
|
||||
receive_messages(0, Msgs) ->
|
||||
Msgs;
|
||||
receive_messages(Count, Msgs) ->
|
||||
receive
|
||||
{publish, Msg} ->
|
||||
receive_messages(Count-1, [Msg|Msgs]);
|
||||
_Other ->
|
||||
receive_messages(Count, Msgs)
|
||||
after 1000 ->
|
||||
Msgs
|
||||
end.
|
||||
|
||||
all() ->
|
||||
lists:filtermap(
|
||||
fun({FunName, _Arity}) ->
|
||||
case atom_to_list(FunName) of
|
||||
"t_" ++ _ -> {true, FunName};
|
||||
_ -> false
|
||||
end
|
||||
end,
|
||||
?MODULE:module_info(exports)).
|
||||
|
||||
init_per_suite(Config) ->
|
||||
case node() of
|
||||
nonode@nohost -> net_kernel:start(['emqx@127.0.0.1', longnames]);
|
||||
_ -> ok
|
||||
end,
|
||||
emqx_ct_helpers:start_apps([emqx_bridge_mqtt]),
|
||||
emqx_logger:set_log_level(error),
|
||||
[{log_level, error} | Config].
|
||||
|
||||
end_per_suite(_Config) ->
|
||||
emqx_ct_helpers:stop_apps([emqx_bridge_mqtt]).
|
||||
|
||||
init_per_testcase(_TestCase, Config) ->
|
||||
ok = snabbkaffe:start_trace(),
|
||||
Config.
|
||||
|
||||
end_per_testcase(_TestCase, _Config) ->
|
||||
ok = snabbkaffe:stop().
|
||||
|
||||
t_rpc_mngr(_Config) ->
|
||||
Name = "rpc_name",
|
||||
Cfg = #{
|
||||
name => Name,
|
||||
forwards => [<<"mngr">>],
|
||||
forward_mountpoint => <<"forwarded">>,
|
||||
start_type => auto,
|
||||
config => #{
|
||||
conn_type => rpc,
|
||||
node => node()
|
||||
}
|
||||
},
|
||||
{ok, Pid} = emqx_bridge_mqtt_sup:create_bridge(Cfg),
|
||||
?assertEqual([<<"mngr">>], emqx_bridge_worker:get_forwards(Name)),
|
||||
?assertEqual(ok, emqx_bridge_worker:ensure_forward_present(Name, "mngr")),
|
||||
?assertEqual(ok, emqx_bridge_worker:ensure_forward_present(Name, "mngr2")),
|
||||
?assertEqual([<<"mngr2">>, <<"mngr">>], emqx_bridge_worker:get_forwards(Name)),
|
||||
?assertEqual(ok, emqx_bridge_worker:ensure_forward_absent(Name, "mngr2")),
|
||||
?assertEqual(ok, emqx_bridge_worker:ensure_forward_absent(Name, "mngr3")),
|
||||
?assertEqual([<<"mngr">>], emqx_bridge_worker:get_forwards(Name)),
|
||||
?assertEqual({error, no_remote_subscription_support},
|
||||
emqx_bridge_worker:ensure_subscription_present(Name, <<"t">>, 0)),
|
||||
?assertEqual({error, no_remote_subscription_support},
|
||||
emqx_bridge_worker:ensure_subscription_absent(Name, <<"t">>)),
|
||||
ok = emqx_bridge_worker:stop(Pid).
|
||||
|
||||
t_mqtt_mngr(_Config) ->
|
||||
Name = "mqtt_name",
|
||||
Cfg = #{
|
||||
name => Name,
|
||||
forwards => [<<"mngr">>],
|
||||
forward_mountpoint => <<"forwarded">>,
|
||||
start_type => auto,
|
||||
config => #{
|
||||
address => "127.0.0.1:1883",
|
||||
conn_type => mqtt,
|
||||
clientid => <<"client1">>,
|
||||
keepalive => 300,
|
||||
subscriptions => [#{topic => <<"t/#">>, qos => 1}]
|
||||
}
|
||||
},
|
||||
{ok, Pid} = emqx_bridge_mqtt_sup:create_bridge(Cfg),
|
||||
?assertEqual([<<"mngr">>], emqx_bridge_worker:get_forwards(Name)),
|
||||
?assertEqual(ok, emqx_bridge_worker:ensure_forward_present(Name, "mngr")),
|
||||
?assertEqual(ok, emqx_bridge_worker:ensure_forward_present(Name, "mngr2")),
|
||||
?assertEqual([<<"mngr2">>, <<"mngr">>], emqx_bridge_worker:get_forwards(Name)),
|
||||
?assertEqual(ok, emqx_bridge_worker:ensure_forward_absent(Name, "mngr2")),
|
||||
?assertEqual(ok, emqx_bridge_worker:ensure_forward_absent(Name, "mngr3")),
|
||||
?assertEqual([<<"mngr">>], emqx_bridge_worker:get_forwards(Name)),
|
||||
?assertEqual(ok, emqx_bridge_worker:ensure_subscription_present(Name, <<"t">>, 0)),
|
||||
?assertEqual(ok, emqx_bridge_worker:ensure_subscription_absent(Name, <<"t">>)),
|
||||
?assertEqual([{<<"t/#">>,1}], emqx_bridge_worker:get_subscriptions(Name)),
|
||||
ok = emqx_bridge_worker:stop(Pid).
|
||||
|
||||
%% A loopback RPC to local node
|
||||
t_rpc(_Config) ->
|
||||
Name = "rpc",
|
||||
Cfg = #{
|
||||
name => Name,
|
||||
forwards => [<<"t_rpc/#">>],
|
||||
forward_mountpoint => <<"forwarded">>,
|
||||
start_type => auto,
|
||||
config => #{
|
||||
conn_type => rpc,
|
||||
node => node()
|
||||
}
|
||||
},
|
||||
{ok, Pid} = emqx_bridge_mqtt_sup:create_bridge(Cfg),
|
||||
{ok, ConnPid} = emqtt:start_link([{clientid, <<"ClientId">>}]),
|
||||
{ok, _Props} = emqtt:connect(ConnPid),
|
||||
{ok, _Props, [1]} = emqtt:subscribe(ConnPid, {<<"forwarded/t_rpc/one">>, ?QOS_1}),
|
||||
timer:sleep(100),
|
||||
{ok, _PacketId} = emqtt:publish(ConnPid, <<"t_rpc/one">>, <<"hello">>, ?QOS_1),
|
||||
timer:sleep(100),
|
||||
?assertEqual(1, length(receive_messages(1))),
|
||||
emqtt:disconnect(ConnPid),
|
||||
emqx_bridge_worker:stop(Pid).
|
||||
|
||||
%% Full data loopback flow explained:
|
||||
%% mqtt-client ----> local-broker ---(local-subscription)--->
|
||||
%% bridge(export) --- (mqtt-connection)--> local-broker ---(remote-subscription) -->
|
||||
%% bridge(import) --> mqtt-client
|
||||
t_mqtt(_Config) ->
|
||||
SendToTopic = <<"t_mqtt/one">>,
|
||||
SendToTopic2 = <<"t_mqtt/two">>,
|
||||
SendToTopic3 = <<"t_mqtt/three">>,
|
||||
Mountpoint = <<"forwarded/${node}/">>,
|
||||
Name = "mqtt",
|
||||
Cfg = #{
|
||||
name => Name,
|
||||
forwards => [SendToTopic],
|
||||
forward_mountpoint => Mountpoint,
|
||||
start_type => auto,
|
||||
config => #{
|
||||
address => "127.0.0.1:1883",
|
||||
conn_type => mqtt,
|
||||
clientid => <<"client1">>,
|
||||
keepalive => 300,
|
||||
subscriptions => [#{topic => SendToTopic2, qos => 1}],
|
||||
receive_mountpoint => <<"receive/aws/">>
|
||||
},
|
||||
queue => #{
|
||||
replayq_dir => "data/t_mqtt/",
|
||||
replayq_seg_bytes => 10000,
|
||||
batch_bytes_limit => 1000,
|
||||
batch_count_limit => 10
|
||||
}
|
||||
},
|
||||
{ok, Pid} = emqx_bridge_mqtt_sup:create_bridge(Cfg),
|
||||
?assertEqual([{SendToTopic2, 1}], emqx_bridge_worker:get_subscriptions(Name)),
|
||||
ok = emqx_bridge_worker:ensure_subscription_present(Name, SendToTopic3, _QoS = 1),
|
||||
?assertEqual([{SendToTopic3, 1},{SendToTopic2, 1}],
|
||||
emqx_bridge_worker:get_subscriptions(Name)),
|
||||
{ok, ConnPid} = emqtt:start_link([{clientid, <<"client-1">>}]),
|
||||
{ok, _Props} = emqtt:connect(ConnPid),
|
||||
emqtt:subscribe(ConnPid, <<"forwarded/+/t_mqtt/one">>, 1),
|
||||
%% message from a different client, to avoid getting terminated by no-local
|
||||
Max = 10,
|
||||
Msgs = lists:seq(1, Max),
|
||||
lists:foreach(fun(I) ->
|
||||
{ok, _PacketId} = emqtt:publish(ConnPid, SendToTopic, integer_to_binary(I), ?QOS_1)
|
||||
end, Msgs),
|
||||
?assertEqual(10, length(receive_messages(200))),
|
||||
|
||||
emqtt:subscribe(ConnPid, <<"receive/aws/t_mqtt/two">>, 1),
|
||||
%% message from a different client, to avoid getting terminated by no-local
|
||||
Max = 10,
|
||||
Msgs = lists:seq(1, Max),
|
||||
lists:foreach(fun(I) ->
|
||||
{ok, _PacketId} = emqtt:publish(ConnPid, SendToTopic2, integer_to_binary(I), ?QOS_1)
|
||||
end, Msgs),
|
||||
?assertEqual(10, length(receive_messages(200))),
|
||||
|
||||
emqtt:disconnect(ConnPid),
|
||||
ok = emqx_bridge_worker:stop(Pid).
|
||||
|
||||
t_stub_normal(Config) when is_list(Config) ->
|
||||
Name = "stub_normal",
|
||||
Cfg = #{
|
||||
name => Name,
|
||||
forwards => [<<"t_stub_normal/#">>],
|
||||
forward_mountpoint => <<"forwarded">>,
|
||||
start_type => auto,
|
||||
config => #{
|
||||
conn_type => emqx_bridge_stub_conn,
|
||||
client_pid => self()
|
||||
}
|
||||
},
|
||||
{ok, Pid} = emqx_bridge_mqtt_sup:create_bridge(Cfg),
|
||||
receive
|
||||
{Pid, emqx_bridge_stub_conn, ready} -> ok
|
||||
after
|
||||
5000 ->
|
||||
error(timeout)
|
||||
end,
|
||||
{ok, ConnPid} = emqtt:start_link([{clientid, <<"ClientId">>}]),
|
||||
{ok, _} = emqtt:connect(ConnPid),
|
||||
{ok, _PacketId} = emqtt:publish(ConnPid, <<"t_stub_normal/one">>, <<"hello">>, ?QOS_1),
|
||||
receive
|
||||
{stub_message, WorkerPid, BatchRef, _Batch} ->
|
||||
WorkerPid ! {batch_ack, BatchRef},
|
||||
ok
|
||||
after
|
||||
5000 ->
|
||||
error(timeout)
|
||||
end,
|
||||
?SNK_WAIT(inflight_drained),
|
||||
?SNK_WAIT(replayq_drained),
|
||||
emqtt:disconnect(ConnPid),
|
||||
ok = emqx_bridge_worker:stop(Pid).
|
||||
|
||||
t_stub_overflow(_Config) ->
|
||||
Topic = <<"t_stub_overflow/one">>,
|
||||
MaxInflight = 20,
|
||||
Name = "stub_overflow",
|
||||
Cfg = #{
|
||||
name => Name,
|
||||
forwards => [<<"t_stub_overflow/one">>],
|
||||
forward_mountpoint => <<"forwarded">>,
|
||||
start_type => auto,
|
||||
max_inflight => MaxInflight,
|
||||
config => #{
|
||||
conn_type => emqx_bridge_stub_conn,
|
||||
client_pid => self()
|
||||
}
|
||||
},
|
||||
{ok, Worker} = emqx_bridge_mqtt_sup:create_bridge(Cfg),
|
||||
{ok, ConnPid} = emqtt:start_link([{clientid, <<"ClientId">>}]),
|
||||
{ok, _} = emqtt:connect(ConnPid),
|
||||
lists:foreach(
|
||||
fun(I) ->
|
||||
Data = integer_to_binary(I),
|
||||
_ = emqtt:publish(ConnPid, Topic, Data, ?QOS_1)
|
||||
end, lists:seq(1, MaxInflight * 2)),
|
||||
?SNK_WAIT(inflight_full),
|
||||
Acks = stub_receive(MaxInflight),
|
||||
lists:foreach(fun({Pid, Ref}) -> Pid ! {batch_ack, Ref} end, Acks),
|
||||
Acks2 = stub_receive(MaxInflight),
|
||||
lists:foreach(fun({Pid, Ref}) -> Pid ! {batch_ack, Ref} end, Acks2),
|
||||
?SNK_WAIT(inflight_drained),
|
||||
?SNK_WAIT(replayq_drained),
|
||||
emqtt:disconnect(ConnPid),
|
||||
ok = emqx_bridge_worker:stop(Worker).
|
||||
|
||||
t_stub_random_order(_Config) ->
|
||||
Topic = <<"t_stub_random_order/a">>,
|
||||
MaxInflight = 10,
|
||||
Name = "stub_random_order",
|
||||
Cfg = #{
|
||||
name => Name,
|
||||
forwards => [Topic],
|
||||
forward_mountpoint => <<"forwarded">>,
|
||||
start_type => auto,
|
||||
max_inflight => MaxInflight,
|
||||
config => #{
|
||||
conn_type => emqx_bridge_stub_conn,
|
||||
client_pid => self()
|
||||
}
|
||||
},
|
||||
{ok, Worker} = emqx_bridge_mqtt_sup:create_bridge(Cfg),
|
||||
ClientId = <<"ClientId">>,
|
||||
{ok, ConnPid} = emqtt:start_link([{clientid, ClientId}]),
|
||||
{ok, _} = emqtt:connect(ConnPid),
|
||||
lists:foreach(
|
||||
fun(I) ->
|
||||
Data = integer_to_binary(I),
|
||||
_ = emqtt:publish(ConnPid, Topic, Data, ?QOS_1)
|
||||
end, lists:seq(1, MaxInflight)),
|
||||
Acks = stub_receive(MaxInflight),
|
||||
lists:foreach(fun({Pid, Ref}) -> Pid ! {batch_ack, Ref} end,
|
||||
lists:reverse(Acks)),
|
||||
?SNK_WAIT(inflight_drained),
|
||||
?SNK_WAIT(replayq_drained),
|
||||
emqtt:disconnect(ConnPid),
|
||||
ok = emqx_bridge_worker:stop(Worker).
|
||||
|
||||
t_stub_retry_inflight(_Config) ->
|
||||
Topic = <<"to_stub_retry_inflight/a">>,
|
||||
MaxInflight = 10,
|
||||
Name = "stub_retry_inflight",
|
||||
Cfg = #{
|
||||
name => Name,
|
||||
forwards => [Topic],
|
||||
forward_mountpoint => <<"forwarded">>,
|
||||
reconnect_interval => 10,
|
||||
start_type => auto,
|
||||
max_inflight => MaxInflight,
|
||||
config => #{
|
||||
conn_type => emqx_bridge_stub_conn,
|
||||
client_pid => self()
|
||||
}
|
||||
},
|
||||
{ok, Worker} = emqx_bridge_mqtt_sup:create_bridge(Cfg),
|
||||
ClientId = <<"ClientId2">>,
|
||||
case ?block_until(#{?snk_kind := connected, inflight := 0}, 2000, 1000) of
|
||||
{ok, #{inflight := 0}} -> ok;
|
||||
Other -> ct:fail("~p", [Other])
|
||||
end,
|
||||
{ok, ConnPid} = emqtt:start_link([{clientid, ClientId}]),
|
||||
{ok, _} = emqtt:connect(ConnPid),
|
||||
lists:foreach(
|
||||
fun(I) ->
|
||||
Data = integer_to_binary(I),
|
||||
_ = emqtt:publish(ConnPid, Topic, Data, ?QOS_1)
|
||||
end, lists:seq(1, MaxInflight)),
|
||||
%% receive acks but do not ack
|
||||
Acks1 = stub_receive(MaxInflight),
|
||||
?assertEqual(MaxInflight, length(Acks1)),
|
||||
%% simulate a disconnect
|
||||
Worker ! {disconnected, self(), test},
|
||||
?SNK_WAIT(disconnected),
|
||||
case ?block_until(#{?snk_kind := connected, inflight := MaxInflight}, 2000, 20) of
|
||||
{ok, _} -> ok;
|
||||
Error -> ct:fail("~p", [Error])
|
||||
end,
|
||||
%% expect worker to retry inflight, so to receive acks again
|
||||
Acks2 = stub_receive(MaxInflight),
|
||||
?assertEqual(MaxInflight, length(Acks2)),
|
||||
lists:foreach(fun({Pid, Ref}) -> Pid ! {batch_ack, Ref} end,
|
||||
lists:reverse(Acks2)),
|
||||
?SNK_WAIT(inflight_drained),
|
||||
?SNK_WAIT(replayq_drained),
|
||||
emqtt:disconnect(ConnPid),
|
||||
ok = emqx_bridge_worker:stop(Worker).
|
||||
|
||||
stub_receive(N) ->
|
||||
stub_receive(N, []).
|
||||
|
||||
stub_receive(0, Acc) -> lists:reverse(Acc);
|
||||
stub_receive(N, Acc) ->
|
||||
receive
|
||||
{stub_message, WorkerPid, BatchRef, _Batch} ->
|
||||
stub_receive(N - 1, [{WorkerPid, BatchRef} | Acc])
|
||||
after
|
||||
5000 ->
|
||||
lists:reverse(Acc)
|
||||
end.
|
|
@ -17,7 +17,8 @@
|
|||
%% By accident, We have always been using the upstream fork due to
|
||||
%% eredis_cluster's dependency getting resolved earlier.
|
||||
%% Here we pin 1.5.2 to avoid surprises in the future.
|
||||
{poolboy, {git, "https://github.com/emqx/poolboy.git", {tag, "1.5.2"}}}
|
||||
{poolboy, {git, "https://github.com/emqx/poolboy.git", {tag, "1.5.2"}}},
|
||||
{emqtt, {git, "https://github.com/emqx/emqtt", {tag, "1.4.3"}}}
|
||||
]}.
|
||||
|
||||
{shell, [
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue