Merge branch 'master' into fix/authn2

This commit is contained in:
tigercl 2021-09-28 09:27:08 +08:00 committed by GitHub
commit 79685a77ba
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
133 changed files with 5007 additions and 7588 deletions

1
.gitattributes vendored
View File

@ -1,5 +1,6 @@
* text=auto
*.* text eol=lf
*.cmd text eol=crlf
*.jpg -text
*.png -text
*.pdf -text

View File

@ -140,7 +140,6 @@ jobs:
path: source/_packages/${{ matrix.profile }}/.
mac:
runs-on: macos-10.15
needs: prepare
@ -148,11 +147,16 @@ jobs:
fail-fast: false
matrix:
profile: ${{fromJSON(needs.prepare.outputs.profiles)}}
macos:
- macos-11
- macos-10.15
otp:
- 24.0.5-emqx-1
exclude:
- profile: emqx-edge
runs-on: ${{ matrix.macos }}
steps:
- uses: actions/download-artifact@v2
with:
@ -170,16 +174,12 @@ jobs:
id: cache
with:
path: ~/.kerl
key: erl${{ matrix.otp }}-macos10.15
key: otp-${{ matrix.otp }}-${{ matrix.macos }}
- name: build erlang
if: steps.cache.outputs.cache-hit != 'true'
timeout-minutes: 60
env:
KERL_BUILD_BACKEND: git
OTP_GITHUB_URL: https://github.com/emqx/otp
run: |
kerl update releases
kerl build ${{ matrix.otp }}
kerl build git https://github.com/emqx/otp.git OTP-${{ matrix.otp }} ${{ matrix.otp }}
kerl install ${{ matrix.otp }} $HOME/.kerl/${{ matrix.otp }}
- name: build
working-directory: source
@ -191,8 +191,8 @@ jobs:
- name: test
working-directory: source
run: |
pkg_name=$(basename _packages/${{ matrix.profile }}/${{ matrix.profile }}-*.zip)
unzip -q _packages/${{ matrix.profile }}/$pkg_name
pkg_name=$(find _packages/${{ matrix.profile }} -mindepth 1 -maxdepth 1 -iname \*.zip | head)
unzip -q $pkg_name
# gsed -i '/emqx_telemetry/d' ./emqx/data/loaded_plugins
./emqx/bin/emqx start || cat emqx/log/erlang.log.1
ready='no'
@ -211,7 +211,7 @@ jobs:
./emqx/bin/emqx_ctl status
./emqx/bin/emqx stop
rm -rf emqx
openssl dgst -sha256 ./_packages/${{ matrix.profile }}/$pkg_name | awk '{print $2}' > ./_packages/${{ matrix.profile }}/$pkg_name.sha256
openssl dgst -sha256 $pkg_name | awk '{print $2}' > $pkg_name.sha256
- uses: actions/upload-artifact@v1
if: startsWith(github.ref, 'refs/tags/')
with:

View File

@ -13,6 +13,7 @@ jobs:
runs-on: ubuntu-20.04
strategy:
fail-fast: false
matrix:
otp:
- 24.0.5-emqx-1
@ -53,13 +54,18 @@ jobs:
path: _packages/**/*.zip
mac:
runs-on: macos-10.15
strategy:
fail-fast: false
matrix:
macos:
- macos-11
- macos-10.15
otp:
- 24.0.5-emqx-1
runs-on: ${{ matrix.macos }}
steps:
- uses: actions/checkout@v1
- name: prepare
@ -82,16 +88,12 @@ jobs:
id: cache
with:
path: ~/.kerl
key: erl${{ matrix.otp }}-macos10.15
key: otp-${{ matrix.otp }}-${{ matrix.macos }}
- name: build erlang
if: steps.cache.outputs.cache-hit != 'true'
timeout-minutes: 60
env:
KERL_BUILD_BACKEND: git
OTP_GITHUB_URL: https://github.com/emqx/otp
run: |
kerl update releases
kerl build ${{ matrix.otp }}
kerl build git https://github.com/emqx/otp.git OTP-${{ matrix.otp }} ${{ matrix.otp }}
kerl install ${{ matrix.otp }} $HOME/.kerl/${{ matrix.otp }}
- name: build
run: |
@ -106,8 +108,7 @@ jobs:
path: ./rebar3.crashdump
- name: test
run: |
pkg_name=$(basename _packages/${EMQX_NAME}/emqx-*.zip)
unzip -q _packages/${EMQX_NAME}/$pkg_name
unzip -q $(find _packages/${EMQX_NAME} -mindepth 1 -maxdepth 1 -iname \*.zip | head)
# gsed -i '/emqx_telemetry/d' ./emqx/data/loaded_plugins
./emqx/bin/emqx start || cat emqx/log/erlang.log.1
ready='no'

View File

@ -45,6 +45,13 @@ jobs:
- api_login
- api_banned
- api_alarms
- api_nodes
- api_topic_metrics
- api_retainer
- api_auto_subscribe
- api_delayed_publish
- api_topic_rewrite
- api_event_message
steps:
- uses: actions/checkout@v2
with:
@ -74,7 +81,7 @@ jobs:
cd /tmp && tar -xvf apache-jmeter.tgz
echo "jmeter.save.saveservice.output_format=xml" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties
echo "jmeter.save.saveservice.response_data.on_error=true" >> /tmp/apache-jmeter-$JMETER_VERSION/user.properties
wget --no-verbose -O /tmp/apache-jmeter-$JMETER_VERSION/lib/ext/mqtt-xmeter-2.0.2-jar-with-dependencies.jar https://raw.githubusercontent.com/xmeter-net/mqtt-jmeter/master/Download/v2.0.2/mqtt-xmeter-2.0.2-jar-with-dependencies.jar
wget --no-verbose -O /tmp/apache-jmeter-$JMETER_VERSION/lib/ext/mqtt-xmeter-fuse-2.0.2-jar-with-dependencies.jar https://raw.githubusercontent.com/xmeter-net/mqtt-jmeter/master/Download/v2.0.2/mqtt-xmeter-fuse-2.0.2-jar-with-dependencies.jar
ln -s /tmp/apache-jmeter-$JMETER_VERSION /opt/jmeter
- name: run ${{ matrix.script_name }}
run: |

2
.gitignore vendored
View File

@ -50,3 +50,5 @@ _upgrade_base/
TAGS
erlang_ls.config
.els_cache/
.vs/
.vscode/

View File

@ -5,7 +5,7 @@ BUILD = $(CURDIR)/build
SCRIPTS = $(CURDIR)/scripts
export PKG_VSN ?= $(shell $(CURDIR)/pkg-vsn.sh)
export EMQX_DESC ?= EMQ X
export EMQX_DASHBOARD_VERSION ?= v5.0.0-beta.14
export EMQX_DASHBOARD_VERSION ?= v5.0.0-beta.15
ifeq ($(OS),Windows_NT)
export REBAR_COLOR=none
endif

View File

@ -14,7 +14,7 @@ English | [简体中文](./README-CN.md) | [日本語](./README-JP.md) | [рус
*EMQ X* broker is a fully open source, highly scalable, highly available distributed MQTT messaging broker for IoT, M2M and Mobile applications that can handle tens of millions of concurrent clients.
Starting from 3.0 release, *EMQ X* broker fully supports MQTT V5.0 protocol specifications and backward compatible with MQTT V3.1 and V3.1.1, as well as other communication protocols such as MQTT-SN, CoAP, LwM2M, WebSocket and STOMP. The 3.0 release of the *EMQ X* broker can scaled to 10+ million concurrent MQTT connections on one cluster.
Starting from 3.0 release, *EMQ X* broker fully supports MQTT V5.0 protocol specifications and backward compatible with MQTT V3.1 and V3.1.1, as well as other communication protocols such as MQTT-SN, CoAP, LwM2M, WebSocket and STOMP. The 3.0 release of the *EMQ X* broker can scale to 10+ million concurrent MQTT connections on one cluster.
- For full list of new features, please read [EMQ X Release Notes](https://github.com/emqx/emqx/releases).
- For more information, please visit [EMQ X homepage](https://www.emqx.io/).

View File

@ -198,7 +198,7 @@ listeners.ssl.default {
ssl.certfile = "{{ platform_etc_dir }}/certs/cert.pem"
ssl.cacertfile = "{{ platform_etc_dir }}/certs/cacert.pem"
ssl.versions = ["tlsv1.3", "tlsv1.2", "tlsv1.1", "tlsv1"]
# ssl.versions = ["tlsv1.3", "tlsv1.2", "tlsv1.1", "tlsv1"]
# TLS 1.3: "TLS_AES_256_GCM_SHA384,TLS_AES_128_GCM_SHA256,TLS_CHACHA20_POLY1305_SHA256,TLS_AES_128_CCM_SHA256,TLS_AES_128_CCM_8_SHA256"
# TLS 1-1.2 "ECDHE-ECDSA-AES256-GCM-SHA384,ECDHE-RSA-AES256-GCM-SHA384,ECDHE-ECDSA-AES256-SHA384,ECDHE-RSA-AES256-SHA384,ECDHE-ECDSA-DES-CBC3-SHA,ECDH-ECDSA-AES256-GCM-SHA384,ECDH-RSA-AES256-GCM-SHA384,ECDH-ECDSA-AES256-SHA384,ECDH-RSA-AES256-SHA384,DHE-DSS-AES256-GCM-SHA384,DHE-DSS-AES256-SHA256,AES256-GCM-SHA384,AES256-SHA256,ECDHE-ECDSA-AES128-GCM-SHA256,ECDHE-RSA-AES128-GCM-SHA256,ECDHE-ECDSA-AES128-SHA256,ECDHE-RSA-AES128-SHA256,ECDH-ECDSA-AES128-GCM-SHA256,ECDH-RSA-AES128-GCM-SHA256,ECDH-ECDSA-AES128-SHA256,ECDH-RSA-AES128-SHA256,DHE-DSS-AES128-GCM-SHA256,DHE-DSS-AES128-SHA256,AES128-GCM-SHA256,AES128-SHA256,ECDHE-ECDSA-AES256-SHA,ECDHE-RSA-AES256-SHA,DHE-DSS-AES256-SHA,ECDH-ECDSA-AES256-SHA,ECDH-RSA-AES256-SHA,AES256-SHA,ECDHE-ECDSA-AES128-SHA,ECDHE-RSA-AES128-SHA,DHE-DSS-AES128-SHA,ECDH-ECDSA-AES128-SHA,ECDH-RSA-AES128-SHA,AES128-SHA"
# PSK: "PSK-AES128-CBC-SHA,PSK-AES256-CBC-SHA,PSK-3DES-EDE-CBC-SHA,PSK-RC4-SHA"
@ -1350,7 +1350,7 @@ example_common_ssl_options {
## Default: true
ssl.honor_cipher_order = true
ssl.versions = ["tlsv1.3", "tlsv1.2", "tlsv1.1", "tlsv1"]
# ssl.versions = ["tlsv1.3", "tlsv1.2", "tlsv1.1", "tlsv1"]
# TLS 1.3: "TLS_AES_256_GCM_SHA384,TLS_AES_128_GCM_SHA256,TLS_CHACHA20_POLY1305_SHA256,TLS_AES_128_CCM_SHA256,TLS_AES_128_CCM_8_SHA256"
# TLS 1-1.2 "ECDHE-ECDSA-AES256-GCM-SHA384,ECDHE-RSA-AES256-GCM-SHA384,ECDHE-ECDSA-AES256-SHA384,ECDHE-RSA-AES256-SHA384,ECDHE-ECDSA-DES-CBC3-SHA,ECDH-ECDSA-AES256-GCM-SHA384,ECDH-RSA-AES256-GCM-SHA384,ECDH-ECDSA-AES256-SHA384,ECDH-RSA-AES256-SHA384,DHE-DSS-AES256-GCM-SHA384,DHE-DSS-AES256-SHA256,AES256-GCM-SHA384,AES256-SHA256,ECDHE-ECDSA-AES128-GCM-SHA256,ECDHE-RSA-AES128-GCM-SHA256,ECDHE-ECDSA-AES128-SHA256,ECDHE-RSA-AES128-SHA256,ECDH-ECDSA-AES128-GCM-SHA256,ECDH-RSA-AES128-GCM-SHA256,ECDH-ECDSA-AES128-SHA256,ECDH-RSA-AES128-SHA256,DHE-DSS-AES128-GCM-SHA256,DHE-DSS-AES128-SHA256,AES128-GCM-SHA256,AES128-SHA256,ECDHE-ECDSA-AES256-SHA,ECDHE-RSA-AES256-SHA,DHE-DSS-AES256-SHA,ECDH-ECDSA-AES256-SHA,ECDH-RSA-AES256-SHA,AES256-SHA,ECDHE-ECDSA-AES128-SHA,ECDHE-RSA-AES128-SHA,DHE-DSS-AES128-SHA,ECDH-ECDSA-AES128-SHA,ECDH-RSA-AES128-SHA,AES128-SHA"
# PSK: "PSK-AES128-CBC-SHA,PSK-AES256-CBC-SHA,PSK-3DES-EDE-CBC-SHA,PSK-RC4-SHA"

View File

@ -13,7 +13,7 @@
, {typerefl, {git, "https://github.com/k32/typerefl", {tag, "0.8.5"}}}
, {jiffy, {git, "https://github.com/emqx/jiffy", {tag, "1.0.5"}}}
, {cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.8.3"}}}
, {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.8.2"}}}
, {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.8.3"}}}
, {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.10.8"}}}
, {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.5.1"}}}
, {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.19.5"}}}

View File

@ -77,7 +77,7 @@ stop() ->
{ok, emqx_config:update_result()} | {error, emqx_config:update_error()}.
update_config(SchemaModule, ConfKeyPath, UpdateArgs) ->
?ATOM_CONF_PATH(ConfKeyPath, gen_server:call(?MODULE, {change_config, SchemaModule,
AtomKeyPath, UpdateArgs}), {error, ConfKeyPath}).
AtomKeyPath, UpdateArgs}), {error, {not_found, ConfKeyPath}}).
-spec add_handler(emqx_config:config_key_path(), handler_name()) -> ok.
add_handler(ConfKeyPath, HandlerName) ->

View File

@ -67,7 +67,7 @@
%% - The execution order is the adding order of callbacks if they have
%% equal priority values.
-type(hookpoint() :: atom()).
-type(hookpoint() :: atom() | binary()).
-type(action() :: {module(), atom(), [term()] | undefined}).
-type(filter() :: {module(), atom(), [term()] | undefined}).
@ -158,12 +158,12 @@ del(HookPoint, Action) ->
gen_server:cast(?SERVER, {del, HookPoint, Action}).
%% @doc Run hooks.
-spec(run(atom(), list(Arg::term())) -> ok).
-spec(run(hookpoint(), list(Arg::term())) -> ok).
run(HookPoint, Args) ->
do_run(lookup(HookPoint), Args).
%% @doc Run hooks with Accumulator.
-spec(run_fold(atom(), list(Arg::term()), Acc::term()) -> Acc::term()).
-spec(run_fold(hookpoint(), list(Arg::term()), Acc::term()) -> Acc::term()).
run_fold(HookPoint, Args, Acc) ->
do_run_fold(lookup(HookPoint), Args, Acc).

View File

@ -1,66 +0,0 @@
-module(emqx_rule_actions_trans).
-include_lib("syntax_tools/include/merl.hrl").
-export([parse_transform/2]).
parse_transform(Forms, _Options) ->
trans(Forms, []).
trans([], ResAST) ->
lists:reverse(ResAST);
trans([{eof, L} | AST], ResAST) ->
lists:reverse([{eof, L} | ResAST]) ++ AST;
trans([{function, LineNo, FuncName, Arity, Clauses} | AST], ResAST) ->
NewClauses = trans_func_clauses(atom_to_list(FuncName), Clauses),
trans(AST, [{function, LineNo, FuncName, Arity, NewClauses} | ResAST]);
trans([Form | AST], ResAST) ->
trans(AST, [Form | ResAST]).
trans_func_clauses("on_action_create_" ++ _ = _FuncName , Clauses) ->
NewClauses = [
begin
Bindings = lists:flatten(get_vars(Args) ++ get_vars(Body, lefth)),
Body2 = append_to_result(Bindings, Body),
{clause, LineNo, Args, Guards, Body2}
end || {clause, LineNo, Args, Guards, Body} <- Clauses],
NewClauses;
trans_func_clauses(_FuncName, Clauses) ->
Clauses.
get_vars(Exprs) ->
get_vars(Exprs, all).
get_vars(Exprs, Type) ->
do_get_vars(Exprs, [], Type).
do_get_vars([], Vars, _Type) -> Vars;
do_get_vars([Line | Expr], Vars, all) ->
do_get_vars(Expr, [syntax_vars(erl_syntax:form_list([Line])) | Vars], all);
do_get_vars([Line | Expr], Vars, lefth) ->
do_get_vars(Expr,
case (Line) of
?Q("_@LeftV = _@@_") -> Vars ++ syntax_vars(LeftV);
_ -> Vars
end, lefth).
syntax_vars(Line) ->
sets:to_list(erl_syntax_lib:variables(Line)).
%% append bindings to the return value as the first tuple element.
%% e.g. if the original result is R, then the new result will be {[binding()], R}.
append_to_result(Bindings, Exprs) ->
erl_syntax:revert_forms(do_append_to_result(to_keyword(Bindings), Exprs, [])).
do_append_to_result(KeyWordVars, [Line], Res) ->
case Line of
?Q("_@LeftV = _@RightV") ->
lists:reverse([?Q("{[_@KeyWordVars], _@LeftV}"), Line | Res]);
_ ->
lists:reverse([?Q("{[_@KeyWordVars], _@Line}") | Res])
end;
do_append_to_result(KeyWordVars, [Line | Exprs], Res) ->
do_append_to_result(KeyWordVars, Exprs, [Line | Res]).
to_keyword(Vars) ->
[erl_syntax:tuple([erl_syntax:atom(Var), merl:var(Var)])
|| Var <- Vars].

View File

@ -55,7 +55,7 @@
% workaround: prevent being recognized as unused functions
-export([to_duration/1, to_duration_s/1, to_duration_ms/1,
to_bytesize/1, to_wordsize/1,
mk_duration/2, to_bytesize/1, to_wordsize/1,
to_percent/1, to_comma_separated_list/1,
to_bar_separated_list/1, to_ip_port/1,
to_erl_cipher_suite/1,
@ -159,11 +159,11 @@ fields("stats") ->
fields("authorization") ->
[ {"no_match",
sc(hoconsc:union([allow, deny]),
sc(hoconsc:enum([allow, deny]),
#{ default => allow
})}
, {"deny_action",
sc(hoconsc:union([ignore, disconnect]),
sc(hoconsc:enum([ignore, disconnect]),
#{ default => ignore
})}
, {"cache",
@ -297,7 +297,7 @@ fields("mqtt") ->
})
}
, {"mqueue_default_priority",
sc(union(highest, lowest),
sc(hoconsc:enum([highest, lowest]),
#{ default => lowest
})
}
@ -312,11 +312,11 @@ fields("mqtt") ->
})
}
, {"peer_cert_as_username",
sc(hoconsc:union([disabled, cn, dn, crt, pem, md5]),
sc(hoconsc:enum([disabled, cn, dn, crt, pem, md5]),
#{ default => disabled
})}
, {"peer_cert_as_clientid",
sc(hoconsc:union([disabled, cn, dn, crt, pem, md5]),
sc(hoconsc:enum([disabled, cn, dn, crt, pem, md5]),
#{ default => disabled
})}
];
@ -525,7 +525,7 @@ fields("ws_opts") ->
})
}
, {"mqtt_piggyback",
sc(hoconsc:union([single, multiple]),
sc(hoconsc:enum([single, multiple]),
#{ default => multiple
})
}
@ -637,23 +637,23 @@ fields("listener_ssl_opts") ->
server_ssl_opts_schema(
#{ depth => 10
, reuse_sessions => true
, versions => tcp
, ciphers => tcp_all
, versions => tls_all_available
, ciphers => tls_all_available
}, false);
fields("listener_wss_opts") ->
server_ssl_opts_schema(
#{ depth => 10
, reuse_sessions => true
, versions => tcp
, ciphers => tcp_all
, versions => tls_all_available
, ciphers => tls_all_available
}, true);
fields(ssl_client_opts) ->
client_ssl_opts_schema(#{});
fields("deflate_opts") ->
[ {"level",
sc(hoconsc:union([none, default, best_compression, best_speed]),
sc(hoconsc:enum([none, default, best_compression, best_speed]),
#{})
}
, {"mem_level",
@ -662,15 +662,15 @@ fields("deflate_opts") ->
})
}
, {"strategy",
sc(hoconsc:union([default, filtered, huffman_only, rle]),
sc(hoconsc:enum([default, filtered, huffman_only, rle]),
#{})
}
, {"server_context_takeover",
sc(hoconsc:union([takeover, no_takeover]),
sc(hoconsc:enum([takeover, no_takeover]),
#{})
}
, {"client_context_takeover",
sc(hoconsc:union([takeover, no_takeover]),
sc(hoconsc:enum([takeover, no_takeover]),
#{})
}
, {"server_max_window_bits",
@ -709,12 +709,12 @@ fields("broker") ->
})
}
, {"session_locking_strategy",
sc(hoconsc:union([local, leader, quorum, all]),
sc(hoconsc:enum([local, leader, quorum, all]),
#{ default => quorum
})
}
, {"shared_subscription_strategy",
sc(hoconsc:union([random, round_robin]),
sc(hoconsc:enum([random, round_robin]),
#{ default => round_robin
})
}
@ -736,7 +736,7 @@ fields("broker") ->
fields("broker_perf") ->
[ {"route_lock_type",
sc(hoconsc:union([key, tab, global]),
sc(hoconsc:enum([key, tab, global]),
#{ default => key
})}
, {"trie_compaction",
@ -962,7 +962,7 @@ the file if it is to be added.
})
}
, {"verify",
sc(hoconsc:union([verify_peer, verify_none]),
sc(hoconsc:enum([verify_peer, verify_none]),
#{ default => Df("verify", verify_none)
})
}
@ -987,13 +987,14 @@ keyfile is password-protected."""
}
, {"versions",
sc(hoconsc:array(typerefl:atom()),
#{ default => default_tls_vsns(maps:get(versions, Defaults, tcp))
#{ default => default_tls_vsns(maps:get(versions, Defaults, tls_all_available))
, desc =>
"""All TLS/DTLS versions to be supported.<br>
NOTE: PSK ciphers are suppresed by 'tlsv1.3' version config<br>
In case PSK cipher suites are intended, make sure to configured
<code>['tlsv1.2', 'tlsv1.1']</code> here.
"""
, validator => fun validate_tls_versions/1
})
}
, {"ciphers", ciphers_schema(D("ciphers"))}
@ -1086,7 +1087,7 @@ client_ssl_opts_schema(Defaults) ->
, desc =>
"""Specify the host name to be used in TLS Server Name Indication extension.<br>
For instance, when connecting to \"server.example.net\", the genuine server
which accedpts the connection and performs TSL handshake may differ from the
which accedpts the connection and performs TLS handshake may differ from the
host the TLS client initially connects to, e.g. when connecting to an IP address
or when the host has multiple resolvable DNS records <br>
If not specified, it will default to the host name string which is used
@ -1099,12 +1100,12 @@ verification check."""
].
default_tls_vsns(dtls) ->
[<<"dtlsv1.2">>, <<"dtlsv1">>];
default_tls_vsns(tcp) ->
[<<"tlsv1.3">>, <<"tlsv1.2">>, <<"tlsv1.1">>, <<"tlsv1">>].
default_tls_vsns(dtls_all_available) ->
proplists:get_value(available_dtls, ssl:versions());
default_tls_vsns(tls_all_available) ->
emqx_tls_lib:default_versions().
-spec ciphers_schema(quic | dtls | tcp_all | undefined) -> hocon_schema:field_schema().
-spec ciphers_schema(quic | dtls_all_available | tls_all_available | undefined) -> hocon_schema:field_schema().
ciphers_schema(Default) ->
sc(hoconsc:array(string()),
#{ default => default_ciphers(Default)
@ -1113,7 +1114,10 @@ ciphers_schema(Default) ->
(Ciphers) when is_list(Ciphers) ->
Ciphers
end
, validator => fun validate_ciphers/1
, validator => case Default =:= quic of
true -> undefined; %% quic has openssl statically linked
false -> fun validate_ciphers/1
end
, desc =>
"""TLS cipher suite names separated by comma, or as an array of strings
<code>\"TLS_AES_256_GCM_SHA384,TLS_AES_128_GCM_SHA256\"</code> or
@ -1146,52 +1150,24 @@ RSA-PSK-DES-CBC3-SHA,RSA-PSK-RC4-SHA\"</code><br>
end}).
default_ciphers(undefined) ->
default_ciphers(tcp_all);
default_ciphers(tls_all_available);
default_ciphers(quic) -> [
"TLS_AES_256_GCM_SHA384",
"TLS_AES_128_GCM_SHA256",
"TLS_CHACHA20_POLY1305_SHA256"
];
default_ciphers(tcp_all) ->
default_ciphers('tlsv1.3') ++
default_ciphers('tlsv1.2') ++
default_ciphers(psk);
default_ciphers(dtls) ->
default_ciphers(dtls_all_available) ->
%% as of now, dtls does not support tlsv1.3 ciphers
default_ciphers('tlsv1.2') ++ default_ciphers('psk');
default_ciphers('tlsv1.3') ->
["TLS_AES_256_GCM_SHA384", "TLS_AES_128_GCM_SHA256",
"TLS_CHACHA20_POLY1305_SHA256", "TLS_AES_128_CCM_SHA256",
"TLS_AES_128_CCM_8_SHA256"]
++ default_ciphers('tlsv1.2');
default_ciphers('tlsv1.2') -> [
"ECDHE-ECDSA-AES256-GCM-SHA384",
"ECDHE-RSA-AES256-GCM-SHA384", "ECDHE-ECDSA-AES256-SHA384", "ECDHE-RSA-AES256-SHA384",
"ECDHE-ECDSA-DES-CBC3-SHA", "ECDH-ECDSA-AES256-GCM-SHA384", "ECDH-RSA-AES256-GCM-SHA384",
"ECDH-ECDSA-AES256-SHA384", "ECDH-RSA-AES256-SHA384", "DHE-DSS-AES256-GCM-SHA384",
"DHE-DSS-AES256-SHA256", "AES256-GCM-SHA384", "AES256-SHA256",
"ECDHE-ECDSA-AES128-GCM-SHA256", "ECDHE-RSA-AES128-GCM-SHA256",
"ECDHE-ECDSA-AES128-SHA256", "ECDHE-RSA-AES128-SHA256", "ECDH-ECDSA-AES128-GCM-SHA256",
"ECDH-RSA-AES128-GCM-SHA256", "ECDH-ECDSA-AES128-SHA256", "ECDH-RSA-AES128-SHA256",
"DHE-DSS-AES128-GCM-SHA256", "DHE-DSS-AES128-SHA256", "AES128-GCM-SHA256", "AES128-SHA256",
"ECDHE-ECDSA-AES256-SHA", "ECDHE-RSA-AES256-SHA", "DHE-DSS-AES256-SHA",
"ECDH-ECDSA-AES256-SHA", "ECDH-RSA-AES256-SHA", "AES256-SHA", "ECDHE-ECDSA-AES128-SHA",
"ECDHE-RSA-AES128-SHA", "DHE-DSS-AES128-SHA", "ECDH-ECDSA-AES128-SHA",
"ECDH-RSA-AES128-SHA", "AES128-SHA"
];
default_ciphers(psk) ->
[ "RSA-PSK-AES256-GCM-SHA384","RSA-PSK-AES256-CBC-SHA384",
"RSA-PSK-AES128-GCM-SHA256","RSA-PSK-AES128-CBC-SHA256",
"RSA-PSK-AES256-CBC-SHA","RSA-PSK-AES128-CBC-SHA",
"RSA-PSK-DES-CBC3-SHA","RSA-PSK-RC4-SHA"
].
emqx_tls_lib:selected_ciphers(['dtlsv1.2', 'dtlsv1']);
default_ciphers(tls_all_available) ->
emqx_tls_lib:default_ciphers().
%% @private return a list of keys in a parent field
-spec(keys(string(), hocon:config()) -> [string()]).
keys(Parent, Conf) ->
[binary_to_list(B) || B <- maps:keys(conf_get(Parent, Conf, #{}))].
-spec ceiling(float()) -> integer().
-spec ceiling(number()) -> integer().
ceiling(X) ->
T = erlang:trunc(X),
case (X - T) of
@ -1210,6 +1186,15 @@ ref(Field) -> hoconsc:ref(?MODULE, Field).
ref(Module, Field) -> hoconsc:ref(Module, Field).
mk_duration(Desc, OverrideMeta) ->
DefaultMeta = #{desc => Desc ++ " Time span. A text string with number followed by time units:
`ms` for milli-seconds,
`s` for seconds,
`m` for minutes,
`h` for hours;
or combined representation like `1h5m0s`"},
hoconsc:mk(typerefl:alias("string", duration()), maps:merge(DefaultMeta, OverrideMeta)).
to_duration(Str) ->
case hocon_postprocess:duration(Str) of
I when is_integer(I) -> {ok, I};
@ -1218,13 +1203,15 @@ to_duration(Str) ->
to_duration_s(Str) ->
case hocon_postprocess:duration(Str) of
I when is_integer(I) -> {ok, ceiling(I / 1000)};
I when is_number(I) -> {ok, ceiling(I / 1000)};
_ -> {error, Str}
end.
-spec to_duration_ms(Input) -> {ok, integer()} | {error, Input}
when Input :: string() | binary().
to_duration_ms(Str) ->
case hocon_postprocess:duration(Str) of
I when is_integer(I) -> {ok, ceiling(I)};
I when is_number(I) -> {ok, ceiling(I)};
_ -> {error, Str}
end.
@ -1303,9 +1290,22 @@ parse_user_lookup_fun(StrConf) ->
{fun Mod:Fun/3, <<>>}.
validate_ciphers(Ciphers) ->
All = ssl:cipher_suites(all, 'tlsv1.3', openssl) ++
ssl:cipher_suites(all, 'tlsv1.2', openssl), %% includes older version ciphers
All = case is_tlsv13_available() of
true -> ssl:cipher_suites(all, 'tlsv1.3', openssl);
false -> []
end ++ ssl:cipher_suites(all, 'tlsv1.2', openssl),
case lists:filter(fun(Cipher) -> not lists:member(Cipher, All) end, Ciphers) of
[] -> ok;
Bad -> {error, {bad_ciphers, Bad}}
end.
validate_tls_versions(Versions) ->
AvailableVersions = proplists:get_value(available, ssl:versions()) ++
proplists:get_value(available_dtls, ssl:versions()),
case lists:filter(fun(V) -> not lists:member(V, AvailableVersions) end, Versions) of
[] -> ok;
Vs -> {error, {unsupported_ssl_versions, Vs}}
end.
is_tlsv13_available() ->
lists:member('tlsv1.3', proplists:get_value(available, ssl:versions())).

View File

@ -19,7 +19,7 @@
-export([ default_versions/0
, integral_versions/1
, default_ciphers/0
, default_ciphers/1
, selected_ciphers/1
, integral_ciphers/2
, drop_tls13_for_old_otp/1
]).
@ -31,9 +31,7 @@
%% @doc Returns the default supported tls versions.
-spec default_versions() -> [atom()].
default_versions() ->
OtpRelease = list_to_integer(erlang:system_info(otp_release)),
integral_versions(default_versions(OtpRelease)).
default_versions() -> available_versions().
%% @doc Validate a given list of desired tls versions.
%% raise an error exception if non of them are available.
@ -51,7 +49,7 @@ integral_versions(Desired) when ?IS_STRING(Desired) ->
integral_versions(Desired) when is_binary(Desired) ->
integral_versions(parse_versions(Desired));
integral_versions(Desired) ->
{_, Available} = lists:keyfind(available, 1, ssl:versions()),
Available = available_versions(),
case lists:filter(fun(V) -> lists:member(V, Available) end, Desired) of
[] -> erlang:error(#{ reason => no_available_tls_version
, desired => Desired
@ -61,27 +59,61 @@ integral_versions(Desired) ->
Filtered
end.
%% @doc Return a list of default (openssl string format) cipher suites.
-spec default_ciphers() -> [string()].
default_ciphers() -> default_ciphers(default_versions()).
%% @doc Return a list of (openssl string format) cipher suites.
-spec default_ciphers([ssl:tls_version()]) -> [string()].
default_ciphers(['tlsv1.3']) ->
-spec all_ciphers([ssl:tls_version()]) -> [string()].
all_ciphers(['tlsv1.3']) ->
%% When it's only tlsv1.3 wanted, use 'exclusive' here
%% because 'all' returns legacy cipher suites too,
%% which does not make sense since tlsv1.3 can not use
%% legacy cipher suites.
ssl:cipher_suites(exclusive, 'tlsv1.3', openssl);
default_ciphers(Versions) ->
all_ciphers(Versions) ->
%% assert non-empty
[_ | _] = dedup(lists:append([ssl:cipher_suites(all, V, openssl) || V <- Versions])).
%% @doc All Pre-selected TLS ciphers.
default_ciphers() ->
selected_ciphers(available_versions()).
%% @doc Pre-selected TLS ciphers for given versions..
selected_ciphers(Vsns) ->
All = all_ciphers(Vsns),
dedup(lists:filter(fun(Cipher) -> lists:member(Cipher, All) end,
lists:flatmap(fun do_selected_ciphers/1, Vsns))).
do_selected_ciphers('tlsv1.3') ->
case lists:member('tlsv1.3', proplists:get_value(available, ssl:versions())) of
true -> ssl:cipher_suites(exclusive, 'tlsv1.3', openssl);
false -> []
end ++ do_selected_ciphers('tlsv1.2');
do_selected_ciphers(_) ->
[ "ECDHE-ECDSA-AES256-GCM-SHA384",
"ECDHE-RSA-AES256-GCM-SHA384", "ECDHE-ECDSA-AES256-SHA384", "ECDHE-RSA-AES256-SHA384",
"ECDHE-ECDSA-DES-CBC3-SHA", "ECDH-ECDSA-AES256-GCM-SHA384", "ECDH-RSA-AES256-GCM-SHA384",
"ECDH-ECDSA-AES256-SHA384", "ECDH-RSA-AES256-SHA384", "DHE-DSS-AES256-GCM-SHA384",
"DHE-DSS-AES256-SHA256", "AES256-GCM-SHA384", "AES256-SHA256",
"ECDHE-ECDSA-AES128-GCM-SHA256", "ECDHE-RSA-AES128-GCM-SHA256",
"ECDHE-ECDSA-AES128-SHA256", "ECDHE-RSA-AES128-SHA256", "ECDH-ECDSA-AES128-GCM-SHA256",
"ECDH-RSA-AES128-GCM-SHA256", "ECDH-ECDSA-AES128-SHA256", "ECDH-RSA-AES128-SHA256",
"DHE-DSS-AES128-GCM-SHA256", "DHE-DSS-AES128-SHA256", "AES128-GCM-SHA256", "AES128-SHA256",
"ECDHE-ECDSA-AES256-SHA", "ECDHE-RSA-AES256-SHA", "DHE-DSS-AES256-SHA",
"ECDH-ECDSA-AES256-SHA", "ECDH-RSA-AES256-SHA", "AES256-SHA", "ECDHE-ECDSA-AES128-SHA",
"ECDHE-RSA-AES128-SHA", "DHE-DSS-AES128-SHA", "ECDH-ECDSA-AES128-SHA",
"ECDH-RSA-AES128-SHA", "AES128-SHA",
%% psk
"RSA-PSK-AES256-GCM-SHA384","RSA-PSK-AES256-CBC-SHA384",
"RSA-PSK-AES128-GCM-SHA256","RSA-PSK-AES128-CBC-SHA256",
"RSA-PSK-AES256-CBC-SHA","RSA-PSK-AES128-CBC-SHA",
"RSA-PSK-DES-CBC3-SHA","RSA-PSK-RC4-SHA"
].
%% @doc Ensure version & cipher-suites integrity.
-spec integral_ciphers([ssl:tls_version()], binary() | string() | [string()]) -> [string()].
integral_ciphers(Versions, Ciphers) when Ciphers =:= [] orelse Ciphers =:= undefined ->
%% not configured
integral_ciphers(Versions, default_ciphers(Versions));
integral_ciphers(Versions, selected_ciphers(Versions));
integral_ciphers(Versions, Ciphers) when ?IS_STRING_LIST(Ciphers) ->
%% ensure tlsv1.3 ciphers if none of them is found in Ciphers
dedup(ensure_tls13_cipher(lists:member('tlsv1.3', Versions), Ciphers));
@ -95,7 +127,7 @@ integral_ciphers(Versions, Ciphers) ->
%% In case tlsv1.3 is present, ensure tlsv1.3 cipher is added if user
%% did not provide it from config --- which is a common mistake
ensure_tls13_cipher(true, Ciphers) ->
Tls13Ciphers = default_ciphers(['tlsv1.3']),
Tls13Ciphers = selected_ciphers(['tlsv1.3']),
case lists:any(fun(C) -> lists:member(C, Tls13Ciphers) end, Ciphers) of
true -> Ciphers;
false -> Tls13Ciphers ++ Ciphers
@ -103,11 +135,17 @@ ensure_tls13_cipher(true, Ciphers) ->
ensure_tls13_cipher(false, Ciphers) ->
Ciphers.
%% default ssl versions based on available versions.
-spec available_versions() -> [atom()].
available_versions() ->
OtpRelease = list_to_integer(erlang:system_info(otp_release)),
default_versions(OtpRelease).
%% tlsv1.3 is available from OTP-22 but we do not want to use until 23.
default_versions(OtpRelease) when OtpRelease >= 23 ->
['tlsv1.3' | default_versions(22)];
proplists:get_value(available, ssl:versions());
default_versions(_) ->
['tlsv1.2', 'tlsv1.1', tlsv1].
lists:delete('tlsv1.3', proplists:get_value(available, ssl:versions())).
%% Deduplicate a list without re-ordering the elements.
dedup([]) -> [];
@ -175,10 +213,12 @@ drop_tls13(SslOpts0) ->
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
all_ciphers() -> all_ciphers(default_versions()).
drop_tls13_test() ->
Versions = default_versions(),
?assert(lists:member('tlsv1.3', Versions)),
Ciphers = default_ciphers(),
Ciphers = all_ciphers(),
?assert(has_tlsv13_cipher(Ciphers)),
Opts0 = #{versions => Versions, ciphers => Ciphers, other => true},
Opts = drop_tls13(Opts0),

View File

@ -236,6 +236,9 @@ t_update_config(Config) when is_list(Config) ->
?assertMatch({ok, _}, update_config([authentication], {delete_authenticator, Global, ID1})),
?assertEqual({error, {not_found, {authenticator, ID1}}}, ?AUTHN:lookup_authenticator(Global, ID1)),
?assertMatch({ok, _}, update_config([authentication], {delete_authenticator, Global, ID2})),
?assertEqual({error, {not_found, {authenticator, ID2}}}, ?AUTHN:lookup_authenticator(Global, ID2)),
ListenerID = 'tcp:default',
ConfKeyPath = [listeners, tcp, default, authentication],
?assertMatch({ok, _}, update_config(ConfKeyPath, {create_authenticator, ListenerID, AuthenticatorConfig1})),

View File

@ -19,8 +19,8 @@
-include_lib("eunit/include/eunit.hrl").
ssl_opts_dtls_test() ->
Sc = emqx_schema:server_ssl_opts_schema(#{versions => dtls,
ciphers => dtls}, false),
Sc = emqx_schema:server_ssl_opts_schema(#{versions => dtls_all_available,
ciphers => dtls_all_available}, false),
Checked = validate(Sc, #{<<"versions">> => [<<"dtlsv1.2">>, <<"dtlsv1">>]}),
?assertMatch(#{versions := ['dtlsv1.2', 'dtlsv1'],
ciphers := ["ECDHE-ECDSA-AES256-GCM-SHA384" | _]
@ -62,19 +62,14 @@ ssl_opts_cipher_comma_separated_string_test() ->
ssl_opts_tls_psk_test() ->
Sc = emqx_schema:server_ssl_opts_schema(#{}, false),
Checked = validate(Sc, #{<<"versions">> => [<<"tlsv1.2">>]}),
?assertMatch(#{versions := ['tlsv1.2']}, Checked),
#{ciphers := Ciphers} = Checked,
PskCiphers = emqx_schema:default_ciphers(psk),
lists:foreach(fun(Cipher) ->
?assert(lists:member(Cipher, Ciphers))
end, PskCiphers).
?assertMatch(#{versions := ['tlsv1.2']}, Checked).
bad_cipher_test() ->
Sc = emqx_schema:server_ssl_opts_schema(#{}, false),
Reason = {bad_ciphers, ["foo"]},
?assertThrow({_Sc, [{validation_error, #{reason := Reason}}]},
[validate(Sc, #{<<"versions">> => [<<"tlsv1.2">>],
<<"ciphers">> => [<<"foo">>]})]),
validate(Sc, #{<<"versions">> => [<<"tlsv1.2">>],
<<"ciphers">> => [<<"foo">>]})),
ok.
validate(Schema, Data0) ->
@ -95,3 +90,10 @@ ciperhs_schema_test() ->
WSc = #{roots => [{ciphers, Sc}]},
?assertThrow({_, [{validation_error, _}]},
hocon_schema:check_plain(WSc, #{<<"ciphers">> => <<"foo,bar">>})).
bad_tls_version_test() ->
Sc = emqx_schema:server_ssl_opts_schema(#{}, false),
Reason = {unsupported_ssl_versions, [foo]},
?assertThrow({_Sc, [{validation_error, #{reason := Reason}}]},
validate(Sc, #{<<"versions">> => [<<"foo">>]})),
ok.

View File

@ -91,7 +91,7 @@
enable => true})).
-define(INSTANCE_EXAMPLE_2, maps:merge(?EXAMPLE_2, #{id => <<"password-based:http-server">>,
connect_timeout => 5000,
connect_timeout => "5s",
enable_pipelining => true,
headers => #{
<<"accept">> => <<"application/json">>,
@ -102,8 +102,8 @@
},
max_retries => 5,
pool_size => 8,
request_timeout => 5000,
retry_interval => 1000,
request_timeout => "5s",
retry_interval => "1s",
enable => true})).
-define(INSTANCE_EXAMPLE_3, maps:merge(?EXAMPLE_3, #{id => <<"jwt">>,
@ -1259,9 +1259,9 @@ definitions() ->
example => <<"SELECT password_hash FROM mqtt_user WHERE username = ${mqtt-username}">>
},
query_timeout => #{
type => integer,
description => <<"Query timeout, Unit: Milliseconds">>,
default => 5000
type => string,
description => <<"Query timeout">>,
default => "5s"
}
}
},
@ -1528,16 +1528,16 @@ definitions() ->
type => object
},
connect_timeout => #{
type => integer,
default => 5000
type => string,
default => <<"5s">>
},
max_retries => #{
type => integer,
default => 5
},
retry_interval => #{
type => integer,
default => 1000
type => string,
default => <<"1s">>
},
request_timout => #{
type => integer,

View File

@ -100,8 +100,8 @@ body(type) -> map();
body(validator) -> [fun check_body/1];
body(_) -> undefined.
request_timeout(type) -> non_neg_integer();
request_timeout(default) -> 5000;
request_timeout(type) -> emqx_schema:duration_ms();
request_timeout(default) -> "5s";
request_timeout(_) -> undefined.
%%------------------------------------------------------------------------------

View File

@ -65,8 +65,8 @@ salt_position(_) -> undefined.
query(type) -> string();
query(_) -> undefined.
query_timeout(type) -> integer();
query_timeout(default) -> 5000;
query_timeout(type) -> emqx_schema:duration_ms();
query_timeout(default) -> "5s";
query_timeout(_) -> undefined.
%%------------------------------------------------------------------------------

View File

@ -3,9 +3,9 @@
%%
%% -type(ipaddrs() :: {ipaddrs, string()}).
%%
%% -type(username() :: {username, regex()}).
%% -type(username() :: {user | username, string()} | {user | username, {re, regex()}}).
%%
%% -type(clientid() :: {clientid, regex()}).
%% -type(clientid() :: {client | clientid, string()} | {client | clientid, {re, regex()}}).
%%
%% -type(who() :: ipaddr() | ipaddrs() |username() | clientid() |
%% {'and', [ipaddr() | ipaddrs()| username() | clientid()]} |
@ -20,7 +20,7 @@
%%
%% -type(permission() :: allow | deny).
%%
%% -type(rule() :: {permission(), who(), access(), topics()}).
%% -type(rule() :: {permission(), who(), access(), topics()} | {permission(), all}).
%%--------------------------------------------------------------------
{allow, {username, "^dashboard?"}, subscribe, ["$SYS/#"]}.

View File

@ -55,8 +55,12 @@ authorization {
# collection: mqtt_authz
# selector: { "$or": [ { "username": "%u" }, { "clientid": "%c" } ] }
# },
{
type: built-in-database
}
{
type: file
# file is loaded into cache
path: "{{ platform_etc_dir }}/acl.conf"
}
]

View File

@ -29,12 +29,32 @@
(A =:= all) orelse (A =:= <<"all">>)
)).
-define(ACL_SHARDED, emqx_acl_sharded).
-define(ACL_TABLE, emqx_acl).
%% To save some space, use an integer for label, 0 for 'all', {1, Username} and {2, ClientId}.
-define(ACL_TABLE_ALL, 0).
-define(ACL_TABLE_USERNAME, 1).
-define(ACL_TABLE_CLIENTID, 2).
-record(emqx_acl, {
who :: ?ACL_TABLE_ALL| {?ACL_TABLE_USERNAME, binary()} | {?ACL_TABLE_CLIENTID, binary()},
rules :: [ {permission(), action(), emqx_topic:topic()} ]
}).
-record(authz_metrics, {
allow = 'client.authorize.allow',
deny = 'client.authorize.deny',
ignore = 'client.authorize.ignore'
}).
-define(CMD_REPLCAE, replace).
-define(CMD_DELETE, delete).
-define(CMD_PREPEND, prepend).
-define(CMD_APPEND, append).
-define(CMD_MOVE, move).
-define(METRICS(Type), tl(tuple_to_list(#Type{}))).
-define(METRICS(Type, K), #Type{}#Type.K).

View File

@ -39,7 +39,6 @@
-export([post_config_update/4, pre_config_update/2]).
-define(CONF_KEY_PATH, [authorization, sources]).
-define(SOURCE_TYPES, [file, http, mongodb, mysql, postgresql, redis]).
-spec(register_metrics() -> ok).
register_metrics() ->
@ -50,228 +49,151 @@ init() ->
emqx_config_handler:add_handler(?CONF_KEY_PATH, ?MODULE),
Sources = emqx:get_config(?CONF_KEY_PATH, []),
ok = check_dup_types(Sources),
NSources = [init_source(Source) || Source <- Sources],
NSources = init_sources(Sources),
ok = emqx_hooks:add('client.authorize', {?MODULE, authorize, [NSources]}, -1).
%% Return the currently-hooked (initialized) sources list by reading the
%% single-argument list registered on the 'client.authorize' hook.
lookup() ->
{_M, _F, [A]}= find_action_in_hooks(),
A.
%% Look up the initialized source of the given type.
%% Crashes with {authz_source_of_type_not_found, Type} when absent
%% (take/1 raises), letting the caller's API layer turn that into an error.
%% NOTE(review): the previous text contained the removed try/catch body
%% concatenated with the new take/1 body (merge artifact); only the new
%% body is kept.
lookup(Type) ->
    {Source, _Front, _Rear} = take(Type),
    Source.
%% Move a source of the given type within the sources list.
%% Cmd is <<"top">>, <<"bottom">>, #{<<"before">> := Type2} or
%% #{<<"after">> := Type2}; the change goes through emqx:update_config/3
%% so pre/post config-update hooks run.
%% NOTE(review): removed the stale pre-merge clauses (atom/1 + bare 'move'
%% tag) that were left interleaved with the ?CMD_MOVE/type/1 clauses.
move(Type, Cmd) ->
    move(Type, Cmd, #{}).

move(Type, #{<<"before">> := Before}, Opts) ->
    emqx:update_config(?CONF_KEY_PATH, {?CMD_MOVE, type(Type), #{<<"before">> => type(Before)}}, Opts);
move(Type, #{<<"after">> := After}, Opts) ->
    emqx:update_config(?CONF_KEY_PATH, {?CMD_MOVE, type(Type), #{<<"after">> => type(After)}}, Opts);
move(Type, Position, Opts) ->
    emqx:update_config(?CONF_KEY_PATH, {?CMD_MOVE, type(Type), Position}, Opts).
%% Issue a config update for the authorization sources.
%% Known commands normalize the type via type/1; any other Cmd
%% (e.g. ?CMD_PREPEND / ?CMD_APPEND / full replace) is passed through.
%% NOTE(review): removed the stale replace_once/delete_once clauses that a
%% botched merge had left interleaved with the new replace/delete clauses.
update(Cmd, Sources) ->
    update(Cmd, Sources, #{}).

update({replace, Type}, Sources, Opts) ->
    emqx:update_config(?CONF_KEY_PATH, {{replace, type(Type)}, Sources}, Opts);
update({delete, Type}, Sources, Opts) ->
    emqx:update_config(?CONF_KEY_PATH, {{delete, type(Type)}, Sources}, Opts);
update(Cmd, Sources, Opts) ->
    emqx:update_config(?CONF_KEY_PATH, {Cmd, Sources}, Opts).
%% Compute the new sources list for a config-update command.
%% Pure list manipulation; crashes (error/1) on invalid commands or
%% duplicate types, which makes pre_config_update/2 reject the change
%% before anything is persisted.
%% NOTE(review): this span previously interleaved the removed index-based
%% pre_config_update clauses with the new take/2-based do_update clauses
%% (merge artifact); only the new implementation is kept.
do_update({?CMD_MOVE, Type, <<"top">>}, Conf) when is_list(Conf) ->
    {Source, Front, Rear} = take(Type, Conf),
    [Source | Front] ++ Rear;
do_update({?CMD_MOVE, Type, <<"bottom">>}, Conf) when is_list(Conf) ->
    {Source, Front, Rear} = take(Type, Conf),
    Front ++ Rear ++ [Source];
do_update({?CMD_MOVE, Type, #{<<"before">> := Before}}, Conf) when is_list(Conf) ->
    {S1, Front1, Rear1} = take(Type, Conf),
    {S2, Front2, Rear2} = take(Before, Front1 ++ Rear1),
    Front2 ++ [S1, S2] ++ Rear2;
do_update({?CMD_MOVE, Type, #{<<"after">> := After}}, Conf) when is_list(Conf) ->
    {S1, Front1, Rear1} = take(Type, Conf),
    {S2, Front2, Rear2} = take(After, Front1 ++ Rear1),
    Front2 ++ [S2, S1] ++ Rear2;
do_update({?CMD_PREPEND, Sources}, Conf) when is_list(Sources), is_list(Conf) ->
    NConf = Sources ++ Conf,
    ok = check_dup_types(NConf),
    NConf;
do_update({?CMD_APPEND, Sources}, Conf) when is_list(Sources), is_list(Conf) ->
    NConf = Conf ++ Sources,
    ok = check_dup_types(NConf),
    NConf;
do_update({{replace, Type}, Source}, Conf) when is_map(Source), is_list(Conf) ->
    {_Old, Front, Rear} = take(Type, Conf),
    NConf = Front ++ [Source | Rear],
    ok = check_dup_types(NConf),
    NConf;
do_update({{delete, Type}, _Source}, Conf) when is_list(Conf) ->
    {_Old, Front, Rear} = take(Type, Conf),
    Front ++ Rear;
do_update({_, Sources}, _Conf) when is_list(Sources) ->
    %% overwrite the entire config!
    Sources.

%% Config-handler callback: validate/derive the new raw config by
%% delegating to do_update/2.
pre_config_update(Cmd, Conf) ->
    {ok, do_update(Cmd, Conf)}.
%% Config-handler callback run after a change is validated and persisted:
%% re-initialize sources, re-register the 'client.authorize' hook and
%% drain the authorization cache (via do_post_update/2).
%% NOTE(review): removed the eleven pre-merge clauses (index-based
%% move/head/tail/replace_once/delete_once handling) that a botched merge
%% had left in front of the new delegating clause — they shadowed it and
%% referenced removed helpers.
post_config_update(_, undefined, _Conf, _AppEnvs) ->
    ok;
post_config_update(Cmd, NewSources, _OldSource, _AppEnvs) ->
    ok = do_post_update(Cmd, NewSources),
    ok = emqx_authz_cache:drain_cache().
%%--------------------------------------------------------------------
%% Initialize source
%%--------------------------------------------------------------------
%% Apply the side effects of a sources-config change: (re)initialize the
%% affected sources, swap the args registered on the 'client.authorize'
%% hook, release replaced/removed resources, and drain the authz cache.
%% The hook args are always a ONE-element list wrapping the sources list,
%% because authorize/…/NSources takes the whole list as a single argument.
do_post_update({?CMD_MOVE, _Type, _Where} = Cmd, _NewSources) ->
    InitedSources = lookup(),
    %% re-use the pure reordering logic on the already-initialized sources
    MovedSources = do_update(Cmd, InitedSources),
    ok = emqx_hooks:put('client.authorize', {?MODULE, authorize, [MovedSources]}, -1),
    ok = emqx_authz_cache:drain_cache();
do_post_update({?CMD_PREPEND, Sources}, _NewSources) ->
    InitedSources = init_sources(check_sources(Sources)),
    ok = emqx_hooks:put('client.authorize', {?MODULE, authorize, [InitedSources ++ lookup()]}, -1),
    ok = emqx_authz_cache:drain_cache();
do_post_update({?CMD_APPEND, Sources}, _NewSources) ->
    InitedSources = init_sources(check_sources(Sources)),
    %% BUGFIX(review): added 'ok =' for consistency with the other clauses
    ok = emqx_hooks:put('client.authorize', {?MODULE, authorize, [lookup() ++ InitedSources]}, -1),
    ok = emqx_authz_cache:drain_cache();
do_post_update({{replace, Type}, #{type := Type} = Source}, _NewSources) when is_map(Source) ->
    OldInitedSources = lookup(),
    {OldSource, Front, Rear} = take(Type, OldInitedSources),
    ok = ensure_resource_deleted(OldSource),
    InitedSources = init_sources(check_sources([Source])),
    ok = emqx_hooks:put('client.authorize', {?MODULE, authorize, [Front ++ InitedSources ++ Rear]}, -1),
    ok = emqx_authz_cache:drain_cache();
do_post_update({{delete, Type}, _Source}, _NewSources) ->
    OldInitedSources = lookup(),
    {OldSource, Front, Rear} = take(Type, OldInitedSources),
    ok = ensure_resource_deleted(OldSource),
    %% BUGFIX(review): sources must be wrapped as a one-element args list,
    %% [Front ++ Rear], like every other clause; the bare 'Front ++ Rear'
    %% would register each remaining source as a separate hook argument.
    ok = emqx_hooks:put('client.authorize', {?MODULE, authorize, [Front ++ Rear]}, -1),
    ok = emqx_authz_cache:drain_cache();
do_post_update(_, NewSources) ->
    %% overwrite the entire config!
    OldInitedSources = lookup(),
    InitedSources = init_sources(NewSources),
    ok = emqx_hooks:put('client.authorize', {?MODULE, authorize, [InitedSources]}, -1),
    lists:foreach(fun ensure_resource_deleted/1, OldInitedSources),
    ok = emqx_authz_cache:drain_cache().
%% Release the underlying emqx_resource for a source, if it has one.
%% 'file' and 'built-in-database' sources allocate no external resource.
%% NOTE(review): a source of any other type without annotations/id would
%% crash with function_clause here — presumably initialization always sets
%% the id for those types; confirm against init_source/1.
ensure_resource_deleted(#{type := file}) -> ok;
ensure_resource_deleted(#{type := 'built-in-database'}) -> ok;
ensure_resource_deleted(#{annotations := #{id := Id}}) -> ok = emqx_resource:remove(Id).
%% Ensure no source type appears more than once in the list.
%% Returns ok, or crashes with {duplicated_authz_source_type, Type} /
%% {bad_source_input, Source} so the config update is rejected.
%% NOTE(review): removed the stale ?SOURCE_TYPES-counting version that a
%% botched merge had left interleaved with this accumulator version.
check_dup_types(Sources) ->
    check_dup_types(Sources, []).

check_dup_types([], _Checked) -> ok;
check_dup_types([Source | Sources], Checked) ->
    %% the input might be raw or type-checked result, so lookup both 'type' and <<"type">>
    %% TODO: check: really?
    Type = case maps:get(<<"type">>, Source, maps:get(type, Source, undefined)) of
               undefined ->
                   %% this should never happen if the value is type checked by hocon schema
                   error({bad_source_input, Source});
               Type0 ->
                   type(Type0)
           end,
    case lists:member(Type, Checked) of
        true ->
            %% we have made it clear not to support more than one authz instance for each type
            error({duplicated_authz_source_type, Type});
        false ->
            check_dup_types(Sources, [Type | Checked])
    end.
init_source(#{enable := true,
type := file,
%% Partition sources on the 'enable' flag, log the disabled ones at info
%% level, and initialize only the enabled sources (order preserved).
init_sources(Sources) ->
{Enabled, Disabled} = lists:partition(fun(#{enable := Enable}) -> Enable end, Sources),
case Disabled =/= [] of
true -> ?SLOG(info, #{msg => "disabled_sources_ignored", sources => Disabled});
false -> ok
end,
lists:map(fun init_source/1, Enabled).
init_source(#{type := file,
path := Path
} = Source) ->
Rules = case file:consult(Path) of
@ -288,8 +210,7 @@ init_source(#{enable := true,
error(Reason)
end,
Source#{annotations => #{rules => Rules}};
init_source(#{enable := true,
type := http,
init_source(#{type := http,
url := Url
} = Source) ->
NSource= maps:put(base_url, maps:remove(query, Url), Source),
@ -297,16 +218,17 @@ init_source(#{enable := true,
{error, Reason} -> error({load_config_error, Reason});
Id -> Source#{annotations => #{id => Id}}
end;
init_source(#{enable := true,
type := DB
init_source(#{type := 'built-in-database'
} = Source) ->
Source;
init_source(#{type := DB
} = Source) when DB =:= redis;
DB =:= mongodb ->
case create_resource(Source) of
{error, Reason} -> error({load_config_error, Reason});
Id -> Source#{annotations => #{id => Id}}
end;
init_source(#{enable := true,
type := DB,
init_source(#{type := DB,
query := SQL
} = Source) when DB =:= mysql;
DB =:= postgresql ->
@ -318,8 +240,7 @@ init_source(#{enable := true,
query => Mod:parse_query(SQL)
}
}
end;
init_source(#{enable := false} = Source) ->Source.
end.
%%--------------------------------------------------------------------
%% AuthZ callbacks
@ -373,13 +294,17 @@ check_sources(RawSources) ->
#{sources := Sources} = hocon_schema:check_plain(Schema, Conf, #{atom_key => true}),
Sources.
find_source_by_type(Type) -> find_source_by_type(Type, lookup()).
find_source_by_type(Type, Sources) -> find_source_by_type(Type, Sources, 1).
find_source_by_type(_, [], _N) -> error(not_found_source);
find_source_by_type(Type, [ Source = #{type := T} | Tail], N) ->
case Type =:= T of
true -> {N, Source};
false -> find_source_by_type(Type, Tail, N + 1)
%% Take from the currently-hooked sources list; see take/2.
take(Type) -> take(Type, lookup()).
%% Take the source of the given type: the sources list is split into the
%% front part (before the match) and the rear part (after the match).
%% Crashes with {authz_source_of_type_not_found, Type} when no source of
%% that type exists.
take(Type, Sources) ->
{Front, Rear} = lists:splitwith(fun(T) -> type(T) =/= type(Type) end, Sources),
case Rear =:= [] of
true ->
error({authz_source_of_type_not_found, Type});
_ ->
{hd(Rear), Front, tl(Rear)}
end.
find_action_in_hooks() ->
@ -404,6 +329,8 @@ create_resource(#{type := DB} = Source) ->
{error, Reason} -> {error, Reason}
end.
%% Map a source type to its implementation module.
%% list_to_existing_atom/1 is deliberate: the module must already be
%% loaded/known, so an unknown type crashes instead of creating an atom.
authz_module('built-in-database') ->
emqx_authz_mnesia;
authz_module(Type) ->
list_to_existing_atom("emqx_authz_" ++ atom_to_list(Type)).
@ -414,9 +341,20 @@ connector_module(postgresql) ->
connector_module(Type) ->
list_to_existing_atom("emqx_connector_" ++ atom_to_list(Type)).
%% Convert a binary (or atom) to an atom.
%% Prefers binary_to_existing_atom/2 to avoid growing the atom table;
%% creates the atom only when it does not exist yet.
%% BUGFIX(review): 'catch _ ->' matches only the 'throw' class, so the
%% error:badarg raised for a non-existing atom escaped; '_:_' catches it.
atom(B) when is_binary(B) ->
    try binary_to_existing_atom(B, utf8)
    catch
        _:_ -> binary_to_atom(B, utf8)
    end;
atom(A) when is_atom(A) -> A.
%% Normalize an authz source (map) or a type name (atom | binary) to the
%% canonical source-type atom. Accepts both atom-keyed and binary-keyed
%% maps because the input may be raw or schema-checked config.
type(#{type := Type}) -> type(Type);
type(#{<<"type">> := Type}) -> type(Type);
type(file) -> file;
type(<<"file">>) -> file;
type(http) -> http;
type(<<"http">>) -> http;
type(mongodb) -> mongodb;
type(<<"mongodb">>) -> mongodb;
type(mysql) -> mysql;
type(<<"mysql">>) -> mysql;
type(redis) -> redis;
type(<<"redis">>) -> redis;
type(postgresql) -> postgresql;
type(<<"postgresql">>) -> postgresql;
type('built-in-database') -> 'built-in-database';
type(<<"built-in-database">>) -> 'built-in-database';
type(Unknown) -> error({unknown_authz_source_type, Unknown}). % should never happen if the input is type-checked by hocon schema

View File

@ -0,0 +1,590 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_authz_api_mnesia).
-behavior(minirest_api).
-include("emqx_authz.hrl").
-include_lib("emqx/include/logger.hrl").
-include_lib("stdlib/include/ms_transform.hrl").
-define(EXAMPLE_USERNAME, #{username => user1,
rules => [ #{topic => <<"test/toopic/1">>,
permission => <<"allow">>,
action => <<"publish">>
}
, #{topic => <<"test/toopic/2">>,
permission => <<"allow">>,
action => <<"subscribe">>
}
, #{topic => <<"eq test/#">>,
permission => <<"deny">>,
action => <<"all">>
}
]
}).
-define(EXAMPLE_CLIENTID, #{clientid => client1,
rules => [ #{topic => <<"test/toopic/1">>,
permission => <<"allow">>,
action => <<"publish">>
}
, #{topic => <<"test/toopic/2">>,
permission => <<"allow">>,
action => <<"subscribe">>
}
, #{topic => <<"eq test/#">>,
permission => <<"deny">>,
action => <<"all">>
}
]
}).
-define(EXAMPLE_ALL , #{rules => [ #{topic => <<"test/toopic/1">>,
permission => <<"allow">>,
action => <<"publish">>
}
, #{topic => <<"test/toopic/2">>,
permission => <<"allow">>,
action => <<"subscribe">>
}
, #{topic => <<"eq test/#">>,
permission => <<"deny">>,
action => <<"all">>
}
]
}).
-export([ api_spec/0
, purge/2
, records/2
, record/2
]).
%% minirest callback: returns {Endpoints, SchemaDefinitions} for the
%% built-in-database authorization REST API.
api_spec() ->
{[ purge_api()
, records_api()
, record_api()
], definitions()}.
definitions() ->
Rules = #{
type => array,
items => #{
type => object,
required => [topic, permission, action],
properties => #{
topic => #{
type => string,
example => <<"test/topic/1">>
},
permission => #{
type => string,
enum => [<<"allow">>, <<"deny">>],
example => <<"allow">>
},
action => #{
type => string,
enum => [<<"publish">>, <<"subscribe">>, <<"all">>],
example => <<"publish">>
}
}
}
},
Username = #{
type => object,
required => [username, rules],
properties => #{
username => #{
type => string,
example => <<"username">>
},
rules => minirest:ref(<<"rules">>)
}
},
Clientid = #{
type => object,
required => [clientid, rules],
properties => #{
clientid => #{
type => string,
example => <<"clientid">>
},
rules => minirest:ref(<<"rules">>)
}
},
ALL = #{
type => object,
required => [rules],
properties => #{
rules => minirest:ref(<<"rules">>)
}
},
[ #{<<"rules">> => Rules}
, #{<<"username">> => Username}
, #{<<"clientid">> => Clientid}
, #{<<"all">> => ALL}
].
%% OpenAPI spec for DELETE .../built-in-database/purge-all,
%% handled by purge/2.
purge_api() ->
Metadata = #{
delete => #{
description => "Purge all records",
responses => #{
<<"204">> => #{description => <<"No Content">>},
<<"400">> => emqx_mgmt_util:bad_request()
}
}
},
{"/authorization/sources/built-in-database/purge-all", Metadata, purge}.
records_api() ->
Metadata = #{
get => #{
description => "List records",
parameters => [
#{
name => type,
in => path,
schema => #{
type => string,
enum => [<<"username">>, <<"clientid">>, <<"all">>]
},
required => true
},
#{
name => page,
in => query,
required => false,
description => <<"Page Index">>,
schema => #{type => integer}
},
#{
name => limit,
in => query,
required => false,
description => <<"Page limit">>,
schema => #{type => integer}
}
],
responses => #{
<<"200">> => #{
description => <<"OK">>,
content => #{
'application/json' => #{
schema => #{
type => array,
items => #{
oneOf => [ minirest:ref(<<"username">>)
, minirest:ref(<<"clientid">>)
, minirest:ref(<<"all">>)
]
}
},
examples => #{
username => #{
summary => <<"Username">>,
value => jsx:encode([?EXAMPLE_USERNAME])
},
clientid => #{
summary => <<"Clientid">>,
value => jsx:encode([?EXAMPLE_CLIENTID])
},
all => #{
summary => <<"All">>,
value => jsx:encode([?EXAMPLE_ALL])
}
}
}
}
}
}
},
post => #{
description => "Add new records",
parameters => [
#{
name => type,
in => path,
schema => #{
type => string,
enum => [<<"username">>, <<"clientid">>]
},
required => true
}
],
requestBody => #{
content => #{
'application/json' => #{
schema => #{
type => array,
items => #{
oneOf => [ minirest:ref(<<"username">>)
, minirest:ref(<<"clientid">>)
]
}
},
examples => #{
username => #{
summary => <<"Username">>,
value => jsx:encode([?EXAMPLE_USERNAME])
},
clientid => #{
summary => <<"Clientid">>,
value => jsx:encode([?EXAMPLE_CLIENTID])
}
}
}
}
},
responses => #{
<<"204">> => #{description => <<"Created">>},
<<"400">> => emqx_mgmt_util:bad_request()
}
},
put => #{
description => "Set the list of rules for all",
parameters => [
#{
name => type,
in => path,
schema => #{
type => string,
enum => [<<"all">>]
},
required => true
}
],
requestBody => #{
content => #{
'application/json' => #{
schema => #{
type => array,
items => #{
oneOf => [ minirest:ref(<<"username">>)
, minirest:ref(<<"clientid">>)
, minirest:ref(<<"all">>)
]
}
},
examples => #{
username => #{
summary => <<"Username">>,
value => jsx:encode(?EXAMPLE_USERNAME)
},
clientid => #{
summary => <<"Clientid">>,
value => jsx:encode(?EXAMPLE_CLIENTID)
},
all => #{
summary => <<"All">>,
value => jsx:encode(?EXAMPLE_ALL)
}
}
}
}
},
responses => #{
<<"204">> => #{description => <<"Created">>},
<<"400">> => emqx_mgmt_util:bad_request()
}
}
},
{"/authorization/sources/built-in-database/:type", Metadata, records}.
record_api() ->
Metadata = #{
get => #{
description => "Get record info",
parameters => [
#{
name => type,
in => path,
schema => #{
type => string,
enum => [<<"username">>, <<"clientid">>]
},
required => true
},
#{
name => key,
in => path,
schema => #{
type => string
},
required => true
}
],
responses => #{
<<"200">> => #{
description => <<"OK">>,
content => #{
'application/json' => #{
schema => #{
oneOf => [ minirest:ref(<<"username">>)
, minirest:ref(<<"clientid">>)
]
},
examples => #{
username => #{
summary => <<"Username">>,
value => jsx:encode(?EXAMPLE_USERNAME)
},
clientid => #{
summary => <<"Clientid">>,
value => jsx:encode(?EXAMPLE_CLIENTID)
}
}
}
}
},
<<"404">> => emqx_mgmt_util:bad_request(<<"Not Found">>)
}
},
put => #{
description => "Update one record",
parameters => [
#{
name => type,
in => path,
schema => #{
type => string,
enum => [<<"username">>, <<"clientid">>]
},
required => true
},
#{
name => key,
in => path,
schema => #{
type => string
},
required => true
}
],
requestBody => #{
content => #{
'application/json' => #{
schema => #{
oneOf => [ minirest:ref(<<"username">>)
, minirest:ref(<<"clientid">>)
]
},
examples => #{
username => #{
summary => <<"Username">>,
value => jsx:encode(?EXAMPLE_USERNAME)
},
clientid => #{
summary => <<"Clientid">>,
value => jsx:encode(?EXAMPLE_CLIENTID)
}
}
}
}
},
responses => #{
<<"204">> => #{description => <<"Updated">>},
<<"400">> => emqx_mgmt_util:bad_request()
}
},
delete => #{
description => "Delete one record",
parameters => [
#{
name => type,
in => path,
schema => #{
type => string,
enum => [<<"username">>, <<"clientid">>]
},
required => true
},
#{
name => key,
in => path,
schema => #{
type => string
},
required => true
}
],
responses => #{
<<"204">> => #{description => <<"No Content">>},
<<"400">> => emqx_mgmt_util:bad_request()
}
}
},
{"/authorization/sources/built-in-database/:type/:key", Metadata, record}.
%% DELETE /authorization/sources/built-in-database/purge-all
%% Deletes every row in the ACL table, but only while the
%% built-in-database source is disabled, so rules cannot vanish under an
%% active authorizer; otherwise responds 400.
purge(delete, _) ->
case emqx_authz_api_sources:get_raw_source(<<"built-in-database">>) of
[#{enable := false}] ->
%% dirty (non-transactional) deletes are acceptable for a full wipe
ok = lists:foreach(fun(Key) ->
ok = ekka_mnesia:dirty_delete(?ACL_TABLE, Key)
end, mnesia:dirty_all_keys(?ACL_TABLE)),
{204};
_ ->
{400, #{code => <<"BAD_REQUEST">>,
message => <<"'built-in-database' type source must be disabled before purge.">>}}
end.
%% Handler for /authorization/sources/built-in-database/:type.
%% GET lists rules per username / clientid / all (with optional paging),
%% POST bulk-inserts username/clientid rules, PUT replaces the 'all' rules.
records(get, #{bindings := #{type := <<"username">>},
query_string := Qs
}) ->
%% select only username-keyed rows from the ACL table
MatchSpec = ets:fun2ms(
fun({?ACL_TABLE, {?ACL_TABLE_USERNAME, Username}, Rules}) ->
[{username, Username}, {rules, Rules}]
end),
Format = fun ([{username, Username}, {rules, Rules}]) ->
#{username => Username,
rules => [ #{topic => Topic,
action => Action,
permission => Permission
} || {Permission, Action, Topic} <- Rules]
}
end,
%% page+limit -> paginated; limit only -> first chunk; neither -> all rows
case Qs of
#{<<"limit">> := _, <<"page">> := _} = Page ->
{200, emqx_mgmt_api:paginate(?ACL_TABLE, MatchSpec, Page, Format)};
#{<<"limit">> := Limit} ->
case ets:select(?ACL_TABLE, MatchSpec, binary_to_integer(Limit)) of
{Rows, _Continuation} -> {200, [Format(Row) || Row <- Rows ]};
'$end_of_table' -> {404, #{code => <<"NOT_FOUND">>, message => <<"Not Found">>}}
end;
_ ->
{200, [Format(Row) || Row <- ets:select(?ACL_TABLE, MatchSpec)]}
end;
records(get, #{bindings := #{type := <<"clientid">>},
query_string := Qs
}) ->
%% same shape as the username clause, keyed by clientid
MatchSpec = ets:fun2ms(
fun({?ACL_TABLE, {?ACL_TABLE_CLIENTID, Clientid}, Rules}) ->
[{clientid, Clientid}, {rules, Rules}]
end),
Format = fun ([{clientid, Clientid}, {rules, Rules}]) ->
#{clientid => Clientid,
rules => [ #{topic => Topic,
action => Action,
permission => Permission
} || {Permission, Action, Topic} <- Rules]
}
end,
case Qs of
#{<<"limit">> := _, <<"page">> := _} = Page ->
{200, emqx_mgmt_api:paginate(?ACL_TABLE, MatchSpec, Page, Format)};
#{<<"limit">> := Limit} ->
case ets:select(?ACL_TABLE, MatchSpec, binary_to_integer(Limit)) of
{Rows, _Continuation} -> {200, [Format(Row) || Row <- Rows ]};
'$end_of_table' -> {404, #{code => <<"NOT_FOUND">>, message => <<"Not Found">>}}
end;
_ ->
{200, [Format(Row) || Row <- ets:select(?ACL_TABLE, MatchSpec)]}
end;
records(get, #{bindings := #{type := <<"all">>}}) ->
%% the 'all' bucket is a single row keyed by ?ACL_TABLE_ALL; no paging
MatchSpec = ets:fun2ms(
fun({?ACL_TABLE, ?ACL_TABLE_ALL, Rules}) ->
[{rules, Rules}]
end),
{200, [ #{rules => [ #{topic => Topic,
action => Action,
permission => Permission
} || {Permission, Action, Topic} <- Rules]
} || [{rules, Rules}] <- ets:select(?ACL_TABLE, MatchSpec)]};
records(post, #{bindings := #{type := <<"username">>},
body := Body}) when is_list(Body) ->
%% dirty writes, one per entry; format_rules/1 crashes on invalid rules
lists:foreach(fun(#{<<"username">> := Username, <<"rules">> := Rules}) ->
ekka_mnesia:dirty_write(#emqx_acl{
who = {?ACL_TABLE_USERNAME, Username},
rules = format_rules(Rules)
})
end, Body),
{204};
records(post, #{bindings := #{type := <<"clientid">>},
body := Body}) when is_list(Body) ->
lists:foreach(fun(#{<<"clientid">> := Clientid, <<"rules">> := Rules}) ->
ekka_mnesia:dirty_write(#emqx_acl{
who = {?ACL_TABLE_CLIENTID, Clientid},
rules = format_rules(Rules)
})
end, Body),
{204};
records(put, #{bindings := #{type := <<"all">>},
body := #{<<"rules">> := Rules}}) ->
%% replaces the whole 'all' rule set (single row)
ekka_mnesia:dirty_write(#emqx_acl{
who = ?ACL_TABLE_ALL,
rules = format_rules(Rules)
}),
{204}.
%% Handler for /authorization/sources/built-in-database/:type/:key.
%% GET returns one username/clientid entry (404 when absent), PUT replaces
%% it (the path key must match the key inside the body, enforced by the
%% repeated pattern variable), DELETE removes it.
record(get, #{bindings := #{type := <<"username">>, key := Key}}) ->
case mnesia:dirty_read(?ACL_TABLE, {?ACL_TABLE_USERNAME, Key}) of
[] -> {404, #{code => <<"NOT_FOUND">>, message => <<"Not Found">>}};
[#emqx_acl{who = {?ACL_TABLE_USERNAME, Username}, rules = Rules}] ->
{200, #{username => Username,
rules => [ #{topic => Topic,
action => Action,
permission => Permission
} || {Permission, Action, Topic} <- Rules]}
}
end;
record(get, #{bindings := #{type := <<"clientid">>, key := Key}}) ->
case mnesia:dirty_read(?ACL_TABLE, {?ACL_TABLE_CLIENTID, Key}) of
[] -> {404, #{code => <<"NOT_FOUND">>, message => <<"Not Found">>}};
[#emqx_acl{who = {?ACL_TABLE_CLIENTID, Clientid}, rules = Rules}] ->
{200, #{clientid => Clientid,
rules => [ #{topic => Topic,
action => Action,
permission => Permission
} || {Permission, Action, Topic} <- Rules]}
}
end;
record(put, #{bindings := #{type := <<"username">>, key := Username},
body := #{<<"username">> := Username, <<"rules">> := Rules}}) ->
ekka_mnesia:dirty_write(#emqx_acl{
who = {?ACL_TABLE_USERNAME, Username},
rules = format_rules(Rules)
}),
{204};
record(put, #{bindings := #{type := <<"clientid">>, key := Clientid},
body := #{<<"clientid">> := Clientid, <<"rules">> := Rules}}) ->
ekka_mnesia:dirty_write(#emqx_acl{
who = {?ACL_TABLE_CLIENTID, Clientid},
rules = format_rules(Rules)
}),
{204};
record(delete, #{bindings := #{type := <<"username">>, key := Key}}) ->
%% delete is idempotent: deleting a missing key still returns 204
ekka_mnesia:dirty_delete({?ACL_TABLE, {?ACL_TABLE_USERNAME, Key}}),
{204};
record(delete, #{bindings := #{type := <<"clientid">>, key := Key}}) ->
ekka_mnesia:dirty_delete({?ACL_TABLE, {?ACL_TABLE_CLIENTID, Key}}),
{204}.
%% Convert raw JSON rule maps into internal {Permission, Action, Topic}
%% tuples. The fun-head guards (?PUBSUB / ?ALLOW_DENY) make invalid input
%% crash with function_clause, rejecting the whole request.
%% NOTE(review): 'AccIn ++ [..]' is O(n^2) in the number of rules —
%% presumably rule lists are small; confirm before optimizing.
format_rules(Rules) when is_list(Rules) ->
lists:foldl(fun(#{<<"topic">> := Topic,
<<"action">> := Action,
<<"permission">> := Permission
}, AccIn) when ?PUBSUB(Action)
andalso ?ALLOW_DENY(Permission) ->
AccIn ++ [{ atom(Permission), atom(Action), Topic }]
end, [], Rules).
%% Convert a binary (or atom) to an atom, preferring existing atoms to
%% avoid growing the atom table.
%% BUGFIX(review): 'catch _ ->' matches only the 'throw' class, so the
%% error:badarg raised by binary_to_existing_atom/2 for a new atom
%% escaped; '_:_' catches any class.
atom(B) when is_binary(B) ->
    try binary_to_existing_atom(B, utf8)
    catch
        _:_ -> binary_to_atom(B, utf8)
    end;
atom(A) when is_atom(A) -> A.

View File

@ -21,6 +21,7 @@
definitions() ->
Sources = #{
oneOf => [ minirest:ref(<<"http">>)
, minirest:ref(<<"built-in-database">>)
, minirest:ref(<<"mongo_single">>)
, minirest:ref(<<"mongo_rs">>)
, minirest:ref(<<"mongo_sharded">>)
@ -79,9 +80,9 @@ definitions() ->
},
headers => #{type => object},
body => #{type => object},
connect_timeout => #{type => integer},
connect_timeout => #{type => string},
max_retries => #{type => integer},
retry_interval => #{type => integer},
retry_interval => #{type => string},
pool_type => #{
type => string,
enum => [<<"random">>, <<"hash">>],
@ -133,8 +134,8 @@ definitions() ->
properties => #{
pool_size => #{type => integer},
max_overflow => #{type => integer},
overflow_ttl => #{type => integer},
overflow_check_period => #{type => integer},
overflow_ttl => #{type => string},
overflow_check_period => #{type => string},
local_threshold_ms => #{type => integer},
connect_timeout_ms => #{type => integer},
socket_timeout_ms => #{type => integer},
@ -191,8 +192,8 @@ definitions() ->
properties => #{
pool_size => #{type => integer},
max_overflow => #{type => integer},
overflow_ttl => #{type => integer},
overflow_check_period => #{type => integer},
overflow_ttl => #{type => string},
overflow_check_period => #{type => string},
local_threshold_ms => #{type => integer},
connect_timeout_ms => #{type => integer},
socket_timeout_ms => #{type => integer},
@ -247,8 +248,8 @@ definitions() ->
properties => #{
pool_size => #{type => integer},
max_overflow => #{type => integer},
overflow_ttl => #{type => integer},
overflow_check_period => #{type => integer},
overflow_ttl => #{type => string},
overflow_check_period => #{type => string},
local_threshold_ms => #{type => integer},
connect_timeout_ms => #{type => integer},
socket_timeout_ms => #{type => integer},
@ -446,6 +447,21 @@ definitions() ->
ssl => minirest:ref(<<"ssl">>)
}
},
Mnesia = #{
type => object,
required => [type, enable],
properties => #{
type => #{
type => string,
enum => [<<"redis">>],
example => <<"redis">>
},
enable => #{
type => boolean,
example => true
}
}
},
File = #{
type => object,
required => [type, enable, rules],
@ -475,6 +491,7 @@ definitions() ->
[ #{<<"sources">> => Sources}
, #{<<"ssl">> => SSL}
, #{<<"http">> => HTTP}
, #{<<"built-in-database">> => Mnesia}
, #{<<"mongo_single">> => MongoSingle}
, #{<<"mongo_rs">> => MongoRs}
, #{<<"mongo_sharded">> => MongoSharded}

View File

@ -41,6 +41,10 @@
]
}).
-export([ get_raw_sources/0
, get_raw_source/1
]).
-export([ api_spec/0
, sources/2
, source/2
@ -147,7 +151,15 @@ source_api() ->
name => type,
in => path,
schema => #{
type => string
type => string,
enum => [ <<"file">>
, <<"http">>
, <<"mongodb">>
, <<"mysql">>
, <<"postgresql">>
, <<"redis">>
, <<"built-in-database">>
]
},
required => true
}
@ -181,7 +193,15 @@ source_api() ->
name => type,
in => path,
schema => #{
type => string
type => string,
enum => [ <<"file">>
, <<"http">>
, <<"mongodb">>
, <<"mysql">>
, <<"postgresql">>
, <<"redis">>
, <<"built-in-database">>
]
},
required => true
}
@ -216,7 +236,15 @@ source_api() ->
name => type,
in => path,
schema => #{
type => string
type => string,
enum => [ <<"file">>
, <<"http">>
, <<"mongodb">>
, <<"mysql">>
, <<"postgresql">>
, <<"redis">>
, <<"built-in-database">>
]
},
required => true
}
@ -238,7 +266,15 @@ move_source_api() ->
name => type,
in => path,
schema => #{
type => string
type => string,
enum => [ <<"file">>
, <<"http">>
, <<"mongodb">>
, <<"mysql">>
, <<"postgresql">>
, <<"redis">>
, <<"built-in-database">>
]
},
required => true
}
@ -290,7 +326,7 @@ move_source_api() ->
{"/authorization/sources/:type/move", Metadata, move_source}.
sources(get, _) ->
Sources = lists:foldl(fun (#{type := file, enable := Enable, path := Path}, AccIn) ->
Sources = lists:foldl(fun (#{<<"type">> := <<"file">>, <<"enable">> := Enable, <<"path">> := Path}, AccIn) ->
case file:read_file(Path) of
{ok, Rules} ->
lists:append(AccIn, [#{type => file,
@ -309,7 +345,7 @@ sources(get, _) ->
{200, #{sources => Sources}};
sources(post, #{body := #{<<"type">> := <<"file">>, <<"rules">> := Rules}}) ->
{ok, Filename} = write_file(filename:join([emqx:get_config([node, data_dir]), "acl.conf"]), Rules),
update_config(head, [#{type => file, enable => true, path => Filename}]);
update_config(head, [#{<<"type">> => <<"file">>, <<"enable">> => true, <<"path">> => Filename}]);
sources(post, #{body := Body}) when is_map(Body) ->
update_config(head, [write_cert(Body)]);
sources(put, #{body := Body}) when is_list(Body) ->
@ -317,16 +353,16 @@ sources(put, #{body := Body}) when is_list(Body) ->
case Source of
#{<<"type">> := <<"file">>, <<"rules">> := Rules, <<"enable">> := Enable} ->
{ok, Filename} = write_file(filename:join([emqx:get_config([node, data_dir]), "acl.conf"]), Rules),
#{type => file, enable => Enable, path => Filename};
#{<<"type">> => <<"file">>, <<"enable">> => Enable, <<"path">> => Filename};
_ -> write_cert(Source)
end
end || Source <- Body],
update_config(replace, NBody).
update_config(?CMD_REPLCAE, NBody).
source(get, #{bindings := #{type := Type}}) ->
case get_raw_source(Type) of
[] -> {404, #{message => <<"Not found ", Type/binary>>}};
[#{type := <<"file">>, enable := Enable, path := Path}] ->
[#{<<"type">> := <<"file">>, <<"enable">> := Enable, <<"path">> := Path}] ->
case file:read_file(Path) of
{ok, Rules} ->
{200, #{type => file,
@ -336,23 +372,23 @@ source(get, #{bindings := #{type := Type}}) ->
};
{error, Reason} ->
{400, #{code => <<"BAD_REQUEST">>,
message => atom_to_binary(Reason)}}
message => bin(Reason)}}
end;
[Source] ->
{200, read_cert(Source)}
end;
source(put, #{bindings := #{type := <<"file">>}, body := #{<<"type">> := <<"file">>, <<"rules">> := Rules, <<"enable">> := Enable}}) ->
{ok, Filename} = write_file(maps:get(path, emqx_authz:lookup(file), ""), Rules),
case emqx_authz:update({replace_once, file}, #{type => file, enable => Enable, path => Filename}) of
case emqx_authz:update({?CMD_REPLCAE, file}, #{<<"type">> => file, <<"enable">> => Enable, <<"path">> => Filename}) of
{ok, _} -> {204};
{error, Reason} ->
{400, #{code => <<"BAD_REQUEST">>,
message => atom_to_binary(Reason)}}
message => bin(Reason)}}
end;
source(put, #{bindings := #{type := Type}, body := Body}) when is_map(Body) ->
update_config({replace_once, Type}, write_cert(Body));
update_config({?CMD_REPLCAE, Type}, write_cert(Body));
source(delete, #{bindings := #{type := Type}}) ->
update_config({delete_once, Type}, #{}).
update_config({?CMD_DELETE, Type}, #{}).
move_source(post, #{bindings := #{type := Type}, body := #{<<"position">> := Position}}) ->
case emqx_authz:move(Type, Position) of
@ -362,18 +398,18 @@ move_source(post, #{bindings := #{type := Type}, body := #{<<"position">> := Pos
message => <<"source ", Type/binary, " not found">>}};
{error, Reason} ->
{400, #{code => <<"BAD_REQUEST">>,
message => atom_to_binary(Reason)}}
message => bin(Reason)}}
end.
get_raw_sources() ->
RawSources = emqx:get_raw_config([authorization, sources]),
Schema = #{roots => emqx_authz_schema:fields("authorization"), fields => #{}},
Conf = #{<<"sources">> => RawSources},
#{sources := Sources} = hocon_schema:check_plain(Schema, Conf, #{atom_key => true, no_conversion => true}),
#{<<"sources">> := Sources} = hocon_schema:check_plain(Schema, Conf, #{only_fill_defaults => true}),
Sources.
get_raw_source(Type) ->
lists:filter(fun (#{type := T}) ->
lists:filter(fun (#{<<"type">> := T}) ->
T =:= Type
end, get_raw_sources()).
@ -382,16 +418,16 @@ update_config(Cmd, Sources) ->
{ok, _} -> {204};
{error, {pre_config_update, emqx_authz, Reason}} ->
{400, #{code => <<"BAD_REQUEST">>,
message => atom_to_binary(Reason)}};
message => bin(Reason)}};
{error, {post_config_update, emqx_authz, Reason}} ->
{400, #{code => <<"BAD_REQUEST">>,
message => atom_to_binary(Reason)}};
message => bin(Reason)}};
{error, Reason} ->
{400, #{code => <<"BAD_REQUEST">>,
message => atom_to_binary(Reason)}}
message => bin(Reason)}}
end.
read_cert(#{ssl := #{enable := true} = SSL} = Source) ->
read_cert(#{<<"ssl">> := #{<<"enable">> := true} = SSL} = Source) ->
CaCert = case file:read_file(maps:get(cacertfile, SSL, "")) of
{ok, CaCert0} -> CaCert0;
_ -> ""
@ -459,3 +495,6 @@ do_write_file(Filename, Bytes) ->
?LOG(error, "Write File ~p Error: ~p", [Filename, Reason]),
error(Reason)
end.
%% Render any Erlang term as a binary using ~p formatting
%% (used to turn arbitrary error reasons into API message strings).
bin(Term) ->
    Rendered = io_lib:format("~p", [Term]),
    iolist_to_binary(Rendered).

View File

@ -7,9 +7,12 @@
-behaviour(application).
-include("emqx_authz.hrl").
-export([start/2, stop/1]).
%% Application start callback: wait for the ACL replication shard before
%% serving, then boot the supervision tree and initialize authz sources.
start(_StartType, _StartArgs) ->
    %% infinite wait is deliberate — authz must not run before the shard is up
    ok = ekka_rlog:wait_for_shards([?ACL_SHARDED], infinity),
    {ok, Sup} = emqx_authz_sup:start_link(),
    ok = emqx_authz:init(),
    {ok, Sup}.

View File

@ -0,0 +1,76 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_authz_mnesia).
-include("emqx_authz.hrl").
-include_lib("emqx/include/emqx.hrl").
-include_lib("emqx/include/logger.hrl").
%% AuthZ Callbacks
-export([ mnesia/1
, authorize/4
, description/0
]).
-ifdef(TEST).
-compile(export_all).
-compile(nowarn_export_all).
-endif.
-boot_mnesia({mnesia, [boot]}).
-copy_mnesia({mnesia, [copy]}).
%% Mnesia bootstrap callbacks, wired in via -boot_mnesia/-copy_mnesia:
%% boot creates the ACL table on this node, copy replicates it to disc.
-spec(mnesia(boot | copy) -> ok).
mnesia(boot) ->
    ok = ekka_mnesia:create_table(?ACL_TABLE, [
            {type, ordered_set},
            {rlog_shard, ?ACL_SHARDED},
            {disc_copies, [node()]},
            {attributes, record_info(fields, ?ACL_TABLE)},
            %% ACL lookups are read-heavy, so favor concurrent ETS reads
            {storage_properties, [{ets, [{read_concurrency, true}]}]}]);
mnesia(copy) ->
    ok = ekka_mnesia:copy_table(?ACL_TABLE, disc_copies).
%% Human-readable name of this authorization backend.
description() ->
    "AuthZ with Mnesia".
%% Authorize `Client' against the built-in database.  Rules are gathered
%% most-specific first — clientid record, then username record, then the
%% catch-all record — and evaluated in that order by do_authorize/4.
authorize(#{username := Username,
            clientid := Clientid
           } = Client, PubSub, Topic, #{type := 'built-in-database'}) ->
    Lookup = fun(Key) ->
                     case mnesia:dirty_read(?ACL_TABLE, Key) of
                         [] -> [];
                         [#emqx_acl{rules = Rs}] when is_list(Rs) -> Rs
                     end
             end,
    Rules = Lookup({?ACL_TABLE_CLIENTID, Clientid})
            ++ Lookup({?ACL_TABLE_USERNAME, Username})
            ++ Lookup(?ACL_TABLE_ALL),
    do_authorize(Client, PubSub, Topic, Rules).
%% Walk the rule list in order; the first matching rule decides.
%% Returns {matched, allow | deny} or nomatch when no rule applies.
do_authorize(_Client, _PubSub, _Topic, []) ->
    nomatch;
do_authorize(Client, PubSub, Topic, [{Permission, Action, TopicFilter} | Rest]) ->
    %% each stored tuple is compiled into a full rule with `who' = all,
    %% since the table key already selected the principal
    Compiled = emqx_authz_rule:compile({Permission, all, Action, [TopicFilter]}),
    case emqx_authz_rule:match(Client, PubSub, Topic, Compiled) of
        {matched, Permission} -> {matched, Permission};
        nomatch -> do_authorize(Client, PubSub, Topic, Rest)
    end.

View File

@ -58,9 +58,9 @@ do_authorize(Client, PubSub, Topic, [Rule | Tail]) ->
end.
replvar(Selector, #{clientid := Clientid,
username := Username,
peerhost := IpAddress
}) ->
username := Username,
peerhost := IpAddress
}) ->
Fun = fun
_Fun(K, V, AccIn) when is_map(V) -> maps:put(K, maps:fold(_Fun, AccIn, V), AccIn);
_Fun(K, V, AccIn) when is_list(V) ->

View File

@ -69,7 +69,6 @@ do_authorize(Client, PubSub, Topic, Columns, [Row | Tail]) ->
nomatch -> do_authorize(Client, PubSub, Topic, Columns, Tail)
end.
format_result(Columns, Row) ->
Permission = lists:nth(index(<<"permission">>, Columns), Row),
Action = lists:nth(index(<<"action">>, Columns), Row),

View File

@ -32,16 +32,21 @@
-export_type([rule/0]).
compile({Permission, all}) when ?ALLOW_DENY(Permission) -> {Permission, all, all, [compile_topic(<<"#">>)]};
compile({Permission, Who, Action, TopicFilters}) when ?ALLOW_DENY(Permission), ?PUBSUB(Action), is_list(TopicFilters) ->
{atom(Permission), compile_who(Who), atom(Action), [compile_topic(Topic) || Topic <- TopicFilters]}.
compile_who(all) -> all;
compile_who({username, Username}) ->
compile_who({user, Username}) -> compile_who({username, Username});
compile_who({username, {re, Username}}) ->
{ok, MP} = re:compile(bin(Username)),
{username, MP};
compile_who({clientid, Clientid}) ->
compile_who({username, Username}) -> {username, {eq, bin(Username)}};
compile_who({client, Clientid}) -> compile_who({clientid, Clientid});
compile_who({clientid, {re, Clientid}}) ->
{ok, MP} = re:compile(bin(Clientid)),
{clientid, MP};
compile_who({clientid, Clientid}) -> {clientid, {eq, bin(Clientid)}};
compile_who({ipaddr, CIDR}) ->
{ipaddr, esockd_cidr:parse(CIDR, true)};
compile_who({ipaddrs, CIDRs}) ->
@ -102,14 +107,16 @@ match_action(_, all) -> true;
match_action(_, _) -> false.
match_who(_, all) -> true;
match_who(#{username := undefined}, {username, _MP}) ->
match_who(#{username := undefined}, {username, _}) ->
false;
match_who(#{username := Username}, {username, MP}) ->
match_who(#{username := Username}, {username, {eq, Username}}) -> true;
match_who(#{username := Username}, {username, {re_pattern, _, _, _, _} = MP}) ->
case re:run(Username, MP) of
{match, _} -> true;
_ -> false
end;
match_who(#{clientid := Clientid}, {clientid, MP}) ->
match_who(#{clientid := Clientid}, {clientid, {eq, Clientid}}) -> true;
match_who(#{clientid := Clientid}, {clientid, {re_pattern, _, _, _, _} = MP}) ->
case re:run(Clientid, MP) of
{match, _} -> true;
_ -> false

View File

@ -18,6 +18,8 @@
, fields/1
]).
-import(emqx_schema, [mk_duration/2]).
namespace() -> authz.
%% @doc authorization schema is not exported
@ -29,6 +31,7 @@ fields("authorization") ->
[ hoconsc:ref(?MODULE, file)
, hoconsc:ref(?MODULE, http_get)
, hoconsc:ref(?MODULE, http_post)
, hoconsc:ref(?MODULE, mnesia)
, hoconsc:ref(?MODULE, mongo_single)
, hoconsc:ref(?MODULE, mongo_rs)
, hoconsc:ref(?MODULE, mongo_sharded)
@ -45,11 +48,7 @@ fields(file) ->
, {enable, #{type => boolean(),
default => true}}
, {path, #{type => string(),
validator => fun(S) -> case filelib:is_file(S) of
true -> ok;
_ -> {error, "File does not exist"}
end
end
desc => "Path to the file which contains the ACL rules."
}}
];
fields(http_get) ->
@ -77,7 +76,7 @@ fields(http_get) ->
end
}
}
, {request_timeout, #{type => timeout(), default => 30000 }}
, {request_timeout, mk_duration("request timeout", #{default => "30s"})}
] ++ proplists:delete(base_url, emqx_connector_http:fields(config));
fields(http_post) ->
[ {type, #{type => http}}
@ -107,12 +106,17 @@ fields(http_post) ->
end
}
}
, {request_timeout, #{type => timeout(), default => 30000 }}
, {request_timeout, mk_duration("request timeout", #{default => "30s"})}
, {body, #{type => map(),
nullable => true
}
}
] ++ proplists:delete(base_url, emqx_connector_http:fields(config));
fields(mnesia) ->
[ {type, #{type => 'built-in-database'}}
, {enable, #{type => boolean(),
default => true}}
];
fields(mongo_single) ->
[ {collection, #{type => atom()}}
, {selector, #{type => map()}}

View File

@ -50,14 +50,14 @@ init_per_suite(Config) ->
Config.
end_per_suite(_Config) ->
{ok, _} = emqx_authz:update(replace, []),
{ok, _} = emqx_authz:update(?CMD_REPLCAE, []),
emqx_ct_helpers:stop_apps([emqx_authz, emqx_resource]),
meck:unload(emqx_resource),
meck:unload(emqx_schema),
ok.
init_per_testcase(_, Config) ->
{ok, _} = emqx_authz:update(replace, []),
{ok, _} = emqx_authz:update(?CMD_REPLCAE, []),
Config.
-define(SOURCE1, #{<<"type">> => <<"http">>,
@ -120,12 +120,12 @@ init_per_testcase(_, Config) ->
%%------------------------------------------------------------------------------
t_update_source(_) ->
{ok, _} = emqx_authz:update(replace, [?SOURCE3]),
{ok, _} = emqx_authz:update(head, [?SOURCE2]),
{ok, _} = emqx_authz:update(head, [?SOURCE1]),
{ok, _} = emqx_authz:update(tail, [?SOURCE4]),
{ok, _} = emqx_authz:update(tail, [?SOURCE5]),
{ok, _} = emqx_authz:update(tail, [?SOURCE6]),
{ok, _} = emqx_authz:update(?CMD_REPLCAE, [?SOURCE3]),
{ok, _} = emqx_authz:update(?CMD_PREPEND, [?SOURCE2]),
{ok, _} = emqx_authz:update(?CMD_PREPEND, [?SOURCE1]),
{ok, _} = emqx_authz:update(?CMD_APPEND, [?SOURCE4]),
{ok, _} = emqx_authz:update(?CMD_APPEND, [?SOURCE5]),
{ok, _} = emqx_authz:update(?CMD_APPEND, [?SOURCE6]),
?assertMatch([ #{type := http, enable := true}
, #{type := mongodb, enable := true}
@ -135,12 +135,12 @@ t_update_source(_) ->
, #{type := file, enable := true}
], emqx:get_config([authorization, sources], [])),
{ok, _} = emqx_authz:update({replace_once, http}, ?SOURCE1#{<<"enable">> := false}),
{ok, _} = emqx_authz:update({replace_once, mongodb}, ?SOURCE2#{<<"enable">> := false}),
{ok, _} = emqx_authz:update({replace_once, mysql}, ?SOURCE3#{<<"enable">> := false}),
{ok, _} = emqx_authz:update({replace_once, postgresql}, ?SOURCE4#{<<"enable">> := false}),
{ok, _} = emqx_authz:update({replace_once, redis}, ?SOURCE5#{<<"enable">> := false}),
{ok, _} = emqx_authz:update({replace_once, file}, ?SOURCE6#{<<"enable">> := false}),
{ok, _} = emqx_authz:update({?CMD_REPLCAE, http}, ?SOURCE1#{<<"enable">> := false}),
{ok, _} = emqx_authz:update({?CMD_REPLCAE, mongodb}, ?SOURCE2#{<<"enable">> := false}),
{ok, _} = emqx_authz:update({?CMD_REPLCAE, mysql}, ?SOURCE3#{<<"enable">> := false}),
{ok, _} = emqx_authz:update({?CMD_REPLCAE, postgresql}, ?SOURCE4#{<<"enable">> := false}),
{ok, _} = emqx_authz:update({?CMD_REPLCAE, redis}, ?SOURCE5#{<<"enable">> := false}),
{ok, _} = emqx_authz:update({?CMD_REPLCAE, file}, ?SOURCE6#{<<"enable">> := false}),
?assertMatch([ #{type := http, enable := false}
, #{type := mongodb, enable := false}
@ -150,10 +150,10 @@ t_update_source(_) ->
, #{type := file, enable := false}
], emqx:get_config([authorization, sources], [])),
{ok, _} = emqx_authz:update(replace, []).
{ok, _} = emqx_authz:update(?CMD_REPLCAE, []).
t_move_source(_) ->
{ok, _} = emqx_authz:update(replace, [?SOURCE1, ?SOURCE2, ?SOURCE3, ?SOURCE4, ?SOURCE5, ?SOURCE6]),
{ok, _} = emqx_authz:update(?CMD_REPLCAE, [?SOURCE1, ?SOURCE2, ?SOURCE3, ?SOURCE4, ?SOURCE5, ?SOURCE6]),
?assertMatch([ #{type := http}
, #{type := mongodb}
, #{type := mysql}

View File

@ -0,0 +1,224 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_authz_api_mnesia_SUITE).
-compile(nowarn_export_all).
-compile(export_all).
-include("emqx_authz.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
-define(CONF_DEFAULT, <<"authorization: {sources: []}">>).
-import(emqx_ct_http, [ request_api/3
, request_api/5
, get_http_data/1
, create_default_app/0
, delete_default_app/0
, default_auth_header/0
, auth_header/2
]).
-define(HOST, "http://127.0.0.1:18083/").
-define(API_VERSION, "v5").
-define(BASE_PATH, "api").
-define(EXAMPLE_USERNAME, #{username => user1,
rules => [ #{topic => <<"test/toopic/1">>,
permission => <<"allow">>,
action => <<"publish">>
}
, #{topic => <<"test/toopic/2">>,
permission => <<"allow">>,
action => <<"subscribe">>
}
, #{topic => <<"eq test/#">>,
permission => <<"deny">>,
action => <<"all">>
}
]
}).
-define(EXAMPLE_CLIENTID, #{clientid => client1,
rules => [ #{topic => <<"test/toopic/1">>,
permission => <<"allow">>,
action => <<"publish">>
}
, #{topic => <<"test/toopic/2">>,
permission => <<"allow">>,
action => <<"subscribe">>
}
, #{topic => <<"eq test/#">>,
permission => <<"deny">>,
action => <<"all">>
}
]
}).
-define(EXAMPLE_ALL , #{rules => [ #{topic => <<"test/toopic/1">>,
permission => <<"allow">>,
action => <<"publish">>
}
, #{topic => <<"test/toopic/2">>,
permission => <<"allow">>,
action => <<"subscribe">>
}
, #{topic => <<"eq test/#">>,
permission => <<"deny">>,
action => <<"all">>
}
]
}).
%% CT entry point — suite currently disabled pending a config fix.
all() ->
    []. %% Todo: Waiting for @terry-xiaoyu to fix the config_not_found error
    % emqx_ct:all(?MODULE).
%% No CT groups defined.
groups() ->
    [].
%% Extend the core schema with the authz fields (via meck), load an empty
%% sources config, and start the authz + dashboard apps for HTTP testing.
init_per_suite(Config) ->
    meck:new(emqx_schema, [non_strict, passthrough, no_history, no_link]),
    meck:expect(emqx_schema, fields, fun("authorization") ->
                                             meck:passthrough(["authorization"]) ++
                                             emqx_authz_schema:fields("authorization");
                                        (F) -> meck:passthrough([F])
                                     end),
    ok = emqx_config:init_load(emqx_authz_schema, ?CONF_DEFAULT),
    ok = emqx_ct_helpers:start_apps([emqx_authz, emqx_dashboard], fun set_special_configs/1),
    %% disable the cache and default-deny so every request hits the backend
    {ok, _} = emqx:update_config([authorization, cache, enable], false),
    {ok, _} = emqx:update_config([authorization, no_match], deny),
    Config.
%% Drop all authz sources, stop the started apps and remove the schema mock.
end_per_suite(_Config) ->
    %% use ?CMD_REPLCAE (from emqx_authz.hrl) like the other suites in this
    %% commit, instead of the bare `replace' atom
    {ok, _} = emqx_authz:update(?CMD_REPLCAE, []),
    emqx_ct_helpers:stop_apps([emqx_authz, emqx_dashboard]),
    meck:unload(emqx_schema),
    ok.
%% Per-application config installed before each app starts.
set_special_configs(emqx_dashboard) ->
    %% default admin credentials and HTTP listener used by auth_header_/0
    Config = #{
        default_username => <<"admin">>,
        default_password => <<"public">>,
        listeners => [#{
            protocol => http,
            port => 18083
        }]
    },
    emqx_config:put([emqx_dashboard], Config),
    ok;
set_special_configs(emqx_authz) ->
    %% a single enabled built-in-database source so the API under test is live
    emqx_config:put([authorization], #{sources => [#{type => 'built-in-database',
                                                     enable => true}
                                                  ]}),
    ok;
set_special_configs(_App) ->
    ok.
%%------------------------------------------------------------------------------
%% Testcases
%%------------------------------------------------------------------------------
%% End-to-end exercise of the built-in-database REST API: CRUD for
%% username records, clientid records and the catch-all record, pagination,
%% and the guarded purge-all operation.
t_api(_) ->
    %% --- username records: create, list, fetch, empty the rules, delete ---
    {ok, 204, _} = request(post, uri(["authorization", "sources", "built-in-database", "username"]), [?EXAMPLE_USERNAME]),
    {ok, 200, Request1} = request(get, uri(["authorization", "sources", "built-in-database", "username"]), []),
    {ok, 200, Request2} = request(get, uri(["authorization", "sources", "built-in-database", "username", "user1"]), []),
    %% list response and single-record response must agree on the rules
    [#{<<"username">> := <<"user1">>, <<"rules">> := Rules1}] = jsx:decode(Request1),
    #{<<"username">> := <<"user1">>, <<"rules">> := Rules1} = jsx:decode(Request2),
    ?assertEqual(3, length(Rules1)),
    {ok, 204, _} = request(put, uri(["authorization", "sources", "built-in-database", "username", "user1"]), ?EXAMPLE_USERNAME#{rules => []}),
    {ok, 200, Request3} = request(get, uri(["authorization", "sources", "built-in-database", "username", "user1"]), []),
    #{<<"username">> := <<"user1">>, <<"rules">> := Rules2} = jsx:decode(Request3),
    ?assertEqual(0, length(Rules2)),
    {ok, 204, _} = request(delete, uri(["authorization", "sources", "built-in-database", "username", "user1"]), []),
    {ok, 404, _} = request(get, uri(["authorization", "sources", "built-in-database", "username", "user1"]), []),
    %% --- clientid records: same lifecycle ---
    {ok, 204, _} = request(post, uri(["authorization", "sources", "built-in-database", "clientid"]), [?EXAMPLE_CLIENTID]),
    {ok, 200, Request4} = request(get, uri(["authorization", "sources", "built-in-database", "clientid"]), []),
    {ok, 200, Request5} = request(get, uri(["authorization", "sources", "built-in-database", "clientid", "client1"]), []),
    [#{<<"clientid">> := <<"client1">>, <<"rules">> := Rules3}] = jsx:decode(Request4),
    #{<<"clientid">> := <<"client1">>, <<"rules">> := Rules3} = jsx:decode(Request5),
    ?assertEqual(3, length(Rules3)),
    {ok, 204, _} = request(put, uri(["authorization", "sources", "built-in-database", "clientid", "client1"]), ?EXAMPLE_CLIENTID#{rules => []}),
    {ok, 200, Request6} = request(get, uri(["authorization", "sources", "built-in-database", "clientid", "client1"]), []),
    #{<<"clientid">> := <<"client1">>, <<"rules">> := Rules4} = jsx:decode(Request6),
    ?assertEqual(0, length(Rules4)),
    {ok, 204, _} = request(delete, uri(["authorization", "sources", "built-in-database", "clientid", "client1"]), []),
    {ok, 404, _} = request(get, uri(["authorization", "sources", "built-in-database", "clientid", "client1"]), []),
    %% --- catch-all record: PUT then clear its rules ---
    {ok, 204, _} = request(put, uri(["authorization", "sources", "built-in-database", "all"]), ?EXAMPLE_ALL),
    {ok, 200, Request7} = request(get, uri(["authorization", "sources", "built-in-database", "all"]), []),
    [#{<<"rules">> := Rules5}] = jsx:decode(Request7),
    ?assertEqual(3, length(Rules5)),
    {ok, 204, _} = request(put, uri(["authorization", "sources", "built-in-database", "all"]), ?EXAMPLE_ALL#{rules => []}),
    {ok, 200, Request8} = request(get, uri(["authorization", "sources", "built-in-database", "all"]), []),
    [#{<<"rules">> := Rules6}] = jsx:decode(Request8),
    ?assertEqual(0, length(Rules6)),
    %% --- pagination over bulk-created records ---
    {ok, 204, _} = request(post, uri(["authorization", "sources", "built-in-database", "username"]), [ #{username => N, rules => []} || N <- lists:seq(1, 20) ]),
    {ok, 200, Request9} = request(get, uri(["authorization", "sources", "built-in-database", "username?page=2&limit=5"]), []),
    #{<<"data">> := Data1} = jsx:decode(Request9),
    ?assertEqual(5, length(Data1)),
    {ok, 204, _} = request(post, uri(["authorization", "sources", "built-in-database", "clientid"]), [ #{clientid => N, rules => []} || N <- lists:seq(1, 20) ]),
    {ok, 200, Request10} = request(get, uri(["authorization", "sources", "built-in-database", "clientid?limit=5"]), []),
    ?assertEqual(5, length(jsx:decode(Request10))),
    %% --- purge-all is rejected (400) while the source is enabled ---
    {ok, 400, _} = request(delete, uri(["authorization", "sources", "built-in-database", "purge-all"]), []),
    {ok, 204, _} = request(put, uri(["authorization", "sources", "built-in-database"]), #{<<"enable">> => false}),
    {ok, 204, _} = request(delete, uri(["authorization", "sources", "built-in-database", "purge-all"]), []),
    ?assertEqual([], mnesia:dirty_all_keys(?ACL_TABLE)),
    ok.
%%--------------------------------------------------------------------
%% HTTP Request
%%--------------------------------------------------------------------
%% Issue an HTTP request against the dashboard API; a body of [] means
%% "no payload".  Returns {ok, Code, ResponseBody} or {error, Reason}.
request(Method, Url, Body) ->
    AuthHeaders = [auth_header_()],
    Req = case Body of
              [] -> {Url, AuthHeaders};
              _ -> {Url, AuthHeaders, "application/json", jsx:encode(Body)}
          end,
    ct:pal("Method: ~p, Request: ~p", [Method, Req]),
    case httpc:request(Method, Req, [], [{body_format, binary}]) of
        {error, socket_closed_remotely} ->
            {error, socket_closed_remotely};
        {ok, {{"HTTP/1.1", Code, _}, _RespHeaders, RespBody}} ->
            {ok, Code, RespBody};
        {ok, {Reason, _, _}} ->
            {error, Reason}
    end.
uri() -> uri([]).

%% Build a full API URL from path segments,
%% e.g. uri(["authorization", "sources"]).
uri(Parts) when is_list(Parts) ->
    %% the original copied Parts through an identity list comprehension
    %% ([E || E <- Parts]); use the list directly
    ?HOST ++ filename:join([?BASE_PATH, ?API_VERSION | Parts]).
%% Decode a JSON response body into Erlang terms.
get_sources(Result) -> jsx:decode(Result).
%% Sign in as the default dashboard admin (see set_special_configs/1)
%% and build the Bearer authorization header for API requests.
auth_header_() ->
    Username = <<"admin">>,
    Password = <<"public">>,
    {ok, Token} = emqx_dashboard_admin:sign_token(Username, Password),
    {"Authorization", "Bearer " ++ binary_to_list(Token)}.

View File

@ -42,7 +42,7 @@
<<"url">> => <<"https://fake.com:443/">>,
<<"headers">> => #{},
<<"method">> => <<"get">>,
<<"request_timeout">> => 5000
<<"request_timeout">> => <<"5s">>
}).
-define(SOURCE2, #{<<"type">> => <<"mongodb">>,
<<"enable">> => true,

View File

@ -0,0 +1,109 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_authz_mnesia_SUITE).
-compile(nowarn_export_all).
-compile(export_all).
-include("emqx_authz.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
-define(CONF_DEFAULT, <<"authorization: {sources: []}">>).
%% Run every t_* testcase in this module.
all() ->
    emqx_ct:all(?MODULE).
%% No CT groups defined.
groups() ->
    [].
%% Extend the core schema with the authz fields (via meck), start the
%% authz app with caching disabled / default-deny, and install a single
%% built-in-database source for the tests to hit.
init_per_suite(Config) ->
    meck:new(emqx_schema, [non_strict, passthrough, no_history, no_link]),
    meck:expect(emqx_schema, fields, fun("authorization") ->
                                             meck:passthrough(["authorization"]) ++
                                             emqx_authz_schema:fields("authorization");
                                        (F) -> meck:passthrough([F])
                                     end),
    ok = emqx_config:init_load(emqx_authz_schema, ?CONF_DEFAULT),
    ok = emqx_ct_helpers:start_apps([emqx_authz]),
    {ok, _} = emqx:update_config([authorization, cache, enable], false),
    {ok, _} = emqx:update_config([authorization, no_match], deny),
    Rules = [#{<<"type">> => <<"built-in-database">>}],
    %% use ?CMD_REPLCAE (from emqx_authz.hrl) for consistency with the
    %% other suites, instead of the bare `replace' atom
    {ok, _} = emqx_authz:update(?CMD_REPLCAE, Rules),
    Config.
%% Drop all authz sources, stop the authz app and remove the schema mock.
end_per_suite(_Config) ->
    %% use ?CMD_REPLCAE (from emqx_authz.hrl) for consistency with the
    %% other suites, instead of the bare `replace' atom
    {ok, _} = emqx_authz:update(?CMD_REPLCAE, []),
    emqx_ct_helpers:stop_apps([emqx_authz]),
    meck:unload(emqx_schema),
    ok.
%% Seed ACL records for t_authz: a username record, a clientid record and
%% a catch-all deny record.  Other testcases get no fixtures.
init_per_testcase(t_authz, Config) ->
    %% NOTE(review): ekka_mnesia:dirty_write/1 wrapped in mnesia:transaction
    %% looks unusual — presumably to route the write through the rlog shard;
    %% confirm against ekka documentation.
    mnesia:transaction(fun ekka_mnesia:dirty_write/1, [#emqx_acl{who = {?ACL_TABLE_USERNAME, <<"test_username">>},
                                                                 rules = [{allow, publish, <<"test/%u">>},
                                                                          {allow, subscribe, <<"eq #">>}
                                                                         ]
                                                                }]),
    mnesia:transaction(fun ekka_mnesia:dirty_write/1, [#emqx_acl{who = {?ACL_TABLE_CLIENTID, <<"test_clientid">>},
                                                                 rules = [{allow, publish, <<"test/%c">>},
                                                                          {deny, subscribe, <<"eq #">>}
                                                                         ]
                                                                }]),
    mnesia:transaction(fun ekka_mnesia:dirty_write/1, [#emqx_acl{who = ?ACL_TABLE_ALL,
                                                                 rules = [{deny, all, <<"#">>}]
                                                                }]),
    Config;
init_per_testcase(_, Config) -> Config.
%% Purge every ACL record written by t_authz; other cases leave no state.
end_per_testcase(t_authz, Config) ->
    Purge = fun(Key) -> ekka_mnesia:dirty_delete(?ACL_TABLE, Key) end,
    lists:foreach(Purge, mnesia:dirty_all_keys(?ACL_TABLE)),
    Config;
end_per_testcase(_, Config) -> Config.
%%------------------------------------------------------------------------------
%% Testcases
%%------------------------------------------------------------------------------
%% Verify rule resolution for the records seeded in init_per_testcase:
%% ClientInfo1 matches only the catch-all deny record; ClientInfo2 matches
%% the username record; ClientInfo3 matches the clientid record.
t_authz(_) ->
    %% neither id matches a specific record -> catch-all applies
    ClientInfo1 = #{clientid => <<"test">>,
                    username => <<"test">>,
                    peerhost => {127,0,0,1},
                    listener => {tcp, default}
                   },
    %% only the username matches a record
    ClientInfo2 = #{clientid => <<"fake_clientid">>,
                    username => <<"test_username">>,
                    peerhost => {127,0,0,1},
                    listener => {tcp, default}
                   },
    %% only the clientid matches a record
    ClientInfo3 = #{clientid => <<"test_clientid">>,
                    username => <<"fake_username">>,
                    peerhost => {127,0,0,1},
                    listener => {tcp, default}
                   },
    ?assertEqual(deny, emqx_access_control:authorize(ClientInfo1, subscribe, <<"#">>)),
    ?assertEqual(deny, emqx_access_control:authorize(ClientInfo1, publish, <<"#">>)),
    %% %u placeholder in the seeded rule expands to the username
    ?assertEqual(allow, emqx_access_control:authorize(ClientInfo2, publish, <<"test/test_username">>)),
    ?assertEqual(allow, emqx_access_control:authorize(ClientInfo2, subscribe, <<"#">>)),
    %% %c placeholder expands to the clientid; its record denies subscribe
    ?assertEqual(allow, emqx_access_control:authorize(ClientInfo3, publish, <<"test/test_clientid">>)),
    ?assertEqual(deny, emqx_access_control:authorize(ClientInfo3, subscribe, <<"#">>)),
    ok.

View File

@ -22,11 +22,11 @@
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
-define(SOURCE1, {deny, all, all, ["#"]}).
-define(SOURCE1, {deny, all}).
-define(SOURCE2, {allow, {ipaddr, "127.0.0.1"}, all, [{eq, "#"}, {eq, "+"}]}).
-define(SOURCE3, {allow, {ipaddrs, ["127.0.0.1", "192.168.1.0/24"]}, subscribe, ["%c"]}).
-define(SOURCE4, {allow, {'and', [{clientid, "^test?"}, {username, "^test?"}]}, publish, ["topic/test"]}).
-define(SOURCE5, {allow, {'or', [{username, "^test"}, {clientid, "test?"}]}, publish, ["%u", "%c"]}).
-define(SOURCE4, {allow, {'and', [{client, "test"}, {user, "test"}]}, publish, ["topic/test"]}).
-define(SOURCE5, {allow, {'or', [{username, {re, "^test"}}, {clientid, {re, "test?"}}]}, publish, ["%u", "%c"]}).
all() ->
emqx_ct:all(?MODULE).
@ -52,7 +52,7 @@ t_compile(_) ->
}, emqx_authz_rule:compile(?SOURCE3)),
?assertMatch({allow,
{'and', [{clientid, {re_pattern, _, _, _, _}}, {username, {re_pattern, _, _, _, _}}]},
{'and', [{clientid, {eq, <<"test">>}}, {username, {eq, <<"test">>}}]},
publish,
[[<<"topic">>, <<"test">>]]
}, emqx_authz_rule:compile(?SOURCE4)),

View File

@ -45,3 +45,30 @@
# retain = false
# }
#}
#
#bridges.http.my_http_bridge {
# base_url: "http://localhost:9901"
# connect_timeout: "30s"
# max_retries: 3
# retry_interval = "10s"
# pool_type = "random"
# pool_size = 4
# enable_pipelining = true
# ssl {
# enable = false
# keyfile = "{{ platform_etc_dir }}/certs/client-key.pem"
# certfile = "{{ platform_etc_dir }}/certs/client-cert.pem"
# cacertfile = "{{ platform_etc_dir }}/certs/cacert.pem"
# }
# egress_channels.post_messages {
# subscribe_local_topic = "emqx_http/#"
# request_timeout: "30s"
#    ## following config entries can use placeholder variables
# method = post
# path = "/messages/${topic}"
# body = "${payload}"
# headers {
# "content-type": "application/json"
# }
# }
#}

View File

@ -15,9 +15,15 @@
%%--------------------------------------------------------------------
-module(emqx_bridge).
-behaviour(emqx_config_handler).
-include_lib("emqx/include/emqx.hrl").
-include_lib("emqx/include/logger.hrl").
-export([post_config_update/4]).
-export([reload_hook/0, unload_hook/0]).
-export([on_message_publish/1]).
-export([ load_bridges/0
, get_bridge/2
, get_bridge/3
@ -28,6 +34,7 @@
, start_bridge/2
, stop_bridge/2
, restart_bridge/2
, send_message/2
]).
-export([ config_key_path/0
@ -38,24 +45,57 @@
, resource_id/1
, resource_id/2
, parse_bridge_id/1
, channel_id/4
, parse_channel_id/1
]).
%% Re-install the 'message.publish' hook according to the current bridge
%% configuration. Any previously installed hook is removed first, so the
%% function is safe to call repeatedly.
reload_hook() ->
    unload_hook(),
    Bridges = emqx:get_config([bridges], #{}),
    _ = [load_hook(BridgeConf)
         || {_Type, NamedBridges} <- maps:to_list(Bridges),
            {_Name, BridgeConf} <- maps:to_list(NamedBridges)],
    ok.
%% Install the 'message.publish' hook for a bridge that has at least one
%% egress channel with a subscribe_local_topic; otherwise do nothing.
%% BUG FIX: the condition was inverted — the hook was installed only when
%% NO channel declared subscribe_local_topic. The subscription check is
%% done inline over the channel config maps so this clause is
%% self-contained and correct on its own.
load_hook(#{egress_channels := Channels}) ->
    NeedHook = lists:any(
        fun(#{subscribe_local_topic := _}) -> true;
           (_) -> false
        end, maps:values(Channels)),
    case NeedHook of
        true -> emqx_hooks:put('message.publish', {?MODULE, on_message_publish, []});
        false -> ok
    end;
load_hook(_Conf) -> ok.
%% Remove the 'message.publish' hook installed by load_hook/1.
%% NOTE(review): this pattern-matches 'ok', which assumes emqx_hooks:del/2
%% returns ok even when the hook was never installed — confirm.
unload_hook() ->
ok = emqx_hooks:del('message.publish', {?MODULE, on_message_publish}).
%% 'message.publish' hook callback: forward every non-system message to
%% the egress channels whose subscribe_local_topic matches its topic.
%% Always returns {ok, Message} so publishing continues unchanged.
%% Improvement: the message map is built once, outside the per-channel
%% loop (the original called emqx_message:to_map/1 once per matched
%% channel), and not at all when nothing matches.
on_message_publish(Message = #message{topic = Topic, flags = Flags}) ->
    case maps:get(sys, Flags, false) of
        true -> ok;
        false ->
            case get_matched_channels(Topic) of
                [] -> ok;
                ChannelIds ->
                    MsgMap = emqx_message:to_map(Message),
                    lists:foreach(
                        fun(ChannelId) -> send_message(ChannelId, MsgMap) end,
                        ChannelIds)
            end
    end,
    {ok, Message}.
%% TODO: remove this clause, treat mqtt bridges the same as other bridges
%% Send a message to the bridge resource that owns ChannelId
%% ("btype:bname:ctype:cname" — only the bridge part selects the resource).
send_message(ChannelId, Message) ->
    {Type, Name, _CType, _CName} = parse_channel_id(ChannelId),
    do_send_message(emqx_bridge:resource_id(Type, Name), ChannelId, Message).

%% Hand the message to the resource layer for the given resource id.
do_send_message(ResId, ChannelId, Message) ->
    emqx_resource:query(ResId, {send_message, ChannelId, Message}).
%% Root path of the bridges configuration, used when registering this
%% module as a config handler.
config_key_path() ->
[bridges].
%% Map a bridge type (config key) to its emqx_resource callback module.
%% BUG FIX: the ldap clause was terminated with '.' while an http clause
%% followed (interleaved diff), which is a compile error — clauses of one
%% function must be separated by ';'.
resource_type(mqtt)  -> emqx_connector_mqtt;
resource_type(mysql) -> emqx_connector_mysql;
resource_type(pgsql) -> emqx_connector_pgsql;
resource_type(mongo) -> emqx_connector_mongo;
resource_type(redis) -> emqx_connector_redis;
resource_type(ldap)  -> emqx_connector_ldap;
resource_type(http)  -> emqx_connector_http.
%% Inverse of resource_type/1: map a connector module back to its bridge
%% type atom.
%% BUG FIX: same interleaved-diff defect as resource_type/1 — the ldap
%% clause ended with '.' before the http clause; joined with ';'.
bridge_type(emqx_connector_mqtt)  -> mqtt;
bridge_type(emqx_connector_mysql) -> mysql;
bridge_type(emqx_connector_pgsql) -> pgsql;
bridge_type(emqx_connector_mongo) -> mongo;
bridge_type(emqx_connector_redis) -> redis;
bridge_type(emqx_connector_ldap)  -> ldap;
bridge_type(emqx_connector_http)  -> http.
post_config_update(_Req, NewConf, OldConf, _AppEnv) ->
#{added := Added, removed := Removed, changed := Updated}
@ -100,11 +140,23 @@ bridge_id(BridgeType, BridgeName) ->
<<Type/binary, ":", Name/binary>>.
%% Parse a bridge id of the form "type:name" into {TypeAtom, NameAtom};
%% raises {invalid_bridge_id, BridgeId} for anything else.
%% FIX: the pre-merge try/list_to_existing_atom body was left interleaved
%% with the new case-based body (two bodies in one function); only the
%% binary/case version is kept.
%% NOTE(review): binary_to_atom/2 creates atoms from its input — callers
%% should only pass ids originating from validated config, not the wire.
parse_bridge_id(BridgeId) ->
    case string:split(bin(BridgeId), ":", all) of
        [Type, Name] -> {binary_to_atom(Type, utf8), binary_to_atom(Name, utf8)};
        _ -> error({invalid_bridge_id, BridgeId})
    end.
%% Build a channel id binary "btype:bname:ctype:cname" from its four
%% components (atoms, strings, or binaries).
channel_id(BridgeType, BridgeName, ChannelType, ChannelName) ->
    Parts = [bin(BridgeType), bin(BridgeName), bin(ChannelType), bin(ChannelName)],
    iolist_to_binary(lists:join(<<":">>, Parts)).
%% Split a channel id back into its four binary components.
%% NOTE(review): the error tag is 'invalid_bridge_id' even though this is
%% a channel id — kept byte-identical for caller compatibility; confirm
%% before renaming.
parse_channel_id(ChannelId) ->
    Fragments = string:split(bin(ChannelId), ":", all),
    case Fragments of
        [BridgeType, BridgeName, ChannelType, ChannelName] ->
            {BridgeType, BridgeName, ChannelType, ChannelName};
        _Other ->
            error({invalid_bridge_id, ChannelId})
    end.
list_bridges() ->
@ -137,7 +189,8 @@ restart_bridge(Type, Name) ->
emqx_resource:restart(resource_id(Type, Name)).
create_bridge(Type, Name, Conf) ->
logger:info("create ~p bridge ~p use config: ~p", [Type, Name, Conf]),
?SLOG(info, #{msg => "create bridge", type => Type, name => Name,
config => Conf}),
ResId = resource_id(Type, Name),
case emqx_resource:create(ResId,
emqx_bridge:resource_type(Type), Conf) of
@ -158,12 +211,13 @@ update_bridge(Type, Name, {_OldConf, Conf}) ->
%% `egress_channels` are changed, then we should not restart the bridge, we only restart/start
%% the channels.
%%
logger:info("update ~p bridge ~p use config: ~p", [Type, Name, Conf]),
?SLOG(info, #{msg => "update bridge", type => Type, name => Name,
config => Conf}),
emqx_resource:recreate(resource_id(Type, Name),
emqx_bridge:resource_type(Type), Conf, []).
remove_bridge(Type, Name, _Conf) ->
logger:info("remove ~p bridge ~p", [Type, Name]),
?SLOG(info, #{msg => "remove bridge", type => Type, name => Name}),
case emqx_resource:remove(resource_id(Type, Name)) of
ok -> ok;
{error, not_found} -> ok;
@ -184,13 +238,35 @@ flatten_confs(Conf0) ->
%% Flatten #{Name => Conf} for one bridge type into [{{Type, Name}, Conf}].
do_flatten_confs(Type, Conf0) ->
    lists:map(fun({Name, Conf}) -> {{Type, Name}, Conf} end, maps:to_list(Conf0)).
%% True if any channel config in Channels declares subscribe_local_topic.
%% BUG FIX: the predicate previously iterated maps:to_list/1, i.e. over
%% {Name, Conf} tuples, so the #{subscribe_local_topic := _} map pattern
%% could never match and the function always returned false. Iterate the
%% channel config maps (maps:values/1) instead.
has_subscribe_local_topic(Channels) ->
    lists:any(fun(#{subscribe_local_topic := _}) -> true;
                 (_) -> false
              end, maps:values(Channels)).
%% Collect the channel ids of every egress channel — across all non-mqtt
%% bridges in the current config — whose subscribe_local_topic filter
%% matches Topic. Bridges without egress_channels are skipped.
get_matched_channels(Topic) ->
Bridges = emqx:get_config([bridges], #{}),
maps:fold(fun
%% TODO: also trigger 'message.publish' for mqtt bridges.
(mqtt, _Conf, Acc0) -> Acc0;
(BType, Conf, Acc0) ->
maps:fold(fun
(BName, #{egress_channels := Channels}, Acc1) ->
do_get_matched_channels(Topic, Channels, BType, BName, egress_channels)
++ Acc1;
(_Name, _BridgeConf, Acc1) -> Acc1
end, Acc0, Conf)
end, [], Bridges).
%% Channel ids of the channels in Channels whose subscribe_local_topic
%% filter matches Topic; channels without a filter are skipped.
do_get_matched_channels(Topic, Channels, BType, BName, CType) ->
    Collect =
        fun(Name, #{subscribe_local_topic := Filter}, Acc) ->
                case emqx_topic:match(Topic, Filter) of
                    true -> [channel_id(BType, BName, CType, Name) | Acc];
                    false -> Acc
                end;
           (_Name, _ChannConf, Acc) ->
                Acc
        end,
    maps:fold(Collect, [], Channels).
%% Coerce an atom, string, or binary into a binary.
bin(A) when is_atom(A) -> atom_to_binary(A, utf8);
bin(S) when is_list(S) -> list_to_binary(S);
bin(B) when is_binary(B) -> B.
%% Coerce an atom, binary, or charlist into a charlist (string).
str(S) when is_list(S) -> S;
str(B) when is_binary(B) -> binary_to_list(B);
str(A) when is_atom(A) -> atom_to_list(A).

View File

@ -22,10 +22,12 @@
%% Application start callback: start the bridge supervisor, create all
%% configured bridge resources, install the 'message.publish' hook, and
%% register emqx_bridge as the config handler for the bridges root.
start(_StartType, _StartArgs) ->
{ok, Sup} = emqx_bridge_sup:start_link(),
ok = emqx_bridge:load_bridges(),
ok = emqx_bridge:reload_hook(),
emqx_config_handler:add_handler(emqx_bridge:config_key_path(), emqx_bridge),
{ok, Sup}.
%% Application stop callback: remove the 'message.publish' hook installed
%% at startup.
stop(_State) ->
ok = emqx_bridge:unload_hook(),
ok.
%% internal functions

View File

@ -1,5 +1,7 @@
-module(emqx_bridge_schema).
-include_lib("typerefl/include/types.hrl").
-export([roots/0, fields/1]).
%%======================================================================================
@ -8,7 +10,16 @@
roots() -> [bridges].
%% Schema of the bridges config root and of each bridge type.
%% FIX: the pre-merge mqtt-only clauses (ending the function after
%% "mqtt_bridge") were left interleaved with the post-merge mqtt+http
%% clauses, producing duplicate/unreachable clauses; only the post-merge
%% version is kept.
fields(bridges) ->
    [ {mqtt, hoconsc:mk(hoconsc:map(name, hoconsc:ref(?MODULE, "mqtt_bridge")))}
    , {http, hoconsc:mk(hoconsc:map(name, hoconsc:ref(?MODULE, "http_bridge")))}
    ];
fields("mqtt_bridge") ->
    emqx_connector_mqtt:fields("config");
fields("http_bridge") ->
    emqx_connector_http:fields(config) ++ http_channels().
%% Extra schema for http bridges: a map of named egress channels, each
%% described by emqx_connector_http's "http_request" fields.
http_channels() ->
[{egress_channels, hoconsc:mk(hoconsc:map(id,
hoconsc:ref(emqx_connector_http, "http_request")))}].

View File

@ -21,6 +21,8 @@
-include_lib("typerefl/include/types.hrl").
-include_lib("emqx_resource/include/emqx_resource_behaviour.hrl").
-include_lib("emqx/include/logger.hrl").
%% callbacks of behaviour emqx_resource
-export([ on_start/2
, on_stop/2
@ -38,7 +40,7 @@
-export([ check_ssl_opts/2 ]).
-type connect_timeout() :: non_neg_integer() | infinity.
-type connect_timeout() :: emqx_schema:duration() | infinity.
-type pool_type() :: random | hash.
-reflect_type([ connect_timeout/0
@ -50,6 +52,22 @@
%% Schema root: a single config object for the http connector.
roots() ->
[{config, #{type => hoconsc:ref(?MODULE, config)}}].
%% Schema of one http egress channel ("http_request").
%% method/path/body/headers may contain ${var} placeholders that are
%% rendered per message (see preproc_channel_conf / proc_channel_conf).
fields("http_request") ->
[ {subscribe_local_topic, hoconsc:mk(binary())}
, {method, hoconsc:mk(method(), #{default => post})}
, {path, hoconsc:mk(binary(), #{default => <<"">>})}
, {headers, hoconsc:mk(map(),
#{default => #{
<<"accept">> => <<"application/json">>,
<<"cache-control">> => <<"no-cache">>,
<<"connection">> => <<"keep-alive">>,
<<"content-type">> => <<"application/json">>,
<<"keep-alive">> => <<"timeout=5">>}})
}
, {body, hoconsc:mk(binary(), #{default => <<"${payload}">>})}
, {request_timeout, hoconsc:mk(emqx_schema:duration_ms(), #{default => <<"30s">>})}
];
fields(config) ->
[ {base_url, fun base_url/1}
, {connect_timeout, fun connect_timeout/1}
@ -60,6 +78,13 @@ fields(config) ->
, {enable_pipelining, fun enable_pipelining/1}
] ++ emqx_connector_schema_lib:ssl_fields().
%% Union type of the HTTP methods accepted by the schema.
method() ->
    hoconsc:union([typerefl:atom(M) || M <- [post, put, get, delete]]).
%% Schema-level validations: ssl options must agree with the base_url
%% scheme (see check_ssl_opts/2).
validations() ->
[ {check_ssl_opts, fun check_ssl_opts/1} ].
@ -71,16 +96,16 @@ base_url(validator) -> fun(#{query := _Query}) ->
end;
base_url(_) -> undefined.
%% connect_timeout schema field: a duration string (e.g. "5s").
%% FIX: the pre-merge clauses (type connect_timeout(), default 5000) were
%% left interleaved before the post-merge duration clauses; since the
%% first matching clause wins, the old ones would shadow the new. Only
%% the duration version is kept.
connect_timeout(type) -> emqx_schema:duration_ms();
connect_timeout(default) -> "5s";
connect_timeout(_) -> undefined.
%% max_retries schema field: non-negative retry count, default 5.
max_retries(type) -> non_neg_integer();
max_retries(default) -> 5;
max_retries(_) -> undefined.
%% retry_interval schema field: a duration string (e.g. "1s").
%% FIX: leftover pre-merge integer-ms clauses removed — first-match
%% semantics would have shadowed the post-merge duration clauses.
retry_interval(type) -> emqx_schema:duration();
retry_interval(default) -> "1s";
retry_interval(_) -> undefined.
pool_type(type) -> pool_type();
@ -105,13 +130,14 @@ on_start(InstId, #{base_url := #{scheme := Scheme,
retry_interval := RetryInterval,
pool_type := PoolType,
pool_size := PoolSize} = Config) ->
logger:info("starting http connector: ~p, config: ~p", [InstId, Config]),
?SLOG(info, #{msg => "starting http connector",
connector => InstId, config => Config}),
{Transport, TransportOpts} = case Scheme of
http ->
{tcp, []};
https ->
SSLOpts = emqx_plugin_libs_ssl:save_files_return_opts(
maps:get(ssl_opts, Config), "connectors", InstId),
maps:get(ssl, Config), "connectors", InstId),
{tls, SSLOpts}
end,
NTransportOpts = emqx_misc:ipv6_probe(TransportOpts),
@ -126,30 +152,51 @@ on_start(InstId, #{base_url := #{scheme := Scheme,
, {transport, Transport}
, {transport_opts, NTransportOpts}],
PoolName = emqx_plugin_libs_pool:pool_name(InstId),
{ok, _} = ehttpc_sup:start_pool(PoolName, PoolOpts),
{ok, #{pool_name => PoolName,
host => Host,
port => Port,
base_path => BasePath}}.
State = #{
pool_name => PoolName,
host => Host,
port => Port,
base_path => BasePath,
channels => preproc_channels(InstId, Config)
},
case ehttpc_sup:start_pool(PoolName, PoolOpts) of
{ok, _} -> {ok, State};
{error, {already_started, _}} -> {ok, State};
{error, Reason} ->
{error, Reason}
end.
%% Stop the ehttpc pool backing this connector instance.
%% FIX: the pre-merge logger:info/2 call was left interleaved with the
%% ?SLOG call (double logging); only ?SLOG is kept.
on_stop(InstId, #{pool_name := PoolName}) ->
    ?SLOG(info, #{msg => "stopping http connector",
                  connector => InstId}),
    ehttpc_sup:stop_pool(PoolName).
on_query(InstId, {send_message, ChannelId, Msg}, AfterQuery, #{channels := Channels} = State) ->
case maps:find(ChannelId, Channels) of
error -> ?SLOG(error, #{msg => "channel not found", channel_id => ChannelId});
{ok, ChannConf} ->
#{method := Method, path := Path, body := Body, headers := Headers,
request_timeout := Timeout} = proc_channel_conf(ChannConf, Msg),
on_query(InstId, {Method, {Path, Headers, Body}, Timeout}, AfterQuery, State)
end;
on_query(InstId, {Method, Request}, AfterQuery, State) ->
on_query(InstId, {undefined, Method, Request, 5000}, AfterQuery, State);
on_query(InstId, {Method, Request, Timeout}, AfterQuery, State) ->
on_query(InstId, {undefined, Method, Request, Timeout}, AfterQuery, State);
on_query(InstId, {KeyOrNum, Method, Request, Timeout}, AfterQuery, #{pool_name := PoolName,
base_path := BasePath} = State) ->
logger:debug("http connector ~p received request: ~p, at state: ~p", [InstId, Request, State]),
on_query(InstId, {KeyOrNum, Method, Request, Timeout}, AfterQuery,
#{pool_name := PoolName, base_path := BasePath} = State) ->
?SLOG(debug, #{msg => "http connector received request",
request => Request, connector => InstId,
state => State}),
NRequest = update_path(BasePath, Request),
case Result = ehttpc:request(case KeyOrNum of
undefined -> PoolName;
_ -> {PoolName, KeyOrNum}
end, Method, NRequest, Timeout) of
{error, Reason} ->
logger:debug("http connector ~p do reqeust failed, sql: ~p, reason: ~p", [InstId, NRequest, Reason]),
?SLOG(error, #{msg => "http connector do reqeust failed",
request => NRequest, reason => Reason,
connector => InstId}),
emqx_resource:query_failed(AfterQuery);
_ ->
emqx_resource:query_success(AfterQuery)
@ -169,6 +216,52 @@ on_health_check(_InstId, #{host := Host, port := Port} = State) ->
%% Internal functions
%%--------------------------------------------------------------------
%% Pre-compile the templated fields of every egress channel of a bridge
%% instance into a #{ChannelId => CompiledConf} map.
%% NOTE(review): only instance ids of the form <<"bridge:Type:Name">> are
%% handled by this clause — confirm no other id forms reach this point.
preproc_channels(<<"bridge:", BridgeId/binary>>, Config) ->
{BridgeType, BridgeName} = emqx_bridge:parse_bridge_id(BridgeId),
maps:fold(fun(ChannName, ChannConf, Acc) ->
Acc#{emqx_bridge:channel_id(BridgeType, BridgeName, egress_channels, ChannName) =>
preproc_channel_conf(ChannConf)}
end, #{}, maps:get(egress_channels, Config, #{})).
%% Compile the templated fields of one channel config into template token
%% lists (rendered later, per message, by proc_channel_conf/2).
preproc_channel_conf(#{method := Method0,
                       path := Path0,
                       body := Body0,
                       headers := Headers0} = Conf) ->
    Method = emqx_plugin_libs_rule:preproc_tmpl(bin(Method0)),
    Path = emqx_plugin_libs_rule:preproc_tmpl(Path0),
    Body = emqx_plugin_libs_rule:preproc_tmpl(Body0),
    Headers = preproc_headers(Headers0),
    Conf#{method => Method, path => Path, body => Body, headers => Headers}.
%% Compile header names and values into template token lists.
preproc_headers(Headers) ->
    maps:from_list(
        [{emqx_plugin_libs_rule:preproc_tmpl(bin(K)),
          emqx_plugin_libs_rule:preproc_tmpl(bin(V))}
         || {K, V} <- maps:to_list(Headers)]).
%% Render the pre-compiled templates of a channel against one message,
%% producing the concrete method atom, path, body and header list.
proc_channel_conf(#{method := MethodTks,
                    path := PathTks,
                    body := BodyTks,
                    headers := HeadersTks} = Conf, Msg) ->
    Render = fun(Tks) -> emqx_plugin_libs_rule:proc_tmpl(Tks, Msg) end,
    Conf#{ method => make_method(Render(MethodTks))
         , path => Render(PathTks)
         , body => Render(BodyTks)
         , headers => maps:to_list(proc_headers(HeadersTks, Msg))
         }.
%% Render header-name and header-value templates against one message.
proc_headers(HeaderTks, Msg) ->
    maps:from_list(
        [{emqx_plugin_libs_rule:proc_tmpl(K, Msg),
          emqx_plugin_libs_rule:proc_tmpl(V, Msg)}
         || {K, V} <- maps:to_list(HeaderTks)]).
%% Map a rendered method string to its atom.
%% Generalized: accepts any letter case ("Post", "GET", "delete", ...) —
%% previously only all-lowercase or all-uppercase spellings matched and
%% anything else crashed with function_clause.
make_method(M) ->
    case string:lowercase(M) of
        <<"post">>   -> post;
        <<"put">>    -> put;
        <<"get">>    -> get;
        <<"delete">> -> delete
    end.
%% Validator entry point: check the ssl options against the base_url
%% scheme (delegates to check_ssl_opts/2).
check_ssl_opts(Conf) ->
check_ssl_opts("base_url", Conf).
@ -185,3 +278,10 @@ update_path(BasePath, {Path, Headers}) ->
{filename:join(BasePath, Path), Headers};
update_path(BasePath, {Path, Headers, Body}) ->
{filename:join(BasePath, Path), Headers, Body}.
%% Coerce an atom, string, or binary into a binary.
bin(A) when is_atom(A) -> atom_to_binary(A, utf8);
bin(S) when is_list(S) -> list_to_binary(S);
bin(B) when is_binary(B) -> B.

View File

@ -18,6 +18,7 @@
-include("emqx_connector.hrl").
-include_lib("typerefl/include/types.hrl").
-include_lib("emqx_resource/include/emqx_resource_behaviour.hrl").
-include_lib("emqx/include/logger.hrl").
-export([roots/0, fields/1]).
@ -53,7 +54,8 @@ on_start(InstId, #{servers := Servers0,
pool_size := PoolSize,
auto_reconnect := AutoReconn,
ssl := SSL} = Config) ->
logger:info("starting ldap connector: ~p, config: ~p", [InstId, Config]),
?SLOG(info, #{msg => "starting ldap connector",
connector => InstId, config => Config}),
Servers = [begin proplists:get_value(host, S) end || S <- Servers0],
SslOpts = case maps:get(enable, SSL) of
true ->
@ -75,14 +77,20 @@ on_start(InstId, #{servers := Servers0,
{ok, #{poolname => PoolName}}.
%% Stop the ecpool pool backing this ldap connector instance.
%% FIX: the pre-merge logger:info/2 call was left interleaved with the
%% ?SLOG call (double logging); only ?SLOG is kept.
on_stop(InstId, #{poolname := PoolName}) ->
    ?SLOG(info, #{msg => "stopping ldap connector",
                  connector => InstId}),
    emqx_plugin_libs_pool:stop_pool(PoolName).
on_query(InstId, {search, Base, Filter, Attributes}, AfterQuery, #{poolname := PoolName} = State) ->
logger:debug("ldap connector ~p received request: ~p, at state: ~p", [InstId, {Base, Filter, Attributes}, State]),
Request = {Base, Filter, Attributes},
?SLOG(debug, #{msg => "ldap connector received request",
request => Request, connector => InstId,
state => State}),
case Result = ecpool:pick_and_do(PoolName, {?MODULE, search, [Base, Filter, Attributes]}, no_handover) of
{error, Reason} ->
logger:debug("ldap connector ~p do request failed, request: ~p, reason: ~p", [InstId, {Base, Filter, Attributes}, Reason]),
?SLOG(error, #{msg => "ldap connector do request failed",
request => Request, connector => InstId,
reason => Reason}),
emqx_resource:query_failed(AfterQuery);
_ ->
emqx_resource:query_success(AfterQuery)

View File

@ -18,6 +18,7 @@
-include("emqx_connector.hrl").
-include_lib("typerefl/include/types.hrl").
-include_lib("emqx_resource/include/emqx_resource_behaviour.hrl").
-include_lib("emqx/include/logger.hrl").
-type server() :: emqx_schema:ip_port().
-reflect_type([server/0]).
@ -93,7 +94,8 @@ on_jsonify(Config) ->
%% ===================================================================
on_start(InstId, Config = #{server := Server,
mongo_type := single}) ->
logger:info("starting mongodb connector: ~p, config: ~p", [InstId, Config]),
?SLOG(info, #{msg => "starting mongodb single connector",
connector => InstId, config => Config}),
Opts = [{type, single},
{hosts, [emqx_connector_schema_lib:ip_port_to_string(Server)]}
],
@ -102,7 +104,8 @@ on_start(InstId, Config = #{server := Server,
on_start(InstId, Config = #{servers := Servers,
mongo_type := rs,
replica_set_name := RsName}) ->
logger:info("starting mongodb connector: ~p, config: ~p", [InstId, Config]),
?SLOG(info, #{msg => "starting mongodb rs connector",
connector => InstId, config => Config}),
Opts = [{type, {rs, RsName}},
{hosts, [emqx_connector_schema_lib:ip_port_to_string(S)
|| S <- Servers]}
@ -111,7 +114,8 @@ on_start(InstId, Config = #{servers := Servers,
on_start(InstId, Config = #{servers := Servers,
mongo_type := sharded}) ->
logger:info("starting mongodb connector: ~p, config: ~p", [InstId, Config]),
?SLOG(info, #{msg => "starting mongodb sharded connector",
connector => InstId, config => Config}),
Opts = [{type, sharded},
{hosts, [emqx_connector_schema_lib:ip_port_to_string(S)
|| S <- Servers]}
@ -119,14 +123,20 @@ on_start(InstId, Config = #{servers := Servers,
do_start(InstId, Opts, Config).
%% Stop the pool backing this mongodb connector instance.
%% FIX: the pre-merge logger:info/2 call was left interleaved with the
%% ?SLOG call (double logging); only ?SLOG is kept.
on_stop(InstId, #{poolname := PoolName}) ->
    ?SLOG(info, #{msg => "stopping mongodb connector",
                  connector => InstId}),
    emqx_plugin_libs_pool:stop_pool(PoolName).
on_query(InstId, {Action, Collection, Selector, Docs}, AfterQuery, #{poolname := PoolName} = State) ->
logger:debug("mongodb connector ~p received request: ~p, at state: ~p", [InstId, {Action, Collection, Selector, Docs}, State]),
Request = {Action, Collection, Selector, Docs},
?SLOG(debug, #{msg => "mongodb connector received request",
request => Request, connector => InstId,
state => State}),
case ecpool:pick_and_do(PoolName, {?MODULE, mongo_query, [Action, Collection, Selector, Docs]}, no_handover) of
{error, Reason} ->
logger:debug("mongodb connector ~p do sql query failed, request: ~p, reason: ~p", [InstId, {Action, Collection, Selector, Docs}, Reason]),
?SLOG(error, #{msg => "mongodb connector do query failed",
request => Request, reason => Reason,
connector => InstId}),
emqx_resource:query_failed(AfterQuery),
{error, Reason};
{ok, Cursor} when is_pid(Cursor) ->

View File

@ -17,6 +17,7 @@
-include_lib("typerefl/include/types.hrl").
-include_lib("emqx_resource/include/emqx_resource_behaviour.hrl").
-include_lib("emqx/include/logger.hrl").
-behaviour(supervisor).
@ -88,13 +89,15 @@ drop_bridge(Name) ->
%% ===================================================================
%% When use this bridge as a data source, ?MODULE:on_message_received/2 will be called
%% if the bridge received msgs from the remote broker.
%% Run the "$bridges/<channel>" hook for a message received from the
%% remote broker on the given ingress channel.
%% FIX: the pre-merge definition (emqx:run_hook(ChannelName, [Msg])) was
%% left interleaved with the post-merge one, i.e. on_message_received/2
%% was defined twice — a compile error. Only the "$bridges/" version is
%% kept.
on_message_received(Msg, ChannId) ->
    Name = atom_to_binary(ChannId, utf8),
    emqx:run_hook(<<"$bridges/", Name/binary>>, [Msg]).
%% ===================================================================
on_start(InstId, Conf) ->
logger:info("starting mqtt connector: ~p, ~p", [InstId, Conf]),
NamePrefix = binary_to_list(InstId),
?SLOG(info, #{msg => "starting mqtt connector",
connector => InstId, config => Conf}),
"bridge:" ++ NamePrefix = binary_to_list(InstId),
BasicConf = basic_config(Conf),
InitRes = {ok, #{name_prefix => NamePrefix, baisc_conf => BasicConf, channels => []}},
InOutConfigs = taged_map_list(ingress_channels, maps:get(ingress_channels, Conf, #{}))
@ -110,7 +113,8 @@ on_start(InstId, Conf) ->
end, InitRes, InOutConfigs).
%% Stop every channel worker owned by this mqtt connector instance.
%% FIX: the pre-merge logger:info/2 call was left interleaved with the
%% ?SLOG call (double logging); only ?SLOG is kept.
on_stop(InstId, #{channels := NameList}) ->
    ?SLOG(info, #{msg => "stopping mqtt connector",
                  connector => InstId}),
    lists:foreach(fun(Name) ->
        remove_channel(Name)
    end, NameList).
@ -120,9 +124,10 @@ on_stop(InstId, #{channels := NameList}) ->
%% Resource query callback: create a channel, or forward a message on an
%% existing channel.
%% FIX: the obsolete pre-merge {send_to_remote, ...} clause was left
%% interleaved with the post-merge {send_message, ...} clause; only
%% {send_message, ...} is kept, matching what emqx_bridge sends via
%% emqx_resource:query/2.
on_query(_InstId, {create_channel, Conf}, _AfterQuery, #{name_prefix := Prefix,
                                                         baisc_conf := BasicConf}) ->
    create_channel(Conf, Prefix, BasicConf);
on_query(_InstId, {send_message, ChannelId, Msg}, _AfterQuery, _State) ->
    ?SLOG(debug, #{msg => "send msg to remote node", message => Msg,
                   channel_id => ChannelId}),
    emqx_connector_mqtt_worker:send_to_remote(ChannelId, Msg).
on_health_check(_InstId, #{channels := NameList} = State) ->
Results = [{Name, emqx_connector_mqtt_worker:ping(Name)} || Name <- NameList],
@ -134,35 +139,43 @@ on_health_check(_InstId, #{channels := NameList} = State) ->
%% Create one ingress or egress channel worker for this connector.
%% FIX: the pre-merge lines (Name-based ids, logger:info/2 calls, and
%% old map fields) were left interleaved with the post-merge ChannId
%% lines, yielding duplicate bindings and duplicate map keys; only the
%% post-merge (ChannId/?SLOG) version of both clauses is kept.
create_channel({{ingress_channels, Id}, #{subscribe_remote_topic := RemoteT} = Conf},
               NamePrefix, BasicConf) ->
    LocalT = maps:get(local_topic, Conf, undefined),
    ChannId = ingress_channel_id(NamePrefix, Id),
    ?SLOG(info, #{msg => "creating ingress channel",
                  remote_topic => RemoteT,
                  local_topic => LocalT,
                  channel_id => ChannId}),
    do_create_channel(BasicConf#{
        name => ChannId,
        clientid => clientid(ChannId),
        subscriptions => Conf#{
            local_topic => LocalT,
            on_message_received => {fun ?MODULE:on_message_received/2, [ChannId]}
        },
        forwards => undefined});
create_channel({{egress_channels, Id}, #{remote_topic := RemoteT} = Conf},
               NamePrefix, BasicConf) ->
    LocalT = maps:get(subscribe_local_topic, Conf, undefined),
    ChannId = egress_channel_id(NamePrefix, Id),
    ?SLOG(info, #{msg => "creating egress channel",
                  remote_topic => RemoteT,
                  local_topic => LocalT,
                  channel_id => ChannId}),
    do_create_channel(BasicConf#{
        name => ChannId,
        clientid => clientid(ChannId),
        subscriptions => undefined,
        forwards => Conf#{subscribe_local_topic => LocalT}}).
%% Drop the worker for one channel; a missing worker is not an error.
%% FIX: the pre-merge (ChannelName/logger) lines were left interleaved
%% with the post-merge (ChannId/?SLOG) lines — duplicate function heads
%% and duplicate case expressions; only the post-merge version is kept.
remove_channel(ChannId) ->
    ?SLOG(info, #{msg => "removing channel",
                  channel_id => ChannId}),
    case ?MODULE:drop_bridge(ChannId) of
        ok -> ok;
        {error, not_found} -> ok;
        {error, Reason} ->
            ?SLOG(error, #{msg => "stop channel failed",
                           channel_id => ChannId, reason => Reason})
    end.
do_create_channel(#{name := Name} = Conf) ->
@ -215,9 +228,9 @@ basic_config(#{
%% Pair every map entry with Tag: #{K => V} becomes [{{Tag, K}, V}].
taged_map_list(Tag, Map) ->
    lists:map(fun({K, V}) -> {{Tag, K}, V} end, maps:to_list(Map)).
%% Build the ingress / egress channel ids for this connector instance.
%% FIX: the pre-merge *_channel_name function heads were left interleaved
%% with the renamed *_channel_id heads (each function had two heads and
%% one body — a compile error); only the renamed versions are kept.
ingress_channel_id(Prefix, Id) ->
    channel_name("ingress_channels", Prefix, Id).

egress_channel_id(Prefix, Id) ->
    channel_name("egress_channels", Prefix, Id).
channel_name(Type, Prefix, Id) ->

View File

@ -17,6 +17,7 @@
-include_lib("typerefl/include/types.hrl").
-include_lib("emqx_resource/include/emqx_resource_behaviour.hrl").
-include_lib("emqx/include/logger.hrl").
%% callbacks of behaviour emqx_resource
-export([ on_start/2
@ -54,7 +55,8 @@ on_start(InstId, #{server := {Host, Port},
auto_reconnect := AutoReconn,
pool_size := PoolSize,
ssl := SSL } = Config) ->
logger:info("starting mysql connector: ~p, config: ~p", [InstId, Config]),
?SLOG(info, #{msg => "starting mysql connector",
connector => InstId, config => Config}),
SslOpts = case maps:get(enable, SSL) of
true ->
[{ssl, [{server_name_indication, disable} |
@ -73,7 +75,8 @@ on_start(InstId, #{server := {Host, Port},
{ok, #{poolname => PoolName}}.
%% Stop the pool backing this mysql connector instance.
%% FIX: the pre-merge logger:info/2 call was left interleaved with the
%% ?SLOG call (double logging); only ?SLOG is kept.
on_stop(InstId, #{poolname := PoolName}) ->
    ?SLOG(info, #{msg => "stopping mysql connector",
                  connector => InstId}),
    emqx_plugin_libs_pool:stop_pool(PoolName).
on_query(InstId, {sql, SQL}, AfterQuery, #{poolname := _PoolName} = State) ->
@ -81,10 +84,12 @@ on_query(InstId, {sql, SQL}, AfterQuery, #{poolname := _PoolName} = State) ->
on_query(InstId, {sql, SQL, Params}, AfterQuery, #{poolname := _PoolName} = State) ->
on_query(InstId, {sql, SQL, Params, default_timeout}, AfterQuery, State);
on_query(InstId, {sql, SQL, Params, Timeout}, AfterQuery, #{poolname := PoolName} = State) ->
logger:debug("mysql connector ~p received sql query: ~p, at state: ~p", [InstId, SQL, State]),
?SLOG(debug, #{msg => "mysql connector received sql query",
connector => InstId, sql => SQL, state => State}),
case Result = ecpool:pick_and_do(PoolName, {mysql, query, [SQL, Params, Timeout]}, no_handover) of
{error, Reason} ->
logger:debug("mysql connector ~p do sql query failed, sql: ~p, reason: ~p", [InstId, SQL, Reason]),
?SLOG(error, #{msg => "mysql connector do sql query failed",
connector => InstId, sql => SQL, reason => Reason}),
emqx_resource:query_failed(AfterQuery);
_ ->
emqx_resource:query_success(AfterQuery)

View File

@ -17,6 +17,7 @@
-include_lib("typerefl/include/types.hrl").
-include_lib("emqx_resource/include/emqx_resource_behaviour.hrl").
-include_lib("emqx/include/logger.hrl").
-export([roots/0, fields/1]).
@ -54,7 +55,8 @@ on_start(InstId, #{server := {Host, Port},
auto_reconnect := AutoReconn,
pool_size := PoolSize,
ssl := SSL } = Config) ->
logger:info("starting postgresql connector: ~p, config: ~p", [InstId, Config]),
?SLOG(info, #{msg => "starting postgresql connector",
connector => InstId, config => Config}),
SslOpts = case maps:get(enable, SSL) of
true ->
[{ssl, [{server_name_indication, disable} |
@ -73,16 +75,20 @@ on_start(InstId, #{server := {Host, Port},
{ok, #{poolname => PoolName}}.
%% Stop the pool backing this postgresql connector instance.
%% FIX: the pre-merge logger:info/2 call was left interleaved with the
%% ?SLOG call (double logging); only ?SLOG is kept.
on_stop(InstId, #{poolname := PoolName}) ->
    ?SLOG(info, #{msg => "stopping postgresql connector",
                  connector => InstId}),
    emqx_plugin_libs_pool:stop_pool(PoolName).
on_query(InstId, {sql, SQL}, AfterQuery, #{poolname := _PoolName} = State) ->
on_query(InstId, {sql, SQL, []}, AfterQuery, State);
on_query(InstId, {sql, SQL, Params}, AfterQuery, #{poolname := PoolName} = State) ->
logger:debug("postgresql connector ~p received sql query: ~p, at state: ~p", [InstId, SQL, State]),
?SLOG(debug, #{msg => "postgresql connector received sql query",
connector => InstId, sql => SQL, state => State}),
case Result = ecpool:pick_and_do(PoolName, {?MODULE, query, [SQL, Params]}, no_handover) of
{error, Reason} ->
logger:debug("postgresql connector ~p do sql query failed, sql: ~p, reason: ~p", [InstId, SQL, Reason]),
?SLOG(error, #{
msg => "postgresql connector do sql query failed",
connector => InstId, sql => SQL, reason => Reason}),
emqx_resource:query_failed(AfterQuery);
_ ->
emqx_resource:query_success(AfterQuery)

View File

@ -18,6 +18,7 @@
-include("emqx_connector.hrl").
-include_lib("typerefl/include/types.hrl").
-include_lib("emqx_resource/include/emqx_resource_behaviour.hrl").
-include_lib("emqx/include/logger.hrl").
-type server() :: tuple().
@ -85,7 +86,8 @@ on_start(InstId, #{redis_type := Type,
pool_size := PoolSize,
auto_reconnect := AutoReconn,
ssl := SSL } = Config) ->
logger:info("starting redis connector: ~p, config: ~p", [InstId, Config]),
?SLOG(info, #{msg => "starting redis connector",
connector => InstId, config => Config}),
Servers = case Type of
single -> [{servers, [maps:get(server, Config)]}];
_ ->[{servers, maps:get(servers, Config)}]
@ -116,18 +118,21 @@ on_start(InstId, #{redis_type := Type,
{ok, #{poolname => PoolName, type => Type}}.
%% Stop the pool backing this redis connector instance.
%% FIX: the pre-merge logger:info/2 call was left interleaved with the
%% ?SLOG call (double logging); only ?SLOG is kept.
on_stop(InstId, #{poolname := PoolName}) ->
    ?SLOG(info, #{msg => "stopping redis connector",
                  connector => InstId}),
    emqx_plugin_libs_pool:stop_pool(PoolName).
on_query(InstId, {cmd, Command}, AfterCommand, #{poolname := PoolName, type := Type} = State) ->
logger:debug("redis connector ~p received cmd query: ~p, at state: ~p", [InstId, Command, State]),
?SLOG(debug, #{msg => "redis connector received cmd query",
connector => InstId, sql => Command, state => State}),
Result = case Type of
cluster -> eredis_cluster:q(PoolName, Command);
_ -> ecpool:pick_and_do(PoolName, {?MODULE, cmd, [Type, Command]}, no_handover)
end,
case Result of
{error, Reason} ->
logger:debug("redis connector ~p do cmd query failed, cmd: ~p, reason: ~p", [InstId, Command, Reason]),
?SLOG(error, #{msg => "redis connector do cmd query failed",
connector => InstId, sql => Command, reason => Reason}),
emqx_resource:query_failed(AfterCommand);
_ ->
emqx_resource:query_success(AfterCommand)

View File

@ -155,14 +155,18 @@ handle_puback(#{packet_id := PktId, reason_code := RC}, Parent)
RC =:= ?RC_NO_MATCHING_SUBSCRIBERS ->
Parent ! {batch_ack, PktId}, ok;
%% Log pubacks whose reason code is neither success nor
%% no-matching-subscribers (those are acked by the clause above).
%% FIX: the pre-merge ?LOG call was left interleaved with the ?SLOG call
%% (double logging); only ?SLOG is kept. Also fixes the "falied" typo in
%% the log message.
handle_puback(#{packet_id := PktId, reason_code := RC}, _Parent) ->
    ?SLOG(warning, #{msg => "publish to remote node failed",
                     packet_id => PktId, reason_code => RC}).
%% No ingress channel configured for this worker: log and drop the
%% message received from the remote broker.
%% FIX: the pre-merge ?LOG call was left interleaved with the ?SLOG call
%% (double logging); only ?SLOG is kept. Also repairs the garbled
%% "ingress_channles'" fragment in the log message.
handle_publish(Msg, undefined) ->
    ?SLOG(error, #{msg => "cannot publish to local broker as"
                          " 'ingress_channels' is not configured",
                   message => Msg});
handle_publish(Msg, #{on_message_received := {OnMsgRcvdFunc, Args}} = Vars) ->
?LOG(debug, "publish to local broker, msg: ~p, vars: ~p", [Msg, Vars]),
?SLOG(debug, #{msg => "publish to local broker",
message => Msg, vars => Vars}),
emqx_metrics:inc('bridge.mqtt.message_received_from_remote', 1),
_ = erlang:apply(OnMsgRcvdFunc, [Msg, Args]),
_ = erlang:apply(OnMsgRcvdFunc, [Msg | Args]),
case maps:get(local_topic, Vars, undefined) of
undefined -> ok;
_Topic ->

View File

@ -23,19 +23,21 @@
-export([ roots/0
, fields/1]).
-import(emqx_schema, [mk_duration/2]).
roots() ->
[{config, #{type => hoconsc:ref(?MODULE, "config")}}].
fields("config") ->
[ {server, hoconsc:mk(emqx_schema:ip_port(), #{default => "127.0.0.1:1883"})}
, {reconnect_interval, hoconsc:mk(emqx_schema:duration_ms(), #{default => "30s"})}
, {reconnect_interval, mk_duration("reconnect interval", #{default => "30s"})}
, {proto_ver, fun proto_ver/1}
, {bridge_mode, hoconsc:mk(boolean(), #{default => true})}
, {username, hoconsc:mk(string())}
, {password, hoconsc:mk(string())}
, {clean_start, hoconsc:mk(boolean(), #{default => true})}
, {keepalive, hoconsc:mk(integer(), #{default => 300})}
, {retry_interval, hoconsc:mk(emqx_schema:duration_ms(), #{default => "30s"})}
, {keepalive, mk_duration("keepalive", #{default => "300s"})}
, {retry_interval, mk_duration("retry interval", #{default => "30s"})}
, {max_inflight, hoconsc:mk(integer(), #{default => 32})}
, {replayq, hoconsc:mk(hoconsc:ref(?MODULE, "replayq"))}
, {ingress_channels, hoconsc:mk(hoconsc:map(id, hoconsc:ref(?MODULE, "ingress_channels")), #{default => []})}

View File

@ -63,6 +63,7 @@
-behaviour(gen_statem).
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
-include_lib("emqx/include/logger.hrl").
%% APIs
-export([ start_link/1
@ -189,7 +190,8 @@ callback_mode() -> [state_functions].
%% @doc Config should be a map().
init(#{name := Name} = ConnectOpts) ->
?LOG(debug, "starting bridge worker for ~p", [Name]),
?SLOG(debug, #{msg => "starting bridge worker",
name => Name}),
erlang:process_flag(trap_exit, true),
Queue = open_replayq(Name, maps:get(replayq, ConnectOpts, #{})),
State = init_state(ConnectOpts),
@ -335,8 +337,9 @@ common(_StateName, cast, {send_to_remote, Msg}, #{replayq := Q} = State) ->
NewQ = replayq:append(Q, [Msg]),
{keep_state, State#{replayq => NewQ}, {next_event, internal, maybe_send}};
common(StateName, Type, Content, #{name := Name} = State) ->
?LOG(notice, "Bridge ~p discarded ~p type event at state ~p:~p",
[Name, Type, StateName, Content]),
?SLOG(notice, #{msg => "Bridge discarded event",
name => Name, type => Type, state_name => StateName,
content => Content}),
{keep_state, State}.
do_connect(#{connect_opts := ConnectOpts = #{forwards := Forwards},
@ -352,8 +355,8 @@ do_connect(#{connect_opts := ConnectOpts = #{forwards := Forwards},
{ok, State#{connection => Conn}};
{error, Reason} ->
ConnectOpts1 = obfuscate(ConnectOpts),
?LOG(error, "Failed to connect \n"
"config=~p\nreason:~p", [ConnectOpts1, Reason]),
?SLOG(error, #{msg => "Failed to connect",
config => ConnectOpts1, reason => Reason}),
{error, Reason, State}
end.
@ -399,7 +402,9 @@ pop_and_send_loop(#{replayq := Q} = State, N) ->
%% Assert non-empty batch because we have a is_empty check earlier.
do_send(#{connect_opts := #{forwards := undefined}}, _QAckRef, Batch) ->
?LOG(error, "cannot forward messages to remote broker as 'bridge.mqtt.<name>.in' not configured, msg: ~p", [Batch]);
?SLOG(error, #{msg => "cannot forward messages to remote broker"
" as egress_channel is not configured",
messages => Batch});
do_send(#{inflight := Inflight,
connection := Connection,
mountpoint := Mountpoint,
@ -409,14 +414,16 @@ do_send(#{inflight := Inflight,
emqx_metrics:inc('bridge.mqtt.message_sent_to_remote'),
emqx_connector_mqtt_msg:to_remote_msg(Message, Vars)
end,
?LOG(debug, "publish to remote broker, msg: ~p, vars: ~p", [Batch, Vars]),
?SLOG(debug, #{msg => "publish to remote broker",
message => Batch, vars => Vars}),
case emqx_connector_mqtt_mod:send(Connection, [ExportMsg(M) || M <- Batch]) of
{ok, Refs} ->
{ok, State#{inflight := Inflight ++ [#{q_ack_ref => QAckRef,
send_ack_ref => map_set(Refs),
batch => Batch}]}};
{error, Reason} ->
?LOG(info, "mqtt_bridge_produce_failed ~p", [Reason]),
?SLOG(info, #{msg => "mqtt_bridge_produce_failed",
reason => Reason}),
{error, State}
end.
@ -436,7 +443,8 @@ handle_batch_ack(#{inflight := Inflight0, replayq := Q} = State, Ref) ->
State#{inflight := Inflight}.
do_ack([], Ref) ->
?LOG(debug, "stale_batch_ack_reference ~p", [Ref]),
?SLOG(debug, #{msg => "stale_batch_ack_reference",
ref => Ref}),
[];
do_ack([#{send_ack_ref := Refs} = First | Rest], Ref) ->
case maps:is_key(Ref, Refs) of

View File

@ -139,7 +139,10 @@ update_pwd(Username, Fun) ->
-spec(lookup_user(binary()) -> [mqtt_admin()]).
lookup_user(Username) when is_binary(Username) -> mnesia:dirty_read(mqtt_admin, Username).
lookup_user(Username) when is_binary(Username) ->
Fun = fun() -> mnesia:read(mqtt_admin, Username) end,
{atomic, User} = ekka_mnesia:ro_transaction(?DASHBOARD_SHARD, Fun),
User.
-spec(all_users() -> [#mqtt_admin{}]).
all_users() -> ets:tab2list(mqtt_admin).

View File

@ -162,7 +162,8 @@ flush({Connection, Route, Subscription}, {Received0, Sent0, Dropped0}) ->
diff(Sent, Sent0),
diff(Dropped, Dropped0)},
Ts = get_local_time(),
_ = mnesia:dirty_write(emqx_collect, #mqtt_collect{timestamp = Ts, collect = Collect}),
ekka_mnesia:transaction(ekka_mnesia:local_content_shard(),
fun mnesia:write/1, [#mqtt_collect{timestamp = Ts, collect = Collect}]),
{Received, Sent, Dropped}.
avg(Items) ->

View File

@ -6,6 +6,11 @@
%% API
-export([spec/1, spec/2]).
-export([translate_req/2]).
-export([namespace/0, fields/1]).
-export([error_codes/1, error_codes/2]).
-define(MAX_ROW_LIMIT, 100).
%% API
-ifdef(TEST).
-compile(export_all).
@ -22,7 +27,8 @@
-define(INIT_SCHEMA, #{fields => #{}, translations => #{}, validations => [], namespace => undefined}).
-define(TO_REF(_N_, _F_), iolist_to_binary([to_bin(_N_), ".", to_bin(_F_)])).
-define(TO_COMPONENTS(_M_, _F_), iolist_to_binary([<<"#/components/schemas/">>, ?TO_REF(namespace(_M_), _F_)])).
-define(TO_COMPONENTS_SCHEMA(_M_, _F_), iolist_to_binary([<<"#/components/schemas/">>, ?TO_REF(namespace(_M_), _F_)])).
-define(TO_COMPONENTS_PARAM(_M_, _F_), iolist_to_binary([<<"#/components/parameters/">>, ?TO_REF(namespace(_M_), _F_)])).
%% @equiv spec(Module, #{check_schema => false})
-spec(spec(module()) ->
@ -54,7 +60,6 @@ spec(Module, Options) ->
end, {[], []}, Paths),
{ApiSpec, components(lists:usort(AllRefs))}.
-spec(translate_req(#{binding => list(), query_string => list(), body => map()},
#{module => module(), path => string(), method => atom()}) ->
{ok, #{binding => list(), query_string => list(), body => map()}}|
@ -64,7 +69,7 @@ translate_req(Request, #{module := Module, path := Path, method := Method}) ->
try
Params = maps:get(parameters, Spec, []),
Body = maps:get(requestBody, Spec, []),
{Bindings, QueryStr} = check_parameters(Request, Params),
{Bindings, QueryStr} = check_parameters(Request, Params, Module),
NewBody = check_requestBody(Request, Body, Module, hoconsc:is_schema(Body)),
{ok, Request#{bindings => Bindings, query_string => QueryStr, body => NewBody}}
catch throw:Error ->
@ -73,6 +78,30 @@ translate_req(Request, #{module := Module, path := Path, method := Method}) ->
{400, 'BAD_REQUEST', iolist_to_binary(io_lib:format("~s : ~p", [Key, Reason]))}
end.
namespace() -> "public".
fields(page) ->
Desc = <<"Page number of the results to fetch.">>,
Meta = #{in => query, desc => Desc, default => 1, example => 1},
[{page, hoconsc:mk(integer(), Meta)}];
fields(limit) ->
Desc = iolist_to_binary([<<"Results per page(max ">>,
integer_to_binary(?MAX_ROW_LIMIT), <<")">>]),
Meta = #{in => query, desc => Desc, default => ?MAX_ROW_LIMIT, example => 50},
[{limit, hoconsc:mk(range(1, ?MAX_ROW_LIMIT), Meta)}].
error_codes(Codes) ->
error_codes(Codes, <<"Error code to troubleshoot problems.">>).
error_codes(Codes = [_ | _], MsgExample) ->
[
{code, hoconsc:mk(hoconsc:enum(Codes))},
{message, hoconsc:mk(string(), #{
desc => <<"Details description of the error.">>,
example => MsgExample
})}
].
support_check_schema(#{check_schema := true}) -> ?DEFAULT_FILTER;
support_check_schema(#{check_schema := Func})when is_function(Func, 2) -> #{filter => Func};
support_check_schema(_) -> #{filter => undefined}.
@ -93,23 +122,28 @@ parse_spec_ref(Module, Path) ->
maps:without([operationId], Schema)),
{maps:get(operationId, Schema), Specs, Refs}.
check_parameters(Request, Spec) ->
check_parameters(Request, Spec, Module) ->
#{bindings := Bindings, query_string := QueryStr} = Request,
BindingsBin = maps:fold(fun(Key, Value, Acc) -> Acc#{atom_to_binary(Key) => Value} end, #{}, Bindings),
check_parameter(Spec, BindingsBin, QueryStr, #{}, #{}).
check_parameter(Spec, BindingsBin, QueryStr, Module, #{}, #{}).
check_parameter([], _Bindings, _QueryStr, NewBindings, NewQueryStr) -> {NewBindings, NewQueryStr};
check_parameter([{Name, Type} | Spec], Bindings, QueryStr, BindingsAcc, QueryStrAcc) ->
check_parameter([?REF(Fields) | Spec], Bindings, QueryStr, LocalMod, BindingsAcc, QueryStrAcc) ->
check_parameter([?R_REF(LocalMod, Fields) | Spec], Bindings, QueryStr, LocalMod, BindingsAcc, QueryStrAcc);
check_parameter([?R_REF(Module, Fields) | Spec], Bindings, QueryStr, LocalMod, BindingsAcc, QueryStrAcc) ->
Params = apply(Module, fields, [Fields]),
check_parameter(Params ++ Spec, Bindings, QueryStr, LocalMod, BindingsAcc, QueryStrAcc);
check_parameter([], _Bindings, _QueryStr, _Module, NewBindings, NewQueryStr) -> {NewBindings, NewQueryStr};
check_parameter([{Name, Type} | Spec], Bindings, QueryStr, Module, BindingsAcc, QueryStrAcc) ->
Schema = ?INIT_SCHEMA#{roots => [{Name, Type}]},
case hocon_schema:field_schema(Type, in) of
path ->
NewBindings = hocon_schema:check_plain(Schema, Bindings, #{atom_key => true, override_env => false}),
NewBindingsAcc = maps:merge(BindingsAcc, NewBindings),
check_parameter(Spec, Bindings, QueryStr, NewBindingsAcc, QueryStrAcc);
check_parameter(Spec, Bindings, QueryStr, Module, NewBindingsAcc, QueryStrAcc);
query ->
NewQueryStr = hocon_schema:check_plain(Schema, QueryStr, #{override_env => false}),
NewQueryStrAcc = maps:merge(QueryStrAcc, NewQueryStr),
check_parameter(Spec, Bindings, QueryStr, BindingsAcc, NewQueryStrAcc)
check_parameter(Spec, Bindings, QueryStr, Module, BindingsAcc, NewQueryStrAcc)
end.
check_requestBody(#{body := Body}, Schema, Module, true) ->
@ -154,19 +188,28 @@ to_spec(Meta, Params, RequestBody, Responses) ->
parameters(Params, Module) ->
{SpecList, AllRefs} =
lists:foldl(fun({Name, Type}, {Acc, RefsAcc}) ->
In = hocon_schema:field_schema(Type, in),
In =:= undefined andalso throw({error, <<"missing in:path/query field in parameters">>}),
Nullable = hocon_schema:field_schema(Type, nullable),
Default = hocon_schema:field_schema(Type, default),
HoconType = hocon_schema:field_schema(Type, type),
Meta = init_meta(Nullable, Default),
{ParamType, Refs} = hocon_schema_to_spec(HoconType, Module),
Spec0 = init_prop([required | ?DEFAULT_FIELDS],
#{schema => maps:merge(ParamType, Meta), name => Name, in => In}, Type),
Spec1 = trans_required(Spec0, Nullable, In),
Spec2 = trans_desc(Spec1, Type),
{[Spec2 | Acc], Refs ++ RefsAcc}
lists:foldl(fun(Param, {Acc, RefsAcc}) ->
case Param of
?REF(StructName) ->
{[#{<<"$ref">> => ?TO_COMPONENTS_PARAM(Module, StructName)} |Acc],
[{Module, StructName, parameter}|RefsAcc]};
?R_REF(RModule, StructName) ->
{[#{<<"$ref">> => ?TO_COMPONENTS_PARAM(RModule, StructName)} |Acc],
[{RModule, StructName, parameter}|RefsAcc]};
{Name, Type} ->
In = hocon_schema:field_schema(Type, in),
In =:= undefined andalso throw({error, <<"missing in:path/query field in parameters">>}),
Nullable = hocon_schema:field_schema(Type, nullable),
Default = hocon_schema:field_schema(Type, default),
HoconType = hocon_schema:field_schema(Type, type),
Meta = init_meta(Nullable, Default),
{ParamType, Refs} = hocon_schema_to_spec(HoconType, Module),
Spec0 = init_prop([required | ?DEFAULT_FIELDS],
#{schema => maps:merge(ParamType, Meta), name => Name, in => In}, Type),
Spec1 = trans_required(Spec0, Nullable, In),
Spec2 = trans_desc(Spec1, Type),
{[Spec2 | Acc], Refs ++ RefsAcc}
end
end, {[], []}, Params),
{lists:reverse(SpecList), AllRefs}.
@ -196,7 +239,7 @@ trans_required(Spec, _, _) -> Spec.
trans_desc(Spec, Hocon) ->
case hocon_schema:field_schema(Hocon, desc) of
undefined -> Spec;
Desc -> Spec#{description => Desc}
Desc -> Spec#{description => to_bin(Desc)}
end.
requestBody([], _Module) -> {[], []};
@ -248,6 +291,13 @@ components([{Module, Field} | Refs], SpecAcc, SubRefsAcc) ->
Namespace = namespace(Module),
{Object, SubRefs} = parse_object(Props, Module),
NewSpecAcc = SpecAcc#{?TO_REF(Namespace, Field) => Object},
components(Refs, NewSpecAcc, SubRefs ++ SubRefsAcc);
%% parameters in ref only have one value, not array
components([{Module, Field, parameter} | Refs], SpecAcc, SubRefsAcc) ->
Props = apply(Module, fields, [Field]),
{[Param], SubRefs} = parameters(Props, Module),
Namespace = namespace(Module),
NewSpecAcc = SpecAcc#{?TO_REF(Namespace, Field) => Param},
components(Refs, NewSpecAcc, SubRefs ++ SubRefsAcc).
namespace(Module) ->
@ -257,10 +307,10 @@ namespace(Module) ->
end.
hocon_schema_to_spec(?R_REF(Module, StructName), _LocalModule) ->
{#{<<"$ref">> => ?TO_COMPONENTS(Module, StructName)},
{#{<<"$ref">> => ?TO_COMPONENTS_SCHEMA(Module, StructName)},
[{Module, StructName}]};
hocon_schema_to_spec(?REF(StructName), LocalModule) ->
{#{<<"$ref">> => ?TO_COMPONENTS(LocalModule, StructName)},
{#{<<"$ref">> => ?TO_COMPONENTS_SCHEMA(LocalModule, StructName)},
[{LocalModule, StructName}]};
hocon_schema_to_spec(Type, _LocalModule) when ?IS_TYPEREFL(Type) ->
{typename_to_spec(typerefl:name(Type)), []};

View File

@ -103,7 +103,8 @@ do_sign(Username, Password) ->
},
Signed = jose_jwt:sign(JWK, JWS, JWT),
{_, Token} = jose_jws:compact(Signed),
ok = ekka_mnesia:dirty_write(format(Token, Username, ExpTime)),
JWTRec = format(Token, Username, ExpTime),
ekka_mnesia:transaction(?DASHBOARD_SHARD, fun mnesia:write/1, [JWTRec]),
{ok, Token}.
do_verify(Token)->
@ -111,8 +112,9 @@ do_verify(Token)->
{ok, JWT = #mqtt_admin_jwt{exptime = ExpTime}} ->
case ExpTime > erlang:system_time(millisecond) of
true ->
ekka_mnesia:dirty_write(JWT#mqtt_admin_jwt{exptime = jwt_expiration_time()}),
ok;
NewJWT = JWT#mqtt_admin_jwt{exptime = jwt_expiration_time()},
{atomic, Res} = ekka_mnesia:transaction(?DASHBOARD_SHARD, fun mnesia:write/1, [NewJWT]),
Res;
_ ->
{error, token_timeout}
end;
@ -132,14 +134,18 @@ do_destroy_by_username(Username) ->
%% jwt internal util function
-spec(lookup(Token :: binary()) -> {ok, #mqtt_admin_jwt{}} | {error, not_found}).
lookup(Token) ->
case mnesia:dirty_read(?TAB, Token) of
[JWT] -> {ok, JWT};
[] -> {error, not_found}
Fun = fun() -> mnesia:read(?TAB, Token) end,
case ekka_mnesia:ro_transaction(?DASHBOARD_SHARD, Fun) of
{atomic, [JWT]} -> {ok, JWT};
{atomic, []} -> {error, not_found}
end.
lookup_by_username(Username) ->
Spec = [{{mqtt_admin_jwt, '_', Username, '_'}, [], ['$_']}],
mnesia:dirty_select(?TAB, Spec).
Fun = fun() -> mnesia:select(?TAB, Spec) end,
{atomic, List} = ekka_mnesia:ro_transaction(?DASHBOARD_SHARD, Fun),
List.
jwk(Username, Password, Salt) ->
Key = erlang:md5(<<Salt/binary, Username/binary, Password/binary>>),
@ -187,7 +193,8 @@ handle_info(clean_jwt, State) ->
timer_clean(self()),
Now = erlang:system_time(millisecond),
Spec = [{{mqtt_admin_jwt, '_', '_', '$1'}, [{'<', '$1', Now}], ['$_']}],
JWTList = mnesia:dirty_select(?TAB, Spec),
{atomic, JWTList} = ekka_mnesia:ro_transaction(?DASHBOARD_SHARD,
fun() -> mnesia:select(?TAB, Spec) end),
destroy(JWTList),
{noreply, State};
handle_info(_Info, State) ->

View File

@ -1,13 +0,0 @@
%%%-------------------------------------------------------------------
%%% @author zhongwen
%%% @copyright (C) 2021, <COMPANY>
%%% @doc
%%%
%%% @end
%%% Created : 22. 9 2021 13:38
%%%-------------------------------------------------------------------
-module(emqx_swagger_util).
-author("zhongwen").
%% API
-export([]).

View File

@ -3,10 +3,10 @@
-behaviour(hocon_schema).
%% API
-export([paths/0, api_spec/0, schema/1]).
-export([t_in_path/1, t_in_query/1, t_in_mix/1, t_without_in/1]).
-export([paths/0, api_spec/0, schema/1, fields/1]).
-export([t_in_path/1, t_in_query/1, t_in_mix/1, t_without_in/1, t_ref/1, t_public_ref/1]).
-export([t_require/1, t_nullable/1, t_method/1, t_api_spec/1]).
-export([t_in_path_trans/1, t_in_query_trans/1, t_in_mix_trans/1]).
-export([t_in_path_trans/1, t_in_query_trans/1, t_in_mix_trans/1, t_ref_trans/1]).
-export([t_in_path_trans_error/1, t_in_query_trans_error/1, t_in_mix_trans_error/1]).
-export([all/0, suite/0, groups/0]).
@ -20,9 +20,9 @@
all() -> [{group, spec}, {group, validation}].
suite() -> [{timetrap, {minutes, 1}}].
groups() -> [
{spec, [parallel], [t_api_spec, t_in_path, t_in_query, t_in_mix,
t_without_in, t_require, t_nullable, t_method]},
{validation, [parallel], [t_in_path_trans, t_in_query_trans, t_in_mix_trans,
{spec, [parallel], [t_api_spec, t_in_path, t_ref, t_in_query, t_in_mix,
t_without_in, t_require, t_nullable, t_method, t_public_ref]},
{validation, [parallel], [t_in_path_trans, t_ref_trans, t_in_query_trans, t_in_mix_trans,
t_in_path_trans_error, t_in_query_trans_error, t_in_mix_trans_error]}
].
@ -44,6 +44,41 @@ t_in_query(_Config) ->
validate("/test/in/query", Expect),
ok.
t_ref(_Config) ->
LocalPath = "/test/in/ref/local",
Path = "/test/in/ref",
Expect = [#{<<"$ref">> => <<"#/components/parameters/emqx_swagger_parameter_SUITE.page">>}],
{OperationId, Spec, Refs} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, Path),
{OperationId, Spec, Refs} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, LocalPath),
?assertEqual(test, OperationId),
Params = maps:get(parameters, maps:get(post, Spec)),
?assertEqual(Expect, Params),
?assertEqual([{?MODULE, page, parameter}], Refs),
ok.
t_public_ref(_Config) ->
Path = "/test/in/ref/public",
Expect = [
#{<<"$ref">> => <<"#/components/parameters/public.page">>},
#{<<"$ref">> => <<"#/components/parameters/public.limit">>}
],
{OperationId, Spec, Refs} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, Path),
?assertEqual(test, OperationId),
Params = maps:get(parameters, maps:get(post, Spec)),
?assertEqual(Expect, Params),
?assertEqual([
{emqx_dashboard_swagger, limit, parameter},
{emqx_dashboard_swagger, page, parameter}
], Refs),
ExpectRefs = [
#{<<"public.limit">> => #{description => <<"Results per page(max 100)">>, example => 50,in => query,name => limit,
schema => #{default => 100,example => 1,maximum => 100, minimum => 1,type => integer}}},
#{<<"public.page">> => #{description => <<"Page number of the results to fetch.">>,
example => 1,in => query,name => page,
schema => #{default => 1,example => 100,type => integer}}}],
?assertEqual(ExpectRefs, emqx_dashboard_swagger:components(Refs)),
ok.
t_in_mix(_Config) ->
Expect =
[#{description => <<"Indicates which sorts of issues to return">>,
@ -115,6 +150,18 @@ t_in_query_trans(_Config) ->
?assertEqual(Expect, trans_parameters(Path, #{}, #{<<"per_page">> => 100})),
ok.
t_ref_trans(_Config) ->
LocalPath = "/test/in/ref/local",
Path = "/test/in/ref",
Expect = {ok, #{bindings => #{},body => #{},
query_string => #{<<"per_page">> => 100}}},
?assertEqual(Expect, trans_parameters(Path, #{}, #{<<"per_page">> => 100})),
?assertEqual(Expect, trans_parameters(LocalPath, #{}, #{<<"per_page">> => 100})),
{400,'BAD_REQUEST', Reason} = trans_parameters(Path, #{}, #{<<"per_page">> => 1010}),
?assertNotEqual(nomatch, binary:match(Reason, [<<"per_page">>])),
{400,'BAD_REQUEST', Reason} = trans_parameters(LocalPath, #{}, #{<<"per_page">> => 1010}),
ok.
t_in_mix_trans(_Config) ->
Path = "/test/in/mix/:state",
Bindings = #{
@ -186,7 +233,7 @@ trans_parameters(Path, Bindings, QueryStr) ->
api_spec() -> emqx_dashboard_swagger:spec(?MODULE).
paths() -> ["/test/in/:filter", "/test/in/query", "/test/in/mix/:state",
paths() -> ["/test/in/:filter", "/test/in/query", "/test/in/mix/:state", "/test/in/ref",
"/required/false", "/nullable/false", "/nullable/true", "/method/ok"].
schema("/test/in/:filter") ->
@ -213,6 +260,33 @@ schema("/test/in/query") ->
responses => #{200 => <<"ok">>}
}
};
schema("/test/in/ref/local") ->
#{
operationId => test,
post => #{
parameters => [hoconsc:ref(page)],
responses => #{200 => <<"ok">>}
}
};
schema("/test/in/ref") ->
#{
operationId => test,
post => #{
parameters => [hoconsc:ref(?MODULE, page)],
responses => #{200 => <<"ok">>}
}
};
schema("/test/in/ref/public") ->
#{
operationId => test,
post => #{
parameters => [
hoconsc:ref(emqx_dashboard_swagger, page),
hoconsc:ref(emqx_dashboard_swagger, limit)
],
responses => #{200 => <<"ok">>}
}
};
schema("/test/in/mix/:state") ->
#{
operationId => test,
@ -257,6 +331,13 @@ schema("/method/ok") ->
#{operationId => test}, ?METHODS);
schema("/method/error") ->
#{operationId => test, bar => #{200 => <<"ok">>}}.
fields(page) ->
[
{per_page,
mk(range(1, 100),
#{in => query, desc => <<"results per page (max 100)">>, example => 1})}
].
to_schema(Params) ->
#{
operationId => test,

View File

@ -101,7 +101,7 @@ t_remote_ref(_Config) ->
{<<"another_ref">>, #{<<"$ref">> => <<"#/components/schemas/emqx_swagger_remote_schema.ref3">>}}], <<"type">> => object}},
#{<<"emqx_swagger_remote_schema.ref3">> => #{<<"properties">> => [
{<<"ip">>, #{description => <<"IP:Port">>, example => <<"127.0.0.1:80">>,type => string}},
{<<"version">>, #{description => "a good version", example => <<"1.0.0">>,type => string}}],
{<<"version">>, #{description => <<"a good version">>, example => <<"1.0.0">>,type => string}}],
<<"type">> => object}}],
?assertEqual(ExpectComponents, Components),
ok.
@ -116,7 +116,7 @@ t_nest_ref(_Config) ->
ExpectComponents = lists:sort([
#{<<"emqx_swagger_requestBody_SUITE.nest_ref">> => #{<<"properties">> => [
{<<"env">>, #{enum => [test,dev,prod],type => string}},
{<<"another_ref">>, #{description => "nest ref", <<"$ref">> => <<"#/components/schemas/emqx_swagger_requestBody_SUITE.good_ref">>}}],
{<<"another_ref">>, #{description => <<"nest ref">>, <<"$ref">> => <<"#/components/schemas/emqx_swagger_requestBody_SUITE.good_ref">>}}],
<<"type">> => object}},
#{<<"emqx_swagger_requestBody_SUITE.good_ref">> => #{<<"properties">> => [
{<<"webhook-host">>, #{default => <<"127.0.0.1:80">>, example => <<"127.0.0.1:80">>,type => string}},

View File

@ -12,7 +12,7 @@
-export([all/0, suite/0, groups/0]).
-export([paths/0, api_spec/0, schema/1, fields/1]).
-export([t_simple_binary/1, t_object/1, t_nest_object/1, t_empty/1,
-export([t_simple_binary/1, t_object/1, t_nest_object/1, t_empty/1, t_error/1,
t_raw_local_ref/1, t_raw_remote_ref/1, t_hocon_schema_function/1,
t_local_ref/1, t_remote_ref/1, t_bad_ref/1, t_none_ref/1, t_nest_ref/1,
t_ref_array_with_key/1, t_ref_array_without_key/1, t_api_spec/1]).
@ -21,7 +21,7 @@ all() -> [{group, spec}].
suite() -> [{timetrap, {minutes, 1}}].
groups() -> [
{spec, [parallel], [
t_api_spec, t_simple_binary, t_object, t_nest_object,
t_api_spec, t_simple_binary, t_object, t_nest_object, t_error,
t_raw_local_ref, t_raw_remote_ref, t_empty, t_hocon_schema_function,
t_local_ref, t_remote_ref, t_bad_ref, t_none_ref,
t_ref_array_with_key, t_ref_array_without_key, t_nest_ref]}
@ -48,6 +48,33 @@ t_object(_config) ->
validate(Path, Object, ExpectRefs),
ok.
t_error(_Config) ->
Path = "/error",
Error400 = #{<<"content">> =>
#{<<"application/json">> => #{<<"schema">> => #{<<"type">> => object,
<<"properties">> =>
[
{<<"code">>, #{enum => ['Bad1','Bad2'], type => string}},
{<<"message">>, #{description => <<"Details description of the error.">>,
example => <<"Bad request desc">>, type => string}}]
}}}},
Error404 = #{<<"content">> =>
#{<<"application/json">> => #{<<"schema">> => #{<<"type">> => object,
<<"properties">> =>
[
{<<"code">>, #{enum => ['Not-Found'], type => string}},
{<<"message">>, #{description => <<"Details description of the error.">>,
example => <<"Error code to troubleshoot problems.">>, type => string}}]
}}}},
{OperationId, Spec, Refs} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, Path),
?assertEqual(test, OperationId),
Response = maps:get(responses, maps:get(get, Spec)),
?assertEqual(Error400, maps:get(<<"400">>, Response)),
?assertEqual(Error404, maps:get(<<"404">>, Response)),
?assertEqual(#{}, maps:without([<<"400">>, <<"404">>], Response)),
?assertEqual([], Refs),
ok.
t_nest_object(_Config) ->
Path = "/nest/object",
Object =
@ -175,7 +202,7 @@ t_hocon_schema_function(_Config) ->
#{<<"emqx_swagger_remote_schema.ref3">> => #{<<"type">> => object,
<<"properties">> => [
{<<"ip">>, #{description => <<"IP:Port">>, example => <<"127.0.0.1:80">>,type => string}},
{<<"version">>, #{description => "a good version", example => <<"1.0.0">>, type => string}}]
{<<"version">>, #{description => <<"a good version">>, example => <<"1.0.0">>, type => string}}]
}},
#{<<"emqx_swagger_remote_schema.root">> => #{required => [<<"default_password">>, <<"default_username">>],
<<"properties">> => [{<<"listeners">>, #{items =>
@ -255,7 +282,15 @@ schema("/ref/array/with/key") ->
schema("/ref/array/without/key") ->
to_schema(mk(hoconsc:array(hoconsc:ref(?MODULE, good_ref)), #{}));
schema("/ref/hocon/schema/function") ->
to_schema(mk(hoconsc:ref(emqx_swagger_remote_schema, "root"), #{})).
to_schema(mk(hoconsc:ref(emqx_swagger_remote_schema, "root"), #{}));
schema("/error") ->
#{
operationId => test,
get => #{responses => #{
400 => emqx_dashboard_swagger:error_codes(['Bad1', 'Bad2'], <<"Bad request desc">>),
404 => emqx_dashboard_swagger:error_codes(['Not-Found'])
}}
}.
validate(Path, ExpectObject, ExpectRefs) ->
{OperationId, Spec, Refs} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, Path),

View File

@ -5,296 +5,345 @@
## TODO: These configuration options are temporary example here.
## In the final version, it will be commented out.
gateway.stomp {
## How long time the connection will be disconnected if the
## connection is established but no bytes received
idle_timeout = 30s
## To control whether write statistics data into ETS table
  ## for dashboard to read.
enable_stats = true
## When publishing or subscribing, prefix all topics with a mountpoint string.
mountpoint = ""
frame {
max_headers = 10
max_headers_length = 1024
max_body_length = 8192
}
clientinfo_override {
username = "${Packet.headers.login}"
password = "${Packet.headers.passcode}"
}
authentication: [
# {
# name = "authenticator1"
# type = "password-based:built-in-database"
# user_id_type = clientid
# }
]
listeners.tcp.default {
bind = 61613
acceptors = 16
max_connections = 1024000
max_conn_rate = 1000
access_rules = [
"allow all"
]
## TCP options
## See ${example_common_tcp_options} for more information
tcp.active_n = 100
tcp.backlog = 1024
tcp.buffer = 4KB
}
listeners.ssl.default {
bind = 61614
acceptors = 16
max_connections = 1024000
max_conn_rate = 1000
## TCP options
## See ${example_common_tcp_options} for more information
tcp.active_n = 100
tcp.backlog = 1024
tcp.buffer = 4KB
## SSL options
## See ${example_common_ssl_options} for more information
ssl.versions = ["tlsv1.3", "tlsv1.2", "tlsv1.1", "tlsv1"]
ssl.keyfile = "{{ platform_etc_dir }}/certs/key.pem"
ssl.certfile = "{{ platform_etc_dir }}/certs/cert.pem"
ssl.cacertfile = "{{ platform_etc_dir }}/certs/cacert.pem"
#ssl.verify = verify_none
#ssl.fail_if_no_peer_cert = false
#ssl.server_name_indication = disable
#ssl.secure_renegotiate = false
#ssl.reuse_sessions = false
#ssl.honor_cipher_order = false
#ssl.handshake_timeout = 15s
#ssl.depth = 10
#ssl.password = foo
#ssl.dhfile = path-to-your-file
}
}
gateway.coap {
## How long time the connection will be disconnected if the
## connection is established but no bytes received
idle_timeout = 30s
## To control whether write statistics data into ETS table
  ## for dashboard to read.
enable_stats = true
## When publishing or subscribing, prefix all topics with a mountpoint string.
mountpoint = ""
notify_type = qos
## if true, you need to establish a connection before use
connection_required = false
subscribe_qos = qos0
publish_qos = qos1
listeners.udp.default {
bind = 5683
acceptors = 4
max_connections = 102400
max_conn_rate = 1000
## UDP Options
## See ${example_common_udp_options} for more information
udp.active_n = 100
udp.buffer = 16KB
}
listeners.dtls.default {
bind = 5684
acceptors = 4
max_connections = 102400
max_conn_rate = 1000
## UDP Options
## See ${example_common_udp_options} for more information
udp.active_n = 100
udp.buffer = 16KB
## DTLS Options
## See #{example_common_dtls_options} for more information
dtls.versions = ["dtlsv1.2", "dtlsv1"]
dtls.keyfile = "{{ platform_etc_dir }}/certs/key.pem"
dtls.certfile = "{{ platform_etc_dir }}/certs/cert.pem"
dtls.cacertfile = "{{ platform_etc_dir }}/certs/cacert.pem"
}
}
gateway.mqttsn {
## How long time the connection will be disconnected if the
## connection is established but no bytes received
idle_timeout = 30s
## To control whether write statistics data into ETS table
  ## for dashboard to read.
enable_stats = true
## When publishing or subscribing, prefix all topics with a mountpoint string.
mountpoint = ""
## The MQTT-SN Gateway ID in ADVERTISE message.
gateway_id = 1
## Enable broadcast this gateway to WLAN
broadcast = true
## To control whether accept and process the received
## publish message with qos=-1.
enable_qos3 = true
## The pre-defined topic name corresponding to the pre-defined topic
## id of N.
## Note that the pre-defined topic id of 0 is reserved.
predefined = [
{ id = 1
topic = "/predefined/topic/name/hello"
},
{ id = 2
topic = "/predefined/topic/name/nice"
}
]
### ClientInfo override
clientinfo_override {
username = "mqtt_sn_user"
password = "abc"
}
listeners.udp.default {
bind = 1884
max_connections = 10240000
max_conn_rate = 1000
}
listeners.dtls.default {
bind = 1885
acceptors = 4
max_connections = 102400
max_conn_rate = 1000
## UDP Options
## See ${example_common_udp_options} for more information
udp.active_n = 100
udp.buffer = 16KB
## DTLS Options
## See #{example_common_dtls_options} for more information
dtls.versions = ["dtlsv1.2", "dtlsv1"]
dtls.keyfile = "{{ platform_etc_dir }}/certs/key.pem"
dtls.certfile = "{{ platform_etc_dir }}/certs/cert.pem"
dtls.cacertfile = "{{ platform_etc_dir }}/certs/cacert.pem"
}
}
gateway.lwm2m {
## How long time the connection will be disconnected if the
## connection is established but no bytes received
idle_timeout = 30s
## To control whether write statistics data into ETS table
  ## for dashboard to read.
enable_stats = true
## When publishing or subscribing, prefix all topics with a mountpoint string.
mountpoint = "lwm2m/%u"
xml_dir = "{{ platform_etc_dir }}/lwm2m_xml"
lifetime_min = 1s
lifetime_max = 86400s
qmode_time_windonw = 22
auto_observe = false
## always | contains_object_list
update_msg_publish_condition = contains_object_list
translators {
command {
topic = "/dn/#"
qos = 0
}
response {
topic = "/up/resp"
qos = 0
}
notify {
topic = "/up/notify"
qos = 0
}
register {
topic = "/up/resp"
qos = 0
}
update {
topic = "/up/resp"
qos = 0
}
}
listeners.udp.default {
bind = 5783
}
}
gateway.exproto {
## How long time the connection will be disconnected if the
## connection is established but no bytes received
idle_timeout = 30s
## To control whether write statistics data into ETS table
  ## for dashboard to read.
enable_stats = true
## When publishing or subscribing, prefix all topics with a mountpoint string.
mountpoint = ""
## The gRPC server to accept requests
server {
bind = 9100
#ssl.keyfile:
#ssl.certfile:
#ssl.cacertfile:
}
handler {
address = "http://127.0.0.1:9001"
#ssl.keyfile:
#ssl.certfile:
#ssl.cacertfile:
}
listeners.tcp.default {
bind = 7993
acceptors = 8
max_connections = 10240
max_conn_rate = 1000
}
#listeners.ssl.default: {}
#listeners.udp.default: {}
#listeners.dtls.default: {}
}
#gateway.stomp {
#
# ## How long time the connection will be disconnected if the
# ## connection is established but no bytes received
# idle_timeout = 30s
#
# ## To control whether write statistics data into ETS table
#  ## for dashboard to read.
# enable_stats = true
#
# ## When publishing or subscribing, prefix all topics with a mountpoint string.
# mountpoint = ""
#
# frame {
# max_headers = 10
# max_headers_length = 1024
# max_body_length = 8192
# }
#
# clientinfo_override {
# username = "${Packet.headers.login}"
# password = "${Packet.headers.passcode}"
# }
#
# authentication: {
# mechanism = password-based
# backend = built-in-database
# user_id_type = clientid
# }
#
# listeners.tcp.default {
# bind = 61613
# acceptors = 16
# max_connections = 1024000
# max_conn_rate = 1000
#
# access_rules = [
# "allow all"
# ]
#
# authentication: {
# mechanism = password-based
# backend = built-in-database
# user_id_type = username
# }
#
# ## TCP options
# ## See ${example_common_tcp_options} for more information
# tcp.active_n = 100
# tcp.backlog = 1024
# tcp.buffer = 4KB
# }
#
# listeners.ssl.default {
# bind = 61614
# acceptors = 16
# max_connections = 1024000
# max_conn_rate = 1000
#
# ## TCP options
# ## See ${example_common_tcp_options} for more information
# tcp.active_n = 100
# tcp.backlog = 1024
# tcp.buffer = 4KB
#
# ## SSL options
# ## See ${example_common_ssl_options} for more information
# ssl.versions = ["tlsv1.3", "tlsv1.2", "tlsv1.1", "tlsv1"]
# ssl.keyfile = "{{ platform_etc_dir }}/certs/key.pem"
# ssl.certfile = "{{ platform_etc_dir }}/certs/cert.pem"
# ssl.cacertfile = "{{ platform_etc_dir }}/certs/cacert.pem"
# #ssl.verify = verify_none
# #ssl.fail_if_no_peer_cert = false
# #ssl.server_name_indication = disable
# #ssl.secure_renegotiate = false
# #ssl.reuse_sessions = false
# #ssl.honor_cipher_order = false
# #ssl.handshake_timeout = 15s
# #ssl.depth = 10
# #ssl.password = foo
# #ssl.dhfile = path-to-your-file
# }
#}
#
#gateway.coap {
#
# ## How long the connection may stay idle (established but no
# ## bytes received) before it is disconnected
# idle_timeout = 30s
#
# ## Whether to write statistics data into the ETS table
# ## for the dashboard to read.
# enable_stats = true
#
# ## When publishing or subscribing, prefix all topics with a mountpoint string.
# mountpoint = ""
#
# ## Enable or disable connection mode
# ## If true, you need to establish a connection before sending any publish/subscribe
# ## requests
# ##
# ## Default: false
# #connection_required = false
#
# ## The Notification Message Type.
# ## The notification message will be delivered to the CoAP client if a new
# ## message received on an observed topic.
# ## The type of delivered coap message can be set to:
# ## - non: Non-confirmable
# ## - con: Confirmable
# ##   - qos: Mapping from QoS type of the received message.
# ## QoS0 -> non, QoS1,2 -> con.
# ##
# ## Enum: non | con | qos
# ## Default: qos
# #notify_type = qos
#
# ## The *Default QoS Level* indicator for subscribe request.
# ## This option specifies the QoS level for the CoAP Client when establishing
# ## a subscription membership, if the subscribe request is not carried `qos`
# ## option.
# ## The indicator can be set to:
# ## - qos0, qos1, qos2: Fixed default QoS level
# ## - coap: Dynamic QoS level by the message type of subscribe request
# ## * qos0: If the subscribe request is non-confirmable
# ## * qos1: If the subscribe request is confirmable
# ##
# ## Enum: qos0 | qos1 | qos2 | coap
# ## Default: coap
# #subscribe_qos = coap
#
# ## The *Default QoS Level* indicator for publish request.
# ## This option specifies the QoS level for the CoAP Client when publishing a
# ## message to EMQ X PUB/SUB system, if the publish request is not carried `qos`
# ## option.
# ## The indicator can be set to:
# ## - qos0, qos1, qos2: Fixed default QoS level
# ## - coap: Dynamic QoS level by the message type of publish request
# ## * qos0: If the publish request is non-confirmable
# ## * qos1: If the publish request is confirmable
# ##
# ## Enum: qos0 | qos1 | qos2 | coap
# #publish_qos = coap
#
# listeners.udp.default {
# bind = 5683
# max_connections = 102400
# max_conn_rate = 1000
#
# ## UDP Options
# ## See ${example_common_udp_options} for more information
# udp.active_n = 100
# udp.buffer = 16KB
# }
# listeners.dtls.default {
# bind = 5684
# acceptors = 4
# max_connections = 102400
# max_conn_rate = 1000
#
# ## UDP Options
# ## See ${example_common_udp_options} for more information
# udp.active_n = 100
# udp.buffer = 16KB
#
# ## DTLS Options
# ## See ${example_common_dtls_options} for more information
# dtls.versions = ["dtlsv1.2", "dtlsv1"]
# dtls.keyfile = "{{ platform_etc_dir }}/certs/key.pem"
# dtls.certfile = "{{ platform_etc_dir }}/certs/cert.pem"
# dtls.cacertfile = "{{ platform_etc_dir }}/certs/cacert.pem"
# dtls.handshake_timeout = 15s
# }
#}
#
#gateway.mqttsn {
#
# ## How long the connection may stay idle (established but no
# ## bytes received) before it is disconnected
# idle_timeout = 30s
#
# ## Whether to write statistics data into the ETS table
# ## for the dashboard to read.
# enable_stats = true
#
# ## When publishing or subscribing, prefix all topics with a mountpoint string.
# mountpoint = ""
#
# ## The MQTT-SN Gateway ID in ADVERTISE message.
# gateway_id = 1
#
# ## Enable broadcast this gateway to WLAN
# broadcast = true
#
# ## To control whether accept and process the received
# ## publish message with qos=-1.
# enable_qos3 = true
#
# ## The pre-defined topic name corresponding to the pre-defined topic
# ## id of N.
# ## Note that the pre-defined topic id of 0 is reserved.
# predefined = [
# { id = 1
# topic = "/predefined/topic/name/hello"
# },
# { id = 2
# topic = "/predefined/topic/name/nice"
# }
# ]
#
# ### ClientInfo override
# clientinfo_override {
# username = "mqtt_sn_user"
# password = "abc"
# }
#
# listeners.udp.default {
# bind = 1884
# max_connections = 10240000
# max_conn_rate = 1000
# }
#
# listeners.dtls.default {
# bind = 1885
# acceptors = 4
# max_connections = 102400
# max_conn_rate = 1000
#
# ## UDP Options
# ## See ${example_common_udp_options} for more information
# udp.active_n = 100
# udp.buffer = 16KB
#
# ## DTLS Options
# ## See ${example_common_dtls_options} for more information
# dtls.versions = ["dtlsv1.2", "dtlsv1"]
# dtls.keyfile = "{{ platform_etc_dir }}/certs/key.pem"
# dtls.certfile = "{{ platform_etc_dir }}/certs/cert.pem"
# dtls.cacertfile = "{{ platform_etc_dir }}/certs/cacert.pem"
# }
#
#}
#
#gateway.lwm2m {
#
# ## How long the connection may stay idle (established but no
# ## bytes received) before it is disconnected
# idle_timeout = 30s
#
# ## Whether to write statistics data into the ETS table
# ## for the dashboard to read.
# enable_stats = true
#
# ## When publishing or subscribing, prefix all topics with a mountpoint string.
# mountpoint = "lwm2m/%u"
#
# xml_dir = "{{ platform_etc_dir }}/lwm2m_xml"
#
# ##
# ##
# lifetime_min = 1s
#
# lifetime_max = 86400s
#
# qmode_time_window = 22
#
# auto_observe = false
#
# ## always | contains_object_list
# update_msg_publish_condition = contains_object_list
#
#
# translators {
# command {
# topic = "/dn/#"
# qos = 0
# }
#
# response {
# topic = "/up/resp"
# qos = 0
# }
#
# notify {
# topic = "/up/notify"
# qos = 0
# }
#
# register {
# topic = "/up/resp"
# qos = 0
# }
#
# update {
# topic = "/up/resp"
# qos = 0
# }
# }
#
# listeners.udp.default {
# bind = 5783
# }
#}
#
#gateway.exproto {
#
# ## How long the connection may stay idle (established but no
# ## bytes received) before it is disconnected
# idle_timeout = 30s
#
# ## Whether to write statistics data into the ETS table
# ## for the dashboard to read.
# enable_stats = true
#
# ## When publishing or subscribing, prefix all topics with a mountpoint string.
# mountpoint = ""
#
# ## The gRPC server to accept requests
# server {
# bind = 9100
# #ssl.keyfile:
# #ssl.certfile:
# #ssl.cacertfile:
# }
#
# handler {
# address = "http://127.0.0.1:9001"
# #ssl.keyfile:
# #ssl.certfile:
# #ssl.cacertfile:
# }
#
# listeners.tcp.default {
# bind = 7993
# acceptors = 8
# max_connections = 10240
# max_conn_rate = 1000
# }
# #listeners.ssl.default: {}
# #listeners.udp.default: {}
# #listeners.dtls.default: {}
#}

View File

@ -19,8 +19,6 @@
-type gateway_name() :: atom().
-type listener() :: #{}.
%% @doc The Gateway definition
-type gateway() ::
#{ name := gateway_name()

View File

@ -81,10 +81,13 @@
%% Frame Module
frame_mod :: atom(),
%% Channel Module
chann_mod :: atom()
chann_mod :: atom(),
%% Listener Tag
listener :: listener() | undefined
}).
-type(state() :: #state{}).
-type listener() :: {GwName :: atom(), LisType :: atom(), LisName :: atom()}.
-type state() :: #state{}.
-define(INFO_KEYS, [socktype, peername, sockname, sockstate, active_n]).
-define(CONN_STATS, [recv_pkt, recv_msg, send_pkt, send_msg]).
@ -279,7 +282,8 @@ init_state(WrappedSock, Peername, Options, FrameMod, ChannMod) ->
idle_timer = IdleTimer,
oom_policy = OomPolicy,
frame_mod = FrameMod,
chann_mod = ChannMod
chann_mod = ChannMod,
listener = maps:get(listener, Options, undefined)
}.
run_loop(Parent, State = #state{socket = Socket,

View File

@ -52,8 +52,8 @@ request(post, #{body := Body, bindings := Bindings}) ->
CT = maps:get(<<"content_type">>, Body, <<"text/plain">>),
Token = maps:get(<<"token">>, Body, <<>>),
Payload = maps:get(<<"payload">>, Body, <<>>),
WaitTime = maps:get(<<"timeout">>, Body, ?DEF_WAIT_TIME),
BinWaitTime = maps:get(<<"timeout">>, Body, <<"10s">>),
{ok, WaitTime} = emqx_schema:to_duration_ms(BinWaitTime),
Payload2 = parse_payload(CT, Payload),
ReqType = erlang:binary_to_atom(Method),
@ -83,7 +83,7 @@ request_parameters() ->
request_properties() ->
properties([ {token, string, "message token, can be empty"}
, {method, string, "request method type", ["get", "put", "post", "delete"]}
, {timeout, integer, "timespan for response"}
, {timeout, string, "timespan for response", "10s"}
, {content_type, string, "payload type",
[<<"text/plain">>, <<"application/json">>, <<"application/octet-stream">>]}
, {payload, string, "payload"}]).

View File

@ -103,9 +103,15 @@ init(ConnInfo = #{peername := {PeerHost, _},
#{ctx := Ctx} = Config) ->
Peercert = maps:get(peercert, ConnInfo, undefined),
Mountpoint = maps:get(mountpoint, Config, <<>>),
ListenerId = case maps:get(listener, Config, undefined) of
undefined -> undefined;
{GwName, Type, LisName} ->
emqx_gateway_utils:listener_id(GwName, Type, LisName)
end,
ClientInfo = set_peercert_infos(
Peercert,
#{ zone => default
, listener => ListenerId
, protocol => 'coap'
, peerhost => PeerHost
, sockport => SockPort

View File

@ -100,8 +100,8 @@ start_listener(GwName, Ctx, {Type, LisName, ListenOn, SocketOpts, Cfg}) ->
start_listener(GwName, Ctx, Type, LisName, ListenOn, SocketOpts, Cfg) ->
Name = emqx_gateway_utils:listener_id(GwName, Type, LisName),
NCfg = Cfg#{
ctx => Ctx,
NCfg = Cfg#{ctx => Ctx,
listener => {GwName, Type, LisName},
frame_mod => emqx_coap_frame,
chann_mod => emqx_coap_channel
},

View File

@ -20,11 +20,6 @@
-include("include/emqx_gateway.hrl").
%% callbacks for emqx_config_handler
-export([ pre_config_update/2
, post_config_update/4
]).
%% Gateway APIs
-export([ registered_gateway/0
, load/2
@ -36,8 +31,6 @@
, list/0
]).
-export([update_rawconf/2]).
%%--------------------------------------------------------------------
%% APIs
%%--------------------------------------------------------------------
@ -84,37 +77,6 @@ start(Name) ->
stop(Name) ->
emqx_gateway_sup:stop_gateway_insta(Name).
-spec update_rawconf(binary(), emqx_config:raw_config())
-> ok
| {error, any()}.
update_rawconf(RawName, RawConfDiff) ->
case emqx:update_config([gateway], {RawName, RawConfDiff}) of
{ok, _Result} -> ok;
{error, Reason} -> {error, Reason}
end.
%%--------------------------------------------------------------------
%% Config Handler
-spec pre_config_update(emqx_config:update_request(),
emqx_config:raw_config()) ->
{ok, emqx_config:update_request()} | {error, term()}.
pre_config_update({RawName, RawConfDiff}, RawConf) ->
{ok, emqx_map_lib:deep_merge(RawConf, #{RawName => RawConfDiff})}.
-spec post_config_update(emqx_config:update_request(), emqx_config:config(),
emqx_config:config(), emqx_config:app_envs())
-> ok | {ok, Result::any()} | {error, Reason::term()}.
post_config_update({RawName, _}, NewConfig, OldConfig, _AppEnvs) ->
GwName = binary_to_existing_atom(RawName),
SubConf = maps:get(GwName, NewConfig),
case maps:get(GwName, OldConfig, undefined) of
undefined ->
emqx_gateway:load(GwName, SubConf);
_ ->
emqx_gateway:update(GwName, SubConf)
end.
%%--------------------------------------------------------------------
%% Internal funcs
%%--------------------------------------------------------------------

View File

@ -48,6 +48,7 @@ apis() ->
, {"/gateway/:name", gateway_insta}
, {"/gateway/:name/stats", gateway_insta_stats}
].
%%--------------------------------------------------------------------
%% http handlers
@ -57,7 +58,29 @@ gateway(get, Request) ->
undefined -> all;
S0 -> binary_to_existing_atom(S0, utf8)
end,
{200, emqx_gateway_http:gateways(Status)}.
{200, emqx_gateway_http:gateways(Status)};
gateway(post, Request) ->
Body = maps:get(body, Request, #{}),
try
Name0 = maps:get(<<"name">>, Body),
GwName = binary_to_existing_atom(Name0),
case emqx_gateway_registry:lookup(GwName) of
undefined -> error(badarg);
_ ->
GwConf = maps:without([<<"name">>], Body),
case emqx_gateway_conf:load_gateway(GwName, GwConf) of
ok ->
{204};
{error, Reason} ->
return_http_error(500, Reason)
end
end
catch
error : {badkey, K} ->
return_http_error(400, [K, " is required"]);
error : badarg ->
return_http_error(404, "Bad gateway name")
end.
gateway_insta(delete, #{bindings := #{name := Name0}}) ->
with_gateway(Name0, fun(GwName, _) ->
@ -69,18 +92,16 @@ gateway_insta(get, #{bindings := #{name := Name0}}) ->
GwConf = filled_raw_confs([<<"gateway">>, Name0]),
LisConf = maps:get(<<"listeners">>, GwConf, #{}),
NLisConf = emqx_gateway_http:mapping_listener_m2l(Name0, LisConf),
{200, GwConf#{<<"listeners">> => NLisConf}}
{200, GwConf#{<<"name">> => Name0, <<"listeners">> => NLisConf}}
end);
gateway_insta(put, #{body := GwConf0,
bindings := #{name := Name0}
}) ->
with_gateway(Name0, fun(_, _) ->
with_gateway(Name0, fun(GwName, _) ->
GwConf = maps:without([<<"authentication">>, <<"listeners">>], GwConf0),
case emqx_gateway:update_rawconf(Name0, GwConf) of
case emqx_gateway_conf:update_gateway(GwName, GwConf) of
ok ->
{200};
{error, not_found} ->
return_http_error(404, "Gateway not found");
{error, Reason} ->
return_http_error(500, Reason)
end
@ -122,6 +143,16 @@ swagger("/gateway", get) ->
, responses =>
#{ <<"200">> => schema_gateway_overview_list() }
};
swagger("/gateway", post) ->
#{ description => <<"Load a gateway">>
, requestBody => schema_gateway_conf()
, responses =>
#{ <<"400">> => schema_bad_request()
, <<"404">> => schema_not_found()
, <<"500">> => schema_internal_error()
, <<"204">> => schema_no_content()
}
};
swagger("/gateway/:name", get) ->
#{ description => <<"Get the gateway configurations">>
, parameters => params_gateway_name_in_path()
@ -189,7 +220,7 @@ schema_gateway_overview_list() ->
#{ type => object
, properties => properties_gateway_overview()
},
<<"Gateway Overview list">>
<<"Gateway list">>
).
%% XXX: This is whole confs for all type gateways. It is used to fill the
@ -202,6 +233,7 @@ schema_gateway_overview_list() ->
<<"name">> => <<"authenticator1">>,
<<"server_type">> => <<"built-in-database">>,
<<"user_id_type">> => <<"clientid">>},
<<"name">> => <<"coap">>,
<<"enable">> => true,
<<"enable_stats">> => true,<<"heartbeat">> => <<"30s">>,
<<"idle_timeout">> => <<"30s">>,
@ -219,6 +251,7 @@ schema_gateway_overview_list() ->
-define(EXPROTO_GATEWAY_CONFS,
#{<<"enable">> => true,
<<"name">> => <<"exproto">>,
<<"enable_stats">> => true,
<<"handler">> =>
#{<<"address">> => <<"http://127.0.0.1:9001">>},
@ -236,6 +269,7 @@ schema_gateway_overview_list() ->
-define(LWM2M_GATEWAY_CONFS,
#{<<"auto_observe">> => false,
<<"name">> => <<"lwm2m">>,
<<"enable">> => true,
<<"enable_stats">> => true,
<<"idle_timeout">> => <<"30s">>,
@ -264,6 +298,7 @@ schema_gateway_overview_list() ->
#{<<"password">> => <<"abc">>,
<<"username">> => <<"mqtt_sn_user">>},
<<"enable">> => true,
<<"name">> => <<"mqtt-sn">>,
<<"enable_qos3">> => true,<<"enable_stats">> => true,
<<"gateway_id">> => 1,<<"idle_timeout">> => <<"30s">>,
<<"listeners">> => [
@ -290,6 +325,7 @@ schema_gateway_overview_list() ->
#{<<"password">> => <<"${Packet.headers.passcode}">>,
<<"username">> => <<"${Packet.headers.login}">>},
<<"enable">> => true,
<<"name">> => <<"stomp">>,
<<"enable_stats">> => true,
<<"frame">> =>
#{<<"max_body_length">> => 8192,<<"max_headers">> => 10,

View File

@ -18,8 +18,144 @@
-behaviour(minirest_api).
-import(emqx_gateway_http,
[ return_http_error/2
, schema_bad_request/0
, schema_not_found/0
, schema_internal_error/0
, schema_no_content/0
, with_gateway/2
, checks/2
]).
%% minirest behaviour callbacks
-export([api_spec/0]).
%% http handlers
-export([authn/2]).
%% internal export for emqx_gateway_api_listeners module
-export([schema_authn/0]).
%%--------------------------------------------------------------------
%% minirest behaviour callbacks
%%--------------------------------------------------------------------
api_spec() ->
{[], []}.
{metadata(apis()), []}.
apis() ->
[ {"/gateway/:name/authentication", authn}
].
%%--------------------------------------------------------------------
%% http handlers
authn(get, #{bindings := #{name := Name0}}) ->
with_gateway(Name0, fun(GwName, _) ->
{200, emqx_gateway_http:authn(GwName)}
end);
authn(put, #{bindings := #{name := Name0},
body := Body}) ->
with_gateway(Name0, fun(GwName, _) ->
ok = emqx_gateway_http:update_authn(GwName, Body),
{204}
end);
authn(post, #{bindings := #{name := Name0},
body := Body}) ->
with_gateway(Name0, fun(GwName, _) ->
ok = emqx_gateway_http:add_authn(GwName, Body),
{204}
end);
authn(delete, #{bindings := #{name := Name0}}) ->
with_gateway(Name0, fun(GwName, _) ->
ok = emqx_gateway_http:remove_authn(GwName),
{204}
end).
%%--------------------------------------------------------------------
%% Swagger defines
%%--------------------------------------------------------------------
metadata(APIs) ->
metadata(APIs, []).
metadata([], APIAcc) ->
lists:reverse(APIAcc);
metadata([{Path, Fun}|More], APIAcc) ->
Methods = [get, post, put, delete, patch],
Mds = lists:foldl(fun(M, Acc) ->
try
Acc#{M => swagger(Path, M)}
catch
error : function_clause ->
Acc
end
end, #{}, Methods),
metadata(More, [{Path, Mds, Fun} | APIAcc]).
swagger("/gateway/:name/authentication", get) ->
#{ description => <<"Get the gateway authentication">>
, parameters => params_gateway_name_in_path()
, responses =>
#{ <<"400">> => schema_bad_request()
, <<"404">> => schema_not_found()
, <<"500">> => schema_internal_error()
, <<"200">> => schema_authn()
}
};
swagger("/gateway/:name/authentication", put) ->
#{ description => <<"Create the gateway authentication">>
, parameters => params_gateway_name_in_path()
, requestBody => schema_authn()
, responses =>
#{ <<"400">> => schema_bad_request()
, <<"404">> => schema_not_found()
, <<"500">> => schema_internal_error()
, <<"204">> => schema_no_content()
}
};
swagger("/gateway/:name/authentication", post) ->
#{ description => <<"Add authentication for the gateway">>
, parameters => params_gateway_name_in_path()
, requestBody => schema_authn()
, responses =>
#{ <<"400">> => schema_bad_request()
, <<"404">> => schema_not_found()
, <<"500">> => schema_internal_error()
, <<"204">> => schema_no_content()
}
};
swagger("/gateway/:name/authentication", delete) ->
#{ description => <<"Remove the gateway authentication">>
, parameters => params_gateway_name_in_path()
, responses =>
#{ <<"400">> => schema_bad_request()
, <<"404">> => schema_not_found()
, <<"500">> => schema_internal_error()
, <<"204">> => schema_no_content()
}
}.
%%--------------------------------------------------------------------
%% params defines
params_gateway_name_in_path() ->
[#{ name => name
, in => path
, schema => #{type => string}
, required => true
}].
%%--------------------------------------------------------------------
%% schemas
schema_authn() ->
#{ description => <<"OK">>
, content => #{
'application/json' => #{
schema => minirest:ref(<<"AuthenticatorInstance">>)
}}
}.

View File

@ -20,20 +20,23 @@
-import(emqx_gateway_http,
[ return_http_error/2
, with_gateway/2
, checks/2
, schema_bad_request/0
, schema_not_found/0
, schema_internal_error/0
, schema_no_content/0
, with_gateway/2
, checks/2
]).
-import(emqx_gateway_api_authn, [schema_authn/0]).
%% minirest behaviour callbacks
-export([api_spec/0]).
%% http handlers
-export([ listeners/2
, listeners_insta/2
, listeners_insta_authn/2
]).
%%--------------------------------------------------------------------
@ -46,7 +49,9 @@ api_spec() ->
apis() ->
[ {"/gateway/:name/listeners", listeners}
, {"/gateway/:name/listeners/:id", listeners_insta}
, {"/gateway/:name/listeners/:id/authentication", listeners_insta_authn}
].
%%--------------------------------------------------------------------
%% http handlers
@ -69,13 +74,8 @@ listeners(post, #{bindings := #{name := Name0}, body := LConf}) ->
undefined ->
ListenerId = emqx_gateway_utils:listener_id(
GwName, Type, LName),
case emqx_gateway_http:update_listener(
ListenerId, LConf) of
ok ->
{204};
{error, Reason} ->
return_http_error(500, Reason)
end;
ok = emqx_gateway_http:add_listener(ListenerId, LConf),
{204};
_ ->
return_http_error(400, "Listener name has occupied")
end
@ -84,12 +84,8 @@ listeners(post, #{bindings := #{name := Name0}, body := LConf}) ->
listeners_insta(delete, #{bindings := #{name := Name0, id := ListenerId0}}) ->
ListenerId = emqx_mgmt_util:urldecode(ListenerId0),
with_gateway(Name0, fun(_GwName, _) ->
case emqx_gateway_http:remove_listener(ListenerId) of
ok -> {204};
{error, not_found} -> {204};
{error, Reason} ->
return_http_error(500, Reason)
end
ok = emqx_gateway_http:remove_listener(ListenerId),
{204}
end);
listeners_insta(get, #{bindings := #{name := Name0, id := ListenerId0}}) ->
ListenerId = emqx_mgmt_util:urldecode(ListenerId0),
@ -108,12 +104,38 @@ listeners_insta(put, #{body := LConf,
}) ->
ListenerId = emqx_mgmt_util:urldecode(ListenerId0),
with_gateway(Name0, fun(_GwName, _) ->
case emqx_gateway_http:update_listener(ListenerId, LConf) of
ok ->
{204};
{error, Reason} ->
return_http_error(500, Reason)
end
ok = emqx_gateway_http:update_listener(ListenerId, LConf),
{204}
end).
listeners_insta_authn(get, #{bindings := #{name := Name0,
id := ListenerId0}}) ->
ListenerId = emqx_mgmt_util:urldecode(ListenerId0),
with_gateway(Name0, fun(GwName, _) ->
{200, emqx_gateway_http:authn(GwName, ListenerId)}
end);
listeners_insta_authn(post, #{body := Conf,
bindings := #{name := Name0,
id := ListenerId0}}) ->
ListenerId = emqx_mgmt_util:urldecode(ListenerId0),
with_gateway(Name0, fun(GwName, _) ->
ok = emqx_gateway_http:add_authn(GwName, ListenerId, Conf),
{204}
end);
listeners_insta_authn(put, #{body := Conf,
bindings := #{name := Name0,
id := ListenerId0}}) ->
ListenerId = emqx_mgmt_util:urldecode(ListenerId0),
with_gateway(Name0, fun(GwName, _) ->
ok = emqx_gateway_http:update_authn(GwName, ListenerId, Conf),
{204}
end);
listeners_insta_authn(delete, #{bindings := #{name := Name0,
id := ListenerId0}}) ->
ListenerId = emqx_mgmt_util:urldecode(ListenerId0),
with_gateway(Name0, fun(GwName, _) ->
ok = emqx_gateway_http:remove_authn(GwName, ListenerId),
{204}
end).
%%--------------------------------------------------------------------
@ -190,6 +212,52 @@ swagger("/gateway/:name/listeners/:id", put) ->
, <<"500">> => schema_internal_error()
, <<"200">> => schema_no_content()
}
};
swagger("/gateway/:name/listeners/:id/authentication", get) ->
#{ description => <<"Get the listener's authentication info">>
, parameters => params_gateway_name_in_path()
++ params_listener_id_in_path()
, responses =>
#{ <<"400">> => schema_bad_request()
, <<"404">> => schema_not_found()
, <<"500">> => schema_internal_error()
, <<"200">> => schema_authn()
}
};
swagger("/gateway/:name/listeners/:id/authentication", post) ->
#{ description => <<"Add authentication for the listener">>
, parameters => params_gateway_name_in_path()
++ params_listener_id_in_path()
, requestBody => schema_authn()
, responses =>
#{ <<"400">> => schema_bad_request()
, <<"404">> => schema_not_found()
, <<"500">> => schema_internal_error()
, <<"204">> => schema_no_content()
}
};
swagger("/gateway/:name/listeners/:id/authentication", put) ->
#{ description => <<"Update authentication for the listener">>
, parameters => params_gateway_name_in_path()
++ params_listener_id_in_path()
, requestBody => schema_authn()
, responses =>
#{ <<"400">> => schema_bad_request()
, <<"404">> => schema_not_found()
, <<"500">> => schema_internal_error()
, <<"204">> => schema_no_content()
}
};
swagger("/gateway/:name/listeners/:id/authentication", delete) ->
#{ description => <<"Remove authentication for the listener">>
, parameters => params_gateway_name_in_path()
++ params_listener_id_in_path()
, responses =>
#{ <<"400">> => schema_bad_request()
, <<"404">> => schema_not_found()
, <<"500">> => schema_internal_error()
, <<"204">> => schema_no_content()
}
}.
%%--------------------------------------------------------------------
@ -301,7 +369,6 @@ raw_properties_common_listener() ->
<<"Listener type. Enum: tcp, udp, ssl, dtls">>,
[<<"tcp">>, <<"ssl">>, <<"udp">>, <<"dtls">>]}
, {running, boolean, <<"Listener running status">>}
%% FIXME:
, {bind, string, <<"Listener bind address or port">>}
, {acceptors, integer, <<"Listener acceptors number">>}
, {access_rules, {array, string}, <<"Listener Access rules for client">>}

View File

@ -22,20 +22,17 @@
-export([start/2, stop/1]).
-define(CONF_CALLBACK_MODULE, emqx_gateway).
start(_StartType, _StartArgs) ->
{ok, Sup} = emqx_gateway_sup:start_link(),
emqx_gateway_cli:load(),
load_default_gateway_applications(),
load_gateway_by_default(),
emqx_config_handler:add_handler([gateway], ?CONF_CALLBACK_MODULE),
emqx_gateway_conf:load(),
{ok, Sup}.
stop(_State) ->
emqx_gateway_conf:unload(),
emqx_gateway_cli:unload(),
%% XXX: No api now
%emqx_config_handler:remove_handler([gateway], ?MODULE),
ok.
%%--------------------------------------------------------------------

View File

@ -0,0 +1,270 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
%% @doc The gateway configuration management module
-module(emqx_gateway_conf).
%% Load/Unload
-export([ load/0
, unload/0
]).
%% APIs
-export([ load_gateway/2
, update_gateway/2
, remove_gateway/1
, add_listener/3
, update_listener/3
, remove_listener/2
, add_authn/2
, add_authn/3
, update_authn/2
, update_authn/3
, remove_authn/1
, remove_authn/2
]).
%% callbacks for emqx_config_handler
-export([ pre_config_update/2
, post_config_update/4
]).
%% An identifier accepted either as an atom or a binary; it is
%% normalized to a binary via bin/1 before being used as a key in the
%% raw (binary-keyed) configuration.
-type atom_or_bin() :: atom() | binary().

%% The result of a configuration update request (see res/1, which
%% produces exactly these values).
%% FIX: was circularly defined as `-type ok_or_err() :: ok_or_err().',
%% which is not a resolvable type.
-type ok_or_err() :: ok | {error, term()}.

%% A reference to a gateway listener: {Type, Name}, e.g. {tcp, default}.
-type listener_ref() :: {ListenerType :: atom_or_bin(),
                         ListenerName :: atom_or_bin()}.
%%--------------------------------------------------------------------
%% Load/Unload
%%--------------------------------------------------------------------
%% @doc Register this module as the config-change handler for the
%% `[gateway]' config root, so pre_config_update/2 and
%% post_config_update/4 below are invoked on every update request.
-spec load() -> ok.
load() ->
    emqx_config_handler:add_handler([gateway], ?MODULE).

%% @doc Deregister the `[gateway]' config-change handler.
-spec unload() -> ok.
unload() ->
    emqx_config_handler:remove_handler([gateway]).
%%--------------------------------------------------------------------
%% APIs
%% All the public API functions below are thin wrappers: each builds a
%% request tuple tagged with its own function name (?FUNCTION_NAME),
%% normalizes names to binaries with bin/1, and submits it through
%% update/1. The actual config manipulation happens in the
%% pre_config_update/2 clause matching the tag.

%% @doc Add a whole new gateway config; errors if it already exists.
-spec load_gateway(atom_or_bin(), map()) -> ok_or_err().
load_gateway(GwName, Conf) ->
    update({?FUNCTION_NAME, bin(GwName), Conf}).

%% @doc Update an existing gateway's top-level config
%% (listeners/authentication parts are ignored by the handler).
-spec update_gateway(atom_or_bin(), map()) -> ok_or_err().
update_gateway(GwName, Conf) ->
    update({?FUNCTION_NAME, bin(GwName), Conf}).

%% @doc Remove a gateway's whole config subtree.
-spec remove_gateway(atom_or_bin()) -> ok_or_err().
remove_gateway(GwName) ->
    update({?FUNCTION_NAME, bin(GwName)}).

%% @doc Add a new listener to a gateway; errors if it already exists.
-spec add_listener(atom_or_bin(), listener_ref(), map()) -> ok_or_err().
add_listener(GwName, ListenerRef, Conf) ->
    update({?FUNCTION_NAME, bin(GwName), bin(ListenerRef), Conf}).

%% @doc Update an existing listener; errors if it does not exist.
-spec update_listener(atom_or_bin(), listener_ref(), map()) -> ok_or_err().
update_listener(GwName, ListenerRef, Conf) ->
    update({?FUNCTION_NAME, bin(GwName), bin(ListenerRef), Conf}).

%% @doc Remove a listener from a gateway.
-spec remove_listener(atom_or_bin(), listener_ref()) -> ok_or_err().
remove_listener(GwName, ListenerRef) ->
    update({?FUNCTION_NAME, bin(GwName), bin(ListenerRef)}).

%% @doc Add gateway-level authentication; errors if already present.
-spec add_authn(atom_or_bin(), map()) -> ok_or_err().
add_authn(GwName, Conf) ->
    update({?FUNCTION_NAME, bin(GwName), Conf}).

%% @doc Add listener-level authentication; errors if the listener is
%% missing or already has authentication configured.
-spec add_authn(atom_or_bin(), listener_ref(), map()) -> ok_or_err().
add_authn(GwName, ListenerRef, Conf) ->
    update({?FUNCTION_NAME, bin(GwName), bin(ListenerRef), Conf}).

%% @doc Update gateway-level authentication; errors if not present.
-spec update_authn(atom_or_bin(), map()) -> ok_or_err().
update_authn(GwName, Conf) ->
    update({?FUNCTION_NAME, bin(GwName), Conf}).

%% @doc Update listener-level authentication; errors if not present.
-spec update_authn(atom_or_bin(), listener_ref(), map()) -> ok_or_err().
update_authn(GwName, ListenerRef, Conf) ->
    update({?FUNCTION_NAME, bin(GwName), bin(ListenerRef), Conf}).

%% @doc Remove gateway-level authentication.
-spec remove_authn(atom_or_bin()) -> ok_or_err().
remove_authn(GwName) ->
    update({?FUNCTION_NAME, bin(GwName)}).

%% @doc Remove listener-level authentication.
-spec remove_authn(atom_or_bin(), listener_ref()) -> ok_or_err().
remove_authn(GwName, ListenerRef) ->
    update({?FUNCTION_NAME, bin(GwName), bin(ListenerRef)}).
%% @private
%% Submit an update request against the `[gateway]' config root and
%% normalize the outcome to `ok' or `{error, Reason}'.
update(UpdateReq) ->
    Result = emqx:update_config([gateway], UpdateReq),
    res(Result).

%% @private
%% Strip the wrapping that emqx:update_config/2 puts around results.
res({ok, _}) ->
    ok;
res({error, {pre_config_update, emqx_gateway_conf, Reason}}) ->
    %% Unwrap errors raised by this module's own pre_config_update/2.
    {error, Reason};
res({error, Reason}) ->
    {error, Reason}.
%% @private
%% Normalize an atom, a binary, or a {Type, Name} pair to binary form,
%% so that lookups into the raw (binary-keyed) config always use
%% binary keys.
bin(Bin) when is_binary(Bin) ->
    Bin;
bin(Atom) when is_atom(Atom) ->
    atom_to_binary(Atom);
bin({Type, Name}) ->
    {bin(Type), bin(Name)}.
%%--------------------------------------------------------------------
%% Config Handler
%%--------------------------------------------------------------------
%% @doc emqx_config_handler callback: validate an update request and
%% produce the new raw config before it is committed. Each clause
%% matches the request tuple built by the corresponding public API
%% wrapper above. Returning {error, _} aborts the update.
-spec pre_config_update(emqx_config:update_request(),
                        emqx_config:raw_config()) ->
    {ok, emqx_config:update_request()} | {error, term()}.
%% Add a whole gateway; refuse to overwrite an existing one.
pre_config_update({load_gateway, GwName, Conf}, RawConf) ->
    case maps:get(GwName, RawConf, undefined) of
        undefined ->
            {ok, emqx_map_lib:deep_merge(RawConf, #{GwName => Conf})};
        _ ->
            {error, already_exist}
    end;
%% Update a gateway's top-level settings only; listeners and
%% authentication are managed by their own dedicated requests.
pre_config_update({update_gateway, GwName, Conf}, RawConf) ->
    case maps:get(GwName, RawConf, undefined) of
        undefined ->
            {error, not_found};
        _ ->
            NConf = maps:without([<<"listeners">>,
                                  <<"authentication">>], Conf),
            {ok, emqx_map_lib:deep_merge(RawConf, #{GwName => NConf})}
    end;
%% Drop the gateway's whole subtree (no error if it was absent).
pre_config_update({remove_gateway, GwName}, RawConf) ->
    {ok, maps:remove(GwName, RawConf)};
%% Add a listener; refuse to overwrite an existing one.
pre_config_update({add_listener, GwName, {LType, LName}, Conf}, RawConf) ->
    case emqx_map_lib:deep_get(
           [GwName, <<"listeners">>, LType, LName], RawConf, undefined) of
        undefined ->
            NListener = #{LType => #{LName => Conf}},
            {ok, emqx_map_lib:deep_merge(
                   RawConf,
                   #{GwName => #{<<"listeners">> => NListener}})};
        _ ->
            {error, already_exist}
    end;
%% Update an existing listener; NOTE: the new conf is deep-merged over
%% the old one rather than replacing it.
pre_config_update({update_listener, GwName, {LType, LName}, Conf}, RawConf) ->
    case emqx_map_lib:deep_get(
           [GwName, <<"listeners">>, LType, LName], RawConf, undefined) of
        undefined ->
            {error, not_found};
        _OldConf ->
            NListener = #{LType => #{LName => Conf}},
            {ok, emqx_map_lib:deep_merge(
                   RawConf,
                   #{GwName => #{<<"listeners">> => NListener}})}
    end;
%% Remove a listener (no error if it was absent).
pre_config_update({remove_listener, GwName, {LType, LName}}, RawConf) ->
    {ok, emqx_map_lib:deep_remove(
           [GwName, <<"listeners">>, LType, LName], RawConf)};
%% Add gateway-level authentication; refuse to overwrite.
pre_config_update({add_authn, GwName, Conf}, RawConf) ->
    case emqx_map_lib:deep_get(
           [GwName, <<"authentication">>], RawConf, undefined) of
        undefined ->
            {ok, emqx_map_lib:deep_merge(
                   RawConf,
                   #{GwName => #{<<"authentication">> => Conf}})};
        _ ->
            {error, already_exist}
    end;
%% Add listener-level authentication: the listener must exist and must
%% not already carry an authentication config.
pre_config_update({add_authn, GwName, {LType, LName}, Conf}, RawConf) ->
    case emqx_map_lib:deep_get(
           [GwName, <<"listeners">>, LType, LName],
           RawConf, undefined) of
        undefined ->
            {error, not_found};
        Listener ->
            case maps:get(<<"authentication">>, Listener, undefined) of
                undefined ->
                    NListener = maps:put(<<"authentication">>, Conf, Listener),
                    NGateway = #{GwName =>
                                 #{<<"listeners">> =>
                                   #{LType => #{LName => NListener}}}},
                    {ok, emqx_map_lib:deep_merge(RawConf, NGateway)};
                _ ->
                    {error, already_exist}
            end
    end;
%% Update gateway-level authentication; it must already exist.
pre_config_update({update_authn, GwName, Conf}, RawConf) ->
    case emqx_map_lib:deep_get(
           [GwName, <<"authentication">>], RawConf, undefined) of
        undefined ->
            {error, not_found};
        _ ->
            {ok, emqx_map_lib:deep_merge(
                   RawConf,
                   #{GwName => #{<<"authentication">> => Conf}})}
    end;
%% Update listener-level authentication: both the listener and its
%% existing authentication config are required; the new conf is
%% deep-merged over the old authentication config.
pre_config_update({update_authn, GwName, {LType, LName}, Conf}, RawConf) ->
    case emqx_map_lib:deep_get(
           [GwName, <<"listeners">>, LType, LName],
           RawConf, undefined) of
        undefined ->
            {error, not_found};
        Listener ->
            case maps:get(<<"authentication">>, Listener, undefined) of
                undefined ->
                    {error, not_found};
                Auth ->
                    NListener = maps:put(
                                  <<"authentication">>,
                                  emqx_map_lib:deep_merge(Auth, Conf),
                                  Listener
                                 ),
                    NGateway = #{GwName =>
                                 #{<<"listeners">> =>
                                   #{LType => #{LName => NListener}}}},
                    {ok, emqx_map_lib:deep_merge(RawConf, NGateway)}
            end
    end;
%% Remove gateway-level authentication (no error if absent).
pre_config_update({remove_authn, GwName}, RawConf) ->
    {ok, emqx_map_lib:deep_remove(
           [GwName, <<"authentication">>], RawConf)};
%% Remove listener-level authentication (no error if absent).
pre_config_update({remove_authn, GwName, {LType, LName}}, RawConf) ->
    Path = [GwName, <<"listeners">>, LType, LName, <<"authentication">>],
    {ok, emqx_map_lib:deep_remove(Path, RawConf)};
%% Catch-all: log and reject malformed requests.
pre_config_update(UnknownReq, _RawConf) ->
    logger:error("Unknown configuration update request: ~0p", [UnknownReq]),
    {error, badreq}.
%% @doc emqx_config_handler callback: after the config is committed,
%% reconcile the running gateway with the new config. The gateway name
%% is taken from the second element of every request tuple.
-spec post_config_update(emqx_config:update_request(), emqx_config:config(),
                         emqx_config:config(), emqx_config:app_envs())
    -> ok | {ok, Result::any()} | {error, Reason::term()}.
post_config_update(Req, NewConfig, OldConfig, _AppEnvs) ->
    %% All request tuples carry the gateway name in position 2; only
    %% already-existing atoms are accepted.
    GwName = binary_to_existing_atom(element(2, Req)),
    NewGwConf = maps:get(GwName, NewConfig, undefined),
    OldGwConf = maps:get(GwName, OldConfig, undefined),
    apply_config_change(GwName, NewGwConf, OldGwConf).

%% @private Dispatch on presence of the gateway conf before/after.
apply_config_change(_GwName, undefined, undefined) ->
    %% Nothing existed and nothing exists now: nothing to do.
    ok;
apply_config_change(GwName, undefined, Old) when is_map(Old) ->
    %% Conf disappeared: stop the running gateway.
    emqx_gateway:unload(GwName);
apply_config_change(GwName, New, undefined) when is_map(New) ->
    %% Conf appeared: start the gateway.
    emqx_gateway:load(GwName, New);
apply_config_change(GwName, New, Old) when is_map(New), is_map(Old) ->
    %% Conf changed: apply the new settings to the running gateway.
    emqx_gateway:update(GwName, New).

View File

@ -29,8 +29,8 @@
-type context() ::
#{ %% Gateway Name
gwname := gateway_name()
%% Autenticator
, auth := emqx_authn:chain_id() | undefined
%% Authentication chains
, auth := [emqx_authentication:chain_name()] | undefined
%% The ConnectionManager PID
, cm := pid()
}.
@ -66,12 +66,8 @@
| {error, any()}.
authenticate(_Ctx = #{auth := undefined}, ClientInfo) ->
{ok, mountpoint(ClientInfo)};
authenticate(_Ctx = #{auth := ChainId}, ClientInfo0) ->
ClientInfo = ClientInfo0#{
zone => default,
listener => {tcp, default},
chain_id => ChainId
},
authenticate(_Ctx = #{auth := _ChainName}, ClientInfo0) ->
ClientInfo = ClientInfo0#{zone => default},
case emqx_access_control:authenticate(ClientInfo) of
{ok, _} ->
{ok, mountpoint(ClientInfo)};

View File

@ -27,11 +27,22 @@
%% Mgmt APIs - listeners
-export([ listeners/1
, listener/1
, add_listener/2
, remove_listener/1
, update_listener/2
, mapping_listener_m2l/2
]).
-export([ authn/1
, authn/2
, add_authn/2
, add_authn/3
, update_authn/2
, update_authn/3
, remove_authn/1
, remove_authn/2
]).
%% Mgmt APIs - clients
-export([ lookup_client/3
, lookup_client/4
@ -171,12 +182,13 @@ listener(GwName, Type, Conf) ->
[begin
ListenerId = emqx_gateway_utils:listener_id(GwName, Type, LName),
Running = is_running(ListenerId, LConf),
LConf#{
id => ListenerId,
type => Type,
name => LName,
running => Running
}
bind2str(
LConf#{
id => ListenerId,
type => Type,
name => LName,
running => Running
})
end || {LName, LConf} <- Conf, is_map(LConf)].
is_running(ListenerId, #{<<"bind">> := ListenOn0}) ->
@ -188,27 +200,78 @@ is_running(ListenerId, #{<<"bind">> := ListenOn0}) ->
false
end.
-spec remove_listener(binary()) -> ok | {error, not_found} | {error, any()}.
remove_listener(ListenerId) ->
{GwName, Type, Name} = emqx_gateway_utils:parse_listener_id(ListenerId),
LConf = emqx:get_raw_config(
[<<"gateway">>, GwName, <<"listeners">>, Type]
),
NLConf = maps:remove(Name, LConf),
emqx_gateway:update_rawconf(
GwName,
#{<<"listeners">> => #{Type => NLConf}}
).
%% Normalize the `bind' value of a listener conf to a binary string,
%% under whichever key form (atom or binary) the conf uses. Integer
%% ports become binaries; binary binds pass through untouched.
bind2str(#{bind := Port} = Conf) when is_integer(Port) ->
    Conf#{bind := integer_to_binary(Port)};
bind2str(#{<<"bind">> := Port} = Conf) when is_integer(Port) ->
    Conf#{<<"bind">> := integer_to_binary(Port)};
bind2str(#{bind := Bind} = Conf) when is_binary(Bind) ->
    Conf;
bind2str(#{<<"bind">> := Bind} = Conf) when is_binary(Bind) ->
    Conf.
-spec update_listener(atom() | binary(), map()) -> ok | {error, any()}.
update_listener(ListenerId, NewConf0) ->
-spec add_listener(atom() | binary(), map()) -> ok.
add_listener(ListenerId, NewConf0) ->
{GwName, Type, Name} = emqx_gateway_utils:parse_listener_id(ListenerId),
NewConf = maps:without([<<"id">>, <<"name">>,
<<"type">>, <<"running">>], NewConf0),
emqx_gateway:update_rawconf(
GwName,
#{<<"listeners">> => #{Type => #{Name => NewConf}}
}).
confexp(emqx_gateway_conf:add_listener(GwName, {Type, Name}, NewConf)).
%% @doc Update an existing listener, addressed by its listener id.
%% Raises via confexp/1 ({update_conf_error, not_found}) when the
%% listener does not exist; with_gateway/2 maps that to HTTP 404.
-spec update_listener(atom() | binary(), map()) -> ok.
update_listener(ListenerId, NewConf0) ->
    {GwName, Type, Name} = emqx_gateway_utils:parse_listener_id(ListenerId),
    %% strip the read-only fields injected for API presentation only
    NewConf = maps:without([<<"id">>, <<"name">>,
                            <<"type">>, <<"running">>], NewConf0),
    confexp(emqx_gateway_conf:update_listener(GwName, {Type, Name}, NewConf)).
%% @doc Remove a listener, addressed by its listener id.
%% Raises via confexp/1 on error; returns ok on success.
-spec remove_listener(binary()) -> ok.
remove_listener(ListenerId) ->
    {GwName, Type, Name} = emqx_gateway_utils:parse_listener_id(ListenerId),
    confexp(emqx_gateway_conf:remove_listener(GwName, {Type, Name})).
%% @doc Fetch the gateway-level (default) authentication config as a
%% JSON-serializable map. emqx:get_config/1 raises {config_not_found, _}
%% when unset; with_gateway/2 turns that into HTTP 404.
-spec authn(gateway_name()) -> map().
authn(GwName) ->
    Path = [gateway, GwName, authentication],
    emqx_map_lib:jsonable_map(emqx:get_config(Path)).
%% @doc Fetch a per-listener authentication config as a JSON-serializable
%% map. The gateway name embedded in ListenerId is ignored; the GwName
%% argument is authoritative.
-spec authn(gateway_name(), binary()) -> map().
authn(GwName, ListenerId) ->
    {_, Type, Name} = emqx_gateway_utils:parse_listener_id(ListenerId),
    Path = [gateway, GwName, listeners, Type, Name, authentication],
    emqx_map_lib:jsonable_map(emqx:get_config(Path)).
%% @doc Create the gateway-level authenticator.
%% Raises via confexp/1 when the gateway is missing or authn already set.
-spec add_authn(gateway_name(), map()) -> ok.
add_authn(GwName, AuthConf) ->
    confexp(emqx_gateway_conf:add_authn(GwName, AuthConf)).
%% @doc Create an authenticator on a specific listener (by listener id).
%% Raises via confexp/1 on conflict or missing listener.
-spec add_authn(gateway_name(), binary(), map()) -> ok.
add_authn(GwName, ListenerId, AuthConf) ->
    {_, Type, Name} = emqx_gateway_utils:parse_listener_id(ListenerId),
    confexp(emqx_gateway_conf:add_authn(GwName, {Type, Name}, AuthConf)).
%% @doc Replace the gateway-level authenticator config.
%% Raises via confexp/1 when no authenticator exists yet.
-spec update_authn(gateway_name(), map()) -> ok.
update_authn(GwName, AuthConf) ->
    confexp(emqx_gateway_conf:update_authn(GwName, AuthConf)).
%% @doc Replace the authenticator config of a specific listener.
%% Raises via confexp/1 when listener or authenticator is missing.
-spec update_authn(gateway_name(), binary(), map()) -> ok.
update_authn(GwName, ListenerId, AuthConf) ->
    {_, Type, Name} = emqx_gateway_utils:parse_listener_id(ListenerId),
    confexp(emqx_gateway_conf:update_authn(GwName, {Type, Name}, AuthConf)).
%% @doc Delete the gateway-level authenticator (idempotent at the
%% conf layer; errors surface via confexp/1).
-spec remove_authn(gateway_name()) -> ok.
remove_authn(GwName) ->
    confexp(emqx_gateway_conf:remove_authn(GwName)).
%% @doc Delete the authenticator of a specific listener (by listener id).
%% Errors surface via confexp/1.
-spec remove_authn(gateway_name(), binary()) -> ok.
remove_authn(GwName, ListenerId) ->
    {_, Type, Name} = emqx_gateway_utils:parse_listener_id(ListenerId),
    confexp(emqx_gateway_conf:remove_authn(GwName, {Type, Name})).
%% Turn the {error, _} returns of emqx_gateway_conf into exceptions so
%% with_gateway/2 can map them onto HTTP status codes (404 / 400).
confexp(ok) -> ok;
confexp({error, Reason}) when Reason =:= not_found;
                              Reason =:= already_exist ->
    error({update_conf_error, Reason}).
%%--------------------------------------------------------------------
%% Mgmt APIs - clients
@ -328,10 +391,22 @@ with_gateway(GwName0, Fun) ->
catch
error : badname ->
return_http_error(404, "Bad gateway name");
%% Exceptions from: checks/2
error : {miss_param, K} ->
return_http_error(400, [K, " is required"]);
%% Exceptions from emqx_gateway_utils:parse_listener_id/1
error : {invalid_listener_id, Id} ->
return_http_error(400, ["invalid listener id: ", Id]);
%% Exceptions from: emqx:get_config/1
error : {config_not_found, Path0} ->
Path = lists:concat(
lists:join(".", lists:map(fun to_list/1, Path0))),
return_http_error(404, "Resource not found. path: " ++ Path);
%% Exceptions from: confexp/1
error : {update_conf_error, not_found} ->
return_http_error(404, "Resource not found");
error : {update_conf_error, already_exist} ->
return_http_error(400, "Resource already exist");
Class : Reason : Stk ->
?LOG(error, "Uncatched error: {~p, ~p}, stacktrace: ~0p",
[Class, Reason, Stk]),
@ -348,6 +423,11 @@ checks([K|Ks], Map) ->
error({miss_param, K})
end.
%% Render a config-path segment (atom or binary) as a string, used when
%% joining a path into a human-readable error message.
to_list(Seg) when is_atom(Seg) ->
    atom_to_list(Seg);
to_list(Seg) when is_binary(Seg) ->
    binary_to_list(Seg).
%%--------------------------------------------------------------------
%% common schemas

View File

@ -43,6 +43,7 @@
name :: gateway_name(),
config :: emqx_config:config(),
ctx :: emqx_gateway_ctx:context(),
authns :: [emqx_authentication:chain_name()],
status :: stopped | running,
child_pids :: [pid()],
gw_state :: emqx_gateway_impl:state() | undefined,
@ -94,16 +95,23 @@ init([Gateway, Ctx, _GwDscrptr]) ->
State = #state{
ctx = Ctx,
name = GwName,
authns = [],
config = Config,
child_pids = [],
status = stopped,
created_at = erlang:system_time(millisecond)
},
case cb_gateway_load(State) of
{error, Reason} ->
{stop, {load_gateway_failure, Reason}};
{ok, NState} ->
{ok, NState}
case maps:get(enable, Config, true) of
false ->
?LOG(info, "Skipp to start ~s gateway due to disabled", [GwName]),
{ok, State};
true ->
case cb_gateway_load(State) of
{error, Reason} ->
{stop, {load_gateway_failure, Reason}};
{ok, NState} ->
{ok, NState}
end
end.
handle_call(info, _From, State) ->
@ -174,9 +182,9 @@ handle_info(Info, State) ->
?LOG(warning, "Unexcepted info: ~p", [Info]),
{noreply, State}.
terminate(_Reason, State = #state{ctx = Ctx, child_pids = Pids}) ->
terminate(_Reason, State = #state{child_pids = Pids}) ->
Pids /= [] andalso (_ = cb_gateway_unload(State)),
_ = do_deinit_authn(maps:get(auth, Ctx, undefined)),
_ = do_deinit_authn(State#state.authns),
ok.
code_change(_OldVsn, State, _Extra) ->
@ -197,52 +205,102 @@ detailed_gateway_info(State) ->
%% Internal funcs
%%--------------------------------------------------------------------
do_init_authn(GwName, Config) ->
case maps:get(authentication, Config, #{enable => false}) of
#{enable := false} -> undefined;
AuthCfg when is_map(AuthCfg) ->
case maps:get(enable, AuthCfg, true) of
false ->
undefined;
_ ->
%% TODO: Implement Authentication
GwName
%case emqx_authn:create_chain(#{id => ChainId}) of
% {ok, _ChainInfo} ->
% case emqx_authn:create_authenticator(ChainId, AuthCfg) of
% {ok, _} -> ChainId;
% {error, Reason} ->
% ?LOG(error, "Failed to create authentication ~p", [Reason]),
% throw({bad_authentication, Reason})
% end;
% {error, Reason} ->
% ?LOG(error, "Failed to create authentication chain: ~p", [Reason]),
% throw({bad_chain, {ChainId, Reason}})
%end.
end;
_ ->
undefined
%% same with emqx_authentication:global_chain/1
%% Map a gateway name onto its global authentication chain name; any
%% unrecognised gateway shares the 'unknown:global' chain.
global_chain(GwName) ->
    maps:get(GwName,
             #{ mqtt      => 'mqtt:global'
              , 'mqtt-sn' => 'mqtt-sn:global'
              , coap      => 'coap:global'
              , lwm2m     => 'lwm2m:global'
              , stomp     => 'stomp:global'
              },
             'unknown:global').
%% A listener's authentication chain is named by its listener id.
listener_chain(GwName, LisType, LisName) ->
    emqx_gateway_utils:listener_id(GwName, LisType, LisName).
%% There are two layer authentication configs
%%       stomp.authn
%%          /                       \
%% listeners.tcp.default.authn    *.ssl.default.authn
%%
%% @doc Create the authentication chains (global + per-listener) that a
%% gateway config declares; returns the list of chain names created.
%% If any chain creation throws {badauth, _}, every chain name listed in
%% the config is torn down before re-throwing, so no partial state leaks
%% (deleting a never-created chain is a no-op in do_deinit_authn/1).
init_authn(GwName, Config) ->
    Authns = authns(GwName, Config),
    try
        do_init_authn(Authns, [])
    catch
        throw : Reason = {badauth, _} ->
            do_deinit_authn(proplists:get_keys(Authns)),
            throw(Reason)
    end.
do_deinit_authn(undefined) ->
ok;
do_deinit_authn(AuthnRef) ->
%% TODO:
?LOG(warning, "Failed to clean authn ~p, not suppported now", [AuthnRef]).
%case emqx_authn:delete_chain(AuthnRef) of
% ok -> ok;
% {error, {not_found, _}} ->
% ?LOG(warning, "Failed to clean authentication chain: ~s, "
% "reason: not_found", [AuthnRef]);
% {error, Reason} ->
% ?LOG(error, "Failed to clean authentication chain: ~s, "
% "reason: ~p", [AuthnRef, Reason])
%end.
%% Walk the {ChainName, AuthnConf} pairs, creating a chain/authenticator
%% for each enabled map config, and accumulate the created chain names
%% onto Names. Disabled or non-map entries are skipped silently; a
%% creation failure propagates as a {badauth, _} throw.
do_init_authn(AuthnConfs, Names) ->
    lists:foldl(
      fun({_ChainName, #{enable := false}}, Acc) ->
              Acc;
         ({ChainName, AuthConf}, Acc) when is_map(AuthConf) ->
              _ = application:ensure_all_started(emqx_authn),
              do_create_authn_chain(ChainName, AuthConf),
              [ChainName | Acc];
         (_BadConf, Acc) ->
              Acc
      end, Names, AuthnConfs).
%% @doc Enumerate every {ChainName, AuthnConf} pair a gateway config
%% implies: one entry per listener, plus the gateway-global entry
%% appended last. Entries with no `authentication' key get the disabled
%% placeholder from authn_conf/1.
authns(GwName, Config) ->
    Listeners = maps:to_list(maps:get(listeners, Config, #{})),
    lists:append(
      [ [{listener_chain(GwName, LisType, LisName), authn_conf(Opts)}
         || {LisName, Opts} <- maps:to_list(LisNames) ]
        || {LisType, LisNames} <- Listeners])
    ++ [{global_chain(GwName), authn_conf(Config)}].
%% Extract the `authentication' section of a (gateway or listener) conf,
%% defaulting to a disabled placeholder when none is configured.
authn_conf(Conf) ->
    case Conf of
        #{authentication := AuthConf} -> AuthConf;
        _ -> #{enable => false}
    end.
%% @doc Ensure the chain ChainName exists, then install the authenticator
%% described by AuthConf on it. Any failure is logged and re-thrown as
%% {badauth, Reason} so init_authn/2 can roll back chains created so far.
do_create_authn_chain(ChainName, AuthConf) ->
    case ensure_chain(ChainName) of
        ok ->
            case emqx_authentication:create_authenticator(ChainName, AuthConf) of
                {ok, _} -> ok;
                {error, Reason} ->
                    ?LOG(error, "Failed to create authenticator chain ~s, "
                                "reason: ~p, config: ~p",
                                [ChainName, Reason, AuthConf]),
                    throw({badauth, Reason})
            end;
        {error, Reason} ->
            %% fix: log message typo "Falied" -> "Failed"
            ?LOG(error, "Failed to create authn chain ~s, reason ~p",
                        [ChainName, Reason]),
            throw({badauth, Reason})
    end.
%% Create the authentication chain if it does not exist yet. An
%% already-existing chain counts as success; any other failure is
%% passed through unchanged as {error, Reason}.
ensure_chain(ChainName) ->
    case emqx_authentication:create_chain(ChainName) of
        {ok, _ChainInfo} -> ok;
        {error, {already_exists, _}} -> ok;
        {error, _Reason} = Error -> Error
    end.
%% @doc Delete the given authentication chains. Best-effort teardown:
%% an already-missing chain is treated as deleted, and any other error
%% is only logged so every name in the list gets processed.
do_deinit_authn(Names) ->
    lists:foreach(fun(ChainName) ->
        case emqx_authentication:delete_chain(ChainName) of
            ok -> ok;
            {error, {not_found, _}} -> ok;
            {error, Reason} ->
                ?LOG(error, "Failed to clean authentication chain: ~s, "
                            "reason: ~p", [ChainName, Reason])
        end
    end, Names).
do_update_one_by_one(NCfg0, State = #state{
ctx = Ctx,
config = OCfg,
status = Status}) ->
config = OCfg,
status = Status}) ->
NCfg = emqx_map_lib:deep_merge(OCfg, NCfg0),
@ -263,14 +321,9 @@ do_update_one_by_one(NCfg0, State = #state{
true -> State;
false ->
%% Reset Authentication first
_ = do_deinit_authn(maps:get(auth, Ctx, undefined)),
NCtx = Ctx#{
auth => do_init_authn(
State#state.name,
NCfg
)
},
State#state{ctx = NCtx}
_ = do_deinit_authn(State#state.authns),
AuthnNames = init_authn(State#state.name, NCfg),
State#state{authns = AuthnNames}
end,
cb_gateway_update(NCfg, NState);
Status == running, NEnable == false ->
@ -289,6 +342,7 @@ cb_gateway_unload(State = #state{name = GwName,
#{cbkmod := CbMod} = emqx_gateway_registry:lookup(GwName),
CbMod:on_gateway_unload(Gateway, GwState),
{ok, State#state{child_pids = [],
authns = [],
status = stopped,
gw_state = undefined,
started_at = undefined,
@ -300,6 +354,8 @@ cb_gateway_unload(State = #state{name = GwName,
[GwName, GwState,
Class, Reason, Stk]),
{error, {Class, Reason, Stk}}
after
_ = do_deinit_authn(State#state.authns)
end.
%% @doc 1. Create Authentcation Context
@ -311,38 +367,33 @@ cb_gateway_load(State = #state{name = GwName,
ctx = Ctx}) ->
Gateway = detailed_gateway_info(State),
case maps:get(enable, Config, true) of
false ->
?LOG(info, "Skipp to start ~s gateway due to disabled", [GwName]);
true ->
try
AuthnRef = do_init_authn(GwName, Config),
NCtx = Ctx#{auth => AuthnRef},
#{cbkmod := CbMod} = emqx_gateway_registry:lookup(GwName),
case CbMod:on_gateway_load(Gateway, NCtx) of
{error, Reason} ->
do_deinit_authn(AuthnRef),
throw({callback_return_error, Reason});
{ok, ChildPidOrSpecs, GwState} ->
ChildPids = start_child_process(ChildPidOrSpecs),
{ok, State#state{
ctx = NCtx,
status = running,
child_pids = ChildPids,
gw_state = GwState,
stopped_at = undefined,
started_at = erlang:system_time(millisecond)
}}
end
catch
Class : Reason1 : Stk ->
?LOG(error, "Failed to load ~s gateway (~0p, ~0p) "
"crashed: {~p, ~p}, stacktrace: ~0p",
[GwName, Gateway, Ctx,
Class, Reason1, Stk]),
{error, {Class, Reason1, Stk}}
end
try
AuthnNames = init_authn(GwName, Config),
NCtx = Ctx#{auth => AuthnNames},
#{cbkmod := CbMod} = emqx_gateway_registry:lookup(GwName),
case CbMod:on_gateway_load(Gateway, NCtx) of
{error, Reason} ->
do_deinit_authn(AuthnNames),
throw({callback_return_error, Reason});
{ok, ChildPidOrSpecs, GwState} ->
ChildPids = start_child_process(ChildPidOrSpecs),
{ok, State#state{
ctx = NCtx,
authns = AuthnNames,
status = running,
child_pids = ChildPids,
gw_state = GwState,
stopped_at = undefined,
started_at = erlang:system_time(millisecond)
}}
end
catch
Class : Reason1 : Stk ->
?LOG(error, "Failed to load ~s gateway (~0p, ~0p) "
"crashed: {~p, ~p}, stacktrace: ~0p",
[GwName, Gateway, Ctx,
Class, Reason1, Stk]),
{error, {Class, Reason1, Stk}}
end.
cb_gateway_update(Config,

View File

@ -50,11 +50,11 @@ namespace() -> gateway.
roots() -> [gateway].
fields(gateway) ->
[{stomp, sc(ref(stomp))},
{mqttsn, sc(ref(mqttsn))},
{coap, sc(ref(coap))},
{lwm2m, sc(ref(lwm2m))},
{exproto, sc(ref(exproto))}
[{stomp, sc_meta(ref(stomp) , #{nullable => {true, recursively}})},
{mqttsn, sc_meta(ref(mqttsn) , #{nullable => {true, recursively}})},
{coap, sc_meta(ref(coap) , #{nullable => {true, recursively}})},
{lwm2m, sc_meta(ref(lwm2m) , #{nullable => {true, recursively}})},
{exproto, sc_meta(ref(exproto), #{nullable => {true, recursively}})}
];
fields(stomp) ->
@ -92,10 +92,10 @@ fields(coap) ->
fields(lwm2m) ->
[ {xml_dir, sc(binary())}
, {lifetime_min, sc(duration())}
, {lifetime_max, sc(duration())}
, {qmode_time_windonw, sc(integer())}
, {auto_observe, sc(boolean())}
, {lifetime_min, sc(duration(), "1s")}
, {lifetime_max, sc(duration(), "86400s")}
, {qmode_time_window, sc(integer(), 22)}
, {auto_observe, sc(boolean(), false)}
, {update_msg_publish_condition, sc(hoconsc:union([always, contains_object_list]))}
, {translators, sc(ref(translators))}
, {listeners, sc(ref(udp_listeners))}
@ -154,8 +154,8 @@ fields(udp_tcp_listeners) ->
];
fields(tcp_listener) ->
[
%% some special confs for tcp listener
[ %% some special confs for tcp listener
{acceptors, sc(integer(), 16)}
] ++
tcp_opts() ++
proxy_protocol_opts() ++
@ -175,6 +175,8 @@ fields(udp_listener) ->
common_listener_opts();
fields(dtls_listener) ->
[ {acceptors, sc(integer(), 16)}
] ++
fields(udp_listener) ++
[{dtls, sc_meta(ref(dtls_opts),
#{desc => "DTLS listener options"})}];
@ -191,29 +193,33 @@ fields(dtls_opts) ->
emqx_schema:server_ssl_opts_schema(
#{ depth => 10
, reuse_sessions => true
, versions => dtls
, ciphers => dtls
, versions => dtls_all_available
, ciphers => dtls_all_available
}, false).
% authentication() ->
% hoconsc:union(
% [ undefined
% , hoconsc:ref(emqx_authn_mnesia, config)
% , hoconsc:ref(emqx_authn_mysql, config)
% , hoconsc:ref(emqx_authn_pgsql, config)
% , hoconsc:ref(emqx_authn_mongodb, standalone)
% , hoconsc:ref(emqx_authn_mongodb, 'replica-set')
% , hoconsc:ref(emqx_authn_mongodb, 'sharded-cluster')
% , hoconsc:ref(emqx_authn_redis, standalone)
% , hoconsc:ref(emqx_authn_redis, cluster)
% , hoconsc:ref(emqx_authn_redis, sentinel)
% , hoconsc:ref(emqx_authn_http, get)
% , hoconsc:ref(emqx_authn_http, post)
% , hoconsc:ref(emqx_authn_jwt, 'hmac-based')
% , hoconsc:ref(emqx_authn_jwt, 'public-key')
% , hoconsc:ref(emqx_authn_jwt, 'jwks')
% , hoconsc:ref(emqx_enhanced_authn_scram_mnesia, config)
% ]).
authentication() ->
sc_meta(hoconsc:union(
[ hoconsc:ref(emqx_authn_mnesia, config)
, hoconsc:ref(emqx_authn_mysql, config)
, hoconsc:ref(emqx_authn_pgsql, config)
, hoconsc:ref(emqx_authn_mongodb, standalone)
, hoconsc:ref(emqx_authn_mongodb, 'replica-set')
, hoconsc:ref(emqx_authn_mongodb, 'sharded-cluster')
, hoconsc:ref(emqx_authn_redis, standalone)
, hoconsc:ref(emqx_authn_redis, cluster)
, hoconsc:ref(emqx_authn_redis, sentinel)
, hoconsc:ref(emqx_authn_http, get)
, hoconsc:ref(emqx_authn_http, post)
, hoconsc:ref(emqx_authn_jwt, 'hmac-based')
, hoconsc:ref(emqx_authn_jwt, 'public-key')
, hoconsc:ref(emqx_authn_jwt, 'jwks')
, hoconsc:ref(emqx_enhanced_authn_scram_mnesia, config)
]),
#{nullable => {true, recursively},
desc =>
"""Default authentication configs for all of the gateway listeners.<br>
For per-listener overrides see <code>authentication</code>
in listener configs"""}).
gateway_common_options() ->
[ {enable, sc(boolean(), true)}
@ -221,16 +227,15 @@ gateway_common_options() ->
, {idle_timeout, sc(duration(), <<"30s">>)}
, {mountpoint, sc(binary(), <<>>)}
, {clientinfo_override, sc(ref(clientinfo_override))}
, {authentication, sc(hoconsc:lazy(map()))}
, {authentication, authentication()}
].
common_listener_opts() ->
[ {enable, sc(boolean(), true)}
, {bind, sc(union(ip_port(), integer()))}
, {acceptors, sc(integer(), 16)}
, {max_connections, sc(integer(), 1024)}
, {max_conn_rate, sc(integer())}
%, {rate_limit, sc(comma_separated_list())}
, {authentication, authentication()}
, {mountpoint, sc(binary(), undefined)}
, {access_rules, sc(hoconsc:array(string()), [])}
].
@ -242,8 +247,8 @@ udp_opts() ->
[{udp, sc_meta(ref(udp_opts), #{})}].
proxy_protocol_opts() ->
[ {proxy_protocol, sc(boolean())}
, {proxy_protocol_timeout, sc(duration())}
[ {proxy_protocol, sc(boolean(), false)}
, {proxy_protocol_timeout, sc(duration(), "15s")}
].
sc(Type) ->

View File

@ -117,13 +117,18 @@ format_listenon({Addr, Port}) when is_tuple(Addr) ->
parse_listenon(Port) when is_integer(Port) ->
Port;
parse_listenon(IpPort) when is_tuple(IpPort) ->
IpPort;
parse_listenon(Str) when is_binary(Str) ->
parse_listenon(binary_to_list(Str));
parse_listenon(Str) when is_list(Str) ->
case emqx_schema:to_ip_port(Str) of
{ok, R} -> R;
{error, _} ->
error({invalid_listenon_name, Str})
try list_to_integer(Str)
catch _ : _ ->
case emqx_schema:to_ip_port(Str) of
{ok, R} -> R;
{error, _} ->
error({invalid_listenon_name, Str})
end
end.
listener_id(GwName, Type, LisName) ->
@ -226,11 +231,7 @@ sock_opts(Name, Opts) ->
%% Envs
active_n(Options) ->
maps:get(
active_n,
maps:get(listener, Options, #{active_n => ?ACTIVE_N}),
?ACTIVE_N
).
maps:get(active_n, Options, ?ACTIVE_N).
-spec idle_timeout(map()) -> pos_integer().
idle_timeout(Options) ->

View File

@ -139,7 +139,12 @@ init(ConnInfo = #{socktype := Socktype,
GRpcChann = maps:get(handler, Options),
PoolName = maps:get(pool_name, Options),
NConnInfo = default_conninfo(ConnInfo),
ClientInfo = default_clientinfo(ConnInfo),
ListenerId = case maps:get(listener, Options, undefined) of
undefined -> undefined;
{GwName, Type, LisName} ->
emqx_gateway_utils:listener_id(GwName, Type, LisName)
end,
ClientInfo = maps:put(listener, ListenerId, default_clientinfo(ConnInfo)),
Channel = #channel{
ctx = Ctx,
gcli = #{channel => GRpcChann, pool_name => PoolName},

View File

@ -156,6 +156,7 @@ start_listener(GwName, Ctx, Type, LisName, ListenOn, SocketOpts, Cfg) ->
Name = emqx_gateway_utils:listener_id(GwName, Type, LisName),
NCfg = Cfg#{
ctx => Ctx,
listener => {GwName, Type, LisName},
frame_mod => emqx_exproto_frame,
chann_mod => emqx_exproto_channel
},

View File

@ -89,9 +89,15 @@ init(ConnInfo = #{peername := {PeerHost, _},
#{ctx := Ctx} = Config) ->
Peercert = maps:get(peercert, ConnInfo, undefined),
Mountpoint = maps:get(mountpoint, Config, undefined),
ListenerId = case maps:get(listener, Config, undefined) of
undefined -> undefined;
{GwName, Type, LisName} ->
emqx_gateway_utils:listener_id(GwName, Type, LisName)
end,
ClientInfo = set_peercert_infos(
Peercert,
#{ zone => default
, listener => ListenerId
, protocol => lwm2m
, peerhost => PeerHost
, sockport => SockPort

View File

@ -102,6 +102,7 @@ start_listener(GwName, Ctx, {Type, LisName, ListenOn, SocketOpts, Cfg}) ->
start_listener(GwName, Ctx, Type, LisName, ListenOn, SocketOpts, Cfg) ->
Name = emqx_gateway_utils:listener_id(GwName, Type, LisName),
NCfg = Cfg#{ ctx => Ctx
, listener => {GwName, Type, LisName}
, frame_mod => emqx_coap_frame
, chann_mod => emqx_lwm2m_channel
},

View File

@ -116,9 +116,15 @@ init(ConnInfo = #{peername := {PeerHost, _},
Registry = maps:get(registry, Option),
GwId = maps:get(gateway_id, Option),
EnableQoS3 = maps:get(enable_qos3, Option, true),
ListenerId = case maps:get(listener, Option, undefined) of
undefined -> undefined;
{GwName, Type, LisName} ->
emqx_gateway_utils:listener_id(GwName, Type, LisName)
end,
ClientInfo = set_peercert_infos(
Peercert,
#{ zone => default
, listener => ListenerId
, protocol => 'mqtt-sn'
, peerhost => PeerHost
, sockport => SockPort

View File

@ -121,6 +121,7 @@ start_listener(GwName, Ctx, Type, LisName, ListenOn, SocketOpts, Cfg) ->
Name = emqx_gateway_utils:listener_id(GwName, Type, LisName),
NCfg = Cfg#{
ctx => Ctx,
listene => {GwName, Type, LisName},
frame_mod => emqx_sn_frame,
chann_mod => emqx_sn_channel
},
@ -138,13 +139,13 @@ merge_default(Options) ->
end.
stop_listener(GwName, {Type, LisName, ListenOn, SocketOpts, Cfg}) ->
StopRet = stop_listener(GwName, LisName, Type, ListenOn, SocketOpts, Cfg),
StopRet = stop_listener(GwName, Type, LisName, ListenOn, SocketOpts, Cfg),
ListenOnStr = emqx_gateway_utils:format_listenon(ListenOn),
case StopRet of
ok -> ?ULOG("Gateway ~s:~s:~s on ~s stopped.~n",
[GwName, Type, LisName, ListenOnStr]);
{error, Reason} ->
?ELOG("Failed to stop gatewat ~s:~s:~s on ~s: ~0p~n",
?ELOG("Failed to stop gateway ~s:~s:~s on ~s: ~0p~n",
[GwName, Type, LisName, ListenOnStr, Reason])
end,
StopRet.

View File

@ -109,10 +109,15 @@ init(ConnInfo = #{peername := {PeerHost, _},
sockname := {_, SockPort}}, Option) ->
Peercert = maps:get(peercert, ConnInfo, undefined),
Mountpoint = maps:get(mountpoint, Option, undefined),
ListenerId = case maps:get(listener, Option, undefined) of
undefined -> undefined;
{GwName, Type, LisName} ->
emqx_gateway_utils:listener_id(GwName, Type, LisName)
end,
ClientInfo = setting_peercert_infos(
Peercert,
#{ zone => default
, listener => {tcp, default}
, listener => ListenerId
, protocol => stomp
, peerhost => PeerHost
, sockport => SockPort

View File

@ -106,6 +106,7 @@ start_listener(GwName, Ctx, Type, LisName, ListenOn, SocketOpts, Cfg) ->
Name = emqx_gateway_utils:listener_id(GwName, Type, LisName),
NCfg = Cfg#{
ctx => Ctx,
listener => {GwName, Type, LisName}, %% Used for authn
frame_mod => emqx_stomp_frame,
chann_mod => emqx_stomp_channel
},

View File

@ -34,7 +34,6 @@ gateway.coap
connection_required = true
subscribe_qos = qos1
publish_qos = qos1
authentication = undefined
listeners.udp.default
{bind = 5683}
@ -113,24 +112,24 @@ t_publish(_Config) ->
with_connection(Action).
t_publish_authz_deny(_Config) ->
Action = fun(Channel, Token) ->
Topic = <<"/abc">>,
Payload = <<"123">>,
InvalidToken = lists:reverse(Token),
TopicStr = binary_to_list(Topic),
URI = ?PS_PREFIX ++ TopicStr ++ "?clientid=client1&token=" ++ InvalidToken,
%% Sub topic first
emqx:subscribe(Topic),
Req = make_req(post, Payload),
Result = do_request(Channel, URI, Req),
?assertEqual({error, reset}, Result)
end,
with_connection(Action).
%t_publish_authz_deny(_Config) ->
% Action = fun(Channel, Token) ->
% Topic = <<"/abc">>,
% Payload = <<"123">>,
% InvalidToken = lists:reverse(Token),
%
% TopicStr = binary_to_list(Topic),
% URI = ?PS_PREFIX ++ TopicStr ++ "?clientid=client1&token=" ++ InvalidToken,
%
% %% Sub topic first
% emqx:subscribe(Topic),
%
% Req = make_req(post, Payload),
% Result = do_request(Channel, URI, Req),
% ?assertEqual({error, reset}, Result)
% end,
%
% with_connection(Action).
t_subscribe(_Config) ->
Topic = <<"/abc">>,

View File

@ -25,20 +25,18 @@
-define(CONF_DEFAULT, <<"
gateway.coap {
idle_timeout = 30s
enable_stats = false
mountpoint = \"\"
notify_type = qos
connection_required = true
subscribe_qos = qos1
publish_qos = qos1
authentication = undefined
listeners.udp.default {
bind = 5683
}
}
">>).
idle_timeout = 30s
enable_stats = false
mountpoint = \"\"
notify_type = qos
connection_required = true
subscribe_qos = qos1
publish_qos = qos1
listeners.udp.default {
bind = 5683
}
}
">>).
-define(HOST, "127.0.0.1").
-define(PORT, 5683).
@ -73,7 +71,7 @@ t_send_request_api(_) ->
Payload = <<"simple echo this">>,
Req = #{token => Token,
payload => Payload,
timeout => 10,
timeout => <<"10s">>,
content_type => <<"text/plain">>,
method => <<"get">>},
Auth = emqx_mgmt_api_test_util:auth_header_(),

View File

@ -0,0 +1,250 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_gateway_conf_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("eunit/include/eunit.hrl").
%%--------------------------------------------------------------------
%% Setups
%%--------------------------------------------------------------------
%% Run every t_* testcase exported by this suite.
all() ->
    emqx_ct:all(?MODULE).
%% Boot the gateway application against an empty `gateway {}' config.
init_per_suite(Conf) ->
    %% FIXME: Magic line. for saving gateway schema name for emqx_config
    emqx_config:init_load(emqx_gateway_schema, <<"gateway {}">>),
    emqx_ct_helpers:start_apps([emqx_gateway]),
    Conf.
%% Stop the application started in init_per_suite/1.
end_per_suite(_Conf) ->
    emqx_ct_helpers:stop_apps([emqx_gateway]).
%% Each case starts from a clean slate: drop any stomp gateway left
%% behind by a previous case (result deliberately ignored).
init_per_testcase(_CaseName, Conf) ->
    _ = emqx_gateway_conf:remove_gateway(stomp),
    Conf.
%%--------------------------------------------------------------------
%% Cases
%%--------------------------------------------------------------------
-define(CONF_STOMP_BAISC_1,
#{ <<"idle_timeout">> => <<"10s">>,
<<"mountpoint">> => <<"t/">>,
<<"frame">> =>
#{ <<"max_headers">> => 20,
<<"max_headers_length">> => 2000,
<<"max_body_length">> => 2000
}
}).
-define(CONF_STOMP_BAISC_2,
#{ <<"idle_timeout">> => <<"20s">>,
<<"mountpoint">> => <<"t2/">>,
<<"frame">> =>
#{ <<"max_headers">> => 30,
<<"max_headers_length">> => 3000,
<<"max_body_length">> => 3000
}
}).
-define(CONF_STOMP_LISTENER_1,
#{ <<"bind">> => <<"61613">>
}).
-define(CONF_STOMP_LISTENER_2,
#{ <<"bind">> => <<"61614">>
}).
-define(CONF_STOMP_AUTHN_1,
#{ <<"mechanism">> => <<"password-based">>,
<<"backend">> => <<"built-in-database">>,
<<"user_id_type">> => <<"clientid">>
}).
-define(CONF_STOMP_AUTHN_2,
#{ <<"mechanism">> => <<"password-based">>,
<<"backend">> => <<"built-in-database">>,
<<"user_id_type">> => <<"username">>
}).
%% Gateway lifecycle: load -> duplicate load rejected -> update -> remove
%% (remove is idempotent) -> update after remove fails, and the raw
%% config path is gone.
t_load_remove_gateway(_) ->
    StompConf1 = compose(?CONF_STOMP_BAISC_1,
                         ?CONF_STOMP_AUTHN_1,
                         ?CONF_STOMP_LISTENER_1
                        ),
    StompConf2 = compose(?CONF_STOMP_BAISC_2,
                         ?CONF_STOMP_AUTHN_1,
                         ?CONF_STOMP_LISTENER_1),
    ok = emqx_gateway_conf:load_gateway(stomp, StompConf1),
    {error, already_exist} =
        emqx_gateway_conf:load_gateway(stomp, StompConf1),
    assert_confs(StompConf1, emqx:get_raw_config([gateway, stomp])),
    ok = emqx_gateway_conf:update_gateway(stomp, StompConf2),
    assert_confs(StompConf2, emqx:get_raw_config([gateway, stomp])),
    ok = emqx_gateway_conf:remove_gateway(stomp),
    ok = emqx_gateway_conf:remove_gateway(stomp),
    {error, not_found} =
        emqx_gateway_conf:update_gateway(stomp, StompConf2),
    ?assertException(error, {config_not_found, [gateway, stomp]},
                     emqx:get_raw_config([gateway, stomp])),
    ok.
%% Gateway-level authn lifecycle: add -> update -> remove; once removed,
%% update fails with not_found and the config path no longer resolves.
t_load_remove_authn(_) ->
    StompConf = compose_listener(?CONF_STOMP_BAISC_1, ?CONF_STOMP_LISTENER_1),
    ok = emqx_gateway_conf:load_gateway(<<"stomp">>, StompConf),
    assert_confs(StompConf, emqx:get_raw_config([gateway, stomp])),
    ok = emqx_gateway_conf:add_authn(<<"stomp">>, ?CONF_STOMP_AUTHN_1),
    assert_confs(
      maps:put(<<"authentication">>, ?CONF_STOMP_AUTHN_1, StompConf),
      emqx:get_raw_config([gateway, stomp])),
    ok = emqx_gateway_conf:update_authn(<<"stomp">>, ?CONF_STOMP_AUTHN_2),
    assert_confs(
      maps:put(<<"authentication">>, ?CONF_STOMP_AUTHN_2, StompConf),
      emqx:get_raw_config([gateway, stomp])),
    ok = emqx_gateway_conf:remove_authn(<<"stomp">>),
    {error, not_found} =
        emqx_gateway_conf:update_authn(<<"stomp">>, ?CONF_STOMP_AUTHN_2),
    ?assertException(
       error, {config_not_found, [gateway, stomp, authentication]},
       emqx:get_raw_config([gateway, stomp, authentication])
      ),
    ok.
%% Listener lifecycle: add -> update -> remove; once removed, update
%% fails with not_found and the listener config path no longer resolves.
t_load_remove_listeners(_) ->
    StompConf = compose_authn(?CONF_STOMP_BAISC_1, ?CONF_STOMP_AUTHN_1),
    ok = emqx_gateway_conf:load_gateway(<<"stomp">>, StompConf),
    assert_confs(StompConf, emqx:get_raw_config([gateway, stomp])),
    ok = emqx_gateway_conf:add_listener(
           <<"stomp">>, {<<"tcp">>, <<"default">>}, ?CONF_STOMP_LISTENER_1),
    assert_confs(
      maps:merge(StompConf, listener(?CONF_STOMP_LISTENER_1)),
      emqx:get_raw_config([gateway, stomp])),
    ok = emqx_gateway_conf:update_listener(
           <<"stomp">>, {<<"tcp">>, <<"default">>}, ?CONF_STOMP_LISTENER_2),
    assert_confs(
      maps:merge(StompConf, listener(?CONF_STOMP_LISTENER_2)),
      emqx:get_raw_config([gateway, stomp])),
    ok = emqx_gateway_conf:remove_listener(
           <<"stomp">>, {<<"tcp">>, <<"default">>}),
    {error, not_found} =
        emqx_gateway_conf:update_listener(
          <<"stomp">>, {<<"tcp">>, <<"default">>}, ?CONF_STOMP_LISTENER_2),
    ?assertException(
       error, {config_not_found, [gateway, stomp, listeners, tcp, default]},
       emqx:get_raw_config([gateway, stomp, listeners, tcp, default])
      ),
    ok.
%% Per-listener authn lifecycle: add -> update -> remove on the
%% tcp/default listener; once removed, update fails with not_found and
%% the listener's authentication path no longer resolves.
t_load_remove_listener_authn(_) ->
    StompConf = compose_listener(
                  ?CONF_STOMP_BAISC_1,
                  ?CONF_STOMP_LISTENER_1
                 ),
    StompConf1 = compose_listener_authn(
                   ?CONF_STOMP_BAISC_1,
                   ?CONF_STOMP_LISTENER_1,
                   ?CONF_STOMP_AUTHN_1
                  ),
    StompConf2 = compose_listener_authn(
                   ?CONF_STOMP_BAISC_1,
                   ?CONF_STOMP_LISTENER_1,
                   ?CONF_STOMP_AUTHN_2
                  ),
    ok = emqx_gateway_conf:load_gateway(<<"stomp">>, StompConf),
    assert_confs(StompConf, emqx:get_raw_config([gateway, stomp])),
    ok = emqx_gateway_conf:add_authn(
           <<"stomp">>, {<<"tcp">>, <<"default">>}, ?CONF_STOMP_AUTHN_1),
    assert_confs(StompConf1, emqx:get_raw_config([gateway, stomp])),
    ok = emqx_gateway_conf:update_authn(
           <<"stomp">>, {<<"tcp">>, <<"default">>}, ?CONF_STOMP_AUTHN_2),
    assert_confs(StompConf2, emqx:get_raw_config([gateway, stomp])),
    ok = emqx_gateway_conf:remove_authn(
           <<"stomp">>, {<<"tcp">>, <<"default">>}),
    {error, not_found} =
        emqx_gateway_conf:update_authn(
          <<"stomp">>, {<<"tcp">>, <<"default">>}, ?CONF_STOMP_AUTHN_2),
    Path = [gateway, stomp, listeners, tcp, default, authentication],
    ?assertException(
       error, {config_not_found, Path},
       emqx:get_raw_config(Path)
      ),
    ok.
%%--------------------------------------------------------------------
%% Utils
%% Build a full gateway raw-conf: basics + gateway-level authn + a
%% tcp/default listener.
compose(Basic, Authn, Listener) ->
    maps:merge(
      maps:merge(Basic, #{<<"authentication">> => Authn}),
      listener(Listener)).
%% Basic conf plus a tcp/default listener (no authentication).
compose_listener(Basic, Listener) ->
    maps:merge(Basic, listener(Listener)).
%% Basic conf plus a gateway-level authentication entry (overwrites any
%% existing one, same as maps:merge/2 would).
compose_authn(Basic, Authn) ->
    Basic#{<<"authentication">> => Authn}.
%% Basic conf plus a tcp/default listener that carries its own authn.
compose_listener_authn(Basic, Listener, Authn) ->
    maps:merge(
      Basic,
      listener(maps:put(<<"authentication">>, Authn, Listener))).
%% Wrap a single listener conf as a raw-config fragment under the
%% tcp/default path.
listener(LConf) ->
    #{<<"listeners">> =>
          #{<<"tcp">> =>
                #{<<"default">> => LConf}}}.
%% Assert helper: abort the testcase (exit conf_not_match) when the
%% expected conf is not a recursive subset of the effective one,
%% printing both for diagnosis.
assert_confs(Expected, Effected) ->
    case do_assert_confs(Expected, Effected) of
        false ->
            io:format(standard_error, "Expected config: ~p,\n"
                                      "Effected config: ~p",
                      [Expected, Effected]),
            exit(conf_not_match);
        true ->
            ok
    end.
%% Recursive subset check: every key of Expected must appear in Effected
%% with a (recursively) matching value; extra keys in Effected are
%% ignored. Non-map values compare with =:=.
do_assert_confs(Expected, Effected) when is_map(Expected),
                                         is_map(Effected) ->
    maps:fold(
      fun(Key, ExpVal, Acc) ->
              Acc andalso
                  do_assert_confs(ExpVal, maps:get(Key, Effected, undefined))
      end, true, Expected);
do_assert_confs(Expected, Effected) ->
    Expected =:= Effected.

View File

@ -33,7 +33,7 @@ gateway.lwm2m {
xml_dir = \"../../lib/emqx_gateway/src/lwm2m/lwm2m_xml\"
lifetime_min = 1s
lifetime_max = 86400s
qmode_time_windonw = 22
qmode_time_window = 22
auto_observe = false
mountpoint = \"lwm2m/%u\"
update_msg_publish_condition = contains_object_list

View File

@ -33,7 +33,7 @@ gateway.lwm2m {
xml_dir = \"../../lib/emqx_gateway/src/lwm2m/lwm2m_xml\"
lifetime_min = 1s
lifetime_max = 86400s
qmode_time_windonw = 22
qmode_time_window = 22
auto_observe = false
mountpoint = \"lwm2m/%u\"
update_msg_publish_condition = contains_object_list

View File

@ -35,8 +35,12 @@
%% @doc EMQ X boot entrypoint.
start() ->
os:set_signal(sighup, ignore),
os:set_signal(sigterm, handle), %% default is handle
case os:type() of
{win32, nt} -> ok;
_nix ->
os:set_signal(sighup, ignore),
os:set_signal(sigterm, handle) %% default is handle
end,
ok = set_backtrace_depth(),
ok = print_otp_version_warning(),
@ -146,7 +150,6 @@ reboot_apps() ->
, emqx_management
, emqx_retainer
, emqx_exhook
, emqx_rule_actions
, emqx_authn
, emqx_authz
].

View File

@ -102,7 +102,7 @@ fields("cluster") ->
, default => emqxcl
})}
, {"discovery_strategy",
sc(union([manual, static, mcast, dns, etcd, k8s]),
sc(hoconsc:enum([manual, static, mcast, dns, etcd, k8s]),
#{ default => manual
})}
, {"autoclean",
@ -122,7 +122,7 @@ fields("cluster") ->
sc(ref(cluster_mcast),
#{})}
, {"proto_dist",
sc(union([inet_tcp, inet6_tcp, inet_tls]),
sc(hoconsc:enum([inet_tcp, inet6_tcp, inet_tls]),
#{ mapping => "ekka.proto_dist"
, default => inet_tcp
})}
@ -136,7 +136,7 @@ fields("cluster") ->
sc(ref(cluster_k8s),
#{})}
, {"db_backend",
sc(union([mnesia, rlog]),
sc(hoconsc:enum([mnesia, rlog]),
#{ mapping => "ekka.db_backend"
, default => mnesia
})}
@ -224,7 +224,7 @@ fields(cluster_k8s) ->
#{ default => "emqx"
})}
, {"address_type",
sc(union([ip, dns, hostname]),
sc(hoconsc:enum([ip, dns, hostname]),
#{})}
, {"app_name",
sc(string(),
@ -242,7 +242,7 @@ fields(cluster_k8s) ->
fields("rlog") ->
[ {"role",
sc(union([core, replicant]),
sc(hoconsc:enum([core, replicant]),
#{ mapping => "ekka.node_role"
, default => core
})}
@ -334,7 +334,7 @@ fields("cluster_call") ->
fields("rpc") ->
[ {"mode",
sc(union(sync, async),
sc(hoconsc:enum([sync, async]),
#{ default => async
})}
, {"async_batch_size",
@ -343,7 +343,7 @@ fields("rpc") ->
, default => 256
})}
, {"port_discovery",
sc(union(manual, stateless),
sc(hoconsc:enum([manual, stateless]),
#{ mapping => "gen_rpc.port_discovery"
, default => stateless
})}
@ -434,7 +434,7 @@ fields("log_file_handler") ->
sc(ref("log_rotation"),
#{})}
, {"max_size",
sc(union([infinity, emqx_schema:bytesize()]),
sc(hoconsc:union([infinity, emqx_schema:bytesize()]),
#{ default => "10MB"
})}
] ++ log_handler_common_confs();
@ -464,7 +464,7 @@ fields("log_overload_kill") ->
#{ default => 20000
})}
, {"restart_after",
sc(union(emqx_schema:duration(), infinity),
sc(hoconsc:union([emqx_schema:duration(), infinity]),
#{ default => "5s"
})}
];
@ -582,7 +582,7 @@ log_handler_common_confs() ->
#{ default => unlimited
})}
, {"formatter",
sc(union([text, json]),
sc(hoconsc:enum([text, json]),
#{ default => text
})}
, {"single_line",
@ -608,11 +608,11 @@ log_handler_common_confs() ->
sc(ref("log_burst_limit"),
#{})}
, {"supervisor_reports",
sc(union([error, progress]),
sc(hoconsc:enum([error, progress]),
#{ default => error
})}
, {"max_depth",
sc(union([unlimited, integer()]),
sc(hoconsc:union([unlimited, integer()]),
#{ default => 100
})}
].

View File

@ -124,7 +124,7 @@ t_catch_up_status_handle_next_commit(_Config) ->
t_commit_ok_apply_fail_on_other_node_then_recover(_Config) ->
emqx_cluster_rpc:reset(),
{atomic, []} = emqx_cluster_rpc:status(),
Now = erlang:system_time(second),
Now = erlang:system_time(millisecond),
{M, F, A} = {?MODULE, failed_on_other_recover_after_5_second, [erlang:whereis(?NODE1), Now]},
{ok, _, ok} = emqx_cluster_rpc:multicall(M, F, A, 1, 1000),
{ok, _, ok} = emqx_cluster_rpc:multicall(io, format, ["test"], 1, 1000),
@ -132,10 +132,10 @@ t_commit_ok_apply_fail_on_other_node_then_recover(_Config) ->
?assertEqual([], L),
?assertEqual({io, format, ["test"]}, maps:get(mfa, Status)),
?assertEqual(node(), maps:get(node, Status)),
sleep(3000),
sleep(2300),
{atomic, [Status1]} = emqx_cluster_rpc:status(),
?assertEqual(Status, Status1),
sleep(2600),
sleep(3600),
{atomic, NewStatus} = emqx_cluster_rpc:status(),
?assertEqual(3, length(NewStatus)),
Pid = self(),
@ -243,11 +243,11 @@ failed_on_node_by_odd(Pid) ->
end.
failed_on_other_recover_after_5_second(Pid, CreatedAt) ->
Now = erlang:system_time(second),
Now = erlang:system_time(millisecond),
case Pid =:= self() of
true -> ok;
false ->
case Now < CreatedAt + 5 of
case Now < CreatedAt + 5001 of
true -> "MFA return not ok";
false -> ok
end

View File

@ -18,7 +18,9 @@
-include_lib("stdlib/include/qlc.hrl").
-export([paginate/3]).
-export([ paginate/3
, paginate/4
]).
%% first_next query APIs
-export([ params2qs/2
@ -47,6 +49,23 @@ paginate(Tables, Params, RowFun) ->
#{meta => #{page => Page, limit => Limit, count => Count},
data => [RowFun(Row) || Row <- Rows]}.
%% @doc Paginate rows selected from one or more ETS tables by a match
%% spec. Page/limit are read (as binaries or integers) from Params;
%% (Page - 1) * Limit answers are skipped through a QLC cursor, then
%% Limit rows are taken and each is mapped through RowFun. The meta
%% `count' is the TOTAL number of matching rows, not the page size.
%% NOTE(review): the cursor is not deleted if RowFun or the skip
%% raises — confirm callers treat such an error as fatal to the process.
paginate(Tables, MatchSpec, Params, RowFun) ->
    Qh = query_handle(Tables, MatchSpec),
    Count = count(Tables, MatchSpec),
    Page = b2i(page(Params)),
    Limit = b2i(limit(Params)),
    Cursor = qlc:cursor(Qh),
    %% Advance past all rows belonging to previous pages.
    case Page > 1 of
        true ->
            _ = qlc:next_answers(Cursor, (Page - 1) * Limit),
            ok;
        false -> ok
    end,
    Rows = qlc:next_answers(Cursor, Limit),
    qlc:delete_cursor(Cursor),
    #{meta => #{page => Page, limit => Limit, count => Count},
      data => [RowFun(Row) || Row <- Rows]}.
query_handle(Table) when is_atom(Table) ->
qlc:q([R|| R <- ets:table(Table)]);
query_handle([Table]) when is_atom(Table) ->
@ -54,6 +73,16 @@ query_handle([Table]) when is_atom(Table) ->
query_handle(Tables) ->
qlc:append([qlc:q([E || E <- ets:table(T)]) || T <- Tables]).
%% @doc Build a QLC handle over one or more ETS tables, traversed with
%% ets:select/1 using the given match spec.
query_handle(Table, MatchSpec) when is_atom(Table) ->
    Traverse = {traverse, {select, MatchSpec}},
    qlc:q([Row || Row <- ets:table(Table, Traverse)]);
query_handle([Table], MatchSpec) when is_atom(Table) ->
    %% Single-table list form: same handle as the atom form.
    query_handle(Table, MatchSpec);
query_handle(Tables, MatchSpec) ->
    qlc:append([query_handle(T, MatchSpec) || T <- Tables]).
count(Table) when is_atom(Table) ->
ets:info(Table, size);
count([Table]) when is_atom(Table) ->
@ -61,8 +90,16 @@ count([Table]) when is_atom(Table) ->
count(Tables) ->
lists:sum([count(T) || T <- Tables]).
count(Table, Nodes) ->
lists:sum([rpc_call(Node, ets, info, [Table, size], 5000) || Node <- Nodes]).
%% @doc Count rows matching MatchSpec in one or more ETS tables.
%% The match-spec body is rewritten to [true] because
%% ets:select_count/2 only counts objects whose body evaluates to true.
%% (Previously the atom and [Table] clauses duplicated this rewrite
%% verbatim; the list clause now delegates to the atom clause.)
count(Table, MatchSpec) when is_atom(Table) ->
    [{MatchPattern, Where, _Re}] = MatchSpec,
    NMatchSpec = [{MatchPattern, Where, [true]}],
    ets:select_count(Table, NMatchSpec);
count([Table], MatchSpec) when is_atom(Table) ->
    count(Table, MatchSpec);
count(Tables, MatchSpec) ->
    lists:sum([count(T, MatchSpec) || T <- Tables]).
page(Params) when is_map(Params) ->
maps:get(<<"page">>, Params, 1);
@ -122,7 +159,7 @@ cluster_query(Params, Tab, QsSchema, QueryFun) ->
Rows = do_cluster_query(Nodes, Tab, Qs, QueryFun, Start, Limit+1, []),
Meta = #{page => Page, limit => Limit},
NMeta = case CodCnt =:= 0 of
true -> Meta#{count => count(Tab, Nodes)};
true -> Meta#{count => lists:sum([rpc_call(Node, ets, info, [Tab, size], 5000) || Node <- Nodes])};
_ -> Meta#{count => length(Rows)}
end,
#{meta => NMeta, data => lists:sublist(Rows, Limit)}.

View File

@ -18,22 +18,19 @@
-behavior(minirest_api).
-import(emqx_mgmt_util, [ page_params/0
, schema/1
, schema/2
, object_schema/2
, error_schema/2
, page_object_schema/1
, properties/1
]).
-include_lib("typerefl/include/types.hrl").
-import(hoconsc, [mk/2, ref/1, ref/2]).
-define(MAX_PAYLOAD_LENGTH, 2048).
-define(PAYLOAD_TOO_LARGE, 'PAYLOAD_TOO_LARGE').
-export([ status/2
, delayed_messages/2
, delayed_message/2
]).
-export([status/2
, delayed_messages/2
, delayed_message/2
]).
-export([paths/0, fields/1, schema/1]).
%% for rpc
-export([update_config_/1]).
@ -49,91 +46,94 @@
-define(MESSAGE_ID_SCHEMA_ERROR, 'MESSAGE_ID_SCHEMA_ERROR').
api_spec() ->
{
[status_api(), delayed_messages_api(), delayed_message_api()],
[]
}.
emqx_dashboard_swagger:spec(?MODULE).
conf_schema() ->
emqx_mgmt_api_configs:gen_schema(emqx:get_raw_config([delayed])).
properties() ->
PayloadDesc = io_lib:format("Payload, base64 encode. Payload will be ~p if length large than ~p",
[?PAYLOAD_TOO_LARGE, ?MAX_PAYLOAD_LENGTH]),
properties([
{msgid, integer, <<"Message Id">>},
{publish_at, string, <<"Client publish message time, rfc 3339">>},
{delayed_interval, integer, <<"Delayed interval, second">>},
{delayed_remaining, integer, <<"Delayed remaining, second">>},
{expected_at, string, <<"Expect publish time, rfc 3339">>},
{topic, string, <<"Topic">>},
{qos, string, <<"QoS">>},
{payload, string, iolist_to_binary(PayloadDesc)},
{from_clientid, string, <<"From ClientId">>},
{from_username, string, <<"From Username">>}
]).
paths() -> ["/mqtt/delayed", "/mqtt/delayed/messages", "/mqtt/delayed/messages/:msgid"].
parameters() ->
[#{
name => msgid,
in => path,
schema => #{type => string},
required => true
}].
status_api() ->
Metadata = #{
schema("/mqtt/delayed") ->
#{
operationId => status,
get => #{
tags => [<<"mqtt">>],
description => <<"Get delayed status">>,
summary => <<"Get delayed status">>,
responses => #{
<<"200">> => schema(conf_schema())}
},
200 => ref(emqx_modules_schema, "delayed")
}
},
put => #{
tags => [<<"mqtt">>],
description => <<"Enable or disable delayed, set max delayed messages">>,
'requestBody' => schema(conf_schema()),
requestBody => ref(emqx_modules_schema, "delayed"),
responses => #{
<<"200">> =>
schema(conf_schema(), <<"Enable or disable delayed successfully">>),
<<"400">> =>
error_schema(<<"Max limit illegality">>, [?BAD_REQUEST])
200 => mk(ref(emqx_modules_schema, "delayed"),
#{desc => <<"Enable or disable delayed successfully">>}),
400 => emqx_dashboard_swagger:error_codes([?BAD_REQUEST], <<"Max limit illegality">>)
}
}
},
{"/mqtt/delayed", Metadata, status}.
};
delayed_messages_api() ->
Metadata = #{
get => #{
description => "List delayed messages",
parameters => page_params(),
responses => #{
<<"200">> => page_object_schema(properties())
}
}
},
{"/mqtt/delayed/messages", Metadata, delayed_messages}.
delayed_message_api() ->
Metadata = #{
schema("/mqtt/delayed/messages/:msgid") ->
#{operationId => delayed_message,
get => #{
tags => [<<"mqtt">>],
description => <<"Get delayed message">>,
parameters => parameters(),
parameters => [{msgid, mk(binary(), #{in => path, desc => <<"delay message ID">>})}],
responses => #{
<<"400">> => error_schema(<<"Message ID Schema error">>, [?MESSAGE_ID_SCHEMA_ERROR]),
<<"404">> => error_schema(<<"Message ID not found">>, [?MESSAGE_ID_NOT_FOUND]),
<<"200">> => object_schema(maps:without([payload], properties()), <<"Get delayed message success">>)
200 => ref("message_without_payload"),
400 => emqx_dashboard_swagger:error_codes([?MESSAGE_ID_SCHEMA_ERROR], <<"Bad MsgId format">>),
404 => emqx_dashboard_swagger:error_codes([?MESSAGE_ID_NOT_FOUND], <<"MsgId not found">>)
}
},
delete => #{
tags => [<<"mqtt">>],
description => <<"Delete delayed message">>,
parameters => parameters(),
parameters => [{msgid, mk(binary(), #{in => path, desc => <<"delay message ID">>})}],
responses => #{
<<"400">> => error_schema(<<"Message ID Schema error">>, [?MESSAGE_ID_SCHEMA_ERROR]),
<<"404">> => error_schema(<<"Message ID not found">>, [?MESSAGE_ID_NOT_FOUND]),
<<"200">> => schema(<<"Delete delayed message success">>)
200 => <<"Delete delayed message success">>,
400 => emqx_dashboard_swagger:error_codes([?MESSAGE_ID_SCHEMA_ERROR], <<"Bad MsgId format">>),
404 => emqx_dashboard_swagger:error_codes([?MESSAGE_ID_NOT_FOUND], <<"MsgId not found">>)
}
}
},
{"/mqtt/delayed/messages/:msgid", Metadata, delayed_message}.
};
schema("/mqtt/delayed/messages") ->
#{
operationId => delayed_messages,
get => #{
tags => [<<"mqtt">>],
description => <<"List delayed messages">>,
parameters => [ref(emqx_dashboard_swagger, page), ref(emqx_dashboard_swagger, limit)],
responses => #{
200 =>
[
{data, mk(hoconsc:array(ref("message")), #{})},
{meta, [
{page, mk(integer(), #{})},
{limit, mk(integer(), #{})},
{count, mk(integer(), #{})}
]}
]
}
}
}.
fields("message_without_payload") ->
[
{msgid, mk(integer(), #{desc => <<"Message Id (MQTT message id hash)">>})},
{publish_at, mk(binary(), #{desc => <<"Client publish message time, rfc 3339">>})},
{delayed_interval, mk(integer(), #{desc => <<"Delayed interval, second">>})},
{delayed_remaining, mk(integer(), #{desc => <<"Delayed remaining, second">>})},
{expected_at, mk(binary(), #{desc => <<"Expect publish time, rfc 3339">>})},
{topic, mk(binary(), #{desc => <<"Topic">>, example => <<"/sys/#">>})},
{qos, mk(binary(), #{desc => <<"QoS">>})},
{from_clientid, mk(binary(), #{desc => <<"From ClientId">>})},
{from_username, mk(binary(), #{desc => <<"From Username">>})}
];
fields("message") ->
PayloadDesc = io_lib:format("Payload, base64 encode. Payload will be ~p if length large than ~p",
[?PAYLOAD_TOO_LARGE, ?MAX_PAYLOAD_LENGTH]),
fields("message_without_payload") ++
[{payload, mk(binary(), #{desc => iolist_to_binary(PayloadDesc)})}].
%%--------------------------------------------------------------------
%% HTTP API
@ -210,7 +210,7 @@ generate_max_delayed_messages(Config) ->
update_config_(Config) ->
lists:foreach(fun(Node) ->
update_config_(Node, Config)
end, ekka_mnesia:running_nodes()).
end, ekka_mnesia:running_nodes()).
update_config_(Node, Config) when Node =:= node() ->
_ = emqx_delayed:update_config(Config),

View File

@ -1,11 +0,0 @@
# emqx_rule_actions
This project contains a collection of rule actions/resources. It is mainly for
making unit tests easier. It also makes it easier for us to create utils that
many modules depend on.
## Build
-----
$ rebar3 compile

View File

@ -1,25 +0,0 @@
{deps, []}.
{erl_opts, [warn_unused_vars,
warn_shadow_vars,
warn_unused_import,
warn_obsolete_guard,
no_debug_info,
compressed, %% for edge
{parse_transform}
]}.
{overrides, [{add, [{erl_opts, [no_debug_info, compressed]}]}]}.
{edoc_opts, [{preprocess, true}]}.
{xref_checks, [undefined_function_calls, undefined_functions,
locals_not_used, deprecated_function_calls,
warnings_as_errors, deprecated_functions
]}.
{cover_enabled, true}.
{cover_opts, [verbose]}.
{cover_export_enabled, true}.
{plugins, [rebar3_proper]}.

View File

@ -1,576 +0,0 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
%% @doc This module implements EMQX Bridge transport layer on top of MQTT protocol
-module(emqx_bridge_mqtt_actions).
-include_lib("emqx/include/emqx.hrl").
-include_lib("emqx/include/logger.hrl").
-include_lib("emqx_rule_engine/include/rule_actions.hrl").
-import(emqx_plugin_libs_rule, [str/1]).
-export([ on_resource_create/2
, on_get_resource_status/2
, on_resource_destroy/2
]).
%% Callbacks of ecpool Worker
-export([connect/1]).
-export([subscriptions/1]).
-export([ on_action_create_data_to_mqtt_broker/2
, on_action_data_to_mqtt_broker/2
]).
-define(RESOURCE_TYPE_MQTT, 'bridge_mqtt').
-define(RESOURCE_TYPE_RPC, 'bridge_rpc').
-define(RESOURCE_CONFIG_SPEC_MQTT, #{
address => #{
order => 1,
type => string,
required => true,
default => <<"127.0.0.1:1883">>,
title => #{en => <<" Broker Address">>,
zh => <<"远程 broker 地址"/utf8>>},
description => #{en => <<"The MQTT Remote Address">>,
zh => <<"远程 MQTT Broker 的地址"/utf8>>}
},
pool_size => #{
order => 2,
type => number,
required => true,
default => 8,
title => #{en => <<"Pool Size">>,
zh => <<"连接池大小"/utf8>>},
description => #{en => <<"MQTT Connection Pool Size">>,
zh => <<"连接池大小"/utf8>>}
},
clientid => #{
order => 3,
type => string,
required => true,
default => <<"client">>,
title => #{en => <<"ClientId">>,
zh => <<"客户端 Id"/utf8>>},
description => #{en => <<"ClientId for connecting to remote MQTT broker">>,
zh => <<"连接远程 Broker 的 ClientId"/utf8>>}
},
append => #{
order => 4,
type => boolean,
required => false,
default => true,
title => #{en => <<"Append GUID">>,
zh => <<"附加 GUID"/utf8>>},
description => #{en => <<"Append GUID to MQTT ClientId?">>,
zh => <<"是否将GUID附加到 MQTT ClientId 后"/utf8>>}
},
username => #{
order => 5,
type => string,
required => false,
default => <<"">>,
title => #{en => <<"Username">>, zh => <<"用户名"/utf8>>},
description => #{en => <<"Username for connecting to remote MQTT Broker">>,
zh => <<"连接远程 Broker 的用户名"/utf8>>}
},
password => #{
order => 6,
type => password,
required => false,
default => <<"">>,
title => #{en => <<"Password">>,
zh => <<"密码"/utf8>>},
description => #{en => <<"Password for connecting to remote MQTT Broker">>,
zh => <<"连接远程 Broker 的密码"/utf8>>}
},
mountpoint => #{
order => 7,
type => string,
required => false,
default => <<"bridge/aws/${node}/">>,
title => #{en => <<"Bridge MountPoint">>,
zh => <<"桥接挂载点"/utf8>>},
description => #{
en => <<"MountPoint for bridge topic:<br/>"
"Example: The topic of messages sent to `topic1` on local node "
"will be transformed to `bridge/aws/${node}/topic1`">>,
zh => <<"桥接主题的挂载点:<br/>"
"示例: 本地节点向 `topic1` 发消息,远程桥接节点的主题"
"会变换为 `bridge/aws/${node}/topic1`"/utf8>>
}
},
disk_cache => #{
order => 8,
type => boolean,
required => false,
default => false,
title => #{en => <<"Disk Cache">>,
zh => <<"磁盘缓存"/utf8>>},
description => #{en => <<"The flag which determines whether messages "
"can be cached on local disk when bridge is "
"disconnected">>,
zh => <<"当桥接断开时用于控制是否将消息缓存到本地磁"
"盘队列上"/utf8>>}
},
proto_ver => #{
order => 9,
type => string,
required => false,
default => <<"mqttv4">>,
enum => [<<"mqttv3">>, <<"mqttv4">>, <<"mqttv5">>],
title => #{en => <<"Protocol Version">>,
zh => <<"协议版本"/utf8>>},
description => #{en => <<"MQTTT Protocol version">>,
zh => <<"MQTT 协议版本"/utf8>>}
},
keepalive => #{
order => 10,
type => string,
required => false,
default => <<"60s">> ,
title => #{en => <<"Keepalive">>,
zh => <<"心跳间隔"/utf8>>},
description => #{en => <<"Keepalive">>,
zh => <<"心跳间隔"/utf8>>}
},
reconnect_interval => #{
order => 11,
type => string,
required => false,
default => <<"30s">>,
title => #{en => <<"Reconnect Interval">>,
zh => <<"重连间隔"/utf8>>},
description => #{en => <<"Reconnect interval of bridge:<br/>">>,
zh => <<"重连间隔"/utf8>>}
},
retry_interval => #{
order => 12,
type => string,
required => false,
default => <<"20s">>,
title => #{en => <<"Retry interval">>,
zh => <<"重传间隔"/utf8>>},
description => #{en => <<"Retry interval for bridge QoS1 message delivering">>,
zh => <<"消息重传间隔"/utf8>>}
},
bridge_mode => #{
order => 13,
type => boolean,
required => false,
default => false,
title => #{en => <<"Bridge Mode">>,
zh => <<"桥接模式"/utf8>>},
description => #{en => <<"Bridge mode for MQTT bridge connection">>,
zh => <<"MQTT 连接是否为桥接模式"/utf8>>}
},
ssl => #{
order => 14,
type => boolean,
default => false,
title => #{en => <<"Enable SSL">>,
zh => <<"开启SSL链接"/utf8>>},
description => #{en => <<"Enable SSL or not">>,
zh => <<"是否开启 SSL"/utf8>>}
},
cacertfile => #{
order => 15,
type => file,
required => false,
default => <<"etc/certs/cacert.pem">>,
title => #{en => <<"CA certificates">>,
zh => <<"CA 证书"/utf8>>},
description => #{en => <<"The file path of the CA certificates">>,
zh => <<"CA 证书路径"/utf8>>}
},
certfile => #{
order => 16,
type => file,
required => false,
default => <<"etc/certs/client-cert.pem">>,
title => #{en => <<"SSL Certfile">>,
zh => <<"SSL 客户端证书"/utf8>>},
description => #{en => <<"The file path of the client certfile">>,
zh => <<"客户端证书路径"/utf8>>}
},
keyfile => #{
order => 17,
type => file,
required => false,
default => <<"etc/certs/client-key.pem">>,
title => #{en => <<"SSL Keyfile">>,
zh => <<"SSL 密钥文件"/utf8>>},
description => #{en => <<"The file path of the client keyfile">>,
zh => <<"客户端密钥路径"/utf8>>}
},
ciphers => #{
order => 18,
type => string,
required => false,
default => <<"ECDHE-ECDSA-AES256-GCM-SHA384,ECDHE-RSA-AES256-GCM-SHA384,",
"ECDHE-ECDSA-AES256-SHA384,ECDHE-RSA-AES256-SHA384,ECDHE-ECDSA-DES-CBC3-SHA,",
"ECDH-ECDSA-AES256-GCM-SHA384,ECDH-RSA-AES256-GCM-SHA384,ECDH-ECDSA-AES256-SHA384,",
"ECDH-RSA-AES256-SHA384,DHE-DSS-AES256-GCM-SHA384,DHE-DSS-AES256-SHA256,AES256-GCM-SHA384,",
"AES256-SHA256,ECDHE-ECDSA-AES128-GCM-SHA256,ECDHE-RSA-AES128-GCM-SHA256,",
"ECDHE-ECDSA-AES128-SHA256,ECDHE-RSA-AES128-SHA256,ECDH-ECDSA-AES128-GCM-SHA256,",
"ECDH-RSA-AES128-GCM-SHA256,ECDH-ECDSA-AES128-SHA256,ECDH-RSA-AES128-SHA256,",
"DHE-DSS-AES128-GCM-SHA256,DHE-DSS-AES128-SHA256,AES128-GCM-SHA256,AES128-SHA256,",
"ECDHE-ECDSA-AES256-SHA,ECDHE-RSA-AES256-SHA,DHE-DSS-AES256-SHA,ECDH-ECDSA-AES256-SHA,",
"ECDH-RSA-AES256-SHA,AES256-SHA,ECDHE-ECDSA-AES128-SHA,ECDHE-RSA-AES128-SHA,",
"DHE-DSS-AES128-SHA,ECDH-ECDSA-AES128-SHA,ECDH-RSA-AES128-SHA,AES128-SHA">>,
title => #{en => <<"SSL Ciphers">>,
zh => <<"SSL 加密算法"/utf8>>},
description => #{en => <<"SSL Ciphers">>,
zh => <<"SSL 加密算法"/utf8>>}
}
}).
-define(RESOURCE_CONFIG_SPEC_RPC, #{
address => #{
order => 1,
type => string,
required => true,
default => <<"emqx2@127.0.0.1">>,
title => #{en => <<"EMQ X Node Name">>,
zh => <<"EMQ X 节点名称"/utf8>>},
description => #{en => <<"EMQ X Remote Node Name">>,
zh => <<"远程 EMQ X 节点名称 "/utf8>>}
},
mountpoint => #{
order => 2,
type => string,
required => false,
default => <<"bridge/emqx/${node}/">>,
title => #{en => <<"Bridge MountPoint">>,
zh => <<"桥接挂载点"/utf8>>},
description => #{en => <<"MountPoint for bridge topic<br/>"
"Example: The topic of messages sent to `topic1` on local node "
"will be transformed to `bridge/aws/${node}/topic1`">>,
zh => <<"桥接主题的挂载点<br/>"
"示例: 本地节点向 `topic1` 发消息,远程桥接节点的主题"
"会变换为 `bridge/aws/${node}/topic1`"/utf8>>}
},
pool_size => #{
order => 3,
type => number,
required => true,
default => 8,
title => #{en => <<"Pool Size">>,
zh => <<"连接池大小"/utf8>>},
description => #{en => <<"MQTT/RPC Connection Pool Size">>,
zh => <<"连接池大小"/utf8>>}
},
reconnect_interval => #{
order => 4,
type => string,
required => false,
default => <<"30s">>,
title => #{en => <<"Reconnect Interval">>,
zh => <<"重连间隔"/utf8>>},
description => #{en => <<"Reconnect Interval of bridge">>,
zh => <<"重连间隔"/utf8>>}
},
batch_size => #{
order => 5,
type => number,
required => false,
default => 32,
title => #{en => <<"Batch Size">>,
zh => <<"批处理大小"/utf8>>},
description => #{en => <<"Batch Size">>,
zh => <<"批处理大小"/utf8>>}
},
disk_cache => #{
order => 6,
type => boolean,
required => false,
default => false,
title => #{en => <<"Disk Cache">>,
zh => <<"磁盘缓存"/utf8>>},
description => #{en => <<"The flag which determines whether messages "
"can be cached on local disk when bridge is "
"disconnected">>,
zh => <<"当桥接断开时用于控制是否将消息缓存到本地磁"
"盘队列上"/utf8>>}
}
}).
-define(ACTION_PARAM_RESOURCE, #{
type => string,
required => true,
title => #{en => <<"Resource ID">>, zh => <<"资源 ID"/utf8>>},
description => #{en => <<"Bind a resource to this action">>,
zh => <<"给动作绑定一个资源"/utf8>>}
}).
-resource_type(#{
name => ?RESOURCE_TYPE_MQTT,
create => on_resource_create,
status => on_get_resource_status,
destroy => on_resource_destroy,
params => ?RESOURCE_CONFIG_SPEC_MQTT,
title => #{en => <<"MQTT Bridge">>, zh => <<"MQTT Bridge"/utf8>>},
description => #{en => <<"MQTT Message Bridge">>, zh => <<"MQTT 消息桥接"/utf8>>}
}).
-resource_type(#{
name => ?RESOURCE_TYPE_RPC,
create => on_resource_create,
status => on_get_resource_status,
destroy => on_resource_destroy,
params => ?RESOURCE_CONFIG_SPEC_RPC,
title => #{en => <<"EMQX Bridge">>, zh => <<"EMQX Bridge"/utf8>>},
description => #{en => <<"EMQ X RPC Bridge">>, zh => <<"EMQ X RPC 消息桥接"/utf8>>}
}).
-rule_action(#{
name => data_to_mqtt_broker,
category => data_forward,
for => 'message.publish',
types => [?RESOURCE_TYPE_MQTT, ?RESOURCE_TYPE_RPC],
create => on_action_create_data_to_mqtt_broker,
params => #{'$resource' => ?ACTION_PARAM_RESOURCE,
forward_topic => #{
order => 1,
type => string,
required => false,
default => <<"">>,
title => #{en => <<"Forward Topic">>,
zh => <<"转发消息主题"/utf8>>},
description => #{en => <<"The topic used when forwarding the message. "
"Defaults to the topic of the bridge message if not provided.">>,
zh => <<"转发消息时使用的主题。如果未提供,则默认为桥接消息的主题。"/utf8>>}
},
payload_tmpl => #{
order => 2,
type => string,
input => textarea,
required => false,
default => <<"">>,
title => #{en => <<"Payload Template">>,
zh => <<"消息内容模板"/utf8>>},
description => #{en => <<"The payload template, variable interpolation is supported. "
"If using empty template (default), then the payload will be "
"all the available vars in JSON format">>,
zh => <<"消息内容模板,支持变量。"
"若使用空模板(默认),消息内容为 JSON 格式的所有字段"/utf8>>}
}
},
title => #{en => <<"Data bridge to MQTT Broker">>,
zh => <<"桥接数据到 MQTT Broker"/utf8>>},
description => #{en => <<"Bridge Data to MQTT Broker">>,
zh => <<"桥接数据到 MQTT Broker"/utf8>>}
}).
%% @doc Rule-engine resource callback: create an MQTT/RPC bridge
%% resource. Starts an ecpool pool of bridge workers, then probes the
%% pool; if no worker is connected the pool is destroyed and an error
%% is raised. Returns the params map ({<<"pool">> => PoolName}) that
%% the rule engine stores and later passes to the status/destroy
%% callbacks.
on_resource_create(ResId, Params) ->
    ?LOG(info, "Initiating Resource ~p, ResId: ~p", [?RESOURCE_TYPE_MQTT, ResId]),
    {ok, _} = application:ensure_all_started(ecpool),
    PoolName = pool_name(ResId),
    Options = options(Params, PoolName, ResId),
    start_resource(ResId, PoolName, Options),
    %% Fail fast: at least one pool worker must have reached the broker.
    case test_resource_status(PoolName) of
        true -> ok;
        false ->
            on_resource_destroy(ResId, #{<<"pool">> => PoolName}),
            error({{?RESOURCE_TYPE_MQTT, ResId}, connection_failed})
    end,
    #{<<"pool">> => PoolName}.
%% @doc Start (or restart) the supervised ecpool pool for a bridge
%% resource. If a pool with this name is already running it is torn
%% down via on_resource_destroy/2 and started again, so re-creating a
%% resource picks up fresh Options. Any other start error destroys the
%% pool and raises `create_failed'.
start_resource(ResId, PoolName, Options) ->
    case ecpool:start_sup_pool(PoolName, ?MODULE, Options) of
        {ok, _} ->
            ?LOG(info, "Initiated Resource ~p Successfully, ResId: ~p", [?RESOURCE_TYPE_MQTT, ResId]);
        {error, {already_started, _Pid}} ->
            %% Stale pool from a previous incarnation: recreate it.
            on_resource_destroy(ResId, #{<<"pool">> => PoolName}),
            start_resource(ResId, PoolName, Options);
        {error, Reason} ->
            ?LOG(error, "Initiate Resource ~p failed, ResId: ~p, ~p", [?RESOURCE_TYPE_MQTT, ResId, Reason]),
            on_resource_destroy(ResId, #{<<"pool">> => PoolName}),
            error({{?RESOURCE_TYPE_MQTT, ResId}, create_failed})
    end.
%% @doc Return true if at least one ecpool worker in the pool has a
%% bridge client reporting `connected'. Workers whose client cannot be
%% fetched, or whose status call crashes (e.g. the bridge process is
%% dying), count as disconnected.
test_resource_status(PoolName) ->
    IsConnected = fun(Worker) ->
        case ecpool_worker:client(Worker) of
            {ok, Bridge} ->
                %% status/1 may exit if the bridge is down; treat as false.
                try emqx_connector_mqtt_worker:status(Bridge) of
                    connected -> true;
                    _ -> false
                catch _Error:_Reason ->
                    false
                end;
            {error, _} ->
                false
        end
    end,
    Status = [IsConnected(Worker) || {_WorkerName, Worker} <- ecpool:workers(PoolName)],
    lists:any(fun(St) -> St =:= true end, Status).
-spec(on_get_resource_status(ResId::binary(), Params::map()) -> Status::map()).
%% @doc Rule-engine status callback: report whether any worker in the
%% bridge pool is currently connected.
on_get_resource_status(_ResId, #{<<"pool">> := PoolName}) ->
    #{is_alive => test_resource_status(PoolName)}.
%% @doc Rule-engine destroy callback: stop the bridge's ecpool pool.
%% Raises `destroy_failed' if the pool supervisor refuses to stop.
on_resource_destroy(ResId, #{<<"pool">> := PoolName}) ->
    ?LOG(info, "Destroying Resource ~p, ResId: ~p", [?RESOURCE_TYPE_MQTT, ResId]),
    case ecpool:stop_sup_pool(PoolName) of
        ok ->
            ?LOG(info, "Destroyed Resource ~p Successfully, ResId: ~p", [?RESOURCE_TYPE_MQTT, ResId]);
        {error, Reason} ->
            ?LOG(error, "Destroy Resource ~p failed, ResId: ~p, ~p", [?RESOURCE_TYPE_MQTT, ResId, Reason]),
            error({{?RESOURCE_TYPE_MQTT, ResId}, destroy_failed})
    end.
%% @doc Rule-engine action-create callback: pre-process the forward
%% topic and payload templates for the data_to_mqtt_broker action.
%% NOTE(review): PayloadTks/TopicTks (and ActId/PoolName) appear unused
%% here, but on_action_data_to_mqtt_broker/2 reads them back from
%% ?BINDING_KEYS — presumably a rule_actions.hrl parse transform
%% captures these local bindings into the action environment; confirm
%% against rule_actions.hrl before refactoring.
on_action_create_data_to_mqtt_broker(ActId, Opts = #{<<"pool">> := PoolName,
                                                     <<"forward_topic">> := ForwardTopic,
                                                     <<"payload_tmpl">> := PayloadTmpl}) ->
    ?LOG(info, "Initiating Action ~p.", [?FUNCTION_NAME]),
    PayloadTks = emqx_plugin_libs_rule:preproc_tmpl(PayloadTmpl),
    %% Empty forward topic means "reuse the original message topic".
    TopicTks = case ForwardTopic == <<"">> of
                   true -> undefined;
                   false -> emqx_plugin_libs_rule:preproc_tmpl(ForwardTopic)
               end,
    Opts.
%% @doc Rule-engine action callback: forward a matched message to the
%% remote broker through one bridge worker of the pool. The topic and
%% payload templates pre-processed at action-create time arrive via
%% ?BINDING_KEYS in the env. An undefined topic template means the
%% original message topic is kept. Delivery is fire-and-forget: the
%% message is sent to the bridge process mailbox and the action success
%% counter is bumped unconditionally.
on_action_data_to_mqtt_broker(Msg, _Env =
                              #{id := Id, clientid := From, flags := Flags,
                                topic := Topic, timestamp := TimeStamp, qos := QoS,
                                ?BINDING_KEYS := #{
                                        'ActId' := ActId,
                                        'PoolName' := PoolName,
                                        'TopicTks' := TopicTks,
                                        'PayloadTks' := PayloadTks
                                    }}) ->
    %% Pick the destination topic: original or rendered template.
    Topic1 = case TopicTks =:= undefined of
                 true -> Topic;
                 false -> emqx_plugin_libs_rule:proc_tmpl(TopicTks, Msg)
             end,
    BrokerMsg = #message{id = Id,
                         qos = QoS,
                         from = From,
                         flags = Flags,
                         topic = Topic1,
                         payload = format_data(PayloadTks, Msg),
                         timestamp = TimeStamp},
    ecpool:with_client(PoolName,
      fun(BridgePid) ->
              BridgePid ! {deliver, rule_engine, BrokerMsg}
      end),
    emqx_rule_metrics:inc_actions_success(ActId).
%% @doc Render the outgoing payload: with no template tokens the whole
%% message map is JSON-encoded; otherwise the template is applied to it.
format_data(Tokens, Msg) ->
    case Tokens of
        [] -> emqx_json:encode(Msg);
        _ -> emqx_plugin_libs_rule:proc_tmpl(Tokens, Msg)
    end.
%% @doc Parse a comma-separated subscriptions binary into an Erlang
%% list term by wrapping it in "[" ... "]." and scanning the result.
subscriptions(Subscriptions) ->
    scan_binary(<<"[", Subscriptions/binary, "].">>).
%% @doc True if the address binary looks like an Erlang node name,
%% i.e. splits into exactly name@host on "@".
is_node_addr(Addr0) ->
    case string:tokens(binary_to_list(Addr0), "@") of
        [_NodeName, _Hostname] -> true;
        _ -> false
    end.
%% @doc Scan a binary holding a single dot-terminated Erlang term and
%% return the term (the caller must supply the trailing dot).
scan_binary(Bin) ->
    TermString = binary_to_list(Bin),
    scan_string(TermString).
%% @doc Tokenize and parse a string containing exactly one
%% dot-terminated Erlang term; crashes on malformed input.
scan_string(TermString) ->
    {ok, Tokens, _EndLocation} = erl_scan:string(TermString),
    {ok, ParsedTerm} = erl_parse:parse_term(Tokens),
    ParsedTerm.
%% @doc ecpool worker callback: start one bridge worker process.
%% Accepts the options as a proplist (converted to a map) or a map.
%% When disk_cache is enabled, a per-worker replayq directory is added
%% under the node data dir so messages survive broker disconnects.
%% When `append' is set, a GUID suffix is appended to the clientid so
%% pool workers get distinct MQTT client ids.
connect(Options) when is_list(Options) ->
    connect(maps:from_list(Options));
connect(Options = #{disk_cache := DiskCache, ecpool_worker_id := Id, pool_name := Pool}) ->
    Options0 = case DiskCache of
                   true ->
                       %% replayq dir is unique per pool and worker id.
                       DataDir = filename:join([emqx:get_config([node, data_dir]), replayq, Pool, integer_to_list(Id)]),
                       QueueOption = #{replayq_dir => DataDir},
                       Options#{queue => QueueOption};
                   false ->
                       Options
               end,
    Options1 = case maps:is_key(append, Options0) of
                   false -> Options0;
                   true ->
                       case maps:get(append, Options0, false) of
                           true ->
                               %% Distinct clientid per worker: base id + GUID.
                               ClientId = lists:concat([str(maps:get(clientid, Options0)), "_", str(emqx_guid:to_hexstr(emqx_guid:gen()))]),
                               Options0#{clientid => ClientId};
                           false ->
                               Options0
                       end
               end,
    %% Strip pool-internal keys before handing off to the bridge worker.
    Options2 = maps:without([ecpool_worker_id, pool_name, append], Options1),
    emqx_connector_mqtt_worker:start_link(Options2#{name => name(Pool, Id)}).
%% @doc Registered name for one bridge worker: "<pool>:<id>" as an atom.
name(Pool, Id) ->
    list_to_atom(lists:concat([Pool, ":", Id])).
%% @doc Pool name "bridge_mqtt:<resource id>" as an atom.
%% NOTE(review): creates atoms dynamically from ResId — fine while
%% resource ids are operator-controlled; confirm they are not
%% attacker-supplied (atom table is not garbage collected).
pool_name(ResId) ->
    list_to_atom("bridge_mqtt:" ++ str(ResId)).
%% @doc Translate the raw resource params map into the proplist of
%% bridge-worker options. The address decides the transport: an
%% Erlang node name (name@host) selects the RPC bridge, anything else
%% the MQTT bridge with full client options (durations are parsed from
%% strings like "30s"; keepalive/retry_interval are converted to
%% seconds).
options(Options, PoolName, ResId) ->
    GetD = fun(Key, Default) -> maps:get(Key, Options, Default) end,
    Get = fun(Key) -> GetD(Key, undefined) end,
    Address = Get(<<"address">>),
    [{max_inflight_batches, 32},
     {forward_mountpoint, str(Get(<<"mountpoint">>))},
     {disk_cache, GetD(<<"disk_cache">>, false)},
     {start_type, auto},
     {reconnect_delay_ms, hocon_postprocess:duration(str(Get(<<"reconnect_interval">>)))},
     {if_record_metrics, false},
     {pool_size, GetD(<<"pool_size">>, 1)},
     {pool_name, PoolName}
    ] ++ case is_node_addr(Address) of
             true ->
                 %% RPC bridge to another EMQ X node.
                 [{address, binary_to_atom(Get(<<"address">>), utf8)},
                  {connect_module, emqx_bridge_rpc},
                  {batch_size, Get(<<"batch_size">>)}];
             false ->
                 %% MQTT bridge to a remote broker.
                 [{address, binary_to_list(Address)},
                  {bridge_mode, GetD(<<"bridge_mode">>, true)},
                  {clean_start, true},
                  {clientid, str(Get(<<"clientid">>))},
                  {append, Get(<<"append">>)},
                  {connect_module, emqx_bridge_mqtt},
                  {keepalive, hocon_postprocess:duration(str(Get(<<"keepalive">>))) div 1000},
                  {username, str(Get(<<"username">>))},
                  {password, str(Get(<<"password">>))},
                  {proto_ver, mqtt_ver(Get(<<"proto_ver">>))},
                  {retry_interval, hocon_postprocess:duration(str(GetD(<<"retry_interval">>, "30s"))) div 1000}
                  | maybe_ssl(Options, Get(<<"ssl">>), ResId)]
         end.
%% @doc SSL options for the bridge client: [] when SSL is disabled;
%% otherwise persists the cert/key files from Options (scoped under
%% "rules"/ResId) and returns the resulting ssl_opts.
maybe_ssl(_Options, false, _ResId) ->
    [];
maybe_ssl(Options, true, ResId) ->
    [{ssl, true}, {ssl_opts, emqx_plugin_libs_ssl:save_files_return_opts(Options, "rules", ResId)}].
%% @doc Map a protocol-version binary to the client version atom;
%% anything unrecognised falls back to v4 (MQTT 3.1.1).
mqtt_ver(<<"mqttv3">>) -> v3;
mqtt_ver(<<"mqttv4">>) -> v4;
mqtt_ver(<<"mqttv5">>) -> v5;
mqtt_ver(_Other) -> v4.

View File

@ -1,12 +0,0 @@
%% -*- mode: erlang -*-
{application, emqx_rule_actions,
[{description, "Rule actions"},
{vsn, "5.0.0"},
{registered, []},
{applications,
[kernel,stdlib,emqx]},
{env,[]},
{modules, []},
{licenses, ["Apache 2.0"]},
{links, []}
]}.

View File

@ -1,379 +0,0 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
%% Define the default actions.
-module(emqx_web_hook_actions).
-export([ on_resource_create/2
, on_get_resource_status/2
, on_resource_destroy/2
]).
-export([ on_action_create_data_to_webserver/2
, on_action_data_to_webserver/2
]).
-export_type([action_fun/0]).
-include_lib("emqx/include/emqx.hrl").
-include_lib("emqx/include/logger.hrl").
-include_lib("emqx_rule_engine/include/rule_actions.hrl").
-type(action_fun() :: fun((Data :: map(), Envs :: map()) -> Result :: any())).
-type(url() :: binary()).
-define(RESOURCE_TYPE_WEBHOOK, 'web_hook').
%% Dashboard/config schema for the web_hook resource type: field order,
%% types, defaults, and bilingual titles/descriptions.
%% Fix: the zh title of request_timeout duplicated the word "时间".
-define(RESOURCE_CONFIG_SPEC, #{
url => #{order => 1,
type => string,
format => url,
required => true,
title => #{en => <<"Request URL">>,
zh => <<"请求 URL"/utf8>>},
description => #{en => <<"The URL of the server that will receive the Webhook requests.">>,
zh => <<"用于接收 Webhook 请求的服务器的 URL。"/utf8>>}},
connect_timeout => #{order => 2,
type => string,
default => <<"5s">>,
title => #{en => <<"Connect Timeout">>,
zh => <<"连接超时时间"/utf8>>},
description => #{en => <<"Connect Timeout In Seconds">>,
zh => <<"连接超时时间"/utf8>>}},
request_timeout => #{order => 3,
type => string,
default => <<"5s">>,
title => #{en => <<"Request Timeout">>,
zh => <<"请求超时时间"/utf8>>},
description => #{en => <<"Request Timeout In Seconds">>,
zh => <<"请求超时时间"/utf8>>}},
pool_size => #{order => 4,
type => number,
default => 8,
title => #{en => <<"Pool Size">>, zh => <<"连接池大小"/utf8>>},
description => #{en => <<"Connection Pool">>,
zh => <<"连接池大小"/utf8>>}
},
cacertfile => #{order => 5,
type => file,
default => <<"">>,
title => #{en => <<"CA Certificate File">>,
zh => <<"CA 证书文件"/utf8>>},
description => #{en => <<"CA Certificate file">>,
zh => <<"CA 证书文件"/utf8>>}},
keyfile => #{order => 6,
type => file,
default => <<"">>,
title =>#{en => <<"SSL Key">>,
zh => <<"SSL Key"/utf8>>},
description => #{en => <<"Your ssl keyfile">>,
zh => <<"SSL 私钥"/utf8>>}},
certfile => #{order => 7,
type => file,
default => <<"">>,
title => #{en => <<"SSL Cert">>,
zh => <<"SSL Cert"/utf8>>},
description => #{en => <<"Your ssl certfile">>,
zh => <<"SSL 证书"/utf8>>}},
verify => #{order => 8,
type => boolean,
default => false,
title => #{en => <<"Verify Server Certfile">>,
zh => <<"校验服务器证书"/utf8>>},
description => #{en => <<"Whether to verify the server certificate. By default, the client will not verify the server's certificate. If verification is required, please set it to true.">>,
zh => <<"是否校验服务器证书。 默认客户端不会去校验服务器的证书如果需要校验请设置成true。"/utf8>>}},
server_name_indication => #{order => 9,
type => string,
title => #{en => <<"Server Name Indication">>,
zh => <<"服务器名称指示"/utf8>>},
description => #{en => <<"Specify the hostname used for peer certificate verification, or set to disable to turn off this verification.">>,
zh => <<"指定用于对端证书验证时使用的主机名,或者设置为 disable 以关闭此项验证。"/utf8>>}}
}).
%% Spec for the implicit '$resource' action parameter: every action must be
%% bound to an existing resource by its ID.
-define(ACTION_PARAM_RESOURCE, #{
order => 0,
type => string,
required => true,
title => #{en => <<"Resource ID">>,
zh => <<"资源 ID"/utf8>>},
description => #{en => <<"Bind a resource to this action">>,
zh => <<"给动作绑定一个资源"/utf8>>}
}).
%% Parameter schema for the data_to_webserver action: HTTP method, extra
%% path (template-capable), headers, and body (template-capable).
-define(ACTION_DATA_SPEC, #{
'$resource' => ?ACTION_PARAM_RESOURCE,
method => #{
order => 1,
type => string,
enum => [<<"POST">>, <<"DELETE">>, <<"PUT">>, <<"GET">>],
default => <<"POST">>,
title => #{en => <<"Method">>,
zh => <<"Method"/utf8>>},
description => #{en => <<"HTTP Method.\n"
"Note that: the Body option in the Action will be discarded in case of GET or DELETE method.">>,
zh => <<"HTTP Method。\n"
"注意:当方法为 GET 或 DELETE 时,动作中的 Body 选项会被忽略。"/utf8>>}},
path => #{
order => 2,
type => string,
required => false,
default => <<"">>,
title => #{en => <<"Path">>,
zh => <<"Path"/utf8>>},
description => #{en => <<"The path part of the URL, support using ${Var} to get the field value output by the rule.">>,
zh => <<"URL 的路径部分,支持使用 ${Var} 获取规则输出的字段值。\n"/utf8>>}
},
headers => #{
order => 3,
type => object,
schema => #{},
default => #{<<"content-type">> => <<"application/json">>},
title => #{en => <<"Headers">>,
zh => <<"Headers"/utf8>>},
description => #{en => <<"HTTP headers.">>,
zh => <<"HTTP headers。"/utf8>>}},
body => #{
order => 4,
type => string,
input => textarea,
required => false,
default => <<"">>,
title => #{en => <<"Body">>,
zh => <<"Body"/utf8>>},
description => #{en => <<"The HTTP body supports the use of ${Var} to obtain the field value output by the rule.\n"
"The content of the default HTTP request body is a JSON string composed of the keys and values of all fields output by the rule.">>,
zh => <<"HTTP 请求体,支持使用 ${Var} 获取规则输出的字段值\n"
"默认 HTTP 请求体的内容为规则输出的所有字段的键和值构成的 JSON 字符串。"/utf8>>}}
}).
%% Register the 'web_hook' resource type with the rule engine, wiring its
%% lifecycle callbacks (create/status/destroy) and config schema.
-resource_type(
#{name => ?RESOURCE_TYPE_WEBHOOK,
create => on_resource_create,
status => on_get_resource_status,
destroy => on_resource_destroy,
params => ?RESOURCE_CONFIG_SPEC,
title => #{en => <<"WebHook">>,
zh => <<"WebHook"/utf8>>},
description => #{en => <<"WebHook">>,
zh => <<"WebHook"/utf8>>}
}).
%% Register the data_to_webserver action; '$any' means it applies to any
%% rule event, and it only accepts web_hook resources.
-rule_action(#{name => data_to_webserver,
category => data_forward,
for => '$any',
create => on_action_create_data_to_webserver,
params => ?ACTION_DATA_SPEC,
types => [?RESOURCE_TYPE_WEBHOOK],
title => #{en => <<"Data to Web Server">>,
zh => <<"发送数据到 Web 服务"/utf8>>},
description => #{en => <<"Forward Messages to Web Server">>,
zh => <<"将数据转发给 Web 服务"/utf8>>}
}).
%%------------------------------------------------------------------------------
%% Actions for web hook
%%------------------------------------------------------------------------------
%% @doc Create the webhook resource: check HTTP connectivity to the
%% configured URL (failing fast with check_http_connectivity_failed),
%% then start an ehttpc connection pool. Returns the config map extended
%% with the pool name and the computed pool options.
-spec(on_resource_create(binary(), map()) -> map()).
on_resource_create(ResId, Conf) ->
{ok, _} = application:ensure_all_started(ehttpc),
Options = pool_opts(Conf, ResId),
PoolName = pool_name(ResId),
%% fail fast if the target server is unreachable
case test_http_connect(Conf) of
true -> ok;
false -> error({error, check_http_connectivity_failed})
end,
start_resource(ResId, PoolName, Options),
Conf#{<<"pool">> => PoolName, options => Options}.
%% Start the ehttpc pool for this resource. If a pool with the same name
%% is already running (a leftover from a previous incarnation), destroy
%% it and retry; any other start error is escalated as create_failed.
start_resource(ResId, PoolName, Options) ->
case ehttpc_pool:start_pool(PoolName, Options) of
{ok, _} ->
?LOG(info, "Initiated Resource ~p Successfully, ResId: ~p",
[?RESOURCE_TYPE_WEBHOOK, ResId]);
{error, {already_started, _Pid}} ->
%% stop the stale pool, then recurse once to start a fresh one
on_resource_destroy(ResId, #{<<"pool">> => PoolName}),
start_resource(ResId, PoolName, Options);
{error, Reason} ->
?LOG(error, "Initiate Resource ~p failed, ResId: ~p, ~0p",
[?RESOURCE_TYPE_WEBHOOK, ResId, Reason]),
error({{?RESOURCE_TYPE_WEBHOOK, ResId}, create_failed})
end.
%% @doc Health-check callback: the resource is considered alive iff the
%% configured URL is currently reachable.
-spec(on_get_resource_status(binary(), map()) -> map()).
on_get_resource_status(_ResId, Conf) ->
#{is_alive => test_http_connect(Conf)}.
%% @doc Destroy the resource by stopping its ehttpc pool; a stop failure
%% is escalated via error/1 as destroy_failed.
-spec(on_resource_destroy(binary(), map()) -> ok | {error, Reason::term()}).
on_resource_destroy(ResId, #{<<"pool">> := PoolName}) ->
?LOG(info, "Destroying Resource ~p, ResId: ~p", [?RESOURCE_TYPE_WEBHOOK, ResId]),
case ehttpc_pool:stop_pool(PoolName) of
ok ->
?LOG(info, "Destroyed Resource ~p Successfully, ResId: ~p", [?RESOURCE_TYPE_WEBHOOK, ResId]);
{error, Reason} ->
?LOG(error, "Destroy Resource ~p failed, ResId: ~p, ~p", [?RESOURCE_TYPE_WEBHOOK, ResId, Reason]),
error({{?RESOURCE_TYPE_WEBHOOK, ResId}, destroy_failed})
end.
%% An action that forwards publish messages to a remote web server.
%% Parses/normalises the action params and pre-compiles the body and path
%% templates once, at action-create time.
%% NOTE(review): the locals bound here (Method, Headers, BodyTokens, ...)
%% look unused and the function returns plain `Params', yet the runtime
%% callback reads them from ?BINDING_KEYS — presumably rule_actions.hrl
%% applies a parse transform that captures these bindings; confirm against
%% that header before "cleaning up" the apparent dead code.
-spec(on_action_create_data_to_webserver(Id::binary(), #{url() := string()}) -> {bindings(), NewParams :: map()}).
on_action_create_data_to_webserver(Id, Params) ->
#{method := Method,
path := Path,
headers := Headers,
body := Body,
pool := Pool,
request_timeout := RequestTimeout} = parse_action_params(Params),
%% compile templates once so the per-message path is cheap
BodyTokens = emqx_plugin_libs_rule:preproc_tmpl(Body),
PathTokens = emqx_plugin_libs_rule:preproc_tmpl(Path),
Params.
%% @doc Runtime action callback: render the path/body templates with the
%% rule output `Selected' and send the HTTP request through the resource
%% pool (worker picked deterministically from the clientid). A 2xx status
%% counts as success; other statuses and transport errors bump the
%% action-error metric.
on_action_data_to_webserver(Selected, _Envs =
#{?BINDING_KEYS := #{
'Id' := Id,
'Method' := Method,
'Headers' := Headers,
'PathTokens' := PathTokens,
'BodyTokens' := BodyTokens,
'Pool' := Pool,
'RequestTimeout' := RequestTimeout},
clientid := ClientID}) ->
%% empty body template -> encode the whole rule output (see format_msg/2)
NBody = format_msg(BodyTokens, Selected),
NPath = emqx_plugin_libs_rule:proc_tmpl(PathTokens, Selected),
Req = create_req(Method, NPath, Headers, NBody),
%% ehttpc replies with either {ok, Status, Headers} or
%% {ok, Status, Headers, Body}; both shapes are matched below
case ehttpc:request(ehttpc_pool:pick_worker(Pool, ClientID), Method, Req, RequestTimeout) of
{ok, StatusCode, _} when StatusCode >= 200 andalso StatusCode < 300 ->
emqx_rule_metrics:inc_actions_success(Id);
{ok, StatusCode, _, _} when StatusCode >= 200 andalso StatusCode < 300 ->
emqx_rule_metrics:inc_actions_success(Id);
{ok, StatusCode, _} ->
?LOG(warning, "[WebHook Action] HTTP request failed with status code: ~p", [StatusCode]),
emqx_rule_metrics:inc_actions_error(Id);
{ok, StatusCode, _, _} ->
?LOG(warning, "[WebHook Action] HTTP request failed with status code: ~p", [StatusCode]),
emqx_rule_metrics:inc_actions_error(Id);
{error, Reason} ->
?LOG(error, "[WebHook Action] HTTP request error: ~p", [Reason]),
emqx_rule_metrics:inc_actions_error(Id)
end.
%% Render the body template against the rule output `Data'; an empty
%% template means "send the entire rule output encoded as JSON".
format_msg(Tokens, Data) ->
    case Tokens of
        [] -> emqx_json:encode(Data);
        _NonEmpty -> emqx_plugin_libs_rule:proc_tmpl(Tokens, Data)
    end.
%%------------------------------------------------------------------------------
%% Internal functions
%%------------------------------------------------------------------------------
%% Build the ehttpc request tuple; GET and DELETE requests carry no body,
%% so the body argument is dropped for those methods.
create_req(Method, Path, Headers, Body) ->
    case lists:member(Method, [get, delete]) of
        true -> {Path, Headers};
        false -> {Path, Headers, Body}
    end.
%% Parse and normalise the action parameters into an internal map; the
%% action-level path is merged onto the path part of the resource URL.
%% Any failure inside the try is collapsed to throw({invalid_params,
%% Params}) — the original error reason is discarded.
parse_action_params(Params = #{<<"url">> := URL}) ->
try
{ok, #{path := CommonPath}} = emqx_http_lib:uri_parse(URL),
Method = method(maps:get(<<"method">>, Params, <<"POST">>)),
Headers = headers(maps:get(<<"headers">>, Params, undefined)),
%% GET/DELETE send no body, so a content-type header is dropped
NHeaders = ensure_content_type_header(Headers, Method),
#{method => Method,
path => merge_path(CommonPath, maps:get(<<"path">>, Params, <<>>)),
headers => NHeaders,
body => maps:get(<<"body">>, Params, <<>>),
%% duration string (e.g. <<"5s">>) -> milliseconds
request_timeout => hocon_postprocess:duration(str(maps:get(<<"request_timeout">>, Params, <<"5s">>))),
pool => maps:get(<<"pool">>, Params)}
catch _:_ ->
throw({invalid_params, Params})
end.
%% POST/PUT requests keep the configured content-type header; any other
%% method sends no body, so the (first) content-type entry is removed.
ensure_content_type_header(Headers, Method) ->
    case lists:member(Method, [post, put]) of
        true -> Headers;
        false -> lists:keydelete("content-type", 1, Headers)
    end.
%% Join the action-level path onto the resource URL's base path, keeping
%% any query string present in the action path. An empty action path
%% leaves the base path untouched.
%% NOTE(review): an {error, _} from uri_parse/1 matches neither case
%% clause and crashes — callers rely on parse_action_params/1 wrapping
%% this in try/catch.
merge_path(CommonPath, <<>>) ->
CommonPath;
merge_path(CommonPath, Path0) ->
case emqx_http_lib:uri_parse(Path0) of
{ok, #{path := Path1, 'query' := Query}} ->
Path2 = filename:join(CommonPath, Path1),
<<Path2/binary, "?", Query/binary>>;
{ok, #{path := Path1}} ->
filename:join(CommonPath, Path1)
end.
%% Translate the configured HTTP method binary (upper- or lower-case
%% only) into the atom ehttpc expects; any other value is a
%% function_clause error, caught by parse_action_params/1.
method(<<"GET">>) -> get;
method(<<"get">>) -> get;
method(<<"POST">>) -> post;
method(<<"post">>) -> post;
method(<<"PUT">>) -> put;
method(<<"put">>) -> put;
method(<<"DELETE">>) -> delete;
method(<<"delete">>) -> delete.
%% Normalise configured headers (absent, map, or proplist; keys/values
%% may be atoms, binaries, or strings) into a proplist of lowercase
%% string keys and string values.
headers(undefined) -> [];
headers(Headers) when is_map(Headers) ->
    headers(maps:to_list(Headers));
headers(Headers) when is_list(Headers) ->
    lists:map(
        fun({Key, Value}) -> {string:to_lower(str(Key)), str(Value)} end,
        Headers).
%% Coerce a charlist, atom, or binary into a charlist (string).
str(Value) when is_list(Value) -> Value;
str(Value) when is_atom(Value) -> atom_to_list(Value);
str(Value) when is_binary(Value) -> binary_to_list(Value).
%% Build the ehttpc pool options from the webhook resource config.
%% Uses {pool_type, hash} so that a worker can later be picked
%% deterministically per clientid (see on_action_data_to_webserver/2).
pool_opts(Params = #{<<"url">> := URL}, ResId) ->
{ok, #{host := Host,
port := Port,
scheme := Scheme}} = emqx_http_lib:uri_parse(URL),
PoolSize = maps:get(<<"pool_size">>, Params, 32),
%% duration string (e.g. <<"5s">>) -> milliseconds
ConnectTimeout =
hocon_postprocess:duration(str(maps:get(<<"connect_timeout">>, Params, <<"5s">>))),
%% TLS transport options only apply to https URLs
TransportOpts0 =
case Scheme =:= https of
true -> [get_ssl_opts(Params, ResId)];
false -> []
end,
%% NOTE(review): emqx_misc:ipv6_probe/1 presumably augments the
%% transport options with an IPv6 probe setting — confirm in emqx_misc
TransportOpts = emqx_misc:ipv6_probe(TransportOpts0),
Opts = case Scheme =:= https of
true -> [{transport_opts, TransportOpts}, {transport, ssl}];
false -> [{transport_opts, TransportOpts}]
end,
[{host, Host},
{port, Port},
{pool_size, PoolSize},
{pool_type, hash},
{connect_timeout, ConnectTimeout},
{retry, 5},
{retry_timeout, 1000} | Opts].
%% ehttpc pool name for a webhook resource: 'webhook:<ResId>'.
pool_name(ResId) ->
list_to_atom("webhook:" ++ str(ResId)).
%% Persist cert/key material from the config under the "rules" area keyed
%% by ResId, returning ssl options that reference the saved file paths.
get_ssl_opts(Opts, ResId) ->
emqx_plugin_libs_ssl:save_files_return_opts(Opts, "rules", ResId).
%% Probe HTTP connectivity to the configured URL; returns a boolean and
%% never raises — any exception (including a missing <<"url">> key, which
%% makes Url() itself crash) is logged and reported as false.
test_http_connect(Conf) ->
%% lazy accessor: evaluated inside try so a missing key is also caught
Url = fun() -> maps:get(<<"url">>, Conf) end,
try
emqx_plugin_libs_rule:http_connectivity(Url())
of
ok -> true;
{error, _Reason} ->
?LOG(error, "check http_connectivity failed: ~p", [Url()]),
false
catch
Err:Reason:ST ->
?LOG(error, "check http_connectivity failed: ~p, ~0p", [Conf, {Err, Reason, ST}]),
false
end.

View File

@ -1,197 +0,0 @@
# Rule-Engine-APIs
## ENVs
APPSECRET="88ebdd6569afc:Mjg3MzUyNTI2Mjk2NTcyOTEwMDEwMDMzMTE2NTM1MTkzNjA"
## Rules
### test sql
$ curl -v --basic -u $APPSECRET -k 'http://localhost:8081/api/v4/rules?test' -d \
'{"rawsql":"select * from \"message.publish\" where topic=\"t/a\"","ctx":{}}'
### create
```shell
$ curl -v --basic -u $APPSECRET -k 'http://localhost:8081/api/v4/rules' -d \
'{"rawsql":"select * from \"t/a\"","actions":[{"name":"inspect","params":{"a":1}}],"description":"test-rule"}'
{"code":0,"data":{"actions":[{"name":"inspect","params":{"a":1}}],"description":"test-rule","enabled":true,"id":"rule:bc987915","rawsql":"select * from \"t/a\""}}
## with a resource id in the action args
$ curl -v --basic -u $APPSECRET -k 'http://localhost:8081/api/v4/rules' -d \
'{"rawsql":"select * from \"t/a\"","actions":[{"name":"inspect","params":{"$resource":"resource:3a7b44a1"}}],"description":"test-rule"}'
{"code":0,"data":{"actions":[{"name":"inspect","params":{"$resource":"resource:3a7b44a1","a":1}}],"description":"test-rule","enabled":true,"id":"rule:6fce0ca9","rawsql":"select * from \"t/a\""}}
```
### modify
```shell
## modify all of the params
$ curl -XPUT -v --basic -u $APPSECRET -k 'http://localhost:8081/api/v4/rules/rule:bc987915' -d \
'{"rawsql":"select * from \"t/a\"","actions":[{"name":"inspect","params":{"a":1}}],"description":"test-rule"}'
## modify some of the params: disable it
$ curl -XPUT -v --basic -u $APPSECRET -k 'http://localhost:8081/api/v4/rules/rule:bc987915' -d \
'{"enabled": false}'
## modify some of the params: add fallback actions
$ curl -XPUT -v --basic -u $APPSECRET -k 'http://localhost:8081/api/v4/rules/rule:bc987915' -d \
'{"actions":[{"name":"inspect","params":{"a":1}, "fallbacks": [{"name":"donothing"}]}]}'
```
### show
```shell
$ curl -v --basic -u $APPSECRET -k 'http://localhost:8081/api/v4/rules/rule:bc987915'
{"code":0,"data":{"actions":[{"name":"inspect","params":{"a":1}}],"description":"test-rule","enabled":true,"id":"rule:bc987915","rawsql":"select * from \"t/a\""}}
```
### list
```shell
$ curl -v --basic -u $APPSECRET -k http://localhost:8081/api/v4/rules
{"code":0,"data":[{"actions":[{"name":"inspect","params":{"a":1}}],"description":"test-rule","enabled":true,"id":"rule:bc987915","rawsql":"select * from \"t/a\""},{"actions":[{"name":"inspect","params":{"$resource":"resource:3a7b44a1","a":1}}],"description":"test-rule","enabled":true,"id":"rule:6fce0ca9","rawsql":"select * from \"t/a\""}]}
```
### delete
```shell
$ curl -XDELETE -v --basic -u $APPSECRET -k 'http://localhost:8081/api/v4/rules/rule:bc987915'
{"code":0}
```
## Actions
### list
```shell
$ curl -v --basic -u $APPSECRET -k http://localhost:8081/api/v4/actions
{"code":0,"data":[{"app":"emqx_rule_engine","description":"Republish a MQTT message to a another topic","name":"republish","params":{...},"types":[]},{"app":"emqx_rule_engine","description":"Inspect the details of action params for debug purpose","name":"inspect","params":{},"types":[]},{"app":"emqx_web_hook","description":"Forward Messages to Web Server","name":"data_to_webserver","params":{...},"types":["web_hook"]}]}
```
### show
```shell
$ curl -v --basic -u $APPSECRET -k 'http://localhost:8081/api/v4/actions/inspect'
{"code":0,"data":{"app":"emqx_rule_engine","description":"Debug Action","name":"inspect","params":{"$resource":"built_in"}}}
```
## Resource Types
### list
```shell
$ curl -v --basic -u $APPSECRET -k 'http://localhost:8081/api/v4/resource_types'
{"code":0,"data":[{"description":"Debug resource type","name":"built_in","params":{},"provider":"emqx_rule_engine"}]}
```
### list all resources of a type
```shell
$ curl -v --basic -u $APPSECRET -k 'http://localhost:8081/api/v4/resource_types/built_in/resources'
{"code":0,"data":[{"attrs":"undefined","config":{"a":1},"description":"test-rule","id":"resource:71df3086","type":"built_in"}]}
```
### show
```shell
$ curl -v --basic -u $APPSECRET -k 'http://localhost:8081/api/v4/resource_types/built_in'
{"code":0,"data":{"description":"Debug resource type","name":"built_in","params":{},"provider":"emqx_rule_engine"}}
```
## Resources
### create
```shell
$ curl -v --basic -u $APPSECRET -k 'http://localhost:8081/api/v4/resources' -d \
'{"type": "built_in", "config": {"a":1}, "description": "test-resource"}'
{"code":0,"data":{"attrs":"undefined","config":{"a":1},"description":"test-resource","id":"resource:71df3086","type":"built_in"}}
```
### start
```shell
$ curl -XPOST -v --basic -u $APPSECRET -k 'http://localhost:8081/api/v4/resources/resource:71df3086'
{"code":0}
```
### list
```shell
$ curl -v --basic -u $APPSECRET -k 'http://localhost:8081/api/v4/resources'
{"code":0,"data":[{"attrs":"undefined","config":{"a":1},"description":"test-resource","id":"resource:71df3086","type":"built_in"}]}
```
### show
```shell
$ curl -v --basic -u $APPSECRET -k 'http://localhost:8081/api/v4/resources/resource:71df3086'
{"code":0,"data":{"attrs":"undefined","config":{"a":1},"description":"test-resource","id":"resource:71df3086","type":"built_in"}}
```
### get resource status
```shell
curl -v --basic -u $APPSECRET -k 'http://localhost:8081/api/v4/resource_status/resource:71df3086'
{"code":0,"data":{"is_alive":true}}
```
### delete
```shell
$ curl -XDELETE -v --basic -u $APPSECRET -k 'http://localhost:8081/api/v4/resources/resource:71df3086'
{"code":0}
```
## Rule example using webhook
``` shell
$ curl -v --basic -u $APPSECRET -k 'http://localhost:8081/api/v4/resources' -d \
'{"type": "web_hook", "config": {"url": "http://127.0.0.1:9910", "headers": {"token":"axfw34y235wrq234t4ersgw4t"}, "method": "POST"}, "description": "web hook resource-1"}'
{"code":0,"data":{"attrs":"undefined","config":{"headers":{"token":"axfw34y235wrq234t4ersgw4t"},"method":"POST","url":"http://127.0.0.1:9910"},"description":"web hook resource-1","id":"resource:8531a11f","type":"web_hook"}}
curl -v --basic -u $APPSECRET -k 'http://localhost:8081/api/v4/rules' -d \
'{"rawsql":"SELECT clientid as c, username as u.name FROM \"#\"","actions":[{"name":"data_to_webserver","params":{"$resource": "resource:8531a11f"}}],"description":"Forward connected events to webhook"}'
{"code":0,"data":{"actions":[{"name":"data_to_webserver","params":{"$resource":"resource:8531a11f","headers":{"token":"axfw34y235wrq234t4ersgw4t"},"method":"POST","url":"http://127.0.0.1:9910"}}],"description":"Forward connected events to webhook","enabled":true,"id":"rule:4fe05936","rawsql":"select * from \"#\""}}
```
Start a `web server` using `nc`, and then connect to the EMQ X broker using an MQTT client with username = 'Shawn':
```shell
$ echo -e "HTTP/1.1 200 OK\n\n $(date)" | nc -l 127.0.0.1 9910
POST / HTTP/1.1
content-type: application/json
content-length: 48
te:
host: 127.0.0.1:9910
connection: keep-alive
token: axfw34y235wrq234t4ersgw4t
{"c":"clientId-bP70ymeIyo","u":{"name":"Shawn"}}
```

Some files were not shown because too many files have changed in this diff Show More