Merge remote-tracking branch 'origin/master' into dev/ee5.0
commit 5a3fb4a011
@@ -3,7 +3,7 @@
 {erl_opts, [debug_info]}.
 {deps,
 [
-{minirest, {git, "https://github.com/emqx/minirest.git", {tag, "1.3.6"}}}
+{minirest, {git, "https://github.com/emqx/minirest.git", {tag, "1.3.7"}}}
 ]}.

 {shell, [

@@ -57,24 +57,24 @@ jobs:
 -X POST \
 -d "{\"repo\":\"emqx/emqx\", \"tag\": \"${{ github.ref_name }}\" }" \
 ${{ secrets.EMQX_IO_RELEASE_API }}
-- name: update repo.emqx.io
-if: github.event_name == 'release'
-run: |
-REF=${{ github.ref_name }}
-case "$REF" in
-v*)
-BOOL_FLAG_NAME="emqx_ce"
-;;
-e*)
-BOOL_FLAG_NAME="emqx_ee"
-;;
-esac
-curl --silent --show-error \
--H "Authorization: token ${{ secrets.CI_GIT_TOKEN }}" \
--H "Accept: application/vnd.github.v3+json" \
--X POST \
--d "{\"ref\":\"v1.0.4\",\"inputs\":{\"version\": \"${{ github.ref_name }}\", \"${BOOL_FLAG_NAME}\": \"true\"}}" \
-"https://api.github.com/repos/emqx/emqx-ci-helper/actions/workflows/update_emqx_repos.yaml/dispatches"
+- uses: emqx/push-helm-action@v1
+if: github.event_name == 'release' && endsWith(github.repository, 'emqx') && matrix.profile == 'emqx'
+with:
+charts_dir: "${{ github.workspace }}/deploy/charts/emqx"
+version: ${{ github.ref_name }}
+aws_access_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+aws_region: "us-west-2"
+aws_bucket_name: "repos-emqx-io"
+- uses: emqx/push-helm-action@v1
+if: github.event_name == 'release' && endsWith(github.repository, 'enterprise') && matrix.profile == 'emqx-ee'
+with:
+charts_dir: "${{ github.workspace }}/deploy/charts/emqx-ee"
+version: ${{ github.ref_name }}
+aws_access_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+aws_region: "us-west-2"
+aws_bucket_name: "repos-emqx-io"
 - name: update homebrew packages
 if: github.event_name == 'release'
 run: |

@@ -1,8 +1,26 @@
+# 5.0.8
+
+## Enhancements
+
+* change the `/gateway` API path to plural form. [#8823](https://github.com/emqx/emqx/pull/8823)
+
+# 5.0.7
+
+## Bug fixes
+
+* Remove `will_msg` (not used) field from the client API. [#8721](https://github.com/emqx/emqx/pull/8721)
+* Fix `$queue` topic name error in management API return. [#8728](https://github.com/emqx/emqx/pull/8728)
+* Fix race condition which may cause `client.connected` and `client.disconnected` out of order. [#8625](https://github.com/emqx/emqx/pull/8625)
+
+## Enhancements
+
+* Do not auto-populate default SSL cipher suites, so that the configs are less bloated. [#8769](https://github.com/emqx/emqx/pull/8769)
+
 # 5.0.6

 ## Bug fixes

-* Remove the needless `will_msg` field from the client API. [#8721](https://github.com/emqx/emqx/pull/8721)
+* Upgrade Dashboard version to fix an issue where the node status was not displayed correctly. [#8771](https://github.com/emqx/emqx/pull/8771)

 # 5.0.5

@@ -19,6 +37,7 @@
 * Updated `/nodes` API node_status from `Running/Stopped` to `running/stopped`. [#8642](https://github.com/emqx/emqx/pull/8642)
 * Improve handling of placeholder interpolation errors [#8635](https://github.com/emqx/emqx/pull/8635)
 * Better logging on unknown object IDs. [#8670](https://github.com/emqx/emqx/pull/8670)
+* The bind option support `:1883` style. [#8758](https://github.com/emqx/emqx/pull/8758)

 # 5.0.4

Makefile
@@ -6,7 +6,7 @@ export EMQX_DEFAULT_BUILDER = ghcr.io/emqx/emqx-builder/5.0-17:1.13.4-24.2.1-1-d
 export EMQX_DEFAULT_RUNNER = debian:11-slim
 export OTP_VSN ?= $(shell $(CURDIR)/scripts/get-otp-vsn.sh)
 export ELIXIR_VSN ?= $(shell $(CURDIR)/scripts/get-elixir-vsn.sh)
-export EMQX_DASHBOARD_VERSION ?= v1.0.6
+export EMQX_DASHBOARD_VERSION ?= v1.0.7
 export EMQX_EE_DASHBOARD_VERSION ?= e1.0.1-beta.1
 export EMQX_REL_FORM ?= tgz
 export QUICER_DOWNLOAD_FROM_RELEASE = 1

@@ -32,7 +32,7 @@
 %% `apps/emqx/src/bpapi/README.md'

 %% Community edition
--define(EMQX_RELEASE_CE, "5.0.5-beta.1").
+-define(EMQX_RELEASE_CE, "5.0.6").

 %% Enterprise edition
 -define(EMQX_RELEASE_EE, "5.0.0-beta.2").

@@ -27,7 +27,7 @@
 {jiffy, {git, "https://github.com/emqx/jiffy", {tag, "1.0.5"}}},
 {cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.9.0"}}},
 {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.4"}}},
-{ekka, {git, "https://github.com/emqx/ekka", {tag, "0.13.3"}}},
+{ekka, {git, "https://github.com/emqx/ekka", {tag, "0.13.4"}}},
 {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.8.1"}}},
 {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.30.0"}}},
 {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}},

@@ -23,6 +23,11 @@
 versions_file/1
 ]).

+%% Internal exports (RPC)
+-export([
+announce_fun/1
+]).
+
 -export_type([api/0, api_version/0, var_name/0, call/0, rpc/0, bpapi_meta/0]).

 -include("emqx.hrl").
@@ -77,7 +82,7 @@ supported_version(API) ->
 -spec announce(atom()) -> ok.
 announce(App) ->
 {ok, Data} = file:consult(?MODULE:versions_file(App)),
-{atomic, ok} = mria:transaction(?COMMON_SHARD, fun announce_fun/1, [Data]),
+{atomic, ok} = mria:transaction(?COMMON_SHARD, fun ?MODULE:announce_fun/1, [Data]),
 ok.

 -spec versions_file(atom()) -> file:filename_all().

@@ -3,7 +3,7 @@
 {id, "emqx"},
 {description, "EMQX Core"},
 % strict semver, bump manually!
-{vsn, "5.0.5"},
+{vsn, "5.0.7"},
 {modules, []},
 {registered, []},
 {applications, [

@@ -54,6 +54,12 @@
 code_change/3
 ]).

+%% Internal exports (RPC)
+-export([
+create_activate_alarm/3,
+do_get_alarms/0
+]).
+
 -record(activated_alarm, {
 name :: binary() | atom(),
 details :: map() | list(),
@@ -210,7 +216,7 @@ init([]) ->
 handle_call({activate_alarm, Name, Details, Message}, _From, State) ->
 Res = mria:transaction(
 mria:local_content_shard(),
-fun create_activate_alarm/3,
+fun ?MODULE:create_activate_alarm/3,
 [Name, Details, Message]
 ),
 case Res of
@@ -234,15 +240,7 @@ handle_call(delete_all_deactivated_alarms, _From, State) ->
 handle_call({get_alarms, all}, _From, State) ->
 {atomic, Alarms} =
 mria:ro_transaction(
-mria:local_content_shard(),
-fun() ->
-[
-normalize(Alarm)
-|| Alarm <-
-ets:tab2list(?ACTIVATED_ALARM) ++
-ets:tab2list(?DEACTIVATED_ALARM)
-]
-end
+mria:local_content_shard(), fun ?MODULE:do_get_alarms/0
 ),
 {reply, Alarms, State, get_validity_period()};
 handle_call({get_alarms, activated}, _From, State) ->
@@ -295,6 +293,14 @@ create_activate_alarm(Name, Details, Message) ->
 Alarm
 end.

+do_get_alarms() ->
+[
+normalize(Alarm)
+|| Alarm <-
+ets:tab2list(?ACTIVATED_ALARM) ++
+ets:tab2list(?DEACTIVATED_ALARM)
+].
+
 deactivate_alarm(
 #activated_alarm{
 activate_at = ActivateAt,

@@ -49,6 +49,11 @@
 code_change/3
 ]).

+%% Internal exports (RPC)
+-export([
+expire_banned_items/1
+]).
+
 -elvis([{elvis_style, state_record_and_type, disable}]).

 -define(BANNED_TAB, ?MODULE).
@@ -224,7 +229,9 @@ handle_cast(Msg, State) ->
 {noreply, State}.

 handle_info({timeout, TRef, expire}, State = #{expiry_timer := TRef}) ->
-_ = mria:transaction(?COMMON_SHARD, fun expire_banned_items/1, [erlang:system_time(second)]),
+_ = mria:transaction(?COMMON_SHARD, fun ?MODULE:expire_banned_items/1, [
+erlang:system_time(second)
+]),
 {noreply, ensure_expiry_timer(State), hibernate};
 handle_info(Info, State) ->
 ?SLOG(error, #{msg => "unexpected_info", info => Info}),

@@ -44,6 +44,11 @@
 code_change/3
 ]).

+%% Internal exports (RPC)
+-export([
+do_cleanup_channels/1
+]).
+
 -define(REGISTRY, ?MODULE).
 -define(TAB, emqx_channel_registry).
 -define(LOCK, {?MODULE, cleanup_down}).
@@ -155,7 +160,7 @@ cleanup_channels(Node) ->
 global:trans(
 {?LOCK, self()},
 fun() ->
-mria:transaction(?CM_SHARD, fun do_cleanup_channels/1, [Node])
+mria:transaction(?CM_SHARD, fun ?MODULE:do_cleanup_channels/1, [Node])
 end
 ).

@@ -35,6 +35,11 @@
 unsubscribe/2
 ]).

+%% Internal exports (RPC)
+-export([
+try_subscribe/2
+]).
+
 -record(exclusive_subscription, {
 topic :: emqx_types:topic(),
 clientid :: emqx_types:clientid()
@@ -80,10 +85,7 @@ on_delete_module() ->
 -spec check_subscribe(emqx_types:clientinfo(), emqx_types:topic()) ->
 allow | deny.
 check_subscribe(#{clientid := ClientId}, Topic) ->
-Fun = fun() ->
-try_subscribe(ClientId, Topic)
-end,
-case mria:transaction(?EXCLUSIVE_SHARD, Fun) of
+case mria:transaction(?EXCLUSIVE_SHARD, fun ?MODULE:try_subscribe/2, [ClientId, Topic]) of
 {atomic, Res} ->
 Res;
 {aborted, Reason} ->
@@ -94,7 +96,7 @@ check_subscribe(#{clientid := ClientId}, Topic) ->
 end.

 unsubscribe(Topic, #{is_exclusive := true}) ->
-_ = mria:transaction(?EXCLUSIVE_SHARD, fun() -> mnesia:delete({?TAB, Topic}) end),
+_ = mria:transaction(?EXCLUSIVE_SHARD, fun mnesia:delete/1, [{?TAB, Topic}]),
 ok;
 unsubscribe(_Topic, _SubOpts) ->
 ok.

@@ -583,11 +583,7 @@ enable_authn(Opts) ->
 maps:get(enable_authn, Opts, true).

 ssl_opts(Opts) ->
-maps:to_list(
-emqx_tls_lib:drop_tls13_for_old_otp(
-maps:get(ssl_options, Opts, #{})
-)
-).
+emqx_tls_lib:to_server_opts(tls, maps:get(ssl_options, Opts, #{})).

 tcp_opts(Opts) ->
 maps:to_list(

@@ -47,6 +47,11 @@
 code_change/3
 ]).

+%% Internal exports (RPC)
+-export([
+cleanup_routes/1
+]).
+
 -record(routing_node, {name, const = unused}).

 -define(ROUTE, emqx_route).
@@ -145,7 +150,7 @@ handle_info({nodedown, Node}, State = #{nodes := Nodes}) ->
 global:trans(
 {?LOCK, self()},
 fun() ->
-mria:transaction(?ROUTE_SHARD, fun cleanup_routes/1, [Node])
+mria:transaction(?ROUTE_SHARD, fun ?MODULE:cleanup_routes/1, [Node])
 end
 ),
 ok = mria:dirty_delete(?ROUTING_NODE, Node),

@@ -102,7 +102,7 @@

 -export([namespace/0, roots/0, roots/1, fields/1, desc/1]).
 -export([conf_get/2, conf_get/3, keys/2, filter/1]).
--export([server_ssl_opts_schema/2, client_ssl_opts_schema/1, ciphers_schema/1, default_ciphers/1]).
+-export([server_ssl_opts_schema/2, client_ssl_opts_schema/1, ciphers_schema/1]).
 -export([sc/2, map/2]).

 -elvis([{elvis_style, god_modules, disable}]).
@@ -1843,6 +1843,8 @@ filter(Opts) ->
 common_ssl_opts_schema(Defaults) ->
 D = fun(Field) -> maps:get(to_atom(Field), Defaults, undefined) end,
 Df = fun(Field, Default) -> maps:get(to_atom(Field), Defaults, Default) end,
+Collection = maps:get(versions, Defaults, tls_all_available),
+AvailableVersions = default_tls_vsns(Collection),
 [
 {"cacertfile",
 sc(
@@ -1910,9 +1912,9 @@ common_ssl_opts_schema(Defaults) ->
 sc(
 hoconsc:array(typerefl:atom()),
 #{
-default => default_tls_vsns(maps:get(versions, Defaults, tls_all_available)),
+default => AvailableVersions,
 desc => ?DESC(common_ssl_opts_schema_versions),
-validator => fun validate_tls_versions/1
+validator => fun(Inputs) -> validate_tls_versions(AvailableVersions, Inputs) end
 }
 )},
 {"ciphers", ciphers_schema(D("ciphers"))},
@@ -2023,9 +2025,9 @@ client_ssl_opts_schema(Defaults) ->
 ].

 default_tls_vsns(dtls_all_available) ->
-proplists:get_value(available_dtls, ssl:versions());
+emqx_tls_lib:available_versions(dtls);
 default_tls_vsns(tls_all_available) ->
-emqx_tls_lib:default_versions().
+emqx_tls_lib:available_versions(tls).

 -spec ciphers_schema(quic | dtls_all_available | tls_all_available | undefined) ->
 hocon_schema:field_schema().
@@ -2040,6 +2042,10 @@ ciphers_schema(Default) ->
 #{
 default => default_ciphers(Default),
 converter => fun
+(<<>>) ->
+[];
+("") ->
+[];
 (Ciphers) when is_binary(Ciphers) ->
 binary:split(Ciphers, <<",">>, [global]);
 (Ciphers) when is_list(Ciphers) ->
@@ -2061,19 +2067,15 @@ default_ciphers(Which) ->
 do_default_ciphers(Which)
 ).

-do_default_ciphers(undefined) ->
-do_default_ciphers(tls_all_available);
 do_default_ciphers(quic) ->
 [
 "TLS_AES_256_GCM_SHA384",
 "TLS_AES_128_GCM_SHA256",
 "TLS_CHACHA20_POLY1305_SHA256"
 ];
-do_default_ciphers(dtls_all_available) ->
-%% as of now, dtls does not support tlsv1.3 ciphers
-emqx_tls_lib:selected_ciphers(['dtlsv1.2', 'dtlsv1']);
-do_default_ciphers(tls_all_available) ->
-emqx_tls_lib:default_ciphers().
+do_default_ciphers(_) ->
+%% otherwise resolve default ciphers list at runtime
+[].

 %% @private return a list of keys in a parent field
 -spec keys(string(), hocon:config()) -> [string()].
@@ -2163,8 +2165,12 @@ to_bar_separated_list(Str) ->
 %% - 127.0.0.1:1883
 %% - ::1:1883
 %% - [::1]:1883
+%% - :1883
+%% - :::1883
 to_ip_port(Str) ->
 case split_ip_port(Str) of
+{"", Port} ->
+{ok, {{0, 0, 0, 0}, list_to_integer(Port)}};
 {Ip, Port} ->
 PortVal = list_to_integer(Port),
 case inet:parse_address(Ip) of
@@ -2247,19 +2253,16 @@ parse_user_lookup_fun(StrConf) ->
 {fun Mod:Fun/3, undefined}.

 validate_ciphers(Ciphers) ->
-All = emqx_tls_lib:all_ciphers(),
-case lists:filter(fun(Cipher) -> not lists:member(Cipher, All) end, Ciphers) of
+Set = emqx_tls_lib:all_ciphers_set_cached(),
+case lists:filter(fun(Cipher) -> not sets:is_element(Cipher, Set) end, Ciphers) of
 [] -> ok;
 Bad -> {error, {bad_ciphers, Bad}}
 end.

-validate_tls_versions(Versions) ->
-AvailableVersions =
-proplists:get_value(available, ssl:versions()) ++
-proplists:get_value(available_dtls, ssl:versions()),
+validate_tls_versions(AvailableVersions, Versions) ->
 case lists:filter(fun(V) -> not lists:member(V, AvailableVersions) end, Versions) of
 [] -> ok;
-Vs -> {error, {unsupported_ssl_versions, Vs}}
+Vs -> {error, {unsupported_tls_versions, Vs}}
 end.

 validations() ->

@@ -0,0 +1,37 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2022 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%--------------------------------------------------------------------
+
+%% Note: this module CAN'T be hot-patched to avoid invalidating the
+%% closures, so it must not be changed.
+-module(emqx_secret).
+
+%% API:
+-export([wrap/1, unwrap/1]).
+
+%%================================================================================
+%% API funcions
+%%================================================================================
+
+wrap(Term) ->
+fun() ->
+Term
+end.
+
+unwrap(Term) when is_function(Term, 0) ->
+%% Handle potentially nested funs
+unwrap(Term());
+unwrap(Term) ->
+Term.

@@ -67,6 +67,11 @@
 code_change/3
 ]).

+%% Internal exports (RPC)
+-export([
+init_monitors/0
+]).
+
 -export_type([strategy/0]).

 -type strategy() ::
@@ -336,7 +341,7 @@ subscribers(Group, Topic) ->
 init([]) ->
 ok = mria:wait_for_tables([?TAB]),
 {ok, _} = mnesia:subscribe({table, ?TAB, simple}),
-{atomic, PMon} = mria:transaction(?SHARED_SUB_SHARD, fun init_monitors/0),
+{atomic, PMon} = mria:transaction(?SHARED_SUB_SHARD, fun ?MODULE:init_monitors/0),
 ok = emqx_tables:new(?SHARED_SUBS, [protected, bag]),
 ok = emqx_tables:new(?ALIVE_SUBS, [protected, set, {read_concurrency, true}]),
 ok = emqx_tables:new(?SHARED_SUBS_ROUND_ROBIN_COUNTER, [public, set, {write_concurrency, true}]),

@@ -18,13 +18,12 @@

 %% version & cipher suites
 -export([
-default_versions/0,
-integral_versions/1,
+available_versions/1,
+integral_versions/2,
 default_ciphers/0,
 selected_ciphers/1,
 integral_ciphers/2,
-drop_tls13_for_old_otp/1,
-all_ciphers/0
+all_ciphers_set_cached/0
 ]).

 %% SSL files
@@ -38,7 +37,9 @@
 ]).

 -export([
-to_client_opts/1
+to_server_opts/2,
+to_client_opts/1,
+to_client_opts/2
 ]).

 -include("logger.hrl").
@@ -54,27 +55,80 @@
 %% non-empty list of strings
 -define(IS_STRING_LIST(L), (is_list(L) andalso L =/= [] andalso ?IS_STRING(hd(L)))).

-%% @doc Returns the default supported tls versions.
--spec default_versions() -> [atom()].
-default_versions() -> available_versions().
+%% The ciphers that ssl:cipher_suites(exclusive, 'tlsv1.3', openssl)
+%% should return when running on otp 23.
+%% But we still have to hard-code them because tlsv1.3 on otp 22 is
+%% not trustworthy.
+-define(TLSV13_EXCLUSIVE_CIPHERS, [
+"TLS_AES_256_GCM_SHA384",
+"TLS_AES_128_GCM_SHA256",
+"TLS_CHACHA20_POLY1305_SHA256",
+"TLS_AES_128_CCM_SHA256",
+"TLS_AES_128_CCM_8_SHA256"
+]).
+
+-define(SELECTED_CIPHERS, [
+"ECDHE-ECDSA-AES256-GCM-SHA384",
+"ECDHE-RSA-AES256-GCM-SHA384",
+"ECDHE-ECDSA-AES256-SHA384",
+"ECDHE-RSA-AES256-SHA384",
+"ECDH-ECDSA-AES256-GCM-SHA384",
+"ECDH-RSA-AES256-GCM-SHA384",
+"ECDH-ECDSA-AES256-SHA384",
+"ECDH-RSA-AES256-SHA384",
+"DHE-DSS-AES256-GCM-SHA384",
+"DHE-DSS-AES256-SHA256",
+"AES256-GCM-SHA384",
+"AES256-SHA256",
+"ECDHE-ECDSA-AES128-GCM-SHA256",
+"ECDHE-RSA-AES128-GCM-SHA256",
+"ECDHE-ECDSA-AES128-SHA256",
+"ECDHE-RSA-AES128-SHA256",
+"ECDH-ECDSA-AES128-GCM-SHA256",
+"ECDH-RSA-AES128-GCM-SHA256",
+"ECDH-ECDSA-AES128-SHA256",
+"ECDH-RSA-AES128-SHA256",
+"DHE-DSS-AES128-GCM-SHA256",
+"DHE-DSS-AES128-SHA256",
+"AES128-GCM-SHA256",
+"AES128-SHA256",
+"ECDHE-ECDSA-AES256-SHA",
+"ECDHE-RSA-AES256-SHA",
+"DHE-DSS-AES256-SHA",
+"ECDH-ECDSA-AES256-SHA",
+"ECDH-RSA-AES256-SHA",
+"ECDHE-ECDSA-AES128-SHA",
+"ECDHE-RSA-AES128-SHA",
+"DHE-DSS-AES128-SHA",
+"ECDH-ECDSA-AES128-SHA",
+"ECDH-RSA-AES128-SHA",
+
+%% psk
+"RSA-PSK-AES256-GCM-SHA384",
+"RSA-PSK-AES256-CBC-SHA384",
+"RSA-PSK-AES128-GCM-SHA256",
+"RSA-PSK-AES128-CBC-SHA256",
+"RSA-PSK-AES256-CBC-SHA",
+"RSA-PSK-AES128-CBC-SHA"
+]).
+
 %% @doc Validate a given list of desired tls versions.
 %% raise an error exception if non of them are available.
 %% The input list can be a string/binary of comma separated versions.
--spec integral_versions(undefined | string() | binary() | [ssl:tls_version()]) ->
+-spec integral_versions(tls | dtls, undefined | string() | binary() | [ssl:tls_version()]) ->
 [ssl:tls_version()].
-integral_versions(undefined) ->
-integral_versions(default_versions());
-integral_versions([]) ->
-integral_versions(default_versions());
-integral_versions(<<>>) ->
-integral_versions(default_versions());
-integral_versions(Desired) when ?IS_STRING(Desired) ->
-integral_versions(iolist_to_binary(Desired));
-integral_versions(Desired) when is_binary(Desired) ->
-integral_versions(parse_versions(Desired));
-integral_versions(Desired) ->
-Available = available_versions(),
+integral_versions(Type, undefined) ->
+available_versions(Type);
+integral_versions(Type, []) ->
+available_versions(Type);
+integral_versions(Type, <<>>) ->
+available_versions(Type);
+integral_versions(Type, Desired) when ?IS_STRING(Desired) ->
+integral_versions(Type, iolist_to_binary(Desired));
+integral_versions(Type, Desired) when is_binary(Desired) ->
+integral_versions(Type, parse_versions(Desired));
+integral_versions(Type, Desired) ->
+Available = available_versions(Type),
 case lists:filter(fun(V) -> lists:member(V, Available) end, Desired) of
 [] ->
 erlang:error(#{
@@ -86,33 +140,36 @@ integral_versions(Desired) ->
 Filtered
 end.

-%% @doc Return a list of all supported ciphers.
-all_ciphers() -> all_ciphers(default_versions()).
+%% @doc Return a set of all ciphers
+all_ciphers_set_cached() ->
+case persistent_term:get(?FUNCTION_NAME, false) of
+false ->
+S = sets:from_list(all_ciphers()),
+persistent_term:put(?FUNCTION_NAME, S);
+Set ->
+Set
+end.
+
-%% @doc Return a list of (openssl string format) cipher suites.
+%% @hidden Return a list of all supported ciphers.
+all_ciphers() ->
+all_ciphers(available_versions(all)).
+
+%% @hidden Return a list of (openssl string format) cipher suites.
 -spec all_ciphers([ssl:tls_version()]) -> [string()].
 all_ciphers(['tlsv1.3']) ->
 %% When it's only tlsv1.3 wanted, use 'exclusive' here
 %% because 'all' returns legacy cipher suites too,
 %% which does not make sense since tlsv1.3 can not use
 %% legacy cipher suites.
-ssl:cipher_suites(exclusive, 'tlsv1.3', openssl);
+?TLSV13_EXCLUSIVE_CIPHERS;
 all_ciphers(Versions) ->
 %% assert non-empty
 List = lists:append([ssl:cipher_suites(all, V, openssl) || V <- Versions]),
 [_ | _] = dedup(List).

 %% @doc All Pre-selected TLS ciphers.
-%% ssl:cipher_suites(all, V, openssl) is too slow. so we cache default ciphers.
 default_ciphers() ->
-case persistent_term:get(default_ciphers, undefined) of
-undefined ->
-Default = selected_ciphers(available_versions()),
-persistent_term:put(default_ciphers, Default),
-Default;
-Default ->
-Default
-end.
+selected_ciphers(available_versions(all)).

 %% @doc Pre-selected TLS ciphers for given versions..
 selected_ciphers(Vsns) ->
@@ -126,54 +183,11 @@ selected_ciphers(Vsns) ->

 do_selected_ciphers('tlsv1.3') ->
 case lists:member('tlsv1.3', proplists:get_value(available, ssl:versions())) of
-true -> ssl:cipher_suites(exclusive, 'tlsv1.3', openssl);
+true -> ?TLSV13_EXCLUSIVE_CIPHERS;
 false -> []
 end ++ do_selected_ciphers('tlsv1.2');
 do_selected_ciphers(_) ->
-[
-"ECDHE-ECDSA-AES256-GCM-SHA384",
-"ECDHE-RSA-AES256-GCM-SHA384",
-"ECDHE-ECDSA-AES256-SHA384",
-"ECDHE-RSA-AES256-SHA384",
-"ECDH-ECDSA-AES256-GCM-SHA384",
-"ECDH-RSA-AES256-GCM-SHA384",
-"ECDH-ECDSA-AES256-SHA384",
-"ECDH-RSA-AES256-SHA384",
-"DHE-DSS-AES256-GCM-SHA384",
-"DHE-DSS-AES256-SHA256",
-"AES256-GCM-SHA384",
-"AES256-SHA256",
-"ECDHE-ECDSA-AES128-GCM-SHA256",
-"ECDHE-RSA-AES128-GCM-SHA256",
-"ECDHE-ECDSA-AES128-SHA256",
-"ECDHE-RSA-AES128-SHA256",
-"ECDH-ECDSA-AES128-GCM-SHA256",
-"ECDH-RSA-AES128-GCM-SHA256",
-"ECDH-ECDSA-AES128-SHA256",
-"ECDH-RSA-AES128-SHA256",
-"DHE-DSS-AES128-GCM-SHA256",
-"DHE-DSS-AES128-SHA256",
-"AES128-GCM-SHA256",
-"AES128-SHA256",
-"ECDHE-ECDSA-AES256-SHA",
-"ECDHE-RSA-AES256-SHA",
-"DHE-DSS-AES256-SHA",
-"ECDH-ECDSA-AES256-SHA",
-"ECDH-RSA-AES256-SHA",
-"ECDHE-ECDSA-AES128-SHA",
-"ECDHE-RSA-AES128-SHA",
-"DHE-DSS-AES128-SHA",
-"ECDH-ECDSA-AES128-SHA",
-"ECDH-RSA-AES128-SHA",
-
-%% psk
-"RSA-PSK-AES256-GCM-SHA384",
-"RSA-PSK-AES256-CBC-SHA384",
-"RSA-PSK-AES128-GCM-SHA256",
-"RSA-PSK-AES128-CBC-SHA256",
-"RSA-PSK-AES256-CBC-SHA",
-"RSA-PSK-AES128-CBC-SHA"
-].
+?SELECTED_CIPHERS.

 %% @doc Ensure version & cipher-suites integrity.
 -spec integral_ciphers([ssl:tls_version()], binary() | string() | [string()]) -> [string()].
@@ -201,17 +215,17 @@ ensure_tls13_cipher(true, Ciphers) ->
 ensure_tls13_cipher(false, Ciphers) ->
 Ciphers.

-%% default ssl versions based on available versions.
--spec available_versions() -> [atom()].
-available_versions() ->
-OtpRelease = list_to_integer(erlang:system_info(otp_release)),
-default_versions(OtpRelease).
+%% @doc Returns the default available tls/dtls versions.
+available_versions(Type) ->
+All = ssl:versions(),
+available_versions(Type, All).

-%% tlsv1.3 is available from OTP-22 but we do not want to use until 23.
-default_versions(OtpRelease) when OtpRelease >= 23 ->
-proplists:get_value(available, ssl:versions());
-default_versions(_) ->
-lists:delete('tlsv1.3', proplists:get_value(available, ssl:versions())).
+available_versions(tls, All) ->
+proplists:get_value(available, All);
+available_versions(dtls, All) ->
+proplists:get_value(available_dtls, All);
+available_versions(all, All) ->
+available_versions(tls, All) ++ available_versions(dtls, All).

 %% Deduplicate a list without re-ordering the elements.
 dedup([]) ->
@@ -244,6 +258,8 @@ do_parse_versions([V | More], Acc) ->
 do_parse_versions(More, [Parsed | Acc])
 end.

+parse_version(<<"dtlsv1.2">>) -> 'dtlsv1.2';
+parse_version(<<"dtlsv1">>) -> dtlsv1;
 parse_version(<<"tlsv", Vsn/binary>>) -> parse_version(Vsn);
 parse_version(<<"v", Vsn/binary>>) -> parse_version(Vsn);
 parse_version(<<"1.3">>) -> 'tlsv1.3';
@@ -259,36 +275,6 @@ split_by_comma(Bin) ->
 trim_space(Bin) ->
 hd([I || I <- binary:split(Bin, <<" ">>), I =/= <<>>]).

-%% @doc Drop tlsv1.3 version and ciphers from ssl options
-%% if running on otp 22 or earlier.
-drop_tls13_for_old_otp(SslOpts) ->
-case list_to_integer(erlang:system_info(otp_release)) < 23 of
-true -> drop_tls13(SslOpts);
-false -> SslOpts
-end.
-
-%% The ciphers that ssl:cipher_suites(exclusive, 'tlsv1.3', openssl)
-%% should return when running on otp 23.
-%% But we still have to hard-code them because tlsv1.3 on otp 22 is
-%% not trustworthy.
--define(TLSV13_EXCLUSIVE_CIPHERS, [
-"TLS_AES_256_GCM_SHA384",
-"TLS_AES_128_GCM_SHA256",
-"TLS_CHACHA20_POLY1305_SHA256",
-"TLS_AES_128_CCM_SHA256",
-"TLS_AES_128_CCM_8_SHA256"
-]).
-drop_tls13(SslOpts0) ->
-SslOpts1 =
-case maps:find(versions, SslOpts0) of
-error -> SslOpts0;
-{ok, Vsns} -> SslOpts0#{versions => (Vsns -- ['tlsv1.3'])}
-end,
-case maps:find(ciphers, SslOpts1) of
-error -> SslOpts1;
-{ok, Ciphers} -> SslOpts1#{ciphers => Ciphers -- ?TLSV13_EXCLUSIVE_CIPHERS}
-end.
-
 %% @doc The input map is a HOCON decoded result of a struct defined as
 %% emqx_schema:server_ssl_opts_schema. (NOTE: before schema-checked).
 %% `keyfile', `certfile' and `cacertfile' can be either pem format key or certificates,
@@ -498,27 +484,54 @@ do_drop_invalid_certs([Key | Keys], SSL) ->
 end
 end.

-%% @doc Convert hocon-checked ssl client options (map()) to
+%% @doc Convert hocon-checked ssl server options (map()) to
 %% proplist accepted by ssl library.
+-spec to_server_opts(tls | dtls, map()) -> [{atom(), term()}].
+to_server_opts(Type, Opts) ->
+Versions = integral_versions(Type, maps:get(versions, Opts, undefined)),
+Ciphers = integral_ciphers(Versions, maps:get(ciphers, Opts, undefined)),
+maps:to_list(Opts#{
+ciphers => Ciphers,
+versions => Versions
+}).
+
+%% @doc Convert hocon-checked tls client options (map()) to
+%% proplist accepted by ssl library.
+-spec to_client_opts(map()) -> [{atom(), term()}].
 to_client_opts(Opts) ->
+to_client_opts(tls, Opts).
+
+%% @doc Convert hocon-checked tls or dtls client options (map()) to
+%% proplist accepted by ssl library.
+-spec to_client_opts(tls | dtls, map()) -> [{atom(), term()}].
+to_client_opts(Type, Opts) ->
 GetD = fun(Key, Default) -> fuzzy_map_get(Key, Opts, Default) end,
 Get = fun(Key) -> GetD(Key, undefined) end,
-KeyFile = ensure_str(Get(keyfile)),
-CertFile = ensure_str(Get(certfile)),
-CAFile = ensure_str(Get(cacertfile)),
-Verify = GetD(verify, verify_none),
-SNI = ensure_sni(Get(server_name_indication)),
-Versions = integral_versions(Get(versions)),
-Ciphers = integral_ciphers(Versions, Get(ciphers)),
-filter([
-{keyfile, KeyFile},
-{certfile, CertFile},
-{cacertfile, CAFile},
-{verify, Verify},
-{server_name_indication, SNI},
-{versions, Versions},
-{ciphers, Ciphers}
-]).
+case GetD(enable, false) of
+true ->
+KeyFile = ensure_str(Get(keyfile)),
+CertFile = ensure_str(Get(certfile)),
+CAFile = ensure_str(Get(cacertfile)),
+Verify = GetD(verify, verify_none),
+SNI = ensure_sni(Get(server_name_indication)),
+Versions = integral_versions(Type, Get(versions)),
+Ciphers = integral_ciphers(Versions, Get(ciphers)),
+filter([
+{keyfile, KeyFile},
+{certfile, CertFile},
+{cacertfile, CAFile},
+{verify, Verify},
+{server_name_indication, SNI},
+{versions, Versions},
+{ciphers, Ciphers},
+{reuse_sessions, Get(reuse_sessions)},
+{depth, Get(depth)},
+{password, ensure_str(Get(password))},
+{secure_renegotiate, Get(secure_renegotiate)}
+]);
+false ->
+[]
+end.

 filter([]) -> [];
 filter([{_, undefined} | T]) -> filter(T);
@@ -556,28 +569,3 @@ ensure_ssl_file_key(SSL, RequiredKeys) ->
 [] -> ok;
 Miss -> {error, #{reason => ssl_file_option_not_found, which_options => Miss}}
 end.
-
--if(?OTP_RELEASE > 22).
--ifdef(TEST).
--include_lib("eunit/include/eunit.hrl").
-
-drop_tls13_test() ->
-Versions = default_versions(),
-?assert(lists:member('tlsv1.3', Versions)),
-Ciphers = all_ciphers(),
-?assert(has_tlsv13_cipher(Ciphers)),
-Opts0 = #{versions => Versions, ciphers => Ciphers, other => true},
-Opts = drop_tls13(Opts0),
-?assertNot(lists:member('tlsv1.3', maps:get(versions, Opts, undefined))),
-?assertNot(has_tlsv13_cipher(maps:get(ciphers, Opts, undefined))).
-
-drop_tls13_no_versions_cipers_test() ->
-Opts0 = #{other => 0, bool => true},
-Opts = drop_tls13(Opts0),
-?_assertEqual(Opts0, Opts).
-
-has_tlsv13_cipher(Ciphers) ->
-lists:any(fun(C) -> lists:member(C, Ciphers) end, ?TLSV13_EXCLUSIVE_CIPHERS).
-
--endif.
--endif.

@@ -51,10 +51,7 @@

 -export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).

--define(TRACE, ?MODULE).
--define(SHARD, ?COMMON_SHARD).
--define(MAX_SIZE, 30).
--define(OWN_KEYS, [level, filters, filter_default, handlers]).
+-include("emqx_trace.hrl").

 -ifdef(TEST).
 -export([
@@ -66,15 +63,6 @@
 -export_type([ip_address/0]).
 -type ip_address() :: string().

--record(?TRACE, {
-name :: binary() | undefined | '_',
-type :: clientid | topic | ip_address | undefined | '_',
-filter :: emqx_types:topic() | emqx_types:clientid() | ip_address() | undefined | '_',
-enable = true :: boolean() | '_',
-start_at :: integer() | undefined | '_',
-end_at :: integer() | undefined | '_'
-}).
-
 publish(#message{topic = <<"$SYS/", _/binary>>}) ->
 ignore;
 publish(#message{from = From, topic = Topic, payload = Payload}) when
@@ -172,13 +160,7 @@ create(Trace) ->

 -spec delete(Name :: binary()) -> ok | {error, not_found}.
 delete(Name) ->
-Tran = fun() ->
-case mnesia:read(?TRACE, Name) of
-[_] -> mnesia:delete(?TRACE, Name, write);
-[] -> mnesia:abort(not_found)
-end
-end,
-transaction(Tran).
+transaction(fun emqx_trace_dl:delete/1, [Name]).

 -spec clear() -> ok | {error, Reason :: term()}.
 clear() ->
@@ -190,20 +172,7 @@ clear() ->
 -spec update(Name :: binary(), Enable :: boolean()) ->
 ok | {error, not_found | finished}.
 update(Name, Enable) ->
-Tran = fun() ->
-case mnesia:read(?TRACE, Name) of
-[] ->
-mnesia:abort(not_found);
-[#?TRACE{enable = Enable}] ->
-ok;
-[Rec] ->
-case erlang:system_time(second) >= Rec#?TRACE.end_at of
-false -> mnesia:write(?TRACE, Rec#?TRACE{enable = Enable}, write);
-true -> mnesia:abort(finished)
-end
-end
-end,
-transaction(Tran).
+transaction(fun emqx_trace_dl:update/2, [Name, Enable]).

 check() ->
 gen_server:call(?MODULE, check).
@@ -211,13 +180,7 @@ check() ->
 -spec get_trace_filename(Name :: binary()) ->
 {ok, FileName :: string()} | {error, not_found}.
 get_trace_filename(Name) ->
-Tran = fun() ->
-case mnesia:read(?TRACE, Name, read) of
-[] -> mnesia:abort(not_found);
-[#?TRACE{start_at = Start}] -> {ok, filename(Name, Start)}
-end
-end,
-transaction(Tran).
+transaction(fun emqx_trace_dl:get_trace_filename/1, [Name]).

 -spec trace_file(File :: file:filename_all()) ->
 {ok, Node :: list(), Binary :: binary()}
@@ -309,23 +272,7 @@ code_change(_, State, _Extra) ->
 {ok, State}.

 insert_new_trace(Trace) ->
-Tran = fun() ->
-case mnesia:read(?TRACE, Trace#?TRACE.name) of
-[] ->
-#?TRACE{start_at = StartAt, type = Type, filter = Filter} = Trace,
-Match = #?TRACE{_ = '_', start_at = StartAt, type = Type, filter = Filter},
-case mnesia:match_object(?TRACE, Match, read) of
-[] ->
-ok = mnesia:write(?TRACE, Trace, write),
-{ok, Trace};
-[#?TRACE{name = Name}] ->
-mnesia:abort({duplicate_condition, Name})
-end;
-[#?TRACE{name = Name}] ->
-mnesia:abort({already_existed, Name})
-end
-end,
-transaction(Tran).
+transaction(fun emqx_trace_dl:insert_new_trace/1, [Trace]).

 update_trace(Traces) ->
 Now = erlang:system_time(second),
@@ -347,9 +294,7 @@ stop_all_trace_handler() ->

 get_enabled_trace() ->
 {atomic, Traces} =
-mria:ro_transaction(?SHARD, fun() ->
-mnesia:match_object(?TRACE, #?TRACE{enable = true, _ = '_'}, read)
-end),
+mria:ro_transaction(?SHARD, fun emqx_trace_dl:get_enabled_trace/0),
 Traces.

 find_closest_time(Traces, Now) ->
@@ -372,17 +317,7 @@ closest(Time, Now, Closest) -> min(Time - Now, Closest).
 disable_finished([]) ->
 ok;
 disable_finished(Traces) ->
-transaction(fun() ->
-lists:map(
-fun(#?TRACE{name = Name}) ->
-case mnesia:read(?TRACE, Name, write) of
-[] -> ok;
-[Trace] -> mnesia:write(?TRACE, Trace#?TRACE{enable = false}, write)
-end
-end,
-Traces
-)
-end).
+transaction(fun emqx_trace_dl:delete_finished/1, [Traces]).

 start_trace(Traces, Started0) ->
 Started = lists:map(fun(#{name := Name}) -> Name end, Started0),
@@ -586,8 +521,8 @@ filename(Name, Start) ->
 [Time, _] = string:split(calendar:system_time_to_rfc3339(Start), "T", leading),
 lists:flatten(["trace_", binary_to_list(Name), "_", Time, ".log"]).

-transaction(Tran) ->
-case mria:transaction(?COMMON_SHARD, Tran) of
+transaction(Fun, Args) ->
+case mria:transaction(?COMMON_SHARD, Fun, Args) of
 {atomic, Res} -> Res;
 {aborted, Reason} -> {error, Reason}
 end.

@@ -0,0 +1,35 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2022 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%--------------------------------------------------------------------
+-ifndef(EMQX_TRACE_HRL).
+-define(EMQX_TRACE_HRL, true).
+
+-define(TRACE, emqx_trace).
+
+-record(?TRACE, {
+name :: binary() | undefined | '_',
+type :: clientid | topic | ip_address | undefined | '_',
+filter ::
+emqx_types:topic() | emqx_types:clientid() | emqx_trace:ip_address() | undefined | '_',
+enable = true :: boolean() | '_',
+start_at :: integer() | undefined | '_',
+end_at :: integer() | undefined | '_'
+}).
+
+-define(SHARD, ?COMMON_SHARD).
+-define(MAX_SIZE, 30).
+-define(OWN_KEYS, [level, filters, filter_default, handlers]).
+
+-endif.

@@ -0,0 +1,103 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2022 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%--------------------------------------------------------------------
+
+%% Data layer for emqx_trace
+-module(emqx_trace_dl).
+
+%% API:
+-export([
+update/2,
+insert_new_trace/1,
+delete/1,
+get_trace_filename/1,
+delete_finished/1,
+get_enabled_trace/0
+]).
+
+-include("emqx_trace.hrl").
+
+%%================================================================================
+%% API funcions
+%%================================================================================
+
+%% Introduced in 5.0
+-spec update(Name :: binary(), Enable :: boolean()) ->
+ok.
+update(Name, Enable) ->
+case mnesia:read(?TRACE, Name) of
+[] ->
+mnesia:abort(not_found);
+[#?TRACE{enable = Enable}] ->
+ok;
+[Rec] ->
+case erlang:system_time(second) >= Rec#?TRACE.end_at of
+false -> mnesia:write(?TRACE, Rec#?TRACE{enable = Enable}, write);
+true -> mnesia:abort(finished)
+end
+end.
+
+%% Introduced in 5.0
+insert_new_trace(Trace) ->
+case mnesia:read(?TRACE, Trace#?TRACE.name) of
+[] ->
+#?TRACE{start_at = StartAt, type = Type, filter = Filter} = Trace,
+Match = #?TRACE{_ = '_', start_at = StartAt, type = Type, filter = Filter},
+case mnesia:match_object(?TRACE, Match, read) of
+[] ->
+ok = mnesia:write(?TRACE, Trace, write),
+{ok, Trace};
+[#?TRACE{name = Name}] ->
+mnesia:abort({duplicate_condition, Name})
+end;
+[#?TRACE{name = Name}] ->
+mnesia:abort({already_existed, Name})
+end.
+
+%% Introduced in 5.0
+-spec delete(Name :: binary()) -> ok.
+delete(Name) ->
+case mnesia:read(?TRACE, Name) of
+[_] -> mnesia:delete(?TRACE, Name, write);
+[] -> mnesia:abort(not_found)
+end.
+
+%% Introduced in 5.0
+-spec get_trace_filename(Name :: binary()) -> {ok, string()}.
+get_trace_filename(Name) ->
+case mnesia:read(?TRACE, Name, read) of
+[] -> mnesia:abort(not_found);
+[#?TRACE{start_at = Start}] -> {ok, emqx_trace:filename(Name, Start)}
+end.
+
+%% Introduced in 5.0
+delete_finished(Traces) ->
+lists:map(
+fun(#?TRACE{name = Name}) ->
+case mnesia:read(?TRACE, Name, write) of
+[] -> ok;
+[Trace] -> mnesia:write(?TRACE, Trace#?TRACE{enable = false}, write)
+end
+end,
+Traces
+).
+
+%% Introduced in 5.0
+get_enabled_trace() ->
+mnesia:match_object(?TRACE, #?TRACE{enable = true, _ = '_'}, read).
+
+%%================================================================================
+%% Internal functions
+%%================================================================================

@@ -72,7 +72,7 @@ end_per_testcase(TestCase, Config) when
 ->
 Slave = ?config(slave, Config),
 emqx_common_test_helpers:stop_slave(Slave),
-mria:transaction(?ROUTE_SHARD, fun() -> mnesia:clear_table(?ROUTE_TAB) end),
+mria:clear_table(?ROUTE_TAB),
 snabbkaffe:stop(),
 ok;
 end_per_testcase(_TestCase, _Config) ->

@ -21,8 +21,7 @@
|
||||||
ssl_opts_dtls_test() ->
|
ssl_opts_dtls_test() ->
|
||||||
Sc = emqx_schema:server_ssl_opts_schema(
|
Sc = emqx_schema:server_ssl_opts_schema(
|
||||||
#{
|
#{
|
||||||
versions => dtls_all_available,
|
versions => dtls_all_available
|
||||||
ciphers => dtls_all_available
|
|
||||||
},
|
},
|
||||||
false
|
false
|
||||||
),
|
),
|
||||||
|
@ -30,7 +29,7 @@ ssl_opts_dtls_test() ->
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
#{
|
#{
|
||||||
versions := ['dtlsv1.2', 'dtlsv1'],
|
versions := ['dtlsv1.2', 'dtlsv1'],
|
||||||
ciphers := ["ECDHE-ECDSA-AES256-GCM-SHA384" | _]
|
ciphers := []
|
||||||
},
|
},
|
||||||
Checked
|
Checked
|
||||||
).
|
).
|
||||||
|
@ -42,7 +41,7 @@ ssl_opts_tls_1_3_test() ->
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
#{
|
#{
|
||||||
versions := ['tlsv1.3'],
|
versions := ['tlsv1.3'],
|
||||||
ciphers := [_ | _]
|
ciphers := []
|
||||||
},
|
},
|
||||||
Checked
|
Checked
|
||||||
).
|
).
|
||||||
|
@ -53,7 +52,7 @@ ssl_opts_tls_for_ranch_test() ->
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
#{
|
#{
|
||||||
versions := ['tlsv1.3'],
|
versions := ['tlsv1.3'],
|
||||||
ciphers := [_ | _],
|
ciphers := [],
|
||||||
handshake_timeout := _
|
handshake_timeout := _
|
||||||
},
|
},
|
||||||
Checked
|
Checked
|
||||||
|
@ -125,7 +124,7 @@ validate(Schema, Data0) ->
|
||||||
),
|
),
|
||||||
Checked.
|
Checked.
|
||||||
|
|
||||||
ciperhs_schema_test() ->
|
ciphers_schema_test() ->
|
||||||
Sc = emqx_schema:ciphers_schema(undefined),
|
Sc = emqx_schema:ciphers_schema(undefined),
|
||||||
WSc = #{roots => [{ciphers, Sc}]},
|
WSc = #{roots => [{ciphers, Sc}]},
|
||||||
?assertThrow(
|
?assertThrow(
|
||||||
|
@ -135,7 +134,7 @@ ciperhs_schema_test() ->
|
||||||
|
|
||||||
bad_tls_version_test() ->
|
bad_tls_version_test() ->
|
||||||
Sc = emqx_schema:server_ssl_opts_schema(#{}, false),
|
Sc = emqx_schema:server_ssl_opts_schema(#{}, false),
|
||||||
Reason = {unsupported_ssl_versions, [foo]},
|
Reason = {unsupported_tls_versions, [foo]},
|
||||||
?assertThrow(
|
?assertThrow(
|
||||||
{_Sc, [#{kind := validation_error, reason := Reason}]},
|
{_Sc, [#{kind := validation_error, reason := Reason}]},
|
||||||
validate(Sc, #{<<"versions">> => [<<"foo">>]})
|
validate(Sc, #{<<"versions">> => [<<"foo">>]})
|
||||||
|
|
|
@ -51,24 +51,34 @@ test_cipher_format(Input) ->
|
||||||
?assertEqual([?TLS_13_CIPHER, ?TLS_12_CIPHER], Ciphers).
|
?assertEqual([?TLS_13_CIPHER, ?TLS_12_CIPHER], Ciphers).
|
||||||
|
|
||||||
tls_versions_test() ->
|
tls_versions_test() ->
|
||||||
?assert(lists:member('tlsv1.3', emqx_tls_lib:default_versions())).
|
?assert(lists:member('tlsv1.3', emqx_tls_lib:available_versions(tls))).
|
||||||
|
|
||||||
tls_version_unknown_test() ->
|
tls_version_unknown_test_() ->
|
||||||
?assertEqual(
|
lists:flatmap(
|
||||||
emqx_tls_lib:default_versions(),
|
fun(Type) ->
|
||||||
emqx_tls_lib:integral_versions([])
|
[
|
||||||
),
|
?_assertEqual(
|
||||||
?assertEqual(
|
emqx_tls_lib:available_versions(Type),
|
||||||
emqx_tls_lib:default_versions(),
|
emqx_tls_lib:integral_versions(Type, [])
|
||||||
emqx_tls_lib:integral_versions(<<>>)
|
),
|
||||||
),
|
?_assertEqual(
|
||||||
?assertEqual(
|
emqx_tls_lib:available_versions(Type),
|
||||||
emqx_tls_lib:default_versions(),
|
emqx_tls_lib:integral_versions(Type, <<>>)
|
||||||
emqx_tls_lib:integral_versions("foo")
|
),
|
||||||
),
|
?_assertEqual(
|
||||||
?assertError(
|
emqx_tls_lib:available_versions(Type),
|
||||||
#{reason := no_available_tls_version},
|
%% unknown version dropped
|
||||||
emqx_tls_lib:integral_versions([foo])
|
emqx_tls_lib:integral_versions(Type, "foo")
|
||||||
|
),
|
||||||
|
fun() ->
|
||||||
|
?assertError(
|
||||||
|
#{reason := no_available_tls_version},
|
||||||
|
emqx_tls_lib:integral_versions(Type, [foo])
|
||||||
|
)
|
||||||
|
end
|
||||||
|
]
|
||||||
|
end,
|
||||||
|
[tls, dtls]
|
||||||
).
|
).
|
||||||
|
|
||||||
cipher_suites_no_duplication_test() ->
|
cipher_suites_no_duplication_test() ->
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
%% -*- mode: erlang -*-
|
%% -*- mode: erlang -*-
|
||||||
{application, emqx_authn, [
|
{application, emqx_authn, [
|
||||||
{description, "EMQX Authentication"},
|
{description, "EMQX Authentication"},
|
||||||
{vsn, "0.1.4"},
|
{vsn, "0.1.5"},
|
||||||
{modules, []},
|
{modules, []},
|
||||||
{registered, [emqx_authn_sup, emqx_authn_registry]},
|
{registered, [emqx_authn_sup, emqx_authn_registry]},
|
||||||
{applications, [kernel, stdlib, emqx_resource, emqx_connector, ehttpc, epgsql, mysql, jose]},
|
{applications, [kernel, stdlib, emqx_resource, emqx_connector, ehttpc, epgsql, mysql, jose]},
|
||||||
|
|
|
@ -52,6 +52,14 @@
|
||||||
group_match_spec/1
|
group_match_spec/1
|
||||||
]).
|
]).
|
||||||
|
|
||||||
|
%% Internal exports (RPC)
|
||||||
|
-export([
|
||||||
|
do_destroy/1,
|
||||||
|
do_add_user/2,
|
||||||
|
do_delete_user/2,
|
||||||
|
do_update_user/3
|
||||||
|
]).
|
||||||
|
|
||||||
-define(TAB, ?MODULE).
|
-define(TAB, ?MODULE).
|
||||||
-define(AUTHN_QSCHEMA, [
|
-define(AUTHN_QSCHEMA, [
|
||||||
{<<"like_user_id">>, binary},
|
{<<"like_user_id">>, binary},
|
||||||
|
@ -170,83 +178,79 @@ authenticate(_Credential, _State) ->
|
||||||
ignore.
|
ignore.
|
||||||
|
|
||||||
destroy(#{user_group := UserGroup}) ->
|
destroy(#{user_group := UserGroup}) ->
|
||||||
|
trans(fun ?MODULE:do_destroy/1, [UserGroup]).
|
||||||
|
|
||||||
|
do_destroy(UserGroup) ->
|
||||||
MatchSpec = group_match_spec(UserGroup),
|
MatchSpec = group_match_spec(UserGroup),
|
||||||
trans(
|
ok = lists:foreach(
|
||||||
fun() ->
|
fun(UserInfo) ->
|
||||||
ok = lists:foreach(
|
mnesia:delete_object(?TAB, UserInfo, write)
|
||||||
fun(UserInfo) ->
|
end,
|
||||||
mnesia:delete_object(?TAB, UserInfo, write)
|
mnesia:select(?TAB, MatchSpec, write)
|
||||||
end,
|
|
||||||
mnesia:select(?TAB, MatchSpec, write)
|
|
||||||
)
|
|
||||||
end
|
|
||||||
).
|
).
|
||||||
|
|
||||||
add_user(
|
add_user(UserInfo, State) ->
|
||||||
|
trans(fun ?MODULE:do_add_user/2, [UserInfo, State]).
|
||||||
|
|
||||||
|
do_add_user(
|
||||||
#{
|
#{
|
||||||
user_id := UserID,
|
user_id := UserID,
|
||||||
password := Password
|
password := Password
|
||||||
} = UserInfo,
|
} = UserInfo,
|
||||||
#{user_group := UserGroup} = State
|
#{user_group := UserGroup} = State
|
||||||
) ->
|
) ->
|
||||||
trans(
|
case mnesia:read(?TAB, {UserGroup, UserID}, write) of
|
||||||
fun() ->
|
[] ->
|
||||||
case mnesia:read(?TAB, {UserGroup, UserID}, write) of
|
IsSuperuser = maps:get(is_superuser, UserInfo, false),
|
||||||
[] ->
|
add_user(UserGroup, UserID, Password, IsSuperuser, State),
|
||||||
IsSuperuser = maps:get(is_superuser, UserInfo, false),
|
{ok, #{user_id => UserID, is_superuser => IsSuperuser}};
|
||||||
add_user(UserGroup, UserID, Password, IsSuperuser, State),
|
[_] ->
|
||||||
{ok, #{user_id => UserID, is_superuser => IsSuperuser}};
|
{error, already_exist}
|
||||||
[_] ->
|
end.
|
||||||
{error, already_exist}
|
|
||||||
end
|
|
||||||
end
|
|
||||||
).
|
|
||||||
|
|
||||||
delete_user(UserID, #{user_group := UserGroup}) ->
|
delete_user(UserID, State) ->
|
||||||
trans(
|
trans(fun ?MODULE:do_delete_user/2, [UserID, State]).
|
||||||
fun() ->
|
|
||||||
case mnesia:read(?TAB, {UserGroup, UserID}, write) of
|
|
||||||
[] ->
|
|
||||||
{error, not_found};
|
|
||||||
[_] ->
|
|
||||||
mnesia:delete(?TAB, {UserGroup, UserID}, write)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
).
|
|
||||||
|
|
||||||
update_user(
|
do_delete_user(UserID, #{user_group := UserGroup}) ->
|
||||||
|
case mnesia:read(?TAB, {UserGroup, UserID}, write) of
|
||||||
|
[] ->
|
||||||
|
{error, not_found};
|
||||||
|
[_] ->
|
||||||
|
mnesia:delete(?TAB, {UserGroup, UserID}, write)
|
||||||
|
end.
|
||||||
|
|
||||||
|
update_user(UserID, User, State) ->
|
||||||
|
trans(fun ?MODULE:do_update_user/3, [UserID, User, State]).
|
||||||
|
|
||||||
|
do_update_user(
|
||||||
UserID,
|
UserID,
|
||||||
User,
|
User,
|
||||||
#{user_group := UserGroup} = State
|
#{user_group := UserGroup} = State
|
||||||
) ->
|
) ->
|
||||||
trans(
|
case mnesia:read(?TAB, {UserGroup, UserID}, write) of
|
||||||
fun() ->
|
[] ->
|
||||||
case mnesia:read(?TAB, {UserGroup, UserID}, write) of
|
{error, not_found};
|
||||||
[] ->
|
[#user_info{is_superuser = IsSuperuser} = UserInfo] ->
|
||||||
{error, not_found};
|
UserInfo1 = UserInfo#user_info{
|
||||||
[#user_info{is_superuser = IsSuperuser} = UserInfo] ->
|
is_superuser = maps:get(is_superuser, User, IsSuperuser)
|
||||||
UserInfo1 = UserInfo#user_info{
|
},
|
||||||
is_superuser = maps:get(is_superuser, User, IsSuperuser)
|
UserInfo2 =
|
||||||
},
|
case maps:get(password, User, undefined) of
|
||||||
UserInfo2 =
|
undefined ->
|
||||||
case maps:get(password, User, undefined) of
|
UserInfo1;
|
||||||
undefined ->
|
Password ->
|
||||||
UserInfo1;
|
{StoredKey, ServerKey, Salt} = esasl_scram:generate_authentication_info(
|
||||||
Password ->
|
Password, State
|
||||||
{StoredKey, ServerKey, Salt} = esasl_scram:generate_authentication_info(
|
),
|
||||||
Password, State
|
UserInfo1#user_info{
|
||||||
),
|
stored_key = StoredKey,
|
||||||
UserInfo1#user_info{
|
server_key = ServerKey,
|
||||||
stored_key = StoredKey,
|
salt = Salt
|
||||||
server_key = ServerKey,
|
}
|
||||||
salt = Salt
|
end,
|
||||||
}
|
mnesia:write(?TAB, UserInfo2, write),
|
||||||
end,
|
{ok, format_user_info(UserInfo2)}
|
||||||
mnesia:write(?TAB, UserInfo2, write),
|
end.
|
||||||
{ok, format_user_info(UserInfo2)}
|
|
||||||
end
|
|
||||||
end
|
|
||||||
).
|
|
||||||
|
|
||||||
lookup_user(UserID, #{user_group := UserGroup}) ->
|
lookup_user(UserID, #{user_group := UserGroup}) ->
|
||||||
case mnesia:dirty_read(?TAB, {UserGroup, UserID}) of
|
case mnesia:dirty_read(?TAB, {UserGroup, UserID}) of
|
||||||
|
@ -386,12 +390,10 @@ retrieve(UserID, #{user_group := UserGroup}) ->
|
||||||
end.
|
end.
|
||||||
|
|
||||||
%% TODO: Move to emqx_authn_utils.erl
|
%% TODO: Move to emqx_authn_utils.erl
|
||||||
trans(Fun) ->
|
|
||||||
trans(Fun, []).
|
|
||||||
|
|
||||||
trans(Fun, Args) ->
|
trans(Fun, Args) ->
|
||||||
case mria:transaction(?AUTH_SHARD, Fun, Args) of
|
case mria:transaction(?AUTH_SHARD, Fun, Args) of
|
||||||
{atomic, Res} -> Res;
|
{atomic, Res} -> Res;
|
||||||
|
{aborted, {function_clause, Stack}} -> erlang:raise(error, function_clause, Stack);
|
||||||
{aborted, Reason} -> {error, Reason}
|
{aborted, Reason} -> {error, Reason}
|
||||||
end.
|
end.
|
||||||
|
|
||||||
|
|
|
@ -54,6 +54,16 @@
|
||||||
group_match_spec/1
|
group_match_spec/1
|
||||||
]).
|
]).
|
||||||
|
|
||||||
|
%% Internal exports (RPC)
|
||||||
|
-export([
|
||||||
|
do_destroy/1,
|
||||||
|
do_add_user/2,
|
||||||
|
do_delete_user/2,
|
||||||
|
do_update_user/3,
|
||||||
|
import/2,
|
||||||
|
import_csv/3
|
||||||
|
]).
|
||||||
|
|
||||||
-type user_group() :: binary().
|
-type user_group() :: binary().
|
||||||
-type user_id() :: binary().
|
-type user_id() :: binary().
|
||||||
|
|
||||||
|
@ -175,15 +185,14 @@ authenticate(
|
||||||
end.
|
end.
|
||||||
|
|
||||||
destroy(#{user_group := UserGroup}) ->
|
destroy(#{user_group := UserGroup}) ->
|
||||||
trans(
|
trans(fun ?MODULE:do_destroy/1, [UserGroup]).
|
||||||
fun() ->
|
|
||||||
ok = lists:foreach(
|
do_destroy(UserGroup) ->
|
||||||
fun(User) ->
|
ok = lists:foreach(
|
||||||
mnesia:delete_object(?TAB, User, write)
|
fun(User) ->
|
||||||
end,
|
mnesia:delete_object(?TAB, User, write)
|
||||||
mnesia:select(?TAB, group_match_spec(UserGroup), write)
|
end,
|
||||||
)
|
mnesia:select(?TAB, group_match_spec(UserGroup), write)
|
||||||
end
|
|
||||||
).
|
).
|
||||||
|
|
||||||
import_users({Filename0, FileData}, State) ->
|
import_users({Filename0, FileData}, State) ->
|
||||||
|
@ -200,7 +209,10 @@ import_users({Filename0, FileData}, State) ->
|
||||||
{error, {unsupported_file_format, Extension}}
|
{error, {unsupported_file_format, Extension}}
|
||||||
end.
|
end.
|
||||||
|
|
||||||
add_user(
|
add_user(UserInfo, State) ->
|
||||||
|
trans(fun ?MODULE:do_add_user/2, [UserInfo, State]).
|
||||||
|
|
||||||
|
do_add_user(
|
||||||
#{
|
#{
|
||||||
user_id := UserID,
|
user_id := UserID,
|
||||||
password := Password
|
password := Password
|
||||||
|
@ -210,33 +222,31 @@ add_user(
|
||||||
password_hash_algorithm := Algorithm
|
password_hash_algorithm := Algorithm
|
||||||
}
|
}
|
||||||
) ->
|
) ->
|
||||||
trans(
|
case mnesia:read(?TAB, {UserGroup, UserID}, write) of
|
||||||
fun() ->
|
[] ->
|
||||||
case mnesia:read(?TAB, {UserGroup, UserID}, write) of
|
{PasswordHash, Salt} = emqx_authn_password_hashing:hash(Algorithm, Password),
|
||||||
[] ->
|
IsSuperuser = maps:get(is_superuser, UserInfo, false),
|
||||||
{PasswordHash, Salt} = emqx_authn_password_hashing:hash(Algorithm, Password),
|
insert_user(UserGroup, UserID, PasswordHash, Salt, IsSuperuser),
|
||||||
IsSuperuser = maps:get(is_superuser, UserInfo, false),
|
{ok, #{user_id => UserID, is_superuser => IsSuperuser}};
|
||||||
insert_user(UserGroup, UserID, PasswordHash, Salt, IsSuperuser),
|
[_] ->
|
||||||
{ok, #{user_id => UserID, is_superuser => IsSuperuser}};
|
{error, already_exist}
|
||||||
[_] ->
|
end.
|
||||||
{error, already_exist}
|
|
||||||
end
|
|
||||||
end
|
|
||||||
).
|
|
||||||
|
|
||||||
delete_user(UserID, #{user_group := UserGroup}) ->
|
delete_user(UserID, State) ->
|
||||||
trans(
|
trans(fun ?MODULE:do_delete_user/2, [UserID, State]).
|
||||||
fun() ->
|
|
||||||
case mnesia:read(?TAB, {UserGroup, UserID}, write) of
|
|
||||||
[] ->
|
|
||||||
{error, not_found};
|
|
||||||
[_] ->
|
|
||||||
mnesia:delete(?TAB, {UserGroup, UserID}, write)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
).
|
|
||||||
|
|
||||||
update_user(
|
do_delete_user(UserID, #{user_group := UserGroup}) ->
|
||||||
|
case mnesia:read(?TAB, {UserGroup, UserID}, write) of
|
||||||
|
[] ->
|
||||||
|
{error, not_found};
|
||||||
|
[_] ->
|
||||||
|
mnesia:delete(?TAB, {UserGroup, UserID}, write)
|
||||||
|
end.
|
||||||
|
|
||||||
|
update_user(UserID, UserInfo, State) ->
|
||||||
|
trans(fun ?MODULE:do_update_user/3, [UserID, UserInfo, State]).
|
||||||
|
|
||||||
|
do_update_user(
|
||||||
UserID,
|
UserID,
|
||||||
UserInfo,
|
UserInfo,
|
||||||
#{
|
#{
|
||||||
|
@ -244,33 +254,29 @@ update_user(
|
||||||
password_hash_algorithm := Algorithm
|
password_hash_algorithm := Algorithm
|
||||||
}
|
}
|
||||||
) ->
|
) ->
|
||||||
trans(
|
case mnesia:read(?TAB, {UserGroup, UserID}, write) of
|
||||||
fun() ->
|
[] ->
|
||||||
case mnesia:read(?TAB, {UserGroup, UserID}, write) of
|
{error, not_found};
|
||||||
[] ->
|
[
|
||||||
{error, not_found};
|
#user_info{
|
||||||
[
|
password_hash = PasswordHash,
|
||||||
#user_info{
|
salt = Salt,
|
||||||
password_hash = PasswordHash,
|
is_superuser = IsSuperuser
|
||||||
salt = Salt,
|
}
|
||||||
is_superuser = IsSuperuser
|
] ->
|
||||||
}
|
NSuperuser = maps:get(is_superuser, UserInfo, IsSuperuser),
|
||||||
] ->
|
{NPasswordHash, NSalt} =
|
||||||
NSuperuser = maps:get(is_superuser, UserInfo, IsSuperuser),
|
case UserInfo of
|
||||||
{NPasswordHash, NSalt} =
|
#{password := Password} ->
|
||||||
case UserInfo of
|
emqx_authn_password_hashing:hash(
|
||||||
#{password := Password} ->
|
Algorithm, Password
|
||||||
emqx_authn_password_hashing:hash(
|
);
|
||||||
Algorithm, Password
|
#{} ->
|
||||||
);
|
{PasswordHash, Salt}
|
||||||
#{} ->
|
end,
|
||||||
{PasswordHash, Salt}
|
insert_user(UserGroup, UserID, NPasswordHash, NSalt, NSuperuser),
|
||||||
end,
|
{ok, #{user_id => UserID, is_superuser => NSuperuser}}
|
||||||
insert_user(UserGroup, UserID, NPasswordHash, NSalt, NSuperuser),
|
end.
|
||||||
{ok, #{user_id => UserID, is_superuser => NSuperuser}}
|
|
||||||
end
|
|
||||||
end
|
|
||||||
).
|
|
||||||
|
|
||||||
lookup_user(UserID, #{user_group := UserGroup}) ->
|
lookup_user(UserID, #{user_group := UserGroup}) ->
|
||||||
case mnesia:dirty_read(?TAB, {UserGroup, UserID}) of
|
case mnesia:dirty_read(?TAB, {UserGroup, UserID}) of
|
||||||
|
@ -335,7 +341,7 @@ run_fuzzy_filter(
|
||||||
import_users_from_json(Bin, #{user_group := UserGroup}) ->
|
import_users_from_json(Bin, #{user_group := UserGroup}) ->
|
||||||
case emqx_json:safe_decode(Bin, [return_maps]) of
|
case emqx_json:safe_decode(Bin, [return_maps]) of
|
||||||
{ok, List} ->
|
{ok, List} ->
|
||||||
trans(fun import/2, [UserGroup, List]);
|
trans(fun ?MODULE:import/2, [UserGroup, List]);
|
||||||
{error, Reason} ->
|
{error, Reason} ->
|
||||||
{error, Reason}
|
{error, Reason}
|
||||||
end.
|
end.
|
||||||
|
@ -344,7 +350,7 @@ import_users_from_json(Bin, #{user_group := UserGroup}) ->
|
||||||
import_users_from_csv(CSV, #{user_group := UserGroup}) ->
|
import_users_from_csv(CSV, #{user_group := UserGroup}) ->
|
||||||
case get_csv_header(CSV) of
|
case get_csv_header(CSV) of
|
||||||
{ok, Seq, NewCSV} ->
|
{ok, Seq, NewCSV} ->
|
||||||
trans(fun import_csv/3, [UserGroup, NewCSV, Seq]);
|
trans(fun ?MODULE:import_csv/3, [UserGroup, NewCSV, Seq]);
|
||||||
{error, Reason} ->
|
{error, Reason} ->
|
||||||
{error, Reason}
|
{error, Reason}
|
||||||
end.
|
end.
|
||||||
|
@ -435,9 +441,6 @@ get_user_identity(#{clientid := ClientID}, clientid) ->
|
||||||
get_user_identity(_, Type) ->
|
get_user_identity(_, Type) ->
|
||||||
{error, {bad_user_identity_type, Type}}.
|
{error, {bad_user_identity_type, Type}}.
|
||||||
|
|
||||||
trans(Fun) ->
|
|
||||||
trans(Fun, []).
|
|
||||||
|
|
||||||
trans(Fun, Args) ->
|
trans(Fun, Args) ->
|
||||||
case mria:transaction(?AUTH_SHARD, Fun, Args) of
|
case mria:transaction(?AUTH_SHARD, Fun, Args) of
|
||||||
{atomic, Res} -> Res;
|
{atomic, Res} -> Res;
|
||||||
|
|
|
@ -31,10 +31,16 @@
|
||||||
fast_forward_to_commit/2
|
fast_forward_to_commit/2
|
||||||
]).
|
]).
|
||||||
-export([
|
-export([
|
||||||
get_node_tnx_id/1,
|
commit/2,
|
||||||
|
commit_status_trans/2,
|
||||||
get_cluster_tnx_id/0,
|
get_cluster_tnx_id/0,
|
||||||
|
get_node_tnx_id/1,
|
||||||
|
init_mfa/2,
|
||||||
latest_tnx_id/0,
|
latest_tnx_id/0,
|
||||||
make_initiate_call_req/3
|
make_initiate_call_req/3,
|
||||||
|
read_next_mfa/1,
|
||||||
|
trans_query/1,
|
||||||
|
trans_status/0
|
||||||
]).
|
]).
|
||||||
|
|
||||||
-export([
|
-export([
|
||||||
|
@ -194,18 +200,18 @@ do_multicall(M, F, A, RequiredSyncs, Timeout) ->
|
||||||
|
|
||||||
-spec query(pos_integer()) -> {'atomic', map()} | {'aborted', Reason :: term()}.
|
-spec query(pos_integer()) -> {'atomic', map()} | {'aborted', Reason :: term()}.
|
||||||
query(TnxId) ->
|
query(TnxId) ->
|
||||||
transaction(fun trans_query/1, [TnxId]).
|
transaction(fun ?MODULE:trans_query/1, [TnxId]).
|
||||||
|
|
||||||
-spec reset() -> reset.
|
-spec reset() -> reset.
|
||||||
reset() -> gen_server:call(?MODULE, reset).
|
reset() -> gen_server:call(?MODULE, reset).
|
||||||
|
|
||||||
-spec status() -> {'atomic', [map()]} | {'aborted', Reason :: term()}.
|
-spec status() -> {'atomic', [map()]} | {'aborted', Reason :: term()}.
|
||||||
status() ->
|
status() ->
|
||||||
transaction(fun trans_status/0, []).
|
transaction(fun ?MODULE:trans_status/0, []).
|
||||||
|
|
||||||
-spec latest_tnx_id() -> pos_integer().
|
-spec latest_tnx_id() -> pos_integer().
|
||||||
latest_tnx_id() ->
|
latest_tnx_id() ->
|
||||||
{atomic, TnxId} = transaction(fun get_cluster_tnx_id/0, []),
|
{atomic, TnxId} = transaction(fun ?MODULE:get_cluster_tnx_id/0, []),
|
||||||
TnxId.
|
TnxId.
|
||||||
|
|
||||||
-spec make_initiate_call_req(module(), atom(), list()) -> init_call_req().
|
-spec make_initiate_call_req(module(), atom(), list()) -> init_call_req().
|
||||||
|
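A pattern repeated throughout these hunks: transaction bodies that used to be anonymous or locally referenced funs are now exported under "Internal exports (RPC)" and referenced as fun ?MODULE:F/A. An external (fully qualified) fun is resolved by module and function name at call time, so it keeps working across hot code upgrades and can be shipped to other nodes, whereas a local fun is tied to the specific module version that created it. An illustrative contrast (module and function names are made up):

    -module(fun_ref_demo).
    -export([local_ref/0, external_ref/0, work/1]).

    work(X) -> X + 1.

    %% Bound to the currently loaded version of this module; becomes a badfun
    %% once that version is purged after two code reloads.
    local_ref() -> F = fun work/1, F(1).

    %% Looked up as fun_ref_demo:work/1 at the point of the call; requires the
    %% function to be exported, which is why the do_*/import_*/trans_* helpers
    %% gain export entries in this change.
    external_ref() -> F = fun ?MODULE:work/1, F(2).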
@ -280,7 +286,7 @@ handle_call(reset, _From, State) ->
|
||||||
_ = mria:clear_table(?CLUSTER_MFA),
|
_ = mria:clear_table(?CLUSTER_MFA),
|
||||||
{reply, ok, State, {continue, ?CATCH_UP}};
|
{reply, ok, State, {continue, ?CATCH_UP}};
|
||||||
handle_call(?INITIATE(MFA), _From, State = #{node := Node}) ->
|
handle_call(?INITIATE(MFA), _From, State = #{node := Node}) ->
|
||||||
case transaction(fun init_mfa/2, [Node, MFA]) of
|
case transaction(fun ?MODULE:init_mfa/2, [Node, MFA]) of
|
||||||
{atomic, {ok, TnxId, Result}} ->
|
{atomic, {ok, TnxId, Result}} ->
|
||||||
{reply, {ok, TnxId, Result}, State, {continue, ?CATCH_UP}};
|
{reply, {ok, TnxId, Result}, State, {continue, ?CATCH_UP}};
|
||||||
{aborted, Error} ->
|
{aborted, Error} ->
|
||||||
|
@ -288,7 +294,7 @@ handle_call(?INITIATE(MFA), _From, State = #{node := Node}) ->
|
||||||
end;
|
end;
|
||||||
handle_call(skip_failed_commit, _From, State = #{node := Node}) ->
|
handle_call(skip_failed_commit, _From, State = #{node := Node}) ->
|
||||||
Timeout = catch_up(State, true),
|
Timeout = catch_up(State, true),
|
||||||
{atomic, LatestId} = transaction(fun get_node_tnx_id/1, [Node]),
|
{atomic, LatestId} = transaction(fun ?MODULE:get_node_tnx_id/1, [Node]),
|
||||||
{reply, LatestId, State, Timeout};
|
{reply, LatestId, State, Timeout};
|
||||||
handle_call({fast_forward_to_commit, ToTnxId}, _From, State) ->
|
handle_call({fast_forward_to_commit, ToTnxId}, _From, State) ->
|
||||||
NodeId = do_fast_forward_to_commit(ToTnxId, State),
|
NodeId = do_fast_forward_to_commit(ToTnxId, State),
|
||||||
|
@ -316,14 +322,14 @@ code_change(_OldVsn, State, _Extra) ->
|
||||||
catch_up(State) -> catch_up(State, false).
|
catch_up(State) -> catch_up(State, false).
|
||||||
|
|
||||||
catch_up(#{node := Node, retry_interval := RetryMs} = State, SkipResult) ->
|
catch_up(#{node := Node, retry_interval := RetryMs} = State, SkipResult) ->
|
||||||
case transaction(fun read_next_mfa/1, [Node]) of
|
case transaction(fun ?MODULE:read_next_mfa/1, [Node]) of
|
||||||
{atomic, caught_up} ->
|
{atomic, caught_up} ->
|
||||||
?TIMEOUT;
|
?TIMEOUT;
|
||||||
{atomic, {still_lagging, NextId, MFA}} ->
|
{atomic, {still_lagging, NextId, MFA}} ->
|
||||||
{Succeed, _} = apply_mfa(NextId, MFA, ?APPLY_KIND_REPLICATE),
|
{Succeed, _} = apply_mfa(NextId, MFA, ?APPLY_KIND_REPLICATE),
|
||||||
case Succeed orelse SkipResult of
|
case Succeed orelse SkipResult of
|
||||||
true ->
|
true ->
|
||||||
case transaction(fun commit/2, [Node, NextId]) of
|
case transaction(fun ?MODULE:commit/2, [Node, NextId]) of
|
||||||
{atomic, ok} ->
|
{atomic, ok} ->
|
||||||
catch_up(State, false);
|
catch_up(State, false);
|
||||||
Error ->
|
Error ->
|
||||||
|
@ -367,12 +373,12 @@ commit(Node, TnxId) ->
|
||||||
ok = mnesia:write(?CLUSTER_COMMIT, #cluster_rpc_commit{node = Node, tnx_id = TnxId}, write).
|
ok = mnesia:write(?CLUSTER_COMMIT, #cluster_rpc_commit{node = Node, tnx_id = TnxId}, write).
|
||||||
|
|
||||||
do_fast_forward_to_commit(ToTnxId, State = #{node := Node}) ->
|
do_fast_forward_to_commit(ToTnxId, State = #{node := Node}) ->
|
||||||
{atomic, NodeId} = transaction(fun get_node_tnx_id/1, [Node]),
|
{atomic, NodeId} = transaction(fun ?MODULE:get_node_tnx_id/1, [Node]),
|
||||||
case NodeId >= ToTnxId of
|
case NodeId >= ToTnxId of
|
||||||
true ->
|
true ->
|
||||||
NodeId;
|
NodeId;
|
||||||
false ->
|
false ->
|
||||||
{atomic, LatestId} = transaction(fun get_cluster_tnx_id/0, []),
|
{atomic, LatestId} = transaction(fun ?MODULE:get_cluster_tnx_id/0, []),
|
||||||
case LatestId =< NodeId of
|
case LatestId =< NodeId of
|
||||||
true ->
|
true ->
|
||||||
NodeId;
|
NodeId;
|
||||||
|
@ -529,11 +535,11 @@ wait_for_nodes_commit(RequiredSyncs, TnxId, Delay, Remain) ->
|
||||||
end.
|
end.
|
||||||
|
|
||||||
lagging_node(TnxId) ->
|
lagging_node(TnxId) ->
|
||||||
{atomic, Nodes} = transaction(fun commit_status_trans/2, ['<', TnxId]),
|
{atomic, Nodes} = transaction(fun ?MODULE:commit_status_trans/2, ['<', TnxId]),
|
||||||
Nodes.
|
Nodes.
|
||||||
|
|
||||||
synced_nodes(TnxId) ->
|
synced_nodes(TnxId) ->
|
||||||
{atomic, Nodes} = transaction(fun commit_status_trans/2, ['>=', TnxId]),
|
{atomic, Nodes} = transaction(fun ?MODULE:commit_status_trans/2, ['>=', TnxId]),
|
||||||
Nodes.
|
Nodes.
|
||||||
|
|
||||||
commit_status_trans(Operator, TnxId) ->
|
commit_status_trans(Operator, TnxId) ->
|
||||||
|
@ -547,5 +553,5 @@ get_retry_ms() ->
|
||||||
|
|
||||||
maybe_init_tnx_id(_Node, TnxId) when TnxId < 0 -> ok;
|
maybe_init_tnx_id(_Node, TnxId) when TnxId < 0 -> ok;
|
||||||
maybe_init_tnx_id(Node, TnxId) ->
|
maybe_init_tnx_id(Node, TnxId) ->
|
||||||
{atomic, _} = transaction(fun commit/2, [Node, TnxId]),
|
{atomic, _} = transaction(fun ?MODULE:commit/2, [Node, TnxId]),
|
||||||
ok.
|
ok.
|
||||||
|
|
|
@ -30,6 +30,11 @@
|
||||||
code_change/3
|
code_change/3
|
||||||
]).
|
]).
|
||||||
|
|
||||||
|
%% Internal exports (RPC)
|
||||||
|
-export([
|
||||||
|
del_stale_mfa/1
|
||||||
|
]).
|
||||||
|
|
||||||
start_link() ->
|
start_link() ->
|
||||||
MaxHistory = emqx_conf:get(["node", "cluster_call", "max_history"], 100),
|
MaxHistory = emqx_conf:get(["node", "cluster_call", "max_history"], 100),
|
||||||
CleanupMs = emqx_conf:get(["node", "cluster_call", "cleanup_interval"], 5 * 60 * 1000),
|
CleanupMs = emqx_conf:get(["node", "cluster_call", "cleanup_interval"], 5 * 60 * 1000),
|
||||||
|
@ -56,7 +61,7 @@ handle_cast(Msg, State) ->
|
||||||
{noreply, State}.
|
{noreply, State}.
|
||||||
|
|
||||||
handle_info({timeout, TRef, del_stale_mfa}, State = #{timer := TRef, max_history := MaxHistory}) ->
|
handle_info({timeout, TRef, del_stale_mfa}, State = #{timer := TRef, max_history := MaxHistory}) ->
|
||||||
case mria:transaction(?CLUSTER_RPC_SHARD, fun del_stale_mfa/1, [MaxHistory]) of
|
case mria:transaction(?CLUSTER_RPC_SHARD, fun ?MODULE:del_stale_mfa/1, [MaxHistory]) of
|
||||||
{atomic, ok} -> ok;
|
{atomic, ok} -> ok;
|
||||||
Error -> ?SLOG(error, #{msg => "del_stale_cluster_rpc_mfa_error", error => Error})
|
Error -> ?SLOG(error, #{msg => "del_stale_cluster_rpc_mfa_error", error => Error})
|
||||||
end,
|
end,
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
%% -*- mode: erlang -*-
|
%% -*- mode: erlang -*-
|
||||||
{application, emqx_connector, [
|
{application, emqx_connector, [
|
||||||
{description, "An OTP application"},
|
{description, "An OTP application"},
|
||||||
{vsn, "0.1.3"},
|
{vsn, "0.1.4"},
|
||||||
{registered, []},
|
{registered, []},
|
||||||
{mod, {emqx_connector_app, []}},
|
{mod, {emqx_connector_app, []}},
|
||||||
{applications, [
|
{applications, [
|
||||||
|
|
|
@ -100,7 +100,7 @@ on_start(
|
||||||
{host, Host},
|
{host, Host},
|
||||||
{port, Port},
|
{port, Port},
|
||||||
{username, User},
|
{username, User},
|
||||||
{password, Password},
|
{password, emqx_secret:wrap(Password)},
|
||||||
{database, DB},
|
{database, DB},
|
||||||
{auto_reconnect, reconn_interval(AutoReconn)},
|
{auto_reconnect, reconn_interval(AutoReconn)},
|
||||||
{pool_size, PoolSize},
|
{pool_size, PoolSize},
|
||||||
|
@ -160,7 +160,7 @@ reconn_interval(false) -> false.
|
||||||
connect(Opts) ->
|
connect(Opts) ->
|
||||||
Host = proplists:get_value(host, Opts),
|
Host = proplists:get_value(host, Opts),
|
||||||
Username = proplists:get_value(username, Opts),
|
Username = proplists:get_value(username, Opts),
|
||||||
Password = proplists:get_value(password, Opts),
|
Password = emqx_secret:unwrap(proplists:get_value(password, Opts)),
|
||||||
PrepareStatement = proplists:get_value(prepare_statement, Opts),
|
PrepareStatement = proplists:get_value(prepare_statement, Opts),
|
||||||
case epgsql:connect(Host, Username, Password, conn_opts(Opts)) of
|
case epgsql:connect(Host, Username, Password, conn_opts(Opts)) of
|
||||||
{ok, Conn} ->
|
{ok, Conn} ->
|
||||||
|
|
|
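The connector now keeps the PostgreSQL password wrapped in the pool options and only unwraps it right before epgsql:connect/4, so the plaintext does not end up in process state, supervisor reports, or crash logs. A hedged sketch of the round trip, assuming emqx_secret:wrap/1 simply closes the value inside a zero-arity fun and unwrap/1 evaluates it (plain terms are expected to pass through unwrap/1 unchanged):

    Wrapped = emqx_secret:wrap(<<"s3cr3t">>),    %% opaque, prints as #Fun<...>
    <<"s3cr3t">> = emqx_secret:unwrap(Wrapped),  %% recovered only where needed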
@ -197,7 +197,7 @@ its own from which a browser should permit loading resources."""
|
||||||
zh: "多语言支持"
|
zh: "多语言支持"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
bootstrap_user {
|
bootstrap_users_file {
|
||||||
desc {
|
desc {
|
||||||
en: "Initialize users file."
|
en: "Initialize users file."
|
||||||
zh: "初始化用户文件"
|
zh: "初始化用户文件"
|
||||||
|
|
|
@ -52,7 +52,7 @@
|
||||||
-export([
|
-export([
|
||||||
add_default_user/0,
|
add_default_user/0,
|
||||||
default_username/0,
|
default_username/0,
|
||||||
add_bootstrap_user/0
|
add_bootstrap_users/0
|
||||||
]).
|
]).
|
||||||
|
|
||||||
-type emqx_admin() :: #?ADMIN{}.
|
-type emqx_admin() :: #?ADMIN{}.
|
||||||
|
@ -85,16 +85,16 @@ mnesia(boot) ->
|
||||||
add_default_user() ->
|
add_default_user() ->
|
||||||
add_default_user(binenv(default_username), binenv(default_password)).
|
add_default_user(binenv(default_username), binenv(default_password)).
|
||||||
|
|
||||||
-spec add_bootstrap_user() -> ok | {error, _}.
|
-spec add_bootstrap_users() -> ok | {error, _}.
|
||||||
add_bootstrap_user() ->
|
add_bootstrap_users() ->
|
||||||
case emqx:get_config([dashboard, bootstrap_user], undefined) of
|
case emqx:get_config([dashboard, bootstrap_users_file], undefined) of
|
||||||
undefined ->
|
undefined ->
|
||||||
ok;
|
ok;
|
||||||
File ->
|
File ->
|
||||||
case mnesia:table_info(?ADMIN, size) of
|
case mnesia:table_info(?ADMIN, size) of
|
||||||
0 ->
|
0 ->
|
||||||
?SLOG(debug, #{msg => "Add dashboard bootstrap users", file => File}),
|
?SLOG(debug, #{msg => "Add dashboard bootstrap users", file => File}),
|
||||||
add_bootstrap_user(File);
|
add_bootstrap_users(File);
|
||||||
_ ->
|
_ ->
|
||||||
ok
|
ok
|
||||||
end
|
end
|
||||||
|
@ -312,7 +312,7 @@ add_default_user(Username, Password) ->
|
||||||
_ -> {ok, default_user_exists}
|
_ -> {ok, default_user_exists}
|
||||||
end.
|
end.
|
||||||
|
|
||||||
add_bootstrap_user(File) ->
|
add_bootstrap_users(File) ->
|
||||||
case file:open(File, [read]) of
|
case file:open(File, [read]) of
|
||||||
{ok, Dev} ->
|
{ok, Dev} ->
|
||||||
{ok, MP} = re:compile(<<"(\.+):(\.+$)">>, [ungreedy]),
|
{ok, MP} = re:compile(<<"(\.+):(\.+$)">>, [ungreedy]),
|
||||||
|
@ -324,7 +324,12 @@ add_bootstrap_user(File) ->
|
||||||
after
|
after
|
||||||
file:close(Dev)
|
file:close(Dev)
|
||||||
end;
|
end;
|
||||||
Error ->
|
{error, Reason} = Error ->
|
||||||
|
?SLOG(error, #{
|
||||||
|
msg => "failed to open the dashboard bootstrap users file",
|
||||||
|
file => File,
|
||||||
|
reason => Reason
|
||||||
|
}),
|
||||||
Error
|
Error
|
||||||
end.
|
end.
|
||||||
|
|
||||||
|
|
|
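Given the ungreedy (\.+):(\.+$) pattern compiled above, each line of the bootstrap users file is split at the first colon into a username and the rest of the line as the password, so the file is expected to hold one user per line, e.g. (illustrative credentials only):

    admin1:public1
    admin2:public2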
@ -31,7 +31,7 @@ start(_StartType, _StartArgs) ->
|
||||||
case emqx_dashboard:start_listeners() of
|
case emqx_dashboard:start_listeners() of
|
||||||
ok ->
|
ok ->
|
||||||
emqx_dashboard_cli:load(),
|
emqx_dashboard_cli:load(),
|
||||||
case emqx_dashboard_admin:add_bootstrap_user() of
|
case emqx_dashboard_admin:add_bootstrap_users() of
|
||||||
ok ->
|
ok ->
|
||||||
{ok, _} = emqx_dashboard_admin:add_default_user(),
|
{ok, _} = emqx_dashboard_admin:add_default_user(),
|
||||||
{ok, Sup};
|
{ok, Sup};
|
||||||
|
|
|
@ -55,7 +55,8 @@ fields("dashboard") ->
|
||||||
)},
|
)},
|
||||||
{cors, fun cors/1},
|
{cors, fun cors/1},
|
||||||
{i18n_lang, fun i18n_lang/1},
|
{i18n_lang, fun i18n_lang/1},
|
||||||
{bootstrap_user, ?HOCON(binary(), #{desc => ?DESC(bootstrap_user), required => false})}
|
{bootstrap_users_file,
|
||||||
|
?HOCON(binary(), #{desc => ?DESC(bootstrap_users_file), required => false})}
|
||||||
];
|
];
|
||||||
fields("listeners") ->
|
fields("listeners") ->
|
||||||
[
|
[
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
%% -*- mode: erlang -*-
|
%% -*- mode: erlang -*-
|
||||||
{application, emqx_exhook, [
|
{application, emqx_exhook, [
|
||||||
{description, "EMQX Extension for Hook"},
|
{description, "EMQX Extension for Hook"},
|
||||||
{vsn, "5.0.2"},
|
{vsn, "5.0.3"},
|
||||||
{modules, []},
|
{modules, []},
|
||||||
{registered, []},
|
{registered, []},
|
||||||
{mod, {emqx_exhook_app, []}},
|
{mod, {emqx_exhook_app, []}},
|
||||||
|
|
|
@ -231,7 +231,7 @@ resolve_hookspec(HookSpecs) when is_list(HookSpecs) ->
|
||||||
end,
|
end,
|
||||||
case {lists:member(Name, AvailableHooks), lists:member(Name, MessageHooks)} of
|
case {lists:member(Name, AvailableHooks), lists:member(Name, MessageHooks)} of
|
||||||
{false, _} ->
|
{false, _} ->
|
||||||
error({unknown_hookpoint, Name});
|
error({unknown_hookpoint, Name0});
|
||||||
{true, false} ->
|
{true, false} ->
|
||||||
Acc#{Name => #{}};
|
Acc#{Name => #{}};
|
||||||
{true, true} ->
|
{true, true} ->
|
||||||
|
|
|
@ -28,7 +28,7 @@
|
||||||
|
|
||||||
-export([request/2]).
|
-export([request/2]).
|
||||||
|
|
||||||
-define(PREFIX, "/gateway/coap/clients/:clientid").
|
-define(PREFIX, "/gateways/coap/clients/:clientid").
|
||||||
|
|
||||||
-import(hoconsc, [mk/2, enum/1]).
|
-import(hoconsc, [mk/2, enum/1]).
|
||||||
-import(emqx_dashboard_swagger, [error_codes/2]).
|
-import(emqx_dashboard_swagger, [error_codes/2]).
|
||||||
|
@ -42,13 +42,13 @@ api_spec() ->
|
||||||
emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true, translate_body => true}).
|
emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true, translate_body => true}).
|
||||||
|
|
||||||
paths() ->
|
paths() ->
|
||||||
[?PREFIX ++ "/request"].
|
emqx_gateway_utils:make_deprecated_paths([?PREFIX ++ "/request"]).
|
||||||
|
|
||||||
schema(?PREFIX ++ "/request") ->
|
schema(?PREFIX ++ "/request") ->
|
||||||
#{
|
#{
|
||||||
operationId => request,
|
operationId => request,
|
||||||
post => #{
|
post => #{
|
||||||
tags => [<<"CoAP gateway">>],
|
tags => [<<"CoAP">>],
|
||||||
desc => ?DESC(send_coap_request),
|
desc => ?DESC(send_coap_request),
|
||||||
parameters => request_parameters(),
|
parameters => request_parameters(),
|
||||||
requestBody => request_body(),
|
requestBody => request_body(),
|
||||||
|
@ -60,7 +60,9 @@ schema(?PREFIX ++ "/request") ->
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}.
|
};
|
||||||
|
schema(Path) ->
|
||||||
|
emqx_gateway_utils:make_compatible_schema(Path, fun schema/1).
|
||||||
|
|
||||||
request(post, #{body := Body, bindings := Bindings}) ->
|
request(post, #{body := Body, bindings := Bindings}) ->
|
||||||
ClientId = maps:get(clientid, Bindings, undefined),
|
ClientId = maps:get(clientid, Bindings, undefined),
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
%% -*- mode: erlang -*-
|
%% -*- mode: erlang -*-
|
||||||
{application, emqx_gateway, [
|
{application, emqx_gateway, [
|
||||||
{description, "The Gateway management application"},
|
{description, "The Gateway management application"},
|
||||||
{vsn, "0.1.3"},
|
{vsn, "0.1.4"},
|
||||||
{registered, []},
|
{registered, []},
|
||||||
{mod, {emqx_gateway_app, []}},
|
{mod, {emqx_gateway_app, []}},
|
||||||
{applications, [kernel, stdlib, grpc, emqx, emqx_authn]},
|
{applications, [kernel, stdlib, grpc, emqx, emqx_authn]},
|
||||||
|
|
|
@ -61,10 +61,10 @@ api_spec() ->
|
||||||
emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true}).
|
emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true}).
|
||||||
|
|
||||||
paths() ->
|
paths() ->
|
||||||
[
|
emqx_gateway_utils:make_deprecated_paths([
|
||||||
"/gateway",
|
"/gateways",
|
||||||
"/gateway/:name"
|
"/gateways/:name"
|
||||||
].
|
]).
|
||||||
|
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% http handlers
|
%% http handlers
|
||||||
|
@ -159,7 +159,7 @@ gateway_insta(put, #{
|
||||||
%% Swagger defines
|
%% Swagger defines
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
schema("/gateway") ->
|
schema("/gateways") ->
|
||||||
#{
|
#{
|
||||||
'operationId' => gateway,
|
'operationId' => gateway,
|
||||||
get =>
|
get =>
|
||||||
|
@ -185,7 +185,7 @@ schema("/gateway") ->
|
||||||
?STANDARD_RESP(#{201 => schema_gateways_conf()})
|
?STANDARD_RESP(#{201 => schema_gateways_conf()})
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
schema("/gateway/:name") ->
|
schema("/gateways/:name") ->
|
||||||
#{
|
#{
|
||||||
'operationId' => gateway_insta,
|
'operationId' => gateway_insta,
|
||||||
get =>
|
get =>
|
||||||
|
@ -210,7 +210,9 @@ schema("/gateway/:name") ->
|
||||||
responses =>
|
responses =>
|
||||||
?STANDARD_RESP(#{200 => schema_gateways_conf()})
|
?STANDARD_RESP(#{200 => schema_gateways_conf()})
|
||||||
}
|
}
|
||||||
}.
|
};
|
||||||
|
schema(Path) ->
|
||||||
|
emqx_gateway_utils:make_compatible_schema(Path, fun schema/1).
|
||||||
|
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% params defines
|
%% params defines
|
||||||
|
|
|
@ -60,11 +60,11 @@ api_spec() ->
|
||||||
emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true}).
|
emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true}).
|
||||||
|
|
||||||
paths() ->
|
paths() ->
|
||||||
[
|
emqx_gateway_utils:make_deprecated_paths([
|
||||||
"/gateway/:name/authentication",
|
"/gateways/:name/authentication",
|
||||||
"/gateway/:name/authentication/users",
|
"/gateways/:name/authentication/users",
|
||||||
"/gateway/:name/authentication/users/:uid"
|
"/gateways/:name/authentication/users/:uid"
|
||||||
].
|
]).
|
||||||
|
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% http handlers
|
%% http handlers
|
||||||
|
@ -176,7 +176,7 @@ parse_qstring(Qs) ->
|
||||||
%% Swagger defines
|
%% Swagger defines
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
schema("/gateway/:name/authentication") ->
|
schema("/gateways/:name/authentication") ->
|
||||||
#{
|
#{
|
||||||
'operationId' => authn,
|
'operationId' => authn,
|
||||||
get =>
|
get =>
|
||||||
|
@ -215,7 +215,7 @@ schema("/gateway/:name/authentication") ->
|
||||||
?STANDARD_RESP(#{204 => <<"Deleted">>})
|
?STANDARD_RESP(#{204 => <<"Deleted">>})
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
schema("/gateway/:name/authentication/users") ->
|
schema("/gateways/:name/authentication/users") ->
|
||||||
#{
|
#{
|
||||||
'operationId' => users,
|
'operationId' => users,
|
||||||
get =>
|
get =>
|
||||||
|
@ -253,7 +253,7 @@ schema("/gateway/:name/authentication/users") ->
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
schema("/gateway/:name/authentication/users/:uid") ->
|
schema("/gateways/:name/authentication/users/:uid") ->
|
||||||
#{
|
#{
|
||||||
'operationId' => users_insta,
|
'operationId' => users_insta,
|
||||||
get =>
|
get =>
|
||||||
|
@ -298,8 +298,9 @@ schema("/gateway/:name/authentication/users/:uid") ->
|
||||||
responses =>
|
responses =>
|
||||||
?STANDARD_RESP(#{204 => <<"User Deleted">>})
|
?STANDARD_RESP(#{204 => <<"User Deleted">>})
|
||||||
}
|
}
|
||||||
}.
|
};
|
||||||
|
schema(Path) ->
|
||||||
|
emqx_gateway_utils:make_compatible_schema(Path, fun schema/1).
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% params defines
|
%% params defines
|
||||||
|
|
||||||
|
|
|
@ -53,10 +53,10 @@ api_spec() ->
|
||||||
emqx_dashboard_swagger:spec(?MODULE, #{check_schema => false}).
|
emqx_dashboard_swagger:spec(?MODULE, #{check_schema => false}).
|
||||||
|
|
||||||
paths() ->
|
paths() ->
|
||||||
[
|
emqx_gateway_utils:make_deprecated_paths([
|
||||||
"/gateway/:name/authentication/import_users",
|
"/gateways/:name/authentication/import_users",
|
||||||
"/gateway/:name/listeners/:id/authentication/import_users"
|
"/gateways/:name/listeners/:id/authentication/import_users"
|
||||||
].
|
]).
|
||||||
|
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% http handlers
|
%% http handlers
|
||||||
|
@ -117,7 +117,7 @@ import_listener_users(post, #{
|
||||||
%% Swagger defines
|
%% Swagger defines
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
schema("/gateway/:name/authentication/import_users") ->
|
schema("/gateways/:name/authentication/import_users") ->
|
||||||
#{
|
#{
|
||||||
'operationId' => import_users,
|
'operationId' => import_users,
|
||||||
post =>
|
post =>
|
||||||
|
@ -129,7 +129,7 @@ schema("/gateway/:name/authentication/import_users") ->
|
||||||
?STANDARD_RESP(#{204 => <<"Imported">>})
|
?STANDARD_RESP(#{204 => <<"Imported">>})
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
schema("/gateway/:name/listeners/:id/authentication/import_users") ->
|
schema("/gateways/:name/listeners/:id/authentication/import_users") ->
|
||||||
#{
|
#{
|
||||||
'operationId' => import_listener_users,
|
'operationId' => import_listener_users,
|
||||||
post =>
|
post =>
|
||||||
|
@ -141,8 +141,9 @@ schema("/gateway/:name/listeners/:id/authentication/import_users") ->
|
||||||
responses =>
|
responses =>
|
||||||
?STANDARD_RESP(#{204 => <<"Imported">>})
|
?STANDARD_RESP(#{204 => <<"Imported">>})
|
||||||
}
|
}
|
||||||
}.
|
};
|
||||||
|
schema(Path) ->
|
||||||
|
emqx_gateway_utils:make_compatible_schema(Path, fun schema/1).
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% params defines
|
%% params defines
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
|
|
|
@ -67,12 +67,12 @@ api_spec() ->
|
||||||
emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true, translate_body => true}).
|
emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true, translate_body => true}).
|
||||||
|
|
||||||
paths() ->
|
paths() ->
|
||||||
[
|
emqx_gateway_utils:make_deprecated_paths([
|
||||||
"/gateway/:name/clients",
|
"/gateways/:name/clients",
|
||||||
"/gateway/:name/clients/:clientid",
|
"/gateways/:name/clients/:clientid",
|
||||||
"/gateway/:name/clients/:clientid/subscriptions",
|
"/gateways/:name/clients/:clientid/subscriptions",
|
||||||
"/gateway/:name/clients/:clientid/subscriptions/:topic"
|
"/gateways/:name/clients/:clientid/subscriptions/:topic"
|
||||||
].
|
]).
|
||||||
|
|
||||||
-define(CLIENT_QSCHEMA, [
|
-define(CLIENT_QSCHEMA, [
|
||||||
{<<"node">>, atom},
|
{<<"node">>, atom},
|
||||||
|
@ -462,7 +462,7 @@ conn_state_to_connected(_) -> false.
|
||||||
%% Swagger defines
|
%% Swagger defines
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
schema("/gateway/:name/clients") ->
|
schema("/gateways/:name/clients") ->
|
||||||
#{
|
#{
|
||||||
'operationId' => clients,
|
'operationId' => clients,
|
||||||
get =>
|
get =>
|
||||||
|
@ -473,7 +473,7 @@ schema("/gateway/:name/clients") ->
|
||||||
?STANDARD_RESP(#{200 => schema_client_list()})
|
?STANDARD_RESP(#{200 => schema_client_list()})
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
schema("/gateway/:name/clients/:clientid") ->
|
schema("/gateways/:name/clients/:clientid") ->
|
||||||
#{
|
#{
|
||||||
'operationId' => clients_insta,
|
'operationId' => clients_insta,
|
||||||
get =>
|
get =>
|
||||||
|
@ -491,7 +491,7 @@ schema("/gateway/:name/clients/:clientid") ->
|
||||||
?STANDARD_RESP(#{204 => <<"Kicked">>})
|
?STANDARD_RESP(#{204 => <<"Kicked">>})
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
schema("/gateway/:name/clients/:clientid/subscriptions") ->
|
schema("/gateways/:name/clients/:clientid/subscriptions") ->
|
||||||
#{
|
#{
|
||||||
'operationId' => subscriptions,
|
'operationId' => subscriptions,
|
||||||
get =>
|
get =>
|
||||||
|
@ -527,7 +527,7 @@ schema("/gateway/:name/clients/:clientid/subscriptions") ->
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
schema("/gateway/:name/clients/:clientid/subscriptions/:topic") ->
|
schema("/gateways/:name/clients/:clientid/subscriptions/:topic") ->
|
||||||
#{
|
#{
|
||||||
'operationId' => subscriptions,
|
'operationId' => subscriptions,
|
||||||
delete =>
|
delete =>
|
||||||
|
@ -537,7 +537,9 @@ schema("/gateway/:name/clients/:clientid/subscriptions/:topic") ->
|
||||||
responses =>
|
responses =>
|
||||||
?STANDARD_RESP(#{204 => <<"Unsubscribed">>})
|
?STANDARD_RESP(#{204 => <<"Unsubscribed">>})
|
||||||
}
|
}
|
||||||
}.
|
};
|
||||||
|
schema(Path) ->
|
||||||
|
emqx_gateway_utils:make_compatible_schema(Path, fun schema/1).
|
||||||
|
|
||||||
params_client_query() ->
|
params_client_query() ->
|
||||||
params_gateway_name_in_path() ++
|
params_gateway_name_in_path() ++
|
||||||
|
|
|
@ -68,13 +68,13 @@ api_spec() ->
|
||||||
emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true}).
|
emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true}).
|
||||||
|
|
||||||
paths() ->
|
paths() ->
|
||||||
[
|
emqx_gateway_utils:make_deprecated_paths([
|
||||||
"/gateway/:name/listeners",
|
"/gateways/:name/listeners",
|
||||||
"/gateway/:name/listeners/:id",
|
"/gateways/:name/listeners/:id",
|
||||||
"/gateway/:name/listeners/:id/authentication",
|
"/gateways/:name/listeners/:id/authentication",
|
||||||
"/gateway/:name/listeners/:id/authentication/users",
|
"/gateways/:name/listeners/:id/authentication/users",
|
||||||
"/gateway/:name/listeners/:id/authentication/users/:uid"
|
"/gateways/:name/listeners/:id/authentication/users/:uid"
|
||||||
].
|
]).
|
||||||
|
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% http handlers
|
%% http handlers
|
||||||
|
@ -353,7 +353,7 @@ bind2str(Listener = #{<<"bind">> := Bind}) ->
|
||||||
%% Swagger defines
|
%% Swagger defines
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
schema("/gateway/:name/listeners") ->
|
schema("/gateways/:name/listeners") ->
|
||||||
#{
|
#{
|
||||||
'operationId' => listeners,
|
'operationId' => listeners,
|
||||||
get =>
|
get =>
|
||||||
|
@ -391,7 +391,7 @@ schema("/gateway/:name/listeners") ->
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
schema("/gateway/:name/listeners/:id") ->
|
schema("/gateways/:name/listeners/:id") ->
|
||||||
#{
|
#{
|
||||||
'operationId' => listeners_insta,
|
'operationId' => listeners_insta,
|
||||||
get =>
|
get =>
|
||||||
|
@ -437,7 +437,7 @@ schema("/gateway/:name/listeners/:id") ->
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
schema("/gateway/:name/listeners/:id/authentication") ->
|
schema("/gateways/:name/listeners/:id/authentication") ->
|
||||||
#{
|
#{
|
||||||
'operationId' => listeners_insta_authn,
|
'operationId' => listeners_insta_authn,
|
||||||
get =>
|
get =>
|
||||||
|
@ -480,7 +480,7 @@ schema("/gateway/:name/listeners/:id/authentication") ->
|
||||||
?STANDARD_RESP(#{200 => <<"Deleted">>})
|
?STANDARD_RESP(#{200 => <<"Deleted">>})
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
schema("/gateway/:name/listeners/:id/authentication/users") ->
|
schema("/gateways/:name/listeners/:id/authentication/users") ->
|
||||||
#{
|
#{
|
||||||
'operationId' => users,
|
'operationId' => users,
|
||||||
get =>
|
get =>
|
||||||
|
@ -519,7 +519,7 @@ schema("/gateway/:name/listeners/:id/authentication/users") ->
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
schema("/gateway/:name/listeners/:id/authentication/users/:uid") ->
|
schema("/gateways/:name/listeners/:id/authentication/users/:uid") ->
|
||||||
#{
|
#{
|
||||||
'operationId' => users_insta,
|
'operationId' => users_insta,
|
||||||
get =>
|
get =>
|
||||||
|
@ -567,8 +567,9 @@ schema("/gateway/:name/listeners/:id/authentication/users/:uid") ->
|
||||||
responses =>
|
responses =>
|
||||||
?STANDARD_RESP(#{204 => <<"Deleted">>})
|
?STANDARD_RESP(#{204 => <<"Deleted">>})
|
||||||
}
|
}
|
||||||
}.
|
};
|
||||||
|
schema(Path) ->
|
||||||
|
emqx_gateway_utils:make_compatible_schema(Path, fun schema/1).
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% params defines
|
%% params defines
|
||||||
|
|
||||||
|
|
|
@ -42,6 +42,11 @@
|
||||||
code_change/3
|
code_change/3
|
||||||
]).
|
]).
|
||||||
|
|
||||||
|
%% Internal exports (RPC)
|
||||||
|
-export([
|
||||||
|
do_cleanup_channels/2
|
||||||
|
]).
|
||||||
|
|
||||||
-define(CM_SHARD, emqx_gateway_cm_shard).
|
-define(CM_SHARD, emqx_gateway_cm_shard).
|
||||||
-define(LOCK, {?MODULE, cleanup_down}).
|
-define(LOCK, {?MODULE, cleanup_down}).
|
||||||
|
|
||||||
|
@ -148,7 +153,7 @@ cleanup_channels(Node, Name) ->
|
||||||
global:trans(
|
global:trans(
|
||||||
{?LOCK, self()},
|
{?LOCK, self()},
|
||||||
fun() ->
|
fun() ->
|
||||||
mria:transaction(?CM_SHARD, fun do_cleanup_channels/2, [Node, Tab])
|
mria:transaction(?CM_SHARD, fun ?MODULE:do_cleanup_channels/2, [Node, Tab])
|
||||||
end
|
end
|
||||||
).
|
).
|
||||||
|
|
||||||
|
|
|
@ -365,8 +365,7 @@ fields(ssl_server_opts) ->
|
||||||
#{
|
#{
|
||||||
depth => 10,
|
depth => 10,
|
||||||
reuse_sessions => true,
|
reuse_sessions => true,
|
||||||
versions => tls_all_available,
|
versions => tls_all_available
|
||||||
ciphers => tls_all_available
|
|
||||||
},
|
},
|
||||||
true
|
true
|
||||||
);
|
);
|
||||||
|
@ -502,8 +501,7 @@ fields(dtls_opts) ->
|
||||||
#{
|
#{
|
||||||
depth => 10,
|
depth => 10,
|
||||||
reuse_sessions => true,
|
reuse_sessions => true,
|
||||||
versions => dtls_all_available,
|
versions => dtls_all_available
|
||||||
ciphers => dtls_all_available
|
|
||||||
},
|
},
|
||||||
false
|
false
|
||||||
).
|
).
|
||||||
|
|
|
@ -44,7 +44,9 @@
|
||||||
parse_listener_id/1,
|
parse_listener_id/1,
|
||||||
is_running/2,
|
is_running/2,
|
||||||
global_chain/1,
|
global_chain/1,
|
||||||
listener_chain/3
|
listener_chain/3,
|
||||||
|
make_deprecated_paths/1,
|
||||||
|
make_compatible_schema/2
|
||||||
]).
|
]).
|
||||||
|
|
||||||
-export([stringfy/1]).
|
-export([stringfy/1]).
|
||||||
|
@ -455,14 +457,12 @@ esockd_access_rules(StrRules) ->
|
||||||
[Access(R) || R <- StrRules].
|
[Access(R) || R <- StrRules].
|
||||||
|
|
||||||
ssl_opts(Name, Opts) ->
|
ssl_opts(Name, Opts) ->
|
||||||
maps:to_list(
|
Type =
|
||||||
emqx_tls_lib:drop_tls13_for_old_otp(
|
case Name of
|
||||||
maps:without(
|
ssl -> tls;
|
||||||
[enable],
|
dtls -> dtls
|
||||||
maps:get(Name, Opts, #{})
|
end,
|
||||||
)
|
emqx_tls_lib:to_server_opts(Type, maps:get(Name, Opts, #{})).
|
||||||
)
|
|
||||||
).
|
|
||||||
|
|
||||||
sock_opts(Name, Opts) ->
|
sock_opts(Name, Opts) ->
|
||||||
maps:to_list(
|
maps:to_list(
|
||||||
|
@ -540,3 +540,36 @@ default_subopts() ->
|
||||||
qos => 0,
|
qos => 0,
|
||||||
is_new => true
|
is_new => true
|
||||||
}.
|
}.
|
||||||
|
|
||||||
|
%% Since 5.0.8, the API path of the gateway has been changed from "gateway" to "gateways"
|
||||||
|
%% and we need to be compatible with the old path
|
||||||
|
get_compatible_path("/gateway") ->
|
||||||
|
"/gateways";
|
||||||
|
get_compatible_path("/gateway/" ++ Rest) ->
|
||||||
|
"/gateways/" ++ Rest.
|
||||||
|
|
||||||
|
get_deprecated_path("/gateways") ->
|
||||||
|
"/gateway";
|
||||||
|
get_deprecated_path("/gateways/" ++ Rest) ->
|
||||||
|
"/gateway/" ++ Rest.
|
||||||
|
|
||||||
|
make_deprecated_paths(Paths) ->
|
||||||
|
Paths ++ [get_deprecated_path(Path) || Path <- Paths].
|
||||||
|
|
||||||
|
make_compatible_schema(Path, SchemaFun) ->
|
||||||
|
OldPath = get_compatible_path(Path),
|
||||||
|
make_compatible_schema2(OldPath, SchemaFun).
|
||||||
|
|
||||||
|
make_compatible_schema2(Path, SchemaFun) ->
|
||||||
|
Schema = SchemaFun(Path),
|
||||||
|
maps:map(
|
||||||
|
fun(Key, Value) ->
|
||||||
|
case lists:member(Key, [get, delete, put, post]) of
|
||||||
|
true ->
|
||||||
|
Value#{deprecated => true};
|
||||||
|
_ ->
|
||||||
|
Value
|
||||||
|
end
|
||||||
|
end,
|
||||||
|
Schema
|
||||||
|
).
|
||||||
|
|
|
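With these helpers, each gateway API module can keep publishing the new plural paths while still serving the pre-5.0.8 singular ones: make_deprecated_paths/1 appends the old alias for every new path, and the schema/1 fallback clause routes the alias through make_compatible_schema/2, which reuses the new path's schema with each HTTP method marked deprecated. A small sketch of the expected results, as it would read inside one of the gateway API modules (paths are illustrative):

    ["/gateways/:name", "/gateway/:name"] =
        emqx_gateway_utils:make_deprecated_paths(["/gateways/:name"]),

    %% For the alias, the module's own schema("/gateways/:name") is reused and
    %% each of the get/put/post/delete maps gains deprecated => true.
    #{get := #{deprecated := true}} =
        emqx_gateway_utils:make_compatible_schema("/gateway/:name", fun schema/1).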
@ -25,7 +25,7 @@
|
||||||
|
|
||||||
-export([lookup/2, observe/2, read/2, write/2]).
|
-export([lookup/2, observe/2, read/2, write/2]).
|
||||||
|
|
||||||
-define(PATH(Suffix), "/gateway/lwm2m/clients/:clientid" Suffix).
|
-define(PATH(Suffix), "/gateways/lwm2m/clients/:clientid" Suffix).
|
||||||
-define(DATA_TYPE, ['Integer', 'Float', 'Time', 'String', 'Boolean', 'Opaque', 'Objlnk']).
|
-define(DATA_TYPE, ['Integer', 'Float', 'Time', 'String', 'Boolean', 'Opaque', 'Objlnk']).
|
||||||
|
|
||||||
-import(hoconsc, [mk/2, ref/1, ref/2]).
|
-import(hoconsc, [mk/2, ref/1, ref/2]).
|
||||||
|
@ -37,13 +37,15 @@ api_spec() ->
|
||||||
emqx_dashboard_swagger:spec(?MODULE).
|
emqx_dashboard_swagger:spec(?MODULE).
|
||||||
|
|
||||||
paths() ->
|
paths() ->
|
||||||
[?PATH("/lookup"), ?PATH("/observe"), ?PATH("/read"), ?PATH("/write")].
|
emqx_gateway_utils:make_deprecated_paths([
|
||||||
|
?PATH("/lookup"), ?PATH("/observe"), ?PATH("/read"), ?PATH("/write")
|
||||||
|
]).
|
||||||
|
|
||||||
schema(?PATH("/lookup")) ->
|
schema(?PATH("/lookup")) ->
|
||||||
#{
|
#{
|
||||||
'operationId' => lookup,
|
'operationId' => lookup,
|
||||||
get => #{
|
get => #{
|
||||||
tags => [<<"lwm2m">>],
|
tags => [<<"LwM2M">>],
|
||||||
desc => ?DESC(lookup_resource),
|
desc => ?DESC(lookup_resource),
|
||||||
parameters => [
|
parameters => [
|
||||||
{clientid, mk(binary(), #{in => path, example => "urn:oma:lwm2m:oma:2"})},
|
{clientid, mk(binary(), #{in => path, example => "urn:oma:lwm2m:oma:2"})},
|
||||||
|
@ -67,7 +69,7 @@ schema(?PATH("/observe")) ->
|
||||||
#{
|
#{
|
||||||
'operationId' => observe,
|
'operationId' => observe,
|
||||||
post => #{
|
post => #{
|
||||||
tags => [<<"lwm2m">>],
|
tags => [<<"LwM2M">>],
|
||||||
desc => ?DESC(observe_resource),
|
desc => ?DESC(observe_resource),
|
||||||
parameters => [
|
parameters => [
|
||||||
{clientid, mk(binary(), #{in => path, example => "urn:oma:lwm2m:oma:2"})},
|
{clientid, mk(binary(), #{in => path, example => "urn:oma:lwm2m:oma:2"})},
|
||||||
|
@ -85,7 +87,7 @@ schema(?PATH("/read")) ->
|
||||||
#{
|
#{
|
||||||
'operationId' => read,
|
'operationId' => read,
|
||||||
post => #{
|
post => #{
|
||||||
tags => [<<"lwm2m">>],
|
tags => [<<"LwM2M">>],
|
||||||
desc => ?DESC(read_resource),
|
desc => ?DESC(read_resource),
|
||||||
parameters => [
|
parameters => [
|
||||||
{clientid, mk(binary(), #{in => path, example => "urn:oma:lwm2m:oma:2"})},
|
{clientid, mk(binary(), #{in => path, example => "urn:oma:lwm2m:oma:2"})},
|
||||||
|
@ -102,7 +104,7 @@ schema(?PATH("/write")) ->
|
||||||
'operationId' => write,
|
'operationId' => write,
|
||||||
post => #{
|
post => #{
|
||||||
desc => ?DESC(write_resource),
|
desc => ?DESC(write_resource),
|
||||||
tags => [<<"lwm2m">>],
|
tags => [<<"LwM2M">>],
|
||||||
parameters => [
|
parameters => [
|
||||||
{clientid, mk(binary(), #{in => path, example => "urn:oma:lwm2m:oma:2"})},
|
{clientid, mk(binary(), #{in => path, example => "urn:oma:lwm2m:oma:2"})},
|
||||||
{path, mk(binary(), #{in => query, required => true, example => "/3/0/7"})},
|
{path, mk(binary(), #{in => query, required => true, example => "/3/0/7"})},
|
||||||
|
@ -118,7 +120,9 @@ schema(?PATH("/write")) ->
|
||||||
404 => error_codes(['CLIENT_NOT_FOUND'], <<"Clientid not found">>)
|
404 => error_codes(['CLIENT_NOT_FOUND'], <<"Clientid not found">>)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}.
|
};
|
||||||
|
schema(Path) ->
|
||||||
|
emqx_gateway_utils:make_compatible_schema(Path, fun schema/1).
|
||||||
|
|
||||||
fields(resource) ->
|
fields(resource) ->
|
||||||
[
|
[
|
||||||
|
|
|
@ -46,6 +46,11 @@
|
||||||
code_change/3
|
code_change/3
|
||||||
]).
|
]).
|
||||||
|
|
||||||
|
%% Internal exports (RPC)
|
||||||
|
-export([
|
||||||
|
do_register/4
|
||||||
|
]).
|
||||||
|
|
||||||
-export([lookup_name/1]).
|
-export([lookup_name/1]).
|
||||||
|
|
||||||
-define(SN_SHARD, emqx_sn_shard).
|
-define(SN_SHARD, emqx_sn_shard).
|
||||||
|
@ -173,33 +178,11 @@ handle_call(
|
||||||
TopicId when TopicId >= 16#FFFF ->
|
TopicId when TopicId >= 16#FFFF ->
|
||||||
{reply, {error, too_large}, State};
|
{reply, {error, too_large}, State};
|
||||||
TopicId ->
|
TopicId ->
|
||||||
Fun = fun() ->
|
case
|
||||||
mnesia:write(
|
mria:transaction(?SN_SHARD, fun ?MODULE:do_register/4, [
|
||||||
Tab,
|
Tab, ClientId, TopicId, TopicName
|
||||||
#emqx_sn_registry{
|
])
|
||||||
key = {ClientId, next_topic_id},
|
of
|
||||||
value = TopicId + 1
|
|
||||||
},
|
|
||||||
write
|
|
||||||
),
|
|
||||||
mnesia:write(
|
|
||||||
Tab,
|
|
||||||
#emqx_sn_registry{
|
|
||||||
key = {ClientId, TopicName},
|
|
||||||
value = TopicId
|
|
||||||
},
|
|
||||||
write
|
|
||||||
),
|
|
||||||
mnesia:write(
|
|
||||||
Tab,
|
|
||||||
#emqx_sn_registry{
|
|
||||||
key = {ClientId, TopicId},
|
|
||||||
value = TopicName
|
|
||||||
},
|
|
||||||
write
|
|
||||||
)
|
|
||||||
end,
|
|
||||||
case mria:transaction(?SN_SHARD, Fun) of
|
|
||||||
{atomic, ok} ->
|
{atomic, ok} ->
|
||||||
{reply, TopicId, State};
|
{reply, TopicId, State};
|
||||||
{aborted, Error} ->
|
{aborted, Error} ->
|
||||||
|
@ -248,6 +231,32 @@ terminate(_Reason, _State) ->
|
||||||
code_change(_OldVsn, State, _Extra) ->
|
code_change(_OldVsn, State, _Extra) ->
|
||||||
{ok, State}.
|
{ok, State}.
|
||||||
|
|
||||||
|
do_register(Tab, ClientId, TopicId, TopicName) ->
|
||||||
|
mnesia:write(
|
||||||
|
Tab,
|
||||||
|
#emqx_sn_registry{
|
||||||
|
key = {ClientId, next_topic_id},
|
||||||
|
value = TopicId + 1
|
||||||
|
},
|
||||||
|
write
|
||||||
|
),
|
||||||
|
mnesia:write(
|
||||||
|
Tab,
|
||||||
|
#emqx_sn_registry{
|
||||||
|
key = {ClientId, TopicName},
|
||||||
|
value = TopicId
|
||||||
|
},
|
||||||
|
write
|
||||||
|
),
|
||||||
|
mnesia:write(
|
||||||
|
Tab,
|
||||||
|
#emqx_sn_registry{
|
||||||
|
key = {ClientId, TopicId},
|
||||||
|
value = TopicName
|
||||||
|
},
|
||||||
|
write
|
||||||
|
).
|
||||||
|
|
||||||
%%-----------------------------------------------------------------------------
|
%%-----------------------------------------------------------------------------
|
||||||
|
|
||||||
next_topic_id(Tab, PredefId, ClientId) ->
|
next_topic_id(Tab, PredefId, ClientId) ->
|
||||||
|
|
|
@ -255,37 +255,37 @@ t_clients_api(_) ->
|
||||||
Fun = fun(_Channel, _Token) ->
|
Fun = fun(_Channel, _Token) ->
|
||||||
ClientId = <<"client1">>,
|
ClientId = <<"client1">>,
|
||||||
%% list
|
%% list
|
||||||
{200, #{data := [Client1]}} = request(get, "/gateway/coap/clients"),
|
{200, #{data := [Client1]}} = request(get, "/gateways/coap/clients"),
|
||||||
#{clientid := ClientId} = Client1,
|
#{clientid := ClientId} = Client1,
|
||||||
%% searching
|
%% searching
|
||||||
{200, #{data := [Client2]}} =
|
{200, #{data := [Client2]}} =
|
||||||
request(
|
request(
|
||||||
get,
|
get,
|
||||||
"/gateway/coap/clients",
|
"/gateways/coap/clients",
|
||||||
[{<<"clientid">>, ClientId}]
|
[{<<"clientid">>, ClientId}]
|
||||||
),
|
),
|
||||||
{200, #{data := [Client3]}} =
|
{200, #{data := [Client3]}} =
|
||||||
request(
|
request(
|
||||||
get,
|
get,
|
||||||
"/gateway/coap/clients",
|
"/gateways/coap/clients",
|
||||||
[{<<"like_clientid">>, <<"cli">>}]
|
[{<<"like_clientid">>, <<"cli">>}]
|
||||||
),
|
),
|
||||||
%% lookup
|
%% lookup
|
||||||
{200, Client4} =
|
{200, Client4} =
|
||||||
request(get, "/gateway/coap/clients/client1"),
|
request(get, "/gateways/coap/clients/client1"),
|
||||||
%% assert
|
%% assert
|
||||||
Client1 = Client2 = Client3 = Client4,
|
Client1 = Client2 = Client3 = Client4,
|
||||||
%% kickout
|
%% kickout
|
||||||
{204, _} =
|
{204, _} =
|
||||||
request(delete, "/gateway/coap/clients/client1"),
|
request(delete, "/gateways/coap/clients/client1"),
|
||||||
timer:sleep(200),
|
timer:sleep(200),
|
||||||
{200, #{data := []}} = request(get, "/gateway/coap/clients")
|
{200, #{data := []}} = request(get, "/gateways/coap/clients")
|
||||||
end,
|
end,
|
||||||
with_connection(Fun).
|
with_connection(Fun).
|
||||||
|
|
||||||
t_clients_subscription_api(_) ->
|
t_clients_subscription_api(_) ->
|
||||||
Fun = fun(_Channel, _Token) ->
|
Fun = fun(_Channel, _Token) ->
|
||||||
Path = "/gateway/coap/clients/client1/subscriptions",
|
Path = "/gateways/coap/clients/client1/subscriptions",
|
||||||
%% list
|
%% list
|
||||||
{200, []} = request(get, Path),
|
{200, []} = request(get, Path),
|
||||||
%% create
|
%% create
|
||||||
|
@ -312,7 +312,7 @@ t_clients_subscription_api(_) ->
|
||||||
|
|
||||||
t_clients_get_subscription_api(_) ->
|
t_clients_get_subscription_api(_) ->
|
||||||
Fun = fun(Channel, Token) ->
|
Fun = fun(Channel, Token) ->
|
||||||
Path = "/gateway/coap/clients/client1/subscriptions",
|
Path = "/gateways/coap/clients/client1/subscriptions",
|
||||||
%% list
|
%% list
|
||||||
{200, []} = request(get, Path),
|
{200, []} = request(get, Path),
|
||||||
|
|
||||||
|
|
|
@ -71,28 +71,33 @@ end_per_suite(Config) ->
|
||||||
t_send_request_api(_) ->
|
t_send_request_api(_) ->
|
||||||
ClientId = start_client(),
|
ClientId = start_client(),
|
||||||
timer:sleep(200),
|
timer:sleep(200),
|
||||||
Path = emqx_mgmt_api_test_util:api_path(["gateway/coap/clients/client1/request"]),
|
Test = fun(API) ->
|
||||||
Token = <<"atoken">>,
|
Path = emqx_mgmt_api_test_util:api_path([API]),
|
||||||
Payload = <<"simple echo this">>,
|
Token = <<"atoken">>,
|
||||||
Req = #{
|
Payload = <<"simple echo this">>,
|
||||||
token => Token,
|
Req = #{
|
||||||
payload => Payload,
|
token => Token,
|
||||||
timeout => <<"10s">>,
|
payload => Payload,
|
||||||
content_type => <<"text/plain">>,
|
timeout => <<"10s">>,
|
||||||
method => <<"get">>
|
content_type => <<"text/plain">>,
|
||||||
},
|
method => <<"get">>
|
||||||
Auth = emqx_mgmt_api_test_util:auth_header_(),
|
},
|
||||||
{ok, Response} = emqx_mgmt_api_test_util:request_api(
|
Auth = emqx_mgmt_api_test_util:auth_header_(),
|
||||||
post,
|
{ok, Response} = emqx_mgmt_api_test_util:request_api(
|
||||||
Path,
|
post,
|
||||||
"method=get",
|
Path,
|
||||||
Auth,
|
"method=get",
|
||||||
Req
|
Auth,
|
||||||
),
|
Req
|
||||||
#{<<"token">> := RToken, <<"payload">> := RPayload} =
|
),
|
||||||
emqx_json:decode(Response, [return_maps]),
|
#{<<"token">> := RToken, <<"payload">> := RPayload} =
|
||||||
?assertEqual(Token, RToken),
|
emqx_json:decode(Response, [return_maps]),
|
||||||
?assertEqual(Payload, RPayload),
|
?assertEqual(Token, RToken),
|
||||||
|
?assertEqual(Payload, RPayload)
|
||||||
|
end,
|
||||||
|
Test("gateways/coap/clients/client1/request"),
|
||||||
|
timer:sleep(100),
|
||||||
|
Test("gateway/coap/clients/client1/request"),
|
||||||
erlang:exit(ClientId, kill),
|
erlang:exit(ClientId, kill),
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
|
|
|
@ -483,8 +483,8 @@ ssl_opts() ->
|
||||||
maps:merge(
|
maps:merge(
|
||||||
Certs,
|
Certs,
|
||||||
#{
|
#{
|
||||||
versions => emqx_tls_lib:default_versions(),
|
versions => emqx_tls_lib:available_versions(tls),
|
||||||
ciphers => emqx_tls_lib:default_ciphers(),
|
ciphers => [],
|
||||||
verify => verify_peer,
|
verify => verify_peer,
|
||||||
fail_if_no_peer_cert => true,
|
fail_if_no_peer_cert => true,
|
||||||
secure_renegotiate => false,
|
secure_renegotiate => false,
|
||||||
|
|
|
@ -60,6 +60,22 @@ end_per_suite(Conf) ->
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
t_gateway(_) ->
|
t_gateway(_) ->
|
||||||
|
{200, Gateways} = request(get, "/gateways"),
|
||||||
|
lists:foreach(fun assert_gw_unloaded/1, Gateways),
|
||||||
|
{400, BadReq} = request(get, "/gateways/uname_gateway"),
|
||||||
|
assert_bad_request(BadReq),
|
||||||
|
{201, _} = request(post, "/gateways", #{name => <<"stomp">>}),
|
||||||
|
{200, StompGw1} = request(get, "/gateways/stomp"),
|
||||||
|
assert_feilds_apperence(
|
||||||
|
[name, status, enable, created_at, started_at],
|
||||||
|
StompGw1
|
||||||
|
),
|
||||||
|
{204, _} = request(delete, "/gateways/stomp"),
|
||||||
|
{200, StompGw2} = request(get, "/gateways/stomp"),
|
||||||
|
assert_gw_unloaded(StompGw2),
|
||||||
|
ok.
|
||||||
|
|
||||||
|
t_deprecated_gateway(_) ->
|
||||||
{200, Gateways} = request(get, "/gateway"),
|
{200, Gateways} = request(get, "/gateway"),
|
||||||
lists:foreach(fun assert_gw_unloaded/1, Gateways),
|
lists:foreach(fun assert_gw_unloaded/1, Gateways),
|
||||||
{400, BadReq} = request(get, "/gateway/uname_gateway"),
|
{400, BadReq} = request(get, "/gateway/uname_gateway"),
|
||||||
|
@ -76,7 +92,7 @@ t_gateway(_) ->
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
t_gateway_stomp(_) ->
|
t_gateway_stomp(_) ->
|
||||||
{200, Gw} = request(get, "/gateway/stomp"),
|
{200, Gw} = request(get, "/gateways/stomp"),
|
||||||
assert_gw_unloaded(Gw),
|
assert_gw_unloaded(Gw),
|
||||||
%% post
|
%% post
|
||||||
GwConf = #{
|
GwConf = #{
|
||||||
|
@ -90,18 +106,18 @@ t_gateway_stomp(_) ->
|
||||||
#{name => <<"def">>, type => <<"tcp">>, bind => <<"61613">>}
|
#{name => <<"def">>, type => <<"tcp">>, bind => <<"61613">>}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
{201, _} = request(post, "/gateway", GwConf),
|
{201, _} = request(post, "/gateways", GwConf),
|
||||||
{200, ConfResp} = request(get, "/gateway/stomp"),
|
{200, ConfResp} = request(get, "/gateways/stomp"),
|
||||||
assert_confs(GwConf, ConfResp),
|
assert_confs(GwConf, ConfResp),
|
||||||
%% put
|
%% put
|
||||||
GwConf2 = emqx_map_lib:deep_merge(GwConf, #{frame => #{max_headers => 10}}),
|
GwConf2 = emqx_map_lib:deep_merge(GwConf, #{frame => #{max_headers => 10}}),
|
||||||
{200, _} = request(put, "/gateway/stomp", maps:without([name, listeners], GwConf2)),
|
{200, _} = request(put, "/gateways/stomp", maps:without([name, listeners], GwConf2)),
|
||||||
{200, ConfResp2} = request(get, "/gateway/stomp"),
|
{200, ConfResp2} = request(get, "/gateways/stomp"),
|
||||||
assert_confs(GwConf2, ConfResp2),
|
assert_confs(GwConf2, ConfResp2),
|
||||||
{204, _} = request(delete, "/gateway/stomp").
|
{204, _} = request(delete, "/gateways/stomp").
|
||||||
|
|
||||||
t_gateway_mqttsn(_) ->
|
t_gateway_mqttsn(_) ->
|
||||||
{200, Gw} = request(get, "/gateway/mqttsn"),
|
{200, Gw} = request(get, "/gateways/mqttsn"),
|
||||||
assert_gw_unloaded(Gw),
|
assert_gw_unloaded(Gw),
|
||||||
%% post
|
%% post
|
||||||
GwConf = #{
|
GwConf = #{
|
||||||
|
@ -114,18 +130,18 @@ t_gateway_mqttsn(_) ->
|
||||||
#{name => <<"def">>, type => <<"udp">>, bind => <<"1884">>}
|
#{name => <<"def">>, type => <<"udp">>, bind => <<"1884">>}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
{201, _} = request(post, "/gateway", GwConf),
|
{201, _} = request(post, "/gateways", GwConf),
|
||||||
{200, ConfResp} = request(get, "/gateway/mqttsn"),
|
{200, ConfResp} = request(get, "/gateways/mqttsn"),
|
||||||
assert_confs(GwConf, ConfResp),
|
assert_confs(GwConf, ConfResp),
|
||||||
%% put
|
%% put
|
||||||
GwConf2 = emqx_map_lib:deep_merge(GwConf, #{predefined => []}),
|
GwConf2 = emqx_map_lib:deep_merge(GwConf, #{predefined => []}),
|
||||||
{200, _} = request(put, "/gateway/mqttsn", maps:without([name, listeners], GwConf2)),
|
{200, _} = request(put, "/gateways/mqttsn", maps:without([name, listeners], GwConf2)),
|
||||||
{200, ConfResp2} = request(get, "/gateway/mqttsn"),
|
{200, ConfResp2} = request(get, "/gateways/mqttsn"),
|
||||||
assert_confs(GwConf2, ConfResp2),
|
assert_confs(GwConf2, ConfResp2),
|
||||||
{204, _} = request(delete, "/gateway/mqttsn").
|
{204, _} = request(delete, "/gateways/mqttsn").
|
||||||
|
|
||||||
t_gateway_coap(_) ->
|
t_gateway_coap(_) ->
|
||||||
{200, Gw} = request(get, "/gateway/coap"),
|
{200, Gw} = request(get, "/gateways/coap"),
|
||||||
assert_gw_unloaded(Gw),
|
assert_gw_unloaded(Gw),
|
||||||
%% post
|
%% post
|
||||||
GwConf = #{
|
GwConf = #{
|
||||||
|
@ -136,18 +152,18 @@ t_gateway_coap(_) ->
|
||||||
#{name => <<"def">>, type => <<"udp">>, bind => <<"5683">>}
|
#{name => <<"def">>, type => <<"udp">>, bind => <<"5683">>}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
{201, _} = request(post, "/gateway", GwConf),
|
{201, _} = request(post, "/gateways", GwConf),
|
||||||
{200, ConfResp} = request(get, "/gateway/coap"),
|
{200, ConfResp} = request(get, "/gateways/coap"),
|
||||||
assert_confs(GwConf, ConfResp),
|
assert_confs(GwConf, ConfResp),
|
||||||
%% put
|
%% put
|
||||||
GwConf2 = emqx_map_lib:deep_merge(GwConf, #{heartbeat => <<"10s">>}),
|
GwConf2 = emqx_map_lib:deep_merge(GwConf, #{heartbeat => <<"10s">>}),
|
||||||
{200, _} = request(put, "/gateway/coap", maps:without([name, listeners], GwConf2)),
|
{200, _} = request(put, "/gateways/coap", maps:without([name, listeners], GwConf2)),
|
||||||
{200, ConfResp2} = request(get, "/gateway/coap"),
|
{200, ConfResp2} = request(get, "/gateways/coap"),
|
||||||
assert_confs(GwConf2, ConfResp2),
|
assert_confs(GwConf2, ConfResp2),
|
||||||
{204, _} = request(delete, "/gateway/coap").
|
{204, _} = request(delete, "/gateways/coap").
|
||||||
|
|
||||||
t_gateway_lwm2m(_) ->
|
t_gateway_lwm2m(_) ->
|
||||||
{200, Gw} = request(get, "/gateway/lwm2m"),
|
{200, Gw} = request(get, "/gateways/lwm2m"),
|
||||||
assert_gw_unloaded(Gw),
|
assert_gw_unloaded(Gw),
|
||||||
%% post
|
%% post
|
||||||
GwConf = #{
|
GwConf = #{
|
||||||
|
@ -168,18 +184,18 @@ t_gateway_lwm2m(_) ->
|
||||||
#{name => <<"def">>, type => <<"udp">>, bind => <<"5783">>}
|
#{name => <<"def">>, type => <<"udp">>, bind => <<"5783">>}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
{201, _} = request(post, "/gateway", GwConf),
|
{201, _} = request(post, "/gateways", GwConf),
|
||||||
{200, ConfResp} = request(get, "/gateway/lwm2m"),
|
{200, ConfResp} = request(get, "/gateways/lwm2m"),
|
||||||
assert_confs(GwConf, ConfResp),
|
assert_confs(GwConf, ConfResp),
|
||||||
%% put
|
%% put
|
||||||
GwConf2 = emqx_map_lib:deep_merge(GwConf, #{qmode_time_window => <<"10s">>}),
|
GwConf2 = emqx_map_lib:deep_merge(GwConf, #{qmode_time_window => <<"10s">>}),
|
||||||
{200, _} = request(put, "/gateway/lwm2m", maps:without([name, listeners], GwConf2)),
|
{200, _} = request(put, "/gateways/lwm2m", maps:without([name, listeners], GwConf2)),
|
||||||
{200, ConfResp2} = request(get, "/gateway/lwm2m"),
|
{200, ConfResp2} = request(get, "/gateways/lwm2m"),
|
||||||
assert_confs(GwConf2, ConfResp2),
|
assert_confs(GwConf2, ConfResp2),
|
||||||
{204, _} = request(delete, "/gateway/lwm2m").
|
{204, _} = request(delete, "/gateways/lwm2m").
|
||||||
|
|
||||||
t_gateway_exproto(_) ->
|
t_gateway_exproto(_) ->
|
||||||
{200, Gw} = request(get, "/gateway/exproto"),
|
{200, Gw} = request(get, "/gateways/exproto"),
|
||||||
assert_gw_unloaded(Gw),
|
assert_gw_unloaded(Gw),
|
||||||
%% post
|
%% post
|
||||||
GwConf = #{
|
GwConf = #{
|
||||||
|
@ -190,18 +206,18 @@ t_gateway_exproto(_) ->
|
||||||
#{name => <<"def">>, type => <<"tcp">>, bind => <<"7993">>}
|
#{name => <<"def">>, type => <<"tcp">>, bind => <<"7993">>}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
{201, _} = request(post, "/gateway", GwConf),
|
{201, _} = request(post, "/gateways", GwConf),
|
||||||
{200, ConfResp} = request(get, "/gateway/exproto"),
|
{200, ConfResp} = request(get, "/gateways/exproto"),
|
||||||
assert_confs(GwConf, ConfResp),
|
assert_confs(GwConf, ConfResp),
|
||||||
%% put
|
%% put
|
||||||
GwConf2 = emqx_map_lib:deep_merge(GwConf, #{server => #{bind => <<"9200">>}}),
|
GwConf2 = emqx_map_lib:deep_merge(GwConf, #{server => #{bind => <<"9200">>}}),
|
||||||
{200, _} = request(put, "/gateway/exproto", maps:without([name, listeners], GwConf2)),
|
{200, _} = request(put, "/gateways/exproto", maps:without([name, listeners], GwConf2)),
|
||||||
{200, ConfResp2} = request(get, "/gateway/exproto"),
|
{200, ConfResp2} = request(get, "/gateways/exproto"),
|
||||||
assert_confs(GwConf2, ConfResp2),
|
assert_confs(GwConf2, ConfResp2),
|
||||||
{204, _} = request(delete, "/gateway/exproto").
|
{204, _} = request(delete, "/gateways/exproto").
|
||||||
|
|
||||||
t_gateway_exproto_with_ssl(_) ->
|
t_gateway_exproto_with_ssl(_) ->
|
||||||
{200, Gw} = request(get, "/gateway/exproto"),
|
{200, Gw} = request(get, "/gateways/exproto"),
|
||||||
assert_gw_unloaded(Gw),
|
assert_gw_unloaded(Gw),
|
||||||
|
|
||||||
SslSvrOpts = ssl_server_opts(),
|
SslSvrOpts = ssl_server_opts(),
|
||||||
|
@ -221,8 +237,8 @@ t_gateway_exproto_with_ssl(_) ->
|
||||||
#{name => <<"def">>, type => <<"tcp">>, bind => <<"7993">>}
|
#{name => <<"def">>, type => <<"tcp">>, bind => <<"7993">>}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
{201, _} = request(post, "/gateway", GwConf),
|
{201, _} = request(post, "/gateways", GwConf),
|
||||||
{200, ConfResp} = request(get, "/gateway/exproto"),
|
{200, ConfResp} = request(get, "/gateways/exproto"),
|
||||||
assert_confs(GwConf, ConfResp),
|
assert_confs(GwConf, ConfResp),
|
||||||
%% put
|
%% put
|
||||||
GwConf2 = emqx_map_lib:deep_merge(GwConf, #{
|
GwConf2 = emqx_map_lib:deep_merge(GwConf, #{
|
||||||
|
@ -231,50 +247,50 @@ t_gateway_exproto_with_ssl(_) ->
|
||||||
ssl_options => SslCliOpts
|
ssl_options => SslCliOpts
|
||||||
}
|
}
|
||||||
}),
|
}),
|
||||||
{200, _} = request(put, "/gateway/exproto", maps:without([name, listeners], GwConf2)),
|
{200, _} = request(put, "/gateways/exproto", maps:without([name, listeners], GwConf2)),
|
||||||
{200, ConfResp2} = request(get, "/gateway/exproto"),
|
{200, ConfResp2} = request(get, "/gateways/exproto"),
|
||||||
assert_confs(GwConf2, ConfResp2),
|
assert_confs(GwConf2, ConfResp2),
|
||||||
{204, _} = request(delete, "/gateway/exproto").
|
{204, _} = request(delete, "/gateways/exproto").
|
||||||
|
|
||||||
t_authn(_) ->
|
t_authn(_) ->
|
||||||
GwConf = #{name => <<"stomp">>},
|
GwConf = #{name => <<"stomp">>},
|
||||||
{201, _} = request(post, "/gateway", GwConf),
|
{201, _} = request(post, "/gateways", GwConf),
|
||||||
ct:sleep(500),
|
ct:sleep(500),
|
||||||
{204, _} = request(get, "/gateway/stomp/authentication"),
|
{204, _} = request(get, "/gateways/stomp/authentication"),
|
||||||
|
|
||||||
AuthConf = #{
|
AuthConf = #{
|
||||||
mechanism => <<"password_based">>,
|
mechanism => <<"password_based">>,
|
||||||
backend => <<"built_in_database">>,
|
backend => <<"built_in_database">>,
|
||||||
user_id_type => <<"clientid">>
|
user_id_type => <<"clientid">>
|
||||||
},
|
},
|
||||||
{201, _} = request(post, "/gateway/stomp/authentication", AuthConf),
|
{201, _} = request(post, "/gateways/stomp/authentication", AuthConf),
|
||||||
{200, ConfResp} = request(get, "/gateway/stomp/authentication"),
|
{200, ConfResp} = request(get, "/gateways/stomp/authentication"),
|
||||||
assert_confs(AuthConf, ConfResp),
|
assert_confs(AuthConf, ConfResp),
|
||||||
|
|
||||||
AuthConf2 = maps:merge(AuthConf, #{user_id_type => <<"username">>}),
|
AuthConf2 = maps:merge(AuthConf, #{user_id_type => <<"username">>}),
|
||||||
{200, _} = request(put, "/gateway/stomp/authentication", AuthConf2),
|
{200, _} = request(put, "/gateways/stomp/authentication", AuthConf2),
|
||||||
|
|
||||||
{200, ConfResp2} = request(get, "/gateway/stomp/authentication"),
|
{200, ConfResp2} = request(get, "/gateways/stomp/authentication"),
|
||||||
assert_confs(AuthConf2, ConfResp2),
|
assert_confs(AuthConf2, ConfResp2),
|
||||||
|
|
||||||
{204, _} = request(delete, "/gateway/stomp/authentication"),
|
{204, _} = request(delete, "/gateways/stomp/authentication"),
|
||||||
{204, _} = request(get, "/gateway/stomp/authentication"),
|
{204, _} = request(get, "/gateways/stomp/authentication"),
|
||||||
{204, _} = request(delete, "/gateway/stomp").
|
{204, _} = request(delete, "/gateways/stomp").
|
||||||
|
|
||||||
t_authn_data_mgmt(_) ->
|
t_authn_data_mgmt(_) ->
|
||||||
GwConf = #{name => <<"stomp">>},
|
GwConf = #{name => <<"stomp">>},
|
||||||
{201, _} = request(post, "/gateway", GwConf),
|
{201, _} = request(post, "/gateways", GwConf),
|
||||||
ct:sleep(500),
|
ct:sleep(500),
|
||||||
{204, _} = request(get, "/gateway/stomp/authentication"),
|
{204, _} = request(get, "/gateways/stomp/authentication"),
|
||||||
|
|
||||||
AuthConf = #{
|
AuthConf = #{
|
||||||
mechanism => <<"password_based">>,
|
mechanism => <<"password_based">>,
|
||||||
backend => <<"built_in_database">>,
|
backend => <<"built_in_database">>,
|
||||||
user_id_type => <<"clientid">>
|
user_id_type => <<"clientid">>
|
||||||
},
|
},
|
||||||
{201, _} = request(post, "/gateway/stomp/authentication", AuthConf),
|
{201, _} = request(post, "/gateways/stomp/authentication", AuthConf),
|
||||||
ct:sleep(500),
|
ct:sleep(500),
|
||||||
{200, ConfResp} = request(get, "/gateway/stomp/authentication"),
|
{200, ConfResp} = request(get, "/gateways/stomp/authentication"),
|
||||||
assert_confs(AuthConf, ConfResp),
|
assert_confs(AuthConf, ConfResp),
|
||||||
|
|
||||||
User1 = #{
|
User1 = #{
|
||||||
|
@ -282,19 +298,19 @@ t_authn_data_mgmt(_) ->
|
||||||
password => <<"123456">>,
|
password => <<"123456">>,
|
||||||
is_superuser => false
|
is_superuser => false
|
||||||
},
|
},
|
||||||
{201, _} = request(post, "/gateway/stomp/authentication/users", User1),
|
{201, _} = request(post, "/gateways/stomp/authentication/users", User1),
|
||||||
{200, #{data := [UserRespd1]}} = request(get, "/gateway/stomp/authentication/users"),
|
{200, #{data := [UserRespd1]}} = request(get, "/gateways/stomp/authentication/users"),
|
||||||
assert_confs(UserRespd1, User1),
|
assert_confs(UserRespd1, User1),
|
||||||
|
|
||||||
{200, UserRespd2} = request(
|
{200, UserRespd2} = request(
|
||||||
get,
|
get,
|
||||||
"/gateway/stomp/authentication/users/test"
|
"/gateways/stomp/authentication/users/test"
|
||||||
),
|
),
|
||||||
assert_confs(UserRespd2, User1),
|
assert_confs(UserRespd2, User1),
|
||||||
|
|
||||||
{200, UserRespd3} = request(
|
{200, UserRespd3} = request(
|
||||||
put,
|
put,
|
||||||
"/gateway/stomp/authentication/users/test",
|
"/gateways/stomp/authentication/users/test",
|
||||||
#{
|
#{
|
||||||
password => <<"654321">>,
|
password => <<"654321">>,
|
||||||
is_superuser => true
|
is_superuser => true
|
||||||
|
@ -304,19 +320,19 @@ t_authn_data_mgmt(_) ->
|
||||||
|
|
||||||
{200, UserRespd4} = request(
|
{200, UserRespd4} = request(
|
||||||
get,
|
get,
|
||||||
"/gateway/stomp/authentication/users/test"
|
"/gateways/stomp/authentication/users/test"
|
||||||
),
|
),
|
||||||
assert_confs(UserRespd4, User1#{is_superuser => true}),
|
assert_confs(UserRespd4, User1#{is_superuser => true}),
|
||||||
|
|
||||||
{204, _} = request(delete, "/gateway/stomp/authentication/users/test"),
|
{204, _} = request(delete, "/gateways/stomp/authentication/users/test"),
|
||||||
|
|
||||||
{200, #{data := []}} = request(
|
{200, #{data := []}} = request(
|
||||||
get,
|
get,
|
||||||
"/gateway/stomp/authentication/users"
|
"/gateways/stomp/authentication/users"
|
||||||
),
|
),
|
||||||
|
|
||||||
ImportUri = emqx_dashboard_api_test_helpers:uri(
|
ImportUri = emqx_dashboard_api_test_helpers:uri(
|
||||||
["gateway", "stomp", "authentication", "import_users"]
|
["gateways", "stomp", "authentication", "import_users"]
|
||||||
),
|
),
|
||||||
|
|
||||||
Dir = code:lib_dir(emqx_authn, test),
|
Dir = code:lib_dir(emqx_authn, test),
|
||||||
|
@ -332,38 +348,38 @@ t_authn_data_mgmt(_) ->
|
||||||
{filename, "user-credentials.csv", CSVData}
|
{filename, "user-credentials.csv", CSVData}
|
||||||
]),
|
]),
|
||||||
|
|
||||||
{204, _} = request(delete, "/gateway/stomp/authentication"),
|
{204, _} = request(delete, "/gateways/stomp/authentication"),
|
||||||
{204, _} = request(get, "/gateway/stomp/authentication"),
|
{204, _} = request(get, "/gateways/stomp/authentication"),
|
||||||
{204, _} = request(delete, "/gateway/stomp").
|
{204, _} = request(delete, "/gateways/stomp").
|
||||||
|
|
||||||
t_listeners_tcp(_) ->
|
t_listeners_tcp(_) ->
|
||||||
GwConf = #{name => <<"stomp">>},
|
GwConf = #{name => <<"stomp">>},
|
||||||
{201, _} = request(post, "/gateway", GwConf),
|
{201, _} = request(post, "/gateways", GwConf),
|
||||||
{404, _} = request(get, "/gateway/stomp/listeners"),
|
{404, _} = request(get, "/gateways/stomp/listeners"),
|
||||||
LisConf = #{
|
LisConf = #{
|
||||||
name => <<"def">>,
|
name => <<"def">>,
|
||||||
type => <<"tcp">>,
|
type => <<"tcp">>,
|
||||||
bind => <<"127.0.0.1:61613">>
|
bind => <<"127.0.0.1:61613">>
|
||||||
},
|
},
|
||||||
{201, _} = request(post, "/gateway/stomp/listeners", LisConf),
|
{201, _} = request(post, "/gateways/stomp/listeners", LisConf),
|
||||||
{200, ConfResp} = request(get, "/gateway/stomp/listeners"),
|
{200, ConfResp} = request(get, "/gateways/stomp/listeners"),
|
||||||
assert_confs([LisConf], ConfResp),
|
assert_confs([LisConf], ConfResp),
|
||||||
{200, ConfResp1} = request(get, "/gateway/stomp/listeners/stomp:tcp:def"),
|
{200, ConfResp1} = request(get, "/gateways/stomp/listeners/stomp:tcp:def"),
|
||||||
assert_confs(LisConf, ConfResp1),
|
assert_confs(LisConf, ConfResp1),
|
||||||
|
|
||||||
LisConf2 = maps:merge(LisConf, #{bind => <<"127.0.0.1:61614">>}),
|
LisConf2 = maps:merge(LisConf, #{bind => <<"127.0.0.1:61614">>}),
|
||||||
{200, _} = request(
|
{200, _} = request(
|
||||||
put,
|
put,
|
||||||
"/gateway/stomp/listeners/stomp:tcp:def",
|
"/gateways/stomp/listeners/stomp:tcp:def",
|
||||||
LisConf2
|
LisConf2
|
||||||
),
|
),
|
||||||
|
|
||||||
{200, ConfResp2} = request(get, "/gateway/stomp/listeners/stomp:tcp:def"),
|
{200, ConfResp2} = request(get, "/gateways/stomp/listeners/stomp:tcp:def"),
|
||||||
assert_confs(LisConf2, ConfResp2),
|
assert_confs(LisConf2, ConfResp2),
|
||||||
|
|
||||||
{204, _} = request(delete, "/gateway/stomp/listeners/stomp:tcp:def"),
|
{204, _} = request(delete, "/gateways/stomp/listeners/stomp:tcp:def"),
|
||||||
{404, _} = request(get, "/gateway/stomp/listeners/stomp:tcp:def"),
|
{404, _} = request(get, "/gateways/stomp/listeners/stomp:tcp:def"),
|
||||||
{204, _} = request(delete, "/gateway/stomp").
|
{204, _} = request(delete, "/gateways/stomp").
|
||||||
|
|
||||||
t_listeners_authn(_) ->
|
t_listeners_authn(_) ->
|
||||||
GwConf = #{
|
GwConf = #{
|
||||||
|
@ -376,9 +392,9 @@ t_listeners_authn(_) ->
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
{201, _} = request(post, "/gateway", GwConf),
|
{201, _} = request(post, "/gateways", GwConf),
|
||||||
ct:sleep(500),
|
ct:sleep(500),
|
||||||
{200, ConfResp} = request(get, "/gateway/stomp"),
|
{200, ConfResp} = request(get, "/gateways/stomp"),
|
||||||
assert_confs(GwConf, ConfResp),
|
assert_confs(GwConf, ConfResp),
|
||||||
|
|
||||||
AuthConf = #{
|
AuthConf = #{
|
||||||
|
@ -386,7 +402,7 @@ t_listeners_authn(_) ->
|
||||||
backend => <<"built_in_database">>,
|
backend => <<"built_in_database">>,
|
||||||
user_id_type => <<"clientid">>
|
user_id_type => <<"clientid">>
|
||||||
},
|
},
|
||||||
Path = "/gateway/stomp/listeners/stomp:tcp:def/authentication",
|
Path = "/gateways/stomp/listeners/stomp:tcp:def/authentication",
|
||||||
{201, _} = request(post, Path, AuthConf),
|
{201, _} = request(post, Path, AuthConf),
|
||||||
{200, ConfResp2} = request(get, Path),
|
{200, ConfResp2} = request(get, Path),
|
||||||
assert_confs(AuthConf, ConfResp2),
|
assert_confs(AuthConf, ConfResp2),
|
||||||
|
@ -400,7 +416,7 @@ t_listeners_authn(_) ->
|
||||||
{204, _} = request(delete, Path),
|
{204, _} = request(delete, Path),
|
||||||
%% FIXME: 204?
|
%% FIXME: 204?
|
||||||
{204, _} = request(get, Path),
|
{204, _} = request(get, Path),
|
||||||
{204, _} = request(delete, "/gateway/stomp").
|
{204, _} = request(delete, "/gateways/stomp").
|
||||||
|
|
||||||
t_listeners_authn_data_mgmt(_) ->
|
t_listeners_authn_data_mgmt(_) ->
|
||||||
GwConf = #{
|
GwConf = #{
|
||||||
|
@ -413,8 +429,8 @@ t_listeners_authn_data_mgmt(_) ->
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
{201, _} = request(post, "/gateway", GwConf),
|
{201, _} = request(post, "/gateways", GwConf),
|
||||||
{200, ConfResp} = request(get, "/gateway/stomp"),
|
{200, ConfResp} = request(get, "/gateways/stomp"),
|
||||||
assert_confs(GwConf, ConfResp),
|
assert_confs(GwConf, ConfResp),
|
||||||
|
|
||||||
AuthConf = #{
|
AuthConf = #{
|
||||||
|
@ -422,7 +438,7 @@ t_listeners_authn_data_mgmt(_) ->
|
||||||
backend => <<"built_in_database">>,
|
backend => <<"built_in_database">>,
|
||||||
user_id_type => <<"clientid">>
|
user_id_type => <<"clientid">>
|
||||||
},
|
},
|
||||||
Path = "/gateway/stomp/listeners/stomp:tcp:def/authentication",
|
Path = "/gateways/stomp/listeners/stomp:tcp:def/authentication",
|
||||||
{201, _} = request(post, Path, AuthConf),
|
{201, _} = request(post, Path, AuthConf),
|
||||||
{200, ConfResp2} = request(get, Path),
|
{200, ConfResp2} = request(get, Path),
|
||||||
assert_confs(AuthConf, ConfResp2),
|
assert_confs(AuthConf, ConfResp2),
|
||||||
|
@ -434,7 +450,7 @@ t_listeners_authn_data_mgmt(_) ->
|
||||||
},
|
},
|
||||||
{201, _} = request(
|
{201, _} = request(
|
||||||
post,
|
post,
|
||||||
"/gateway/stomp/listeners/stomp:tcp:def/authentication/users",
|
"/gateways/stomp/listeners/stomp:tcp:def/authentication/users",
|
||||||
User1
|
User1
|
||||||
),
|
),
|
||||||
|
|
||||||
|
@ -474,7 +490,7 @@ t_listeners_authn_data_mgmt(_) ->
|
||||||
),
|
),
|
||||||
|
|
||||||
ImportUri = emqx_dashboard_api_test_helpers:uri(
|
ImportUri = emqx_dashboard_api_test_helpers:uri(
|
||||||
["gateway", "stomp", "listeners", "stomp:tcp:def", "authentication", "import_users"]
|
["gateways", "stomp", "listeners", "stomp:tcp:def", "authentication", "import_users"]
|
||||||
),
|
),
|
||||||
|
|
||||||
Dir = code:lib_dir(emqx_authn, test),
|
Dir = code:lib_dir(emqx_authn, test),
|
||||||
|
@ -490,31 +506,31 @@ t_listeners_authn_data_mgmt(_) ->
|
||||||
{filename, "user-credentials.csv", CSVData}
|
{filename, "user-credentials.csv", CSVData}
|
||||||
]),
|
]),
|
||||||
|
|
||||||
{204, _} = request(delete, "/gateway/stomp").
|
{204, _} = request(delete, "/gateways/stomp").
|
||||||
|
|
||||||
t_authn_fuzzy_search(_) ->
|
t_authn_fuzzy_search(_) ->
|
||||||
GwConf = #{name => <<"stomp">>},
|
GwConf = #{name => <<"stomp">>},
|
||||||
{201, _} = request(post, "/gateway", GwConf),
|
{201, _} = request(post, "/gateways", GwConf),
|
||||||
{204, _} = request(get, "/gateway/stomp/authentication"),
|
{204, _} = request(get, "/gateways/stomp/authentication"),
|
||||||
|
|
||||||
AuthConf = #{
|
AuthConf = #{
|
||||||
mechanism => <<"password_based">>,
|
mechanism => <<"password_based">>,
|
||||||
backend => <<"built_in_database">>,
|
backend => <<"built_in_database">>,
|
||||||
user_id_type => <<"clientid">>
|
user_id_type => <<"clientid">>
|
||||||
},
|
},
|
||||||
{201, _} = request(post, "/gateway/stomp/authentication", AuthConf),
|
{201, _} = request(post, "/gateways/stomp/authentication", AuthConf),
|
||||||
{200, ConfResp} = request(get, "/gateway/stomp/authentication"),
|
{200, ConfResp} = request(get, "/gateways/stomp/authentication"),
|
||||||
assert_confs(AuthConf, ConfResp),
|
assert_confs(AuthConf, ConfResp),
|
||||||
|
|
||||||
Checker = fun({User, Fuzzy}) ->
|
Checker = fun({User, Fuzzy}) ->
|
||||||
{200, #{data := [UserRespd]}} = request(
|
{200, #{data := [UserRespd]}} = request(
|
||||||
get, "/gateway/stomp/authentication/users", Fuzzy
|
get, "/gateways/stomp/authentication/users", Fuzzy
|
||||||
),
|
),
|
||||||
assert_confs(UserRespd, User)
|
assert_confs(UserRespd, User)
|
||||||
end,
|
end,
|
||||||
|
|
||||||
Create = fun(User) ->
|
Create = fun(User) ->
|
||||||
{201, _} = request(post, "/gateway/stomp/authentication/users", User)
|
{201, _} = request(post, "/gateways/stomp/authentication/users", User)
|
||||||
end,
|
end,
|
||||||
|
|
||||||
UserDatas = [
|
UserDatas = [
|
||||||
|
@ -535,9 +551,9 @@ t_authn_fuzzy_search(_) ->
|
||||||
lists:foreach(Create, UserDatas),
|
lists:foreach(Create, UserDatas),
|
||||||
lists:foreach(Checker, lists:zip(UserDatas, FuzzyDatas)),
|
lists:foreach(Checker, lists:zip(UserDatas, FuzzyDatas)),
|
||||||
|
|
||||||
{204, _} = request(delete, "/gateway/stomp/authentication"),
|
{204, _} = request(delete, "/gateways/stomp/authentication"),
|
||||||
{204, _} = request(get, "/gateway/stomp/authentication"),
|
{204, _} = request(get, "/gateways/stomp/authentication"),
|
||||||
{204, _} = request(delete, "/gateway/stomp").
|
{204, _} = request(delete, "/gateways/stomp").
|
||||||
|
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% Asserts
|
%% Asserts
|
||||||
|
|
|
@ -141,7 +141,7 @@ on_start_auth(authn_http) ->
|
||||||
|
|
||||||
%% set authn for gateway
|
%% set authn for gateway
|
||||||
Setup = fun(Gateway) ->
|
Setup = fun(Gateway) ->
|
||||||
Path = io_lib:format("/gateway/~ts/authentication", [Gateway]),
|
Path = io_lib:format("/gateways/~ts/authentication", [Gateway]),
|
||||||
{204, _} = request(delete, Path),
|
{204, _} = request(delete, Path),
|
||||||
timer:sleep(200),
|
timer:sleep(200),
|
||||||
{201, _} = request(post, Path, http_authn_config()),
|
{201, _} = request(post, Path, http_authn_config()),
|
||||||
|
@ -198,7 +198,7 @@ on_start_auth(authz_http) ->
|
||||||
|
|
||||||
on_stop_auth(authn_http) ->
|
on_stop_auth(authn_http) ->
|
||||||
Delete = fun(Gateway) ->
|
Delete = fun(Gateway) ->
|
||||||
Path = io_lib:format("/gateway/~ts/authentication", [Gateway]),
|
Path = io_lib:format("/gateways/~ts/authentication", [Gateway]),
|
||||||
{204, _} = request(delete, Path)
|
{204, _} = request(delete, Path)
|
||||||
end,
|
end,
|
||||||
lists:foreach(Delete, ?GATEWAYS),
|
lists:foreach(Delete, ?GATEWAYS),
|
||||||
|
|
|
@ -2353,18 +2353,18 @@ case100_clients_api(Config) ->
|
||||||
std_register(UdpSock, Epn, ObjectList, MsgId1, RespTopic),
|
std_register(UdpSock, Epn, ObjectList, MsgId1, RespTopic),
|
||||||
|
|
||||||
%% list
|
%% list
|
||||||
{200, #{data := [Client1]}} = request(get, "/gateway/lwm2m/clients"),
|
{200, #{data := [Client1]}} = request(get, "/gateways/lwm2m/clients"),
|
||||||
%% searching
|
%% searching
|
||||||
{200, #{data := [Client2]}} =
|
{200, #{data := [Client2]}} =
|
||||||
request(
|
request(
|
||||||
get,
|
get,
|
||||||
"/gateway/lwm2m/clients",
|
"/gateways/lwm2m/clients",
|
||||||
[{<<"endpoint_name">>, list_to_binary(Epn)}]
|
[{<<"endpoint_name">>, list_to_binary(Epn)}]
|
||||||
),
|
),
|
||||||
{200, #{data := [Client3]}} =
|
{200, #{data := [Client3]}} =
|
||||||
request(
|
request(
|
||||||
get,
|
get,
|
||||||
"/gateway/lwm2m/clients",
|
"/gateways/lwm2m/clients",
|
||||||
[
|
[
|
||||||
{<<"like_endpoint_name">>, list_to_binary(Epn)},
|
{<<"like_endpoint_name">>, list_to_binary(Epn)},
|
||||||
{<<"gte_lifetime">>, <<"1">>}
|
{<<"gte_lifetime">>, <<"1">>}
|
||||||
|
@ -2373,14 +2373,14 @@ case100_clients_api(Config) ->
|
||||||
%% lookup
|
%% lookup
|
||||||
ClientId = maps:get(clientid, Client1),
|
ClientId = maps:get(clientid, Client1),
|
||||||
{200, Client4} =
|
{200, Client4} =
|
||||||
request(get, "/gateway/lwm2m/clients/" ++ binary_to_list(ClientId)),
|
request(get, "/gateways/lwm2m/clients/" ++ binary_to_list(ClientId)),
|
||||||
%% assert
|
%% assert
|
||||||
Client1 = Client2 = Client3 = Client4,
|
Client1 = Client2 = Client3 = Client4,
|
||||||
%% kickout
|
%% kickout
|
||||||
{204, _} =
|
{204, _} =
|
||||||
request(delete, "/gateway/lwm2m/clients/" ++ binary_to_list(ClientId)),
|
request(delete, "/gateways/lwm2m/clients/" ++ binary_to_list(ClientId)),
|
||||||
timer:sleep(100),
|
timer:sleep(100),
|
||||||
{200, #{data := []}} = request(get, "/gateway/lwm2m/clients").
|
{200, #{data := []}} = request(get, "/gateways/lwm2m/clients").
|
||||||
|
|
||||||
case100_subscription_api(Config) ->
|
case100_subscription_api(Config) ->
|
||||||
Epn = "urn:oma:lwm2m:oma:3",
|
Epn = "urn:oma:lwm2m:oma:3",
|
||||||
|
@ -2390,10 +2390,10 @@ case100_subscription_api(Config) ->
|
||||||
RespTopic = list_to_binary("lwm2m/" ++ Epn ++ "/up/resp"),
|
RespTopic = list_to_binary("lwm2m/" ++ Epn ++ "/up/resp"),
|
||||||
std_register(UdpSock, Epn, ObjectList, MsgId1, RespTopic),
|
std_register(UdpSock, Epn, ObjectList, MsgId1, RespTopic),
|
||||||
|
|
||||||
{200, #{data := [Client1]}} = request(get, "/gateway/lwm2m/clients"),
|
{200, #{data := [Client1]}} = request(get, "/gateways/lwm2m/clients"),
|
||||||
ClientId = maps:get(clientid, Client1),
|
ClientId = maps:get(clientid, Client1),
|
||||||
Path =
|
Path =
|
||||||
"/gateway/lwm2m/clients/" ++
|
"/gateways/lwm2m/clients/" ++
|
||||||
binary_to_list(ClientId) ++
|
binary_to_list(ClientId) ++
|
||||||
"/subscriptions",
|
"/subscriptions",
|
||||||
|
|
||||||
|
|
|
@ -326,7 +326,7 @@ t_observe(Config) ->
|
||||||
test_recv_mqtt_response(RespTopic),
|
test_recv_mqtt_response(RespTopic),
|
||||||
|
|
||||||
%% step2, call observe API
|
%% step2, call observe API
|
||||||
call_send_api(Epn, "observe", "path=/3/0/1&enable=false"),
|
call_deprecated_send_api(Epn, "observe", "path=/3/0/1&enable=false"),
|
||||||
timer:sleep(100),
|
timer:sleep(100),
|
||||||
#coap_message{type = Type, method = Method, options = Opts} = test_recv_coap_request(UdpSock),
|
#coap_message{type = Type, method = Method, options = Opts} = test_recv_coap_request(UdpSock),
|
||||||
?assertEqual(con, Type),
|
?assertEqual(con, Type),
|
||||||
|
@ -338,7 +338,7 @@ t_observe(Config) ->
|
||||||
%%% Internal Functions
|
%%% Internal Functions
|
||||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
call_lookup_api(ClientId, Path, Action) ->
|
call_lookup_api(ClientId, Path, Action) ->
|
||||||
ApiPath = emqx_mgmt_api_test_util:api_path(["gateway/lwm2m/clients", ClientId, "lookup"]),
|
ApiPath = emqx_mgmt_api_test_util:api_path(["gateways/lwm2m/clients", ClientId, "lookup"]),
|
||||||
Auth = emqx_mgmt_api_test_util:auth_header_(),
|
Auth = emqx_mgmt_api_test_util:auth_header_(),
|
||||||
Query = io_lib:format("path=~ts&action=~ts", [Path, Action]),
|
Query = io_lib:format("path=~ts&action=~ts", [Path, Action]),
|
||||||
{ok, Response} = emqx_mgmt_api_test_util:request_api(get, ApiPath, Query, Auth),
|
{ok, Response} = emqx_mgmt_api_test_util:request_api(get, ApiPath, Query, Auth),
|
||||||
|
@ -346,7 +346,13 @@ call_lookup_api(ClientId, Path, Action) ->
|
||||||
Response.
|
Response.
|
||||||
|
|
||||||
call_send_api(ClientId, Cmd, Query) ->
|
call_send_api(ClientId, Cmd, Query) ->
|
||||||
ApiPath = emqx_mgmt_api_test_util:api_path(["gateway/lwm2m/clients", ClientId, Cmd]),
|
call_send_api(ClientId, Cmd, Query, "gateways/lwm2m/clients").
|
||||||
|
|
||||||
|
call_deprecated_send_api(ClientId, Cmd, Query) ->
|
||||||
|
call_send_api(ClientId, Cmd, Query, "gateway/lwm2m/clients").
|
||||||
|
|
||||||
|
call_send_api(ClientId, Cmd, Query, API) ->
|
||||||
|
ApiPath = emqx_mgmt_api_test_util:api_path([API, ClientId, Cmd]),
|
||||||
Auth = emqx_mgmt_api_test_util:auth_header_(),
|
Auth = emqx_mgmt_api_test_util:auth_header_(),
|
||||||
{ok, Response} = emqx_mgmt_api_test_util:request_api(post, ApiPath, Query, Auth),
|
{ok, Response} = emqx_mgmt_api_test_util:request_api(post, ApiPath, Query, Auth),
|
||||||
?LOGT("rest api response:~ts~n", [Response]),
|
?LOGT("rest api response:~ts~n", [Response]),
|
||||||
|
|
|
@ -2198,15 +2198,15 @@ t_clients_api(_) ->
|
||||||
send_connect_msg(Socket, ClientId),
|
send_connect_msg(Socket, ClientId),
|
||||||
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
|
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
|
||||||
%% list
|
%% list
|
||||||
{200, #{data := [Client1]}} = request(get, "/gateway/mqttsn/clients"),
|
{200, #{data := [Client1]}} = request(get, "/gateways/mqttsn/clients"),
|
||||||
#{clientid := ClientId} = Client1,
|
#{clientid := ClientId} = Client1,
|
||||||
%% searching
|
%% searching
|
||||||
{200, #{data := [Client2]}} =
|
{200, #{data := [Client2]}} =
|
||||||
request(get, "/gateway/mqttsn/clients", [{<<"clientid">>, ClientId}]),
|
request(get, "/gateways/mqttsn/clients", [{<<"clientid">>, ClientId}]),
|
||||||
{200, #{data := [Client3]}} =
|
{200, #{data := [Client3]}} =
|
||||||
request(
|
request(
|
||||||
get,
|
get,
|
||||||
"/gateway/mqttsn/clients",
|
"/gateways/mqttsn/clients",
|
||||||
[
|
[
|
||||||
{<<"like_clientid">>, <<"test1">>},
|
{<<"like_clientid">>, <<"test1">>},
|
||||||
{<<"proto_ver">>, <<"1.2">>},
|
{<<"proto_ver">>, <<"1.2">>},
|
||||||
|
@ -2218,21 +2218,21 @@ t_clients_api(_) ->
|
||||||
),
|
),
|
||||||
%% lookup
|
%% lookup
|
||||||
{200, Client4} =
|
{200, Client4} =
|
||||||
request(get, "/gateway/mqttsn/clients/client_id_test1"),
|
request(get, "/gateways/mqttsn/clients/client_id_test1"),
|
||||||
%% assert
|
%% assert
|
||||||
Client1 = Client2 = Client3 = Client4,
|
Client1 = Client2 = Client3 = Client4,
|
||||||
%% kickout
|
%% kickout
|
||||||
{204, _} =
|
{204, _} =
|
||||||
request(delete, "/gateway/mqttsn/clients/client_id_test1"),
|
request(delete, "/gateways/mqttsn/clients/client_id_test1"),
|
||||||
timer:sleep(100),
|
timer:sleep(100),
|
||||||
{200, #{data := []}} = request(get, "/gateway/mqttsn/clients"),
|
{200, #{data := []}} = request(get, "/gateways/mqttsn/clients"),
|
||||||
|
|
||||||
send_disconnect_msg(Socket, undefined),
|
send_disconnect_msg(Socket, undefined),
|
||||||
gen_udp:close(Socket).
|
gen_udp:close(Socket).
|
||||||
|
|
||||||
t_clients_subscription_api(_) ->
|
t_clients_subscription_api(_) ->
|
||||||
ClientId = <<"client_id_test1">>,
|
ClientId = <<"client_id_test1">>,
|
||||||
Path = "/gateway/mqttsn/clients/client_id_test1/subscriptions",
|
Path = "/gateways/mqttsn/clients/client_id_test1/subscriptions",
|
||||||
{ok, Socket} = gen_udp:open(0, [binary]),
|
{ok, Socket} = gen_udp:open(0, [binary]),
|
||||||
send_connect_msg(Socket, ClientId),
|
send_connect_msg(Socket, ClientId),
|
||||||
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
|
?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)),
|
||||||
|
|
|
@ -721,12 +721,12 @@ t_rest_clienit_info(_) ->
|
||||||
_, _} = parse(Data),
|
_, _} = parse(Data),
|
||||||
|
|
||||||
%% client lists
|
%% client lists
|
||||||
{200, Clients} = request(get, "/gateway/stomp/clients"),
|
{200, Clients} = request(get, "/gateways/stomp/clients"),
|
||||||
?assertEqual(1, length(maps:get(data, Clients))),
|
?assertEqual(1, length(maps:get(data, Clients))),
|
||||||
StompClient = lists:nth(1, maps:get(data, Clients)),
|
StompClient = lists:nth(1, maps:get(data, Clients)),
|
||||||
ClientId = maps:get(clientid, StompClient),
|
ClientId = maps:get(clientid, StompClient),
|
||||||
ClientPath =
|
ClientPath =
|
||||||
"/gateway/stomp/clients/" ++
|
"/gateways/stomp/clients/" ++
|
||||||
binary_to_list(ClientId),
|
binary_to_list(ClientId),
|
||||||
{200, StompClient1} = request(get, ClientPath),
|
{200, StompClient1} = request(get, ClientPath),
|
||||||
?assertEqual(StompClient, StompClient1),
|
?assertEqual(StompClient, StompClient1),
|
||||||
|
@ -811,7 +811,7 @@ t_rest_clienit_info(_) ->
|
||||||
% sync
|
% sync
|
||||||
ignored = gen_server:call(emqx_cm, ignore, infinity),
|
ignored = gen_server:call(emqx_cm, ignore, infinity),
|
||||||
ok = emqx_pool:flush_async_tasks(),
|
ok = emqx_pool:flush_async_tasks(),
|
||||||
{200, Clients2} = request(get, "/gateway/stomp/clients"),
|
{200, Clients2} = request(get, "/gateways/stomp/clients"),
|
||||||
?assertEqual(0, length(maps:get(data, Clients2)))
|
?assertEqual(0, length(maps:get(data, Clients2)))
|
||||||
end).
|
end).
|
||||||
|
|
||||||
|
|
|
@ -2,7 +2,7 @@
|
||||||
{application, emqx_management, [
|
{application, emqx_management, [
|
||||||
{description, "EMQX Management API and CLI"},
|
{description, "EMQX Management API and CLI"},
|
||||||
% strict semver, bump manually!
|
% strict semver, bump manually!
|
||||||
{vsn, "5.0.3"},
|
{vsn, "5.0.4"},
|
||||||
{modules, []},
|
{modules, []},
|
||||||
{registered, [emqx_management_sup]},
|
{registered, [emqx_management_sup]},
|
||||||
{applications, [kernel, stdlib, emqx_plugins, minirest, emqx]},
|
{applications, [kernel, stdlib, emqx_plugins, minirest, emqx]},
|
||||||
|
|
|
@ -37,6 +37,7 @@
|
||||||
-define(PREFIX_RESET, "/configs_reset/").
|
-define(PREFIX_RESET, "/configs_reset/").
|
||||||
-define(ERR_MSG(MSG), list_to_binary(io_lib:format("~p", [MSG]))).
|
-define(ERR_MSG(MSG), list_to_binary(io_lib:format("~p", [MSG]))).
|
||||||
-define(OPTS, #{rawconf_with_defaults => true, override_to => cluster}).
|
-define(OPTS, #{rawconf_with_defaults => true, override_to => cluster}).
|
||||||
|
-define(TAGS, ["Configs"]).
|
||||||
|
|
||||||
-define(EXCLUDES,
|
-define(EXCLUDES,
|
||||||
[
|
[
|
||||||
|
@ -85,7 +86,7 @@ schema("/configs") ->
|
||||||
#{
|
#{
|
||||||
'operationId' => configs,
|
'operationId' => configs,
|
||||||
get => #{
|
get => #{
|
||||||
tags => [conf],
|
tags => ?TAGS,
|
||||||
description =>
|
description =>
|
||||||
<<"Get all the configurations of the specified node, including hot and non-hot updatable items.">>,
|
<<"Get all the configurations of the specified node, including hot and non-hot updatable items.">>,
|
||||||
parameters => [
|
parameters => [
|
||||||
|
@ -111,7 +112,7 @@ schema("/configs_reset/:rootname") ->
|
||||||
#{
|
#{
|
||||||
'operationId' => config_reset,
|
'operationId' => config_reset,
|
||||||
post => #{
|
post => #{
|
||||||
tags => [conf],
|
tags => ?TAGS,
|
||||||
description =>
|
description =>
|
||||||
<<
|
<<
|
||||||
"Reset the config entry specified by the query string parameter `conf_path`.</br>\n"
|
"Reset the config entry specified by the query string parameter `conf_path`.</br>\n"
|
||||||
|
@ -149,12 +150,12 @@ schema("/configs/global_zone") ->
|
||||||
#{
|
#{
|
||||||
'operationId' => global_zone_configs,
|
'operationId' => global_zone_configs,
|
||||||
get => #{
|
get => #{
|
||||||
tags => [conf],
|
tags => ?TAGS,
|
||||||
description => <<"Get the global zone configs">>,
|
description => <<"Get the global zone configs">>,
|
||||||
responses => #{200 => Schema}
|
responses => #{200 => Schema}
|
||||||
},
|
},
|
||||||
put => #{
|
put => #{
|
||||||
tags => [conf],
|
tags => ?TAGS,
|
||||||
description => <<"Update globbal zone configs">>,
|
description => <<"Update globbal zone configs">>,
|
||||||
'requestBody' => Schema,
|
'requestBody' => Schema,
|
||||||
responses => #{
|
responses => #{
|
||||||
|
@ -180,7 +181,7 @@ schema("/configs/global_zone") ->
|
||||||
%% #{
|
%% #{
|
||||||
%% 'operationId' => config,
|
%% 'operationId' => config,
|
||||||
%% get => #{
|
%% get => #{
|
||||||
%% tags => [conf],
|
%% tags => ?TAGS,
|
||||||
%% description => <<"Get config of this limiter">>,
|
%% description => <<"Get config of this limiter">>,
|
||||||
%% parameters => Parameters,
|
%% parameters => Parameters,
|
||||||
%% responses => #{
|
%% responses => #{
|
||||||
|
@ -189,7 +190,7 @@ schema("/configs/global_zone") ->
|
||||||
%% }
|
%% }
|
||||||
%% },
|
%% },
|
||||||
%% put => #{
|
%% put => #{
|
||||||
%% tags => [conf],
|
%% tags => ?TAGS,
|
||||||
%% description => <<"Update config of this limiter">>,
|
%% description => <<"Update config of this limiter">>,
|
||||||
%% parameters => Parameters,
|
%% parameters => Parameters,
|
||||||
%% 'requestBody' => Schema,
|
%% 'requestBody' => Schema,
|
||||||
|
@ -204,7 +205,7 @@ schema(Path) ->
|
||||||
#{
|
#{
|
||||||
'operationId' => config,
|
'operationId' => config,
|
||||||
get => #{
|
get => #{
|
||||||
tags => [conf],
|
tags => ?TAGS,
|
||||||
description => iolist_to_binary([
|
description => iolist_to_binary([
|
||||||
<<"Get the sub-configurations under *">>,
|
<<"Get the sub-configurations under *">>,
|
||||||
RootKey,
|
RootKey,
|
||||||
|
@ -216,7 +217,7 @@ schema(Path) ->
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
put => #{
|
put => #{
|
||||||
tags => [conf],
|
tags => ?TAGS,
|
||||||
description => iolist_to_binary([
|
description => iolist_to_binary([
|
||||||
<<"Update the sub-configurations under *">>,
|
<<"Update the sub-configurations under *">>,
|
||||||
RootKey,
|
RootKey,
|
||||||
|
|
|
@ -342,11 +342,18 @@ list_listeners(get, #{query_string := Query}) ->
|
||||||
{200, listener_status_by_id(NodeL)}.
|
{200, listener_status_by_id(NodeL)}.
|
||||||
|
|
||||||
crud_listeners_by_id(get, #{bindings := #{id := Id0}}) ->
|
crud_listeners_by_id(get, #{bindings := #{id := Id0}}) ->
|
||||||
Listeners = [
|
Listeners =
|
||||||
Conf#{<<"id">> => Id, <<"type">> => Type}
|
[
|
||||||
|| {Id, Type, Conf} <- emqx_listeners:list_raw(),
|
Conf#{
|
||||||
Id =:= Id0
|
<<"id">> => Id,
|
||||||
],
|
<<"type">> => Type,
|
||||||
|
<<"bind">> := iolist_to_binary(
|
||||||
|
emqx_listeners:format_bind(maps:get(<<"bind">>, Conf))
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|| {Id, Type, Conf} <- emqx_listeners:list_raw(),
|
||||||
|
Id =:= Id0
|
||||||
|
],
|
||||||
case Listeners of
|
case Listeners of
|
||||||
[] -> {404, #{code => 'BAD_LISTENER_ID', message => ?LISTENER_NOT_FOUND}};
|
[] -> {404, #{code => 'BAD_LISTENER_ID', message => ?LISTENER_NOT_FOUND}};
|
||||||
[L] -> {200, L}
|
[L] -> {200, L}
|
||||||
|
|
|
@ -177,6 +177,8 @@ format({_Subscriber, Topic, Options}) ->
|
||||||
maps:with([qos, nl, rap, rh], Options)
|
maps:with([qos, nl, rap, rh], Options)
|
||||||
).
|
).
|
||||||
|
|
||||||
|
get_topic(Topic, #{share := <<"$queue">> = Group}) ->
|
||||||
|
filename:join([Group, Topic]);
|
||||||
get_topic(Topic, #{share := Group}) ->
|
get_topic(Topic, #{share := Group}) ->
|
||||||
filename:join([<<"$share">>, Group, Topic]);
|
filename:join([<<"$share">>, Group, Topic]);
|
||||||
get_topic(Topic, _) ->
|
get_topic(Topic, _) ->
|
||||||
|
|
|
@ -30,6 +30,13 @@
|
||||||
|
|
||||||
-export([authorize/3]).
|
-export([authorize/3]).
|
||||||
|
|
||||||
|
%% Internal exports (RPC)
|
||||||
|
-export([
|
||||||
|
do_update/4,
|
||||||
|
do_delete/1,
|
||||||
|
do_create_app/3
|
||||||
|
]).
|
||||||
|
|
||||||
-define(APP, emqx_app).
|
-define(APP, emqx_app).
|
||||||
|
|
||||||
-record(?APP, {
|
-record(?APP, {
|
||||||
|
@ -58,40 +65,37 @@ create(Name, Enable, ExpiredAt, Desc) ->
|
||||||
end.
|
end.
|
||||||
|
|
||||||
read(Name) ->
|
read(Name) ->
|
||||||
Fun = fun() ->
|
case mnesia:dirty_read(?APP, Name) of
|
||||||
case mnesia:read(?APP, Name) of
|
[App] -> {ok, to_map(App)};
|
||||||
[] -> mnesia:abort(not_found);
|
[] -> {error, not_found}
|
||||||
[App] -> to_map(App)
|
end.
|
||||||
end
|
|
||||||
end,
|
|
||||||
trans(Fun).
|
|
||||||
|
|
||||||
update(Name, Enable, ExpiredAt, Desc) ->
|
update(Name, Enable, ExpiredAt, Desc) ->
|
||||||
Fun = fun() ->
|
trans(fun ?MODULE:do_update/4, [Name, Enable, ExpiredAt, Desc]).
|
||||||
case mnesia:read(?APP, Name, write) of
|
|
||||||
[] ->
|
do_update(Name, Enable, ExpiredAt, Desc) ->
|
||||||
mnesia:abort(not_found);
|
case mnesia:read(?APP, Name, write) of
|
||||||
[App0 = #?APP{enable = Enable0, desc = Desc0}] ->
|
[] ->
|
||||||
App =
|
mnesia:abort(not_found);
|
||||||
App0#?APP{
|
[App0 = #?APP{enable = Enable0, desc = Desc0}] ->
|
||||||
expired_at = ExpiredAt,
|
App =
|
||||||
enable = ensure_not_undefined(Enable, Enable0),
|
App0#?APP{
|
||||||
desc = ensure_not_undefined(Desc, Desc0)
|
expired_at = ExpiredAt,
|
||||||
},
|
enable = ensure_not_undefined(Enable, Enable0),
|
||||||
ok = mnesia:write(App),
|
desc = ensure_not_undefined(Desc, Desc0)
|
||||||
to_map(App)
|
},
|
||||||
end
|
ok = mnesia:write(App),
|
||||||
end,
|
to_map(App)
|
||||||
trans(Fun).
|
end.
|
||||||
|
|
||||||
delete(Name) ->
|
delete(Name) ->
|
||||||
Fun = fun() ->
|
trans(fun ?MODULE:do_delete/1, [Name]).
|
||||||
case mnesia:read(?APP, Name) of
|
|
||||||
[] -> mnesia:abort(not_found);
|
do_delete(Name) ->
|
||||||
[_App] -> mnesia:delete({?APP, Name})
|
case mnesia:read(?APP, Name) of
|
||||||
end
|
[] -> mnesia:abort(not_found);
|
||||||
end,
|
[_App] -> mnesia:delete({?APP, Name})
|
||||||
trans(Fun).
|
end.
|
||||||
|
|
||||||
list() ->
|
list() ->
|
||||||
to_map(ets:match_object(?APP, #?APP{_ = '_'})).
|
to_map(ets:match_object(?APP, #?APP{_ = '_'})).
|
||||||
|
@ -118,8 +122,8 @@ authorize(_Path, ApiKey, ApiSecret) ->
|
||||||
|
|
||||||
find_by_api_key(ApiKey) ->
|
find_by_api_key(ApiKey) ->
|
||||||
Fun = fun() -> mnesia:match_object(#?APP{api_key = ApiKey, _ = '_'}) end,
|
Fun = fun() -> mnesia:match_object(#?APP{api_key = ApiKey, _ = '_'}) end,
|
||||||
case trans(Fun) of
|
case mria:ro_transaction(?COMMON_SHARD, Fun) of
|
||||||
{ok, [#?APP{api_secret_hash = SecretHash, enable = Enable, expired_at = ExpiredAt}]} ->
|
{atomic, [#?APP{api_secret_hash = SecretHash, enable = Enable, expired_at = ExpiredAt}]} ->
|
||||||
{ok, Enable, ExpiredAt, SecretHash};
|
{ok, Enable, ExpiredAt, SecretHash};
|
||||||
_ ->
|
_ ->
|
||||||
{error, "not_found"}
|
{error, "not_found"}
|
||||||
|
@ -163,23 +167,24 @@ create_app(Name, Enable, ExpiredAt, Desc) ->
|
||||||
end.
|
end.
|
||||||
|
|
||||||
create_app(App = #?APP{api_key = ApiKey, name = Name}) ->
|
create_app(App = #?APP{api_key = ApiKey, name = Name}) ->
|
||||||
trans(fun() ->
|
trans(fun ?MODULE:do_create_app/3, [App, ApiKey, Name]).
|
||||||
case mnesia:read(?APP, Name) of
|
|
||||||
[_] ->
|
|
||||||
mnesia:abort(name_already_existed);
|
|
||||||
[] ->
|
|
||||||
case mnesia:match_object(?APP, #?APP{api_key = ApiKey, _ = '_'}, read) of
|
|
||||||
[] ->
|
|
||||||
ok = mnesia:write(App),
|
|
||||||
to_map(App);
|
|
||||||
_ ->
|
|
||||||
mnesia:abort(api_key_already_existed)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end).
|
|
||||||
|
|
||||||
trans(Fun) ->
|
do_create_app(App, ApiKey, Name) ->
|
||||||
case mria:transaction(?COMMON_SHARD, Fun) of
|
case mnesia:read(?APP, Name) of
|
||||||
|
[_] ->
|
||||||
|
mnesia:abort(name_already_existed);
|
||||||
|
[] ->
|
||||||
|
case mnesia:match_object(?APP, #?APP{api_key = ApiKey, _ = '_'}, read) of
|
||||||
|
[] ->
|
||||||
|
ok = mnesia:write(App),
|
||||||
|
to_map(App);
|
||||||
|
_ ->
|
||||||
|
mnesia:abort(api_key_already_existed)
|
||||||
|
end
|
||||||
|
end.
|
||||||
|
|
||||||
|
trans(Fun, Args) ->
|
||||||
|
case mria:transaction(?COMMON_SHARD, Fun, Args) of
|
||||||
{atomic, Res} -> {ok, Res};
|
{atomic, Res} -> {ok, Res};
|
||||||
{aborted, Error} -> {error, Error}
|
{aborted, Error} -> {error, Error}
|
||||||
end.
|
end.
|
||||||
|
|
|
@ -20,5 +20,5 @@
|
||||||
%% Interval for reporting telemetry data, Default: 7d
|
%% Interval for reporting telemetry data, Default: 7d
|
||||||
-define(REPORT_INTERVAL, 604800).
|
-define(REPORT_INTERVAL, 604800).
|
||||||
|
|
||||||
-define(API_TAG_MQTT, [<<"mqtt">>]).
|
-define(API_TAG_MQTT, [<<"MQTT">>]).
|
||||||
-define(API_SCHEMA_MODULE, emqx_modules_schema).
|
-define(API_SCHEMA_MODULE, emqx_modules_schema).
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
%% -*- mode: erlang -*-
|
%% -*- mode: erlang -*-
|
||||||
{application, emqx_modules, [
|
{application, emqx_modules, [
|
||||||
{description, "EMQX Modules"},
|
{description, "EMQX Modules"},
|
||||||
{vsn, "5.0.2"},
|
{vsn, "5.0.3"},
|
||||||
{modules, []},
|
{modules, []},
|
||||||
{applications, [kernel, stdlib, emqx]},
|
{applications, [kernel, stdlib, emqx]},
|
||||||
{mod, {emqx_modules_app, []}},
|
{mod, {emqx_modules_app, []}},
|
||||||
|
|
|
@ -54,6 +54,11 @@
|
||||||
|
|
||||||
-export([official_version/1]).
|
-export([official_version/1]).
|
||||||
|
|
||||||
|
%% Internal exports (RPC)
|
||||||
|
-export([
|
||||||
|
do_ensure_uuids/0
|
||||||
|
]).
|
||||||
|
|
||||||
%% internal export
|
%% internal export
|
||||||
-export([read_raw_build_info/0]).
|
-export([read_raw_build_info/0]).
|
||||||
|
|
||||||
|
@ -530,54 +535,56 @@ bin(B) when is_binary(B) ->
|
||||||
B.
|
B.
|
||||||
|
|
||||||
ensure_uuids() ->
|
ensure_uuids() ->
|
||||||
Txn = fun() ->
|
{atomic, {NodeUUID, ClusterUUID}} = mria:transaction(
|
||||||
NodeUUID =
|
?TELEMETRY_SHARD, fun ?MODULE:do_ensure_uuids/0
|
||||||
case mnesia:wread({?TELEMETRY, node()}) of
|
),
|
||||||
[] ->
|
|
||||||
NodeUUID0 =
|
|
||||||
case get_uuid_from_file(node) of
|
|
||||||
{ok, NUUID} -> NUUID;
|
|
||||||
undefined -> generate_uuid()
|
|
||||||
end,
|
|
||||||
mnesia:write(
|
|
||||||
?TELEMETRY,
|
|
||||||
#telemetry{
|
|
||||||
id = node(),
|
|
||||||
uuid = NodeUUID0
|
|
||||||
},
|
|
||||||
write
|
|
||||||
),
|
|
||||||
NodeUUID0;
|
|
||||||
[#telemetry{uuid = NodeUUID0}] ->
|
|
||||||
NodeUUID0
|
|
||||||
end,
|
|
||||||
ClusterUUID =
|
|
||||||
case mnesia:wread({?TELEMETRY, ?CLUSTER_UUID_KEY}) of
|
|
||||||
[] ->
|
|
||||||
ClusterUUID0 =
|
|
||||||
case get_uuid_from_file(cluster) of
|
|
||||||
{ok, CUUID} -> CUUID;
|
|
||||||
undefined -> generate_uuid()
|
|
||||||
end,
|
|
||||||
mnesia:write(
|
|
||||||
?TELEMETRY,
|
|
||||||
#telemetry{
|
|
||||||
id = ?CLUSTER_UUID_KEY,
|
|
||||||
uuid = ClusterUUID0
|
|
||||||
},
|
|
||||||
write
|
|
||||||
),
|
|
||||||
ClusterUUID0;
|
|
||||||
[#telemetry{uuid = ClusterUUID0}] ->
|
|
||||||
ClusterUUID0
|
|
||||||
end,
|
|
||||||
{NodeUUID, ClusterUUID}
|
|
||||||
end,
|
|
||||||
{atomic, {NodeUUID, ClusterUUID}} = mria:transaction(?TELEMETRY_SHARD, Txn),
|
|
||||||
save_uuid_to_file(NodeUUID, node),
|
save_uuid_to_file(NodeUUID, node),
|
||||||
save_uuid_to_file(ClusterUUID, cluster),
|
save_uuid_to_file(ClusterUUID, cluster),
|
||||||
{NodeUUID, ClusterUUID}.
|
{NodeUUID, ClusterUUID}.
|
||||||
|
|
||||||
|
do_ensure_uuids() ->
|
||||||
|
NodeUUID =
|
||||||
|
case mnesia:wread({?TELEMETRY, node()}) of
|
||||||
|
[] ->
|
||||||
|
NodeUUID0 =
|
||||||
|
case get_uuid_from_file(node) of
|
||||||
|
{ok, NUUID} -> NUUID;
|
||||||
|
undefined -> generate_uuid()
|
||||||
|
end,
|
||||||
|
mnesia:write(
|
||||||
|
?TELEMETRY,
|
||||||
|
#telemetry{
|
||||||
|
id = node(),
|
||||||
|
uuid = NodeUUID0
|
||||||
|
},
|
||||||
|
write
|
||||||
|
),
|
||||||
|
NodeUUID0;
|
||||||
|
[#telemetry{uuid = NodeUUID0}] ->
|
||||||
|
NodeUUID0
|
||||||
|
end,
|
||||||
|
ClusterUUID =
|
||||||
|
case mnesia:wread({?TELEMETRY, ?CLUSTER_UUID_KEY}) of
|
||||||
|
[] ->
|
||||||
|
ClusterUUID0 =
|
||||||
|
case get_uuid_from_file(cluster) of
|
||||||
|
{ok, CUUID} -> CUUID;
|
||||||
|
undefined -> generate_uuid()
|
||||||
|
end,
|
||||||
|
mnesia:write(
|
||||||
|
?TELEMETRY,
|
||||||
|
#telemetry{
|
||||||
|
id = ?CLUSTER_UUID_KEY,
|
||||||
|
uuid = ClusterUUID0
|
||||||
|
},
|
||||||
|
write
|
||||||
|
),
|
||||||
|
ClusterUUID0;
|
||||||
|
[#telemetry{uuid = ClusterUUID0}] ->
|
||||||
|
ClusterUUID0
|
||||||
|
end,
|
||||||
|
{NodeUUID, ClusterUUID}.
|
||||||
|
|
||||||
get_uuid_from_file(Type) ->
|
get_uuid_from_file(Type) ->
|
||||||
Path = uuid_file_path(Type),
|
Path = uuid_file_path(Type),
|
||||||
case file:read_file(Path) of
|
case file:read_file(Path) of
|
||||||
|
|
|
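The hunk above, and the emqx_psk and emqx_retainer hunks further down, all apply the same refactoring: the anonymous fun previously passed to `mria:transaction/2,3` is moved into an exported `do_*` function (listed under `%% Internal exports (RPC)`) and referenced as a fully-qualified fun such as `fun ?MODULE:do_ensure_uuids/0`, so the transaction body has a stable, exported entry point. A minimal sketch of the pattern follows; the module, shard, and table names are illustrative only and not part of this commit.

```
-module(my_kv).

-export([store/2]).

%% Internal exports (RPC)
-export([do_store/2]).

store(Key, Val) ->
    %% fully-qualified fun: the call is resolved through the exported function
    {atomic, ok} = mria:transaction(my_shard, fun ?MODULE:do_store/2, [Key, Val]),
    ok.

do_store(Key, Val) ->
    %% runs inside the transaction; writes a {my_tab, Key, Val} record
    mnesia:write({my_tab, Key, Val}).
```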
@ -39,7 +39,7 @@
|
||||||
sql_data/1
|
sql_data/1
|
||||||
]).
|
]).
|
||||||
|
|
||||||
-define(EX_PLACE_HOLDER, "(\\$\\{[a-zA-Z0-9\\._]+\\})").
|
-define(EX_PLACE_HOLDER, "(\\$\\{[a-zA-Z0-9\\._]+\\}|\"\\$\\{[a-zA-Z0-9\\._]+\\}\")").
|
||||||
%% Space and CRLF
|
%% Space and CRLF
|
||||||
-define(EX_WITHE_CHARS, "\\s").
|
-define(EX_WITHE_CHARS, "\\s").
|
||||||
|
|
||||||
|
@ -235,7 +235,9 @@ get_phld_var(Phld, Data) ->
|
||||||
emqx_rule_maps:nested_get(Phld, Data).
|
emqx_rule_maps:nested_get(Phld, Data).
|
||||||
|
|
||||||
preproc_var_re(#{placeholders := PHs}) ->
|
preproc_var_re(#{placeholders := PHs}) ->
|
||||||
"(" ++ string:join([ph_to_re(PH) || PH <- PHs], "|") ++ ")";
|
Res = [ph_to_re(PH) || PH <- PHs],
|
||||||
|
QuoteRes = ["\"" ++ Re ++ "\"" || Re <- Res],
|
||||||
|
"(" ++ string:join(Res ++ QuoteRes, "|") ++ ")";
|
||||||
preproc_var_re(#{}) ->
|
preproc_var_re(#{}) ->
|
||||||
?EX_PLACE_HOLDER.
|
?EX_PLACE_HOLDER.
|
||||||
|
|
||||||
|
@ -292,7 +294,9 @@ parse_nested(Attr) ->
|
||||||
end.
|
end.
|
||||||
|
|
||||||
unwrap(<<"${", Val/binary>>) ->
|
unwrap(<<"${", Val/binary>>) ->
|
||||||
binary:part(Val, {0, byte_size(Val) - 1}).
|
binary:part(Val, {0, byte_size(Val) - 1});
|
||||||
|
unwrap(<<"\"${", Val/binary>>) ->
|
||||||
|
binary:part(Val, {0, byte_size(Val) - 2}).
|
||||||
|
|
||||||
quote_sql(Str) ->
|
quote_sql(Str) ->
|
||||||
quote(Str, <<"\\\\'">>).
|
quote(Str, <<"\\\\'">>).
|
||||||
|
|
|
@ -150,6 +150,21 @@ t_preproc_sql6(_) ->
|
||||||
emqx_placeholder:proc_sql(ParamsTokens, Selected)
|
emqx_placeholder:proc_sql(ParamsTokens, Selected)
|
||||||
).
|
).
|
||||||
|
|
||||||
|
t_preproc_sql7(_) ->
|
||||||
|
Selected = #{a => <<"a">>, b => <<"b">>},
|
||||||
|
{PrepareStatement, ParamsTokens} = emqx_placeholder:preproc_sql(
|
||||||
|
<<"a:\"${a}\",b:\"${b}\"">>,
|
||||||
|
#{
|
||||||
|
replace_with => '$n',
|
||||||
|
placeholders => [<<"${a}">>]
|
||||||
|
}
|
||||||
|
),
|
||||||
|
?assertEqual(<<"a:$1,b:\"${b}\"">>, PrepareStatement),
|
||||||
|
?assertEqual(
|
||||||
|
[<<"a">>],
|
||||||
|
emqx_placeholder:proc_sql(ParamsTokens, Selected)
|
||||||
|
).
|
||||||
|
|
||||||
t_preproc_tmpl_deep(_) ->
|
t_preproc_tmpl_deep(_) ->
|
||||||
Selected = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}},
|
Selected = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}},
|
||||||
|
|
||||||
|
|
|
@ -2,7 +2,7 @@
|
||||||
{application, emqx_psk, [
|
{application, emqx_psk, [
|
||||||
{description, "EMQX PSK"},
|
{description, "EMQX PSK"},
|
||||||
% strict semver, bump manually!
|
% strict semver, bump manually!
|
||||||
{vsn, "5.0.0"},
|
{vsn, "5.0.1"},
|
||||||
{modules, []},
|
{modules, []},
|
||||||
{registered, [emqx_psk_sup]},
|
{registered, [emqx_psk_sup]},
|
||||||
{applications, [kernel, stdlib]},
|
{applications, [kernel, stdlib]},
|
||||||
|
|
|
@ -43,6 +43,11 @@
|
||||||
code_change/3
|
code_change/3
|
||||||
]).
|
]).
|
||||||
|
|
||||||
|
%% Internal exports (RPC)
|
||||||
|
-export([
|
||||||
|
insert_psks/1
|
||||||
|
]).
|
||||||
|
|
||||||
-record(psk_entry, {
|
-record(psk_entry, {
|
||||||
psk_id :: binary(),
|
psk_id :: binary(),
|
||||||
shared_secret :: binary(),
|
shared_secret :: binary(),
|
||||||
|
@ -199,10 +204,10 @@ import_psks(SrcFile) ->
|
||||||
import_psks(Io, Delimiter, ChunkSize, NChunk) ->
|
import_psks(Io, Delimiter, ChunkSize, NChunk) ->
|
||||||
case get_psks(Io, Delimiter, ChunkSize) of
|
case get_psks(Io, Delimiter, ChunkSize) of
|
||||||
{ok, Entries} ->
|
{ok, Entries} ->
|
||||||
_ = trans(fun insert_psks/1, [Entries]),
|
_ = trans(fun ?MODULE:insert_psks/1, [Entries]),
|
||||||
import_psks(Io, Delimiter, ChunkSize, NChunk + 1);
|
import_psks(Io, Delimiter, ChunkSize, NChunk + 1);
|
||||||
{eof, Entries} ->
|
{eof, Entries} ->
|
||||||
_ = trans(fun insert_psks/1, [Entries]),
|
_ = trans(fun ?MODULE:insert_psks/1, [Entries]),
|
||||||
ok;
|
ok;
|
||||||
{error, {bad_format, {line, N}}} ->
|
{error, {bad_format, {line, N}}} ->
|
||||||
{error, {bad_format, {line, NChunk * ChunkSize + N}}};
|
{error, {bad_format, {line, NChunk * ChunkSize + N}}};
|
||||||
|
|
|
@ -2,7 +2,7 @@
|
||||||
{application, emqx_retainer, [
|
{application, emqx_retainer, [
|
||||||
{description, "EMQX Retainer"},
|
{description, "EMQX Retainer"},
|
||||||
% strict semver, bump manually!
|
% strict semver, bump manually!
|
||||||
{vsn, "5.0.3"},
|
{vsn, "5.0.4"},
|
||||||
{modules, []},
|
{modules, []},
|
||||||
{registered, [emqx_retainer_sup]},
|
{registered, [emqx_retainer_sup]},
|
||||||
{applications, [kernel, stdlib, emqx]},
|
{applications, [kernel, stdlib, emqx]},
|
||||||
|
|
|
@ -36,6 +36,15 @@
|
||||||
size/1
|
size/1
|
||||||
]).
|
]).
|
||||||
|
|
||||||
|
%% Internal exports (RPC)
|
||||||
|
-export([
|
||||||
|
do_store_retained/1,
|
||||||
|
do_clear_expired/0,
|
||||||
|
do_delete_message/1,
|
||||||
|
do_populate_index_meta/1,
|
||||||
|
do_reindex_batch/2
|
||||||
|
]).
|
||||||
|
|
||||||
%% Management API:
|
%% Management API:
|
||||||
-export([topics/0]).
|
-export([topics/0]).
|
||||||
|
|
||||||
|
@ -126,26 +135,8 @@ create_table(Table, RecordName, Attributes, Type, StorageType) ->
|
||||||
ok
|
ok
|
||||||
end.
|
end.
|
||||||
|
|
||||||
store_retained(_, #message{topic = Topic} = Msg) ->
|
store_retained(_, Msg = #message{topic = Topic}) ->
|
||||||
ExpiryTime = emqx_retainer:get_expiry_time(Msg),
|
case mria:transaction(?RETAINER_SHARD, fun ?MODULE:do_store_retained/1, [Msg]) of
|
||||||
Tokens = topic_to_tokens(Topic),
|
|
||||||
Fun =
|
|
||||||
case is_table_full() of
|
|
||||||
false ->
|
|
||||||
fun() ->
|
|
||||||
store_retained(db_indices(write), Msg, Tokens, ExpiryTime)
|
|
||||||
end;
|
|
||||||
_ ->
|
|
||||||
fun() ->
|
|
||||||
case mnesia:read(?TAB_MESSAGE, Tokens, write) of
|
|
||||||
[_] ->
|
|
||||||
store_retained(db_indices(write), Msg, Tokens, ExpiryTime);
|
|
||||||
[] ->
|
|
||||||
mnesia:abort(table_is_full)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end,
|
|
||||||
case mria:transaction(?RETAINER_SHARD, Fun) of
|
|
||||||
{atomic, ok} ->
|
{atomic, ok} ->
|
||||||
?tp(debug, message_retained, #{topic => Topic}),
|
?tp(debug, message_retained, #{topic => Topic}),
|
||||||
ok;
|
ok;
|
||||||
|
@ -157,7 +148,26 @@ store_retained(_, #message{topic = Topic} = Msg) ->
|
||||||
})
|
})
|
||||||
end.
|
end.
|
||||||
|
|
||||||
|
do_store_retained(#message{topic = Topic} = Msg) ->
|
||||||
|
ExpiryTime = emqx_retainer:get_expiry_time(Msg),
|
||||||
|
Tokens = topic_to_tokens(Topic),
|
||||||
|
case is_table_full() of
|
||||||
|
false ->
|
||||||
|
store_retained(db_indices(write), Msg, Tokens, ExpiryTime);
|
||||||
|
_ ->
|
||||||
|
case mnesia:read(?TAB_MESSAGE, Tokens, write) of
|
||||||
|
[_] ->
|
||||||
|
store_retained(db_indices(write), Msg, Tokens, ExpiryTime);
|
||||||
|
[] ->
|
||||||
|
mnesia:abort(table_is_full)
|
||||||
|
end
|
||||||
|
end.
|
||||||
|
|
||||||
clear_expired(_) ->
|
clear_expired(_) ->
|
||||||
|
{atomic, _} = mria:transaction(?RETAINER_SHARD, fun ?MODULE:do_clear_expired/0),
|
||||||
|
ok.
|
||||||
|
|
||||||
|
do_clear_expired() ->
|
||||||
NowMs = erlang:system_time(millisecond),
|
NowMs = erlang:system_time(millisecond),
|
||||||
QH = qlc:q([
|
QH = qlc:q([
|
||||||
TopicTokens
|
TopicTokens
|
||||||
|
@ -167,36 +177,29 @@ clear_expired(_) ->
|
||||||
} <- mnesia:table(?TAB_MESSAGE, [{lock, write}]),
|
} <- mnesia:table(?TAB_MESSAGE, [{lock, write}]),
|
||||||
(ExpiryTime =/= 0) and (ExpiryTime < NowMs)
|
(ExpiryTime =/= 0) and (ExpiryTime < NowMs)
|
||||||
]),
|
]),
|
||||||
Fun = fun() ->
|
QC = qlc:cursor(QH),
|
||||||
QC = qlc:cursor(QH),
|
clear_batch(db_indices(write), QC).
|
||||||
clear_batch(db_indices(write), QC)
|
|
||||||
end,
|
|
||||||
{atomic, _} = mria:transaction(?RETAINER_SHARD, Fun),
|
|
||||||
ok.
|
|
||||||
|
|
||||||
delete_message(_, Topic) ->
|
delete_message(_, Topic) ->
|
||||||
Tokens = topic_to_tokens(Topic),
|
{atomic, _} = mria:transaction(?RETAINER_SHARD, fun ?MODULE:do_delete_message/1, [Topic]),
|
||||||
DeleteFun =
|
|
||||||
case emqx_topic:wildcard(Topic) of
|
|
||||||
false ->
|
|
||||||
fun() ->
|
|
||||||
ok = delete_message_by_topic(Tokens, db_indices(write))
|
|
||||||
end;
|
|
||||||
true ->
|
|
||||||
fun() ->
|
|
||||||
QH = topic_search_table(Tokens),
|
|
||||||
qlc:fold(
|
|
||||||
fun(TopicTokens, _) ->
|
|
||||||
ok = delete_message_by_topic(TopicTokens, db_indices(write))
|
|
||||||
end,
|
|
||||||
undefined,
|
|
||||||
QH
|
|
||||||
)
|
|
||||||
end
|
|
||||||
end,
|
|
||||||
{atomic, _} = mria:transaction(?RETAINER_SHARD, DeleteFun),
|
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
|
do_delete_message(Topic) ->
|
||||||
|
Tokens = topic_to_tokens(Topic),
|
||||||
|
case emqx_topic:wildcard(Topic) of
|
||||||
|
false ->
|
||||||
|
ok = delete_message_by_topic(Tokens, db_indices(write));
|
||||||
|
true ->
|
||||||
|
QH = topic_search_table(Tokens),
|
||||||
|
qlc:fold(
|
||||||
|
fun(TopicTokens, _) ->
|
||||||
|
ok = delete_message_by_topic(TopicTokens, db_indices(write))
|
||||||
|
end,
|
||||||
|
undefined,
|
||||||
|
QH
|
||||||
|
)
|
||||||
|
end.
|
||||||
|
|
||||||
read_message(_, Topic) ->
|
read_message(_, Topic) ->
|
||||||
{ok, read_messages(Topic)}.
|
{ok, read_messages(Topic)}.
|
||||||
|
|
||||||
|
@ -267,16 +270,11 @@ reindex(Force, StatusFun) ->
|
||||||
reindex(config_indices(), Force, StatusFun).
|
reindex(config_indices(), Force, StatusFun).
|
||||||
|
|
||||||
reindex_status() ->
|
reindex_status() ->
|
||||||
Fun = fun() ->
|
case mnesia:dirty_read(?TAB_INDEX_META, ?META_KEY) of
|
||||||
mnesia:read(?TAB_INDEX_META, ?META_KEY)
|
[#retained_index_meta{reindexing = true}] ->
|
||||||
end,
|
|
||||||
case mria:transaction(?RETAINER_SHARD, Fun) of
|
|
||||||
{atomic, [#retained_index_meta{reindexing = true}]} ->
|
|
||||||
true;
|
true;
|
||||||
{atomic, _} ->
|
_ ->
|
||||||
false;
|
false
|
||||||
{aborted, Reason} ->
|
|
||||||
{error, Reason}
|
|
||||||
end.
|
end.
|
||||||
|
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
|
@ -439,37 +437,7 @@ config_indices() ->
|
||||||
|
|
||||||
populate_index_meta() ->
|
populate_index_meta() ->
|
||||||
ConfigIndices = config_indices(),
|
ConfigIndices = config_indices(),
|
||||||
Fun = fun() ->
|
case mria:transaction(?RETAINER_SHARD, fun ?MODULE:do_populate_index_meta/1, [ConfigIndices]) of
|
||||||
case mnesia:read(?TAB_INDEX_META, ?META_KEY, write) of
|
|
||||||
[
|
|
||||||
#retained_index_meta{
|
|
||||||
read_indices = ReadIndices,
|
|
||||||
write_indices = WriteIndices,
|
|
||||||
reindexing = Reindexing
|
|
||||||
}
|
|
||||||
] ->
|
|
||||||
case {ReadIndices, WriteIndices, Reindexing} of
|
|
||||||
{_, _, true} ->
|
|
||||||
ok;
|
|
||||||
{ConfigIndices, ConfigIndices, false} ->
|
|
||||||
ok;
|
|
||||||
{DBWriteIndices, DBReadIndices, false} ->
|
|
||||||
{error, DBWriteIndices, DBReadIndices}
|
|
||||||
end;
|
|
||||||
[] ->
|
|
||||||
mnesia:write(
|
|
||||||
?TAB_INDEX_META,
|
|
||||||
#retained_index_meta{
|
|
||||||
key = ?META_KEY,
|
|
||||||
read_indices = ConfigIndices,
|
|
||||||
write_indices = ConfigIndices,
|
|
||||||
reindexing = false
|
|
||||||
},
|
|
||||||
write
|
|
||||||
)
|
|
||||||
end
|
|
||||||
end,
|
|
||||||
case mria:transaction(?RETAINER_SHARD, Fun) of
|
|
||||||
{atomic, ok} ->
|
{atomic, ok} ->
|
||||||
ok;
|
ok;
|
||||||
{atomic, {error, DBWriteIndices, DBReadIndices}} ->
|
{atomic, {error, DBWriteIndices, DBReadIndices}} ->
|
||||||
|
@ -488,6 +456,36 @@ populate_index_meta() ->
|
||||||
{error, Reason}
|
{error, Reason}
|
||||||
end.
|
end.
|
||||||
|
|
||||||
|
do_populate_index_meta(ConfigIndices) ->
|
||||||
|
case mnesia:read(?TAB_INDEX_META, ?META_KEY, write) of
|
||||||
|
[
|
||||||
|
#retained_index_meta{
|
||||||
|
read_indices = ReadIndices,
|
||||||
|
write_indices = WriteIndices,
|
||||||
|
reindexing = Reindexing
|
||||||
|
}
|
||||||
|
] ->
|
||||||
|
case {ReadIndices, WriteIndices, Reindexing} of
|
||||||
|
{_, _, true} ->
|
||||||
|
ok;
|
||||||
|
{ConfigIndices, ConfigIndices, false} ->
|
||||||
|
ok;
|
||||||
|
{DBWriteIndices, DBReadIndices, false} ->
|
||||||
|
{error, DBWriteIndices, DBReadIndices}
|
||||||
|
end;
|
||||||
|
[] ->
|
||||||
|
mnesia:write(
|
||||||
|
?TAB_INDEX_META,
|
||||||
|
#retained_index_meta{
|
||||||
|
key = ?META_KEY,
|
||||||
|
read_indices = ConfigIndices,
|
||||||
|
write_indices = ConfigIndices,
|
||||||
|
reindexing = false
|
||||||
|
},
|
||||||
|
write
|
||||||
|
)
|
||||||
|
end.
|
||||||
|
|
||||||
db_indices(Type) ->
|
db_indices(Type) ->
|
||||||
case mnesia:read(?TAB_INDEX_META, ?META_KEY) of
|
case mnesia:read(?TAB_INDEX_META, ?META_KEY) of
|
||||||
[#retained_index_meta{read_indices = ReadIndices, write_indices = WriteIndices}] ->
|
[#retained_index_meta{read_indices = ReadIndices, write_indices = WriteIndices}] ->
|
||||||
|
@ -533,6 +531,7 @@ reindex(NewIndices, Force, StatusFun) when
|
||||||
end.
|
end.
|
||||||
|
|
||||||
try_start_reindex(NewIndices, true) ->
|
try_start_reindex(NewIndices, true) ->
|
||||||
|
%% Note: we don't expect reindexing during upgrade, so this function is internal
|
||||||
mria:transaction(
|
mria:transaction(
|
||||||
?RETAINER_SHARD,
|
?RETAINER_SHARD,
|
||||||
fun() -> start_reindex(NewIndices) end
|
fun() -> start_reindex(NewIndices) end
|
||||||
|
@ -566,6 +565,7 @@ start_reindex(NewIndices) ->
|
||||||
).
|
).
|
||||||
|
|
||||||
finalize_reindex() ->
|
finalize_reindex() ->
|
||||||
|
%% Note: we don't expect reindexing during upgrade, so this function is internal
|
||||||
{atomic, ok} = mria:transaction(
|
{atomic, ok} = mria:transaction(
|
||||||
?RETAINER_SHARD,
|
?RETAINER_SHARD,
|
||||||
fun() ->
|
fun() ->
|
||||||
|
@ -601,16 +601,7 @@ reindex_topic(Indices, Topic) ->
|
||||||
end.
|
end.
|
||||||
|
|
||||||
reindex_batch(QC, Done, StatusFun) ->
|
reindex_batch(QC, Done, StatusFun) ->
|
||||||
Fun = fun() ->
|
case mria:transaction(?RETAINER_SHARD, fun ?MODULE:do_reindex_batch/2, [QC, Done]) of
|
||||||
Indices = db_indices(write),
|
|
||||||
{Status, Topics} = qlc_next_answers(QC, ?REINDEX_BATCH_SIZE),
|
|
||||||
ok = lists:foreach(
|
|
||||||
fun(Topic) -> reindex_topic(Indices, Topic) end,
|
|
||||||
Topics
|
|
||||||
),
|
|
||||||
{Status, Done + length(Topics)}
|
|
||||||
end,
|
|
||||||
case mria:transaction(?RETAINER_SHARD, Fun) of
|
|
||||||
{atomic, {more, NewDone}} ->
|
{atomic, {more, NewDone}} ->
|
||||||
_ = StatusFun(NewDone),
|
_ = StatusFun(NewDone),
|
||||||
reindex_batch(QC, NewDone, StatusFun);
|
reindex_batch(QC, NewDone, StatusFun);
|
||||||
|
@ -625,6 +616,15 @@ reindex_batch(QC, Done, StatusFun) ->
|
||||||
{error, Reason}
|
{error, Reason}
|
||||||
end.
|
end.
|
||||||
|
|
||||||
|
do_reindex_batch(QC, Done) ->
|
||||||
|
Indices = db_indices(write),
|
||||||
|
{Status, Topics} = qlc_next_answers(QC, ?REINDEX_BATCH_SIZE),
|
||||||
|
ok = lists:foreach(
|
||||||
|
fun(Topic) -> reindex_topic(Indices, Topic) end,
|
||||||
|
Topics
|
||||||
|
),
|
||||||
|
{Status, Done + length(Topics)}.
|
||||||
|
|
||||||
wait_dispatch_complete(Timeout) ->
|
wait_dispatch_complete(Timeout) ->
|
||||||
Nodes = mria_mnesia:running_nodes(),
|
Nodes = mria_mnesia:running_nodes(),
|
||||||
{Results, []} = emqx_retainer_proto_v1:wait_dispatch_complete(Nodes, Timeout),
|
{Results, []} = emqx_retainer_proto_v1:wait_dispatch_complete(Nodes, Timeout),
|
||||||
|
|
|
@ -45,9 +45,7 @@ retainer(["reindex", "status"]) ->
|
||||||
true ->
|
true ->
|
||||||
?PRINT_MSG("Reindexing is in progress~n");
|
?PRINT_MSG("Reindexing is in progress~n");
|
||||||
false ->
|
false ->
|
||||||
?PRINT_MSG("Reindexing is not running~n");
|
?PRINT_MSG("Reindexing is not running~n")
|
||||||
{error, Reason} ->
|
|
||||||
?PRINT("Can't get reindex status: ~p~n", [Reason])
|
|
||||||
end;
|
end;
|
||||||
retainer(["reindex", "start"]) ->
|
retainer(["reindex", "start"]) ->
|
||||||
retainer(["reindex", "start", "false"]);
|
retainer(["reindex", "start", "false"]);
|
||||||
|
|
|
@ -0,0 +1,77 @@
|
||||||
|
emqx_license_schema {
|
||||||
|
license_root {
|
||||||
|
desc {
|
||||||
|
en: "Defines the EMQX Enterprise license. \n\n"
|
||||||
|
"A license is either a `key` or a `file`.\n"
|
||||||
|
"When `key` and `file` are both configured, `key` is used.\n"
|
||||||
|
"\n"
|
||||||
|
"EMQX comes with a default trial license. For production use, please \n"
|
||||||
|
"visit https://www.emqx.com/apply-licenses/emqx to apply."
|
||||||
|
zh: "EMQX企业许可证。\n"
|
||||||
|
"许可证是一个 `key` 或一个 `file`。\n"
|
||||||
|
"当 `key` 和 `file` 同时被配置时,优先使用 `key`。\n"
|
||||||
|
"\n"
|
||||||
|
"EMQX 自带一个默认的试用许可证,若需要在生产环境部署,\n"
|
||||||
|
"请访问 https://www.emqx.com/apply-licenses/emqx 来申请。\n"
|
||||||
|
}
|
||||||
|
label {
|
||||||
|
en: "License"
|
||||||
|
zh: "许可证"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
license_type_field {
|
||||||
|
desc {
|
||||||
|
en: "License type"
|
||||||
|
zh: "许可证类型"
|
||||||
|
}
|
||||||
|
label {
|
||||||
|
en: "License type"
|
||||||
|
zh: "许可证类型"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
key_field {
|
||||||
|
desc {
|
||||||
|
en: "License string"
|
||||||
|
zh: "许可证字符串"
|
||||||
|
}
|
||||||
|
label {
|
||||||
|
en: "License string"
|
||||||
|
zh: "许可证字符串"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
file_field {
|
||||||
|
desc {
|
||||||
|
en: "Path to the license file"
|
||||||
|
zh: "许可证文件的路径"
|
||||||
|
}
|
||||||
|
label {
|
||||||
|
en: "Path to the license file"
|
||||||
|
zh: "许可证文件的路径"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
connection_low_watermark_field {
|
||||||
|
desc {
|
||||||
|
en: "Low watermark limit below which license connection quota usage alarms are deactivated"
|
||||||
|
zh: "低水位限制,低于此水位线时系统会清除连接配额使用告警"
|
||||||
|
}
|
||||||
|
label {
|
||||||
|
en: "Connection low watermark"
|
||||||
|
zh: "连接低水位线"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
connection_high_watermark_field {
|
||||||
|
desc {
|
||||||
|
en: "High watermark limit above which license connection quota usage alarms are activated"
|
||||||
|
zh: "高水位线,连接数超过这个水位线时,系统会触发许可证连接配额使用告警"
|
||||||
|
}
|
||||||
|
label {
|
||||||
|
en: "Connection high watermark"
|
||||||
|
zh: "连接高水位"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
|
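The `desc` strings above document the `license` root introduced by `emqx_license_schema`: a `type` discriminator plus either a `key` string or a `file` path (with `key` taking precedence when both are set), alongside the connection watermark fields. A hedged sketch of the two resulting config variants; the key value and file path below are placeholders, not real licenses:

```
# key-based license; takes precedence if a file is also configured
license {
  type = key
  key = "MjIwMTEx..."              # placeholder, not a valid license key
  connection_high_watermark = "80%"
}

# file-based license
license {
  type = file
  file = "/etc/emqx/emqx.lic"      # placeholder path
}
```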
@ -1,6 +1,6 @@
|
||||||
{application, emqx_license, [
|
{application, emqx_license, [
|
||||||
{description, "EMQX License"},
|
{description, "EMQX License"},
|
||||||
{vsn, "5.0.1"},
|
{vsn, "5.0.2"},
|
||||||
{modules, []},
|
{modules, []},
|
||||||
{registered, [emqx_license_sup]},
|
{registered, [emqx_license_sup]},
|
||||||
{applications, [kernel, stdlib]},
|
{applications, [kernel, stdlib]},
|
||||||
|
|
|
@ -5,6 +5,7 @@
|
||||||
-module(emqx_license_schema).
|
-module(emqx_license_schema).
|
||||||
|
|
||||||
-include_lib("typerefl/include/types.hrl").
|
-include_lib("typerefl/include/types.hrl").
|
||||||
|
-include_lib("hocon/include/hoconsc.hrl").
|
||||||
|
|
||||||
%%------------------------------------------------------------------------------
|
%%------------------------------------------------------------------------------
|
||||||
%% hocon_schema callbacks
|
%% hocon_schema callbacks
|
||||||
|
@ -26,13 +27,7 @@ roots() ->
|
||||||
hoconsc:mk(
|
hoconsc:mk(
|
||||||
license_type(),
|
license_type(),
|
||||||
#{
|
#{
|
||||||
desc =>
|
desc => ?DESC(license_root)
|
||||||
"EMQX Enterprise license.\n"
|
|
||||||
"A license is either a `key` or a `file`.\n"
|
|
||||||
"When `key` and `file` are both configured, `key` is used.\n"
|
|
||||||
"\n"
|
|
||||||
"EMQX by default starts with a trial license. For a different license,\n"
|
|
||||||
"visit https://www.emqx.com/apply-licenses/emqx to apply.\n"
|
|
||||||
}
|
}
|
||||||
)}
|
)}
|
||||||
].
|
].
|
||||||
|
@ -41,18 +36,20 @@ fields(key_license) ->
|
||||||
[
|
[
|
||||||
{type, #{
|
{type, #{
|
||||||
type => key,
|
type => key,
|
||||||
required => true
|
required => true,
|
||||||
|
desc => ?DESC(license_type_field)
|
||||||
}},
|
}},
|
||||||
{key, #{
|
{key, #{
|
||||||
type => string(),
|
type => string(),
|
||||||
%% so it's not logged
|
%% so it's not logged
|
||||||
sensitive => true,
|
sensitive => true,
|
||||||
required => true,
|
required => true,
|
||||||
desc => "License string"
|
desc => ?DESC(key_field)
|
||||||
}},
|
}},
|
||||||
{file, #{
|
{file, #{
|
||||||
type => string(),
|
type => string(),
|
||||||
required => false
|
required => false,
|
||||||
|
desc => ?DESC(file_field)
|
||||||
}}
|
}}
|
||||||
| common_fields()
|
| common_fields()
|
||||||
];
|
];
|
||||||
|
@ -60,17 +57,19 @@ fields(file_license) ->
|
||||||
[
|
[
|
||||||
{type, #{
|
{type, #{
|
||||||
type => file,
|
type => file,
|
||||||
required => true
|
required => true,
|
||||||
|
desc => ?DESC(license_type_field)
|
||||||
}},
|
}},
|
||||||
{key, #{
|
{key, #{
|
||||||
type => string(),
|
type => string(),
|
||||||
%% so it's not logged
|
%% so it's not logged
|
||||||
sensitive => true,
|
sensitive => true,
|
||||||
required => false
|
required => false,
|
||||||
|
desc => ?DESC(key_field)
|
||||||
}},
|
}},
|
||||||
{file, #{
|
{file, #{
|
||||||
type => string(),
|
type => string(),
|
||||||
desc => "Path to the license file"
|
desc => ?DESC(file_field)
|
||||||
}}
|
}}
|
||||||
| common_fields()
|
| common_fields()
|
||||||
].
|
].
|
||||||
|
@ -87,12 +86,12 @@ common_fields() ->
|
||||||
{connection_low_watermark, #{
|
{connection_low_watermark, #{
|
||||||
type => emqx_schema:percent(),
|
type => emqx_schema:percent(),
|
||||||
default => "75%",
|
default => "75%",
|
||||||
desc => ""
|
desc => ?DESC(connection_low_watermark_field)
|
||||||
}},
|
}},
|
||||||
{connection_high_watermark, #{
|
{connection_high_watermark, #{
|
||||||
type => emqx_schema:percent(),
|
type => emqx_schema:percent(),
|
||||||
default => "80%",
|
default => "80%",
|
||||||
desc => ""
|
desc => ?DESC(connection_high_watermark_field)
|
||||||
}}
|
}}
|
||||||
].
|
].
|
||||||
|
|
||||||
|
|
mix.exs
|
@ -52,10 +52,10 @@ defmodule EMQXUmbrella.MixProject do
|
||||||
{:jiffy, github: "emqx/jiffy", tag: "1.0.5", override: true},
|
{:jiffy, github: "emqx/jiffy", tag: "1.0.5", override: true},
|
||||||
{:cowboy, github: "emqx/cowboy", tag: "2.9.0", override: true},
|
{:cowboy, github: "emqx/cowboy", tag: "2.9.0", override: true},
|
||||||
{:esockd, github: "emqx/esockd", tag: "5.9.4", override: true},
|
{:esockd, github: "emqx/esockd", tag: "5.9.4", override: true},
|
||||||
{:ekka, github: "emqx/ekka", tag: "0.13.3", override: true},
|
{:ekka, github: "emqx/ekka", tag: "0.13.4", override: true},
|
||||||
{:gen_rpc, github: "emqx/gen_rpc", tag: "2.8.1", override: true},
|
{:gen_rpc, github: "emqx/gen_rpc", tag: "2.8.1", override: true},
|
||||||
{:grpc, github: "emqx/grpc-erl", tag: "0.6.6", override: true},
|
{:grpc, github: "emqx/grpc-erl", tag: "0.6.6", override: true},
|
||||||
{:minirest, github: "emqx/minirest", tag: "1.3.6", override: true},
|
{:minirest, github: "emqx/minirest", tag: "1.3.7", override: true},
|
||||||
{:ecpool, github: "emqx/ecpool", tag: "0.5.2", override: true},
|
{:ecpool, github: "emqx/ecpool", tag: "0.5.2", override: true},
|
||||||
{:replayq, "0.3.4", override: true},
|
{:replayq, "0.3.4", override: true},
|
||||||
{:pbkdf2, github: "emqx/erlang-pbkdf2", tag: "2.0.4", override: true},
|
{:pbkdf2, github: "emqx/erlang-pbkdf2", tag: "2.0.4", override: true},
|
||||||
|
|
|
@ -54,10 +54,10 @@
|
||||||
, {jiffy, {git, "https://github.com/emqx/jiffy", {tag, "1.0.5"}}}
|
, {jiffy, {git, "https://github.com/emqx/jiffy", {tag, "1.0.5"}}}
|
||||||
, {cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.9.0"}}}
|
, {cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.9.0"}}}
|
||||||
, {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.4"}}}
|
, {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.4"}}}
|
||||||
, {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.13.3"}}}
|
, {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.13.4"}}}
|
||||||
, {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.8.1"}}}
|
, {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.8.1"}}}
|
||||||
, {grpc, {git, "https://github.com/emqx/grpc-erl", {tag, "0.6.6"}}}
|
, {grpc, {git, "https://github.com/emqx/grpc-erl", {tag, "0.6.6"}}}
|
||||||
, {minirest, {git, "https://github.com/emqx/minirest", {tag, "1.3.6"}}}
|
, {minirest, {git, "https://github.com/emqx/minirest", {tag, "1.3.7"}}}
|
||||||
, {ecpool, {git, "https://github.com/emqx/ecpool", {tag, "0.5.2"}}}
|
, {ecpool, {git, "https://github.com/emqx/ecpool", {tag, "0.5.2"}}}
|
||||||
, {replayq, "0.3.4"}
|
, {replayq, "0.3.4"}
|
||||||
, {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}}
|
, {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}}
|
||||||
|
|
|
@ -233,3 +233,76 @@ authentication=[{enable=true, backend="built_in_database", mechanism="password_b
|
||||||
authentication=[{enable=true}]
|
authentication=[{enable=true}]
|
||||||
```
|
```
|
||||||
:::
|
:::
|
||||||
|
|
||||||
|
#### TLS/SSL ciphers
|
||||||
|
|
||||||
|
Starting from v5.0.6, EMQX no longer pre-populates the ciphers list with a default
|
||||||
|
set of cipher suite names.
|
||||||
|
Instead, the default ciphers are applied at runtime when starting the listener
|
||||||
|
for servers, or when establishing a TLS connection as a client.
|
||||||
|
|
||||||
|
Below are the default ciphers selected by EMQX.
|
||||||
|
|
||||||
|
For tlsv1.3:
|
||||||
|
```
|
||||||
|
ciphers =
|
||||||
|
[ "TLS_AES_256_GCM_SHA384", "TLS_AES_128_GCM_SHA256",
|
||||||
|
"TLS_CHACHA20_POLY1305_SHA256", "TLS_AES_128_CCM_SHA256",
|
||||||
|
"TLS_AES_128_CCM_8_SHA256"
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
For tlsv1.2 or earlier:
|
||||||
|
|
||||||
|
```
|
||||||
|
ciphers =
|
||||||
|
[ "ECDHE-ECDSA-AES256-GCM-SHA384",
|
||||||
|
"ECDHE-RSA-AES256-GCM-SHA384",
|
||||||
|
"ECDHE-ECDSA-AES256-SHA384",
|
||||||
|
"ECDHE-RSA-AES256-SHA384",
|
||||||
|
"ECDH-ECDSA-AES256-GCM-SHA384",
|
||||||
|
"ECDH-RSA-AES256-GCM-SHA384",
|
||||||
|
"ECDH-ECDSA-AES256-SHA384",
|
||||||
|
"ECDH-RSA-AES256-SHA384",
|
||||||
|
"DHE-DSS-AES256-GCM-SHA384",
|
||||||
|
"DHE-DSS-AES256-SHA256",
|
||||||
|
"AES256-GCM-SHA384",
|
||||||
|
"AES256-SHA256",
|
||||||
|
"ECDHE-ECDSA-AES128-GCM-SHA256",
|
||||||
|
"ECDHE-RSA-AES128-GCM-SHA256",
|
||||||
|
"ECDHE-ECDSA-AES128-SHA256",
|
||||||
|
"ECDHE-RSA-AES128-SHA256",
|
||||||
|
"ECDH-ECDSA-AES128-GCM-SHA256",
|
||||||
|
"ECDH-RSA-AES128-GCM-SHA256",
|
||||||
|
"ECDH-ECDSA-AES128-SHA256",
|
||||||
|
"ECDH-RSA-AES128-SHA256",
|
||||||
|
"DHE-DSS-AES128-GCM-SHA256",
|
||||||
|
"DHE-DSS-AES128-SHA256",
|
||||||
|
"AES128-GCM-SHA256",
|
||||||
|
"AES128-SHA256",
|
||||||
|
"ECDHE-ECDSA-AES256-SHA",
|
||||||
|
"ECDHE-RSA-AES256-SHA",
|
||||||
|
"DHE-DSS-AES256-SHA",
|
||||||
|
"ECDH-ECDSA-AES256-SHA",
|
||||||
|
"ECDH-RSA-AES256-SHA",
|
||||||
|
"ECDHE-ECDSA-AES128-SHA",
|
||||||
|
"ECDHE-RSA-AES128-SHA",
|
||||||
|
"DHE-DSS-AES128-SHA",
|
||||||
|
"ECDH-ECDSA-AES128-SHA",
|
||||||
|
"ECDH-RSA-AES128-SHA"
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
For PSK-enabled listeners:
|
||||||
|
|
||||||
|
```
|
||||||
|
ciphers =
|
||||||
|
[ "RSA-PSK-AES256-GCM-SHA384",
|
||||||
|
"RSA-PSK-AES256-CBC-SHA384",
|
||||||
|
"RSA-PSK-AES128-GCM-SHA256",
|
||||||
|
"RSA-PSK-AES128-CBC-SHA256",
|
||||||
|
"RSA-PSK-AES256-CBC-SHA",
|
||||||
|
"RSA-PSK-AES128-CBC-SHA"
|
||||||
|
]
|
||||||
|
```
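
The runtime defaults above can still be overridden by listing ciphers explicitly for a listener. A minimal sketch, assuming the stock `listeners.ssl.default` listener; the shortened cipher list is only for illustration:

```
listeners.ssl.default {
  bind = "0.0.0.0:8883"
  ssl_options {
    versions = ["tlsv1.3", "tlsv1.2"]
    ciphers = [
      "TLS_AES_256_GCM_SHA384",
      "ECDHE-RSA-AES256-GCM-SHA384"
    ]
  }
}
```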
|
||||||
|
|
||||||
|
|
|
@ -216,3 +216,73 @@ authentication=[{enable=true, backend="built_in_database", mechanism="password_b
|
||||||
authentication=[{enable=true}]
|
authentication=[{enable=true}]
|
||||||
```
|
```
|
||||||
:::
|
:::
|
||||||
|
|
||||||
|
#### TLS/SSL ciphers
|
||||||
|
|
||||||
|
Starting from v5.0.6, EMQX no longer spells out all the default cipher suite names in the configuration file.
|
||||||
|
Instead, the configuration holds an empty list, which is replaced with the default cipher suites at runtime.
|
||||||
|
|
||||||
|
The cipher suites below are supported by EMQX by default:
|
||||||
|
|
||||||
|
For tlsv1.3:
|
||||||
|
```
|
||||||
|
ciphers =
|
||||||
|
[ "TLS_AES_256_GCM_SHA384", "TLS_AES_128_GCM_SHA256",
|
||||||
|
"TLS_CHACHA20_POLY1305_SHA256", "TLS_AES_128_CCM_SHA256",
|
||||||
|
"TLS_AES_128_CCM_8_SHA256"
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
For tlsv1.2 or earlier:
|
||||||
|
|
||||||
|
```
|
||||||
|
ciphers =
|
||||||
|
[ "ECDHE-ECDSA-AES256-GCM-SHA384",
|
||||||
|
"ECDHE-RSA-AES256-GCM-SHA384",
|
||||||
|
"ECDHE-ECDSA-AES256-SHA384",
|
||||||
|
"ECDHE-RSA-AES256-SHA384",
|
||||||
|
"ECDH-ECDSA-AES256-GCM-SHA384",
|
||||||
|
"ECDH-RSA-AES256-GCM-SHA384",
|
||||||
|
"ECDH-ECDSA-AES256-SHA384",
|
||||||
|
"ECDH-RSA-AES256-SHA384",
|
||||||
|
"DHE-DSS-AES256-GCM-SHA384",
|
||||||
|
"DHE-DSS-AES256-SHA256",
|
||||||
|
"AES256-GCM-SHA384",
|
||||||
|
"AES256-SHA256",
|
||||||
|
"ECDHE-ECDSA-AES128-GCM-SHA256",
|
||||||
|
"ECDHE-RSA-AES128-GCM-SHA256",
|
||||||
|
"ECDHE-ECDSA-AES128-SHA256",
|
||||||
|
"ECDHE-RSA-AES128-SHA256",
|
||||||
|
"ECDH-ECDSA-AES128-GCM-SHA256",
|
||||||
|
"ECDH-RSA-AES128-GCM-SHA256",
|
||||||
|
"ECDH-ECDSA-AES128-SHA256",
|
||||||
|
"ECDH-RSA-AES128-SHA256",
|
||||||
|
"DHE-DSS-AES128-GCM-SHA256",
|
||||||
|
"DHE-DSS-AES128-SHA256",
|
||||||
|
"AES128-GCM-SHA256",
|
||||||
|
"AES128-SHA256",
|
||||||
|
"ECDHE-ECDSA-AES256-SHA",
|
||||||
|
"ECDHE-RSA-AES256-SHA",
|
||||||
|
"DHE-DSS-AES256-SHA",
|
||||||
|
"ECDH-ECDSA-AES256-SHA",
|
||||||
|
"ECDH-RSA-AES256-SHA",
|
||||||
|
"ECDHE-ECDSA-AES128-SHA",
|
||||||
|
"ECDHE-RSA-AES128-SHA",
|
||||||
|
"DHE-DSS-AES128-SHA",
|
||||||
|
"ECDH-ECDSA-AES128-SHA",
|
||||||
|
"ECDH-RSA-AES128-SHA"
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
For listeners with PSK enabled:
|
||||||
|
|
||||||
|
```
|
||||||
|
ciphers =
|
||||||
|
[ "RSA-PSK-AES256-GCM-SHA384",
|
||||||
|
"RSA-PSK-AES256-CBC-SHA384",
|
||||||
|
"RSA-PSK-AES128-GCM-SHA256",
|
||||||
|
"RSA-PSK-AES128-CBC-SHA256",
|
||||||
|
"RSA-PSK-AES256-CBC-SHA",
|
||||||
|
"RSA-PSK-AES128-CBC-SHA"
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|