Merge pull request #11953 from id/1115-sync-master-to-r54
sync master to release-54
commit 3016aaa355
Makefile | 4

@@ -20,8 +20,8 @@ endif
 
 # Dashboard version
 # from https://github.com/emqx/emqx-dashboard5
-export EMQX_DASHBOARD_VERSION ?= v1.5.0
-export EMQX_EE_DASHBOARD_VERSION ?= e1.3.0
+export EMQX_DASHBOARD_VERSION ?= v1.5.1
+export EMQX_EE_DASHBOARD_VERSION ?= e1.3.1
 
 PROFILE ?= emqx
 REL_PROFILES := emqx emqx-enterprise
 
@@ -32,10 +32,10 @@
 %% `apps/emqx/src/bpapi/README.md'
 
 %% Opensource edition
--define(EMQX_RELEASE_CE, "5.3.1-alpha.1").
+-define(EMQX_RELEASE_CE, "5.3.1").
 
 %% Enterprise edition
--define(EMQX_RELEASE_EE, "5.3.1-alpha.4").
+-define(EMQX_RELEASE_EE, "5.3.1").
 
 %% The HTTP API version
 -define(EMQX_API_VERSION, "5.0").
 
@@ -11,6 +11,8 @@
 -include_lib("snabbkaffe/include/snabbkaffe.hrl").
 -include_lib("emqx/include/emqx_mqtt.hrl").
 
+-include_lib("emqx/src/emqx_persistent_session_ds.hrl").
+
 -define(DEFAULT_KEYSPACE, default).
 -define(DS_SHARD_ID, <<"local">>).
 -define(DS_SHARD, {?DEFAULT_KEYSPACE, ?DS_SHARD_ID}).
 
@@ -118,6 +120,7 @@ start_client(Opts0 = #{}) ->
         properties => #{'Session-Expiry-Interval' => 300}
     },
     Opts = maps:to_list(emqx_utils_maps:deep_merge(Defaults, Opts0)),
+    ct:pal("starting client with opts:\n ~p", [Opts]),
     {ok, Client} = emqtt:start_link(Opts),
     on_exit(fun() -> catch emqtt:stop(Client) end),
     Client.
 
@@ -148,6 +151,9 @@ restart_node(Node, NodeSpec) ->
     ?tp(restarted_node, #{}),
     ok.
 
+is_persistent_connect_opts(#{properties := #{'Session-Expiry-Interval' := EI}}) ->
+    EI > 0.
+
 %%------------------------------------------------------------------------------
 %% Testcases
 %%------------------------------------------------------------------------------
 
@@ -309,3 +315,94 @@ t_session_unsubscription_idempotency(Config) ->
         end
     ),
     ok.
+
+t_session_discard_persistent_to_non_persistent(_Config) ->
+    ClientId = atom_to_binary(?FUNCTION_NAME),
+    Params = #{
+        client_id => ClientId,
+        reconnect_opts =>
+            #{
+                clean_start => true,
+                %% we set it to zero so that a new session is not created.
+                properties => #{'Session-Expiry-Interval' => 0},
+                proto_ver => v5
+            }
+    },
+    do_t_session_discard(Params).
+
+t_session_discard_persistent_to_persistent(_Config) ->
+    ClientId = atom_to_binary(?FUNCTION_NAME),
+    Params = #{
+        client_id => ClientId,
+        reconnect_opts =>
+            #{
+                clean_start => true,
+                properties => #{'Session-Expiry-Interval' => 30},
+                proto_ver => v5
+            }
+    },
+    do_t_session_discard(Params).
+
+do_t_session_discard(Params) ->
+    #{
+        client_id := ClientId,
+        reconnect_opts := ReconnectOpts0
+    } = Params,
+    ReconnectOpts = ReconnectOpts0#{clientid => ClientId},
+    SubTopicFilter = <<"t/+">>,
+    ?check_trace(
+        begin
+            ?tp(notice, "starting", #{}),
+            Client0 = start_client(#{
+                clientid => ClientId,
+                clean_start => false,
+                properties => #{'Session-Expiry-Interval' => 30},
+                proto_ver => v5
+            }),
+            {ok, _} = emqtt:connect(Client0),
+            ?tp(notice, "subscribing", #{}),
+            {ok, _, [?RC_GRANTED_QOS_2]} = emqtt:subscribe(Client0, SubTopicFilter, qos2),
+            %% Store some matching messages so that streams and iterators are created.
+            ok = emqtt:publish(Client0, <<"t/1">>, <<"1">>),
+            ok = emqtt:publish(Client0, <<"t/2">>, <<"2">>),
+            ?retry(
+                _Sleep0 = 100,
+                _Attempts0 = 50,
+                true = map_size(emqx_persistent_session_ds:list_all_streams()) > 0
+            ),
+            ?retry(
+                _Sleep0 = 100,
+                _Attempts0 = 50,
+                true = map_size(emqx_persistent_session_ds:list_all_iterators()) > 0
+            ),
+            ok = emqtt:stop(Client0),
+            ?tp(notice, "disconnected", #{}),
+
+            ?tp(notice, "reconnecting", #{}),
+            %% we still have iterators and streams
+            ?assert(map_size(emqx_persistent_session_ds:list_all_streams()) > 0),
+            ?assert(map_size(emqx_persistent_session_ds:list_all_iterators()) > 0),
+            Client1 = start_client(ReconnectOpts),
+            {ok, _} = emqtt:connect(Client1),
+            ?assertEqual([], emqtt:subscriptions(Client1)),
+            case is_persistent_connect_opts(ReconnectOpts) of
+                true ->
+                    ?assertMatch(#{ClientId := _}, emqx_persistent_session_ds:list_all_sessions());
+                false ->
+                    ?assertEqual(#{}, emqx_persistent_session_ds:list_all_sessions())
+            end,
+            ?assertEqual(#{}, emqx_persistent_session_ds:list_all_subscriptions()),
+            ?assertEqual([], emqx_persistent_session_ds_router:topics()),
+            ?assertEqual(#{}, emqx_persistent_session_ds:list_all_streams()),
+            ?assertEqual(#{}, emqx_persistent_session_ds:list_all_iterators()),
+            ok = emqtt:stop(Client1),
+            ?tp(notice, "disconnected", #{}),
+
+            ok
+        end,
+        fun(Trace) ->
+            ct:pal("trace:\n ~p", [Trace]),
+            ok
+        end
+    ),
+    ok.
 
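Note on the tests added above: `do_t_session_discard/1' first creates a persistent session (non-zero `Session-Expiry-Interval'), subscribes and publishes matching messages so that streams and iterators get created, then reconnects with `clean_start => true' and asserts that every piece of durable session state (sessions, subscriptions, routes, streams, iterators) has been discarded. The two `t_session_discard_*' cases differ only in whether the reconnecting client is itself persistent.
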
@@ -30,7 +30,7 @@
     {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.7"}}},
     {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.15.16"}}},
     {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "3.2.1"}}},
-    {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.39.19"}}},
+    {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.40.0"}}},
     {emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.3"}}},
     {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}},
     {recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}},
 
@@ -2,7 +2,7 @@
 {application, emqx, [
     {id, "emqx"},
     {description, "EMQX Core"},
-    {vsn, "5.1.13"},
+    {vsn, "5.1.14"},
     {modules, []},
     {registered, []},
     {applications, [
 
@@ -258,21 +258,21 @@ set_chan_stats(ClientId, ChanPid, Stats) ->
     end.
 
 %% @doc Open a session.
--spec open_session(boolean(), emqx_types:clientinfo(), emqx_types:conninfo()) ->
+-spec open_session(_CleanStart :: boolean(), emqx_types:clientinfo(), emqx_types:conninfo()) ->
     {ok, #{
         session := emqx_session:t(),
         present := boolean(),
         replay => _ReplayContext
     }}
     | {error, Reason :: term()}.
-open_session(true, ClientInfo = #{clientid := ClientId}, ConnInfo) ->
+open_session(_CleanStart = true, ClientInfo = #{clientid := ClientId}, ConnInfo) ->
     Self = self(),
     emqx_cm_locker:trans(ClientId, fun(_) ->
         ok = discard_session(ClientId),
         ok = emqx_session:destroy(ClientInfo, ConnInfo),
         create_register_session(ClientInfo, ConnInfo, Self)
     end);
-open_session(false, ClientInfo = #{clientid := ClientId}, ConnInfo) ->
+open_session(_CleanStart = false, ClientInfo = #{clientid := ClientId}, ConnInfo) ->
     Self = self(),
     emqx_cm_locker:trans(ClientId, fun(_) ->
         case emqx_session:open(ClientInfo, ConnInfo) of
 
@@ -662,14 +662,32 @@ remove_from_override_config(_BinKeyPath, #{persistent := false}) ->
     undefined;
 remove_from_override_config(BinKeyPath, Opts) ->
     OldConf = emqx_config:read_override_conf(Opts),
-    emqx_utils_maps:deep_remove(BinKeyPath, OldConf).
+    UpgradedOldConf = upgrade_conf(OldConf),
+    emqx_utils_maps:deep_remove(BinKeyPath, UpgradedOldConf).
 
 %% apply new config on top of override config
 merge_to_override_config(_RawConf, #{persistent := false}) ->
     undefined;
 merge_to_override_config(RawConf, Opts) ->
     OldConf = emqx_config:read_override_conf(Opts),
-    maps:merge(OldConf, RawConf).
+    UpgradedOldConf = upgrade_conf(OldConf),
+    maps:merge(UpgradedOldConf, RawConf).
+
+upgrade_conf(Conf) ->
+    try
+        ConfLoader = emqx_app:get_config_loader(),
+        SchemaModule = apply(ConfLoader, schema_module, []),
+        apply(SchemaModule, upgrade_raw_conf, [Conf])
+    catch
+        ErrorType:Reason:Stack ->
+            ?SLOG(warning, #{
+                msg => "failed_to_upgrade_config",
+                error_type => ErrorType,
+                reason => Reason,
+                stacktrace => Stack
+            }),
+            Conf
+    end.
 
 up_req({remove, _Opts}) -> '$remove';
 up_req({{update, Req}, _Opts}) -> Req.
 
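Note: `upgrade_conf/1' above resolves the schema module indirectly, via the loader returned by `emqx_app:get_config_loader/0', so the same code path works under both the normal boot sequence and the test harness. A hypothetical `upgrade_raw_conf/1' in a schema module could look like the sketch below (the `<<"old_root">>' key is assumed for illustration only; it is not part of this change):

%% Sketch: migrate a renamed root key while reading old override config.
upgrade_raw_conf(#{<<"old_root">> := V} = Conf0) ->
    Conf = maps:remove(<<"old_root">>, Conf0),
    Conf#{<<"new_root">> => V};
upgrade_raw_conf(Conf) ->
    Conf.
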
@@ -66,8 +66,9 @@
 %% - Callbacks with greater priority values will be run before
 %%   the ones with lower priority values. e.g. A Callback with
 %%   priority = 2 precedes the callback with priority = 1.
-%% - The execution order is the adding order of callbacks if they have
-%%   equal priority values.
+%% - If the priorities of the hooks are equal then their execution
+%%   order is determined by the lexicographic of hook function
+%%   names.
 
 -type hookpoint() :: atom() | binary().
 -type action() :: {module(), atom(), [term()] | undefined}.
 
@@ -33,7 +33,8 @@
     desc/1,
     types/0,
     short_paths/0,
-    short_paths_fields/0
+    short_paths_fields/0,
+    rate_type/0
 ]).
 
 -define(KILOBYTE, 1024).
 
@@ -129,9 +130,9 @@ fields(limiter) ->
     ];
 fields(node_opts) ->
     [
-        {rate, ?HOCON(rate(), #{desc => deprecated_desc(rate), default => <<"infinity">>})},
+        {rate, ?HOCON(rate_type(), #{desc => deprecated_desc(rate), default => <<"infinity">>})},
         {burst,
-            ?HOCON(burst_rate(), #{
+            ?HOCON(burst_rate_type(), #{
                 desc => deprecated_desc(burst),
                 default => <<"0">>
             })}
 
@@ -142,7 +143,7 @@ fields(bucket_opts) ->
     fields_of_bucket(<<"infinity">>);
 fields(client_opts) ->
     [
-        {rate, ?HOCON(rate(), #{default => <<"infinity">>, desc => deprecated_desc(rate)})},
+        {rate, ?HOCON(rate_type(), #{default => <<"infinity">>, desc => deprecated_desc(rate)})},
         {initial,
             ?HOCON(initial(), #{
                 default => <<"0">>,
 
@@ -164,7 +165,7 @@ fields(client_opts) ->
             }
         )},
         {burst,
-            ?HOCON(burst(), #{
+            ?HOCON(burst_type(), #{
                 desc => deprecated_desc(burst),
                 default => <<"0">>,
                 importance => ?IMPORTANCE_HIDDEN,
 
@@ -211,7 +212,7 @@ short_paths_fields() ->
 short_paths_fields(Importance) ->
     [
         {Name,
-            ?HOCON(rate(), #{
+            ?HOCON(rate_type(), #{
                 desc => ?DESC(Name),
                 required => false,
                 importance => Importance,
 
@@ -415,7 +416,7 @@ composite_bucket_fields(Types, ClientRef) ->
 
 fields_of_bucket(Default) ->
     [
-        {rate, ?HOCON(rate(), #{desc => deprecated_desc(rate), default => Default})},
+        {rate, ?HOCON(rate_type(), #{desc => deprecated_desc(rate), default => Default})},
         {burst,
             ?HOCON(burst(), #{
                 desc => deprecated_desc(burst),
 
@@ -461,3 +462,12 @@ alias_of_type(_) ->
 
 deprecated_desc(_Field) ->
     <<"Deprecated since v5.0.25">>.
+
+rate_type() ->
+    typerefl:alias("string", rate()).
+
+burst_type() ->
+    typerefl:alias("string", burst()).
+
+burst_rate_type() ->
+    typerefl:alias("string", burst_rate()).
 
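Note: `typerefl:alias/2' (used verbatim in the hunk above) keeps validation behaviour identical and changes only the type name reported in schema errors, so the limiter fields now print as "string" rather than the internal union type. Usage mirrors the schema hunks above (sketch):

%% Before: {rate, ?HOCON(rate(), #{...})}
%% After:  {rate, ?HOCON(rate_type(), #{...})}
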
@@ -19,16 +19,18 @@
 -module(emqx_persistent_message_ds_replayer).
 
 %% API:
--export([new/0, next_packet_id/1, replay/2, commit_offset/3, poll/3]).
+-export([new/0, next_packet_id/1, replay/2, commit_offset/3, poll/3, n_inflight/1]).
 
 %% internal exports:
 -export([]).
 
 -export_type([inflight/0]).
 
+-include_lib("emqx/include/logger.hrl").
 -include("emqx_persistent_session_ds.hrl").
 
 -ifdef(TEST).
+-include_lib("proper/include/proper.hrl").
 -include_lib("eunit/include/eunit.hrl").
 -endif.
 
@@ -65,9 +67,28 @@ new() ->
     #inflight{}.
 
 -spec next_packet_id(inflight()) -> {emqx_types:packet_id(), inflight()}.
-next_packet_id(Inflight0 = #inflight{next_seqno = LastSeqno}) ->
-    Inflight = Inflight0#inflight{next_seqno = LastSeqno + 1},
-    {seqno_to_packet_id(LastSeqno), Inflight}.
+next_packet_id(Inflight0 = #inflight{next_seqno = LastSeqNo}) ->
+    Inflight = Inflight0#inflight{next_seqno = LastSeqNo + 1},
+    case LastSeqNo rem 16#10000 of
+        0 ->
+            %% We skip sequence numbers that lead to PacketId = 0 to
+            %% simplify math. Note: it leads to occasional gaps in the
+            %% sequence numbers.
+            next_packet_id(Inflight);
+        PacketId ->
+            {PacketId, Inflight}
+    end.
+
+-spec n_inflight(inflight()) -> non_neg_integer().
+n_inflight(#inflight{next_seqno = NextSeqNo, acked_seqno = AckedSeqno}) ->
+    %% NOTE: this function assumes that gaps in the sequence ID occur
+    %% _only_ when the packet ID wraps:
+    case AckedSeqno >= ((NextSeqNo bsr 16) bsl 16) of
+        true ->
+            NextSeqNo - AckedSeqno;
+        false ->
+            NextSeqNo - AckedSeqno - 1
+    end.
 
 -spec replay(emqx_persistent_session_ds:id(), inflight()) ->
     emqx_session:replies().
 
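Note: MQTT packet IDs are 16-bit and must be non-zero, so `next_packet_id/1' skips every sequence number whose low 16 bits are zero. A small worked example (values chosen for illustration):

%% LastSeqNo = 1        -> 1 rem 16#10000 = 1   -> PacketId 1
%% LastSeqNo = 16#FFFF  -> rem is 16#FFFF       -> PacketId 16#FFFF
%% LastSeqNo = 16#10000 -> rem is 0 -> skipped; recurse with 16#10001 -> PacketId 1

This is also why `n_inflight/1' subtracts one when the acked sequence number still sits in the previous epoch: the sequence number skipped at the epoch boundary is never in flight.
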
@@ -83,8 +104,20 @@ commit_offset(
         acked_seqno = AckedSeqno0, next_seqno = NextSeqNo, offset_ranges = Ranges0
     }
 ) ->
-    AckedSeqno = packet_id_to_seqno(NextSeqNo, PacketId),
-    true = AckedSeqno0 < AckedSeqno,
+    AckedSeqno =
+        case packet_id_to_seqno(NextSeqNo, PacketId) of
+            N when N > AckedSeqno0; AckedSeqno0 =:= 0 ->
+                N;
+            OutOfRange ->
+                ?SLOG(warning, #{
+                    msg => "out-of-order_ack",
+                    prev_seqno => AckedSeqno0,
+                    acked_seqno => OutOfRange,
+                    next_seqno => NextSeqNo,
+                    packet_id => PacketId
+                }),
+                AckedSeqno0
+        end,
     Ranges = lists:filter(
         fun(#range{stream = Stream, last = LastSeqno, iterator_next = ItNext}) ->
             case LastSeqno =< AckedSeqno of
 
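Note: previously an out-of-window acknowledgement crashed the session with a badmatch on `true = AckedSeqno0 < AckedSeqno'; the rewritten `commit_offset/3' instead logs an "out-of-order_ack" warning and keeps the previously committed sequence number. The `AckedSeqno0 =:= 0' guard admits the very first ack, when nothing has been committed yet.
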
@@ -139,19 +172,18 @@ fetch(_SessionId, Inflight, _Streams, 0, Acc) ->
 fetch(SessionId, Inflight0, [Stream | Streams], N, Publishes0) ->
     #inflight{next_seqno = FirstSeqNo, offset_ranges = Ranges0} = Inflight0,
     ItBegin = get_last_iterator(SessionId, Stream, Ranges0),
-    {ok, ItEnd, Messages} = emqx_ds:next(ItBegin, N),
-    {Publishes, Inflight1} =
+    {ok, ItEnd, Messages} = emqx_ds:next(?PERSISTENT_MESSAGE_DB, ItBegin, N),
+    {NMessages, Publishes, Inflight1} =
         lists:foldl(
-            fun(Msg, {PubAcc0, InflightAcc0}) ->
+            fun(Msg, {N0, PubAcc0, InflightAcc0}) ->
                 {PacketId, InflightAcc} = next_packet_id(InflightAcc0),
                 PubAcc = [{PacketId, Msg} | PubAcc0],
-                {PubAcc, InflightAcc}
+                {N0 + 1, PubAcc, InflightAcc}
             end,
-            {Publishes0, Inflight0},
+            {0, Publishes0, Inflight0},
             Messages
         ),
     #inflight{next_seqno = LastSeqNo} = Inflight1,
-    NMessages = LastSeqNo - FirstSeqNo,
     case NMessages > 0 of
         true ->
             Range = #range{
 
@@ -167,8 +199,12 @@ fetch(SessionId, Inflight0, [Stream | Streams], N, Publishes0) ->
     end.
 
 -spec update_iterator(emqx_persistent_session_ds:id(), emqx_ds:stream(), emqx_ds:iterator()) -> ok.
-update_iterator(SessionId, Stream, Iterator) ->
-    mria:dirty_write(?SESSION_ITER_TAB, #ds_iter{id = {SessionId, Stream}, iter = Iterator}).
+update_iterator(DSSessionId, Stream, Iterator) ->
+    %% Workaround: we convert `Stream' to a binary before attempting to store it in
+    %% mnesia(rocksdb) because of a bug in `mnesia_rocksdb' when trying to do
+    %% `mnesia:dirty_all_keys' later.
+    StreamBin = term_to_binary(Stream),
+    mria:dirty_write(?SESSION_ITER_TAB, #ds_iter{id = {DSSessionId, StreamBin}, iter = Iterator}).
 
 get_last_iterator(SessionId, Stream, Ranges) ->
     case lists:keyfind(Stream, #range.stream, lists:reverse(Ranges)) of
 
@@ -179,8 +215,10 @@ get_last_iterator(SessionId, Stream, Ranges) ->
     end.
 
 -spec get_iterator(emqx_persistent_session_ds:id(), emqx_ds:stream()) -> emqx_ds:iterator().
-get_iterator(SessionId, Stream) ->
-    Id = {SessionId, Stream},
+get_iterator(DSSessionId, Stream) ->
+    %% See comment in `update_iterator'.
+    StreamBin = term_to_binary(Stream),
+    Id = {DSSessionId, StreamBin},
     [#ds_iter{iter = It}] = mnesia:dirty_read(?SESSION_ITER_TAB, Id),
     It.
 
@@ -193,25 +231,22 @@ get_streams(SessionId) ->
         mnesia:dirty_read(?SESSION_STREAM_TAB, SessionId)
     ).
 
-%% Packet ID as defined by MQTT protocol is a 16-bit integer in range
-%% 1..FFFF. This function translates internal session sequence number
-%% to MQTT packet ID by chopping off most significant bits and adding
-%% 1. This assumes that there's never more FFFF in-flight packets at
-%% any time:
--spec seqno_to_packet_id(non_neg_integer()) -> emqx_types:packet_id().
-seqno_to_packet_id(Counter) ->
-    Counter rem 16#ffff + 1.
-
 %% Reconstruct session counter by adding most significant bits from
 %% the current counter to the packet id.
 -spec packet_id_to_seqno(non_neg_integer(), emqx_types:packet_id()) -> non_neg_integer().
 packet_id_to_seqno(NextSeqNo, PacketId) ->
-    N = ((NextSeqNo bsr 16) bsl 16) + PacketId,
-    case N > NextSeqNo of
-        true -> N - 16#10000;
-        false -> N
+    Epoch = NextSeqNo bsr 16,
+    case packet_id_to_seqno_(Epoch, PacketId) of
+        N when N =< NextSeqNo ->
+            N;
+        _ ->
+            packet_id_to_seqno_(Epoch - 1, PacketId)
     end.
 
+-spec packet_id_to_seqno_(non_neg_integer(), emqx_types:packet_id()) -> non_neg_integer().
+packet_id_to_seqno_(Epoch, PacketId) ->
+    (Epoch bsl 16) + PacketId.
+
 -spec shuffle([A]) -> [A].
 shuffle(L0) ->
     L1 = lists:map(
 
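Note: `packet_id_to_seqno/2' first tries the epoch of `NextSeqNo' (its high bits); if that candidate would lie ahead of `NextSeqNo', the packet must belong to the previous epoch. Worked example (assumed values):

%% NextSeqNo = 16#10005 (epoch 1), PacketId = 16#FFFE:
%%   candidate = (1 bsl 16) + 16#FFFE = 16#1FFFE, which is > NextSeqNo,
%%   so fall back to epoch 0: (0 bsl 16) + 16#FFFE = 16#FFFE.

This is sound as long as no more than 16#FFFF packets are ever in flight, the same assumption the boundary test and property below rely on.
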
@@ -223,3 +258,57 @@ shuffle(L0) ->
     L2 = lists:sort(L1),
     {_, L} = lists:unzip(L2),
     L.
+
+-ifdef(TEST).
+
+%% This test only tests boundary conditions (to make sure property-based test didn't skip them):
+packet_id_to_seqno_test() ->
+    %% Packet ID = 1; first epoch:
+    ?assertEqual(1, packet_id_to_seqno(1, 1)),
+    ?assertEqual(1, packet_id_to_seqno(10, 1)),
+    ?assertEqual(1, packet_id_to_seqno(1 bsl 16 - 1, 1)),
+    ?assertEqual(1, packet_id_to_seqno(1 bsl 16, 1)),
+    %% Packet ID = 1; second and 3rd epochs:
+    ?assertEqual(1 bsl 16 + 1, packet_id_to_seqno(1 bsl 16 + 1, 1)),
+    ?assertEqual(1 bsl 16 + 1, packet_id_to_seqno(2 bsl 16, 1)),
+    ?assertEqual(2 bsl 16 + 1, packet_id_to_seqno(2 bsl 16 + 1, 1)),
+    %% Packet ID = 16#ffff:
+    PID = 1 bsl 16 - 1,
+    ?assertEqual(PID, packet_id_to_seqno(PID, PID)),
+    ?assertEqual(PID, packet_id_to_seqno(1 bsl 16, PID)),
+    ?assertEqual(1 bsl 16 + PID, packet_id_to_seqno(2 bsl 16, PID)),
+    ok.
+
+packet_id_to_seqno_test_() ->
+    Opts = [{numtests, 1000}, {to_file, user}],
+    {timeout, 30, fun() -> ?assert(proper:quickcheck(packet_id_to_seqno_prop(), Opts)) end}.
+
+packet_id_to_seqno_prop() ->
+    ?FORALL(
+        NextSeqNo,
+        next_seqno_gen(),
+        ?FORALL(
+            SeqNo,
+            seqno_gen(NextSeqNo),
+            begin
+                PacketId = SeqNo rem 16#10000,
+                ?assertEqual(SeqNo, packet_id_to_seqno(NextSeqNo, PacketId)),
+                true
+            end
+        )
+    ).
+
+next_seqno_gen() ->
+    ?LET(
+        {Epoch, Offset},
+        {non_neg_integer(), non_neg_integer()},
+        Epoch bsl 16 + Offset
+    ).
+
+seqno_gen(NextSeqNo) ->
+    WindowSize = 1 bsl 16 - 1,
+    Min = max(0, NextSeqNo - WindowSize),
+    Max = max(0, NextSeqNo - 1),
+    range(Min, Max).
+
+-endif.
 
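Note: the generators bound the property to the protocol's real window. `seqno_gen/1' only yields sequence numbers within 16#FFFF of `NextSeqNo', since reconstruction is ambiguous beyond one epoch. One concrete draw (illustrative values):

%% NextSeqNo = 2 bsl 16 -> SeqNo is drawn from [16#10001 .. 16#1FFFF];
%% e.g. SeqNo = 16#10001 gives PacketId = 1, and
%% packet_id_to_seqno(2 bsl 16, 1) falls back one epoch to 16#10001.
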
@@ -16,6 +16,8 @@
 
 -module(emqx_persistent_session_ds).
 
+-behaviour(emqx_session).
+
 -include("emqx.hrl").
 -include_lib("snabbkaffe/include/snabbkaffe.hrl").
 -include_lib("stdlib/include/ms_transform.hrl").
 
@@ -69,7 +71,13 @@
 ]).
 
 -ifdef(TEST).
--export([session_open/1]).
+-export([
+    session_open/1,
+    list_all_sessions/0,
+    list_all_subscriptions/0,
+    list_all_streams/0,
+    list_all_iterators/0
+]).
 -endif.
 
 %% Currently, this is the clientid. We avoid `emqx_types:clientid()' because that can be
 
@@ -93,6 +101,8 @@
     iterators := #{topic() => subscription()},
     %% Inflight messages
     inflight := emqx_persistent_message_ds_replayer:inflight(),
+    %% Receive maximum
+    receive_maximum := pos_integer(),
     %%
     props := map()
 }.
 
@@ -103,22 +113,28 @@
 -type conninfo() :: emqx_session:conninfo().
 -type replies() :: emqx_session:replies().
 
--export_type([id/0]).
--define(PERSISTENT_MESSAGE_DB, emqx_persistent_message).
+-define(STATS_KEYS, [
+    subscriptions_cnt,
+    subscriptions_max,
+    inflight_cnt,
+    inflight_max,
+    next_pkt_id
+]).
+
+-export_type([id/0]).
 
 %%
 
 -spec create(clientinfo(), conninfo(), emqx_session:conf()) ->
     session().
-create(#{clientid := ClientID}, _ConnInfo, Conf) ->
+create(#{clientid := ClientID}, ConnInfo, Conf) ->
     % TODO: expiration
     ensure_timers(),
-    ensure_session(ClientID, Conf).
+    ensure_session(ClientID, ConnInfo, Conf).
 
 -spec open(clientinfo(), conninfo()) ->
     {_IsPresent :: true, session(), []} | false.
-open(#{clientid := ClientID}, _ConnInfo) ->
+open(#{clientid := ClientID} = _ClientInfo, ConnInfo) ->
     %% NOTE
     %% The fact that we need to concern about discarding all live channels here
     %% is essentially a consequence of the in-memory session design, where we
 
@@ -127,16 +143,19 @@ open(#{clientid := ClientID}, _ConnInfo) ->
     %% space, and move this call back into `emqx_cm` where it belongs.
     ok = emqx_cm:discard_session(ClientID),
     case open_session(ClientID) of
-        Session = #{} ->
+        Session0 = #{} ->
             ensure_timers(),
+            ReceiveMaximum = receive_maximum(ConnInfo),
+            Session = Session0#{receive_maximum => ReceiveMaximum},
             {true, Session, []};
         false ->
             false
     end.
 
-ensure_session(ClientID, Conf) ->
+ensure_session(ClientID, ConnInfo, Conf) ->
     {ok, Session, #{}} = session_ensure_new(ClientID, Conf),
-    Session#{iterators => #{}}.
+    ReceiveMaximum = receive_maximum(ConnInfo),
+    Session#{iterators => #{}, receive_maximum => ReceiveMaximum}.
 
 open_session(ClientID) ->
     case session_open(ClientID) of
 
@@ -186,10 +205,10 @@ info(upgrade_qos, #{props := Conf}) ->
     maps:get(upgrade_qos, Conf);
 % info(inflight, #sessmem{inflight = Inflight}) ->
 %     Inflight;
-% info(inflight_cnt, #sessmem{inflight = Inflight}) ->
-%     emqx_inflight:size(Inflight);
-% info(inflight_max, #sessmem{inflight = Inflight}) ->
-%     emqx_inflight:max_size(Inflight);
+info(inflight_cnt, #{inflight := Inflight}) ->
+    emqx_persistent_message_ds_replayer:n_inflight(Inflight);
+info(inflight_max, #{receive_maximum := ReceiveMaximum}) ->
+    ReceiveMaximum;
 info(retry_interval, #{props := Conf}) ->
     maps:get(retry_interval, Conf);
 % info(mqueue, #sessmem{mqueue = MQueue}) ->
 
@@ -200,8 +219,9 @@ info(retry_interval, #{props := Conf}) ->
 %     emqx_mqueue:max_len(MQueue);
 % info(mqueue_dropped, #sessmem{mqueue = MQueue}) ->
 %     emqx_mqueue:dropped(MQueue);
-info(next_pkt_id, #{}) ->
-    _PacketId = 'TODO';
+info(next_pkt_id, #{inflight := Inflight}) ->
+    {PacketId, _} = emqx_persistent_message_ds_replayer:next_packet_id(Inflight),
+    PacketId;
 % info(awaiting_rel, #sessmem{awaiting_rel = AwaitingRel}) ->
 %     AwaitingRel;
 % info(awaiting_rel_cnt, #sessmem{awaiting_rel = AwaitingRel}) ->
 
@@ -213,8 +233,7 @@ info(await_rel_timeout, #{props := Conf}) ->
 
 -spec stats(session()) -> emqx_types:stats().
 stats(Session) ->
-    % TODO: stub
-    info([], Session).
+    info(?STATS_KEYS, Session).
 
 %%--------------------------------------------------------------------
 %% Client -> Broker: SUBSCRIBE / UNSUBSCRIBE
 
@@ -339,9 +358,12 @@ deliver(_ClientInfo, _Delivers, Session) ->
 
 -spec handle_timeout(clientinfo(), _Timeout, session()) ->
     {ok, replies(), session()} | {ok, replies(), timeout(), session()}.
-handle_timeout(_ClientInfo, pull, Session = #{id := Id, inflight := Inflight0}) ->
-    WindowSize = 100,
-    {Publishes, Inflight} = emqx_persistent_message_ds_replayer:poll(Id, Inflight0, WindowSize),
+handle_timeout(
+    _ClientInfo,
+    pull,
+    Session = #{id := Id, inflight := Inflight0, receive_maximum := ReceiveMaximum}
+) ->
+    {Publishes, Inflight} = emqx_persistent_message_ds_replayer:poll(Id, Inflight0, ReceiveMaximum),
     %% TODO: make these values configurable:
     Timeout =
         case Publishes of
 
@@ -497,8 +519,6 @@ storage() ->
 %% @doc Called when a client connects. This function looks up a
 %% session or returns `false` if previous one couldn't be found.
 %%
-%% This function also spawns replay agents for each iterator.
-%%
 %% Note: session API doesn't handle session takeovers, it's the job of
 %% the broker.
 -spec session_open(id()) ->
 
@@ -541,14 +561,24 @@ session_create(SessionId, Props) ->
 -spec session_drop(id()) -> ok.
 session_drop(DSSessionId) ->
     transaction(fun() ->
-        %% TODO: ensure all iterators from this clientid are closed?
         ok = session_drop_subscriptions(DSSessionId),
+        ok = session_drop_iterators(DSSessionId),
+        ok = session_drop_streams(DSSessionId),
         ok = mnesia:delete(?SESSION_TAB, DSSessionId, write)
     end).
 
+-spec session_drop_subscriptions(id()) -> ok.
 session_drop_subscriptions(DSSessionId) ->
-    IteratorRefs = session_read_subscriptions(DSSessionId),
-    ok = lists:foreach(fun session_del_subscription/1, IteratorRefs).
+    Subscriptions = session_read_subscriptions(DSSessionId),
+    lists:foreach(
+        fun(#ds_sub{id = DSSubId} = DSSub) ->
+            TopicFilter = subscription_id_to_topic_filter(DSSubId),
+            TopicFilterBin = emqx_topic:join(TopicFilter),
+            ok = emqx_persistent_session_ds_router:do_delete_route(TopicFilterBin, DSSessionId),
+            ok = session_del_subscription(DSSub)
+        end,
+        Subscriptions
+    ).
 
 %% @doc Called when a client subscribes to a topic. Idempotent.
 -spec session_add_subscription(id(), topic_filter(), _Props :: map()) ->
 
@@ -619,6 +649,10 @@ new_subscription_id(DSSessionId, TopicFilter) ->
     DSSubId = {DSSessionId, TopicFilter},
     {DSSubId, NowMS}.
 
+-spec subscription_id_to_topic_filter(subscription_id()) -> topic_filter().
+subscription_id_to_topic_filter({_DSSessionId, TopicFilter}) ->
+    TopicFilter.
+
 %%--------------------------------------------------------------------
 %% RPC targets (v1)
 %%--------------------------------------------------------------------
 
@@ -643,24 +677,26 @@ do_ensure_all_iterators_closed(_DSSessionID) ->
 %% Reading batches
 %%--------------------------------------------------------------------
 
-renew_streams(Id) ->
-    Subscriptions = ro_transaction(fun() -> session_read_subscriptions(Id) end),
-    ExistingStreams = ro_transaction(fun() -> mnesia:read(?SESSION_STREAM_TAB, Id) end),
+-spec renew_streams(id()) -> ok.
+renew_streams(DSSessionId) ->
+    Subscriptions = ro_transaction(fun() -> session_read_subscriptions(DSSessionId) end),
+    ExistingStreams = ro_transaction(fun() -> mnesia:read(?SESSION_STREAM_TAB, DSSessionId) end),
     lists:foreach(
         fun(#ds_sub{id = {_, TopicFilter}, start_time = StartTime}) ->
-            renew_streams(Id, ExistingStreams, TopicFilter, StartTime)
+            renew_streams(DSSessionId, ExistingStreams, TopicFilter, StartTime)
         end,
         Subscriptions
     ).
 
-renew_streams(Id, ExistingStreams, TopicFilter, StartTime) ->
+-spec renew_streams(id(), [ds_stream()], emqx_ds:topic_filter(), emqx_ds:time()) -> ok.
+renew_streams(DSSessionId, ExistingStreams, TopicFilter, StartTime) ->
     AllStreams = emqx_ds:get_streams(?PERSISTENT_MESSAGE_DB, TopicFilter, StartTime),
     transaction(
         fun() ->
             lists:foreach(
                 fun({Rank, Stream}) ->
                     Rec = #ds_stream{
-                        session = Id,
+                        session = DSSessionId,
                         topic_filter = TopicFilter,
                         stream = Stream,
                         rank = Rank
 
@@ -670,8 +706,15 @@ renew_streams(Id, ExistingStreams, TopicFilter, StartTime) ->
                             ok;
                         false ->
                             mnesia:write(?SESSION_STREAM_TAB, Rec, write),
-                            {ok, Iterator} = emqx_ds:make_iterator(Stream, TopicFilter, StartTime),
-                            IterRec = #ds_iter{id = {Id, Stream}, iter = Iterator},
+                            {ok, Iterator} = emqx_ds:make_iterator(
+                                ?PERSISTENT_MESSAGE_DB, Stream, TopicFilter, StartTime
+                            ),
+                            %% Workaround: we convert `Stream' to a binary before
+                            %% attempting to store it in mnesia(rocksdb) because of a bug
+                            %% in `mnesia_rocksdb' when trying to do
+                            %% `mnesia:dirty_all_keys' later.
+                            StreamBin = term_to_binary(Stream),
+                            IterRec = #ds_iter{id = {DSSessionId, StreamBin}, iter = Iterator},
                             mnesia:write(?SESSION_ITER_TAB, IterRec, write)
                     end
                 end,
 
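Note: the `term_to_binary(Stream)' workaround appears in three places in this PR (`update_iterator', `get_iterator', and the hunk above); writers and readers must derive the table key identically or dirty reads will miss. The shared pattern, in one line:

%% Key derivation used consistently for ?SESSION_ITER_TAB:
Id = {DSSessionId, term_to_binary(Stream)}
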
@@ -680,6 +723,33 @@ renew_streams(Id, ExistingStreams, TopicFilter, StartTime) ->
         end
     ).
 
+%% must be called inside a transaction
+-spec session_drop_streams(id()) -> ok.
+session_drop_streams(DSSessionId) ->
+    MS = ets:fun2ms(
+        fun(#ds_stream{session = DSSessionId0}) when DSSessionId0 =:= DSSessionId ->
+            DSSessionId0
+        end
+    ),
+    StreamIDs = mnesia:select(?SESSION_STREAM_TAB, MS, write),
+    lists:foreach(fun(Key) -> mnesia:delete(?SESSION_STREAM_TAB, Key, write) end, StreamIDs).
+
+%% must be called inside a transaction
+-spec session_drop_iterators(id()) -> ok.
+session_drop_iterators(DSSessionId) ->
+    MS = ets:fun2ms(
+        fun(#ds_iter{id = {DSSessionId0, StreamBin}}) when DSSessionId0 =:= DSSessionId ->
+            StreamBin
+        end
+    ),
+    StreamBins = mnesia:select(?SESSION_ITER_TAB, MS, write),
+    lists:foreach(
+        fun(StreamBin) ->
+            mnesia:delete(?SESSION_ITER_TAB, {DSSessionId, StreamBin}, write)
+        end,
+        StreamBins
+    ).
+
 %%--------------------------------------------------------------------------------
 
 transaction(Fun) ->
 
@@ -726,3 +796,70 @@ ensure_timer(Type) ->
 ensure_timer(Type, Timeout) ->
     _ = emqx_utils:start_timer(Timeout, {emqx_session, Type}),
     ok.
+
+-spec receive_maximum(conninfo()) -> pos_integer().
+receive_maximum(ConnInfo) ->
+    %% Note: the default value should be always set by the channel
+    %% with respect to the zone configuration, but the type spec
+    %% indicates that it's optional.
+    maps:get(receive_maximum, ConnInfo, 65_535).
+
+-ifdef(TEST).
+list_all_sessions() ->
+    DSSessionIds = mnesia:dirty_all_keys(?SESSION_TAB),
+    Sessions = lists:map(
+        fun(SessionID) ->
+            {ok, Session, Subscriptions} = session_open(SessionID),
+            {SessionID, #{session => Session, subscriptions => Subscriptions}}
+        end,
+        DSSessionIds
+    ),
+    maps:from_list(Sessions).
+
+list_all_subscriptions() ->
+    DSSubIds = mnesia:dirty_all_keys(?SESSION_SUBSCRIPTIONS_TAB),
+    Subscriptions = lists:map(
+        fun(DSSubId) ->
+            [DSSub] = mnesia:dirty_read(?SESSION_SUBSCRIPTIONS_TAB, DSSubId),
+            {DSSubId, export_subscription(DSSub)}
+        end,
+        DSSubIds
+    ),
+    maps:from_list(Subscriptions).
+
+list_all_streams() ->
+    DSStreamIds = mnesia:dirty_all_keys(?SESSION_STREAM_TAB),
+    DSStreams = lists:map(
+        fun(DSStreamId) ->
+            Records = mnesia:dirty_read(?SESSION_STREAM_TAB, DSStreamId),
+            ExtDSStreams =
+                lists:map(
+                    fun(Record) ->
+                        export_record(
+                            Record,
+                            #ds_stream.session,
+                            [session, topic_filter, stream, rank],
+                            #{}
+                        )
+                    end,
+                    Records
+                ),
+            {DSStreamId, ExtDSStreams}
+        end,
+        DSStreamIds
+    ),
+    maps:from_list(DSStreams).
+
+list_all_iterators() ->
+    DSIterIds = mnesia:dirty_all_keys(?SESSION_ITER_TAB),
+    DSIters = lists:map(
+        fun(DSIterId) ->
+            [Record] = mnesia:dirty_read(?SESSION_ITER_TAB, DSIterId),
+            {DSIterId, export_record(Record, #ds_iter.id, [id, iter], #{})}
+        end,
+        DSIterIds
+    ),
+    maps:from_list(DSIters).
+
+%% ifdef(TEST)
+-endif.
 
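Note: 65_535 is the largest value the MQTT `Receive-Maximum' property can carry, so `receive_maximum/1' is effectively "unlimited unless the channel says otherwise". The value flows into the `pull' timeout handler earlier in this PR as the poll window, replacing the hard-coded `WindowSize = 100'. Sketch of the effect (values assumed):

%% ConnInfo = #{receive_maximum => 32} -> replayer polls at most 32 publishes
%% ConnInfo without the key            -> replayer polls up to 65535
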
@@ -16,6 +16,8 @@
 -ifndef(EMQX_PERSISTENT_SESSION_DS_HRL_HRL).
 -define(EMQX_PERSISTENT_SESSION_DS_HRL_HRL, true).
 
+-define(PERSISTENT_MESSAGE_DB, emqx_persistent_message).
+
 -define(SESSION_TAB, emqx_ds_session).
 -define(SESSION_SUBSCRIPTIONS_TAB, emqx_ds_session_subscriptions).
 -define(SESSION_STREAM_TAB, emqx_ds_stream_tab).
 
@@ -37,9 +39,10 @@
     rank :: emqx_ds:stream_rank()
 }).
 -type ds_stream() :: #ds_stream{}.
+-type ds_stream_bin() :: binary().
 
 -record(ds_iter, {
-    id :: {emqx_persistent_session_ds:id(), emqx_ds:stream()},
+    id :: {emqx_persistent_session_ds:id(), ds_stream_bin()},
     iter :: emqx_ds:iterator()
 }).
 
@@ -47,11 +47,9 @@
 -type bytesize() :: integer().
 -type wordsize() :: bytesize().
 -type percent() :: float().
--type file() :: string().
--type comma_separated_list() :: list().
+-type comma_separated_list() :: list(string()).
 -type comma_separated_binary() :: [binary()].
 -type comma_separated_atoms() :: [atom()].
--type bar_separated_list() :: list().
 -type ip_port() :: tuple() | integer().
 -type cipher() :: map().
 -type port_number() :: 1..65535.
 
@@ -75,7 +73,6 @@
 -typerefl_from_string({percent/0, emqx_schema, to_percent}).
 -typerefl_from_string({comma_separated_list/0, emqx_schema, to_comma_separated_list}).
 -typerefl_from_string({comma_separated_binary/0, emqx_schema, to_comma_separated_binary}).
--typerefl_from_string({bar_separated_list/0, emqx_schema, to_bar_separated_list}).
 -typerefl_from_string({ip_port/0, emqx_schema, to_ip_port}).
 -typerefl_from_string({cipher/0, emqx_schema, to_erl_cipher_suite}).
 -typerefl_from_string({comma_separated_atoms/0, emqx_schema, to_comma_separated_atoms}).
 
|
||||||
to_percent/1,
|
to_percent/1,
|
||||||
to_comma_separated_list/1,
|
to_comma_separated_list/1,
|
||||||
to_comma_separated_binary/1,
|
to_comma_separated_binary/1,
|
||||||
to_bar_separated_list/1,
|
|
||||||
to_ip_port/1,
|
to_ip_port/1,
|
||||||
to_erl_cipher_suite/1,
|
to_erl_cipher_suite/1,
|
||||||
to_comma_separated_atoms/1,
|
to_comma_separated_atoms/1,
|
||||||
|
@@ -154,10 +150,8 @@
     bytesize/0,
     wordsize/0,
     percent/0,
-    file/0,
     comma_separated_list/0,
     comma_separated_binary/0,
-    bar_separated_list/0,
     ip_port/0,
     cipher/0,
     comma_separated_atoms/0,
 
@@ -2564,9 +2558,6 @@ to_json_binary(Str) ->
             Error
     end.
 
-to_bar_separated_list(Str) ->
-    {ok, string:tokens(Str, "| ")}.
-
 %% @doc support the following format:
 %% - 127.0.0.1:1883
 %% - ::1:1883
 
@@ -3316,7 +3307,7 @@ get_tombstone_map_value_type(Schema) ->
     %% hoconsc:map_value_type(Schema)
     ?MAP(_Name, Union) = hocon_schema:field_schema(Schema, type),
     %% TODO: violation of abstraction, fix hoconsc:union_members/1
-    ?UNION(Members) = Union,
+    ?UNION(Members, _) = Union,
     Tombstone = tombstone(),
     [Type, Tombstone] = hoconsc:union_members(Members),
     Type.
 
@@ -176,6 +176,7 @@
     t().
 -callback open(clientinfo(), conninfo()) ->
     {_IsPresent :: true, t(), _ReplayContext} | false.
+-callback destroy(t() | clientinfo()) -> ok.
 
 %%--------------------------------------------------------------------
 %% Create a Session
 
@@ -247,7 +248,14 @@ get_mqtt_conf(Zone, Key) ->
 
 -spec destroy(clientinfo(), conninfo()) -> ok.
 destroy(ClientInfo, ConnInfo) ->
-    (choose_impl_mod(ConnInfo)):destroy(ClientInfo).
+    %% When destroying/discarding a session, the current `ClientInfo' might suggest an
+    %% implementation which does not correspond to the one previously used by this client.
+    %% An example of this is a client that first connects with `Session-Expiry-Interval' >
+    %% 0, and later reconnects with `Session-Expiry-Interval' = 0 and `clean_start' =
+    %% true.  So we may simply destroy sessions from all implementations, since the key
+    %% (ClientID) is the same.
+    Mods = choose_impl_candidates(ConnInfo),
+    lists:foreach(fun(Mod) -> Mod:destroy(ClientInfo) end, Mods).
 
 -spec destroy(t()) -> ok.
 destroy(Session) ->
 
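Note: `choose_impl_candidates/1' is not shown in this diff; it is assumed to return every session implementation module that could own state for this client (e.g. the in-memory and the durable-storage session backends), so destroying by ClientID is safe even when the reconnect parameters point at a different implementation than the one that created the session.
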
@@ -44,6 +44,8 @@
 %% State is stored in-memory in the process heap.
 -module(emqx_session_mem).
 
+-behaviour(emqx_session).
+
 -include("emqx.hrl").
 -include("emqx_mqtt.hrl").
 -include("emqx_session_mem.hrl").
 
@@ -74,6 +74,9 @@
 
 -export([merge_appspec/2]).
 
+%% "Unofficial" `emqx_config_handler' and `emqx_conf' APIs
+-export([schema_module/0, upgrade_raw_conf/1]).
+
 -export_type([appspec/0]).
 -export_type([appspec_opts/0]).
 
@@ -477,3 +480,18 @@ render_config(Config = #{}) ->
     unicode:characters_to_binary(hocon_pp:do(Config, #{}));
 render_config(Config) ->
     unicode:characters_to_binary(Config).
+
+%%
+
+%% "Unofficial" `emqx_config_handler' API
+schema_module() ->
+    ?MODULE.
+
+%% "Unofficial" `emqx_conf' API
+upgrade_raw_conf(Conf) ->
+    case emqx_release:edition() of
+        ee ->
+            emqx_enterprise_schema:upgrade_raw_conf(Conf);
+        ce ->
+            emqx_conf_schema:upgrade_raw_conf(Conf)
+    end.
 
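Note: these helpers close the loop with `upgrade_conf/1' earlier in this PR. In tests the config loader is `emqx_cth_suite' itself, so the dynamic dispatch resolves as follows (sketch of the chain; the wiring is assumed from the two hunks):

%% ConfLoader   = emqx_app:get_config_loader()  %% -> emqx_cth_suite
%% SchemaModule = ConfLoader:schema_module()    %% -> emqx_cth_suite (?MODULE)
%% SchemaModule:upgrade_raw_conf(Conf)          %% -> edition-specific schema
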
@@ -256,14 +256,14 @@ consume(TopicFilter, StartMS) ->
     Streams = emqx_ds:get_streams(?PERSISTENT_MESSAGE_DB, TopicFilter, StartMS),
     lists:flatmap(
         fun({_Rank, Stream}) ->
-            {ok, It} = emqx_ds:make_iterator(Stream, TopicFilter, StartMS),
+            {ok, It} = emqx_ds:make_iterator(?PERSISTENT_MESSAGE_DB, Stream, TopicFilter, StartMS),
             consume(It)
         end,
         Streams
     ).
 
 consume(It) ->
-    case emqx_ds:next(It, 100) of
+    case emqx_ds:next(?PERSISTENT_MESSAGE_DB, It, 100) of
         {ok, _NIt, _Msgs = []} ->
             [];
         {ok, NIt, Msgs} ->
 
@@ -133,7 +133,7 @@ get_listener_port(Type, Name) ->
 end_per_group(Group, Config) when Group == tcp; Group == ws; Group == quic ->
     ok = emqx_cth_suite:stop(?config(group_apps, Config));
 end_per_group(_, _Config) ->
-    ok = emqx_ds:drop_db(?PERSISTENT_MESSAGE_DB),
+    catch emqx_ds:drop_db(?PERSISTENT_MESSAGE_DB),
     ok.
 
 init_per_testcase(TestCase, Config) ->
 
|
@ -599,6 +599,7 @@ t_publish_while_client_is_gone(Config) ->
|
||||||
|
|
||||||
ok = emqtt:disconnect(Client2).
|
ok = emqtt:disconnect(Client2).
|
||||||
|
|
||||||
|
%% TODO: don't skip after QoS2 support is added to DS.
|
||||||
t_clean_start_drops_subscriptions(init, Config) -> skip_ds_tc(Config);
|
t_clean_start_drops_subscriptions(init, Config) -> skip_ds_tc(Config);
|
||||||
t_clean_start_drops_subscriptions('end', _Config) -> ok.
|
t_clean_start_drops_subscriptions('end', _Config) -> ok.
|
||||||
t_clean_start_drops_subscriptions(Config) ->
|
t_clean_start_drops_subscriptions(Config) ->
|
||||||
|
|
|
@@ -674,7 +674,16 @@ t_multi_streams_packet_malform(Config) ->
 
     ?assert(is_list(emqtt:info(C))),
 
-    {error, stm_send_error, aborted} = quicer:send(MalformStream, <<1, 2, 3, 4, 5, 6, 7, 8, 9, 0>>),
+    {error, stm_send_error, _} =
+        snabbkaffe:retry(
+            10000,
+            10,
+            fun() ->
+                {error, stm_send_error, _} = quicer:send(
+                    MalformStream, <<1, 2, 3, 4, 5, 6, 7, 8, 9, 0>>
+                )
+            end
+        ),
 
     ?assert(is_list(emqtt:info(C))),
 
@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_auth, [
     {description, "EMQX Authentication and authorization"},
-    {vsn, "0.1.27"},
+    {vsn, "0.1.28"},
     {modules, []},
     {registered, [emqx_auth_sup]},
     {applications, [
 
@@ -38,7 +38,8 @@
     authenticator_type_without/1,
     authenticator_type_without/2,
     mechanism/1,
-    backend/1
+    backend/1,
+    namespace/0
 ]).
 
 -export([
 
@ -60,6 +61,7 @@
|
||||||
api_write
|
api_write
|
||||||
%% config: schema for config validation
|
%% config: schema for config validation
|
||||||
| config.
|
| config.
|
||||||
|
-callback namespace() -> string().
|
||||||
-callback refs() -> [schema_ref()].
|
-callback refs() -> [schema_ref()].
|
||||||
-callback refs(shema_kind()) -> [schema_ref()].
|
-callback refs(shema_kind()) -> [schema_ref()].
|
||||||
-callback select_union_member(emqx_config:raw_config()) -> [schema_ref()] | undefined | no_return().
|
-callback select_union_member(emqx_config:raw_config()) -> [schema_ref()] | undefined | no_return().
|
||||||
|
@ -74,6 +76,8 @@
|
||||||
refs/1
|
refs/1
|
||||||
]).
|
]).
|
||||||
|
|
||||||
|
namespace() -> "authn".
|
||||||
|
|
||||||
roots() -> [].
|
roots() -> [].
|
||||||
|
|
||||||
injected_fields(AuthnSchemaMods) ->
|
injected_fields(AuthnSchemaMods) ->
|
||||||
|
|
|
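The `namespace/0` callback introduced above is implemented as `namespace() -> "authn".` (or `"authz"`) by every schema module touched in this PR. Judging by the test updates further down, the namespace is what hocon now prepends to union member names in validation errors, so `"builtin_db"` becomes `"authn:builtin_db"`. A minimal sketch of a provider schema adopting the callback; the module, mechanism, and field names here are illustrative, not from this PR:

    -module(emqx_authn_example_schema).

    -behaviour(emqx_authn_schema).

    -export([
        namespace/0,
        fields/1,
        desc/1,
        refs/0,
        select_union_member/1
    ]).

    %% Namespace used to qualify this schema's union members, e.g. "authn:example".
    namespace() -> "authn".

    refs() -> [hoconsc:ref(?MODULE, example)].

    fields(example) -> [].

    desc(example) -> "Example authenticator".

    %% Pick the matching union member based on the raw user config.
    select_union_member(#{<<"mechanism">> := <<"example">>}) -> refs();
    select_union_member(_) -> undefined.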
@@ -136,7 +136,7 @@ authz_fields() ->
     [
         {sources,
             ?HOCON(
-                ?ARRAY(?UNION(UnionMemberSelector)),
+                ?ARRAY(hoconsc:union(UnionMemberSelector)),
                 #{
                     default => [default_authz()],
                     desc => ?DESC(sources),

@@ -153,7 +153,7 @@ api_authz_fields() ->
     [{sources, ?HOCON(?ARRAY(api_source_type()), #{desc => ?DESC(sources)})}].

 api_source_type() ->
-    ?UNION(api_authz_refs()).
+    hoconsc:union(api_authz_refs()).

 api_authz_refs() ->
     lists:concat([api_source_refs(Mod) || Mod <- source_schema_mods()]).
@@ -16,7 +16,6 @@

 -module(emqx_authn_chains_SUITE).

--behaviour(hocon_schema).
 -behaviour(emqx_authn_provider).

 -compile(export_all).
@@ -54,7 +54,7 @@ t_check_schema(_Config) ->
     ?assertThrow(
         #{
             path := "authentication.1.password_hash_algorithm.name",
-            matched_type := "builtin_db/authn-hash:simple",
+            matched_type := "authn:builtin_db/authn-hash:simple",
             reason := unable_to_convert_to_enum_symbol
         },
         Check(ConfigNotOk)

@@ -73,7 +73,7 @@ t_check_schema(_Config) ->
         #{
             path := "authentication.1.password_hash_algorithm",
             reason := "algorithm_name_missing",
-            matched_type := "builtin_db"
+            matched_type := "authn:builtin_db"
         },
         Check(ConfigMissingAlgoName)
     ).
@@ -22,6 +22,7 @@
 -define(ERR(Reason), {error, Reason}).

 union_member_selector_mongo_test_() ->
+    ok = ensure_schema_load(),
     [
         {"unknown", fun() ->
             ?assertMatch(

@@ -31,25 +32,26 @@ union_member_selector_mongo_test_() ->
         end},
         {"single", fun() ->
             ?assertMatch(
-                ?ERR(#{matched_type := "mongo_single"}),
+                ?ERR(#{matched_type := "authn:mongo_single"}),
                 check("{mechanism = password_based, backend = mongodb, mongo_type = single}")
             )
         end},
         {"replica-set", fun() ->
             ?assertMatch(
-                ?ERR(#{matched_type := "mongo_rs"}),
+                ?ERR(#{matched_type := "authn:mongo_rs"}),
                 check("{mechanism = password_based, backend = mongodb, mongo_type = rs}")
             )
         end},
         {"sharded", fun() ->
             ?assertMatch(
-                ?ERR(#{matched_type := "mongo_sharded"}),
+                ?ERR(#{matched_type := "authn:mongo_sharded"}),
                 check("{mechanism = password_based, backend = mongodb, mongo_type = sharded}")
             )
         end}
     ].

 union_member_selector_jwt_test_() ->
+    ok = ensure_schema_load(),
     [
         {"unknown", fun() ->
             ?assertMatch(

@@ -59,25 +61,26 @@ union_member_selector_jwt_test_() ->
         end},
         {"jwks", fun() ->
             ?assertMatch(
-                ?ERR(#{matched_type := "jwt_jwks"}),
+                ?ERR(#{matched_type := "authn:jwt_jwks"}),
                 check("{mechanism = jwt, use_jwks = true}")
             )
         end},
         {"publick-key", fun() ->
             ?assertMatch(
-                ?ERR(#{matched_type := "jwt_public_key"}),
+                ?ERR(#{matched_type := "authn:jwt_public_key"}),
                 check("{mechanism = jwt, use_jwks = false, public_key = 1}")
             )
         end},
         {"hmac-based", fun() ->
             ?assertMatch(
-                ?ERR(#{matched_type := "jwt_hmac"}),
+                ?ERR(#{matched_type := "authn:jwt_hmac"}),
                 check("{mechanism = jwt, use_jwks = false}")
             )
         end}
     ].

 union_member_selector_redis_test_() ->
+    ok = ensure_schema_load(),
     [
         {"unknown", fun() ->
             ?assertMatch(

@@ -87,25 +90,26 @@ union_member_selector_redis_test_() ->
         end},
         {"single", fun() ->
             ?assertMatch(
-                ?ERR(#{matched_type := "redis_single"}),
+                ?ERR(#{matched_type := "authn:redis_single"}),
                 check("{mechanism = password_based, backend = redis, redis_type = single}")
             )
         end},
         {"cluster", fun() ->
             ?assertMatch(
-                ?ERR(#{matched_type := "redis_cluster"}),
+                ?ERR(#{matched_type := "authn:redis_cluster"}),
                 check("{mechanism = password_based, backend = redis, redis_type = cluster}")
             )
         end},
         {"sentinel", fun() ->
             ?assertMatch(
-                ?ERR(#{matched_type := "redis_sentinel"}),
+                ?ERR(#{matched_type := "authn:redis_sentinel"}),
                 check("{mechanism = password_based, backend = redis, redis_type = sentinel}")
             )
         end}
     ].

 union_member_selector_http_test_() ->
+    ok = ensure_schema_load(),
     [
         {"unknown", fun() ->
             ?assertMatch(

@@ -115,13 +119,13 @@ union_member_selector_http_test_() ->
         end},
         {"get", fun() ->
             ?assertMatch(
-                ?ERR(#{matched_type := "http_get"}),
+                ?ERR(#{matched_type := "authn:http_get"}),
                 check("{mechanism = password_based, backend = http, method = get}")
             )
         end},
         {"post", fun() ->
             ?assertMatch(
-                ?ERR(#{matched_type := "http_post"}),
+                ?ERR(#{matched_type := "authn:http_post"}),
                 check("{mechanism = password_based, backend = http, method = post}")
             )
         end}

@@ -132,3 +136,7 @@ check(HoconConf) ->
         #{roots => emqx_authn_schema:global_auth_fields()},
         ["authentication= ", HoconConf]
     ).
+
+ensure_schema_load() ->
+    _ = emqx_conf_schema:roots(),
+    ok.
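The new `ensure_schema_load/0` helper matters because these are eunit tests that run without the full application started: evaluating `emqx_conf_schema:roots()` forces the schema modules to load, so the namespaced union member names are registered before `check/1` runs. A hedged illustration reusing the suite's own `check/1` and `?ERR` helpers (the test name is hypothetical):

    %% Once the schema is loaded, a mismatching Redis config fails with
    %% the namespaced "authn:redis_single" rather than plain "redis_single".
    prefixed_matched_type_test() ->
        ok = ensure_schema_load(),
        ?assertMatch(
            ?ERR(#{matched_type := "authn:redis_single"}),
            check("{mechanism = password_based, backend = redis, redis_type = single}")
        ).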
@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_auth_http, [
     {description, "EMQX External HTTP API Authentication and Authorization"},
-    {vsn, "0.1.0"},
+    {vsn, "0.1.1"},
     {registered, []},
     {mod, {emqx_auth_http_app, []}},
     {applications, [
@@ -16,10 +16,6 @@

 -module(emqx_authn_http_schema).

--include("emqx_auth_http.hrl").
--include_lib("emqx_auth/include/emqx_authn.hrl").
--include_lib("hocon/include/hoconsc.hrl").
-
 -behaviour(emqx_authn_schema).

 -export([

@@ -27,9 +23,14 @@
     validations/0,
     desc/1,
     refs/0,
-    select_union_member/1
+    select_union_member/1,
+    namespace/0
 ]).

+-include("emqx_auth_http.hrl").
+-include_lib("emqx_auth/include/emqx_authn.hrl").
+-include_lib("hocon/include/hoconsc.hrl").
+
 -define(NOT_EMPTY(MSG), emqx_resource_validator:not_empty(MSG)).
 -define(THROW_VALIDATION_ERROR(ERROR, MESSAGE),
     throw(#{

@@ -38,6 +39,8 @@
     })
 ).

+namespace() -> "authn".
+
 refs() ->
     [?R_REF(http_get), ?R_REF(http_post)].

@@ -97,7 +100,7 @@ common_fields() ->
     {backend, emqx_authn_schema:backend(?AUTHN_BACKEND)},
     {url, fun url/1},
     {body,
-        hoconsc:mk(map([{fuzzy, term(), binary()}]), #{
+        hoconsc:mk(typerefl:alias("map", map([{fuzzy, term(), binary()}])), #{
             required => false, desc => ?DESC(body)
         })},
     {request_timeout, fun request_timeout/1}
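The `body` field above (and the `headers` fields in the next file) are wrapped in `typerefl:alias(...)`. From these call sites it appears the alias only changes the displayed type name to "map", so docs and error messages report a friendly name while validation keeps the underlying reflected type; the 2- and 4-argument forms below are copied verbatim from this PR, not taken from typerefl documentation. A sketch assuming the same hoconsc/typerefl includes as the schema modules, with hypothetical function names:

    %% Validates a fuzzy term->binary map, but is displayed as "map".
    body_type() ->
        typerefl:alias("map", map([{fuzzy, term(), binary()}])).

    %% Validates a list of binary pairs, displayed as "map" (4-argument
    %% form exactly as used at the headers/1 call sites in this PR).
    headers_type() ->
        typerefl:alias("map", list({binary(), binary()}), #{}, [binary(), binary()]).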
@@ -26,7 +26,8 @@
     fields/1,
     desc/1,
     source_refs/0,
-    select_union_member/1
+    select_union_member/1,
+    namespace/0
 ]).

 -export([

@@ -38,6 +39,8 @@

 -import(emqx_schema, [mk_duration/2]).

+namespace() -> "authz".
+
 type() -> ?AUTHZ_TYPE.

 source_refs() ->

@@ -96,7 +99,7 @@ http_common_fields() ->
         mk_duration("Request timeout", #{
             required => false, default => <<"30s">>, desc => ?DESC(request_timeout)
         })},
-    {body, ?HOCON(map(), #{required => false, desc => ?DESC(body)})}
+    {body, ?HOCON(hoconsc:map(name, binary()), #{required => false, desc => ?DESC(body)})}
 ] ++
     lists:keydelete(
         pool_type,

@@ -105,7 +108,7 @@ http_common_fields() ->
     ).

 headers(type) ->
-    list({binary(), binary()});
+    typerefl:alias("map", list({binary(), binary()}), #{}, [binary(), binary()]);
 headers(desc) ->
     ?DESC(?FUNCTION_NAME);
 headers(converter) ->

@@ -118,7 +121,7 @@ headers(_) ->
     undefined.

 headers_no_content_type(type) ->
-    list({binary(), binary()});
+    typerefl:alias("map", list({binary(), binary()}), #{}, [binary(), binary()]);
 headers_no_content_type(desc) ->
     ?DESC(?FUNCTION_NAME);
 headers_no_content_type(converter) ->
@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_auth_jwt, [
     {description, "EMQX JWT Authentication and Authorization"},
-    {vsn, "0.1.0"},
+    {vsn, "0.1.1"},
     {registered, []},
     {mod, {emqx_auth_jwt_app, []}},
     {applications, [
@@ -16,18 +16,21 @@

 -module(emqx_authn_jwt_schema).

--include("emqx_auth_jwt.hrl").
--include_lib("hocon/include/hoconsc.hrl").
-
 -behaviour(emqx_authn_schema).

 -export([
+    namespace/0,
     fields/1,
     desc/1,
     refs/0,
     select_union_member/1
 ]).

+-include("emqx_auth_jwt.hrl").
+-include_lib("hocon/include/hoconsc.hrl").
+
+namespace() -> "authn".
+
 refs() ->
     [
         ?R_REF(jwt_hmac),

@@ -149,7 +152,8 @@ refresh_interval(validator) -> [fun(I) -> I > 0 end];
 refresh_interval(_) -> undefined.

 verify_claims(type) ->
-    list();
+    %% user input is a map, converted to a list of {binary(), binary()}
+    typerefl:alias("map", list());
 verify_claims(desc) ->
     ?DESC(?FUNCTION_NAME);
 verify_claims(default) ->
@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_auth_ldap, [
     {description, "EMQX LDAP Authentication and Authorization"},
-    {vsn, "0.1.0"},
+    {vsn, "0.1.1"},
     {registered, []},
     {mod, {emqx_auth_ldap_app, []}},
     {applications, [
@@ -16,18 +16,21 @@

 -module(emqx_authn_ldap_schema).

--include("emqx_auth_ldap.hrl").
--include_lib("hocon/include/hoconsc.hrl").
-
 -behaviour(emqx_authn_schema).

 -export([
+    namespace/0,
     fields/1,
     desc/1,
     refs/0,
     select_union_member/1
 ]).

+-include("emqx_auth_ldap.hrl").
+-include_lib("hocon/include/hoconsc.hrl").
+
+namespace() -> "authn".
+
 refs() ->
     [?R_REF(ldap), ?R_REF(ldap_deprecated)].

@@ -52,7 +55,7 @@ fields(ldap) ->
     [
         {method,
             ?HOCON(
-                ?UNION([?R_REF(hash_method), ?R_REF(bind_method)]),
+                hoconsc:union([?R_REF(hash_method), ?R_REF(bind_method)]),
                 #{desc => ?DESC(method)}
             )}
     ];
@@ -26,9 +26,12 @@
     fields/1,
     desc/1,
     source_refs/0,
-    select_union_member/1
+    select_union_member/1,
+    namespace/0
 ]).

+namespace() -> "authz".
+
 type() -> ?AUTHZ_TYPE.

 fields(ldap) ->
@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_auth_mnesia, [
     {description, "EMQX Buitl-in Database Authentication and Authorization"},
-    {vsn, "0.1.0"},
+    {vsn, "0.1.1"},
     {registered, []},
     {mod, {emqx_auth_mnesia_app, []}},
     {applications, [
@@ -25,9 +25,12 @@
     fields/1,
     desc/1,
     refs/1,
-    select_union_member/2
+    select_union_member/2,
+    namespace/0
 ]).

+namespace() -> "authn".
+
 refs(api_write) ->
     [?R_REF(builtin_db_api)];
 refs(_) ->

@@ -22,12 +22,15 @@
 -behaviour(emqx_authn_schema).

 -export([
+    namespace/0,
     fields/1,
     desc/1,
     refs/0,
     select_union_member/1
 ]).

+namespace() -> "authn".
+
 refs() ->
     [?R_REF(scram)].
@@ -26,9 +26,12 @@
     fields/1,
     desc/1,
     source_refs/0,
-    select_union_member/1
+    select_union_member/1,
+    namespace/0
 ]).

+namespace() -> "authz".
+
 type() -> ?AUTHZ_TYPE.

 fields(builtin_db) ->
@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_auth_mongodb, [
     {description, "EMQX MongoDB Authentication and Authorization"},
-    {vsn, "0.1.0"},
+    {vsn, "0.1.1"},
     {registered, []},
     {mod, {emqx_auth_mongodb_app, []}},
     {applications, [
@@ -16,18 +16,21 @@

 -module(emqx_authn_mongodb_schema).

--include("emqx_auth_mongodb.hrl").
--include_lib("hocon/include/hoconsc.hrl").
-
 -behaviour(emqx_authn_schema).

 -export([
+    namespace/0,
     fields/1,
     desc/1,
     refs/0,
     select_union_member/1
 ]).

+-include("emqx_auth_mongodb.hrl").
+-include_lib("hocon/include/hoconsc.hrl").
+
+namespace() -> "authn".
+
 refs() ->
     [
         ?R_REF(mongo_single),
@@ -16,17 +16,20 @@

 -module(emqx_authz_mongodb_schema).

--include("emqx_auth_mongodb.hrl").
--include_lib("hocon/include/hoconsc.hrl").
-
 -export([
     type/0,
     fields/1,
     desc/1,
     source_refs/0,
-    select_union_member/1
+    select_union_member/1,
+    namespace/0
 ]).

+-include("emqx_auth_mongodb.hrl").
+-include_lib("hocon/include/hoconsc.hrl").
+
+namespace() -> "authz".
+
 type() -> ?AUTHZ_TYPE.

 source_refs() ->
@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_auth_mysql, [
     {description, "EMQX MySQL Authentication and Authorization"},
-    {vsn, "0.1.0"},
+    {vsn, "0.1.1"},
     {registered, []},
     {mod, {emqx_auth_mysql_app, []}},
     {applications, [
@@ -16,18 +16,21 @@

 -module(emqx_authn_mysql_schema).

--include("emqx_auth_mysql.hrl").
--include_lib("hocon/include/hoconsc.hrl").
-
 -behaviour(emqx_authn_schema).

 -export([
+    namespace/0,
     fields/1,
     desc/1,
     refs/0,
     select_union_member/1
 ]).

+-include("emqx_auth_mysql.hrl").
+-include_lib("hocon/include/hoconsc.hrl").
+
+namespace() -> "authn".
+
 refs() ->
     [?R_REF(mysql)].
@@ -22,6 +22,7 @@
 -behaviour(emqx_authz_schema).

 -export([
+    namespace/0,
     type/0,
     fields/1,
     desc/1,

@@ -29,6 +30,8 @@
     select_union_member/1
 ]).

+namespace() -> "authz".
+
 type() -> ?AUTHZ_TYPE.

 fields(mysql) ->
@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_auth_postgresql, [
     {description, "EMQX PostgreSQL Authentication and Authorization"},
-    {vsn, "0.1.0"},
+    {vsn, "0.1.1"},
     {registered, []},
     {mod, {emqx_auth_postgresql_app, []}},
     {applications, [
@@ -22,12 +22,15 @@
 -behaviour(emqx_authn_schema).

 -export([
+    namespace/0,
     fields/1,
     desc/1,
     refs/0,
     select_union_member/1
 ]).

+namespace() -> "authn".
+
 select_union_member(
     #{
         <<"mechanism">> := ?AUTHN_MECHANISM_BIN, <<"backend">> := ?AUTHN_BACKEND_BIN
@@ -22,6 +22,7 @@
 -behaviour(emqx_authz_schema).

 -export([
+    namespace/0,
     type/0,
     fields/1,
     desc/1,

@@ -29,6 +30,8 @@
     select_union_member/1
 ]).

+namespace() -> "authz".
+
 type() -> ?AUTHZ_TYPE.

 fields(postgresql) ->
@@ -104,7 +104,7 @@ t_update_with_invalid_config(_Config) ->
     ?assertMatch(
         {error, #{
             kind := validation_error,
-            matched_type := "postgresql",
+            matched_type := "authn:postgresql",
             path := "authentication.1.server",
             reason := required_field
         }},
@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_auth_redis, [
     {description, "EMQX Redis Authentication and Authorization"},
-    {vsn, "0.1.0"},
+    {vsn, "0.1.1"},
     {registered, []},
     {mod, {emqx_auth_redis_app, []}},
     {applications, [
@@ -22,12 +22,15 @@
 -behaviour(emqx_authn_schema).

 -export([
+    namespace/0,
     fields/1,
     desc/1,
     refs/0,
     select_union_member/1
 ]).

+namespace() -> "authn".
+
 refs() ->
     [
         ?R_REF(redis_single),
@@ -22,6 +22,7 @@
 -behaviour(emqx_authz_schema).

 -export([
+    namespace/0,
     type/0,
     fields/1,
     desc/1,

@@ -29,6 +30,8 @@
     select_union_member/1
 ]).

+namespace() -> "authz".
+
 type() -> ?AUTHZ_TYPE.

 fields(redis_single) ->
@@ -170,7 +170,7 @@ test_create_invalid_config(InvalidAuthConfig, Path) ->
     ?assertMatch(
         {error, #{
             kind := validation_error,
-            matched_type := "redis_single",
+            matched_type := "authn:redis_single",
             path := Path
         }},
         emqx:update_config(?PATH, {create_authenticator, ?GLOBAL, InvalidAuthConfig})
@@ -0,0 +1,200 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%%     http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%--------------------------------------------------------------------
+
+%% @doc The module which knows everything about actions.
+
+%% NOTE: it does not cover the V1 bridges.
+
+-module(emqx_action_info).
+
+-export([
+    action_type_to_connector_type/1,
+    action_type_to_bridge_v1_type/1,
+    bridge_v1_type_to_action_type/1,
+    is_action_type/1,
+    registered_schema_modules/0
+]).
+
+-callback bridge_v1_type_name() -> atom().
+-callback action_type_name() -> atom().
+-callback connector_type_name() -> atom().
+-callback schema_module() -> atom().
+
+-optional_callbacks([bridge_v1_type_name/0]).
+
+%% ====================================================================
+%% Hadcoded list of info modules for actions
+%% TODO: Remove this list once we have made sure that all relevants
+%% apps are loaded before this module is called.
+%% ====================================================================
+
+-if(?EMQX_RELEASE_EDITION == ee).
+hard_coded_action_info_modules_ee() ->
+    [
+        emqx_bridge_kafka_action_info,
+        emqx_bridge_azure_event_hub_action_info,
+        emqx_bridge_syskeeper_action_info
+    ].
+-else.
+hard_coded_action_info_modules_ee() ->
+    [].
+-endif.
+
+hard_coded_action_info_modules_common() ->
+    [].
+
+hard_coded_action_info_modules() ->
+    hard_coded_action_info_modules_common() ++ hard_coded_action_info_modules_ee().
+
+%% ====================================================================
+%% API
+%% ====================================================================
+
+action_type_to_connector_type(Type) when not is_atom(Type) ->
+    action_type_to_connector_type(binary_to_existing_atom(iolist_to_binary(Type)));
+action_type_to_connector_type(Type) ->
+    ActionInfoMap = info_map(),
+    ActionTypeToConnectorTypeMap = maps:get(action_type_to_connector_type, ActionInfoMap),
+    case maps:get(Type, ActionTypeToConnectorTypeMap, undefined) of
+        undefined -> Type;
+        ConnectorType -> ConnectorType
+    end.
+
+bridge_v1_type_to_action_type(Bin) when is_binary(Bin) ->
+    bridge_v1_type_to_action_type(binary_to_existing_atom(Bin));
+bridge_v1_type_to_action_type(Type) ->
+    ActionInfoMap = info_map(),
+    BridgeV1TypeToActionType = maps:get(bridge_v1_type_to_action_type, ActionInfoMap),
+    case maps:get(Type, BridgeV1TypeToActionType, undefined) of
+        undefined -> Type;
+        ActionType -> ActionType
+    end.
+
+action_type_to_bridge_v1_type(Bin) when is_binary(Bin) ->
+    action_type_to_bridge_v1_type(binary_to_existing_atom(Bin));
+action_type_to_bridge_v1_type(Type) ->
+    ActionInfoMap = info_map(),
+    ActionTypeToBridgeV1Type = maps:get(action_type_to_bridge_v1_type, ActionInfoMap),
+    case maps:get(Type, ActionTypeToBridgeV1Type, undefined) of
+        undefined -> Type;
+        BridgeV1Type -> BridgeV1Type
+    end.
+
+%% This function should return true for all inputs that are bridge V1 types for
+%% bridges that have been refactored to bridge V2s, and for all all bridge V2
+%% types. For everything else the function should return false.
+is_action_type(Bin) when is_binary(Bin) ->
+    is_action_type(binary_to_existing_atom(Bin));
+is_action_type(Type) ->
+    ActionInfoMap = info_map(),
+    ActionTypes = maps:get(action_type_names, ActionInfoMap),
+    case maps:get(Type, ActionTypes, undefined) of
+        undefined -> false;
+        _ -> true
+    end.
+
+registered_schema_modules() ->
+    InfoMap = info_map(),
+    Schemas = maps:get(action_type_to_schema_module, InfoMap),
+    maps:to_list(Schemas).
+
+%% ====================================================================
+%% Internal functions for building the info map and accessing it
+%% ====================================================================
+
+internal_emqx_action_persistent_term_info_key() ->
+    ?FUNCTION_NAME.
+
+info_map() ->
+    case persistent_term:get(internal_emqx_action_persistent_term_info_key(), not_found) of
+        not_found ->
+            build_cache();
+        ActionInfoMap ->
+            ActionInfoMap
+    end.
+
+build_cache() ->
+    ActionInfoModules = action_info_modules(),
+    ActionInfoMap =
+        lists:foldl(
+            fun(Module, InfoMapSoFar) ->
+                ModuleInfoMap = get_info_map(Module),
+                emqx_utils_maps:deep_merge(InfoMapSoFar, ModuleInfoMap)
+            end,
+            initial_info_map(),
+            ActionInfoModules
+        ),
+    %% Update the persistent term with the new info map
+    persistent_term:put(internal_emqx_action_persistent_term_info_key(), ActionInfoMap),
+    ActionInfoMap.
+
+action_info_modules() ->
+    ActionInfoModules = [
+        action_info_modules(App)
+     || {App, _, _} <- application:loaded_applications()
+    ],
+    lists:usort(lists:flatten(ActionInfoModules) ++ hard_coded_action_info_modules()).
+
+action_info_modules(App) ->
+    case application:get_env(App, emqx_action_info_module) of
+        {ok, Module} ->
+            [Module];
+        _ ->
+            []
+    end.
+
+initial_info_map() ->
+    #{
+        action_type_names => #{},
+        bridge_v1_type_to_action_type => #{},
+        action_type_to_bridge_v1_type => #{},
+        action_type_to_connector_type => #{},
+        action_type_to_schema_module => #{}
+    }.
+
+get_info_map(Module) ->
+    %% Force the module to get loaded
+    _ = code:ensure_loaded(Module),
+    ActionType = Module:action_type_name(),
+    BridgeV1Type =
+        case erlang:function_exported(Module, bridge_v1_type_name, 0) of
+            true ->
+                Module:bridge_v1_type_name();
+            false ->
+                Module:action_type_name()
+        end,
+    #{
+        action_type_names => #{
+            ActionType => true,
+            BridgeV1Type => true
+        },
+        bridge_v1_type_to_action_type => #{
+            BridgeV1Type => ActionType,
+            %% Alias the bridge V1 type to the action type
+            ActionType => ActionType
+        },
+        action_type_to_bridge_v1_type => #{
+            ActionType => BridgeV1Type
+        },
+        action_type_to_connector_type => #{
+            ActionType => Module:connector_type_name(),
+            %% Alias the bridge V1 type to the action type
+            BridgeV1Type => Module:connector_type_name()
+        },
+        action_type_to_schema_module => #{
+            ActionType => Module:schema_module()
+        }
+    }.
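The new module above caches a merged info map in a persistent term and fills it from modules implementing its callbacks. A minimal sketch of such an action info module; all names here are hypothetical, modeled on emqx_bridge_kafka_action_info from the hard-coded list above. A real app would also point the emqx_action_info_module application environment key at its module (see action_info_modules/1) to be discovered without the hard-coded list:

    -module(emqx_bridge_example_action_info).

    -behaviour(emqx_action_info).

    -export([
        bridge_v1_type_name/0,
        action_type_name/0,
        connector_type_name/0,
        schema_module/0
    ]).

    %% The old (bridge V1) name this action was known by; optional callback.
    bridge_v1_type_name() -> example.
    %% The action (bridge V2) type name.
    action_type_name() -> example_producer.
    %% The connector type the action runs on.
    connector_type_name() -> example_producer.
    %% Module holding the action's HOCON schema.
    schema_module() -> emqx_bridge_example_schema.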
@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_bridge, [
     {description, "EMQX bridges"},
-    {vsn, "0.1.29"},
+    {vsn, "0.1.30"},
     {registered, [emqx_bridge_sup]},
     {mod, {emqx_bridge_app, []}},
     {applications, [
@@ -308,7 +308,7 @@ list() ->
         emqx:get_raw_config([bridges], #{})
     ),
     BridgeV2Bridges =
-        emqx_bridge_v2:list_and_transform_to_bridge_v1(),
+        emqx_bridge_v2:bridge_v1_list_and_transform(),
     BridgeV1Bridges ++ BridgeV2Bridges.
 %%BridgeV2Bridges = emqx_bridge_v2:list().

@@ -319,7 +319,7 @@ lookup(Id) ->
 lookup(Type, Name) ->
     case emqx_bridge_v2:is_bridge_v2_type(Type) of
         true ->
-            emqx_bridge_v2:lookup_and_transform_to_bridge_v1(Type, Name);
+            emqx_bridge_v2:bridge_v1_lookup_and_transform(Type, Name);
         false ->
             RawConf = emqx:get_raw_config([bridges, Type, Name], #{}),
             lookup(Type, Name, RawConf)

@@ -341,7 +341,7 @@ lookup(Type, Name, RawConf) ->
 get_metrics(Type, Name) ->
     case emqx_bridge_v2:is_bridge_v2_type(Type) of
         true ->
-            case emqx_bridge_v2:is_valid_bridge_v1(Type, Name) of
+            case emqx_bridge_v2:bridge_v1_is_valid(Type, Name) of
                 true ->
                     BridgeV2Type = emqx_bridge_v2:bridge_v2_type_to_connector_type(Type),
                     emqx_bridge_v2:get_metrics(BridgeV2Type, Name);

@@ -384,7 +384,7 @@ create(BridgeType0, BridgeName, RawConf) ->
     }),
     case emqx_bridge_v2:is_bridge_v2_type(BridgeType) of
         true ->
-            emqx_bridge_v2:split_bridge_v1_config_and_create(BridgeType, BridgeName, RawConf);
+            emqx_bridge_v2:bridge_v1_split_config_and_create(BridgeType, BridgeName, RawConf);
         false ->
             emqx_conf:update(
                 emqx_bridge:config_key_path() ++ [BridgeType, BridgeName],

@@ -627,7 +627,7 @@ create_bridge(BridgeType, BridgeName, Conf) ->
 update_bridge(BridgeType, BridgeName, Conf) ->
     case emqx_bridge_v2:is_bridge_v2_type(BridgeType) of
         true ->
-            case emqx_bridge_v2:is_valid_bridge_v1(BridgeType, BridgeName) of
+            case emqx_bridge_v2:bridge_v1_is_valid(BridgeType, BridgeName) of
                 true ->
                     create_or_update_bridge(BridgeType, BridgeName, Conf, 200);
                 false ->

@@ -1157,7 +1157,7 @@ map_to_json(M0) ->
     end.

 non_compat_bridge_msg() ->
-    <<"bridge already exists as non Bridge V1 compatible Bridge V2 bridge">>.
+    <<"bridge already exists as non Bridge V1 compatible action">>.

 upgrade_type(Type) ->
     emqx_bridge_lib:upgrade_type(Type).
@@ -53,20 +53,20 @@ maybe_withdraw_rule_action_loop([BridgeId | More], DeleteActions) ->
     end.

 %% @doc Kafka producer bridge renamed from 'kafka' to 'kafka_bridge' since 5.3.1.
-upgrade_type(kafka) ->
-    kafka_producer;
-upgrade_type(<<"kafka">>) ->
-    <<"kafka_producer">>;
-upgrade_type(Other) ->
-    Other.
+upgrade_type(Type) when is_atom(Type) ->
+    emqx_bridge_v2:bridge_v1_type_to_bridge_v2_type(Type);
+upgrade_type(Type) when is_binary(Type) ->
+    atom_to_binary(emqx_bridge_v2:bridge_v1_type_to_bridge_v2_type(Type));
+upgrade_type(Type) when is_list(Type) ->
+    atom_to_list(emqx_bridge_v2:bridge_v1_type_to_bridge_v2_type(list_to_binary(Type))).

 %% @doc Kafka producer bridge type renamed from 'kafka' to 'kafka_bridge' since 5.3.1
-downgrade_type(kafka_producer) ->
-    kafka;
-downgrade_type(<<"kafka_producer">>) ->
-    <<"kafka">>;
-downgrade_type(Other) ->
-    Other.
+downgrade_type(Type) when is_atom(Type) ->
+    emqx_bridge_v2:bridge_v2_type_to_bridge_v1_type(Type);
+downgrade_type(Type) when is_binary(Type) ->
+    atom_to_binary(emqx_bridge_v2:bridge_v2_type_to_bridge_v1_type(Type));
+downgrade_type(Type) when is_list(Type) ->
+    atom_to_list(emqx_bridge_v2:bridge_v2_type_to_bridge_v1_type(list_to_binary(Type))).

 %% A rule might be referencing an old version bridge type name
 %% i.e. 'kafka' instead of 'kafka_producer' so we need to try both

@@ -130,7 +130,7 @@ reset_metrics(ResourceId) ->
         false ->
             emqx_resource:reset_metrics(ResourceId);
         true ->
-            case emqx_bridge_v2:is_valid_bridge_v1(Type, Name) of
+            case emqx_bridge_v2:bridge_v1_is_valid(Type, Name) of
                 true ->
                     BridgeV2Type = emqx_bridge_v2:bridge_v2_type_to_connector_type(Type),
                     emqx_bridge_v2:reset_metrics(BridgeV2Type, Name);
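With the delegation above, the old hard-coded kafka clauses generalize to whatever `emqx_action_info` knows about, for atom, binary, and string inputs alike. Illustrative expectations, based only on the kafka to kafka_producer renaming named in the doc comments above (any other type maps to itself); the wrapper function name is hypothetical:

    %% Each assertion crashes on mismatch, so returning ok means all passed.
    upgrade_type_examples() ->
        kafka_producer = emqx_bridge_lib:upgrade_type(kafka),
        <<"kafka_producer">> = emqx_bridge_lib:upgrade_type(<<"kafka">>),
        "kafka_producer" = emqx_bridge_lib:upgrade_type("kafka"),
        kafka = emqx_bridge_lib:downgrade_type(kafka_producer),
        ok.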
@ -40,6 +40,8 @@
|
||||||
list/0,
|
list/0,
|
||||||
lookup/2,
|
lookup/2,
|
||||||
create/3,
|
create/3,
|
||||||
|
%% The remove/2 function is only for internal use as it may create
|
||||||
|
%% rules with broken dependencies
|
||||||
remove/2,
|
remove/2,
|
||||||
%% The following is the remove function that is called by the HTTP API
|
%% The following is the remove function that is called by the HTTP API
|
||||||
%% It also checks for rule action dependencies and optionally removes
|
%% It also checks for rule action dependencies and optionally removes
|
||||||
|
@ -48,6 +50,7 @@
|
||||||
]).
|
]).
|
||||||
|
|
||||||
%% Operations
|
%% Operations
|
||||||
|
|
||||||
-export([
|
-export([
|
||||||
disable_enable/3,
|
disable_enable/3,
|
||||||
health_check/2,
|
health_check/2,
|
||||||
|
@ -73,7 +76,8 @@
|
||||||
-export([
|
-export([
|
||||||
id/2,
|
id/2,
|
||||||
id/3,
|
id/3,
|
||||||
is_valid_bridge_v1/2
|
bridge_v1_is_valid/2,
|
||||||
|
extract_connector_id_from_bridge_v2_id/1
|
||||||
]).
|
]).
|
||||||
|
|
||||||
%% Config Update Handler API
|
%% Config Update Handler API
|
||||||
|
@ -88,18 +92,26 @@
|
||||||
import_config/1
|
import_config/1
|
||||||
]).
|
]).
|
||||||
|
|
||||||
%% Compatibility API
|
%% Bridge V2 Types and Conversions
|
||||||
|
|
||||||
-export([
|
-export([
|
||||||
bridge_v2_type_to_connector_type/1,
|
bridge_v2_type_to_connector_type/1,
|
||||||
is_bridge_v2_type/1,
|
is_bridge_v2_type/1
|
||||||
lookup_and_transform_to_bridge_v1/2,
|
]).
|
||||||
list_and_transform_to_bridge_v1/0,
|
|
||||||
|
%% Compatibility Layer API
|
||||||
|
%% All public functions for the compatibility layer should be prefixed with
|
||||||
|
%% bridge_v1_
|
||||||
|
|
||||||
|
-export([
|
||||||
|
bridge_v1_lookup_and_transform/2,
|
||||||
|
bridge_v1_list_and_transform/0,
|
||||||
bridge_v1_check_deps_and_remove/3,
|
bridge_v1_check_deps_and_remove/3,
|
||||||
split_bridge_v1_config_and_create/3,
|
bridge_v1_split_config_and_create/3,
|
||||||
bridge_v1_create_dry_run/2,
|
bridge_v1_create_dry_run/2,
|
||||||
extract_connector_id_from_bridge_v2_id/1,
|
|
||||||
bridge_v1_type_to_bridge_v2_type/1,
|
bridge_v1_type_to_bridge_v2_type/1,
|
||||||
|
%% Exception from the naming convention:
|
||||||
|
bridge_v2_type_to_bridge_v1_type/1,
|
||||||
bridge_v1_id_to_connector_resource_id/1,
|
bridge_v1_id_to_connector_resource_id/1,
|
||||||
bridge_v1_enable_disable/3,
|
bridge_v1_enable_disable/3,
|
||||||
bridge_v1_restart/2,
|
bridge_v1_restart/2,
|
||||||
|
@ -107,6 +119,27 @@
|
||||||
bridge_v1_start/2
|
bridge_v1_start/2
|
||||||
]).
|
]).
|
||||||
|
|
||||||
|
%%====================================================================
|
||||||
|
%% Types
|
||||||
|
%%====================================================================
|
||||||
|
|
||||||
|
-type bridge_v2_info() :: #{
|
||||||
|
type := binary(),
|
||||||
|
name := binary(),
|
||||||
|
raw_config := map(),
|
||||||
|
resource_data := map(),
|
||||||
|
status := emqx_resource:resource_status(),
|
||||||
|
%% Explanation of the status if the status is not connected
|
||||||
|
error := term()
|
||||||
|
}.
|
||||||
|
|
||||||
|
-type bridge_v2_type() :: binary() | atom() | [byte()].
|
||||||
|
-type bridge_v2_name() :: binary() | atom() | [byte()].
|
||||||
|
|
||||||
|
%%====================================================================
|
||||||
|
|
||||||
|
%%====================================================================
|
||||||
|
|
||||||
%%====================================================================
|
%%====================================================================
|
||||||
%% Loading and unloading config when EMQX starts and stops
|
%% Loading and unloading config when EMQX starts and stops
|
||||||
%%====================================================================
|
%%====================================================================
|
||||||
|
@ -157,6 +190,7 @@ unload_bridges() ->
|
||||||
%% CRUD API
|
%% CRUD API
|
||||||
%%====================================================================
|
%%====================================================================
|
||||||
|
|
||||||
|
-spec lookup(bridge_v2_type(), bridge_v2_name()) -> {ok, bridge_v2_info()} | {error, not_found}.
|
||||||
lookup(Type, Name) ->
|
lookup(Type, Name) ->
|
||||||
case emqx:get_raw_config([?ROOT_KEY, Type, Name], not_found) of
|
case emqx:get_raw_config([?ROOT_KEY, Type, Name], not_found) of
|
||||||
not_found ->
|
not_found ->
|
||||||
|
@ -191,8 +225,8 @@ lookup(Type, Name) ->
|
||||||
{disconnected, <<"Pending installation">>}
|
{disconnected, <<"Pending installation">>}
|
||||||
end,
|
end,
|
||||||
{ok, #{
|
{ok, #{
|
||||||
type => Type,
|
type => bin(Type),
|
||||||
name => Name,
|
name => bin(Name),
|
||||||
raw_config => RawConf,
|
raw_config => RawConf,
|
||||||
resource_data => InstanceData,
|
resource_data => InstanceData,
|
||||||
status => DisplayBridgeV2Status,
|
status => DisplayBridgeV2Status,
|
||||||
|
@ -200,9 +234,12 @@ lookup(Type, Name) ->
|
||||||
}}
|
}}
|
||||||
end.
|
end.
|
||||||
|
|
||||||
|
-spec list() -> [bridge_v2_info()] | {error, term()}.
|
||||||
list() ->
|
list() ->
|
||||||
list_with_lookup_fun(fun lookup/2).
|
list_with_lookup_fun(fun lookup/2).
|
||||||
|
|
||||||
|
-spec create(bridge_v2_type(), bridge_v2_name(), map()) ->
|
||||||
|
{ok, emqx_config:update_result()} | {error, any()}.
|
||||||
create(BridgeType, BridgeName, RawConf) ->
|
create(BridgeType, BridgeName, RawConf) ->
|
||||||
?SLOG(debug, #{
|
?SLOG(debug, #{
|
||||||
brige_action => create,
|
brige_action => create,
|
||||||
|
@ -217,9 +254,10 @@ create(BridgeType, BridgeName, RawConf) ->
|
||||||
#{override_to => cluster}
|
#{override_to => cluster}
|
||||||
).
|
).
|
||||||
|
|
||||||
%% NOTE: This function can cause broken references but it is only called from
|
%% NOTE: This function can cause broken references from rules but it is only
|
||||||
%% test cases.
|
%% called directly from test cases.
|
||||||
-spec remove(atom() | binary(), binary()) -> ok | {error, any()}.
|
|
||||||
|
-spec remove(bridge_v2_type(), bridge_v2_name()) -> ok | {error, any()}.
|
||||||
remove(BridgeType, BridgeName) ->
|
remove(BridgeType, BridgeName) ->
|
||||||
?SLOG(debug, #{
|
?SLOG(debug, #{
|
||||||
brige_action => remove,
|
brige_action => remove,
|
||||||
|
@ -237,6 +275,7 @@ remove(BridgeType, BridgeName) ->
|
||||||
{error, Reason} -> {error, Reason}
|
{error, Reason} -> {error, Reason}
|
||||||
end.
|
end.
|
||||||
|
|
||||||
|
-spec check_deps_and_remove(bridge_v2_type(), bridge_v2_name(), boolean()) -> ok | {error, any()}.
|
||||||
check_deps_and_remove(BridgeType, BridgeName, AlsoDeleteActions) ->
|
check_deps_and_remove(BridgeType, BridgeName, AlsoDeleteActions) ->
|
||||||
AlsoDelete =
|
AlsoDelete =
|
||||||
case AlsoDeleteActions of
|
case AlsoDeleteActions of
|
||||||
|
@ -360,28 +399,6 @@ uninstall_bridge_v2(
|
||||||
%% Already not installed
|
%% Already not installed
|
||||||
ok;
|
ok;
|
||||||
uninstall_bridge_v2(
|
uninstall_bridge_v2(
|
||||||
BridgeV2Type,
|
|
||||||
BridgeName,
|
|
||||||
Config
|
|
||||||
) ->
|
|
||||||
uninstall_bridge_v2_helper(
|
|
||||||
BridgeV2Type,
|
|
||||||
BridgeName,
|
|
||||||
combine_connector_and_bridge_v2_config(
|
|
||||||
BridgeV2Type,
|
|
||||||
BridgeName,
|
|
||||||
Config
|
|
||||||
)
|
|
||||||
).
|
|
||||||
|
|
||||||
uninstall_bridge_v2_helper(
|
|
||||||
_BridgeV2Type,
|
|
||||||
_BridgeName,
|
|
||||||
{error, Reason} = Error
|
|
||||||
) ->
|
|
||||||
?SLOG(error, Reason),
|
|
||||||
Error;
|
|
||||||
uninstall_bridge_v2_helper(
|
|
||||||
BridgeV2Type,
|
BridgeV2Type,
|
||||||
BridgeName,
|
BridgeName,
|
||||||
#{connector := ConnectorName} = Config
|
#{connector := ConnectorName} = Config
|
||||||
|
@ -390,11 +407,16 @@ uninstall_bridge_v2_helper(
|
||||||
CreationOpts = emqx_resource:fetch_creation_opts(Config),
|
CreationOpts = emqx_resource:fetch_creation_opts(Config),
|
||||||
ok = emqx_resource_buffer_worker_sup:stop_workers(BridgeV2Id, CreationOpts),
|
ok = emqx_resource_buffer_worker_sup:stop_workers(BridgeV2Id, CreationOpts),
|
||||||
ok = emqx_resource:clear_metrics(BridgeV2Id),
|
ok = emqx_resource:clear_metrics(BridgeV2Id),
|
||||||
%% Deinstall from connector
|
case combine_connector_and_bridge_v2_config(BridgeV2Type, BridgeName, Config) of
|
||||||
ConnectorId = emqx_connector_resource:resource_id(
|
{error, _} ->
|
||||||
connector_type(BridgeV2Type), ConnectorName
|
ok;
|
||||||
),
|
_CombinedConfig ->
|
||||||
emqx_resource_manager:remove_channel(ConnectorId, BridgeV2Id).
|
%% Deinstall from connector
|
||||||
|
ConnectorId = emqx_connector_resource:resource_id(
|
||||||
|
connector_type(BridgeV2Type), ConnectorName
|
||||||
|
),
|
||||||
|
emqx_resource_manager:remove_channel(ConnectorId, BridgeV2Id)
|
||||||
|
end.
|
||||||
|
|
||||||
combine_connector_and_bridge_v2_config(
|
combine_connector_and_bridge_v2_config(
|
||||||
BridgeV2Type,
|
BridgeV2Type,
|
||||||
|
@ -425,6 +447,8 @@ combine_connector_and_bridge_v2_config(
|
||||||
%% Operations
|
%% Operations
|
||||||
%%====================================================================
|
%%====================================================================
|
||||||
|
|
||||||
|
-spec disable_enable(disable | enable, bridge_v2_type(), bridge_v2_name()) ->
|
||||||
|
{ok, any()} | {error, any()}.
|
||||||
disable_enable(Action, BridgeType, BridgeName) when
|
disable_enable(Action, BridgeType, BridgeName) when
|
||||||
Action =:= disable; Action =:= enable
|
Action =:= disable; Action =:= enable
|
||||||
->
|
->
|
||||||
|
@ -502,6 +526,7 @@ connector_operation_helper_with_conf(
|
||||||
end
|
end
|
||||||
end.
|
end.
|
||||||
|
|
||||||
|
-spec reset_metrics(bridge_v2_type(), bridge_v2_name()) -> ok | {error, not_found}.
|
||||||
reset_metrics(Type, Name) ->
|
reset_metrics(Type, Name) ->
|
||||||
reset_metrics_helper(Type, Name, lookup_conf(Type, Name)).
|
reset_metrics_helper(Type, Name, lookup_conf(Type, Name)).
|
||||||
|
|
||||||
|
@ -509,7 +534,9 @@ reset_metrics_helper(_Type, _Name, #{enable := false}) ->
|
||||||
ok;
|
ok;
|
||||||
reset_metrics_helper(BridgeV2Type, BridgeName, #{connector := ConnectorName}) ->
|
reset_metrics_helper(BridgeV2Type, BridgeName, #{connector := ConnectorName}) ->
|
||||||
BridgeV2Id = id(BridgeV2Type, BridgeName, ConnectorName),
|
BridgeV2Id = id(BridgeV2Type, BridgeName, ConnectorName),
|
||||||
ok = emqx_metrics_worker:reset_metrics(?RES_METRICS, BridgeV2Id).
|
ok = emqx_metrics_worker:reset_metrics(?RES_METRICS, BridgeV2Id);
|
||||||
|
reset_metrics_helper(_, _, _) ->
|
||||||
|
{error, not_found}.
|
||||||
|
|
||||||
get_query_mode(BridgeV2Type, Config) ->
|
get_query_mode(BridgeV2Type, Config) ->
|
||||||
CreationOpts = emqx_resource:fetch_creation_opts(Config),
|
CreationOpts = emqx_resource:fetch_creation_opts(Config),
|
||||||
|
@@ -517,6 +544,8 @@ get_query_mode(BridgeV2Type, Config) ->
     ResourceType = emqx_connector_resource:connector_to_resource_type(ConnectorType),
     emqx_resource:query_mode(ResourceType, Config, CreationOpts).
 
+-spec send_message(bridge_v2_type(), bridge_v2_name(), Message :: term(), QueryOpts :: map()) ->
+    term() | {error, term()}.
 send_message(BridgeType, BridgeName, Message, QueryOpts0) ->
     case lookup_conf(BridgeType, BridgeName) of
         #{enable := true} = Config0 ->

@@ -550,8 +579,7 @@ do_send_msg_with_enabled_config(
     emqx_resource:query(BridgeV2Id, {BridgeV2Id, Message}, QueryOpts).
 
 -spec health_check(BridgeType :: term(), BridgeName :: term()) ->
-    #{status := term(), error := term()} | {error, Reason :: term()}.
-
+    #{status := emqx_resource:resource_status(), error := term()} | {error, Reason :: term()}.
 health_check(BridgeType, BridgeName) ->
     case lookup_conf(BridgeType, BridgeName) of
         #{

@@ -570,6 +598,34 @@ health_check(BridgeType, BridgeName) ->
             Error
     end.
 
+-spec create_dry_run(bridge_v2_type(), Config :: map()) -> ok | {error, term()}.
+create_dry_run(Type, Conf0) ->
+    Conf1 = maps:without([<<"name">>], Conf0),
+    TypeBin = bin(Type),
+    RawConf = #{<<"actions">> => #{TypeBin => #{<<"temp_name">> => Conf1}}},
+    %% Check config
+    try
+        _ =
+            hocon_tconf:check_plain(
+                emqx_bridge_v2_schema,
+                RawConf,
+                #{atom_key => true, required => false}
+            ),
+        #{<<"connector">> := ConnectorName} = Conf1,
+        %% Check that the connector exists and do the dry run if it exists
+        ConnectorType = connector_type(Type),
+        case emqx:get_raw_config([connectors, ConnectorType, ConnectorName], not_found) of
+            not_found ->
+                {error, iolist_to_binary(io_lib:format("Connector ~p not found", [ConnectorName]))};
+            ConnectorRawConf ->
+                create_dry_run_helper(Type, ConnectorRawConf, Conf1)
+        end
+    catch
+        %% validation errors
+        throw:Reason1 ->
+            {error, Reason1}
+    end.
+
 create_dry_run_helper(BridgeType, ConnectorRawConf, BridgeV2RawConf) ->
     BridgeName = iolist_to_binary([?TEST_ID_PREFIX, emqx_utils:gen_id(8)]),
     ConnectorType = connector_type(BridgeType),

@@ -601,33 +657,7 @@ create_dry_run_helper(BridgeType, ConnectorRawConf, BridgeV2RawConf) ->
         end,
     emqx_connector_resource:create_dry_run(ConnectorType, ConnectorRawConf, OnReadyCallback).
 
-create_dry_run(Type, Conf0) ->
-    Conf1 = maps:without([<<"name">>], Conf0),
-    TypeBin = bin(Type),
-    RawConf = #{<<"actions">> => #{TypeBin => #{<<"temp_name">> => Conf1}}},
-    %% Check config
-    try
-        _ =
-            hocon_tconf:check_plain(
-                emqx_bridge_v2_schema,
-                RawConf,
-                #{atom_key => true, required => false}
-            ),
-        #{<<"connector">> := ConnectorName} = Conf1,
-        %% Check that the connector exists and do the dry run if it exists
-        ConnectorType = connector_type(Type),
-        case emqx:get_raw_config([connectors, ConnectorType, ConnectorName], not_found) of
-            not_found ->
-                {error, iolist_to_binary(io_lib:format("Connector ~p not found", [ConnectorName]))};
-            ConnectorRawConf ->
-                create_dry_run_helper(Type, ConnectorRawConf, Conf1)
-        end
-    catch
-        %% validation errors
-        throw:Reason1 ->
-            {error, Reason1}
-    end.
-
+-spec get_metrics(bridge_v2_type(), bridge_v2_name()) -> emqx_metrics_worker:metrics().
 get_metrics(Type, Name) ->
     emqx_resource:get_metrics(id(Type, Name)).
 

@@ -796,17 +826,8 @@ connector_type(Type) ->
     %% remote call so it can be mocked
     ?MODULE:bridge_v2_type_to_connector_type(Type).
 
-bridge_v2_type_to_connector_type(Type) when not is_atom(Type) ->
-    bridge_v2_type_to_connector_type(binary_to_existing_atom(iolist_to_binary(Type)));
-bridge_v2_type_to_connector_type(kafka) ->
-    %% backward compatible
-    kafka_producer;
-bridge_v2_type_to_connector_type(kafka_producer) ->
-    kafka_producer;
-bridge_v2_type_to_connector_type(azure_event_hub_producer) ->
-    azure_event_hub_producer;
-bridge_v2_type_to_connector_type(syskeeper_forwarder) ->
-    syskeeper_forwarder.
+bridge_v2_type_to_connector_type(Type) ->
+    emqx_action_info:action_type_to_connector_type(Type).
 
 %%====================================================================
 %% Data backup API
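
Note: the hunk above drops the hardcoded action-to-connector type mapping in favor of a dispatch through emqx_action_info. A minimal sketch of the intended equivalence, with return values inferred from the removed clauses and from the *_action_info modules added later in this diff (not verified against the emqx_action_info implementation itself):

    1> emqx_action_info:action_type_to_connector_type(kafka_producer).
    kafka_producer
    2> emqx_action_info:action_type_to_connector_type(azure_event_hub_producer).
    azure_event_hub_producer
    3> emqx_action_info:action_type_to_connector_type(syskeeper_forwarder).
    syskeeper_forwarder

The old backward-compatibility clause mapping kafka to kafka_producer is presumably covered by the bridge_v1_type_name() -> kafka callback in emqx_bridge_kafka_action_info rather than by this function.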
@@ -1008,7 +1029,7 @@ unpack_bridge_conf(Type, PackedConf, TopLevelConf) ->
 %%
 %% * The corresponding bridge v2 should exist
 %% * The connector for the bridge v2 should have exactly one channel
-is_valid_bridge_v1(BridgeV1Type, BridgeName) ->
+bridge_v1_is_valid(BridgeV1Type, BridgeName) ->
     BridgeV2Type = ?MODULE:bridge_v1_type_to_bridge_v2_type(BridgeV1Type),
     case lookup_conf(BridgeV2Type, BridgeName) of
         {error, _} ->

@@ -1026,39 +1047,21 @@ is_valid_bridge_v1(BridgeV1Type, BridgeName) ->
         end
     end.
 
-bridge_v1_type_to_bridge_v2_type(Bin) when is_binary(Bin) ->
-    ?MODULE:bridge_v1_type_to_bridge_v2_type(binary_to_existing_atom(Bin));
-bridge_v1_type_to_bridge_v2_type(kafka) ->
-    kafka_producer;
-bridge_v1_type_to_bridge_v2_type(kafka_producer) ->
-    kafka_producer;
-bridge_v1_type_to_bridge_v2_type(azure_event_hub_producer) ->
-    azure_event_hub_producer;
-bridge_v1_type_to_bridge_v2_type(syskeeper_forwarder) ->
-    syskeeper_forwarder.
+bridge_v1_type_to_bridge_v2_type(Type) ->
+    emqx_action_info:bridge_v1_type_to_action_type(Type).
 
-%% This function should return true for all inputs that are bridge V1 types for
-%% bridges that have been refactored to bridge V2s, and for all all bridge V2
-%% types. For everything else the function should return false.
-is_bridge_v2_type(Atom) when is_atom(Atom) ->
-    is_bridge_v2_type(atom_to_binary(Atom, utf8));
-is_bridge_v2_type(<<"kafka_producer">>) ->
-    true;
-is_bridge_v2_type(<<"kafka">>) ->
-    true;
-is_bridge_v2_type(<<"azure_event_hub_producer">>) ->
-    true;
-is_bridge_v2_type(<<"syskeeper_forwarder">>) ->
-    true;
-is_bridge_v2_type(_) ->
-    false.
+bridge_v2_type_to_bridge_v1_type(Type) ->
+    emqx_action_info:action_type_to_bridge_v1_type(Type).
 
-list_and_transform_to_bridge_v1() ->
-    Bridges = list_with_lookup_fun(fun lookup_and_transform_to_bridge_v1/2),
+is_bridge_v2_type(Type) ->
+    emqx_action_info:is_action_type(Type).
+
+bridge_v1_list_and_transform() ->
+    Bridges = list_with_lookup_fun(fun bridge_v1_lookup_and_transform/2),
     [B || B <- Bridges, B =/= not_bridge_v1_compatible_error()].
 
-lookup_and_transform_to_bridge_v1(BridgeV1Type, Name) ->
-    case ?MODULE:is_valid_bridge_v1(BridgeV1Type, Name) of
+bridge_v1_lookup_and_transform(BridgeV1Type, Name) ->
+    case ?MODULE:bridge_v1_is_valid(BridgeV1Type, Name) of
         true ->
             Type = ?MODULE:bridge_v1_type_to_bridge_v2_type(BridgeV1Type),
             case lookup(Type, Name) of

@@ -1066,7 +1069,7 @@ lookup_and_transform_to_bridge_v1(BridgeV1Type, Name) ->
                     ConnectorType = connector_type(Type),
                     case emqx_connector:lookup(ConnectorType, ConnectorName) of
                         {ok, Connector} ->
-                            lookup_and_transform_to_bridge_v1_helper(
+                            bridge_v1_lookup_and_transform_helper(
                                 BridgeV1Type, Name, Type, BridgeV2, ConnectorType, Connector
                             );
                         Error ->

@@ -1082,7 +1085,7 @@ lookup_and_transform_to_bridge_v1(BridgeV1Type, Name) ->
 not_bridge_v1_compatible_error() ->
     {error, not_bridge_v1_compatible}.
 
-lookup_and_transform_to_bridge_v1_helper(
+bridge_v1_lookup_and_transform_helper(
     BridgeV1Type, BridgeName, BridgeV2Type, BridgeV2, ConnectorType, Connector
 ) ->
     ConnectorRawConfig1 = maps:get(raw_config, Connector),

@@ -1135,7 +1138,7 @@ lookup_conf(Type, Name) ->
             Config
     end.
 
-split_bridge_v1_config_and_create(BridgeV1Type, BridgeName, RawConf) ->
+bridge_v1_split_config_and_create(BridgeV1Type, BridgeName, RawConf) ->
     BridgeV2Type = ?MODULE:bridge_v1_type_to_bridge_v2_type(BridgeV1Type),
     %% Check if the bridge v2 exists
     case lookup_conf(BridgeV2Type, BridgeName) of

@@ -1146,7 +1149,7 @@ split_bridge_v1_config_and_create(BridgeV1Type, BridgeName, RawConf) ->
                 BridgeV1Type, BridgeName, RawConf, PreviousRawConf
             );
         _Conf ->
-            case ?MODULE:is_valid_bridge_v1(BridgeV1Type, BridgeName) of
+            case ?MODULE:bridge_v1_is_valid(BridgeV1Type, BridgeName) of
                 true ->
                     %% Using remove + create as update, hence do not delete deps.
                     RemoveDeps = [],

@@ -1381,7 +1384,7 @@ bridge_v1_id_to_connector_resource_id(BridgeId) ->
     end.
 
 bridge_v1_enable_disable(Action, BridgeType, BridgeName) ->
-    case emqx_bridge_v2:is_valid_bridge_v1(BridgeType, BridgeName) of
+    case emqx_bridge_v2:bridge_v1_is_valid(BridgeType, BridgeName) of
         true ->
             bridge_v1_enable_disable_helper(
                 Action,

@@ -1426,7 +1429,7 @@ bridge_v1_start(BridgeV1Type, Name) ->
 
 bridge_v1_operation_helper(BridgeV1Type, Name, ConnectorOpFun, DoHealthCheck) ->
     BridgeV2Type = ?MODULE:bridge_v1_type_to_bridge_v2_type(BridgeV1Type),
-    case emqx_bridge_v2:is_valid_bridge_v1(BridgeV1Type, Name) of
+    case emqx_bridge_v2:bridge_v1_is_valid(BridgeV1Type, Name) of
         true ->
             connector_operation_helper_with_conf(
                 BridgeV2Type,

@@ -40,7 +40,8 @@
     '/actions/:id/enable/:enable'/2,
     '/actions/:id/:operation'/2,
     '/nodes/:node/actions/:id/:operation'/2,
-    '/actions_probe'/2
+    '/actions_probe'/2,
+    '/action_types'/2
 ]).
 
 %% BpAPI

@@ -79,7 +80,8 @@ paths() ->
         "/actions/:id/enable/:enable",
         "/actions/:id/:operation",
         "/nodes/:node/actions/:id/:operation",
-        "/actions_probe"
+        "/actions_probe",
+        "/action_types"
     ].
 
 error_schema(Code, Message) when is_atom(Code) ->

@@ -96,21 +98,11 @@ get_response_body_schema() ->
     ).
 
 bridge_info_examples(Method) ->
-    maps:merge(
-        #{},
-        emqx_enterprise_bridge_examples(Method)
-    ).
+    emqx_bridge_v2_schema:examples(Method).
 
 bridge_info_array_example(Method) ->
     lists:map(fun(#{value := Config}) -> Config end, maps:values(bridge_info_examples(Method))).
 
--if(?EMQX_RELEASE_EDITION == ee).
-emqx_enterprise_bridge_examples(Method) ->
-    emqx_bridge_v2_enterprise:examples(Method).
--else.
-emqx_enterprise_bridge_examples(_Method) -> #{}.
--endif.
-
 param_path_id() ->
     {id,
         mk(

@@ -338,6 +330,27 @@ schema("/actions_probe") ->
                 400 => error_schema(['TEST_FAILED'], "bridge test failed")
             }
         }
+    };
+schema("/action_types") ->
+    #{
+        'operationId' => '/action_types',
+        get => #{
+            tags => [<<"actions">>],
+            desc => ?DESC("desc_api10"),
+            summary => <<"List available action types">>,
+            responses => #{
+                200 => emqx_dashboard_swagger:schema_with_examples(
+                    array(emqx_bridge_v2_schema:types_sc()),
+                    #{
+                        <<"types">> =>
+                            #{
+                                summary => <<"Action types">>,
+                                value => emqx_bridge_v2_schema:types()
+                            }
+                    }
+                )
+            }
+        }
     }.
 
 '/actions'(post, #{body := #{<<"type">> := BridgeType, <<"name">> := BridgeName} = Conf0}) ->

@@ -486,6 +499,9 @@ schema("/actions_probe") ->
             redact(BadRequest)
     end.
 
+'/action_types'(get, _Request) ->
+    ?OK(emqx_bridge_v2_schema:types()).
+
 maybe_deobfuscate_bridge_probe(#{<<"type">> := BridgeType, <<"name">> := BridgeName} = Params) ->
     case emqx_bridge:lookup(BridgeType, BridgeName) of
         {ok, #{raw_config := RawConf}} ->
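
Note: the new /action_types endpoint is a thin wrapper over emqx_bridge_v2_schema:types(), added to the schema module later in this diff. A hedged sketch of what a call could return once the three action-info modules in this diff are registered (the exact contents and order depend on what is registered at runtime):

    1> emqx_bridge_v2_schema:types().
    [kafka_producer, azure_event_hub_producer, syskeeper_forwarder]

Over HTTP the handler serializes this list as a JSON array of strings; the t_action_types test case added later in this diff asserts exactly that shape.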
@@ -692,7 +708,13 @@ node_status(Bridges) ->
 aggregate_status(AllStatus) ->
     Head = fun([A | _]) -> A end,
     HeadVal = maps:get(status, Head(AllStatus), connecting),
-    AllRes = lists:all(fun(#{status := Val}) -> Val == HeadVal end, AllStatus),
+    AllRes = lists:all(
+        fun
+            (#{status := Val}) -> Val == HeadVal;
+            (_) -> false
+        end,
+        AllStatus
+    ),
     case AllRes of
         true -> HeadVal;
         false -> inconsistent
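
Note: the reworked aggregate_status/1 replaces a fun clause that crashed with function_clause on any node entry lacking a status key; such entries now fall through to the (_) -> false clause, so the aggregate degrades to inconsistent instead of raising. A small sketch of the resulting behavior inside the module (values illustrative):

    %% All node entries agree: the common status wins.
    connected = aggregate_status([#{status => connected}, #{status => connected}]),
    %% An entry without a `status' key no longer crashes the fold;
    %% it simply makes the aggregate `inconsistent'.
    inconsistent = aggregate_status([#{status => connected}, #{error => timeout}]).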
@@ -709,8 +731,10 @@ format_resource(
     #{
         type := Type,
         name := Name,
+        status := Status,
+        error := Error,
         raw_config := RawConf,
-        resource_data := ResourceData
+        resource_data := _ResourceData
     },
     Node
 ) ->

@@ -719,14 +743,16 @@ format_resource(
         RawConf#{
             type => Type,
             name => maps:get(<<"name">>, RawConf, Name),
-            node => Node
+            node => Node,
+            status => Status,
+            error => Error
         },
-        format_resource_data(ResourceData)
+        format_bridge_status_and_error(#{status => Status, error => Error})
     )
 ).
 
-format_resource_data(ResData) ->
-    maps:fold(fun format_resource_data/3, #{}, maps:with([status, error], ResData)).
+format_bridge_status_and_error(Data) ->
+    maps:fold(fun format_resource_data/3, #{}, maps:with([status, error], Data)).
 
 format_resource_data(error, undefined, Result) ->
     Result;

@@ -765,8 +791,6 @@ do_create_or_update_bridge(BridgeType, BridgeName, Conf, HttpStatusCode) ->
             PreOrPostConfigUpdate =:= pre_config_update;
             PreOrPostConfigUpdate =:= post_config_update
         ->
-            ?BAD_REQUEST(map_to_json(redact(Reason)));
-        {error, Reason} ->
             ?BAD_REQUEST(map_to_json(redact(Reason)))
     end.
 

@@ -82,6 +82,11 @@ schema_modules() ->
     ].
 
 examples(Method) ->
+    ActionExamples = emqx_bridge_v2_schema:examples(Method),
+    RegisteredExamples = registered_examples(Method),
+    maps:merge(ActionExamples, RegisteredExamples).
+
+registered_examples(Method) ->
     MergeFun =
         fun(Example, Examples) ->
             maps:merge(Examples, Example)

@@ -1,80 +0,0 @@
-%%--------------------------------------------------------------------
-%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
-%%--------------------------------------------------------------------
--module(emqx_bridge_v2_enterprise).
-
--if(?EMQX_RELEASE_EDITION == ee).
-
--import(hoconsc, [mk/2, enum/1, ref/2]).
-
--export([
-    api_schemas/1,
-    examples/1,
-    fields/1
-]).
-
-examples(Method) ->
-    MergeFun =
-        fun(Example, Examples) ->
-            maps:merge(Examples, Example)
-        end,
-    Fun =
-        fun(Module, Examples) ->
-            ConnectorExamples = erlang:apply(Module, bridge_v2_examples, [Method]),
-            lists:foldl(MergeFun, Examples, ConnectorExamples)
-        end,
-    lists:foldl(Fun, #{}, schema_modules()).
-
-schema_modules() ->
-    [
-        emqx_bridge_kafka,
-        emqx_bridge_azure_event_hub,
-        emqx_bridge_syskeeper
-    ].
-
-fields(actions) ->
-    action_structs().
-
-action_structs() ->
-    [
-        {kafka_producer,
-            mk(
-                hoconsc:map(name, ref(emqx_bridge_kafka, kafka_producer_action)),
-                #{
-                    desc => <<"Kafka Producer Actions Config">>,
-                    required => false
-                }
-            )},
-        {azure_event_hub_producer,
-            mk(
-                hoconsc:map(name, ref(emqx_bridge_azure_event_hub, actions)),
-                #{
-                    desc => <<"Azure Event Hub Actions Config">>,
-                    required => false
-                }
-            )},
-        {syskeeper_forwarder,
-            mk(
-                hoconsc:map(name, ref(emqx_bridge_syskeeper, config)),
-                #{
-                    desc => <<"Syskeeper forwarder Bridge V2 Config">>,
-                    required => false
-                }
-            )}
-    ].
-
-api_schemas(Method) ->
-    [
-        api_ref(emqx_bridge_kafka, <<"kafka_producer">>, Method ++ "_bridge_v2"),
-        api_ref(
-            emqx_bridge_azure_event_hub, <<"azure_event_hub_producer">>, Method ++ "_bridge_v2"
-        ),
-        api_ref(emqx_bridge_syskeeper, <<"syskeeper_forwarder">>, Method)
-    ].
-
-api_ref(Module, Type, Method) ->
-    {Type, ref(Module, Method)}.
-
--else.
-
--endif.

@@ -27,39 +27,23 @@
 -export([
     get_response/0,
     put_request/0,
-    post_request/0
+    post_request/0,
+    examples/1
 ]).
 
--export([enterprise_api_schemas/1]).
+%% Exported for mocking
+%% TODO: refactor emqx_bridge_v1_compatibility_layer_SUITE so we don't need to
+%% export this
+-export([
+    registered_api_schemas/1
+]).
 
--if(?EMQX_RELEASE_EDITION == ee).
-enterprise_api_schemas(Method) ->
-    %% We *must* do this to ensure the module is really loaded, especially when we use
-    %% `call_hocon' from `nodetool' to generate initial configurations.
-    _ = emqx_bridge_v2_enterprise:module_info(),
-    case erlang:function_exported(emqx_bridge_v2_enterprise, api_schemas, 1) of
-        true -> emqx_bridge_v2_enterprise:api_schemas(Method);
-        false -> []
-    end.
+-export([types/0, types_sc/0]).
 
-enterprise_fields_actions() ->
-    %% We *must* do this to ensure the module is really loaded, especially when we use
-    %% `call_hocon' from `nodetool' to generate initial configurations.
-    _ = emqx_bridge_v2_enterprise:module_info(),
-    case erlang:function_exported(emqx_bridge_v2_enterprise, fields, 1) of
-        true ->
-            emqx_bridge_v2_enterprise:fields(actions);
-        false ->
-            []
-    end.
+-export_type([action_type/0]).
 
--else.
-
-enterprise_api_schemas(_Method) -> [].
-
-enterprise_fields_actions() -> [].
-
--endif.
+%% Should we explicitly list them here so dialyzer may be more helpful?
+-type action_type() :: atom().
 
 %%======================================================================================
 %% For HTTP APIs

@@ -73,8 +57,18 @@ post_request() ->
     api_schema("post").
 
 api_schema(Method) ->
-    EE = ?MODULE:enterprise_api_schemas(Method),
-    hoconsc:union(bridge_api_union(EE)).
+    APISchemas = ?MODULE:registered_api_schemas(Method),
+    hoconsc:union(bridge_api_union(APISchemas)).
+
+registered_api_schemas(Method) ->
+    RegisteredSchemas = emqx_action_info:registered_schema_modules(),
+    [
+        api_ref(SchemaModule, atom_to_binary(BridgeV2Type), Method ++ "_bridge_v2")
+     || {BridgeV2Type, SchemaModule} <- RegisteredSchemas
+    ].
+
+api_ref(Module, Type, Method) ->
+    {Type, ref(Module, Method)}.
 
 bridge_api_union(Refs) ->
     Index = maps:from_list(Refs),

@@ -122,13 +116,40 @@ roots() ->
     end.
 
 fields(actions) ->
-    [] ++ enterprise_fields_actions().
+    registered_schema_fields().
+
+registered_schema_fields() ->
+    [
+        Module:fields(action)
+     || {_BridgeV2Type, Module} <- emqx_action_info:registered_schema_modules()
+    ].
 
 desc(actions) ->
     ?DESC("desc_bridges_v2");
 desc(_) ->
     undefined.
 
+-spec types() -> [action_type()].
+types() ->
+    proplists:get_keys(?MODULE:fields(actions)).
+
+-spec types_sc() -> ?ENUM([action_type()]).
+types_sc() ->
+    hoconsc:enum(types()).
+
+examples(Method) ->
+    MergeFun =
+        fun(Example, Examples) ->
+            maps:merge(Examples, Example)
+        end,
+    Fun =
+        fun(Module, Examples) ->
+            ConnectorExamples = erlang:apply(Module, bridge_v2_examples, [Method]),
+            lists:foldl(MergeFun, Examples, ConnectorExamples)
+        end,
+    SchemaModules = [Mod || {_, Mod} <- emqx_action_info:registered_schema_modules()],
+    lists:foldl(Fun, #{}, SchemaModules).
+
 -ifdef(TEST).
 -include_lib("hocon/include/hocon_types.hrl").
 schema_homogeneous_test() ->
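
Note: types/0 derives the action type list from the keys of fields(actions), which is itself assembled from emqx_action_info:registered_schema_modules(); types_sc/0 wraps the same list as a hocon enum for the swagger schema used by the /action_types endpoint earlier in this diff. A rough sketch of the relationship, assuming at least one action-info module is registered:

    1> Types = emqx_bridge_v2_schema:types().
    [kafka_producer, azure_event_hub_producer, syskeeper_forwarder]
    2> lists:all(fun erlang:is_atom/1, Types).
    true
    3> emqx_bridge_v2_schema:types_sc().
    %% the same atoms, packaged as a hoconsc enum schema for the HTTP API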
@@ -111,7 +111,7 @@ setup_mocks() ->
     catch meck:new(emqx_bridge_v2_schema, MeckOpts),
     meck:expect(
         emqx_bridge_v2_schema,
-        enterprise_api_schemas,
+        registered_api_schemas,
         1,
         fun(Method) -> [{bridge_type_bin(), hoconsc:ref(?MODULE, "api_" ++ Method)}] end
     ),

@@ -264,17 +264,17 @@ t_create_dry_run_connector_does_not_exist(_) ->
     BridgeConf = (bridge_config())#{<<"connector">> => <<"connector_does_not_exist">>},
     {error, _} = emqx_bridge_v2:create_dry_run(bridge_type(), BridgeConf).
 
-t_is_valid_bridge_v1(_) ->
+t_bridge_v1_is_valid(_) ->
     {ok, _} = emqx_bridge_v2:create(bridge_type(), my_test_bridge, bridge_config()),
-    true = emqx_bridge_v2:is_valid_bridge_v1(bridge_v1_type, my_test_bridge),
+    true = emqx_bridge_v2:bridge_v1_is_valid(bridge_v1_type, my_test_bridge),
     %% Add another channel/bridge to the connector
     {ok, _} = emqx_bridge_v2:create(bridge_type(), my_test_bridge_2, bridge_config()),
-    false = emqx_bridge_v2:is_valid_bridge_v1(bridge_v1_type, my_test_bridge),
+    false = emqx_bridge_v2:bridge_v1_is_valid(bridge_v1_type, my_test_bridge),
     ok = emqx_bridge_v2:remove(bridge_type(), my_test_bridge),
-    true = emqx_bridge_v2:is_valid_bridge_v1(bridge_v1_type, my_test_bridge_2),
+    true = emqx_bridge_v2:bridge_v1_is_valid(bridge_v1_type, my_test_bridge_2),
    ok = emqx_bridge_v2:remove(bridge_type(), my_test_bridge_2),
     %% Non existing bridge is a valid Bridge V1
-    true = emqx_bridge_v2:is_valid_bridge_v1(bridge_v1_type, my_test_bridge),
+    true = emqx_bridge_v2:bridge_v1_is_valid(bridge_v1_type, my_test_bridge),
     ok.
 
 t_manual_health_check(_) ->

@@ -647,10 +647,12 @@ t_load_config_success(_Config) ->
         {ok, _},
         update_root_config(RootConf0)
     ),
+    BridgeTypeBin = bin(BridgeType),
+    BridgeNameBin = bin(BridgeName),
     ?assertMatch(
         {ok, #{
-            type := BridgeType,
-            name := BridgeName,
+            type := BridgeTypeBin,
+            name := BridgeNameBin,
             raw_config := #{},
             resource_data := #{}
         }},

@@ -665,8 +667,8 @@ t_load_config_success(_Config) ->
     ),
     ?assertMatch(
         {ok, #{
-            type := BridgeType,
-            name := BridgeName,
+            type := BridgeTypeBin,
+            name := BridgeNameBin,
             raw_config := #{<<"some_key">> := <<"new_value">>},
             resource_data := #{}
         }},

@@ -860,3 +862,7 @@ wait_until(Fun, Timeout) when Timeout >= 0 ->
     end;
 wait_until(_, _) ->
     ct:fail("Wait until event did not happen").
+
+bin(Bin) when is_binary(Bin) -> Bin;
+bin(Str) when is_list(Str) -> list_to_binary(Str);
+bin(Atom) when is_atom(Atom) -> atom_to_binary(Atom, utf8).

@@ -177,7 +177,9 @@ all() ->
 groups() ->
     AllTCs = emqx_common_test_helpers:all(?MODULE),
     SingleOnlyTests = [
-        t_bridges_probe
+        t_bridges_probe,
+        t_broken_bridge_config,
+        t_fix_broken_bridge_config
     ],
     ClusterLaterJoinOnlyTCs = [
         % t_cluster_later_join_metrics

@@ -236,6 +238,14 @@ end_per_group(_, Config) ->
     emqx_cth_suite:stop(?config(group_apps, Config)),
     ok.
 
+init_per_testcase(t_action_types, Config) ->
+    case ?config(cluster_nodes, Config) of
+        undefined ->
+            init_mocks();
+        Nodes ->
+            [erpc:call(Node, ?MODULE, init_mocks, []) || Node <- Nodes]
+    end,
+    Config;
 init_per_testcase(_TestCase, Config) ->
     case ?config(cluster_nodes, Config) of
         undefined ->

@@ -260,8 +270,14 @@ end_per_testcase(_TestCase, Config) ->
 
 -define(CONNECTOR_IMPL, emqx_bridge_v2_dummy_connector).
 init_mocks() ->
-    meck:new(emqx_connector_ee_schema, [passthrough, no_link]),
-    meck:expect(emqx_connector_ee_schema, resource_type, 1, ?CONNECTOR_IMPL),
+    case emqx_release:edition() of
+        ee ->
+            meck:new(emqx_connector_ee_schema, [passthrough, no_link]),
+            meck:expect(emqx_connector_ee_schema, resource_type, 1, ?CONNECTOR_IMPL),
+            ok;
+        ce ->
+            ok
+    end,
     meck:new(?CONNECTOR_IMPL, [non_strict, no_link]),
     meck:expect(?CONNECTOR_IMPL, callback_mode, 0, async_if_possible),
     meck:expect(

@@ -289,7 +305,7 @@ init_mocks() ->
     ok = meck:expect(?CONNECTOR_IMPL, on_get_channels, fun(ResId) ->
         emqx_bridge_v2:get_channels_for_connector(ResId)
     end),
-    [?CONNECTOR_IMPL, emqx_connector_ee_schema].
+    ok.
 
 clear_resources() ->
     lists:foreach(

@@ -537,6 +553,117 @@ t_bridges_lifecycle(Config) ->
     {ok, 400, _} = request(post, uri([?ROOT]), ?KAFKA_BRIDGE(<<"a.b">>), Config),
     ok.
 
+t_broken_bridge_config(Config) ->
+    emqx_cth_suite:stop_apps([emqx_bridge]),
+    BridgeName = ?BRIDGE_NAME,
+    StartOps =
+        #{
+            config =>
+                "actions {\n"
+                "  "
+                ?BRIDGE_TYPE_STR
+                " {\n"
+                "  " ++ binary_to_list(BridgeName) ++
+                    " {\n"
+                    "  connector = does_not_exist\n"
+                    "  enable = true\n"
+                    "  kafka {\n"
+                    "    topic = test-topic-one-partition\n"
+                    "  }\n"
+                    "  local_topic = \"mqtt/local/topic\"\n"
+                    "  resource_opts {health_check_interval = 32s}\n"
+                    "  }\n"
+                    "  }\n"
+                    "}\n"
+                    "\n",
+            schema_mod => emqx_bridge_v2_schema
+        },
+    emqx_cth_suite:start_app(emqx_bridge, StartOps),
+
+    ?assertMatch(
+        {ok, 200, [
+            #{
+                <<"name">> := BridgeName,
+                <<"type">> := ?BRIDGE_TYPE,
+                <<"connector">> := <<"does_not_exist">>,
+                <<"status">> := <<"disconnected">>,
+                <<"error">> := <<"Pending installation">>
+            }
+        ]},
+        request_json(get, uri([?ROOT]), Config)
+    ),
+
+    BridgeID = emqx_bridge_resource:bridge_id(?BRIDGE_TYPE, ?BRIDGE_NAME),
+    ?assertEqual(
+        {ok, 204, <<>>},
+        request(delete, uri([?ROOT, BridgeID]), Config)
+    ),
+
+    ?assertEqual(
+        {ok, 200, []},
+        request_json(get, uri([?ROOT]), Config)
+    ),
+
+    ok.
+
+t_fix_broken_bridge_config(Config) ->
+    emqx_cth_suite:stop_apps([emqx_bridge]),
+    BridgeName = ?BRIDGE_NAME,
+    StartOps =
+        #{
+            config =>
+                "actions {\n"
+                "  "
+                ?BRIDGE_TYPE_STR
+                " {\n"
+                "  " ++ binary_to_list(BridgeName) ++
+                    " {\n"
+                    "  connector = does_not_exist\n"
+                    "  enable = true\n"
+                    "  kafka {\n"
+                    "    topic = test-topic-one-partition\n"
+                    "  }\n"
+                    "  local_topic = \"mqtt/local/topic\"\n"
+                    "  resource_opts {health_check_interval = 32s}\n"
+                    "  }\n"
+                    "  }\n"
+                    "}\n"
+                    "\n",
+            schema_mod => emqx_bridge_v2_schema
+        },
+    emqx_cth_suite:start_app(emqx_bridge, StartOps),
+
+    ?assertMatch(
+        {ok, 200, [
+            #{
+                <<"name">> := BridgeName,
+                <<"type">> := ?BRIDGE_TYPE,
+                <<"connector">> := <<"does_not_exist">>,
+                <<"status">> := <<"disconnected">>,
+                <<"error">> := <<"Pending installation">>
+            }
+        ]},
+        request_json(get, uri([?ROOT]), Config)
+    ),
+
+    BridgeID = emqx_bridge_resource:bridge_id(?BRIDGE_TYPE, ?BRIDGE_NAME),
+    request_json(
+        put,
+        uri([?ROOT, BridgeID]),
+        ?KAFKA_BRIDGE_UPDATE(?BRIDGE_NAME, ?CONNECTOR_NAME),
+        Config
+    ),
+
+    ?assertMatch(
+        {ok, 200, #{
+            <<"connector">> := ?CONNECTOR_NAME,
+            <<"status">> := <<"connected">>
+        }},
+        request_json(get, uri([?ROOT, BridgeID]), Config)
+    ),
+
+    ok.
+
 t_start_bridge_unknown_node(Config) ->
     {ok, 404, _} =
         request(

@@ -886,6 +1013,14 @@ t_cascade_delete_actions(Config) ->
     ),
     {ok, 200, []} = request_json(get, uri([?ROOT]), Config).
 
+t_action_types(Config) ->
+    Res = request_json(get, uri(["action_types"]), Config),
+    ?assertMatch({ok, 200, _}, Res),
+    {ok, 200, Types} = Res,
+    ?assert(is_list(Types), #{types => Types}),
+    ?assert(lists:all(fun is_binary/1, Types), #{types => Types}),
+    ok.
+
 %%% helpers
 listen_on_random_port() ->
     SockOpts = [binary, {active, false}, {packet, raw}, {reuseaddr, true}, {backlog, 1000}],

@@ -145,6 +145,39 @@ create_bridge(Config, Overrides) ->
     ct:pal("creating bridge with config: ~p", [BridgeConfig]),
     emqx_bridge_v2:create(BridgeType, BridgeName, BridgeConfig).
 
+list_bridges_api() ->
+    Params = [],
+    Path = emqx_mgmt_api_test_util:api_path(["actions"]),
+    AuthHeader = emqx_mgmt_api_test_util:auth_header_(),
+    Opts = #{return_all => true},
+    ct:pal("listing bridges (via http)"),
+    Res =
+        case emqx_mgmt_api_test_util:request_api(get, Path, "", AuthHeader, Params, Opts) of
+            {ok, {Status, Headers, Body0}} ->
+                {ok, {Status, Headers, emqx_utils_json:decode(Body0, [return_maps])}};
+            Error ->
+                Error
+        end,
+    ct:pal("list bridges result: ~p", [Res]),
+    Res.
+
+get_bridge_api(BridgeType, BridgeName) ->
+    BridgeId = emqx_bridge_resource:bridge_id(BridgeType, BridgeName),
+    Params = [],
+    Path = emqx_mgmt_api_test_util:api_path(["actions", BridgeId]),
+    AuthHeader = emqx_mgmt_api_test_util:auth_header_(),
+    Opts = #{return_all => true},
+    ct:pal("get bridge ~p (via http)", [{BridgeType, BridgeName}]),
+    Res =
+        case emqx_mgmt_api_test_util:request_api(get, Path, "", AuthHeader, Params, Opts) of
+            {ok, {Status, Headers, Body0}} ->
+                {ok, {Status, Headers, emqx_utils_json:decode(Body0, [return_maps])}};
+            Error ->
+                Error
+        end,
+    ct:pal("get bridge ~p result: ~p", [{BridgeType, BridgeName}, Res]),
+    Res.
+
 create_bridge_api(Config) ->
     create_bridge_api(Config, _Overrides = #{}).
 

@@ -1,6 +1,6 @@
 {application, emqx_bridge_azure_event_hub, [
     {description, "EMQX Enterprise Azure Event Hub Bridge"},
-    {vsn, "0.1.3"},
+    {vsn, "0.1.4"},
     {registered, []},
     {applications, [
         kernel,

@@ -114,6 +114,15 @@ fields(kafka_message) ->
     Fields0 = emqx_bridge_kafka:fields(kafka_message),
     Fields = proplists:delete(timestamp, Fields0),
     override_documentations(Fields);
+fields(action) ->
+    {azure_event_hub_producer,
+        mk(
+            hoconsc:map(name, ref(emqx_bridge_azure_event_hub, actions)),
+            #{
+                desc => <<"Azure Event Hub Actions Config">>,
+                required => false
+            }
+        )};
 fields(actions) ->
     Fields =
         override(

@@ -162,7 +171,7 @@ bridge_v2_examples(Method) ->
     [
         #{
             ?AEH_CONNECTOR_TYPE_BIN => #{
-                summary => <<"Azure Event Hub Bridge v2">>,
+                summary => <<"Azure Event Hub Action">>,
                 value => values({Method, bridge_v2})
             }
         }

@@ -207,7 +216,7 @@ values({post, bridge_v2}) ->
         #{
             enable => true,
             connector => <<"my_azure_event_hub_producer_connector">>,
-            name => <<"my_azure_event_hub_producer_bridge">>,
+            name => <<"my_azure_event_hub_producer_action">>,
             type => ?AEH_CONNECTOR_TYPE_BIN
         }
     );

@@ -0,0 +1,22 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%--------------------------------------------------------------------
+
+-module(emqx_bridge_azure_event_hub_action_info).
+
+-behaviour(emqx_action_info).
+
+-export([
+    bridge_v1_type_name/0,
+    action_type_name/0,
+    connector_type_name/0,
+    schema_module/0
+]).
+
+bridge_v1_type_name() -> azure_event_hub_producer.
+
+action_type_name() -> azure_event_hub_producer.
+
+connector_type_name() -> azure_event_hub_producer.
+
+schema_module() -> emqx_bridge_azure_event_hub.
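
Note: emqx_bridge_azure_event_hub_action_info above is the first of three emqx_action_info callback modules in this diff (Kafka and Syskeeper follow with the same shape). From these modules the callback contract appears to be four zero-arity functions; a hypothetical module for a new bridge would look like the sketch below (module name and return values are illustrative, and the behaviour may define callbacks not shown here):

    -module(emqx_bridge_example_action_info). %% hypothetical module
    -behaviour(emqx_action_info).

    -export([
        bridge_v1_type_name/0,
        action_type_name/0,
        connector_type_name/0,
        schema_module/0
    ]).

    %% The legacy bridge v1 type this action supersedes.
    bridge_v1_type_name() -> example.
    %% The action (bridge v2) type name.
    action_type_name() -> example_producer.
    %% The connector type the action runs on.
    connector_type_name() -> example_producer.
    %% The module holding the hocon schema for the action.
    schema_module() -> emqx_bridge_example.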
@@ -1,6 +1,6 @@
 {application, emqx_bridge_cassandra, [
     {description, "EMQX Enterprise Cassandra Bridge"},
-    {vsn, "0.1.5"},
+    {vsn, "0.1.6"},
     {registered, []},
     {applications, [
         kernel,

@@ -1,6 +1,6 @@
 {application, emqx_bridge_gcp_pubsub, [
     {description, "EMQX Enterprise GCP Pub/Sub Bridge"},
-    {vsn, "0.1.9"},
+    {vsn, "0.1.10"},
     {registered, []},
     {applications, [
         kernel,

@@ -101,7 +101,7 @@ fields(connector_config) ->
         )},
     {service_account_json,
         sc(
-            service_account_json(),
+            ?MODULE:service_account_json(),
             #{
                 required => true,
                 validator => fun ?MODULE:service_account_json_validator/1,

@@ -126,7 +126,7 @@ desc(_) ->
     undefined.
 
 write_syntax(type) ->
-    emqx_bridge_influxdb:write_syntax();
+    emqx_bridge_influxdb:write_syntax_type();
 write_syntax(required) ->
     true;
 write_syntax(validator) ->

@@ -1,6 +1,6 @@
 {application, emqx_bridge_http, [
     {description, "EMQX HTTP Bridge and Connector Application"},
-    {vsn, "0.1.4"},
+    {vsn, "0.1.5"},
     {registered, []},
     {applications, [kernel, stdlib, emqx_connector, emqx_resource, ehttpc]},
     {env, []},

@@ -46,14 +46,6 @@
 
 -export([validate_method/1, join_paths/2]).
 
--type connect_timeout() :: emqx_schema:duration() | infinity.
--type pool_type() :: random | hash.
-
--reflect_type([
-    connect_timeout/0,
-    pool_type/0
-]).
-
 -define(DEFAULT_PIPELINE_SIZE, 100).
 -define(DEFAULT_REQUEST_TIMEOUT_MS, 30_000).
 

@@ -89,7 +81,7 @@ fields(config) ->
         )},
     {pool_type,
         sc(
-            pool_type(),
+            hoconsc:enum([random, hash]),
             #{
                 default => random,
                 desc => ?DESC("pool_type")

@@ -11,7 +11,8 @@
 -import(hoconsc, [mk/2, enum/1, ref/2]).
 
 -export([
-    conn_bridge_examples/1
+    conn_bridge_examples/1,
+    write_syntax_type/0
 ]).
 
 -export([

@@ -29,6 +30,9 @@
 %% -------------------------------------------------------------------------------------------------
 %% api
 
+write_syntax_type() ->
+    typerefl:alias("string", write_syntax()).
+
 conn_bridge_examples(Method) ->
     [
         #{

@@ -154,7 +158,7 @@ desc(_) ->
     undefined.
 
 write_syntax(type) ->
-    ?MODULE:write_syntax();
+    write_syntax_type();
 write_syntax(required) ->
     true;
 write_syntax(validator) ->

@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_bridge_kafka, [
     {description, "EMQX Enterprise Kafka Bridge"},
-    {vsn, "0.1.11"},
+    {vsn, "0.1.12"},
     {registered, [emqx_bridge_kafka_consumer_sup]},
     {applications, [
         kernel,

@@ -12,7 +12,7 @@
         brod,
         brod_gssapi
     ]},
-    {env, []},
+    {env, [{emqx_action_info_module, emqx_bridge_kafka_action_info}]},
     {modules, []},
 
     {links, []}
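
Note: the {env, [{emqx_action_info_module, ...}]} entry above appears to be how a bridge application advertises its action-info module; emqx_action_info:registered_schema_modules(), used throughout this diff, can then discover registered actions without the old hardcoded lists. A hedged sketch of the same declaration for the hypothetical bridge from the earlier note:

    %% emqx_bridge_example.app.src (hypothetical application resource file)
    {application, emqx_bridge_example, [
        {description, "Hypothetical example bridge"},
        {vsn, "0.1.0"},
        {applications, [kernel, stdlib]},
        %% advertise the emqx_action_info callback module
        {env, [{emqx_action_info_module, emqx_bridge_example_action_info}]}
    ]}.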
@ -100,7 +100,7 @@ values({post, connector}) ->
|
||||||
values({post, KafkaType}) ->
|
values({post, KafkaType}) ->
|
||||||
maps:merge(
|
maps:merge(
|
||||||
#{
|
#{
|
||||||
name => <<"my_kafka_producer_bridge">>,
|
name => <<"my_kafka_producer_action">>,
|
||||||
type => <<"kafka_producer">>
|
type => <<"kafka_producer">>
|
||||||
},
|
},
|
||||||
values({put, KafkaType})
|
values({put, KafkaType})
|
||||||
|
@ -524,7 +524,18 @@ fields(consumer_kafka_opts) ->
|
||||||
fields(resource_opts) ->
|
fields(resource_opts) ->
|
||||||
SupportedFields = [health_check_interval],
|
SupportedFields = [health_check_interval],
|
||||||
CreationOpts = emqx_resource_schema:create_opts(_Overrides = []),
|
CreationOpts = emqx_resource_schema:create_opts(_Overrides = []),
|
||||||
lists:filter(fun({Field, _}) -> lists:member(Field, SupportedFields) end, CreationOpts).
|
lists:filter(fun({Field, _}) -> lists:member(Field, SupportedFields) end, CreationOpts);
|
||||||
|
fields(action_field) ->
|
||||||
|
{kafka_producer,
|
||||||
|
mk(
|
||||||
|
hoconsc:map(name, ref(emqx_bridge_kafka, kafka_producer_action)),
|
||||||
|
#{
|
||||||
|
desc => <<"Kafka Producer Action Config">>,
|
||||||
|
required => false
|
||||||
|
}
|
||||||
|
)};
|
||||||
|
fields(action) ->
|
||||||
|
fields(action_field).
|
||||||
|
|
||||||
desc("config_connector") ->
|
desc("config_connector") ->
|
||||||
?DESC("desc_config");
|
?DESC("desc_config");
|
||||||
|
|
|
@ -0,0 +1,22 @@
|
||||||
|
%%--------------------------------------------------------------------
|
||||||
|
%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||||
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
|
-module(emqx_bridge_kafka_action_info).
|
||||||
|
|
||||||
|
-behaviour(emqx_action_info).
|
||||||
|
|
||||||
|
-export([
|
||||||
|
bridge_v1_type_name/0,
|
||||||
|
action_type_name/0,
|
||||||
|
connector_type_name/0,
|
||||||
|
schema_module/0
|
||||||
|
]).
|
||||||
|
|
||||||
|
bridge_v1_type_name() -> kafka.
|
||||||
|
|
||||||
|
action_type_name() -> kafka_producer.
|
||||||
|
|
||||||
|
connector_type_name() -> kafka_producer.
|
||||||
|
|
||||||
|
schema_module() -> emqx_bridge_kafka.
|
|
@ -29,25 +29,27 @@ all() ->
|
||||||
emqx_common_test_helpers:all(?MODULE).
|
emqx_common_test_helpers:all(?MODULE).
|
||||||
|
|
||||||
init_per_suite(Config) ->
|
init_per_suite(Config) ->
|
||||||
_ = application:load(emqx_conf),
|
Apps = emqx_cth_suite:start(
|
||||||
ok = emqx_common_test_helpers:start_apps(apps_to_start_and_stop()),
|
[
|
||||||
application:ensure_all_started(telemetry),
|
emqx,
|
||||||
application:ensure_all_started(wolff),
|
emqx_conf,
|
||||||
application:ensure_all_started(brod),
|
emqx_connector,
|
||||||
|
emqx_bridge_kafka,
|
||||||
|
emqx_bridge,
|
||||||
|
emqx_rule_engine,
|
||||||
|
emqx_management,
|
||||||
|
{emqx_dashboard, "dashboard.listeners.http { enable = true, bind = 18083 }"}
|
||||||
|
],
|
||||||
|
#{work_dir => emqx_cth_suite:work_dir(Config)}
|
||||||
|
),
|
||||||
|
{ok, _} = emqx_common_test_http:create_default_app(),
|
||||||
emqx_bridge_kafka_impl_producer_SUITE:wait_until_kafka_is_up(),
|
emqx_bridge_kafka_impl_producer_SUITE:wait_until_kafka_is_up(),
|
||||||
Config.
|
[{apps, Apps} | Config].
|
||||||
|
|
||||||
end_per_suite(_Config) ->
|
end_per_suite(Config) ->
|
||||||
emqx_common_test_helpers:stop_apps(apps_to_start_and_stop()).
|
Apps = ?config(apps, Config),
|
||||||
|
emqx_cth_suite:stop(Apps),
|
||||||
apps_to_start_and_stop() ->
|
ok.
|
||||||
[
|
|
||||||
emqx,
|
|
||||||
emqx_conf,
|
|
||||||
emqx_connector,
|
|
||||||
emqx_bridge,
|
|
||||||
emqx_rule_engine
|
|
||||||
].
|
|
||||||
|
|
||||||
t_create_remove_list(_) ->
|
t_create_remove_list(_) ->
|
||||||
[] = emqx_bridge_v2:list(),
|
[] = emqx_bridge_v2:list(),
|
||||||
|
@ -165,6 +167,24 @@ t_unknown_topic(_Config) ->
|
||||||
ok
|
ok
|
||||||
end
|
end
|
||||||
),
|
),
|
||||||
|
?assertMatch(
|
||||||
|
{ok,
|
||||||
|
{{_, 200, _}, _, [
|
||||||
|
#{
|
||||||
|
<<"status">> := <<"disconnected">>,
|
||||||
|
<<"node_status">> := [#{<<"status">> := <<"disconnected">>}]
|
||||||
|
}
|
||||||
|
]}},
|
||||||
|
emqx_bridge_v2_testlib:list_bridges_api()
|
||||||
|
),
|
||||||
|
?assertMatch(
|
||||||
|
{ok,
|
||||||
|
{{_, 200, _}, _, #{
|
||||||
|
<<"status">> := <<"disconnected">>,
|
||||||
|
<<"node_status">> := [#{<<"status">> := <<"disconnected">>}]
|
||||||
|
}}},
|
||||||
|
emqx_bridge_v2_testlib:get_bridge_api(?TYPE, BridgeName)
|
||||||
|
),
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
check_send_message_with_bridge(BridgeName) ->
|
check_send_message_with_bridge(BridgeName) ->
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
%% -*- mode: erlang -*-
|
%% -*- mode: erlang -*-
|
||||||
{application, emqx_bridge_mqtt, [
|
{application, emqx_bridge_mqtt, [
|
||||||
{description, "EMQX MQTT Broker Bridge"},
|
{description, "EMQX MQTT Broker Bridge"},
|
||||||
{vsn, "0.1.4"},
|
{vsn, "0.1.5"},
|
||||||
{registered, []},
|
{registered, []},
|
||||||
{applications, [
|
{applications, [
|
||||||
kernel,
|
kernel,
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
{application, emqx_bridge_pulsar, [
|
{application, emqx_bridge_pulsar, [
|
||||||
{description, "EMQX Pulsar Bridge"},
|
{description, "EMQX Pulsar Bridge"},
|
||||||
{vsn, "0.1.7"},
|
{vsn, "0.1.8"},
|
||||||
{registered, []},
|
{registered, []},
|
||||||
{applications, [
|
{applications, [
|
||||||
kernel,
|
kernel,
|
||||||
|
|
|
@ -11,7 +11,7 @@
|
||||||
%%===========================================================================
|
%%===========================================================================
|
||||||
|
|
||||||
pulsar_producer_validations_test() ->
|
pulsar_producer_validations_test() ->
|
||||||
Name = my_producer,
|
Name = list_to_atom("my_producer"),
|
||||||
Conf0 = pulsar_producer_hocon(),
|
Conf0 = pulsar_producer_hocon(),
|
||||||
Conf1 =
|
Conf1 =
|
||||||
Conf0 ++
|
Conf0 ++
|
||||||
|
|
|
@ -20,7 +20,7 @@
|
||||||
-behaviour(ecpool_worker).
|
-behaviour(ecpool_worker).
|
||||||
|
|
||||||
%% hocon_schema callbacks
|
%% hocon_schema callbacks
|
||||||
-export([roots/0, fields/1]).
|
-export([namespace/0, roots/0, fields/1]).
|
||||||
|
|
||||||
%% HTTP API callbacks
|
%% HTTP API callbacks
|
||||||
-export([values/1]).
|
-export([values/1]).
|
||||||
|
@ -43,6 +43,8 @@
|
||||||
%% Internal callbacks
|
%% Internal callbacks
|
||||||
-export([publish_messages/3]).
|
-export([publish_messages/3]).
|
||||||
|
|
||||||
|
namespace() -> "rabbitmq".
|
||||||
|
|
||||||
roots() ->
|
roots() ->
|
||||||
[{config, #{type => hoconsc:ref(?MODULE, config)}}].
|
[{config, #{type => hoconsc:ref(?MODULE, config)}}].
|
||||||
|
|
||||||
|
|
|
@ -75,6 +75,15 @@ namespace() -> "syskeeper".
|
||||||
|
|
||||||
roots() -> [].
|
roots() -> [].
|
||||||
|
|
||||||
|
fields(action) ->
|
||||||
|
{syskeeper_forwarder,
|
||||||
|
mk(
|
||||||
|
hoconsc:map(name, ref(?MODULE, config)),
|
||||||
|
#{
|
||||||
|
desc => <<"Syskeeper Forwarder Action Config">>,
|
||||||
|
required => false
|
||||||
|
}
|
||||||
|
)};
|
||||||
fields(config) ->
|
fields(config) ->
|
||||||
[
|
[
|
||||||
{enable, mk(boolean(), #{desc => ?DESC("config_enable"), default => true})},
|
{enable, mk(boolean(), #{desc => ?DESC("config_enable"), default => true})},
|
||||||
|
@ -121,10 +130,16 @@ fields("creation_opts") ->
|
||||||
emqx_resource_schema:create_opts([{request_ttl, #{default => infinity}}]);
|
emqx_resource_schema:create_opts([{request_ttl, #{default => infinity}}]);
|
||||||
fields("post") ->
|
fields("post") ->
|
||||||
[type_field(), name_field() | fields(config)];
|
[type_field(), name_field() | fields(config)];
|
||||||
|
fields("post_bridge_v2") ->
|
||||||
|
fields("post");
|
||||||
fields("put") ->
|
fields("put") ->
|
||||||
fields(config);
|
fields(config);
|
||||||
|
fields("put_bridge_v2") ->
|
||||||
|
fields("put");
|
||||||
fields("get") ->
|
fields("get") ->
|
||||||
emqx_bridge_schema:status_fields() ++ fields("post").
|
emqx_bridge_schema:status_fields() ++ fields("post");
|
||||||
|
fields("get_bridge_v2") ->
|
||||||
|
fields("get").
|
||||||
|
|
||||||
desc(config) ->
|
desc(config) ->
|
||||||
?DESC("desc_config");
|
?DESC("desc_config");
|
||||||
|
|
|
@ -0,0 +1,22 @@
|
||||||
|
%%--------------------------------------------------------------------
|
||||||
|
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||||
|
%%--------------------------------------------------------------------
|
||||||
|
|
||||||
|
-module(emqx_bridge_syskeeper_action_info).
|
||||||
|
|
||||||
|
-behaviour(emqx_action_info).
|
||||||
|
|
||||||
|
-export([
|
||||||
|
bridge_v1_type_name/0,
|
||||||
|
action_type_name/0,
|
||||||
|
connector_type_name/0,
|
||||||
|
schema_module/0
|
||||||
|
]).
|
||||||
|
|
||||||
|
bridge_v1_type_name() -> syskeeper_forwarder.
|
||||||
|
|
||||||
|
action_type_name() -> syskeeper_forwarder.
|
||||||
|
|
||||||
|
connector_type_name() -> syskeeper_forwarder.
|
||||||
|
|
||||||
|
schema_module() -> emqx_bridge_syskeeper.
|
|
@ -65,21 +65,23 @@ end_per_group(_Group, _Config) ->
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
init_per_suite(Config) ->
|
init_per_suite(Config) ->
|
||||||
ok = emqx_common_test_helpers:start_apps([
|
Apps = emqx_cth_suite:start(
|
||||||
emqx_conf,
|
[
|
||||||
emqx_connector,
|
emqx_conf,
|
||||||
emqx_bridge,
|
emqx_connector,
|
||||||
emqx_bridge_syskeeper
|
emqx_bridge,
|
||||||
]),
|
emqx_bridge_syskeeper
|
||||||
_ = emqx_bridge_enterprise:module_info(),
|
],
|
||||||
|
#{work_dir => emqx_cth_suite:work_dir(Config)}
|
||||||
|
),
|
||||||
emqx_mgmt_api_test_util:init_suite(),
|
emqx_mgmt_api_test_util:init_suite(),
|
||||||
Config.
|
[{apps, Apps} | Config].
|
||||||
|
|
||||||
end_per_suite(_Config) ->
|
end_per_suite(Config) ->
|
||||||
|
Apps = ?config(apps, Config),
|
||||||
emqx_mgmt_api_test_util:end_suite(),
|
emqx_mgmt_api_test_util:end_suite(),
|
||||||
ok = emqx_common_test_helpers:stop_apps([
|
ok = emqx_cth_suite:stop(Apps),
|
||||||
emqx_bridge_syskeeper, emqx_bridge, emqx_connector, emqx_conf
|
ok.
|
||||||
]).
|
|
||||||
|
|
||||||
init_per_testcase(_Testcase, Config) ->
|
init_per_testcase(_Testcase, Config) ->
|
||||||
snabbkaffe:start_trace(),
|
snabbkaffe:start_trace(),
|
||||||
|
|
|
@ -21,6 +21,7 @@
|
||||||
|
|
||||||
-define(CLUSTER_MFA, cluster_rpc_mfa).
|
-define(CLUSTER_MFA, cluster_rpc_mfa).
|
||||||
-define(CLUSTER_COMMIT, cluster_rpc_commit).
|
-define(CLUSTER_COMMIT, cluster_rpc_commit).
|
||||||
|
-define(DEFAULT_INIT_TXN_ID, -1).
|
||||||
|
|
||||||
-record(cluster_rpc_mfa, {
|
-record(cluster_rpc_mfa, {
|
||||||
tnx_id :: pos_integer(),
|
tnx_id :: pos_integer(),
|
||||||
|
|
|
@ -44,7 +44,9 @@
|
||||||
read_next_mfa/1,
|
read_next_mfa/1,
|
||||||
trans_query/1,
|
trans_query/1,
|
||||||
trans_status/0,
|
trans_status/0,
|
||||||
on_leave_clean/0
|
on_leave_clean/0,
|
||||||
|
get_commit_lag/0,
|
||||||
|
get_commit_lag/1
|
||||||
]).
|
]).
|
||||||
|
|
||||||
-export([
|
-export([
|
||||||
|
@@ -231,13 +233,29 @@ make_initiate_call_req(M, F, A) ->
 -spec get_node_tnx_id(node()) -> integer().
 get_node_tnx_id(Node) ->
     case mnesia:wread({?CLUSTER_COMMIT, Node}) of
-        [] -> -1;
+        [] -> ?DEFAULT_INIT_TXN_ID;
         [#cluster_rpc_commit{tnx_id = TnxId}] -> TnxId
     end.
 
+%% @doc Return the commit lag of *this* node.
+-spec get_commit_lag() -> #{my_id := pos_integer(), latest := pos_integer()}.
+get_commit_lag() ->
+    {atomic, Result} = transaction(fun ?MODULE:get_commit_lag/1, [node()]),
+    Result.
+
+get_commit_lag(Node) ->
+    LatestId = get_cluster_tnx_id(),
+    LatestNode =
+        case mnesia:read(?CLUSTER_MFA, LatestId) of
+            [#?CLUSTER_MFA{initiator = N}] -> N;
+            _ -> undefined
+        end,
+    MyId = get_node_tnx_id(Node),
+    #{my_id => MyId, latest => LatestId, latest_node => LatestNode}.
+
 %% Checks whether the Mnesia tables used by this module are waiting to
 %% be loaded and from where.
--spec get_tables_status() -> #{atom() => {waiting, [node()]} | {disc | network, node()}}.
+-spec get_tables_status() -> #{atom() => {waiting, [node()]} | {loaded, local | node()}}.
 get_tables_status() ->
     maps:from_list([
         {Tab, do_get_tables_status(Tab)}
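A note on the new API: a caller can read the lag map and apply the same up-to-date rule that the boot-time sync (further down in this diff) uses. The sketch below is illustrative only; `is_lagging/0` is a hypothetical helper, not part of this change.

    %% Illustrative sketch: consume the lag map returned by the new
    %% emqx_cluster_rpc:get_commit_lag/0 (keys per the change above).
    is_lagging() ->
        #{my_id := MyId, latest := Latest} = emqx_cluster_rpc:get_commit_lag(),
        %% Up to date when this node has applied the latest commit,
        %% or when no commit exists yet (latest =:= 0).
        not (MyId >= Latest orelse Latest =:= 0).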
@@ -249,13 +267,16 @@ do_get_tables_status(Tab) ->
     TabNodes = proplists:get_value(all_nodes, Props),
     KnownDown = mnesia_recover:get_mnesia_downs(),
     LocalNode = node(),
-    case proplists:get_value(load_node, Props) of
+    %% load_node. Returns the name of the node that Mnesia loaded the table from.
+    %% The structure of the returned value is unspecified, but can be useful for debugging purposes.
+    LoadedFrom = proplists:get_value(load_node, Props),
+    case LoadedFrom of
         unknown ->
             {waiting, TabNodes -- [LocalNode | KnownDown]};
         LocalNode ->
-            {disc, LocalNode};
+            {loaded, local};
         Node ->
-            {network, Node}
+            {loaded, Node}
     end.
 
 %% Regardless of what MFA is returned, consider it a success),
@@ -1,6 +1,6 @@
 {application, emqx_conf, [
     {description, "EMQX configuration management"},
-    {vsn, "0.1.30"},
+    {vsn, "0.1.31"},
     {registered, []},
     {mod, {emqx_conf_app, []}},
     {applications, [kernel, stdlib, emqx_ctl]},

@@ -151,6 +151,9 @@ reset(Node, KeyPath, Opts) ->
 %% @doc Called from build script.
 %% TODO: move to a external escript after all refactoring is done
 dump_schema(Dir, SchemaModule) ->
+    %% TODO: Load all apps instead of only emqx_dashboard
+    %% as this will help schemas that searches for apps with
+    %% relevant schema definitions
     _ = application:load(emqx_dashboard),
     ok = emqx_dashboard_desc_cache:init(),
     lists:foreach(
@@ -292,7 +295,7 @@ hocon_schema_to_spec(?MAP(Name, Type), LocalModule) ->
         },
         SubRefs
     };
-hocon_schema_to_spec(?UNION(Types), LocalModule) ->
+hocon_schema_to_spec(?UNION(Types, _DisplayName), LocalModule) ->
     {OneOf, Refs} = lists:foldl(
         fun(Type, {Acc, RefsAcc}) ->
             {Schema, SubRefs} = hocon_schema_to_spec(Type, LocalModule),
@@ -305,149 +308,8 @@ hocon_schema_to_spec(?UNION(Types), LocalModule) ->
 hocon_schema_to_spec(Atom, _LocalModule) when is_atom(Atom) ->
     {#{type => enum, symbols => [Atom]}, []}.
 
-typename_to_spec("user_id_type()", _Mod) ->
-    #{type => enum, symbols => [clientid, username]};
-typename_to_spec("term()", _Mod) ->
-    #{type => string};
-typename_to_spec("boolean()", _Mod) ->
-    #{type => boolean};
-typename_to_spec("binary()", _Mod) ->
-    #{type => string};
-typename_to_spec("float()", _Mod) ->
-    #{type => number};
-typename_to_spec("integer()", _Mod) ->
-    #{type => number};
-typename_to_spec("non_neg_integer()", _Mod) ->
-    #{type => number, minimum => 0};
-typename_to_spec("number()", _Mod) ->
-    #{type => number};
-typename_to_spec("string()", _Mod) ->
-    #{type => string};
-typename_to_spec("atom()", _Mod) ->
-    #{type => string};
-typename_to_spec("duration()", _Mod) ->
-    #{type => duration};
-typename_to_spec("timeout_duration()", _Mod) ->
-    #{type => duration};
-typename_to_spec("duration_s()", _Mod) ->
-    #{type => duration};
-typename_to_spec("timeout_duration_s()", _Mod) ->
-    #{type => duration};
-typename_to_spec("duration_ms()", _Mod) ->
-    #{type => duration};
-typename_to_spec("timeout_duration_ms()", _Mod) ->
-    #{type => duration};
-typename_to_spec("percent()", _Mod) ->
-    #{type => percent};
-typename_to_spec("file()", _Mod) ->
-    #{type => string};
-typename_to_spec("ip_port()", _Mod) ->
-    #{type => ip_port};
-typename_to_spec("url()", _Mod) ->
-    #{type => url};
-typename_to_spec("bytesize()", _Mod) ->
-    #{type => 'byteSize'};
-typename_to_spec("wordsize()", _Mod) ->
-    #{type => 'byteSize'};
-typename_to_spec("qos()", _Mod) ->
-    #{type => enum, symbols => [0, 1, 2]};
-typename_to_spec("comma_separated_list()", _Mod) ->
-    #{type => comma_separated_string};
-typename_to_spec("comma_separated_atoms()", _Mod) ->
-    #{type => comma_separated_string};
-typename_to_spec("pool_type()", _Mod) ->
-    #{type => enum, symbols => [random, hash]};
-typename_to_spec("log_level()", _Mod) ->
-    #{
-        type => enum,
-        symbols => [
-            debug,
-            info,
-            notice,
-            warning,
-            error,
-            critical,
-            alert,
-            emergency,
-            all
-        ]
-    };
-typename_to_spec("rate()", _Mod) ->
-    #{type => string};
-typename_to_spec("capacity()", _Mod) ->
-    #{type => string};
-typename_to_spec("burst_rate()", _Mod) ->
-    #{type => string};
-typename_to_spec("failure_strategy()", _Mod) ->
-    #{type => enum, symbols => [force, drop, throw]};
-typename_to_spec("initial()", _Mod) ->
-    #{type => string};
-typename_to_spec("map()", _Mod) ->
-    #{type => object};
-typename_to_spec("#{" ++ _, Mod) ->
-    typename_to_spec("map()", Mod);
-typename_to_spec(Name, Mod) ->
-    Spec = range(Name),
-    Spec1 = remote_module_type(Spec, Name, Mod),
-    Spec2 = typerefl_array(Spec1, Name, Mod),
-    Spec3 = integer(Spec2, Name),
-    default_type(Spec3).
-
-default_type(nomatch) -> #{type => string};
-default_type(Type) -> Type.
-
-range(Name) ->
-    case string:split(Name, "..") of
-        %% 1..10 1..inf -inf..10
-        [MinStr, MaxStr] ->
-            Schema = #{type => number},
-            Schema1 = add_integer_prop(Schema, minimum, MinStr),
-            add_integer_prop(Schema1, maximum, MaxStr);
-        _ ->
-            nomatch
-    end.
-
-%% Module:Type
-remote_module_type(nomatch, Name, Mod) ->
-    case string:split(Name, ":") of
-        [_Module, Type] -> typename_to_spec(Type, Mod);
-        _ -> nomatch
-    end;
-remote_module_type(Spec, _Name, _Mod) ->
-    Spec.
-
-%% [string()] or [integer()] or [xxx].
-typerefl_array(nomatch, Name, Mod) ->
-    case string:trim(Name, leading, "[") of
-        Name ->
-            nomatch;
-        Name1 ->
-            case string:trim(Name1, trailing, "]") of
-                Name1 ->
-                    notmatch;
-                Name2 ->
-                    Schema = typename_to_spec(Name2, Mod),
-                    #{type => array, items => Schema}
-            end
-    end;
-typerefl_array(Spec, _Name, _Mod) ->
-    Spec.
-
-%% integer(1)
-integer(nomatch, Name) ->
-    case string:to_integer(Name) of
-        {Int, []} -> #{type => enum, symbols => [Int], default => Int};
-        _ -> nomatch
-    end;
-integer(Spec, _Name) ->
-    Spec.
-
-add_integer_prop(Schema, Key, Value) ->
-    case string:to_integer(Value) of
-        {error, no_integer} -> Schema;
-        {Int, []} when Key =:= minimum -> Schema#{Key => Int};
-        {Int, []} -> Schema#{Key => Int}
-    end.
+typename_to_spec(TypeStr, Module) ->
+    emqx_conf_schema_types:readable_dashboard(Module, TypeStr).
 
 to_bin(List) when is_list(List) ->
     case io_lib:printable_list(List) of
@@ -26,8 +26,6 @@
 -include_lib("emqx/include/logger.hrl").
 -include("emqx_conf.hrl").
 
--define(DEFAULT_INIT_TXN_ID, -1).
-
 start(_StartType, _StartArgs) ->
     try
         ok = init_conf()
@@ -52,31 +50,32 @@ unset_config_loaded() ->
 %% This function is named 'override' due to historical reasons.
 get_override_config_file() ->
     Node = node(),
+    Data = #{
+        wall_clock => erlang:statistics(wall_clock),
+        node => Node,
+        release => emqx_release:version_with_prefix()
+    },
     case emqx_app:init_load_done() of
         false ->
-            {error, #{node => Node, msg => "init_conf_load_not_done"}};
+            {error, Data#{msg => "init_conf_load_not_done"}};
         true ->
             case erlang:whereis(emqx_config_handler) of
                 undefined ->
-                    {error, #{node => Node, msg => "emqx_config_handler_not_ready"}};
+                    {error, Data#{msg => "emqx_config_handler_not_ready"}};
                 _ ->
                     Fun = fun() ->
                         TnxId = emqx_cluster_rpc:get_node_tnx_id(Node),
-                        WallClock = erlang:statistics(wall_clock),
                         Conf = emqx_config_handler:get_raw_cluster_override_conf(),
                         HasDeprecateFile = emqx_config:has_deprecated_file(),
-                        #{
-                            wall_clock => WallClock,
+                        Data#{
                             conf => Conf,
                             tnx_id => TnxId,
-                            node => Node,
-                            has_deprecated_file => HasDeprecateFile,
-                            release => emqx_release:version_with_prefix()
+                            has_deprecated_file => HasDeprecateFile
                         }
                     end,
                     case mria:ro_transaction(?CLUSTER_RPC_SHARD, Fun) of
                         {atomic, Res} -> {ok, Res};
-                        {aborted, Reason} -> {error, #{node => Node, msg => Reason}}
+                        {aborted, Reason} -> {error, Data#{msg => Reason}}
                     end
             end
     end.
@@ -105,7 +104,7 @@ init_load(TnxId) ->
             ok = emqx_app:set_config_loader(emqx_conf),
             ok;
         Module ->
-            ?SLOG(debug, #{
+            ?SLOG(info, #{
                 msg => "skip_init_config_load",
                 reason => "Some application has set another config loader",
                 loader => Module

@@ -126,7 +125,7 @@ sync_cluster_conf() ->
     case cluster_nodes() of
         [] ->
             %% The first core nodes is self.
-            ?SLOG(debug, #{
+            ?SLOG(info, #{
                 msg => "skip_sync_cluster_conf",
                 reason => "This is a single node, or the first node in the cluster"
             }),
@@ -138,70 +137,94 @@ sync_cluster_conf() ->
 %% @private Some core nodes are running, try to sync the cluster config from them.
 sync_cluster_conf2(Nodes) ->
     {Results, Failed} = emqx_conf_proto_v3:get_override_config_file(Nodes),
-    {Ready, NotReady0} = lists:partition(fun(Res) -> element(1, Res) =:= ok end, Results),
-    NotReady = lists:filter(fun(Res) -> element(1, Res) =:= error end, NotReady0),
-    case (Failed =/= [] orelse NotReady =/= []) of
-        true when Ready =/= [] ->
-            %% Some core nodes failed to reply.
-            Warning = #{
-                nodes => Nodes,
-                failed => Failed,
-                not_ready => NotReady,
-                msg => "ignored_nodes_when_sync_cluster_conf"
-            },
-            ?SLOG(warning, Warning);
-        true when Failed =/= [] ->
-            %% There are core nodes running but no one was able to reply.
-            ?SLOG(error, #{
-                msg => "failed_to_sync_cluster_conf",
-                nodes => Nodes,
-                failed => Failed,
-                not_ready => NotReady
-            });
-        true ->
-            %% There are core nodes booting up
-            ?SLOG(info, #{
-                msg => "peer_not_ready_for_config_sync",
-                reason => "The 'not_ready' peer node(s) are loading configs",
-                nodes => Nodes,
-                not_ready => NotReady
-            });
-        false ->
-            ok
-    end,
-    case Ready of
+    {Ready, NotReady} = lists:partition(fun(Res) -> element(1, Res) =:= ok end, Results),
+    LogData = #{peer_nodes => Nodes, self_node => node()},
+    case Failed ++ NotReady of
         [] ->
-            case should_proceed_with_boot() of
-                true ->
-                    %% Act as if this node is alone, so it can
-                    %% finish the boot sequence and load the
-                    %% config for other nodes to copy it.
-                    ?SLOG(info, #{
-                        msg => "skip_sync_cluster_conf",
-                        loading_from_disk => true,
-                        nodes => Nodes,
-                        failed => Failed,
-                        not_ready => NotReady
-                    }),
-                    {ok, ?DEFAULT_INIT_TXN_ID};
-                false ->
-                    %% retry in some time
-                    Jitter = rand:uniform(2000),
-                    Timeout = 10000 + Jitter,
-                    timer:sleep(Timeout),
-                    ?SLOG(warning, #{
-                        msg => "sync_cluster_conf_retry",
-                        timeout => Timeout,
-                        nodes => Nodes,
-                        failed => Failed,
-                        not_ready => NotReady
-                    }),
-                    sync_cluster_conf()
-            end;
+            ok;
         _ ->
+            ?SLOG(
+                warning,
+                LogData#{
+                    msg => "cluster_config_fetch_failures",
+                    failed_nodes => Failed,
+                    booting_nodes => NotReady
+                }
+            )
+    end,
+    MyRole = mria_rlog:role(),
+    case Ready of
+        [] when MyRole =:= replicant ->
+            %% replicant should never boot without copying from a core node
+            delay_and_retry(LogData#{role => replicant});
+        [] ->
+            %% none of the nodes are ready, either delay-and-retry or boot without wait
+            TableStatus = tx_commit_table_status(),
+            sync_cluster_conf5(TableStatus, LogData);
+        _ ->
+            %% copy config from the best node in the Ready list
             sync_cluster_conf3(Ready)
     end.
 
+%% None of the peer nodes are responsive, so we have to make a decision
+%% based on the commit lagging (if the commit table is loaded).
+%%
+%% It could be that the peer nodes are also booting up,
+%% however we cannot always wait because it may run into a dead-lock.
+%%
+%% Giving up wait here implies that some changes made to the peer node outside
+%% of cluster-rpc MFAs will be lost.
+%% e.g. stop all nodes, manually change cluster.hocon in one node
+%% then boot all nodes around the same time, the changed cluster.hocon may
+%% get lost if the node happen to copy config from others.
+sync_cluster_conf5({loaded, local}, LogData) ->
+    ?SLOG(info, LogData#{
+        msg => "skip_copy_cluster_config_from_peer_nodes",
+        explain => "Commit table loaded locally from disk, assuming that I have the latest config"
+    }),
+    {ok, ?DEFAULT_INIT_TXN_ID};
+sync_cluster_conf5({loaded, From}, LogData) ->
+    case get_commit_lag() of
+        #{my_id := MyId, latest := Latest} = Lagging when MyId >= Latest orelse Latest =:= 0 ->
+            ?SLOG(info, LogData#{
+                msg => "skip_copy_cluster_config_from_peer_nodes",
+                explain => "I have the latest cluster config commit",
+                commit_loaded_from => From,
+                lagging_info => Lagging
+            }),
+            {ok, ?DEFAULT_INIT_TXN_ID};
+        #{my_id := _MyId, latest := _Latest} = Lagging ->
+            delay_and_retry(LogData#{lagging_info => Lagging, commit_loaded_from => From})
+    end;
+sync_cluster_conf5({waiting, Waiting}, LogData) ->
+    %% this may never happen? since we waited for table before
+    delay_and_retry(LogData#{table_pending => Waiting}).
+
+get_commit_lag() ->
+    emqx_cluster_rpc:get_commit_lag().
+
+delay_and_retry(LogData) ->
+    Timeout = sync_delay_timeout(),
+    ?SLOG(warning, LogData#{
+        msg => "sync_cluster_conf_retry",
+        explain =>
+            "Cannot boot alone due to potentially stale data. "
+            "Will try sync cluster config again after delay",
+        delay => Timeout
+    }),
+    timer:sleep(Timeout),
+    sync_cluster_conf().
+
+-ifdef(TEST).
+sync_delay_timeout() ->
+    Jitter = rand:uniform(200),
+    1_000 + Jitter.
+-else.
+sync_delay_timeout() ->
+    Jitter = rand:uniform(2000),
+    10_000 + Jitter.
+-endif.
+
 %% @private Filter out the nodes which are running a newer version than this node.
 sync_cluster_conf3(Ready) ->
     case lists:filter(fun is_older_or_same_version/1, Ready) of
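The boot decision introduced above can be condensed as follows. This is an illustrative restatement only; `boot_decision/2` is a hypothetical name, not a function in the diff.

    %% Illustrative only: condensed form of the sync_cluster_conf5/2 dispatch.
    boot_decision({loaded, local}, _Lag) ->
        %% Commit table loaded from this node's own disk: boot with local config.
        boot;
    boot_decision({loaded, _From}, #{my_id := MyId, latest := Latest}) when
        MyId >= Latest orelse Latest =:= 0
    ->
        %% Table copied from a peer, but this node already has the latest commit.
        boot;
    boot_decision({loaded, _From}, _Lag) ->
        %% Lagging behind the cluster: delay and retry the config sync.
        retry;
    boot_decision({waiting, _Nodes}, _Lag) ->
        %% Commit table not loaded yet: delay and retry.
        retry.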
@@ -217,10 +240,10 @@ sync_cluster_conf3(Ready) ->
             ),
             ?SLOG(warning, #{
                 msg => "all_available_nodes_running_newer_version",
-                hint =>
-                    "Booting this node without syncing cluster config from peer core nodes "
+                explain =>
+                    "Booting this node without syncing cluster config from core nodes "
                     "because other nodes are running a newer version",
-                peer_nodes => NodesAndVersions
+                versions => NodesAndVersions
             }),
             {ok, ?DEFAULT_INIT_TXN_ID};
         Ready2 ->

@@ -246,7 +269,7 @@ sync_cluster_conf4(Ready) ->
     [{ok, Info} | _] = lists:sort(fun conf_sort/2, Ready),
     #{node := Node, conf := RawOverrideConf, tnx_id := TnxId} = Info,
     HasDeprecatedFile = has_deprecated_file(Info),
-    ?SLOG(debug, #{
+    ?SLOG(info, #{
         msg => "sync_cluster_conf_success",
         synced_from_node => Node,
         has_deprecated_file => HasDeprecatedFile,

@@ -263,19 +286,9 @@ sync_cluster_conf4(Ready) ->
     ok = sync_data_from_node(Node),
     {ok, TnxId}.
 
-should_proceed_with_boot() ->
+tx_commit_table_status() ->
     TablesStatus = emqx_cluster_rpc:get_tables_status(),
-    LocalNode = node(),
-    case maps:get(?CLUSTER_COMMIT, TablesStatus) of
-        {disc, LocalNode} ->
-            %% Loading locally; let this node finish its boot sequence
-            %% so others can copy the config from this one.
-            true;
-        _ ->
-            %% Loading from another node or still waiting for nodes to
-            %% be up. Try again.
-            false
-    end.
+    maps:get(?CLUSTER_COMMIT, TablesStatus).
 
 conf_sort({ok, #{tnx_id := Id1}}, {ok, #{tnx_id := Id2}}) when Id1 > Id2 -> true;
 conf_sort({ok, #{tnx_id := Id, wall_clock := W1}}, {ok, #{tnx_id := Id, wall_clock := W2}}) ->

@@ -28,21 +28,14 @@
 
 -include("emqx_conf.hrl").
 
--type log_level() :: debug | info | notice | warning | error | critical | alert | emergency | all.
--type file() :: string().
--type cipher() :: map().
-
 -behaviour(hocon_schema).
 
--reflect_type([
-    log_level/0,
-    file/0,
-    cipher/0
-]).
-
 -export([
     namespace/0, roots/0, fields/1, translations/0, translation/1, validations/0, desc/1, tags/0
 ]).
 
+-export([log_level/0]).
+
 -export([conf_get/2, conf_get/3, keys/2, filter/1]).
 -export([upgrade_raw_conf/1]).
@@ -548,7 +541,7 @@ fields("node") ->
             )},
         {"crash_dump_file",
             sc(
-                file(),
+                string(),
                 #{
                     mapping => "vm_args.-env ERL_CRASH_DUMP",
                     desc => ?DESC(node_crash_dump_file),

@@ -839,7 +832,7 @@ fields("rpc") ->
             )},
         {"certfile",
             sc(
-                file(),
+                string(),
                 #{
                     mapping => "gen_rpc.certfile",
                     converter => fun ensure_unicode_path/2,

@@ -848,7 +841,7 @@
             )},
         {"keyfile",
             sc(
-                file(),
+                string(),
                 #{
                     mapping => "gen_rpc.keyfile",
                     converter => fun ensure_unicode_path/2,

@@ -857,7 +850,7 @@
             )},
         {"cacertfile",
             sc(
-                file(),
+                string(),
                 #{
                     mapping => "gen_rpc.cacertfile",
                     converter => fun ensure_unicode_path/2,

@@ -985,7 +978,7 @@ fields("log") ->
             })},
         {"file",
             sc(
-                ?UNION([
+                hoconsc:union([
                     ?R_REF("log_file_handler"),
                     ?MAP(handler_name, ?R_REF("log_file_handler"))
                 ]),

@@ -1004,7 +997,7 @@ fields("log_file_handler") ->
     [
         {"path",
            sc(
-                file(),
+                string(),
                 #{
                     desc => ?DESC("log_file_handler_file"),
                     default => <<"${EMQX_LOG_DIR}/emqx.log">>,
@@ -1538,3 +1531,6 @@ ensure_unicode_path(Path, _) when is_list(Path) ->
     Path;
 ensure_unicode_path(Path, _) ->
     throw({"not_string", Path}).
+
+log_level() ->
+    hoconsc:enum([debug, info, notice, warning, error, critical, alert, emergency, all]).
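With `log_level/0` now exported, schema fields reference the enum through the module instead of a local typerefl type; the swagger test suite further down in this diff uses exactly this shape.

    %% Field definition using the exported enum (as in the test schema below).
    {log_level, hoconsc:mk(emqx_conf_schema:log_level(), #{})}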
@@ -0,0 +1,340 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%--------------------------------------------------------------------
+
+-module(emqx_conf_schema_types).
+
+-export([readable/2]).
+-export([readable_swagger/2, readable_dashboard/2, readable_docgen/2]).
+
+%% Takes a typerefl name or hocon schema's display name and returns
+%% a map of different flavors of more readable type specs.
+%% - swagger: for swagger spec
+%% - dashboard: to facilitate the dashboard UI rendering
+%% - docgen: for documenation generation
+readable(Module, TypeStr) when is_binary(TypeStr) ->
+    readable(Module, binary_to_list(TypeStr));
+readable(Module, TypeStr) when is_list(TypeStr) ->
+    try
+        %% Module is ignored so far as all types are distinguished by their names
+        readable(TypeStr)
+    catch
+        throw:unknown_type ->
+            fail(#{reason => unknown_type, type => TypeStr, module => Module})
+    end.
+
+readable_swagger(Module, TypeStr) ->
+    get_readable(Module, TypeStr, swagger).
+
+readable_dashboard(Module, TypeStr) ->
+    get_readable(Module, TypeStr, dashboard).
+
+readable_docgen(Module, TypeStr) ->
+    get_readable(Module, TypeStr, docgen).
+
+get_readable(Module, TypeStr, Flavor) ->
+    Map = readable(Module, TypeStr),
+    case maps:get(Flavor, Map, undefined) of
+        undefined -> fail(#{reason => unknown_type, module => Module, type => TypeStr});
+        Value -> Value
+    end.
+
+%% Fail the build or test. Production code should never get here.
+-spec fail(_) -> no_return().
+fail(Reason) ->
+    io:format(standard_error, "ERROR: ~p~n", [Reason]),
+    error(Reason).
+
+readable("boolean()") ->
+    #{
+        swagger => #{type => boolean},
+        dashboard => #{type => boolean},
+        docgen => #{type => "Boolean"}
+    };
+readable("binary()") ->
+    #{
+        swagger => #{type => string},
+        dashboard => #{type => string},
+        docgen => #{type => "String"}
+    };
+readable("float()") ->
+    #{
+        swagger => #{type => number},
+        dashboard => #{type => number},
+        docgen => #{type => "Float"}
+    };
+readable("integer()") ->
+    #{
+        swagger => #{type => integer},
+        dashboard => #{type => integer},
+        docgen => #{type => "Integer"}
+    };
+readable("non_neg_integer()") ->
+    #{
+        swagger => #{type => integer, minimum => 0},
+        dashboard => #{type => integer, minimum => 0},
+        docgen => #{type => "Integer(0..+inf)"}
+    };
+readable("pos_integer()") ->
+    #{
+        swagger => #{type => integer, minimum => 1},
+        dashboard => #{type => integer, minimum => 1},
+        docgen => #{type => "Integer(1..+inf)"}
+    };
+readable("number()") ->
+    #{
+        swagger => #{type => number},
+        dashboard => #{type => number},
+        docgen => #{type => "Number"}
+    };
+readable("string()") ->
+    #{
+        swagger => #{type => string},
+        dashboard => #{type => string},
+        docgen => #{type => "String"}
+    };
+readable("atom()") ->
+    #{
+        swagger => #{type => string},
+        dashboard => #{type => string},
+        docgen => #{type => "String"}
+    };
+readable("epoch_second()") ->
+    %% only for swagger
+    #{
+        swagger => #{
+            <<"oneOf">> => [
+                #{type => integer, example => 1640995200, description => <<"epoch-second">>},
+                #{
+                    type => string,
+                    example => <<"2022-01-01T00:00:00.000Z">>,
+                    format => <<"date-time">>
+                }
+            ]
+        }
+    };
+readable("epoch_millisecond()") ->
+    %% only for swagger
+    #{
+        swagger => #{
+            <<"oneOf">> => [
+                #{
+                    type => integer,
+                    example => 1640995200000,
+                    description => <<"epoch-millisecond">>
+                },
+                #{
+                    type => string,
+                    example => <<"2022-01-01T00:00:00.000Z">>,
+                    format => <<"date-time">>
+                }
+            ]
+        }
+    };
+readable("duration()") ->
+    #{
+        swagger => #{type => string, example => <<"12m">>},
+        dashboard => #{type => duration},
+        docgen => #{type => "String", example => <<"12m">>}
+    };
+readable("duration_s()") ->
+    #{
+        swagger => #{type => string, example => <<"1h">>},
+        dashboard => #{type => duration},
+        docgen => #{type => "String", example => <<"1h">>}
+    };
+readable("duration_ms()") ->
+    #{
+        swagger => #{type => string, example => <<"32s">>},
+        dashboard => #{type => duration},
+        docgen => #{type => "String", example => <<"32s">>}
+    };
+readable("timeout_duration()") ->
+    #{
+        swagger => #{type => string, example => <<"12m">>},
+        dashboard => #{type => duration},
+        docgen => #{type => "String", example => <<"12m">>}
+    };
+readable("timeout_duration_s()") ->
+    #{
+        swagger => #{type => string, example => <<"1h">>},
+        dashboard => #{type => duration},
+        docgen => #{type => "String", example => <<"1h">>}
+    };
+readable("timeout_duration_ms()") ->
+    #{
+        swagger => #{type => string, example => <<"32s">>},
+        dashboard => #{type => duration},
+        docgen => #{type => "String", example => <<"32s">>}
+    };
+readable("percent()") ->
+    #{
+        swagger => #{type => string, example => <<"12%">>},
+        dashboard => #{type => percent},
+        docgen => #{type => "String", example => <<"12%">>}
+    };
+readable("ip_port()") ->
+    #{
+        swagger => #{type => string, example => <<"127.0.0.1:80">>},
+        dashboard => #{type => ip_port},
+        docgen => #{type => "String", example => <<"127.0.0.1:80">>}
+    };
+readable("url()") ->
+    #{
+        swagger => #{type => string, example => <<"http://127.0.0.1">>},
+        dashboard => #{type => url},
+        docgen => #{type => "String", example => <<"http://127.0.0.1">>}
+    };
+readable("bytesize()") ->
+    #{
+        swagger => #{type => string, example => <<"32MB">>},
+        dashboard => #{type => 'byteSize'},
+        docgen => #{type => "String", example => <<"32MB">>}
+    };
+readable("wordsize()") ->
+    #{
+        swagger => #{type => string, example => <<"1024KB">>},
+        dashboard => #{type => 'wordSize'},
+        docgen => #{type => "String", example => <<"1024KB">>}
+    };
+readable("map(" ++ Map) ->
+    [$) | _MapArgs] = lists:reverse(Map),
+    %% TODO: for docgen, parse map args. e.g. Map(String,String)
+    #{
+        swagger => #{type => object, example => #{}},
+        dashboard => #{type => object},
+        docgen => #{type => "Map", example => #{}}
+    };
+readable("qos()") ->
+    #{
+        swagger => #{type => integer, minimum => 0, maximum => 2, example => 0},
+        dashboard => #{type => enum, symbols => [0, 1, 2]},
+        docgen => #{type => "Integer(0..2)", example => 0}
+    };
+readable("comma_separated_list()") ->
+    #{
+        swagger => #{type => string, example => <<"item1,item2">>},
+        dashboard => #{type => comma_separated_string},
+        docgen => #{type => "String", example => <<"item1,item2">>}
+    };
+readable("comma_separated_binary()") ->
+    #{
+        swagger => #{type => string, example => <<"item1,item2">>},
+        dashboard => #{type => comma_separated_string},
+        docgen => #{type => "String", example => <<"item1,item2">>}
+    };
+readable("comma_separated_atoms()") ->
+    #{
+        swagger => #{type => string, example => <<"item1,item2">>},
+        dashboard => #{type => comma_separated_string},
+        docgen => #{type => "String", example => <<"item1,item2">>}
+    };
+readable("service_account_json()") ->
+    %% This is a bit special,
+    %% service_account_josn in swagger spec is an object
+    %% the same in documenation.
+    %% However, dashboard wish it to be a string
+    %% TODO:
+    %% - Change type definition to stirng().
+    %% - Convert the embedded object to a escaped JSON string.
+    %% - Delete this function clause once the above is done.
+    #{
+        swagger => #{type => object},
+        dashboard => #{type => string},
+        docgen => #{type => "Map"}
+    };
+readable("json_binary()") ->
+    #{
+        swagger => #{type => string, example => <<"{\"a\": [1,true]}">>},
+        dashboard => #{type => string},
+        docgen => #{type => "String", example => <<"{\"a\": [1,true]}">>}
+    };
+readable("port_number()") ->
+    Result = try_range("1..65535"),
+    true = is_map(Result),
+    Result;
+readable("secret()") ->
+    #{
+        swagger => #{type => string, example => <<"R4ND0M/S∃CЯ∃T"/utf8>>},
+        dashboard => #{type => string},
+        docgen => #{type => "String", example => <<"R4ND0M/S∃CЯ∃T"/utf8>>}
+    };
+readable(TypeStr0) ->
+    case string:split(TypeStr0, ":") of
+        [ModuleStr, TypeStr] ->
+            Module = list_to_existing_atom(ModuleStr),
+            readable(Module, TypeStr);
+        _ ->
+            parse(TypeStr0)
+    end.
+
+parse(TypeStr) ->
+    try_parse(TypeStr, [
+        fun try_typerefl_array/1,
+        fun try_range/1
+    ]).
+
+try_parse(_TypeStr, []) ->
+    throw(unknown_type);
+try_parse(TypeStr, [ParseFun | More]) ->
+    case ParseFun(TypeStr) of
+        nomatch ->
+            try_parse(TypeStr, More);
+        Result ->
+            Result
+    end.
+
+%% [string()] or [integer()] or [xxx] or [xxx,...]
+try_typerefl_array(Name) ->
+    case string:trim(Name, leading, "[") of
+        Name ->
+            nomatch;
+        Name1 ->
+            case string:trim(Name1, trailing, ",.]") of
+                Name1 ->
+                    notmatch;
+                Name2 ->
+                    Flavors = readable(Name2),
+                    DocgenSpec = maps:get(docgen, Flavors),
+                    DocgenType = maps:get(type, DocgenSpec),
+                    #{
+                        swagger => #{type => array, items => maps:get(swagger, Flavors)},
+                        dashboard => #{type => array, items => maps:get(dashboard, Flavors)},
+                        docgen => #{type => "Array(" ++ DocgenType ++ ")"}
+                    }
+            end
+    end.
+
+try_range(Name) ->
+    case string:split(Name, "..") of
+        %% 1..10 1..inf -inf..10
+        [MinStr, MaxStr] ->
+            Schema0 = #{type => integer},
+            Schema1 = add_integer_prop(Schema0, minimum, MinStr),
+            Schema = add_integer_prop(Schema1, maximum, MaxStr),
+            #{
+                swagger => Schema,
+                dashboard => Schema,
+                docgen => #{type => "Integer(" ++ MinStr ++ ".." ++ MaxStr ++ ")"}
+            };
+        _ ->
+            nomatch
+    end.
+
+add_integer_prop(Schema, Key, Value) ->
+    case string:to_integer(Value) of
+        {error, no_integer} -> Schema;
+        {Int, []} -> Schema#{Key => Int}
+    end.
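To see the three flavors side by side, here is what the new module yields for "duration()". The shell transcript is reconstructed from the clauses above; `some_mod` is a placeholder module name (the module argument is ignored for built-in type names).

    1> emqx_conf_schema_types:readable_swagger(some_mod, "duration()").
    #{type => string, example => <<"12m">>}
    2> emqx_conf_schema_types:readable_dashboard(some_mod, "duration()").
    #{type => duration}
    3> emqx_conf_schema_types:readable_docgen(some_mod, "duration()").
    #{type => "String", example => <<"12m">>}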
@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_connector, [
     {description, "EMQX Data Integration Connectors"},
-    {vsn, "0.1.33"},
+    {vsn, "0.1.34"},
     {registered, []},
     {mod, {emqx_connector_app, []}},
     {applications, [

@@ -20,6 +20,7 @@
 -include_lib("hocon/include/hoconsc.hrl").
 
 -export([
+    pool_size/1,
     relational_db_fields/0,
     ssl_fields/0,
     prepare_statement_fields/0,

@@ -28,20 +29,17 @@
 ]).
 
 -export([
-    pool_size/1,
     database/1,
     username/1,
     auto_reconnect/1
 ]).
 
 -type database() :: binary().
--type pool_size() :: pos_integer().
 -type username() :: binary().
 -type password() :: binary().
 
 -reflect_type([
     database/0,
-    pool_size/0,
     username/0,
     password/0
 ]).

@@ -2,7 +2,7 @@
 {application, emqx_dashboard, [
     {description, "EMQX Web Dashboard"},
     % strict semver, bump manually!
-    {vsn, "5.0.29"},
+    {vsn, "5.0.30"},
     {modules, []},
     {registered, [emqx_dashboard_sup]},
     {applications, [
@@ -345,15 +345,7 @@ parse_spec_ref(Module, Path, Options) ->
             erlang:apply(Module, schema, [Path])
         catch
             Error:Reason:Stacktrace ->
-                %% This error is intended to fail the build
-                %% hence print to standard_error
-                io:format(
-                    standard_error,
-                    "Failed to generate swagger for path ~p in module ~p~n"
-                    "error:~p~nreason:~p~n~p~n",
-                    [Module, Path, Error, Reason, Stacktrace]
-                ),
-                error({failed_to_generate_swagger_spec, Module, Path})
+                failed_to_generate_swagger_spec(Module, Path, Error, Reason, Stacktrace)
         end,
     OperationId = maps:get('operationId', Schema),
     {Specs, Refs} = maps:fold(

@@ -369,6 +361,24 @@ parse_spec_ref(Module, Path, Options) ->
     RouteOpts = generate_route_opts(Schema, Options),
     {OperationId, Specs, Refs, RouteOpts}.
 
+-ifdef(TEST).
+-spec failed_to_generate_swagger_spec(_, _, _, _, _) -> no_return().
+failed_to_generate_swagger_spec(Module, Path, _Error, _Reason, _Stacktrace) ->
+    error({failed_to_generate_swagger_spec, Module, Path}).
+-else.
+-spec failed_to_generate_swagger_spec(_, _, _, _, _) -> no_return().
+failed_to_generate_swagger_spec(Module, Path, Error, Reason, Stacktrace) ->
+    %% This error is intended to fail the build
+    %% hence print to standard_error
+    io:format(
+        standard_error,
+        "Failed to generate swagger for path ~p in module ~p~n"
+        "error:~p~nreason:~p~n~p~n",
+        [Module, Path, Error, Reason, Stacktrace]
+    ),
+    error({failed_to_generate_swagger_spec, Module, Path}).
+
+-endif.
 generate_route_opts(Schema, Options) ->
     #{filter => compose_filters(filter(Options), custom_filter(Schema))}.
 

@@ -776,7 +786,7 @@ hocon_schema_to_spec(?MAP(Name, Type), LocalModule) ->
         },
         SubRefs
     };
-hocon_schema_to_spec(?UNION(Types), LocalModule) ->
+hocon_schema_to_spec(?UNION(Types, _DisplayName), LocalModule) ->
     {OneOf, Refs} = lists:foldl(
         fun(Type, {Acc, RefsAcc}) ->
             {Schema, SubRefs} = hocon_schema_to_spec(Type, LocalModule),
@@ -789,193 +799,8 @@ hocon_schema_to_spec(?UNION(Types), LocalModule) ->
 hocon_schema_to_spec(Atom, _LocalModule) when is_atom(Atom) ->
     {#{type => string, enum => [Atom]}, []}.
 
-typename_to_spec("term()", _Mod) ->
-    #{type => string, example => <<"any">>};
-typename_to_spec("boolean()", _Mod) ->
-    #{type => boolean};
-typename_to_spec("binary()", _Mod) ->
-    #{type => string};
-typename_to_spec("float()", _Mod) ->
-    #{type => number};
-typename_to_spec("integer()", _Mod) ->
-    #{type => integer};
-typename_to_spec("non_neg_integer()", _Mod) ->
-    #{type => integer, minimum => 0};
-typename_to_spec("pos_integer()", _Mod) ->
-    #{type => integer, minimum => 1};
-typename_to_spec("number()", _Mod) ->
-    #{type => number};
-typename_to_spec("string()", _Mod) ->
-    #{type => string};
-typename_to_spec("atom()", _Mod) ->
-    #{type => string};
-typename_to_spec("epoch_second()", _Mod) ->
-    #{
-        <<"oneOf">> => [
-            #{type => integer, example => 1640995200, description => <<"epoch-second">>},
-            #{type => string, example => <<"2022-01-01T00:00:00.000Z">>, format => <<"date-time">>}
-        ]
-    };
-typename_to_spec("epoch_millisecond()", _Mod) ->
-    #{
-        <<"oneOf">> => [
-            #{type => integer, example => 1640995200000, description => <<"epoch-millisecond">>},
-            #{type => string, example => <<"2022-01-01T00:00:00.000Z">>, format => <<"date-time">>}
-        ]
-    };
-typename_to_spec("duration()", _Mod) ->
-    #{type => string, example => <<"12m">>};
-typename_to_spec("duration_s()", _Mod) ->
-    #{type => string, example => <<"1h">>};
-typename_to_spec("duration_ms()", _Mod) ->
-    #{type => string, example => <<"32s">>};
-typename_to_spec("timeout_duration()", _Mod) ->
-    #{type => string, example => <<"12m">>};
-typename_to_spec("timeout_duration_s()", _Mod) ->
-    #{type => string, example => <<"1h">>};
-typename_to_spec("timeout_duration_ms()", _Mod) ->
-    #{type => string, example => <<"32s">>};
-typename_to_spec("percent()", _Mod) ->
-    #{type => number, example => <<"12%">>};
-typename_to_spec("file()", _Mod) ->
-    #{type => string, example => <<"/path/to/file">>};
-typename_to_spec("ip_port()", _Mod) ->
-    #{type => string, example => <<"127.0.0.1:80">>};
-typename_to_spec("write_syntax()", _Mod) ->
-    #{
-        type => string,
-        example =>
-            <<"${topic},clientid=${clientid}", " ", "payload=${payload},",
-                "${clientid}_int_value=${payload.int_key}i,", "bool=${payload.bool}">>
-    };
-typename_to_spec("url()", _Mod) ->
-    #{type => string, example => <<"http://127.0.0.1">>};
-typename_to_spec("connect_timeout()", Mod) ->
-    typename_to_spec("timeout()", Mod);
-typename_to_spec("timeout()", _Mod) ->
-    #{
-        <<"oneOf">> => [
-            #{type => string, example => infinity},
-            #{type => integer}
-        ],
-        example => infinity
-    };
-typename_to_spec("bytesize()", _Mod) ->
-    #{type => string, example => <<"32MB">>};
-typename_to_spec("wordsize()", _Mod) ->
-    #{type => string, example => <<"1024KB">>};
-typename_to_spec("map()", _Mod) ->
-    #{type => object, example => #{}};
-typename_to_spec("service_account_json()", _Mod) ->
-    #{type => object, example => #{}};
-typename_to_spec("#{" ++ _, Mod) ->
-    typename_to_spec("map()", Mod);
-typename_to_spec("qos()", _Mod) ->
-    #{type => integer, minimum => 0, maximum => 2, example => 0};
-typename_to_spec("{binary(), binary()}", _Mod) ->
-    #{type => object, example => #{}};
-typename_to_spec("{string(), string()}", _Mod) ->
-    #{type => object, example => #{}};
-typename_to_spec("comma_separated_list()", _Mod) ->
-    #{type => string, example => <<"item1,item2">>};
-typename_to_spec("comma_separated_binary()", _Mod) ->
-    #{type => string, example => <<"item1,item2">>};
-typename_to_spec("comma_separated_atoms()", _Mod) ->
-    #{type => string, example => <<"item1,item2">>};
-typename_to_spec("pool_type()", _Mod) ->
-    #{type => string, enum => [random, hash]};
-typename_to_spec("log_level()", _Mod) ->
-    #{
-        type => string,
-        enum => [debug, info, notice, warning, error, critical, alert, emergency, all]
-    };
-typename_to_spec("rate()", _Mod) ->
-    #{type => string, example => <<"10MB">>};
-typename_to_spec("burst()", _Mod) ->
-    #{type => string, example => <<"100MB">>};
-typename_to_spec("burst_rate()", _Mod) ->
-    %% 0/0s = no burst
-    #{type => string, example => <<"10MB">>};
-typename_to_spec("failure_strategy()", _Mod) ->
-    #{type => string, example => <<"force">>};
-typename_to_spec("initial()", _Mod) ->
-    #{type => string, example => <<"0MB">>};
-typename_to_spec("bucket_name()", _Mod) ->
-    #{type => string, example => <<"retainer">>};
-typename_to_spec("json_binary()", _Mod) ->
-    #{type => string, example => <<"{\"a\": [1,true]}">>};
-typename_to_spec("port_number()", _Mod) ->
-    range("1..65535");
-typename_to_spec("secret_access_key()", _Mod) ->
-    #{type => string, example => <<"TW8dPwmjpjJJuLW....">>};
-typename_to_spec("secret()", _Mod) ->
-    %% TODO: ideally, this should be dispatched to the module that defines this type
-    #{type => string, example => <<"R4ND0M/S∃CЯ∃T"/utf8>>};
-typename_to_spec(Name, Mod) ->
-    try_convert_to_spec(Name, Mod, [
-        fun try_remote_module_type/2,
-        fun try_typerefl_array/2,
-        fun try_range/2,
-        fun try_integer/2
-    ]).
-
-range(Name) ->
-    #{} = try_range(Name, undefined).
-
-try_convert_to_spec(Name, Mod, []) ->
-    throw({error, #{msg => <<"Unsupported Type">>, type => Name, module => Mod}});
-try_convert_to_spec(Name, Mod, [Converter | Rest]) ->
-    case Converter(Name, Mod) of
-        nomatch -> try_convert_to_spec(Name, Mod, Rest);
-        Spec -> Spec
-    end.
-
-try_range(Name, _Mod) ->
-    case string:split(Name, "..") of
-        %% 1..10 1..inf -inf..10
-        [MinStr, MaxStr] ->
-            Schema = #{type => integer},
-            Schema1 = add_integer_prop(Schema, minimum, MinStr),
-            add_integer_prop(Schema1, maximum, MaxStr);
-        _ ->
-            nomatch
-    end.
-
-%% Module:Type
-try_remote_module_type(Name, Mod) ->
-    case string:split(Name, ":") of
-        [_Module, Type] -> typename_to_spec(Type, Mod);
-        _ -> nomatch
-    end.
-
-%% [string()] or [integer()] or [xxx] or [xxx,...]
-try_typerefl_array(Name, Mod) ->
-    case string:trim(Name, leading, "[") of
-        Name ->
-            nomatch;
-        Name1 ->
-            case string:trim(Name1, trailing, ",.]") of
-                Name1 ->
-                    notmatch;
-                Name2 ->
-                    Schema = typename_to_spec(Name2, Mod),
-                    #{type => array, items => Schema}
-            end
-    end.
-
-%% integer(1)
-try_integer(Name, _Mod) ->
-    case string:to_integer(Name) of
-        {Int, []} -> #{type => integer, enum => [Int], default => Int};
-        _ -> nomatch
-    end.
-
-add_integer_prop(Schema, Key, Value) ->
-    case string:to_integer(Value) of
-        {error, no_integer} -> Schema;
-        {Int, []} when Key =:= minimum -> Schema#{Key => Int};
-        {Int, []} -> Schema#{Key => Int}
-    end.
+typename_to_spec(TypeStr, Module) ->
+    emqx_conf_schema_types:readable_swagger(Module, TypeStr).
 
 to_bin(List) when is_list(List) ->
     case io_lib:printable_list(List) of
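After this change both the dashboard and the swagger flavors of `typename_to_spec/2` are one-line delegations to the new shared module, so a type such as "qos()" resolves through `readable("qos()")` in both places. The transcript below is reconstructed from that clause; `some_mod` is a placeholder module name.

    1> emqx_conf_schema_types:readable_swagger(some_mod, "qos()").
    #{type => integer, minimum => 0, maximum => 2, example => 0}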
@@ -816,7 +816,7 @@ to_schema(Body) ->
 fields(good_ref) ->
     [
         {'webhook-host', mk(emqx_schema:ip_port(), #{default => <<"127.0.0.1:80">>})},
-        {log_dir, mk(emqx_schema:file(), #{example => "var/log/emqx"})},
+        {log_dir, mk(string(), #{example => "var/log/emqx"})},
         {tag, mk(binary(), #{desc => <<"tag">>})}
     ];
 fields(nest_ref) ->
@@ -317,68 +317,68 @@ t_sub_fields(_Config) ->
     validate(Path, Object, ExpectRefs),
     ok.
 
-t_complicated_type(_Config) ->
+t_complex_type(_Config) ->
     Path = "/ref/complex_type",
-    Object = #{
-        <<"content">> => #{
-            <<"application/json">> =>
-                #{
-                    <<"schema">> => #{
-                        <<"properties">> =>
-                            [
-                                {<<"no_neg_integer">>, #{minimum => 0, type => integer}},
-                                {<<"url">>, #{example => <<"http://127.0.0.1">>, type => string}},
-                                {<<"server">>, #{example => <<"127.0.0.1:80">>, type => string}},
-                                {<<"connect_timeout">>, #{
-                                    example => infinity,
-                                    <<"oneOf">> => [
-                                        #{example => infinity, type => string},
-                                        #{type => integer}
-                                    ]
-                                }},
-                                {<<"pool_type">>, #{enum => [random, hash], type => string}},
-                                {<<"timeout">>, #{
-                                    example => infinity,
-                                    <<"oneOf">> => [
-                                        #{example => infinity, type => string}, #{type => integer}
-                                    ]
-                                }},
-                                {<<"bytesize">>, #{example => <<"32MB">>, type => string}},
-                                {<<"wordsize">>, #{example => <<"1024KB">>, type => string}},
-                                {<<"maps">>, #{example => #{}, type => object}},
-                                {<<"comma_separated_list">>, #{
-                                    example => <<"item1,item2">>, type => string
-                                }},
-                                {<<"comma_separated_atoms">>, #{
-                                    example => <<"item1,item2">>, type => string
-                                }},
-                                {<<"log_level">>, #{
-                                    enum => [
-                                        debug,
-                                        info,
-                                        notice,
-                                        warning,
-                                        error,
-                                        critical,
-                                        alert,
-                                        emergency,
-                                        all
-                                    ],
-                                    type => string
-                                }},
-                                {<<"fix_integer">>, #{
-                                    default => 100, enum => [100], type => integer
-                                }}
-                            ],
-                        <<"type">> => object
-                    }
-                }
-        }
-    },
     {OperationId, Spec, Refs, #{}} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, Path, #{}),
     ?assertEqual(test, OperationId),
     Response = maps:get(responses, maps:get(post, Spec)),
-    ?assertEqual(Object, maps:get(<<"200">>, Response)),
+    ResponseBody = maps:get(<<"200">>, Response),
+    Content = maps:get(<<"content">>, ResponseBody),
+    JsonContent = maps:get(<<"application/json">>, Content),
+    Schema = maps:get(<<"schema">>, JsonContent),
+    ?assertMatch(#{<<"type">> := object}, Schema),
+    Properties = maps:get(<<"properties">>, Schema),
+    ?assertMatch(
+        [
+            {<<"no_neg_integer">>, #{minimum := 0, type := integer}},
+            {<<"url">>, #{
+                example := <<"http://127.0.0.1">>, type := string
+            }},
+            {<<"server">>, #{
+                example := <<"127.0.0.1:80">>, type := string
+            }},
+            {<<"connect_timeout">>, #{
+                example := _, type := string
+            }},
+            {<<"pool_type">>, #{
+                enum := [random, hash], type := string
+            }},
+            {<<"timeout">>, #{
+                <<"oneOf">> := [
+                    #{example := _, type := string},
+                    #{enum := [infinity], type := string}
+                ]
+            }},
+            {<<"bytesize">>, #{
+                example := <<"32MB">>, type := string
+            }},
+            {<<"wordsize">>, #{
+                example := <<"1024KB">>, type := string
+            }},
+            {<<"maps">>, #{example := #{}, type := object}},
+            {<<"comma_separated_list">>, #{
+                example := <<"item1,item2">>, type := string
+            }},
+            {<<"comma_separated_atoms">>, #{
+                example := <<"item1,item2">>, type := string
+            }},
+            {<<"log_level">>, #{
+                enum := [
+                    debug,
+                    info,
+                    notice,
+                    warning,
+                    error,
+                    critical,
+                    alert,
+                    emergency,
+                    all
+                ],
+                type := string
+            }}
+        ],
+        Properties
+    ),
     ?assertEqual([], Refs),
     ok.
@@ -410,7 +410,7 @@ t_ref_array_with_key(_Config) ->
         {<<"percent_ex">>, #{
             description => <<"percent example">>,
             example => <<"12%">>,
-            type => number
+            type => string
         }},
         {<<"duration_ms_ex">>, #{
             description => <<"duration ms example">>,

@@ -647,17 +647,16 @@ schema("/ref/complex_type") ->
             {no_neg_integer, hoconsc:mk(non_neg_integer(), #{})},
             {url, hoconsc:mk(url(), #{})},
             {server, hoconsc:mk(emqx_schema:ip_port(), #{})},
-            {connect_timeout,
-                hoconsc:mk(emqx_bridge_http_connector:connect_timeout(), #{})},
-            {pool_type, hoconsc:mk(emqx_bridge_http_connector:pool_type(), #{})},
-            {timeout, hoconsc:mk(timeout(), #{})},
+            {connect_timeout, hoconsc:mk(emqx_schema:timeout_duration(), #{})},
+            {pool_type, hoconsc:mk(hoconsc:enum([random, hash]), #{})},
+            {timeout,
+                hoconsc:mk(hoconsc:union([infinity, emqx_schema:timeout_duration()]), #{})},
             {bytesize, hoconsc:mk(emqx_schema:bytesize(), #{})},
             {wordsize, hoconsc:mk(emqx_schema:wordsize(), #{})},
             {maps, hoconsc:mk(map(), #{})},
             {comma_separated_list, hoconsc:mk(emqx_schema:comma_separated_list(), #{})},
             {comma_separated_atoms, hoconsc:mk(emqx_schema:comma_separated_atoms(), #{})},
-            {log_level, hoconsc:mk(emqx_conf_schema:log_level(), #{})},
-            {fix_integer, hoconsc:mk(typerefl:integer(100), #{})}
+            {log_level, hoconsc:mk(emqx_conf_schema:log_level(), #{})}
         ]
     }
 }

@@ -684,7 +683,7 @@ to_schema(Object) ->
 fields(good_ref) ->
     [
         {'webhook-host', mk(emqx_schema:ip_port(), #{default => <<"127.0.0.1:80">>})},
-        {log_dir, mk(emqx_schema:file(), #{example => "var/log/emqx"})},
+        {log_dir, mk(string(), #{example => "var/log/emqx"})},
         {tag, mk(binary(), #{desc => <<"tag">>})}
     ];
 fields(nest_ref) ->