Merge remote-tracking branch 'origin/release-54'

commit c1f2287b86
@@ -0,0 +1,2 @@
+# See: emqx_common_test_helpers:copy_acl_conf/0
+etc/acl.conf
@@ -30,7 +30,7 @@
 {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.9"}}},
 {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.17.0"}}},
 {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "3.3.0"}}},
-{hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.40.1"}}},
+{hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.40.3"}}},
 {emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.3"}}},
 {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}},
 {recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}},
@@ -213,7 +213,7 @@ caps(#channel{clientinfo = #{zone := Zone}}) ->
 -spec init(emqx_types:conninfo(), opts()) -> channel().
 init(
 ConnInfo = #{
-peername := {PeerHost, _Port},
+peername := {PeerHost, PeerPort},
 sockname := {_Host, SockPort}
 },
 #{
@@ -237,6 +237,7 @@ init(
 listener => ListenerId,
 protocol => Protocol,
 peerhost => PeerHost,
+peerport => PeerPort,
 sockport => SockPort,
 clientid => undefined,
 username => undefined,
@@ -135,7 +135,8 @@
 %% save the updated config to the emqx_override.conf file
 %% defaults to `true`
 persistent => boolean(),
-override_to => local | cluster
+override_to => local | cluster,
+lazy_evaluator => fun((function()) -> term())
 }.
 -type update_args() :: {update_cmd(), Opts :: update_opts()}.
 -type update_stage() :: pre_config_update | post_config_update.
@@ -616,14 +617,14 @@ save_to_override_conf(true, RawConf, Opts) ->
 undefined ->
 ok;
 FileName ->
-backup_and_write(FileName, hocon_pp:do(RawConf, #{}))
+backup_and_write(FileName, hocon_pp:do(RawConf, Opts))
 end;
-save_to_override_conf(false, RawConf, _Opts) ->
+save_to_override_conf(false, RawConf, Opts) ->
 case cluster_hocon_file() of
 undefined ->
 ok;
 FileName ->
-backup_and_write(FileName, hocon_pp:do(RawConf, #{}))
+backup_and_write(FileName, hocon_pp:do(RawConf, Opts))
 end.

 %% @private This is the same human-readable timestamp format as
@@ -979,7 +979,7 @@ handle_cast(Req, State) ->
 %% rate limit

 -type limiter_type() :: emqx_limiter_container:limiter_type().
--type limiter() :: emqx_limiter_container:limiter().
+-type limiter() :: emqx_limiter_container:container().
 -type check_succ_handler() ::
 fun((any(), list(any()), state()) -> _).
@@ -138,7 +138,7 @@ compact_errors(SchemaModule, Error, Stacktrace) ->
 }}.

 %% @doc This is only used in static check scripts in the CI.
--spec load_and_check(module(), filename:filename_all()) -> {ok, term()} | {error, any()}.
+-spec load_and_check(module(), file:name_all()) -> {ok, term()} | {error, any()}.
 load_and_check(SchemaModule, File) ->
 try
 do_load_and_check(SchemaModule, File)
@@ -125,12 +125,12 @@ when
 -callback 'client.subscribe'(emqx_types:clientinfo(), emqx_types:properties(), TopicFilters) ->
 fold_callback_result(TopicFilters)
 when
-TopicFilters :: list({emqx_topic:topic(), map()}).
+TopicFilters :: list({emqx_types:topic(), map()}).

 -callback 'client.unsubscribe'(emqx_types:clientinfo(), emqx_types:properties(), TopicFilters) ->
 fold_callback_result(TopicFilters)
 when
-TopicFilters :: list({emqx_topic:topic(), map()}).
+TopicFilters :: list({emqx_types:topic(), map()}).

 -callback 'client.timeout'(_TimerReference :: reference(), _Msg :: term(), Replies) ->
 fold_callback_result(Replies)
@@ -32,7 +32,7 @@
 make_future/1,
 available/1
 ]).
--export_type([local_limiter/0, limiter/0]).
+-export_type([local_limiter/0, limiter/0, retry_context/1]).

 %% a token bucket limiter which may or not contains a reference to another limiter,
 %% and can be used in a client alone
@@ -51,7 +51,7 @@
 %%--------------------------------------------------------------------
 %% API
 %%--------------------------------------------------------------------
--spec new(counters:countres_ref(), index(), rate()) -> bucket_ref().
+-spec new(counters:counters_ref(), index(), rate()) -> bucket_ref().
 new(Counter, Index, Rate) ->
 #{
 counter => Counter,
@@ -32,7 +32,7 @@
 retry_list/2
 ]).

--export_type([container/0, check_result/0]).
+-export_type([limiter/0, container/0, check_result/0, limiter_type/0]).

 -type container() ::
 infinity
@@ -51,7 +51,7 @@
 -type limiter_id() :: emqx_limiter_schema:limiter_id().
 -type limiter_type() :: emqx_limiter_schema:limiter_type().
 -type limiter() :: emqx_htb_limiter:limiter().
--type retry_context() :: emqx_htb_limiter:retry_context().
+-type retry_context() :: emqx_htb_limiter:retry_context(limiter()).
 -type millisecond() :: non_neg_integer().
 -type check_result() ::
 {ok, container()}
@@ -63,6 +63,8 @@
 -export([certs_dir/2]).
 -endif.

+-export_type([listener_id/0]).
+
 -type listener_id() :: atom() | binary().
 -define(ROOT_KEY, listeners).
 -define(CONF_KEY_PATH, [?ROOT_KEY, '?', '?']).
@@ -72,7 +72,7 @@
 %% BACKW: v4.3.0
 -export([upgrade_retained_delayed_counter_type/0]).

--export_type([metric_idx/0]).
+-export_type([metric_idx/0, metric_name/0]).

 -compile({inline, [inc/1, inc/2, dec/1, dec/2]}).
 -compile({inline, [inc_recv/1, inc_sent/1]}).
@@ -438,7 +438,7 @@ update_counter(Name, Value) ->
 %% Inc received/sent metrics
 %%--------------------------------------------------------------------

--spec inc_msg(emqx_types:massage()) -> ok.
+-spec inc_msg(emqx_types:message()) -> ok.
 inc_msg(Msg) ->
 case Msg#message.qos of
 0 -> inc('messages.qos0.received');
@@ -85,6 +85,12 @@
 ]).
 -endif.

+-export_type([
+id/0,
+subscription_id/0,
+session/0
+]).
+
 %% Currently, this is the clientid. We avoid `emqx_types:clientid()' because that can be
 %% an atom, in theory (?).
 -type id() :: binary().
@@ -145,8 +151,6 @@
 (NOW_MS >= LAST_ALIVE_AT + EI))
 ).

--export_type([id/0]).
-
 %%

 -spec create(clientinfo(), conninfo(), emqx_session:conf()) ->
@@ -243,7 +247,7 @@ stats(Session) ->
 info(?STATS_KEYS, Session).

 %% Debug/troubleshooting
--spec print_session(emqx_types:client_id()) -> map() | undefined.
+-spec print_session(emqx_types:clientid()) -> map() | undefined.
 print_session(ClientId) ->
 catch ro_transaction(
 fun() ->
@@ -24,7 +24,7 @@
 dest :: emqx_persistent_session_ds:id()
 }).
 -record(ps_routeidx, {
-entry :: emqx_topic_index:key(emqx_persistent_session_ds_router:dest()),
+entry :: '$1' | emqx_topic_index:key(emqx_persistent_session_ds_router:dest()),
 unused = [] :: nil()
 }).
@@ -91,7 +91,7 @@
 -type dest() :: node() | {group(), node()}.

 -record(routeidx, {
-entry :: emqx_topic_index:key(dest()),
+entry :: '$1' | emqx_topic_index:key(dest()),
 unused = [] :: nil()
 }).
@@ -23,7 +23,7 @@
 -export([mk/1]).

 %% HOCON Schema API
--export([convert_secret/2]).
+-export([convert_secret/2, source/1]).

 %% @doc Secret value.
 -type t() :: binary().
@@ -111,6 +111,7 @@
 t/0,
 conf/0,
 conninfo/0,
+clientinfo/0,
 reply/0,
 replies/0,
 common_timer_name/0,
@@ -174,11 +175,19 @@
 %% Behaviour
 %% -------------------------------------------------------------------

+-if(?OTP_RELEASE < 26).
+-callback create(clientinfo(), conninfo(), conf()) ->
+term().
+-callback open(clientinfo(), conninfo(), conf()) ->
+term().
+-callback destroy(t() | clientinfo()) -> ok.
+-else.
 -callback create(clientinfo(), conninfo(), conf()) ->
 t().
 -callback open(clientinfo(), conninfo(), conf()) ->
 {_IsPresent :: true, t(), _ReplayContext} | false.
 -callback destroy(t() | clientinfo()) -> ok.
+-endif.

 %%--------------------------------------------------------------------
 %% Create a Session
@@ -499,7 +508,7 @@ cancel_timer(Name, Timers0) ->

 %%--------------------------------------------------------------------

--spec disconnect(clientinfo(), eqmx_types:conninfo(), t()) ->
+-spec disconnect(clientinfo(), conninfo(), t()) ->
 {idle | shutdown, t()}.
 disconnect(_ClientInfo, ConnInfo, Session) ->
 ?IMPL(Session):disconnect(Session, ConnInfo).
@@ -34,7 +34,7 @@

 %%

--spec handle_event(emqx_session:client_info(), event()) ->
+-spec handle_event(emqx_session:clientinfo(), event()) ->
 ok.
 handle_event(ClientInfo, {expired, Msg}) ->
 ok = emqx_hooks:run('delivery.dropped', [ClientInfo, Msg, expired]),
@@ -30,8 +30,8 @@
 -export([init/1]).

 -type startchild_ret() ::
-{ok, supervisor:child()}
+{ok, pid()}
-| {ok, supervisor:child(), term()}
+| {ok, pid(), term()}
 | {error, term()}.

 -define(SUP, ?MODULE).
@@ -52,7 +52,7 @@ start_child(ChildSpec) when is_map(ChildSpec) ->
 start_child(Mod, Type) ->
 start_child(child_spec(Mod, Type)).

--spec stop_child(supervisor:child_id()) -> ok | {error, term()}.
+-spec stop_child(atom()) -> ok | {error, term()}.
 stop_child(ChildId) ->
 case supervisor:terminate_child(?SUP, ChildId) of
 ok -> supervisor:delete_child(?SUP, ChildId);
@@ -44,6 +44,9 @@
 to_client_opts/2
 ]).

+%% ssl:tls_version/0 is not exported.
+-type tls_version() :: tlsv1 | 'tlsv1.1' | 'tlsv1.2' | 'tlsv1.3'.
+
 -include("logger.hrl").

 -define(IS_TRUE(Val), ((Val =:= true) orelse (Val =:= <<"true">>))).
@@ -123,8 +126,8 @@
 %% @doc Validate a given list of desired tls versions.
 %% raise an error exception if non of them are available.
 %% The input list can be a string/binary of comma separated versions.
--spec integral_versions(tls | dtls, undefined | string() | binary() | [ssl:tls_version()]) ->
+-spec integral_versions(tls | dtls, undefined | string() | binary() | [tls_version()]) ->
-[ssl:tls_version()].
+[tls_version()].
 integral_versions(Type, undefined) ->
 available_versions(Type);
 integral_versions(Type, []) ->
@@ -164,7 +167,7 @@ all_ciphers() ->
 all_ciphers(available_versions(all)).

 %% @hidden Return a list of (openssl string format) cipher suites.
--spec all_ciphers([ssl:tls_version()]) -> [string()].
+-spec all_ciphers([tls_version()]) -> [string()].
 all_ciphers(['tlsv1.3']) ->
 %% When it's only tlsv1.3 wanted, use 'exclusive' here
 %% because 'all' returns legacy cipher suites too,
@@ -212,7 +215,7 @@ do_selected_ciphers(_) ->
 ?SELECTED_CIPHERS.

 %% @doc Ensure version & cipher-suites integrity.
--spec integral_ciphers([ssl:tls_version()], binary() | string() | [string()]) -> [string()].
+-spec integral_ciphers([tls_version()], binary() | string() | [string()]) -> [string()].
 integral_ciphers(Versions, Ciphers) when Ciphers =:= [] orelse Ciphers =:= undefined ->
 %% not configured
 integral_ciphers(Versions, selected_ciphers(Versions));
@@ -20,6 +20,7 @@

 %% SSL PSK Callbacks
 -export([lookup/3]).
+-export_type([psk_identity/0]).

 -type psk_identity() :: string().
 -type psk_user_state() :: term().
@@ -30,6 +30,8 @@
 -export([get_topic/1]).
 -export([get_record/2]).

+-export_type([key/1]).
+
 -type key(ID) :: emqx_trie_search:key(ID).
 -type match(ID) :: key(ID).
 -type words() :: emqx_trie_search:words().
@@ -19,11 +19,14 @@
 -export([format/2]).
 -export([format_meta_map/1]).

+%% logger_formatter:config/0 is not exported.
+-type config() :: map().
+
 %%%-----------------------------------------------------------------
 %%% API
 -spec format(LogEvent, Config) -> unicode:chardata() when
 LogEvent :: logger:log_event(),
-Config :: logger:config().
+Config :: config().
 format(
 #{level := debug, meta := Meta = #{trace_tag := Tag}, msg := Msg},
 #{payload_encode := PEncode}
@@ -125,7 +125,7 @@ uninstall(HandlerId) ->
 name => binary(),
 type => topic | clientid | ip_address,
 id => atom(),
-filter => emqx_types:topic() | emqx_types:clienetid() | emqx_trace:ip_address(),
+filter => emqx_types:topic() | emqx_types:clientid() | emqx_trace:ip_address(),
 level => logger:level(),
 dst => file:filename() | console | unknown
 }
@@ -72,6 +72,22 @@ t_chan_info(_) ->
 conn_state := connected,
 clientinfo := ClientInfo
 } = emqx_channel:info(channel()),
+?assertMatch(
+#{
+zone := default,
+listener := {tcp, default},
+protocol := mqtt,
+peerhost := {127, 0, 0, 1},
+peerport := 3456,
+sockport := 1883,
+clientid := <<"clientid">>,
+username := <<"username">>,
+is_superuser := false,
+is_bridge := false,
+mountpoint := undefined
+},
+ClientInfo
+),
 ?assertEqual(clientinfo(), ClientInfo).

 t_chan_caps(_) ->
@@ -1063,7 +1079,8 @@ clientinfo(InitProps) ->
 listener => {tcp, default},
 protocol => mqtt,
 peerhost => {127, 0, 0, 1},
-sockport => 3456,
+peerport => 3456,
+sockport => 1883,
 clientid => <<"clientid">>,
 username => <<"username">>,
 is_superuser => false,
@@ -108,6 +108,7 @@ clientinfo() ->
 {zone, zone()},
 {protocol, protocol()},
 {peerhost, ip()},
+{peerport, port()},
 {sockport, port()},
 {clientid, clientid()},
 {username, username()},
@@ -119,7 +119,7 @@ log_to_db(Log) ->
 Audit0 = to_audit(Log),
 Audit = Audit0#?AUDIT{
 node = node(),
-created_at = erlang:system_time(microsecond)
+created_at = erlang:system_time(millisecond)
 },
 mria:dirty_write(?AUDIT, Audit).
@@ -33,7 +33,7 @@
 {<<"gte_created_at">>, timestamp},
 {<<"lte_created_at">>, timestamp},
 {<<"gte_duration_ms">>, timestamp},
-{<<"lte_duration_ms">>, timestamp}
+{<<"lte_duration_ms">>, integer}
 ]).
 -define(DISABLE_MSG, <<"Audit is disabled">>).
@@ -290,16 +290,16 @@ gen_match_spec([{http_status_code, '=:=', T} | Qs], Audit, Conn) ->
 gen_match_spec([{http_method, '=:=', T} | Qs], Audit, Conn) ->
 gen_match_spec(Qs, Audit#?AUDIT{http_method = T}, Conn);
 gen_match_spec([{created_at, Hold, T} | Qs], Audit, Conn) ->
-gen_match_spec(Qs, Audit#?AUDIT{created_at = '$1'}, [{'$1', Hold, T} | Conn]);
+gen_match_spec(Qs, Audit#?AUDIT{created_at = '$1'}, [{Hold, '$1', T} | Conn]);
 gen_match_spec([{created_at, Hold1, T1, Hold2, T2} | Qs], Audit, Conn) ->
 gen_match_spec(Qs, Audit#?AUDIT{created_at = '$1'}, [
-{'$1', Hold1, T1}, {'$1', Hold2, T2} | Conn
+{Hold1, '$1', T1}, {Hold2, '$1', T2} | Conn
 ]);
 gen_match_spec([{duration_ms, Hold, T} | Qs], Audit, Conn) ->
-gen_match_spec(Qs, Audit#?AUDIT{duration_ms = '$2'}, [{'$2', Hold, T} | Conn]);
+gen_match_spec(Qs, Audit#?AUDIT{duration_ms = '$2'}, [{Hold, '$2', T} | Conn]);
 gen_match_spec([{duration_ms, Hold1, T1, Hold2, T2} | Qs], Audit, Conn) ->
 gen_match_spec(Qs, Audit#?AUDIT{duration_ms = '$2'}, [
-{'$2', Hold1, T1}, {'$2', Hold2, T2} | Conn
+{Hold1, '$2', T1}, {Hold2, '$2', T2} | Conn
 ]).

 format(Audit) ->
@@ -109,7 +109,7 @@ t_disabled(_) ->
 Size1 = mnesia:table_info(emqx_audit, size),

 {ok, Logs} = emqx_mgmt_api_configs_SUITE:get_config("log"),
-Logs1 = emqx_utils_maps:deep_put([<<"audit">>, <<"max_filter_size">>], Logs, 100),
+Logs1 = emqx_utils_maps:deep_put([<<"audit">>, <<"max_filter_size">>], Logs, 199),
 NewLogs = emqx_utils_maps:deep_put([<<"audit">>, <<"enable">>], Logs1, false),
 {ok, _} = emqx_mgmt_api_configs_SUITE:update_config("log", NewLogs),
 {ok, GetLog1} = emqx_mgmt_api_configs_SUITE:get_config("log"),
@@ -139,6 +139,11 @@ t_disabled(_) ->
 ok.

 t_cli(_Config) ->
+Size = mnesia:table_info(emqx_audit, size),
+TimeInt = erlang:system_time(millisecond) - 10,
+Time = integer_to_list(TimeInt),
+DateStr = calendar:system_time_to_rfc3339(TimeInt, [{unit, millisecond}]),
+Date = emqx_http_lib:uri_encode(DateStr),
 ok = emqx_ctl:run_command(["conf", "show", "log"]),
 AuditPath = emqx_mgmt_api_test_util:api_path(["audit"]),
 AuthHeader = emqx_mgmt_api_test_util:auth_header_(),
@@ -160,7 +165,7 @@ t_cli(_Config) ->
 Data
 ),

-%% check filter
+%% check cli filter
 {ok, Res1} = emqx_mgmt_api_test_util:request_api(get, AuditPath, "from=cli", AuthHeader),
 #{<<"data">> := Data1} = emqx_utils_json:decode(Res1, [return_maps]),
 ?assertEqual(Data, Data1),
@@ -168,10 +173,43 @@ t_cli(_Config) ->
 get, AuditPath, "from=erlang_console", AuthHeader
 ),
 ?assertMatch(#{<<"data">> := []}, emqx_utils_json:decode(Res2, [return_maps])),
+
+%% check created_at filter
+{ok, Res3} = emqx_mgmt_api_test_util:request_api(
+get, AuditPath, "gte_created_at=" ++ Time, AuthHeader
+),
+#{<<"data">> := Data3} = emqx_utils_json:decode(Res3, [return_maps]),
+?assertEqual(1, erlang:length(Data3)),
+{ok, Res31} = emqx_mgmt_api_test_util:request_api(
+get, AuditPath, "gte_created_at=" ++ Date, AuthHeader
+),
+?assertEqual(Res3, Res31),
+{ok, Res4} = emqx_mgmt_api_test_util:request_api(
+get, AuditPath, "lte_created_at=" ++ Time, AuthHeader
+),
+#{<<"data">> := Data4} = emqx_utils_json:decode(Res4, [return_maps]),
+?assertEqual(Size, erlang:length(Data4)),
+{ok, Res41} = emqx_mgmt_api_test_util:request_api(
+get, AuditPath, "lte_created_at=" ++ Date, AuthHeader
+),
+?assertEqual(Res4, Res41),
+
+%% check duration_ms filter
+{ok, Res5} = emqx_mgmt_api_test_util:request_api(
+get, AuditPath, "gte_duration_ms=0", AuthHeader
+),
+#{<<"data">> := Data5} = emqx_utils_json:decode(Res5, [return_maps]),
+?assertEqual(Size + 1, erlang:length(Data5)),
+{ok, Res6} = emqx_mgmt_api_test_util:request_api(
+get, AuditPath, "lte_duration_ms=-1", AuthHeader
+),
+?assertMatch(#{<<"data">> := []}, emqx_utils_json:decode(Res6, [return_maps])),
 ok.

 t_max_size(_Config) ->
-{ok, _} = emqx:update_config([log, audit, max_filter_size], 1000),
+{ok, _} = emqx:update_config([log, audit, max_filter_size], 999),
+%% Make sure this process is using latest max_filter_size.
+?assertEqual(ignore, gen_server:call(emqx_audit, whatever)),
 SizeFun =
 fun() ->
 AuditPath = emqx_mgmt_api_test_util:api_path(["audit"]),
@@ -186,11 +224,17 @@ t_max_size(_Config) ->
 fun(_) ->
 ok = emqx_ctl:run_command(["conf", "show", "log"])
 end,
-lists:duplicate(100, 1)
+lists:duplicate(110, 1)
 ),
-timer:sleep(110),
+_ = mnesia:dump_log(),
+LogCount = wait_for_dirty_write_log_done(1500),
 Size1 = SizeFun(),
-?assert(Size1 - InitSize >= 100, {Size1, InitSize}),
+?assert(Size1 - InitSize >= 100, #{
+api => Size1,
+init => InitSize,
+log_size => LogCount,
+config => emqx:get_config([log, audit, max_filter_size])
+}),
 {ok, _} = emqx:update_config([log, audit, max_filter_size], 10),
 %% wait for clean_expired
 timer:sleep(250),
@@ -246,3 +290,19 @@ kickout_clients() ->
 {ok, Clients2} = emqx_mgmt_api_test_util:request_api(get, ClientsPath),
 ClientsResponse2 = emqx_utils_json:decode(Clients2, [return_maps]),
 ?assertMatch(#{<<"data">> := []}, ClientsResponse2).
+
+wait_for_dirty_write_log_done(MaxMs) ->
+Size = mnesia:table_info(emqx_audit, size),
+wait_for_dirty_write_log_done(Size, MaxMs).
+
+wait_for_dirty_write_log_done(Size, RemainMs) when RemainMs =< 0 -> Size;
+wait_for_dirty_write_log_done(Prev, RemainMs) ->
+SleepMs = 100,
+ct:sleep(SleepMs),
+case mnesia:table_info(emqx_audit, size) of
+Prev ->
+ct:sleep(SleepMs * 2),
+Prev;
+New ->
+wait_for_dirty_write_log_done(New, RemainMs - SleepMs)
+end.
@@ -201,7 +201,8 @@ gen_salt(#{name := Other}) when Other =/= plain, Other =/= bcrypt ->
 <<X:128/big-unsigned-integer>> = crypto:strong_rand_bytes(16),
 iolist_to_binary(io_lib:format("~32.16.0b", [X])).

--spec hash(algorithm_rw(), emqx_passwd:password()) -> {emqx_passwd:hash(), emqx_passwd:salt()}.
+-spec hash(algorithm_rw(), emqx_passwd:password()) ->
+{emqx_passwd:password_hash(), emqx_passwd:salt()}.
 hash(#{name := bcrypt, salt_rounds := _} = Algorithm, Password) ->
 Salt0 = gen_salt(Algorithm),
 Hash = emqx_passwd:hash({bcrypt, Salt0}, Password),
@@ -231,7 +232,7 @@ hash(#{name := Other, salt_position := SaltPosition} = Algorithm, Password) ->
 -spec check_password(
 algorithm(),
 emqx_passwd:salt(),
-emqx_passwd:hash(),
+emqx_passwd:password_hash(),
 emqx_passwd:password()
 ) -> boolean().
 check_password(#{name := bcrypt}, _Salt, PasswordHash, Password) ->
@@ -281,12 +281,12 @@ parse_url(Url) ->
 end.

 convert_headers(Headers) ->
-maps:merge(default_headers(), transform_header_name(Headers)).
+transform_header_name(Headers).

 convert_headers_no_content_type(Headers) ->
 maps:without(
 [<<"content-type">>],
-maps:merge(default_headers_no_content_type(), transform_header_name(Headers))
+transform_header_name(Headers)
 ).

 default_headers() ->
@@ -31,7 +31,7 @@ introduced_in() ->
 "5.0.0".

 -spec lookup_from_all_nodes([node()], atom(), binary()) ->
-emqx_rpc:erpc_multicall().
+emqx_rpc:erpc_multicall(term()).
 lookup_from_all_nodes(Nodes, ChainName, AuthenticatorID) ->
 erpc:multicall(
 Nodes, emqx_authn_api, lookup_from_local_node, [ChainName, AuthenticatorID], ?TIMEOUT
@@ -16,10 +16,6 @@

 -module(emqx_authz_rule).

--include_lib("emqx/include/logger.hrl").
--include_lib("emqx/include/emqx_placeholder.hrl").
--include("emqx_authz.hrl").
-
 -ifdef(TEST).
 -compile(export_all).
 -compile(nowarn_export_all).
@@ -29,9 +25,16 @@
 -export([
 match/4,
 matches/4,
-compile/1
+compile/1,
+compile/4
 ]).

+-export_type([action/0, action_precompile/0]).
+
+-include_lib("emqx/include/logger.hrl").
+-include_lib("emqx/include/emqx_placeholder.hrl").
+-include("emqx_authz.hrl").
+
 -type permission() :: allow | deny.

 -type who_condition() ::
@@ -73,8 +76,24 @@
 topic_condition/0
 ]).

+-type action_precompile() ::
+subscribe
+| publish
+| {subscribe, list()}
+| {publish, list()}
+| all.
+
+-type topic_filter() :: emqx_types:topic().
+
+-type rule_precompile() :: {permission(), who_condition(), action_precompile(), [topic_filter()]}.
+
 -define(IS_PERMISSION(Permission), (Permission =:= allow orelse Permission =:= deny)).

+-spec compile(permission(), who_condition(), action_precompile(), [topic_filter()]) -> rule().
+compile(Permission, Who, Action, TopicFilters) ->
+compile({Permission, Who, Action, TopicFilters}).
+
+-spec compile({permission(), all} | rule_precompile()) -> rule().
 compile({Permission, all}) when
 ?IS_PERMISSION(Permission)
 ->
@@ -31,6 +31,6 @@ introduced_in() ->
 "5.0.0".

 -spec lookup_from_all_nodes([node()], atom()) ->
-emqx_rpc:erpc_multicall().
+emqx_rpc:erpc_multicall(term()).
 lookup_from_all_nodes(Nodes, Type) ->
 erpc:multicall(Nodes, emqx_authz_api_sources, lookup_from_local_node, [Type], ?TIMEOUT).
@@ -52,7 +52,7 @@ authenticate(
 {ok, #{result := ok}} ->
 {ok, #{is_superuser => false}};
 {ok, #{result := 'invalidCredentials'}} ->
-?TRACE_AUTHN_PROVIDER(error, "ldap_bind_failed", #{
+?TRACE_AUTHN_PROVIDER(info, "ldap_bind_failed", #{
 resource => ResourceId,
 reason => 'invalidCredentials'
 }),
@@ -55,7 +55,7 @@ fields(ldap) ->
 [
 {method,
 ?HOCON(
-hoconsc:union([?R_REF(hash_method), ?R_REF(bind_method)]),
+hoconsc:union(fun method_union_member_selector/1),
 #{desc => ?DESC(method)}
 )}
 ];
@@ -88,6 +88,26 @@ desc(bind_method) ->
 desc(_) ->
 undefined.

+method_union_member_selector(all_union_members) ->
+[?R_REF(hash_method), ?R_REF(bind_method)];
+method_union_member_selector({value, Val}) ->
+Val2 =
+case is_map(Val) of
+true -> emqx_utils_maps:binary_key_map(Val);
+false -> Val
+end,
+case Val2 of
+#{<<"type">> := <<"bind">>} ->
+[?R_REF(bind_method)];
+#{<<"type">> := <<"hash">>} ->
+[?R_REF(hash_method)];
+_ ->
+throw(#{
+field_name => method,
+expected => [bind_method, hash_method]
+})
+end.
+
 method_type(Type) ->
 ?HOCON(?ENUM([Type]), #{desc => ?DESC(?FUNCTION_NAME), default => Type}).
@@ -1,14 +1,15 @@
 %% -*- mode: erlang -*-
 {application, emqx_auth_mnesia, [
 {description, "EMQX Buitl-in Database Authentication and Authorization"},
-{vsn, "0.1.1"},
+{vsn, "0.1.2"},
 {registered, []},
 {mod, {emqx_auth_mnesia_app, []}},
 {applications, [
 kernel,
 stdlib,
 emqx,
-emqx_auth
+emqx_auth,
+esasl
 ]},
 {env, []},
 {modules, []},
@@ -32,7 +32,9 @@
 -type clientid() :: {clientid, binary()}.
 -type who() :: username() | clientid() | all.

--type rule() :: {emqx_authz_rule:permission(), emqx_authz_rule:action(), emqx_types:topic()}.
+-type rule() :: {
+emqx_authz_rule:permission(), emqx_authz_rule:action_precompile(), emqx_types:topic()
+}.
 -type rules() :: [rule()].

 -record(emqx_acl, {
@@ -223,7 +225,7 @@ do_get_rules(Key) ->
 do_authorize(_Client, _PubSub, _Topic, []) ->
 nomatch;
 do_authorize(Client, PubSub, Topic, [{Permission, Action, TopicFilter} | Tail]) ->
-Rule = emqx_authz_rule:compile({Permission, all, Action, [TopicFilter]}),
+Rule = emqx_authz_rule:compile(Permission, all, Action, [TopicFilter]),
 case emqx_authz_rule:match(Client, PubSub, Topic, Rule) of
 {matched, Permission} -> {matched, Permission};
 nomatch -> do_authorize(Client, PubSub, Topic, Tail)
@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_auth_mysql, [
 {description, "EMQX MySQL Authentication and Authorization"},
-{vsn, "0.1.1"},
+{vsn, "0.1.2"},
 {registered, []},
 {mod, {emqx_auth_mysql_app, []}},
 {applications, [
@@ -55,8 +55,7 @@ fields(mysql) ->
 {password_hash_algorithm, fun emqx_authn_password_hashing:type_ro/1},
 {query, fun query/1},
 {query_timeout, fun query_timeout/1}
-] ++ emqx_authn_schema:common_fields() ++
-proplists:delete(prepare_statement, emqx_mysql:fields(config)).
+] ++ emqx_authn_schema:common_fields() ++ emqx_mysql:fields(config).

 desc(mysql) ->
 ?DESC(mysql);
@@ -37,6 +37,7 @@ type() -> ?AUTHZ_TYPE.
 fields(mysql) ->
 emqx_authz_schema:authz_common_fields(?AUTHZ_TYPE) ++
 emqx_mysql:fields(config) ++
+emqx_connector_schema_lib:prepare_statement_fields() ++
 [{query, query()}].

 desc(mysql) ->
@@ -26,8 +26,8 @@
 bridge_v1_type_to_action_type/1,
 is_action_type/1,
 registered_schema_modules/0,
+connector_action_config_to_bridge_v1_config/2,
 connector_action_config_to_bridge_v1_config/3,
-has_custom_connector_action_config_to_bridge_v1_config/1,
 bridge_v1_config_to_connector_config/2,
 has_custom_bridge_v1_config_to_connector_config/1,
 bridge_v1_config_to_action_config/3,
@@ -79,6 +79,7 @@ hard_coded_action_info_modules_ee() ->
 emqx_bridge_kafka_action_info,
 emqx_bridge_matrix_action_info,
 emqx_bridge_mongodb_action_info,
+emqx_bridge_mysql_action_info,
 emqx_bridge_pgsql_action_info,
 emqx_bridge_syskeeper_action_info,
 emqx_bridge_timescale_action_info,
@@ -161,14 +162,24 @@ registered_schema_modules() ->
 Schemas = maps:get(action_type_to_schema_module, InfoMap),
 maps:to_list(Schemas).

-has_custom_connector_action_config_to_bridge_v1_config(ActionOrBridgeType) ->
-Module = get_action_info_module(ActionOrBridgeType),
-erlang:function_exported(Module, connector_action_config_to_bridge_v1_config, 2).
-
 connector_action_config_to_bridge_v1_config(ActionOrBridgeType, ConnectorConfig, ActionConfig) ->
 Module = get_action_info_module(ActionOrBridgeType),
-%% should only be called if defined
-Module:connector_action_config_to_bridge_v1_config(ConnectorConfig, ActionConfig).
+case erlang:function_exported(Module, connector_action_config_to_bridge_v1_config, 2) of
+true ->
+Module:connector_action_config_to_bridge_v1_config(ConnectorConfig, ActionConfig);
+false ->
+connector_action_config_to_bridge_v1_config(ConnectorConfig, ActionConfig)
+end.
+
+connector_action_config_to_bridge_v1_config(ConnectorConfig, ActionConfig) ->
+Merged = emqx_utils_maps:deep_merge(
+maps:without(
+[<<"connector">>],
+emqx_utils_maps:unindent(<<"parameters">>, ActionConfig)
+),
+emqx_utils_maps:unindent(<<"parameters">>, ConnectorConfig)
+),
+maps:without([<<"description">>], Merged).

 has_custom_bridge_v1_config_to_connector_config(ActionOrBridgeType) ->
 Module = get_action_info_module(ActionOrBridgeType),
@@ -913,7 +913,7 @@ format_resource(
 redact(
 maps:merge(
 RawConfFull#{
-type => downgrade_type(Type, RawConf),
+type => downgrade_type(Type, emqx_bridge_lib:get_conf(Type, BridgeName)),
 name => maps:get(<<"name">>, RawConf, BridgeName),
 node => Node
 },
@@ -18,7 +18,8 @@
 -export([
 maybe_withdraw_rule_action/3,
 upgrade_type/1,
-downgrade_type/2
+downgrade_type/2,
+get_conf/2
 ]).

 %% @doc A bridge can be used as a rule action.
@@ -1112,40 +1112,25 @@ bridge_v1_lookup_and_transform(ActionType, Name) ->
 not_bridge_v1_compatible_error() ->
 {error, not_bridge_v1_compatible}.

+connector_raw_config(Connector, ConnectorType) ->
+get_raw_with_defaults(Connector, ConnectorType, <<"connectors">>, emqx_connector_schema).
+
+action_raw_config(Action, ActionType) ->
+get_raw_with_defaults(Action, ActionType, <<"actions">>, emqx_bridge_v2_schema).
+
+get_raw_with_defaults(Config, Type, TopLevelConf, SchemaModule) ->
+RawConfig = maps:get(raw_config, Config),
+fill_defaults(Type, RawConfig, TopLevelConf, SchemaModule).
+
 bridge_v1_lookup_and_transform_helper(
 BridgeV1Type, BridgeName, ActionType, Action, ConnectorType, Connector
 ) ->
-ConnectorRawConfig1 = maps:get(raw_config, Connector),
-ConnectorRawConfig2 = fill_defaults(
-ConnectorType,
-ConnectorRawConfig1,
-<<"connectors">>,
-emqx_connector_schema
+ConnectorRawConfig = connector_raw_config(Connector, ConnectorType),
+ActionRawConfig = action_raw_config(Action, ActionType),
+BridgeV1Config = emqx_action_info:connector_action_config_to_bridge_v1_config(
+BridgeV1Type, ConnectorRawConfig, ActionRawConfig
 ),
-ActionRawConfig1 = maps:get(raw_config, Action),
-ActionRawConfig2 = fill_defaults(
-ActionType,
-ActionRawConfig1,
-<<"actions">>,
-emqx_bridge_v2_schema
-),
-BridgeV1ConfigFinal =
-case
-emqx_action_info:has_custom_connector_action_config_to_bridge_v1_config(BridgeV1Type)
-of
-false ->
-BridgeV1Config1 = maps:remove(<<"connector">>, ActionRawConfig2),
-%% Move parameters to the top level
-ParametersMap = maps:get(<<"parameters">>, BridgeV1Config1, #{}),
-BridgeV1Config2 = maps:remove(<<"parameters">>, BridgeV1Config1),
-BridgeV1Config3 = emqx_utils_maps:deep_merge(BridgeV1Config2, ParametersMap),
-emqx_utils_maps:deep_merge(ConnectorRawConfig2, BridgeV1Config3);
-true ->
-emqx_action_info:connector_action_config_to_bridge_v1_config(
-BridgeV1Type, ConnectorRawConfig2, ActionRawConfig2
-)
-end,
-BridgeV1Tmp = maps:put(raw_config, BridgeV1ConfigFinal, Action),
+BridgeV1Tmp = maps:put(raw_config, BridgeV1Config, Action),
 BridgeV1 = maps:remove(status, BridgeV1Tmp),
 BridgeV2Status = maps:get(status, Action, undefined),
 BridgeV2Error = maps:get(error, Action, undefined),
@@ -69,7 +69,7 @@ stop_bridge_to_node(Node, BridgeType, BridgeName) ->
 ).

 -spec restart_bridges_to_all_nodes([node()], key(), key()) ->
-emqx_rpc:erpc_multicall().
+emqx_rpc:erpc_multicall(ok).
 restart_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
 erpc:multicall(
 Nodes,
@@ -80,7 +80,7 @@ restart_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
 ).

 -spec stop_bridges_to_all_nodes([node()], key(), key()) ->
-emqx_rpc:erpc_multicall().
+emqx_rpc:erpc_multicall(ok).
 stop_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
 erpc:multicall(
 Nodes,
@@ -91,7 +91,7 @@ stop_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
 ).

 -spec lookup_from_all_nodes([node()], key(), key()) ->
-emqx_rpc:erpc_multicall().
+emqx_rpc:erpc_multicall(term()).
 lookup_from_all_nodes(Nodes, BridgeType, BridgeName) ->
 erpc:multicall(
 Nodes,
@@ -82,7 +82,7 @@ stop_bridge_to_node(Node, BridgeType, BridgeName) ->
 ).

 -spec restart_bridges_to_all_nodes([node()], key(), key()) ->
-emqx_rpc:erpc_multicall().
+emqx_rpc:erpc_multicall(ok).
 restart_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
 erpc:multicall(
 Nodes,
@@ -93,7 +93,7 @@ restart_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
 ).

 -spec start_bridges_to_all_nodes([node()], key(), key()) ->
-emqx_rpc:erpc_multicall().
+emqx_rpc:erpc_multicall(ok).
 start_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
 erpc:multicall(
 Nodes,
@@ -104,7 +104,7 @@ start_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
 ).

 -spec stop_bridges_to_all_nodes([node()], key(), key()) ->
-emqx_rpc:erpc_multicall().
+emqx_rpc:erpc_multicall(ok).
 stop_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
 erpc:multicall(
 Nodes,
@@ -115,7 +115,7 @@ stop_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
 ).

 -spec lookup_from_all_nodes([node()], key(), key()) ->
-emqx_rpc:erpc_multicall().
+emqx_rpc:erpc_multicall(term()).
 lookup_from_all_nodes(Nodes, BridgeType, BridgeName) ->
 erpc:multicall(
 Nodes,
@@ -88,7 +88,7 @@ stop_bridge_to_node(Node, BridgeType, BridgeName) ->
 ).

 -spec restart_bridges_to_all_nodes([node()], key(), key()) ->
-emqx_rpc:erpc_multicall().
+emqx_rpc:erpc_multicall(ok).
 restart_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
 erpc:multicall(
 Nodes,
@@ -99,7 +99,7 @@ restart_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
 ).

 -spec start_bridges_to_all_nodes([node()], key(), key()) ->
-emqx_rpc:erpc_multicall().
+emqx_rpc:erpc_multicall(ok).
 start_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
 erpc:multicall(
 Nodes,
@@ -110,7 +110,7 @@ start_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
 ).

 -spec stop_bridges_to_all_nodes([node()], key(), key()) ->
-emqx_rpc:erpc_multicall().
+emqx_rpc:erpc_multicall(ok).
 stop_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
 erpc:multicall(
 Nodes,
@@ -121,7 +121,7 @@ stop_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
 ).

 -spec lookup_from_all_nodes([node()], key(), key()) ->
-emqx_rpc:erpc_multicall().
+emqx_rpc:erpc_multicall(term()).
 lookup_from_all_nodes(Nodes, BridgeType, BridgeName) ->
 erpc:multicall(
 Nodes,
@@ -80,7 +80,7 @@ stop_bridge_to_node(Node, BridgeType, BridgeName) ->
 ).

 -spec restart_bridges_to_all_nodes([node()], key(), key()) ->
-emqx_rpc:erpc_multicall().
+emqx_rpc:erpc_multicall(ok).
 restart_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
 erpc:multicall(
 Nodes,
@ -91,7 +91,7 @@ restart_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||||
).
|
).
|
||||||
|
|
||||||
-spec start_bridges_to_all_nodes([node()], key(), key()) ->
|
-spec start_bridges_to_all_nodes([node()], key(), key()) ->
|
||||||
emqx_rpc:erpc_multicall().
|
emqx_rpc:erpc_multicall(ok).
|
||||||
start_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
|
start_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||||
erpc:multicall(
|
erpc:multicall(
|
||||||
Nodes,
|
Nodes,
|
||||||
|
@ -102,7 +102,7 @@ start_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||||
).
|
).
|
||||||
|
|
||||||
-spec stop_bridges_to_all_nodes([node()], key(), key()) ->
|
-spec stop_bridges_to_all_nodes([node()], key(), key()) ->
|
||||||
emqx_rpc:erpc_multicall().
|
emqx_rpc:erpc_multicall(ok).
|
||||||
stop_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
|
stop_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||||
erpc:multicall(
|
erpc:multicall(
|
||||||
Nodes,
|
Nodes,
|
||||||
|
@ -113,7 +113,7 @@ stop_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||||
).
|
).
|
||||||
|
|
||||||
-spec lookup_from_all_nodes([node()], key(), key()) ->
|
-spec lookup_from_all_nodes([node()], key(), key()) ->
|
||||||
emqx_rpc:erpc_multicall().
|
emqx_rpc:erpc_multicall(term()).
|
||||||
lookup_from_all_nodes(Nodes, BridgeType, BridgeName) ->
|
lookup_from_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||||
erpc:multicall(
|
erpc:multicall(
|
||||||
Nodes,
|
Nodes,
|
||||||
|
|
|
@ -86,7 +86,7 @@ stop_bridge_to_node(Node, BridgeType, BridgeName) ->
|
||||||
).
|
).
|
||||||
|
|
||||||
-spec restart_bridges_to_all_nodes([node()], key(), key()) ->
|
-spec restart_bridges_to_all_nodes([node()], key(), key()) ->
|
||||||
emqx_rpc:erpc_multicall().
|
emqx_rpc:erpc_multicall(ok).
|
||||||
restart_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
|
restart_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||||
erpc:multicall(
|
erpc:multicall(
|
||||||
Nodes,
|
Nodes,
|
||||||
|
@ -97,7 +97,7 @@ restart_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||||
).
|
).
|
||||||
|
|
||||||
-spec start_bridges_to_all_nodes([node()], key(), key()) ->
|
-spec start_bridges_to_all_nodes([node()], key(), key()) ->
|
||||||
emqx_rpc:erpc_multicall().
|
emqx_rpc:erpc_multicall(ok).
|
||||||
start_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
|
start_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||||
erpc:multicall(
|
erpc:multicall(
|
||||||
Nodes,
|
Nodes,
|
||||||
|
@ -108,7 +108,7 @@ start_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||||
).
|
).
|
||||||
|
|
||||||
-spec stop_bridges_to_all_nodes([node()], key(), key()) ->
|
-spec stop_bridges_to_all_nodes([node()], key(), key()) ->
|
||||||
emqx_rpc:erpc_multicall().
|
emqx_rpc:erpc_multicall(ok).
|
||||||
stop_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
|
stop_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||||
erpc:multicall(
|
erpc:multicall(
|
||||||
Nodes,
|
Nodes,
|
||||||
|
@ -119,7 +119,7 @@ stop_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||||
).
|
).
|
||||||
|
|
||||||
-spec lookup_from_all_nodes([node()], key(), key()) ->
|
-spec lookup_from_all_nodes([node()], key(), key()) ->
|
||||||
emqx_rpc:erpc_multicall().
|
emqx_rpc:erpc_multicall(term()).
|
||||||
lookup_from_all_nodes(Nodes, BridgeType, BridgeName) ->
|
lookup_from_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||||
erpc:multicall(
|
erpc:multicall(
|
||||||
Nodes,
|
Nodes,
|
||||||
|
@ -147,7 +147,7 @@ v2_list_bridges_on_nodes(Nodes) ->
|
||||||
erpc:multicall(Nodes, emqx_bridge_v2, list, [], ?TIMEOUT).
|
erpc:multicall(Nodes, emqx_bridge_v2, list, [], ?TIMEOUT).
|
||||||
|
|
||||||
-spec v2_lookup_from_all_nodes([node()], key(), key()) ->
|
-spec v2_lookup_from_all_nodes([node()], key(), key()) ->
|
||||||
emqx_rpc:erpc_multicall().
|
emqx_rpc:erpc_multicall(term()).
|
||||||
v2_lookup_from_all_nodes(Nodes, BridgeType, BridgeName) ->
|
v2_lookup_from_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||||
erpc:multicall(
|
erpc:multicall(
|
||||||
Nodes,
|
Nodes,
|
||||||
|
@ -158,7 +158,7 @@ v2_lookup_from_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||||
).
|
).
|
||||||
|
|
||||||
-spec v2_get_metrics_from_all_nodes([node()], key(), key()) ->
|
-spec v2_get_metrics_from_all_nodes([node()], key(), key()) ->
|
||||||
emqx_rpc:erpc_multicall().
|
emqx_rpc:erpc_multicall(term()).
|
||||||
v2_get_metrics_from_all_nodes(Nodes, ActionType, ActionName) ->
|
v2_get_metrics_from_all_nodes(Nodes, ActionType, ActionName) ->
|
||||||
erpc:multicall(
|
erpc:multicall(
|
||||||
Nodes,
|
Nodes,
|
||||||
|
@ -169,7 +169,7 @@ v2_get_metrics_from_all_nodes(Nodes, ActionType, ActionName) ->
|
||||||
).
|
).
|
||||||
|
|
||||||
-spec v2_start_bridge_to_all_nodes([node()], key(), key()) ->
|
-spec v2_start_bridge_to_all_nodes([node()], key(), key()) ->
|
||||||
emqx_rpc:erpc_multicall().
|
emqx_rpc:erpc_multicall(ok).
|
||||||
v2_start_bridge_to_all_nodes(Nodes, BridgeType, BridgeName) ->
|
v2_start_bridge_to_all_nodes(Nodes, BridgeType, BridgeName) ->
|
||||||
erpc:multicall(
|
erpc:multicall(
|
||||||
Nodes,
|
Nodes,
|
||||||
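The narrowed specs above only tighten the documented per-node result type; a minimal sketch (not part of this change set) of how a caller might split such `erpc:multicall/5` results, where each entry is assumed to be `{ok, Value}` on success and an error tuple otherwise:

    1> lists:partition(fun({ok, _}) -> true; (_) -> false end,
           [{ok, ok}, {error, {erpc, noconnection}}]).
    {[{ok,ok}],[{error,{erpc,noconnection}}]}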
|
|
|
@ -124,7 +124,10 @@ common_bridge_fields() ->
|
||||||
desc => ?DESC("desc_enable"),
|
desc => ?DESC("desc_enable"),
|
||||||
default => true
|
default => true
|
||||||
}
|
}
|
||||||
)}
|
)},
|
||||||
|
%% Create a v2 connector, then use the v1 /bridges_probe API to test the connector
|
||||||
|
%% /bridges_probe should pass through v2 connector's description.
|
||||||
|
{description, emqx_schema:description_schema()}
|
||||||
].
|
].
|
||||||
|
|
||||||
status_fields() ->
|
status_fields() ->
|
||||||
|
|
|
@ -31,7 +31,8 @@
|
||||||
get_response/0,
|
get_response/0,
|
||||||
put_request/0,
|
put_request/0,
|
||||||
post_request/0,
|
post_request/0,
|
||||||
examples/1
|
examples/1,
|
||||||
|
action_values/4
|
||||||
]).
|
]).
|
||||||
|
|
||||||
%% Exported for mocking
|
%% Exported for mocking
|
||||||
|
@ -47,7 +48,8 @@
|
||||||
-export([
|
-export([
|
||||||
make_producer_action_schema/1,
|
make_producer_action_schema/1,
|
||||||
make_consumer_action_schema/1,
|
make_consumer_action_schema/1,
|
||||||
top_level_common_action_keys/0
|
top_level_common_action_keys/0,
|
||||||
|
project_to_actions_resource_opts/1
|
||||||
]).
|
]).
|
||||||
|
|
||||||
-export_type([action_type/0]).
|
-export_type([action_type/0]).
|
||||||
|
@ -103,6 +105,54 @@ bridge_api_union(Refs) ->
|
||||||
end
|
end
|
||||||
end.
|
end.
|
||||||
|
|
||||||
|
-type http_method() :: get | post | put.
|
||||||
|
-type schema_example_map() :: #{atom() => term()}.
|
||||||
|
|
||||||
|
-spec action_values(http_method(), atom(), atom(), schema_example_map()) -> schema_example_map().
|
||||||
|
action_values(Method, ActionType, ConnectorType, ActionValues) ->
|
||||||
|
ActionTypeBin = atom_to_binary(ActionType),
|
||||||
|
ConnectorTypeBin = atom_to_binary(ConnectorType),
|
||||||
|
lists:foldl(
|
||||||
|
fun(M1, M2) ->
|
||||||
|
maps:merge(M1, M2)
|
||||||
|
end,
|
||||||
|
#{
|
||||||
|
enable => true,
|
||||||
|
description => <<"My example ", ActionTypeBin/binary, " action">>,
|
||||||
|
connector => <<ConnectorTypeBin/binary, "_connector">>,
|
||||||
|
resource_opts => #{
|
||||||
|
health_check_interval => "30s"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
[
|
||||||
|
ActionValues,
|
||||||
|
method_values(Method, ActionType)
|
||||||
|
]
|
||||||
|
).
|
||||||
|
|
||||||
|
-spec method_values(http_method(), atom()) -> schema_example_map().
|
||||||
|
method_values(post, Type) ->
|
||||||
|
TypeBin = atom_to_binary(Type),
|
||||||
|
#{
|
||||||
|
name => <<TypeBin/binary, "_action">>,
|
||||||
|
type => TypeBin
|
||||||
|
};
|
||||||
|
method_values(get, Type) ->
|
||||||
|
maps:merge(
|
||||||
|
method_values(post, Type),
|
||||||
|
#{
|
||||||
|
status => <<"connected">>,
|
||||||
|
node_status => [
|
||||||
|
#{
|
||||||
|
node => <<"emqx@localhost">>,
|
||||||
|
status => <<"connected">>
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
);
|
||||||
|
method_values(put, _Type) ->
|
||||||
|
#{}.
|
||||||
|
|
||||||
%%======================================================================================
|
%%======================================================================================
|
||||||
%% HOCON Schema Callbacks
|
%% HOCON Schema Callbacks
|
||||||
%%======================================================================================
|
%%======================================================================================
|
||||||
|
@ -128,7 +178,7 @@ roots() ->
|
||||||
fields(actions) ->
|
fields(actions) ->
|
||||||
registered_schema_fields();
|
registered_schema_fields();
|
||||||
fields(resource_opts) ->
|
fields(resource_opts) ->
|
||||||
emqx_resource_schema:create_opts(_Overrides = []).
|
resource_opts_fields(_Overrides = []).
|
||||||
|
|
||||||
registered_schema_fields() ->
|
registered_schema_fields() ->
|
||||||
[
|
[
|
||||||
|
@ -154,8 +204,8 @@ types_sc() ->
|
||||||
resource_opts_fields() ->
|
resource_opts_fields() ->
|
||||||
resource_opts_fields(_Overrides = []).
|
resource_opts_fields(_Overrides = []).
|
||||||
|
|
||||||
resource_opts_fields(Overrides) ->
|
common_resource_opts_subfields() ->
|
||||||
ActionROFields = [
|
[
|
||||||
batch_size,
|
batch_size,
|
||||||
batch_time,
|
batch_time,
|
||||||
buffer_mode,
|
buffer_mode,
|
||||||
|
@ -167,10 +217,14 @@ resource_opts_fields(Overrides) ->
|
||||||
query_mode,
|
query_mode,
|
||||||
request_ttl,
|
request_ttl,
|
||||||
resume_interval,
|
resume_interval,
|
||||||
start_after_created,
|
|
||||||
start_timeout,
|
|
||||||
worker_pool_size
|
worker_pool_size
|
||||||
],
|
].
|
||||||
|
|
||||||
|
common_resource_opts_subfields_bin() ->
|
||||||
|
lists:map(fun atom_to_binary/1, common_resource_opts_subfields()).
|
||||||
|
|
||||||
|
resource_opts_fields(Overrides) ->
|
||||||
|
ActionROFields = common_resource_opts_subfields(),
|
||||||
lists:filter(
|
lists:filter(
|
||||||
fun({Key, _Sc}) -> lists:member(Key, ActionROFields) end,
|
fun({Key, _Sc}) -> lists:member(Key, ActionROFields) end,
|
||||||
emqx_resource_schema:create_opts(Overrides)
|
emqx_resource_schema:create_opts(Overrides)
|
||||||
|
@ -225,6 +279,10 @@ make_consumer_action_schema(ActionParametersRef) ->
|
||||||
})}
|
})}
|
||||||
].
|
].
|
||||||
|
|
||||||
|
project_to_actions_resource_opts(OldResourceOpts) ->
|
||||||
|
Subfields = common_resource_opts_subfields_bin(),
|
||||||
|
maps:with(Subfields, OldResourceOpts).
|
||||||
|
|
||||||
-ifdef(TEST).
|
-ifdef(TEST).
|
||||||
-include_lib("hocon/include/hocon_types.hrl").
|
-include_lib("hocon/include/hocon_types.hrl").
|
||||||
schema_homogeneous_test() ->
|
schema_homogeneous_test() ->
|
||||||
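Illustrative usage of the new `project_to_actions_resource_opts/1` above: it is a `maps:with/2` over the action resource-opts subset, so keys outside that subset, such as `start_after_created` (dropped from the subfield list in this change), are filtered out. The input map below is made up.

    1> emqx_bridge_v2_schema:project_to_actions_resource_opts(
           #{<<"batch_size">> => 10, <<"start_after_created">> => true}).
    #{<<"batch_size">> => 10}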
|
|
|
@ -1047,6 +1047,13 @@ t_bridges_probe(Config) ->
|
||||||
?HTTP_BRIDGE(URL),
|
?HTTP_BRIDGE(URL),
|
||||||
Config
|
Config
|
||||||
),
|
),
|
||||||
|
%% Probing with a description set is also ok.
|
||||||
|
{ok, 204, <<>>} = request(
|
||||||
|
post,
|
||||||
|
uri(["bridges_probe"]),
|
||||||
|
(?HTTP_BRIDGE(URL))#{<<"description">> => <<"Test Description">>},
|
||||||
|
Config
|
||||||
|
),
|
||||||
|
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
{ok, 400, #{
|
{ok, 400, #{
|
||||||
|
|
|
@ -21,6 +21,7 @@
|
||||||
-include_lib("eunit/include/eunit.hrl").
|
-include_lib("eunit/include/eunit.hrl").
|
||||||
-include_lib("common_test/include/ct.hrl").
|
-include_lib("common_test/include/ct.hrl").
|
||||||
-include_lib("emqx_resource/include/emqx_resource.hrl").
|
-include_lib("emqx_resource/include/emqx_resource.hrl").
|
||||||
|
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
|
||||||
|
|
||||||
-import(emqx_common_test_helpers, [on_exit/1]).
|
-import(emqx_common_test_helpers, [on_exit/1]).
|
||||||
|
|
||||||
|
@ -343,7 +344,7 @@ t_send_message_through_rule(_) ->
|
||||||
BridgeName = my_test_bridge,
|
BridgeName = my_test_bridge,
|
||||||
{ok, _} = emqx_bridge_v2:create(bridge_type(), BridgeName, bridge_config()),
|
{ok, _} = emqx_bridge_v2:create(bridge_type(), BridgeName, bridge_config()),
|
||||||
%% Create a rule to send message to the bridge
|
%% Create a rule to send message to the bridge
|
||||||
{ok, _} = emqx_rule_engine:create_rule(
|
{ok, #{id := RuleId}} = emqx_rule_engine:create_rule(
|
||||||
#{
|
#{
|
||||||
sql => <<"select * from \"t/a\"">>,
|
sql => <<"select * from \"t/a\"">>,
|
||||||
id => atom_to_binary(?FUNCTION_NAME),
|
id => atom_to_binary(?FUNCTION_NAME),
|
||||||
|
@ -357,6 +358,7 @@ t_send_message_through_rule(_) ->
|
||||||
description => <<"bridge_v2 test rule">>
|
description => <<"bridge_v2 test rule">>
|
||||||
}
|
}
|
||||||
),
|
),
|
||||||
|
on_exit(fun() -> emqx_rule_engine:delete_rule(RuleId) end),
|
||||||
%% Register name for this process
|
%% Register name for this process
|
||||||
register(registered_process_name(), self()),
|
register(registered_process_name(), self()),
|
||||||
%% Send message to the topic
|
%% Send message to the topic
|
||||||
|
@ -371,7 +373,6 @@ t_send_message_through_rule(_) ->
|
||||||
ct:fail("Failed to receive message")
|
ct:fail("Failed to receive message")
|
||||||
end,
|
end,
|
||||||
unregister(registered_process_name()),
|
unregister(registered_process_name()),
|
||||||
ok = emqx_rule_engine:delete_rule(atom_to_binary(?FUNCTION_NAME)),
|
|
||||||
ok = emqx_bridge_v2:remove(bridge_type(), BridgeName),
|
ok = emqx_bridge_v2:remove(bridge_type(), BridgeName),
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
|
@ -894,6 +895,159 @@ t_lookup_status_when_connecting(_Config) ->
|
||||||
?assertMatch(#{status := ?status_disconnected}, ChannelData),
|
?assertMatch(#{status := ?status_disconnected}, ChannelData),
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
|
t_rule_pointing_to_non_operational_channel(_Config) ->
|
||||||
|
%% Check that, if a rule sends a message to an action that is not yet installed and
|
||||||
|
%% uses `simple_async_internal_buffer', then it eventually increments the rule's
|
||||||
|
%% failed counter.
|
||||||
|
ResponseETS = ets:new(response_ets, [public]),
|
||||||
|
ets:insert(ResponseETS, {on_get_status_value, ?status_connecting}),
|
||||||
|
OnGetStatusFun = wrap_fun(fun() ->
|
||||||
|
ets:lookup_element(ResponseETS, on_get_status_value, 2)
|
||||||
|
end),
|
||||||
|
|
||||||
|
ConnectorConfig = emqx_utils_maps:deep_merge(con_config(), #{
|
||||||
|
<<"on_get_status_fun">> => OnGetStatusFun,
|
||||||
|
<<"resource_opts">> => #{<<"start_timeout">> => 100}
|
||||||
|
}),
|
||||||
|
ConnectorName = ?FUNCTION_NAME,
|
||||||
|
ct:pal("connector config:\n ~p", [ConnectorConfig]),
|
||||||
|
?check_trace(
|
||||||
|
begin
|
||||||
|
{ok, _} = emqx_connector:create(con_type(), ConnectorName, ConnectorConfig),
|
||||||
|
|
||||||
|
ActionName = my_test_action,
|
||||||
|
ChanStatusFun = wrap_fun(fun() -> ?status_disconnected end),
|
||||||
|
ActionConfig = (bridge_config())#{
|
||||||
|
<<"on_get_channel_status_fun">> => ChanStatusFun,
|
||||||
|
<<"connector">> => atom_to_binary(ConnectorName)
|
||||||
|
},
|
||||||
|
ct:pal("action config:\n ~p", [ActionConfig]),
|
||||||
|
|
||||||
|
meck:new(con_mod(), [passthrough, no_history, non_strict]),
|
||||||
|
on_exit(fun() -> catch meck:unload([con_mod()]) end),
|
||||||
|
meck:expect(con_mod(), query_mode, 1, simple_async_internal_buffer),
|
||||||
|
meck:expect(con_mod(), callback_mode, 0, async_if_possible),
|
||||||
|
|
||||||
|
{ok, _} = emqx_bridge_v2:create(bridge_type(), ActionName, ActionConfig),
|
||||||
|
|
||||||
|
?assertMatch(
|
||||||
|
{ok, #{
|
||||||
|
error := <<"Not installed">>,
|
||||||
|
status := ?status_connecting,
|
||||||
|
resource_data := #{status := ?status_connecting}
|
||||||
|
}},
|
||||||
|
emqx_bridge_v2:lookup(bridge_type(), ActionName)
|
||||||
|
),
|
||||||
|
|
||||||
|
{ok, #{id := RuleId}} = emqx_rule_engine:create_rule(
|
||||||
|
#{
|
||||||
|
sql => <<"select * from \"t/a\"">>,
|
||||||
|
id => atom_to_binary(?FUNCTION_NAME),
|
||||||
|
actions => [
|
||||||
|
<<
|
||||||
|
(atom_to_binary(bridge_type()))/binary,
|
||||||
|
":",
|
||||||
|
(atom_to_binary(ActionName))/binary
|
||||||
|
>>
|
||||||
|
]
|
||||||
|
}
|
||||||
|
),
|
||||||
|
on_exit(fun() -> emqx_rule_engine:delete_rule(RuleId) end),
|
||||||
|
|
||||||
|
Msg = emqx_message:make(<<"t/a">>, <<"payload">>),
|
||||||
|
emqx:publish(Msg),
|
||||||
|
|
||||||
|
ActionId = emqx_bridge_v2:id(bridge_type(), ActionName, ConnectorName),
|
||||||
|
?assertEqual(1, emqx_resource_metrics:matched_get(ActionId)),
|
||||||
|
?assertEqual(1, emqx_resource_metrics:failed_get(ActionId)),
|
||||||
|
?retry(
|
||||||
|
_Sleep0 = 100,
|
||||||
|
_Attempts = 20,
|
||||||
|
?assertMatch(
|
||||||
|
#{
|
||||||
|
counters :=
|
||||||
|
#{
|
||||||
|
matched := 1,
|
||||||
|
'actions.failed' := 1
|
||||||
|
}
|
||||||
|
},
|
||||||
|
emqx_metrics_worker:get_metrics(rule_metrics, RuleId)
|
||||||
|
)
|
||||||
|
),
|
||||||
|
|
||||||
|
ok
|
||||||
|
end,
|
||||||
|
[]
|
||||||
|
),
|
||||||
|
|
||||||
|
ok.
|
||||||
|
|
||||||
|
t_query_uses_action_query_mode(_Config) ->
|
||||||
|
%% Check that we compute the query mode from the action and not from the connector
|
||||||
|
%% when querying the resource.
|
||||||
|
|
||||||
|
%% Set one query mode for the connector...
|
||||||
|
meck:new(con_mod(), [passthrough, no_history, non_strict]),
|
||||||
|
on_exit(fun() -> catch meck:unload([con_mod()]) end),
|
||||||
|
meck:expect(con_mod(), query_mode, 1, sync),
|
||||||
|
meck:expect(con_mod(), callback_mode, 0, always_sync),
|
||||||
|
|
||||||
|
ConnectorConfig = emqx_utils_maps:deep_merge(con_config(), #{
|
||||||
|
<<"resource_opts">> => #{<<"start_timeout">> => 100}
|
||||||
|
}),
|
||||||
|
ConnectorName = ?FUNCTION_NAME,
|
||||||
|
ct:pal("connector config:\n ~p", [ConnectorConfig]),
|
||||||
|
?check_trace(
|
||||||
|
begin
|
||||||
|
{ok, _} = emqx_connector:create(con_type(), ConnectorName, ConnectorConfig),
|
||||||
|
|
||||||
|
ActionName = my_test_action,
|
||||||
|
ActionConfig = (bridge_config())#{
|
||||||
|
<<"connector">> => atom_to_binary(ConnectorName)
|
||||||
|
},
|
||||||
|
ct:pal("action config:\n ~p", [ActionConfig]),
|
||||||
|
|
||||||
|
%% ... now we use a quite different query mode for the action
|
||||||
|
meck:expect(con_mod(), query_mode, 1, simple_async_internal_buffer),
|
||||||
|
meck:expect(con_mod(), callback_mode, 0, async_if_possible),
|
||||||
|
|
||||||
|
{ok, _} = emqx_bridge_v2:create(bridge_type(), ActionName, ActionConfig),
|
||||||
|
|
||||||
|
{ok, #{id := RuleId}} = emqx_rule_engine:create_rule(
|
||||||
|
#{
|
||||||
|
sql => <<"select * from \"t/a\"">>,
|
||||||
|
id => atom_to_binary(?FUNCTION_NAME),
|
||||||
|
actions => [
|
||||||
|
<<
|
||||||
|
(atom_to_binary(bridge_type()))/binary,
|
||||||
|
":",
|
||||||
|
(atom_to_binary(ActionName))/binary
|
||||||
|
>>
|
||||||
|
]
|
||||||
|
}
|
||||||
|
),
|
||||||
|
on_exit(fun() -> emqx_rule_engine:delete_rule(RuleId) end),
|
||||||
|
|
||||||
|
Msg = emqx_message:make(<<"t/a">>, <<"payload">>),
|
||||||
|
{_, {ok, _}} =
|
||||||
|
?wait_async_action(
|
||||||
|
emqx:publish(Msg),
|
||||||
|
#{?snk_kind := call_query},
|
||||||
|
2_000
|
||||||
|
),
|
||||||
|
|
||||||
|
ok
|
||||||
|
end,
|
||||||
|
fun(Trace) ->
|
||||||
|
?assertMatch(
|
||||||
|
[#{query_mode := simple_async_internal_buffer}],
|
||||||
|
?of_kind(simple_query_override, Trace)
|
||||||
|
),
|
||||||
|
ok
|
||||||
|
end
|
||||||
|
),
|
||||||
|
ok.
|
||||||
|
|
||||||
%% Helper Functions
|
%% Helper Functions
|
||||||
|
|
||||||
wait_until(Fun) ->
|
wait_until(Fun) ->
|
||||||
|
|
|
@ -69,7 +69,6 @@ connector_resource_opts_test() ->
|
||||||
%% These are used by `emqx_resource_manager' itself to manage the resource lifecycle.
|
%% These are used by `emqx_resource_manager' itself to manage the resource lifecycle.
|
||||||
MinimumROFields = [
|
MinimumROFields = [
|
||||||
health_check_interval,
|
health_check_interval,
|
||||||
query_mode,
|
|
||||||
start_after_created,
|
start_after_created,
|
||||||
start_timeout
|
start_timeout
|
||||||
],
|
],
|
||||||
|
|
|
@ -262,7 +262,6 @@ t_write_failure(Config) ->
|
||||||
ProxyPort = ?config(proxy_port, Config),
|
ProxyPort = ?config(proxy_port, Config),
|
||||||
ProxyHost = ?config(proxy_host, Config),
|
ProxyHost = ?config(proxy_host, Config),
|
||||||
QueryMode = ?config(query_mode, Config),
|
QueryMode = ?config(query_mode, Config),
|
||||||
EnableBatch = ?config(enable_batch, Config),
|
|
||||||
Data = rand_data(),
|
Data = rand_data(),
|
||||||
{{ok, _}, {ok, _}} =
|
{{ok, _}, {ok, _}} =
|
||||||
?wait_async_action(
|
?wait_async_action(
|
||||||
|
|
|
@ -67,15 +67,25 @@ connector_action_config_to_bridge_v1_config(ConnectorConfig, ActionConfig) ->
|
||||||
}.
|
}.
|
||||||
|
|
||||||
bridge_v1_config_to_connector_config(BridgeV1Conf) ->
|
bridge_v1_config_to_connector_config(BridgeV1Conf) ->
|
||||||
%% To statisfy the emqx_bridge_api_SUITE:t_http_crud_apis/1
|
%% To satisfy the emqx_bridge_api_SUITE:t_http_crud_apis/1
|
||||||
ok = validate_webhook_url(maps:get(<<"url">>, BridgeV1Conf, undefined)),
|
ok = validate_webhook_url(maps:get(<<"url">>, BridgeV1Conf, undefined)),
|
||||||
maps:without(?REMOVED_KEYS ++ ?ACTION_KEYS ++ ?PARAMETER_KEYS, BridgeV1Conf).
|
ConnectorConfig0 = maps:without(?REMOVED_KEYS ++ ?ACTION_KEYS ++ ?PARAMETER_KEYS, BridgeV1Conf),
|
||||||
|
emqx_utils_maps:update_if_present(
|
||||||
|
<<"resource_opts">>,
|
||||||
|
fun emqx_connector_schema:project_to_connector_resource_opts/1,
|
||||||
|
ConnectorConfig0
|
||||||
|
).
|
||||||
|
|
||||||
bridge_v1_config_to_action_config(BridgeV1Conf, ConnectorName) ->
|
bridge_v1_config_to_action_config(BridgeV1Conf, ConnectorName) ->
|
||||||
Parameters = maps:with(?PARAMETER_KEYS, BridgeV1Conf),
|
Parameters = maps:with(?PARAMETER_KEYS, BridgeV1Conf),
|
||||||
Parameters1 = Parameters#{<<"path">> => <<>>, <<"headers">> => #{}},
|
Parameters1 = Parameters#{<<"path">> => <<>>, <<"headers">> => #{}},
|
||||||
CommonKeys = [<<"enable">>, <<"description">>],
|
CommonKeys = [<<"enable">>, <<"description">>],
|
||||||
ActionConfig = maps:with(?ACTION_KEYS ++ CommonKeys, BridgeV1Conf),
|
ActionConfig0 = maps:with(?ACTION_KEYS ++ CommonKeys, BridgeV1Conf),
|
||||||
|
ActionConfig = emqx_utils_maps:update_if_present(
|
||||||
|
<<"resource_opts">>,
|
||||||
|
fun emqx_bridge_v2_schema:project_to_actions_resource_opts/1,
|
||||||
|
ActionConfig0
|
||||||
|
),
|
||||||
ActionConfig#{<<"parameters">> => Parameters1, <<"connector">> => ConnectorName}.
|
ActionConfig#{<<"parameters">> => Parameters1, <<"connector">> => ConnectorName}.
|
||||||
|
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
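The conversions above lean on `emqx_utils_maps:update_if_present/3`; a hedged sketch of the behaviour assumed here (apply the fun to the value only when the key exists, otherwise return the map unchanged), not the actual library source:

    update_if_present(Key, Fun, Map) ->
        case maps:find(Key, Map) of
            {ok, Value} -> maps:put(Key, Fun(Value), Map);
            error -> Map
        end.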
|
|
|
@ -565,12 +565,8 @@ preprocess_request(undefined) ->
|
||||||
undefined;
|
undefined;
|
||||||
preprocess_request(Req) when map_size(Req) == 0 ->
|
preprocess_request(Req) when map_size(Req) == 0 ->
|
||||||
undefined;
|
undefined;
|
||||||
preprocess_request(
|
preprocess_request(#{method := Method} = Req) ->
|
||||||
#{
|
Path = maps:get(path, Req, <<>>),
|
||||||
method := Method,
|
|
||||||
path := Path
|
|
||||||
} = Req
|
|
||||||
) ->
|
|
||||||
Headers = maps:get(headers, Req, []),
|
Headers = maps:get(headers, Req, []),
|
||||||
#{
|
#{
|
||||||
method => parse_template(to_bin(Method)),
|
method => parse_template(to_bin(Method)),
|
||||||
|
|
|
@ -48,7 +48,15 @@ fields("get") ->
|
||||||
%%--- v1 bridges config file
|
%%--- v1 bridges config file
|
||||||
%% see: emqx_bridge_schema:fields(bridges)
|
%% see: emqx_bridge_schema:fields(bridges)
|
||||||
fields("config") ->
|
fields("config") ->
|
||||||
basic_config() ++ request_config();
|
basic_config() ++
|
||||||
|
request_config() ++
|
||||||
|
emqx_connector_schema:resource_opts_ref(?MODULE, "v1_resource_opts");
|
||||||
|
fields("v1_resource_opts") ->
|
||||||
|
UnsupportedOpts = [enable_batch, batch_size, batch_time],
|
||||||
|
lists:filter(
|
||||||
|
fun({K, _V}) -> not lists:member(K, UnsupportedOpts) end,
|
||||||
|
emqx_resource_schema:fields("creation_opts")
|
||||||
|
);
|
||||||
%%--------------------------------------------------------------------
|
%%--------------------------------------------------------------------
|
||||||
%% v2: configuration
|
%% v2: configuration
|
||||||
fields(action) ->
|
fields(action) ->
|
||||||
|
@ -89,7 +97,13 @@ fields("http_action") ->
|
||||||
required => true,
|
required => true,
|
||||||
desc => ?DESC("config_parameters_opts")
|
desc => ?DESC("config_parameters_opts")
|
||||||
})}
|
})}
|
||||||
] ++ http_resource_opts();
|
] ++ emqx_connector_schema:resource_opts_ref(?MODULE, action_resource_opts);
|
||||||
|
fields(action_resource_opts) ->
|
||||||
|
UnsupportedOpts = [batch_size, batch_time],
|
||||||
|
lists:filter(
|
||||||
|
fun({K, _V}) -> not lists:member(K, UnsupportedOpts) end,
|
||||||
|
emqx_bridge_v2_schema:resource_opts_fields()
|
||||||
|
);
|
||||||
fields("parameters_opts") ->
|
fields("parameters_opts") ->
|
||||||
[
|
[
|
||||||
{path,
|
{path,
|
||||||
|
@ -129,20 +143,20 @@ fields("config_connector") ->
|
||||||
}
|
}
|
||||||
)},
|
)},
|
||||||
{description, emqx_schema:description_schema()}
|
{description, emqx_schema:description_schema()}
|
||||||
] ++ connector_url_headers() ++ connector_opts();
|
] ++ connector_url_headers() ++
|
||||||
%%--------------------------------------------------------------------
|
connector_opts() ++
|
||||||
%% v1/v2
|
emqx_connector_schema:resource_opts_ref(?MODULE, connector_resource_opts);
|
||||||
fields("resource_opts") ->
|
fields(connector_resource_opts) ->
|
||||||
UnsupportedOpts = [enable_batch, batch_size, batch_time],
|
emqx_connector_schema:resource_opts_fields().
|
||||||
lists:filter(
|
|
||||||
fun({K, _V}) -> not lists:member(K, UnsupportedOpts) end,
|
|
||||||
emqx_resource_schema:fields("creation_opts")
|
|
||||||
).
|
|
||||||
|
|
||||||
desc("config") ->
|
desc("config") ->
|
||||||
?DESC("desc_config");
|
?DESC("desc_config");
|
||||||
desc("resource_opts") ->
|
desc("v1_resource_opts") ->
|
||||||
?DESC(emqx_resource_schema, "creation_opts");
|
?DESC(emqx_resource_schema, "creation_opts");
|
||||||
|
desc(connector_resource_opts) ->
|
||||||
|
?DESC(emqx_resource_schema, "resource_opts");
|
||||||
|
desc(action_resource_opts) ->
|
||||||
|
?DESC(emqx_resource_schema, "resource_opts");
|
||||||
desc(Method) when Method =:= "get"; Method =:= "put"; Method =:= "post" ->
|
desc(Method) when Method =:= "get"; Method =:= "put"; Method =:= "post" ->
|
||||||
["Configuration for WebHook using `", string:to_upper(Method), "` method."];
|
["Configuration for WebHook using `", string:to_upper(Method), "` method."];
|
||||||
desc("config_connector") ->
|
desc("config_connector") ->
|
||||||
|
@ -304,23 +318,10 @@ request_timeout_field() ->
|
||||||
}
|
}
|
||||||
)}.
|
)}.
|
||||||
|
|
||||||
http_resource_opts() ->
|
|
||||||
[
|
|
||||||
{resource_opts,
|
|
||||||
mk(
|
|
||||||
ref(?MODULE, "resource_opts"),
|
|
||||||
#{
|
|
||||||
required => false,
|
|
||||||
default => #{},
|
|
||||||
desc => ?DESC(emqx_resource_schema, <<"resource_opts">>)
|
|
||||||
}
|
|
||||||
)}
|
|
||||||
].
|
|
||||||
|
|
||||||
connector_opts() ->
|
connector_opts() ->
|
||||||
mark_request_field_deperecated(
|
mark_request_field_deperecated(
|
||||||
proplists:delete(max_retries, emqx_bridge_http_connector:fields(config))
|
proplists:delete(max_retries, emqx_bridge_http_connector:fields(config))
|
||||||
) ++ http_resource_opts().
|
).
|
||||||
|
|
||||||
mark_request_field_deperecated(Fields) ->
|
mark_request_field_deperecated(Fields) ->
|
||||||
lists:map(
|
lists:map(
|
||||||
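A sketch of what `emqx_connector_schema:resource_opts_ref(Module, StructName)` is assumed to return, modelled on the `http_resource_opts/0` helper removed above (description metadata omitted); not the actual library source:

    resource_opts_ref(Module, StructName) ->
        [
            {resource_opts,
                hoconsc:mk(
                    hoconsc:ref(Module, StructName),
                    #{required => false, default => #{}}
                )}
        ].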
|
|
|
@ -36,9 +36,7 @@
|
||||||
namespace() ->
|
namespace() ->
|
||||||
"bridge_mongodb".
|
"bridge_mongodb".
|
||||||
|
|
||||||
roots() ->
|
roots() -> [].
|
||||||
%% ???
|
|
||||||
[].
|
|
||||||
|
|
||||||
fields("config") ->
|
fields("config") ->
|
||||||
[
|
[
|
||||||
|
|
|
@ -10,7 +10,6 @@
|
||||||
-export([
|
-export([
|
||||||
bridge_v1_config_to_action_config/2,
|
bridge_v1_config_to_action_config/2,
|
||||||
bridge_v1_config_to_connector_config/1,
|
bridge_v1_config_to_connector_config/1,
|
||||||
connector_action_config_to_bridge_v1_config/2,
|
|
||||||
action_type_name/0,
|
action_type_name/0,
|
||||||
bridge_v1_type_name/0,
|
bridge_v1_type_name/0,
|
||||||
connector_type_name/0,
|
connector_type_name/0,
|
||||||
|
@ -26,20 +25,6 @@
|
||||||
|
|
||||||
-define(SCHEMA_MODULE, emqx_bridge_mongodb).
|
-define(SCHEMA_MODULE, emqx_bridge_mongodb).
|
||||||
|
|
||||||
connector_action_config_to_bridge_v1_config(ConnectorConfig, ActionConfig) ->
|
|
||||||
fix_v1_type(
|
|
||||||
maps:merge(
|
|
||||||
maps:without(
|
|
||||||
[<<"connector">>],
|
|
||||||
map_unindent(<<"parameters">>, ActionConfig)
|
|
||||||
),
|
|
||||||
map_unindent(<<"parameters">>, ConnectorConfig)
|
|
||||||
)
|
|
||||||
).
|
|
||||||
|
|
||||||
fix_v1_type(#{<<"mongo_type">> := MongoType} = Conf) ->
|
|
||||||
Conf#{<<"type">> => v1_type(MongoType)}.
|
|
||||||
|
|
||||||
bridge_v1_config_to_action_config(BridgeV1Config, ConnectorName) ->
|
bridge_v1_config_to_action_config(BridgeV1Config, ConnectorName) ->
|
||||||
ActionTopLevelKeys = schema_keys(mongodb_action),
|
ActionTopLevelKeys = schema_keys(mongodb_action),
|
||||||
ActionParametersKeys = schema_keys(action_parameters),
|
ActionParametersKeys = schema_keys(action_parameters),
|
||||||
|
@ -66,7 +51,7 @@ bridge_v1_config_to_connector_config(BridgeV1Config) ->
|
||||||
|
|
||||||
make_config_map(PickKeys, IndentKeys, Config) ->
|
make_config_map(PickKeys, IndentKeys, Config) ->
|
||||||
Conf0 = maps:with(PickKeys, Config),
|
Conf0 = maps:with(PickKeys, Config),
|
||||||
map_indent(<<"parameters">>, IndentKeys, Conf0).
|
emqx_utils_maps:indent(<<"parameters">>, IndentKeys, Conf0).
|
||||||
|
|
||||||
bridge_v1_type_name() ->
|
bridge_v1_type_name() ->
|
||||||
{fun ?MODULE:bridge_v1_type_name_fun/1, bridge_v1_type_names()}.
|
{fun ?MODULE:bridge_v1_type_name_fun/1, bridge_v1_type_names()}.
|
||||||
|
@ -86,18 +71,5 @@ v1_type(<<"rs">>) -> mongodb_rs;
|
||||||
v1_type(<<"sharded">>) -> mongodb_sharded;
|
v1_type(<<"sharded">>) -> mongodb_sharded;
|
||||||
v1_type(<<"single">>) -> mongodb_single.
|
v1_type(<<"single">>) -> mongodb_single.
|
||||||
|
|
||||||
map_unindent(Key, Map) ->
|
|
||||||
maps:merge(
|
|
||||||
maps:get(Key, Map),
|
|
||||||
maps:remove(Key, Map)
|
|
||||||
).
|
|
||||||
|
|
||||||
map_indent(IndentKey, PickKeys, Map) ->
|
|
||||||
maps:put(
|
|
||||||
IndentKey,
|
|
||||||
maps:with(PickKeys, Map),
|
|
||||||
maps:without(PickKeys, Map)
|
|
||||||
).
|
|
||||||
|
|
||||||
schema_keys(Name) ->
|
schema_keys(Name) ->
|
||||||
[bin(Key) || Key <- proplists:get_keys(?SCHEMA_MODULE:fields(Name))].
|
[bin(Key) || Key <- proplists:get_keys(?SCHEMA_MODULE:fields(Name))].
|
||||||
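For orientation: the local `map_unindent/2` and `map_indent/3` removed above spell out the semantics now expected from the shared helpers. Keys and values below are made up, assuming `emqx_utils_maps:indent/3` and `unindent/2` mirror the removed code.

    1> M = #{<<"collection">> => <<"mycol">>, <<"enable">> => true}.
    #{<<"collection">> => <<"mycol">>,<<"enable">> => true}
    2> emqx_utils_maps:indent(<<"parameters">>, [<<"collection">>], M).
    #{<<"enable">> => true,<<"parameters">> => #{<<"collection">> => <<"mycol">>}}
    3> emqx_utils_maps:unindent(<<"parameters">>, v(2)).
    #{<<"collection">> => <<"mycol">>,<<"enable">> => true}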
|
|
|
@ -104,8 +104,7 @@ connect(Pid, Name) ->
|
||||||
config(#{remote := RC = #{}} = Conf) ->
|
config(#{remote := RC = #{}} = Conf) ->
|
||||||
Conf#{remote => emqx_bridge_mqtt_msg:parse(RC)}.
|
Conf#{remote => emqx_bridge_mqtt_msg:parse(RC)}.
|
||||||
|
|
||||||
-spec send(pid(), message(), egress()) ->
|
-spec send(pid(), message(), egress()) -> ok.
|
||||||
ok.
|
|
||||||
send(Pid, MsgIn, Egress) ->
|
send(Pid, MsgIn, Egress) ->
|
||||||
emqtt:publish(Pid, export_msg(MsgIn, Egress)).
|
emqtt:publish(Pid, export_msg(MsgIn, Egress)).
|
||||||
|
|
||||||
|
|
|
@ -9,7 +9,7 @@
|
||||||
emqx_resource,
|
emqx_resource,
|
||||||
emqx_mysql
|
emqx_mysql
|
||||||
]},
|
]},
|
||||||
{env, []},
|
{env, [{emqx_action_info_modules, [emqx_bridge_mysql_action_info]}]},
|
||||||
{modules, []},
|
{modules, []},
|
||||||
{links, []}
|
{links, []}
|
||||||
]}.
|
]}.
|
||||||
|
|
|
@ -10,7 +10,9 @@
|
||||||
-import(hoconsc, [mk/2, enum/1, ref/2]).
|
-import(hoconsc, [mk/2, enum/1, ref/2]).
|
||||||
|
|
||||||
-export([
|
-export([
|
||||||
conn_bridge_examples/1
|
bridge_v2_examples/1,
|
||||||
|
conn_bridge_examples/1,
|
||||||
|
connector_examples/1
|
||||||
]).
|
]).
|
||||||
|
|
||||||
-export([
|
-export([
|
||||||
|
@ -20,6 +22,9 @@
|
||||||
desc/1
|
desc/1
|
||||||
]).
|
]).
|
||||||
|
|
||||||
|
-define(CONNECTOR_TYPE, mysql).
|
||||||
|
-define(ACTION_TYPE, ?CONNECTOR_TYPE).
|
||||||
|
|
||||||
-define(DEFAULT_SQL, <<
|
-define(DEFAULT_SQL, <<
|
||||||
"insert into t_mqtt_msg(msgid, topic, qos, payload, arrived) "
|
"insert into t_mqtt_msg(msgid, topic, qos, payload, arrived) "
|
||||||
"values (${id}, ${topic}, ${qos}, ${payload}, FROM_UNIXTIME(${timestamp}/1000))"
|
"values (${id}, ${topic}, ${qos}, ${payload}, FROM_UNIXTIME(${timestamp}/1000))"
|
||||||
|
@ -28,6 +33,22 @@
|
||||||
%% -------------------------------------------------------------------------------------------------
|
%% -------------------------------------------------------------------------------------------------
|
||||||
%% api
|
%% api
|
||||||
|
|
||||||
|
bridge_v2_examples(Method) ->
|
||||||
|
[
|
||||||
|
#{
|
||||||
|
<<"mysql">> =>
|
||||||
|
#{
|
||||||
|
summary => <<"MySQL Action">>,
|
||||||
|
value => emqx_bridge_v2_schema:action_values(
|
||||||
|
Method, ?ACTION_TYPE, ?CONNECTOR_TYPE, action_values()
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
].
|
||||||
|
|
||||||
|
action_values() ->
|
||||||
|
#{parameters => #{sql => ?DEFAULT_SQL}}.
|
||||||
|
|
||||||
conn_bridge_examples(Method) ->
|
conn_bridge_examples(Method) ->
|
||||||
[
|
[
|
||||||
#{
|
#{
|
||||||
|
@ -38,6 +59,29 @@ conn_bridge_examples(Method) ->
|
||||||
}
|
}
|
||||||
].
|
].
|
||||||
|
|
||||||
|
connector_examples(Method) ->
|
||||||
|
[
|
||||||
|
#{
|
||||||
|
<<"mysql">> =>
|
||||||
|
#{
|
||||||
|
summary => <<"MySQL Connector">>,
|
||||||
|
value => emqx_connector_schema:connector_values(
|
||||||
|
Method, ?CONNECTOR_TYPE, connector_values()
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
].
|
||||||
|
|
||||||
|
connector_values() ->
|
||||||
|
#{
|
||||||
|
server => <<"127.0.0.1:3306">>,
|
||||||
|
database => <<"test">>,
|
||||||
|
pool_size => 8,
|
||||||
|
username => <<"root">>,
|
||||||
|
password => <<"******">>,
|
||||||
|
resource_opts => #{health_check_interval => <<"20s">>}
|
||||||
|
}.
|
||||||
|
|
||||||
values(_Method) ->
|
values(_Method) ->
|
||||||
#{
|
#{
|
||||||
enable => true,
|
enable => true,
|
||||||
|
@ -80,17 +124,70 @@ fields("config") ->
|
||||||
#{desc => ?DESC("local_topic"), default => undefined}
|
#{desc => ?DESC("local_topic"), default => undefined}
|
||||||
)}
|
)}
|
||||||
] ++ emqx_resource_schema:fields("resource_opts") ++
|
] ++ emqx_resource_schema:fields("resource_opts") ++
|
||||||
(emqx_mysql:fields(config) --
|
emqx_mysql:fields(config);
|
||||||
emqx_connector_schema_lib:prepare_statement_fields());
|
fields(action) ->
|
||||||
|
{mysql,
|
||||||
|
mk(
|
||||||
|
hoconsc:map(name, ref(?MODULE, mysql_action)),
|
||||||
|
#{desc => ?DESC("mysql_action"), required => false}
|
||||||
|
)};
|
||||||
|
fields(mysql_action) ->
|
||||||
|
emqx_bridge_v2_schema:make_producer_action_schema(
|
||||||
|
mk(
|
||||||
|
ref(?MODULE, action_parameters),
|
||||||
|
#{
|
||||||
|
required => true, desc => ?DESC(action_parameters)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
);
|
||||||
|
fields(action_parameters) ->
|
||||||
|
[
|
||||||
|
{sql,
|
||||||
|
mk(
|
||||||
|
binary(),
|
||||||
|
#{desc => ?DESC("sql_template"), default => ?DEFAULT_SQL, format => <<"sql">>}
|
||||||
|
)}
|
||||||
|
];
|
||||||
|
fields("config_connector") ->
|
||||||
|
emqx_connector_schema:common_fields() ++
|
||||||
|
emqx_mysql:fields(config) ++
|
||||||
|
emqx_connector_schema:resource_opts_ref(?MODULE, connector_resource_opts);
|
||||||
|
fields(connector_resource_opts) ->
|
||||||
|
emqx_connector_schema:resource_opts_fields();
|
||||||
fields("post") ->
|
fields("post") ->
|
||||||
[type_field(), name_field() | fields("config")];
|
[type_field(), name_field() | fields("config")];
|
||||||
fields("put") ->
|
fields("put") ->
|
||||||
fields("config");
|
fields("config");
|
||||||
fields("get") ->
|
fields("get") ->
|
||||||
emqx_bridge_schema:status_fields() ++ fields("post").
|
emqx_bridge_schema:status_fields() ++ fields("post");
|
||||||
|
fields("get_bridge_v2") ->
|
||||||
|
emqx_bridge_schema:status_fields() ++ fields("post_bridge_v2");
|
||||||
|
fields("post_bridge_v2") ->
|
||||||
|
[type_field(), name_field() | fields(mysql_action)];
|
||||||
|
fields("put_bridge_v2") ->
|
||||||
|
fields(mysql_action);
|
||||||
|
fields(Field) when
|
||||||
|
Field == "get_connector";
|
||||||
|
Field == "put_connector";
|
||||||
|
Field == "post_connector"
|
||||||
|
->
|
||||||
|
emqx_connector_schema:api_fields(
|
||||||
|
Field,
|
||||||
|
?CONNECTOR_TYPE,
|
||||||
|
emqx_mysql:fields(config) ++
|
||||||
|
emqx_connector_schema:resource_opts_ref(?MODULE, connector_resource_opts)
|
||||||
|
).
|
||||||
|
|
||||||
desc("config") ->
|
desc("config") ->
|
||||||
?DESC("desc_config");
|
?DESC("desc_config");
|
||||||
|
desc("config_connector") ->
|
||||||
|
?DESC("desc_config");
|
||||||
|
desc(connector_resource_opts) ->
|
||||||
|
?DESC(emqx_resource_schema, "resource_opts");
|
||||||
|
desc(action_parameters) ->
|
||||||
|
?DESC(action_parameters);
|
||||||
|
desc(mysql_action) ->
|
||||||
|
?DESC(mysql_action);
|
||||||
desc(Method) when Method =:= "get"; Method =:= "put"; Method =:= "post" ->
|
desc(Method) when Method =:= "get"; Method =:= "put"; Method =:= "post" ->
|
||||||
["Configuration for MySQL using `", string:to_upper(Method), "` method."];
|
["Configuration for MySQL using `", string:to_upper(Method), "` method."];
|
||||||
desc(_) ->
|
desc(_) ->
|
||||||
|
|
|
@ -0,0 +1,64 @@
|
||||||
|
%%--------------------------------------------------------------------
|
||||||
|
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||||
|
%%--------------------------------------------------------------------
|
||||||
|
-module(emqx_bridge_mysql_action_info).
|
||||||
|
|
||||||
|
-behaviour(emqx_action_info).
|
||||||
|
|
||||||
|
%% behaviour callbacks
|
||||||
|
-export([
|
||||||
|
action_type_name/0,
|
||||||
|
bridge_v1_config_to_action_config/2,
|
||||||
|
bridge_v1_config_to_connector_config/1,
|
||||||
|
bridge_v1_type_name/0,
|
||||||
|
connector_action_config_to_bridge_v1_config/2,
|
||||||
|
connector_type_name/0,
|
||||||
|
schema_module/0
|
||||||
|
]).
|
||||||
|
|
||||||
|
-import(emqx_utils_conv, [bin/1]).
|
||||||
|
|
||||||
|
-define(MYSQL_TYPE, mysql).
|
||||||
|
-define(SCHEMA_MODULE, emqx_bridge_mysql).
|
||||||
|
|
||||||
|
action_type_name() -> ?MYSQL_TYPE.
|
||||||
|
bridge_v1_type_name() -> ?MYSQL_TYPE.
|
||||||
|
connector_type_name() -> ?MYSQL_TYPE.
|
||||||
|
|
||||||
|
schema_module() -> ?SCHEMA_MODULE.
|
||||||
|
|
||||||
|
connector_action_config_to_bridge_v1_config(ConnectorConfig, ActionConfig) ->
|
||||||
|
MergedConfig =
|
||||||
|
emqx_utils_maps:deep_merge(
|
||||||
|
maps:without(
|
||||||
|
[<<"connector">>],
|
||||||
|
emqx_utils_maps:unindent(<<"parameters">>, ActionConfig)
|
||||||
|
),
|
||||||
|
ConnectorConfig
|
||||||
|
),
|
||||||
|
BridgeV1Keys = schema_keys("config"),
|
||||||
|
maps:with(BridgeV1Keys, MergedConfig).
|
||||||
|
|
||||||
|
bridge_v1_config_to_action_config(BridgeV1Config, ConnectorName) ->
|
||||||
|
ActionTopLevelKeys = schema_keys(mysql_action),
|
||||||
|
ActionParametersKeys = schema_keys(action_parameters),
|
||||||
|
ActionKeys = ActionTopLevelKeys ++ ActionParametersKeys,
|
||||||
|
ActionConfig = make_config_map(ActionKeys, ActionParametersKeys, BridgeV1Config),
|
||||||
|
ActionConfig#{<<"connector">> => ConnectorName}.
|
||||||
|
|
||||||
|
bridge_v1_config_to_connector_config(BridgeV1Config) ->
|
||||||
|
ConnectorKeys = schema_keys("config_connector"),
|
||||||
|
ResourceOptsKeys = schema_keys(connector_resource_opts),
|
||||||
|
maps:update_with(
|
||||||
|
<<"resource_opts">>,
|
||||||
|
fun(ResourceOpts) -> maps:with(ResourceOptsKeys, ResourceOpts) end,
|
||||||
|
#{},
|
||||||
|
maps:with(ConnectorKeys, BridgeV1Config)
|
||||||
|
).
|
||||||
|
|
||||||
|
make_config_map(PickKeys, IndentKeys, Config) ->
|
||||||
|
Conf0 = maps:with(PickKeys, Config),
|
||||||
|
emqx_utils_maps:indent(<<"parameters">>, IndentKeys, Conf0).
|
||||||
|
|
||||||
|
schema_keys(Name) ->
|
||||||
|
[bin(Key) || Key <- proplists:get_keys(?SCHEMA_MODULE:fields(Name))].
|
|
@ -0,0 +1,150 @@
|
||||||
|
%%--------------------------------------------------------------------
|
||||||
|
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||||
|
%%--------------------------------------------------------------------
|
||||||
|
-module(emqx_bridge_mysql_connector).
|
||||||
|
|
||||||
|
-behaviour(emqx_resource).
|
||||||
|
|
||||||
|
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
|
||||||
|
|
||||||
|
%% `emqx_resource' API
|
||||||
|
-export([
|
||||||
|
on_remove_channel/3,
|
||||||
|
callback_mode/0,
|
||||||
|
on_add_channel/4,
|
||||||
|
on_batch_query/3,
|
||||||
|
on_get_channel_status/3,
|
||||||
|
on_get_channels/1,
|
||||||
|
on_get_status/2,
|
||||||
|
on_query/3,
|
||||||
|
on_start/2,
|
||||||
|
on_stop/2
|
||||||
|
]).
|
||||||
|
|
||||||
|
%%========================================================================================
|
||||||
|
%% `emqx_resource' API
|
||||||
|
%%========================================================================================
|
||||||
|
|
||||||
|
callback_mode() -> emqx_mysql:callback_mode().
|
||||||
|
|
||||||
|
on_add_channel(
|
||||||
|
_InstanceId,
|
||||||
|
#{channels := Channels, connector_state := ConnectorState} = State0,
|
||||||
|
ChannelId,
|
||||||
|
ChannelConfig0
|
||||||
|
) ->
|
||||||
|
ChannelConfig1 = emqx_utils_maps:unindent(parameters, ChannelConfig0),
|
||||||
|
QueryTemplates = emqx_mysql:parse_prepare_sql(ChannelId, ChannelConfig1),
|
||||||
|
ChannelConfig2 = maps:merge(ChannelConfig1, QueryTemplates),
|
||||||
|
ChannelConfig = set_prepares(ChannelConfig2, ConnectorState),
|
||||||
|
State = State0#{
|
||||||
|
channels => maps:put(ChannelId, ChannelConfig, Channels),
|
||||||
|
connector_state => ConnectorState
|
||||||
|
},
|
||||||
|
{ok, State}.
|
||||||
|
|
||||||
|
on_get_channel_status(_InstanceId, ChannelId, #{channels := Channels}) ->
|
||||||
|
case maps:get(ChannelId, Channels) of
|
||||||
|
#{prepares := ok} ->
|
||||||
|
connected;
|
||||||
|
#{prepares := {error, _}} ->
|
||||||
|
connecting
|
||||||
|
end.
|
||||||
|
|
||||||
|
on_get_channels(InstanceId) ->
|
||||||
|
emqx_bridge_v2:get_channels_for_connector(InstanceId).
|
||||||
|
|
||||||
|
on_get_status(InstanceId, #{channels := Channels0, connector_state := ConnectorState} = State0) ->
|
||||||
|
case emqx_mysql:on_get_status(InstanceId, ConnectorState) of
|
||||||
|
WithState when is_tuple(WithState) ->
|
||||||
|
NewConnectorState = element(2, WithState),
|
||||||
|
State = State0#{connector_state => NewConnectorState},
|
||||||
|
setelement(2, WithState, State);
|
||||||
|
connected ->
|
||||||
|
Channels =
|
||||||
|
maps:map(
|
||||||
|
fun
|
||||||
|
(_ChannelId, #{prepares := ok} = ChannelConfig) ->
|
||||||
|
ChannelConfig;
|
||||||
|
(_ChannelId, #{prepares := {error, _}} = ChannelConfig) ->
|
||||||
|
set_prepares(ChannelConfig, ConnectorState)
|
||||||
|
end,
|
||||||
|
Channels0
|
||||||
|
),
|
||||||
|
State = State0#{channels => Channels},
|
||||||
|
{connected, State};
|
||||||
|
Other ->
|
||||||
|
Other
|
||||||
|
end.
|
||||||
|
|
||||||
|
on_query(InstId, {TypeOrKey, SQLOrKey}, State) ->
|
||||||
|
on_query(InstId, {TypeOrKey, SQLOrKey, [], default_timeout}, State);
|
||||||
|
on_query(InstId, {TypeOrKey, SQLOrKey, Params}, State) ->
|
||||||
|
on_query(InstId, {TypeOrKey, SQLOrKey, Params, default_timeout}, State);
|
||||||
|
on_query(
|
||||||
|
InstanceId,
|
||||||
|
{Channel, _Message, _Params, _Timeout} = Request,
|
||||||
|
#{channels := Channels, connector_state := ConnectorState}
|
||||||
|
) when is_binary(Channel) ->
|
||||||
|
ChannelConfig = maps:get(Channel, Channels),
|
||||||
|
Result = emqx_mysql:on_query(
|
||||||
|
InstanceId,
|
||||||
|
Request,
|
||||||
|
maps:merge(ConnectorState, ChannelConfig)
|
||||||
|
),
|
||||||
|
?tp(mysql_connector_on_query_return, #{instance_id => InstanceId, result => Result}),
|
||||||
|
Result;
|
||||||
|
on_query(InstanceId, Request, _State = #{channels := _Channels, connector_state := ConnectorState}) ->
|
||||||
|
emqx_mysql:on_query(InstanceId, Request, ConnectorState).
|
||||||
|
|
||||||
|
on_batch_query(
|
||||||
|
InstanceId,
|
||||||
|
[Req | _] = BatchRequest,
|
||||||
|
#{channels := Channels, connector_state := ConnectorState}
|
||||||
|
) when is_binary(element(1, Req)) ->
|
||||||
|
Channel = element(1, Req),
|
||||||
|
ChannelConfig = maps:get(Channel, Channels),
|
||||||
|
Result = emqx_mysql:on_batch_query(
|
||||||
|
InstanceId,
|
||||||
|
BatchRequest,
|
||||||
|
maps:merge(ConnectorState, ChannelConfig)
|
||||||
|
),
|
||||||
|
?tp(mysql_connector_on_batch_query_return, #{instance_id => InstanceId, result => Result}),
|
||||||
|
Result;
|
||||||
|
on_batch_query(InstanceId, BatchRequest, _State = #{connector_state := ConnectorState}) ->
|
||||||
|
emqx_mysql:on_batch_query(InstanceId, BatchRequest, ConnectorState).
|
||||||
|
|
||||||
|
on_remove_channel(
|
||||||
|
_InstanceId, #{channels := Channels, connector_state := ConnectorState} = State, ChannelId
|
||||||
|
) ->
|
||||||
|
ChannelConfig = maps:get(ChannelId, Channels),
|
||||||
|
emqx_mysql:unprepare_sql(maps:merge(ChannelConfig, ConnectorState)),
|
||||||
|
NewState = State#{channels => maps:remove(ChannelId, Channels)},
|
||||||
|
{ok, NewState}.
|
||||||
|
|
||||||
|
-spec on_start(binary(), hocon:config()) ->
|
||||||
|
{ok, #{connector_state := emqx_mysql:state(), channels := map()}} | {error, _}.
|
||||||
|
on_start(InstanceId, Config) ->
|
||||||
|
case emqx_mysql:on_start(InstanceId, Config) of
|
||||||
|
{ok, ConnectorState} ->
|
||||||
|
State = #{
|
||||||
|
connector_state => ConnectorState,
|
||||||
|
channels => #{}
|
||||||
|
},
|
||||||
|
{ok, State};
|
||||||
|
{error, Reason} ->
|
||||||
|
{error, Reason}
|
||||||
|
end.
|
||||||
|
|
||||||
|
on_stop(InstanceId, _State = #{connector_state := ConnectorState}) ->
|
||||||
|
ok = emqx_mysql:on_stop(InstanceId, ConnectorState),
|
||||||
|
?tp(mysql_connector_stopped, #{instance_id => InstanceId}),
|
||||||
|
ok.
|
||||||
|
|
||||||
|
%%========================================================================================
|
||||||
|
%% Helper fns
|
||||||
|
%%========================================================================================
|
||||||
|
set_prepares(ChannelConfig, ConnectorState) ->
|
||||||
|
#{prepares := Prepares} =
|
||||||
|
emqx_mysql:init_prepare(maps:merge(ConnectorState, ChannelConfig)),
|
||||||
|
ChannelConfig#{prepares => Prepares}.
|
|
@ -242,13 +242,12 @@ send_message(Config, Payload) ->
|
||||||
query_resource(Config, Request) ->
|
query_resource(Config, Request) ->
|
||||||
Name = ?config(mysql_name, Config),
|
Name = ?config(mysql_name, Config),
|
||||||
BridgeType = ?config(mysql_bridge_type, Config),
|
BridgeType = ?config(mysql_bridge_type, Config),
|
||||||
ResourceID = emqx_bridge_resource:resource_id(BridgeType, Name),
|
emqx_bridge_v2:query(BridgeType, Name, Request, #{timeout => 500}).
|
||||||
emqx_resource:query(ResourceID, Request, #{timeout => 500}).
|
|
||||||
|
|
||||||
sync_query_resource(Config, Request) ->
|
sync_query_resource(Config, Request) ->
|
||||||
Name = ?config(mysql_name, Config),
|
Name = ?config(mysql_name, Config),
|
||||||
BridgeType = ?config(mysql_bridge_type, Config),
|
BridgeType = ?config(mysql_bridge_type, Config),
|
||||||
ResourceID = emqx_bridge_resource:resource_id(BridgeType, Name),
|
ResourceID = emqx_bridge_v2:id(BridgeType, Name),
|
||||||
emqx_resource_buffer_worker:simple_sync_query(ResourceID, Request).
|
emqx_resource_buffer_worker:simple_sync_query(ResourceID, Request).
|
||||||
|
|
||||||
query_resource_async(Config, Request) ->
|
query_resource_async(Config, Request) ->
|
||||||
|
@ -256,8 +255,7 @@ query_resource_async(Config, Request) ->
|
||||||
BridgeType = ?config(mysql_bridge_type, Config),
|
BridgeType = ?config(mysql_bridge_type, Config),
|
||||||
Ref = alias([reply]),
|
Ref = alias([reply]),
|
||||||
AsyncReplyFun = fun(Result) -> Ref ! {result, Ref, Result} end,
|
AsyncReplyFun = fun(Result) -> Ref ! {result, Ref, Result} end,
|
||||||
ResourceID = emqx_bridge_resource:resource_id(BridgeType, Name),
|
Return = emqx_bridge_v2:query(BridgeType, Name, Request, #{
|
||||||
Return = emqx_resource:query(ResourceID, Request, #{
|
|
||||||
timeout => 500, async_reply_fun => {AsyncReplyFun, []}
|
timeout => 500, async_reply_fun => {AsyncReplyFun, []}
|
||||||
}),
|
}),
|
||||||
{Return, Ref}.
|
{Return, Ref}.
|
||||||
|
@ -274,7 +272,9 @@ unprepare(Config, Key) ->
|
||||||
Name = ?config(mysql_name, Config),
|
Name = ?config(mysql_name, Config),
|
||||||
BridgeType = ?config(mysql_bridge_type, Config),
|
BridgeType = ?config(mysql_bridge_type, Config),
|
||||||
ResourceID = emqx_bridge_resource:resource_id(BridgeType, Name),
|
ResourceID = emqx_bridge_resource:resource_id(BridgeType, Name),
|
||||||
{ok, _, #{state := #{pool_name := PoolName}}} = emqx_resource:get_instance(ResourceID),
|
{ok, _, #{state := #{connector_state := #{pool_name := PoolName}}}} = emqx_resource:get_instance(
|
||||||
|
ResourceID
|
||||||
|
),
|
||||||
[
|
[
|
||||||
begin
|
begin
|
||||||
{ok, Conn} = ecpool_worker:client(Worker),
|
{ok, Conn} = ecpool_worker:client(Worker),
|
||||||
|
@ -343,6 +343,17 @@ create_rule_and_action_http(Config) ->
|
||||||
Error
|
Error
|
||||||
end.
|
end.
|
||||||
|
|
||||||
|
request_api_status(BridgeId) ->
|
||||||
|
Path = emqx_mgmt_api_test_util:api_path(["bridges", BridgeId]),
|
||||||
|
AuthHeader = emqx_mgmt_api_test_util:auth_header_(),
|
||||||
|
case emqx_mgmt_api_test_util:request_api(get, Path, "", AuthHeader) of
|
||||||
|
{ok, Res0} ->
|
||||||
|
#{<<"status">> := Status} = _Res = emqx_utils_json:decode(Res0, [return_maps]),
|
||||||
|
{ok, binary_to_existing_atom(Status)};
|
||||||
|
Error ->
|
||||||
|
Error
|
||||||
|
end.
|
||||||
|
|
||||||
%%------------------------------------------------------------------------------
|
%%------------------------------------------------------------------------------
|
||||||
%% Testcases
|
%% Testcases
|
||||||
%%------------------------------------------------------------------------------
|
%%------------------------------------------------------------------------------
|
||||||
|
@ -519,14 +530,18 @@ t_write_timeout(Config) ->
|
||||||
2 * Timeout
|
2 * Timeout
|
||||||
),
|
),
|
||||||
emqx_common_test_helpers:with_failure(timeout, ProxyName, ProxyHost, ProxyPort, fun() ->
|
emqx_common_test_helpers:with_failure(timeout, ProxyName, ProxyHost, ProxyPort, fun() ->
|
||||||
|
Name = ?config(mysql_name, Config),
|
||||||
|
BridgeType = ?config(mysql_bridge_type, Config),
|
||||||
|
ResourceID = emqx_bridge_resource:resource_id(BridgeType, Name),
|
||||||
|
|
||||||
case QueryMode of
|
case QueryMode of
|
||||||
sync ->
|
sync ->
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
{error, {resource_error, #{reason := timeout}}},
|
{error, {resource_error, #{reason := timeout}}},
|
||||||
query_resource(Config, {send_message, SentData, [], Timeout})
|
query_resource(Config, {ResourceID, SentData, [], Timeout})
|
||||||
);
|
);
|
||||||
async ->
|
async ->
|
||||||
query_resource(Config, {send_message, SentData, [], Timeout}),
|
query_resource(Config, {ResourceID, SentData, [], Timeout}),
|
||||||
ok
|
ok
|
||||||
end,
|
end,
|
||||||
ok
|
ok
|
||||||
|
@ -703,7 +718,10 @@ t_uninitialized_prepared_statement(Config) ->
|
||||||
),
|
),
|
||||||
Val = integer_to_binary(erlang:unique_integer()),
|
Val = integer_to_binary(erlang:unique_integer()),
|
||||||
SentData = #{payload => Val, timestamp => 1668602148000},
|
SentData = #{payload => Val, timestamp => 1668602148000},
|
||||||
unprepare(Config, send_message),
|
Name = ?config(mysql_name, Config),
|
||||||
|
BridgeType = ?config(mysql_bridge_type, Config),
|
||||||
|
ResourceID = emqx_bridge_v2:id(BridgeType, Name),
|
||||||
|
unprepare(Config, ResourceID),
|
||||||
?check_trace(
|
?check_trace(
|
||||||
begin
|
begin
|
||||||
{Res, {ok, _}} =
|
{Res, {ok, _}} =
|
||||||
|
@ -721,7 +739,7 @@ t_uninitialized_prepared_statement(Config) ->
|
||||||
#{?snk_kind := mysql_connector_prepare_query_failed, error := not_prepared},
|
#{?snk_kind := mysql_connector_prepare_query_failed, error := not_prepared},
|
||||||
#{
|
#{
|
||||||
?snk_kind := mysql_connector_on_query_prepared_sql,
|
?snk_kind := mysql_connector_on_query_prepared_sql,
|
||||||
type_or_key := send_message
|
type_or_key := ResourceID
|
||||||
},
|
},
|
||||||
Trace
|
Trace
|
||||||
)
|
)
|
||||||
|
@ -736,33 +754,57 @@ t_uninitialized_prepared_statement(Config) ->
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
t_missing_table(Config) ->
|
t_missing_table(Config) ->
|
||||||
|
QueryMode = ?config(query_mode, Config),
|
||||||
Name = ?config(mysql_name, Config),
|
Name = ?config(mysql_name, Config),
|
||||||
BridgeType = ?config(mysql_bridge_type, Config),
|
         BridgeType = ?config(mysql_bridge_type, Config),
-        ResourceID = emqx_bridge_resource:resource_id(BridgeType, Name),
 
         ?check_trace(
             begin
                 connect_and_drop_table(Config),
                 ?assertMatch({ok, _}, create_bridge(Config)),
+                BridgeID = emqx_bridge_resource:bridge_id(BridgeType, Name),
                 ?retry(
                     _Sleep = 1_000,
                     _Attempts = 20,
                     ?assertMatch(
                         {ok, Status} when Status == connecting orelse Status == disconnected,
-                        emqx_resource_manager:health_check(ResourceID)
+                        request_api_status(BridgeID)
                     )
                 ),
                 Val = integer_to_binary(erlang:unique_integer()),
                 SentData = #{payload => Val, timestamp => 1668602148000},
-                Timeout = 1000,
-                ?assertMatch(
-                    {error, {resource_error, #{reason := unhealthy_target}}},
-                    query_resource(Config, {send_message, SentData, [], Timeout})
-                ),
+                ResourceID = emqx_bridge_v2:id(BridgeType, Name),
+                Request = {ResourceID, SentData},
+                Result =
+                    case QueryMode of
+                        sync ->
+                            query_resource(Config, Request);
+                        async ->
+                            {_, Ref} = query_resource_async(Config, Request),
+                            {ok, Res} = receive_result(Ref, 2_000),
+                            Res
+                    end,
+
+                BatchSize = ?config(batch_size, Config),
+                IsBatch = BatchSize > 1,
+                case IsBatch of
+                    true ->
+                        ?assertMatch(
+                            {error,
+                                {unrecoverable_error,
+                                    {1146, <<"42S02">>, <<"Table 'mqtt.mqtt_test' doesn't exist">>}}},
+                            Result
+                        );
+                    false ->
+                        ?assertMatch(
+                            {error, undefined_table},
+                            Result
+                        )
+                end,
                 ok
             end,
             fun(Trace) ->
-                ?assertMatch([_, _, _], ?of_kind(mysql_undefined_table, Trace)),
+                ?assertMatch([_ | _], ?of_kind(mysql_undefined_table, Trace)),
                 ok
             end
         ).
@@ -770,9 +812,9 @@ t_missing_table(Config) ->
 t_table_removed(Config) ->
     Name = ?config(mysql_name, Config),
     BridgeType = ?config(mysql_bridge_type, Config),
-    ResourceID = emqx_bridge_resource:resource_id(BridgeType, Name),
     connect_and_create_table(Config),
     ?assertMatch({ok, _}, create_bridge(Config)),
+    ResourceID = emqx_bridge_resource:resource_id(BridgeType, Name),
     ?retry(
         _Sleep = 1_000,
         _Attempts = 20,
@@ -782,17 +824,17 @@ t_table_removed(Config) ->
     Val = integer_to_binary(erlang:unique_integer()),
     SentData = #{payload => Val, timestamp => 1668602148000},
     Timeout = 1000,
+    ActionID = emqx_bridge_v2:id(BridgeType, Name),
     ?assertMatch(
         {error,
             {unrecoverable_error, {1146, <<"42S02">>, <<"Table 'mqtt.mqtt_test' doesn't exist">>}}},
-        sync_query_resource(Config, {send_message, SentData, [], Timeout})
+        sync_query_resource(Config, {ActionID, SentData, [], Timeout})
     ),
     ok.
 
 t_nested_payload_template(Config) ->
     Name = ?config(mysql_name, Config),
     BridgeType = ?config(mysql_bridge_type, Config),
-    ResourceID = emqx_bridge_resource:resource_id(BridgeType, Name),
     Value = integer_to_binary(erlang:unique_integer()),
     {ok, _} = create_bridge(
         Config,
@@ -803,6 +845,7 @@ t_nested_payload_template(Config) ->
         }
     ),
     {ok, #{<<"from">> := [Topic]}} = create_rule_and_action_http(Config),
+    ResourceID = emqx_bridge_resource:resource_id(BridgeType, Name),
     ?retry(
         _Sleep = 1_000,
         _Attempts = 20,
@@ -64,8 +64,7 @@ fields(action_parameters) ->
             binary(),
             #{desc => ?DESC("sql_template"), default => default_sql(), format => <<"sql">>}
         )}
-    ] ++
-        emqx_connector_schema_lib:prepare_statement_fields();
+    ];
 fields(pgsql_action) ->
     emqx_bridge_v2_schema:make_producer_action_schema(
         hoconsc:mk(
@@ -23,7 +23,7 @@
 -type state() :: #{
     pulsar_client_id := pulsar_client_id(),
     producers := pulsar_producers:producers(),
-    sync_timeout := infinity | time:time(),
+    sync_timeout := erlang:timeout(),
     message_template := message_template()
 }.
 -type buffer_mode() :: memory | disk | hybrid.
@@ -43,8 +43,8 @@
     bridge_name := atom(),
     buffer := #{
         mode := buffer_mode(),
-        per_partition_limit := emqx_schema:byte_size(),
-        segment_bytes := emqx_schema:byte_size(),
+        per_partition_limit := emqx_schema:bytesize(),
+        segment_bytes := emqx_schema:bytesize(),
         memory_overload_protection := boolean()
     },
     compression := compression_mode(),
@@ -120,6 +120,7 @@ fields("get_sentinel") ->
     method_fields(get, redis_sentinel);
 fields("get_cluster") ->
     method_fields(get, redis_cluster);
+%% old bridge v1 schema
 fields(Type) when
     Type == redis_single orelse Type == redis_sentinel orelse Type == redis_cluster
 ->
@@ -147,7 +148,7 @@ redis_bridge_common_fields(Type) ->
         {local_topic, mk(binary(), #{required => false, desc => ?DESC("desc_local_topic")})}
         | fields(action_parameters)
     ] ++
-        resource_fields(Type).
+        v1_resource_fields(Type).
 
 connector_fields(Type) ->
     emqx_redis:fields(Type).
@@ -158,7 +159,7 @@ type_name_fields(Type) ->
         {name, mk(binary(), #{required => true, desc => ?DESC("desc_name")})}
     ].
 
-resource_fields(Type) ->
+v1_resource_fields(Type) ->
     [
         {resource_opts,
             mk(
@@ -43,7 +43,12 @@ bridge_v1_config_to_action_config(BridgeV1Config, ConnectorName) ->
     ActionTopLevelKeys = schema_keys(?SCHEMA_MODULE:fields(redis_action)),
     ActionParametersKeys = schema_keys(emqx_bridge_redis:fields(action_parameters)),
     ActionKeys = ActionTopLevelKeys ++ ActionParametersKeys,
-    ActionConfig = make_config_map(ActionKeys, ActionParametersKeys, BridgeV1Config),
+    ActionConfig0 = make_config_map(ActionKeys, ActionParametersKeys, BridgeV1Config),
+    ActionConfig = emqx_utils_maps:update_if_present(
+        <<"resource_opts">>,
+        fun emqx_bridge_v2_schema:project_to_actions_resource_opts/1,
+        ActionConfig0
+    ),
     ActionConfig#{<<"connector">> => ConnectorName}.
 
 bridge_v1_config_to_connector_config(BridgeV1Config) ->
@@ -57,7 +62,12 @@ bridge_v1_config_to_connector_config(BridgeV1Config) ->
         (maps:keys(BridgeV1Config) -- (ActionKeys -- ConnectorTopLevelKeys)) ++
             [<<"redis_type">>],
     ConnectorParametersKeys = ConnectorKeys -- ConnectorTopLevelKeys,
-    make_config_map(ConnectorKeys, ConnectorParametersKeys, BridgeV1Config).
+    ConnectorConfig0 = make_config_map(ConnectorKeys, ConnectorParametersKeys, BridgeV1Config),
+    emqx_utils_maps:update_if_present(
+        <<"resource_opts">>,
+        fun emqx_connector_schema:project_to_connector_resource_opts/1,
+        ConnectorConfig0
+    ).
 
 %%------------------------------------------------------------------------------------------
 %% Internal helper fns
@@ -51,8 +51,10 @@ fields("config_connector") ->
         )}
     ] ++
         emqx_redis:redis_fields() ++
-        emqx_connector_schema:resource_opts_ref(?MODULE, resource_opts) ++
+        emqx_connector_schema:resource_opts_ref(?MODULE, connector_resource_opts) ++
         emqx_connector_schema_lib:ssl_fields();
+fields(connector_resource_opts) ->
+    emqx_connector_schema:resource_opts_fields();
 fields(action) ->
     {?TYPE,
         ?HOCON(
@@ -74,15 +76,7 @@ fields(redis_action) ->
             }
         )
     ),
-    ResOpts =
-        {resource_opts,
-            ?HOCON(
-                ?R_REF(resource_opts),
-                #{
-                    required => true,
-                    desc => ?DESC(emqx_resource_schema, resource_opts)
-                }
-            )},
+    [ResOpts] = emqx_connector_schema:resource_opts_ref(?MODULE, action_resource_opts),
     RedisType =
         {redis_type,
             ?HOCON(
@@ -90,8 +84,8 @@ fields(redis_action) ->
                 #{required => true, desc => ?DESC(redis_type)}
             )},
     [RedisType | lists:keyreplace(resource_opts, 1, Schema, ResOpts)];
-fields(resource_opts) ->
-    emqx_resource_schema:create_opts([
+fields(action_resource_opts) ->
+    emqx_bridge_v2_schema:resource_opts_fields([
         {batch_size, #{desc => ?DESC(batch_size)}},
         {batch_time, #{desc => ?DESC(batch_time)}}
     ]);
@@ -124,6 +118,10 @@ desc(redis_action) ->
     ?DESC(redis_action);
 desc(resource_opts) ->
     ?DESC(emqx_resource_schema, resource_opts);
+desc(connector_resource_opts) ->
+    ?DESC(emqx_resource_schema, "resource_opts");
+desc(action_resource_opts) ->
+    ?DESC(emqx_resource_schema, "resource_opts");
 desc(_Name) ->
     undefined.
 
@@ -248,20 +246,19 @@ connector_example(RedisType, post) ->
     maps:merge(
         connector_example(RedisType, put),
         #{
-            type => <<"redis_single_producer">>,
+            type => <<"redis">>,
             name => <<"my_connector">>
         }
     );
 connector_example(RedisType, put) ->
     #{
         enable => true,
-        desc => <<"My redis ", (atom_to_binary(RedisType))/binary, " connector">>,
+        description => <<"My redis ", (atom_to_binary(RedisType))/binary, " connector">>,
         parameters => connector_parameter(RedisType),
         pool_size => 8,
         database => 1,
         username => <<"test">>,
         password => <<"******">>,
-        auto_reconnect => true,
         ssl => #{enable => false}
     }.
@@ -123,10 +123,19 @@ wait_for_ci_redis(Checks, Config) ->
     ProxyHost = os:getenv("PROXY_HOST", ?PROXY_HOST),
     ProxyPort = list_to_integer(os:getenv("PROXY_PORT", ?PROXY_PORT)),
     emqx_common_test_helpers:reset_proxy(ProxyHost, ProxyPort),
-    ok = emqx_common_test_helpers:start_apps([
-        emqx_conf, emqx_resource, emqx_connector, emqx_bridge, emqx_rule_engine
-    ]),
+    Apps = emqx_cth_suite:start(
+        [
+            emqx,
+            emqx_conf,
+            emqx_resource,
+            emqx_connector,
+            emqx_bridge,
+            emqx_rule_engine
+        ],
+        #{work_dir => emqx_cth_suite:work_dir(Config)}
+    ),
     [
+        {apps, Apps},
         {proxy_host, ProxyHost},
         {proxy_port, ProxyPort}
         | Config
@@ -143,11 +152,9 @@ redis_checks() ->
             1
     end.
 
-end_per_suite(_Config) ->
-    ok = emqx_bridge_v2_SUITE:delete_all_bridges_and_connectors(),
-    ok = emqx_common_test_helpers:stop_apps([emqx_conf]),
-    ok = emqx_connector_test_helpers:stop_apps([emqx_rule_engine, emqx_bridge, emqx_resource]),
-    _ = application:stop(emqx_connector),
+end_per_suite(Config) ->
+    Apps = ?config(apps, Config),
+    emqx_cth_suite:stop(Apps),
     ok.
 
 init_per_testcase(Testcase, Config0) ->
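The hunk above moves the Redis bridge suite from the old emqx_common_test_helpers start/stop pair to emqx_cth_suite. A minimal sketch of that lifecycle pattern, using only the calls visible in the diff; the app list and function names below are placeholders, not this suite's actual code:

init_per_suite_sketch(Config) ->
    %% Start the application tree once per suite, keyed to this suite's work dir.
    Apps = emqx_cth_suite:start([emqx_conf], #{work_dir => emqx_cth_suite:work_dir(Config)}),
    [{apps, Apps} | Config].

end_per_suite_sketch(Config) ->
    %% Stop exactly the apps that were started for this suite.
    Apps = proplists:get_value(apps, Config),
    emqx_cth_suite:stop(Apps).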
@@ -31,7 +31,7 @@
     socket := inet:socket(),
     frame_state :=
         undefined
-        | emqx_bridge_sysk_frame:state(),
+        | emqx_bridge_syskeeper_frame:state(),
     buffer := binary(),
     conf := map()
 }.
|
||||||
_ = mria:clear_table(?CLUSTER_COMMIT),
|
_ = mria:clear_table(?CLUSTER_COMMIT),
|
||||||
_ = mria:clear_table(?CLUSTER_MFA),
|
_ = mria:clear_table(?CLUSTER_MFA),
|
||||||
{reply, ok, State, {continue, ?CATCH_UP}};
|
{reply, ok, State, {continue, ?CATCH_UP}};
|
||||||
handle_call(?INITIATE(MFA), _From, State = #{node := Node}) ->
|
handle_call(?INITIATE(MFA), _From, State) ->
|
||||||
case transaction(fun ?MODULE:init_mfa/2, [Node, MFA]) of
|
do_initiate(MFA, State, 1, #{});
|
||||||
{atomic, {ok, TnxId, Result}} ->
|
|
||||||
{reply, {ok, TnxId, Result}, State, {continue, ?CATCH_UP}};
|
|
||||||
{aborted, Error} ->
|
|
||||||
{reply, {init_failure, Error}, State, {continue, ?CATCH_UP}}
|
|
||||||
end;
|
|
||||||
handle_call(skip_failed_commit, _From, State = #{node := Node}) ->
|
handle_call(skip_failed_commit, _From, State = #{node := Node}) ->
|
||||||
Timeout = catch_up(State, true),
|
Timeout = catch_up(State, true),
|
||||||
{atomic, LatestId} = transaction(fun ?MODULE:get_node_tnx_id/1, [Node]),
|
{atomic, LatestId} = transaction(fun ?MODULE:get_node_tnx_id/1, [Node]),
|
||||||
|
@ -465,11 +460,40 @@ get_oldest_mfa_id() ->
|
||||||
Id -> Id
|
Id -> Id
|
||||||
end.
|
end.
|
||||||
|
|
||||||
|
do_initiate(_MFA, State, Count, Failure) when Count > 10 ->
|
||||||
|
%% refuse to initiate cluster call from this node
|
||||||
|
%% because it's likely that the caller is based on
|
||||||
|
%% a stale view event we retry 10 time.
|
||||||
|
Error = stale_view_of_cluster_msg(Failure, Count),
|
||||||
|
{reply, {init_failure, Error}, State, {continue, ?CATCH_UP}};
|
||||||
|
do_initiate(MFA, State = #{node := Node}, Count, Failure0) ->
|
||||||
|
case transaction(fun ?MODULE:init_mfa/2, [Node, MFA]) of
|
||||||
|
{atomic, {ok, TnxId, Result}} ->
|
||||||
|
{reply, {ok, TnxId, Result}, State, {continue, ?CATCH_UP}};
|
||||||
|
{atomic, {retry, Failure1}} when Failure0 =:= Failure1 ->
|
||||||
|
%% Useless retry, so we return early.
|
||||||
|
Error = stale_view_of_cluster_msg(Failure0, Count),
|
||||||
|
{reply, {init_failure, Error}, State, {continue, ?CATCH_UP}};
|
||||||
|
{atomic, {retry, Failure1}} ->
|
||||||
|
catch_up(State),
|
||||||
|
do_initiate(MFA, State, Count + 1, Failure1);
|
||||||
|
{aborted, Error} ->
|
||||||
|
{reply, {init_failure, Error}, State, {continue, ?CATCH_UP}}
|
||||||
|
end.
|
||||||
|
|
||||||
|
stale_view_of_cluster_msg(Meta, Count) ->
|
||||||
|
Reason = Meta#{
|
||||||
|
msg => stale_view_of_cluster_state,
|
||||||
|
retry_times => Count
|
||||||
|
},
|
||||||
|
?SLOG(warning, Reason),
|
||||||
|
Reason.
|
||||||
|
|
||||||
%% The entry point of a config change transaction.
|
%% The entry point of a config change transaction.
|
||||||
init_mfa(Node, MFA) ->
|
init_mfa(Node, MFA) ->
|
||||||
mnesia:write_lock_table(?CLUSTER_MFA),
|
mnesia:write_lock_table(?CLUSTER_MFA),
|
||||||
LatestId = get_cluster_tnx_id(),
|
LatestId = get_cluster_tnx_id(),
|
||||||
MyTnxId = get_node_tnx_id(node()),
|
MyTnxId = get_node_tnx_id(Node),
|
||||||
case MyTnxId =:= LatestId of
|
case MyTnxId =:= LatestId of
|
||||||
true ->
|
true ->
|
||||||
TnxId = LatestId + 1,
|
TnxId = LatestId + 1,
|
||||||
|
@ -486,16 +510,8 @@ init_mfa(Node, MFA) ->
|
||||||
{false, Error} -> mnesia:abort(Error)
|
{false, Error} -> mnesia:abort(Error)
|
||||||
end;
|
end;
|
||||||
false ->
|
false ->
|
||||||
%% refuse to initiate cluster call from this node
|
Meta = #{cluster_tnx_id => LatestId, node_tnx_id => MyTnxId},
|
||||||
%% because it's likely that the caller is based on
|
{retry, Meta}
|
||||||
%% a stale view.
|
|
||||||
Reason = #{
|
|
||||||
msg => stale_view_of_cluster_state,
|
|
||||||
cluster_tnx_id => LatestId,
|
|
||||||
node_tnx_id => MyTnxId
|
|
||||||
},
|
|
||||||
?SLOG(warning, Reason),
|
|
||||||
mnesia:abort({error, Reason})
|
|
||||||
end.
|
end.
|
||||||
|
|
||||||
transaction(Func, Args) ->
|
transaction(Func, Args) ->
|
||||||
|
|
|
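For orientation only: the retry shape introduced by do_initiate/4 above, reduced to a plain function. The mnesia transaction is stood in for by a caller-supplied fun; the 10-attempt cap and the {retry, Meta} convention mirror the diff, everything else is illustrative.

-module(initiate_retry_sketch).
-export([initiate/1]).

%% Run Fun until it succeeds, giving up after 10 attempts that each
%% reported a stale view of the cluster state.
initiate(Fun) ->
    initiate(Fun, 1).

initiate(_Fun, Attempt) when Attempt > 10 ->
    {error, stale_view_of_cluster_state};
initiate(Fun, Attempt) ->
    case Fun() of
        {ok, Result} -> {ok, Result};
        {retry, _Meta} -> initiate(Fun, Attempt + 1)
    end.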
@@ -43,12 +43,12 @@
 
 %% API
 %% @doc Adds a new config handler to emqx_config_handler.
--spec add_handler(emqx_config:config_key_path(), module()) -> ok.
+-spec add_handler(emqx_utils_maps:config_key_path(), module()) -> ok.
 add_handler(ConfKeyPath, HandlerName) ->
     emqx_config_handler:add_handler(ConfKeyPath, HandlerName).
 
 %% @doc remove config handler from emqx_config_handler.
--spec remove_handler(emqx_config:config_key_path()) -> ok.
+-spec remove_handler(emqx_utils_maps:config_key_path()) -> ok.
 remove_handler(ConfKeyPath) ->
     emqx_config_handler:remove_handler(ConfKeyPath).
@@ -35,9 +35,10 @@ all() ->
     t_commit_ok_apply_fail_on_other_node_then_recover,
     t_del_stale_mfa,
     t_skip_failed_commit,
-    t_fast_forward_commit
+    t_fast_forward_commit,
+    t_commit_concurrency
 ].
-suite() -> [{timetrap, {minutes, 3}}].
+suite() -> [{timetrap, {minutes, 5}}].
 groups() -> [].
 
 init_per_suite(Config) ->
@@ -63,6 +64,7 @@ end_per_suite(_Config) ->
     ok.
 
 init_per_testcase(_TestCase, Config) ->
+    stop(),
     start(),
     Config.
 
@@ -119,17 +121,101 @@ t_commit_crash_test(_Config) ->
 t_commit_ok_but_apply_fail_on_other_node(_Config) ->
     emqx_cluster_rpc:reset(),
     {atomic, []} = emqx_cluster_rpc:status(),
-    MFA = {M, F, A} = {?MODULE, failed_on_node, [erlang:whereis(?NODE1)]},
+    Pid = self(),
+    {BaseM, BaseF, BaseA} = {?MODULE, echo, [Pid, test]},
+    {ok, _TnxId, ok} = multicall(BaseM, BaseF, BaseA),
+    ?assertEqual(ok, receive_msg(3, test)),
+
+    {M, F, A} = {?MODULE, failed_on_node, [erlang:whereis(?NODE1)]},
     {ok, _, ok} = multicall(M, F, A, 1, 1000),
-    {atomic, [Status]} = emqx_cluster_rpc:status(),
-    ?assertEqual(MFA, maps:get(mfa, Status)),
-    ?assertEqual(node(), maps:get(node, Status)),
+    {atomic, AllStatus} = emqx_cluster_rpc:status(),
+    Node = node(),
+    ?assertEqual(
+        [
+            {1, {Node, emqx_cluster_rpc2}},
+            {1, {Node, emqx_cluster_rpc3}},
+            {2, Node}
+        ],
+        lists:sort([{T, N} || #{tnx_id := T, node := N} <- AllStatus])
+    ),
     erlang:send(?NODE2, test),
     Call = emqx_cluster_rpc:make_initiate_call_req(M, F, A),
-    Res = gen_server:call(?NODE2, Call),
-    ?assertEqual({init_failure, "MFA return not ok"}, Res),
+    Res1 = gen_server:call(?NODE2, Call),
+    Res2 = gen_server:call(?NODE3, Call),
+    %% Node2 is retrying on tnx_id 1 and should not run the next MFA.
+    ?assertEqual(
+        {init_failure, #{
+            msg => stale_view_of_cluster_state,
+            retry_times => 2,
+            cluster_tnx_id => 2,
+            node_tnx_id => 1
+        }},
+        Res1
+    ),
+    ?assertEqual(Res1, Res2),
     ok.
 
+t_commit_concurrency(_Config) ->
+    emqx_cluster_rpc:reset(),
+    {atomic, []} = emqx_cluster_rpc:status(),
+    Pid = self(),
+    {BaseM, BaseF, BaseA} = {?MODULE, echo, [Pid, test]},
+    {ok, _TnxId, ok} = multicall(BaseM, BaseF, BaseA),
+    ?assertEqual(ok, receive_msg(3, test)),
+
+    %% call concurrently without stale tnx_id error
+    Workers = lists:seq(1, 256),
+    lists:foreach(
+        fun(Seq) ->
+            {EchoM, EchoF, EchoA} = {?MODULE, echo_delay, [Pid, Seq]},
+            Call = emqx_cluster_rpc:make_initiate_call_req(EchoM, EchoF, EchoA),
+            spawn_link(fun() ->
+                ?assertMatch({ok, _, ok}, gen_server:call(?NODE1, Call, infinity))
+            end),
+            spawn_link(fun() ->
+                ?assertMatch({ok, _, ok}, gen_server:call(?NODE2, Call, infinity))
+            end),
+            spawn_link(fun() ->
+                ?assertMatch({ok, _, ok}, gen_server:call(?NODE3, Call, infinity))
+            end)
+        end,
+        Workers
+    ),
+    %% receive seq msg in order
+    List = lists:sort(receive_seq_msg([])),
+    ?assertEqual(256 * 3 * 3, length(List), List),
+    {atomic, Status} = emqx_cluster_rpc:status(),
+    lists:map(
+        fun(#{tnx_id := TnxId} = S) ->
+            ?assertEqual(256 * 3 + 1, TnxId, S)
+        end,
+        Status
+    ),
+    AllMsgIndex = lists:flatten(lists:duplicate(9, Workers)),
+    Result =
+        lists:foldl(
+            fun(Index, Acc) ->
+                ?assertEqual(true, lists:keymember(Index, 1, Acc), {Index, Acc}),
+                lists:keydelete(Index, 1, Acc)
+            end,
+            List,
+            AllMsgIndex
+        ),
+    ?assertEqual([], Result),
+    receive
+        Unknown -> throw({receive_unknown_msg, Unknown})
+    after 1000 -> ok
+    end,
+    ok.
+
+receive_seq_msg(Acc) ->
+    receive
+        {msg, Seq, Time, Pid} ->
+            receive_seq_msg([{Seq, Time, Pid} | Acc])
+    after 3000 ->
+        Acc
+    end.
+
 t_catch_up_status_handle_next_commit(_Config) ->
     emqx_cluster_rpc:reset(),
     {atomic, []} = emqx_cluster_rpc:status(),
@@ -296,9 +382,8 @@ stop() ->
                 erlang:exit(P, kill)
             end
         end
-     || N <- [?NODE1, ?NODE2, ?NODE3]
-    ],
-    gen_server:stop(emqx_cluster_rpc_cleaner, normal, 5000).
+     || N <- [?NODE1, ?NODE2, ?NODE3, emqx_cluster_rpc_cleaner]
+    ].
 
 receive_msg(0, _Msg) ->
     ok;
@@ -306,7 +391,7 @@ receive_msg(Count, Msg) when Count > 0 ->
     receive
         Msg ->
             receive_msg(Count - 1, Msg)
-    after 800 ->
+    after 1000 ->
         timeout
     end.
 
@@ -314,6 +399,11 @@ echo(Pid, Msg) ->
     erlang:send(Pid, Msg),
     ok.
 
+echo_delay(Pid, Msg) ->
+    timer:sleep(rand:uniform(150)),
+    erlang:send(Pid, {msg, Msg, erlang:system_time(), self()}),
+    ok.
+
 failed_on_node(Pid) ->
     case Pid =:= self() of
         true -> ok;
@@ -358,7 +358,7 @@ authn_validations_test() ->
     Headers0 = authentication_headers(Res0),
     ?assertEqual(<<"application/json">>, maps:get(<<"content-type">>, Headers0)),
     %% accept from converter
-    ?assertEqual(<<"application/json">>, maps:get(<<"accept">>, Headers0)),
+    ?assertNot(maps:is_key(<<"accept">>, Headers0)),
 
     OKHttp = to_bin(?BASE_AUTHN_ARRAY, [post, false, <<"http://127.0.0.1:8080">>]),
     Conf1 = <<BaseConf/binary, OKHttp/binary>>,
@@ -366,7 +366,7 @@ authn_validations_test() ->
     {_, Res1} = hocon_tconf:map_translate(emqx_conf_schema, ConfMap1, #{format => richmap}),
     Headers1 = authentication_headers(Res1),
     ?assertEqual(<<"application/json">>, maps:get(<<"content-type">>, Headers1), Headers1),
-    ?assertEqual(<<"application/json">>, maps:get(<<"accept">>, Headers1), Headers1),
+    ?assertNot(maps:is_key(<<"accept">>, Headers1)),
 
     DisableSSLWithHttps = to_bin(?BASE_AUTHN_ARRAY, [post, false, <<"https://127.0.0.1:8080">>]),
     Conf2 = <<BaseConf/binary, DisableSSLWithHttps/binary>>,
@@ -382,16 +382,16 @@ authn_validations_test() ->
     {_, Res3} = hocon_tconf:map_translate(emqx_conf_schema, ConfMap3, #{format => richmap}),
     Headers3 = authentication_headers(Res3),
     %% remove the content-type header when get method
-    ?assertEqual(false, maps:is_key(<<"content-type">>, Headers3), Headers3),
-    ?assertEqual(<<"application/json">>, maps:get(<<"accept">>, Headers3), Headers3),
+    ?assertNot(maps:is_key(<<"content-type">>, Headers3), Headers3),
+    ?assertNot(maps:is_key(<<"accept">>, Headers3), Headers3),
 
     BadHeaderWithTuple = binary:replace(BadHeader, [<<"[">>, <<"]">>], <<"">>, [global]),
     Conf4 = <<BaseConf/binary, BadHeaderWithTuple/binary>>,
     {ok, ConfMap4} = hocon:binary(Conf4, #{format => richmap}),
     {_, Res4} = hocon_tconf:map_translate(emqx_conf_schema, ConfMap4, #{}),
     Headers4 = authentication_headers(Res4),
-    ?assertEqual(false, maps:is_key(<<"content-type">>, Headers4), Headers4),
-    ?assertEqual(<<"application/json">>, maps:get(<<"accept">>, Headers4), Headers4),
+    ?assertNot(maps:is_key(<<"content-type">>, Headers4), Headers4),
+    ?assertNot(maps:is_key(<<"accept">>, Headers4), Headers4),
     ok.
 
 %% erlfmt-ignore
@@ -589,14 +589,24 @@ pick_connectors_by_id(Type, Name, ConnectorsAllNodes) ->
 format_connector_info([FirstConnector | _] = Connectors) ->
     Res = maps:remove(node, FirstConnector),
     NodeStatus = node_status(Connectors),
-    redact(Res#{
+    StatusReason = first_status_reason(Connectors),
+    Info0 = Res#{
         status => aggregate_status(NodeStatus),
         node_status => NodeStatus
-    }).
+    },
+    Info = emqx_utils_maps:put_if(Info0, status_reason, StatusReason, StatusReason =/= undefined),
+    redact(Info).
 
 node_status(Connectors) ->
     [maps:with([node, status, status_reason], B) || B <- Connectors].
 
+first_status_reason(Connectors) ->
+    StatusReasons = [Reason || #{status_reason := Reason} <- Connectors, Reason =/= undefined],
+    case StatusReasons of
+        [Reason | _] -> Reason;
+        _ -> undefined
+    end.
+
 aggregate_status(AllStatus) ->
     Head = fun([A | _]) -> A end,
     HeadVal = maps:get(status, Head(AllStatus), connecting),
@@ -611,10 +621,11 @@ format_resource(
         type := Type,
         name := ConnectorName,
         raw_config := RawConf0,
-        resource_data := ResourceData
+        resource_data := ResourceData0
     },
     Node
 ) ->
+    ResourceData = lookup_channels(Type, ConnectorName, ResourceData0),
     RawConf = fill_defaults(Type, RawConf0),
     redact(
         maps:merge(
@@ -627,14 +638,23 @@ format_resource(
         )
     ).
 
+lookup_channels(Type, Name, ResourceData0) ->
+    ConnectorResId = emqx_connector_resource:resource_id(Type, Name),
+    case emqx_resource:get_channels(ConnectorResId) of
+        {ok, Channels} ->
+            ResourceData0#{channels => maps:from_list(Channels)};
+        {error, not_found} ->
+            ResourceData0#{channels => #{}}
+    end.
+
 format_resource_data(ResData) ->
-    maps:fold(fun format_resource_data/3, #{}, maps:with([status, error, added_channels], ResData)).
+    maps:fold(fun format_resource_data/3, #{}, maps:with([status, error, channels], ResData)).
 
 format_resource_data(error, undefined, Result) ->
     Result;
 format_resource_data(error, Error, Result) ->
     Result#{status_reason => emqx_utils:readable_error_msg(Error)};
-format_resource_data(added_channels, Channels, Result) ->
+format_resource_data(channels, Channels, Result) ->
     Result#{actions => lists:map(fun format_action/1, maps:keys(Channels))};
 format_resource_data(K, V, Result) ->
     Result#{K => V}.
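A standalone sketch (not the module's code) of the aggregation the hunk above performs: pick the first per-node status_reason, and attach it to the formatted connector info only when at least one node reported a reason.

%% Connectors is a list of per-node maps such as #{node => N, status => S, status_reason => R}.
format_sketch(Info0, Connectors) ->
    Reasons = [R || #{status_reason := R} <- Connectors, R =/= undefined],
    case Reasons of
        [Reason | _] -> Info0#{status_reason => Reason};
        [] -> Info0
    end.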
@@ -48,7 +48,7 @@ init([]) ->
 %% `emqx_connector_jwt_sup:ensure_jwt/1' to ensure that a JWT has
 %% been stored, if synchronization is needed.
 -spec ensure_worker_present(worker_id(), map()) ->
-    {ok, supervisor:child()} | {error, term()}.
+    {ok, pid()} | {error, term()}.
 ensure_worker_present(Id, Config) ->
     ChildSpec = jwt_worker_child_spec(Id, Config),
     case supervisor:start_child(?MODULE, ChildSpec) of
@@ -42,7 +42,7 @@ list_connectors_on_nodes(Nodes) ->
 -type key() :: atom() | binary() | [byte()].
 
 -spec lookup_from_all_nodes([node()], key(), key()) ->
-    emqx_rpc:erpc_multicall().
+    emqx_rpc:erpc_multicall(term()).
 lookup_from_all_nodes(Nodes, ConnectorType, ConnectorName) ->
     erpc:multicall(
         Nodes,
@@ -64,7 +64,7 @@ start_connector_to_node(Node, ConnectorType, ConnectorName) ->
     ).
 
 -spec start_connectors_to_all_nodes([node()], key(), key()) ->
-    emqx_rpc:erpc_multicall().
+    emqx_rpc:erpc_multicall(term()).
 start_connectors_to_all_nodes(Nodes, ConnectorType, ConnectorName) ->
     erpc:multicall(
         Nodes,
@@ -34,6 +34,8 @@ resource_type(matrix) ->
     emqx_postgresql;
 resource_type(mongodb) ->
     emqx_bridge_mongodb_connector;
+resource_type(mysql) ->
+    emqx_bridge_mysql_connector;
 resource_type(pgsql) ->
     emqx_postgresql;
 resource_type(syskeeper_forwarder) ->
@@ -94,6 +96,14 @@ connector_structs() ->
                 required => false
             }
         )},
+        {matrix,
+            mk(
+                hoconsc:map(name, ref(emqx_bridge_matrix, "config_connector")),
+                #{
+                    desc => <<"Matrix Connector Config">>,
+                    required => false
+                }
+            )},
         {mongodb,
             mk(
                 hoconsc:map(name, ref(emqx_bridge_mongodb, "config_connector")),
@@ -102,6 +112,30 @@ connector_structs() ->
                 required => false
             }
         )},
+        {mysql,
+            mk(
+                hoconsc:map(name, ref(emqx_bridge_mysql, "config_connector")),
+                #{
+                    desc => <<"MySQL Connector Config">>,
+                    required => false
+                }
+            )},
+        {pgsql,
+            mk(
+                hoconsc:map(name, ref(emqx_bridge_pgsql, "config_connector")),
+                #{
+                    desc => <<"PostgreSQL Connector Config">>,
+                    required => false
+                }
+            )},
+        {redis,
+            mk(
+                hoconsc:map(name, ref(emqx_bridge_redis_schema, "config_connector")),
+                #{
+                    desc => <<"Redis Connector Config">>,
+                    required => false
+                }
+            )},
         {syskeeper_forwarder,
             mk(
                 hoconsc:map(name, ref(emqx_bridge_syskeeper_connector, config)),
@@ -118,14 +152,6 @@ connector_structs() ->
                 required => false
             }
         )},
-        {pgsql,
-            mk(
-                hoconsc:map(name, ref(emqx_bridge_pgsql, "config_connector")),
-                #{
-                    desc => <<"PostgreSQL Connector Config">>,
-                    required => false
-                }
-            )},
         {timescale,
             mk(
                 hoconsc:map(name, ref(emqx_bridge_timescale, "config_connector")),
@@ -133,22 +159,6 @@ connector_structs() ->
                     desc => <<"Timescale Connector Config">>,
                     required => false
                 }
-            )},
-        {matrix,
-            mk(
-                hoconsc:map(name, ref(emqx_bridge_matrix, "config_connector")),
-                #{
-                    desc => <<"Matrix Connector Config">>,
-                    required => false
-                }
-            )},
-        {redis,
-            mk(
-                hoconsc:map(name, ref(emqx_bridge_redis_schema, "config_connector")),
-                #{
-                    desc => <<"Redis Connector Config">>,
-                    required => false
-                }
             )}
     ].
@@ -160,6 +170,7 @@ schema_modules() ->
     emqx_bridge_kafka,
     emqx_bridge_matrix,
     emqx_bridge_mongodb,
+    emqx_bridge_mysql,
     emqx_bridge_syskeeper_connector,
     emqx_bridge_syskeeper_proxy,
     emqx_bridge_timescale,
@@ -185,6 +196,7 @@ api_schemas(Method) ->
     api_ref(emqx_bridge_kafka, <<"kafka_producer">>, Method ++ "_connector"),
     api_ref(emqx_bridge_matrix, <<"matrix">>, Method ++ "_connector"),
     api_ref(emqx_bridge_mongodb, <<"mongodb">>, Method ++ "_connector"),
+    api_ref(emqx_bridge_mysql, <<"mysql">>, Method ++ "_connector"),
    api_ref(emqx_bridge_syskeeper_connector, <<"syskeeper_forwarder">>, Method),
    api_ref(emqx_bridge_syskeeper_proxy, <<"syskeeper_proxy">>, Method),
    api_ref(emqx_bridge_timescale, <<"timescale">>, Method ++ "_connector"),
@@ -28,7 +28,8 @@
 -export([
     transform_bridges_v1_to_connectors_and_bridges_v2/1,
     transform_bridge_v1_config_to_action_config/4,
-    top_level_common_connector_keys/0
+    top_level_common_connector_keys/0,
+    project_to_connector_resource_opts/1
 ]).
 
 -export([roots/0, fields/1, desc/1, namespace/0, tags/0]).
@@ -36,9 +37,11 @@
 -export([get_response/0, put_request/0, post_request/0]).
 
 -export([connector_type_to_bridge_types/1]).
 
 -export([
     api_fields/3,
     common_fields/0,
+    connector_values/3,
     status_and_actions_fields/0,
     type_and_name_fields/1
 ]).
@@ -128,16 +131,18 @@ connector_type_to_bridge_types(matrix) ->
     [matrix];
 connector_type_to_bridge_types(mongodb) ->
     [mongodb, mongodb_rs, mongodb_sharded, mongodb_single];
+connector_type_to_bridge_types(mysql) ->
+    [mysql];
 connector_type_to_bridge_types(pgsql) ->
     [pgsql];
+connector_type_to_bridge_types(redis) ->
+    [redis, redis_single, redis_sentinel, redis_cluster];
 connector_type_to_bridge_types(syskeeper_forwarder) ->
     [syskeeper_forwarder];
 connector_type_to_bridge_types(syskeeper_proxy) ->
     [];
 connector_type_to_bridge_types(timescale) ->
-    [timescale];
-connector_type_to_bridge_types(redis) ->
-    [redis, redis_single, redis_sentinel, redis_cluster].
+    [timescale].
 
 actions_config_name() -> <<"actions">>.
 
@@ -191,7 +196,7 @@ split_bridge_to_connector_and_action(
     case maps:is_key(ConnectorFieldNameBin, BridgeV1Conf) of
         true ->
             PrevFieldConfig =
-                project_to_connector_resource_opts(
+                maybe_project_to_connector_resource_opts(
                     ConnectorFieldNameBin,
                     maps:get(ConnectorFieldNameBin, BridgeV1Conf)
                 ),
@@ -227,12 +232,15 @@ split_bridge_to_connector_and_action(
     end,
     {BridgeType, BridgeName, ActionMap, ConnectorName, ConnectorMap}.
 
-project_to_connector_resource_opts(<<"resource_opts">>, OldResourceOpts) ->
-    Subfields = common_resource_opts_subfields_bin(),
-    maps:with(Subfields, OldResourceOpts);
-project_to_connector_resource_opts(_, OldConfig) ->
+maybe_project_to_connector_resource_opts(<<"resource_opts">>, OldResourceOpts) ->
+    project_to_connector_resource_opts(OldResourceOpts);
+maybe_project_to_connector_resource_opts(_, OldConfig) ->
     OldConfig.
 
+project_to_connector_resource_opts(OldResourceOpts) ->
+    Subfields = common_resource_opts_subfields_bin(),
+    maps:with(Subfields, OldResourceOpts).
+
 transform_bridge_v1_config_to_action_config(
     BridgeV1Conf, ConnectorName, ConnectorConfSchemaMod, ConnectorConfSchemaName
 ) ->
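A hedged illustration of what project_to_connector_resource_opts/1 above does: keep only the resource_opts subfields a connector understands. The allow-list below is written out for illustration; the real one comes from common_resource_opts_subfields_bin/0.

project_sketch(ResourceOpts) ->
    %% Illustrative allow-list of connector-level resource_opts keys.
    Allowed = [<<"health_check_interval">>, <<"start_after_created">>, <<"start_timeout">>],
    maps:with(Allowed, ResourceOpts).

%% Example: project_sketch(#{<<"health_check_interval">> => <<"15s">>, <<"batch_size">> => 100})
%% keeps only the health_check_interval entry, dropping action-only options.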
@@ -529,7 +537,6 @@ resource_opts_ref(Module, RefName) ->
 common_resource_opts_subfields() ->
     [
         health_check_interval,
-        query_mode,
         start_after_created,
         start_timeout
     ].
@@ -549,6 +556,48 @@ resource_opts_fields(Overrides) ->
         emqx_resource_schema:create_opts(Overrides)
     ).
 
+-type http_method() :: get | post | put.
+-type schema_example_map() :: #{atom() => term()}.
+
+-spec connector_values(http_method(), atom(), schema_example_map()) -> schema_example_map().
+connector_values(Method, Type, ConnectorValues) ->
+    TypeBin = atom_to_binary(Type),
+    lists:foldl(
+        fun(M1, M2) ->
+            maps:merge(M1, M2)
+        end,
+        #{
+            description => <<"My example ", TypeBin/binary, " connector">>
+        },
+        [
+            ConnectorValues,
+            method_values(Method, Type)
+        ]
+    ).
+
+method_values(post, Type) ->
+    TypeBin = atom_to_binary(Type),
+    #{
+        name => <<TypeBin/binary, "_connector">>,
+        type => TypeBin
+    };
+method_values(get, Type) ->
+    maps:merge(
+        method_values(post, Type),
+        #{
+            status => <<"connected">>,
+            node_status => [
+                #{
+                    node => <<"emqx@localhost">>,
+                    status => <<"connected">>
+                }
+            ],
+            actions => [<<"my_action">>]
+        }
+    );
+method_values(put, _Type) ->
+    #{}.
+
 %%======================================================================================
 %% Helper Functions
 %%======================================================================================
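A hypothetical usage sketch for the connector_values/3 helper added above: a connector schema module could build its GET example by merging its own example fields into the common scaffold. The mysql type and the field values here are made up for illustration only.

connector_get_example_sketch() ->
    emqx_connector_schema:connector_values(
        get,
        mysql,
        #{
            server => <<"127.0.0.1:3306">>,
            pool_size => 8
        }
    ).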
@@ -23,6 +23,7 @@
 -include_lib("eunit/include/eunit.hrl").
 -include_lib("common_test/include/ct.hrl").
 -include_lib("snabbkaffe/include/test_macros.hrl").
+-include_lib("emqx_resource/include/emqx_resource.hrl").
 
 -define(CONNECTOR_NAME, (atom_to_binary(?FUNCTION_NAME))).
 -define(RESOURCE(NAME, TYPE), #{
@@ -103,48 +104,6 @@
 }).
 -define(KAFKA_BRIDGE(Name), ?KAFKA_BRIDGE(Name, ?CONNECTOR_NAME)).
 
-%% -define(CONNECTOR_TYPE_MQTT, <<"mqtt">>).
-%% -define(MQTT_CONNECTOR(SERVER, NAME), ?CONNECTOR(NAME, ?CONNECTOR_TYPE_MQTT)#{
-%%     <<"server">> => SERVER,
-%%     <<"username">> => <<"user1">>,
-%%     <<"password">> => <<"">>,
-%%     <<"proto_ver">> => <<"v5">>,
-%%     <<"egress">> => #{
-%%         <<"remote">> => #{
-%%             <<"topic">> => <<"emqx/${topic}">>,
-%%             <<"qos">> => <<"${qos}">>,
-%%             <<"retain">> => false
-%%         }
-%%     }
-%% }).
-%% -define(MQTT_CONNECTOR(SERVER), ?MQTT_CONNECTOR(SERVER, <<"mqtt_egress_test_connector">>)).
-
-%% -define(CONNECTOR_TYPE_HTTP, <<"kafka_producer">>).
-%% -define(HTTP_CONNECTOR(URL, NAME), ?CONNECTOR(NAME, ?CONNECTOR_TYPE_HTTP)#{
-%%     <<"url">> => URL,
-%%     <<"local_topic">> => <<"emqx_webhook/#">>,
-%%     <<"method">> => <<"post">>,
-%%     <<"body">> => <<"${payload}">>,
-%%     <<"headers">> => #{
-%%         % NOTE
-%%         % The Pascal-Case is important here.
-%%         % The reason is kinda ridiculous: `emqx_connector_resource:create_dry_run/2` converts
-%%         % connector config keys into atoms, and the atom 'Content-Type' exists in the ERTS
-%%         % when this happens (while the 'content-type' does not).
-%%         <<"Content-Type">> => <<"application/json">>
-%%     }
-%% }).
-%% -define(HTTP_CONNECTOR(URL), ?HTTP_CONNECTOR(URL, ?CONNECTOR_NAME)).
-
-%% -define(URL(PORT, PATH),
-%%     list_to_binary(
-%%         io_lib:format(
-%%             "http://localhost:~s/~s",
-%%             [integer_to_list(PORT), PATH]
-%%         )
-%%     )
-%% ).
-
 -define(APPSPECS, [
     emqx_conf,
     emqx,
@@ -178,11 +137,14 @@ groups() ->
         t_fail_delete_with_action,
         t_actions_field
     ],
+    ClusterOnlyTests = [
+        t_inconsistent_state
+    ],
     ClusterLaterJoinOnlyTCs = [
         % t_cluster_later_join_metrics
     ],
     [
-        {single, [], AllTCs -- ClusterLaterJoinOnlyTCs},
+        {single, [], (AllTCs -- ClusterLaterJoinOnlyTCs) -- ClusterOnlyTests},
         {cluster_later_join, [], ClusterLaterJoinOnlyTCs},
         {cluster, [], (AllTCs -- SingleOnlyTests) -- ClusterLaterJoinOnlyTCs}
     ].
@@ -268,6 +230,8 @@ init_mocks(_TestCase) ->
         fun
             (<<"connector:", ?CONNECTOR_TYPE_STR, ":bad_", _/binary>>, _C) ->
                 {ok, bad_connector_state};
+            (_I, #{bootstrap_hosts := <<"nope:9092">>}) ->
+                {ok, worst_connector_state};
             (_I, _C) ->
                 {ok, connector_state}
         end
@@ -277,8 +241,17 @@ init_mocks(_TestCase) ->
         ?CONNECTOR_IMPL,
         on_get_status,
         fun
-            (_, bad_connector_state) -> connecting;
-            (_, _) -> connected
+            (_, bad_connector_state) ->
+                connecting;
+            (_, worst_connector_state) ->
+                {?status_disconnected, worst_connector_state, [
+                    #{
+                        host => <<"nope:9092">>,
+                        reason => unresolvable_hostname
+                    }
+                ]};
+            (_, _) ->
+                connected
         end
     ),
     meck:expect(?CONNECTOR_IMPL, on_add_channel, 4, {ok, connector_state}),
@@ -845,6 +818,39 @@ t_fail_delete_with_action(Config) ->
     ),
     ok.
 
+t_list_disabled_channels(Config) ->
+    ConnectorParams = ?KAFKA_CONNECTOR(?CONNECTOR_NAME),
+    ?assertMatch(
+        {ok, 201, _},
+        request_json(
+            post,
+            uri(["connectors"]),
+            ConnectorParams,
+            Config
+        )
+    ),
+    ActionName = ?BRIDGE_NAME,
+    ActionParams = (?KAFKA_BRIDGE(ActionName))#{<<"enable">> := true},
+    ?assertMatch(
+        {ok, 201, #{<<"enable">> := true}},
+        request_json(
+            post,
+            uri(["actions"]),
+            ActionParams,
+            Config
+        )
+    ),
+    ConnectorID = emqx_connector_resource:connector_id(?CONNECTOR_TYPE, ?CONNECTOR_NAME),
+    ?assertMatch(
+        {ok, 200, #{<<"actions">> := [ActionName]}},
+        request_json(
+            get,
+            uri(["connectors", ConnectorID]),
+            Config
+        )
+    ),
+    ok.
+
 t_raw_config_response_defaults(Config) ->
     Params = maps:without([<<"enable">>, <<"resource_opts">>], ?KAFKA_CONNECTOR(?CONNECTOR_NAME)),
     ?assertMatch(
@@ -858,6 +864,51 @@ t_raw_config_response_defaults(Config) ->
     ),
     ok.
 
+t_inconsistent_state(Config) ->
+    [_, Node2] = ?config(cluster_nodes, Config),
+    Params = ?KAFKA_CONNECTOR(?CONNECTOR_NAME),
+    ?assertMatch(
+        {ok, 201, #{<<"enable">> := true, <<"resource_opts">> := #{}}},
+        request_json(
+            post,
+            uri(["connectors"]),
+            Params,
+            Config
+        )
+    ),
+    BadParams = maps:without(
+        [<<"name">>, <<"type">>],
+        Params#{<<"bootstrap_hosts">> := <<"nope:9092">>}
+    ),
+    {ok, _} = erpc:call(
+        Node2,
+        emqx,
+        update_config,
+        [[connectors, ?CONNECTOR_TYPE, ?CONNECTOR_NAME], BadParams, #{}]
+    ),
+
+    ConnectorID = emqx_connector_resource:connector_id(?CONNECTOR_TYPE, ?CONNECTOR_NAME),
+    ?assertMatch(
+        {ok, 200, #{
+            <<"status">> := <<"inconsistent">>,
+            <<"node_status">> := [
+                #{<<"status">> := <<"connected">>},
+                #{
+                    <<"status">> := <<"disconnected">>,
+                    <<"status_reason">> := _
+                }
+            ],
+            <<"status_reason">> := _
+        }},
+        request_json(
+            get,
+            uri(["connectors", ConnectorID]),
+            Config
+        )
+    ),
+
+    ok.
+
 %%% helpers
 listen_on_random_port() ->
     SockOpts = [binary, {active, false}, {packet, raw}, {reuseaddr, true}, {backlog, 1000}],
@@ -336,30 +336,42 @@ audit_log(Level, From, Log) ->
         {error, _} ->
             ignore;
         {ok, {Mod, Fun}} ->
-            try
-                apply(Mod, Fun, [Level, From, normalize_audit_log_args(Log)])
-            catch
-                _:{aborted, {no_exists, emqx_audit}} ->
-                    case Log of
-                        #{cmd := cluster, args := ["leave"]} ->
-                            ok;
-                        _ ->
-                            ?LOG_ERROR(#{
-                                msg => "ctl_command_crashed",
-                                reason => "emqx_audit table not found",
-                                log => normalize_audit_log_args(Log),
-                                from => From
-                            })
-                    end;
-                _:Reason:Stacktrace ->
-                    ?LOG_ERROR(#{
-                        msg => "ctl_command_crashed",
-                        stacktrace => Stacktrace,
-                        reason => Reason,
-                        log => normalize_audit_log_args(Log),
-                        from => From
-                    })
-            end
-    end.
+            case prune_unnecessary_log(Log) of
+                false -> ok;
+                {ok, Log1} -> apply_audit_command(Log1, Mod, Fun, Level, From)
+            end
+    end.
+
+apply_audit_command(Log, Mod, Fun, Level, From) ->
+    try
+        apply(Mod, Fun, [Level, From, Log])
+    catch
+        _:{aborted, {no_exists, emqx_audit}} ->
+            case Log of
+                #{cmd := cluster, args := [<<"leave">>]} ->
+                    ok;
+                _ ->
+                    ?LOG_ERROR(#{
+                        msg => "ctl_command_crashed",
+                        reason => "emqx_audit table not found",
+                        log => Log,
+                        from => From
+                    })
+            end;
+        _:Reason:Stacktrace ->
+            ?LOG_ERROR(#{
+                msg => "ctl_command_crashed",
+                stacktrace => Stacktrace,
+                reason => Reason,
+                log => Log,
+                from => From
+            })
+    end.
+
+prune_unnecessary_log(Log) ->
+    case normalize_audit_log_args(Log) of
+        #{args := [<<"emqx:is_running()">>]} -> false;
+        Log1 -> {ok, Log1}
+    end.
 
 audit_level(ok, _Duration) -> info;
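A reduced sketch of the filtering step added above: normalize the audit log entry, drop the periodic emqx:is_running() probe, and hand anything else to the audit sink. The Normalize and Sink funs are stand-ins for the module's own helpers.

audit_sketch(Log, Normalize, Sink) ->
    case Normalize(Log) of
        %% The liveness probe is pruned and never reaches the sink.
        #{args := [<<"emqx:is_running()">>]} -> ok;
        Normalized -> Sink(Normalized)
    end.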
@@ -24,6 +24,7 @@
 -export([add_handler/0, remove_handler/0]).
 -export([pre_config_update/3, post_config_update/5]).
 -export([regenerate_minirest_dispatch/0]).
+-export([delay_job/1]).
 
 -behaviour(gen_server).
 
@@ -68,7 +69,7 @@ handle_call(_Request, _From, State) ->
 handle_cast(_Request, State) ->
     {noreply, State, hibernate}.
 
-handle_info(i18n_lang_changed, _State) ->
+handle_info(regenerate, _State) ->
     NewState = regenerate_minirest_dispatch(),
     {noreply, NewState, hibernate};
 handle_info({update_listeners, OldListeners, NewListeners}, _State) ->
@@ -146,7 +147,7 @@ remove_sensitive_data(Conf0) ->
     end.
 
 post_config_update(_, {change_i18n_lang, _}, _NewConf, _OldConf, _AppEnvs) ->
-    delay_job(i18n_lang_changed);
+    delay_job(regenerate);
 post_config_update(_, _Req, NewConf, OldConf, _AppEnvs) ->
     OldHttp = get_listener(http, OldConf),
     OldHttps = get_listener(https, OldConf),
@@ -181,12 +181,12 @@ fields(hasnext) ->
  fields(meta) ->
  fields(page) ++ fields(limit) ++ fields(count) ++ fields(hasnext).

- -spec schema_with_example(hocon_schema:type(), term()) -> hocon_schema:field_schema_map().
+ -spec schema_with_example(hocon_schema:type(), term()) -> hocon_schema:field_schema().
  schema_with_example(Type, Example) ->
  hoconsc:mk(Type, #{examples => #{<<"example">> => Example}}).

  -spec schema_with_examples(hocon_schema:type(), map() | list(tuple())) ->
- hocon_schema:field_schema_map().
+ hocon_schema:field_schema().
  schema_with_examples(Type, Examples) ->
  hoconsc:mk(Type, #{examples => #{<<"examples">> => Examples}}).

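The specs now return hocon_schema:field_schema() instead of hocon_schema:field_schema_map(), in line with the hocon bump elsewhere in this commit; the function bodies are unchanged. For reference, a standalone sketch of what one of these helpers builds (the typerefl include and the concrete integer field are assumptions for the example):

-module(schema_example_demo).
-include_lib("typerefl/include/types.hrl").
-export([page_field/0]).

%% Roughly equivalent to schema_with_example(integer(), 1) in the patch:
%% a field schema carrying an inline OpenAPI example.
page_field() ->
    hoconsc:mk(integer(), #{examples => #{<<"example">> => 1}}).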
@@ -19,7 +19,7 @@
  -behaviour(hocon_schema).

  %% API
- -export([paths/0, api_spec/0, schema/1, fields/1]).
+ -export([paths/0, api_spec/0, schema/1, namespace/0, fields/1]).
  -export([init_per_suite/1, end_per_suite/1]).
  -export([t_in_path/1, t_in_query/1, t_in_mix/1, t_without_in/1, t_ref/1, t_public_ref/1]).
  -export([t_require/1, t_query_enum/1, t_nullable/1, t_method/1, t_api_spec/1]).

@@ -562,6 +562,8 @@ schema("/method/ok") ->
  schema("/method/error") ->
  #{operationId => test, bar => #{200 => <<"ok">>}}.

+ namespace() -> undefined.
+
  fields(page) ->
  [
  {per_page,

@@ -680,6 +680,8 @@ to_schema(Object) ->
  post => #{responses => #{200 => Object, 201 => Object}}
  }.

+ namespace() -> undefined.
+
  fields(good_ref) ->
  [
  {'webhook-host', mk(emqx_schema:ip_port(), #{default => <<"127.0.0.1:80">>})},
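Both OpenAPI test schema modules gain a namespace/0 callback returning undefined. A minimal module implementing the same hocon_schema callbacks could look like this (root and field names are illustrative):

-module(minimal_schema_demo).
-behaviour(hocon_schema).
-include_lib("typerefl/include/types.hrl").
-export([namespace/0, roots/0, fields/1]).

namespace() -> undefined.

roots() -> [demo].

fields(demo) ->
    [
        {enable, hoconsc:mk(boolean(), #{default => true})},
        {per_page, hoconsc:mk(integer(), #{default => 100})}
    ].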
@@ -208,7 +208,13 @@ start_backend_services() ->
  update_config(Backend, UpdateReq) ->
  %% we always make sure the valid configuration will update successfully,
  %% ignore the runtime error during its update
- case emqx_conf:update(?MOD_KEY_PATH(Backend), UpdateReq, #{override_to => cluster}) of
+ case
+ emqx_conf:update(
+ ?MOD_KEY_PATH(Backend),
+ UpdateReq,
+ #{override_to => cluster, lazy_evaluator => fun emqx_schema_secret:source/1}
+ )
+ of
  {ok, _UpdateResult} ->
  case lookup(Backend) of
  undefined ->
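update_config/2 now passes lazy_evaluator => fun emqx_schema_secret:source/1 alongside override_to => cluster, so that function-wrapped (lazily loaded) secret values can be turned back into printable sources when the updated config is dumped to the override file. A rough sketch of what such an evaluator pass could do to a raw config term; this is an assumption about the mechanism, not the hocon_pp implementation:

-module(lazy_eval_demo).
-export([resolve/2]).

%% Walk a (possibly nested) config term and apply Evaluator to every
%% function-wrapped value, leaving ordinary terms untouched.
resolve(Map, Evaluator) when is_map(Map) ->
    maps:map(fun(_Key, Value) -> resolve(Value, Evaluator) end, Map);
resolve(List, Evaluator) when is_list(List) ->
    [resolve(Elem, Evaluator) || Elem <- List];
resolve(Fun, Evaluator) when is_function(Fun, 0) ->
    Evaluator(Fun);
resolve(Other, _Evaluator) ->
    Other.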
@@ -42,8 +42,10 @@
  stream/0,
  stream_rank/0,
  iterator/0,
+ iterator_id/0,
  message_id/0,
  message_key/0,
+ message_store_opts/0,
  next_result/1, next_result/0,
  store_batch_result/0,
  make_iterator_result/1, make_iterator_result/0,

@@ -67,6 +69,9 @@

  -type stream_rank() :: {term(), integer()}.

+ %% TODO: Not implemented
+ -type iterator_id() :: term().
+
  -opaque iterator() :: ds_specific_iterator().

  -opaque stream() :: ds_specific_stream().

@@ -11,11 +11,17 @@
  -export([iteration_options/1]).
  -export([default_iteration_options/0]).

+ -export_type([
+ backend_config/0,
+ iteration_options/0
+ ]).
+
  -type backend_config() ::
- {emqx_ds_message_storage_bitmask, emqx_ds_message_storage_bitmask:options()}
+ {emqx_ds_message_storage_bitmask, emqx_ds_storage_bitfield_lts:options()}
  | {module(), _Options}.

- -export_type([backend_config/0]).
+ -type keyspace() :: atom().
+ -type iteration_options() :: map().

  %%================================================================================
  %% API funcions

@@ -23,7 +29,7 @@

  -define(APP, emqx_ds).

- -spec keyspace_config(emqx_ds:keyspace()) -> backend_config().
+ -spec keyspace_config(keyspace()) -> backend_config().
  keyspace_config(Keyspace) ->
  DefaultKeyspaceConfig = application:get_env(
  ?APP,

@@ -33,8 +39,8 @@ keyspace_config(Keyspace) ->
  Keyspaces = application:get_env(?APP, keyspace_config, #{}),
  maps:get(Keyspace, Keyspaces, DefaultKeyspaceConfig).

- -spec iteration_options(emqx_ds:keyspace()) ->
- emqx_ds_message_storage_bitmask:iteration_options().
+ -spec iteration_options(keyspace()) ->
+ iteration_options().
  iteration_options(Keyspace) ->
  case keyspace_config(Keyspace) of
  {emqx_ds_message_storage_bitmask, Config} ->

@@ -43,7 +49,7 @@ iteration_options(Keyspace) ->
  default_iteration_options()
  end.

- -spec default_iteration_options() -> emqx_ds_message_storage_bitmask:iteration_options().
+ -spec default_iteration_options() -> iteration_options().
  default_iteration_options() ->
  {emqx_ds_message_storage_bitmask, Config} = default_keyspace_config(),
  maps:get(iteration, Config).

@@ -60,7 +66,7 @@ default_keyspace_config() ->
  }
  }}.

- -spec db_options(emqx_ds:keyspace()) -> emqx_ds_storage_layer:db_options().
+ -spec db_options(keyspace()) -> emqx_ds_storage_layer:options().
  db_options(Keyspace) ->
  DefaultDBOptions = application:get_env(?APP, default_db_options, []),
  Keyspaces = application:get_env(?APP, keyspace_config, #{}),
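The configuration helpers above now use locally defined keyspace() and iteration_options() types instead of reaching into emqx_ds and the old bitmask storage module; the lookup itself stays plain application-environment access with a per-keyspace override. A condensed sketch of that pattern (the default value shape is illustrative):

-module(keyspace_cfg_demo).
-export([config_for/1]).

-define(APP, emqx_ds).

%% Per-keyspace configuration with an application-wide fallback,
%% following the shape of keyspace_config/1 above.
config_for(Keyspace) when is_atom(Keyspace) ->
    Default = application:get_env(?APP, default_keyspace_config, #{}),
    Keyspaces = application:get_env(?APP, keyspace_config, #{}),
    maps:get(Keyspace, Keyspaces, Default).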
@@ -24,7 +24,12 @@
  %% Debug:
  -export([trie_next/3, trie_insert/3, dump_to_dot/2]).

- -export_type([options/0, static_key/0, trie/0]).
+ -export_type([
+ options/0,
+ static_key/0,
+ trie/0,
+ msg_storage_key/0
+ ]).

  -include_lib("stdlib/include/ms_transform.hrl").

@@ -92,7 +92,7 @@
  ?enc := emqx_ds_storage_layer:iterator()
  }.

- -type message_id() :: emqx_ds_storage_layer:message_id().
+ -type message_id() :: emqx_ds:message_id().

  -define(batch_messages, 2).

@@ -245,7 +245,7 @@ do_store_batch_v1(DB, Shard, #{?tag := ?BATCH, ?batch_messages := Messages}, Opt
  emqx_ds_storage_layer:store_batch({DB, Shard}, Messages, Options).

  -spec do_get_streams_v1(
- emqx_ds:db(), emqx_ds_replicationi_layer:shard_id(), emqx_ds:topic_filter(), emqx_ds:time()
+ emqx_ds:db(), emqx_ds_replication_layer:shard_id(), emqx_ds:topic_filter(), emqx_ds:time()
  ) ->
  [{integer(), emqx_ds_storage_layer:stream()}].
  do_get_streams_v1(DB, Shard, TopicFilter, StartTime) ->

@@ -253,7 +253,7 @@ do_get_streams_v1(DB, Shard, TopicFilter, StartTime) ->

  -spec do_make_iterator_v1(
  emqx_ds:db(),
- emqx_ds_storage_layer:shard_id(),
+ emqx_ds_replication_layer:shard_id(),
  emqx_ds_storage_layer:stream(),
  emqx_ds:topic_filter(),
  emqx_ds:time()

@@ -280,7 +280,7 @@ in_sync_replicas_trans(DB, Shard) ->
  {error, no_shard}
  end.

- -spec set_leader_trans(emqx_ds:ds(), emqx_ds_replication_layer:shard_id(), node()) ->
+ -spec set_leader_trans(emqx_ds:db(), emqx_ds_replication_layer:shard_id(), node()) ->
  ok.
  set_leader_trans(DB, Shard, Node) ->
  [Record0] = mnesia:wread({?SHARD_TAB, {DB, Shard}}),
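Besides the replication-layer typo fixes in the specs, set_leader_trans/3 is written to run inside an mnesia transaction: mnesia:wread/1 takes a write lock on the shard row before it is rewritten. A self-contained sketch of that transaction shape, using an illustrative record and table name in place of ?SHARD_TAB:

-module(leader_trans_demo).
-export([set_leader/3]).

%% Illustrative stand-in for the real shard table record.
-record(shard, {id, replica_set = [], leader}).

set_leader(DB, ShardId, Node) ->
    Fun = fun() ->
        %% wread takes a write lock, so concurrent leader changes serialize.
        [Rec0] = mnesia:wread({shard, {DB, ShardId}}),
        mnesia:write(Rec0#shard{leader = Node})
    end,
    {atomic, ok} = mnesia:transaction(Fun),
    ok.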
@@ -34,7 +34,16 @@
  %% internal exports:
  -export([db_dir/1]).

- -export_type([gen_id/0, generation/0, cf_refs/0, stream/0, iterator/0]).
+ -export_type([
+ gen_id/0,
+ generation/0,
+ cf_refs/0,
+ stream/0,
+ iterator/0,
+ shard_id/0,
+ options/0,
+ prototype/0
+ ]).

  -include_lib("snabbkaffe/include/snabbkaffe.hrl").

@@ -72,7 +81,7 @@
  ?enc := term()
  }.

- %% Note: this might be stored permanently on a remote node.
+ %% Note: this might be stred permanently on a remote node.
  -opaque iterator() ::
  #{
  ?tag := ?IT,

@@ -117,17 +126,19 @@
  %% Shard (runtime):
  -type shard() :: shard(generation()).

+ -type options() :: map().
+
  %%================================================================================
  %% Generation callbacks
  %%================================================================================

  %% Create the new schema given generation id and the options.
  %% Create rocksdb column families.
- -callback create(shard_id(), rocksdb:db_handle(), gen_id(), _Options) ->
+ -callback create(shard_id(), rocksdb:db_handle(), gen_id(), Options :: map()) ->
  {_Schema, cf_refs()}.

  %% Open the existing schema
- -callback open(shard_id(), rocsdb:db_handle(), gen_id(), cf_refs(), _Schema) ->
+ -callback open(shard_id(), rocksdb:db_handle(), gen_id(), cf_refs(), _Schema) ->
  _Data.

  -callback store_batch(shard_id(), _Data, [emqx_types:message()], emqx_ds:message_store_opts()) ->
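This group of hunks widens the exported type set, makes the create/4 options an explicit map, and fixes the rocsdb typo in the open/5 callback. A do-nothing backend that satisfies the three callbacks shown might look like the following; the column-family naming and the rocksdb calls are illustrative only:

-module(noop_generation_demo).
-export([create/4, open/5, store_batch/4]).

%% create/4: build the generation's schema and its rocksdb column family.
create(_ShardId, DBHandle, GenId, _Options) ->
    CFName = "noop_gen_" ++ integer_to_list(GenId),
    {ok, CFHandle} = rocksdb:create_column_family(DBHandle, CFName, []),
    Schema = #{cf_name => CFName},
    {Schema, [{CFName, CFHandle}]}.

%% open/5: turn the persisted schema plus the already-open column families
%% into the runtime data for this generation.
open(_ShardId, _DBHandle, _GenId, CFRefs, #{cf_name := CFName}) ->
    {CFName, CFHandle} = lists:keyfind(CFName, 1, CFRefs),
    #{cf => CFHandle}.

%% store_batch/4: this demo backend simply discards the messages.
store_batch(_ShardId, #{cf := _CF}, _Messages, _Options) ->
    ok.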
@@ -146,7 +157,7 @@
  %% API for the replication layer
  %%================================================================================

- -spec open_shard(shard_id(), emqx_ds:builtin_db_opts()) -> ok.
+ -spec open_shard(shard_id(), options()) -> ok.
  open_shard(Shard, Options) ->
  emqx_ds_storage_layer_sup:ensure_shard(Shard, Options).

@@ -244,13 +255,13 @@ next(Shard, Iter = #{?tag := ?IT, ?generation := GenId, ?enc := GenIter0}, Batch

  -define(REF(ShardId), {via, gproc, {n, l, {?MODULE, ShardId}}}).

- -spec start_link(shard_id(), emqx_ds:builtin_db_opts()) ->
+ -spec start_link(shard_id(), options()) ->
  {ok, pid()}.
  start_link(Shard = {_, _}, Options) ->
  gen_server:start_link(?REF(Shard), ?MODULE, {Shard, Options}, []).

  -record(s, {
- shard_id :: emqx_ds:shard_id(),
+ shard_id :: shard_id(),
  db :: rocksdb:db_handle(),
  cf_refs :: cf_refs(),
  schema :: shard_schema(),

@@ -381,7 +392,7 @@ commit_metadata(#s{shard_id = ShardId, schema = Schema, shard = Runtime, db = DB
  ok = put_schema_persistent(DB, Schema),
  put_schema_runtime(ShardId, Runtime).

- -spec rocksdb_open(shard_id(), emqx_ds:builtin_db_opts()) ->
+ -spec rocksdb_open(shard_id(), options()) ->
  {ok, rocksdb:db_handle(), cf_refs()} | {error, _TODO}.
  rocksdb_open(Shard, Options) ->
  DBOptions = [
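open_shard/2, start_link/2 and rocksdb_open/2 now take the storage layer's own options() map; each shard server is still addressed through gproc's via registry, as the ?REF macro above shows. A compact, throwaway illustration of that per-shard naming scheme:

-module(shard_via_demo).
-behaviour(gen_server).
-export([start_link/1, ping/1]).
-export([init/1, handle_call/3, handle_cast/2]).

%% One registered name per shard id, kept in gproc's local registry.
ref(ShardId) -> {via, gproc, {n, l, {?MODULE, ShardId}}}.

start_link(ShardId) ->
    gen_server:start_link(ref(ShardId), ?MODULE, ShardId, []).

ping(ShardId) ->
    gen_server:call(ref(ShardId), ping).

init(ShardId) -> {ok, ShardId}.
handle_call(ping, _From, State) -> {reply, pong, State};
handle_call(_Req, _From, State) -> {reply, ignored, State}.
handle_cast(_Msg, State) -> {noreply, State}.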
@@ -30,7 +30,7 @@ start_link() ->
  start_shard(Shard, Options) ->
  supervisor:start_child(?SUP, shard_child_spec(Shard, Options)).

- -spec stop_shard(emqx_ds:shard()) -> ok | {error, _}.
+ -spec stop_shard(emqx_ds_storage_layer:shard_id()) -> ok | {error, _}.
  stop_shard(Shard) ->
  ok = supervisor:terminate_child(?SUP, Shard),
  ok = supervisor:delete_child(?SUP, Shard).
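stop_shard/1 now advertises emqx_ds_storage_layer:shard_id() in its spec. Since open_shard/2 earlier in this diff goes through emqx_ds_storage_layer_sup:ensure_shard/2, the supervisor presumably treats an already-running shard as success; a sketch of such an idempotent start (the error policy is an assumption):

-module(ensure_shard_demo).
-export([ensure_shard/2]).

%% Starting a shard that is already running should be a no-op.
ensure_shard(Shard, Options) ->
    case emqx_ds_storage_layer_sup:start_shard(Shard, Options) of
        {ok, _Pid} -> ok;
        {error, {already_started, _Pid}} -> ok;
        {error, Reason} -> {error, Reason}
    end.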
@@ -34,8 +34,7 @@
  %% API funcions
  %%================================================================================

- -spec drop_db([node()], emqx_ds:db()) ->
- [{ok, ok} | erpc:caught_call_exception()].
+ -spec drop_db([node()], emqx_ds:db()) -> [emqx_rpc:erpc(ok)].
  drop_db(Node, DB) ->
  erpc:multicall(Node, emqx_ds_replication_layer, do_drop_db_v1, [DB]).

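drop_db/2 fans the request out with erpc:multicall/4, and the new spec simply restates the per-node result type as emqx_rpc:erpc(ok). A caller that wants a single ok-or-error answer could fold the node results like this (the failure policy is an assumption):

-module(drop_db_demo).
-export([drop_db_everywhere/2]).

drop_db_everywhere(Nodes, DB) ->
    Results = erpc:multicall(Nodes, emqx_ds_replication_layer, do_drop_db_v1, [DB]),
    %% erpc returns one tagged result per node, in the same order as Nodes.
    case [R || R <- Results, R =/= {ok, ok}] of
        [] -> ok;
        Failures -> {error, Failures}
    end.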
@@ -11,9 +11,6 @@
  -export([store/2]).
  -export([iterate/2]).

- -type topic() :: list(binary()).
- -type time() :: integer().
-
  -opaque t() :: ets:tid().

  -export_type([t/0]).

@@ -61,7 +61,7 @@
  }.

  %% For performance reasons we treat regular lists as streams, see `next/1'
- -opaque cont(Data) ::
+ -type cont(Data) ::
  fun(() -> stream(Data))
  | stream(Data).

@@ -78,11 +78,11 @@
  chunk_size :: non_neg_integer()
  }).

- -opaque chunk_state() :: #chunk_state{}.
+ -type chunk_state() :: #chunk_state{}.

  -record(interleave_state, {streams :: [{Tag :: term(), Stream :: term()}]}).

- -opaque interleave_state() :: #interleave_state{}.
+ -type interleave_state() :: #interleave_state{}.

  %% =============================================================================
  %% API functions
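chunk_state() and interleave_state() lose their -opaque qualifiers, while the comment above cont(Data) keeps the key idea: a continuation is either a plain list or a fun that yields more of the stream. A toy version of that representation and of a next/1 that forces it (illustrative module, not emqx_utils_stream itself):

-module(stream_demo).
-export([next/1, ones_example/0]).

%% A stream is [] (exhausted), a non-empty list, or a fun yielding a stream.
next([]) -> [];
next([Head | Tail]) -> {Head, Tail};
next(Cont) when is_function(Cont, 0) -> next(Cont()).

%% An infinite stream of 1s: forcing it one element at a time stays cheap.
ones_example() ->
    Ones = fun Gen() -> [1 | Gen] end,
    {1, Rest} = next(Ones()),
    {1, _} = next(Rest),
    ok.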
Some files were not shown because too many files have changed in this diff.