diff --git a/Makefile b/Makefile index f117d5e9d..0112776bb 100644 --- a/Makefile +++ b/Makefile @@ -20,8 +20,8 @@ endif # Dashboard version # from https://github.com/emqx/emqx-dashboard5 -export EMQX_DASHBOARD_VERSION ?= v1.5.0 -export EMQX_EE_DASHBOARD_VERSION ?= e1.3.0 +export EMQX_DASHBOARD_VERSION ?= v1.5.1 +export EMQX_EE_DASHBOARD_VERSION ?= e1.3.1 PROFILE ?= emqx REL_PROFILES := emqx emqx-enterprise diff --git a/apps/emqx/include/emqx_release.hrl b/apps/emqx/include/emqx_release.hrl index 87a4b47e0..011d52595 100644 --- a/apps/emqx/include/emqx_release.hrl +++ b/apps/emqx/include/emqx_release.hrl @@ -32,10 +32,10 @@ %% `apps/emqx/src/bpapi/README.md' %% Opensource edition --define(EMQX_RELEASE_CE, "5.3.1-alpha.1"). +-define(EMQX_RELEASE_CE, "5.3.1"). %% Enterprise edition --define(EMQX_RELEASE_EE, "5.3.1-alpha.4"). +-define(EMQX_RELEASE_EE, "5.3.1"). %% The HTTP API version -define(EMQX_API_VERSION, "5.0"). diff --git a/apps/emqx/integration_test/emqx_persistent_session_ds_SUITE.erl b/apps/emqx/integration_test/emqx_persistent_session_ds_SUITE.erl index ee5d203e4..f22a4f97e 100644 --- a/apps/emqx/integration_test/emqx_persistent_session_ds_SUITE.erl +++ b/apps/emqx/integration_test/emqx_persistent_session_ds_SUITE.erl @@ -11,6 +11,8 @@ -include_lib("snabbkaffe/include/snabbkaffe.hrl"). -include_lib("emqx/include/emqx_mqtt.hrl"). +-include_lib("emqx/src/emqx_persistent_session_ds.hrl"). + -define(DEFAULT_KEYSPACE, default). -define(DS_SHARD_ID, <<"local">>). -define(DS_SHARD, {?DEFAULT_KEYSPACE, ?DS_SHARD_ID}). @@ -118,6 +120,7 @@ start_client(Opts0 = #{}) -> properties => #{'Session-Expiry-Interval' => 300} }, Opts = maps:to_list(emqx_utils_maps:deep_merge(Defaults, Opts0)), + ct:pal("starting client with opts:\n ~p", [Opts]), {ok, Client} = emqtt:start_link(Opts), on_exit(fun() -> catch emqtt:stop(Client) end), Client. @@ -148,6 +151,9 @@ restart_node(Node, NodeSpec) -> ?tp(restarted_node, #{}), ok. +is_persistent_connect_opts(#{properties := #{'Session-Expiry-Interval' := EI}}) -> + EI > 0. + %%------------------------------------------------------------------------------ %% Testcases %%------------------------------------------------------------------------------ @@ -309,3 +315,94 @@ t_session_unsubscription_idempotency(Config) -> end ), ok. + +t_session_discard_persistent_to_non_persistent(_Config) -> + ClientId = atom_to_binary(?FUNCTION_NAME), + Params = #{ + client_id => ClientId, + reconnect_opts => + #{ + clean_start => true, + %% we set it to zero so that a new session is not created. + properties => #{'Session-Expiry-Interval' => 0}, + proto_ver => v5 + } + }, + do_t_session_discard(Params). + +t_session_discard_persistent_to_persistent(_Config) -> + ClientId = atom_to_binary(?FUNCTION_NAME), + Params = #{ + client_id => ClientId, + reconnect_opts => + #{ + clean_start => true, + properties => #{'Session-Expiry-Interval' => 30}, + proto_ver => v5 + } + }, + do_t_session_discard(Params). 
+ +do_t_session_discard(Params) -> + #{ + client_id := ClientId, + reconnect_opts := ReconnectOpts0 + } = Params, + ReconnectOpts = ReconnectOpts0#{clientid => ClientId}, + SubTopicFilter = <<"t/+">>, + ?check_trace( + begin + ?tp(notice, "starting", #{}), + Client0 = start_client(#{ + clientid => ClientId, + clean_start => false, + properties => #{'Session-Expiry-Interval' => 30}, + proto_ver => v5 + }), + {ok, _} = emqtt:connect(Client0), + ?tp(notice, "subscribing", #{}), + {ok, _, [?RC_GRANTED_QOS_2]} = emqtt:subscribe(Client0, SubTopicFilter, qos2), + %% Store some matching messages so that streams and iterators are created. + ok = emqtt:publish(Client0, <<"t/1">>, <<"1">>), + ok = emqtt:publish(Client0, <<"t/2">>, <<"2">>), + ?retry( + _Sleep0 = 100, + _Attempts0 = 50, + true = map_size(emqx_persistent_session_ds:list_all_streams()) > 0 + ), + ?retry( + _Sleep0 = 100, + _Attempts0 = 50, + true = map_size(emqx_persistent_session_ds:list_all_iterators()) > 0 + ), + ok = emqtt:stop(Client0), + ?tp(notice, "disconnected", #{}), + + ?tp(notice, "reconnecting", #{}), + %% we still have iterators and streams + ?assert(map_size(emqx_persistent_session_ds:list_all_streams()) > 0), + ?assert(map_size(emqx_persistent_session_ds:list_all_iterators()) > 0), + Client1 = start_client(ReconnectOpts), + {ok, _} = emqtt:connect(Client1), + ?assertEqual([], emqtt:subscriptions(Client1)), + case is_persistent_connect_opts(ReconnectOpts) of + true -> + ?assertMatch(#{ClientId := _}, emqx_persistent_session_ds:list_all_sessions()); + false -> + ?assertEqual(#{}, emqx_persistent_session_ds:list_all_sessions()) + end, + ?assertEqual(#{}, emqx_persistent_session_ds:list_all_subscriptions()), + ?assertEqual([], emqx_persistent_session_ds_router:topics()), + ?assertEqual(#{}, emqx_persistent_session_ds:list_all_streams()), + ?assertEqual(#{}, emqx_persistent_session_ds:list_all_iterators()), + ok = emqtt:stop(Client1), + ?tp(notice, "disconnected", #{}), + + ok + end, + fun(Trace) -> + ct:pal("trace:\n ~p", [Trace]), + ok + end + ), + ok. diff --git a/apps/emqx/rebar.config b/apps/emqx/rebar.config index 9f67caf5d..71f581267 100644 --- a/apps/emqx/rebar.config +++ b/apps/emqx/rebar.config @@ -30,7 +30,7 @@ {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.7"}}}, {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.15.16"}}}, {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "3.2.1"}}}, - {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.39.19"}}}, + {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.40.0"}}}, {emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.3"}}}, {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}}, {recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}}, diff --git a/apps/emqx/src/emqx.app.src b/apps/emqx/src/emqx.app.src index f8a02db2e..0545f36a5 100644 --- a/apps/emqx/src/emqx.app.src +++ b/apps/emqx/src/emqx.app.src @@ -2,7 +2,7 @@ {application, emqx, [ {id, "emqx"}, {description, "EMQX Core"}, - {vsn, "5.1.13"}, + {vsn, "5.1.14"}, {modules, []}, {registered, []}, {applications, [ diff --git a/apps/emqx/src/emqx_cm.erl b/apps/emqx/src/emqx_cm.erl index 1e4940965..537c60876 100644 --- a/apps/emqx/src/emqx_cm.erl +++ b/apps/emqx/src/emqx_cm.erl @@ -258,21 +258,21 @@ set_chan_stats(ClientId, ChanPid, Stats) -> end. %% @doc Open a session. 
--spec open_session(boolean(), emqx_types:clientinfo(), emqx_types:conninfo()) -> +-spec open_session(_CleanStart :: boolean(), emqx_types:clientinfo(), emqx_types:conninfo()) -> {ok, #{ session := emqx_session:t(), present := boolean(), replay => _ReplayContext }} | {error, Reason :: term()}. -open_session(true, ClientInfo = #{clientid := ClientId}, ConnInfo) -> +open_session(_CleanStart = true, ClientInfo = #{clientid := ClientId}, ConnInfo) -> Self = self(), emqx_cm_locker:trans(ClientId, fun(_) -> ok = discard_session(ClientId), ok = emqx_session:destroy(ClientInfo, ConnInfo), create_register_session(ClientInfo, ConnInfo, Self) end); -open_session(false, ClientInfo = #{clientid := ClientId}, ConnInfo) -> +open_session(_CleanStart = false, ClientInfo = #{clientid := ClientId}, ConnInfo) -> Self = self(), emqx_cm_locker:trans(ClientId, fun(_) -> case emqx_session:open(ClientInfo, ConnInfo) of diff --git a/apps/emqx/src/emqx_config_handler.erl b/apps/emqx/src/emqx_config_handler.erl index d8c014b8e..05784feb7 100644 --- a/apps/emqx/src/emqx_config_handler.erl +++ b/apps/emqx/src/emqx_config_handler.erl @@ -662,14 +662,32 @@ remove_from_override_config(_BinKeyPath, #{persistent := false}) -> undefined; remove_from_override_config(BinKeyPath, Opts) -> OldConf = emqx_config:read_override_conf(Opts), - emqx_utils_maps:deep_remove(BinKeyPath, OldConf). + UpgradedOldConf = upgrade_conf(OldConf), + emqx_utils_maps:deep_remove(BinKeyPath, UpgradedOldConf). %% apply new config on top of override config merge_to_override_config(_RawConf, #{persistent := false}) -> undefined; merge_to_override_config(RawConf, Opts) -> OldConf = emqx_config:read_override_conf(Opts), - maps:merge(OldConf, RawConf). + UpgradedOldConf = upgrade_conf(OldConf), + maps:merge(UpgradedOldConf, RawConf). + +upgrade_conf(Conf) -> + try + ConfLoader = emqx_app:get_config_loader(), + SchemaModule = apply(ConfLoader, schema_module, []), + apply(SchemaModule, upgrade_raw_conf, [Conf]) + catch + ErrorType:Reason:Stack -> + ?SLOG(warning, #{ + msg => "failed_to_upgrade_config", + error_type => ErrorType, + reason => Reason, + stacktrace => Stack + }), + Conf + end. up_req({remove, _Opts}) -> '$remove'; up_req({{update, Req}, _Opts}) -> Req. diff --git a/apps/emqx/src/emqx_hooks.erl b/apps/emqx/src/emqx_hooks.erl index c3e9c2230..efe2c0de8 100644 --- a/apps/emqx/src/emqx_hooks.erl +++ b/apps/emqx/src/emqx_hooks.erl @@ -66,8 +66,9 @@ %% - Callbacks with greater priority values will be run before %% the ones with lower priority values. e.g. A Callback with %% priority = 2 precedes the callback with priority = 1. -%% - The execution order is the adding order of callbacks if they have -%% equal priority values. +%% - If the priorities of the hooks are equal then their execution +%% order is determined by the lexicographic order of hook function +%% names. -type hookpoint() :: atom() | binary(). -type action() :: {module(), atom(), [term()] | undefined}. diff --git a/apps/emqx/src/emqx_limiter/src/emqx_limiter_schema.erl b/apps/emqx/src/emqx_limiter/src/emqx_limiter_schema.erl index 2dd4aa241..09ab6099c 100644 --- a/apps/emqx/src/emqx_limiter/src/emqx_limiter_schema.erl +++ b/apps/emqx/src/emqx_limiter/src/emqx_limiter_schema.erl @@ -33,7 +33,8 @@ desc/1, types/0, short_paths/0, - short_paths_fields/0 + short_paths_fields/0, + rate_type/0 ]). -define(KILOBYTE, 1024).
@@ -129,9 +130,9 @@ fields(limiter) -> ]; fields(node_opts) -> [ - {rate, ?HOCON(rate(), #{desc => deprecated_desc(rate), default => <<"infinity">>})}, + {rate, ?HOCON(rate_type(), #{desc => deprecated_desc(rate), default => <<"infinity">>})}, {burst, - ?HOCON(burst_rate(), #{ + ?HOCON(burst_rate_type(), #{ desc => deprecated_desc(burst), default => <<"0">> })} @@ -142,7 +143,7 @@ fields(bucket_opts) -> fields_of_bucket(<<"infinity">>); fields(client_opts) -> [ - {rate, ?HOCON(rate(), #{default => <<"infinity">>, desc => deprecated_desc(rate)})}, + {rate, ?HOCON(rate_type(), #{default => <<"infinity">>, desc => deprecated_desc(rate)})}, {initial, ?HOCON(initial(), #{ default => <<"0">>, @@ -164,7 +165,7 @@ fields(client_opts) -> } )}, {burst, - ?HOCON(burst(), #{ + ?HOCON(burst_type(), #{ desc => deprecated_desc(burst), default => <<"0">>, importance => ?IMPORTANCE_HIDDEN, @@ -211,7 +212,7 @@ short_paths_fields() -> short_paths_fields(Importance) -> [ {Name, - ?HOCON(rate(), #{ + ?HOCON(rate_type(), #{ desc => ?DESC(Name), required => false, importance => Importance, @@ -415,7 +416,7 @@ composite_bucket_fields(Types, ClientRef) -> fields_of_bucket(Default) -> [ - {rate, ?HOCON(rate(), #{desc => deprecated_desc(rate), default => Default})}, + {rate, ?HOCON(rate_type(), #{desc => deprecated_desc(rate), default => Default})}, {burst, ?HOCON(burst(), #{ desc => deprecated_desc(burst), @@ -461,3 +462,12 @@ alias_of_type(_) -> deprecated_desc(_Field) -> <<"Deprecated since v5.0.25">>. + +rate_type() -> + typerefl:alias("string", rate()). + +burst_type() -> + typerefl:alias("string", burst()). + +burst_rate_type() -> + typerefl:alias("string", burst_rate()). diff --git a/apps/emqx/src/emqx_persistent_message_ds_replayer.erl b/apps/emqx/src/emqx_persistent_message_ds_replayer.erl index d86ca84ad..69b6675d8 100644 --- a/apps/emqx/src/emqx_persistent_message_ds_replayer.erl +++ b/apps/emqx/src/emqx_persistent_message_ds_replayer.erl @@ -19,16 +19,18 @@ -module(emqx_persistent_message_ds_replayer). %% API: --export([new/0, next_packet_id/1, replay/2, commit_offset/3, poll/3]). +-export([new/0, next_packet_id/1, replay/2, commit_offset/3, poll/3, n_inflight/1]). %% internal exports: -export([]). -export_type([inflight/0]). +-include_lib("emqx/include/logger.hrl"). -include("emqx_persistent_session_ds.hrl"). -ifdef(TEST). +-include_lib("proper/include/proper.hrl"). -include_lib("eunit/include/eunit.hrl"). -endif. @@ -65,9 +67,28 @@ new() -> #inflight{}. -spec next_packet_id(inflight()) -> {emqx_types:packet_id(), inflight()}. -next_packet_id(Inflight0 = #inflight{next_seqno = LastSeqno}) -> - Inflight = Inflight0#inflight{next_seqno = LastSeqno + 1}, - {seqno_to_packet_id(LastSeqno), Inflight}. +next_packet_id(Inflight0 = #inflight{next_seqno = LastSeqNo}) -> + Inflight = Inflight0#inflight{next_seqno = LastSeqNo + 1}, + case LastSeqNo rem 16#10000 of + 0 -> + %% We skip sequence numbers that lead to PacketId = 0 to + %% simplify math. Note: it leads to occasional gaps in the + %% sequence numbers. + next_packet_id(Inflight); + PacketId -> + {PacketId, Inflight} + end. + +-spec n_inflight(inflight()) -> non_neg_integer(). +n_inflight(#inflight{next_seqno = NextSeqNo, acked_seqno = AckedSeqno}) -> + %% NOTE: this function assumes that gaps in the sequence ID occur + %% _only_ when the packet ID wraps: + case AckedSeqno >= ((NextSeqNo bsr 16) bsl 16) of + true -> + NextSeqNo - AckedSeqno; + false -> + NextSeqNo - AckedSeqno - 1 + end. 
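The bookkeeping above relies on splitting the session's monotonically increasing sequence number into a 16-bit MQTT packet ID (the low bits) and an "epoch" (the high bits); seqnos whose low bits are zero are skipped because 0 is not a valid MQTT packet ID. A minimal sketch of that arithmetic follows; the module and function names are illustrative only and not part of the patch.

%% Illustrative sketch of the epoch/packet-ID split assumed by
%% next_packet_id/1, n_inflight/1 and packet_id_to_seqno/2.
-module(seqno_arith_sketch).
-export([split/1, join/2]).

%% A seqno maps to {Epoch, PacketId}, e.g. split(16#10005) -> {1, 5}.
split(SeqNo) ->
    {SeqNo bsr 16, SeqNo rem 16#10000}.

%% Rebuild a seqno from an epoch and a packet ID, e.g. join(1, 5) -> 16#10005.
join(Epoch, PacketId) ->
    (Epoch bsl 16) + PacketId.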
-spec replay(emqx_persistent_session_ds:id(), inflight()) -> emqx_session:replies(). @@ -83,8 +104,20 @@ commit_offset( acked_seqno = AckedSeqno0, next_seqno = NextSeqNo, offset_ranges = Ranges0 } ) -> - AckedSeqno = packet_id_to_seqno(NextSeqNo, PacketId), - true = AckedSeqno0 < AckedSeqno, + AckedSeqno = + case packet_id_to_seqno(NextSeqNo, PacketId) of + N when N > AckedSeqno0; AckedSeqno0 =:= 0 -> + N; + OutOfRange -> + ?SLOG(warning, #{ + msg => "out-of-order_ack", + prev_seqno => AckedSeqno0, + acked_seqno => OutOfRange, + next_seqno => NextSeqNo, + packet_id => PacketId + }), + AckedSeqno0 + end, Ranges = lists:filter( fun(#range{stream = Stream, last = LastSeqno, iterator_next = ItNext}) -> case LastSeqno =< AckedSeqno of @@ -139,19 +172,18 @@ fetch(_SessionId, Inflight, _Streams, 0, Acc) -> fetch(SessionId, Inflight0, [Stream | Streams], N, Publishes0) -> #inflight{next_seqno = FirstSeqNo, offset_ranges = Ranges0} = Inflight0, ItBegin = get_last_iterator(SessionId, Stream, Ranges0), - {ok, ItEnd, Messages} = emqx_ds:next(ItBegin, N), - {Publishes, Inflight1} = + {ok, ItEnd, Messages} = emqx_ds:next(?PERSISTENT_MESSAGE_DB, ItBegin, N), + {NMessages, Publishes, Inflight1} = lists:foldl( - fun(Msg, {PubAcc0, InflightAcc0}) -> + fun(Msg, {N0, PubAcc0, InflightAcc0}) -> {PacketId, InflightAcc} = next_packet_id(InflightAcc0), PubAcc = [{PacketId, Msg} | PubAcc0], - {PubAcc, InflightAcc} + {N0 + 1, PubAcc, InflightAcc} end, - {Publishes0, Inflight0}, + {0, Publishes0, Inflight0}, Messages ), #inflight{next_seqno = LastSeqNo} = Inflight1, - NMessages = LastSeqNo - FirstSeqNo, case NMessages > 0 of true -> Range = #range{ @@ -167,8 +199,12 @@ fetch(SessionId, Inflight0, [Stream | Streams], N, Publishes0) -> end. -spec update_iterator(emqx_persistent_session_ds:id(), emqx_ds:stream(), emqx_ds:iterator()) -> ok. -update_iterator(SessionId, Stream, Iterator) -> - mria:dirty_write(?SESSION_ITER_TAB, #ds_iter{id = {SessionId, Stream}, iter = Iterator}). +update_iterator(DSSessionId, Stream, Iterator) -> + %% Workaround: we convert `Stream' to a binary before attempting to store it in + %% mnesia(rocksdb) because of a bug in `mnesia_rocksdb' when trying to do + %% `mnesia:dirty_all_keys' later. + StreamBin = term_to_binary(Stream), + mria:dirty_write(?SESSION_ITER_TAB, #ds_iter{id = {DSSessionId, StreamBin}, iter = Iterator}). get_last_iterator(SessionId, Stream, Ranges) -> case lists:keyfind(Stream, #range.stream, lists:reverse(Ranges)) of @@ -179,8 +215,10 @@ get_last_iterator(SessionId, Stream, Ranges) -> end. -spec get_iterator(emqx_persistent_session_ds:id(), emqx_ds:stream()) -> emqx_ds:iterator(). -get_iterator(SessionId, Stream) -> - Id = {SessionId, Stream}, +get_iterator(DSSessionId, Stream) -> + %% See comment in `update_iterator'. + StreamBin = term_to_binary(Stream), + Id = {DSSessionId, StreamBin}, [#ds_iter{iter = It}] = mnesia:dirty_read(?SESSION_ITER_TAB, Id), It. @@ -193,25 +231,22 @@ get_streams(SessionId) -> mnesia:dirty_read(?SESSION_STREAM_TAB, SessionId) ). -%% Packet ID as defined by MQTT protocol is a 16-bit integer in range -%% 1..FFFF. This function translates internal session sequence number -%% to MQTT packet ID by chopping off most significant bits and adding -%% 1. This assumes that there's never more FFFF in-flight packets at -%% any time: --spec seqno_to_packet_id(non_neg_integer()) -> emqx_types:packet_id(). -seqno_to_packet_id(Counter) -> - Counter rem 16#ffff + 1. 
- %% Reconstruct session counter by adding most significant bits from %% the current counter to the packet id. -spec packet_id_to_seqno(non_neg_integer(), emqx_types:packet_id()) -> non_neg_integer(). packet_id_to_seqno(NextSeqNo, PacketId) -> - N = ((NextSeqNo bsr 16) bsl 16) + PacketId, - case N > NextSeqNo of - true -> N - 16#10000; - false -> N + Epoch = NextSeqNo bsr 16, + case packet_id_to_seqno_(Epoch, PacketId) of + N when N =< NextSeqNo -> + N; + _ -> + packet_id_to_seqno_(Epoch - 1, PacketId) end. +-spec packet_id_to_seqno_(non_neg_integer(), emqx_types:packet_id()) -> non_neg_integer(). +packet_id_to_seqno_(Epoch, PacketId) -> + (Epoch bsl 16) + PacketId. + -spec shuffle([A]) -> [A]. shuffle(L0) -> L1 = lists:map( @@ -223,3 +258,57 @@ shuffle(L0) -> L2 = lists:sort(L1), {_, L} = lists:unzip(L2), L. + +-ifdef(TEST). + +%% This test only tests boundary conditions (to make sure property-based test didn't skip them): +packet_id_to_seqno_test() -> + %% Packet ID = 1; first epoch: + ?assertEqual(1, packet_id_to_seqno(1, 1)), + ?assertEqual(1, packet_id_to_seqno(10, 1)), + ?assertEqual(1, packet_id_to_seqno(1 bsl 16 - 1, 1)), + ?assertEqual(1, packet_id_to_seqno(1 bsl 16, 1)), + %% Packet ID = 1; second and 3rd epochs: + ?assertEqual(1 bsl 16 + 1, packet_id_to_seqno(1 bsl 16 + 1, 1)), + ?assertEqual(1 bsl 16 + 1, packet_id_to_seqno(2 bsl 16, 1)), + ?assertEqual(2 bsl 16 + 1, packet_id_to_seqno(2 bsl 16 + 1, 1)), + %% Packet ID = 16#ffff: + PID = 1 bsl 16 - 1, + ?assertEqual(PID, packet_id_to_seqno(PID, PID)), + ?assertEqual(PID, packet_id_to_seqno(1 bsl 16, PID)), + ?assertEqual(1 bsl 16 + PID, packet_id_to_seqno(2 bsl 16, PID)), + ok. + +packet_id_to_seqno_test_() -> + Opts = [{numtests, 1000}, {to_file, user}], + {timeout, 30, fun() -> ?assert(proper:quickcheck(packet_id_to_seqno_prop(), Opts)) end}. + +packet_id_to_seqno_prop() -> + ?FORALL( + NextSeqNo, + next_seqno_gen(), + ?FORALL( + SeqNo, + seqno_gen(NextSeqNo), + begin + PacketId = SeqNo rem 16#10000, + ?assertEqual(SeqNo, packet_id_to_seqno(NextSeqNo, PacketId)), + true + end + ) + ). + +next_seqno_gen() -> + ?LET( + {Epoch, Offset}, + {non_neg_integer(), non_neg_integer()}, + Epoch bsl 16 + Offset + ). + +seqno_gen(NextSeqNo) -> + WindowSize = 1 bsl 16 - 1, + Min = max(0, NextSeqNo - WindowSize), + Max = max(0, NextSeqNo - 1), + range(Min, Max). + +-endif. diff --git a/apps/emqx/src/emqx_persistent_session_ds.erl b/apps/emqx/src/emqx_persistent_session_ds.erl index 5ab7723f7..6c0fc2dcc 100644 --- a/apps/emqx/src/emqx_persistent_session_ds.erl +++ b/apps/emqx/src/emqx_persistent_session_ds.erl @@ -16,6 +16,8 @@ -module(emqx_persistent_session_ds). +-behaviour(emqx_session). + -include("emqx.hrl"). -include_lib("snabbkaffe/include/snabbkaffe.hrl"). -include_lib("stdlib/include/ms_transform.hrl"). @@ -69,7 +71,13 @@ ]). -ifdef(TEST). --export([session_open/1]). +-export([ + session_open/1, + list_all_sessions/0, + list_all_subscriptions/0, + list_all_streams/0, + list_all_iterators/0 +]). -endif. %% Currently, this is the clientid. We avoid `emqx_types:clientid()' because that can be @@ -93,6 +101,8 @@ iterators := #{topic() => subscription()}, %% Inflight messages inflight := emqx_persistent_message_ds_replayer:inflight(), + %% Receive maximum + receive_maximum := pos_integer(), %% props := map() }. @@ -103,22 +113,28 @@ -type conninfo() :: emqx_session:conninfo(). -type replies() :: emqx_session:replies(). --export_type([id/0]). 
+-define(STATS_KEYS, [ + subscriptions_cnt, + subscriptions_max, + inflight_cnt, + inflight_max, + next_pkt_id +]). --define(PERSISTENT_MESSAGE_DB, emqx_persistent_message). +-export_type([id/0]). %% -spec create(clientinfo(), conninfo(), emqx_session:conf()) -> session(). -create(#{clientid := ClientID}, _ConnInfo, Conf) -> +create(#{clientid := ClientID}, ConnInfo, Conf) -> % TODO: expiration ensure_timers(), - ensure_session(ClientID, Conf). + ensure_session(ClientID, ConnInfo, Conf). -spec open(clientinfo(), conninfo()) -> {_IsPresent :: true, session(), []} | false. -open(#{clientid := ClientID}, _ConnInfo) -> +open(#{clientid := ClientID} = _ClientInfo, ConnInfo) -> %% NOTE %% The fact that we need to concern about discarding all live channels here %% is essentially a consequence of the in-memory session design, where we @@ -127,16 +143,19 @@ open(#{clientid := ClientID}, _ConnInfo) -> %% space, and move this call back into `emqx_cm` where it belongs. ok = emqx_cm:discard_session(ClientID), case open_session(ClientID) of - Session = #{} -> + Session0 = #{} -> ensure_timers(), + ReceiveMaximum = receive_maximum(ConnInfo), + Session = Session0#{receive_maximum => ReceiveMaximum}, {true, Session, []}; false -> false end. -ensure_session(ClientID, Conf) -> +ensure_session(ClientID, ConnInfo, Conf) -> {ok, Session, #{}} = session_ensure_new(ClientID, Conf), - Session#{iterators => #{}}. + ReceiveMaximum = receive_maximum(ConnInfo), + Session#{iterators => #{}, receive_maximum => ReceiveMaximum}. open_session(ClientID) -> case session_open(ClientID) of @@ -186,10 +205,10 @@ info(upgrade_qos, #{props := Conf}) -> maps:get(upgrade_qos, Conf); % info(inflight, #sessmem{inflight = Inflight}) -> % Inflight; -% info(inflight_cnt, #sessmem{inflight = Inflight}) -> -% emqx_inflight:size(Inflight); -% info(inflight_max, #sessmem{inflight = Inflight}) -> -% emqx_inflight:max_size(Inflight); +info(inflight_cnt, #{inflight := Inflight}) -> + emqx_persistent_message_ds_replayer:n_inflight(Inflight); +info(inflight_max, #{receive_maximum := ReceiveMaximum}) -> + ReceiveMaximum; info(retry_interval, #{props := Conf}) -> maps:get(retry_interval, Conf); % info(mqueue, #sessmem{mqueue = MQueue}) -> @@ -200,8 +219,9 @@ info(retry_interval, #{props := Conf}) -> % emqx_mqueue:max_len(MQueue); % info(mqueue_dropped, #sessmem{mqueue = MQueue}) -> % emqx_mqueue:dropped(MQueue); -info(next_pkt_id, #{}) -> - _PacketId = 'TODO'; +info(next_pkt_id, #{inflight := Inflight}) -> + {PacketId, _} = emqx_persistent_message_ds_replayer:next_packet_id(Inflight), + PacketId; % info(awaiting_rel, #sessmem{awaiting_rel = AwaitingRel}) -> % AwaitingRel; % info(awaiting_rel_cnt, #sessmem{awaiting_rel = AwaitingRel}) -> @@ -213,8 +233,7 @@ info(await_rel_timeout, #{props := Conf}) -> -spec stats(session()) -> emqx_types:stats(). stats(Session) -> - % TODO: stub - info([], Session). + info(?STATS_KEYS, Session). %%-------------------------------------------------------------------- %% Client -> Broker: SUBSCRIBE / UNSUBSCRIBE @@ -339,9 +358,12 @@ deliver(_ClientInfo, _Delivers, Session) -> -spec handle_timeout(clientinfo(), _Timeout, session()) -> {ok, replies(), session()} | {ok, replies(), timeout(), session()}. 
-handle_timeout(_ClientInfo, pull, Session = #{id := Id, inflight := Inflight0}) -> - WindowSize = 100, - {Publishes, Inflight} = emqx_persistent_message_ds_replayer:poll(Id, Inflight0, WindowSize), +handle_timeout( + _ClientInfo, + pull, + Session = #{id := Id, inflight := Inflight0, receive_maximum := ReceiveMaximum} +) -> + {Publishes, Inflight} = emqx_persistent_message_ds_replayer:poll(Id, Inflight0, ReceiveMaximum), %% TODO: make these values configurable: Timeout = case Publishes of @@ -497,8 +519,6 @@ storage() -> %% @doc Called when a client connects. This function looks up a %% session or returns `false` if previous one couldn't be found. %% -%% This function also spawns replay agents for each iterator. -%% %% Note: session API doesn't handle session takeovers, it's the job of %% the broker. -spec session_open(id()) -> @@ -541,14 +561,24 @@ session_create(SessionId, Props) -> -spec session_drop(id()) -> ok. session_drop(DSSessionId) -> transaction(fun() -> - %% TODO: ensure all iterators from this clientid are closed? ok = session_drop_subscriptions(DSSessionId), + ok = session_drop_iterators(DSSessionId), + ok = session_drop_streams(DSSessionId), ok = mnesia:delete(?SESSION_TAB, DSSessionId, write) end). +-spec session_drop_subscriptions(id()) -> ok. session_drop_subscriptions(DSSessionId) -> - IteratorRefs = session_read_subscriptions(DSSessionId), - ok = lists:foreach(fun session_del_subscription/1, IteratorRefs). + Subscriptions = session_read_subscriptions(DSSessionId), + lists:foreach( + fun(#ds_sub{id = DSSubId} = DSSub) -> + TopicFilter = subscription_id_to_topic_filter(DSSubId), + TopicFilterBin = emqx_topic:join(TopicFilter), + ok = emqx_persistent_session_ds_router:do_delete_route(TopicFilterBin, DSSessionId), + ok = session_del_subscription(DSSub) + end, + Subscriptions + ). %% @doc Called when a client subscribes to a topic. Idempotent. -spec session_add_subscription(id(), topic_filter(), _Props :: map()) -> @@ -619,6 +649,10 @@ new_subscription_id(DSSessionId, TopicFilter) -> DSSubId = {DSSessionId, TopicFilter}, {DSSubId, NowMS}. +-spec subscription_id_to_topic_filter(subscription_id()) -> topic_filter(). +subscription_id_to_topic_filter({_DSSessionId, TopicFilter}) -> + TopicFilter. + %%-------------------------------------------------------------------- %% RPC targets (v1) %%-------------------------------------------------------------------- @@ -643,24 +677,26 @@ do_ensure_all_iterators_closed(_DSSessionID) -> %% Reading batches %%-------------------------------------------------------------------- -renew_streams(Id) -> - Subscriptions = ro_transaction(fun() -> session_read_subscriptions(Id) end), - ExistingStreams = ro_transaction(fun() -> mnesia:read(?SESSION_STREAM_TAB, Id) end), +-spec renew_streams(id()) -> ok. +renew_streams(DSSessionId) -> + Subscriptions = ro_transaction(fun() -> session_read_subscriptions(DSSessionId) end), + ExistingStreams = ro_transaction(fun() -> mnesia:read(?SESSION_STREAM_TAB, DSSessionId) end), lists:foreach( fun(#ds_sub{id = {_, TopicFilter}, start_time = StartTime}) -> - renew_streams(Id, ExistingStreams, TopicFilter, StartTime) + renew_streams(DSSessionId, ExistingStreams, TopicFilter, StartTime) end, Subscriptions ). -renew_streams(Id, ExistingStreams, TopicFilter, StartTime) -> +-spec renew_streams(id(), [ds_stream()], emqx_ds:topic_filter(), emqx_ds:time()) -> ok. 
+renew_streams(DSSessionId, ExistingStreams, TopicFilter, StartTime) -> AllStreams = emqx_ds:get_streams(?PERSISTENT_MESSAGE_DB, TopicFilter, StartTime), transaction( fun() -> lists:foreach( fun({Rank, Stream}) -> Rec = #ds_stream{ - session = Id, + session = DSSessionId, topic_filter = TopicFilter, stream = Stream, rank = Rank @@ -670,8 +706,15 @@ renew_streams(Id, ExistingStreams, TopicFilter, StartTime) -> ok; false -> mnesia:write(?SESSION_STREAM_TAB, Rec, write), - {ok, Iterator} = emqx_ds:make_iterator(Stream, TopicFilter, StartTime), - IterRec = #ds_iter{id = {Id, Stream}, iter = Iterator}, + {ok, Iterator} = emqx_ds:make_iterator( + ?PERSISTENT_MESSAGE_DB, Stream, TopicFilter, StartTime + ), + %% Workaround: we convert `Stream' to a binary before + %% attempting to store it in mnesia(rocksdb) because of a bug + %% in `mnesia_rocksdb' when trying to do + %% `mnesia:dirty_all_keys' later. + StreamBin = term_to_binary(Stream), + IterRec = #ds_iter{id = {DSSessionId, StreamBin}, iter = Iterator}, mnesia:write(?SESSION_ITER_TAB, IterRec, write) end end, @@ -680,6 +723,33 @@ renew_streams(Id, ExistingStreams, TopicFilter, StartTime) -> end ). +%% must be called inside a transaction +-spec session_drop_streams(id()) -> ok. +session_drop_streams(DSSessionId) -> + MS = ets:fun2ms( + fun(#ds_stream{session = DSSessionId0}) when DSSessionId0 =:= DSSessionId -> + DSSessionId0 + end + ), + StreamIDs = mnesia:select(?SESSION_STREAM_TAB, MS, write), + lists:foreach(fun(Key) -> mnesia:delete(?SESSION_STREAM_TAB, Key, write) end, StreamIDs). + +%% must be called inside a transaction +-spec session_drop_iterators(id()) -> ok. +session_drop_iterators(DSSessionId) -> + MS = ets:fun2ms( + fun(#ds_iter{id = {DSSessionId0, StreamBin}}) when DSSessionId0 =:= DSSessionId -> + StreamBin + end + ), + StreamBins = mnesia:select(?SESSION_ITER_TAB, MS, write), + lists:foreach( + fun(StreamBin) -> + mnesia:delete(?SESSION_ITER_TAB, {DSSessionId, StreamBin}, write) + end, + StreamBins + ). + %%-------------------------------------------------------------------------------- transaction(Fun) -> @@ -726,3 +796,70 @@ ensure_timer(Type) -> ensure_timer(Type, Timeout) -> _ = emqx_utils:start_timer(Timeout, {emqx_session, Type}), ok. + +-spec receive_maximum(conninfo()) -> pos_integer(). +receive_maximum(ConnInfo) -> + %% Note: the default value should be always set by the channel + %% with respect to the zone configuration, but the type spec + %% indicates that it's optional. + maps:get(receive_maximum, ConnInfo, 65_535). + +-ifdef(TEST). +list_all_sessions() -> + DSSessionIds = mnesia:dirty_all_keys(?SESSION_TAB), + Sessions = lists:map( + fun(SessionID) -> + {ok, Session, Subscriptions} = session_open(SessionID), + {SessionID, #{session => Session, subscriptions => Subscriptions}} + end, + DSSessionIds + ), + maps:from_list(Sessions). + +list_all_subscriptions() -> + DSSubIds = mnesia:dirty_all_keys(?SESSION_SUBSCRIPTIONS_TAB), + Subscriptions = lists:map( + fun(DSSubId) -> + [DSSub] = mnesia:dirty_read(?SESSION_SUBSCRIPTIONS_TAB, DSSubId), + {DSSubId, export_subscription(DSSub)} + end, + DSSubIds + ), + maps:from_list(Subscriptions). 
+ +list_all_streams() -> + DSStreamIds = mnesia:dirty_all_keys(?SESSION_STREAM_TAB), + DSStreams = lists:map( + fun(DSStreamId) -> + Records = mnesia:dirty_read(?SESSION_STREAM_TAB, DSStreamId), + ExtDSStreams = + lists:map( + fun(Record) -> + export_record( + Record, + #ds_stream.session, + [session, topic_filter, stream, rank], + #{} + ) + end, + Records + ), + {DSStreamId, ExtDSStreams} + end, + DSStreamIds + ), + maps:from_list(DSStreams). + +list_all_iterators() -> + DSIterIds = mnesia:dirty_all_keys(?SESSION_ITER_TAB), + DSIters = lists:map( + fun(DSIterId) -> + [Record] = mnesia:dirty_read(?SESSION_ITER_TAB, DSIterId), + {DSIterId, export_record(Record, #ds_iter.id, [id, iter], #{})} + end, + DSIterIds + ), + maps:from_list(DSIters). + +%% ifdef(TEST) +-endif. diff --git a/apps/emqx/src/emqx_persistent_session_ds.hrl b/apps/emqx/src/emqx_persistent_session_ds.hrl index 81b997df5..cc995ce66 100644 --- a/apps/emqx/src/emqx_persistent_session_ds.hrl +++ b/apps/emqx/src/emqx_persistent_session_ds.hrl @@ -16,6 +16,8 @@ -ifndef(EMQX_PERSISTENT_SESSION_DS_HRL_HRL). -define(EMQX_PERSISTENT_SESSION_DS_HRL_HRL, true). +-define(PERSISTENT_MESSAGE_DB, emqx_persistent_message). + -define(SESSION_TAB, emqx_ds_session). -define(SESSION_SUBSCRIPTIONS_TAB, emqx_ds_session_subscriptions). -define(SESSION_STREAM_TAB, emqx_ds_stream_tab). @@ -37,9 +39,10 @@ rank :: emqx_ds:stream_rank() }). -type ds_stream() :: #ds_stream{}. +-type ds_stream_bin() :: binary(). -record(ds_iter, { - id :: {emqx_persistent_session_ds:id(), emqx_ds:stream()}, + id :: {emqx_persistent_session_ds:id(), ds_stream_bin()}, iter :: emqx_ds:iterator() }). diff --git a/apps/emqx/src/emqx_schema.erl b/apps/emqx/src/emqx_schema.erl index 3848e77b4..3ad03c4d4 100644 --- a/apps/emqx/src/emqx_schema.erl +++ b/apps/emqx/src/emqx_schema.erl @@ -47,11 +47,9 @@ -type bytesize() :: integer(). -type wordsize() :: bytesize(). -type percent() :: float(). --type file() :: string(). --type comma_separated_list() :: list(). +-type comma_separated_list() :: list(string()). -type comma_separated_binary() :: [binary()]. -type comma_separated_atoms() :: [atom()]. --type bar_separated_list() :: list(). -type ip_port() :: tuple() | integer(). -type cipher() :: map(). -type port_number() :: 1..65535. @@ -75,7 +73,6 @@ -typerefl_from_string({percent/0, emqx_schema, to_percent}). -typerefl_from_string({comma_separated_list/0, emqx_schema, to_comma_separated_list}). -typerefl_from_string({comma_separated_binary/0, emqx_schema, to_comma_separated_binary}). --typerefl_from_string({bar_separated_list/0, emqx_schema, to_bar_separated_list}). -typerefl_from_string({ip_port/0, emqx_schema, to_ip_port}). -typerefl_from_string({cipher/0, emqx_schema, to_erl_cipher_suite}). -typerefl_from_string({comma_separated_atoms/0, emqx_schema, to_comma_separated_atoms}). @@ -118,7 +115,6 @@ to_percent/1, to_comma_separated_list/1, to_comma_separated_binary/1, - to_bar_separated_list/1, to_ip_port/1, to_erl_cipher_suite/1, to_comma_separated_atoms/1, @@ -154,10 +150,8 @@ bytesize/0, wordsize/0, percent/0, - file/0, comma_separated_list/0, comma_separated_binary/0, - bar_separated_list/0, ip_port/0, cipher/0, comma_separated_atoms/0, @@ -2564,9 +2558,6 @@ to_json_binary(Str) -> Error end. -to_bar_separated_list(Str) -> - {ok, string:tokens(Str, "| ")}. 
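As the workaround comments above explain, the stream term is serialized with term_to_binary/1 before being used in the ?SESSION_ITER_TAB key, to avoid a mnesia_rocksdb bug triggered by mnesia:dirty_all_keys/1 later on. A minimal sketch of the key round-trip, with an illustrative module name:

%% Illustrative only: the stream is stored under a binary key and the
%% original term can be recovered with binary_to_term/1 when needed.
-module(stream_key_sketch).
-export([to_key/2, from_key/1]).

to_key(DSSessionId, Stream) ->
    {DSSessionId, term_to_binary(Stream)}.

from_key({DSSessionId, StreamBin}) ->
    {DSSessionId, binary_to_term(StreamBin)}.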
- %% @doc support the following format: %% - 127.0.0.1:1883 %% - ::1:1883 @@ -3316,7 +3307,7 @@ get_tombstone_map_value_type(Schema) -> %% hoconsc:map_value_type(Schema) ?MAP(_Name, Union) = hocon_schema:field_schema(Schema, type), %% TODO: violation of abstraction, fix hoconsc:union_members/1 - ?UNION(Members) = Union, + ?UNION(Members, _) = Union, Tombstone = tombstone(), [Type, Tombstone] = hoconsc:union_members(Members), Type. diff --git a/apps/emqx/src/emqx_session.erl b/apps/emqx/src/emqx_session.erl index 147b0b35c..4bae4ce03 100644 --- a/apps/emqx/src/emqx_session.erl +++ b/apps/emqx/src/emqx_session.erl @@ -176,6 +176,7 @@ t(). -callback open(clientinfo(), conninfo()) -> {_IsPresent :: true, t(), _ReplayContext} | false. +-callback destroy(t() | clientinfo()) -> ok. %%-------------------------------------------------------------------- %% Create a Session @@ -247,7 +248,14 @@ get_mqtt_conf(Zone, Key) -> -spec destroy(clientinfo(), conninfo()) -> ok. destroy(ClientInfo, ConnInfo) -> - (choose_impl_mod(ConnInfo)):destroy(ClientInfo). + %% When destroying/discarding a session, the current `ClientInfo' might suggest an + %% implementation which does not correspond to the one previously used by this client. + %% An example of this is a client that first connects with `Session-Expiry-Interval' > + %% 0, and later reconnects with `Session-Expiry-Interval' = 0 and `clean_start' = + %% true. So we may simply destroy sessions from all implementations, since the key + %% (ClientID) is the same. + Mods = choose_impl_candidates(ConnInfo), + lists:foreach(fun(Mod) -> Mod:destroy(ClientInfo) end, Mods). -spec destroy(t()) -> ok. destroy(Session) -> diff --git a/apps/emqx/src/emqx_session_mem.erl b/apps/emqx/src/emqx_session_mem.erl index 8279953c1..d609435c0 100644 --- a/apps/emqx/src/emqx_session_mem.erl +++ b/apps/emqx/src/emqx_session_mem.erl @@ -44,6 +44,8 @@ %% State is stored in-memory in the process heap. -module(emqx_session_mem). +-behaviour(emqx_session). + -include("emqx.hrl"). -include("emqx_mqtt.hrl"). -include("emqx_session_mem.hrl"). diff --git a/apps/emqx/test/emqx_cth_suite.erl b/apps/emqx/test/emqx_cth_suite.erl index 5a59238de..401d4f59d 100644 --- a/apps/emqx/test/emqx_cth_suite.erl +++ b/apps/emqx/test/emqx_cth_suite.erl @@ -74,6 +74,9 @@ -export([merge_appspec/2]). +%% "Unofficial" `emqx_config_handler' and `emqx_conf' APIs +-export([schema_module/0, upgrade_raw_conf/1]). + -export_type([appspec/0]). -export_type([appspec_opts/0]). @@ -477,3 +480,18 @@ render_config(Config = #{}) -> unicode:characters_to_binary(hocon_pp:do(Config, #{})); render_config(Config) -> unicode:characters_to_binary(Config). + +%% + +%% "Unofficial" `emqx_config_handler' API +schema_module() -> + ?MODULE. + +%% "Unofficial" `emqx_conf' API +upgrade_raw_conf(Conf) -> + case emqx_release:edition() of + ee -> + emqx_enterprise_schema:upgrade_raw_conf(Conf); + ce -> + emqx_conf_schema:upgrade_raw_conf(Conf) + end. 
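The upgrade_conf/1 helper added to emqx_config_handler earlier in this diff and these "unofficial" emqx_cth_suite exports are two ends of the same path: the handler asks the registered config loader for its schema module and runs upgrade_raw_conf/1 over the old override config before merging, which is what the new emqx_cth_suite exports appear to satisfy in test runs. A minimal sketch of that dispatch, with the error handling elided and an illustrative module/function name:

%% Sketch of the lookup chain performed by emqx_config_handler:upgrade_conf/1.
-module(config_upgrade_sketch).
-export([upgrade_override_conf/1]).

upgrade_override_conf(OldConf) ->
    ConfLoader = emqx_app:get_config_loader(),
    SchemaModule = ConfLoader:schema_module(),
    SchemaModule:upgrade_raw_conf(OldConf).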
diff --git a/apps/emqx/test/emqx_persistent_messages_SUITE.erl b/apps/emqx/test/emqx_persistent_messages_SUITE.erl index 52ba090b5..45cf85a05 100644 --- a/apps/emqx/test/emqx_persistent_messages_SUITE.erl +++ b/apps/emqx/test/emqx_persistent_messages_SUITE.erl @@ -256,14 +256,14 @@ consume(TopicFilter, StartMS) -> Streams = emqx_ds:get_streams(?PERSISTENT_MESSAGE_DB, TopicFilter, StartMS), lists:flatmap( fun({_Rank, Stream}) -> - {ok, It} = emqx_ds:make_iterator(Stream, TopicFilter, StartMS), + {ok, It} = emqx_ds:make_iterator(?PERSISTENT_MESSAGE_DB, Stream, TopicFilter, StartMS), consume(It) end, Streams ). consume(It) -> - case emqx_ds:next(It, 100) of + case emqx_ds:next(?PERSISTENT_MESSAGE_DB, It, 100) of {ok, _NIt, _Msgs = []} -> []; {ok, NIt, Msgs} -> diff --git a/apps/emqx/test/emqx_persistent_session_SUITE.erl b/apps/emqx/test/emqx_persistent_session_SUITE.erl index 5a14e0bc9..bd7ca1c46 100644 --- a/apps/emqx/test/emqx_persistent_session_SUITE.erl +++ b/apps/emqx/test/emqx_persistent_session_SUITE.erl @@ -133,7 +133,7 @@ get_listener_port(Type, Name) -> end_per_group(Group, Config) when Group == tcp; Group == ws; Group == quic -> ok = emqx_cth_suite:stop(?config(group_apps, Config)); end_per_group(_, _Config) -> - ok = emqx_ds:drop_db(?PERSISTENT_MESSAGE_DB), + catch emqx_ds:drop_db(?PERSISTENT_MESSAGE_DB), ok. init_per_testcase(TestCase, Config) -> @@ -599,6 +599,7 @@ t_publish_while_client_is_gone(Config) -> ok = emqtt:disconnect(Client2). +%% TODO: don't skip after QoS2 support is added to DS. t_clean_start_drops_subscriptions(init, Config) -> skip_ds_tc(Config); t_clean_start_drops_subscriptions('end', _Config) -> ok. t_clean_start_drops_subscriptions(Config) -> diff --git a/apps/emqx/test/emqx_quic_multistreams_SUITE.erl b/apps/emqx/test/emqx_quic_multistreams_SUITE.erl index 267782ff9..c5eaf4c24 100644 --- a/apps/emqx/test/emqx_quic_multistreams_SUITE.erl +++ b/apps/emqx/test/emqx_quic_multistreams_SUITE.erl @@ -674,7 +674,16 @@ t_multi_streams_packet_malform(Config) -> ?assert(is_list(emqtt:info(C))), - {error, stm_send_error, aborted} = quicer:send(MalformStream, <<1, 2, 3, 4, 5, 6, 7, 8, 9, 0>>), + {error, stm_send_error, _} = + snabbkaffe:retry( + 10000, + 10, + fun() -> + {error, stm_send_error, _} = quicer:send( + MalformStream, <<1, 2, 3, 4, 5, 6, 7, 8, 9, 0>> + ) + end + ), ?assert(is_list(emqtt:info(C))), diff --git a/apps/emqx_auth/src/emqx_auth.app.src b/apps/emqx_auth/src/emqx_auth.app.src index cfd2aa447..3d9109fd1 100644 --- a/apps/emqx_auth/src/emqx_auth.app.src +++ b/apps/emqx_auth/src/emqx_auth.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_auth, [ {description, "EMQX Authentication and authorization"}, - {vsn, "0.1.27"}, + {vsn, "0.1.28"}, {modules, []}, {registered, [emqx_auth_sup]}, {applications, [ diff --git a/apps/emqx_auth/src/emqx_authn/emqx_authn_schema.erl b/apps/emqx_auth/src/emqx_authn/emqx_authn_schema.erl index 9b9935a1f..371c6f2be 100644 --- a/apps/emqx_auth/src/emqx_authn/emqx_authn_schema.erl +++ b/apps/emqx_auth/src/emqx_authn/emqx_authn_schema.erl @@ -38,7 +38,8 @@ authenticator_type_without/1, authenticator_type_without/2, mechanism/1, - backend/1 + backend/1, + namespace/0 ]). -export([ @@ -60,6 +61,7 @@ api_write %% config: schema for config validation | config. +-callback namespace() -> string(). -callback refs() -> [schema_ref()]. -callback refs(shema_kind()) -> [schema_ref()]. -callback select_union_member(emqx_config:raw_config()) -> [schema_ref()] | undefined | no_return(). @@ -74,6 +76,8 @@ refs/1 ]). 
+namespace() -> "authn". + roots() -> []. injected_fields(AuthnSchemaMods) -> diff --git a/apps/emqx_auth/src/emqx_authz/emqx_authz_schema.erl b/apps/emqx_auth/src/emqx_authz/emqx_authz_schema.erl index ac2c2503d..426c7a9f6 100644 --- a/apps/emqx_auth/src/emqx_authz/emqx_authz_schema.erl +++ b/apps/emqx_auth/src/emqx_authz/emqx_authz_schema.erl @@ -136,7 +136,7 @@ authz_fields() -> [ {sources, ?HOCON( - ?ARRAY(?UNION(UnionMemberSelector)), + ?ARRAY(hoconsc:union(UnionMemberSelector)), #{ default => [default_authz()], desc => ?DESC(sources), @@ -153,7 +153,7 @@ api_authz_fields() -> [{sources, ?HOCON(?ARRAY(api_source_type()), #{desc => ?DESC(sources)})}]. api_source_type() -> - ?UNION(api_authz_refs()). + hoconsc:union(api_authz_refs()). api_authz_refs() -> lists:concat([api_source_refs(Mod) || Mod <- source_schema_mods()]). diff --git a/apps/emqx_auth/test/emqx_authn/emqx_authn_chains_SUITE.erl b/apps/emqx_auth/test/emqx_authn/emqx_authn_chains_SUITE.erl index 747a1d15a..61a15b139 100644 --- a/apps/emqx_auth/test/emqx_authn/emqx_authn_chains_SUITE.erl +++ b/apps/emqx_auth/test/emqx_authn/emqx_authn_chains_SUITE.erl @@ -16,7 +16,6 @@ -module(emqx_authn_chains_SUITE). --behaviour(hocon_schema). -behaviour(emqx_authn_provider). -compile(export_all). diff --git a/apps/emqx_auth/test/emqx_authn/emqx_authn_schema_SUITE.erl b/apps/emqx_auth/test/emqx_authn/emqx_authn_schema_SUITE.erl index 6d6ea420f..23532b4af 100644 --- a/apps/emqx_auth/test/emqx_authn/emqx_authn_schema_SUITE.erl +++ b/apps/emqx_auth/test/emqx_authn/emqx_authn_schema_SUITE.erl @@ -54,7 +54,7 @@ t_check_schema(_Config) -> ?assertThrow( #{ path := "authentication.1.password_hash_algorithm.name", - matched_type := "builtin_db/authn-hash:simple", + matched_type := "authn:builtin_db/authn-hash:simple", reason := unable_to_convert_to_enum_symbol }, Check(ConfigNotOk) @@ -73,7 +73,7 @@ t_check_schema(_Config) -> #{ path := "authentication.1.password_hash_algorithm", reason := "algorithm_name_missing", - matched_type := "builtin_db" + matched_type := "authn:builtin_db" }, Check(ConfigMissingAlgoName) ). diff --git a/apps/emqx_auth/test/emqx_authn/emqx_authn_schema_tests.erl b/apps/emqx_auth/test/emqx_authn/emqx_authn_schema_tests.erl index b0451e110..b4835cdaa 100644 --- a/apps/emqx_auth/test/emqx_authn/emqx_authn_schema_tests.erl +++ b/apps/emqx_auth/test/emqx_authn/emqx_authn_schema_tests.erl @@ -22,6 +22,7 @@ -define(ERR(Reason), {error, Reason}). union_member_selector_mongo_test_() -> + ok = ensure_schema_load(), [ {"unknown", fun() -> ?assertMatch( @@ -31,25 +32,26 @@ union_member_selector_mongo_test_() -> end}, {"single", fun() -> ?assertMatch( - ?ERR(#{matched_type := "mongo_single"}), + ?ERR(#{matched_type := "authn:mongo_single"}), check("{mechanism = password_based, backend = mongodb, mongo_type = single}") ) end}, {"replica-set", fun() -> ?assertMatch( - ?ERR(#{matched_type := "mongo_rs"}), + ?ERR(#{matched_type := "authn:mongo_rs"}), check("{mechanism = password_based, backend = mongodb, mongo_type = rs}") ) end}, {"sharded", fun() -> ?assertMatch( - ?ERR(#{matched_type := "mongo_sharded"}), + ?ERR(#{matched_type := "authn:mongo_sharded"}), check("{mechanism = password_based, backend = mongodb, mongo_type = sharded}") ) end} ]. 
union_member_selector_jwt_test_() -> + ok = ensure_schema_load(), [ {"unknown", fun() -> ?assertMatch( @@ -59,25 +61,26 @@ union_member_selector_jwt_test_() -> end}, {"jwks", fun() -> ?assertMatch( - ?ERR(#{matched_type := "jwt_jwks"}), + ?ERR(#{matched_type := "authn:jwt_jwks"}), check("{mechanism = jwt, use_jwks = true}") ) end}, {"publick-key", fun() -> ?assertMatch( - ?ERR(#{matched_type := "jwt_public_key"}), + ?ERR(#{matched_type := "authn:jwt_public_key"}), check("{mechanism = jwt, use_jwks = false, public_key = 1}") ) end}, {"hmac-based", fun() -> ?assertMatch( - ?ERR(#{matched_type := "jwt_hmac"}), + ?ERR(#{matched_type := "authn:jwt_hmac"}), check("{mechanism = jwt, use_jwks = false}") ) end} ]. union_member_selector_redis_test_() -> + ok = ensure_schema_load(), [ {"unknown", fun() -> ?assertMatch( @@ -87,25 +90,26 @@ union_member_selector_redis_test_() -> end}, {"single", fun() -> ?assertMatch( - ?ERR(#{matched_type := "redis_single"}), + ?ERR(#{matched_type := "authn:redis_single"}), check("{mechanism = password_based, backend = redis, redis_type = single}") ) end}, {"cluster", fun() -> ?assertMatch( - ?ERR(#{matched_type := "redis_cluster"}), + ?ERR(#{matched_type := "authn:redis_cluster"}), check("{mechanism = password_based, backend = redis, redis_type = cluster}") ) end}, {"sentinel", fun() -> ?assertMatch( - ?ERR(#{matched_type := "redis_sentinel"}), + ?ERR(#{matched_type := "authn:redis_sentinel"}), check("{mechanism = password_based, backend = redis, redis_type = sentinel}") ) end} ]. union_member_selector_http_test_() -> + ok = ensure_schema_load(), [ {"unknown", fun() -> ?assertMatch( @@ -115,13 +119,13 @@ union_member_selector_http_test_() -> end}, {"get", fun() -> ?assertMatch( - ?ERR(#{matched_type := "http_get"}), + ?ERR(#{matched_type := "authn:http_get"}), check("{mechanism = password_based, backend = http, method = get}") ) end}, {"post", fun() -> ?assertMatch( - ?ERR(#{matched_type := "http_post"}), + ?ERR(#{matched_type := "authn:http_post"}), check("{mechanism = password_based, backend = http, method = post}") ) end} @@ -132,3 +136,7 @@ check(HoconConf) -> #{roots => emqx_authn_schema:global_auth_fields()}, ["authentication= ", HoconConf] ). + +ensure_schema_load() -> + _ = emqx_conf_schema:roots(), + ok. diff --git a/apps/emqx_auth_http/src/emqx_auth_http.app.src b/apps/emqx_auth_http/src/emqx_auth_http.app.src index b5de90ad9..183b9a993 100644 --- a/apps/emqx_auth_http/src/emqx_auth_http.app.src +++ b/apps/emqx_auth_http/src/emqx_auth_http.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_auth_http, [ {description, "EMQX External HTTP API Authentication and Authorization"}, - {vsn, "0.1.0"}, + {vsn, "0.1.1"}, {registered, []}, {mod, {emqx_auth_http_app, []}}, {applications, [ diff --git a/apps/emqx_auth_http/src/emqx_authn_http_schema.erl b/apps/emqx_auth_http/src/emqx_authn_http_schema.erl index 1eaac6378..7b7af727d 100644 --- a/apps/emqx_auth_http/src/emqx_authn_http_schema.erl +++ b/apps/emqx_auth_http/src/emqx_authn_http_schema.erl @@ -16,10 +16,6 @@ -module(emqx_authn_http_schema). --include("emqx_auth_http.hrl"). --include_lib("emqx_auth/include/emqx_authn.hrl"). --include_lib("hocon/include/hoconsc.hrl"). - -behaviour(emqx_authn_schema). -export([ @@ -27,9 +23,14 @@ validations/0, desc/1, refs/0, - select_union_member/1 + select_union_member/1, + namespace/0 ]). +-include("emqx_auth_http.hrl"). +-include_lib("emqx_auth/include/emqx_authn.hrl"). +-include_lib("hocon/include/hoconsc.hrl"). 
+ -define(NOT_EMPTY(MSG), emqx_resource_validator:not_empty(MSG)). -define(THROW_VALIDATION_ERROR(ERROR, MESSAGE), throw(#{ @@ -38,6 +39,8 @@ }) ). +namespace() -> "authn". + refs() -> [?R_REF(http_get), ?R_REF(http_post)]. @@ -97,7 +100,7 @@ common_fields() -> {backend, emqx_authn_schema:backend(?AUTHN_BACKEND)}, {url, fun url/1}, {body, - hoconsc:mk(map([{fuzzy, term(), binary()}]), #{ + hoconsc:mk(typerefl:alias("map", map([{fuzzy, term(), binary()}])), #{ required => false, desc => ?DESC(body) })}, {request_timeout, fun request_timeout/1} diff --git a/apps/emqx_auth_http/src/emqx_authz_http_schema.erl b/apps/emqx_auth_http/src/emqx_authz_http_schema.erl index 18ec23757..90a7439a2 100644 --- a/apps/emqx_auth_http/src/emqx_authz_http_schema.erl +++ b/apps/emqx_auth_http/src/emqx_authz_http_schema.erl @@ -26,7 +26,8 @@ fields/1, desc/1, source_refs/0, - select_union_member/1 + select_union_member/1, + namespace/0 ]). -export([ @@ -38,6 +39,8 @@ -import(emqx_schema, [mk_duration/2]). +namespace() -> "authz". + type() -> ?AUTHZ_TYPE. source_refs() -> @@ -96,7 +99,7 @@ http_common_fields() -> mk_duration("Request timeout", #{ required => false, default => <<"30s">>, desc => ?DESC(request_timeout) })}, - {body, ?HOCON(map(), #{required => false, desc => ?DESC(body)})} + {body, ?HOCON(hoconsc:map(name, binary()), #{required => false, desc => ?DESC(body)})} ] ++ lists:keydelete( pool_type, @@ -105,7 +108,7 @@ http_common_fields() -> ). headers(type) -> - list({binary(), binary()}); + typerefl:alias("map", list({binary(), binary()}), #{}, [binary(), binary()]); headers(desc) -> ?DESC(?FUNCTION_NAME); headers(converter) -> @@ -118,7 +121,7 @@ headers(_) -> undefined. headers_no_content_type(type) -> - list({binary(), binary()}); + typerefl:alias("map", list({binary(), binary()}), #{}, [binary(), binary()]); headers_no_content_type(desc) -> ?DESC(?FUNCTION_NAME); headers_no_content_type(converter) -> diff --git a/apps/emqx_auth_jwt/src/emqx_auth_jwt.app.src b/apps/emqx_auth_jwt/src/emqx_auth_jwt.app.src index 4679e43bb..b4b5ccf02 100644 --- a/apps/emqx_auth_jwt/src/emqx_auth_jwt.app.src +++ b/apps/emqx_auth_jwt/src/emqx_auth_jwt.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_auth_jwt, [ {description, "EMQX JWT Authentication and Authorization"}, - {vsn, "0.1.0"}, + {vsn, "0.1.1"}, {registered, []}, {mod, {emqx_auth_jwt_app, []}}, {applications, [ diff --git a/apps/emqx_auth_jwt/src/emqx_authn_jwt_schema.erl b/apps/emqx_auth_jwt/src/emqx_authn_jwt_schema.erl index fc7de7cd8..63da372ff 100644 --- a/apps/emqx_auth_jwt/src/emqx_authn_jwt_schema.erl +++ b/apps/emqx_auth_jwt/src/emqx_authn_jwt_schema.erl @@ -16,18 +16,21 @@ -module(emqx_authn_jwt_schema). --include("emqx_auth_jwt.hrl"). --include_lib("hocon/include/hoconsc.hrl"). - -behaviour(emqx_authn_schema). -export([ + namespace/0, fields/1, desc/1, refs/0, select_union_member/1 ]). +-include("emqx_auth_jwt.hrl"). +-include_lib("hocon/include/hoconsc.hrl"). + +namespace() -> "authn". + refs() -> [ ?R_REF(jwt_hmac), @@ -149,7 +152,8 @@ refresh_interval(validator) -> [fun(I) -> I > 0 end]; refresh_interval(_) -> undefined. 
verify_claims(type) -> - list(); + %% user input is a map, converted to a list of {binary(), binary()} + typerefl:alias("map", list()); verify_claims(desc) -> ?DESC(?FUNCTION_NAME); verify_claims(default) -> diff --git a/apps/emqx_auth_ldap/src/emqx_auth_ldap.app.src b/apps/emqx_auth_ldap/src/emqx_auth_ldap.app.src index 383c4822c..3d4d5f467 100644 --- a/apps/emqx_auth_ldap/src/emqx_auth_ldap.app.src +++ b/apps/emqx_auth_ldap/src/emqx_auth_ldap.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_auth_ldap, [ {description, "EMQX LDAP Authentication and Authorization"}, - {vsn, "0.1.0"}, + {vsn, "0.1.1"}, {registered, []}, {mod, {emqx_auth_ldap_app, []}}, {applications, [ diff --git a/apps/emqx_auth_ldap/src/emqx_authn_ldap_schema.erl b/apps/emqx_auth_ldap/src/emqx_authn_ldap_schema.erl index badacceea..3190d6e14 100644 --- a/apps/emqx_auth_ldap/src/emqx_authn_ldap_schema.erl +++ b/apps/emqx_auth_ldap/src/emqx_authn_ldap_schema.erl @@ -16,18 +16,21 @@ -module(emqx_authn_ldap_schema). --include("emqx_auth_ldap.hrl"). --include_lib("hocon/include/hoconsc.hrl"). - -behaviour(emqx_authn_schema). -export([ + namespace/0, fields/1, desc/1, refs/0, select_union_member/1 ]). +-include("emqx_auth_ldap.hrl"). +-include_lib("hocon/include/hoconsc.hrl"). + +namespace() -> "authn". + refs() -> [?R_REF(ldap), ?R_REF(ldap_deprecated)]. @@ -52,7 +55,7 @@ fields(ldap) -> [ {method, ?HOCON( - ?UNION([?R_REF(hash_method), ?R_REF(bind_method)]), + hoconsc:union([?R_REF(hash_method), ?R_REF(bind_method)]), #{desc => ?DESC(method)} )} ]; diff --git a/apps/emqx_auth_ldap/src/emqx_authz_ldap_schema.erl b/apps/emqx_auth_ldap/src/emqx_authz_ldap_schema.erl index 491b0debf..e6a060f42 100644 --- a/apps/emqx_auth_ldap/src/emqx_authz_ldap_schema.erl +++ b/apps/emqx_auth_ldap/src/emqx_authz_ldap_schema.erl @@ -26,9 +26,12 @@ fields/1, desc/1, source_refs/0, - select_union_member/1 + select_union_member/1, + namespace/0 ]). +namespace() -> "authz". + type() -> ?AUTHZ_TYPE. fields(ldap) -> diff --git a/apps/emqx_auth_mnesia/src/emqx_auth_mnesia.app.src b/apps/emqx_auth_mnesia/src/emqx_auth_mnesia.app.src index 988d300fb..5cc2c2a31 100644 --- a/apps/emqx_auth_mnesia/src/emqx_auth_mnesia.app.src +++ b/apps/emqx_auth_mnesia/src/emqx_auth_mnesia.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_auth_mnesia, [ {description, "EMQX Buitl-in Database Authentication and Authorization"}, - {vsn, "0.1.0"}, + {vsn, "0.1.1"}, {registered, []}, {mod, {emqx_auth_mnesia_app, []}}, {applications, [ diff --git a/apps/emqx_auth_mnesia/src/emqx_authn_mnesia_schema.erl b/apps/emqx_auth_mnesia/src/emqx_authn_mnesia_schema.erl index bb5ccfe1a..373d95fc8 100644 --- a/apps/emqx_auth_mnesia/src/emqx_authn_mnesia_schema.erl +++ b/apps/emqx_auth_mnesia/src/emqx_authn_mnesia_schema.erl @@ -25,9 +25,12 @@ fields/1, desc/1, refs/1, - select_union_member/2 + select_union_member/2, + namespace/0 ]). +namespace() -> "authn". + refs(api_write) -> [?R_REF(builtin_db_api)]; refs(_) -> diff --git a/apps/emqx_auth_mnesia/src/emqx_authn_scram_mnesia_schema.erl b/apps/emqx_auth_mnesia/src/emqx_authn_scram_mnesia_schema.erl index fa22693b3..ef4ec6e05 100644 --- a/apps/emqx_auth_mnesia/src/emqx_authn_scram_mnesia_schema.erl +++ b/apps/emqx_auth_mnesia/src/emqx_authn_scram_mnesia_schema.erl @@ -22,12 +22,15 @@ -behaviour(emqx_authn_schema). -export([ + namespace/0, fields/1, desc/1, refs/0, select_union_member/1 ]). +namespace() -> "authn". + refs() -> [?R_REF(scram)]. 
diff --git a/apps/emqx_auth_mnesia/src/emqx_authz_mnesia_schema.erl b/apps/emqx_auth_mnesia/src/emqx_authz_mnesia_schema.erl index cab544bf7..4d467397e 100644 --- a/apps/emqx_auth_mnesia/src/emqx_authz_mnesia_schema.erl +++ b/apps/emqx_auth_mnesia/src/emqx_authz_mnesia_schema.erl @@ -26,9 +26,12 @@ fields/1, desc/1, source_refs/0, - select_union_member/1 + select_union_member/1, + namespace/0 ]). +namespace() -> "authz". + type() -> ?AUTHZ_TYPE. fields(builtin_db) -> diff --git a/apps/emqx_auth_mongodb/src/emqx_auth_mongodb.app.src b/apps/emqx_auth_mongodb/src/emqx_auth_mongodb.app.src index 38cf0138f..8970329fe 100644 --- a/apps/emqx_auth_mongodb/src/emqx_auth_mongodb.app.src +++ b/apps/emqx_auth_mongodb/src/emqx_auth_mongodb.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_auth_mongodb, [ {description, "EMQX MongoDB Authentication and Authorization"}, - {vsn, "0.1.0"}, + {vsn, "0.1.1"}, {registered, []}, {mod, {emqx_auth_mongodb_app, []}}, {applications, [ diff --git a/apps/emqx_auth_mongodb/src/emqx_authn_mongodb_schema.erl b/apps/emqx_auth_mongodb/src/emqx_authn_mongodb_schema.erl index 8f76bedc2..b72a1e83a 100644 --- a/apps/emqx_auth_mongodb/src/emqx_authn_mongodb_schema.erl +++ b/apps/emqx_auth_mongodb/src/emqx_authn_mongodb_schema.erl @@ -16,18 +16,21 @@ -module(emqx_authn_mongodb_schema). --include("emqx_auth_mongodb.hrl"). --include_lib("hocon/include/hoconsc.hrl"). - -behaviour(emqx_authn_schema). -export([ + namespace/0, fields/1, desc/1, refs/0, select_union_member/1 ]). +-include("emqx_auth_mongodb.hrl"). +-include_lib("hocon/include/hoconsc.hrl"). + +namespace() -> "authn". + refs() -> [ ?R_REF(mongo_single), diff --git a/apps/emqx_auth_mongodb/src/emqx_authz_mongodb_schema.erl b/apps/emqx_auth_mongodb/src/emqx_authz_mongodb_schema.erl index aff399e68..bdde704f9 100644 --- a/apps/emqx_auth_mongodb/src/emqx_authz_mongodb_schema.erl +++ b/apps/emqx_auth_mongodb/src/emqx_authz_mongodb_schema.erl @@ -16,17 +16,20 @@ -module(emqx_authz_mongodb_schema). --include("emqx_auth_mongodb.hrl"). --include_lib("hocon/include/hoconsc.hrl"). - -export([ type/0, fields/1, desc/1, source_refs/0, - select_union_member/1 + select_union_member/1, + namespace/0 ]). +-include("emqx_auth_mongodb.hrl"). +-include_lib("hocon/include/hoconsc.hrl"). + +namespace() -> "authz". + type() -> ?AUTHZ_TYPE. source_refs() -> diff --git a/apps/emqx_auth_mysql/src/emqx_auth_mysql.app.src b/apps/emqx_auth_mysql/src/emqx_auth_mysql.app.src index 933e8f819..38750b79a 100644 --- a/apps/emqx_auth_mysql/src/emqx_auth_mysql.app.src +++ b/apps/emqx_auth_mysql/src/emqx_auth_mysql.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_auth_mysql, [ {description, "EMQX MySQL Authentication and Authorization"}, - {vsn, "0.1.0"}, + {vsn, "0.1.1"}, {registered, []}, {mod, {emqx_auth_mysql_app, []}}, {applications, [ diff --git a/apps/emqx_auth_mysql/src/emqx_authn_mysql_schema.erl b/apps/emqx_auth_mysql/src/emqx_authn_mysql_schema.erl index 0189ecc61..6472794fe 100644 --- a/apps/emqx_auth_mysql/src/emqx_authn_mysql_schema.erl +++ b/apps/emqx_auth_mysql/src/emqx_authn_mysql_schema.erl @@ -16,18 +16,21 @@ -module(emqx_authn_mysql_schema). --include("emqx_auth_mysql.hrl"). --include_lib("hocon/include/hoconsc.hrl"). - -behaviour(emqx_authn_schema). -export([ + namespace/0, fields/1, desc/1, refs/0, select_union_member/1 ]). +-include("emqx_auth_mysql.hrl"). +-include_lib("hocon/include/hoconsc.hrl"). + +namespace() -> "authn". + refs() -> [?R_REF(mysql)]. 
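Every authn/authz schema module touched in this change gains the same namespace/0 callback (declared on emqx_authn_schema earlier in the diff); the namespace ends up prefixing the matched union member name, which is why test expectations move from e.g. "builtin_db" to "authn:builtin_db". A minimal, hypothetical implementation of the pattern for illustration; none of the module, backend or field names below exist.

-module(emqx_authn_example_schema).
-behaviour(emqx_authn_schema).

-export([namespace/0, refs/0, select_union_member/1, fields/1, desc/1]).

%% Namespace reported for this schema's union members, e.g. in validation
%% errors such as matched_type := "authn:example".
namespace() -> "authn".

refs() -> [hoconsc:ref(?MODULE, example)].

%% Pick a union member based on the raw config.
select_union_member(#{<<"mechanism">> := <<"password_based">>, <<"backend">> := <<"example">>}) ->
    refs();
select_union_member(_) ->
    undefined.

fields(example) ->
    [
        {mechanism, hoconsc:mk(hoconsc:enum([password_based]), #{required => true})},
        {backend, hoconsc:mk(hoconsc:enum([example]), #{required => true})}
    ].

desc(example) -> "Hypothetical example authentication backend".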
diff --git a/apps/emqx_auth_mysql/src/emqx_authz_mysql_schema.erl b/apps/emqx_auth_mysql/src/emqx_authz_mysql_schema.erl index a9ce422e6..43f6ca6fa 100644 --- a/apps/emqx_auth_mysql/src/emqx_authz_mysql_schema.erl +++ b/apps/emqx_auth_mysql/src/emqx_authz_mysql_schema.erl @@ -22,6 +22,7 @@ -behaviour(emqx_authz_schema). -export([ + namespace/0, type/0, fields/1, desc/1, @@ -29,6 +30,8 @@ select_union_member/1 ]). +namespace() -> "authz". + type() -> ?AUTHZ_TYPE. fields(mysql) -> diff --git a/apps/emqx_auth_postgresql/src/emqx_auth_postgresql.app.src b/apps/emqx_auth_postgresql/src/emqx_auth_postgresql.app.src index 1d23ccac4..bae3da0cb 100644 --- a/apps/emqx_auth_postgresql/src/emqx_auth_postgresql.app.src +++ b/apps/emqx_auth_postgresql/src/emqx_auth_postgresql.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_auth_postgresql, [ {description, "EMQX PostgreSQL Authentication and Authorization"}, - {vsn, "0.1.0"}, + {vsn, "0.1.1"}, {registered, []}, {mod, {emqx_auth_postgresql_app, []}}, {applications, [ diff --git a/apps/emqx_auth_postgresql/src/emqx_authn_postgresql_schema.erl b/apps/emqx_auth_postgresql/src/emqx_authn_postgresql_schema.erl index 6b3b600ee..ef7d00df3 100644 --- a/apps/emqx_auth_postgresql/src/emqx_authn_postgresql_schema.erl +++ b/apps/emqx_auth_postgresql/src/emqx_authn_postgresql_schema.erl @@ -22,12 +22,15 @@ -behaviour(emqx_authn_schema). -export([ + namespace/0, fields/1, desc/1, refs/0, select_union_member/1 ]). +namespace() -> "authn". + select_union_member( #{ <<"mechanism">> := ?AUTHN_MECHANISM_BIN, <<"backend">> := ?AUTHN_BACKEND_BIN diff --git a/apps/emqx_auth_postgresql/src/emqx_authz_postgresql_schema.erl b/apps/emqx_auth_postgresql/src/emqx_authz_postgresql_schema.erl index 2be7e9387..296b00126 100644 --- a/apps/emqx_auth_postgresql/src/emqx_authz_postgresql_schema.erl +++ b/apps/emqx_auth_postgresql/src/emqx_authz_postgresql_schema.erl @@ -22,6 +22,7 @@ -behaviour(emqx_authz_schema). -export([ + namespace/0, type/0, fields/1, desc/1, @@ -29,6 +30,8 @@ select_union_member/1 ]). +namespace() -> "authz". + type() -> ?AUTHZ_TYPE. fields(postgresql) -> diff --git a/apps/emqx_auth_postgresql/test/emqx_authn_postgresql_SUITE.erl b/apps/emqx_auth_postgresql/test/emqx_authn_postgresql_SUITE.erl index ea44c0a45..af1f1db2d 100644 --- a/apps/emqx_auth_postgresql/test/emqx_authn_postgresql_SUITE.erl +++ b/apps/emqx_auth_postgresql/test/emqx_authn_postgresql_SUITE.erl @@ -104,7 +104,7 @@ t_update_with_invalid_config(_Config) -> ?assertMatch( {error, #{ kind := validation_error, - matched_type := "postgresql", + matched_type := "authn:postgresql", path := "authentication.1.server", reason := required_field }}, diff --git a/apps/emqx_auth_redis/src/emqx_auth_redis.app.src b/apps/emqx_auth_redis/src/emqx_auth_redis.app.src index 388fd413c..bd33606d3 100644 --- a/apps/emqx_auth_redis/src/emqx_auth_redis.app.src +++ b/apps/emqx_auth_redis/src/emqx_auth_redis.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_auth_redis, [ {description, "EMQX Redis Authentication and Authorization"}, - {vsn, "0.1.0"}, + {vsn, "0.1.1"}, {registered, []}, {mod, {emqx_auth_redis_app, []}}, {applications, [ diff --git a/apps/emqx_auth_redis/src/emqx_authn_redis_schema.erl b/apps/emqx_auth_redis/src/emqx_authn_redis_schema.erl index 7b5794c48..f3e124ca1 100644 --- a/apps/emqx_auth_redis/src/emqx_authn_redis_schema.erl +++ b/apps/emqx_auth_redis/src/emqx_authn_redis_schema.erl @@ -22,12 +22,15 @@ -behaviour(emqx_authn_schema). 
-export([ + namespace/0, fields/1, desc/1, refs/0, select_union_member/1 ]). +namespace() -> "authn". + refs() -> [ ?R_REF(redis_single), diff --git a/apps/emqx_auth_redis/src/emqx_authz_redis_schema.erl b/apps/emqx_auth_redis/src/emqx_authz_redis_schema.erl index 755192bfc..5cd084795 100644 --- a/apps/emqx_auth_redis/src/emqx_authz_redis_schema.erl +++ b/apps/emqx_auth_redis/src/emqx_authz_redis_schema.erl @@ -22,6 +22,7 @@ -behaviour(emqx_authz_schema). -export([ + namespace/0, type/0, fields/1, desc/1, @@ -29,6 +30,8 @@ select_union_member/1 ]). +namespace() -> "authz". + type() -> ?AUTHZ_TYPE. fields(redis_single) -> diff --git a/apps/emqx_auth_redis/test/emqx_authn_redis_SUITE.erl b/apps/emqx_auth_redis/test/emqx_authn_redis_SUITE.erl index 081c4e641..e7673b790 100644 --- a/apps/emqx_auth_redis/test/emqx_authn_redis_SUITE.erl +++ b/apps/emqx_auth_redis/test/emqx_authn_redis_SUITE.erl @@ -170,7 +170,7 @@ test_create_invalid_config(InvalidAuthConfig, Path) -> ?assertMatch( {error, #{ kind := validation_error, - matched_type := "redis_single", + matched_type := "authn:redis_single", path := Path }}, emqx:update_config(?PATH, {create_authenticator, ?GLOBAL, InvalidAuthConfig}) diff --git a/apps/emqx_bridge/src/emqx_action_info.erl b/apps/emqx_bridge/src/emqx_action_info.erl new file mode 100644 index 000000000..e1932af44 --- /dev/null +++ b/apps/emqx_bridge/src/emqx_action_info.erl @@ -0,0 +1,200 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +%% @doc The module which knows everything about actions. + +%% NOTE: it does not cover the V1 bridges. + +-module(emqx_action_info). + +-export([ + action_type_to_connector_type/1, + action_type_to_bridge_v1_type/1, + bridge_v1_type_to_action_type/1, + is_action_type/1, + registered_schema_modules/0 +]). + +-callback bridge_v1_type_name() -> atom(). +-callback action_type_name() -> atom(). +-callback connector_type_name() -> atom(). +-callback schema_module() -> atom(). + +-optional_callbacks([bridge_v1_type_name/0]). + +%% ==================================================================== +%% Hadcoded list of info modules for actions +%% TODO: Remove this list once we have made sure that all relevants +%% apps are loaded before this module is called. +%% ==================================================================== + +-if(?EMQX_RELEASE_EDITION == ee). +hard_coded_action_info_modules_ee() -> + [ + emqx_bridge_kafka_action_info, + emqx_bridge_azure_event_hub_action_info, + emqx_bridge_syskeeper_action_info + ]. +-else. +hard_coded_action_info_modules_ee() -> + []. +-endif. + +hard_coded_action_info_modules_common() -> + []. + +hard_coded_action_info_modules() -> + hard_coded_action_info_modules_common() ++ hard_coded_action_info_modules_ee(). 
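%% A minimal sketch of an emqx_action_info implementation, illustrating the
%% callbacks declared above. All names here (module, types, schema module) are
%% hypothetical; the real implementations added in this change are
%% emqx_bridge_kafka_action_info and emqx_bridge_azure_event_hub_action_info.
-module(emqx_bridge_example_action_info).

-behaviour(emqx_action_info).

-export([
    bridge_v1_type_name/0,
    action_type_name/0,
    connector_type_name/0,
    schema_module/0
]).

%% Legacy (bridge v1) type name; this callback is optional and defaults to the
%% action type name when not exported.
bridge_v1_type_name() -> example.

%% Action (bridge v2) type name.
action_type_name() -> example_producer.

%% Connector type the action is attached to.
connector_type_name() -> example_producer.

%% Module that defines the HOCON schema for this action.
schema_module() -> emqx_bridge_example.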
+ +%% ==================================================================== +%% API +%% ==================================================================== + +action_type_to_connector_type(Type) when not is_atom(Type) -> + action_type_to_connector_type(binary_to_existing_atom(iolist_to_binary(Type))); +action_type_to_connector_type(Type) -> + ActionInfoMap = info_map(), + ActionTypeToConnectorTypeMap = maps:get(action_type_to_connector_type, ActionInfoMap), + case maps:get(Type, ActionTypeToConnectorTypeMap, undefined) of + undefined -> Type; + ConnectorType -> ConnectorType + end. + +bridge_v1_type_to_action_type(Bin) when is_binary(Bin) -> + bridge_v1_type_to_action_type(binary_to_existing_atom(Bin)); +bridge_v1_type_to_action_type(Type) -> + ActionInfoMap = info_map(), + BridgeV1TypeToActionType = maps:get(bridge_v1_type_to_action_type, ActionInfoMap), + case maps:get(Type, BridgeV1TypeToActionType, undefined) of + undefined -> Type; + ActionType -> ActionType + end. + +action_type_to_bridge_v1_type(Bin) when is_binary(Bin) -> + action_type_to_bridge_v1_type(binary_to_existing_atom(Bin)); +action_type_to_bridge_v1_type(Type) -> + ActionInfoMap = info_map(), + ActionTypeToBridgeV1Type = maps:get(action_type_to_bridge_v1_type, ActionInfoMap), + case maps:get(Type, ActionTypeToBridgeV1Type, undefined) of + undefined -> Type; + BridgeV1Type -> BridgeV1Type + end. + +%% This function should return true for all inputs that are bridge V1 types for +%% bridges that have been refactored to bridge V2s, and for all all bridge V2 +%% types. For everything else the function should return false. +is_action_type(Bin) when is_binary(Bin) -> + is_action_type(binary_to_existing_atom(Bin)); +is_action_type(Type) -> + ActionInfoMap = info_map(), + ActionTypes = maps:get(action_type_names, ActionInfoMap), + case maps:get(Type, ActionTypes, undefined) of + undefined -> false; + _ -> true + end. + +registered_schema_modules() -> + InfoMap = info_map(), + Schemas = maps:get(action_type_to_schema_module, InfoMap), + maps:to_list(Schemas). + +%% ==================================================================== +%% Internal functions for building the info map and accessing it +%% ==================================================================== + +internal_emqx_action_persistent_term_info_key() -> + ?FUNCTION_NAME. + +info_map() -> + case persistent_term:get(internal_emqx_action_persistent_term_info_key(), not_found) of + not_found -> + build_cache(); + ActionInfoMap -> + ActionInfoMap + end. + +build_cache() -> + ActionInfoModules = action_info_modules(), + ActionInfoMap = + lists:foldl( + fun(Module, InfoMapSoFar) -> + ModuleInfoMap = get_info_map(Module), + emqx_utils_maps:deep_merge(InfoMapSoFar, ModuleInfoMap) + end, + initial_info_map(), + ActionInfoModules + ), + %% Update the persistent term with the new info map + persistent_term:put(internal_emqx_action_persistent_term_info_key(), ActionInfoMap), + ActionInfoMap. + +action_info_modules() -> + ActionInfoModules = [ + action_info_modules(App) + || {App, _, _} <- application:loaded_applications() + ], + lists:usort(lists:flatten(ActionInfoModules) ++ hard_coded_action_info_modules()). + +action_info_modules(App) -> + case application:get_env(App, emqx_action_info_module) of + {ok, Module} -> + [Module]; + _ -> + [] + end. + +initial_info_map() -> + #{ + action_type_names => #{}, + bridge_v1_type_to_action_type => #{}, + action_type_to_bridge_v1_type => #{}, + action_type_to_connector_type => #{}, + action_type_to_schema_module => #{} + }. 
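%% How a bridge application gets registered: action_info_modules/1 above reads
%% the emqx_action_info_module key from each loaded application's environment,
%% so a bridge app only needs to advertise its info module in its .app.src
%% (the Kafka bridge app does exactly this further down in this change).
%% Minimal, hypothetical fragment:
{application, emqx_bridge_example, [
    {description, "Hypothetical bridge app, for illustration only"},
    {vsn, "0.1.0"},
    {applications, [kernel, stdlib]},
    %% Picked up by emqx_action_info:action_info_modules/1 once the app is loaded.
    {env, [{emqx_action_info_module, emqx_bridge_example_action_info}]},
    {modules, []}
]}.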
+ +get_info_map(Module) -> + %% Force the module to get loaded + _ = code:ensure_loaded(Module), + ActionType = Module:action_type_name(), + BridgeV1Type = + case erlang:function_exported(Module, bridge_v1_type_name, 0) of + true -> + Module:bridge_v1_type_name(); + false -> + Module:action_type_name() + end, + #{ + action_type_names => #{ + ActionType => true, + BridgeV1Type => true + }, + bridge_v1_type_to_action_type => #{ + BridgeV1Type => ActionType, + %% Alias the bridge V1 type to the action type + ActionType => ActionType + }, + action_type_to_bridge_v1_type => #{ + ActionType => BridgeV1Type + }, + action_type_to_connector_type => #{ + ActionType => Module:connector_type_name(), + %% Alias the bridge V1 type to the action type + BridgeV1Type => Module:connector_type_name() + }, + action_type_to_schema_module => #{ + ActionType => Module:schema_module() + } + }. diff --git a/apps/emqx_bridge/src/emqx_bridge.app.src b/apps/emqx_bridge/src/emqx_bridge.app.src index c2387fe99..f829b12df 100644 --- a/apps/emqx_bridge/src/emqx_bridge.app.src +++ b/apps/emqx_bridge/src/emqx_bridge.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_bridge, [ {description, "EMQX bridges"}, - {vsn, "0.1.29"}, + {vsn, "0.1.30"}, {registered, [emqx_bridge_sup]}, {mod, {emqx_bridge_app, []}}, {applications, [ diff --git a/apps/emqx_bridge/src/emqx_bridge.erl b/apps/emqx_bridge/src/emqx_bridge.erl index 7ffe58f6a..a3d54586a 100644 --- a/apps/emqx_bridge/src/emqx_bridge.erl +++ b/apps/emqx_bridge/src/emqx_bridge.erl @@ -308,7 +308,7 @@ list() -> emqx:get_raw_config([bridges], #{}) ), BridgeV2Bridges = - emqx_bridge_v2:list_and_transform_to_bridge_v1(), + emqx_bridge_v2:bridge_v1_list_and_transform(), BridgeV1Bridges ++ BridgeV2Bridges. %%BridgeV2Bridges = emqx_bridge_v2:list(). @@ -319,7 +319,7 @@ lookup(Id) -> lookup(Type, Name) -> case emqx_bridge_v2:is_bridge_v2_type(Type) of true -> - emqx_bridge_v2:lookup_and_transform_to_bridge_v1(Type, Name); + emqx_bridge_v2:bridge_v1_lookup_and_transform(Type, Name); false -> RawConf = emqx:get_raw_config([bridges, Type, Name], #{}), lookup(Type, Name, RawConf) @@ -341,7 +341,7 @@ lookup(Type, Name, RawConf) -> get_metrics(Type, Name) -> case emqx_bridge_v2:is_bridge_v2_type(Type) of true -> - case emqx_bridge_v2:is_valid_bridge_v1(Type, Name) of + case emqx_bridge_v2:bridge_v1_is_valid(Type, Name) of true -> BridgeV2Type = emqx_bridge_v2:bridge_v2_type_to_connector_type(Type), emqx_bridge_v2:get_metrics(BridgeV2Type, Name); @@ -384,7 +384,7 @@ create(BridgeType0, BridgeName, RawConf) -> }), case emqx_bridge_v2:is_bridge_v2_type(BridgeType) of true -> - emqx_bridge_v2:split_bridge_v1_config_and_create(BridgeType, BridgeName, RawConf); + emqx_bridge_v2:bridge_v1_split_config_and_create(BridgeType, BridgeName, RawConf); false -> emqx_conf:update( emqx_bridge:config_key_path() ++ [BridgeType, BridgeName], diff --git a/apps/emqx_bridge/src/emqx_bridge_api.erl b/apps/emqx_bridge/src/emqx_bridge_api.erl index b3ceba9ca..d263817bf 100644 --- a/apps/emqx_bridge/src/emqx_bridge_api.erl +++ b/apps/emqx_bridge/src/emqx_bridge_api.erl @@ -627,7 +627,7 @@ create_bridge(BridgeType, BridgeName, Conf) -> update_bridge(BridgeType, BridgeName, Conf) -> case emqx_bridge_v2:is_bridge_v2_type(BridgeType) of true -> - case emqx_bridge_v2:is_valid_bridge_v1(BridgeType, BridgeName) of + case emqx_bridge_v2:bridge_v1_is_valid(BridgeType, BridgeName) of true -> create_or_update_bridge(BridgeType, BridgeName, Conf, 200); false -> @@ -1157,7 +1157,7 @@ map_to_json(M0) -> end. 
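%% Worked example of the info map built by get_info_map/1 above, using the
%% Kafka producer registration added later in this change
%% (bridge_v1_type_name: kafka, action_type_name/connector_type_name:
%% kafka_producer, schema_module: emqx_bridge_kafka). The resulting lookups:
%%   emqx_action_info:bridge_v1_type_to_action_type(kafka)          -> kafka_producer
%%   emqx_action_info:action_type_to_bridge_v1_type(kafka_producer) -> kafka
%%   emqx_action_info:action_type_to_connector_type(kafka)          -> kafka_producer
%%   emqx_action_info:is_action_type(<<"kafka_producer">>)          -> true
%%   emqx_action_info:is_action_type(<<"kafka">>)                   -> true  (v1 alias)
%% Types that are not registered fall through: the conversion functions return
%% the input type unchanged and is_action_type/1 returns false.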
non_compat_bridge_msg() -> - <<"bridge already exists as non Bridge V1 compatible Bridge V2 bridge">>. + <<"bridge already exists as non Bridge V1 compatible action">>. upgrade_type(Type) -> emqx_bridge_lib:upgrade_type(Type). diff --git a/apps/emqx_bridge/src/emqx_bridge_lib.erl b/apps/emqx_bridge/src/emqx_bridge_lib.erl index b11344ee1..4be605745 100644 --- a/apps/emqx_bridge/src/emqx_bridge_lib.erl +++ b/apps/emqx_bridge/src/emqx_bridge_lib.erl @@ -53,20 +53,20 @@ maybe_withdraw_rule_action_loop([BridgeId | More], DeleteActions) -> end. %% @doc Kafka producer bridge renamed from 'kafka' to 'kafka_bridge' since 5.3.1. -upgrade_type(kafka) -> - kafka_producer; -upgrade_type(<<"kafka">>) -> - <<"kafka_producer">>; -upgrade_type(Other) -> - Other. +upgrade_type(Type) when is_atom(Type) -> + emqx_bridge_v2:bridge_v1_type_to_bridge_v2_type(Type); +upgrade_type(Type) when is_binary(Type) -> + atom_to_binary(emqx_bridge_v2:bridge_v1_type_to_bridge_v2_type(Type)); +upgrade_type(Type) when is_list(Type) -> + atom_to_list(emqx_bridge_v2:bridge_v1_type_to_bridge_v2_type(list_to_binary(Type))). %% @doc Kafka producer bridge type renamed from 'kafka' to 'kafka_bridge' since 5.3.1 -downgrade_type(kafka_producer) -> - kafka; -downgrade_type(<<"kafka_producer">>) -> - <<"kafka">>; -downgrade_type(Other) -> - Other. +downgrade_type(Type) when is_atom(Type) -> + emqx_bridge_v2:bridge_v2_type_to_bridge_v1_type(Type); +downgrade_type(Type) when is_binary(Type) -> + atom_to_binary(emqx_bridge_v2:bridge_v2_type_to_bridge_v1_type(Type)); +downgrade_type(Type) when is_list(Type) -> + atom_to_list(emqx_bridge_v2:bridge_v2_type_to_bridge_v1_type(list_to_binary(Type))). %% A rule might be referencing an old version bridge type name %% i.e. 'kafka' instead of 'kafka_producer' so we need to try both diff --git a/apps/emqx_bridge/src/emqx_bridge_resource.erl b/apps/emqx_bridge/src/emqx_bridge_resource.erl index 20b3b08f6..231548f30 100644 --- a/apps/emqx_bridge/src/emqx_bridge_resource.erl +++ b/apps/emqx_bridge/src/emqx_bridge_resource.erl @@ -130,7 +130,7 @@ reset_metrics(ResourceId) -> false -> emqx_resource:reset_metrics(ResourceId); true -> - case emqx_bridge_v2:is_valid_bridge_v1(Type, Name) of + case emqx_bridge_v2:bridge_v1_is_valid(Type, Name) of true -> BridgeV2Type = emqx_bridge_v2:bridge_v2_type_to_connector_type(Type), emqx_bridge_v2:reset_metrics(BridgeV2Type, Name); diff --git a/apps/emqx_bridge/src/emqx_bridge_v2.erl b/apps/emqx_bridge/src/emqx_bridge_v2.erl index c8aff2f8a..70e248e56 100644 --- a/apps/emqx_bridge/src/emqx_bridge_v2.erl +++ b/apps/emqx_bridge/src/emqx_bridge_v2.erl @@ -40,6 +40,8 @@ list/0, lookup/2, create/3, + %% The remove/2 function is only for internal use as it may create + %% rules with broken dependencies remove/2, %% The following is the remove function that is called by the HTTP API %% It also checks for rule action dependencies and optionally removes @@ -48,6 +50,7 @@ ]). %% Operations + -export([ disable_enable/3, health_check/2, @@ -73,7 +76,8 @@ -export([ id/2, id/3, - is_valid_bridge_v1/2 + bridge_v1_is_valid/2, + extract_connector_id_from_bridge_v2_id/1 ]). %% Config Update Handler API @@ -88,18 +92,26 @@ import_config/1 ]). -%% Compatibility API +%% Bridge V2 Types and Conversions -export([ bridge_v2_type_to_connector_type/1, - is_bridge_v2_type/1, - lookup_and_transform_to_bridge_v1/2, - list_and_transform_to_bridge_v1/0, + is_bridge_v2_type/1 +]). 
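%% With emqx_bridge_lib:upgrade_type/1 and downgrade_type/1 (above) now routed
%% through these type mappings, the kafka rename resolves as before, while any
%% future rename only needs an action info module:
%%   upgrade_type(kafka)              -> kafka_producer
%%   upgrade_type(<<"kafka">>)        -> <<"kafka_producer">>
%%   downgrade_type(kafka_producer)   -> kafka
%%   downgrade_type("kafka_producer") -> "kafka"
%% Types that were never renamed map to themselves.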
+ +%% Compatibility Layer API +%% All public functions for the compatibility layer should be prefixed with +%% bridge_v1_ + +-export([ + bridge_v1_lookup_and_transform/2, + bridge_v1_list_and_transform/0, bridge_v1_check_deps_and_remove/3, - split_bridge_v1_config_and_create/3, + bridge_v1_split_config_and_create/3, bridge_v1_create_dry_run/2, - extract_connector_id_from_bridge_v2_id/1, bridge_v1_type_to_bridge_v2_type/1, + %% Exception from the naming convention: + bridge_v2_type_to_bridge_v1_type/1, bridge_v1_id_to_connector_resource_id/1, bridge_v1_enable_disable/3, bridge_v1_restart/2, @@ -107,6 +119,27 @@ bridge_v1_start/2 ]). +%%==================================================================== +%% Types +%%==================================================================== + +-type bridge_v2_info() :: #{ + type := binary(), + name := binary(), + raw_config := map(), + resource_data := map(), + status := emqx_resource:resource_status(), + %% Explanation of the status if the status is not connected + error := term() +}. + +-type bridge_v2_type() :: binary() | atom() | [byte()]. +-type bridge_v2_name() :: binary() | atom() | [byte()]. + +%%==================================================================== + +%%==================================================================== + %%==================================================================== %% Loading and unloading config when EMQX starts and stops %%==================================================================== @@ -157,6 +190,7 @@ unload_bridges() -> %% CRUD API %%==================================================================== +-spec lookup(bridge_v2_type(), bridge_v2_name()) -> {ok, bridge_v2_info()} | {error, not_found}. lookup(Type, Name) -> case emqx:get_raw_config([?ROOT_KEY, Type, Name], not_found) of not_found -> @@ -191,8 +225,8 @@ lookup(Type, Name) -> {disconnected, <<"Pending installation">>} end, {ok, #{ - type => Type, - name => Name, + type => bin(Type), + name => bin(Name), raw_config => RawConf, resource_data => InstanceData, status => DisplayBridgeV2Status, @@ -200,9 +234,12 @@ lookup(Type, Name) -> }} end. +-spec list() -> [bridge_v2_info()] | {error, term()}. list() -> list_with_lookup_fun(fun lookup/2). +-spec create(bridge_v2_type(), bridge_v2_name(), map()) -> + {ok, emqx_config:update_result()} | {error, any()}. create(BridgeType, BridgeName, RawConf) -> ?SLOG(debug, #{ brige_action => create, @@ -217,9 +254,10 @@ create(BridgeType, BridgeName, RawConf) -> #{override_to => cluster} ). -%% NOTE: This function can cause broken references but it is only called from -%% test cases. --spec remove(atom() | binary(), binary()) -> ok | {error, any()}. +%% NOTE: This function can cause broken references from rules but it is only +%% called directly from test cases. + +-spec remove(bridge_v2_type(), bridge_v2_name()) -> ok | {error, any()}. remove(BridgeType, BridgeName) -> ?SLOG(debug, #{ brige_action => remove, @@ -237,6 +275,7 @@ remove(BridgeType, BridgeName) -> {error, Reason} -> {error, Reason} end. +-spec check_deps_and_remove(bridge_v2_type(), bridge_v2_name(), boolean()) -> ok | {error, any()}. check_deps_and_remove(BridgeType, BridgeName, AlsoDeleteActions) -> AlsoDelete = case AlsoDeleteActions of @@ -360,28 +399,6 @@ uninstall_bridge_v2( %% Already not installed ok; uninstall_bridge_v2( - BridgeV2Type, - BridgeName, - Config -) -> - uninstall_bridge_v2_helper( - BridgeV2Type, - BridgeName, - combine_connector_and_bridge_v2_config( - BridgeV2Type, - BridgeName, - Config - ) - ). 
- -uninstall_bridge_v2_helper( - _BridgeV2Type, - _BridgeName, - {error, Reason} = Error -) -> - ?SLOG(error, Reason), - Error; -uninstall_bridge_v2_helper( BridgeV2Type, BridgeName, #{connector := ConnectorName} = Config @@ -390,11 +407,16 @@ uninstall_bridge_v2_helper( CreationOpts = emqx_resource:fetch_creation_opts(Config), ok = emqx_resource_buffer_worker_sup:stop_workers(BridgeV2Id, CreationOpts), ok = emqx_resource:clear_metrics(BridgeV2Id), - %% Deinstall from connector - ConnectorId = emqx_connector_resource:resource_id( - connector_type(BridgeV2Type), ConnectorName - ), - emqx_resource_manager:remove_channel(ConnectorId, BridgeV2Id). + case combine_connector_and_bridge_v2_config(BridgeV2Type, BridgeName, Config) of + {error, _} -> + ok; + _CombinedConfig -> + %% Deinstall from connector + ConnectorId = emqx_connector_resource:resource_id( + connector_type(BridgeV2Type), ConnectorName + ), + emqx_resource_manager:remove_channel(ConnectorId, BridgeV2Id) + end. combine_connector_and_bridge_v2_config( BridgeV2Type, @@ -425,6 +447,8 @@ combine_connector_and_bridge_v2_config( %% Operations %%==================================================================== +-spec disable_enable(disable | enable, bridge_v2_type(), bridge_v2_name()) -> + {ok, any()} | {error, any()}. disable_enable(Action, BridgeType, BridgeName) when Action =:= disable; Action =:= enable -> @@ -502,6 +526,7 @@ connector_operation_helper_with_conf( end end. +-spec reset_metrics(bridge_v2_type(), bridge_v2_name()) -> ok | {error, not_found}. reset_metrics(Type, Name) -> reset_metrics_helper(Type, Name, lookup_conf(Type, Name)). @@ -509,7 +534,9 @@ reset_metrics_helper(_Type, _Name, #{enable := false}) -> ok; reset_metrics_helper(BridgeV2Type, BridgeName, #{connector := ConnectorName}) -> BridgeV2Id = id(BridgeV2Type, BridgeName, ConnectorName), - ok = emqx_metrics_worker:reset_metrics(?RES_METRICS, BridgeV2Id). + ok = emqx_metrics_worker:reset_metrics(?RES_METRICS, BridgeV2Id); +reset_metrics_helper(_, _, _) -> + {error, not_found}. get_query_mode(BridgeV2Type, Config) -> CreationOpts = emqx_resource:fetch_creation_opts(Config), @@ -517,6 +544,8 @@ get_query_mode(BridgeV2Type, Config) -> ResourceType = emqx_connector_resource:connector_to_resource_type(ConnectorType), emqx_resource:query_mode(ResourceType, Config, CreationOpts). +-spec send_message(bridge_v2_type(), bridge_v2_name(), Message :: term(), QueryOpts :: map()) -> + term() | {error, term()}. send_message(BridgeType, BridgeName, Message, QueryOpts0) -> case lookup_conf(BridgeType, BridgeName) of #{enable := true} = Config0 -> @@ -550,8 +579,7 @@ do_send_msg_with_enabled_config( emqx_resource:query(BridgeV2Id, {BridgeV2Id, Message}, QueryOpts). -spec health_check(BridgeType :: term(), BridgeName :: term()) -> - #{status := term(), error := term()} | {error, Reason :: term()}. - + #{status := emqx_resource:resource_status(), error := term()} | {error, Reason :: term()}. health_check(BridgeType, BridgeName) -> case lookup_conf(BridgeType, BridgeName) of #{ @@ -570,6 +598,34 @@ health_check(BridgeType, BridgeName) -> Error end. +-spec create_dry_run(bridge_v2_type(), Config :: map()) -> ok | {error, term()}. 
+create_dry_run(Type, Conf0) -> + Conf1 = maps:without([<<"name">>], Conf0), + TypeBin = bin(Type), + RawConf = #{<<"actions">> => #{TypeBin => #{<<"temp_name">> => Conf1}}}, + %% Check config + try + _ = + hocon_tconf:check_plain( + emqx_bridge_v2_schema, + RawConf, + #{atom_key => true, required => false} + ), + #{<<"connector">> := ConnectorName} = Conf1, + %% Check that the connector exists and do the dry run if it exists + ConnectorType = connector_type(Type), + case emqx:get_raw_config([connectors, ConnectorType, ConnectorName], not_found) of + not_found -> + {error, iolist_to_binary(io_lib:format("Connector ~p not found", [ConnectorName]))}; + ConnectorRawConf -> + create_dry_run_helper(Type, ConnectorRawConf, Conf1) + end + catch + %% validation errors + throw:Reason1 -> + {error, Reason1} + end. + create_dry_run_helper(BridgeType, ConnectorRawConf, BridgeV2RawConf) -> BridgeName = iolist_to_binary([?TEST_ID_PREFIX, emqx_utils:gen_id(8)]), ConnectorType = connector_type(BridgeType), @@ -601,33 +657,7 @@ create_dry_run_helper(BridgeType, ConnectorRawConf, BridgeV2RawConf) -> end, emqx_connector_resource:create_dry_run(ConnectorType, ConnectorRawConf, OnReadyCallback). -create_dry_run(Type, Conf0) -> - Conf1 = maps:without([<<"name">>], Conf0), - TypeBin = bin(Type), - RawConf = #{<<"actions">> => #{TypeBin => #{<<"temp_name">> => Conf1}}}, - %% Check config - try - _ = - hocon_tconf:check_plain( - emqx_bridge_v2_schema, - RawConf, - #{atom_key => true, required => false} - ), - #{<<"connector">> := ConnectorName} = Conf1, - %% Check that the connector exists and do the dry run if it exists - ConnectorType = connector_type(Type), - case emqx:get_raw_config([connectors, ConnectorType, ConnectorName], not_found) of - not_found -> - {error, iolist_to_binary(io_lib:format("Connector ~p not found", [ConnectorName]))}; - ConnectorRawConf -> - create_dry_run_helper(Type, ConnectorRawConf, Conf1) - end - catch - %% validation errors - throw:Reason1 -> - {error, Reason1} - end. - +-spec get_metrics(bridge_v2_type(), bridge_v2_name()) -> emqx_metrics_worker:metrics(). get_metrics(Type, Name) -> emqx_resource:get_metrics(id(Type, Name)). @@ -796,17 +826,8 @@ connector_type(Type) -> %% remote call so it can be mocked ?MODULE:bridge_v2_type_to_connector_type(Type). -bridge_v2_type_to_connector_type(Type) when not is_atom(Type) -> - bridge_v2_type_to_connector_type(binary_to_existing_atom(iolist_to_binary(Type))); -bridge_v2_type_to_connector_type(kafka) -> - %% backward compatible - kafka_producer; -bridge_v2_type_to_connector_type(kafka_producer) -> - kafka_producer; -bridge_v2_type_to_connector_type(azure_event_hub_producer) -> - azure_event_hub_producer; -bridge_v2_type_to_connector_type(syskeeper_forwarder) -> - syskeeper_forwarder. +bridge_v2_type_to_connector_type(Type) -> + emqx_action_info:action_type_to_connector_type(Type). %%==================================================================== %% Data backup API @@ -1008,7 +1029,7 @@ unpack_bridge_conf(Type, PackedConf, TopLevelConf) -> %% %% * The corresponding bridge v2 should exist %% * The connector for the bridge v2 should have exactly one channel -is_valid_bridge_v1(BridgeV1Type, BridgeName) -> +bridge_v1_is_valid(BridgeV1Type, BridgeName) -> BridgeV2Type = ?MODULE:bridge_v1_type_to_bridge_v2_type(BridgeV1Type), case lookup_conf(BridgeV2Type, BridgeName) of {error, _} -> @@ -1026,39 +1047,21 @@ is_valid_bridge_v1(BridgeV1Type, BridgeName) -> end end. 
-bridge_v1_type_to_bridge_v2_type(Bin) when is_binary(Bin) -> - ?MODULE:bridge_v1_type_to_bridge_v2_type(binary_to_existing_atom(Bin)); -bridge_v1_type_to_bridge_v2_type(kafka) -> - kafka_producer; -bridge_v1_type_to_bridge_v2_type(kafka_producer) -> - kafka_producer; -bridge_v1_type_to_bridge_v2_type(azure_event_hub_producer) -> - azure_event_hub_producer; -bridge_v1_type_to_bridge_v2_type(syskeeper_forwarder) -> - syskeeper_forwarder. +bridge_v1_type_to_bridge_v2_type(Type) -> + emqx_action_info:bridge_v1_type_to_action_type(Type). -%% This function should return true for all inputs that are bridge V1 types for -%% bridges that have been refactored to bridge V2s, and for all all bridge V2 -%% types. For everything else the function should return false. -is_bridge_v2_type(Atom) when is_atom(Atom) -> - is_bridge_v2_type(atom_to_binary(Atom, utf8)); -is_bridge_v2_type(<<"kafka_producer">>) -> - true; -is_bridge_v2_type(<<"kafka">>) -> - true; -is_bridge_v2_type(<<"azure_event_hub_producer">>) -> - true; -is_bridge_v2_type(<<"syskeeper_forwarder">>) -> - true; -is_bridge_v2_type(_) -> - false. +bridge_v2_type_to_bridge_v1_type(Type) -> + emqx_action_info:action_type_to_bridge_v1_type(Type). -list_and_transform_to_bridge_v1() -> - Bridges = list_with_lookup_fun(fun lookup_and_transform_to_bridge_v1/2), +is_bridge_v2_type(Type) -> + emqx_action_info:is_action_type(Type). + +bridge_v1_list_and_transform() -> + Bridges = list_with_lookup_fun(fun bridge_v1_lookup_and_transform/2), [B || B <- Bridges, B =/= not_bridge_v1_compatible_error()]. -lookup_and_transform_to_bridge_v1(BridgeV1Type, Name) -> - case ?MODULE:is_valid_bridge_v1(BridgeV1Type, Name) of +bridge_v1_lookup_and_transform(BridgeV1Type, Name) -> + case ?MODULE:bridge_v1_is_valid(BridgeV1Type, Name) of true -> Type = ?MODULE:bridge_v1_type_to_bridge_v2_type(BridgeV1Type), case lookup(Type, Name) of @@ -1066,7 +1069,7 @@ lookup_and_transform_to_bridge_v1(BridgeV1Type, Name) -> ConnectorType = connector_type(Type), case emqx_connector:lookup(ConnectorType, ConnectorName) of {ok, Connector} -> - lookup_and_transform_to_bridge_v1_helper( + bridge_v1_lookup_and_transform_helper( BridgeV1Type, Name, Type, BridgeV2, ConnectorType, Connector ); Error -> @@ -1082,7 +1085,7 @@ lookup_and_transform_to_bridge_v1(BridgeV1Type, Name) -> not_bridge_v1_compatible_error() -> {error, not_bridge_v1_compatible}. -lookup_and_transform_to_bridge_v1_helper( +bridge_v1_lookup_and_transform_helper( BridgeV1Type, BridgeName, BridgeV2Type, BridgeV2, ConnectorType, Connector ) -> ConnectorRawConfig1 = maps:get(raw_config, Connector), @@ -1135,7 +1138,7 @@ lookup_conf(Type, Name) -> Config end. -split_bridge_v1_config_and_create(BridgeV1Type, BridgeName, RawConf) -> +bridge_v1_split_config_and_create(BridgeV1Type, BridgeName, RawConf) -> BridgeV2Type = ?MODULE:bridge_v1_type_to_bridge_v2_type(BridgeV1Type), %% Check if the bridge v2 exists case lookup_conf(BridgeV2Type, BridgeName) of @@ -1146,7 +1149,7 @@ split_bridge_v1_config_and_create(BridgeV1Type, BridgeName, RawConf) -> BridgeV1Type, BridgeName, RawConf, PreviousRawConf ); _Conf -> - case ?MODULE:is_valid_bridge_v1(BridgeV1Type, BridgeName) of + case ?MODULE:bridge_v1_is_valid(BridgeV1Type, BridgeName) of true -> %% Using remove + create as update, hence do not delete deps. RemoveDeps = [], @@ -1381,7 +1384,7 @@ bridge_v1_id_to_connector_resource_id(BridgeId) -> end. 
bridge_v1_enable_disable(Action, BridgeType, BridgeName) -> - case emqx_bridge_v2:is_valid_bridge_v1(BridgeType, BridgeName) of + case emqx_bridge_v2:bridge_v1_is_valid(BridgeType, BridgeName) of true -> bridge_v1_enable_disable_helper( Action, @@ -1426,7 +1429,7 @@ bridge_v1_start(BridgeV1Type, Name) -> bridge_v1_operation_helper(BridgeV1Type, Name, ConnectorOpFun, DoHealthCheck) -> BridgeV2Type = ?MODULE:bridge_v1_type_to_bridge_v2_type(BridgeV1Type), - case emqx_bridge_v2:is_valid_bridge_v1(BridgeV1Type, Name) of + case emqx_bridge_v2:bridge_v1_is_valid(BridgeV1Type, Name) of true -> connector_operation_helper_with_conf( BridgeV2Type, diff --git a/apps/emqx_bridge/src/emqx_bridge_v2_api.erl b/apps/emqx_bridge/src/emqx_bridge_v2_api.erl index 1da84451d..d5fd09631 100644 --- a/apps/emqx_bridge/src/emqx_bridge_v2_api.erl +++ b/apps/emqx_bridge/src/emqx_bridge_v2_api.erl @@ -40,7 +40,8 @@ '/actions/:id/enable/:enable'/2, '/actions/:id/:operation'/2, '/nodes/:node/actions/:id/:operation'/2, - '/actions_probe'/2 + '/actions_probe'/2, + '/action_types'/2 ]). %% BpAPI @@ -79,7 +80,8 @@ paths() -> "/actions/:id/enable/:enable", "/actions/:id/:operation", "/nodes/:node/actions/:id/:operation", - "/actions_probe" + "/actions_probe", + "/action_types" ]. error_schema(Code, Message) when is_atom(Code) -> @@ -96,21 +98,11 @@ get_response_body_schema() -> ). bridge_info_examples(Method) -> - maps:merge( - #{}, - emqx_enterprise_bridge_examples(Method) - ). + emqx_bridge_v2_schema:examples(Method). bridge_info_array_example(Method) -> lists:map(fun(#{value := Config}) -> Config end, maps:values(bridge_info_examples(Method))). --if(?EMQX_RELEASE_EDITION == ee). -emqx_enterprise_bridge_examples(Method) -> - emqx_bridge_v2_enterprise:examples(Method). --else. -emqx_enterprise_bridge_examples(_Method) -> #{}. --endif. - param_path_id() -> {id, mk( @@ -338,6 +330,27 @@ schema("/actions_probe") -> 400 => error_schema(['TEST_FAILED'], "bridge test failed") } } + }; +schema("/action_types") -> + #{ + 'operationId' => '/action_types', + get => #{ + tags => [<<"actions">>], + desc => ?DESC("desc_api10"), + summary => <<"List available action types">>, + responses => #{ + 200 => emqx_dashboard_swagger:schema_with_examples( + array(emqx_bridge_v2_schema:types_sc()), + #{ + <<"types">> => + #{ + summary => <<"Action types">>, + value => emqx_bridge_v2_schema:types() + } + } + ) + } + } }. '/actions'(post, #{body := #{<<"type">> := BridgeType, <<"name">> := BridgeName} = Conf0}) -> @@ -486,6 +499,9 @@ schema("/actions_probe") -> redact(BadRequest) end. +'/action_types'(get, _Request) -> + ?OK(emqx_bridge_v2_schema:types()). 
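%% The new GET /action_types endpoint returns emqx_bridge_v2_schema:types(),
%% i.e. the names of all registered action types. With the hard-coded
%% enterprise modules listed in emqx_action_info, the response would look
%% roughly like (exact contents depend on edition and loaded bridge apps):
%%   ["kafka_producer", "azure_event_hub_producer", "syskeeper_forwarder"]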
+ maybe_deobfuscate_bridge_probe(#{<<"type">> := BridgeType, <<"name">> := BridgeName} = Params) -> case emqx_bridge:lookup(BridgeType, BridgeName) of {ok, #{raw_config := RawConf}} -> @@ -692,7 +708,13 @@ node_status(Bridges) -> aggregate_status(AllStatus) -> Head = fun([A | _]) -> A end, HeadVal = maps:get(status, Head(AllStatus), connecting), - AllRes = lists:all(fun(#{status := Val}) -> Val == HeadVal end, AllStatus), + AllRes = lists:all( + fun + (#{status := Val}) -> Val == HeadVal; + (_) -> false + end, + AllStatus + ), case AllRes of true -> HeadVal; false -> inconsistent @@ -709,8 +731,10 @@ format_resource( #{ type := Type, name := Name, + status := Status, + error := Error, raw_config := RawConf, - resource_data := ResourceData + resource_data := _ResourceData }, Node ) -> @@ -719,14 +743,16 @@ format_resource( RawConf#{ type => Type, name => maps:get(<<"name">>, RawConf, Name), - node => Node + node => Node, + status => Status, + error => Error }, - format_resource_data(ResourceData) + format_bridge_status_and_error(#{status => Status, error => Error}) ) ). -format_resource_data(ResData) -> - maps:fold(fun format_resource_data/3, #{}, maps:with([status, error], ResData)). +format_bridge_status_and_error(Data) -> + maps:fold(fun format_resource_data/3, #{}, maps:with([status, error], Data)). format_resource_data(error, undefined, Result) -> Result; @@ -765,8 +791,6 @@ do_create_or_update_bridge(BridgeType, BridgeName, Conf, HttpStatusCode) -> PreOrPostConfigUpdate =:= pre_config_update; PreOrPostConfigUpdate =:= post_config_update -> - ?BAD_REQUEST(map_to_json(redact(Reason))); - {error, Reason} -> ?BAD_REQUEST(map_to_json(redact(Reason))) end. diff --git a/apps/emqx_bridge/src/schema/emqx_bridge_enterprise.erl b/apps/emqx_bridge/src/schema/emqx_bridge_enterprise.erl index 93951cca0..3e0875e1f 100644 --- a/apps/emqx_bridge/src/schema/emqx_bridge_enterprise.erl +++ b/apps/emqx_bridge/src/schema/emqx_bridge_enterprise.erl @@ -82,6 +82,11 @@ schema_modules() -> ]. examples(Method) -> + ActionExamples = emqx_bridge_v2_schema:examples(Method), + RegisteredExamples = registered_examples(Method), + maps:merge(ActionExamples, RegisteredExamples). + +registered_examples(Method) -> MergeFun = fun(Example, Examples) -> maps:merge(Examples, Example) diff --git a/apps/emqx_bridge/src/schema/emqx_bridge_v2_enterprise.erl b/apps/emqx_bridge/src/schema/emqx_bridge_v2_enterprise.erl deleted file mode 100644 index ac0713545..000000000 --- a/apps/emqx_bridge/src/schema/emqx_bridge_v2_enterprise.erl +++ /dev/null @@ -1,80 +0,0 @@ -%%-------------------------------------------------------------------- -%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved. -%%-------------------------------------------------------------------- --module(emqx_bridge_v2_enterprise). - --if(?EMQX_RELEASE_EDITION == ee). - --import(hoconsc, [mk/2, enum/1, ref/2]). - --export([ - api_schemas/1, - examples/1, - fields/1 -]). - -examples(Method) -> - MergeFun = - fun(Example, Examples) -> - maps:merge(Examples, Example) - end, - Fun = - fun(Module, Examples) -> - ConnectorExamples = erlang:apply(Module, bridge_v2_examples, [Method]), - lists:foldl(MergeFun, Examples, ConnectorExamples) - end, - lists:foldl(Fun, #{}, schema_modules()). - -schema_modules() -> - [ - emqx_bridge_kafka, - emqx_bridge_azure_event_hub, - emqx_bridge_syskeeper - ]. - -fields(actions) -> - action_structs(). 
- -action_structs() -> - [ - {kafka_producer, - mk( - hoconsc:map(name, ref(emqx_bridge_kafka, kafka_producer_action)), - #{ - desc => <<"Kafka Producer Actions Config">>, - required => false - } - )}, - {azure_event_hub_producer, - mk( - hoconsc:map(name, ref(emqx_bridge_azure_event_hub, actions)), - #{ - desc => <<"Azure Event Hub Actions Config">>, - required => false - } - )}, - {syskeeper_forwarder, - mk( - hoconsc:map(name, ref(emqx_bridge_syskeeper, config)), - #{ - desc => <<"Syskeeper forwarder Bridge V2 Config">>, - required => false - } - )} - ]. - -api_schemas(Method) -> - [ - api_ref(emqx_bridge_kafka, <<"kafka_producer">>, Method ++ "_bridge_v2"), - api_ref( - emqx_bridge_azure_event_hub, <<"azure_event_hub_producer">>, Method ++ "_bridge_v2" - ), - api_ref(emqx_bridge_syskeeper, <<"syskeeper_forwarder">>, Method) - ]. - -api_ref(Module, Type, Method) -> - {Type, ref(Module, Method)}. - --else. - --endif. diff --git a/apps/emqx_bridge/src/schema/emqx_bridge_v2_schema.erl b/apps/emqx_bridge/src/schema/emqx_bridge_v2_schema.erl index d6d8eb9a1..ede783e97 100644 --- a/apps/emqx_bridge/src/schema/emqx_bridge_v2_schema.erl +++ b/apps/emqx_bridge/src/schema/emqx_bridge_v2_schema.erl @@ -27,39 +27,23 @@ -export([ get_response/0, put_request/0, - post_request/0 + post_request/0, + examples/1 ]). --export([enterprise_api_schemas/1]). +%% Exported for mocking +%% TODO: refactor emqx_bridge_v1_compatibility_layer_SUITE so we don't need to +%% export this +-export([ + registered_api_schemas/1 +]). --if(?EMQX_RELEASE_EDITION == ee). -enterprise_api_schemas(Method) -> - %% We *must* do this to ensure the module is really loaded, especially when we use - %% `call_hocon' from `nodetool' to generate initial configurations. - _ = emqx_bridge_v2_enterprise:module_info(), - case erlang:function_exported(emqx_bridge_v2_enterprise, api_schemas, 1) of - true -> emqx_bridge_v2_enterprise:api_schemas(Method); - false -> [] - end. +-export([types/0, types_sc/0]). -enterprise_fields_actions() -> - %% We *must* do this to ensure the module is really loaded, especially when we use - %% `call_hocon' from `nodetool' to generate initial configurations. - _ = emqx_bridge_v2_enterprise:module_info(), - case erlang:function_exported(emqx_bridge_v2_enterprise, fields, 1) of - true -> - emqx_bridge_v2_enterprise:fields(actions); - false -> - [] - end. +-export_type([action_type/0]). --else. - -enterprise_api_schemas(_Method) -> []. - -enterprise_fields_actions() -> []. - --endif. +%% Should we explicitly list them here so dialyzer may be more helpful? +-type action_type() :: atom(). %%====================================================================================== %% For HTTP APIs @@ -73,8 +57,18 @@ post_request() -> api_schema("post"). api_schema(Method) -> - EE = ?MODULE:enterprise_api_schemas(Method), - hoconsc:union(bridge_api_union(EE)). + APISchemas = ?MODULE:registered_api_schemas(Method), + hoconsc:union(bridge_api_union(APISchemas)). + +registered_api_schemas(Method) -> + RegisteredSchemas = emqx_action_info:registered_schema_modules(), + [ + api_ref(SchemaModule, atom_to_binary(BridgeV2Type), Method ++ "_bridge_v2") + || {BridgeV2Type, SchemaModule} <- RegisteredSchemas + ]. + +api_ref(Module, Type, Method) -> + {Type, ref(Module, Method)}. bridge_api_union(Refs) -> Index = maps:from_list(Refs), @@ -122,13 +116,40 @@ roots() -> end. fields(actions) -> - [] ++ enterprise_fields_actions(). + registered_schema_fields(). 
+ +registered_schema_fields() -> + [ + Module:fields(action) + || {_BridgeV2Type, Module} <- emqx_action_info:registered_schema_modules() + ]. desc(actions) -> ?DESC("desc_bridges_v2"); desc(_) -> undefined. +-spec types() -> [action_type()]. +types() -> + proplists:get_keys(?MODULE:fields(actions)). + +-spec types_sc() -> ?ENUM([action_type()]). +types_sc() -> + hoconsc:enum(types()). + +examples(Method) -> + MergeFun = + fun(Example, Examples) -> + maps:merge(Examples, Example) + end, + Fun = + fun(Module, Examples) -> + ConnectorExamples = erlang:apply(Module, bridge_v2_examples, [Method]), + lists:foldl(MergeFun, Examples, ConnectorExamples) + end, + SchemaModules = [Mod || {_, Mod} <- emqx_action_info:registered_schema_modules()], + lists:foldl(Fun, #{}, SchemaModules). + -ifdef(TEST). -include_lib("hocon/include/hocon_types.hrl"). schema_homogeneous_test() -> diff --git a/apps/emqx_bridge/test/emqx_bridge_v1_compatibility_layer_SUITE.erl b/apps/emqx_bridge/test/emqx_bridge_v1_compatibility_layer_SUITE.erl index 8227e7993..f3b7fb685 100644 --- a/apps/emqx_bridge/test/emqx_bridge_v1_compatibility_layer_SUITE.erl +++ b/apps/emqx_bridge/test/emqx_bridge_v1_compatibility_layer_SUITE.erl @@ -111,7 +111,7 @@ setup_mocks() -> catch meck:new(emqx_bridge_v2_schema, MeckOpts), meck:expect( emqx_bridge_v2_schema, - enterprise_api_schemas, + registered_api_schemas, 1, fun(Method) -> [{bridge_type_bin(), hoconsc:ref(?MODULE, "api_" ++ Method)}] end ), diff --git a/apps/emqx_bridge/test/emqx_bridge_v2_SUITE.erl b/apps/emqx_bridge/test/emqx_bridge_v2_SUITE.erl index 367e95784..2766088a1 100644 --- a/apps/emqx_bridge/test/emqx_bridge_v2_SUITE.erl +++ b/apps/emqx_bridge/test/emqx_bridge_v2_SUITE.erl @@ -264,17 +264,17 @@ t_create_dry_run_connector_does_not_exist(_) -> BridgeConf = (bridge_config())#{<<"connector">> => <<"connector_does_not_exist">>}, {error, _} = emqx_bridge_v2:create_dry_run(bridge_type(), BridgeConf). -t_is_valid_bridge_v1(_) -> +t_bridge_v1_is_valid(_) -> {ok, _} = emqx_bridge_v2:create(bridge_type(), my_test_bridge, bridge_config()), - true = emqx_bridge_v2:is_valid_bridge_v1(bridge_v1_type, my_test_bridge), + true = emqx_bridge_v2:bridge_v1_is_valid(bridge_v1_type, my_test_bridge), %% Add another channel/bridge to the connector {ok, _} = emqx_bridge_v2:create(bridge_type(), my_test_bridge_2, bridge_config()), - false = emqx_bridge_v2:is_valid_bridge_v1(bridge_v1_type, my_test_bridge), + false = emqx_bridge_v2:bridge_v1_is_valid(bridge_v1_type, my_test_bridge), ok = emqx_bridge_v2:remove(bridge_type(), my_test_bridge), - true = emqx_bridge_v2:is_valid_bridge_v1(bridge_v1_type, my_test_bridge_2), + true = emqx_bridge_v2:bridge_v1_is_valid(bridge_v1_type, my_test_bridge_2), ok = emqx_bridge_v2:remove(bridge_type(), my_test_bridge_2), %% Non existing bridge is a valid Bridge V1 - true = emqx_bridge_v2:is_valid_bridge_v1(bridge_v1_type, my_test_bridge), + true = emqx_bridge_v2:bridge_v1_is_valid(bridge_v1_type, my_test_bridge), ok. 
t_manual_health_check(_) -> @@ -647,10 +647,12 @@ t_load_config_success(_Config) -> {ok, _}, update_root_config(RootConf0) ), + BridgeTypeBin = bin(BridgeType), + BridgeNameBin = bin(BridgeName), ?assertMatch( {ok, #{ - type := BridgeType, - name := BridgeName, + type := BridgeTypeBin, + name := BridgeNameBin, raw_config := #{}, resource_data := #{} }}, @@ -665,8 +667,8 @@ t_load_config_success(_Config) -> ), ?assertMatch( {ok, #{ - type := BridgeType, - name := BridgeName, + type := BridgeTypeBin, + name := BridgeNameBin, raw_config := #{<<"some_key">> := <<"new_value">>}, resource_data := #{} }}, @@ -860,3 +862,7 @@ wait_until(Fun, Timeout) when Timeout >= 0 -> end; wait_until(_, _) -> ct:fail("Wait until event did not happen"). + +bin(Bin) when is_binary(Bin) -> Bin; +bin(Str) when is_list(Str) -> list_to_binary(Str); +bin(Atom) when is_atom(Atom) -> atom_to_binary(Atom, utf8). diff --git a/apps/emqx_bridge/test/emqx_bridge_v2_api_SUITE.erl b/apps/emqx_bridge/test/emqx_bridge_v2_api_SUITE.erl index bf2ac51a2..059f9ac9f 100644 --- a/apps/emqx_bridge/test/emqx_bridge_v2_api_SUITE.erl +++ b/apps/emqx_bridge/test/emqx_bridge_v2_api_SUITE.erl @@ -177,7 +177,9 @@ all() -> groups() -> AllTCs = emqx_common_test_helpers:all(?MODULE), SingleOnlyTests = [ - t_bridges_probe + t_bridges_probe, + t_broken_bridge_config, + t_fix_broken_bridge_config ], ClusterLaterJoinOnlyTCs = [ % t_cluster_later_join_metrics @@ -236,6 +238,14 @@ end_per_group(_, Config) -> emqx_cth_suite:stop(?config(group_apps, Config)), ok. +init_per_testcase(t_action_types, Config) -> + case ?config(cluster_nodes, Config) of + undefined -> + init_mocks(); + Nodes -> + [erpc:call(Node, ?MODULE, init_mocks, []) || Node <- Nodes] + end, + Config; init_per_testcase(_TestCase, Config) -> case ?config(cluster_nodes, Config) of undefined -> @@ -260,8 +270,14 @@ end_per_testcase(_TestCase, Config) -> -define(CONNECTOR_IMPL, emqx_bridge_v2_dummy_connector). init_mocks() -> - meck:new(emqx_connector_ee_schema, [passthrough, no_link]), - meck:expect(emqx_connector_ee_schema, resource_type, 1, ?CONNECTOR_IMPL), + case emqx_release:edition() of + ee -> + meck:new(emqx_connector_ee_schema, [passthrough, no_link]), + meck:expect(emqx_connector_ee_schema, resource_type, 1, ?CONNECTOR_IMPL), + ok; + ce -> + ok + end, meck:new(?CONNECTOR_IMPL, [non_strict, no_link]), meck:expect(?CONNECTOR_IMPL, callback_mode, 0, async_if_possible), meck:expect( @@ -289,7 +305,7 @@ init_mocks() -> ok = meck:expect(?CONNECTOR_IMPL, on_get_channels, fun(ResId) -> emqx_bridge_v2:get_channels_for_connector(ResId) end), - [?CONNECTOR_IMPL, emqx_connector_ee_schema]. + ok. clear_resources() -> lists:foreach( @@ -537,6 +553,117 @@ t_bridges_lifecycle(Config) -> {ok, 400, _} = request(post, uri([?ROOT]), ?KAFKA_BRIDGE(<<"a.b">>), Config), ok. 
+t_broken_bridge_config(Config) -> + emqx_cth_suite:stop_apps([emqx_bridge]), + BridgeName = ?BRIDGE_NAME, + StartOps = + #{ + config => + "actions {\n" + " " + ?BRIDGE_TYPE_STR + " {\n" + " " ++ binary_to_list(BridgeName) ++ + " {\n" + " connector = does_not_exist\n" + " enable = true\n" + " kafka {\n" + " topic = test-topic-one-partition\n" + " }\n" + " local_topic = \"mqtt/local/topic\"\n" + " resource_opts {health_check_interval = 32s}\n" + " }\n" + " }\n" + "}\n" + "\n", + schema_mod => emqx_bridge_v2_schema + }, + emqx_cth_suite:start_app(emqx_bridge, StartOps), + + ?assertMatch( + {ok, 200, [ + #{ + <<"name">> := BridgeName, + <<"type">> := ?BRIDGE_TYPE, + <<"connector">> := <<"does_not_exist">>, + <<"status">> := <<"disconnected">>, + <<"error">> := <<"Pending installation">> + } + ]}, + request_json(get, uri([?ROOT]), Config) + ), + + BridgeID = emqx_bridge_resource:bridge_id(?BRIDGE_TYPE, ?BRIDGE_NAME), + ?assertEqual( + {ok, 204, <<>>}, + request(delete, uri([?ROOT, BridgeID]), Config) + ), + + ?assertEqual( + {ok, 200, []}, + request_json(get, uri([?ROOT]), Config) + ), + + ok. + +t_fix_broken_bridge_config(Config) -> + emqx_cth_suite:stop_apps([emqx_bridge]), + BridgeName = ?BRIDGE_NAME, + StartOps = + #{ + config => + "actions {\n" + " " + ?BRIDGE_TYPE_STR + " {\n" + " " ++ binary_to_list(BridgeName) ++ + " {\n" + " connector = does_not_exist\n" + " enable = true\n" + " kafka {\n" + " topic = test-topic-one-partition\n" + " }\n" + " local_topic = \"mqtt/local/topic\"\n" + " resource_opts {health_check_interval = 32s}\n" + " }\n" + " }\n" + "}\n" + "\n", + schema_mod => emqx_bridge_v2_schema + }, + emqx_cth_suite:start_app(emqx_bridge, StartOps), + + ?assertMatch( + {ok, 200, [ + #{ + <<"name">> := BridgeName, + <<"type">> := ?BRIDGE_TYPE, + <<"connector">> := <<"does_not_exist">>, + <<"status">> := <<"disconnected">>, + <<"error">> := <<"Pending installation">> + } + ]}, + request_json(get, uri([?ROOT]), Config) + ), + + BridgeID = emqx_bridge_resource:bridge_id(?BRIDGE_TYPE, ?BRIDGE_NAME), + request_json( + put, + uri([?ROOT, BridgeID]), + ?KAFKA_BRIDGE_UPDATE(?BRIDGE_NAME, ?CONNECTOR_NAME), + Config + ), + + ?assertMatch( + {ok, 200, #{ + <<"connector">> := ?CONNECTOR_NAME, + <<"status">> := <<"connected">> + }}, + request_json(get, uri([?ROOT, BridgeID]), Config) + ), + + ok. + t_start_bridge_unknown_node(Config) -> {ok, 404, _} = request( @@ -886,6 +1013,14 @@ t_cascade_delete_actions(Config) -> ), {ok, 200, []} = request_json(get, uri([?ROOT]), Config). +t_action_types(Config) -> + Res = request_json(get, uri(["action_types"]), Config), + ?assertMatch({ok, 200, _}, Res), + {ok, 200, Types} = Res, + ?assert(is_list(Types), #{types => Types}), + ?assert(lists:all(fun is_binary/1, Types), #{types => Types}), + ok. + %%% helpers listen_on_random_port() -> SockOpts = [binary, {active, false}, {packet, raw}, {reuseaddr, true}, {backlog, 1000}], diff --git a/apps/emqx_bridge/test/emqx_bridge_v2_testlib.erl b/apps/emqx_bridge/test/emqx_bridge_v2_testlib.erl index 278a0420a..5a2b6b000 100644 --- a/apps/emqx_bridge/test/emqx_bridge_v2_testlib.erl +++ b/apps/emqx_bridge/test/emqx_bridge_v2_testlib.erl @@ -145,6 +145,39 @@ create_bridge(Config, Overrides) -> ct:pal("creating bridge with config: ~p", [BridgeConfig]), emqx_bridge_v2:create(BridgeType, BridgeName, BridgeConfig). 
+list_bridges_api() -> + Params = [], + Path = emqx_mgmt_api_test_util:api_path(["actions"]), + AuthHeader = emqx_mgmt_api_test_util:auth_header_(), + Opts = #{return_all => true}, + ct:pal("listing bridges (via http)"), + Res = + case emqx_mgmt_api_test_util:request_api(get, Path, "", AuthHeader, Params, Opts) of + {ok, {Status, Headers, Body0}} -> + {ok, {Status, Headers, emqx_utils_json:decode(Body0, [return_maps])}}; + Error -> + Error + end, + ct:pal("list bridges result: ~p", [Res]), + Res. + +get_bridge_api(BridgeType, BridgeName) -> + BridgeId = emqx_bridge_resource:bridge_id(BridgeType, BridgeName), + Params = [], + Path = emqx_mgmt_api_test_util:api_path(["actions", BridgeId]), + AuthHeader = emqx_mgmt_api_test_util:auth_header_(), + Opts = #{return_all => true}, + ct:pal("get bridge ~p (via http)", [{BridgeType, BridgeName}]), + Res = + case emqx_mgmt_api_test_util:request_api(get, Path, "", AuthHeader, Params, Opts) of + {ok, {Status, Headers, Body0}} -> + {ok, {Status, Headers, emqx_utils_json:decode(Body0, [return_maps])}}; + Error -> + Error + end, + ct:pal("get bridge ~p result: ~p", [{BridgeType, BridgeName}, Res]), + Res. + create_bridge_api(Config) -> create_bridge_api(Config, _Overrides = #{}). diff --git a/apps/emqx_bridge_azure_event_hub/src/emqx_bridge_azure_event_hub.app.src b/apps/emqx_bridge_azure_event_hub/src/emqx_bridge_azure_event_hub.app.src index ece0495f9..40ea79334 100644 --- a/apps/emqx_bridge_azure_event_hub/src/emqx_bridge_azure_event_hub.app.src +++ b/apps/emqx_bridge_azure_event_hub/src/emqx_bridge_azure_event_hub.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_azure_event_hub, [ {description, "EMQX Enterprise Azure Event Hub Bridge"}, - {vsn, "0.1.3"}, + {vsn, "0.1.4"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_azure_event_hub/src/emqx_bridge_azure_event_hub.erl b/apps/emqx_bridge_azure_event_hub/src/emqx_bridge_azure_event_hub.erl index bf2cf5438..eb364bdff 100644 --- a/apps/emqx_bridge_azure_event_hub/src/emqx_bridge_azure_event_hub.erl +++ b/apps/emqx_bridge_azure_event_hub/src/emqx_bridge_azure_event_hub.erl @@ -114,6 +114,15 @@ fields(kafka_message) -> Fields0 = emqx_bridge_kafka:fields(kafka_message), Fields = proplists:delete(timestamp, Fields0), override_documentations(Fields); +fields(action) -> + {azure_event_hub_producer, + mk( + hoconsc:map(name, ref(emqx_bridge_azure_event_hub, actions)), + #{ + desc => <<"Azure Event Hub Actions Config">>, + required => false + } + )}; fields(actions) -> Fields = override( @@ -162,7 +171,7 @@ bridge_v2_examples(Method) -> [ #{ ?AEH_CONNECTOR_TYPE_BIN => #{ - summary => <<"Azure Event Hub Bridge v2">>, + summary => <<"Azure Event Hub Action">>, value => values({Method, bridge_v2}) } } @@ -207,7 +216,7 @@ values({post, bridge_v2}) -> #{ enable => true, connector => <<"my_azure_event_hub_producer_connector">>, - name => <<"my_azure_event_hub_producer_bridge">>, + name => <<"my_azure_event_hub_producer_action">>, type => ?AEH_CONNECTOR_TYPE_BIN } ); diff --git a/apps/emqx_bridge_azure_event_hub/src/emqx_bridge_azure_event_hub_action_info.erl b/apps/emqx_bridge_azure_event_hub/src/emqx_bridge_azure_event_hub_action_info.erl new file mode 100644 index 000000000..8ebdb2435 --- /dev/null +++ b/apps/emqx_bridge_azure_event_hub/src/emqx_bridge_azure_event_hub_action_info.erl @@ -0,0 +1,22 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved. 
+%%-------------------------------------------------------------------- + +-module(emqx_bridge_azure_event_hub_action_info). + +-behaviour(emqx_action_info). + +-export([ + bridge_v1_type_name/0, + action_type_name/0, + connector_type_name/0, + schema_module/0 +]). + +bridge_v1_type_name() -> azure_event_hub_producer. + +action_type_name() -> azure_event_hub_producer. + +connector_type_name() -> azure_event_hub_producer. + +schema_module() -> emqx_bridge_azure_event_hub. diff --git a/apps/emqx_bridge_cassandra/src/emqx_bridge_cassandra.app.src b/apps/emqx_bridge_cassandra/src/emqx_bridge_cassandra.app.src index 59661d7c0..97be100d2 100644 --- a/apps/emqx_bridge_cassandra/src/emqx_bridge_cassandra.app.src +++ b/apps/emqx_bridge_cassandra/src/emqx_bridge_cassandra.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_cassandra, [ {description, "EMQX Enterprise Cassandra Bridge"}, - {vsn, "0.1.5"}, + {vsn, "0.1.6"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.app.src b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.app.src index d0821ea83..d4c16e13c 100644 --- a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.app.src +++ b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_gcp_pubsub, [ {description, "EMQX Enterprise GCP Pub/Sub Bridge"}, - {vsn, "0.1.9"}, + {vsn, "0.1.10"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.erl b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.erl index 685fd3397..bb4a13875 100644 --- a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.erl +++ b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.erl @@ -101,7 +101,7 @@ fields(connector_config) -> )}, {service_account_json, sc( - service_account_json(), + ?MODULE:service_account_json(), #{ required => true, validator => fun ?MODULE:service_account_json_validator/1, diff --git a/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.erl b/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.erl index d63103e2e..f5ae714d7 100644 --- a/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.erl +++ b/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.erl @@ -126,7 +126,7 @@ desc(_) -> undefined. write_syntax(type) -> - emqx_bridge_influxdb:write_syntax(); + emqx_bridge_influxdb:write_syntax_type(); write_syntax(required) -> true; write_syntax(validator) -> diff --git a/apps/emqx_bridge_http/src/emqx_bridge_http.app.src b/apps/emqx_bridge_http/src/emqx_bridge_http.app.src index e5c559bd5..87d7e57a6 100644 --- a/apps/emqx_bridge_http/src/emqx_bridge_http.app.src +++ b/apps/emqx_bridge_http/src/emqx_bridge_http.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_http, [ {description, "EMQX HTTP Bridge and Connector Application"}, - {vsn, "0.1.4"}, + {vsn, "0.1.5"}, {registered, []}, {applications, [kernel, stdlib, emqx_connector, emqx_resource, ehttpc]}, {env, []}, diff --git a/apps/emqx_bridge_http/src/emqx_bridge_http_connector.erl b/apps/emqx_bridge_http/src/emqx_bridge_http_connector.erl index b2f876d21..743ab97fe 100644 --- a/apps/emqx_bridge_http/src/emqx_bridge_http_connector.erl +++ b/apps/emqx_bridge_http/src/emqx_bridge_http_connector.erl @@ -46,14 +46,6 @@ -export([validate_method/1, join_paths/2]). --type connect_timeout() :: emqx_schema:duration() | infinity. --type pool_type() :: random | hash. - --reflect_type([ - connect_timeout/0, - pool_type/0 -]). - -define(DEFAULT_PIPELINE_SIZE, 100). 
-define(DEFAULT_REQUEST_TIMEOUT_MS, 30_000). @@ -89,7 +81,7 @@ fields(config) -> )}, {pool_type, sc( - pool_type(), + hoconsc:enum([random, hash]), #{ default => random, desc => ?DESC("pool_type") diff --git a/apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb.erl b/apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb.erl index 47eeecb4e..acb295752 100644 --- a/apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb.erl +++ b/apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb.erl @@ -11,7 +11,8 @@ -import(hoconsc, [mk/2, enum/1, ref/2]). -export([ - conn_bridge_examples/1 + conn_bridge_examples/1, + write_syntax_type/0 ]). -export([ @@ -29,6 +30,9 @@ %% ------------------------------------------------------------------------------------------------- %% api +write_syntax_type() -> + typerefl:alias("string", write_syntax()). + conn_bridge_examples(Method) -> [ #{ @@ -154,7 +158,7 @@ desc(_) -> undefined. write_syntax(type) -> - ?MODULE:write_syntax(); + write_syntax_type(); write_syntax(required) -> true; write_syntax(validator) -> diff --git a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.app.src b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.app.src index 88fa6b7bd..00b9d8968 100644 --- a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.app.src +++ b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_bridge_kafka, [ {description, "EMQX Enterprise Kafka Bridge"}, - {vsn, "0.1.11"}, + {vsn, "0.1.12"}, {registered, [emqx_bridge_kafka_consumer_sup]}, {applications, [ kernel, @@ -12,7 +12,7 @@ brod, brod_gssapi ]}, - {env, []}, + {env, [{emqx_action_info_module, emqx_bridge_kafka_action_info}]}, {modules, []}, {links, []} diff --git a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.erl b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.erl index 800a87601..d193738bb 100644 --- a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.erl +++ b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.erl @@ -100,7 +100,7 @@ values({post, connector}) -> values({post, KafkaType}) -> maps:merge( #{ - name => <<"my_kafka_producer_bridge">>, + name => <<"my_kafka_producer_action">>, type => <<"kafka_producer">> }, values({put, KafkaType}) @@ -524,7 +524,18 @@ fields(consumer_kafka_opts) -> fields(resource_opts) -> SupportedFields = [health_check_interval], CreationOpts = emqx_resource_schema:create_opts(_Overrides = []), - lists:filter(fun({Field, _}) -> lists:member(Field, SupportedFields) end, CreationOpts). + lists:filter(fun({Field, _}) -> lists:member(Field, SupportedFields) end, CreationOpts); +fields(action_field) -> + {kafka_producer, + mk( + hoconsc:map(name, ref(emqx_bridge_kafka, kafka_producer_action)), + #{ + desc => <<"Kafka Producer Action Config">>, + required => false + } + )}; +fields(action) -> + fields(action_field). desc("config_connector") -> ?DESC("desc_config"); diff --git a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_action_info.erl b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_action_info.erl new file mode 100644 index 000000000..50d4f0c63 --- /dev/null +++ b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_action_info.erl @@ -0,0 +1,22 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- + +-module(emqx_bridge_kafka_action_info). + +-behaviour(emqx_action_info). + +-export([ + bridge_v1_type_name/0, + action_type_name/0, + connector_type_name/0, + schema_module/0 +]). 
+ +bridge_v1_type_name() -> kafka. + +action_type_name() -> kafka_producer. + +connector_type_name() -> kafka_producer. + +schema_module() -> emqx_bridge_kafka. diff --git a/apps/emqx_bridge_kafka/test/emqx_bridge_v2_kafka_producer_SUITE.erl b/apps/emqx_bridge_kafka/test/emqx_bridge_v2_kafka_producer_SUITE.erl index 58a16ea67..6adb66357 100644 --- a/apps/emqx_bridge_kafka/test/emqx_bridge_v2_kafka_producer_SUITE.erl +++ b/apps/emqx_bridge_kafka/test/emqx_bridge_v2_kafka_producer_SUITE.erl @@ -29,25 +29,27 @@ all() -> emqx_common_test_helpers:all(?MODULE). init_per_suite(Config) -> - _ = application:load(emqx_conf), - ok = emqx_common_test_helpers:start_apps(apps_to_start_and_stop()), - application:ensure_all_started(telemetry), - application:ensure_all_started(wolff), - application:ensure_all_started(brod), + Apps = emqx_cth_suite:start( + [ + emqx, + emqx_conf, + emqx_connector, + emqx_bridge_kafka, + emqx_bridge, + emqx_rule_engine, + emqx_management, + {emqx_dashboard, "dashboard.listeners.http { enable = true, bind = 18083 }"} + ], + #{work_dir => emqx_cth_suite:work_dir(Config)} + ), + {ok, _} = emqx_common_test_http:create_default_app(), emqx_bridge_kafka_impl_producer_SUITE:wait_until_kafka_is_up(), - Config. + [{apps, Apps} | Config]. -end_per_suite(_Config) -> - emqx_common_test_helpers:stop_apps(apps_to_start_and_stop()). - -apps_to_start_and_stop() -> - [ - emqx, - emqx_conf, - emqx_connector, - emqx_bridge, - emqx_rule_engine - ]. +end_per_suite(Config) -> + Apps = ?config(apps, Config), + emqx_cth_suite:stop(Apps), + ok. t_create_remove_list(_) -> [] = emqx_bridge_v2:list(), @@ -165,6 +167,24 @@ t_unknown_topic(_Config) -> ok end ), + ?assertMatch( + {ok, + {{_, 200, _}, _, [ + #{ + <<"status">> := <<"disconnected">>, + <<"node_status">> := [#{<<"status">> := <<"disconnected">>}] + } + ]}}, + emqx_bridge_v2_testlib:list_bridges_api() + ), + ?assertMatch( + {ok, + {{_, 200, _}, _, #{ + <<"status">> := <<"disconnected">>, + <<"node_status">> := [#{<<"status">> := <<"disconnected">>}] + }}}, + emqx_bridge_v2_testlib:get_bridge_api(?TYPE, BridgeName) + ), ok. 
check_send_message_with_bridge(BridgeName) -> diff --git a/apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt.app.src b/apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt.app.src index e39c4df69..cbef0dda8 100644 --- a/apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt.app.src +++ b/apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_bridge_mqtt, [ {description, "EMQX MQTT Broker Bridge"}, - {vsn, "0.1.4"}, + {vsn, "0.1.5"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_pulsar/src/emqx_bridge_pulsar.app.src b/apps/emqx_bridge_pulsar/src/emqx_bridge_pulsar.app.src index b012874f8..c9abebf8b 100644 --- a/apps/emqx_bridge_pulsar/src/emqx_bridge_pulsar.app.src +++ b/apps/emqx_bridge_pulsar/src/emqx_bridge_pulsar.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_pulsar, [ {description, "EMQX Pulsar Bridge"}, - {vsn, "0.1.7"}, + {vsn, "0.1.8"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_pulsar/test/emqx_bridge_pulsar_tests.erl b/apps/emqx_bridge_pulsar/test/emqx_bridge_pulsar_tests.erl index 7169ea3d2..29299dcc9 100644 --- a/apps/emqx_bridge_pulsar/test/emqx_bridge_pulsar_tests.erl +++ b/apps/emqx_bridge_pulsar/test/emqx_bridge_pulsar_tests.erl @@ -11,7 +11,7 @@ %%=========================================================================== pulsar_producer_validations_test() -> - Name = my_producer, + Name = list_to_atom("my_producer"), Conf0 = pulsar_producer_hocon(), Conf1 = Conf0 ++ diff --git a/apps/emqx_bridge_rabbitmq/src/emqx_bridge_rabbitmq_connector.erl b/apps/emqx_bridge_rabbitmq/src/emqx_bridge_rabbitmq_connector.erl index 04a93e08e..2e4074f79 100644 --- a/apps/emqx_bridge_rabbitmq/src/emqx_bridge_rabbitmq_connector.erl +++ b/apps/emqx_bridge_rabbitmq/src/emqx_bridge_rabbitmq_connector.erl @@ -20,7 +20,7 @@ -behaviour(ecpool_worker). %% hocon_schema callbacks --export([roots/0, fields/1]). +-export([namespace/0, roots/0, fields/1]). %% HTTP API callbacks -export([values/1]). @@ -43,6 +43,8 @@ %% Internal callbacks -export([publish_messages/3]). +namespace() -> "rabbitmq". + roots() -> [{config, #{type => hoconsc:ref(?MODULE, config)}}]. diff --git a/apps/emqx_bridge_syskeeper/src/emqx_bridge_syskeeper.erl b/apps/emqx_bridge_syskeeper/src/emqx_bridge_syskeeper.erl index 55e3d08b9..0ccc76c9a 100644 --- a/apps/emqx_bridge_syskeeper/src/emqx_bridge_syskeeper.erl +++ b/apps/emqx_bridge_syskeeper/src/emqx_bridge_syskeeper.erl @@ -75,6 +75,15 @@ namespace() -> "syskeeper". roots() -> []. +fields(action) -> + {syskeeper_forwarder, + mk( + hoconsc:map(name, ref(?MODULE, config)), + #{ + desc => <<"Syskeeper Forwarder Action Config">>, + required => false + } + )}; fields(config) -> [ {enable, mk(boolean(), #{desc => ?DESC("config_enable"), default => true})}, @@ -121,10 +130,16 @@ fields("creation_opts") -> emqx_resource_schema:create_opts([{request_ttl, #{default => infinity}}]); fields("post") -> [type_field(), name_field() | fields(config)]; +fields("post_bridge_v2") -> + fields("post"); fields("put") -> fields(config); +fields("put_bridge_v2") -> + fields("put"); fields("get") -> - emqx_bridge_schema:status_fields() ++ fields("post"). + emqx_bridge_schema:status_fields() ++ fields("post"); +fields("get_bridge_v2") -> + fields("get"). 
desc(config) -> ?DESC("desc_config"); diff --git a/apps/emqx_bridge_syskeeper/src/emqx_bridge_syskeeper_action_info.erl b/apps/emqx_bridge_syskeeper/src/emqx_bridge_syskeeper_action_info.erl new file mode 100644 index 000000000..77d3c26ce --- /dev/null +++ b/apps/emqx_bridge_syskeeper/src/emqx_bridge_syskeeper_action_info.erl @@ -0,0 +1,22 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- + +-module(emqx_bridge_syskeeper_action_info). + +-behaviour(emqx_action_info). + +-export([ + bridge_v1_type_name/0, + action_type_name/0, + connector_type_name/0, + schema_module/0 +]). + +bridge_v1_type_name() -> syskeeper_forwarder. + +action_type_name() -> syskeeper_forwarder. + +connector_type_name() -> syskeeper_forwarder. + +schema_module() -> emqx_bridge_syskeeper. diff --git a/apps/emqx_bridge_syskeeper/test/emqx_bridge_syskeeper_SUITE.erl b/apps/emqx_bridge_syskeeper/test/emqx_bridge_syskeeper_SUITE.erl index 54330ea37..66b267eac 100644 --- a/apps/emqx_bridge_syskeeper/test/emqx_bridge_syskeeper_SUITE.erl +++ b/apps/emqx_bridge_syskeeper/test/emqx_bridge_syskeeper_SUITE.erl @@ -65,21 +65,23 @@ end_per_group(_Group, _Config) -> ok. init_per_suite(Config) -> - ok = emqx_common_test_helpers:start_apps([ - emqx_conf, - emqx_connector, - emqx_bridge, - emqx_bridge_syskeeper - ]), - _ = emqx_bridge_enterprise:module_info(), + Apps = emqx_cth_suite:start( + [ + emqx_conf, + emqx_connector, + emqx_bridge, + emqx_bridge_syskeeper + ], + #{work_dir => emqx_cth_suite:work_dir(Config)} + ), emqx_mgmt_api_test_util:init_suite(), - Config. + [{apps, Apps} | Config]. -end_per_suite(_Config) -> +end_per_suite(Config) -> + Apps = ?config(apps, Config), emqx_mgmt_api_test_util:end_suite(), - ok = emqx_common_test_helpers:stop_apps([ - emqx_bridge_syskeeper, emqx_bridge, emqx_connector, emqx_conf - ]). + ok = emqx_cth_suite:stop(Apps), + ok. init_per_testcase(_Testcase, Config) -> snabbkaffe:start_trace(), diff --git a/apps/emqx_conf/include/emqx_conf.hrl b/apps/emqx_conf/include/emqx_conf.hrl index 6c4a89fb8..eeaa7c09e 100644 --- a/apps/emqx_conf/include/emqx_conf.hrl +++ b/apps/emqx_conf/include/emqx_conf.hrl @@ -21,6 +21,7 @@ -define(CLUSTER_MFA, cluster_rpc_mfa). -define(CLUSTER_COMMIT, cluster_rpc_commit). +-define(DEFAULT_INIT_TXN_ID, -1). -record(cluster_rpc_mfa, { tnx_id :: pos_integer(), diff --git a/apps/emqx_conf/src/emqx_cluster_rpc.erl b/apps/emqx_conf/src/emqx_cluster_rpc.erl index 934d7ef7a..5bc330afa 100644 --- a/apps/emqx_conf/src/emqx_cluster_rpc.erl +++ b/apps/emqx_conf/src/emqx_cluster_rpc.erl @@ -44,7 +44,9 @@ read_next_mfa/1, trans_query/1, trans_status/0, - on_leave_clean/0 + on_leave_clean/0, + get_commit_lag/0, + get_commit_lag/1 ]). -export([ @@ -231,13 +233,29 @@ make_initiate_call_req(M, F, A) -> -spec get_node_tnx_id(node()) -> integer(). get_node_tnx_id(Node) -> case mnesia:wread({?CLUSTER_COMMIT, Node}) of - [] -> -1; + [] -> ?DEFAULT_INIT_TXN_ID; [#cluster_rpc_commit{tnx_id = TnxId}] -> TnxId end. +%% @doc Return the commit lag of *this* node. +-spec get_commit_lag() -> #{my_id := pos_integer(), latest := pos_integer()}. +get_commit_lag() -> + {atomic, Result} = transaction(fun ?MODULE:get_commit_lag/1, [node()]), + Result. 
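%% Illustration only: a minimal sketch (assumed usage, not defined elsewhere in this
%% patch) of how the lag map returned by get_commit_lag/0 could be consumed by a
%% caller to decide whether this node is behind the latest cluster-rpc commit,
%% assuming the map shape built in get_commit_lag/1 below:
%%
%%   case emqx_cluster_rpc:get_commit_lag() of
%%       #{my_id := Id, latest := Latest} when Id >= Latest -> in_sync;
%%       #{my_id := Id, latest := Latest} -> {behind_by, Latest - Id}
%%   end.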
+ +get_commit_lag(Node) -> + LatestId = get_cluster_tnx_id(), + LatestNode = + case mnesia:read(?CLUSTER_MFA, LatestId) of + [#?CLUSTER_MFA{initiator = N}] -> N; + _ -> undefined + end, + MyId = get_node_tnx_id(Node), + #{my_id => MyId, latest => LatestId, latest_node => LatestNode}. + %% Checks whether the Mnesia tables used by this module are waiting to %% be loaded and from where. --spec get_tables_status() -> #{atom() => {waiting, [node()]} | {disc | network, node()}}. +-spec get_tables_status() -> #{atom() => {waiting, [node()]} | {loaded, local | node()}}. get_tables_status() -> maps:from_list([ {Tab, do_get_tables_status(Tab)} @@ -249,13 +267,16 @@ do_get_tables_status(Tab) -> TabNodes = proplists:get_value(all_nodes, Props), KnownDown = mnesia_recover:get_mnesia_downs(), LocalNode = node(), - case proplists:get_value(load_node, Props) of + %% load_node. Returns the name of the node that Mnesia loaded the table from. + %% The structure of the returned value is unspecified, but can be useful for debugging purposes. + LoadedFrom = proplists:get_value(load_node, Props), + case LoadedFrom of unknown -> {waiting, TabNodes -- [LocalNode | KnownDown]}; LocalNode -> - {disc, LocalNode}; + {loaded, local}; Node -> - {network, Node} + {loaded, Node} end. %% Regardless of what MFA is returned, consider it a success), diff --git a/apps/emqx_conf/src/emqx_conf.app.src b/apps/emqx_conf/src/emqx_conf.app.src index fda3e4759..3856a882c 100644 --- a/apps/emqx_conf/src/emqx_conf.app.src +++ b/apps/emqx_conf/src/emqx_conf.app.src @@ -1,6 +1,6 @@ {application, emqx_conf, [ {description, "EMQX configuration management"}, - {vsn, "0.1.30"}, + {vsn, "0.1.31"}, {registered, []}, {mod, {emqx_conf_app, []}}, {applications, [kernel, stdlib, emqx_ctl]}, diff --git a/apps/emqx_conf/src/emqx_conf.erl b/apps/emqx_conf/src/emqx_conf.erl index c4bd0efc9..c986a65ee 100644 --- a/apps/emqx_conf/src/emqx_conf.erl +++ b/apps/emqx_conf/src/emqx_conf.erl @@ -151,6 +151,9 @@ reset(Node, KeyPath, Opts) -> %% @doc Called from build script. %% TODO: move to a external escript after all refactoring is done dump_schema(Dir, SchemaModule) -> + %% TODO: Load all apps instead of only emqx_dashboard + %% as this will help schemas that searches for apps with + %% relevant schema definitions _ = application:load(emqx_dashboard), ok = emqx_dashboard_desc_cache:init(), lists:foreach( @@ -292,7 +295,7 @@ hocon_schema_to_spec(?MAP(Name, Type), LocalModule) -> }, SubRefs }; -hocon_schema_to_spec(?UNION(Types), LocalModule) -> +hocon_schema_to_spec(?UNION(Types, _DisplayName), LocalModule) -> {OneOf, Refs} = lists:foldl( fun(Type, {Acc, RefsAcc}) -> {Schema, SubRefs} = hocon_schema_to_spec(Type, LocalModule), @@ -305,149 +308,8 @@ hocon_schema_to_spec(?UNION(Types), LocalModule) -> hocon_schema_to_spec(Atom, _LocalModule) when is_atom(Atom) -> {#{type => enum, symbols => [Atom]}, []}. 
-typename_to_spec("user_id_type()", _Mod) -> - #{type => enum, symbols => [clientid, username]}; -typename_to_spec("term()", _Mod) -> - #{type => string}; -typename_to_spec("boolean()", _Mod) -> - #{type => boolean}; -typename_to_spec("binary()", _Mod) -> - #{type => string}; -typename_to_spec("float()", _Mod) -> - #{type => number}; -typename_to_spec("integer()", _Mod) -> - #{type => number}; -typename_to_spec("non_neg_integer()", _Mod) -> - #{type => number, minimum => 0}; -typename_to_spec("number()", _Mod) -> - #{type => number}; -typename_to_spec("string()", _Mod) -> - #{type => string}; -typename_to_spec("atom()", _Mod) -> - #{type => string}; -typename_to_spec("duration()", _Mod) -> - #{type => duration}; -typename_to_spec("timeout_duration()", _Mod) -> - #{type => duration}; -typename_to_spec("duration_s()", _Mod) -> - #{type => duration}; -typename_to_spec("timeout_duration_s()", _Mod) -> - #{type => duration}; -typename_to_spec("duration_ms()", _Mod) -> - #{type => duration}; -typename_to_spec("timeout_duration_ms()", _Mod) -> - #{type => duration}; -typename_to_spec("percent()", _Mod) -> - #{type => percent}; -typename_to_spec("file()", _Mod) -> - #{type => string}; -typename_to_spec("ip_port()", _Mod) -> - #{type => ip_port}; -typename_to_spec("url()", _Mod) -> - #{type => url}; -typename_to_spec("bytesize()", _Mod) -> - #{type => 'byteSize'}; -typename_to_spec("wordsize()", _Mod) -> - #{type => 'byteSize'}; -typename_to_spec("qos()", _Mod) -> - #{type => enum, symbols => [0, 1, 2]}; -typename_to_spec("comma_separated_list()", _Mod) -> - #{type => comma_separated_string}; -typename_to_spec("comma_separated_atoms()", _Mod) -> - #{type => comma_separated_string}; -typename_to_spec("pool_type()", _Mod) -> - #{type => enum, symbols => [random, hash]}; -typename_to_spec("log_level()", _Mod) -> - #{ - type => enum, - symbols => [ - debug, - info, - notice, - warning, - error, - critical, - alert, - emergency, - all - ] - }; -typename_to_spec("rate()", _Mod) -> - #{type => string}; -typename_to_spec("capacity()", _Mod) -> - #{type => string}; -typename_to_spec("burst_rate()", _Mod) -> - #{type => string}; -typename_to_spec("failure_strategy()", _Mod) -> - #{type => enum, symbols => [force, drop, throw]}; -typename_to_spec("initial()", _Mod) -> - #{type => string}; -typename_to_spec("map()", _Mod) -> - #{type => object}; -typename_to_spec("#{" ++ _, Mod) -> - typename_to_spec("map()", Mod); -typename_to_spec(Name, Mod) -> - Spec = range(Name), - Spec1 = remote_module_type(Spec, Name, Mod), - Spec2 = typerefl_array(Spec1, Name, Mod), - Spec3 = integer(Spec2, Name), - default_type(Spec3). - -default_type(nomatch) -> #{type => string}; -default_type(Type) -> Type. - -range(Name) -> - case string:split(Name, "..") of - %% 1..10 1..inf -inf..10 - [MinStr, MaxStr] -> - Schema = #{type => number}, - Schema1 = add_integer_prop(Schema, minimum, MinStr), - add_integer_prop(Schema1, maximum, MaxStr); - _ -> - nomatch - end. - -%% Module:Type -remote_module_type(nomatch, Name, Mod) -> - case string:split(Name, ":") of - [_Module, Type] -> typename_to_spec(Type, Mod); - _ -> nomatch - end; -remote_module_type(Spec, _Name, _Mod) -> - Spec. - -%% [string()] or [integer()] or [xxx]. 
-typerefl_array(nomatch, Name, Mod) -> - case string:trim(Name, leading, "[") of - Name -> - nomatch; - Name1 -> - case string:trim(Name1, trailing, "]") of - Name1 -> - notmatch; - Name2 -> - Schema = typename_to_spec(Name2, Mod), - #{type => array, items => Schema} - end - end; -typerefl_array(Spec, _Name, _Mod) -> - Spec. - -%% integer(1) -integer(nomatch, Name) -> - case string:to_integer(Name) of - {Int, []} -> #{type => enum, symbols => [Int], default => Int}; - _ -> nomatch - end; -integer(Spec, _Name) -> - Spec. - -add_integer_prop(Schema, Key, Value) -> - case string:to_integer(Value) of - {error, no_integer} -> Schema; - {Int, []} when Key =:= minimum -> Schema#{Key => Int}; - {Int, []} -> Schema#{Key => Int} - end. +typename_to_spec(TypeStr, Module) -> + emqx_conf_schema_types:readable_dashboard(Module, TypeStr). to_bin(List) when is_list(List) -> case io_lib:printable_list(List) of diff --git a/apps/emqx_conf/src/emqx_conf_app.erl b/apps/emqx_conf/src/emqx_conf_app.erl index 7addb3823..74a7a8f2e 100644 --- a/apps/emqx_conf/src/emqx_conf_app.erl +++ b/apps/emqx_conf/src/emqx_conf_app.erl @@ -26,8 +26,6 @@ -include_lib("emqx/include/logger.hrl"). -include("emqx_conf.hrl"). --define(DEFAULT_INIT_TXN_ID, -1). - start(_StartType, _StartArgs) -> try ok = init_conf() @@ -52,31 +50,32 @@ unset_config_loaded() -> %% This function is named 'override' due to historical reasons. get_override_config_file() -> Node = node(), + Data = #{ + wall_clock => erlang:statistics(wall_clock), + node => Node, + release => emqx_release:version_with_prefix() + }, case emqx_app:init_load_done() of false -> - {error, #{node => Node, msg => "init_conf_load_not_done"}}; + {error, Data#{msg => "init_conf_load_not_done"}}; true -> case erlang:whereis(emqx_config_handler) of undefined -> - {error, #{node => Node, msg => "emqx_config_handler_not_ready"}}; + {error, Data#{msg => "emqx_config_handler_not_ready"}}; _ -> Fun = fun() -> TnxId = emqx_cluster_rpc:get_node_tnx_id(Node), - WallClock = erlang:statistics(wall_clock), Conf = emqx_config_handler:get_raw_cluster_override_conf(), HasDeprecateFile = emqx_config:has_deprecated_file(), - #{ - wall_clock => WallClock, + Data#{ conf => Conf, tnx_id => TnxId, - node => Node, - has_deprecated_file => HasDeprecateFile, - release => emqx_release:version_with_prefix() + has_deprecated_file => HasDeprecateFile } end, case mria:ro_transaction(?CLUSTER_RPC_SHARD, Fun) of {atomic, Res} -> {ok, Res}; - {aborted, Reason} -> {error, #{node => Node, msg => Reason}} + {aborted, Reason} -> {error, Data#{msg => Reason}} end end end. @@ -105,7 +104,7 @@ init_load(TnxId) -> ok = emqx_app:set_config_loader(emqx_conf), ok; Module -> - ?SLOG(debug, #{ + ?SLOG(info, #{ msg => "skip_init_config_load", reason => "Some application has set another config loader", loader => Module @@ -126,7 +125,7 @@ sync_cluster_conf() -> case cluster_nodes() of [] -> %% The first core nodes is self. - ?SLOG(debug, #{ + ?SLOG(info, #{ msg => "skip_sync_cluster_conf", reason => "This is a single node, or the first node in the cluster" }), @@ -138,70 +137,94 @@ sync_cluster_conf() -> %% @private Some core nodes are running, try to sync the cluster config from them. 
sync_cluster_conf2(Nodes) -> {Results, Failed} = emqx_conf_proto_v3:get_override_config_file(Nodes), - {Ready, NotReady0} = lists:partition(fun(Res) -> element(1, Res) =:= ok end, Results), - NotReady = lists:filter(fun(Res) -> element(1, Res) =:= error end, NotReady0), - case (Failed =/= [] orelse NotReady =/= []) of - true when Ready =/= [] -> - %% Some core nodes failed to reply. - Warning = #{ - nodes => Nodes, - failed => Failed, - not_ready => NotReady, - msg => "ignored_nodes_when_sync_cluster_conf" - }, - ?SLOG(warning, Warning); - true when Failed =/= [] -> - %% There are core nodes running but no one was able to reply. - ?SLOG(error, #{ - msg => "failed_to_sync_cluster_conf", - nodes => Nodes, - failed => Failed, - not_ready => NotReady - }); - true -> - %% There are core nodes booting up - ?SLOG(info, #{ - msg => "peer_not_ready_for_config_sync", - reason => "The 'not_ready' peer node(s) are loading configs", - nodes => Nodes, - not_ready => NotReady - }); - false -> - ok - end, - case Ready of + {Ready, NotReady} = lists:partition(fun(Res) -> element(1, Res) =:= ok end, Results), + LogData = #{peer_nodes => Nodes, self_node => node()}, + case Failed ++ NotReady of [] -> - case should_proceed_with_boot() of - true -> - %% Act as if this node is alone, so it can - %% finish the boot sequence and load the - %% config for other nodes to copy it. - ?SLOG(info, #{ - msg => "skip_sync_cluster_conf", - loading_from_disk => true, - nodes => Nodes, - failed => Failed, - not_ready => NotReady - }), - {ok, ?DEFAULT_INIT_TXN_ID}; - false -> - %% retry in some time - Jitter = rand:uniform(2000), - Timeout = 10000 + Jitter, - timer:sleep(Timeout), - ?SLOG(warning, #{ - msg => "sync_cluster_conf_retry", - timeout => Timeout, - nodes => Nodes, - failed => Failed, - not_ready => NotReady - }), - sync_cluster_conf() - end; + ok; _ -> + ?SLOG( + warning, + LogData#{ + msg => "cluster_config_fetch_failures", + failed_nodes => Failed, + booting_nodes => NotReady + } + ) + end, + MyRole = mria_rlog:role(), + case Ready of + [] when MyRole =:= replicant -> + %% replicant should never boot without copying from a core node + delay_and_retry(LogData#{role => replicant}); + [] -> + %% none of the nodes are ready, either delay-and-retry or boot without wait + TableStatus = tx_commit_table_status(), + sync_cluster_conf5(TableStatus, LogData); + _ -> + %% copy config from the best node in the Ready list sync_cluster_conf3(Ready) end. +%% None of the peer nodes are responsive, so we have to make a decision +%% based on the commit lag (if the commit table is loaded). +%% +%% It could be that the peer nodes are also booting up, +%% however we cannot always wait because it may run into a deadlock. +%% +%% Giving up the wait here implies that some changes made to the peer node outside +%% of cluster-rpc MFAs will be lost. +%% e.g. stop all nodes, manually change cluster.hocon in one node +%% then boot all nodes around the same time, the changed cluster.hocon may +%% get lost if the node happens to copy config from others.
+sync_cluster_conf5({loaded, local}, LogData) -> + ?SLOG(info, LogData#{ + msg => "skip_copy_cluster_config_from_peer_nodes", + explain => "Commit table loaded locally from disk, assuming that I have the latest config" + }), + {ok, ?DEFAULT_INIT_TXN_ID}; +sync_cluster_conf5({loaded, From}, LogData) -> + case get_commit_lag() of + #{my_id := MyId, latest := Latest} = Lagging when MyId >= Latest orelse Latest =:= 0 -> + ?SLOG(info, LogData#{ + msg => "skip_copy_cluster_config_from_peer_nodes", + explain => "I have the latest cluster config commit", + commit_loaded_from => From, + lagging_info => Lagging + }), + {ok, ?DEFAULT_INIT_TXN_ID}; + #{my_id := _MyId, latest := _Latest} = Lagging -> + delay_and_retry(LogData#{lagging_info => Lagging, commit_loaded_from => From}) + end; +sync_cluster_conf5({waiting, Waiting}, LogData) -> + %% this may never happen? since we waited for table before + delay_and_retry(LogData#{table_pending => Waiting}). + +get_commit_lag() -> + emqx_cluster_rpc:get_commit_lag(). + +delay_and_retry(LogData) -> + Timeout = sync_delay_timeout(), + ?SLOG(warning, LogData#{ + msg => "sync_cluster_conf_retry", + explain => + "Cannot boot alone due to potentially stale data. " + "Will try sync cluster config again after delay", + delay => Timeout + }), + timer:sleep(Timeout), + sync_cluster_conf(). + +-ifdef(TEST). +sync_delay_timeout() -> + Jitter = rand:uniform(200), + 1_000 + Jitter. +-else. +sync_delay_timeout() -> + Jitter = rand:uniform(2000), + 10_000 + Jitter. +-endif. + %% @private Filter out the nodes which are running a newer version than this node. sync_cluster_conf3(Ready) -> case lists:filter(fun is_older_or_same_version/1, Ready) of @@ -217,10 +240,10 @@ sync_cluster_conf3(Ready) -> ), ?SLOG(warning, #{ msg => "all_available_nodes_running_newer_version", - hint => - "Booting this node without syncing cluster config from peer core nodes " + explain => + "Booting this node without syncing cluster config from core nodes " "because other nodes are running a newer version", - peer_nodes => NodesAndVersions + versions => NodesAndVersions }), {ok, ?DEFAULT_INIT_TXN_ID}; Ready2 -> @@ -246,7 +269,7 @@ sync_cluster_conf4(Ready) -> [{ok, Info} | _] = lists:sort(fun conf_sort/2, Ready), #{node := Node, conf := RawOverrideConf, tnx_id := TnxId} = Info, HasDeprecatedFile = has_deprecated_file(Info), - ?SLOG(debug, #{ + ?SLOG(info, #{ msg => "sync_cluster_conf_success", synced_from_node => Node, has_deprecated_file => HasDeprecatedFile, @@ -263,19 +286,9 @@ sync_cluster_conf4(Ready) -> ok = sync_data_from_node(Node), {ok, TnxId}. -should_proceed_with_boot() -> +tx_commit_table_status() -> TablesStatus = emqx_cluster_rpc:get_tables_status(), - LocalNode = node(), - case maps:get(?CLUSTER_COMMIT, TablesStatus) of - {disc, LocalNode} -> - %% Loading locally; let this node finish its boot sequence - %% so others can copy the config from this one. - true; - _ -> - %% Loading from another node or still waiting for nodes to - %% be up. Try again. - false - end. + maps:get(?CLUSTER_COMMIT, TablesStatus). conf_sort({ok, #{tnx_id := Id1}}, {ok, #{tnx_id := Id2}}) when Id1 > Id2 -> true; conf_sort({ok, #{tnx_id := Id, wall_clock := W1}}, {ok, #{tnx_id := Id, wall_clock := W2}}) -> diff --git a/apps/emqx_conf/src/emqx_conf_schema.erl b/apps/emqx_conf/src/emqx_conf_schema.erl index ba9560157..3a2b5d972 100644 --- a/apps/emqx_conf/src/emqx_conf_schema.erl +++ b/apps/emqx_conf/src/emqx_conf_schema.erl @@ -28,21 +28,14 @@ -include("emqx_conf.hrl"). 
--type log_level() :: debug | info | notice | warning | error | critical | alert | emergency | all. --type file() :: string(). --type cipher() :: map(). - -behaviour(hocon_schema). --reflect_type([ - log_level/0, - file/0, - cipher/0 -]). - -export([ namespace/0, roots/0, fields/1, translations/0, translation/1, validations/0, desc/1, tags/0 ]). + +-export([log_level/0]). + -export([conf_get/2, conf_get/3, keys/2, filter/1]). -export([upgrade_raw_conf/1]). @@ -548,7 +541,7 @@ fields("node") -> )}, {"crash_dump_file", sc( - file(), + string(), #{ mapping => "vm_args.-env ERL_CRASH_DUMP", desc => ?DESC(node_crash_dump_file), @@ -839,7 +832,7 @@ fields("rpc") -> )}, {"certfile", sc( - file(), + string(), #{ mapping => "gen_rpc.certfile", converter => fun ensure_unicode_path/2, @@ -848,7 +841,7 @@ fields("rpc") -> )}, {"keyfile", sc( - file(), + string(), #{ mapping => "gen_rpc.keyfile", converter => fun ensure_unicode_path/2, @@ -857,7 +850,7 @@ fields("rpc") -> )}, {"cacertfile", sc( - file(), + string(), #{ mapping => "gen_rpc.cacertfile", converter => fun ensure_unicode_path/2, @@ -985,7 +978,7 @@ fields("log") -> })}, {"file", sc( - ?UNION([ + hoconsc:union([ ?R_REF("log_file_handler"), ?MAP(handler_name, ?R_REF("log_file_handler")) ]), @@ -1004,7 +997,7 @@ fields("log_file_handler") -> [ {"path", sc( - file(), + string(), #{ desc => ?DESC("log_file_handler_file"), default => <<"${EMQX_LOG_DIR}/emqx.log">>, @@ -1538,3 +1531,6 @@ ensure_unicode_path(Path, _) when is_list(Path) -> Path; ensure_unicode_path(Path, _) -> throw({"not_string", Path}). + +log_level() -> + hoconsc:enum([debug, info, notice, warning, error, critical, alert, emergency, all]). diff --git a/apps/emqx_conf/src/emqx_conf_schema_types.erl b/apps/emqx_conf/src/emqx_conf_schema_types.erl new file mode 100644 index 000000000..dc3af77b2 --- /dev/null +++ b/apps/emqx_conf/src/emqx_conf_schema_types.erl @@ -0,0 +1,340 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_conf_schema_types). + +-export([readable/2]). +-export([readable_swagger/2, readable_dashboard/2, readable_docgen/2]). + +%% Takes a typerefl name or hocon schema's display name and returns +%% a map of different flavors of more readable type specs. +%% - swagger: for swagger spec +%% - dashboard: to facilitate the dashboard UI rendering +%% - docgen: for documenation generation +readable(Module, TypeStr) when is_binary(TypeStr) -> + readable(Module, binary_to_list(TypeStr)); +readable(Module, TypeStr) when is_list(TypeStr) -> + try + %% Module is ignored so far as all types are distinguished by their names + readable(TypeStr) + catch + throw:unknown_type -> + fail(#{reason => unknown_type, type => TypeStr, module => Module}) + end. + +readable_swagger(Module, TypeStr) -> + get_readable(Module, TypeStr, swagger). 
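%% Illustration only: a minimal sketch of the expected return shape, based on the
%% clauses below. readable(emqx_schema, "duration()") yields one entry per flavor,
%% roughly:
%%
%%   #{
%%       swagger => #{type => string, example => <<"12m">>},
%%       dashboard => #{type => duration},
%%       docgen => #{type => "String", example => <<"12m">>}
%%   }
%%
%% and, via the fallback parser further below, readable(Mod, "1..65535") resolves
%% to an integer schema carrying minimum/maximum bounds.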
+ +readable_dashboard(Module, TypeStr) -> + get_readable(Module, TypeStr, dashboard). + +readable_docgen(Module, TypeStr) -> + get_readable(Module, TypeStr, docgen). + +get_readable(Module, TypeStr, Flavor) -> + Map = readable(Module, TypeStr), + case maps:get(Flavor, Map, undefined) of + undefined -> fail(#{reason => unknown_type, module => Module, type => TypeStr}); + Value -> Value + end. + +%% Fail the build or test. Production code should never get here. +-spec fail(_) -> no_return(). +fail(Reason) -> + io:format(standard_error, "ERROR: ~p~n", [Reason]), + error(Reason). + +readable("boolean()") -> + #{ + swagger => #{type => boolean}, + dashboard => #{type => boolean}, + docgen => #{type => "Boolean"} + }; +readable("binary()") -> + #{ + swagger => #{type => string}, + dashboard => #{type => string}, + docgen => #{type => "String"} + }; +readable("float()") -> + #{ + swagger => #{type => number}, + dashboard => #{type => number}, + docgen => #{type => "Float"} + }; +readable("integer()") -> + #{ + swagger => #{type => integer}, + dashboard => #{type => integer}, + docgen => #{type => "Integer"} + }; +readable("non_neg_integer()") -> + #{ + swagger => #{type => integer, minimum => 0}, + dashboard => #{type => integer, minimum => 0}, + docgen => #{type => "Integer(0..+inf)"} + }; +readable("pos_integer()") -> + #{ + swagger => #{type => integer, minimum => 1}, + dashboard => #{type => integer, minimum => 1}, + docgen => #{type => "Integer(1..+inf)"} + }; +readable("number()") -> + #{ + swagger => #{type => number}, + dashboard => #{type => number}, + docgen => #{type => "Number"} + }; +readable("string()") -> + #{ + swagger => #{type => string}, + dashboard => #{type => string}, + docgen => #{type => "String"} + }; +readable("atom()") -> + #{ + swagger => #{type => string}, + dashboard => #{type => string}, + docgen => #{type => "String"} + }; +readable("epoch_second()") -> + %% only for swagger + #{ + swagger => #{ + <<"oneOf">> => [ + #{type => integer, example => 1640995200, description => <<"epoch-second">>}, + #{ + type => string, + example => <<"2022-01-01T00:00:00.000Z">>, + format => <<"date-time">> + } + ] + } + }; +readable("epoch_millisecond()") -> + %% only for swagger + #{ + swagger => #{ + <<"oneOf">> => [ + #{ + type => integer, + example => 1640995200000, + description => <<"epoch-millisecond">> + }, + #{ + type => string, + example => <<"2022-01-01T00:00:00.000Z">>, + format => <<"date-time">> + } + ] + } + }; +readable("duration()") -> + #{ + swagger => #{type => string, example => <<"12m">>}, + dashboard => #{type => duration}, + docgen => #{type => "String", example => <<"12m">>} + }; +readable("duration_s()") -> + #{ + swagger => #{type => string, example => <<"1h">>}, + dashboard => #{type => duration}, + docgen => #{type => "String", example => <<"1h">>} + }; +readable("duration_ms()") -> + #{ + swagger => #{type => string, example => <<"32s">>}, + dashboard => #{type => duration}, + docgen => #{type => "String", example => <<"32s">>} + }; +readable("timeout_duration()") -> + #{ + swagger => #{type => string, example => <<"12m">>}, + dashboard => #{type => duration}, + docgen => #{type => "String", example => <<"12m">>} + }; +readable("timeout_duration_s()") -> + #{ + swagger => #{type => string, example => <<"1h">>}, + dashboard => #{type => duration}, + docgen => #{type => "String", example => <<"1h">>} + }; +readable("timeout_duration_ms()") -> + #{ + swagger => #{type => string, example => <<"32s">>}, + dashboard => #{type => duration}, + docgen => #{type => 
"String", example => <<"32s">>} + }; +readable("percent()") -> + #{ + swagger => #{type => string, example => <<"12%">>}, + dashboard => #{type => percent}, + docgen => #{type => "String", example => <<"12%">>} + }; +readable("ip_port()") -> + #{ + swagger => #{type => string, example => <<"127.0.0.1:80">>}, + dashboard => #{type => ip_port}, + docgen => #{type => "String", example => <<"127.0.0.1:80">>} + }; +readable("url()") -> + #{ + swagger => #{type => string, example => <<"http://127.0.0.1">>}, + dashboard => #{type => url}, + docgen => #{type => "String", example => <<"http://127.0.0.1">>} + }; +readable("bytesize()") -> + #{ + swagger => #{type => string, example => <<"32MB">>}, + dashboard => #{type => 'byteSize'}, + docgen => #{type => "String", example => <<"32MB">>} + }; +readable("wordsize()") -> + #{ + swagger => #{type => string, example => <<"1024KB">>}, + dashboard => #{type => 'wordSize'}, + docgen => #{type => "String", example => <<"1024KB">>} + }; +readable("map(" ++ Map) -> + [$) | _MapArgs] = lists:reverse(Map), + %% TODO: for docgen, parse map args. e.g. Map(String,String) + #{ + swagger => #{type => object, example => #{}}, + dashboard => #{type => object}, + docgen => #{type => "Map", example => #{}} + }; +readable("qos()") -> + #{ + swagger => #{type => integer, minimum => 0, maximum => 2, example => 0}, + dashboard => #{type => enum, symbols => [0, 1, 2]}, + docgen => #{type => "Integer(0..2)", example => 0} + }; +readable("comma_separated_list()") -> + #{ + swagger => #{type => string, example => <<"item1,item2">>}, + dashboard => #{type => comma_separated_string}, + docgen => #{type => "String", example => <<"item1,item2">>} + }; +readable("comma_separated_binary()") -> + #{ + swagger => #{type => string, example => <<"item1,item2">>}, + dashboard => #{type => comma_separated_string}, + docgen => #{type => "String", example => <<"item1,item2">>} + }; +readable("comma_separated_atoms()") -> + #{ + swagger => #{type => string, example => <<"item1,item2">>}, + dashboard => #{type => comma_separated_string}, + docgen => #{type => "String", example => <<"item1,item2">>} + }; +readable("service_account_json()") -> + %% This is a bit special, + %% service_account_josn in swagger spec is an object + %% the same in documenation. + %% However, dashboard wish it to be a string + %% TODO: + %% - Change type definition to stirng(). + %% - Convert the embedded object to a escaped JSON string. + %% - Delete this function clause once the above is done. + #{ + swagger => #{type => object}, + dashboard => #{type => string}, + docgen => #{type => "Map"} + }; +readable("json_binary()") -> + #{ + swagger => #{type => string, example => <<"{\"a\": [1,true]}">>}, + dashboard => #{type => string}, + docgen => #{type => "String", example => <<"{\"a\": [1,true]}">>} + }; +readable("port_number()") -> + Result = try_range("1..65535"), + true = is_map(Result), + Result; +readable("secret()") -> + #{ + swagger => #{type => string, example => <<"R4ND0M/S∃CЯ∃T"/utf8>>}, + dashboard => #{type => string}, + docgen => #{type => "String", example => <<"R4ND0M/S∃CЯ∃T"/utf8>>} + }; +readable(TypeStr0) -> + case string:split(TypeStr0, ":") of + [ModuleStr, TypeStr] -> + Module = list_to_existing_atom(ModuleStr), + readable(Module, TypeStr); + _ -> + parse(TypeStr0) + end. + +parse(TypeStr) -> + try_parse(TypeStr, [ + fun try_typerefl_array/1, + fun try_range/1 + ]). 
+ +try_parse(_TypeStr, []) -> + throw(unknown_type); +try_parse(TypeStr, [ParseFun | More]) -> + case ParseFun(TypeStr) of + nomatch -> + try_parse(TypeStr, More); + Result -> + Result + end. + +%% [string()] or [integer()] or [xxx] or [xxx,...] +try_typerefl_array(Name) -> + case string:trim(Name, leading, "[") of + Name -> + nomatch; + Name1 -> + case string:trim(Name1, trailing, ",.]") of + Name1 -> + nomatch; + Name2 -> + Flavors = readable(Name2), + DocgenSpec = maps:get(docgen, Flavors), + DocgenType = maps:get(type, DocgenSpec), + #{ + swagger => #{type => array, items => maps:get(swagger, Flavors)}, + dashboard => #{type => array, items => maps:get(dashboard, Flavors)}, + docgen => #{type => "Array(" ++ DocgenType ++ ")"} + } + end + end. + +try_range(Name) -> + case string:split(Name, "..") of + %% 1..10 1..inf -inf..10 + [MinStr, MaxStr] -> + Schema0 = #{type => integer}, + Schema1 = add_integer_prop(Schema0, minimum, MinStr), + Schema = add_integer_prop(Schema1, maximum, MaxStr), + #{ + swagger => Schema, + dashboard => Schema, + docgen => #{type => "Integer(" ++ MinStr ++ ".." ++ MaxStr ++ ")"} + }; + _ -> + nomatch + end. + +add_integer_prop(Schema, Key, Value) -> + case string:to_integer(Value) of + {error, no_integer} -> Schema; + {Int, []} -> Schema#{Key => Int} + end. diff --git a/apps/emqx_connector/src/emqx_connector.app.src b/apps/emqx_connector/src/emqx_connector.app.src index 6b462986b..cc78829e7 100644 --- a/apps/emqx_connector/src/emqx_connector.app.src +++ b/apps/emqx_connector/src/emqx_connector.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_connector, [ {description, "EMQX Data Integration Connectors"}, - {vsn, "0.1.33"}, + {vsn, "0.1.34"}, {registered, []}, {mod, {emqx_connector_app, []}}, {applications, [ diff --git a/apps/emqx_connector/src/emqx_connector_schema_lib.erl b/apps/emqx_connector/src/emqx_connector_schema_lib.erl index d2357b360..76a06cb5a 100644 --- a/apps/emqx_connector/src/emqx_connector_schema_lib.erl +++ b/apps/emqx_connector/src/emqx_connector_schema_lib.erl @@ -20,6 +20,7 @@ -include_lib("hocon/include/hoconsc.hrl"). -export([ + pool_size/1, relational_db_fields/0, ssl_fields/0, prepare_statement_fields/0, @@ -28,20 +29,17 @@ ]). -export([ - pool_size/1, database/1, username/1, auto_reconnect/1 ]). -type database() :: binary(). --type pool_size() :: pos_integer(). -type username() :: binary(). -type password() :: binary(). -reflect_type([ database/0, - pool_size/0, username/0, password/0 ]). diff --git a/apps/emqx_dashboard/src/emqx_dashboard.app.src b/apps/emqx_dashboard/src/emqx_dashboard.app.src index 2e3eb1d32..97691c6cd 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard.app.src +++ b/apps/emqx_dashboard/src/emqx_dashboard.app.src @@ -2,7 +2,7 @@ {application, emqx_dashboard, [ {description, "EMQX Web Dashboard"}, % strict semver, bump manually!
- {vsn, "5.0.29"}, + {vsn, "5.0.30"}, {modules, []}, {registered, [emqx_dashboard_sup]}, {applications, [ diff --git a/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl b/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl index f1759fb2d..c1379d4d6 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl +++ b/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl @@ -345,15 +345,7 @@ parse_spec_ref(Module, Path, Options) -> erlang:apply(Module, schema, [Path]) catch Error:Reason:Stacktrace -> - %% This error is intended to fail the build - %% hence print to standard_error - io:format( - standard_error, - "Failed to generate swagger for path ~p in module ~p~n" - "error:~p~nreason:~p~n~p~n", - [Module, Path, Error, Reason, Stacktrace] - ), - error({failed_to_generate_swagger_spec, Module, Path}) + failed_to_generate_swagger_spec(Module, Path, Error, Reason, Stacktrace) end, OperationId = maps:get('operationId', Schema), {Specs, Refs} = maps:fold( @@ -369,6 +361,24 @@ parse_spec_ref(Module, Path, Options) -> RouteOpts = generate_route_opts(Schema, Options), {OperationId, Specs, Refs, RouteOpts}. +-ifdef(TEST). +-spec failed_to_generate_swagger_spec(_, _, _, _, _) -> no_return(). +failed_to_generate_swagger_spec(Module, Path, _Error, _Reason, _Stacktrace) -> + error({failed_to_generate_swagger_spec, Module, Path}). +-else. +-spec failed_to_generate_swagger_spec(_, _, _, _, _) -> no_return(). +failed_to_generate_swagger_spec(Module, Path, Error, Reason, Stacktrace) -> + %% This error is intended to fail the build + %% hence print to standard_error + io:format( + standard_error, + "Failed to generate swagger for path ~p in module ~p~n" + "error:~p~nreason:~p~n~p~n", + [Module, Path, Error, Reason, Stacktrace] + ), + error({failed_to_generate_swagger_spec, Module, Path}). + +-endif. generate_route_opts(Schema, Options) -> #{filter => compose_filters(filter(Options), custom_filter(Schema))}. @@ -776,7 +786,7 @@ hocon_schema_to_spec(?MAP(Name, Type), LocalModule) -> }, SubRefs }; -hocon_schema_to_spec(?UNION(Types), LocalModule) -> +hocon_schema_to_spec(?UNION(Types, _DisplayName), LocalModule) -> {OneOf, Refs} = lists:foldl( fun(Type, {Acc, RefsAcc}) -> {Schema, SubRefs} = hocon_schema_to_spec(Type, LocalModule), @@ -789,193 +799,8 @@ hocon_schema_to_spec(?UNION(Types), LocalModule) -> hocon_schema_to_spec(Atom, _LocalModule) when is_atom(Atom) -> {#{type => string, enum => [Atom]}, []}. 
-typename_to_spec("term()", _Mod) -> - #{type => string, example => <<"any">>}; -typename_to_spec("boolean()", _Mod) -> - #{type => boolean}; -typename_to_spec("binary()", _Mod) -> - #{type => string}; -typename_to_spec("float()", _Mod) -> - #{type => number}; -typename_to_spec("integer()", _Mod) -> - #{type => integer}; -typename_to_spec("non_neg_integer()", _Mod) -> - #{type => integer, minimum => 0}; -typename_to_spec("pos_integer()", _Mod) -> - #{type => integer, minimum => 1}; -typename_to_spec("number()", _Mod) -> - #{type => number}; -typename_to_spec("string()", _Mod) -> - #{type => string}; -typename_to_spec("atom()", _Mod) -> - #{type => string}; -typename_to_spec("epoch_second()", _Mod) -> - #{ - <<"oneOf">> => [ - #{type => integer, example => 1640995200, description => <<"epoch-second">>}, - #{type => string, example => <<"2022-01-01T00:00:00.000Z">>, format => <<"date-time">>} - ] - }; -typename_to_spec("epoch_millisecond()", _Mod) -> - #{ - <<"oneOf">> => [ - #{type => integer, example => 1640995200000, description => <<"epoch-millisecond">>}, - #{type => string, example => <<"2022-01-01T00:00:00.000Z">>, format => <<"date-time">>} - ] - }; -typename_to_spec("duration()", _Mod) -> - #{type => string, example => <<"12m">>}; -typename_to_spec("duration_s()", _Mod) -> - #{type => string, example => <<"1h">>}; -typename_to_spec("duration_ms()", _Mod) -> - #{type => string, example => <<"32s">>}; -typename_to_spec("timeout_duration()", _Mod) -> - #{type => string, example => <<"12m">>}; -typename_to_spec("timeout_duration_s()", _Mod) -> - #{type => string, example => <<"1h">>}; -typename_to_spec("timeout_duration_ms()", _Mod) -> - #{type => string, example => <<"32s">>}; -typename_to_spec("percent()", _Mod) -> - #{type => number, example => <<"12%">>}; -typename_to_spec("file()", _Mod) -> - #{type => string, example => <<"/path/to/file">>}; -typename_to_spec("ip_port()", _Mod) -> - #{type => string, example => <<"127.0.0.1:80">>}; -typename_to_spec("write_syntax()", _Mod) -> - #{ - type => string, - example => - <<"${topic},clientid=${clientid}", " ", "payload=${payload},", - "${clientid}_int_value=${payload.int_key}i,", "bool=${payload.bool}">> - }; -typename_to_spec("url()", _Mod) -> - #{type => string, example => <<"http://127.0.0.1">>}; -typename_to_spec("connect_timeout()", Mod) -> - typename_to_spec("timeout()", Mod); -typename_to_spec("timeout()", _Mod) -> - #{ - <<"oneOf">> => [ - #{type => string, example => infinity}, - #{type => integer} - ], - example => infinity - }; -typename_to_spec("bytesize()", _Mod) -> - #{type => string, example => <<"32MB">>}; -typename_to_spec("wordsize()", _Mod) -> - #{type => string, example => <<"1024KB">>}; -typename_to_spec("map()", _Mod) -> - #{type => object, example => #{}}; -typename_to_spec("service_account_json()", _Mod) -> - #{type => object, example => #{}}; -typename_to_spec("#{" ++ _, Mod) -> - typename_to_spec("map()", Mod); -typename_to_spec("qos()", _Mod) -> - #{type => integer, minimum => 0, maximum => 2, example => 0}; -typename_to_spec("{binary(), binary()}", _Mod) -> - #{type => object, example => #{}}; -typename_to_spec("{string(), string()}", _Mod) -> - #{type => object, example => #{}}; -typename_to_spec("comma_separated_list()", _Mod) -> - #{type => string, example => <<"item1,item2">>}; -typename_to_spec("comma_separated_binary()", _Mod) -> - #{type => string, example => <<"item1,item2">>}; -typename_to_spec("comma_separated_atoms()", _Mod) -> - #{type => string, example => <<"item1,item2">>}; 
-typename_to_spec("pool_type()", _Mod) -> - #{type => string, enum => [random, hash]}; -typename_to_spec("log_level()", _Mod) -> - #{ - type => string, - enum => [debug, info, notice, warning, error, critical, alert, emergency, all] - }; -typename_to_spec("rate()", _Mod) -> - #{type => string, example => <<"10MB">>}; -typename_to_spec("burst()", _Mod) -> - #{type => string, example => <<"100MB">>}; -typename_to_spec("burst_rate()", _Mod) -> - %% 0/0s = no burst - #{type => string, example => <<"10MB">>}; -typename_to_spec("failure_strategy()", _Mod) -> - #{type => string, example => <<"force">>}; -typename_to_spec("initial()", _Mod) -> - #{type => string, example => <<"0MB">>}; -typename_to_spec("bucket_name()", _Mod) -> - #{type => string, example => <<"retainer">>}; -typename_to_spec("json_binary()", _Mod) -> - #{type => string, example => <<"{\"a\": [1,true]}">>}; -typename_to_spec("port_number()", _Mod) -> - range("1..65535"); -typename_to_spec("secret_access_key()", _Mod) -> - #{type => string, example => <<"TW8dPwmjpjJJuLW....">>}; -typename_to_spec("secret()", _Mod) -> - %% TODO: ideally, this should be dispatched to the module that defines this type - #{type => string, example => <<"R4ND0M/S∃CЯ∃T"/utf8>>}; -typename_to_spec(Name, Mod) -> - try_convert_to_spec(Name, Mod, [ - fun try_remote_module_type/2, - fun try_typerefl_array/2, - fun try_range/2, - fun try_integer/2 - ]). - -range(Name) -> - #{} = try_range(Name, undefined). - -try_convert_to_spec(Name, Mod, []) -> - throw({error, #{msg => <<"Unsupported Type">>, type => Name, module => Mod}}); -try_convert_to_spec(Name, Mod, [Converter | Rest]) -> - case Converter(Name, Mod) of - nomatch -> try_convert_to_spec(Name, Mod, Rest); - Spec -> Spec - end. - -try_range(Name, _Mod) -> - case string:split(Name, "..") of - %% 1..10 1..inf -inf..10 - [MinStr, MaxStr] -> - Schema = #{type => integer}, - Schema1 = add_integer_prop(Schema, minimum, MinStr), - add_integer_prop(Schema1, maximum, MaxStr); - _ -> - nomatch - end. - -%% Module:Type -try_remote_module_type(Name, Mod) -> - case string:split(Name, ":") of - [_Module, Type] -> typename_to_spec(Type, Mod); - _ -> nomatch - end. - -%% [string()] or [integer()] or [xxx] or [xxx,...] -try_typerefl_array(Name, Mod) -> - case string:trim(Name, leading, "[") of - Name -> - nomatch; - Name1 -> - case string:trim(Name1, trailing, ",.]") of - Name1 -> - notmatch; - Name2 -> - Schema = typename_to_spec(Name2, Mod), - #{type => array, items => Schema} - end - end. - -%% integer(1) -try_integer(Name, _Mod) -> - case string:to_integer(Name) of - {Int, []} -> #{type => integer, enum => [Int], default => Int}; - _ -> nomatch - end. - -add_integer_prop(Schema, Key, Value) -> - case string:to_integer(Value) of - {error, no_integer} -> Schema; - {Int, []} when Key =:= minimum -> Schema#{Key => Int}; - {Int, []} -> Schema#{Key => Int} - end. +typename_to_spec(TypeStr, Module) -> + emqx_conf_schema_types:readable_swagger(Module, TypeStr). 
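%% Illustration only: with the delegation above, a type string such as "bytesize()"
%% is now resolved through the shared table in emqx_conf_schema_types, e.g.
%% (a sketch, per the clauses defined in that module):
%%
%%   typename_to_spec("bytesize()", Mod) =:= #{type => string, example => <<"32MB">>}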
to_bin(List) when is_list(List) -> case io_lib:printable_list(List) of diff --git a/apps/emqx_dashboard/test/emqx_swagger_requestBody_SUITE.erl b/apps/emqx_dashboard/test/emqx_swagger_requestBody_SUITE.erl index 2457cd56a..b5c55622b 100644 --- a/apps/emqx_dashboard/test/emqx_swagger_requestBody_SUITE.erl +++ b/apps/emqx_dashboard/test/emqx_swagger_requestBody_SUITE.erl @@ -816,7 +816,7 @@ to_schema(Body) -> fields(good_ref) -> [ {'webhook-host', mk(emqx_schema:ip_port(), #{default => <<"127.0.0.1:80">>})}, - {log_dir, mk(emqx_schema:file(), #{example => "var/log/emqx"})}, + {log_dir, mk(string(), #{example => "var/log/emqx"})}, {tag, mk(binary(), #{desc => <<"tag">>})} ]; fields(nest_ref) -> diff --git a/apps/emqx_dashboard/test/emqx_swagger_response_SUITE.erl b/apps/emqx_dashboard/test/emqx_swagger_response_SUITE.erl index d84f17c44..5987ad8fa 100644 --- a/apps/emqx_dashboard/test/emqx_swagger_response_SUITE.erl +++ b/apps/emqx_dashboard/test/emqx_swagger_response_SUITE.erl @@ -317,68 +317,68 @@ t_sub_fields(_Config) -> validate(Path, Object, ExpectRefs), ok. -t_complicated_type(_Config) -> +t_complex_type(_Config) -> Path = "/ref/complex_type", - Object = #{ - <<"content">> => #{ - <<"application/json">> => - #{ - <<"schema">> => #{ - <<"properties">> => - [ - {<<"no_neg_integer">>, #{minimum => 0, type => integer}}, - {<<"url">>, #{example => <<"http://127.0.0.1">>, type => string}}, - {<<"server">>, #{example => <<"127.0.0.1:80">>, type => string}}, - {<<"connect_timeout">>, #{ - example => infinity, - <<"oneOf">> => [ - #{example => infinity, type => string}, - #{type => integer} - ] - }}, - {<<"pool_type">>, #{enum => [random, hash], type => string}}, - {<<"timeout">>, #{ - example => infinity, - <<"oneOf">> => [ - #{example => infinity, type => string}, #{type => integer} - ] - }}, - {<<"bytesize">>, #{example => <<"32MB">>, type => string}}, - {<<"wordsize">>, #{example => <<"1024KB">>, type => string}}, - {<<"maps">>, #{example => #{}, type => object}}, - {<<"comma_separated_list">>, #{ - example => <<"item1,item2">>, type => string - }}, - {<<"comma_separated_atoms">>, #{ - example => <<"item1,item2">>, type => string - }}, - {<<"log_level">>, #{ - enum => [ - debug, - info, - notice, - warning, - error, - critical, - alert, - emergency, - all - ], - type => string - }}, - {<<"fix_integer">>, #{ - default => 100, enum => [100], type => integer - }} - ], - <<"type">> => object - } - } - } - }, {OperationId, Spec, Refs, #{}} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, Path, #{}), ?assertEqual(test, OperationId), Response = maps:get(responses, maps:get(post, Spec)), - ?assertEqual(Object, maps:get(<<"200">>, Response)), + ResponseBody = maps:get(<<"200">>, Response), + Content = maps:get(<<"content">>, ResponseBody), + JsonContent = maps:get(<<"application/json">>, Content), + Schema = maps:get(<<"schema">>, JsonContent), + ?assertMatch(#{<<"type">> := object}, Schema), + Properties = maps:get(<<"properties">>, Schema), + ?assertMatch( + [ + {<<"no_neg_integer">>, #{minimum := 0, type := integer}}, + {<<"url">>, #{ + example := <<"http://127.0.0.1">>, type := string + }}, + {<<"server">>, #{ + example := <<"127.0.0.1:80">>, type := string + }}, + {<<"connect_timeout">>, #{ + example := _, type := string + }}, + {<<"pool_type">>, #{ + enum := [random, hash], type := string + }}, + {<<"timeout">>, #{ + <<"oneOf">> := [ + #{example := _, type := string}, + #{enum := [infinity], type := string} + ] + }}, + {<<"bytesize">>, #{ + example := <<"32MB">>, type := string + }}, + 
{<<"wordsize">>, #{ + example := <<"1024KB">>, type := string + }}, + {<<"maps">>, #{example := #{}, type := object}}, + {<<"comma_separated_list">>, #{ + example := <<"item1,item2">>, type := string + }}, + {<<"comma_separated_atoms">>, #{ + example := <<"item1,item2">>, type := string + }}, + {<<"log_level">>, #{ + enum := [ + debug, + info, + notice, + warning, + error, + critical, + alert, + emergency, + all + ], + type := string + }} + ], + Properties + ), ?assertEqual([], Refs), ok. @@ -410,7 +410,7 @@ t_ref_array_with_key(_Config) -> {<<"percent_ex">>, #{ description => <<"percent example">>, example => <<"12%">>, - type => number + type => string }}, {<<"duration_ms_ex">>, #{ description => <<"duration ms example">>, @@ -647,17 +647,16 @@ schema("/ref/complex_type") -> {no_neg_integer, hoconsc:mk(non_neg_integer(), #{})}, {url, hoconsc:mk(url(), #{})}, {server, hoconsc:mk(emqx_schema:ip_port(), #{})}, - {connect_timeout, - hoconsc:mk(emqx_bridge_http_connector:connect_timeout(), #{})}, - {pool_type, hoconsc:mk(emqx_bridge_http_connector:pool_type(), #{})}, - {timeout, hoconsc:mk(timeout(), #{})}, + {connect_timeout, hoconsc:mk(emqx_schema:timeout_duration(), #{})}, + {pool_type, hoconsc:mk(hoconsc:enum([random, hash]), #{})}, + {timeout, + hoconsc:mk(hoconsc:union([infinity, emqx_schema:timeout_duration()]), #{})}, {bytesize, hoconsc:mk(emqx_schema:bytesize(), #{})}, {wordsize, hoconsc:mk(emqx_schema:wordsize(), #{})}, {maps, hoconsc:mk(map(), #{})}, {comma_separated_list, hoconsc:mk(emqx_schema:comma_separated_list(), #{})}, {comma_separated_atoms, hoconsc:mk(emqx_schema:comma_separated_atoms(), #{})}, - {log_level, hoconsc:mk(emqx_conf_schema:log_level(), #{})}, - {fix_integer, hoconsc:mk(typerefl:integer(100), #{})} + {log_level, hoconsc:mk(emqx_conf_schema:log_level(), #{})} ] } } @@ -684,7 +683,7 @@ to_schema(Object) -> fields(good_ref) -> [ {'webhook-host', mk(emqx_schema:ip_port(), #{default => <<"127.0.0.1:80">>})}, - {log_dir, mk(emqx_schema:file(), #{example => "var/log/emqx"})}, + {log_dir, mk(string(), #{example => "var/log/emqx"})}, {tag, mk(binary(), #{desc => <<"tag">>})} ]; fields(nest_ref) -> diff --git a/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_ldap.erl b/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_ldap.erl index b6bdcf744..faa87b80e 100644 --- a/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_ldap.erl +++ b/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_ldap.erl @@ -12,6 +12,7 @@ -behaviour(emqx_dashboard_sso). -export([ + namespace/0, fields/1, desc/1 ]). @@ -30,6 +31,9 @@ %% Hocon Schema %%------------------------------------------------------------------------------ +namespace() -> + "sso". + hocon_ref() -> hoconsc:ref(?MODULE, ldap). diff --git a/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_schema.erl b/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_schema.erl index 92f9ba519..aa032a3cc 100644 --- a/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_schema.erl +++ b/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_schema.erl @@ -8,7 +8,7 @@ -include_lib("typerefl/include/types.hrl"). %% Hocon --export([fields/1, desc/1]). +-export([namespace/0, fields/1, desc/1]). -export([ common_backend_schema/1, @@ -21,6 +21,8 @@ %%------------------------------------------------------------------------------ %% Hocon Schema %%------------------------------------------------------------------------------ +namespace() -> "sso". 
+ fields(sso) -> lists:map( fun({Type, Module}) -> diff --git a/apps/emqx_durable_storage/src/emqx_ds.erl b/apps/emqx_durable_storage/src/emqx_ds.erl index 27a0745bc..725d62673 100644 --- a/apps/emqx_durable_storage/src/emqx_ds.erl +++ b/apps/emqx_durable_storage/src/emqx_ds.erl @@ -28,7 +28,7 @@ -export([store_batch/2, store_batch/3]). %% Message replay API: --export([get_streams/3, make_iterator/3, next/2]). +-export([get_streams/3, make_iterator/4, next/3]). %% Misc. API: -export([]). @@ -100,6 +100,26 @@ -type get_iterator_result(Iterator) :: {ok, Iterator} | undefined. +-define(persistent_term(DB), {emqx_ds_db_backend, DB}). + +-define(module(DB), (persistent_term:get(?persistent_term(DB)))). + +%%================================================================================ +%% Behavior callbacks +%%================================================================================ + +-callback open_db(db(), create_db_opts()) -> ok | {error, _}. + +-callback drop_db(db()) -> ok | {error, _}. + +-callback store_batch(db(), [emqx_types:message()], message_store_opts()) -> store_batch_result(). + +-callback get_streams(db(), topic_filter(), time()) -> [{stream_rank(), stream()}]. + +-callback make_iterator(db(), _Stream, topic_filter(), time()) -> make_iterator_result(_Iterator). + +-callback next(db(), Iterator, pos_integer()) -> next_result(Iterator). + %%================================================================================ %% API funcions %%================================================================================ @@ -107,19 +127,29 @@ %% @doc Different DBs are completely independent from each other. They %% could represent something like different tenants. -spec open_db(db(), create_db_opts()) -> ok. -open_db(DB, Opts = #{backend := builtin}) -> - emqx_ds_replication_layer:open_db(DB, Opts). +open_db(DB, Opts = #{backend := Backend}) when Backend =:= builtin -> + Module = + case Backend of + builtin -> emqx_ds_replication_layer + end, + persistent_term:put(?persistent_term(DB), Module), + ?module(DB):open_db(DB, Opts). %% @doc TODO: currently if one or a few shards are down, they won't be %% deleted. -spec drop_db(db()) -> ok. drop_db(DB) -> - emqx_ds_replication_layer:drop_db(DB). + case persistent_term:get(?persistent_term(DB), undefined) of + undefined -> + ok; + Module -> + Module:drop_db(DB) + end. -spec store_batch(db(), [emqx_types:message()], message_store_opts()) -> store_batch_result(). store_batch(DB, Msgs, Opts) -> - emqx_ds_replication_layer:store_batch(DB, Msgs, Opts). + ?module(DB):store_batch(DB, Msgs, Opts). -spec store_batch(db(), [emqx_types:message()]) -> store_batch_result(). store_batch(DB, Msgs) -> @@ -168,15 +198,15 @@ store_batch(DB, Msgs) -> %% replaying streams that depend on the given one. -spec get_streams(db(), topic_filter(), time()) -> [{stream_rank(), stream()}]. get_streams(DB, TopicFilter, StartTime) -> - emqx_ds_replication_layer:get_streams(DB, TopicFilter, StartTime). + ?module(DB):get_streams(DB, TopicFilter, StartTime). --spec make_iterator(stream(), topic_filter(), time()) -> make_iterator_result(). -make_iterator(Stream, TopicFilter, StartTime) -> - emqx_ds_replication_layer:make_iterator(Stream, TopicFilter, StartTime). +-spec make_iterator(db(), stream(), topic_filter(), time()) -> make_iterator_result(). +make_iterator(DB, Stream, TopicFilter, StartTime) -> + ?module(DB):make_iterator(DB, Stream, TopicFilter, StartTime). --spec next(iterator(), pos_integer()) -> next_result(). 
-next(Iter, BatchSize) -> - emqx_ds_replication_layer:next(Iter, BatchSize). +-spec next(db(), iterator(), pos_integer()) -> next_result(). +next(DB, Iter, BatchSize) -> + ?module(DB):next(DB, Iter, BatchSize). %%================================================================================ %% Internal exports diff --git a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl index b81f43c4f..a06af104d 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl @@ -18,23 +18,26 @@ %% replication on their own. -module(emqx_ds_replication_layer). +-behaviour(emqx_ds). + -export([ list_shards/1, open_db/2, drop_db/1, store_batch/3, get_streams/3, - make_iterator/3, - next/2 + make_iterator/4, + next/3 ]). %% internal exports: -export([ - do_open_shard_v1/2, - do_drop_shard_v1/1, - do_get_streams_v1/3, - do_make_iterator_v1/4, - do_next_v1/3 + do_open_shard_v1/3, + do_drop_shard_v1/2, + do_store_batch_v1/4, + do_get_streams_v1/4, + do_make_iterator_v1/5, + do_next_v1/4 ]). -export_type([shard_id/0, stream/0, iterator/0, message_id/0]). @@ -47,17 +50,15 @@ %% records over the wire. %% tags: --define(stream, stream). --define(it, it). +-define(STREAM, 1). +-define(IT, 2). %% keys: -define(tag, 1). -define(shard, 2). -define(enc, 3). --type db() :: emqx_ds:db(). - --type shard_id() :: {db(), atom()}. +-type shard_id() :: atom(). %% This enapsulates the stream entity from the replication level. %% @@ -67,14 +68,14 @@ %% account. -opaque stream() :: #{ - ?tag := ?stream, + ?tag := ?STREAM, ?shard := emqx_ds_replication_layer:shard_id(), ?enc := emqx_ds_storage_layer:stream() }. -opaque iterator() :: #{ - ?tag := ?it, + ?tag := ?IT, ?shard := emqx_ds_replication_layer:shard_id(), ?enc := emqx_ds_storage_layer:iterator() }. @@ -85,58 +86,54 @@ %% API functions %%================================================================================ --spec list_shards(db()) -> [shard_id()]. -list_shards(DB) -> +-spec list_shards(emqx_ds:db()) -> [shard_id()]. +list_shards(_DB) -> %% TODO: milestone 5 - lists:map( - fun(Node) -> - shard_id(DB, Node) - end, - list_nodes() - ). + list_nodes(). --spec open_db(db(), emqx_ds:create_db_opts()) -> ok | {error, _}. +-spec open_db(emqx_ds:db(), emqx_ds:create_db_opts()) -> ok | {error, _}. open_db(DB, Opts) -> %% TODO: improve error reporting, don't just crash lists:foreach( - fun(Node) -> - Shard = shard_id(DB, Node), - ok = emqx_ds_proto_v1:open_shard(Node, Shard, Opts) + fun(Shard) -> + Node = node_of_shard(DB, Shard), + ok = emqx_ds_proto_v1:open_shard(Node, DB, Shard, Opts) end, - list_nodes() + list_shards(DB) ). --spec drop_db(db()) -> ok | {error, _}. +-spec drop_db(emqx_ds:db()) -> ok | {error, _}. drop_db(DB) -> lists:foreach( - fun(Node) -> - Shard = shard_id(DB, Node), - ok = emqx_ds_proto_v1:drop_shard(Node, Shard) + fun(Shard) -> + Node = node_of_shard(DB, Shard), + ok = emqx_ds_proto_v1:drop_shard(Node, DB, Shard) end, - list_nodes() + list_shards(DB) ). --spec store_batch(db(), [emqx_types:message()], emqx_ds:message_store_opts()) -> +-spec store_batch(emqx_ds:db(), [emqx_types:message()], emqx_ds:message_store_opts()) -> emqx_ds:store_batch_result(). -store_batch(DB, Msg, Opts) -> +store_batch(DB, Batch, Opts) -> %% TODO: Currently we store messages locally. - Shard = shard_id(DB, node()), - emqx_ds_storage_layer:store_batch(Shard, Msg, Opts). 
+ Shard = node(), + Node = node_of_shard(DB, Shard), + emqx_ds_proto_v1:store_batch(Node, DB, Shard, Batch, Opts). --spec get_streams(db(), emqx_ds:topic_filter(), emqx_ds:time()) -> +-spec get_streams(emqx_ds:db(), emqx_ds:topic_filter(), emqx_ds:time()) -> [{emqx_ds:stream_rank(), stream()}]. get_streams(DB, TopicFilter, StartTime) -> Shards = list_shards(DB), lists:flatmap( fun(Shard) -> - Node = node_of_shard(Shard), - Streams = emqx_ds_proto_v1:get_streams(Node, Shard, TopicFilter, StartTime), + Node = node_of_shard(DB, Shard), + Streams = emqx_ds_proto_v1:get_streams(Node, DB, Shard, TopicFilter, StartTime), lists:map( fun({RankY, Stream}) -> RankX = Shard, Rank = {RankX, RankY}, {Rank, #{ - ?tag => ?stream, + ?tag => ?STREAM, ?shard => Shard, ?enc => Stream }} @@ -147,22 +144,22 @@ get_streams(DB, TopicFilter, StartTime) -> Shards ). --spec make_iterator(stream(), emqx_ds:topic_filter(), emqx_ds:time()) -> +-spec make_iterator(emqx_ds:db(), stream(), emqx_ds:topic_filter(), emqx_ds:time()) -> emqx_ds:make_iterator_result(iterator()). -make_iterator(Stream, TopicFilter, StartTime) -> - #{?tag := ?stream, ?shard := Shard, ?enc := StorageStream} = Stream, - Node = node_of_shard(Shard), - case emqx_ds_proto_v1:make_iterator(Node, Shard, StorageStream, TopicFilter, StartTime) of +make_iterator(DB, Stream, TopicFilter, StartTime) -> + #{?tag := ?STREAM, ?shard := Shard, ?enc := StorageStream} = Stream, + Node = node_of_shard(DB, Shard), + case emqx_ds_proto_v1:make_iterator(Node, DB, Shard, StorageStream, TopicFilter, StartTime) of {ok, Iter} -> - {ok, #{?tag => ?it, ?shard => Shard, ?enc => Iter}}; + {ok, #{?tag => ?IT, ?shard => Shard, ?enc => Iter}}; Err = {error, _} -> Err end. --spec next(iterator(), pos_integer()) -> emqx_ds:next_result(iterator()). -next(Iter0, BatchSize) -> - #{?tag := ?it, ?shard := Shard, ?enc := StorageIter0} = Iter0, - Node = node_of_shard(Shard), +-spec next(emqx_ds:db(), iterator(), pos_integer()) -> emqx_ds:next_result(iterator()). +next(DB, Iter0, BatchSize) -> + #{?tag := ?IT, ?shard := Shard, ?enc := StorageIter0} = Iter0, + Node = node_of_shard(DB, Shard), %% TODO: iterator can contain information that is useful for %% reconstructing messages sent over the network. For example, %% when we send messages with the learned topic index, we could @@ -171,7 +168,7 @@ next(Iter0, BatchSize) -> %% %% This kind of trickery should be probably done here in the %% replication layer. Or, perhaps, in the logic layer. - case emqx_ds_proto_v1:next(Node, Shard, StorageIter0, BatchSize) of + case emqx_ds_proto_v1:next(Node, DB, Shard, StorageIter0, BatchSize) of {ok, StorageIter, Batch} -> Iter = Iter0#{?enc := StorageIter}, {ok, Iter, Batch}; @@ -187,42 +184,61 @@ next(Iter0, BatchSize) -> %% Internal exports (RPC targets) %%================================================================================ --spec do_open_shard_v1(shard_id(), emqx_ds:create_db_opts()) -> ok. -do_open_shard_v1(Shard, Opts) -> - emqx_ds_storage_layer:open_shard(Shard, Opts). +-spec do_open_shard_v1( + emqx_ds:db(), emqx_ds_replication_layer:shard_id(), emqx_ds:create_db_opts() +) -> + ok | {error, _}. +do_open_shard_v1(DB, Shard, Opts) -> + emqx_ds_storage_layer:open_shard({DB, Shard}, Opts). --spec do_drop_shard_v1(shard_id()) -> ok. -do_drop_shard_v1(Shard) -> - emqx_ds_storage_layer:drop_shard(Shard). +-spec do_drop_shard_v1(emqx_ds:db(), emqx_ds_replication_layer:shard_id()) -> ok | {error, _}. +do_drop_shard_v1(DB, Shard) -> + emqx_ds_storage_layer:drop_shard({DB, Shard}). 
--spec do_get_streams_v1(shard_id(), emqx_ds:topic_filter(), emqx_ds:time()) -> +-spec do_store_batch_v1( + emqx_ds:db(), + emqx_ds_replication_layer:shard_id(), + [emqx_types:message()], + emqx_ds:message_store_opts() +) -> + emqx_ds:store_batch_result(). +do_store_batch_v1(DB, Shard, Batch, Options) -> + emqx_ds_storage_layer:store_batch({DB, Shard}, Batch, Options). + +-spec do_get_streams_v1( + emqx_ds:db(), emqx_ds_replicationi_layer:shard_id(), emqx_ds:topic_filter(), emqx_ds:time() +) -> [{integer(), emqx_ds_storage_layer:stream()}]. -do_get_streams_v1(Shard, TopicFilter, StartTime) -> - emqx_ds_storage_layer:get_streams(Shard, TopicFilter, StartTime). +do_get_streams_v1(DB, Shard, TopicFilter, StartTime) -> + emqx_ds_storage_layer:get_streams({DB, Shard}, TopicFilter, StartTime). -spec do_make_iterator_v1( - shard_id(), emqx_ds_storage_layer:stream(), emqx_ds:topic_filter(), emqx_ds:time() + emqx_ds:db(), + emqx_ds_storage_layer:shard_id(), + emqx_ds_storage_layer:stream(), + emqx_ds:topic_filter(), + emqx_ds:time() ) -> {ok, emqx_ds_storage_layer:iterator()} | {error, _}. -do_make_iterator_v1(Shard, Stream, TopicFilter, StartTime) -> - emqx_ds_storage_layer:make_iterator(Shard, Stream, TopicFilter, StartTime). +do_make_iterator_v1(DB, Shard, Stream, TopicFilter, StartTime) -> + emqx_ds_storage_layer:make_iterator({DB, Shard}, Stream, TopicFilter, StartTime). --spec do_next_v1(shard_id(), emqx_ds_storage_layer:iterator(), pos_integer()) -> +-spec do_next_v1( + emqx_ds:db(), + emqx_ds_replication_layer:shard_id(), + emqx_ds_storage_layer:iterator(), + pos_integer() +) -> emqx_ds:next_result(emqx_ds_storage_layer:iterator()). -do_next_v1(Shard, Iter, BatchSize) -> - emqx_ds_storage_layer:next(Shard, Iter, BatchSize). +do_next_v1(DB, Shard, Iter, BatchSize) -> + emqx_ds_storage_layer:next({DB, Shard}, Iter, BatchSize). %%================================================================================ %% Internal functions %%================================================================================ -shard_id(DB, Node) -> - %% TODO: don't bake node name into the schema, don't repeat the - %% Mnesia's 1M$ mistake. - {DB, Node}. - --spec node_of_shard(shard_id()) -> node(). -node_of_shard({_DB, Node}) -> +-spec node_of_shard(emqx_ds:db(), shard_id()) -> node(). +node_of_shard(_DB, Node) -> Node. list_nodes() -> diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl index d2c997ae1..2d4949919 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl @@ -42,8 +42,8 @@ %% records over the wire. %% tags: --define(stream, stream). --define(it, it). +-define(STREAM, 1). +-define(IT, 2). %% keys: -define(tag, 1). @@ -81,13 +81,13 @@ -type stream() :: #{ - ?tag := ?stream, + ?tag := ?STREAM, ?storage_key := emqx_ds_lts:msg_storage_key() }. -type iterator() :: #{ - ?tag := ?it, + ?tag := ?IT, ?topic_filter := emqx_ds:topic_filter(), ?start_time := emqx_ds:time(), ?storage_key := emqx_ds_lts:msg_storage_key(), @@ -110,7 +110,7 @@ %%================================================================================ -spec create( - emqx_ds_replication_layer:shard_id(), + emqx_ds_storage_layer:shard_id(), rocksdb:db_handle(), emqx_ds_storage_layer:gen_id(), options() @@ -137,7 +137,7 @@ create(_ShardId, DBHandle, GenId, Options) -> {Schema, [{DataCFName, DataCFHandle}, {TrieCFName, TrieCFHandle}]}. 
-spec open( - emqx_ds_replication_layer:shard_id(), + emqx_ds_storage_layer:shard_id(), rocksdb:db_handle(), emqx_ds_storage_layer:gen_id(), emqx_ds_storage_layer:cf_refs(), @@ -173,7 +173,7 @@ open(_Shard, DBHandle, GenId, CFRefs, Schema) -> }. -spec store_batch( - emqx_ds_replication_layer:shard_id(), s(), [emqx_types:message()], emqx_ds:message_store_opts() + emqx_ds_storage_layer:shard_id(), s(), [emqx_types:message()], emqx_ds:message_store_opts() ) -> emqx_ds:store_batch_result(). store_batch(_ShardId, S = #s{db = DB, data = Data}, Messages, _Options) -> @@ -187,30 +187,30 @@ store_batch(_ShardId, S = #s{db = DB, data = Data}, Messages, _Options) -> ). -spec get_streams( - emqx_ds_replication_layer:shard_id(), + emqx_ds_storage_layer:shard_id(), s(), emqx_ds:topic_filter(), emqx_ds:time() ) -> [stream()]. get_streams(_Shard, #s{trie = Trie}, TopicFilter, _StartTime) -> Indexes = emqx_ds_lts:match_topics(Trie, TopicFilter), - [#{?tag => ?stream, ?storage_key => I} || I <- Indexes]. + [#{?tag => ?STREAM, ?storage_key => I} || I <- Indexes]. -spec make_iterator( - emqx_ds_replication_layer:shard_id(), + emqx_ds_storage_layer:shard_id(), s(), stream(), emqx_ds:topic_filter(), emqx_ds:time() ) -> {ok, iterator()}. make_iterator( - _Shard, _Data, #{?tag := ?stream, ?storage_key := StorageKey}, TopicFilter, StartTime + _Shard, _Data, #{?tag := ?STREAM, ?storage_key := StorageKey}, TopicFilter, StartTime ) -> %% Note: it's a good idea to keep the iterator structure lean, %% since it can be stored on a remote node that could update its %% code independently from us. {ok, #{ - ?tag => ?it, + ?tag => ?IT, ?topic_filter => TopicFilter, ?start_time => StartTime, ?storage_key => StorageKey, @@ -225,7 +225,7 @@ next(_Shard, Schema = #s{ts_offset = TSOffset}, It, BatchSize) -> SafeCutoffTime = (Now bsr TSOffset) bsl TSOffset, next_until(Schema, It, SafeCutoffTime, BatchSize). -next_until(_Schema, It = #{?tag := ?it, ?start_time := StartTime}, SafeCutoffTime, _BatchSize) when +next_until(_Schema, It = #{?tag := ?IT, ?start_time := StartTime}, SafeCutoffTime, _BatchSize) when StartTime >= SafeCutoffTime -> %% We're in the middle of the current epoch, so we can't yet iterate over it. 
@@ -235,7 +235,7 @@ next_until(_Schema, It = #{?tag := ?it, ?start_time := StartTime}, SafeCutoffTim {ok, It, []}; next_until(#s{db = DB, data = CF, keymappers = Keymappers}, It, SafeCutoffTime, BatchSize) -> #{ - ?tag := ?it, + ?tag := ?IT, ?start_time := StartTime, ?storage_key := {TopicIndex, Varying} } = It, @@ -286,7 +286,7 @@ next_loop(_ITHandle, _KeyMapper, _Filter, _Cutoff, It, Acc, 0) -> {ok, It, lists:reverse(Acc)}; next_loop(ITHandle, KeyMapper, Filter, Cutoff, It0, Acc0, N0) -> inc_counter(), - #{?tag := ?it, ?last_seen_key := Key0} = It0, + #{?tag := ?IT, ?last_seen_key := Key0} = It0, case emqx_ds_bitmask_keymapper:bin_increment(Filter, Key0) of overflow -> {ok, It0, lists:reverse(Acc0)}; @@ -346,7 +346,7 @@ check_message( overflow; check_message( _Cutoff, - #{?tag := ?it, ?start_time := StartTime, ?topic_filter := TopicFilter}, + #{?tag := ?IT, ?start_time := StartTime, ?topic_filter := TopicFilter}, #message{timestamp = Timestamp, topic = Topic} ) when Timestamp >= StartTime -> emqx_topic:match(emqx_topic:words(Topic), TopicFilter); diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl index c91ac49d5..0fe719dbc 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl @@ -38,8 +38,8 @@ %% records over the wire. %% tags: --define(stream, stream). --define(it, it). +-define(STREAM, 1). +-define(IT, 2). %% keys: -define(tag, 1). @@ -50,7 +50,7 @@ {emqx_ds_storage_reference, emqx_ds_storage_reference:options()} | {emqx_ds_storage_bitfield_lts, emqx_ds_storage_bitfield_lts:options()}. --type shard_id() :: emqx_ds_replication_layer:shard_id(). +-type shard_id() :: {emqx_ds:db(), emqx_ds_replication_layer:shard_id()}. -type cf_refs() :: [{string(), rocksdb:cf_handle()}]. @@ -59,7 +59,7 @@ %% Note: this might be stored permanently on a remote node. -opaque stream() :: #{ - ?tag := ?stream, + ?tag := ?STREAM, ?generation := gen_id(), ?enc := term() }. @@ -67,7 +67,7 @@ %% Note: this might be stored permanently on a remote node. -opaque iterator() :: #{ - ?tag := ?it, + ?tag := ?IT, ?generation := gen_id(), ?enc := term() }. @@ -165,7 +165,7 @@ get_streams(Shard, TopicFilter, StartTime) -> Streams = Mod:get_streams(Shard, GenData, TopicFilter, StartTime), [ {GenId, #{ - ?tag => ?stream, + ?tag => ?STREAM, ?generation => GenId, ?enc => Stream }} @@ -178,13 +178,13 @@ get_streams(Shard, TopicFilter, StartTime) -> -spec make_iterator(shard_id(), stream(), emqx_ds:topic_filter(), emqx_ds:time()) -> emqx_ds:make_iterator_result(iterator()). make_iterator( - Shard, #{?tag := ?stream, ?generation := GenId, ?enc := Stream}, TopicFilter, StartTime + Shard, #{?tag := ?STREAM, ?generation := GenId, ?enc := Stream}, TopicFilter, StartTime ) -> #{module := Mod, data := GenData} = generation_get(Shard, GenId), case Mod:make_iterator(Shard, GenData, Stream, TopicFilter, StartTime) of {ok, Iter} -> {ok, #{ - ?tag => ?it, + ?tag => ?IT, ?generation => GenId, ?enc => Iter }}; @@ -194,7 +194,7 @@ make_iterator( -spec next(shard_id(), iterator(), pos_integer()) -> emqx_ds:next_result(iterator()). 
-next(Shard, Iter = #{?tag := ?it, ?generation := GenId, ?enc := GenIter0}, BatchSize) -> +next(Shard, Iter = #{?tag := ?IT, ?generation := GenId, ?enc := GenIter0}, BatchSize) -> #{module := Mod, data := GenData} = generation_get(Shard, GenId), Current = generation_current(Shard), case Mod:next(Shard, GenData, GenIter0, BatchSize) of @@ -217,7 +217,7 @@ next(Shard, Iter = #{?tag := ?it, ?generation := GenId, ?enc := GenIter0}, Batch -spec start_link(shard_id(), emqx_ds:builtin_db_opts()) -> {ok, pid()}. -start_link(Shard, Options) -> +start_link(Shard = {_, _}, Options) -> gen_server:start_link(?REF(Shard), ?MODULE, {Shard, Options}, []). -record(s, { @@ -417,11 +417,11 @@ generations_since(Shard, Since) -> -define(PERSISTENT_TERM(SHARD), {emqx_ds_storage_layer, SHARD}). -spec get_schema_runtime(shard_id()) -> shard(). -get_schema_runtime(Shard) -> +get_schema_runtime(Shard = {_, _}) -> persistent_term:get(?PERSISTENT_TERM(Shard)). -spec put_schema_runtime(shard_id(), shard()) -> ok. -put_schema_runtime(Shard, RuntimeSchema) -> +put_schema_runtime(Shard = {_, _}, RuntimeSchema) -> persistent_term:put(?PERSISTENT_TERM(Shard), RuntimeSchema), ok. diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_layer_sup.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_layer_sup.erl index fac7204bf..c2eee8dcb 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_layer_sup.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_layer_sup.erl @@ -25,7 +25,7 @@ start_link() -> supervisor:start_link({local, ?SUP}, ?MODULE, []). --spec start_shard(emqx_ds_replication_layer:shard_id(), emqx_ds:create_db_opts()) -> +-spec start_shard(emqx_ds_storage_layer:shard_id(), emqx_ds:create_db_opts()) -> supervisor:startchild_ret(). start_shard(Shard, Options) -> supervisor:start_child(?SUP, shard_child_spec(Shard, Options)). @@ -35,7 +35,8 @@ stop_shard(Shard) -> ok = supervisor:terminate_child(?SUP, Shard), ok = supervisor:delete_child(?SUP, Shard). --spec ensure_shard(emqx_ds:shard(), emqx_ds_storage_layer:options()) -> ok | {error, _Reason}. +-spec ensure_shard(emqx_ds_storage_layer:shard_id(), emqx_ds_storage_layer:options()) -> + ok | {error, _Reason}. ensure_shard(Shard, Options) -> case start_shard(Shard, Options) of {ok, _Pid} -> @@ -63,7 +64,7 @@ init([]) -> %% Internal functions %%================================================================================ --spec shard_child_spec(emqx_ds_replication_layer:shard_id(), emqx_ds:create_db_opts()) -> +-spec shard_child_spec(emqx_ds_storage_layer:shard_id(), emqx_ds:create_db_opts()) -> supervisor:child_spec(). shard_child_spec(Shard, Options) -> #{ diff --git a/apps/emqx_durable_storage/src/emqx_durable_storage.app.src b/apps/emqx_durable_storage/src/emqx_durable_storage.app.src index f106494c8..2bce4ff8e 100644 --- a/apps/emqx_durable_storage/src/emqx_durable_storage.app.src +++ b/apps/emqx_durable_storage/src/emqx_durable_storage.app.src @@ -2,7 +2,7 @@ {application, emqx_durable_storage, [ {description, "Message persistence and subscription replays for EMQX"}, % strict semver, bump manually! 
- {vsn, "0.1.6"}, + {vsn, "0.1.7"}, {modules, []}, {registered, []}, {applications, [kernel, stdlib, rocksdb, gproc, mria, emqx_utils]}, diff --git a/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl b/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl index 6a79a4a61..10d1ed7a5 100644 --- a/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl +++ b/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl @@ -19,7 +19,7 @@ -include_lib("emqx_utils/include/bpapi.hrl"). %% API: --export([open_shard/3, drop_shard/2, get_streams/4, make_iterator/5, next/4]). +-export([open_shard/4, drop_shard/3, store_batch/5, get_streams/5, make_iterator/6, next/5]). %% behavior callbacks: -export([introduced_in/0]). @@ -28,44 +28,69 @@ %% API funcions %%================================================================================ --spec open_shard(node(), emqx_ds_replication_layer:shard_id(), emqx_ds:create_db_opts()) -> +-spec open_shard( + node(), + emqx_ds:db(), + emqx_ds_replication_layer:shard_id(), + emqx_ds:create_db_opts() +) -> ok. -open_shard(Node, Shard, Opts) -> - erpc:call(Node, emqx_ds_replication_layer, do_open_shard_v1, [Shard, Opts]). +open_shard(Node, DB, Shard, Opts) -> + erpc:call(Node, emqx_ds_replication_layer, do_open_shard_v1, [DB, Shard, Opts]). --spec drop_shard(node(), emqx_ds_replication_layer:shard_id()) -> +-spec drop_shard(node(), emqx_ds:db(), emqx_ds_replication_layer:shard_id()) -> ok. -drop_shard(Node, Shard) -> - erpc:call(Node, emqx_ds_replication_layer, do_drop_shard_v1, [Shard]). +drop_shard(Node, DB, Shard) -> + erpc:call(Node, emqx_ds_replication_layer, do_drop_shard_v1, [DB, Shard]). -spec get_streams( - node(), emqx_ds_replication_layer:shard_id(), emqx_ds:topic_filter(), emqx_ds:time() + node(), + emqx_ds:db(), + emqx_ds_replication_layer:shard_id(), + emqx_ds:topic_filter(), + emqx_ds:time() ) -> [{integer(), emqx_ds_storage_layer:stream()}]. -get_streams(Node, Shard, TopicFilter, Time) -> - erpc:call(Node, emqx_ds_replication_layer, do_get_streams_v1, [Shard, TopicFilter, Time]). +get_streams(Node, DB, Shard, TopicFilter, Time) -> + erpc:call(Node, emqx_ds_replication_layer, do_get_streams_v1, [DB, Shard, TopicFilter, Time]). -spec make_iterator( node(), + emqx_ds:db(), emqx_ds_replication_layer:shard_id(), emqx_ds_storage_layer:stream(), emqx_ds:topic_filter(), emqx_ds:time() ) -> {ok, emqx_ds_storage_layer:iterator()} | {error, _}. -make_iterator(Node, Shard, Stream, TopicFilter, StartTime) -> +make_iterator(Node, DB, Shard, Stream, TopicFilter, StartTime) -> erpc:call(Node, emqx_ds_replication_layer, do_make_iterator_v1, [ - Shard, Stream, TopicFilter, StartTime + DB, Shard, Stream, TopicFilter, StartTime ]). -spec next( - node(), emqx_ds_replication_layer:shard_id(), emqx_ds_storage_layer:iterator(), pos_integer() + node(), + emqx_ds:db(), + emqx_ds_replication_layer:shard_id(), + emqx_ds_storage_layer:iterator(), + pos_integer() ) -> {ok, emqx_ds_storage_layer:iterator(), [emqx_types:messages()]} | {ok, end_of_stream} | {error, _}. -next(Node, Shard, Iter, BatchSize) -> - erpc:call(Node, emqx_ds_replication_layer, do_next_v1, [Shard, Iter, BatchSize]). +next(Node, DB, Shard, Iter, BatchSize) -> + erpc:call(Node, emqx_ds_replication_layer, do_next_v1, [DB, Shard, Iter, BatchSize]). + +-spec store_batch( + node(), + emqx_ds:db(), + emqx_ds_replication_layer:shard_id(), + [emqx_types:message()], + emqx_ds:message_store_opts() +) -> + emqx_ds:store_batch_result(). 
+store_batch(Node, DB, Shard, Batch, Options) -> + erpc:call(Node, emqx_ds_replication_layer, do_store_batch_v1, [DB, Shard, Batch, Options]). %%================================================================================ %% behavior callbacks diff --git a/apps/emqx_durable_storage/test/emqx_ds_SUITE.erl b/apps/emqx_durable_storage/test/emqx_ds_SUITE.erl index 9637431d3..9b74e3227 100644 --- a/apps/emqx_durable_storage/test/emqx_ds_SUITE.erl +++ b/apps/emqx_durable_storage/test/emqx_ds_SUITE.erl @@ -54,7 +54,7 @@ t_02_smoke_get_streams_start_iter(_Config) -> TopicFilter = ['#'], [{Rank, Stream}] = emqx_ds:get_streams(DB, TopicFilter, StartTime), ?assertMatch({_, _}, Rank), - ?assertMatch({ok, _Iter}, emqx_ds:make_iterator(Stream, TopicFilter, StartTime)). + ?assertMatch({ok, _Iter}, emqx_ds:make_iterator(DB, Stream, TopicFilter, StartTime)). %% A simple smoke test that verifies that it's possible to iterate %% over messages. @@ -70,8 +70,8 @@ t_03_smoke_iterate(_Config) -> ], ?assertMatch(ok, emqx_ds:store_batch(DB, Msgs)), [{_, Stream}] = emqx_ds:get_streams(DB, TopicFilter, StartTime), - {ok, Iter0} = emqx_ds:make_iterator(Stream, TopicFilter, StartTime), - {ok, Iter, Batch} = iterate(Iter0, 1), + {ok, Iter0} = emqx_ds:make_iterator(DB, Stream, TopicFilter, StartTime), + {ok, Iter, Batch} = iterate(DB, Iter0, 1), ?assertEqual(Msgs, Batch, {Iter0, Iter}). %% Verify that iterators survive restart of the application. This is @@ -91,14 +91,14 @@ t_04_restart(_Config) -> ], ?assertMatch(ok, emqx_ds:store_batch(DB, Msgs)), [{_, Stream}] = emqx_ds:get_streams(DB, TopicFilter, StartTime), - {ok, Iter0} = emqx_ds:make_iterator(Stream, TopicFilter, StartTime), + {ok, Iter0} = emqx_ds:make_iterator(DB, Stream, TopicFilter, StartTime), %% Restart the application: ?tp(warning, emqx_ds_SUITE_restart_app, #{}), ok = application:stop(emqx_durable_storage), {ok, _} = application:ensure_all_started(emqx_durable_storage), ok = emqx_ds:open_db(DB, opts()), %% The old iterator should be still operational: - {ok, Iter, Batch} = iterate(Iter0, 1), + {ok, Iter, Batch} = iterate(DB, Iter0, 1), ?assertEqual(Msgs, Batch, {Iter0, Iter}). message(Topic, Payload, PublishedAt) -> @@ -109,15 +109,15 @@ message(Topic, Payload, PublishedAt) -> id = emqx_guid:gen() }. -iterate(It, BatchSize) -> - iterate(It, BatchSize, []). +iterate(DB, It, BatchSize) -> + iterate(DB, It, BatchSize, []). -iterate(It0, BatchSize, Acc) -> - case emqx_ds:next(It0, BatchSize) of +iterate(DB, It0, BatchSize, Acc) -> + case emqx_ds:next(DB, It0, BatchSize) of {ok, It, []} -> {ok, It, Acc}; {ok, It, Msgs} -> - iterate(It, BatchSize, Acc ++ Msgs); + iterate(DB, It, BatchSize, Acc ++ Msgs); Ret -> Ret end. 
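The updated test suite above also documents the new calling convention of the durable storage API: every operation is scoped to a DB handle, and `make_iterator`/`next` now carry the DB as an extra argument so that the backend module can be resolved per database. A minimal usage sketch along the same lines (illustrative only: the module name, DB name and batch size are assumptions, backend-specific options are elided, and at least one stream is assumed to exist):

-module(emqx_ds_usage_sketch).
-export([replay_example/0]).

replay_example() ->
    DB = my_db,
    %% With the builtin backend, calls are dispatched to emqx_ds_replication_layer
    %% through the persistent_term entry registered by emqx_ds:open_db/2.
    ok = emqx_ds:open_db(DB, #{backend => builtin}),
    TopicFilter = ['#'],
    StartTime = 0,
    %% Streams, iterators and next/3 are all keyed by the DB now.
    [{_Rank, Stream} | _] = emqx_ds:get_streams(DB, TopicFilter, StartTime),
    {ok, Iter0} = emqx_ds:make_iterator(DB, Stream, TopicFilter, StartTime),
    {ok, _Iter1, Batch} = emqx_ds:next(DB, Iter0, 100),
    Batch.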
diff --git a/apps/emqx_enterprise/src/emqx_enterprise.app.src b/apps/emqx_enterprise/src/emqx_enterprise.app.src index 1a5359db6..06bc500f4 100644 --- a/apps/emqx_enterprise/src/emqx_enterprise.app.src +++ b/apps/emqx_enterprise/src/emqx_enterprise.app.src @@ -1,6 +1,6 @@ {application, emqx_enterprise, [ {description, "EMQX Enterprise Edition"}, - {vsn, "0.1.4"}, + {vsn, "0.1.5"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_enterprise/src/emqx_enterprise_schema.erl b/apps/emqx_enterprise/src/emqx_enterprise_schema.erl index dbd47093f..66af3206b 100644 --- a/apps/emqx_enterprise/src/emqx_enterprise_schema.erl +++ b/apps/emqx_enterprise/src/emqx_enterprise_schema.erl @@ -53,7 +53,7 @@ fields("log_audit_handler") -> {"path", hoconsc:mk( - emqx_conf_schema:file(), + string(), #{ desc => ?DESC(emqx_conf_schema, "audit_file_handler_path"), default => <<"${EMQX_LOG_DIR}/audit.log">>, diff --git a/apps/emqx_ft/src/emqx_ft.app.src b/apps/emqx_ft/src/emqx_ft.app.src index 2dd33479c..cb86c1450 100644 --- a/apps/emqx_ft/src/emqx_ft.app.src +++ b/apps/emqx_ft/src/emqx_ft.app.src @@ -1,6 +1,6 @@ {application, emqx_ft, [ {description, "EMQX file transfer over MQTT"}, - {vsn, "0.1.8"}, + {vsn, "0.1.9"}, {registered, []}, {mod, {emqx_ft_app, []}}, {applications, [ diff --git a/apps/emqx_gateway/src/emqx_gateway.app.src b/apps/emqx_gateway/src/emqx_gateway.app.src index 8dcbe500c..df681b00f 100644 --- a/apps/emqx_gateway/src/emqx_gateway.app.src +++ b/apps/emqx_gateway/src/emqx_gateway.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_gateway, [ {description, "The Gateway management application"}, - {vsn, "0.1.26"}, + {vsn, "0.1.27"}, {registered, []}, {mod, {emqx_gateway_app, []}}, {applications, [kernel, stdlib, emqx, emqx_auth, emqx_ctl]}, diff --git a/apps/emqx_gateway/src/emqx_gateway_schema.erl b/apps/emqx_gateway/src/emqx_gateway_schema.erl index 5d8ac23d9..c84cf086b 100644 --- a/apps/emqx_gateway/src/emqx_gateway_schema.erl +++ b/apps/emqx_gateway/src/emqx_gateway_schema.erl @@ -32,19 +32,16 @@ -type duration() :: non_neg_integer(). -type duration_s() :: non_neg_integer(). -type bytesize() :: pos_integer(). --type comma_separated_list() :: list(). -typerefl_from_string({ip_port/0, emqx_schema, to_ip_port}). -typerefl_from_string({duration/0, emqx_schema, to_duration}). -typerefl_from_string({duration_s/0, emqx_schema, to_duration_s}). -typerefl_from_string({bytesize/0, emqx_schema, to_bytesize}). --typerefl_from_string({comma_separated_list/0, emqx_schema, to_comma_separated_list}). -reflect_type([ duration/0, duration_s/0, bytesize/0, - comma_separated_list/0, ip_port/0 ]). -elvis([{elvis_style, dont_repeat_yourself, disable}]). 
@@ -331,7 +328,7 @@ ws_opts(DefaultPath, DefaultSubProtocols) when )}, {"supported_subprotocols", sc( - comma_separated_list(), + emqx_schema:comma_separated_list(), #{ default => DefaultSubProtocols, desc => ?DESC(fields_ws_opts_supported_subprotocols) diff --git a/apps/emqx_gateway_coap/src/emqx_gateway_coap.app.src b/apps/emqx_gateway_coap/src/emqx_gateway_coap.app.src index f9bc57722..10dd6efef 100644 --- a/apps/emqx_gateway_coap/src/emqx_gateway_coap.app.src +++ b/apps/emqx_gateway_coap/src/emqx_gateway_coap.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_gateway_coap, [ {description, "CoAP Gateway"}, - {vsn, "0.1.4"}, + {vsn, "0.1.5"}, {registered, []}, {applications, [kernel, stdlib, emqx, emqx_gateway]}, {env, []}, diff --git a/apps/emqx_gateway_lwm2m/src/emqx_gateway_lwm2m.app.src b/apps/emqx_gateway_lwm2m/src/emqx_gateway_lwm2m.app.src index 7e502632c..97a6e04a1 100644 --- a/apps/emqx_gateway_lwm2m/src/emqx_gateway_lwm2m.app.src +++ b/apps/emqx_gateway_lwm2m/src/emqx_gateway_lwm2m.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_gateway_lwm2m, [ {description, "LwM2M Gateway"}, - {vsn, "0.1.3"}, + {vsn, "0.1.4"}, {registered, []}, {applications, [kernel, stdlib, emqx, emqx_gateway, emqx_gateway_coap]}, {env, []}, diff --git a/apps/emqx_gateway_stomp/src/emqx_gateway_stomp.app.src b/apps/emqx_gateway_stomp/src/emqx_gateway_stomp.app.src index 01d18b607..6913b2c5f 100644 --- a/apps/emqx_gateway_stomp/src/emqx_gateway_stomp.app.src +++ b/apps/emqx_gateway_stomp/src/emqx_gateway_stomp.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_gateway_stomp, [ {description, "Stomp Gateway"}, - {vsn, "0.1.3"}, + {vsn, "0.1.4"}, {registered, []}, {applications, [kernel, stdlib, emqx, emqx_gateway]}, {env, []}, diff --git a/apps/emqx_gcp_device/src/emqx_gcp_device.app.src b/apps/emqx_gcp_device/src/emqx_gcp_device.app.src index 01c722e98..7f1d81f14 100644 --- a/apps/emqx_gcp_device/src/emqx_gcp_device.app.src +++ b/apps/emqx_gcp_device/src/emqx_gcp_device.app.src @@ -1,6 +1,6 @@ {application, emqx_gcp_device, [ {description, "Application simplifying migration from GCP IoT Core"}, - {vsn, "0.1.2"}, + {vsn, "0.1.3"}, {registered, []}, {mod, {emqx_gcp_device_app, []}}, {applications, [ diff --git a/apps/emqx_gcp_device/src/emqx_gcp_device_authn_schema.erl b/apps/emqx_gcp_device/src/emqx_gcp_device_authn_schema.erl index a01c6d0e4..975e17ff0 100644 --- a/apps/emqx_gcp_device/src/emqx_gcp_device_authn_schema.erl +++ b/apps/emqx_gcp_device/src/emqx_gcp_device_authn_schema.erl @@ -16,18 +16,21 @@ -module(emqx_gcp_device_authn_schema). --include("emqx_gcp_device.hrl"). --include_lib("hocon/include/hoconsc.hrl"). - -behaviour(emqx_authn_schema). -export([ + namespace/0, fields/1, desc/1, refs/0, select_union_member/1 ]). +-include("emqx_gcp_device.hrl"). +-include_lib("hocon/include/hoconsc.hrl"). + +namespace() -> "authn". + refs() -> [?R_REF(gcp_device)]. 
select_union_member(#{<<"mechanism">> := ?AUTHN_MECHANISM_BIN}) -> diff --git a/apps/emqx_ldap/src/emqx_ldap.app.src b/apps/emqx_ldap/src/emqx_ldap.app.src index 1db88b924..774f11bd4 100644 --- a/apps/emqx_ldap/src/emqx_ldap.app.src +++ b/apps/emqx_ldap/src/emqx_ldap.app.src @@ -1,6 +1,6 @@ {application, emqx_ldap, [ {description, "EMQX LDAP Connector"}, - {vsn, "0.1.4"}, + {vsn, "0.1.5"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_ldap/src/emqx_ldap.erl b/apps/emqx_ldap/src/emqx_ldap.erl index 315733b79..1ff6861ed 100644 --- a/apps/emqx_ldap/src/emqx_ldap.erl +++ b/apps/emqx_ldap/src/emqx_ldap.erl @@ -37,7 +37,7 @@ %% ecpool connect & reconnect -export([connect/1]). --export([roots/0, fields/1, desc/1]). +-export([namespace/0, roots/0, fields/1, desc/1]). -export([do_get_status/1]). @@ -55,6 +55,9 @@ %%===================================================================== %% Hocon schema + +namespace() -> "ldap". + roots() -> [{config, #{type => hoconsc:ref(?MODULE, config)}}]. diff --git a/apps/emqx_license/src/emqx_license_schema.erl b/apps/emqx_license/src/emqx_license_schema.erl index 8f2d7f20d..f2b91811e 100644 --- a/apps/emqx_license/src/emqx_license_schema.erl +++ b/apps/emqx_license/src/emqx_license_schema.erl @@ -13,12 +13,14 @@ -behaviour(hocon_schema). --export([roots/0, fields/1, validations/0, desc/1, tags/0]). +-export([namespace/0, roots/0, fields/1, validations/0, desc/1, tags/0]). -export([ default_license/0 ]). +namespace() -> "license". + roots() -> [ {license, diff --git a/apps/emqx_machine/src/emqx_machine.app.src b/apps/emqx_machine/src/emqx_machine.app.src index 8cf85e936..496afcd64 100644 --- a/apps/emqx_machine/src/emqx_machine.app.src +++ b/apps/emqx_machine/src/emqx_machine.app.src @@ -3,7 +3,7 @@ {id, "emqx_machine"}, {description, "The EMQX Machine"}, % strict semver, bump manually! - {vsn, "0.2.15"}, + {vsn, "0.2.16"}, {modules, []}, {registered, []}, {applications, [kernel, stdlib, emqx_ctl]}, diff --git a/apps/emqx_management/src/emqx_management.app.src b/apps/emqx_management/src/emqx_management.app.src index 3c13a1935..efa05ad37 100644 --- a/apps/emqx_management/src/emqx_management.app.src +++ b/apps/emqx_management/src/emqx_management.app.src @@ -2,7 +2,7 @@ {application, emqx_management, [ {description, "EMQX Management API and CLI"}, % strict semver, bump manually! - {vsn, "5.0.32"}, + {vsn, "5.0.33"}, {modules, []}, {registered, [emqx_management_sup]}, {applications, [kernel, stdlib, emqx_plugins, minirest, emqx, emqx_ctl, emqx_bridge_http]}, diff --git a/apps/emqx_management/src/emqx_mgmt_api_listeners.erl b/apps/emqx_management/src/emqx_mgmt_api_listeners.erl index 8295047b9..1718a14cf 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_listeners.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_listeners.erl @@ -313,7 +313,7 @@ create_listener_schema(Opts) -> ], Example = maps:remove(id, tcp_schema_example()), emqx_dashboard_swagger:schema_with_example( - ?UNION(Schemas), + hoconsc:union(Schemas), Example#{name => <<"demo">>} ). diff --git a/apps/emqx_mongodb/src/emqx_mongodb.erl b/apps/emqx_mongodb/src/emqx_mongodb.erl index 6e623ea23..3adf52e6d 100644 --- a/apps/emqx_mongodb/src/emqx_mongodb.erl +++ b/apps/emqx_mongodb/src/emqx_mongodb.erl @@ -22,6 +22,7 @@ -include_lib("snabbkaffe/include/snabbkaffe.hrl"). -behaviour(emqx_resource). +-behaviour(hocon_schema). %% callbacks of behaviour emqx_resource -export([ @@ -29,7 +30,8 @@ on_start/2, on_stop/2, on_query/3, - on_get_status/2 + on_get_status/2, + namespace/0 ]). 
%% ecpool callback @@ -50,6 +52,9 @@ }). %%===================================================================== + +namespace() -> "mongo". + roots() -> [ {config, #{ diff --git a/apps/emqx_mysql/src/emqx_mysql.app.src b/apps/emqx_mysql/src/emqx_mysql.app.src index da24c5071..135f6878e 100644 --- a/apps/emqx_mysql/src/emqx_mysql.app.src +++ b/apps/emqx_mysql/src/emqx_mysql.app.src @@ -1,6 +1,6 @@ {application, emqx_mysql, [ {description, "EMQX MySQL Database Connector"}, - {vsn, "0.1.3"}, + {vsn, "0.1.4"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_plugins/src/emqx_plugins.app.src b/apps/emqx_plugins/src/emqx_plugins.app.src index d9c2d50df..963d1ec39 100644 --- a/apps/emqx_plugins/src/emqx_plugins.app.src +++ b/apps/emqx_plugins/src/emqx_plugins.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_plugins, [ {description, "EMQX Plugin Management"}, - {vsn, "0.1.6"}, + {vsn, "0.1.7"}, {modules, []}, {mod, {emqx_plugins_app, []}}, {applications, [kernel, stdlib, emqx]}, diff --git a/apps/emqx_plugins/src/emqx_plugins.erl b/apps/emqx_plugins/src/emqx_plugins.erl index 0e11062fc..41538daf6 100644 --- a/apps/emqx_plugins/src/emqx_plugins.erl +++ b/apps/emqx_plugins/src/emqx_plugins.erl @@ -433,9 +433,16 @@ do_ensure_started(NameVsn) -> tryit( "start_plugins", fun() -> - ok = ensure_exists_and_installed(NameVsn), - Plugin = do_read_plugin(NameVsn), - ok = load_code_start_apps(NameVsn, Plugin) + case ensure_exists_and_installed(NameVsn) of + ok -> + Plugin = do_read_plugin(NameVsn), + ok = load_code_start_apps(NameVsn, Plugin); + {error, plugin_not_found} -> + ?SLOG(error, #{ + msg => "plugin_not_found", + name_vsn => NameVsn + }) + end end ). @@ -665,6 +672,7 @@ do_load_plugin_app(AppName, Ebin) -> lists:foreach( fun(BeamFile) -> Module = list_to_atom(filename:basename(BeamFile, ".beam")), + _ = code:purge(Module), case code:load_file(Module) of {module, _} -> ok; diff --git a/apps/emqx_prometheus/src/emqx_prometheus_schema.erl b/apps/emqx_prometheus/src/emqx_prometheus_schema.erl index f34675c0b..3aaf4292f 100644 --- a/apps/emqx_prometheus/src/emqx_prometheus_schema.erl +++ b/apps/emqx_prometheus/src/emqx_prometheus_schema.erl @@ -57,7 +57,7 @@ fields("prometheus") -> )}, {headers, ?HOCON( - list({string(), string()}), + typerefl:alias("map", list({string(), string()}), #{}, [string(), string()]), #{ default => #{}, required => false, diff --git a/apps/emqx_redis/src/emqx_redis.app.src b/apps/emqx_redis/src/emqx_redis.app.src index 36f4b0cab..c9513bcf9 100644 --- a/apps/emqx_redis/src/emqx_redis.app.src +++ b/apps/emqx_redis/src/emqx_redis.app.src @@ -1,6 +1,6 @@ {application, emqx_redis, [ {description, "EMQX Redis Database Connector"}, - {vsn, "0.1.2"}, + {vsn, "0.1.3"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_resource/src/emqx_resource.app.src b/apps/emqx_resource/src/emqx_resource.app.src index 8092fadc8..9edd03078 100644 --- a/apps/emqx_resource/src/emqx_resource.app.src +++ b/apps/emqx_resource/src/emqx_resource.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_resource, [ {description, "Manager for all external resources"}, - {vsn, "0.1.24"}, + {vsn, "0.1.25"}, {registered, []}, {mod, {emqx_resource_app, []}}, {applications, [ diff --git a/apps/emqx_resource/src/emqx_resource.erl b/apps/emqx_resource/src/emqx_resource.erl index 60e94d7e3..f5bf65c0f 100644 --- a/apps/emqx_resource/src/emqx_resource.erl +++ b/apps/emqx_resource/src/emqx_resource.erl @@ -447,7 +447,7 @@ health_check(ResId) -> 
emqx_resource_manager:health_check(ResId). -spec channel_health_check(resource_id(), channel_id()) -> - #{status := channel_status(), error := term(), any() => any()}. + #{status := resource_status(), error := term()}. channel_health_check(ResId, ChannelId) -> emqx_resource_manager:channel_health_check(ResId, ChannelId). diff --git a/apps/emqx_resource/src/emqx_resource_manager.erl b/apps/emqx_resource/src/emqx_resource_manager.erl index a030080b7..11391fb2b 100644 --- a/apps/emqx_resource/src/emqx_resource_manager.erl +++ b/apps/emqx_resource/src/emqx_resource_manager.erl @@ -309,7 +309,7 @@ health_check(ResId) -> safe_call(ResId, health_check, ?T_OPERATION). -spec channel_health_check(resource_id(), channel_id()) -> - #{status := channel_status(), error := term(), any() => any()}. + #{status := resource_status(), error := term()}. channel_health_check(ResId, ChannelId) -> %% Do normal health check first to trigger health checks for channels %% and update the cached health status for the channels diff --git a/apps/emqx_retainer/src/emqx_retainer_schema.erl b/apps/emqx_retainer/src/emqx_retainer_schema.erl index 7b1a9675e..983b27601 100644 --- a/apps/emqx_retainer/src/emqx_retainer_schema.erl +++ b/apps/emqx_retainer/src/emqx_retainer_schema.erl @@ -77,7 +77,7 @@ fields("retainer") -> )}, {delivery_rate, ?HOCON( - emqx_limiter_schema:rate(), + emqx_limiter_schema:rate_type(), #{ required => false, desc => ?DESC(delivery_rate), diff --git a/apps/emqx_rule_engine/src/emqx_rule_api_schema.erl b/apps/emqx_rule_engine/src/emqx_rule_api_schema.erl index 0424bfb60..e9adbbdf6 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_api_schema.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_api_schema.erl @@ -24,7 +24,7 @@ -export([check_params/2]). --export([roots/0, fields/1]). +-export([namespace/0, roots/0, fields/1]). -type tag() :: rule_creation | rule_test | rule_engine. @@ -46,6 +46,8 @@ check_params(Params, Tag) -> %%====================================================================================== %% Hocon Schema Definitions +namespace() -> "rule_engine". + roots() -> [ {"rule_engine", sc(ref("rule_engine"), #{desc => ?DESC("root_rule_engine")})}, diff --git a/apps/emqx_rule_engine/src/emqx_rule_engine.app.src b/apps/emqx_rule_engine/src/emqx_rule_engine.app.src index cad752886..7feacee77 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_engine.app.src +++ b/apps/emqx_rule_engine/src/emqx_rule_engine.app.src @@ -2,7 +2,7 @@ {application, emqx_rule_engine, [ {description, "EMQX Rule Engine"}, % strict semver, bump manually! - {vsn, "5.0.28"}, + {vsn, "5.0.29"}, {modules, []}, {registered, [emqx_rule_engine_sup, emqx_rule_engine]}, {applications, [ diff --git a/apps/emqx_rule_engine/src/emqx_rule_engine_schema.erl b/apps/emqx_rule_engine/src/emqx_rule_engine_schema.erl index d0019a1c5..c6d3c7ff8 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_engine_schema.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_engine_schema.erl @@ -262,7 +262,7 @@ actions() -> end. qos() -> - ?UNION([emqx_schema:qos(), binary()]). + hoconsc:union([emqx_schema:qos(), binary()]). 
rule_engine_settings() -> [ diff --git a/apps/emqx_rule_engine/test/emqx_rule_engine_api_SUITE.erl b/apps/emqx_rule_engine/test/emqx_rule_engine_api_SUITE.erl index a4e659ce6..c2c52b6a6 100644 --- a/apps/emqx_rule_engine/test/emqx_rule_engine_api_SUITE.erl +++ b/apps/emqx_rule_engine/test/emqx_rule_engine_api_SUITE.erl @@ -311,6 +311,15 @@ t_rule_engine(_) -> {400, _} = emqx_rule_engine_api:'/rule_engine'(put, #{body => #{<<"something">> => <<"weird">>}}). t_downgrade_bridge_type(_) -> + case emqx_release:edition() of + ee -> + do_test_downgrade_bridge_type(); + ce -> + %% downgrade is not supported in CE + ok + end. + +do_test_downgrade_bridge_type() -> #{id := RuleId} = create_rule((?SIMPLE_RULE(<<>>))#{<<"actions">> => [<<"kafka:name">>]}), ?assertMatch( %% returns a bridges_v2 ID diff --git a/apps/emqx_s3/src/emqx_s3.app.src b/apps/emqx_s3/src/emqx_s3.app.src index ba94f66e1..bd17dc6c4 100644 --- a/apps/emqx_s3/src/emqx_s3.app.src +++ b/apps/emqx_s3/src/emqx_s3.app.src @@ -1,6 +1,6 @@ {application, emqx_s3, [ {description, "EMQX S3"}, - {vsn, "5.0.10"}, + {vsn, "5.0.11"}, {modules, []}, {registered, [emqx_s3_sup]}, {applications, [ diff --git a/apps/emqx_s3/src/emqx_s3_schema.erl b/apps/emqx_s3/src/emqx_s3_schema.erl index db37c6e2d..5478f6416 100644 --- a/apps/emqx_s3/src/emqx_s3_schema.erl +++ b/apps/emqx_s3/src/emqx_s3_schema.erl @@ -37,7 +37,7 @@ fields(s3) -> )}, {secret_access_key, mk( - secret_access_key(), + typerefl:alias("string", secret_access_key()), #{ desc => ?DESC("secret_access_key"), required => false, diff --git a/apps/emqx_telemetry/src/emqx_telemetry.app.src b/apps/emqx_telemetry/src/emqx_telemetry.app.src index d9483298f..32c2baa91 100644 --- a/apps/emqx_telemetry/src/emqx_telemetry.app.src +++ b/apps/emqx_telemetry/src/emqx_telemetry.app.src @@ -1,6 +1,6 @@ {application, emqx_telemetry, [ {description, "Report telemetry data for EMQX Opensource edition"}, - {vsn, "0.1.2"}, + {vsn, "0.1.3"}, {registered, [emqx_telemetry_sup, emqx_telemetry]}, {mod, {emqx_telemetry_app, []}}, {applications, [ diff --git a/apps/emqx_telemetry/src/emqx_telemetry_schema.erl b/apps/emqx_telemetry/src/emqx_telemetry_schema.erl index 1e1f547c5..586b70f72 100644 --- a/apps/emqx_telemetry/src/emqx_telemetry_schema.erl +++ b/apps/emqx_telemetry/src/emqx_telemetry_schema.erl @@ -22,11 +22,15 @@ -behaviour(hocon_schema). -export([ + namespace/0, roots/0, fields/1, desc/1 ]). +%% 'emqxtel' to distinguish open-telemetry +namespace() -> "emqxtel". + roots() -> ["telemetry"]. fields("telemetry") -> diff --git a/apps/emqx_utils/src/emqx_utils.app.src b/apps/emqx_utils/src/emqx_utils.app.src index 05e2d0162..a86a8d841 100644 --- a/apps/emqx_utils/src/emqx_utils.app.src +++ b/apps/emqx_utils/src/emqx_utils.app.src @@ -2,7 +2,7 @@ {application, emqx_utils, [ {description, "Miscellaneous utilities for EMQX apps"}, % strict semver, bump manually! 
- {vsn, "5.0.10"}, + {vsn, "5.0.11"}, {modules, [ emqx_utils, emqx_utils_api, diff --git a/build b/build index 8b485f3b6..c70f91b60 100755 --- a/build +++ b/build @@ -12,6 +12,12 @@ if [ "${DEBUG:-0}" -eq 1 ]; then export DIAGNOSTIC=1 fi +log_red() { + local RED='\033[0;31m' # Red + local NC='\033[0m' # No Color + echo -e "${RED}${1}${NC}" +} + PROFILE_ARG="$1" ARTIFACT="$2" @@ -34,7 +40,7 @@ case "$(is_enterprise "$PROFILE_ARG"),$(is_enterprise "$PROFILE_ENV")" in true ;; *) - echo "PROFILE env var is set to '$PROFILE_ENV', but '$0' arg1 is '$PROFILE_ARG'" + log_red "PROFILE env var is set to '$PROFILE_ENV', but '$0' arg1 is '$PROFILE_ARG'" exit 1 ;; esac @@ -133,6 +139,14 @@ make_docs() { erl -noshell -eval \ "ok = emqx_conf:dump_schema('$docdir', $SCHEMA_MODULE), \ halt(0)." + local desc="$docdir/desc.en.hocon" + if command -v jq &> /dev/null; then + log "Generating $desc" + scripts/merge-i18n.escript | jq --sort-keys . > "$desc" + else + # it is not a big deal if we cannot generate the desc + log_red "NOT Generated: $desc" + fi } ## arg1 is the profile for which the following args (as app names) should be excluded @@ -149,8 +163,8 @@ assert_no_excluded_deps() { for app in "${excluded_apps[@]}"; do found="$($FIND "$rel_dir" -maxdepth 1 -type d -name "$app-*")" if [ -n "${found}" ]; then - echo "ERROR: ${app} should not be included in ${PROFILE}" - echo "ERROR: found ${app} in ${rel_dir}" + log_red "ERROR: ${app} should not be included in ${PROFILE}" + log_red "ERROR: found ${app} in ${rel_dir}" exit 1 fi done @@ -291,7 +305,7 @@ make_tgz() { mkdir -p "${tard}/emqx" mkdir -p "${pkgpath}" if [ ! -f "$src_tarball" ]; then - log "ERROR: $src_tarball is not found" + log_red "ERROR: $src_tarball is not found" fi $TAR zxf "${src_tarball}" -C "${tard}/emqx" if [ -f "${tard}/emqx/releases/${PKG_VSN}/relup" ]; then diff --git a/changes/ce/fix-11897.en.md b/changes/ce/fix-11897.en.md new file mode 100644 index 000000000..383129b4a --- /dev/null +++ b/changes/ce/fix-11897.en.md @@ -0,0 +1 @@ +Fix config sync wait-loop race condition when cluster nodes boot around the same time. diff --git a/changes/e5.3.1.en.md b/changes/e5.3.1.en.md new file mode 100644 index 000000000..ab92f155a --- /dev/null +++ b/changes/e5.3.1.en.md @@ -0,0 +1,58 @@ +# e5.3.1 + +## Enhancements + +- [#11637](https://github.com/emqx/emqx/pull/11637) Added extra diagnostic checks to help debug issues when mnesia is stuck waiting for tables. Library Updates: `ekka` has been upgraded to version 0.15.15, and `mria` to version 0.6.4. + +- [#11581](https://github.com/emqx/emqx/pull/11581) Feature Preview: Planned for EMQX v5.4.0, introducing the concepts of *Connector* and *Action* base on data bridge. The existing data bridge will be gradually migrated to Connector and Action. Connector are designed to manage the integration with external systems, while Actions are solely used to configure the data processing methods. Connector can be reused across multiple Actions, providing greater flexibility and scalability. Currently, the migration has been completed for Kafka producer and Azure Event Hub producer. +- The Dashboard now supports MQTT 5.0 publish attribute settings for the rule engine's message republish action, allowing users more flexibility in publishing messages. + +## Bug Fixes + +- [#11565](https://github.com/emqx/emqx/pull/11565) Upgraded jq library from v0.3.10 to v0.3.11. In this version, jq_port programs are initiated on-demand and will not appear in users' processes unless the jq function in EMQX is used. 
Additionally, idle jq_port programs will auto-terminate after a set period. Note: Most EMQX users are running jq in NIF mode and will not be affected by this update.
+
+- [#11676](https://github.com/emqx/emqx/pull/11676) Hid a few pieces of sensitive information from debug-level logs.
+
+- [#11697](https://github.com/emqx/emqx/pull/11697) Disabled outdated TLS versions and cipher suites in the EMQX backplane network (`gen_rpc`). Added support for tlsv1.3 on the backplane and introduced new configuration parameters: `EMQX_RPC__TLS_VERSIONS` and `EMQX_RPC__CIPHERS`.
+
+- [#11734](https://github.com/emqx/emqx/pull/11734) Fixed clustering in IPv6 networks. Added new configurations `rpc.listen_address` and `rpc.ipv6_only` to allow EMQX cluster RPC server and client to use IPv6.
+
+- [#11747](https://github.com/emqx/emqx/pull/11747) Updated QUIC stack to msquic 2.2.3.
+
+- [#11796](https://github.com/emqx/emqx/pull/11796) Fixed rpc schema to ensure that the client and server use the same transport driver.
+
+- [#11798](https://github.com/emqx/emqx/pull/11798) Fixed the issue where the node could not start after executing `./bin/emqx data import [FILE]`.
+
+ The connection between `apikey_key` and `apikey_name` is also enhanced for better consistency and unique identification.
+ - `apikey_key`: When generating an API key via the dashboard, `apikey_key` will now create a unique value derived from the provided human-readable `apikey_name`.
+ - `apikey_name`: Conversely, when using a bootstrap file to generate an API key, `apikey_name` will be generated as a unique value based on the associated `apikey_key`.
+
+- [#11813](https://github.com/emqx/emqx/pull/11813) Fixed the schema to ensure that RPC client SSL port aligns with the configured server port. This fix also guarantees that the RPC ports are correctly opened in the Helm chart.
+
+- [#11819](https://github.com/emqx/emqx/pull/11819) Upgraded opentelemetry library to v1.3.1-emqx. This opentelemetry release fixes invalid metrics timestamps in the exported metrics.
+
+- [#11861](https://github.com/emqx/emqx/pull/11861) Fixed excessive warning message printed in remote console shell.
+
+- [#11722](https://github.com/emqx/emqx/pull/11722) Fixed an issue where a Kafka Producer bridge with `sync` query mode would not buffer messages when in the `connecting` state.
+
+- [#11724](https://github.com/emqx/emqx/pull/11724) Fixed a metrics-related issue where messages sent to Kafka would be counted as failed even when they were successfully transmitted afterward due to internal buffering.
+
+- [#11728](https://github.com/emqx/emqx/pull/11728) Enhanced the LDAP filter string parser with the following improvements:
+ - Automatic escaping of special characters within filter strings.
+ - Fixed a bug that previously prevented the use of `dn` as a filter value.
+
+- [#11733](https://github.com/emqx/emqx/pull/11733) Resolved an incompatibility issue that caused crashes during session takeover or channel eviction when the session was located on a remote node running EMQX v5.2.x or an earlier version.
+
+- [#11750](https://github.com/emqx/emqx/pull/11750) Eliminated logging and tracing of HTTP request bodies in HTTP authentication and HTTP bridges.
+
+- [#11760](https://github.com/emqx/emqx/pull/11760) Simplified the CQL query used for the Cassandra bridge health check, which was previously generating warnings in the Cassandra server logs.
+
+- [#11886](https://github.com/emqx/emqx/pull/11886) Fixed backward plugin compatibility.
+
+ Currently, EMQX validates hookpoint names, so invalid hookpoints cannot be used for registering hooks. However, older versions of plugin templates used some misspelled hookpoints, and so could the real plugins. We allow the old hookpoints to be used for registering hooks, but issue a warning that they are deprecated. As before, these hooks are never called.
+
+- [#11897](https://github.com/emqx/emqx/pull/11897) Fix config sync wait-loop race condition when cluster nodes boot around the same time.
+
+## Breaking Changes
+
+
diff --git a/changes/v5.3.1.en.md b/changes/v5.3.1.en.md new file mode 100644 index 000000000..fe1c150ab --- /dev/null +++ b/changes/v5.3.1.en.md
@@ -0,0 +1,47 @@
+# v5.3.1
+
+## Enhancements
+
+- [#11637](https://github.com/emqx/emqx/pull/11637) Added extra diagnostic checks to help debug issues when mnesia is stuck waiting for tables. Library Updates: `ekka` has been upgraded to version 0.15.15, and `mria` to version 0.6.4.
+
+## Bug Fixes
+
+- [#11565](https://github.com/emqx/emqx/pull/11565) Upgraded jq library from v0.3.10 to v0.3.11. In this version, jq_port programs are initiated on-demand and will not appear in users' processes unless the jq function in EMQX is used. Additionally, idle jq_port programs will auto-terminate after a set period. Note: Most EMQX users are running jq in NIF mode and will not be affected by this update.
+
+- [#11676](https://github.com/emqx/emqx/pull/11676) Hid a few pieces of sensitive information from debug-level logs.
+
+- [#11697](https://github.com/emqx/emqx/pull/11697) Disabled outdated TLS versions and cipher suites in the EMQX backplane network (`gen_rpc`). Added support for tlsv1.3 on the backplane and introduced new configuration parameters: `EMQX_RPC__TLS_VERSIONS` and `EMQX_RPC__CIPHERS`.
+
+ The corresponding `gen_rpc` PR: https://github.com/emqx/gen_rpc/pull/36
+
+- [#11734](https://github.com/emqx/emqx/pull/11734) Fixed clustering in IPv6 networks. Added new configurations `rpc.listen_address` and `rpc.ipv6_only` to allow EMQX cluster RPC server and client to use IPv6.
+
+- [#11747](https://github.com/emqx/emqx/pull/11747) Updated QUIC stack to msquic 2.2.3.
+
+- [#11796](https://github.com/emqx/emqx/pull/11796) Fixed rpc schema to ensure that the client and server use the same transport driver.
+
+- [#11798](https://github.com/emqx/emqx/pull/11798) Fixed the issue where the node could not start after executing `./bin/emqx data import [FILE]`.
+
+ The connection between `apikey_key` and `apikey_name` is also enhanced for better consistency and unique identification.
+ - `apikey_key`: When generating an API key via the dashboard, `apikey_key` will now create a unique value derived from the provided human-readable `apikey_name`.
+ - `apikey_name`: Conversely, when using a bootstrap file to generate an API key, `apikey_name` will be generated as a unique value based on the associated `apikey_key`.
+
+- [#11813](https://github.com/emqx/emqx/pull/11813) Fixed the schema to ensure that RPC client SSL port aligns with the configured server port. This fix also guarantees that the RPC ports are correctly opened in the Helm chart.
+
+- [#11819](https://github.com/emqx/emqx/pull/11819) Upgraded opentelemetry library to v1.3.1-emqx. This opentelemetry release fixes invalid metrics timestamps in the exported metrics.
+
+- [#11861](https://github.com/emqx/emqx/pull/11861) Fixed excessive warning message printed in remote console shell.
+
+- [#11733](https://github.com/emqx/emqx/pull/11733) Resolved an incompatibility issue that caused crashes during session takeover or channel eviction when the session was located on a remote node running EMQX v5.2.x or an earlier version.
+
+- [#11750](https://github.com/emqx/emqx/pull/11750) Eliminated logging and tracing of HTTP request bodies in HTTP authentication and HTTP bridges.
+
+- [#11886](https://github.com/emqx/emqx/pull/11886) Fixed backward plugin compatibility.
+
+ Currently, EMQX validates hookpoint names, so invalid hookpoints cannot be used for registering hooks. However, older versions of plugin templates used some misspelled hookpoints, and so could the real plugins. We allow the old hookpoints to be used for registering hooks, but issue a warning that they are deprecated. As before, these hooks are never called.
+
+- [#11897](https://github.com/emqx/emqx/pull/11897) Fix config sync wait-loop race condition when cluster nodes boot around the same time.
+
+## Breaking Changes
+
+
diff --git a/deploy/charts/emqx-enterprise/Chart.yaml b/deploy/charts/emqx-enterprise/Chart.yaml index 4211f37e4..d9ad72611 100644 --- a/deploy/charts/emqx-enterprise/Chart.yaml +++ b/deploy/charts/emqx-enterprise/Chart.yaml
@@ -14,8 +14,8 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version.
-version: 5.3.1-alpha.4
+version: 5.3.1
# This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application.
-appVersion: 5.3.1-alpha.4
+appVersion: 5.3.1
diff --git a/deploy/charts/emqx/Chart.yaml b/deploy/charts/emqx/Chart.yaml index 54d36eee3..76bcd3aaa 100644 --- a/deploy/charts/emqx/Chart.yaml +++ b/deploy/charts/emqx/Chart.yaml
@@ -14,8 +14,8 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version.
-version: 5.3.1-alpha.1
+version: 5.3.1
# This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application.
-appVersion: 5.3.1-alpha.1 +appVersion: 5.3.1 diff --git a/mix.exs b/mix.exs index 8a0f93dd6..d931b799d 100644 --- a/mix.exs +++ b/mix.exs @@ -72,7 +72,7 @@ defmodule EMQXUmbrella.MixProject do # in conflict by emqtt and hocon {:getopt, "1.0.2", override: true}, {:snabbkaffe, github: "kafka4beam/snabbkaffe", tag: "1.0.8", override: true}, - {:hocon, github: "emqx/hocon", tag: "0.39.19", override: true}, + {:hocon, github: "emqx/hocon", tag: "0.40.0", override: true}, {:emqx_http_lib, github: "emqx/emqx_http_lib", tag: "0.5.3", override: true}, {:esasl, github: "emqx/esasl", tag: "0.2.0"}, {:jose, github: "potatosalad/erlang-jose", tag: "1.11.2"}, @@ -102,31 +102,31 @@ defmodule EMQXUmbrella.MixProject do {:opentelemetry_api, github: "emqx/opentelemetry-erlang", sparse: "apps/opentelemetry_api", - tag: "v1.3.1-emqx", + tag: "v1.3.2-emqx", override: true, runtime: false}, {:opentelemetry, github: "emqx/opentelemetry-erlang", sparse: "apps/opentelemetry", - tag: "v1.3.1-emqx", + tag: "v1.3.2-emqx", override: true, runtime: false}, {:opentelemetry_api_experimental, github: "emqx/opentelemetry-erlang", sparse: "apps/opentelemetry_api_experimental", - tag: "v1.3.1-emqx", + tag: "v1.3.2-emqx", override: true, runtime: false}, {:opentelemetry_experimental, github: "emqx/opentelemetry-erlang", sparse: "apps/opentelemetry_experimental", - tag: "v1.3.1-emqx", + tag: "v1.3.2-emqx", override: true, runtime: false}, {:opentelemetry_exporter, github: "emqx/opentelemetry-erlang", sparse: "apps/opentelemetry_exporter", - tag: "v1.3.1-emqx", + tag: "v1.3.2-emqx", override: true, runtime: false} ] ++ diff --git a/rebar.config b/rebar.config index db64d3081..f4273f6fb 100644 --- a/rebar.config +++ b/rebar.config @@ -75,7 +75,7 @@ , {system_monitor, {git, "https://github.com/ieQu1/system_monitor", {tag, "3.0.3"}}} , {getopt, "1.0.2"} , {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "1.0.8"}}} - , {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.39.19"}}} + , {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.40.0"}}} , {emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.3"}}} , {esasl, {git, "https://github.com/emqx/esasl", {tag, "0.2.0"}}} , {jose, {git, "https://github.com/potatosalad/erlang-jose", {tag, "1.11.2"}}} @@ -85,13 +85,13 @@ , {jsone, {git, "https://github.com/emqx/jsone.git", {tag, "1.7.1"}}} , {uuid, {git, "https://github.com/okeuday/uuid.git", {tag, "v2.0.6"}}} %% trace - , {opentelemetry_api, {git_subdir, "https://github.com/emqx/opentelemetry-erlang", {tag, "v1.3.1-emqx"}, "apps/opentelemetry_api"}} - , {opentelemetry, {git_subdir, "https://github.com/emqx/opentelemetry-erlang", {tag, "v1.3.1-emqx"}, "apps/opentelemetry"}} + , {opentelemetry_api, {git_subdir, "https://github.com/emqx/opentelemetry-erlang", {tag, "v1.3.2-emqx"}, "apps/opentelemetry_api"}} + , {opentelemetry, {git_subdir, "https://github.com/emqx/opentelemetry-erlang", {tag, "v1.3.2-emqx"}, "apps/opentelemetry"}} %% log metrics - , {opentelemetry_experimental, {git_subdir, "https://github.com/emqx/opentelemetry-erlang", {tag, "v1.3.1-emqx"}, "apps/opentelemetry_experimental"}} - , {opentelemetry_api_experimental, {git_subdir, "https://github.com/emqx/opentelemetry-erlang", {tag, "v1.3.1-emqx"}, "apps/opentelemetry_api_experimental"}} + , {opentelemetry_experimental, {git_subdir, "https://github.com/emqx/opentelemetry-erlang", {tag, "v1.3.2-emqx"}, "apps/opentelemetry_experimental"}} + , {opentelemetry_api_experimental, {git_subdir, 
"https://github.com/emqx/opentelemetry-erlang", {tag, "v1.3.2-emqx"}, "apps/opentelemetry_api_experimental"}} %% export - , {opentelemetry_exporter, {git_subdir, "https://github.com/emqx/opentelemetry-erlang", {tag, "v1.3.1-emqx"}, "apps/opentelemetry_exporter"}} + , {opentelemetry_exporter, {git_subdir, "https://github.com/emqx/opentelemetry-erlang", {tag, "v1.3.2-emqx"}, "apps/opentelemetry_exporter"}} ]}. {xref_ignores, diff --git a/rel/i18n/emqx_bridge_api.hocon b/rel/i18n/emqx_bridge_api.hocon index 8b7950cdc..3567f03cc 100644 --- a/rel/i18n/emqx_bridge_api.hocon +++ b/rel/i18n/emqx_bridge_api.hocon @@ -49,7 +49,7 @@ desc_api8.label: """Node Bridge Operate""" desc_api9.desc: -"""Test creating a new bridge by given ID
+"""Test creating a new bridge by given ID
The ID must be of format '{type}:{name}'""" desc_api9.label: diff --git a/rel/i18n/emqx_bridge_cassandra.hocon b/rel/i18n/emqx_bridge_cassandra.hocon index d598d3921..a96315340 100644 --- a/rel/i18n/emqx_bridge_cassandra.hocon +++ b/rel/i18n/emqx_bridge_cassandra.hocon @@ -32,7 +32,7 @@ desc_type.label: local_topic.desc: """The MQTT topic filter to be forwarded to Cassandra. All MQTT 'PUBLISH' messages with the topic -matching the local_topic will be forwarded.
+matching the local_topic will be forwarded.
NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and also local_topic is configured, then both the data got from the rule and the MQTT messages that match local_topic will be forwarded.""" diff --git a/rel/i18n/emqx_bridge_clickhouse.hocon b/rel/i18n/emqx_bridge_clickhouse.hocon index 726d1eb7c..7d1961f98 100644 --- a/rel/i18n/emqx_bridge_clickhouse.hocon +++ b/rel/i18n/emqx_bridge_clickhouse.hocon @@ -32,7 +32,7 @@ desc_type.label: local_topic.desc: """The MQTT topic filter to be forwarded to Clickhouse. All MQTT 'PUBLISH' messages with the topic -matching the local_topic will be forwarded.
+matching the local_topic will be forwarded.
NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and also local_topic is configured, then both the data got from the rule and the MQTT messages that match local_topic will be forwarded.""" diff --git a/rel/i18n/emqx_bridge_dynamo.hocon b/rel/i18n/emqx_bridge_dynamo.hocon index 417b43c0c..a014aae9f 100644 --- a/rel/i18n/emqx_bridge_dynamo.hocon +++ b/rel/i18n/emqx_bridge_dynamo.hocon @@ -26,7 +26,7 @@ desc_type.label: local_topic.desc: """The MQTT topic filter to be forwarded to DynamoDB. All MQTT `PUBLISH` messages with the topic -matching the `local_topic` will be forwarded.
+matching the `local_topic` will be forwarded.
NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and also `local_topic` is configured, then both the data got from the rule and the MQTT messages that match `local_topic` will be forwarded.""" diff --git a/rel/i18n/emqx_bridge_gcp_pubsub.hocon b/rel/i18n/emqx_bridge_gcp_pubsub.hocon index b5dffec1f..68a6f8578 100644 --- a/rel/i18n/emqx_bridge_gcp_pubsub.hocon +++ b/rel/i18n/emqx_bridge_gcp_pubsub.hocon @@ -26,7 +26,7 @@ desc_type.label: local_topic.desc: """The MQTT topic filter to be forwarded to GCP PubSub. All MQTT 'PUBLISH' messages with the topic -matching `local_topic` will be forwarded.
+matching `local_topic` will be forwarded.
NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and also local_topic is configured, then both the data got from the rule and the MQTT messages that match local_topic will be forwarded.""" diff --git a/rel/i18n/emqx_bridge_greptimedb.hocon b/rel/i18n/emqx_bridge_greptimedb.hocon index 93d783332..977e6e064 100644 --- a/rel/i18n/emqx_bridge_greptimedb.hocon +++ b/rel/i18n/emqx_bridge_greptimedb.hocon @@ -26,7 +26,7 @@ desc_type.label: local_topic.desc: """The MQTT topic filter to be forwarded to the GreptimeDB. All MQTT 'PUBLISH' messages with the topic -matching the local_topic will be forwarded.
+matching the local_topic will be forwarded.
NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and also local_topic is configured, then both the data got from the rule and the MQTT messages that match local_topic will be forwarded.""" @@ -37,8 +37,8 @@ local_topic.label: write_syntax.desc: """Conf of GreptimeDB gRPC protocol to write data points. Write syntax is a text-based format that provides the measurement, tag set, field set, and timestamp of a data point, and placeholder supported, which is the same as InfluxDB line protocol. See also [InfluxDB 2.3 Line Protocol](https://docs.influxdata.com/influxdb/v2.3/reference/syntax/line-protocol/) and -[GreptimeDB 1.8 Line Protocol](https://docs.influxdata.com/influxdb/v1.8/write_protocols/line_protocol_tutorial/)
-TLDR:
+[GreptimeDB 1.8 Line Protocol](https://docs.influxdata.com/influxdb/v1.8/write_protocols/line_protocol_tutorial/)
+TLDR:
``` <measurement>[,<tag_key>=<tag_value>[,<tag_key>=<tag_value>]] <field_key>=<field_value>[,<field_key>=<field_value>] [<timestamp>] ``` diff --git a/rel/i18n/emqx_bridge_greptimedb_connector.hocon b/rel/i18n/emqx_bridge_greptimedb_connector.hocon index 9cb10951f..0a509ebfc 100644 --- a/rel/i18n/emqx_bridge_greptimedb_connector.hocon +++ b/rel/i18n/emqx_bridge_greptimedb_connector.hocon @@ -31,8 +31,8 @@ protocol.label: """Protocol""" server.desc: -"""The IPv4 or IPv6 address or the hostname to connect to.
-A host entry has the following form: `Host[:Port]`.
+"""The IPv4 or IPv6 address or the hostname to connect to.
+A host entry has the following form: `Host[:Port]`.
The GreptimeDB default port 8086 is used if `[:Port]` is not specified.""" server.label: diff --git a/rel/i18n/emqx_bridge_hstreamdb.hocon b/rel/i18n/emqx_bridge_hstreamdb.hocon index 809c60588..de9989953 100644 --- a/rel/i18n/emqx_bridge_hstreamdb.hocon +++ b/rel/i18n/emqx_bridge_hstreamdb.hocon @@ -32,7 +32,7 @@ desc_type.label: local_topic.desc: """The MQTT topic filter to be forwarded to the HStreamDB. All MQTT 'PUBLISH' messages with the topic -matching the local_topic will be forwarded.
+matching the local_topic will be forwarded.
NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and also local_topic is configured, then both the data got from the rule and the MQTT messages that match local_topic will be forwarded.""" diff --git a/rel/i18n/emqx_bridge_influxdb.hocon b/rel/i18n/emqx_bridge_influxdb.hocon index 4299f41ab..48454bbd3 100644 --- a/rel/i18n/emqx_bridge_influxdb.hocon +++ b/rel/i18n/emqx_bridge_influxdb.hocon @@ -26,7 +26,7 @@ desc_type.label: local_topic.desc: """The MQTT topic filter to be forwarded to the InfluxDB. All MQTT 'PUBLISH' messages with the topic -matching the local_topic will be forwarded.
+matching the local_topic will be forwarded.
NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and also local_topic is configured, then both the data got from the rule and the MQTT messages that match local_topic will be forwarded.""" @@ -37,8 +37,8 @@ local_topic.label: write_syntax.desc: """Conf of InfluxDB line protocol to write data points. It is a text-based format that provides the measurement, tag set, field set, and timestamp of a data point, and placeholder supported. See also [InfluxDB 2.3 Line Protocol](https://docs.influxdata.com/influxdb/v2.3/reference/syntax/line-protocol/) and -[InfluxDB 1.8 Line Protocol](https://docs.influxdata.com/influxdb/v1.8/write_protocols/line_protocol_tutorial/)
-TLDR:
+[InfluxDB 1.8 Line Protocol](https://docs.influxdata.com/influxdb/v1.8/write_protocols/line_protocol_tutorial/)
+TLDR:
``` <measurement>[,<tag_key>=<tag_value>[,<tag_key>=<tag_value>]] <field_key>=<field_value>[,<field_key>=<field_value>] [<timestamp>] ``` diff --git a/rel/i18n/emqx_bridge_influxdb_connector.hocon b/rel/i18n/emqx_bridge_influxdb_connector.hocon index 4169ce065..ce79c2a93 100644 --- a/rel/i18n/emqx_bridge_influxdb_connector.hocon +++ b/rel/i18n/emqx_bridge_influxdb_connector.hocon @@ -49,8 +49,8 @@ protocol.label: """Protocol""" server.desc: -"""The IPv4 or IPv6 address or the hostname to connect to.
-A host entry has the following form: `Host[:Port]`.
+"""The IPv4 or IPv6 address or the hostname to connect to.
+A host entry has the following form: `Host[:Port]`.
The InfluxDB default port 8086 is used if `[:Port]` is not specified.""" server.label: diff --git a/rel/i18n/emqx_bridge_kinesis.hocon b/rel/i18n/emqx_bridge_kinesis.hocon index 42329bcd6..188ab82f3 100644 --- a/rel/i18n/emqx_bridge_kinesis.hocon +++ b/rel/i18n/emqx_bridge_kinesis.hocon @@ -32,7 +32,7 @@ pool_size.label: local_topic.desc: """The MQTT topic filter to be forwarded to Amazon Kinesis. All MQTT `PUBLISH` messages with the topic -matching the `local_topic` will be forwarded.
+matching the `local_topic` will be forwarded.
NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and also `local_topic` is configured, then both the data got from the rule and the MQTT messages that match `local_topic` will be forwarded.""" diff --git a/rel/i18n/emqx_bridge_mysql.hocon b/rel/i18n/emqx_bridge_mysql.hocon index 10a02589c..37326be81 100644 --- a/rel/i18n/emqx_bridge_mysql.hocon +++ b/rel/i18n/emqx_bridge_mysql.hocon @@ -26,7 +26,7 @@ desc_type.label: local_topic.desc: """The MQTT topic filter to be forwarded to MySQL. All MQTT 'PUBLISH' messages with the topic -matching the local_topic will be forwarded.
+matching the local_topic will be forwarded.
NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and also local_topic is configured, then both the data got from the rule and the MQTT messages that match local_topic will be forwarded.""" diff --git a/rel/i18n/emqx_bridge_oracle.hocon b/rel/i18n/emqx_bridge_oracle.hocon index c0c8142e6..bcf41ea2c 100644 --- a/rel/i18n/emqx_bridge_oracle.hocon +++ b/rel/i18n/emqx_bridge_oracle.hocon @@ -2,7 +2,7 @@ emqx_bridge_oracle { local_topic { desc = "The MQTT topic filter to be forwarded to Oracle Database. All MQTT 'PUBLISH' messages with the topic" - " matching the local_topic will be forwarded.
" + " matching the local_topic will be forwarded.
" "NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and also local_topic is" " configured, then both the data got from the rule and the MQTT messages that match local_topic" " will be forwarded." diff --git a/rel/i18n/emqx_bridge_pgsql.hocon b/rel/i18n/emqx_bridge_pgsql.hocon index 5295abb35..0a5ca2b04 100644 --- a/rel/i18n/emqx_bridge_pgsql.hocon +++ b/rel/i18n/emqx_bridge_pgsql.hocon @@ -26,7 +26,7 @@ desc_type.label: local_topic.desc: """The MQTT topic filter to be forwarded to PostgreSQL. All MQTT 'PUBLISH' messages with the topic -matching the local_topic will be forwarded.
+matching the local_topic will be forwarded.
NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and also local_topic is configured, then both the data got from the rule and the MQTT messages that match local_topic will be forwarded.""" diff --git a/rel/i18n/emqx_bridge_redis.hocon b/rel/i18n/emqx_bridge_redis.hocon index 8e8c18de0..05c8d95a6 100644 --- a/rel/i18n/emqx_bridge_redis.hocon +++ b/rel/i18n/emqx_bridge_redis.hocon @@ -34,7 +34,7 @@ desc_type.label: local_topic.desc: """The MQTT topic filter to be forwarded to Redis. All MQTT 'PUBLISH' messages with the topic -matching the local_topic will be forwarded.
+matching the local_topic will be forwarded.
NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and also local_topic is configured, then both the data got from the rule and the MQTT messages that match local_topic will be forwarded.""" diff --git a/rel/i18n/emqx_bridge_rocketmq.hocon b/rel/i18n/emqx_bridge_rocketmq.hocon index ac5deb757..a2449c1a9 100644 --- a/rel/i18n/emqx_bridge_rocketmq.hocon +++ b/rel/i18n/emqx_bridge_rocketmq.hocon @@ -26,7 +26,7 @@ desc_type.label: local_topic.desc: """The MQTT topic filter to be forwarded to RocketMQ. All MQTT `PUBLISH` messages with the topic -matching the `local_topic` will be forwarded.
+matching the `local_topic` will be forwarded.
NOTE: if the bridge is used as a rule action, `local_topic` should be left empty otherwise the messages will be duplicated.""" local_topic.label: diff --git a/rel/i18n/emqx_bridge_sqlserver.hocon b/rel/i18n/emqx_bridge_sqlserver.hocon index 0e0801f42..24e4615f3 100644 --- a/rel/i18n/emqx_bridge_sqlserver.hocon +++ b/rel/i18n/emqx_bridge_sqlserver.hocon @@ -32,7 +32,7 @@ driver.label: local_topic.desc: """The MQTT topic filter to be forwarded to Microsoft SQL Server. All MQTT 'PUBLISH' messages with the topic -matching the local_topic will be forwarded.
+matching the local_topic will be forwarded.
NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and also local_topic is configured, then both the data got from the rule and the MQTT messages that match local_topic will be forwarded.""" diff --git a/rel/i18n/emqx_bridge_tdengine.hocon b/rel/i18n/emqx_bridge_tdengine.hocon index 2d1059d28..ec6c10779 100644 --- a/rel/i18n/emqx_bridge_tdengine.hocon +++ b/rel/i18n/emqx_bridge_tdengine.hocon @@ -26,7 +26,7 @@ desc_type.label: local_topic.desc: """The MQTT topic filter to be forwarded to TDengine. All MQTT 'PUBLISH' messages with the topic -matching the local_topic will be forwarded.
+matching the local_topic will be forwarded.
NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and also local_topic is configured, then both the data got from the rule and the MQTT messages that match local_topic will be forwarded.""" diff --git a/rel/i18n/emqx_bridge_v2_api.hocon b/rel/i18n/emqx_bridge_v2_api.hocon index 1f2c2bd8d..23a75712a 100644 --- a/rel/i18n/emqx_bridge_v2_api.hocon +++ b/rel/i18n/emqx_bridge_v2_api.hocon @@ -54,6 +54,12 @@ desc_api9.desc: desc_api9.label: """Test Bridge Creation""" +desc_api10.desc: +"""Lists the available action types.""" + +desc_api10.label: +"""List action types""" + desc_bridge_metrics.desc: """Get bridge metrics by id.""" diff --git a/rel/i18n/emqx_schema.hocon b/rel/i18n/emqx_schema.hocon index e1d086197..3eb816f3b 100644 --- a/rel/i18n/emqx_schema.hocon +++ b/rel/i18n/emqx_schema.hocon @@ -92,7 +92,7 @@ mqtt_max_topic_alias.label: """Max Topic Alias""" common_ssl_opts_schema_user_lookup_fun.desc: -"""EMQX-internal callback that is used to lookup pre-shared key (PSK) identity.
+"""EMQX-internal callback that is used to lookup pre-shared key (PSK) identity.
Has no effect when TLS version is configured (or negotiated) to 1.3""" common_ssl_opts_schema_user_lookup_fun.label: @@ -1207,7 +1207,7 @@ The SSL application already takes measures to counter-act such attempts, but client-initiated renegotiation can be strictly disabled by setting this option to false. The default value is true. Note that disabling renegotiation can result in long-lived connections becoming unusable due to limits on -the number of messages the underlying cipher suite can encipher.
+the number of messages the underlying cipher suite can encipher.
Has no effect when TLS version is configured (or negotiated) to 1.3""" server_ssl_opts_schema_client_renegotiation.label: @@ -1294,7 +1294,7 @@ common_ssl_opts_schema_secure_renegotiate.desc: """SSL parameter renegotiation is a feature that allows a client and a server to renegotiate the parameters of the SSL connection on the fly. RFC 5746 defines a more secure way of doing this. By enabling secure renegotiation, -you drop support for the insecure renegotiation, prone to MitM attacks.
+you drop support for the insecure renegotiation, prone to MitM attacks.
Has no effect when TLS version is configured (or negotiated) to 1.3""" common_ssl_opts_schema_secure_renegotiate.label: @@ -1330,7 +1330,7 @@ mqtt_max_packet_size.label: """Max Packet Size""" common_ssl_opts_schema_reuse_sessions.desc: -"""Enable TLS session reuse.
+"""Enable TLS session reuse.
Has no effect when TLS version is configured (or negotiated) to 1.3""" common_ssl_opts_schema_reuse_sessions.label: diff --git a/scripts/apps-version-check.sh b/scripts/apps-version-check.sh index b32b39fd1..b76e8d345 100755 --- a/scripts/apps-version-check.sh +++ b/scripts/apps-version-check.sh @@ -4,6 +4,12 @@ set -euo pipefail # ensure dir cd -P -- "$(dirname -- "${BASH_SOURCE[0]}")/.." +log_red() { + local RED='\033[0;31m' # Red + local NC='\033[0m' # No Color + echo -e "${RED}${1}${NC}" +} + # match any official release tag 'e*' and 'v*' latest_release="$(env PREV_TAG_MATCH_PATTERN='*' ./scripts/find-prev-rel-tag.sh)" echo "Version check compare base: $latest_release" @@ -47,7 +53,7 @@ for app in ${APPS}; do -- "$app_path/priv" \ -- "$app_path/c_src" | wc -l ) " if [ "$changed_lines" -gt 0 ]; then - echo "ERROR: $src_file needs a vsn bump" + log_red "ERROR: $src_file needs a vsn bump" bad_app_count=$(( bad_app_count + 1)) fi else diff --git a/scripts/merge-i18n.escript b/scripts/merge-i18n.escript new file mode 100755 index 000000000..dfd76f01f --- /dev/null +++ b/scripts/merge-i18n.escript @@ -0,0 +1,41 @@ +#!/usr/bin/env escript + +%% This script is only used at build time to generate the merged desc.en.hocon in JSON format +%% but NOT the file generated to _build/$PROFILE/lib/emqx_dashboard/priv (which is HOCON format). +%% +%% The generated JSON file is used as the source of truth when translating to other languages. + +-mode(compile). + +-define(RED, "\e[31m"). +-define(RESET, "\e[39m"). + +main(_) -> + try + _ = hocon:module_info() + catch + _:_ -> + fail("hocon module not found, please make sure the project is compiled") + end, + %% wildcard all .hocon files in rel/i18n + Files = filelib:wildcard("rel/i18n/*.hocon"), + case Files of + [_ | _] -> + ok; + [] -> + fail("No .hocon files found in rel/i18n") + end, + case hocon:files(Files) of + {ok, Map} -> + JSON = jiffy:encode(Map), + io:format("~s~n", [JSON]); + {error, Reason} -> + fail("~p~n", [Reason]) + end. + +fail(Str) -> + fail(Str, []). + +fail(Str, Args) -> + io:format(standard_error, ?RED ++ "ERROR: " ++ Str ++ ?RESET ++ "~n", Args), + halt(1). diff --git a/scripts/pre-compile.sh b/scripts/pre-compile.sh index 632aabfe4..dfad7c869 100755 --- a/scripts/pre-compile.sh +++ b/scripts/pre-compile.sh @@ -25,10 +25,12 @@ cd -P -- "$(dirname -- "${BASH_SOURCE[0]}")/.." # generate merged config files and English translation of the desc (desc.en.hocon) ./scripts/merge-config.escript +I18N_REPO_BRANCH="v$(./pkg-vsn.sh "${PROFILE_STR}" | tr -d '.' | cut -c 1-2)" + # download desc (i18n) translations curl -L --silent --show-error \ --output "apps/emqx_dashboard/priv/desc.zh.hocon" \ - 'https://raw.githubusercontent.com/emqx/emqx-i18n/main/desc.zh.hocon' + "https://raw.githubusercontent.com/emqx/emqx-i18n/${I18N_REPO_BRANCH}/desc.zh.hocon" # TODO # make sbom a build artifcat