Merge remote-tracking branch 'origin/release-53' into 0928-sync-release-53

Author: Zaiming (Stone) Shi
Date:   2023-09-28 13:53:03 +02:00
Commit: 1a13b2ac56

72 changed files with 1340 additions and 782 deletions

View File

@ -55,7 +55,7 @@ jobs:
cd apps/emqx cd apps/emqx
./rebar3 xref ./rebar3 xref
./rebar3 dialyzer ./rebar3 dialyzer
./rebar3 eunit -v ./rebar3 eunit -v --name 'eunit@127.0.0.1'
./rebar3 as standalone_test ct --name 'test@127.0.0.1' -v --readable=true ./rebar3 as standalone_test ct --name 'test@127.0.0.1' -v --readable=true
./rebar3 proper -d test/props ./rebar3 proper -d test/props
- uses: actions/upload-artifact@v3 - uses: actions/upload-artifact@v3
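The added --name flag starts the eunit run as a distributed Erlang node, presumably so that the gen_rpc-based eunit tests added to emqx_rpc later in this commit can address node() directly. A minimal illustrative check (hypothetical test module, not part of this commit):

-module(node_name_check_tests).
-include_lib("eunit/include/eunit.hrl").

%% With `rebar3 eunit --name eunit@127.0.0.1` the VM has a real node name;
%% without one, node() is 'nonode@nohost' and RPC-style tests cannot target it.
distributed_name_test() ->
    ?assertNotEqual('nonode@nohost', node()).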

View File

@ -15,8 +15,8 @@ endif
# Dashboard version # Dashboard version
# from https://github.com/emqx/emqx-dashboard5 # from https://github.com/emqx/emqx-dashboard5
export EMQX_DASHBOARD_VERSION ?= v1.4.1 export EMQX_DASHBOARD_VERSION ?= v1.5.0
export EMQX_EE_DASHBOARD_VERSION ?= e1.2.1 export EMQX_EE_DASHBOARD_VERSION ?= e1.3.0
# `:=` should be used here, otherwise the `$(shell ...)` will be executed every time when the variable is used # `:=` should be used here, otherwise the `$(shell ...)` will be executed every time when the variable is used
# In make 4.4+, for backward-compatibility the value from the original environment is used. # In make 4.4+, for backward-compatibility the value from the original environment is used.
@ -75,7 +75,7 @@ mix-deps-get: $(ELIXIR_COMMON_DEPS)
.PHONY: eunit .PHONY: eunit
eunit: $(REBAR) merge-config eunit: $(REBAR) merge-config
@ENABLE_COVER_COMPILE=1 $(REBAR) eunit -v -c --cover_export_name $(CT_COVER_EXPORT_PREFIX)-eunit @ENABLE_COVER_COMPILE=1 $(REBAR) eunit --name eunit@127.0.0.1 -v -c --cover_export_name $(CT_COVER_EXPORT_PREFIX)-eunit
.PHONY: proper .PHONY: proper
proper: $(REBAR) proper: $(REBAR)

View File

@ -1,6 +1,6 @@
%% This additional config file is used when the config 'cluster.proto_dist' in emqx.conf is set to 'inet_tls'. %% This additional config file is used when the config 'cluster.proto_dist' in emqx.conf is set to 'inet_tls'.
%% Which means the EMQX nodes will connect to each other over TLS. %% Which means the EMQX nodes will connect to each other over TLS.
%% For more information about inter-broker security, see: https://docs.emqx.com/en/enterprise/v5.0/deploy/cluster/security.html %% For more information about inter-broker security, see: https://docs.emqx.com/en/enterprise/v5.3/deploy/cluster/security.html
%% For more information in technical details see: http://erlang.org/doc/apps/ssl/ssl_distribution.html %% For more information in technical details see: http://erlang.org/doc/apps/ssl/ssl_distribution.html

View File

@ -35,7 +35,7 @@
-define(EMQX_RELEASE_CE, "5.2.1"). -define(EMQX_RELEASE_CE, "5.2.1").
%% Enterprise edition %% Enterprise edition
-define(EMQX_RELEASE_EE, "5.3.0-alpha.1"). -define(EMQX_RELEASE_EE, "5.3.0-alpha.2").
%% The HTTP API version %% The HTTP API version
-define(EMQX_API_VERSION, "5.0"). -define(EMQX_API_VERSION, "5.0").

View File

@ -61,7 +61,7 @@
) )
end). end).
-define(AUDIT(_Level_, _Msg_, _Meta_), begin -define(AUDIT(_Level_, _From_, _Meta_), begin
case emqx_config:get([log, audit], #{enable => false}) of case emqx_config:get([log, audit], #{enable => false}) of
#{enable := false} -> #{enable := false} ->
ok; ok;
@ -71,8 +71,8 @@ end).
emqx_trace:log( emqx_trace:log(
_Level_, _Level_,
[{emqx_audit, fun(L, _) -> L end, undefined, undefined}], [{emqx_audit, fun(L, _) -> L end, undefined, undefined}],
{report, _Msg_}, _Msg = undefined,
_Meta_ _Meta_#{from => _From_}
); );
gt -> gt ->
ok ok
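The macro's second argument is no longer a log report but a source tag, which is folded into the metadata as from while the message itself is passed to emqx_trace:log/4 as undefined. For example, the dashboard audit hook updated later in this commit now calls:

?AUDIT(
    Level,
    From,    %% "dashboard" | "rest_api"
    Meta#{username => binary_to_list(Username), node => node()}
)

and the CLI path passes the atom cli as the tag.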

View File

@ -24,7 +24,7 @@ IsQuicSupp = fun() ->
end, end,
Bcrypt = {bcrypt, {git, "https://github.com/emqx/erlang-bcrypt.git", {tag, "0.6.0"}}}, Bcrypt = {bcrypt, {git, "https://github.com/emqx/erlang-bcrypt.git", {tag, "0.6.0"}}},
Quicer = {quicer, {git, "https://github.com/emqx/quic.git", {tag, "0.0.200"}}}. Quicer = {quicer, {git, "https://github.com/emqx/quic.git", {tag, "0.0.201"}}}.
Dialyzer = fun(Config) -> Dialyzer = fun(Config) ->
{dialyzer, OldDialyzerConfig} = lists:keyfind(dialyzer, 1, Config), {dialyzer, OldDialyzerConfig} = lists:keyfind(dialyzer, 1, Config),

View File

@ -151,13 +151,22 @@ tr_file_handlers(Conf) ->
lists:map(fun tr_file_handler/1, Handlers). lists:map(fun tr_file_handler/1, Handlers).
tr_file_handler({HandlerName, SubConf}) -> tr_file_handler({HandlerName, SubConf}) ->
FilePath = conf_get("path", SubConf),
RotationCount = conf_get("rotation_count", SubConf),
RotationSize = conf_get("rotation_size", SubConf),
Type =
case RotationSize of
infinity -> halt;
_ -> wrap
end,
HandlerConf = log_handler_conf(SubConf),
{handler, atom(HandlerName), logger_disk_log_h, #{ {handler, atom(HandlerName), logger_disk_log_h, #{
level => conf_get("level", SubConf), level => conf_get("level", SubConf),
config => (log_handler_conf(SubConf))#{ config => HandlerConf#{
type => wrap, type => Type,
file => conf_get("path", SubConf), file => FilePath,
max_no_files => conf_get("rotation_count", SubConf), max_no_files => RotationCount,
max_no_bytes => conf_get("rotation_size", SubConf) max_no_bytes => RotationSize
}, },
formatter => log_formatter(HandlerName, SubConf), formatter => log_formatter(HandlerName, SubConf),
filters => log_filter(HandlerName, SubConf), filters => log_filter(HandlerName, SubConf),
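For reference, this is the shape of the logger_disk_log_h handler the translation now produces (values illustrative, formatter and filters omitted): a finite rotation_size yields a size-based wrap log, while rotation_size = infinity yields a halt log, since OTP's disk_log only accepts a finite max_no_bytes for wrap logs.

{handler, default, logger_disk_log_h, #{
    level => warning,
    config => #{
        type => wrap,               %% becomes halt when rotation_size = infinity
        file => "log/emqx.log",
        max_no_files => 10,         %% rotation_count
        max_no_bytes => 52428800    %% rotation_size (50MB)
    }
}}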
@ -216,38 +225,26 @@ log_formatter(HandlerName, Conf) ->
end, end,
SingleLine = conf_get("single_line", Conf), SingleLine = conf_get("single_line", Conf),
Depth = conf_get("max_depth", Conf), Depth = conf_get("max_depth", Conf),
Format =
case HandlerName of
?AUDIT_HANDLER ->
json;
_ ->
conf_get("formatter", Conf)
end,
do_formatter( do_formatter(
HandlerName, conf_get("formatter", Conf), CharsLimit, SingleLine, TimeOffSet, Depth Format, CharsLimit, SingleLine, TimeOffSet, Depth
). ).
%% helpers %% helpers
do_formatter(?AUDIT_HANDLER, _, CharsLimit, SingleLine, TimeOffSet, Depth) -> do_formatter(json, CharsLimit, SingleLine, TimeOffSet, Depth) ->
{emqx_logger_jsonfmt, #{
template => [
time,
" [",
level,
"] ",
%% http api
{method, [code, " ", method, " ", operate_id, " ", username, " "], []},
%% cli
{cmd, [cmd, " "], []},
msg,
"\n"
],
chars_limit => CharsLimit,
single_line => SingleLine,
time_offset => TimeOffSet,
depth => Depth
}};
do_formatter(_, json, CharsLimit, SingleLine, TimeOffSet, Depth) ->
{emqx_logger_jsonfmt, #{ {emqx_logger_jsonfmt, #{
chars_limit => CharsLimit, chars_limit => CharsLimit,
single_line => SingleLine, single_line => SingleLine,
time_offset => TimeOffSet, time_offset => TimeOffSet,
depth => Depth depth => Depth
}}; }};
do_formatter(_, text, CharsLimit, SingleLine, TimeOffSet, Depth) -> do_formatter(text, CharsLimit, SingleLine, TimeOffSet, Depth) ->
{emqx_logger_textfmt, #{ {emqx_logger_textfmt, #{
template => [time, " [", level, "] ", msg, "\n"], template => [time, " [", level, "] ", msg, "\n"],
chars_limit => CharsLimit, chars_limit => CharsLimit,

View File

@ -678,7 +678,7 @@ return_change_result(ConfKeyPath, {{update, Req}, Opts}) ->
case Req =/= ?TOMBSTONE_CONFIG_CHANGE_REQ of case Req =/= ?TOMBSTONE_CONFIG_CHANGE_REQ of
true -> true ->
#{ #{
config => emqx_config:get(ConfKeyPath), config => emqx_config:get(ConfKeyPath, undefined),
raw_config => return_rawconf(ConfKeyPath, Opts) raw_config => return_rawconf(ConfKeyPath, Opts)
}; };
false -> false ->

View File

@ -437,6 +437,10 @@ do_start_listener(quic, ListenerName, #{bind := Bind} = Opts) ->
case maps:get(cacertfile, SSLOpts, undefined) of case maps:get(cacertfile, SSLOpts, undefined) of
undefined -> undefined ->
[]; [];
<<>> ->
[];
"" ->
[];
CaCertFile -> CaCertFile ->
[{cacertfile, emqx_schema:naive_env_interpolation(CaCertFile)}] [{cacertfile, emqx_schema:naive_env_interpolation(CaCertFile)}]
end ++ end ++

View File

@ -51,7 +51,8 @@
-type config() :: #{ -type config() :: #{
depth => pos_integer() | unlimited, depth => pos_integer() | unlimited,
report_cb => logger:report_cb(), report_cb => logger:report_cb(),
single_line => boolean() single_line => boolean(),
chars_limit => unlimited | pos_integer()
}. }.
-define(IS_STRING(String), (is_list(String) orelse is_binary(String))). -define(IS_STRING(String), (is_list(String) orelse is_binary(String))).
@ -64,19 +65,17 @@
best_effort_json(Input) -> best_effort_json(Input) ->
best_effort_json(Input, [pretty, force_utf8]). best_effort_json(Input, [pretty, force_utf8]).
best_effort_json(Input, Opts) -> best_effort_json(Input, Opts) ->
Config = #{depth => unlimited, single_line => true}, Config = #{depth => unlimited, single_line => true, chars_limit => unlimited},
JsonReady = best_effort_json_obj(Input, Config), JsonReady = best_effort_json_obj(Input, Config),
emqx_utils_json:encode(JsonReady, Opts). emqx_utils_json:encode(JsonReady, Opts).
-spec format(logger:log_event(), config()) -> iodata(). -spec format(logger:log_event(), config()) -> iodata().
format(#{level := Level, msg := Msg, meta := Meta} = Event, Config0) when is_map(Config0) -> format(#{level := Level, msg := Msg, meta := Meta}, Config0) when is_map(Config0) ->
Config = add_default_config(Config0), Config = add_default_config(Config0),
MsgBin = format(Msg, Meta#{level => Level}, Config), [format(Msg, Meta#{level => Level}, Config), "\n"].
logger_formatter:format(Event#{msg => {string, MsgBin}}, Config).
format(Msg, Meta0, Config) -> format(Msg, Meta, Config) ->
Meta = maps:without([time, level], Meta0), Data =
Data0 =
try maybe_format_msg(Msg, Meta, Config) of try maybe_format_msg(Msg, Meta, Config) of
Map when is_map(Map) -> Map when is_map(Map) ->
maps:merge(Map, Meta); maps:merge(Map, Meta);
@ -92,9 +91,10 @@ format(Msg, Meta0, Config) ->
fmt_stacktrace => S fmt_stacktrace => S
} }
end, end,
Data = maps:without([report_cb], Data0), emqx_utils_json:encode(json_obj_root(Data, Config)).
emqx_utils_json:encode(json_obj(Data, Config)).
maybe_format_msg(undefined, _Meta, _Config) ->
#{};
maybe_format_msg({report, Report} = Msg, #{report_cb := Cb} = Meta, Config) -> maybe_format_msg({report, Report} = Msg, #{report_cb := Cb} = Meta, Config) ->
case is_map(Report) andalso Cb =:= ?DEFAULT_FORMATTER of case is_map(Report) andalso Cb =:= ?DEFAULT_FORMATTER of
true -> true ->
@ -128,7 +128,7 @@ format_msg({report, Report}, #{report_cb := Fun} = Meta, Config) when is_functio
end; end;
format_msg({report, Report}, #{report_cb := Fun}, Config) when is_function(Fun, 2) -> format_msg({report, Report}, #{report_cb := Fun}, Config) when is_function(Fun, 2) ->
%% a format callback function of arity 2 %% a format callback function of arity 2
case Fun(Report, maps:with([depth, single_line], Config)) of case Fun(Report, maps:with([depth, single_line, chars_limit], Config)) of
Chardata when ?IS_STRING(Chardata) -> Chardata when ?IS_STRING(Chardata) ->
try try
unicode:characters_to_binary(Chardata, utf8) unicode:characters_to_binary(Chardata, utf8)
@ -152,11 +152,13 @@ format_msg({Fmt, Args}, _Meta, Config) ->
do_format_msg(Format0, Args, #{ do_format_msg(Format0, Args, #{
depth := Depth, depth := Depth,
single_line := SingleLine single_line := SingleLine,
chars_limit := Limit
}) -> }) ->
Opts = chars_limit_to_opts(Limit),
Format1 = io_lib:scan_format(Format0, Args), Format1 = io_lib:scan_format(Format0, Args),
Format = reformat(Format1, Depth, SingleLine), Format = reformat(Format1, Depth, SingleLine),
Text0 = io_lib:build_text(Format, []), Text0 = io_lib:build_text(Format, Opts),
Text = Text =
case SingleLine of case SingleLine of
true -> re:replace(Text0, ",?\r?\n\s*", ", ", [{return, list}, global, unicode]); true -> re:replace(Text0, ",?\r?\n\s*", ", ", [{return, list}, global, unicode]);
@ -164,6 +166,9 @@ do_format_msg(Format0, Args, #{
end, end,
trim(unicode:characters_to_binary(Text, utf8)). trim(unicode:characters_to_binary(Text, utf8)).
chars_limit_to_opts(unlimited) -> [];
chars_limit_to_opts(Limit) -> [{chars_limit, Limit}].
%% Get rid of the leading spaces. %% Get rid of the leading spaces.
%% leave alone the trailing spaces. %% leave alone the trailing spaces.
trim(<<$\s, Rest/binary>>) -> trim(Rest); trim(<<$\s, Rest/binary>>) -> trim(Rest);
@ -221,10 +226,6 @@ best_effort_json_obj(Map, Config) ->
do_format_msg("~p", [Map], Config) do_format_msg("~p", [Map], Config)
end. end.
json([], _) ->
"";
json(<<"">>, _) ->
"\"\"";
json(A, _) when is_atom(A) -> atom_to_binary(A, utf8); json(A, _) when is_atom(A) -> atom_to_binary(A, utf8);
json(I, _) when is_integer(I) -> I; json(I, _) when is_integer(I) -> I;
json(F, _) when is_float(F) -> F; json(F, _) when is_float(F) -> F;
@ -233,52 +234,76 @@ json(P, C) when is_port(P) -> json(port_to_list(P), C);
json(F, C) when is_function(F) -> json(erlang:fun_to_list(F), C); json(F, C) when is_function(F) -> json(erlang:fun_to_list(F), C);
json(B, Config) when is_binary(B) -> json(B, Config) when is_binary(B) ->
best_effort_unicode(B, Config); best_effort_unicode(B, Config);
json(L, Config) when is_list(L), is_integer(hd(L)) ->
best_effort_unicode(L, Config);
json(M, Config) when is_list(M), is_tuple(hd(M)), tuple_size(hd(M)) =:= 2 -> json(M, Config) when is_list(M), is_tuple(hd(M)), tuple_size(hd(M)) =:= 2 ->
best_effort_json_obj(M, Config); best_effort_json_obj(M, Config);
json(L, Config) when is_list(L) -> json(L, Config) when is_list(L) ->
[json(I, Config) || I <- L]; case lists:all(fun erlang:is_binary/1, L) of
true ->
%% string array
L;
false ->
try unicode:characters_to_binary(L, utf8) of
B when is_binary(B) -> B;
_ -> [json(I, Config) || I <- L]
catch
_:_ ->
[json(I, Config) || I <- L]
end
end;
json(Map, Config) when is_map(Map) -> json(Map, Config) when is_map(Map) ->
best_effort_json_obj(Map, Config); best_effort_json_obj(Map, Config);
json(Term, Config) -> json(Term, Config) ->
do_format_msg("~p", [Term], Config). do_format_msg("~p", [Term], Config).
json_obj_root(Data0, Config) ->
Time = maps:get(time, Data0, undefined),
Level = maps:get(level, Data0, undefined),
Msg1 =
case maps:get(msg, Data0, undefined) of
undefined ->
maps:get('$kind', Data0, undefined);
Msg0 ->
Msg0
end,
Msg =
case Msg1 of
undefined ->
undefined;
_ ->
json(Msg1, Config)
end,
Mfal = emqx_utils:format_mfal(Data0),
Data =
maps:fold(
fun(K, V, D) ->
{K1, V1} = json_kv(K, V, Config),
[{K1, V1} | D]
end,
[],
maps:without(
[time, gl, file, report_cb, msg, '$kind', mfa, level, line, is_trace], Data0
)
),
lists:filter(
fun({_, V}) -> V =/= undefined end,
[{time, Time}, {level, Level}, {msg, Msg}, {mfa, Mfal}]
) ++ Data.
json_obj(Data, Config) -> json_obj(Data, Config) ->
maps:fold( maps:fold(
fun(K, V, D) -> fun(K, V, D) ->
json_kv(K, V, D, Config) {K1, V1} = json_kv(K, V, Config),
maps:put(K1, V1, D)
end, end,
maps:new(), maps:new(),
Data Data
). ).
json_kv(mfa, {M, F, A}, Data, _Config) -> json_kv(K0, V, Config) ->
maps:put(
mfa,
<<
(atom_to_binary(M, utf8))/binary,
$:,
(atom_to_binary(F, utf8))/binary,
$/,
(integer_to_binary(A))/binary
>>,
Data
);
%% snabbkaffe
json_kv('$kind', Kind, Data, Config) ->
maps:put(msg, json(Kind, Config), Data);
json_kv(gl, _, Data, _Config) ->
%% drop gl because it's not interesting
Data;
json_kv(file, _, Data, _Config) ->
%% drop 'file' because we have mfa
Data;
json_kv(K0, V, Data, Config) ->
K = json_key(K0), K = json_key(K0),
case is_map(V) of case is_map(V) of
true -> maps:put(json(K, Config), best_effort_json_obj(V, Config), Data); true -> {K, best_effort_json_obj(V, Config)};
false -> maps:put(json(K, Config), json(V, Config), Data) false -> {K, json(V, Config)}
end. end.
json_key(A) when is_atom(A) -> json_key(atom_to_binary(A, utf8)); json_key(A) when is_atom(A) -> json_key(atom_to_binary(A, utf8));
@ -373,23 +398,83 @@ p_config() ->
proper_types:shrink_list( proper_types:shrink_list(
[ [
{depth, p_limit()}, {depth, p_limit()},
{single_line, proper_types:boolean()} {single_line, proper_types:boolean()},
{chars_limit, p_limit()}
] ]
). ).
%% NOTE: pretty-printing format is asserted in the test
%% This affects the CLI output format, consult the team before changing
%% the format.
best_effort_json_test() -> best_effort_json_test() ->
?assertEqual( ?assertEqual(
<<"{\n \n}">>, <<"{\n \n}">>,
emqx_logger_jsonfmt:best_effort_json([]) best_effort_json([])
), ),
?assertEqual( ?assertEqual(
<<"{\n \"key\" : [\n \n ]\n}">>, <<"{\n \"key\" : [\n \n ]\n}">>,
emqx_logger_jsonfmt:best_effort_json(#{key => []}) best_effort_json(#{key => []})
), ),
?assertEqual( ?assertEqual(
<<"[\n {\n \"key\" : [\n \n ]\n }\n]">>, <<"[\n {\n \"key\" : [\n \n ]\n }\n]">>,
emqx_logger_jsonfmt:best_effort_json([#{key => []}]) best_effort_json([#{key => []}])
), ),
ok. ok.
config() ->
#{
chars_limit => unlimited,
depth => unlimited,
single_line => true
}.
make_log(Report) ->
#{
level => info,
msg => Report,
meta => #{time => 1111, report_cb => ?DEFAULT_FORMATTER}
}.
ensure_json_output_test() ->
JSON = format(make_log({report, #{foo => bar}}), config()),
?assert(is_map(emqx_utils_json:decode(JSON))),
ok.
chars_limit_not_applied_on_raw_map_fields_test() ->
Limit = 32,
Len = 100,
LongStr = lists:duplicate(Len, $a),
Config0 = config(),
Config = Config0#{
chars_limit => Limit
},
JSON = format(make_log({report, #{foo => LongStr}}), Config),
#{<<"foo">> := LongStr1} = emqx_utils_json:decode(JSON),
?assertEqual(Len, size(LongStr1)),
ok.
chars_limit_applied_on_format_result_test() ->
Limit = 32,
Len = 100,
LongStr = lists:duplicate(Len, $a),
Config0 = config(),
Config = Config0#{
chars_limit => Limit
},
JSON = format(make_log({string, LongStr}), Config),
#{<<"msg">> := LongStr1} = emqx_utils_json:decode(JSON),
?assertEqual(Limit, size(LongStr1)),
ok.
string_array_test() ->
Array = #{<<"arr">> => [<<"a">>, <<"b">>]},
Encoded = emqx_utils_json:encode(json(Array, config())),
?assertEqual(Array, emqx_utils_json:decode(Encoded)).
iolist_test() ->
Iolist = #{iolist => ["a", ["b"]]},
Concat = #{<<"iolist">> => <<"ab">>},
Encoded = emqx_utils_json:encode(json(Iolist, config())),
?assertEqual(Concat, emqx_utils_json:decode(Encoded)).
-endif. -endif.

View File

@ -56,8 +56,7 @@ enrich_report(ReportRaw, Meta) ->
end, end,
ClientId = maps:get(clientid, Meta, undefined), ClientId = maps:get(clientid, Meta, undefined),
Peer = maps:get(peername, Meta, undefined), Peer = maps:get(peername, Meta, undefined),
MFA = maps:get(mfa, Meta, undefined), MFA = emqx_utils:format_mfal(Meta),
Line = maps:get(line, Meta, undefined),
Msg = maps:get(msg, ReportRaw, undefined), Msg = maps:get(msg, ReportRaw, undefined),
%% turn it into a list so that the order of the fields is determined %% turn it into a list so that the order of the fields is determined
lists:foldl( lists:foldl(
@ -70,8 +69,7 @@ enrich_report(ReportRaw, Meta) ->
{topic, try_format_unicode(Topic)}, {topic, try_format_unicode(Topic)},
{clientid, try_format_unicode(ClientId)}, {clientid, try_format_unicode(ClientId)},
{peername, Peer}, {peername, Peer},
{line, Line}, {mfa, try_format_unicode(MFA)},
{mfa, mfa(MFA)},
{msg, Msg} {msg, Msg}
] ]
). ).
@ -84,7 +82,7 @@ try_format_unicode(Char) ->
case unicode:characters_to_list(Char) of case unicode:characters_to_list(Char) of
{error, _, _} -> error; {error, _, _} -> error;
{incomplete, _, _} -> error; {incomplete, _, _} -> error;
Binary -> Binary List1 -> List1
end end
catch catch
_:_ -> _:_ ->
@ -95,8 +93,8 @@ try_format_unicode(Char) ->
_ -> List _ -> List
end. end.
enrich_mfa({Fmt, Args}, #{mfa := Mfa, line := Line}) when is_list(Fmt) -> enrich_mfa({Fmt, Args}, Data) when is_list(Fmt) ->
{Fmt ++ " mfa: ~ts line: ~w", Args ++ [mfa(Mfa), Line]}; {Fmt ++ " mfa: ~ts", Args ++ [emqx_utils:format_mfal(Data)]};
enrich_mfa(Msg, _) -> enrich_mfa(Msg, _) ->
Msg. Msg.
@ -113,6 +111,3 @@ enrich_topic({Fmt, Args}, #{topic := Topic}) when is_list(Fmt) ->
{" topic: ~ts" ++ Fmt, [Topic | Args]}; {" topic: ~ts" ++ Fmt, [Topic | Args]};
enrich_topic(Msg, _) -> enrich_topic(Msg, _) ->
Msg. Msg.
mfa(undefined) -> undefined;
mfa({M, F, A}) -> [atom_to_list(M), ":", atom_to_list(F), "/" ++ integer_to_list(A)].
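emqx_utils:format_mfal/1 (its definition is not part of this excerpt) replaces the local mfa/1 helper removed above; the assumption is that it renders the mfa metadata as Module:Function/Arity, with the trailing "l" suggesting the line number is appended when available. A rough shell sketch of that assumed behaviour, mirroring the removed helper:

FormatMfal = fun
    (#{mfa := {M, F, A}}) ->
        [atom_to_list(M), $:, atom_to_list(F), $/, integer_to_list(A)];
    (_) ->
        undefined
end,
FormatMfal(#{mfa => {emqx_trace, log, 4}}).
%% => an iolist that flattens to "emqx_trace:log/4"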

View File

@ -43,6 +43,10 @@
erpc_multicall/1 erpc_multicall/1
]). ]).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-compile( -compile(
{inline, [ {inline, [
rpc_node/1, rpc_node/1,
@ -75,15 +79,15 @@
-spec call(node(), module(), atom(), list()) -> call_result(). -spec call(node(), module(), atom(), list()) -> call_result().
call(Node, Mod, Fun, Args) -> call(Node, Mod, Fun, Args) ->
filter_result(gen_rpc:call(rpc_node(Node), Mod, Fun, Args)). maybe_badrpc(gen_rpc:call(rpc_node(Node), Mod, Fun, Args)).
-spec call(term(), node(), module(), atom(), list()) -> call_result(). -spec call(term(), node(), module(), atom(), list()) -> call_result().
call(Key, Node, Mod, Fun, Args) -> call(Key, Node, Mod, Fun, Args) ->
filter_result(gen_rpc:call(rpc_node({Key, Node}), Mod, Fun, Args)). maybe_badrpc(gen_rpc:call(rpc_node({Key, Node}), Mod, Fun, Args)).
-spec call(term(), node(), module(), atom(), list(), timeout()) -> call_result(). -spec call(term(), node(), module(), atom(), list(), timeout()) -> call_result().
call(Key, Node, Mod, Fun, Args, Timeout) -> call(Key, Node, Mod, Fun, Args, Timeout) ->
filter_result(gen_rpc:call(rpc_node({Key, Node}), Mod, Fun, Args, Timeout)). maybe_badrpc(gen_rpc:call(rpc_node({Key, Node}), Mod, Fun, Args, Timeout)).
-spec multicall([node()], module(), atom(), list()) -> multicall_result(). -spec multicall([node()], module(), atom(), list()) -> multicall_result().
multicall(Nodes, Mod, Fun, Args) -> multicall(Nodes, Mod, Fun, Args) ->
@ -127,18 +131,15 @@ rpc_nodes([], Acc) ->
rpc_nodes([Node | Nodes], Acc) -> rpc_nodes([Node | Nodes], Acc) ->
rpc_nodes(Nodes, [rpc_node(Node) | Acc]). rpc_nodes(Nodes, [rpc_node(Node) | Acc]).
filter_result({Error, Reason}) when maybe_badrpc({Error, Reason}) when Error =:= badrpc; Error =:= badtcp ->
Error =:= badrpc; Error =:= badtcp
->
{badrpc, Reason}; {badrpc, Reason};
filter_result(Delivery) -> maybe_badrpc(Delivery) ->
Delivery. Delivery.
max_client_num() -> max_client_num() ->
emqx:get_config([rpc, tcp_client_num], ?DefaultClientNum). emqx:get_config([rpc, tcp_client_num], ?DefaultClientNum).
-spec unwrap_erpc(emqx_rpc:erpc(A) | [emqx_rpc:erpc(A)]) -> A | {error, _Err} | list(). -spec unwrap_erpc(emqx_rpc:erpc(A) | [emqx_rpc:erpc(A)]) -> A | {error, _Err} | list().
unwrap_erpc(Res) when is_list(Res) -> unwrap_erpc(Res) when is_list(Res) ->
[unwrap_erpc(R) || R <- Res]; [unwrap_erpc(R) || R <- Res];
unwrap_erpc({ok, A}) -> unwrap_erpc({ok, A}) ->
@ -151,3 +152,73 @@ unwrap_erpc({exit, Err}) ->
{error, Err}; {error, Err};
unwrap_erpc({error, {erpc, Err}}) -> unwrap_erpc({error, {erpc, Err}}) ->
{error, Err}. {error, Err}.
-ifdef(TEST).
badrpc_call_test_() ->
application:ensure_all_started(gen_rpc),
Node = node(),
[
{"throw", fun() ->
?assertEqual(foo, call(Node, erlang, throw, [foo]))
end},
{"error", fun() ->
?assertMatch({badrpc, {'EXIT', {foo, _}}}, call(Node, erlang, error, [foo]))
end},
{"exit", fun() ->
?assertEqual({badrpc, {'EXIT', foo}}, call(Node, erlang, exit, [foo]))
end},
{"timeout", fun() ->
?assertEqual({badrpc, timeout}, call(key, Node, timer, sleep, [1000], 100))
end},
{"noconnection", fun() ->
%% mute crash report from gen_rpc
logger:set_primary_config(level, critical),
try
?assertEqual(
{badrpc, nxdomain}, call(key, 'no@such.node', foo, bar, [])
)
after
logger:set_primary_config(level, notice)
end
end}
].
multicall_test() ->
application:ensure_all_started(gen_rpc),
logger:set_primary_config(level, critical),
BadNode = 'no@such.node',
ThisNode = node(),
Nodes = [ThisNode, BadNode],
Call4 = fun(M, F, A) -> multicall(Nodes, M, F, A) end,
Call5 = fun(Key, M, F, A) -> multicall(Key, Nodes, M, F, A) end,
try
?assertMatch({[foo], [{BadNode, _}]}, Call4(erlang, throw, [foo])),
?assertMatch({[], [{ThisNode, _}, {BadNode, _}]}, Call4(erlang, error, [foo])),
?assertMatch({[], [{ThisNode, _}, {BadNode, _}]}, Call4(erlang, exit, [foo])),
?assertMatch({[], [{ThisNode, _}, {BadNode, _}]}, Call5(key, foo, bar, []))
after
logger:set_primary_config(level, notice)
end.
unwrap_erpc_test_() ->
Nodes = [node()],
MultiC = fun(M, F, A) -> unwrap_erpc(erpc:multicall(Nodes, M, F, A, 100)) end,
[
{"throw", fun() ->
?assertEqual([{error, foo}], MultiC(erlang, throw, [foo]))
end},
{"error", fun() ->
?assertEqual([{error, foo}], MultiC(erlang, error, [foo]))
end},
{"exit", fun() ->
?assertEqual([{error, {exception, foo}}], MultiC(erlang, exit, [foo]))
end},
{"noconnection", fun() ->
?assertEqual(
[{error, noconnection}], unwrap_erpc(erpc:multicall(['no@such.node'], foo, bar, []))
)
end}
].
-endif.

View File

@ -271,9 +271,12 @@ find_config_references(Root) ->
is_file_reference(Stack) -> is_file_reference(Stack) ->
lists:any( lists:any(
fun(KP) -> lists:prefix(lists:reverse(KP), Stack) end, fun(KP) -> lists:prefix(lists:reverse(KP), Stack) end,
emqx_tls_lib:ssl_file_conf_keypaths() conf_keypaths()
). ).
conf_keypaths() ->
emqx_tls_lib:ssl_file_conf_keypaths().
mk_fileref(AbsPath) -> mk_fileref(AbsPath) ->
case emqx_utils_fs:read_info(AbsPath) of case emqx_utils_fs:read_info(AbsPath) of
{ok, Info} -> {ok, Info} ->

View File

@ -50,11 +50,17 @@
-define(IS_FALSE(Val), ((Val =:= false) orelse (Val =:= <<"false">>))). -define(IS_FALSE(Val), ((Val =:= false) orelse (Val =:= <<"false">>))).
-define(SSL_FILE_OPT_PATHS, [ -define(SSL_FILE_OPT_PATHS, [
%% common ssl options
[<<"keyfile">>], [<<"keyfile">>],
[<<"certfile">>], [<<"certfile">>],
[<<"cacertfile">>], [<<"cacertfile">>],
[<<"ocsp">>, <<"issuer_pem">>] %% OCSP
[<<"ocsp">>, <<"issuer_pem">>],
%% SSO
[<<"sp_public_key">>],
[<<"sp_private_key">>]
]). ]).
-define(SSL_FILE_OPT_PATHS_A, [ -define(SSL_FILE_OPT_PATHS_A, [
[keyfile], [keyfile],
[certfile], [certfile],

View File

@ -52,7 +52,7 @@
%% (e.g. in `init_per_suite/1` / `init_per_group/2`), providing the appspecs %% (e.g. in `init_per_suite/1` / `init_per_group/2`), providing the appspecs
%% and unique work dir for the testrun (e.g. `work_dir/1`). Save the result %% and unique work dir for the testrun (e.g. `work_dir/1`). Save the result
%% in a context. %% in a context.
%% 3. Call `emqx_cth_sutie:stop/1` to stop the applications after the testrun %% 3. Call `emqx_cth_suite:stop/1` to stop the applications after the testrun
%% finishes (e.g. in `end_per_suite/1` / `end_per_group/2`), providing the %% finishes (e.g. in `end_per_suite/1` / `end_per_group/2`), providing the
%% result from step 2. %% result from step 2.
-module(emqx_cth_suite). -module(emqx_cth_suite).
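The three-step lifecycle described in the comment above is exactly what the emqx_conf logger suite switches to later in this commit; the minimal pattern looks like this (?BASE_CONF being the suite's own HOCON snippet):

init_per_suite(Config) ->
    Apps = emqx_cth_suite:start(
        [
            emqx,
            {emqx_conf, ?BASE_CONF}    %% appspec with extra HOCON config
        ],
        #{work_dir => emqx_cth_suite:work_dir(Config)}
    ),
    [{apps, Apps} | Config].

end_per_suite(Config) ->
    ok = emqx_cth_suite:stop(?config(apps, Config)).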
@ -245,6 +245,9 @@ spec_fmt(ffun, {_, X}) -> X.
maybe_configure_app(_App, #{config := false}) -> maybe_configure_app(_App, #{config := false}) ->
ok; ok;
maybe_configure_app(_App, AppConfig = #{schema_mod := SchemaModule}) when is_atom(SchemaModule) ->
#{config := Config} = AppConfig,
configure_app(SchemaModule, Config);
maybe_configure_app(App, #{config := Config}) -> maybe_configure_app(App, #{config := Config}) ->
case app_schema(App) of case app_schema(App) of
{ok, SchemaModule} -> {ok, SchemaModule} ->

View File

@ -1,195 +0,0 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(prop_emqx_rpc).
-include_lib("proper/include/proper.hrl").
-include_lib("eunit/include/eunit.hrl").
-define(NODENAME, 'test@127.0.0.1').
-define(ALL(Vars, Types, Exprs),
?SETUP(
fun() ->
State = do_setup(),
fun() -> do_teardown(State) end
end,
?FORALL(Vars, Types, Exprs)
)
).
%%--------------------------------------------------------------------
%% Properties
%%--------------------------------------------------------------------
prop_node() ->
?ALL(
Node0,
nodename(),
begin
Node = punch(Node0),
?assert(emqx_rpc:cast(Node, erlang, system_time, [])),
case emqx_rpc:call(Node, erlang, system_time, []) of
{badrpc, _Reason} -> true;
Delivery when is_integer(Delivery) -> true;
_Other -> false
end
end
).
prop_node_with_key() ->
?ALL(
{Node0, Key},
nodename_with_key(),
begin
Node = punch(Node0),
?assert(emqx_rpc:cast(Key, Node, erlang, system_time, [])),
case emqx_rpc:call(Key, Node, erlang, system_time, []) of
{badrpc, _Reason} -> true;
Delivery when is_integer(Delivery) -> true;
_Other -> false
end
end
).
prop_nodes() ->
?ALL(
Nodes0,
nodesname(),
begin
Nodes = punch(Nodes0),
case emqx_rpc:multicall(Nodes, erlang, system_time, []) of
{RealResults, RealBadNodes} when
is_list(RealResults);
is_list(RealBadNodes)
->
true;
_Other ->
false
end
end
).
prop_nodes_with_key() ->
?ALL(
{Nodes0, Key},
nodesname_with_key(),
begin
Nodes = punch(Nodes0),
case emqx_rpc:multicall(Key, Nodes, erlang, system_time, []) of
{RealResults, RealBadNodes} when
is_list(RealResults);
is_list(RealBadNodes)
->
true;
_Other ->
false
end
end
).
%%--------------------------------------------------------------------
%% Helper
%%--------------------------------------------------------------------
do_setup() ->
ensure_distributed_nodename(),
ok = logger:set_primary_config(#{level => warning}),
{ok, _Apps} = application:ensure_all_started(gen_rpc),
ok = application:set_env(gen_rpc, call_receive_timeout, 100),
ok = meck:new(gen_rpc, [passthrough, no_history]),
ok = meck:expect(
gen_rpc,
multicall,
fun(Nodes, Mod, Fun, Args) ->
gen_rpc:multicall(Nodes, Mod, Fun, Args, 100)
end
).
do_teardown(_) ->
ok = net_kernel:stop(),
ok = application:stop(gen_rpc),
ok = meck:unload(gen_rpc),
%% wait for tcp close
timer:sleep(2500).
ensure_distributed_nodename() ->
case net_kernel:start([?NODENAME]) of
{ok, _} ->
ok;
{error, {already_started, _}} ->
net_kernel:stop(),
net_kernel:start([?NODENAME]);
{error, {{shutdown, {_, _, {'EXIT', nodistribution}}}, _}} ->
%% start epmd first
spawn_link(fun() -> os:cmd("epmd") end),
timer:sleep(100),
net_kernel:start([?NODENAME])
end.
%%--------------------------------------------------------------------
%% Generator
%%--------------------------------------------------------------------
nodename() ->
?LET(
{NodePrefix, HostName},
{node_prefix(), hostname()},
begin
Node = NodePrefix ++ "@" ++ HostName,
list_to_atom(Node)
end
).
nodename_with_key() ->
?LET(
{NodePrefix, HostName, Key},
{node_prefix(), hostname(), choose(0, 10)},
begin
Node = NodePrefix ++ "@" ++ HostName,
{list_to_atom(Node), Key}
end
).
nodesname() ->
oneof([list(nodename()), [node()]]).
nodesname_with_key() ->
oneof([{list(nodename()), choose(0, 10)}, {[node()], 1}]).
node_prefix() ->
oneof(["emqxct", text_like()]).
text_like() ->
?SUCHTHAT(Text, list(range($a, $z)), (length(Text) =< 100 andalso length(Text) > 0)).
hostname() ->
oneof(["127.0.0.1", "localhost"]).
%%--------------------------------------------------------------------
%% Utils
%%--------------------------------------------------------------------
%% After running the props, the `node()` () is only able to return an
%% incorrect node name - `nonode@nohost`, But we want a distributed nodename
%% So, just translate the `nonode@nohost` to ?NODENAME
punch(Nodes) when is_list(Nodes) ->
lists:map(fun punch/1, Nodes);
punch('nonode@nohost') ->
%% Equal to ?NODENAME
node();
punch(GoodBoy) ->
GoodBoy.

View File

@ -139,6 +139,7 @@ kafka_consumer_test() ->
ok. ok.
message_key_dispatch_validations_test() -> message_key_dispatch_validations_test() ->
Name = myproducer,
Conf0 = kafka_producer_new_hocon(), Conf0 = kafka_producer_new_hocon(),
Conf1 = Conf1 =
Conf0 ++ Conf0 ++
@ -155,7 +156,7 @@ message_key_dispatch_validations_test() ->
<<"message">> := #{<<"key">> := <<>>} <<"message">> := #{<<"key">> := <<>>}
} }
}, },
emqx_utils_maps:deep_get([<<"bridges">>, <<"kafka">>, <<"myproducer">>], Conf) emqx_utils_maps:deep_get([<<"bridges">>, <<"kafka">>, atom_to_binary(Name)], Conf)
), ),
?assertThrow( ?assertThrow(
{_, [ {_, [
@ -166,8 +167,6 @@ message_key_dispatch_validations_test() ->
]}, ]},
check(Conf) check(Conf)
), ),
%% ensure atoms exist
_ = [myproducer],
?assertThrow( ?assertThrow(
{_, [ {_, [
#{ #{

View File

@ -11,6 +11,7 @@
%%=========================================================================== %%===========================================================================
pulsar_producer_validations_test() -> pulsar_producer_validations_test() ->
Name = my_producer,
Conf0 = pulsar_producer_hocon(), Conf0 = pulsar_producer_hocon(),
Conf1 = Conf1 =
Conf0 ++ Conf0 ++
@ -24,7 +25,7 @@ pulsar_producer_validations_test() ->
<<"strategy">> := <<"key_dispatch">>, <<"strategy">> := <<"key_dispatch">>,
<<"message">> := #{<<"key">> := <<>>} <<"message">> := #{<<"key">> := <<>>}
}, },
emqx_utils_maps:deep_get([<<"bridges">>, <<"pulsar_producer">>, <<"my_producer">>], Conf) emqx_utils_maps:deep_get([<<"bridges">>, <<"pulsar_producer">>, atom_to_binary(Name)], Conf)
), ),
?assertThrow( ?assertThrow(
{_, [ {_, [
@ -35,8 +36,6 @@ pulsar_producer_validations_test() ->
]}, ]},
check(Conf) check(Conf)
), ),
%% ensure atoms exist
_ = [my_producer],
?assertThrow( ?assertThrow(
{_, [ {_, [
#{ #{

View File

@ -108,7 +108,17 @@ admins(_) ->
emqx_ctl:usage(usage_sync()). emqx_ctl:usage(usage_sync()).
audit(Level, From, Log) -> audit(Level, From, Log) ->
?AUDIT(Level, From, Log#{time => logger:timestamp()}). Log1 = redact(Log#{time => logger:timestamp()}),
?AUDIT(Level, From, Log1).
redact(Logs = #{cmd := admins, args := ["add", Username, _Password | Rest]}) ->
Logs#{args => ["add", Username, "******" | Rest]};
redact(Logs = #{cmd := admins, args := ["passwd", Username, _Password]}) ->
Logs#{args => ["passwd", Username, "******"]};
redact(Logs = #{cmd := license, args := ["update", _License]}) ->
Logs#{args => ["update", "******"]};
redact(Logs) ->
Logs.
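A self-contained shell approximation of the clauses above, showing how the sensitive argument is masked before the entry reaches the audit log (values invented):

Redact = fun
    (#{cmd := admins, args := ["add", User, _Pwd | Rest]} = L) ->
        L#{args => ["add", User, "******" | Rest]};
    (#{cmd := admins, args := ["passwd", User, _Pwd]} = L) ->
        L#{args => ["passwd", User, "******"]};
    (#{cmd := license, args := ["update", _Key]} = L) ->
        L#{args => ["update", "******"]};
    (L) ->
        L
end,
Redact(#{cmd => admins, args => ["add", "user1", "secret"]}).
%% => #{cmd => admins, args => ["add", "user1", "******"]}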
usage_conf() -> usage_conf() ->
[ [

View File

@ -43,6 +43,9 @@
]). ]).
-export([conf_get/2, conf_get/3, keys/2, filter/1]). -export([conf_get/2, conf_get/3, keys/2, filter/1]).
%% internal exports for `emqx_enterprise_schema' only.
-export([ensure_unicode_path/2, convert_rotation/2, log_handler_common_confs/2]).
%% Static apps which merge their configs into the merged emqx.conf %% Static apps which merge their configs into the merged emqx.conf
%% The list can not be made a dynamic read at run-time as it is used %% The list can not be made a dynamic read at run-time as it is used
%% by nodetool to generate app.<time>.config before EMQX is started %% by nodetool to generate app.<time>.config before EMQX is started
@ -964,15 +967,6 @@ fields("log") ->
aliases => [file_handlers], aliases => [file_handlers],
importance => ?IMPORTANCE_HIGH importance => ?IMPORTANCE_HIGH
} }
)},
{"audit",
sc(
?R_REF("log_audit_handler"),
#{
desc => ?DESC("log_audit_handler"),
importance => ?IMPORTANCE_HIGH,
default => #{<<"enable">> => true, <<"level">> => <<"info">>}
}
)} )}
]; ];
fields("console_handler") -> fields("console_handler") ->
@ -1014,49 +1008,6 @@ fields("log_file_handler") ->
} }
)} )}
] ++ log_handler_common_confs(file, #{}); ] ++ log_handler_common_confs(file, #{});
fields("log_audit_handler") ->
[
{"path",
sc(
file(),
#{
desc => ?DESC("audit_file_handler_path"),
default => <<"${EMQX_LOG_DIR}/audit.log">>,
importance => ?IMPORTANCE_HIGH,
converter => fun(Path, Opts) ->
emqx_schema:naive_env_interpolation(ensure_unicode_path(Path, Opts))
end
}
)},
{"rotation_count",
sc(
range(1, 128),
#{
default => 10,
converter => fun convert_rotation/2,
desc => ?DESC("log_rotation_count"),
importance => ?IMPORTANCE_MEDIUM
}
)},
{"rotation_size",
sc(
hoconsc:union([infinity, emqx_schema:bytesize()]),
#{
default => <<"50MB">>,
desc => ?DESC("log_file_handler_max_size"),
importance => ?IMPORTANCE_MEDIUM
}
)}
] ++
%% Only support json
lists:keydelete(
"formatter",
1,
log_handler_common_confs(
file,
#{level => info, level_desc => "audit_handler_level"}
)
);
fields("log_overload_kill") -> fields("log_overload_kill") ->
[ [
{"enable", {"enable",
@ -1147,8 +1098,6 @@ desc("console_handler") ->
?DESC("desc_console_handler"); ?DESC("desc_console_handler");
desc("log_file_handler") -> desc("log_file_handler") ->
?DESC("desc_log_file_handler"); ?DESC("desc_log_file_handler");
desc("log_audit_handler") ->
?DESC("desc_audit_log_handler");
desc("log_rotation") -> desc("log_rotation") ->
?DESC("desc_log_rotation"); ?DESC("desc_log_rotation");
desc("log_overload_kill") -> desc("log_overload_kill") ->
@ -1314,6 +1263,7 @@ log_handler_common_confs(Handler, Default) ->
sc( sc(
hoconsc:enum([text, json]), hoconsc:enum([text, json]),
#{ #{
aliases => [format],
default => maps:get(formatter, Default, text), default => maps:get(formatter, Default, text),
desc => ?DESC("common_handler_formatter"), desc => ?DESC("common_handler_formatter"),
importance => ?IMPORTANCE_MEDIUM importance => ?IMPORTANCE_MEDIUM

View File

@ -19,22 +19,12 @@
-compile(export_all). -compile(export_all).
-include_lib("eunit/include/eunit.hrl"). -include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
-include_lib("snabbkaffe/include/snabbkaffe.hrl"). -include_lib("snabbkaffe/include/snabbkaffe.hrl").
%% erlfmt-ignore %% erlfmt-ignore
-define(BASE_CONF, -define(BASE_CONF,
""" """
node {
name = \"emqx1@127.0.0.1\"
cookie = \"emqxsecretcookie\"
data_dir = \"data\"
}
cluster {
name = emqxcl
discovery_strategy = static
static.seeds = \"emqx1@127.0.0.1\"
core_nodes = \"emqx1@127.0.0.1\"
}
log { log {
console { console {
enable = true enable = true
@ -52,12 +42,28 @@ all() ->
emqx_common_test_helpers:all(?MODULE). emqx_common_test_helpers:all(?MODULE).
init_per_suite(Config) -> init_per_suite(Config) ->
emqx_common_test_helpers:load_config(emqx_conf_schema, iolist_to_binary(?BASE_CONF)), Apps = emqx_cth_suite:start(
emqx_mgmt_api_test_util:init_suite([emqx_conf]), [
Config. emqx,
{emqx_conf, ?BASE_CONF}
],
#{work_dir => emqx_cth_suite:work_dir(Config)}
),
[{apps, Apps} | Config].
end_per_suite(_Config) -> end_per_suite(Config) ->
emqx_mgmt_api_test_util:end_suite([emqx_conf]). Apps = ?config(apps, Config),
ok = emqx_cth_suite:stop(Apps),
ok.
init_per_testcase(_TestCase, Config) ->
LogConfRaw = emqx_conf:get_raw([<<"log">>]),
[{log_conf_raw, LogConfRaw} | Config].
end_per_testcase(_TestCase, Config) ->
LogConfRaw = ?config(log_conf_raw, Config),
{ok, _} = emqx_conf:update([<<"log">>], LogConfRaw, #{}),
ok.
t_log_conf(_Conf) -> t_log_conf(_Conf) ->
FileExpect = #{ FileExpect = #{
@ -78,16 +84,7 @@ t_log_conf(_Conf) ->
<<"time_offset">> => <<"system">> <<"time_offset">> => <<"system">>
}, },
<<"file">> => <<"file">> =>
#{<<"default">> => FileExpect}, #{<<"default">> => FileExpect}
<<"audit">> =>
#{
<<"enable">> => true,
<<"level">> => <<"info">>,
<<"path">> => <<"log/audit.log">>,
<<"rotation_count">> => 10,
<<"rotation_size">> => <<"50MB">>,
<<"time_offset">> => <<"system">>
}
}, },
?assertEqual(ExpectLog1, emqx_conf:get_raw([<<"log">>])), ?assertEqual(ExpectLog1, emqx_conf:get_raw([<<"log">>])),
UpdateLog0 = emqx_utils_maps:deep_remove([<<"file">>, <<"default">>], ExpectLog1), UpdateLog0 = emqx_utils_maps:deep_remove([<<"file">>, <<"default">>], ExpectLog1),
@ -118,3 +115,23 @@ t_log_conf(_Conf) ->
?assertMatch({error, {not_found, default}}, logger:get_handler_config(default)), ?assertMatch({error, {not_found, default}}, logger:get_handler_config(default)),
?assertMatch({error, {not_found, console}}, logger:get_handler_config(console)), ?assertMatch({error, {not_found, console}}, logger:get_handler_config(console)),
ok. ok.
t_file_logger_infinity_rotation(_Config) ->
ConfPath = [<<"log">>],
FileConfPath = [<<"file">>, <<"default">>],
ConfRaw = emqx_conf:get_raw(ConfPath),
FileConfRaw = emqx_utils_maps:deep_get(FileConfPath, ConfRaw),
%% inconsistent config: infinity rotation size, but finite rotation count
BadFileConfRaw = maps:merge(
FileConfRaw,
#{
<<"rotation_size">> => <<"infinity">>,
<<"rotation_count">> => 10
}
),
BadConfRaw = emqx_utils_maps:deep_put(FileConfPath, ConfRaw, BadFileConfRaw),
?assertMatch({ok, _}, emqx_conf:update(ConfPath, BadConfRaw, #{})),
HandlerIds = logger:get_handler_ids(),
%% ensure that the handler is correctly added
?assert(lists:member(default, HandlerIds), #{handler_ids => HandlerIds}),
ok.

View File

@ -181,23 +181,8 @@ validate_log(Conf) ->
}}, }},
FileHandler FileHandler
), ),
AuditHandler = lists:keyfind(emqx_audit, 2, FileHandlers), %% audit is an EE-only feature
%% default is enable and log level is info. ?assertNot(lists:keyfind(emqx_audit, 2, FileHandlers)),
?assertMatch(
{handler, emqx_audit, logger_disk_log_h, #{
config := #{
type := wrap,
file := "log/audit.log",
max_no_bytes := _,
max_no_files := _
},
filesync_repeat_interval := no_repeat,
filters := [{filter_audit, {_, stop}}],
formatter := _,
level := info
}},
AuditHandler
),
ConsoleHandler = lists:keyfind(logger_std_h, 3, Loggers), ConsoleHandler = lists:keyfind(logger_std_h, 3, Loggers),
?assertEqual( ?assertEqual(
{handler, console, logger_std_h, #{ {handler, console, logger_std_h, #{
@ -209,6 +194,59 @@ validate_log(Conf) ->
ConsoleHandler ConsoleHandler
). ).
%% erlfmt-ignore
-define(FILE_LOG_BASE_CONF,
"""
log.file.default {
enable = true
file = \"log/xx-emqx.log\"
formatter = text
level = debug
rotation_count = ~s
rotation_size = ~s
time_offset = \"+01:00\"
}
"""
).
file_log_infinity_rotation_size_test_() ->
ensure_acl_conf(),
BaseConf = to_bin(?BASE_CONF, ["emqx1@127.0.0.1", "emqx1@127.0.0.1"]),
Gen = fun(#{count := Count, size := Size}) ->
Conf0 = to_bin(?FILE_LOG_BASE_CONF, [Count, Size]),
Conf1 = [BaseConf, Conf0],
{ok, Conf} = hocon:binary(Conf1, #{format => richmap}),
ConfList = hocon_tconf:generate(emqx_conf_schema, Conf),
Kernel = proplists:get_value(kernel, ConfList),
Loggers = proplists:get_value(logger, Kernel),
FileHandlers = lists:filter(fun(L) -> element(3, L) =:= logger_disk_log_h end, Loggers),
lists:keyfind(default, 2, FileHandlers)
end,
[
{"base conf: finite log (type = wrap)",
?_assertMatch(
{handler, default, logger_disk_log_h, #{
config := #{
type := wrap,
max_no_bytes := 1073741824,
max_no_files := 20
}
}},
Gen(#{count => "20", size => "\"1024MB\""})
)},
{"rotation size = infinity (type = halt)",
?_assertMatch(
{handler, default, logger_disk_log_h, #{
config := #{
type := halt,
max_no_bytes := infinity,
max_no_files := 9
}
}},
Gen(#{count => "9", size => "\"infinity\""})
)}
].
%% erlfmt-ignore %% erlfmt-ignore
-define(KERNEL_LOG_CONF, -define(KERNEL_LOG_CONF,
""" """

View File

@ -145,8 +145,8 @@ run_command(Cmd, Args) when is_atom(Cmd) ->
audit_log( audit_log(
audit_level(Result, Duration), audit_level(Result, Duration),
"from_cli", cli,
#{duration_ms => Duration, result => Result, cmd => Cmd, args => Args, node => node()} #{duration_ms => Duration, cmd => Cmd, args => Args, node => node()}
), ),
Result. Result.
@ -350,8 +350,6 @@ audit_log(Level, From, Log) ->
-define(TOO_SLOW, 3000). -define(TOO_SLOW, 3000).
audit_level(ok, Duration) when Duration >= ?TOO_SLOW -> warning;
audit_level({ok, _}, Duration) when Duration >= ?TOO_SLOW -> warning;
audit_level(ok, _Duration) -> info; audit_level(ok, _Duration) -> info;
audit_level({ok, _}, _Duration) -> info; audit_level({ok, _}, _Duration) -> info;
audit_level(_, _) -> error. audit_level(_, _) -> error.

View File

@ -24,6 +24,7 @@
-define(ROLE_SUPERUSER, <<"administrator">>). -define(ROLE_SUPERUSER, <<"administrator">>).
-define(ROLE_DEFAULT, ?ROLE_SUPERUSER). -define(ROLE_DEFAULT, ?ROLE_SUPERUSER).
-define(BACKEND_LOCAL, local).
-define(SSO_USERNAME(Backend, Name), {Backend, Name}). -define(SSO_USERNAME(Backend, Name), {Backend, Name}).
-type dashboard_sso_backend() :: atom(). -type dashboard_sso_backend() :: atom().

View File

@ -212,8 +212,8 @@ add_user_(Username, Password, Role, Desc) ->
mnesia:abort(<<"username_already_exist">>) mnesia:abort(<<"username_already_exist">>)
end. end.
-spec remove_user(binary()) -> {ok, any()} | {error, any()}. -spec remove_user(dashboard_username()) -> {ok, any()} | {error, any()}.
remove_user(Username) when is_binary(Username) -> remove_user(Username) ->
Trans = fun() -> Trans = fun() ->
case lookup_user(Username) of case lookup_user(Username) of
[] -> mnesia:abort(<<"username_not_found">>); [] -> mnesia:abort(<<"username_not_found">>);
@ -230,7 +230,7 @@ remove_user(Username) when is_binary(Username) ->
-spec update_user(dashboard_username(), dashboard_user_role(), binary()) -> -spec update_user(dashboard_username(), dashboard_user_role(), binary()) ->
{ok, map()} | {error, term()}. {ok, map()} | {error, term()}.
update_user(Username, Role, Desc) when is_binary(Username) -> update_user(Username, Role, Desc) ->
case legal_role(Role) of case legal_role(Role) of
ok -> ok ->
case case
@ -427,7 +427,7 @@ flatten_username(#{username := ?SSO_USERNAME(Backend, Name)} = Data) ->
backend => Backend backend => Backend
}; };
flatten_username(#{username := Username} = Data) when is_binary(Username) -> flatten_username(#{username := Username} = Data) when is_binary(Username) ->
Data#{backend => local}. Data#{backend => ?BACKEND_LOCAL}.
-spec add_sso_user(dashboard_sso_backend(), binary(), dashboard_user_role(), binary()) -> -spec add_sso_user(dashboard_sso_backend(), binary(), dashboard_user_role(), binary()) ->
{ok, map()} | {error, any()}. {ok, map()} | {error, any()}.

View File

@ -379,9 +379,9 @@ sso_parameters() ->
sso_parameters(Params) -> sso_parameters(Params) ->
emqx_dashboard_sso_api:sso_parameters(Params). emqx_dashboard_sso_api:sso_parameters(Params).
username(#{bindings := #{backend := local}}, Username) -> username(#{query_string := #{<<"backend">> := ?BACKEND_LOCAL}}, Username) ->
Username; Username;
username(#{bindings := #{backend := Backend}}, Username) -> username(#{query_string := #{<<"backend">> := Backend}}, Username) ->
?SSO_USERNAME(Backend, Username); ?SSO_USERNAME(Backend, Username);
username(_Req, Username) -> username(_Req, Username) ->
Username. Username.

View File

@ -25,26 +25,21 @@ log(Meta0) ->
Duration = erlang:convert_time_unit(ReqEnd - ReqStart, native, millisecond), Duration = erlang:convert_time_unit(ReqEnd - ReqStart, native, millisecond),
Level = level(Method, Code, Duration), Level = level(Method, Code, Duration),
Username = maps:get(username, Meta0, <<"">>), Username = maps:get(username, Meta0, <<"">>),
From = from(maps:get(auth_type, Meta0, "")),
Meta1 = maps:without([req_start, req_end], Meta0), Meta1 = maps:without([req_start, req_end], Meta0),
Meta2 = Meta1#{time => logger:timestamp(), duration_ms => Duration}, Meta2 = Meta1#{time => logger:timestamp(), duration_ms => Duration},
Meta = emqx_utils:redact(Meta2), Meta = emqx_utils:redact(Meta2),
?AUDIT( ?AUDIT(
Level, Level,
"from_api", From,
Meta#{ Meta#{username => binary_to_list(Username), node => node()}
from => from(maps:get(auth_type, Meta0, "")),
username => binary_to_list(Username),
node => node()
}
), ),
ok. ok.
from(jwt_token) -> "dashboard"; from(jwt_token) -> "dashboard";
from(api_key) -> "aip_key"; from(_) -> "rest_api".
from(_) -> "unauthorized".
level(_, _Code, Duration) when Duration > 3000 -> warning; level(get, _Code, _) -> debug;
level(get, Code, _) when Code >= 200 andalso Code < 300 -> debug;
level(_, Code, _) when Code >= 200 andalso Code < 300 -> info; level(_, Code, _) when Code >= 200 andalso Code < 300 -> info;
level(_, Code, _) when Code >= 300 andalso Code < 400 -> warning; level(_, Code, _) when Code >= 300 andalso Code < 400 -> warning;
level(_, Code, _) when Code >= 400 andalso Code < 500 -> error; level(_, Code, _) when Code >= 400 andalso Code < 500 -> error;

View File

@ -24,9 +24,26 @@
unload/0 unload/0
]). ]).
load() -> -export([bin/1, print_error/1]).
emqx_ctl:register_command(admins, {?MODULE, admins}, []).
-if(?EMQX_RELEASE_EDITION == ee).
-define(CLI_MOD, emqx_dashboard_sso_cli).
-else.
-define(CLI_MOD, ?MODULE).
-endif.
load() ->
emqx_ctl:register_command(admins, {?CLI_MOD, admins}, []).
admins(["add", Username, Password]) ->
admins(["add", Username, Password, ""]);
admins(["add", Username, Password, Desc]) ->
case emqx_dashboard_admin:add_user(bin(Username), bin(Password), ?ROLE_DEFAULT, bin(Desc)) of
{ok, _} ->
emqx_ctl:print("ok~n");
{error, Reason} ->
print_error(Reason)
end;
admins(["passwd", Username, Password]) -> admins(["passwd", Username, Password]) ->
case emqx_dashboard_admin:change_password(bin(Username), bin(Password)) of case emqx_dashboard_admin:change_password(bin(Username), bin(Password)) of
{ok, _} -> {ok, _} ->
@ -41,8 +58,14 @@ admins(["del", Username]) ->
{error, Reason} -> {error, Reason} ->
print_error(Reason) print_error(Reason)
end; end;
admins(Args) -> admins(_) ->
inner_admins(Args). emqx_ctl:usage(
[
{"admins add <Username> <Password> <Description>", "Add dashboard user"},
{"admins passwd <Username> <Password>", "Reset dashboard user password"},
{"admins del <Username>", "Delete dashboard user"}
]
).
unload() -> unload() ->
emqx_ctl:unregister_command(admins). emqx_ctl:unregister_command(admins).
@ -54,47 +77,3 @@ print_error(Reason) when is_binary(Reason) ->
%% Maybe has more types of error, but there is only binary now. So close it for dialyzer. %% Maybe has more types of error, but there is only binary now. So close it for dialyzer.
% print_error(Reason) -> % print_error(Reason) ->
% emqx_ctl:print("Error: ~p~n", [Reason]). % emqx_ctl:print("Error: ~p~n", [Reason]).
-if(?EMQX_RELEASE_EDITION == ee).
usage() ->
[
{"admins add <Username> <Password> <Role> <Description>", "Add dashboard user"},
{"admins passwd <Username> <Password>", "Reset dashboard user password"},
{"admins del <Username>", "Delete dashboard user"}
].
inner_admins(["add", Username, Password]) ->
inner_admins(["add", Username, Password, ?ROLE_SUPERUSER]);
inner_admins(["add", Username, Password, Role]) ->
inner_admins(["add", Username, Password, Role, ""]);
inner_admins(["add", Username, Password, Role, Desc]) ->
case emqx_dashboard_admin:add_user(bin(Username), bin(Password), bin(Role), bin(Desc)) of
{ok, _} ->
emqx_ctl:print("ok~n");
{error, Reason} ->
print_error(Reason)
end;
inner_admins(_) ->
emqx_ctl:usage(usage()).
-else.
usage() ->
[
{"admins add <Username> <Password> <Description>", "Add dashboard user"},
{"admins passwd <Username> <Password>", "Reset dashboard user password"},
{"admins del <Username>", "Delete dashboard user"}
].
inner_admins(["add", Username, Password]) ->
inner_admins(["add", Username, Password, ""]);
inner_admins(["add", Username, Password, Desc]) ->
case emqx_dashboard_admin:add_user(bin(Username), bin(Password), ?ROLE_SUPERUSER, bin(Desc)) of
{ok, _} ->
emqx_ctl:print("ok~n");
{error, Reason} ->
print_error(Reason)
end;
inner_admins(_) ->
emqx_ctl:usage(usage()).
-endif.

View File

@ -68,7 +68,7 @@ fields("dashboard") ->
importance => ?IMPORTANCE_HIDDEN importance => ?IMPORTANCE_HIDDEN
} }
)} )}
]; ] ++ sso_fields();
fields("listeners") -> fields("listeners") ->
[ [
{"http", {"http",
@ -299,3 +299,18 @@ https_converter(Conf = #{}, _Opts) ->
Conf1#{<<"ssl_options">> => SslOpts}; Conf1#{<<"ssl_options">> => SslOpts};
https_converter(Conf, _Opts) -> https_converter(Conf, _Opts) ->
Conf. Conf.
-if(?EMQX_RELEASE_EDITION == ee).
sso_fields() ->
[
{sso,
?HOCON(
?R_REF(emqx_dashboard_sso_schema, sso),
#{required => {false, recursively}}
)}
].
-else.
sso_fields() ->
[].
-endif.

View File

@ -191,7 +191,7 @@ token_ttl() ->
format(Token, ?SSO_USERNAME(Backend, Name), Role, ExpTime) -> format(Token, ?SSO_USERNAME(Backend, Name), Role, ExpTime) ->
format(Token, Backend, Name, Role, ExpTime); format(Token, Backend, Name, Role, ExpTime);
format(Token, Username, Role, ExpTime) -> format(Token, Username, Role, ExpTime) ->
format(Token, local, Username, Role, ExpTime). format(Token, ?BACKEND_LOCAL, Username, Role, ExpTime).
format(Token, Backend, Username, Role, ExpTime) -> format(Token, Backend, Username, Role, ExpTime) ->
#?ADMIN_JWT{ #?ADMIN_JWT{

View File

@ -4,5 +4,5 @@
{deps, [ {deps, [
{emqx_ldap, {path, "../../apps/emqx_ldap"}}, {emqx_ldap, {path, "../../apps/emqx_ldap"}},
{emqx_dashboard, {path, "../../apps/emqx_dashboard"}}, {emqx_dashboard, {path, "../../apps/emqx_dashboard"}},
{esaml, {git, "https://github.com/emqx/esaml", {tag, "v1.1.1"}}} {esaml, {git, "https://github.com/emqx/esaml", {tag, "v1.1.2"}}}
]}. ]}.

View File

@ -13,10 +13,11 @@
create/2, create/2,
update/3, update/3,
destroy/2, destroy/2,
login/3 login/3,
convert_certs/3
]). ]).
-export([types/0, modules/0, provider/1, backends/0]). -export([types/0, modules/0, provider/1, backends/0, format/1]).
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
%% Callbacks %% Callbacks
@ -26,7 +27,9 @@
backend => atom(), backend => atom(),
atom() => term() atom() => term()
}. }.
-type state() :: #{atom() => term()}.
%% Note: if a backend has a resource, it must be stored in the state and named resource_id
-type state() :: #{resource_id => binary(), atom() => term()}.
-type raw_config() :: #{binary() => term()}. -type raw_config() :: #{binary() => term()}.
-type config() :: parsed_config() | raw_config(). -type config() :: parsed_config() | raw_config().
-type hocon_ref() :: ?R_REF(Module :: atom(), Name :: atom() | binary()). -type hocon_ref() :: ?R_REF(Module :: atom(), Name :: atom() | binary()).
@ -43,6 +46,11 @@
| {redirect, tuple()} | {redirect, tuple()}
| {error, Reason :: term()}. | {error, Reason :: term()}.
-callback convert_certs(
Dir :: file:filename_all(),
config()
) -> config().
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
%% Callback Interface %% Callback Interface
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
@ -66,6 +74,9 @@ destroy(Mod, State) ->
login(Mod, Req, State) -> login(Mod, Req, State) ->
Mod:login(Req, State). Mod:login(Req, State).
convert_certs(Mod, Dir, Config) ->
Mod:convert_certs(Dir, Config).
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
%% API %% API
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
@ -83,3 +94,23 @@ backends() ->
ldap => emqx_dashboard_sso_ldap, ldap => emqx_dashboard_sso_ldap,
saml => emqx_dashboard_sso_saml saml => emqx_dashboard_sso_saml
}. }.
format(Args) ->
lists:foldl(fun combine/2, <<>>, Args).
combine(Arg, Bin) when is_binary(Arg) ->
<<Bin/binary, Arg/binary>>;
combine(Arg, Bin) when is_list(Arg) ->
case io_lib:printable_unicode_list(Arg) of
true ->
ArgBin = unicode:characters_to_binary(Arg),
<<Bin/binary, ArgBin/binary>>;
_ ->
generic_combine(Arg, Bin)
end;
combine(Arg, Bin) ->
generic_combine(Arg, Bin).
generic_combine(Arg, Bin) ->
Str = io_lib:format("~0p", [Arg]),
erlang:iolist_to_binary([Bin, Str]).
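The new format/1 helper folds mixed arguments into a single binary: binaries and printable strings are appended verbatim, and any other term is rendered with ~0p. An illustrative call (argument values invented, module name inferred from the surrounding exports):

emqx_dashboard_sso:format(["SAML assertion rejected for ", <<"user1">>, ": ", {error, expired}]).
%% => <<"SAML assertion rejected for user1: {error,expired}">>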

View File

@ -33,13 +33,14 @@
backend/2 backend/2
]). ]).
-export([sso_parameters/1, login_reply/2]). -export([sso_parameters/1, login_meta/3]).
-define(REDIRECT, 'REDIRECT'). -define(REDIRECT, 'REDIRECT').
-define(BAD_USERNAME_OR_PWD, 'BAD_USERNAME_OR_PWD'). -define(BAD_USERNAME_OR_PWD, 'BAD_USERNAME_OR_PWD').
-define(BAD_REQUEST, 'BAD_REQUEST'). -define(BAD_REQUEST, 'BAD_REQUEST').
-define(BACKEND_NOT_FOUND, 'BACKEND_NOT_FOUND'). -define(BACKEND_NOT_FOUND, 'BACKEND_NOT_FOUND').
-define(TAGS, <<"Dashboard Single Sign-On">>). -define(TAGS, <<"Dashboard Single Sign-On">>).
-define(MOD_KEY_PATH, [dashboard, sso]).
namespace() -> "dashboard_sso". namespace() -> "dashboard_sso".
@ -132,69 +133,88 @@ schema("/sso/:backend") ->
}. }.
fields(backend_status) -> fields(backend_status) ->
emqx_dashboard_sso_schema:common_backend_schema(emqx_dashboard_sso:types()). emqx_dashboard_sso_schema:common_backend_schema(emqx_dashboard_sso:types()) ++
[
{running,
mk(
boolean(), #{
desc => ?DESC(running)
}
)},
{last_error,
mk(
binary(), #{
desc => ?DESC(last_error)
}
)}
].
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
%% API %% API
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------
running(get, _Request) -> running(get, _Request) ->
SSO = emqx:get_config([dashboard_sso], #{}), {200, emqx_dashboard_sso_manager:running()}.
{200,
lists:filtermap(
fun
(#{backend := Backend, enable := true}) ->
{true, Backend};
(_) ->
false
end,
maps:values(SSO)
)}.
login(post, #{bindings := #{backend := Backend}} = Request) -> login(post, #{bindings := #{backend := Backend}, body := Body} = Request) ->
case emqx_dashboard_sso_manager:lookup_state(Backend) of case emqx_dashboard_sso_manager:lookup_state(Backend) of
undefined -> undefined ->
{404, #{code => ?BACKEND_NOT_FOUND, message => <<"Backend not found">>}}; {404, #{code => ?BACKEND_NOT_FOUND, message => <<"Backend not found">>}};
State -> State ->
case emqx_dashboard_sso:login(provider(Backend), Request, State) of case emqx_dashboard_sso:login(provider(Backend), Request, State) of
{ok, Role, Token} -> {ok, Role, Token} ->
?SLOG(info, #{msg => "dashboard_sso_login_successful", request => Request}), ?SLOG(info, #{
{200, login_reply(Role, Token)}; msg => "dashboard_sso_login_successful",
request => emqx_utils:redact(Request)
}),
Username = maps:get(<<"username">>, Body),
{200, login_meta(Username, Role, Token)};
{redirect, Redirect} -> {redirect, Redirect} ->
?SLOG(info, #{msg => "dashboard_sso_login_redirect", request => Request}), ?SLOG(info, #{
msg => "dashboard_sso_login_redirect",
request => emqx_utils:redact(Request)
}),
Redirect; Redirect;
{error, Reason} -> {error, Reason} ->
?SLOG(info, #{ ?SLOG(info, #{
msg => "dashboard_sso_login_failed", msg => "dashboard_sso_login_failed",
request => Request, request => emqx_utils:redact(Request),
reason => Reason reason => emqx_utils:redact(Reason)
}), }),
{401, #{code => ?BAD_USERNAME_OR_PWD, message => <<"Auth failed">>}} {401, #{code => ?BAD_USERNAME_OR_PWD, message => <<"Auth failed">>}}
end end
end. end.
sso(get, _Request) -> sso(get, _Request) ->
SSO = emqx:get_config([dashboard_sso], #{}), SSO = emqx:get_config(?MOD_KEY_PATH, #{}),
{200, {200,
lists:map( lists:map(
fun(Backend) -> fun(#{backend := Backend, enable := Enable}) ->
maps:with([backend, enable], Backend) Status = emqx_dashboard_sso_manager:get_backend_status(Backend, Enable),
Status#{
backend => Backend,
enable => Enable
}
end, end,
maps:values(SSO) maps:values(SSO)
)}. )}.
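With the change above, each entry returned by GET /sso now carries the runtime status merged in by emqx_dashboard_sso_manager:get_backend_status/2. A hedged sketch of the decoded JSON body (the concrete values are illustrative):
%% One map per configured backend; running/last_error come from the manager:
[#{<<"backend">> => <<"ldap">>,
   <<"enable">> => true,
   <<"running">> => false,
   <<"last_error">> => <<"Resource not found">>}]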
backend(get, #{bindings := #{backend := Type}}) -> backend(get, #{bindings := #{backend := Type}}) ->
case emqx:get_config([dashboard_sso, Type], undefined) of case emqx:get_config(?MOD_KEY_PATH ++ [Type], undefined) of
undefined -> undefined ->
{404, #{code => ?BACKEND_NOT_FOUND, message => <<"Backend not found">>}}; {404, #{code => ?BACKEND_NOT_FOUND, message => <<"Backend not found">>}};
Backend -> Backend ->
{200, to_json(Backend)} {200, to_json(Backend)}
end; end;
backend(put, #{bindings := #{backend := Backend}, body := Config}) -> backend(put, #{bindings := #{backend := Backend}, body := Config}) ->
?SLOG(info, #{msg => "Update SSO backend", backend => Backend, config => Config}), ?SLOG(info, #{
msg => "update_sso_backend",
backend => Backend,
config => emqx_utils:redact(Config)
}),
on_backend_update(Backend, Config, fun emqx_dashboard_sso_manager:update/2); on_backend_update(Backend, Config, fun emqx_dashboard_sso_manager:update/2);
backend(delete, #{bindings := #{backend := Backend}}) -> backend(delete, #{bindings := #{backend := Backend}}) ->
?SLOG(info, #{msg => "Delete SSO backend", backend => Backend}), ?SLOG(info, #{msg => "delete_sso_backend", backend => Backend}),
handle_backend_update_result(emqx_dashboard_sso_manager:delete(Backend), undefined). handle_backend_update_result(emqx_dashboard_sso_manager:delete(Backend), undefined).
sso_parameters(Params) -> sso_parameters(Params) ->
@ -251,12 +271,12 @@ handle_backend_update_result(ok, _) ->
204; 204;
handle_backend_update_result({error, not_exists}, _) -> handle_backend_update_result({error, not_exists}, _) ->
{404, #{code => ?BACKEND_NOT_FOUND, message => <<"Backend not found">>}}; {404, #{code => ?BACKEND_NOT_FOUND, message => <<"Backend not found">>}};
handle_backend_update_result({error, already_exists}, _) ->
{400, #{code => ?BAD_REQUEST, message => <<"Backend already exists">>}};
handle_backend_update_result({error, failed_to_load_metadata}, _) -> handle_backend_update_result({error, failed_to_load_metadata}, _) ->
{400, #{code => ?BAD_REQUEST, message => <<"Failed to load metadata">>}}; {400, #{code => ?BAD_REQUEST, message => <<"Failed to load metadata">>}};
handle_backend_update_result({error, Reason}, _) when is_binary(Reason) ->
{400, #{code => ?BAD_REQUEST, message => Reason}};
handle_backend_update_result({error, Reason}, _) -> handle_backend_update_result({error, Reason}, _) ->
{400, #{code => ?BAD_REQUEST, message => Reason}}. {400, #{code => ?BAD_REQUEST, message => emqx_dashboard_sso:format(["Reason: ", Reason])}}.
to_json(Data) -> to_json(Data) ->
emqx_utils_maps:jsonable_map( emqx_utils_maps:jsonable_map(
@ -266,8 +286,9 @@ to_json(Data) ->
end end
). ).
login_reply(Role, Token) -> login_meta(Username, Role, Token) ->
#{ #{
username => Username,
role => Role, role => Role,
token => Token, token => Token,
version => iolist_to_binary(proplists:get_value(version, emqx_sys:info())), version => iolist_to_binary(proplists:get_value(version, emqx_sys:info())),

View File

@ -0,0 +1,66 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%--------------------------------------------------------------------
-module(emqx_dashboard_sso_cli).
-include_lib("emqx_dashboard/include/emqx_dashboard.hrl").
-export([admins/1]).
-import(emqx_dashboard_cli, [bin/1, print_error/1]).
admins(["add", Username, Password]) ->
admins(["add", Username, Password, ""]);
admins(["add", Username, Password, Desc]) ->
case emqx_dashboard_admin:add_user(bin(Username), bin(Password), ?ROLE_DEFAULT, bin(Desc)) of
{ok, _} ->
emqx_ctl:print("ok~n");
{error, Reason} ->
print_error(Reason)
end;
admins(["add", Username, Password, Desc, Role]) ->
case emqx_dashboard_admin:add_user(bin(Username), bin(Password), bin(Role), bin(Desc)) of
{ok, _} ->
emqx_ctl:print("ok~n");
{error, Reason} ->
print_error(Reason)
end;
admins(["passwd", Username, Password]) ->
case emqx_dashboard_admin:change_password(bin(Username), bin(Password)) of
{ok, _} ->
emqx_ctl:print("ok~n");
{error, Reason} ->
print_error(Reason)
end;
admins(["del", Username]) ->
delete_user(bin(Username));
admins(["del", Username, BackendName]) ->
case atom(BackendName) of
{ok, ?BACKEND_LOCAL} ->
delete_user(bin(Username));
{ok, Backend} ->
delete_user(?SSO_USERNAME(Backend, bin(Username)));
{error, Reason} ->
print_error(Reason)
end;
admins(_) ->
emqx_ctl:usage(
[
{"admins add <Username> <Password> <Description> <Role>", "Add dashboard user"},
{"admins passwd <Username> <Password>", "Reset dashboard user password"},
{"admins del <Username> <Backend>",
"Delete dashboard user, <Backend> can be omitted, the default value is 'local'"}
]
).
atom(S) ->
emqx_utils:safe_to_existing_atom(S).
delete_user(Username) ->
case emqx_dashboard_admin:remove_user(Username) of
{ok, _} ->
emqx_ctl:print("ok~n");
{error, Reason} ->
print_error(Reason)
end.
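A brief usage sketch for the CLI module above, invoked the way emqx_ctl dispatches the 'admins' command (the user name, password, description, and role string are assumptions made for illustration):
%% Add a local dashboard user with an explicit role (role value assumed):
emqx_dashboard_sso_cli:admins(["add", "alice", "secret", "ops user", "administrator"]).
%% Reset that user's password:
emqx_dashboard_sso_cli:admins(["passwd", "alice", "new-secret"]).
%% Delete an SSO-provisioned user; with <Backend> omitted it defaults to 'local':
emqx_dashboard_sso_cli:admins(["del", "alice", "ldap"]).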

View File

@ -22,7 +22,8 @@
login/2, login/2,
create/1, create/1,
update/2, update/2,
destroy/1 destroy/1,
convert_certs/2
]). ]).
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
@ -86,19 +87,7 @@ destroy(#{resource_id := ResourceId}) ->
parse_config(Config0) -> parse_config(Config0) ->
Config = ensure_bind_password(Config0), Config = ensure_bind_password(Config0),
State = lists:foldl( {Config, maps:with([query_timeout], Config0)}.
fun(Key, Acc) ->
case maps:find(Key, Config) of
{ok, Value} when is_binary(Value) ->
Acc#{Key := erlang:binary_to_list(Value)};
_ ->
Acc
end
end,
Config,
[query_timeout]
),
{Config, State}.
%% In this feature, the `bind_password` is fixed, so it should conceal from the swagger, %% In this feature, the `bind_password` is fixed, so it should conceal from the swagger,
%% but the connector still needs it, hence we should add it back here %% but the connector still needs it, hence we should add it back here
@ -163,3 +152,21 @@ ensure_user_exists(Username) ->
Error Error
end end
end. end.
convert_certs(Dir, Conf) ->
case
emqx_tls_lib:ensure_ssl_files(
Dir, maps:get(<<"ssl">>, Conf, undefined)
)
of
{ok, SSL} ->
new_ssl_source(Conf, SSL);
{error, Reason} ->
?SLOG(error, Reason#{msg => "bad_ssl_config"}),
throw({bad_ssl_config, Reason})
end.
new_ssl_source(Source, undefined) ->
Source;
new_ssl_source(Source, SSL) ->
Source#{<<"ssl">> => SSL}.
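A hedged sketch of what convert_certs/2 above is meant to achieve for the LDAP backend: inline PEM material under the <<"ssl">> key is persisted by emqx_tls_lib:ensure_ssl_files/2 into the given directory, and the returned config points at the written files instead. The key name and on-disk path shown below are assumptions, not something fixed by this change:
%% Before: raw config fragment carrying inline PEM content.
%%   #{<<"ssl">> => #{<<"enable">> => true,
%%                    <<"cacertfile">> => <<"-----BEGIN CERTIFICATE-----...">>}}
%% After convert_certs(<<"sso/ldap">>, Conf): the same key references a file on disk.
%%   #{<<"ssl">> => #{<<"enable">> => true,
%%                    <<"cacertfile">> => <<"data/certs/sso/ldap/cacert-XXXX.pem">>}}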

View File

@ -25,30 +25,39 @@
-export([ -export([
running/0, running/0,
lookup_state/1, lookup_state/1,
get_backend_status/2,
make_resource_id/1, make_resource_id/1,
create_resource/3, create_resource/3,
update_resource/3, update_resource/3
call/1
]). ]).
-export([ -export([
update/2, update/2,
delete/1, delete/1,
pre_config_update/3, pre_config_update/3,
post_config_update/5 post_config_update/5,
propagated_post_config_update/5
]). ]).
-import(emqx_dashboard_sso, [provider/1]). -import(emqx_dashboard_sso, [provider/1, format/1]).
-define(MOD_KEY_PATH, [dashboard_sso]). -define(MOD_TAB, emqx_dashboard_sso).
-define(MOD_KEY_PATH, [dashboard, sso]).
-define(MOD_KEY_PATH(Sub), [dashboard, sso, Sub]).
-define(RESOURCE_GROUP, <<"emqx_dashboard_sso">>). -define(RESOURCE_GROUP, <<"emqx_dashboard_sso">>).
-define(NO_ERROR, <<>>).
-define(DEFAULT_RESOURCE_OPTS, #{ -define(DEFAULT_RESOURCE_OPTS, #{
start_after_created => false start_after_created => false
}). }).
-record(dashboard_sso, { -define(DEFAULT_START_OPTS, #{
start_timeout => timer:seconds(30)
}).
-record(?MOD_TAB, {
backend :: atom(), backend :: atom(),
state :: map() state :: undefined | map(),
last_error = ?NO_ERROR :: term()
}). }).
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
@ -58,26 +67,53 @@ start_link() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []). gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
running() -> running() ->
maps:fold( SSO = emqx:get_config(?MOD_KEY_PATH, #{}),
lists:filtermap(
fun fun
(Type, #{enable := true}, Acc) -> (#{backend := Backend, enable := true}) ->
[Type | Acc]; case lookup(Backend) of
(_Type, _Cfg, Acc) -> undefined ->
Acc false;
#?MOD_TAB{last_error = ?NO_ERROR} ->
{true, Backend};
_ ->
false
end;
(_) ->
false
end, end,
[], maps:values(SSO)
emqx:get_config([emqx_dashboard_sso])
). ).
get_backend_status(Backend, false) ->
#{
backend => Backend,
enable => false,
running => false,
last_error => ?NO_ERROR
};
get_backend_status(Backend, _) ->
case lookup(Backend) of
undefined ->
#{
backend => Backend,
enable => true,
running => false,
last_error => <<"Resource not found">>
};
Data ->
maps:merge(#{backend => Backend, enable => true}, do_get_backend_status(Data))
end.
update(Backend, Config) -> update(Backend, Config) ->
update_config(Backend, {?FUNCTION_NAME, Backend, Config}). update_config(Backend, {?FUNCTION_NAME, Backend, Config}).
delete(Backend) -> delete(Backend) ->
update_config(Backend, {?FUNCTION_NAME, Backend}). update_config(Backend, {?FUNCTION_NAME, Backend}).
lookup_state(Backend) -> lookup_state(Backend) ->
case ets:lookup(dashboard_sso, Backend) of case ets:lookup(?MOD_TAB, Backend) of
[Data] -> [Data] ->
Data#dashboard_sso.state; Data#?MOD_TAB.state;
[] -> [] ->
undefined undefined
end. end.
@ -102,31 +138,25 @@ update_resource(ResourceId, Module, Config) ->
), ),
start_resource_if_enabled(ResourceId, Result, Config). start_resource_if_enabled(ResourceId, Result, Config).
call(Req) ->
gen_server:call(?MODULE, Req).
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
%% gen_server callbacks %% gen_server callbacks
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
init([]) -> init([]) ->
process_flag(trap_exit, true), process_flag(trap_exit, true),
emqx_conf:add_handler(?MOD_KEY_PATH, ?MODULE), add_handler(),
emqx_utils_ets:new( emqx_utils_ets:new(
dashboard_sso, ?MOD_TAB,
[ [
set, ordered_set,
public, public,
named_table, named_table,
{keypos, #dashboard_sso.backend}, {keypos, #?MOD_TAB.backend},
{read_concurrency, true} {read_concurrency, true}
] ]
), ),
start_backend_services(), start_backend_services(),
{ok, #{}}. {ok, #{}}.
handle_call({update_config, Req, NewConf}, _From, State) ->
Result = on_config_update(Req, NewConf),
{reply, Result, State};
handle_call(_Request, _From, State) -> handle_call(_Request, _From, State) ->
Reply = ok, Reply = ok,
{reply, Reply, State}. {reply, Reply, State}.
@ -138,7 +168,7 @@ handle_info(_Info, State) ->
{noreply, State}. {noreply, State}.
terminate(_Reason, _State) -> terminate(_Reason, _State) ->
emqx_conf:remove_handler(?MOD_KEY_PATH), remove_handler(),
ok. ok.
code_change(_OldVsn, State, _Extra) -> code_change(_OldVsn, State, _Extra) ->
@ -151,22 +181,24 @@ format_status(_Opt, Status) ->
%% Internal functions %% Internal functions
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
start_backend_services() -> start_backend_services() ->
Backends = emqx_conf:get([dashboard_sso], #{}), Backends = emqx_conf:get(?MOD_KEY_PATH, #{}),
lists:foreach( lists:foreach(
fun({Backend, Config}) -> fun({Backend, Config}) ->
Provider = provider(Backend), Provider = provider(Backend),
case emqx_dashboard_sso:create(Provider, Config) of case emqx_dashboard_sso:create(Provider, Config) of
{ok, State} -> {ok, State} ->
?SLOG(info, #{ ?SLOG(info, #{
msg => "Start SSO backend successfully", msg => "start_sso_backend_successfully",
backend => Backend backend => Backend
}), }),
ets:insert(dashboard_sso, #dashboard_sso{backend = Backend, state = State}); update_state(Backend, State);
{error, Reason} -> {error, Reason} ->
SafeReason = emqx_utils:redact(Reason),
update_last_error(Backend, SafeReason),
?SLOG(error, #{ ?SLOG(error, #{
msg => "Start SSO backend failed", msg => "start_sso_backend_failed",
backend => Backend, backend => Backend,
reason => Reason reason => SafeReason
}) })
end end
end, end,
@ -174,96 +206,188 @@ start_backend_services() ->
). ).
update_config(Backend, UpdateReq) -> update_config(Backend, UpdateReq) ->
case emqx_conf:update([dashboard_sso], UpdateReq, #{override_to => cluster}) of %% a valid configuration is always applied to the config store;
{ok, UpdateResult} -> %% runtime errors raised while applying it are ignored here
#{post_config_update := #{?MODULE := Result}} = UpdateResult, case emqx_conf:update(?MOD_KEY_PATH(Backend), UpdateReq, #{override_to => cluster}) of
?SLOG(info, #{ {ok, _UpdateResult} ->
msg => "Update SSO configuration successfully", case lookup(Backend) of
backend => Backend, undefined ->
result => Result ok;
}), #?MOD_TAB{state = State, last_error = ?NO_ERROR} ->
Result; {ok, State};
Data ->
{error, Data#?MOD_TAB.last_error}
end;
{error, Reason} = Error -> {error, Reason} = Error ->
SafeReason = emqx_utils:redact(Reason),
?SLOG(error, #{ ?SLOG(error, #{
msg => "Update SSO configuration failed", msg => "update_sso_failed",
backend => Backend, backend => Backend,
reason => Reason reason => SafeReason
}), }),
Error Error
end. end.
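To make the comment above concrete: a schema-valid update always lands in the configuration, and the caller only learns about runtime problems through the last_error recorded in the ETS table. A sketch of the return shapes a caller of update/2 can expect (RawConf stands for the raw backend config map; this is not a definitive contract):
case emqx_dashboard_sso_manager:update(ldap, RawConf) of
    ok            -> ok;             %% config stored, no runtime state materialized yet
    {ok, _State}  -> ok;             %% config stored and the backend came up cleanly
    {error, Last} -> {error, Last}   %% config rejected, or the recorded last_error
end.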
pre_config_update(_Path, {update, Backend, Config}, OldConf) -> pre_config_update(_, {update, _Backend, Config}, _OldConf) ->
BackendBin = bin(Backend), {ok, maybe_write_certs(Config)};
{ok, OldConf#{BackendBin => Config}}; pre_config_update(_, {delete, _Backend}, undefined) ->
pre_config_update(_Path, {delete, Backend}, OldConf) ->
BackendBin = bin(Backend),
case maps:find(BackendBin, OldConf) of
error ->
throw(not_exists); throw(not_exists);
{ok, _} -> pre_config_update(_, {delete, _Backend}, _OldConf) ->
{ok, maps:remove(BackendBin, OldConf)} {ok, null}.
post_config_update(_, UpdateReq, NewConf, _OldConf, _AppEnvs) ->
_ = on_config_update(UpdateReq, NewConf),
ok.
propagated_post_config_update(
?MOD_KEY_PATH(BackendBin) = Path, _UpdateReq, undefined, OldConf, AppEnvs
) ->
case atom(BackendBin) of
{ok, Backend} ->
post_config_update(Path, {delete, Backend}, undefined, OldConf, AppEnvs);
Error ->
Error
end;
propagated_post_config_update(
?MOD_KEY_PATH(BackendBin) = Path, _UpdateReq, NewConf, OldConf, AppEnvs
) ->
case atom(BackendBin) of
{ok, Backend} ->
post_config_update(Path, {update, Backend, undefined}, NewConf, OldConf, AppEnvs);
Error ->
Error
end. end.
post_config_update(_Path, UpdateReq, NewConf, _OldConf, _AppEnvs) -> on_config_update({update, Backend, _RawConfig}, Config) ->
Result = call({update_config, UpdateReq, NewConf}),
{ok, Result}.
on_config_update({update, Backend, _Config}, NewConf) ->
Provider = provider(Backend), Provider = provider(Backend),
Config = maps:get(Backend, NewConf),
case lookup(Backend) of case lookup(Backend) of
undefined -> undefined ->
on_backend_updated( on_backend_updated(
Backend,
emqx_dashboard_sso:create(Provider, Config), emqx_dashboard_sso:create(Provider, Config),
fun(State) -> fun(State) ->
ets:insert(dashboard_sso, #dashboard_sso{backend = Backend, state = State}) update_state(Backend, State)
end end
); );
Data -> Data ->
update_last_error(Backend, ?NO_ERROR),
on_backend_updated( on_backend_updated(
emqx_dashboard_sso:update(Provider, Config, Data#dashboard_sso.state), Backend,
emqx_dashboard_sso:update(Provider, Config, Data#?MOD_TAB.state),
fun(State) -> fun(State) ->
ets:insert(dashboard_sso, Data#dashboard_sso{state = State}) update_state(Backend, State)
end end
) )
end; end;
on_config_update({delete, Backend}, _NewConf) -> on_config_update({delete, Backend}, _NewConf) ->
case lookup(Backend) of case lookup(Backend) of
undefined -> undefined ->
{error, not_exists}; on_backend_updated(Backend, {error, not_exists}, undefined);
Data -> Data ->
Provider = provider(Backend), Provider = provider(Backend),
on_backend_updated( on_backend_updated(
emqx_dashboard_sso:destroy(Provider, Data#dashboard_sso.state), Backend,
emqx_dashboard_sso:destroy(Provider, Data#?MOD_TAB.state),
fun() -> fun() ->
ets:delete(dashboard_sso, Backend) ets:delete(?MOD_TAB, Backend)
end end
) )
end. end.
lookup(Backend) -> lookup(Backend) ->
case ets:lookup(dashboard_sso, Backend) of case ets:lookup(?MOD_TAB, Backend) of
[Data] -> [Data] ->
Data; Data;
[] -> [] ->
undefined undefined
end. end.
start_resource_if_enabled(ResourceId, {ok, _} = Result, #{enable := true}) -> %% To avoid resource leakage, a failed resource start never affects the update result,
_ = emqx_resource:start(ResourceId), %% so the resource_id is always recorded
start_resource_if_enabled(ResourceId, {ok, _} = Result, #{enable := true, backend := Backend}) ->
case emqx_resource:start(ResourceId, ?DEFAULT_START_OPTS) of
ok ->
ok;
{error, Reason} ->
SafeReason = emqx_utils:redact(Reason),
?SLOG(error, #{
msg => "start_backend_failed",
resource_id => ResourceId,
reason => SafeReason
}),
update_last_error(Backend, SafeReason),
ok
end,
Result; Result;
start_resource_if_enabled(_ResourceId, Result, _Config) -> start_resource_if_enabled(_ResourceId, Result, _Config) ->
Result. Result.
on_backend_updated({ok, State} = Ok, Fun) -> on_backend_updated(_Backend, {ok, State} = Ok, Fun) ->
Fun(State), Fun(State),
Ok; Ok;
on_backend_updated(ok, Fun) -> on_backend_updated(_Backend, ok, Fun) ->
Fun(), Fun(),
ok; ok;
on_backend_updated(Error, _) -> on_backend_updated(Backend, {error, Reason} = Error, _) ->
update_last_error(Backend, Reason),
Error. Error.
bin(A) when is_atom(A) -> atom_to_binary(A, utf8); bin(A) when is_atom(A) -> atom_to_binary(A, utf8);
bin(L) when is_list(L) -> list_to_binary(L); bin(L) when is_list(L) -> list_to_binary(L);
bin(X) -> X. bin(X) -> X.
atom(B) ->
emqx_utils:safe_to_existing_atom(B).
add_handler() ->
ok = emqx_conf:add_handler(?MOD_KEY_PATH('?'), ?MODULE).
remove_handler() ->
ok = emqx_conf:remove_handler(?MOD_KEY_PATH('?')).
maybe_write_certs(#{<<"backend">> := Backend} = Conf) ->
Dir = certs_path(Backend),
Provider = provider(Backend),
emqx_dashboard_sso:convert_certs(Provider, Dir, Conf).
certs_path(Backend) ->
filename:join(["sso", Backend]).
update_state(Backend, State) ->
Data = ensure_backend_data(Backend),
ets:insert(?MOD_TAB, Data#?MOD_TAB{state = State}).
update_last_error(Backend, LastError) ->
Data = ensure_backend_data(Backend),
ets:insert(?MOD_TAB, Data#?MOD_TAB{last_error = LastError}).
ensure_backend_data(Backend) ->
case ets:lookup(?MOD_TAB, Backend) of
[Data] ->
Data;
[] ->
#?MOD_TAB{backend = Backend}
end.
do_get_backend_status(#?MOD_TAB{state = #{resource_id := ResourceId}}) ->
case emqx_resource_manager:lookup(ResourceId) of
{ok, _Group, #{status := connected}} ->
#{running => true, last_error => ?NO_ERROR};
{ok, _Group, #{status := Status}} ->
#{
running => false,
last_error => format([<<"Resource not valid, status: ">>, Status])
};
{error, not_found} ->
#{
running => false,
last_error => <<"Resource not found">>
}
end;
do_get_backend_status(#?MOD_TAB{last_error = ?NO_ERROR}) ->
#{running => true, last_error => ?NO_ERROR};
do_get_backend_status(#?MOD_TAB{last_error = LastError}) ->
#{
running => false,
last_error => format([LastError])
}.

View File

@ -22,13 +22,21 @@
-export([ -export([
create/1, create/1,
update/2, update/2,
destroy/1 destroy/1,
convert_certs/2
]). ]).
-export([login/2, callback/2]). -export([login/2, callback/2]).
-dialyzer({nowarn_function, do_create/1}). -dialyzer({nowarn_function, do_create/1}).
-define(RESPHEADERS, #{
<<"cache-control">> => <<"no-cache">>,
<<"pragma">> => <<"no-cache">>,
<<"content-type">> => <<"text/plain">>
}).
-define(REDIRECT_BODY, <<"Redirecting...">>).
-define(DIR, <<"saml_sp_certs">>). -define(DIR, <<"saml_sp_certs">>).
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
@ -93,9 +101,11 @@ desc(_) ->
%% APIs %% APIs
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
create(#{enable := false} = _Config) ->
{ok, undefined};
create(#{sp_sign_request := true} = Config) -> create(#{sp_sign_request := true} = Config) ->
try try
do_create(ensure_cert_and_key(Config)) do_create(Config)
catch catch
Kind:Error -> Kind:Error ->
Msg = failed_to_ensure_cert_and_key, Msg = failed_to_ensure_cert_and_key,
@ -103,7 +113,70 @@ create(#{sp_sign_request := true} = Config) ->
{error, Msg} {error, Msg}
end; end;
create(#{sp_sign_request := false} = Config) -> create(#{sp_sign_request := false} = Config) ->
do_create(Config#{key => undefined, certificate => undefined}). do_create(Config#{sp_private_key => undefined, sp_public_key => undefined}).
update(Config0, State) ->
destroy(State),
create(Config0).
destroy(_State) ->
_ = file:del_dir_r(emqx_tls_lib:pem_dir(?DIR)),
_ = application:stop(esaml),
ok.
login(
#{headers := Headers} = _Req,
#{sp := SP, idp_meta := #esaml_idp_metadata{login_location = IDP}} = _State
) ->
SignedXml = esaml_sp:generate_authn_request(IDP, SP),
Target = esaml_binding:encode_http_redirect(IDP, SignedXml, <<>>),
Redirect =
case is_msie(Headers) of
true ->
Html = esaml_binding:encode_http_post(IDP, SignedXml, <<>>),
{200, ?RESPHEADERS, Html};
false ->
{302, ?RESPHEADERS#{<<"location">> => Target}, ?REDIRECT_BODY}
end,
{redirect, Redirect}.
callback(_Req = #{body := Body}, #{sp := SP, dashboard_addr := DashboardAddr} = _State) ->
case do_validate_assertion(SP, fun esaml_util:check_dupe_ets/2, Body) of
{ok, Assertion, _RelayState} ->
Subject = Assertion#esaml_assertion.subject,
Username = iolist_to_binary(Subject#esaml_subject.name),
gen_redirect_response(DashboardAddr, Username);
{error, Reason0} ->
Reason = [
"Access denied, assertion failed validation:\n", io_lib:format("~p\n", [Reason0])
],
{error, iolist_to_binary(Reason)}
end.
convert_certs(
Dir,
#{<<"sp_sign_request">> := true, <<"sp_public_key">> := Cert, <<"sp_private_key">> := Key} =
Conf
) ->
case
emqx_tls_lib:ensure_ssl_files(
Dir, #{enable => true, certfile => Cert, keyfile => Key}, #{}
)
of
{ok, #{certfile := CertPath, keyfile := KeyPath}} ->
Conf#{<<"sp_public_key">> => bin(CertPath), <<"sp_private_key">> => bin(KeyPath)};
{error, Reason} ->
?SLOG(error, #{msg => "failed_to_save_sp_sign_keys", reason => Reason}),
throw("Failed to save sp signing key(s)")
end;
convert_certs(_Dir, Conf) ->
Conf.
%%------------------------------------------------------------------------------
%% Internal functions
%%------------------------------------------------------------------------------
bin(X) -> iolist_to_binary(X).
do_create( do_create(
#{ #{
@ -145,46 +218,6 @@ do_create(
{error, Reason} {error, Reason}
end. end.
update(Config0, State) ->
destroy(State),
create(Config0).
destroy(_State) ->
_ = file:del_dir_r(emqx_tls_lib:pem_dir(?DIR)),
_ = application:stop(esaml),
ok.
login(
#{headers := Headers} = _Req,
#{sp := SP, idp_meta := #esaml_idp_metadata{login_location = IDP}} = _State
) ->
SignedXml = esaml_sp:generate_authn_request(IDP, SP),
Target = esaml_binding:encode_http_redirect(IDP, SignedXml, <<>>),
RespHeaders = #{<<"Cache-Control">> => <<"no-cache">>, <<"Pragma">> => <<"no-cache">>},
Redirect =
case is_msie(Headers) of
true ->
Html = esaml_binding:encode_http_post(IDP, SignedXml, <<>>),
{200, RespHeaders, Html};
false ->
RespHeaders1 = RespHeaders#{<<"Location">> => Target},
{302, RespHeaders1, <<"Redirecting...">>}
end,
{redirect, Redirect}.
callback(_Req = #{body := Body}, #{sp := SP} = _State) ->
case do_validate_assertion(SP, fun esaml_util:check_dupe_ets/2, Body) of
{ok, Assertion, _RelayState} ->
Subject = Assertion#esaml_assertion.subject,
Username = iolist_to_binary(Subject#esaml_subject.name),
ensure_user_exists(Username);
{error, Reason0} ->
Reason = [
"Access denied, assertion failed validation:\n", io_lib:format("~p\n", [Reason0])
],
{error, iolist_to_binary(Reason)}
end.
do_validate_assertion(SP, DuplicateFun, Body) -> do_validate_assertion(SP, DuplicateFun, Body) ->
PostVals = cow_qs:parse_qs(Body), PostVals = cow_qs:parse_qs(Body),
SAMLEncoding = proplists:get_value(<<"SAMLEncoding">>, PostVals), SAMLEncoding = proplists:get_value(<<"SAMLEncoding">>, PostVals),
@ -200,30 +233,18 @@ do_validate_assertion(SP, DuplicateFun, Body) ->
end end
end. end.
%%------------------------------------------------------------------------------ gen_redirect_response(DashboardAddr, Username) ->
%% Internal functions case ensure_user_exists(Username) of
%%------------------------------------------------------------------------------ {ok, Role, Token} ->
Target = login_redirect_target(DashboardAddr, Username, Role, Token),
ensure_cert_and_key(#{sp_public_key := Cert, sp_private_key := Key} = Config) -> {redirect, {302, ?RESPHEADERS#{<<"location">> => Target}, ?REDIRECT_BODY}};
case {error, Reason} ->
emqx_tls_lib:ensure_ssl_files( {error, Reason}
?DIR, #{enable => ture, certfile => Cert, keyfile => Key}, #{}
)
of
{ok, #{certfile := CertPath, keyfile := KeyPath} = _NSSL} ->
Config#{sp_public_key => CertPath, sp_private_key => KeyPath};
{error, #{which_options := KeyPath}} ->
error({missing_key, lists:flatten(KeyPath)})
end. end.
maybe_load_cert_or_key(undefined, _) -> %%------------------------------------------------------------------------------
undefined; %% Helpers functions
maybe_load_cert_or_key(Path, Func) -> %%------------------------------------------------------------------------------
Func(Path).
is_msie(Headers) ->
UA = maps:get(<<"user-agent">>, Headers, <<"">>),
not (binary:match(UA, <<"MSIE">>) =:= nomatch).
%% TODO: unify with emqx_dashboard_sso_manager:ensure_user_exists/1 %% TODO: unify with emqx_dashboard_sso_manager:ensure_user_exists/1
ensure_user_exists(Username) -> ensure_user_exists(Username) ->
@ -238,3 +259,19 @@ ensure_user_exists(Username) ->
Error Error
end end
end. end.
maybe_load_cert_or_key(undefined, _) ->
undefined;
maybe_load_cert_or_key(Path, Func) ->
Func(Path).
is_msie(Headers) ->
UA = maps:get(<<"user-agent">>, Headers, <<"">>),
not (binary:match(UA, <<"MSIE">>) =:= nomatch).
login_redirect_target(DashboardAddr, Username, Role, Token) ->
LoginMeta = emqx_dashboard_sso_api:login_meta(Username, Role, Token),
<<DashboardAddr/binary, "/?login_meta=", (base64_login_meta(LoginMeta))/binary>>.
base64_login_meta(LoginMeta) ->
base64:encode(emqx_utils_json:encode(LoginMeta)).
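Pulling the callback pieces together: a successful SAML assertion now ends in an HTTP 302 back to the dashboard, with the login metadata carried in a query parameter. A hedged sketch of the target built by login_redirect_target/4 (host, user, and the encoded payload are invented):
%% login_redirect_target(<<"https://dashboard.example.com:18083">>, <<"alice">>, Role, Token)
%% => <<"https://dashboard.example.com:18083/?login_meta=eyJ1c2VybmFtZSI6ImFsaWNlIiw...">>
%% where login_meta is base64(JSON of #{username, role, token, version, ...}).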

View File

@ -82,22 +82,20 @@ schema("/sso/saml/metadata") ->
sp_saml_metadata(get, _Req) -> sp_saml_metadata(get, _Req) ->
case emqx_dashboard_sso_manager:lookup_state(saml) of case emqx_dashboard_sso_manager:lookup_state(saml) of
undefined -> #{enable := true, sp := SP} = _State ->
{404, #{code => ?BACKEND_NOT_FOUND, message => <<"Backend not found">>}};
#{sp := SP} = _State ->
SignedXml = esaml_sp:generate_metadata(SP), SignedXml = esaml_sp:generate_metadata(SP),
Metadata = xmerl:export([SignedXml], xmerl_xml), Metadata = xmerl:export([SignedXml], xmerl_xml),
{200, #{<<"Content-Type">> => <<"text/xml">>}, erlang:iolist_to_binary(Metadata)} {200, #{<<"content-type">> => <<"text/xml">>}, erlang:iolist_to_binary(Metadata)};
_ ->
{404, #{code => ?BACKEND_NOT_FOUND, message => <<"Backend not found">>}}
end. end.
sp_saml_callback(post, Req) -> sp_saml_callback(post, Req) ->
case emqx_dashboard_sso_manager:lookup_state(saml) of case emqx_dashboard_sso_manager:lookup_state(saml) of
undefined -> State = #{enable := true} ->
{404, #{code => ?BACKEND_NOT_FOUND, message => <<"Backend not found">>}};
State ->
case (provider(saml)):callback(Req, State) of case (provider(saml)):callback(Req, State) of
{ok, Role, Token} -> {redirect, Redirect} ->
{200, emqx_dashboard_sso_api:login_reply(Role, Token)}; Redirect;
{error, Reason} -> {error, Reason} ->
?SLOG(info, #{ ?SLOG(info, #{
msg => "dashboard_saml_sso_login_failed", msg => "dashboard_saml_sso_login_failed",
@ -105,7 +103,9 @@ sp_saml_callback(post, Req) ->
reason => Reason reason => Reason
}), }),
{403, #{code => <<"UNAUTHORIZED">>, message => Reason}} {403, #{code => <<"UNAUTHORIZED">>, message => Reason}}
end end;
_ ->
{404, #{code => ?BACKEND_NOT_FOUND, message => <<"Backend not found">>}}
end. end.
%%-------------------------------------------------------------------- %%--------------------------------------------------------------------

View File

@ -8,33 +8,32 @@
-include_lib("typerefl/include/types.hrl"). -include_lib("typerefl/include/types.hrl").
%% Hocon %% Hocon
-export([namespace/0, roots/0, fields/1, tags/0, desc/1]). -export([fields/1, desc/1]).
-export([ -export([
common_backend_schema/1, common_backend_schema/1,
backend_schema/1, backend_schema/1,
username_password_schema/0 username_password_schema/0
]). ]).
-import(hoconsc, [ref/2, mk/2, enum/1]). -import(hoconsc, [ref/2, mk/2, enum/1]).
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
%% Hocon Schema %% Hocon Schema
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
namespace() -> dashboard_sso. fields(sso) ->
tags() ->
[<<"Dashboard Single Sign-On">>].
roots() -> [dashboard_sso].
fields(dashboard_sso) ->
lists:map( lists:map(
fun({Type, Module}) -> fun({Type, Module}) ->
{Type, mk(emqx_dashboard_sso:hocon_ref(Module), #{required => {false, recursively}})} {Type,
mk(
emqx_dashboard_sso:hocon_ref(Module),
#{required => {false, recursively}}
)}
end, end,
maps:to_list(emqx_dashboard_sso:backends()) maps:to_list(emqx_dashboard_sso:backends())
). ).
desc(dashboard_sso) -> desc(sso) ->
"Dashboard Single Sign-On"; "Dashboard Single Sign-On";
desc(_) -> desc(_) ->
undefined. undefined.

View File

@ -8,28 +8,36 @@
-compile(export_all). -compile(export_all).
-include_lib("emqx_dashboard/include/emqx_dashboard.hrl"). -include_lib("emqx_dashboard/include/emqx_dashboard.hrl").
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
-include_lib("eunit/include/eunit.hrl"). -include_lib("eunit/include/eunit.hrl").
-define(LDAP_HOST, "ldap"). -define(LDAP_HOST, "ldap").
-define(LDAP_DEFAULT_PORT, 389). -define(LDAP_DEFAULT_PORT, 389).
-define(LDAP_USER, <<"mqttuser0001">>). -define(LDAP_USER, <<"mqttuser0001">>).
-define(LDAP_USER_PASSWORD, <<"mqttuser0001">>). -define(LDAP_USER_PASSWORD, <<"mqttuser0001">>).
-define(MOD_TAB, emqx_dashboard_sso).
-define(MOD_KEY_PATH, [dashboard, sso, ldap]).
-define(RESOURCE_GROUP, <<"emqx_dashboard_sso">>).
-import(emqx_mgmt_api_test_util, [request/2, request/3, uri/1, request_api/3]). -import(emqx_mgmt_api_test_util, [request/2, request/3, uri/1, request_api/3]).
all() -> all() ->
[ [
t_bad_create,
t_create, t_create,
t_update, t_update,
t_get, t_get,
t_login_with_bad, t_login_with_bad,
t_first_login, t_first_login,
t_next_login, t_next_login,
t_bad_update,
t_delete t_delete
]. ].
init_per_suite(Config) -> init_per_suite(Config) ->
_ = application:load(emqx_conf), _ = application:load(emqx_conf),
emqx_config:save_schema_mod_and_names(emqx_dashboard_sso_schema), emqx_config:save_schema_mod_and_names(emqx_dashboard_schema),
emqx_mgmt_api_test_util:init_suite([emqx_dashboard, emqx_dashboard_sso]), emqx_mgmt_api_test_util:init_suite([emqx_dashboard, emqx_dashboard_sso]),
Config. Config.
@ -53,11 +61,49 @@ end_per_testcase(Case, _) ->
end, end,
ok. ok.
t_bad_create(_) ->
Path = uri(["sso", "ldap"]),
?assertMatch(
{ok, 400, _},
request(
put,
Path,
ldap_config(#{
<<"username">> => <<"invalid">>,
<<"enable">> => true,
<<"request_timeout">> => <<"1s">>
})
)
),
?assertMatch(#{backend := ldap}, emqx:get_config(?MOD_KEY_PATH, undefined)),
check_running([]),
?assertMatch(
[#{backend := <<"ldap">>, enable := true, running := false, last_error := _}], get_sso()
),
emqx_dashboard_sso_manager:delete(ldap),
?retry(
_Interval = 500,
_NAttempts = 10,
?assertMatch([], emqx_resource_manager:list_group(?RESOURCE_GROUP))
),
ok.
t_create(_) -> t_create(_) ->
check_running([]), check_running([]),
Path = uri(["sso", "ldap"]), Path = uri(["sso", "ldap"]),
{ok, 200, Result} = request(put, Path, ldap_config()), {ok, 200, Result} = request(put, Path, ldap_config()),
check_running([]), check_running([]),
?assertMatch(#{backend := ldap}, emqx:get_config(?MOD_KEY_PATH, undefined)),
?assertMatch([_], ets:tab2list(?MOD_TAB)),
?retry(
_Interval = 500,
_NAttempts = 10,
?assertMatch([_], emqx_resource_manager:list_group(?RESOURCE_GROUP))
),
?assertMatch(#{backend := <<"ldap">>, enable := false}, decode_json(Result)), ?assertMatch(#{backend := <<"ldap">>, enable := false}, decode_json(Result)),
?assertMatch([#{backend := <<"ldap">>, enable := false}], get_sso()), ?assertMatch([#{backend := <<"ldap">>, enable := false}], get_sso()),
?assertNotEqual(undefined, emqx_dashboard_sso_manager:lookup_state(ldap)), ?assertNotEqual(undefined, emqx_dashboard_sso_manager:lookup_state(ldap)),
@ -119,6 +165,28 @@ t_next_login(_) ->
?assertMatch(#{license := _, token := _}, decode_json(Result)), ?assertMatch(#{license := _, token := _}, decode_json(Result)),
ok. ok.
t_bad_update(_) ->
Path = uri(["sso", "ldap"]),
?assertMatch(
{ok, 400, _},
request(
put,
Path,
ldap_config(#{
<<"username">> => <<"invalid">>,
<<"enable">> => true,
<<"request_timeout">> => <<"1s">>
})
)
),
?assertMatch(#{backend := ldap}, emqx:get_config(?MOD_KEY_PATH, undefined)),
check_running([]),
?assertMatch(
[#{backend := <<"ldap">>, enable := true, running := false, last_error := _}], get_sso()
),
ok.
t_delete(_) -> t_delete(_) ->
Path = uri(["sso", "ldap"]), Path = uri(["sso", "ldap"]),
?assertMatch({ok, 204, _}, request(delete, Path)), ?assertMatch({ok, 204, _}, request(delete, Path)),

View File

@ -6,13 +6,15 @@
-behaviour(hocon_schema). -behaviour(hocon_schema).
-include_lib("typerefl/include/types.hrl").
-include_lib("hocon/include/hoconsc.hrl").
-export([namespace/0, roots/0, fields/1, translations/0, translation/1, desc/1, validations/0]). -export([namespace/0, roots/0, fields/1, translations/0, translation/1, desc/1, validations/0]).
-define(EE_SCHEMA_MODULES, [ -define(EE_SCHEMA_MODULES, [
emqx_license_schema, emqx_license_schema,
emqx_schema_registry_schema, emqx_schema_registry_schema,
emqx_ft_schema, emqx_ft_schema
emqx_dashboard_sso_schema
]). ]).
namespace() -> namespace() ->
@ -23,6 +25,61 @@ roots() ->
fields("node") -> fields("node") ->
redefine_node(emqx_conf_schema:fields("node")); redefine_node(emqx_conf_schema:fields("node"));
fields("log") ->
redefine_log(emqx_conf_schema:fields("log"));
fields("log_audit_handler") ->
CommonConfs = emqx_conf_schema:log_handler_common_confs(file, #{}),
CommonConfs1 = lists:filter(
fun({Key, _}) ->
not lists:member(Key, ["level", "formatter"])
end,
CommonConfs
),
[
{"level",
hoconsc:mk(
emqx_conf_schema:log_level(),
#{
default => info,
desc => ?DESC(emqx_conf_schema, "audit_handler_level"),
importance => ?IMPORTANCE_HIDDEN
}
)},
{"path",
hoconsc:mk(
emqx_conf_schema:file(),
#{
desc => ?DESC(emqx_conf_schema, "audit_file_handler_path"),
default => <<"${EMQX_LOG_DIR}/audit.log">>,
importance => ?IMPORTANCE_HIGH,
converter => fun(Path, Opts) ->
emqx_schema:naive_env_interpolation(
emqx_conf_schema:ensure_unicode_path(Path, Opts)
)
end
}
)},
{"rotation_count",
hoconsc:mk(
range(1, 128),
#{
default => 10,
converter => fun emqx_conf_schema:convert_rotation/2,
desc => ?DESC(emqx_conf_schema, "log_rotation_count"),
importance => ?IMPORTANCE_MEDIUM
}
)},
{"rotation_size",
hoconsc:mk(
hoconsc:union([infinity, emqx_schema:bytesize()]),
#{
default => <<"50MB">>,
desc => ?DESC(emqx_conf_schema, "log_file_handler_max_size"),
importance => ?IMPORTANCE_MEDIUM
}
)}
] ++ CommonConfs1;
fields(Name) -> fields(Name) ->
ee_delegate(fields, ?EE_SCHEMA_MODULES, Name). ee_delegate(fields, ?EE_SCHEMA_MODULES, Name).
@ -32,6 +89,8 @@ translations() ->
translation(Name) -> translation(Name) ->
emqx_conf_schema:translation(Name). emqx_conf_schema:translation(Name).
desc("log_audit_handler") ->
?DESC(emqx_conf_schema, "desc_audit_log_handler");
desc(Name) -> desc(Name) ->
ee_delegate(desc, ?EE_SCHEMA_MODULES, Name). ee_delegate(desc, ?EE_SCHEMA_MODULES, Name).
@ -61,13 +120,20 @@ ee_delegate(Method, [], Name) ->
apply(emqx_conf_schema, Method, [Name]). apply(emqx_conf_schema, Method, [Name]).
redefine_roots(Roots) -> redefine_roots(Roots) ->
Overrides = [{"node", #{type => hoconsc:ref(?MODULE, "node")}}], Overrides = [
{"node", #{type => hoconsc:ref(?MODULE, "node")}},
{"log", #{type => hoconsc:ref(?MODULE, "log")}}
],
override(Roots, Overrides). override(Roots, Overrides).
redefine_node(Fields) -> redefine_node(Fields) ->
Overrides = [], Overrides = [],
override(Fields, Overrides). override(Fields, Overrides).
redefine_log(Fields) ->
Overrides = [],
override(Fields, Overrides) ++ audit_log_conf().
override(Fields, []) -> override(Fields, []) ->
Fields; Fields;
override(Fields, [{Name, Override} | More]) -> override(Fields, [{Name, Override} | More]) ->
@ -82,3 +148,19 @@ find_schema(Name, Fields) ->
replace_schema(Name, Schema, Fields) -> replace_schema(Name, Schema, Fields) ->
lists:keyreplace(Name, 1, Fields, {Name, Schema}). lists:keyreplace(Name, 1, Fields, {Name, Schema}).
audit_log_conf() ->
[
{"audit",
hoconsc:mk(
hoconsc:ref(?MODULE, "log_audit_handler"),
#{
%% note: we need to keep the descriptions associated with
%% `emqx_conf_schema' module hocon i18n file because that's what
%% `emqx_conf:gen_config_md' seems to expect.
desc => ?DESC(emqx_conf_schema, "log_audit_handler"),
importance => ?IMPORTANCE_HIGH,
default => #{<<"enable">> => true, <<"level">> => <<"info">>}
}
)}
].

View File

@ -13,6 +13,25 @@
all() -> all() ->
emqx_common_test_helpers:all(?MODULE). emqx_common_test_helpers:all(?MODULE).
init_per_testcase(t_audit_log_conf, Config) ->
Apps = emqx_cth_suite:start(
[
emqx_enterprise,
{emqx_conf, #{schema_mod => emqx_enterprise_schema}}
],
#{work_dir => emqx_cth_suite:work_dir(Config)}
),
[{apps, Apps} | Config];
init_per_testcase(_TestCase, Config) ->
Config.
end_per_testcase(t_audit_log_conf, Config) ->
Apps = ?config(apps, Config),
ok = emqx_cth_suite:stop(Apps),
ok;
end_per_testcase(_TestCase, _Config) ->
ok.
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
%% Tests %% Tests
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
@ -50,3 +69,36 @@ t_translations(_Config) ->
emqx_conf_schema:translation(Root), emqx_conf_schema:translation(Root),
emqx_enterprise_schema:translation(Root) emqx_enterprise_schema:translation(Root)
). ).
t_audit_log_conf(_Config) ->
FileExpect = #{
<<"enable">> => true,
<<"formatter">> => <<"text">>,
<<"level">> => <<"warning">>,
<<"rotation_count">> => 10,
<<"rotation_size">> => <<"50MB">>,
<<"time_offset">> => <<"system">>,
<<"path">> => <<"log/emqx.log">>
},
ExpectLog1 = #{
<<"console">> =>
#{
<<"enable">> => false,
<<"formatter">> => <<"text">>,
<<"level">> => <<"warning">>,
<<"time_offset">> => <<"system">>
},
<<"file">> =>
#{<<"default">> => FileExpect},
<<"audit">> =>
#{
<<"enable">> => true,
<<"level">> => <<"info">>,
<<"path">> => <<"log/audit.log">>,
<<"rotation_count">> => 10,
<<"rotation_size">> => <<"50MB">>,
<<"time_offset">> => <<"system">>
}
},
?assertEqual(ExpectLog1, emqx_conf:get_raw([<<"log">>])),
ok.

View File

@ -16,3 +16,38 @@ doc_gen_test() ->
ok = emqx_conf:dump_schema(Dir, emqx_enterprise_schema) ok = emqx_conf:dump_schema(Dir, emqx_enterprise_schema)
end end
}. }.
audit_log_test() ->
ensure_acl_conf(),
Conf0 = <<"node {cookie = aaa, data_dir = \"/tmp\"}">>,
{ok, ConfMap0} = hocon:binary(Conf0, #{format => richmap}),
ConfList = hocon_tconf:generate(emqx_enterprise_schema, ConfMap0),
Kernel = proplists:get_value(kernel, ConfList),
Loggers = proplists:get_value(logger, Kernel),
FileHandlers = lists:filter(fun(L) -> element(3, L) =:= logger_disk_log_h end, Loggers),
AuditHandler = lists:keyfind(emqx_audit, 2, FileHandlers),
%% default is enable and log level is info.
?assertMatch(
{handler, emqx_audit, logger_disk_log_h, #{
config := #{
type := wrap,
file := "log/audit.log",
max_no_bytes := _,
max_no_files := _
},
filesync_repeat_interval := no_repeat,
filters := [{filter_audit, {_, stop}}],
formatter := _,
level := info
}},
AuditHandler
),
ok.
ensure_acl_conf() ->
File = emqx_schema:naive_env_interpolation(<<"${EMQX_ETC_DIR}/acl.conf">>),
ok = filelib:ensure_dir(filename:dirname(File)),
case filelib:is_regular(File) of
true -> ok;
false -> file:write_file(File, <<"">>)
end.

View File

@ -74,7 +74,7 @@ fields(config) ->
{request_timeout, {request_timeout,
?HOCON(emqx_schema:timeout_duration_ms(), #{ ?HOCON(emqx_schema:timeout_duration_ms(), #{
desc => ?DESC(request_timeout), desc => ?DESC(request_timeout),
default => <<"5s">> default => <<"10s">>
})}, })},
{ssl, {ssl,
?HOCON(?R_REF(?MODULE, ssl), #{ ?HOCON(?R_REF(?MODULE, ssl), #{
@ -158,7 +158,7 @@ on_start(
{error, Reason} -> {error, Reason} ->
?tp( ?tp(
ldap_connector_start_failed, ldap_connector_start_failed,
#{error => Reason} #{error => emqx_utils:redact(Reason)}
), ),
{error, Reason} {error, Reason}
end. end.
@ -248,7 +248,7 @@ do_ldap_query(
SearchOptions, SearchOptions,
#{pool_name := PoolName} = State #{pool_name := PoolName} = State
) -> ) ->
LogMeta = #{connector => InstId, search => SearchOptions, state => State}, LogMeta = #{connector => InstId, search => SearchOptions, state => emqx_utils:redact(State)},
?TRACE("QUERY", "ldap_connector_received", LogMeta), ?TRACE("QUERY", "ldap_connector_received", LogMeta),
case case
ecpool:pick_and_do( ecpool:pick_and_do(
@ -268,7 +268,10 @@ do_ldap_query(
{error, Reason} -> {error, Reason} ->
?SLOG( ?SLOG(
error, error,
LogMeta#{msg => "ldap_connector_do_query_failed", reason => Reason} LogMeta#{
msg => "ldap_connector_do_query_failed",
reason => emqx_utils:redact(Reason)
}
), ),
{error, {unrecoverable_error, Reason}} {error, {unrecoverable_error, Reason}}
end. end.

View File

@ -91,14 +91,14 @@ refs() ->
create(_AuthenticatorID, Config) -> create(_AuthenticatorID, Config) ->
do_create(?MODULE, Config). do_create(?MODULE, Config).
do_create(Module, Config0) -> do_create(Module, Config) ->
ResourceId = emqx_authn_utils:make_resource_id(Module), ResourceId = emqx_authn_utils:make_resource_id(Module),
{Config, State} = parse_config(Config0), State = parse_config(Config),
{ok, _Data} = emqx_authn_utils:create_resource(ResourceId, emqx_ldap, Config), {ok, _Data} = emqx_authn_utils:create_resource(ResourceId, emqx_ldap, Config),
{ok, State#{resource_id => ResourceId}}. {ok, State#{resource_id => ResourceId}}.
update(Config0, #{resource_id := ResourceId} = _State) -> update(Config, #{resource_id := ResourceId} = _State) ->
{Config, NState} = parse_config(Config0), NState = parse_config(Config),
case emqx_authn_utils:update_resource(emqx_ldap, Config, ResourceId) of case emqx_authn_utils:update_resource(emqx_ldap, Config, ResourceId) of
{error, Reason} -> {error, Reason} ->
error({load_config_error, Reason}); error({load_config_error, Reason});
@ -143,19 +143,7 @@ authenticate(
end. end.
parse_config(Config) -> parse_config(Config) ->
State = lists:foldl( maps:with([query_timeout, password_attribute, is_superuser_attribute], Config).
fun(Key, Acc) ->
case maps:find(Key, Config) of
{ok, Value} when is_binary(Value) ->
Acc#{Key := erlang:binary_to_list(Value)};
_ ->
Acc
end
end,
Config,
[password_attribute, is_superuser_attribute, query_timeout]
),
{Config, State}.
%% To compatible v4.x %% To compatible v4.x
is_enabled(Password, #eldap_entry{attributes = Attributes} = Entry, State) -> is_enabled(Password, #eldap_entry{attributes = Attributes} = Entry, State) ->

View File

@ -116,7 +116,7 @@ authorize(
{error, Reason} -> {error, Reason} ->
?SLOG(error, #{ ?SLOG(error, #{
msg => "query_ldap_error", msg => "query_ldap_error",
reason => Reason, reason => emqx_utils:redact(Reason),
resource_id => ResourceID resource_id => ResourceID
}), }),
nomatch nomatch
@ -134,21 +134,10 @@ do_authorize(_Action, _Topic, [], _Entry) ->
nomatch. nomatch.
new_annotations(Init, Source) -> new_annotations(Init, Source) ->
lists:foldl( State = maps:with(
fun(Attr, Acc) -> [query_timeout, publish_attribute, subscribe_attribute, all_attribute], Source
Acc#{ ),
Attr => maps:merge(Init, State).
case maps:get(Attr, Source) of
Value when is_binary(Value) ->
erlang:binary_to_list(Value);
Value ->
Value
end
}
end,
Init,
[publish_attribute, subscribe_attribute, all_attribute]
).
select_attrs(#{action_type := publish}, #{publish_attribute := Pub, all_attribute := All}) -> select_attrs(#{action_type := publish}, #{publish_attribute := Pub, all_attribute := All}) ->
[Pub, All]; [Pub, All];

View File

@ -61,7 +61,7 @@ on_query(
{bind, Data}, {bind, Data},
#{ #{
base_tokens := DNTks, base_tokens := DNTks,
bind_password_tokens := PWTks, bind_password := PWTks,
bind_pool_name := PoolName bind_pool_name := PoolName
} = State } = State
) -> ) ->
@ -86,7 +86,7 @@ on_query(
{error, Reason} -> {error, Reason} ->
?SLOG( ?SLOG(
error, error,
LogMeta#{msg => "ldap_bind_failed", reason => Reason} LogMeta#{msg => "ldap_bind_failed", reason => emqx_utils:redact(Reason)}
), ),
{error, {unrecoverable_error, Reason}} {error, {unrecoverable_error, Reason}}
end. end.
@ -100,7 +100,9 @@ prepare_template(Config, State) ->
do_prepare_template(maps:to_list(maps:with([bind_password], Config)), State). do_prepare_template(maps:to_list(maps:with([bind_password], Config)), State).
do_prepare_template([{bind_password, V} | T], State) -> do_prepare_template([{bind_password, V} | T], State) ->
do_prepare_template(T, State#{bind_password_tokens => emqx_placeholder:preproc_tmpl(V)}); %% This is sensitive data
%% to reduce match cases, here we reuse the existing sensitive filter key: bind_password
do_prepare_template(T, State#{bind_password => emqx_placeholder:preproc_tmpl(V)});
do_prepare_template([], State) -> do_prepare_template([], State) ->
State. State.
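The comment above is about log redaction: keeping the pre-processed template under the existing bind_password key means the connector state still matches the sensitive-key handling that emqx_utils:redact/1 already performs, so the bind password template is not expected to show up in logs. An illustrative (assumed) before/after:
%% State after do_prepare_template/2 (token shape elided):
%%   #{bind_password => PreprocessedTokens, bind_pool_name => PoolName}
%% emqx_utils:redact(State) is then expected to mask that value, e.g.
%%   #{bind_password => <<"******">>, bind_pool_name => PoolName}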

View File

@ -47,7 +47,7 @@ post_boot() ->
ok = ensure_apps_started(), ok = ensure_apps_started(),
ok = print_vsn(), ok = print_vsn(),
ok = start_autocluster(), ok = start_autocluster(),
?AUDIT(alert, "from_cli", #{time => logger:timestamp(), event => "emqx_start"}), ?AUDIT(alert, cli, #{time => logger:timestamp(), event => "emqx_start"}),
ignore. ignore.
-ifdef(TEST). -ifdef(TEST).

View File

@ -67,8 +67,9 @@ graceful() ->
%% @doc Shutdown the Erlang VM and wait indefinitely. %% @doc Shutdown the Erlang VM and wait indefinitely.
graceful_wait() -> graceful_wait() ->
?AUDIT(alert, "from_cli", #{ ?AUDIT(alert, cli, #{
time => logger:timestamp(), msg => "run_emqx_stop_to_grace_shutdown" time => logger:timestamp(),
event => emqx_gracefully_stop
}), }),
ok = graceful(), ok = graceful(),
exit_loop(). exit_loop().

View File

@ -104,7 +104,7 @@ max_heap_size_warning(MF, Args) ->
msg => "shell_process_exceed_max_heap_size", msg => "shell_process_exceed_max_heap_size",
current_heap_size => HeapSize, current_heap_size => HeapSize,
function => MF, function => MF,
args => Args, args => pp_args(Args),
max_heap_size => ?MAX_HEAP_SIZE max_heap_size => ?MAX_HEAP_SIZE
}) })
end. end.
@ -112,24 +112,33 @@ max_heap_size_warning(MF, Args) ->
log(_, {?MODULE, prompt_func}, [[{history, _}]]) -> log(_, {?MODULE, prompt_func}, [[{history, _}]]) ->
ok; ok;
log(IsAllow, MF, Args) -> log(IsAllow, MF, Args) ->
?AUDIT(warning, "from_remote_console", #{ ?AUDIT(warning, erlang_console, #{
time => logger:timestamp(), time => logger:timestamp(),
function => MF, function => MF,
args => Args, args => pp_args(Args),
permission => IsAllow permission => IsAllow
}), }),
to_console(IsAllow, MF, Args). to_console(IsAllow, MF, Args).
to_console(prohibited, MF, Args) -> to_console(prohibited, MF, Args) ->
warning("DANGEROUS FUNCTION: FORBIDDEN IN SHELL!!!!!", []), warning("DANGEROUS FUNCTION: FORBIDDEN IN SHELL!!!!!", []),
?SLOG(error, #{msg => "execute_function_in_shell_prohibited", function => MF, args => Args}); ?SLOG(error, #{
msg => "execute_function_in_shell_prohibited",
function => MF,
args => pp_args(Args)
});
to_console(exempted, MF, Args) -> to_console(exempted, MF, Args) ->
limit_warning(MF, Args), limit_warning(MF, Args),
?SLOG(error, #{ ?SLOG(error, #{
msg => "execute_dangerous_function_in_shell_exempted", function => MF, args => Args msg => "execute_dangerous_function_in_shell_exempted",
function => MF,
args => pp_args(Args)
}); });
to_console(ok, MF, Args) -> to_console(ok, MF, Args) ->
limit_warning(MF, Args). limit_warning(MF, Args).
warning(Format, Args) -> warning(Format, Args) ->
io:format(?RED_BG ++ Format ++ ?RESET ++ "~n", Args). io:format(?RED_BG ++ Format ++ ?RESET ++ "~n", Args).
pp_args(Args) ->
iolist_to_binary(io_lib:format("~0p", [Args])).

View File

@ -266,7 +266,7 @@ fields(node_status) ->
})}, })},
{status, ?HOCON(?R_REF(status))} {status, ?HOCON(?R_REF(status))}
]; ];
fields({Type, with_name}) -> fields("with_name_" ++ Type) ->
listener_struct_with_name(Type); listener_struct_with_name(Type);
fields(Type) -> fields(Type) ->
listener_struct(Type). listener_struct(Type).
@ -308,7 +308,7 @@ listener_union_member_selector(Opts) ->
create_listener_schema(Opts) -> create_listener_schema(Opts) ->
Schemas = [ Schemas = [
?R_REF(Mod, {Type, with_name}) ?R_REF(Mod, "with_name_" ++ Type)
|| #{ref := ?R_REF(Mod, Type)} <- listeners_info(Opts) || #{ref := ?R_REF(Mod, Type)} <- listeners_info(Opts)
], ],
Example = maps:remove(id, tcp_schema_example()), Example = maps:remove(id, tcp_schema_example()),
@ -399,7 +399,7 @@ list_listeners(get, #{query_string := Query}) ->
end, end,
{200, listener_status_by_id(NodeL)}; {200, listener_status_by_id(NodeL)};
list_listeners(post, #{body := Body}) -> list_listeners(post, #{body := Body}) ->
create_listener(Body). create_listener(name, Body).
crud_listeners_by_id(get, #{bindings := #{id := Id}}) -> crud_listeners_by_id(get, #{bindings := #{id := Id}}) ->
case find_listeners_by_id(Id) of case find_listeners_by_id(Id) of
@ -407,7 +407,7 @@ crud_listeners_by_id(get, #{bindings := #{id := Id}}) ->
[L] -> {200, L} [L] -> {200, L}
end; end;
crud_listeners_by_id(put, #{bindings := #{id := Id}, body := Body0}) -> crud_listeners_by_id(put, #{bindings := #{id := Id}, body := Body0}) ->
case parse_listener_conf(Body0) of case parse_listener_conf(id, Body0) of
{Id, Type, Name, Conf} -> {Id, Type, Name, Conf} ->
case get_raw(Type, Name) of case get_raw(Type, Name) of
undefined -> undefined ->
@ -430,7 +430,7 @@ crud_listeners_by_id(put, #{bindings := #{id := Id}, body := Body0}) ->
{400, #{code => 'BAD_LISTENER_ID', message => ?LISTENER_ID_INCONSISTENT}} {400, #{code => 'BAD_LISTENER_ID', message => ?LISTENER_ID_INCONSISTENT}}
end; end;
crud_listeners_by_id(post, #{body := Body}) -> crud_listeners_by_id(post, #{body := Body}) ->
create_listener(Body); create_listener(id, Body);
crud_listeners_by_id(delete, #{bindings := #{id := Id}}) -> crud_listeners_by_id(delete, #{bindings := #{id := Id}}) ->
{ok, #{type := Type, name := Name}} = emqx_listeners:parse_listener_id(Id), {ok, #{type := Type, name := Name}} = emqx_listeners:parse_listener_id(Id),
case find_listeners_by_id(Id) of case find_listeners_by_id(Id) of
@ -441,11 +441,10 @@ crud_listeners_by_id(delete, #{bindings := #{id := Id}}) ->
{404, #{code => 'BAD_LISTENER_ID', message => ?LISTENER_NOT_FOUND}} {404, #{code => 'BAD_LISTENER_ID', message => ?LISTENER_NOT_FOUND}}
end. end.
parse_listener_conf(Conf0) -> parse_listener_conf(id, Conf0) ->
Conf1 = maps:without([<<"running">>, <<"current_connections">>], Conf0), Conf1 = maps:without([<<"running">>, <<"current_connections">>], Conf0),
{TypeBin, Conf2} = maps:take(<<"type">>, Conf1), {TypeBin, Conf2} = maps:take(<<"type">>, Conf1),
TypeAtom = binary_to_existing_atom(TypeBin), TypeAtom = binary_to_existing_atom(TypeBin),
case maps:take(<<"id">>, Conf2) of case maps:take(<<"id">>, Conf2) of
{IdBin, Conf3} -> {IdBin, Conf3} ->
{ok, #{type := Type, name := Name}} = emqx_listeners:parse_listener_id(IdBin), {ok, #{type := Type, name := Name}} = emqx_listeners:parse_listener_id(IdBin),
@ -454,13 +453,18 @@ parse_listener_conf(Conf0) ->
false -> {error, listener_type_inconsistent} false -> {error, listener_type_inconsistent}
end; end;
_ -> _ ->
{error, listener_config_invalid}
end;
parse_listener_conf(name, Conf0) ->
Conf1 = maps:without([<<"running">>, <<"current_connections">>], Conf0),
{TypeBin, Conf2} = maps:take(<<"type">>, Conf1),
TypeAtom = binary_to_existing_atom(TypeBin),
case maps:take(<<"name">>, Conf2) of case maps:take(<<"name">>, Conf2) of
{Name, Conf3} -> {Name, Conf3} ->
IdBin = <<TypeBin/binary, $:, Name/binary>>, IdBin = <<TypeBin/binary, $:, Name/binary>>,
{binary_to_atom(IdBin), TypeAtom, Name, Conf3}; {binary_to_atom(IdBin), TypeAtom, Name, Conf3};
_ -> _ ->
{error, listener_config_invalid} {error, listener_config_invalid}
end
end. end.
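With the split above, the two entry points accept slightly different bodies: POST /listeners identifies the listener by type + name, while the id-based routes still require an explicit id that must agree with the type. A sketch of the name branch with made-up values (tcp/demo and the bind address are illustrative):
%% Body without "id": type and name are combined into the listener id.
%% (Called within the listeners API module.)
parse_listener_conf(name, #{<<"type">> => <<"tcp">>,
                            <<"name">> => <<"demo">>,
                            <<"bind">> => <<"0.0.0.0:1884">>}).
%% => {'tcp:demo', tcp, <<"demo">>, #{<<"bind">> => <<"0.0.0.0:1884">>}}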
stop_listeners_by_id(Method, Body = #{bindings := Bindings}) -> stop_listeners_by_id(Method, Body = #{bindings := Bindings}) ->
@ -832,8 +836,8 @@ tcp_schema_example() ->
type => tcp type => tcp
}. }.
create_listener(Body) -> create_listener(From, Body) ->
case parse_listener_conf(Body) of case parse_listener_conf(From, Body) of
{Id, Type, Name, Conf} -> {Id, Type, Name, Conf} ->
case create(Type, Name, Conf) of case create(Type, Name, Conf) of
{ok, #{raw_config := _RawConf}} -> {ok, #{raw_config := _RawConf}} ->

View File

@ -156,6 +156,8 @@ authorize(<<"/api/v5/users", _/binary>>, _ApiKey, _ApiSecret) ->
{error, <<"not_allowed">>}; {error, <<"not_allowed">>};
authorize(<<"/api/v5/api_key", _/binary>>, _ApiKey, _ApiSecret) -> authorize(<<"/api/v5/api_key", _/binary>>, _ApiKey, _ApiSecret) ->
{error, <<"not_allowed">>}; {error, <<"not_allowed">>};
authorize(<<"/api/v5/logout", _/binary>>, _ApiKey, _ApiSecret) ->
{error, <<"not_allowed">>};
authorize(_Path, ApiKey, ApiSecret) -> authorize(_Path, ApiKey, ApiSecret) ->
Now = erlang:system_time(second), Now = erlang:system_time(second),
case find_by_api_key(ApiKey) of case find_by_api_key(ApiKey) of

View File

@ -482,7 +482,7 @@ trace(_) ->
{"trace stop topic <Topic> ", "Stop tracing for a topic on local node"}, {"trace stop topic <Topic> ", "Stop tracing for a topic on local node"},
{"trace start ip_address <IP> <File> [<Level>] ", {"trace start ip_address <IP> <File> [<Level>] ",
"Traces for a client ip on local node"}, "Traces for a client ip on local node"},
{"trace stop ip_addresss <IP> ", "Stop tracing for a client ip on local node"} {"trace stop ip_address <IP> ", "Stop tracing for a client ip on local node"}
]). ]).
trace_on(Name, Type, Filter, Level, LogFile) -> trace_on(Name, Type, Filter, Level, LogFile) ->

View File

@ -238,7 +238,6 @@ t_clear_certs(Config) when is_list(Config) ->
NewConf2 = emqx_utils_maps:deep_put( NewConf2 = emqx_utils_maps:deep_put(
[<<"ssl_options">>, <<"keyfile">>], NewConf, cert_file("keyfile") [<<"ssl_options">>, <<"keyfile">>], NewConf, cert_file("keyfile")
), ),
_ = request(post, NewPath, [], NewConf2), _ = request(post, NewPath, [], NewConf2),
ListResult1 = list_pem_dir("ssl", "clear"), ListResult1 = list_pem_dir("ssl", "clear"),
?assertMatch({ok, [_, _]}, ListResult1), ?assertMatch({ok, [_, _]}, ListResult1),
@ -251,7 +250,7 @@ t_clear_certs(Config) when is_list(Config) ->
_ = emqx_tls_certfile_gc:force(), _ = emqx_tls_certfile_gc:force(),
ListResult2 = list_pem_dir("ssl", "clear"), ListResult2 = list_pem_dir("ssl", "clear"),
%% make sure the old cret file is deleted %% make sure the old cert file is deleted
?assertMatch({ok, [_, _]}, ListResult2), ?assertMatch({ok, [_, _]}, ListResult2),
{ok, ResultList1} = ListResult1, {ok, ResultList1} = ListResult1,
@ -273,6 +272,17 @@ t_clear_certs(Config) when is_list(Config) ->
_ = delete(NewPath), _ = delete(NewPath),
_ = emqx_tls_certfile_gc:force(), _ = emqx_tls_certfile_gc:force(),
?assertMatch({error, enoent}, list_pem_dir("ssl", "clear")), ?assertMatch({error, enoent}, list_pem_dir("ssl", "clear")),
%% test create listeners without id in path
NewPath1 = emqx_mgmt_api_test_util:api_path(["listeners"]),
NewConf3 = maps:remove(<<"id">>, NewConf2#{<<"name">> => <<"clear">>}),
?assertNotMatch({error, {"HTTP/1.1", 400, _}}, request(post, NewPath1, [], NewConf3)),
ListResult3 = list_pem_dir("ssl", "clear"),
?assertMatch({ok, [_, _]}, ListResult3),
_ = delete(NewPath),
_ = emqx_tls_certfile_gc:force(),
?assertMatch({error, enoent}, list_pem_dir("ssl", "clear")),
ok. ok.
get_tcp_listeners(Node) -> get_tcp_listeners(Node) ->

View File

@ -223,9 +223,10 @@ restart(ResId, Opts) when is_binary(ResId) ->
%% @doc Start the resource %% @doc Start the resource
-spec start(resource_id(), creation_opts()) -> ok | {error, Reason :: term()}. -spec start(resource_id(), creation_opts()) -> ok | {error, Reason :: term()}.
start(ResId, Opts) -> start(ResId, Opts) ->
case safe_call(ResId, start, ?T_OPERATION) of StartTimeout = maps:get(start_timeout, Opts, ?T_OPERATION),
case safe_call(ResId, start, StartTimeout) of
ok -> ok ->
wait_for_ready(ResId, maps:get(start_timeout, Opts, 5000)); wait_for_ready(ResId, StartTimeout);
{error, _Reason} = Error -> {error, _Reason} = Error ->
Error Error
end. end.
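A minimal sketch of the adjusted flow, assuming this is the resource manager's `start/2` and using an illustrative resource id: the caller-supplied `start_timeout` now bounds both the `start` call and the readiness wait, replacing the fixed call timeout and the hard-coded 5000 ms wait.

    %% Sketch only: one timeout governs the whole start sequence.
    case start(<<"bridge:mqtt:example">>, #{start_timeout => 10000}) of
        ok -> resource_ready;
        {error, Reason} -> {start_failed, Reason}
    end.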

View File

@ -278,7 +278,7 @@ t_crud(Config) ->
<<"code">> := <<"BAD_REQUEST">>, <<"code">> := <<"BAD_REQUEST">>,
<<"message">> := <<"message">> :=
#{ #{
<<"expected">> := [_ | _], <<"expected">> := <<"avro | protobuf">>,
<<"field_name">> := <<"type">> <<"field_name">> := <<"type">>
} }
}}, }},
@ -301,7 +301,7 @@ t_crud(Config) ->
<<"code">> := <<"BAD_REQUEST">>, <<"code">> := <<"BAD_REQUEST">>,
<<"message">> := <<"message">> :=
#{ #{
<<"expected">> := [_ | _], <<"expected">> := <<"avro | protobuf">>,
<<"field_name">> := <<"type">> <<"field_name">> := <<"type">>
} }
}}, }},

View File

@ -61,7 +61,8 @@
diff_lists/3, diff_lists/3,
merge_lists/3, merge_lists/3,
tcp_keepalive_opts/4, tcp_keepalive_opts/4,
format/1 format/1,
format_mfal/1
]). ]).
-export([ -export([
@ -529,6 +530,30 @@ tcp_keepalive_opts(OS, _Idle, _Interval, _Probes) ->
format(Term) -> format(Term) ->
iolist_to_binary(io_lib:format("~0p", [Term])). iolist_to_binary(io_lib:format("~0p", [Term])).
%% @doc Helper function for log formatters.
-spec format_mfal(map()) -> undefined | binary().
format_mfal(Data) ->
Line =
case maps:get(line, Data, undefined) of
undefined ->
<<"">>;
Num ->
["(", integer_to_list(Num), ")"]
end,
case maps:get(mfa, Data, undefined) of
{M, F, A} ->
iolist_to_binary([
atom_to_binary(M, utf8),
$:,
atom_to_binary(F, utf8),
$/,
integer_to_binary(A),
Line
]);
_ ->
undefined
end.
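For illustration, what the helper above yields; the mfa tuple and line number are sample data only:

    %% An mfa plus a line renders as Module:Function/Arity(Line).
    <<"emqx_listeners:start_listener/3(42)">> =
        format_mfal(#{mfa => {emqx_listeners, start_listener, 3}, line => 42}),
    %% Without an mfa entry the helper returns 'undefined'.
    undefined = format_mfal(#{msg => <<"no location">>}).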
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
%% Internal Functions %% Internal Functions
%%------------------------------------------------------------------------------ %%------------------------------------------------------------------------------
@ -620,6 +645,7 @@ try_to_existing_atom(Convert, Data, Encoding) ->
_:Reason -> {error, Reason} _:Reason -> {error, Reason}
end. end.
%% NOTE: keep alphabetical order
is_sensitive_key(aws_secret_access_key) -> true; is_sensitive_key(aws_secret_access_key) -> true;
is_sensitive_key("aws_secret_access_key") -> true; is_sensitive_key("aws_secret_access_key") -> true;
is_sensitive_key(<<"aws_secret_access_key">>) -> true; is_sensitive_key(<<"aws_secret_access_key">>) -> true;
@ -641,6 +667,8 @@ is_sensitive_key(<<"secret_key">>) -> true;
is_sensitive_key(security_token) -> true; is_sensitive_key(security_token) -> true;
is_sensitive_key("security_token") -> true; is_sensitive_key("security_token") -> true;
is_sensitive_key(<<"security_token">>) -> true; is_sensitive_key(<<"security_token">>) -> true;
is_sensitive_key(sp_private_key) -> true;
is_sensitive_key(<<"sp_private_key">>) -> true;
is_sensitive_key(token) -> true; is_sensitive_key(token) -> true;
is_sensitive_key("token") -> true; is_sensitive_key("token") -> true;
is_sensitive_key(<<"token">>) -> true; is_sensitive_key(<<"token">>) -> true;

View File

@ -140,9 +140,10 @@ do(Args) ->
io:format("~p\n", [Other]) io:format("~p\n", [Other])
end; end;
["eval" | ListOfArgs] -> ["eval" | ListOfArgs] ->
% parse args locally in the remsh node
Parsed = parse_eval_args(ListOfArgs), Parsed = parse_eval_args(ListOfArgs),
% and evaluate it on the remote node % and evaluate it on the remote node
case rpc:call(TargetNode, emqx_ctl, eval_erl, [Parsed]) of case rpc:call(TargetNode, emqx_ctl, run_command, [eval_erl, Parsed], infinity) of
{ok, Value} -> {ok, Value} ->
io:format("~p~n", [Value]); io:format("~p~n", [Value]);
{badrpc, Reason} -> {badrpc, Reason} ->

View File

@ -0,0 +1 @@
Fixed an issue where logging would stop if "Rotation Size" was set to `infinity` on file log handlers.

View File

@ -0,0 +1,3 @@
Fix log formatter when log.HANDLER.formatter is set to 'json'.
The bug was introduced in v5.0.4, where the log line was no longer valid JSON but was prefixed with a timestamp string and the level name.

View File

@ -0,0 +1 @@
Disable access to the `logout` endpoint with API keys; this endpoint is for the Dashboard only.
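A hedged sketch of the effect, based on the `authorize/3` clause added earlier in this change set (the key and secret below are placeholders):

    %% Any API key/secret pair is now rejected for the logout endpoint.
    {error, <<"not_allowed">>} =
        authorize(<<"/api/v5/logout">>, <<"example_key">>, <<"example_secret">>).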

View File

@ -0,0 +1 @@
Dashboard login now supports SSO (Single Sign-On) via LDAP.

View File

@ -0,0 +1 @@
Dashboard login now supports SSO (Single Sign-On) via SAML 2.0.

View File

@ -14,8 +14,8 @@ type: application
# This is the chart version. This version number should be incremented each time you make changes # This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version. # to the chart and its templates, including the app version.
version: 5.3.0-alpha.1 version: 5.3.0-alpha.2
# This is the version number of the application being deployed. This version number should be # This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. # incremented each time you make changes to the application.
appVersion: 5.3.0-alpha.1 appVersion: 5.3.0-alpha.2

View File

@ -416,7 +416,7 @@ boot() {
gen_tmp_node_name() { gen_tmp_node_name() {
local rnd local rnd
rnd="$(od -t u -N 4 /dev/urandom | head -n1 | awk '{print $2 % 1000}')" rnd="$(od -t u -N 4 /dev/urandom | head -n1 | awk '{print $2 % 1000}')"
echo "remsh${rnd}-$EMQX_NODE_NAME}" echo "remsh${rnd}-${EMQX_NODE_NAME}"
} }
remsh() { remsh() {

View File

@ -835,7 +835,7 @@ defmodule EMQXUmbrella.MixProject do
defp quicer_dep() do defp quicer_dep() do
if enable_quicer?(), if enable_quicer?(),
# in conflict with emqx and emqtt # in conflict with emqx and emqtt
do: [{:quicer, github: "emqx/quic", tag: "0.0.200", override: true}], do: [{:quicer, github: "emqx/quic", tag: "0.0.201", override: true}],
else: [] else: []
end end

View File

@ -39,7 +39,7 @@ bcrypt() ->
{bcrypt, {git, "https://github.com/emqx/erlang-bcrypt.git", {tag, "0.6.1"}}}. {bcrypt, {git, "https://github.com/emqx/erlang-bcrypt.git", {tag, "0.6.1"}}}.
quicer() -> quicer() ->
{quicer, {git, "https://github.com/emqx/quic.git", {tag, "0.0.200"}}}. {quicer, {git, "https://github.com/emqx/quic.git", {tag, "0.0.201"}}}.
jq() -> jq() ->
{jq, {git, "https://github.com/emqx/jq", {tag, "v0.3.10"}}}. {jq, {git, "https://github.com/emqx/jq", {tag, "v0.3.10"}}}.

View File

@ -101,7 +101,7 @@ common_handler_flush_qlen.label:
common_handler_chars_limit.desc: common_handler_chars_limit.desc:
"""Set the maximum length of a single log message. If this length is exceeded, the log message will be truncated. """Set the maximum length of a single log message. If this length is exceeded, the log message will be truncated.
NOTE: Restrict char limiter if formatter is JSON , it will get a truncated incomplete JSON data, which is not recommended.""" When formatter is <code>json</code> the truncation is done on the JSON values, but not on the log message itself."""
common_handler_chars_limit.label: common_handler_chars_limit.label:
"""Single Log Max Length""" """Single Log Max Length"""
@ -660,7 +660,8 @@ Can be one of:
- <code>system</code>: the time offset used by the local system - <code>system</code>: the time offset used by the local system
- <code>utc</code>: the UTC time offset - <code>utc</code>: the UTC time offset
- <code>+-[hh]:[mm]</code>: user specified time offset, such as "-02:00" or "+00:00" - <code>+-[hh]:[mm]</code>: user specified time offset, such as "-02:00" or "+00:00"
Defaults to: <code>system</code>.""" Defaults to: <code>system</code>.
This config has no effect when the formatter is <code>json</code>, as the timestamp in JSON is milliseconds since the epoch."""
common_handler_time_offset.label: common_handler_time_offset.label:
"""Time Offset""" """Time Offset"""
@ -841,7 +842,4 @@ Defaults to 100000."""
node_channel_cleanup_batch_size.label: node_channel_cleanup_batch_size.label:
"""Node Channel Cleanup Batch Size""" """Node Channel Cleanup Batch Size"""
prevent_overlapping_partitions.desc:
"""https://www.erlang.org/doc/man/global.html#description"""
} }

View File

@ -43,7 +43,9 @@ login_success.desc:
"""Dashboard Auth Success""" """Dashboard Auth Success"""
logout_api.desc: logout_api.desc:
"""Dashboard user logout""" """Dashboard user logout.
This endpoint is for the Dashboard only and cannot be used with an `API Key`.
The token returned by the `/login` endpoint must be sent as a bearer token in the `Authorization` header."""
logout_api.label: logout_api.label:
"""Dashboard user logout""" """Dashboard user logout"""

View File

@ -51,4 +51,16 @@ backend_name.desc:
backend_name.label: backend_name.label:
"""Backend Name""" """Backend Name"""
running.desc:
"""Is the backend running"""
running.label:
"""Running"""
last_error.desc:
"""Last error of this backend"""
last_error.label:
"""Last Error"""
} }

View File

@ -166,6 +166,7 @@ ip
ipv ipv
jenkins jenkins
jq jq
json
kb kb
keepalive keepalive
keyfile keyfile

View File

@ -12,10 +12,18 @@
[[ "$output" =~ "ERROR: Invalid node name,".+ ]] [[ "$output" =~ "ERROR: Invalid node name,".+ ]]
} }
@test "corrupted cluster config file" { @test "corrupted cluster-override.conf" {
conffile="./_build/$PROFILE/rel/emqx/data/configs/cluster-override.conf" conffile="./_build/$PROFILE/rel/emqx/data/configs/cluster-override.conf"
echo "{" > $conffile echo "{" > $conffile
run ./_build/$PROFILE/rel/emqx/bin/emqx console run ./_build/$PROFILE/rel/emqx/bin/emqx console
[[ $status -ne 0 ]] [[ $status -ne 0 ]]
rm -f $conffile rm -f $conffile
} }
@test "corrupted cluster.hocon" {
conffile="./_build/$PROFILE/rel/emqx/data/configs/cluster.hocon"
echo "{" > $conffile
run ./_build/$PROFILE/rel/emqx/bin/emqx console
[[ $status -ne 0 ]]
rm -f $conffile
}