Merge pull request #11661 from zmstone/0922-fix-json-log-formatter
0922 fix json log formatter
commit 4e15edb5e4
@@ -216,38 +216,26 @@ log_formatter(HandlerName, Conf) ->
         end,
     SingleLine = conf_get("single_line", Conf),
     Depth = conf_get("max_depth", Conf),
+    Format =
+        case HandlerName of
+            ?AUDIT_HANDLER ->
+                json;
+            _ ->
+                conf_get("formatter", Conf)
+        end,
     do_formatter(
-        HandlerName, conf_get("formatter", Conf), CharsLimit, SingleLine, TimeOffSet, Depth
+        Format, CharsLimit, SingleLine, TimeOffSet, Depth
     ).

 %% helpers
-do_formatter(?AUDIT_HANDLER, _, CharsLimit, SingleLine, TimeOffSet, Depth) ->
-    {emqx_logger_jsonfmt, #{
-        template => [
-            time,
-            " [",
-            level,
-            "] ",
-            %% http api
-            {method, [code, " ", method, " ", operate_id, " ", username, " "], []},
-            %% cli
-            {cmd, [cmd, " "], []},
-            msg,
-            "\n"
-        ],
-        chars_limit => CharsLimit,
-        single_line => SingleLine,
-        time_offset => TimeOffSet,
-        depth => Depth
-    }};
-do_formatter(_, json, CharsLimit, SingleLine, TimeOffSet, Depth) ->
+do_formatter(json, CharsLimit, SingleLine, TimeOffSet, Depth) ->
     {emqx_logger_jsonfmt, #{
         chars_limit => CharsLimit,
         single_line => SingleLine,
         time_offset => TimeOffSet,
         depth => Depth
     }};
-do_formatter(_, text, CharsLimit, SingleLine, TimeOffSet, Depth) ->
+do_formatter(text, CharsLimit, SingleLine, TimeOffSet, Depth) ->
     {emqx_logger_textfmt, #{
         template => [time, " [", level, "] ", msg, "\n"],
         chars_limit => CharsLimit,
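Net effect of the hunk above: the audit handler is now hard-wired to the JSON formatter, while every other handler keeps whatever "formatter" the config resolves to. A minimal sketch of the resolution logic (simplified; ?AUDIT_HANDLER and conf_get/2 are the module's own names, and the literal atom audit stands in for the macro):

%% Sketch only, not the literal code: how the formatter is picked per handler.
resolve_formatter(HandlerName, Conf) ->
    case HandlerName of
        audit -> json;                       %% ?AUDIT_HANDLER in the real code
        _ -> conf_get("formatter", Conf)     %% other handlers: text | json
    end.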
@@ -51,7 +51,8 @@
 -type config() :: #{
     depth => pos_integer() | unlimited,
     report_cb => logger:report_cb(),
-    single_line => boolean()
+    single_line => boolean(),
+    chars_limit => unlimited | pos_integer()
 }.

 -define(IS_STRING(String), (is_list(String) orelse is_binary(String))).
@@ -64,19 +65,17 @@
 best_effort_json(Input) ->
     best_effort_json(Input, [pretty, force_utf8]).
 best_effort_json(Input, Opts) ->
-    Config = #{depth => unlimited, single_line => true},
+    Config = #{depth => unlimited, single_line => true, chars_limit => unlimited},
     JsonReady = best_effort_json_obj(Input, Config),
     emqx_utils_json:encode(JsonReady, Opts).

 -spec format(logger:log_event(), config()) -> iodata().
-format(#{level := Level, msg := Msg, meta := Meta} = Event, Config0) when is_map(Config0) ->
+format(#{level := Level, msg := Msg, meta := Meta}, Config0) when is_map(Config0) ->
     Config = add_default_config(Config0),
-    MsgBin = format(Msg, Meta#{level => Level}, Config),
-    logger_formatter:format(Event#{msg => {string, MsgBin}}, Config).
+    [format(Msg, Meta#{level => Level}, Config), "\n"].

-format(Msg, Meta0, Config) ->
-    Meta = maps:without([time, level], Meta0),
-    Data0 =
+format(Msg, Meta, Config) ->
+    Data =
         try maybe_format_msg(Msg, Meta, Config) of
             Map when is_map(Map) ->
                 maps:merge(Map, Meta);
@@ -92,8 +91,7 @@ format(Msg, Meta0, Config) ->
                 fmt_stacktrace => S
             }
         end,
-    Data = maps:without([report_cb], Data0),
-    emqx_utils_json:encode(json_obj(Data, Config)).
+    emqx_utils_json:encode(json_obj_root(Data, Config)).

 maybe_format_msg({report, Report} = Msg, #{report_cb := Cb} = Meta, Config) ->
     case is_map(Report) andalso Cb =:= ?DEFAULT_FORMATTER of
@@ -128,7 +126,7 @@ format_msg({report, Report}, #{report_cb := Fun} = Meta, Config) when is_functio
     end;
 format_msg({report, Report}, #{report_cb := Fun}, Config) when is_function(Fun, 2) ->
     %% a format callback function of arity 2
-    case Fun(Report, maps:with([depth, single_line], Config)) of
+    case Fun(Report, maps:with([depth, single_line, chars_limit], Config)) of
         Chardata when ?IS_STRING(Chardata) ->
             try
                 unicode:characters_to_binary(Chardata, utf8)
@@ -152,11 +150,13 @@ format_msg({Fmt, Args}, _Meta, Config) ->

 do_format_msg(Format0, Args, #{
     depth := Depth,
-    single_line := SingleLine
+    single_line := SingleLine,
+    chars_limit := Limit
 }) ->
+    Opts = chars_limit_to_opts(Limit),
     Format1 = io_lib:scan_format(Format0, Args),
     Format = reformat(Format1, Depth, SingleLine),
-    Text0 = io_lib:build_text(Format, []),
+    Text0 = io_lib:build_text(Format, Opts),
     Text =
         case SingleLine of
             true -> re:replace(Text0, ",?\r?\n\s*", ", ", [{return, list}, global, unicode]);
@@ -164,6 +164,9 @@ do_format_msg(Format0, Args, #{
     end,
     trim(unicode:characters_to_binary(Text, utf8)).

+chars_limit_to_opts(unlimited) -> [];
+chars_limit_to_opts(Limit) -> [{chars_limit, Limit}].
+
 %% Get rid of the leading spaces.
 %% leave alone the trailing spaces.
 trim(<<$\s, Rest/binary>>) -> trim(Rest);
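The chars_limit plumbing above simply forwards the configured limit to io_lib's text builder: unlimited becomes an empty option list, an integer becomes [{chars_limit, N}]. For intuition, the documented io_lib:format/3 option has the same effect (illustrative values only; chars_limit_to_opts/1 itself is internal to the module):

%% Caps the rendered term at roughly 32 characters; the runtime marks the cut.
Short = io_lib:format("~p", [lists:duplicate(100, $a)], [{chars_limit, 32}]).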
@@ -233,52 +236,70 @@ json(P, C) when is_port(P) -> json(port_to_list(P), C);
 json(F, C) when is_function(F) -> json(erlang:fun_to_list(F), C);
 json(B, Config) when is_binary(B) ->
     best_effort_unicode(B, Config);
-json(L, Config) when is_list(L), is_integer(hd(L)) ->
-    best_effort_unicode(L, Config);
 json(M, Config) when is_list(M), is_tuple(hd(M)), tuple_size(hd(M)) =:= 2 ->
     best_effort_json_obj(M, Config);
 json(L, Config) when is_list(L) ->
-    [json(I, Config) || I <- L];
+    try unicode:characters_to_binary(L, utf8) of
+        B when is_binary(B) -> B;
+        _ -> [json(I, Config) || I <- L]
+    catch
+        _:_ ->
+            [json(I, Config) || I <- L]
+    end;
 json(Map, Config) when is_map(Map) ->
     best_effort_json_obj(Map, Config);
 json(Term, Config) ->
     do_format_msg("~p", [Term], Config).

+json_obj_root(Data0, Config) ->
+    Time = maps:get(time, Data0, undefined),
+    Level = maps:get(level, Data0, undefined),
+    Msg1 =
+        case maps:get(msg, Data0, undefined) of
+            undefined ->
+                maps:get('$kind', Data0, undefined);
+            Msg0 ->
+                Msg0
+        end,
+    Msg =
+        case Msg1 of
+            undefined ->
+                undefined;
+            _ ->
+                json(Msg1, Config)
+        end,
+    Mfal = emqx_utils:format_mfal(Data0),
+    Data =
+        maps:fold(
+            fun(K, V, D) ->
+                {K1, V1} = json_kv(K, V, Config),
+                [{K1, V1} | D]
+            end,
+            [],
+            maps:without(
+                [time, gl, file, report_cb, msg, '$kind', mfa, level, line, is_trace], Data0
+            )
+        ),
+    lists:filter(
+        fun({_, V}) -> V =/= undefined end,
+        [{time, Time}, {level, Level}, {msg, Msg}, {mfa, Mfal}]
+    ) ++ Data.
+
 json_obj(Data, Config) ->
     maps:fold(
         fun(K, V, D) ->
-            json_kv(K, V, D, Config)
+            {K1, V1} = json_kv(K, V, Config),
+            maps:put(K1, V1, D)
         end,
         maps:new(),
         Data
     ).

-json_kv(mfa, {M, F, A}, Data, _Config) ->
-    maps:put(
-        mfa,
-        <<
-            (atom_to_binary(M, utf8))/binary,
-            $:,
-            (atom_to_binary(F, utf8))/binary,
-            $/,
-            (integer_to_binary(A))/binary
-        >>,
-        Data
-    );
-%% snabbkaffe
-json_kv('$kind', Kind, Data, Config) ->
-    maps:put(msg, json(Kind, Config), Data);
-json_kv(gl, _, Data, _Config) ->
-    %% drop gl because it's not interesting
-    Data;
-json_kv(file, _, Data, _Config) ->
-    %% drop 'file' because we have mfa
-    Data;
-json_kv(K0, V, Data, Config) ->
+json_kv(K0, V, Config) ->
     K = json_key(K0),
     case is_map(V) of
-        true -> maps:put(json(K, Config), best_effort_json_obj(V, Config), Data);
-        false -> maps:put(json(K, Config), json(V, Config), Data)
+        true -> {K, best_effort_json_obj(V, Config)};
+        false -> {K, json(V, Config)}
     end.

 json_key(A) when is_atom(A) -> json_key(atom_to_binary(A, utf8));
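The new json_obj_root/2 pins the well-known fields to the front of the object, drops the ones that are undefined, and appends everything else from the metadata via json_kv/3. A hypothetical input/output pair (values invented for illustration):

%% Hypothetical example of json_obj_root/2, not taken from a real log event:
%% In  : #{time => 1111, level => info, msg => <<"hello">>,
%%         mfa => {mod, fn, 1}, line => 42, clientid => <<"c1">>}
%% Out : [{time, 1111}, {level, info}, {msg, <<"hello">>},
%%        {mfa, <<"mod:fn/1(42)">>}, {<<"clientid">>, <<"c1">>}]
%% time/level/msg/mfa come first; line, gl, file and friends are folded into
%% the mfa field or dropped, and the remaining metadata is appended after.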
@@ -373,23 +394,72 @@ p_config() ->
     proper_types:shrink_list(
         [
             {depth, p_limit()},
-            {single_line, proper_types:boolean()}
+            {single_line, proper_types:boolean()},
+            {chars_limit, p_limit()}
         ]
     ).

+%% NOTE: pretty-printing format is asserted in the test
+%% This affects the CLI output format, consult the team before changing
+%% the format.
 best_effort_json_test() ->
     ?assertEqual(
         <<"{\n \n}">>,
-        emqx_logger_jsonfmt:best_effort_json([])
+        best_effort_json([])
     ),
     ?assertEqual(
         <<"{\n \"key\" : [\n \n ]\n}">>,
-        emqx_logger_jsonfmt:best_effort_json(#{key => []})
+        best_effort_json(#{key => []})
     ),
     ?assertEqual(
         <<"[\n {\n \"key\" : [\n \n ]\n }\n]">>,
-        emqx_logger_jsonfmt:best_effort_json([#{key => []}])
+        best_effort_json([#{key => []}])
     ),
     ok.

+config() ->
+    #{
+        chars_limit => unlimited,
+        depth => unlimited,
+        single_line => true
+    }.
+
+make_log(Report) ->
+    #{
+        level => info,
+        msg => Report,
+        meta => #{time => 1111, report_cb => ?DEFAULT_FORMATTER}
+    }.
+
+ensure_json_output_test() ->
+    JSON = format(make_log({report, #{foo => bar}}), config()),
+    ?assert(is_map(emqx_utils_json:decode(JSON))),
+    ok.
+
+chars_limit_not_applied_on_raw_map_fields_test() ->
+    Limit = 32,
+    Len = 100,
+    LongStr = lists:duplicate(Len, $a),
+    Config0 = config(),
+    Config = Config0#{
+        chars_limit => Limit
+    },
+    JSON = format(make_log({report, #{foo => LongStr}}), Config),
+    #{<<"foo">> := LongStr1} = emqx_utils_json:decode(JSON),
+    ?assertEqual(Len, size(LongStr1)),
+    ok.
+
+chars_limit_applied_on_format_result_test() ->
+    Limit = 32,
+    Len = 100,
+    LongStr = lists:duplicate(Len, $a),
+    Config0 = config(),
+    Config = Config0#{
+        chars_limit => Limit
+    },
+    JSON = format(make_log({string, LongStr}), Config),
+    #{<<"msg">> := LongStr1} = emqx_utils_json:decode(JSON),
+    ?assertEqual(Limit, size(LongStr1)),
+    ok.
+
 -endif.
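For orientation, this is roughly what a decoded line from ensure_json_output_test looks like; the two chars_limit cases then pin down that truncation applies to the formatted message (the {string, ...} path) but not to values carried verbatim in a report map. Illustrative only, decoded key names assumed from the assertions above:

%% JSON = format(make_log({report, #{foo => bar}}), config()),
%% emqx_utils_json:decode(JSON) gives roughly:
%%     #{<<"time">> => 1111, <<"level">> => <<"info">>, <<"foo">> => <<"bar">>}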
@@ -56,8 +56,7 @@ enrich_report(ReportRaw, Meta) ->
         end,
     ClientId = maps:get(clientid, Meta, undefined),
     Peer = maps:get(peername, Meta, undefined),
-    MFA = maps:get(mfa, Meta, undefined),
-    Line = maps:get(line, Meta, undefined),
+    MFA = emqx_utils:format_mfal(Meta),
     Msg = maps:get(msg, ReportRaw, undefined),
     %% turn it into a list so that the order of the fields is determined
     lists:foldl(
@@ -70,8 +69,7 @@ enrich_report(ReportRaw, Meta) ->
             {topic, try_format_unicode(Topic)},
             {clientid, try_format_unicode(ClientId)},
             {peername, Peer},
-            {line, Line},
-            {mfa, mfa(MFA)},
+            {mfa, try_format_unicode(MFA)},
             {msg, Msg}
         ]
     ).
@@ -84,7 +82,7 @@ try_format_unicode(Char) ->
             case unicode:characters_to_list(Char) of
                 {error, _, _} -> error;
                 {incomplete, _, _} -> error;
-                Binary -> Binary
+                List1 -> List1
             end
         catch
             _:_ ->
@@ -95,8 +93,8 @@ try_format_unicode(Char) ->
         _ -> List
     end.

-enrich_mfa({Fmt, Args}, #{mfa := Mfa, line := Line}) when is_list(Fmt) ->
-    {Fmt ++ " mfa: ~ts line: ~w", Args ++ [mfa(Mfa), Line]};
+enrich_mfa({Fmt, Args}, Data) when is_list(Fmt) ->
+    {Fmt ++ " mfa: ~ts", Args ++ [emqx_utils:format_mfal(Data)]};
 enrich_mfa(Msg, _) ->
     Msg.

@@ -113,6 +111,3 @@ enrich_topic({Fmt, Args}, #{topic := Topic}) when is_list(Fmt) ->
     {" topic: ~ts" ++ Fmt, [Topic | Args]};
 enrich_topic(Msg, _) ->
     Msg.
-
-mfa(undefined) -> undefined;
-mfa({M, F, A}) -> [atom_to_list(M), ":", atom_to_list(F), "/" ++ integer_to_list(A)].
@@ -1252,6 +1252,7 @@ log_handler_common_confs(Handler, Default) ->
         sc(
             hoconsc:enum([text, json]),
             #{
+                aliases => [format],
                 default => maps:get(formatter, Default, text),
                 desc => ?DESC("common_handler_formatter"),
                 importance => ?IMPORTANCE_MEDIUM
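With the added aliases => [format], the same handler option can also be written under the key "format"; the HOCON schema resolves the alias to "formatter" (for example log.console.format = json, assuming a console handler path).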
@@ -278,7 +278,7 @@ t_crud(Config) ->
             <<"code">> := <<"BAD_REQUEST">>,
             <<"message">> :=
                 #{
-                    <<"expected">> := [_ | _],
+                    <<"expected">> := <<"avro | protobuf">>,
                     <<"field_name">> := <<"type">>
                 }
         }},
@@ -301,7 +301,7 @@ t_crud(Config) ->
             <<"code">> := <<"BAD_REQUEST">>,
             <<"message">> :=
                 #{
-                    <<"expected">> := [_ | _],
+                    <<"expected">> := <<"avro | protobuf">>,
                     <<"field_name">> := <<"type">>
                 }
         }},
@@ -61,7 +61,8 @@
     diff_lists/3,
     merge_lists/3,
     tcp_keepalive_opts/4,
-    format/1
+    format/1,
+    format_mfal/1
 ]).

 -export([
@@ -529,6 +530,30 @@ tcp_keepalive_opts(OS, _Idle, _Interval, _Probes) ->
 format(Term) ->
     iolist_to_binary(io_lib:format("~0p", [Term])).

+%% @doc Helper function for log formatters.
+-spec format_mfal(map()) -> undefined | binary().
+format_mfal(Data) ->
+    Line =
+        case maps:get(line, Data, undefined) of
+            undefined ->
+                <<"">>;
+            Num ->
+                ["(", integer_to_list(Num), ")"]
+        end,
+    case maps:get(mfa, Data, undefined) of
+        {M, F, A} ->
+            iolist_to_binary([
+                atom_to_binary(M, utf8),
+                $:,
+                atom_to_binary(F, utf8),
+                $/,
+                integer_to_binary(A),
+                Line
+            ]);
+        _ ->
+            undefined
+    end.
+
 %%------------------------------------------------------------------------------
 %% Internal Functions
 %%------------------------------------------------------------------------------
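Example calls for the new helper (module, function, and line values are hypothetical):

%% <<"emqx_channel:handle_in/2(120)">>
emqx_utils:format_mfal(#{mfa => {emqx_channel, handle_in, 2}, line => 120}),
%% <<"emqx_channel:handle_in/2">> (no line number, so no "(N)" suffix)
emqx_utils:format_mfal(#{mfa => {emqx_channel, handle_in, 2}}),
%% undefined (no mfa in the metadata)
emqx_utils:format_mfal(#{line => 120}).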
@@ -0,0 +1,3 @@
+Fix log formatter when log.HANDLER.formatter is set to 'json'.
+
+The bug was introduced in v5.0.4: the log line was no longer valid JSON, but was prefixed with a timestamp string and the level name.
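Illustration of the fix (values made up): before, a handler configured with formatter = json emitted lines such as 2023-09-22T10:00:00.123+08:00 [info] {"msg":"..."}, so the line as a whole was not parseable JSON; after, the whole line is the JSON object itself, e.g. {"time":1111,"level":"info","msg":"..."}.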
@@ -101,7 +101,7 @@ common_handler_flush_qlen.label:

 common_handler_chars_limit.desc:
 """Set the maximum length of a single log message. If this length is exceeded, the log message will be truncated.
-NOTE: Restrict char limiter if formatter is JSON , it will get a truncated incomplete JSON data, which is not recommended."""
+When the formatter is <code>json</code>, the truncation is done on the JSON values, but not on the log message itself."""

 common_handler_chars_limit.label:
 """Single Log Max Length"""
@@ -660,7 +660,8 @@ Can be one of:
 - <code>system</code>: the time offset used by the local system
 - <code>utc</code>: the UTC time offset
 - <code>+-[hh]:[mm]</code>: user specified time offset, such as "-02:00" or "+00:00"
-Defaults to: <code>system</code>."""
+Defaults to: <code>system</code>.
+This config has no effect when the formatter is <code>json</code>, as the timestamp in JSON is milliseconds since the epoch."""

 common_handler_time_offset.label:
 """Time Offset"""
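Taken together (handler path assumed for illustration): with log.console.formatter = json, chars_limit now truncates individual formatted values inside the JSON object rather than cutting the emitted line, and time_offset is ignored because the JSON timestamp is milliseconds since the epoch.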
@@ -166,6 +166,7 @@ ip
 ipv
 jenkins
 jq
+json
 kb
 keepalive
 keyfile