Merge pull request #13053 from kjellwinblad/kjell/fix_rule_trace_issues/EMQX-12327/EMQX-12335/EMQX-12336

Fix small action trace issues
Kjell Winblad 2024-05-20 16:25:38 +02:00 committed by GitHub
commit 8279d8c787
10 changed files with 170 additions and 100 deletions

View File

@@ -221,7 +221,7 @@ best_effort_unicode(Input, Config) ->
 best_effort_json_obj(List, Config) when is_list(List) ->
     try
-        json_obj(maps:from_list(List), Config)
+        json_obj(convert_tuple_list_to_map(List), Config)
     catch
         _:_ ->
             [json(I, Config) || I <- List]
@@ -234,6 +234,16 @@ best_effort_json_obj(Map, Config) ->
             do_format_msg("~p", [Map], Config)
     end.
+%% This function will throw if the list do not only contain tuples or if there
+%% are duplicate keys.
+convert_tuple_list_to_map(List) ->
+    %% Crash if this is not a tuple list
+    CandidateMap = maps:from_list(List),
+    %% Crash if there are duplicates
+    NumberOfItems = length(List),
+    NumberOfItems = maps:size(CandidateMap),
+    CandidateMap.
 json(A, _) when is_atom(A) -> A;
 json(I, _) when is_integer(I) -> I;
 json(F, _) when is_float(F) -> F;
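
For reference, a hypothetical EUnit case sketching the intended semantics of convert_tuple_list_to_map/1 (the function is private to the formatter module, so this test is illustrative only and not part of the patch):

%% Hypothetical EUnit sketch, assuming access to the private function:
-include_lib("eunit/include/eunit.hrl").

convert_tuple_list_to_map_test() ->
    %% A proper key-value tuple list becomes a map, so the formatter can emit a JSON object.
    ?assertEqual(#{a => 1, b => 2}, convert_tuple_list_to_map([{a, 1}, {b, 2}])),
    %% Duplicate keys break the length check (badmatch), so best_effort_json_obj/2
    %% falls back to formatting the list element by element.
    ?assertError({badmatch, _}, convert_tuple_list_to_map([{a, 1}, {a, 2}])),
    %% A non-tuple element makes maps:from_list/1 raise badarg, with the same fallback.
    ?assertError(badarg, convert_tuple_list_to_map([{a, 1}, not_a_tuple])).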

View File

@@ -12,6 +12,7 @@
 -include_lib("emqx/include/logger.hrl").
 -include_lib("hocon/include/hoconsc.hrl").
 -include_lib("snabbkaffe/include/snabbkaffe.hrl").
+-include_lib("emqx/include/emqx_trace.hrl").
 %% schema
 -export([roots/0, fields/1, desc/1, namespace/0]).
@@ -273,11 +274,14 @@ do_batch_query(InstId, Requests, Async, #{pool_name := PoolName} = State) ->
         _ -> none
     end,
     emqx_trace:rendered_action_template(ChannelID, #{
-        cqls => CQLs
+        cqls => #emqx_trace_format_func_data{data = CQLs, function = fun trace_format_cql_tuples/1}
     }),
     Res = exec_cql_batch_query(InstId, PoolName, Async, CQLs),
     handle_result(Res).
+trace_format_cql_tuples(Tuples) ->
+    [CQL || {_, CQL} <- Tuples].
 parse_request_to_cql({query, CQL}) ->
     {query, CQL, #{}};
 parse_request_to_cql({query, CQL, Params}) ->
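
The #emqx_trace_format_func_data{} record appears to defer formatting: the trace subsystem presumably applies the stored function to the stored data only when the entry is actually rendered. A minimal sketch of what the formatter function does with the batch (trace_format_cql_tuples/1 is private and the tag values here are placeholders):

%% Illustrative only: the batch seems to be a list of two-element tuples whose
%% second element is the CQL, so the formatter keeps just the statements.
Tuples = [{tag_1, <<"INSERT INTO t (k) VALUES (1)">>}, {tag_2, <<"UPDATE t SET k = 2">>}],
[<<"INSERT INTO t (k) VALUES (1)">>, <<"UPDATE t SET k = 2">>] = trace_format_cql_tuples(Tuples).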

View File

@@ -422,7 +422,7 @@ is_auth_key(_) ->
 %% -------------------------------------------------------------------------------------------------
 %% Query
 do_query(InstId, Channel, Client, Points) ->
-    emqx_trace:rendered_action_template(Channel, #{points => Points, is_async => false}),
+    emqx_trace:rendered_action_template(Channel, #{points => Points}),
     case greptimedb:write_batch(Client, Points) of
         {ok, #{response := {affected_rows, #{value := Rows}}}} ->
             ?SLOG(debug, #{
@@ -465,7 +465,7 @@ do_async_query(InstId, Channel, Client, Points, ReplyFunAndArgs) ->
         connector => InstId,
         points => Points
     }),
-    emqx_trace:rendered_action_template(Channel, #{points => Points, is_async => true}),
+    emqx_trace:rendered_action_template(Channel, #{points => Points}),
     WrappedReplyFunAndArgs = {fun ?MODULE:reply_callback/2, [ReplyFunAndArgs]},
     ok = greptimedb:async_write_batch(Client, Points, WrappedReplyFunAndArgs).

View File

@@ -119,7 +119,7 @@ on_query(
     ),
     emqx_trace:rendered_action_template(
         MessageTag,
-        #{command => Cmd, batch => false, mode => sync}
+        #{command => Cmd, batch => false}
     ),
     Result = query(InstId, {cmd, Cmd}, RedisConnSt),
     ?tp(
@@ -143,7 +143,7 @@ on_batch_query(
     [{ChannelID, _} | _] = BatchData,
     emqx_trace:rendered_action_template(
         ChannelID,
-        #{commands => Cmds, batch => ture, mode => sync}
+        #{commands => Cmds, batch => ture}
     ),
     Result = query(InstId, {cmds, Cmds}, RedisConnSt),
     ?tp(

View File

@@ -271,6 +271,16 @@ t_http_test_json_formatter(_Config) ->
     }),
     %% We should handle report style logging
     ?SLOG(error, #{msg => "recursive_republish_detected"}, #{topic => Topic}),
+    ?TRACE("CUSTOM", "my_log_msg", #{
+        topic => Topic,
+        %% This will be converted to map
+        map_key => [{a, a}, {b, b}]
+    }),
+    ?TRACE("CUSTOM", "my_log_msg", #{
+        topic => Topic,
+        %% We should not convert this to a map as we will lose information
+        map_key => [{a, a}, {a, b}]
+    }),
     ok = emqx_trace_handler_SUITE:filesync(Name, topic),
     {ok, _Detail2} = request_api(get, api_path("trace/" ++ binary_to_list(Name) ++ "/log_detail")),
     {ok, Bin} = request_api(get, api_path("trace/" ++ binary_to_list(Name) ++ "/download")),
@@ -425,6 +435,25 @@ t_http_test_json_formatter(_Config) ->
         },
         NextFun()
     ),
+    ?assertMatch(
+        #{
+            <<"meta">> := #{
+                <<"map_key">> := #{
+                    <<"a">> := <<"a">>,
+                    <<"b">> := <<"b">>
+                }
+            }
+        },
+        NextFun()
+    ),
+    ?assertMatch(
+        #{
+            <<"meta">> := #{
+                <<"map_key">> := [_, _]
+            }
+        },
+        NextFun()
+    ),
     {ok, Delete} = request_api(delete, api_path("trace/" ++ binary_to_list(Name))),
     ?assertEqual(<<>>, Delete),
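
In other words, the two ?TRACE calls above exercise both branches of the new conversion; roughly (a hand-written illustration of the assertions, not literal formatter output):

%% Hand-written illustration of the two asserted cases:
%%   map_key => [{a, a}, {b, b}]   %% unique keys
%%     is emitted as  "map_key": {"a": "a", "b": "b"}   -- a JSON object
%%   map_key => [{a, a}, {a, b}]   %% duplicate key 'a'
%%     is emitted as  "map_key": [..., ...]             -- a two-element JSON array,
%%                                                         so neither entry is lost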

View File

@@ -507,7 +507,13 @@ on_sql_query(
     LogMeta = #{connector => InstId, sql => SQLOrKey, state => State},
     ?TRACE("QUERY", "mysql_connector_received", LogMeta),
     ChannelID = maps:get(channel_id, State, no_channel),
-    emqx_trace:rendered_action_template(ChannelID, #{sql => SQLOrKey}),
+    emqx_trace:rendered_action_template(
+        ChannelID,
+        #{
+            sql_or_key => SQLOrKey,
+            parameters => Params
+        }
+    ),
     Worker = ecpool:get_client(PoolName),
     case ecpool_worker:client(Worker) of
         {ok, Conn} ->
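
For a prepared statement the rendered trace entry therefore carries both the statement (or its key) and the bound parameters; a rough sketch with made-up values:

%% Illustrative only (made-up key and parameter values): the rendered metadata
%% now looks something like
%%   #{sql_or_key => send_message, parameters => [<<"client_1">>, <<"payload">>]}
%% instead of only #{sql => send_message}, so parameter values are visible in the trace.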

View File

@@ -1174,12 +1174,13 @@ call_query(QM, Id, Index, Ref, Query, QueryOpts) ->
         {ok, _Group, #{status := ?status_connecting, error := unhealthy_target}} ->
             {error, {unrecoverable_error, unhealthy_target}};
         {ok, _Group, Resource} ->
+            PrevLoggerProcessMetadata = logger:get_process_metadata(),
             QueryResult =
                 try
                     set_rule_id_trace_meta_data(Query),
                     do_call_query(QM, Id, Index, Ref, Query, QueryOpts, Resource)
                 after
-                    unset_rule_id_trace_meta_data()
+                    reset_logger_process_metadata(PrevLoggerProcessMetadata)
                 end,
             QueryResult;
         {error, not_found} ->
@@ -1190,27 +1191,37 @@ set_rule_id_trace_meta_data(Requests) when is_list(Requests) ->
     %% Get the rule ids from requests
     RuleIDs = lists:foldl(fun collect_rule_id/2, #{}, Requests),
     ClientIDs = lists:foldl(fun collect_client_id/2, #{}, Requests),
-    RuleTriggerTimes = lists:foldl(fun collect_rule_trigger_times/2, [], Requests),
-    StopAfterRenderVal =
+    RuleTriggerTimes0 = lists:foldl(fun collect_rule_trigger_times/2, [], Requests),
+    RuleTriggerTimes = lists:flatten(RuleTriggerTimes0),
+    TraceMetadata =
         case Requests of
             %% We know that the batch is not mixed since we prevent this by
             %% using a stop_after function in the replayq:pop call
             [?QUERY(_, _, _, _, #{stop_action_after_render := true}) | _] ->
-                true;
+                #{
+                    rule_ids => RuleIDs,
+                    client_ids => ClientIDs,
+                    rule_trigger_ts => RuleTriggerTimes,
+                    stop_action_after_render => true
+                };
             [?QUERY(_, _, _, _, _TraceCTX) | _] ->
-                false
+                #{
+                    rule_ids => RuleIDs,
+                    client_ids => ClientIDs,
+                    rule_trigger_ts => RuleTriggerTimes
+                }
         end,
-    logger:update_process_metadata(#{
-        rule_ids => RuleIDs,
-        client_ids => ClientIDs,
-        rule_trigger_times => RuleTriggerTimes,
-        stop_action_after_render => StopAfterRenderVal
-    }),
+    logger:update_process_metadata(TraceMetadata),
     ok;
 set_rule_id_trace_meta_data(Request) ->
     set_rule_id_trace_meta_data([Request]),
     ok.
+reset_logger_process_metadata(undefined = _PrevProcessMetadata) ->
+    logger:unset_process_metadata();
+reset_logger_process_metadata(PrevProcessMetadata) ->
+    logger:set_process_metadata(PrevProcessMetadata).
 collect_rule_id(?QUERY(_, _, _, _, #{rule_id := RuleId}), Acc) ->
     Acc#{RuleId => true};
 collect_rule_id(?QUERY(_, _, _, _, _), Acc) ->
@@ -1221,19 +1232,11 @@ collect_client_id(?QUERY(_, _, _, _, #{clientid := ClientId}), Acc) ->
 collect_client_id(?QUERY(_, _, _, _, _), Acc) ->
     Acc.
-collect_rule_trigger_times(?QUERY(_, _, _, _, #{rule_trigger_time := Time}), Acc) ->
+collect_rule_trigger_times(?QUERY(_, _, _, _, #{rule_trigger_ts := Time}), Acc) ->
     [Time | Acc];
 collect_rule_trigger_times(?QUERY(_, _, _, _, _), Acc) ->
     Acc.
-unset_rule_id_trace_meta_data() ->
-    logger:update_process_metadata(#{
-        rule_ids => #{},
-        client_ids => #{},
-        stop_action_after_render => false,
-        rule_trigger_times => []
-    }).
 %% action:kafka_producer:myproducer1:connector:kafka_producer:mykakfaclient1
 extract_connector_id(Id) when is_binary(Id) ->
     case binary:split(Id, <<":">>, [global]) of
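
The recurring change in this file (and the rule-engine files below) is to snapshot the logger process metadata before mutating it and restore that snapshot afterwards, instead of blanking individual keys. A standalone sketch of the pattern (the with_trace_meta/2 helper name is hypothetical, not part of the patch):

%% Hypothetical helper illustrating the save/restore pattern used above:
with_trace_meta(Meta, Fun) ->
    Prev = logger:get_process_metadata(),
    try
        %% Merge the trace fields into whatever metadata is already set.
        logger:update_process_metadata(Meta),
        Fun()
    after
        %% Restore exactly what was there before; logger:set_process_metadata/1
        %% does not accept 'undefined', hence the extra clause.
        case Prev of
            undefined -> logger:unset_process_metadata();
            _ -> logger:set_process_metadata(Prev)
        end
    end.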

View File

@@ -70,6 +70,7 @@ apply_rule_discard_result(Rule, Columns, Envs) ->
     ok.
 apply_rule(Rule = #{id := RuleID}, Columns, Envs) ->
+    PrevProcessMetadata = logger:get_process_metadata(),
     set_process_trace_metadata(RuleID, Columns),
     trace_rule_sql(
         "rule_activated",
@@ -137,21 +138,26 @@ apply_rule(Rule = #{id := RuleID}, Columns, Envs) ->
         ),
         {error, {Error, StkTrace}}
     after
-        reset_process_trace_metadata(Columns)
+        reset_logger_process_metadata(PrevProcessMetadata)
     end.
 set_process_trace_metadata(RuleID, #{clientid := ClientID} = Columns) ->
     logger:update_process_metadata(#{
         clientid => ClientID,
         rule_id => RuleID,
-        rule_trigger_time => rule_trigger_time(Columns)
+        rule_trigger_ts => [rule_trigger_time(Columns)]
     });
 set_process_trace_metadata(RuleID, Columns) ->
     logger:update_process_metadata(#{
         rule_id => RuleID,
-        rule_trigger_time => rule_trigger_time(Columns)
+        rule_trigger_ts => [rule_trigger_time(Columns)]
     }).
+reset_logger_process_metadata(undefined = _PrevProcessMetadata) ->
+    logger:unset_process_metadata();
+reset_logger_process_metadata(PrevProcessMetadata) ->
+    logger:set_process_metadata(PrevProcessMetadata).
 rule_trigger_time(Columns) ->
     case Columns of
         #{timestamp := Timestamp} ->
@@ -160,18 +166,6 @@ rule_trigger_time(Columns) ->
             erlang:system_time(millisecond)
     end.
-reset_process_trace_metadata(#{clientid := _ClientID}) ->
-    Meta = logger:get_process_metadata(),
-    Meta1 = maps:remove(clientid, Meta),
-    Meta2 = maps:remove(rule_id, Meta1),
-    Meta3 = maps:remove(rule_trigger_time, Meta2),
-    logger:set_process_metadata(Meta3);
-reset_process_trace_metadata(_) ->
-    Meta = logger:get_process_metadata(),
-    Meta1 = maps:remove(rule_id, Meta),
-    Meta2 = maps:remove(rule_trigger_time, Meta1),
-    logger:set_process_metadata(Meta2).
 do_apply_rule(
     #{
         id := RuleId,
@@ -528,30 +522,40 @@ do_handle_action_get_trace_inc_metrics_context(RuleID, Action) ->
     end.
 do_handle_action_get_trace_inc_metrics_context_unconditionally(Action, TraceMeta) ->
-    StopAfterRender = maps:get(stop_action_after_render, TraceMeta, false),
+    StopAfterRenderMap =
+        case maps:get(stop_action_after_render, TraceMeta, false) of
+            false ->
+                #{};
+            true ->
+                #{stop_action_after_render => true}
+        end,
     case TraceMeta of
         #{
             rule_id := RuleID,
             clientid := ClientID,
-            rule_trigger_time := Timestamp
+            rule_trigger_ts := Timestamp
         } ->
-            #{
-                rule_id => RuleID,
-                clientid => ClientID,
-                action_id => Action,
-                stop_action_after_render => StopAfterRender,
-                rule_trigger_time => Timestamp
-            };
+            maps:merge(
+                #{
+                    rule_id => RuleID,
+                    clientid => ClientID,
+                    action_id => Action,
+                    rule_trigger_ts => Timestamp
+                },
+                StopAfterRenderMap
+            );
         #{
             rule_id := RuleID,
-            rule_trigger_time := Timestamp
+            rule_trigger_ts := Timestamp
         } ->
-            #{
-                rule_id => RuleID,
-                action_id => Action,
-                stop_action_after_render => StopAfterRender,
-                rule_trigger_time => Timestamp
-            }
+            maps:merge(
+                #{
+                    rule_id => RuleID,
+                    action_id => Action,
+                    rule_trigger_ts => Timestamp
+                },
+                StopAfterRenderMap
+            )
     end.
 action_info({bridge, BridgeType, BridgeName, _ResId}) ->
@@ -740,7 +744,20 @@ nested_put(Alias, Val, Columns0) ->
     emqx_rule_maps:nested_put(Alias, Val, Columns).
 inc_action_metrics(TraceCtx, Result) ->
-    _ = do_inc_action_metrics(TraceCtx, Result),
+    SavedMetaData = logger:get_process_metadata(),
+    try
+        %% To not pollute the trace we temporary remove the process meta data
+        logger:unset_process_metadata(),
+        _ = do_inc_action_metrics(TraceCtx, Result)
+    after
+        %% Setting process metadata to undefined yields an error
+        case SavedMetaData of
+            undefined ->
+                ok;
+            _ ->
+                logger:set_process_metadata(SavedMetaData)
+        end
+    end,
     Result.
 do_inc_action_metrics(
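
With the maps:merge/2 arrangement, stop_action_after_render only appears in the trace context when it is actually true. A minimal sketch under made-up values (illustrative, not code from the patch):

%% Illustrative only: the stop flag is merged in rather than always present.
Base = #{rule_id => <<"my_rule">>, action_id => my_action, rule_trigger_ts => [1716215138000]},
%% Normal run: merging with #{} leaves the context without the flag at all.
Base = maps:merge(Base, #{}),
%% Stop-after-render run: only then does the key show up in the context.
true = maps:is_key(stop_action_after_render, maps:merge(Base, #{stop_action_after_render => true})).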

View File

@@ -52,7 +52,8 @@ do_apply_rule(
             do_apply_matched_rule(
                 Rule,
                 Context,
-                StopAfterRender
+                StopAfterRender,
+                EventTopics
             );
         false ->
             {error, nomatch}
@@ -61,21 +62,29 @@
             case lists:member(InTopic, EventTopics) of
                 true ->
                     %% the rule is for both publish and events, test it directly
-                    do_apply_matched_rule(Rule, Context, StopAfterRender);
+                    do_apply_matched_rule(Rule, Context, StopAfterRender, EventTopics);
                 false ->
                     {error, nomatch}
             end
     end.
-do_apply_matched_rule(Rule, Context, StopAfterRender) ->
-    update_process_trace_metadata(StopAfterRender),
-    ApplyRuleRes = emqx_rule_runtime:apply_rule(
-        Rule,
-        Context,
-        apply_rule_environment()
-    ),
-    reset_trace_process_metadata(StopAfterRender),
-    ApplyRuleRes.
+do_apply_matched_rule(Rule, Context, StopAfterRender, EventTopics) ->
+    PrevLoggerProcessMetadata = logger:get_process_metadata(),
+    try
+        update_process_trace_metadata(StopAfterRender),
+        FullContext = fill_default_values(
+            hd(EventTopics),
+            emqx_rule_maps:atom_key_map(Context)
+        ),
+        ApplyRuleRes = emqx_rule_runtime:apply_rule(
+            Rule,
+            FullContext,
+            apply_rule_environment()
+        ),
+        ApplyRuleRes
+    after
+        reset_logger_process_metadata(PrevLoggerProcessMetadata)
+    end.
 update_process_trace_metadata(true = _StopAfterRender) ->
     logger:update_process_metadata(#{
@@ -84,12 +93,10 @@ update_process_trace_metadata(true = _StopAfterRender) ->
 update_process_trace_metadata(false = _StopAfterRender) ->
     ok.
-reset_trace_process_metadata(true = _StopAfterRender) ->
-    Meta = logger:get_process_metadata(),
-    NewMeta = maps:remove(stop_action_after_render, Meta),
-    logger:set_process_metadata(NewMeta);
-reset_trace_process_metadata(false = _StopAfterRender) ->
-    ok.
+reset_logger_process_metadata(undefined = _PrevProcessMetadata) ->
+    logger:unset_process_metadata();
+reset_logger_process_metadata(PrevProcessMetadata) ->
+    logger:set_process_metadata(PrevProcessMetadata).
 %% At the time of writing the environment passed to the apply rule function is
 %% not used at all for normal actions. When it is used for custom functions it

View File

@@ -216,18 +216,15 @@ basic_apply_rule_test_helper(Action, TraceType, StopAfterRender) ->
             end
         )
     end,
-    %% Check that rule_trigger_time meta field is present in all log entries
+    %% Check that rule_trigger_ts meta field is present in all log entries
    Log0 = read_rule_trace_file(TraceName, TraceType, Now),
    Log1 = binary:split(Log0, <<"\n">>, [global, trim]),
    Log2 = lists:join(<<",\n">>, Log1),
    Log3 = iolist_to_binary(["[", Log2, "]"]),
    {ok, LogEntries} = emqx_utils_json:safe_decode(Log3, [return_maps]),
-    [#{<<"meta">> := #{<<"rule_trigger_time">> := RuleTriggerTime}} | _] = LogEntries,
+    [#{<<"meta">> := #{<<"rule_trigger_ts">> := [RuleTriggerTime]}} | _] = LogEntries,
    [
-        ?assert(
-            (maps:get(<<"rule_trigger_time">>, Meta, no_time) =:= RuleTriggerTime) orelse
-                (lists:member(RuleTriggerTime, maps:get(<<"rule_trigger_times">>, Meta, [])))
-        )
+        ?assert(lists:member(RuleTriggerTime, maps:get(<<"rule_trigger_ts">>, Meta, [])))
     || #{<<"meta">> := Meta} <- LogEntries
    ],
    ok.
@@ -265,8 +262,7 @@ do_final_log_check(Action, Bin0) when is_binary(Action) ->
                    <<"result">> := <<"ok">>
                },
                <<"rule_id">> := _,
-                <<"rule_trigger_time">> := _,
-                <<"stop_action_after_render">> := false,
+                <<"rule_trigger_ts">> := _,
                <<"trace_tag">> := <<"ACTION">>
            },
            <<"msg">> := <<"action_success">>,
@@ -360,9 +356,10 @@ t_apply_rule_test_batch_separation_stop_after_render(_Config) ->
            ok;
        CheckBatchesFunRec(CurCount) ->
            receive
-                [{_, #{<<"stop_after_render">> := StopValue}} | _] = List ->
+                [{_, FirstMsg} | _] = List ->
+                    StopValue = maps:get(<<"stop_after_render">>, FirstMsg, false),
                    [
-                        ?assertMatch(#{<<"stop_after_render">> := StopValue}, Msg)
+                        ?assertEqual(StopValue, maps:get(<<"stop_after_render">>, Msg, false))
                     || {_, Msg} <- List
                    ],
                    Len = length(List),
@@ -419,21 +416,20 @@ t_apply_rule_test_format_action_failed(_Config) ->
                    <<"name">> := _,
                    <<"type">> := <<"rule_engine_test">>
                },
-                <<"client_ids">> := [],
                <<"clientid">> := _,
                <<"reason">> := <<"MY REASON">>,
                <<"rule_id">> := _,
-                <<"rule_ids">> := [],
-                <<"rule_trigger_time">> := _,
-                <<"rule_trigger_times">> := [],
-                <<"stop_action_after_render">> := false,
+                <<"rule_trigger_ts">> := _,
                <<"trace_tag">> := <<"ACTION">>
            },
            <<"msg">> := <<"action_failed">>,
            <<"time">> := _
        },
        LastEntryJSON
-    )
+    ),
+    MetaMap = maps:get(<<"meta">>, LastEntryJSON),
+    ?assert(not maps:is_key(<<"client_ids">>, MetaMap)),
+    ?assert(not maps:is_key(<<"rule_ids">>, MetaMap))
    end,
    do_apply_rule_test_format_action_failed_test(1, CheckFun).
@@ -494,8 +490,7 @@ out_of_service_check_fun(SendErrorMsg, Reason) ->
                <<"clientid">> := _,
                <<"reason">> := <<"request_expired">>,
                <<"rule_id">> := _,
-                <<"rule_trigger_time">> := _,
-                <<"stop_action_after_render">> := false,
+                <<"rule_trigger_ts">> := _,
                <<"trace_tag">> := <<"ACTION">>
            },
            <<"msg">> := <<"action_failed">>,
@@ -511,7 +506,6 @@ out_of_service_check_fun(SendErrorMsg, Reason) ->
            <<"level">> := <<"debug">>,
            <<"meta">> :=
                #{
-                    <<"client_ids">> := [],
                    <<"clientid">> := _,
                    <<"id">> := _,
                    <<"reason">> :=
@@ -521,17 +515,17 @@ out_of_service_check_fun(SendErrorMsg, Reason) ->
                        <<"msg">> := <<"MY_RECOVERABLE_REASON">>
                    },
                    <<"rule_id">> := _,
-                    <<"rule_ids">> := [],
-                    <<"rule_trigger_time">> := _,
-                    <<"rule_trigger_times">> := [],
-                    <<"stop_action_after_render">> := false,
+                    <<"rule_trigger_ts">> := _,
                    <<"trace_tag">> := <<"ERROR">>
                },
                <<"msg">> := SendErrorMsg,
                <<"time">> := _
            },
            ReasonEntryJSON
-        )
+        ),
+        MetaMap = maps:get(<<"meta">>, ReasonEntryJSON),
+        ?assert(not maps:is_key(<<"client_ids">>, MetaMap)),
+        ?assert(not maps:is_key(<<"rule_ids">>, MetaMap))
    end.
 meck_test_connector_recoverable_errors(Reason) ->