diff --git a/apps/emqx_bridge/rebar.config b/apps/emqx_bridge/rebar.config index d24d23f8c..0a1cbc29b 100644 --- a/apps/emqx_bridge/rebar.config +++ b/apps/emqx_bridge/rebar.config @@ -1,8 +1,9 @@ {erl_opts, [debug_info]}. -{deps, [ {emqx, {path, "../emqx"}} - ]}. +{deps, [{emqx, {path, "../emqx"}}]}. {shell, [ - % {config, "config/sys.config"}, + % {config, "config/sys.config"}, {apps, [emqx_bridge]} ]}. + +{project_plugins, [erlfmt]}. diff --git a/apps/emqx_bridge/src/emqx_bridge.app.src b/apps/emqx_bridge/src/emqx_bridge.app.src index 2a2f11603..70550efe4 100644 --- a/apps/emqx_bridge/src/emqx_bridge.app.src +++ b/apps/emqx_bridge/src/emqx_bridge.app.src @@ -1,18 +1,18 @@ %% -*- mode: erlang -*- -{application, emqx_bridge, - [{description, "An OTP application"}, - {vsn, "0.1.0"}, - {registered, []}, - {mod, {emqx_bridge_app, []}}, - {applications, - [kernel, - stdlib, - emqx, - emqx_connector - ]}, - {env,[]}, - {modules, []}, +{application, emqx_bridge, [ + {description, "An OTP application"}, + {vsn, "0.1.0"}, + {registered, []}, + {mod, {emqx_bridge_app, []}}, + {applications, [ + kernel, + stdlib, + emqx, + emqx_connector + ]}, + {env, []}, + {modules, []}, - {licenses, ["Apache 2.0"]}, - {links, []} - ]}. + {licenses, ["Apache 2.0"]}, + {links, []} +]}. diff --git a/apps/emqx_bridge/src/emqx_bridge.erl b/apps/emqx_bridge/src/emqx_bridge.erl index b2939f9a9..88f12c1f5 100644 --- a/apps/emqx_bridge/src/emqx_bridge.erl +++ b/apps/emqx_bridge/src/emqx_bridge.erl @@ -18,48 +18,48 @@ -include_lib("emqx/include/emqx.hrl"). -include_lib("emqx/include/logger.hrl"). --export([ post_config_update/5 - ]). +-export([post_config_update/5]). --export([ load_hook/0 - , unload_hook/0 - ]). +-export([ + load_hook/0, + unload_hook/0 +]). -export([on_message_publish/1]). --export([ resource_type/1 - , bridge_type/1 - , resource_id/1 - , resource_id/2 - , bridge_id/2 - , parse_bridge_id/1 - ]). 
+-export([ + resource_type/1, + bridge_type/1, + resource_id/1, + resource_id/2, + bridge_id/2, + parse_bridge_id/1 +]). --export([ load/0 - , lookup/1 - , lookup/2 - , lookup/3 - , list/0 - , list_bridges_by_connector/1 - , create/2 - , create/3 - , recreate/2 - , recreate/3 - , create_dry_run/2 - , remove/1 - , remove/2 - , update/2 - , update/3 - , stop/2 - , restart/2 - , reset_metrics/1 - ]). +-export([ + load/0, + lookup/1, + lookup/2, + lookup/3, + list/0, + list_bridges_by_connector/1, + create/2, + create/3, + recreate/2, + recreate/3, + create_dry_run/2, + remove/1, + remove/2, + update/2, + update/3, + stop/2, + restart/2, + reset_metrics/1 +]). --export([ send_message/2 - ]). +-export([send_message/2]). --export([ config_key_path/0 - ]). +-export([config_key_path/0]). %% exported for `emqx_telemetry' -export([get_basic_usage_info/0]). @@ -69,18 +69,25 @@ load_hook() -> load_hook(Bridges). load_hook(Bridges) -> - lists:foreach(fun({_Type, Bridge}) -> - lists:foreach(fun({_Name, BridgeConf}) -> + lists:foreach( + fun({_Type, Bridge}) -> + lists:foreach( + fun({_Name, BridgeConf}) -> do_load_hook(BridgeConf) - end, maps:to_list(Bridge)) - end, maps:to_list(Bridges)). + end, + maps:to_list(Bridge) + ) + end, + maps:to_list(Bridges) + ). do_load_hook(#{local_topic := _} = Conf) -> case maps:get(direction, Conf, egress) of egress -> emqx_hooks:put('message.publish', {?MODULE, on_message_publish, []}); ingress -> ok end; -do_load_hook(_Conf) -> ok. +do_load_hook(_Conf) -> + ok. unload_hook() -> ok = emqx_hooks:del('message.publish', {?MODULE, on_message_publish}). @@ -90,23 +97,36 @@ on_message_publish(Message = #message{topic = Topic, flags = Flags}) -> false -> Msg = emqx_rule_events:eventmsg_publish(Message), send_to_matched_egress_bridges(Topic, Msg); - true -> ok + true -> + ok end, {ok, Message}. 
send_to_matched_egress_bridges(Topic, Msg) -> - lists:foreach(fun (Id) -> - try send_message(Id, Msg) of - {error, Reason} -> - ?SLOG(error, #{msg => "send_message_to_bridge_failed", - bridge => Id, error => Reason}); - _ -> ok - catch Err:Reason:ST -> - ?SLOG(error, #{msg => "send_message_to_bridge_exception", - bridge => Id, error => Err, reason => Reason, - stacktrace => ST}) - end - end, get_matched_bridges(Topic)). + lists:foreach( + fun(Id) -> + try send_message(Id, Msg) of + {error, Reason} -> + ?SLOG(error, #{ + msg => "send_message_to_bridge_failed", + bridge => Id, + error => Reason + }); + _ -> + ok + catch + Err:Reason:ST -> + ?SLOG(error, #{ + msg => "send_message_to_bridge_exception", + bridge => Id, + error => Err, + reason => Reason, + stacktrace => ST + }) + end + end, + get_matched_bridges(Topic) + ). send_message(BridgeId, Message) -> {BridgeType, BridgeName} = parse_bridge_id(BridgeId), @@ -132,8 +152,8 @@ bridge_type(emqx_connector_mqtt) -> mqtt; bridge_type(emqx_connector_http) -> http. post_config_update(_, _Req, NewConf, OldConf, _AppEnv) -> - #{added := Added, removed := Removed, changed := Updated} - = diff_confs(NewConf, OldConf), + #{added := Added, removed := Removed, changed := Updated} = + diff_confs(NewConf, OldConf), %% The config update will be failed if any task in `perform_bridge_changes` failed. 
Result = perform_bridge_changes([ {fun remove/3, Removed}, @@ -150,15 +170,19 @@ perform_bridge_changes(Tasks) -> perform_bridge_changes([], Result) -> Result; perform_bridge_changes([{Action, MapConfs} | Tasks], Result0) -> - Result = maps:fold(fun - ({_Type, _Name}, _Conf, {error, Reason}) -> - {error, Reason}; - ({Type, Name}, Conf, _) -> - case Action(Type, Name, Conf) of - {error, Reason} -> {error, Reason}; - Return -> Return - end - end, Result0, MapConfs), + Result = maps:fold( + fun + ({_Type, _Name}, _Conf, {error, Reason}) -> + {error, Reason}; + ({Type, Name}, Conf, _) -> + case Action(Type, Name, Conf) of + {error, Reason} -> {error, Reason}; + Return -> Return + end + end, + Result0, + MapConfs + ), perform_bridge_changes(Tasks, Result). load() -> @@ -184,18 +208,29 @@ parse_bridge_id(BridgeId) -> end. list() -> - lists:foldl(fun({Type, NameAndConf}, Bridges) -> - lists:foldl(fun({Name, RawConf}, Acc) -> + lists:foldl( + fun({Type, NameAndConf}, Bridges) -> + lists:foldl( + fun({Name, RawConf}, Acc) -> case lookup(Type, Name, RawConf) of {error, not_found} -> Acc; {ok, Res} -> [Res | Acc] end - end, Bridges, maps:to_list(NameAndConf)) - end, [], maps:to_list(emqx:get_raw_config([bridges], #{}))). + end, + Bridges, + maps:to_list(NameAndConf) + ) + end, + [], + maps:to_list(emqx:get_raw_config([bridges], #{})) + ). list_bridges_by_connector(ConnectorId) -> - [B || B = #{raw_config := #{<<"connector">> := Id}} <- list(), - ConnectorId =:= Id]. + [ + B + || B = #{raw_config := #{<<"connector">> := Id}} <- list(), + ConnectorId =:= Id + ]. lookup(Id) -> {Type, Name} = parse_bridge_id(Id), @@ -206,10 +241,15 @@ lookup(Type, Name) -> lookup(Type, Name, RawConf). 
lookup(Type, Name, RawConf) -> case emqx_resource:get_instance(resource_id(Type, Name)) of - {error, not_found} -> {error, not_found}; + {error, not_found} -> + {error, not_found}; {ok, _, Data} -> - {ok, #{type => Type, name => Name, resource_data => Data, - raw_config => RawConf}} + {ok, #{ + type => Type, + name => Name, + resource_data => Data, + raw_config => RawConf + }} end. reset_metrics(ResourceId) -> @@ -227,13 +267,21 @@ create(BridgeId, Conf) -> create(BridgeType, BridgeName, Conf). create(Type, Name, Conf) -> - ?SLOG(info, #{msg => "create bridge", type => Type, name => Name, - config => Conf}), - case emqx_resource:create_local(resource_id(Type, Name), - <<"emqx_bridge">>, - emqx_bridge:resource_type(Type), - parse_confs(Type, Name, Conf), - #{}) of + ?SLOG(info, #{ + msg => "create bridge", + type => Type, + name => Name, + config => Conf + }), + case + emqx_resource:create_local( + resource_id(Type, Name), + <<"emqx_bridge">>, + emqx_bridge:resource_type(Type), + parse_confs(Type, Name, Conf), + #{} + ) + of {ok, already_created} -> maybe_disable_bridge(Type, Name, Conf); {ok, _} -> maybe_disable_bridge(Type, Name, Conf); {error, Reason} -> {error, Reason} @@ -254,15 +302,25 @@ update(Type, Name, {OldConf, Conf}) -> %% case if_only_to_toggle_enable(OldConf, Conf) of false -> - ?SLOG(info, #{msg => "update bridge", type => Type, name => Name, - config => Conf}), + ?SLOG(info, #{ + msg => "update bridge", + type => Type, + name => Name, + config => Conf + }), case recreate(Type, Name, Conf) of - {ok, _} -> maybe_disable_bridge(Type, Name, Conf); + {ok, _} -> + maybe_disable_bridge(Type, Name, Conf); {error, not_found} -> - ?SLOG(warning, #{ msg => "updating_a_non-exist_bridge_need_create_a_new_one" - , type => Type, name => Name, config => Conf}), + ?SLOG(warning, #{ + msg => "updating_a_non-exist_bridge_need_create_a_new_one", + type => Type, + name => Name, + config => Conf + }), create(Type, Name, Conf); - {error, Reason} -> {error, 
{update_bridge_failed, Reason}} + {error, Reason} -> + {error, {update_bridge_failed, Reason}} end; true -> %% we don't need to recreate the bridge if this config change is only to @@ -277,22 +335,25 @@ recreate(Type, Name) -> recreate(Type, Name, emqx:get_config([bridges, Type, Name])). recreate(Type, Name, Conf) -> - emqx_resource:recreate_local(resource_id(Type, Name), + emqx_resource:recreate_local( + resource_id(Type, Name), emqx_bridge:resource_type(Type), parse_confs(Type, Name, Conf), - #{}). + #{} + ). create_dry_run(Type, Conf) -> - - Conf0 = Conf#{<<"egress">> => - #{ <<"remote_topic">> => <<"t">> - , <<"remote_qos">> => 0 - , <<"retain">> => true - , <<"payload">> => <<"val">> - }, - <<"ingress">> => - #{ <<"remote_topic">> => <<"t">> - }}, + Conf0 = Conf#{ + <<"egress">> => + #{ + <<"remote_topic">> => <<"t">>, + <<"remote_qos">> => 0, + <<"retain">> => true, + <<"payload">> => <<"val">> + }, + <<"ingress">> => + #{<<"remote_topic">> => <<"t">>} + }, case emqx_resource:check_config(emqx_bridge:resource_type(Type), Conf0) of {ok, Conf1} -> emqx_resource:create_dry_run_local(emqx_bridge:resource_type(Type), Conf1); @@ -313,35 +374,48 @@ remove(Type, Name, _Conf) -> case emqx_resource:remove_local(resource_id(Type, Name)) of ok -> ok; {error, not_found} -> ok; - {error, Reason} -> - {error, Reason} + {error, Reason} -> {error, Reason} end. diff_confs(NewConfs, OldConfs) -> - emqx_map_lib:diff_maps(flatten_confs(NewConfs), - flatten_confs(OldConfs)). + emqx_map_lib:diff_maps( + flatten_confs(NewConfs), + flatten_confs(OldConfs) + ). flatten_confs(Conf0) -> maps:from_list( - lists:flatmap(fun({Type, Conf}) -> + lists:flatmap( + fun({Type, Conf}) -> do_flatten_confs(Type, Conf) - end, maps:to_list(Conf0))). + end, + maps:to_list(Conf0) + ) + ). do_flatten_confs(Type, Conf0) -> [{{Type, Name}, Conf} || {Name, Conf} <- maps:to_list(Conf0)]. 
get_matched_bridges(Topic) -> Bridges = emqx:get_config([bridges], #{}), - maps:fold(fun (BType, Conf, Acc0) -> - maps:fold(fun - %% Confs for MQTT, Kafka bridges have the `direction` flag - (_BName, #{direction := ingress}, Acc1) -> - Acc1; - (BName, #{direction := egress} = Egress, Acc1) -> - %% HTTP, MySQL bridges only have egress direction - get_matched_bridge_id(Egress, Topic, BType, BName, Acc1) - end, Acc0, Conf) - end, [], Bridges). + maps:fold( + fun(BType, Conf, Acc0) -> + maps:fold( + fun + %% Confs for MQTT, Kafka bridges have the `direction` flag + (_BName, #{direction := ingress}, Acc1) -> + Acc1; + (BName, #{direction := egress} = Egress, Acc1) -> + %% HTTP, MySQL bridges only have egress direction + get_matched_bridge_id(Egress, Topic, BType, BName, Acc1) + end, + Acc0, + Conf + ) + end, + [], + Bridges + ). get_matched_bridge_id(#{enable := false}, _Topic, _BType, _BName, Acc) -> Acc; @@ -351,38 +425,56 @@ get_matched_bridge_id(#{local_topic := Filter}, Topic, BType, BName, Acc) -> false -> Acc end. 
-parse_confs(http, _Name, - #{ url := Url - , method := Method - , body := Body - , headers := Headers - , request_timeout := ReqTimeout - } = Conf) -> +parse_confs( + http, + _Name, + #{ + url := Url, + method := Method, + body := Body, + headers := Headers, + request_timeout := ReqTimeout + } = Conf +) -> {BaseUrl, Path} = parse_url(Url), {ok, BaseUrl2} = emqx_http_lib:uri_parse(BaseUrl), - Conf#{ base_url => BaseUrl2 - , request => - #{ path => Path - , method => Method - , body => Body - , headers => Headers - , request_timeout => ReqTimeout - } - }; -parse_confs(Type, Name, #{connector := ConnId, direction := Direction} = Conf) - when is_binary(ConnId) -> + Conf#{ + base_url => BaseUrl2, + request => + #{ + path => Path, + method => Method, + body => Body, + headers => Headers, + request_timeout => ReqTimeout + } + }; +parse_confs(Type, Name, #{connector := ConnId, direction := Direction} = Conf) when + is_binary(ConnId) +-> case emqx_connector:parse_connector_id(ConnId) of {Type, ConnName} -> ConnectorConfs = emqx:get_config([connectors, Type, ConnName]), - make_resource_confs(Direction, ConnectorConfs, - maps:without([connector, direction], Conf), Type, Name); + make_resource_confs( + Direction, + ConnectorConfs, + maps:without([connector, direction], Conf), + Type, + Name + ); {_ConnType, _ConnName} -> error({cannot_use_connector_with_different_type, ConnId}) end; -parse_confs(Type, Name, #{connector := ConnectorConfs, direction := Direction} = Conf) - when is_map(ConnectorConfs) -> - make_resource_confs(Direction, ConnectorConfs, - maps:without([connector, direction], Conf), Type, Name). +parse_confs(Type, Name, #{connector := ConnectorConfs, direction := Direction} = Conf) when + is_map(ConnectorConfs) +-> + make_resource_confs( + Direction, + ConnectorConfs, + maps:without([connector, direction], Conf), + Type, + Name + ). 
make_resource_confs(ingress, ConnectorConfs, BridgeConf, Type, Name) -> BName = bridge_id(Type, Name), @@ -417,39 +509,48 @@ if_only_to_toggle_enable(OldConf, Conf) -> #{added := Added, removed := Removed, changed := Updated} = emqx_map_lib:diff_maps(OldConf, Conf), case {Added, Removed, Updated} of - {Added, Removed, #{enable := _}= Updated} - when map_size(Added) =:= 0, - map_size(Removed) =:= 0, - map_size(Updated) =:= 1 -> true; - {_, _, _} -> false + {Added, Removed, #{enable := _} = Updated} when + map_size(Added) =:= 0, + map_size(Removed) =:= 0, + map_size(Updated) =:= 1 + -> + true; + {_, _, _} -> + false end. -spec get_basic_usage_info() -> - #{ num_bridges => non_neg_integer() - , count_by_type => - #{ BridgeType => non_neg_integer() - } - } when BridgeType :: atom(). + #{ + num_bridges => non_neg_integer(), + count_by_type => + #{BridgeType => non_neg_integer()} + } +when + BridgeType :: atom(). get_basic_usage_info() -> InitialAcc = #{num_bridges => 0, count_by_type => #{}}, try lists:foldl( - fun(#{resource_data := #{config := #{enable := false}}}, Acc) -> - Acc; - (#{type := BridgeType}, Acc) -> - NumBridges = maps:get(num_bridges, Acc), - CountByType0 = maps:get(count_by_type, Acc), - CountByType = maps:update_with( - binary_to_atom(BridgeType, utf8), - fun(X) -> X + 1 end, - 1, - CountByType0), - Acc#{ num_bridges => NumBridges + 1 - , count_by_type => CountByType - } - end, - InitialAcc, - list()) + fun + (#{resource_data := #{config := #{enable := false}}}, Acc) -> + Acc; + (#{type := BridgeType}, Acc) -> + NumBridges = maps:get(num_bridges, Acc), + CountByType0 = maps:get(count_by_type, Acc), + CountByType = maps:update_with( + binary_to_atom(BridgeType, utf8), + fun(X) -> X + 1 end, + 1, + CountByType0 + ), + Acc#{ + num_bridges => NumBridges + 1, + count_by_type => CountByType + } + end, + InitialAcc, + list() + ) catch %% for instance, when the bridge app is not ready yet. 
_:_ -> diff --git a/apps/emqx_bridge/src/emqx_bridge_api.erl b/apps/emqx_bridge/src/emqx_bridge_api.erl index ff2250844..9870830d0 100644 --- a/apps/emqx_bridge/src/emqx_bridge_api.erl +++ b/apps/emqx_bridge/src/emqx_bridge_api.erl @@ -24,22 +24,23 @@ -import(hoconsc, [mk/2, array/1, enum/1]). %% Swagger specs from hocon schema --export([ api_spec/0 - , paths/0 - , schema/1 - , namespace/0 - ]). +-export([ + api_spec/0, + paths/0, + schema/1, + namespace/0 +]). %% API callbacks --export([ '/bridges'/2 - , '/bridges/:id'/2 - , '/bridges/:id/operation/:operation'/2 - , '/nodes/:node/bridges/:id/operation/:operation'/2 - , '/bridges/:id/reset_metrics'/2 - ]). +-export([ + '/bridges'/2, + '/bridges/:id'/2, + '/bridges/:id/operation/:operation'/2, + '/nodes/:node/bridges/:id/operation/:operation'/2, + '/bridges/:id/reset_metrics'/2 +]). --export([ lookup_from_local_node/2 - ]). +-export([lookup_from_local_node/2]). -define(TYPES, [mqtt, http]). @@ -51,35 +52,45 @@ EXPR catch error:{invalid_bridge_id, Id0} -> - {400, error_msg('INVALID_ID', <<"invalid_bridge_id: ", Id0/binary, - ". Bridge Ids must be of format {type}:{name}">>)} - end). + {400, + error_msg( + 'INVALID_ID', + <<"invalid_bridge_id: ", Id0/binary, + ". Bridge Ids must be of format {type}:{name}">> + )} + end +). --define(METRICS(MATCH, SUCC, FAILED, RATE, RATE_5, RATE_MAX), - #{ matched => MATCH, - success => SUCC, - failed => FAILED, - rate => RATE, - rate_last5m => RATE_5, - rate_max => RATE_MAX - }). --define(metrics(MATCH, SUCC, FAILED, RATE, RATE_5, RATE_MAX), - #{ matched := MATCH, - success := SUCC, - failed := FAILED, - rate := RATE, - rate_last5m := RATE_5, - rate_max := RATE_MAX - }). +-define(METRICS(MATCH, SUCC, FAILED, RATE, RATE_5, RATE_MAX), #{ + matched => MATCH, + success => SUCC, + failed => FAILED, + rate => RATE, + rate_last5m => RATE_5, + rate_max => RATE_MAX +}). 
+-define(metrics(MATCH, SUCC, FAILED, RATE, RATE_5, RATE_MAX), #{ + matched := MATCH, + success := SUCC, + failed := FAILED, + rate := RATE, + rate_last5m := RATE_5, + rate_max := RATE_MAX +}). namespace() -> "bridge". api_spec() -> emqx_dashboard_swagger:spec(?MODULE, #{check_schema => false}). -paths() -> ["/bridges", "/bridges/:id", "/bridges/:id/operation/:operation", - "/nodes/:node/bridges/:id/operation/:operation", - "/bridges/:id/reset_metrics"]. +paths() -> + [ + "/bridges", + "/bridges/:id", + "/bridges/:id/operation/:operation", + "/nodes/:node/bridges/:id/operation/:operation", + "/bridges/:id/reset_metrics" + ]. error_schema(Code, Message) when is_atom(Code) -> error_schema([Code], Message); @@ -89,40 +100,58 @@ error_schema(Codes, Message) when is_list(Codes) andalso is_binary(Message) -> emqx_dashboard_swagger:error_codes(Codes, Message). get_response_body_schema() -> - emqx_dashboard_swagger:schema_with_examples(emqx_bridge_schema:get_response(), - bridge_info_examples(get)). + emqx_dashboard_swagger:schema_with_examples( + emqx_bridge_schema:get_response(), + bridge_info_examples(get) + ). param_path_operation_cluster() -> - {operation, mk(enum([enable, disable, stop, restart]), - #{ in => path - , required => true - , example => <<"start">> - , desc => ?DESC("desc_param_path_operation_cluster") - })}. + {operation, + mk( + enum([enable, disable, stop, restart]), + #{ + in => path, + required => true, + example => <<"start">>, + desc => ?DESC("desc_param_path_operation_cluster") + } + )}. param_path_operation_on_node() -> - {operation, mk(enum([stop, restart]), - #{ in => path - , required => true - , example => <<"start">> - , desc => ?DESC("desc_param_path_operation_on_node") - })}. + {operation, + mk( + enum([stop, restart]), + #{ + in => path, + required => true, + example => <<"start">>, + desc => ?DESC("desc_param_path_operation_on_node") + } + )}. 
param_path_node() -> - {node, mk(binary(), - #{ in => path - , required => true - , example => <<"emqx@127.0.0.1">> - , desc => ?DESC("desc_param_path_node") - })}. + {node, + mk( + binary(), + #{ + in => path, + required => true, + example => <<"emqx@127.0.0.1">>, + desc => ?DESC("desc_param_path_node") + } + )}. param_path_id() -> - {id, mk(binary(), - #{ in => path - , required => true - , example => <<"http:my_http_bridge">> - , desc => ?DESC("desc_param_path_id") - })}. + {id, + mk( + binary(), + #{ + in => path, + required => true, + example => <<"http:my_http_bridge">>, + desc => ?DESC("desc_param_path_id") + } + )}. bridge_info_array_example(Method) -> [Config || #{value := Config} <- maps:values(bridge_info_examples(Method))]. @@ -136,7 +165,8 @@ bridge_info_examples(Method) -> }). conn_bridge_examples(Method) -> - lists:foldl(fun(Type, Acc) -> + lists:foldl( + fun(Type, Acc) -> SType = atom_to_list(Type), KeyIngress = bin(SType ++ "_ingress"), KeyEgress = bin(SType ++ "_egress"), @@ -150,19 +180,25 @@ conn_bridge_examples(Method) -> value => info_example(Type, egress, Method) } }) - end, #{}, ?CONN_TYPES). + end, + #{}, + ?CONN_TYPES + ). info_example(Type, Direction, Method) -> - maps:merge(info_example_basic(Type, Direction), - method_example(Type, Direction, Method)). + maps:merge( + info_example_basic(Type, Direction), + method_example(Type, Direction, Method) + ). 
method_example(Type, Direction, Method) when Method == get; Method == post -> SType = atom_to_list(Type), SDir = atom_to_list(Direction), - SName = case Type of - http -> "my_" ++ SType ++ "_bridge"; - _ -> "my_" ++ SDir ++ "_" ++ SType ++ "_bridge" - end, + SName = + case Type of + http -> "my_" ++ SType ++ "_bridge"; + _ -> "my_" ++ SDir ++ "_" ++ SType ++ "_bridge" + end, TypeNameExamp = #{ type => bin(SType), name => bin(SName) @@ -175,8 +211,10 @@ maybe_with_metrics_example(TypeNameExamp, get) -> TypeNameExamp#{ metrics => ?METRICS(0, 0, 0, 0, 0, 0), node_metrics => [ - #{node => node(), - metrics => ?METRICS(0, 0, 0, 0, 0, 0)} + #{ + node => node(), + metrics => ?METRICS(0, 0, 0, 0, 0, 0) + } ] }; maybe_with_metrics_example(TypeNameExamp, _) -> @@ -231,8 +269,9 @@ schema("/bridges") -> description => ?DESC("desc_api1"), responses => #{ 200 => emqx_dashboard_swagger:schema_with_example( - array(emqx_bridge_schema:get_response()), - bridge_info_array_example(get)) + array(emqx_bridge_schema:get_response()), + bridge_info_array_example(get) + ) } }, post => #{ @@ -240,15 +279,15 @@ schema("/bridges") -> summary => <<"Create Bridge">>, description => ?DESC("desc_api2"), 'requestBody' => emqx_dashboard_swagger:schema_with_examples( - emqx_bridge_schema:post_request(), - bridge_info_examples(post)), + emqx_bridge_schema:post_request(), + bridge_info_examples(post) + ), responses => #{ 201 => get_response_body_schema(), 400 => error_schema('ALREADY_EXISTS', "Bridge already exists") } } }; - schema("/bridges/:id") -> #{ 'operationId' => '/bridges/:id', @@ -268,8 +307,9 @@ schema("/bridges/:id") -> description => ?DESC("desc_api4"), parameters => [param_path_id()], 'requestBody' => emqx_dashboard_swagger:schema_with_examples( - emqx_bridge_schema:put_request(), - bridge_info_examples(put)), + emqx_bridge_schema:put_request(), + bridge_info_examples(put) + ), responses => #{ 200 => get_response_body_schema(), 404 => error_schema('NOT_FOUND', "Bridge not found"), @@ 
-287,7 +327,6 @@ schema("/bridges/:id") -> } } }; - schema("/bridges/:id/reset_metrics") -> #{ 'operationId' => '/bridges/:id/reset_metrics', @@ -319,7 +358,6 @@ schema("/bridges/:id/operation/:operation") -> } } }; - schema("/nodes/:node/bridges/:id/operation/:operation") -> #{ 'operationId' => '/nodes/:node/bridges/:id/operation/:operation', @@ -336,7 +374,6 @@ schema("/nodes/:node/bridges/:id/operation/:operation") -> 200 => <<"Operation success">>, 400 => error_schema('INVALID_ID', "Bad bridge ID"), 403 => error_schema('FORBIDDEN_REQUEST', "forbidden operation") - } } }. @@ -353,15 +390,18 @@ schema("/nodes/:node/bridges/:id/operation/:operation") -> end end; '/bridges'(get, _Params) -> - {200, zip_bridges([[format_resp(Data) || Data <- emqx_bridge_proto_v1:list_bridges(Node)] - || Node <- mria_mnesia:running_nodes()])}. + {200, + zip_bridges([ + [format_resp(Data) || Data <- emqx_bridge_proto_v1:list_bridges(Node)] + || Node <- mria_mnesia:running_nodes() + ])}. '/bridges/:id'(get, #{bindings := #{id := Id}}) -> ?TRY_PARSE_ID(Id, lookup_from_all_nodes(BridgeType, BridgeName, 200)); - '/bridges/:id'(put, #{bindings := #{id := Id}, body := Conf0}) -> Conf = filter_out_request_body(Conf0), - ?TRY_PARSE_ID(Id, + ?TRY_PARSE_ID( + Id, case emqx_bridge:lookup(BridgeType, BridgeName) of {ok, _} -> case ensure_bridge_created(BridgeType, BridgeName, Conf) of @@ -371,24 +411,31 @@ schema("/nodes/:node/bridges/:id/operation/:operation") -> {400, Error} end; {error, not_found} -> - {404, error_msg('NOT_FOUND',<<"bridge not found">>)} - end); - + {404, error_msg('NOT_FOUND', <<"bridge not found">>)} + end + ); '/bridges/:id'(delete, #{bindings := #{id := Id}}) -> - ?TRY_PARSE_ID(Id, - case emqx_conf:remove(emqx_bridge:config_key_path() ++ [BridgeType, BridgeName], - #{override_to => cluster}) of + ?TRY_PARSE_ID( + Id, + case + emqx_conf:remove( + emqx_bridge:config_key_path() ++ [BridgeType, BridgeName], + #{override_to => cluster} + ) + of {ok, _} -> {204}; - {error, 
Reason} -> - {500, error_msg('INTERNAL_ERROR', Reason)} - end). + {error, Reason} -> {500, error_msg('INTERNAL_ERROR', Reason)} + end + ). '/bridges/:id/reset_metrics'(put, #{bindings := #{id := Id}}) -> - ?TRY_PARSE_ID(Id, + ?TRY_PARSE_ID( + Id, case emqx_bridge:reset_metrics(emqx_bridge:resource_id(BridgeType, BridgeName)) of ok -> {200, <<"Reset success">>}; Reason -> {400, error_msg('BAD_REQUEST', Reason)} - end). + end + ). lookup_from_all_nodes(BridgeType, BridgeName, SuccCode) -> Nodes = mria_mnesia:running_nodes(), @@ -407,40 +454,58 @@ lookup_from_local_node(BridgeType, BridgeName) -> Error -> Error end. -'/bridges/:id/operation/:operation'(post, #{bindings := - #{id := Id, operation := Op}}) -> - ?TRY_PARSE_ID(Id, case operation_func(Op) of - invalid -> {400, error_msg('BAD_REQUEST', <<"invalid operation">>)}; - OperFunc when OperFunc == enable; OperFunc == disable -> - case emqx_conf:update(emqx_bridge:config_key_path() ++ [BridgeType, BridgeName], - {OperFunc, BridgeType, BridgeName}, #{override_to => cluster}) of - {ok, _} -> {200}; - {error, {pre_config_update, _, bridge_not_found}} -> - {404, error_msg('NOT_FOUND', <<"bridge not found">>)}; - {error, Reason} -> - {500, error_msg('INTERNAL_ERROR', Reason)} - end; - OperFunc -> - Nodes = mria_mnesia:running_nodes(), - operation_to_all_nodes(Nodes, OperFunc, BridgeType, BridgeName) - end). 
+'/bridges/:id/operation/:operation'(post, #{ + bindings := + #{id := Id, operation := Op} +}) -> + ?TRY_PARSE_ID( + Id, + case operation_func(Op) of + invalid -> + {400, error_msg('BAD_REQUEST', <<"invalid operation">>)}; + OperFunc when OperFunc == enable; OperFunc == disable -> + case + emqx_conf:update( + emqx_bridge:config_key_path() ++ [BridgeType, BridgeName], + {OperFunc, BridgeType, BridgeName}, + #{override_to => cluster} + ) + of + {ok, _} -> + {200}; + {error, {pre_config_update, _, bridge_not_found}} -> + {404, error_msg('NOT_FOUND', <<"bridge not found">>)}; + {error, Reason} -> + {500, error_msg('INTERNAL_ERROR', Reason)} + end; + OperFunc -> + Nodes = mria_mnesia:running_nodes(), + operation_to_all_nodes(Nodes, OperFunc, BridgeType, BridgeName) + end + ). -'/nodes/:node/bridges/:id/operation/:operation'(post, #{bindings := - #{id := Id, operation := Op}}) -> - ?TRY_PARSE_ID(Id, case operation_func(Op) of - invalid -> {400, error_msg('BAD_REQUEST', <<"invalid operation">>)}; - OperFunc when OperFunc == restart; OperFunc == stop -> - ConfMap = emqx:get_config([bridges, BridgeType, BridgeName]), - case maps:get(enable, ConfMap, false) of - false -> {403, error_msg('FORBIDDEN_REQUEST', <<"forbidden operation">>)}; - true -> - case emqx_bridge:OperFunc(BridgeType, BridgeName) of - ok -> {200}; - {error, Reason} -> - {500, error_msg('INTERNAL_ERROR', Reason)} - end - end - end). 
+'/nodes/:node/bridges/:id/operation/:operation'(post, #{ + bindings := + #{id := Id, operation := Op} +}) -> + ?TRY_PARSE_ID( + Id, + case operation_func(Op) of + invalid -> + {400, error_msg('BAD_REQUEST', <<"invalid operation">>)}; + OperFunc when OperFunc == restart; OperFunc == stop -> + ConfMap = emqx:get_config([bridges, BridgeType, BridgeName]), + case maps:get(enable, ConfMap, false) of + false -> + {403, error_msg('FORBIDDEN_REQUEST', <<"forbidden operation">>)}; + true -> + case emqx_bridge:OperFunc(BridgeType, BridgeName) of + ok -> {200}; + {error, Reason} -> {500, error_msg('INTERNAL_ERROR', Reason)} + end + end + end + ). operation_func(<<"stop">>) -> stop; operation_func(<<"restart">>) -> restart; @@ -449,10 +514,11 @@ operation_func(<<"disable">>) -> disable; operation_func(_) -> invalid. operation_to_all_nodes(Nodes, OperFunc, BridgeType, BridgeName) -> - RpcFunc = case OperFunc of - restart -> restart_bridges_to_all_nodes; - stop -> stop_bridges_to_all_nodes - end, + RpcFunc = + case OperFunc of + restart -> restart_bridges_to_all_nodes; + stop -> stop_bridges_to_all_nodes + end, case is_ok(emqx_bridge_proto_v1:RpcFunc(Nodes, BridgeType, BridgeName)) of {ok, _} -> {200}; @@ -461,48 +527,70 @@ operation_to_all_nodes(Nodes, OperFunc, BridgeType, BridgeName) -> end. ensure_bridge_created(BridgeType, BridgeName, Conf) -> - case emqx_conf:update(emqx_bridge:config_key_path() ++ [BridgeType, BridgeName], - Conf, #{override_to => cluster}) of + case + emqx_conf:update( + emqx_bridge:config_key_path() ++ [BridgeType, BridgeName], + Conf, + #{override_to => cluster} + ) + of {ok, _} -> ok; - {error, Reason} -> - {error, error_msg('BAD_REQUEST', Reason)} + {error, Reason} -> {error, error_msg('BAD_REQUEST', Reason)} end. 
zip_bridges([BridgesFirstNode | _] = BridgesAllNodes) -> - lists:foldl(fun(#{type := Type, name := Name}, Acc) -> + lists:foldl( + fun(#{type := Type, name := Name}, Acc) -> Bridges = pick_bridges_by_id(Type, Name, BridgesAllNodes), [format_bridge_info(Bridges) | Acc] - end, [], BridgesFirstNode). + end, + [], + BridgesFirstNode + ). pick_bridges_by_id(Type, Name, BridgesAllNodes) -> - lists:foldl(fun(BridgesOneNode, Acc) -> - case [Bridge || Bridge = #{type := Type0, name := Name0} <- BridgesOneNode, - Type0 == Type, Name0 == Name] of - [BridgeInfo] -> [BridgeInfo | Acc]; + lists:foldl( + fun(BridgesOneNode, Acc) -> + case + [ + Bridge + || Bridge = #{type := Type0, name := Name0} <- BridgesOneNode, + Type0 == Type, + Name0 == Name + ] + of + [BridgeInfo] -> + [BridgeInfo | Acc]; [] -> - ?SLOG(warning, #{msg => "bridge_inconsistent_in_cluster", - bridge => emqx_bridge:bridge_id(Type, Name)}), + ?SLOG(warning, #{ + msg => "bridge_inconsistent_in_cluster", + bridge => emqx_bridge:bridge_id(Type, Name) + }), Acc end - end, [], BridgesAllNodes). + end, + [], + BridgesAllNodes + ). format_bridge_info([FirstBridge | _] = Bridges) -> Res = maps:remove(node, FirstBridge), NodeStatus = collect_status(Bridges), NodeMetrics = collect_metrics(Bridges), - Res#{ status => aggregate_status(NodeStatus) - , node_status => NodeStatus - , metrics => aggregate_metrics(NodeMetrics) - , node_metrics => NodeMetrics - }. + Res#{ + status => aggregate_status(NodeStatus), + node_status => NodeStatus, + metrics => aggregate_metrics(NodeMetrics), + node_metrics => NodeMetrics + }. collect_status(Bridges) -> [maps:with([node, status], B) || B <- Bridges]. 
aggregate_status(AllStatus) -> - Head = fun ([A | _]) -> A end, + Head = fun([A | _]) -> A end, HeadVal = maps:get(status, Head(AllStatus), connecting), - AllRes = lists:all(fun (#{status := Val}) -> Val == HeadVal end, AllStatus), + AllRes = lists:all(fun(#{status := Val}) -> Val == HeadVal end, AllStatus), case AllRes of true -> HeadVal; false -> inconsistent @@ -512,15 +600,31 @@ collect_metrics(Bridges) -> [maps:with([node, metrics], B) || B <- Bridges]. aggregate_metrics(AllMetrics) -> - InitMetrics = ?METRICS(0,0,0,0,0,0), - lists:foldl(fun(#{metrics := ?metrics(Match1, Succ1, Failed1, Rate1, Rate5m1, RateMax1)}, - ?metrics(Match0, Succ0, Failed0, Rate0, Rate5m0, RateMax0)) -> - ?METRICS(Match1 + Match0, Succ1 + Succ0, Failed1 + Failed0, - Rate1 + Rate0, Rate5m1 + Rate5m0, RateMax1 + RateMax0) - end, InitMetrics, AllMetrics). + InitMetrics = ?METRICS(0, 0, 0, 0, 0, 0), + lists:foldl( + fun( + #{metrics := ?metrics(Match1, Succ1, Failed1, Rate1, Rate5m1, RateMax1)}, + ?metrics(Match0, Succ0, Failed0, Rate0, Rate5m0, RateMax0) + ) -> + ?METRICS( + Match1 + Match0, + Succ1 + Succ0, + Failed1 + Failed0, + Rate1 + Rate0, + Rate5m1 + Rate5m0, + RateMax1 + RateMax0 + ) + end, + InitMetrics, + AllMetrics + ). -format_resp(#{type := Type, name := BridgeName, raw_config := RawConf, - resource_data := #{status := Status, metrics := Metrics}}) -> +format_resp(#{ + type := Type, + name := BridgeName, + raw_config := RawConf, + resource_data := #{status := Status, metrics := Metrics} +}) -> RawConfFull = fill_defaults(Type, RawConf), RawConfFull#{ type => Type, @@ -531,10 +635,11 @@ format_resp(#{type := Type, name := BridgeName, raw_config := RawConf, }. 
format_metrics(#{ - counters := #{failed := Failed, exception := Ex, matched := Match, success := Succ}, - rate := #{ - matched := #{current := Rate, last5m := Rate5m, max := RateMax} - } }) -> + counters := #{failed := Failed, exception := Ex, matched := Match, success := Succ}, + rate := #{ + matched := #{current := Rate, last5m := Rate5m, max := RateMax} + } +}) -> ?METRICS(Match, Succ, Failed + Ex, Rate, Rate5m, RateMax). fill_defaults(Type, RawConf) -> @@ -551,14 +656,31 @@ unpack_bridge_conf(Type, PackedConf) -> RawConf. is_ok(ResL) -> - case lists:filter(fun({ok, _}) -> false; (ok) -> false; (_) -> true end, ResL) of + case + lists:filter( + fun + ({ok, _}) -> false; + (ok) -> false; + (_) -> true + end, + ResL + ) + of [] -> {ok, [Res || {ok, Res} <- ResL]}; ErrL -> {error, ErrL} end. filter_out_request_body(Conf) -> - ExtraConfs = [<<"id">>, <<"type">>, <<"name">>, <<"status">>, <<"node_status">>, - <<"node_metrics">>, <<"metrics">>, <<"node">>], + ExtraConfs = [ + <<"id">>, + <<"type">>, + <<"name">>, + <<"status">>, + <<"node_status">>, + <<"node_metrics">>, + <<"metrics">>, + <<"node">> + ], maps:without(ExtraConfs, Conf). error_msg(Code, Msg) when is_binary(Msg) -> diff --git a/apps/emqx_bridge/src/emqx_bridge_app.erl b/apps/emqx_bridge/src/emqx_bridge_app.erl index 99b2c4a84..3fc4d57ba 100644 --- a/apps/emqx_bridge/src/emqx_bridge_app.erl +++ b/apps/emqx_bridge/src/emqx_bridge_app.erl @@ -19,9 +19,10 @@ -export([start/2, stop/1]). --export([ pre_config_update/3 - , post_config_update/5 - ]). +-export([ + pre_config_update/3, + post_config_update/5 +]). -define(TOP_LELVE_HDLR_PATH, (emqx_bridge:config_key_path())). -define(LEAF_NODE_HDLR_PATH, (emqx_bridge:config_key_path() ++ ['?', '?'])). 
diff --git a/apps/emqx_bridge/src/emqx_bridge_http_schema.erl b/apps/emqx_bridge/src/emqx_bridge_http_schema.erl index 3bf4e8160..ff1ab2c05 100644 --- a/apps/emqx_bridge/src/emqx_bridge_http_schema.erl +++ b/apps/emqx_bridge/src/emqx_bridge_http_schema.erl @@ -15,45 +15,66 @@ roots() -> []. fields("config") -> basic_config() ++ - [ {url, mk(binary(), - #{ required => true - , desc => ?DESC("config_url") - })} - , {local_topic, mk(binary(), - #{ desc => ?DESC("config_local_topic") - })} - , {method, mk(method(), - #{ default => post - , desc => ?DESC("config_method") - })} - , {headers, mk(map(), - #{ default => #{ - <<"accept">> => <<"application/json">>, - <<"cache-control">> => <<"no-cache">>, - <<"connection">> => <<"keep-alive">>, - <<"content-type">> => <<"application/json">>, - <<"keep-alive">> => <<"timeout=5">>} - , desc => ?DESC("config_headers") - }) - } - , {body, mk(binary(), - #{ default => <<"${payload}">> - , desc => ?DESC("config_body") - })} - , {request_timeout, mk(emqx_schema:duration_ms(), - #{ default => <<"15s">> - , desc => ?DESC("config_request_timeout") - })} - ]; - + [ + {url, + mk( + binary(), + #{ + required => true, + desc => ?DESC("config_url") + } + )}, + {local_topic, + mk( + binary(), + #{desc => ?DESC("config_local_topic")} + )}, + {method, + mk( + method(), + #{ + default => post, + desc => ?DESC("config_method") + } + )}, + {headers, + mk( + map(), + #{ + default => #{ + <<"accept">> => <<"application/json">>, + <<"cache-control">> => <<"no-cache">>, + <<"connection">> => <<"keep-alive">>, + <<"content-type">> => <<"application/json">>, + <<"keep-alive">> => <<"timeout=5">> + }, + desc => ?DESC("config_headers") + } + )}, + {body, + mk( + binary(), + #{ + default => <<"${payload}">>, + desc => ?DESC("config_body") + } + )}, + {request_timeout, + mk( + emqx_schema:duration_ms(), + #{ + default => <<"15s">>, + desc => ?DESC("config_request_timeout") + } + )} + ]; fields("post") -> - [ type_field() - , name_field() + [ + 
type_field(), + name_field() ] ++ fields("config"); - fields("put") -> fields("config"); - fields("get") -> emqx_bridge_schema:metrics_status_fields() ++ fields("post"). @@ -65,32 +86,47 @@ desc(_) -> undefined. basic_config() -> - [ {enable, - mk(boolean(), - #{ desc => ?DESC("config_enable") - , default => true - })} - , {direction, - mk(egress, - #{ desc => ?DESC("config_direction") - , default => egress - })} - ] - ++ proplists:delete(base_url, emqx_connector_http:fields(config)). + [ + {enable, + mk( + boolean(), + #{ + desc => ?DESC("config_enable"), + default => true + } + )}, + {direction, + mk( + egress, + #{ + desc => ?DESC("config_direction"), + default => egress + } + )} + ] ++ + proplists:delete(base_url, emqx_connector_http:fields(config)). %%====================================================================================== type_field() -> - {type, mk(http, - #{ required => true - , desc => ?DESC("desc_type") - })}. + {type, + mk( + http, + #{ + required => true, + desc => ?DESC("desc_type") + } + )}. name_field() -> - {name, mk(binary(), - #{ required => true - , desc => ?DESC("desc_name") - })}. + {name, + mk( + binary(), + #{ + required => true, + desc => ?DESC("desc_name") + } + )}. method() -> enum([post, put, get, delete]). diff --git a/apps/emqx_bridge/src/emqx_bridge_monitor.erl b/apps/emqx_bridge/src/emqx_bridge_monitor.erl index 8de216974..b9a22bb8c 100644 --- a/apps/emqx_bridge/src/emqx_bridge_monitor.erl +++ b/apps/emqx_bridge/src/emqx_bridge_monitor.erl @@ -22,17 +22,20 @@ -include_lib("snabbkaffe/include/snabbkaffe.hrl"). %% API functions --export([ start_link/0 - , ensure_all_started/1 - ]). +-export([ + start_link/0, + ensure_all_started/1 +]). %% gen_server callbacks --export([init/1, - handle_call/3, - handle_cast/2, - handle_info/2, - terminate/2, - code_change/3]). +-export([ + init/1, + handle_call/3, + handle_cast/2, + handle_info/2, + terminate/2, + code_change/3 +]). -record(state, {}). 
@@ -52,7 +55,6 @@ handle_call(_Request, _From, State) -> handle_cast({start_and_monitor, Configs}, State) -> ok = load_bridges(Configs), {noreply, State}; - handle_cast(_Msg, State) -> {noreply, State}. @@ -67,13 +69,22 @@ code_change(_OldVsn, State, _Extra) -> %%============================================================================ load_bridges(Configs) -> - lists:foreach(fun({Type, NamedConf}) -> - lists:foreach(fun({Name, Conf}) -> + lists:foreach( + fun({Type, NamedConf}) -> + lists:foreach( + fun({Name, Conf}) -> _Res = emqx_bridge:create(Type, Name, Conf), - ?tp(emqx_bridge_monitor_loaded_bridge, - #{ type => Type - , name => Name - , res => _Res - }) - end, maps:to_list(NamedConf)) - end, maps:to_list(Configs)). + ?tp( + emqx_bridge_monitor_loaded_bridge, + #{ + type => Type, + name => Name, + res => _Res + } + ) + end, + maps:to_list(NamedConf) + ) + end, + maps:to_list(Configs) + ). diff --git a/apps/emqx_bridge/src/emqx_bridge_mqtt_schema.erl b/apps/emqx_bridge/src/emqx_bridge_mqtt_schema.erl index 304df779f..15e24024a 100644 --- a/apps/emqx_bridge/src/emqx_bridge_mqtt_schema.erl +++ b/apps/emqx_bridge/src/emqx_bridge_mqtt_schema.erl @@ -12,31 +12,27 @@ roots() -> []. 
fields("ingress") -> - [ emqx_bridge_schema:direction_field(ingress, emqx_connector_mqtt_schema:ingress_desc()) - ] - ++ emqx_bridge_schema:common_bridge_fields() - ++ proplists:delete(hookpoint, emqx_connector_mqtt_schema:fields("ingress")); - + [emqx_bridge_schema:direction_field(ingress, emqx_connector_mqtt_schema:ingress_desc())] ++ + emqx_bridge_schema:common_bridge_fields() ++ + proplists:delete(hookpoint, emqx_connector_mqtt_schema:fields("ingress")); fields("egress") -> - [ emqx_bridge_schema:direction_field(egress, emqx_connector_mqtt_schema:egress_desc()) - ] - ++ emqx_bridge_schema:common_bridge_fields() - ++ emqx_connector_mqtt_schema:fields("egress"); - + [emqx_bridge_schema:direction_field(egress, emqx_connector_mqtt_schema:egress_desc())] ++ + emqx_bridge_schema:common_bridge_fields() ++ + emqx_connector_mqtt_schema:fields("egress"); fields("post_ingress") -> - [ type_field() - , name_field() + [ + type_field(), + name_field() ] ++ proplists:delete(enable, fields("ingress")); fields("post_egress") -> - [ type_field() - , name_field() + [ + type_field(), + name_field() ] ++ proplists:delete(enable, fields("egress")); - fields("put_ingress") -> proplists:delete(enable, fields("ingress")); fields("put_egress") -> proplists:delete(enable, fields("egress")); - fields("get_ingress") -> emqx_bridge_schema:metrics_status_fields() ++ fields("post_ingress"); fields("get_egress") -> @@ -49,13 +45,21 @@ desc(_) -> %%====================================================================================== type_field() -> - {type, mk(mqtt, - #{ required => true - , desc => ?DESC("desc_type") - })}. + {type, + mk( + mqtt, + #{ + required => true, + desc => ?DESC("desc_type") + } + )}. name_field() -> - {name, mk(binary(), - #{ required => true - , desc => ?DESC("desc_name") - })}. + {name, + mk( + binary(), + #{ + required => true, + desc => ?DESC("desc_name") + } + )}. 
diff --git a/apps/emqx_bridge/src/emqx_bridge_schema.erl b/apps/emqx_bridge/src/emqx_bridge_schema.erl index fd5d68aa1..7c54652ef 100644 --- a/apps/emqx_bridge/src/emqx_bridge_schema.erl +++ b/apps/emqx_bridge/src/emqx_bridge_schema.erl @@ -7,15 +7,17 @@ -export([roots/0, fields/1, desc/1, namespace/0]). --export([ get_response/0 - , put_request/0 - , post_request/0 - ]). +-export([ + get_response/0, + put_request/0, + post_request/0 +]). --export([ common_bridge_fields/0 - , metrics_status_fields/0 - , direction_field/2 - ]). +-export([ + common_bridge_fields/0, + metrics_status_fields/0, + direction_field/2 +]). %%====================================================================================== %% Hocon Schema Definitions @@ -34,43 +36,68 @@ post_request() -> http_schema("post"). http_schema(Method) -> - Schemas = lists:flatmap(fun(Type) -> - [ref(schema_mod(Type), Method ++ "_ingress"), - ref(schema_mod(Type), Method ++ "_egress")] - end, ?CONN_TYPES), - hoconsc:union([ref(emqx_bridge_http_schema, Method) - | Schemas]). + Schemas = lists:flatmap( + fun(Type) -> + [ + ref(schema_mod(Type), Method ++ "_ingress"), + ref(schema_mod(Type), Method ++ "_egress") + ] + end, + ?CONN_TYPES + ), + hoconsc:union([ + ref(emqx_bridge_http_schema, Method) + | Schemas + ]). common_bridge_fields() -> - [ {enable, - mk(boolean(), - #{ desc => ?DESC("desc_enable") - , default => true - })} - , {connector, - mk(binary(), - #{ required => true - , example => <<"mqtt:my_mqtt_connector">> - , desc => ?DESC("desc_connector") - })} + [ + {enable, + mk( + boolean(), + #{ + desc => ?DESC("desc_enable"), + default => true + } + )}, + {connector, + mk( + binary(), + #{ + required => true, + example => <<"mqtt:my_mqtt_connector">>, + desc => ?DESC("desc_connector") + } + )} ]. 
metrics_status_fields() -> - [ {"metrics", mk(ref(?MODULE, "metrics"), #{desc => ?DESC("desc_metrics")})} - , {"node_metrics", mk(hoconsc:array(ref(?MODULE, "node_metrics")), - #{ desc => ?DESC("desc_node_metrics")})} - , {"status", mk(status(), #{desc => ?DESC("desc_status")})} - , {"node_status", mk(hoconsc:array(ref(?MODULE, "node_status")), - #{ desc => ?DESC("desc_node_status")})} + [ + {"metrics", mk(ref(?MODULE, "metrics"), #{desc => ?DESC("desc_metrics")})}, + {"node_metrics", + mk( + hoconsc:array(ref(?MODULE, "node_metrics")), + #{desc => ?DESC("desc_node_metrics")} + )}, + {"status", mk(status(), #{desc => ?DESC("desc_status")})}, + {"node_status", + mk( + hoconsc:array(ref(?MODULE, "node_status")), + #{desc => ?DESC("desc_node_status")} + )} ]. direction_field(Dir, Desc) -> - {direction, mk(Dir, - #{ required => true - , default => egress - , desc => "The direction of the bridge. Can be one of 'ingress' or 'egress'.
" - ++ Desc - })}. + {direction, + mk( + Dir, + #{ + required => true, + default => egress, + desc => "The direction of the bridge. Can be one of 'ingress' or 'egress'.
" ++ + Desc + } + )}. %%====================================================================================== %% For config files @@ -80,31 +107,49 @@ namespace() -> "bridge". roots() -> [bridges]. fields(bridges) -> - [{http, mk(hoconsc:map(name, ref(emqx_bridge_http_schema, "config")), - #{desc => ?DESC("bridges_http")})}] - ++ [{T, mk(hoconsc:map(name, hoconsc:union([ ref(schema_mod(T), "ingress") - , ref(schema_mod(T), "egress") - ])), - #{desc => ?DESC("bridges_name")})} || T <- ?CONN_TYPES]; - + [ + {http, + mk( + hoconsc:map(name, ref(emqx_bridge_http_schema, "config")), + #{desc => ?DESC("bridges_http")} + )} + ] ++ + [ + {T, + mk( + hoconsc:map( + name, + hoconsc:union([ + ref(schema_mod(T), "ingress"), + ref(schema_mod(T), "egress") + ]) + ), + #{desc => ?DESC("bridges_name")} + )} + || T <- ?CONN_TYPES + ]; fields("metrics") -> - [ {"matched", mk(integer(), #{desc => ?DESC("metric_matched")})} - , {"success", mk(integer(), #{desc => ?DESC("metric_success")})} - , {"failed", mk(integer(), #{desc => ?DESC("metric_failed")})} - , {"rate", mk(float(), #{desc => ?DESC("metric_rate")})} - , {"rate_max", mk(float(), #{desc => ?DESC("metric_rate_max")})} - , {"rate_last5m", mk(float(), - #{desc => ?DESC("metric_rate_last5m")})} + [ + {"matched", mk(integer(), #{desc => ?DESC("metric_matched")})}, + {"success", mk(integer(), #{desc => ?DESC("metric_success")})}, + {"failed", mk(integer(), #{desc => ?DESC("metric_failed")})}, + {"rate", mk(float(), #{desc => ?DESC("metric_rate")})}, + {"rate_max", mk(float(), #{desc => ?DESC("metric_rate_max")})}, + {"rate_last5m", + mk( + float(), + #{desc => ?DESC("metric_rate_last5m")} + )} ]; - fields("node_metrics") -> - [ node_name() - , {"metrics", mk(ref(?MODULE, "metrics"), #{})} + [ + node_name(), + {"metrics", mk(ref(?MODULE, "metrics"), #{})} ]; - fields("node_status") -> - [ node_name() - , {"status", mk(status(), #{})} + [ + node_name(), + {"status", mk(status(), #{})} ]. 
desc(bridges) -> diff --git a/apps/emqx_bridge/src/emqx_bridge_sup.erl b/apps/emqx_bridge/src/emqx_bridge_sup.erl index 0c73ac585..cce3a066b 100644 --- a/apps/emqx_bridge/src/emqx_bridge_sup.erl +++ b/apps/emqx_bridge/src/emqx_bridge_sup.erl @@ -27,15 +27,19 @@ start_link() -> supervisor:start_link({local, ?SERVER}, ?MODULE, []). init([]) -> - SupFlags = #{strategy => one_for_one, - intensity => 10, - period => 10}, + SupFlags = #{ + strategy => one_for_one, + intensity => 10, + period => 10 + }, ChildSpecs = [ - #{id => emqx_bridge_monitor, - start => {emqx_bridge_monitor, start_link, []}, - restart => permanent, - type => worker, - modules => [emqx_bridge_monitor]} + #{ + id => emqx_bridge_monitor, + start => {emqx_bridge_monitor, start_link, []}, + restart => permanent, + type => worker, + modules => [emqx_bridge_monitor] + } ], {ok, {SupFlags, ChildSpecs}}. diff --git a/apps/emqx_bridge/src/proto/emqx_bridge_proto_v1.erl b/apps/emqx_bridge/src/proto/emqx_bridge_proto_v1.erl index 021074a1c..75060c7c1 100644 --- a/apps/emqx_bridge/src/proto/emqx_bridge_proto_v1.erl +++ b/apps/emqx_bridge/src/proto/emqx_bridge_proto_v1.erl @@ -18,13 +18,14 @@ -behaviour(emqx_bpapi). --export([ introduced_in/0 +-export([ + introduced_in/0, - , list_bridges/1 - , lookup_from_all_nodes/3 - , restart_bridges_to_all_nodes/3 - , stop_bridges_to_all_nodes/3 - ]). + list_bridges/1, + lookup_from_all_nodes/3, + restart_bridges_to_all_nodes/3, + stop_bridges_to_all_nodes/3 +]). -include_lib("emqx/include/bpapi.hrl"). @@ -40,19 +41,34 @@ list_bridges(Node) -> -type key() :: atom() | binary() | [byte()]. -spec restart_bridges_to_all_nodes([node()], key(), key()) -> - emqx_rpc:erpc_multicall(). + emqx_rpc:erpc_multicall(). restart_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) -> - erpc:multicall(Nodes, emqx_bridge, restart, - [BridgeType, BridgeName], ?TIMEOUT). + erpc:multicall( + Nodes, + emqx_bridge, + restart, + [BridgeType, BridgeName], + ?TIMEOUT + ). 
-spec stop_bridges_to_all_nodes([node()], key(), key()) -> - emqx_rpc:erpc_multicall(). + emqx_rpc:erpc_multicall(). stop_bridges_to_all_nodes(Nodes, BridgeType, BridgeName) -> - erpc:multicall(Nodes, emqx_bridge, stop, - [BridgeType, BridgeName], ?TIMEOUT). + erpc:multicall( + Nodes, + emqx_bridge, + stop, + [BridgeType, BridgeName], + ?TIMEOUT + ). -spec lookup_from_all_nodes([node()], key(), key()) -> - emqx_rpc:erpc_multicall(). + emqx_rpc:erpc_multicall(). lookup_from_all_nodes(Nodes, BridgeType, BridgeName) -> - erpc:multicall(Nodes, emqx_bridge_api, lookup_from_local_node, - [BridgeType, BridgeName], ?TIMEOUT). + erpc:multicall( + Nodes, + emqx_bridge_api, + lookup_from_local_node, + [BridgeType, BridgeName], + ?TIMEOUT + ). diff --git a/apps/emqx_bridge/test/emqx_bridge_SUITE.erl b/apps/emqx_bridge/test/emqx_bridge_SUITE.erl index 9aa98de7c..d49c907b7 100644 --- a/apps/emqx_bridge/test/emqx_bridge_SUITE.erl +++ b/apps/emqx_bridge/test/emqx_bridge_SUITE.erl @@ -23,7 +23,7 @@ -include_lib("snabbkaffe/include/snabbkaffe.hrl"). all() -> - emqx_common_test_helpers:all(?MODULE). + emqx_common_test_helpers:all(?MODULE). init_per_suite(Config) -> %% to avoid inter-suite dependencies @@ -32,8 +32,12 @@ init_per_suite(Config) -> Config. end_per_suite(_Config) -> - emqx_common_test_helpers:stop_apps([emqx, emqx_bridge, - emqx_resource, emqx_connector]). + emqx_common_test_helpers:stop_apps([ + emqx, + emqx_bridge, + emqx_resource, + emqx_connector + ]). 
init_per_testcase(t_get_basic_usage_info_1, Config) -> setup_fake_telemetry_data(), @@ -43,13 +47,15 @@ init_per_testcase(_TestCase, Config) -> end_per_testcase(t_get_basic_usage_info_1, _Config) -> lists:foreach( - fun({BridgeType, BridgeName}) -> - ok = emqx_bridge:remove(BridgeType, BridgeName) - end, - [ {http, <<"basic_usage_info_http">>} - , {http, <<"basic_usage_info_http_disabled">>} - , {mqtt, <<"basic_usage_info_mqtt">>} - ]), + fun({BridgeType, BridgeName}) -> + ok = emqx_bridge:remove(BridgeType, BridgeName) + end, + [ + {http, <<"basic_usage_info_http">>}, + {http, <<"basic_usage_info_http_disabled">>}, + {mqtt, <<"basic_usage_info_mqtt">>} + ] + ), ok = emqx_config:delete_override_conf_files(), ok = emqx_config:put([bridges], #{}), ok = emqx_config:put_raw([bridges], #{}), @@ -59,53 +65,68 @@ end_per_testcase(_TestCase, _Config) -> t_get_basic_usage_info_0(_Config) -> ?assertEqual( - #{ num_bridges => 0 - , count_by_type => #{} + #{ + num_bridges => 0, + count_by_type => #{} }, - emqx_bridge:get_basic_usage_info()). + emqx_bridge:get_basic_usage_info() + ). t_get_basic_usage_info_1(_Config) -> BasicUsageInfo = emqx_bridge:get_basic_usage_info(), ?assertEqual( - #{ num_bridges => 2 - , count_by_type => #{ http => 1 - , mqtt => 1 - } + #{ + num_bridges => 2, + count_by_type => #{ + http => 1, + mqtt => 1 + } }, - BasicUsageInfo). + BasicUsageInfo + ). 
setup_fake_telemetry_data() -> ConnectorConf = - #{<<"connectors">> => - #{<<"mqtt">> => #{<<"my_mqtt_connector">> => - #{ server => "127.0.0.1:1883" }}}}, - MQTTConfig = #{ connector => <<"mqtt:my_mqtt_connector">> - , enable => true - , direction => ingress - , remote_topic => <<"aws/#">> - , remote_qos => 1 - }, - HTTPConfig = #{ url => <<"http://localhost:9901/messages/${topic}">> - , enable => true - , direction => egress - , local_topic => "emqx_http/#" - , method => post - , body => <<"${payload}">> - , headers => #{} - , request_timeout => "15s" - }, - Conf = - #{ <<"bridges">> => - #{ <<"http">> => - #{ <<"basic_usage_info_http">> => HTTPConfig - , <<"basic_usage_info_http_disabled">> => - HTTPConfig#{enable => false} - } - , <<"mqtt">> => - #{ <<"basic_usage_info_mqtt">> => MQTTConfig - } + #{ + <<"connectors">> => + #{ + <<"mqtt">> => #{ + <<"my_mqtt_connector">> => + #{server => "127.0.0.1:1883"} + } } - }, + }, + MQTTConfig = #{ + connector => <<"mqtt:my_mqtt_connector">>, + enable => true, + direction => ingress, + remote_topic => <<"aws/#">>, + remote_qos => 1 + }, + HTTPConfig = #{ + url => <<"http://localhost:9901/messages/${topic}">>, + enable => true, + direction => egress, + local_topic => "emqx_http/#", + method => post, + body => <<"${payload}">>, + headers => #{}, + request_timeout => "15s" + }, + Conf = + #{ + <<"bridges">> => + #{ + <<"http">> => + #{ + <<"basic_usage_info_http">> => HTTPConfig, + <<"basic_usage_info_http_disabled">> => + HTTPConfig#{enable => false} + }, + <<"mqtt">> => + #{<<"basic_usage_info_mqtt">> => MQTTConfig} + } + }, ok = emqx_common_test_helpers:load_config(emqx_connector_schema, ConnectorConf), ok = emqx_common_test_helpers:load_config(emqx_bridge_schema, Conf), diff --git a/apps/emqx_bridge/test/emqx_bridge_api_SUITE.erl b/apps/emqx_bridge/test/emqx_bridge_api_SUITE.erl index 5fea93a94..5142ee5c1 100644 --- a/apps/emqx_bridge/test/emqx_bridge_api_SUITE.erl +++ b/apps/emqx_bridge/test/emqx_bridge_api_SUITE.erl @@ 
-25,11 +25,15 @@ -define(CONF_DEFAULT, <<"bridges: {}">>). -define(BRIDGE_TYPE, <<"http">>). -define(BRIDGE_NAME, <<"test_bridge">>). --define(URL(PORT, PATH), list_to_binary( - io_lib:format("http://localhost:~s/~s", - [integer_to_list(PORT), PATH]))). --define(HTTP_BRIDGE(URL, TYPE, NAME), -#{ +-define(URL(PORT, PATH), + list_to_binary( + io_lib:format( + "http://localhost:~s/~s", + [integer_to_list(PORT), PATH] + ) + ) +). +-define(HTTP_BRIDGE(URL, TYPE, NAME), #{ <<"type">> => TYPE, <<"name">> => NAME, <<"url">> => URL, @@ -40,7 +44,6 @@ <<"headers">> => #{ <<"content-type">> => <<"application/json">> } - }). all() -> @@ -50,15 +53,17 @@ groups() -> []. suite() -> - [{timetrap,{seconds,60}}]. + [{timetrap, {seconds, 60}}]. init_per_suite(Config) -> _ = application:load(emqx_conf), %% some testcases (may from other app) already get emqx_connector started _ = application:stop(emqx_resource), _ = application:stop(emqx_connector), - ok = emqx_common_test_helpers:start_apps([emqx_bridge, emqx_dashboard], - fun set_special_configs/1), + ok = emqx_common_test_helpers:start_apps( + [emqx_bridge, emqx_dashboard], + fun set_special_configs/1 + ), ok = emqx_common_test_helpers:load_config(emqx_bridge_schema, ?CONF_DEFAULT), Config. @@ -79,9 +84,12 @@ end_per_testcase(_, _Config) -> ok. clear_resources() -> - lists:foreach(fun(#{type := Type, name := Name}) -> + lists:foreach( + fun(#{type := Type, name := Name}) -> ok = emqx_bridge:remove(Type, Name) - end, emqx_bridge:list()). + end, + emqx_bridge:list() + ). %%------------------------------------------------------------------------------ %% HTTP server for testing @@ -95,12 +103,12 @@ start_http_server(HandleFun) -> end), receive {port, Port} -> Port - after - 2000 -> error({timeout, start_http_server}) + after 2000 -> error({timeout, start_http_server}) end. 
listen_on_random_port() -> - Min = 1024, Max = 65000, + Min = 1024, + Max = 65000, Port = rand:uniform(Max - Min) + Min, case gen_tcp:listen(Port, [{active, false}, {reuseaddr, true}, binary]) of {ok, Sock} -> {Port, Sock}; @@ -109,16 +117,18 @@ listen_on_random_port() -> loop(Sock, HandleFun, Parent) -> {ok, Conn} = gen_tcp:accept(Sock), - Handler = spawn(fun () -> HandleFun(Conn, Parent) end), + Handler = spawn(fun() -> HandleFun(Conn, Parent) end), gen_tcp:controlling_process(Conn, Handler), loop(Sock, HandleFun, Parent). make_response(CodeStr, Str) -> B = iolist_to_binary(Str), iolist_to_binary( - io_lib:fwrite( - "HTTP/1.0 ~s\r\nContent-Type: text/html\r\nContent-Length: ~p\r\n\r\n~s", - [CodeStr, size(B), B])). + io_lib:fwrite( + "HTTP/1.0 ~s\r\nContent-Type: text/html\r\nContent-Length: ~p\r\n\r\n~s", + [CodeStr, size(B), B] + ) + ). handle_fun_200_ok(Conn, Parent) -> case gen_tcp:recv(Conn, 0) of @@ -151,18 +161,22 @@ t_http_crud_apis(_) -> %% then we add a http bridge, using POST %% POST /bridges/ will create a bridge URL1 = ?URL(Port, "path1"), - {ok, 201, Bridge} = request(post, uri(["bridges"]), - ?HTTP_BRIDGE(URL1, ?BRIDGE_TYPE, ?BRIDGE_NAME)), + {ok, 201, Bridge} = request( + post, + uri(["bridges"]), + ?HTTP_BRIDGE(URL1, ?BRIDGE_TYPE, ?BRIDGE_NAME) + ), %ct:pal("---bridge: ~p", [Bridge]), - #{ <<"type">> := ?BRIDGE_TYPE - , <<"name">> := ?BRIDGE_NAME - , <<"status">> := _ - , <<"node_status">> := [_|_] - , <<"metrics">> := _ - , <<"node_metrics">> := [_|_] - , <<"url">> := URL1 - } = jsx:decode(Bridge), + #{ + <<"type">> := ?BRIDGE_TYPE, + <<"name">> := ?BRIDGE_NAME, + <<"status">> := _, + <<"node_status">> := [_ | _], + <<"metrics">> := _, + <<"node_metrics">> := [_ | _], + <<"url">> := URL1 + } = jsx:decode(Bridge), BridgeID = emqx_bridge:bridge_id(?BRIDGE_TYPE, ?BRIDGE_NAME), %% send an message to emqx and the message should be forwarded to the HTTP server @@ -170,49 +184,70 @@ t_http_crud_apis(_) -> 
emqx:publish(emqx_message:make(<<"emqx_http/1">>, Body)), ?assert( receive - {http_server, received, #{method := <<"POST">>, path := <<"/path1">>, - body := Body}} -> + {http_server, received, #{ + method := <<"POST">>, + path := <<"/path1">>, + body := Body + }} -> true; Msg -> ct:pal("error: http got unexpected request: ~p", [Msg]), false after 100 -> false - end), + end + ), %% update the request-path of the bridge URL2 = ?URL(Port, "path2"), - {ok, 200, Bridge2} = request(put, uri(["bridges", BridgeID]), - ?HTTP_BRIDGE(URL2, ?BRIDGE_TYPE, ?BRIDGE_NAME)), - ?assertMatch(#{ <<"type">> := ?BRIDGE_TYPE - , <<"name">> := ?BRIDGE_NAME - , <<"status">> := _ - , <<"node_status">> := [_|_] - , <<"metrics">> := _ - , <<"node_metrics">> := [_|_] - , <<"url">> := URL2 - }, jsx:decode(Bridge2)), + {ok, 200, Bridge2} = request( + put, + uri(["bridges", BridgeID]), + ?HTTP_BRIDGE(URL2, ?BRIDGE_TYPE, ?BRIDGE_NAME) + ), + ?assertMatch( + #{ + <<"type">> := ?BRIDGE_TYPE, + <<"name">> := ?BRIDGE_NAME, + <<"status">> := _, + <<"node_status">> := [_ | _], + <<"metrics">> := _, + <<"node_metrics">> := [_ | _], + <<"url">> := URL2 + }, + jsx:decode(Bridge2) + ), %% list all bridges again, assert Bridge2 is in it {ok, 200, Bridge2Str} = request(get, uri(["bridges"]), []), - ?assertMatch([#{ <<"type">> := ?BRIDGE_TYPE - , <<"name">> := ?BRIDGE_NAME - , <<"status">> := _ - , <<"node_status">> := [_|_] - , <<"metrics">> := _ - , <<"node_metrics">> := [_|_] - , <<"url">> := URL2 - }], jsx:decode(Bridge2Str)), + ?assertMatch( + [ + #{ + <<"type">> := ?BRIDGE_TYPE, + <<"name">> := ?BRIDGE_NAME, + <<"status">> := _, + <<"node_status">> := [_ | _], + <<"metrics">> := _, + <<"node_metrics">> := [_ | _], + <<"url">> := URL2 + } + ], + jsx:decode(Bridge2Str) + ), %% get the bridge by id {ok, 200, Bridge3Str} = request(get, uri(["bridges", BridgeID]), []), - ?assertMatch(#{ <<"type">> := ?BRIDGE_TYPE - , <<"name">> := ?BRIDGE_NAME - , <<"status">> := _ - , <<"node_status">> := [_|_] - , 
<<"metrics">> := _ - , <<"node_metrics">> := [_|_] - , <<"url">> := URL2 - }, jsx:decode(Bridge3Str)), + ?assertMatch( + #{ + <<"type">> := ?BRIDGE_TYPE, + <<"name">> := ?BRIDGE_NAME, + <<"status">> := _, + <<"node_status">> := [_ | _], + <<"metrics">> := _, + <<"node_metrics">> := [_ | _], + <<"url">> := URL2 + }, + jsx:decode(Bridge3Str) + ), %% send an message to emqx again, check the path has been changed emqx:publish(emqx_message:make(<<"emqx_http/1">>, Body)), @@ -225,25 +260,35 @@ t_http_crud_apis(_) -> false after 100 -> false - end), + end + ), %% delete the bridge {ok, 204, <<>>} = request(delete, uri(["bridges", BridgeID]), []), {ok, 200, <<"[]">>} = request(get, uri(["bridges"]), []), %% update a deleted bridge returns an error - {ok, 404, ErrMsg2} = request(put, uri(["bridges", BridgeID]), - ?HTTP_BRIDGE(URL2, ?BRIDGE_TYPE, ?BRIDGE_NAME)), + {ok, 404, ErrMsg2} = request( + put, + uri(["bridges", BridgeID]), + ?HTTP_BRIDGE(URL2, ?BRIDGE_TYPE, ?BRIDGE_NAME) + ), ?assertMatch( - #{ <<"code">> := _ - , <<"message">> := <<"bridge not found">> - }, jsx:decode(ErrMsg2)), + #{ + <<"code">> := _, + <<"message">> := <<"bridge not found">> + }, + jsx:decode(ErrMsg2) + ), ok. t_start_stop_bridges(_) -> - lists:foreach(fun(Type) -> + lists:foreach( + fun(Type) -> do_start_stop_bridges(Type) - end, [node, cluster]). + end, + [node, cluster] + ). 
do_start_stop_bridges(Type) -> %% assert we there's no bridges at first @@ -251,40 +296,40 @@ do_start_stop_bridges(Type) -> Port = start_http_server(fun handle_fun_200_ok/2), URL1 = ?URL(Port, "abc"), - {ok, 201, Bridge} = request(post, uri(["bridges"]), - ?HTTP_BRIDGE(URL1, ?BRIDGE_TYPE, ?BRIDGE_NAME)), + {ok, 201, Bridge} = request( + post, + uri(["bridges"]), + ?HTTP_BRIDGE(URL1, ?BRIDGE_TYPE, ?BRIDGE_NAME) + ), %ct:pal("the bridge ==== ~p", [Bridge]), - #{ <<"type">> := ?BRIDGE_TYPE - , <<"name">> := ?BRIDGE_NAME - , <<"status">> := <<"connected">> - , <<"node_status">> := [_|_] - , <<"metrics">> := _ - , <<"node_metrics">> := [_|_] - , <<"url">> := URL1 - } = jsx:decode(Bridge), + #{ + <<"type">> := ?BRIDGE_TYPE, + <<"name">> := ?BRIDGE_NAME, + <<"status">> := <<"connected">>, + <<"node_status">> := [_ | _], + <<"metrics">> := _, + <<"node_metrics">> := [_ | _], + <<"url">> := URL1 + } = jsx:decode(Bridge), BridgeID = emqx_bridge:bridge_id(?BRIDGE_TYPE, ?BRIDGE_NAME), %% stop it {ok, 200, <<>>} = request(post, operation_path(Type, stop, BridgeID), <<"">>), {ok, 200, Bridge2} = request(get, uri(["bridges", BridgeID]), []), - ?assertMatch(#{ <<"status">> := <<"disconnected">> - }, jsx:decode(Bridge2)), + ?assertMatch(#{<<"status">> := <<"disconnected">>}, jsx:decode(Bridge2)), %% start again {ok, 200, <<>>} = request(post, operation_path(Type, restart, BridgeID), <<"">>), {ok, 200, Bridge3} = request(get, uri(["bridges", BridgeID]), []), - ?assertMatch(#{ <<"status">> := <<"connected">> - }, jsx:decode(Bridge3)), + ?assertMatch(#{<<"status">> := <<"connected">>}, jsx:decode(Bridge3)), %% restart an already started bridge {ok, 200, <<>>} = request(post, operation_path(Type, restart, BridgeID), <<"">>), {ok, 200, Bridge3} = request(get, uri(["bridges", BridgeID]), []), - ?assertMatch(#{ <<"status">> := <<"connected">> - }, jsx:decode(Bridge3)), + ?assertMatch(#{<<"status">> := <<"connected">>}, jsx:decode(Bridge3)), %% stop it again {ok, 200, <<>>} = 
request(post, operation_path(Type, stop, BridgeID), <<"">>), %% restart a stopped bridge {ok, 200, <<>>} = request(post, operation_path(Type, restart, BridgeID), <<"">>), {ok, 200, Bridge4} = request(get, uri(["bridges", BridgeID]), []), - ?assertMatch(#{ <<"status">> := <<"connected">> - }, jsx:decode(Bridge4)), + ?assertMatch(#{<<"status">> := <<"connected">>}, jsx:decode(Bridge4)), %% delete the bridge {ok, 204, <<>>} = request(delete, uri(["bridges", BridgeID]), []), {ok, 200, <<"[]">>} = request(get, uri(["bridges"]), []). @@ -295,33 +340,34 @@ t_enable_disable_bridges(_) -> Port = start_http_server(fun handle_fun_200_ok/2), URL1 = ?URL(Port, "abc"), - {ok, 201, Bridge} = request(post, uri(["bridges"]), - ?HTTP_BRIDGE(URL1, ?BRIDGE_TYPE, ?BRIDGE_NAME)), + {ok, 201, Bridge} = request( + post, + uri(["bridges"]), + ?HTTP_BRIDGE(URL1, ?BRIDGE_TYPE, ?BRIDGE_NAME) + ), %ct:pal("the bridge ==== ~p", [Bridge]), - #{ <<"type">> := ?BRIDGE_TYPE - , <<"name">> := ?BRIDGE_NAME - , <<"status">> := <<"connected">> - , <<"node_status">> := [_|_] - , <<"metrics">> := _ - , <<"node_metrics">> := [_|_] - , <<"url">> := URL1 - } = jsx:decode(Bridge), + #{ + <<"type">> := ?BRIDGE_TYPE, + <<"name">> := ?BRIDGE_NAME, + <<"status">> := <<"connected">>, + <<"node_status">> := [_ | _], + <<"metrics">> := _, + <<"node_metrics">> := [_ | _], + <<"url">> := URL1 + } = jsx:decode(Bridge), BridgeID = emqx_bridge:bridge_id(?BRIDGE_TYPE, ?BRIDGE_NAME), %% disable it {ok, 200, <<>>} = request(post, operation_path(cluster, disable, BridgeID), <<"">>), {ok, 200, Bridge2} = request(get, uri(["bridges", BridgeID]), []), - ?assertMatch(#{ <<"status">> := <<"disconnected">> - }, jsx:decode(Bridge2)), + ?assertMatch(#{<<"status">> := <<"disconnected">>}, jsx:decode(Bridge2)), %% enable again {ok, 200, <<>>} = request(post, operation_path(cluster, enable, BridgeID), <<"">>), {ok, 200, Bridge3} = request(get, uri(["bridges", BridgeID]), []), - ?assertMatch(#{ <<"status">> := <<"connected">> - }, 
jsx:decode(Bridge3)), + ?assertMatch(#{<<"status">> := <<"connected">>}, jsx:decode(Bridge3)), %% enable an already started bridge {ok, 200, <<>>} = request(post, operation_path(cluster, enable, BridgeID), <<"">>), {ok, 200, Bridge3} = request(get, uri(["bridges", BridgeID]), []), - ?assertMatch(#{ <<"status">> := <<"connected">> - }, jsx:decode(Bridge3)), + ?assertMatch(#{<<"status">> := <<"connected">>}, jsx:decode(Bridge3)), %% disable it again {ok, 200, <<>>} = request(post, operation_path(cluster, disable, BridgeID), <<"">>), @@ -331,8 +377,7 @@ t_enable_disable_bridges(_) -> %% enable a stopped bridge {ok, 200, <<>>} = request(post, operation_path(cluster, enable, BridgeID), <<"">>), {ok, 200, Bridge4} = request(get, uri(["bridges", BridgeID]), []), - ?assertMatch(#{ <<"status">> := <<"connected">> - }, jsx:decode(Bridge4)), + ?assertMatch(#{<<"status">> := <<"connected">>}, jsx:decode(Bridge4)), %% delete the bridge {ok, 204, <<>>} = request(delete, uri(["bridges", BridgeID]), []), {ok, 200, <<"[]">>} = request(get, uri(["bridges"]), []). 
@@ -343,17 +388,21 @@ t_reset_bridges(_) -> Port = start_http_server(fun handle_fun_200_ok/2), URL1 = ?URL(Port, "abc"), - {ok, 201, Bridge} = request(post, uri(["bridges"]), - ?HTTP_BRIDGE(URL1, ?BRIDGE_TYPE, ?BRIDGE_NAME)), + {ok, 201, Bridge} = request( + post, + uri(["bridges"]), + ?HTTP_BRIDGE(URL1, ?BRIDGE_TYPE, ?BRIDGE_NAME) + ), %ct:pal("the bridge ==== ~p", [Bridge]), - #{ <<"type">> := ?BRIDGE_TYPE - , <<"name">> := ?BRIDGE_NAME - , <<"status">> := <<"connected">> - , <<"node_status">> := [_|_] - , <<"metrics">> := _ - , <<"node_metrics">> := [_|_] - , <<"url">> := URL1 - } = jsx:decode(Bridge), + #{ + <<"type">> := ?BRIDGE_TYPE, + <<"name">> := ?BRIDGE_NAME, + <<"status">> := <<"connected">>, + <<"node_status">> := [_ | _], + <<"metrics">> := _, + <<"node_metrics">> := [_ | _], + <<"url">> := URL1 + } = jsx:decode(Bridge), BridgeID = emqx_bridge:bridge_id(?BRIDGE_TYPE, ?BRIDGE_NAME), {ok, 200, <<"Reset success">>} = request(put, uri(["bridges", BridgeID, "reset_metrics"]), []), diff --git a/apps/emqx_connector/include/emqx_connector.hrl b/apps/emqx_connector/include/emqx_connector.hrl index f0e07166d..ec7c2cd5e 100644 --- a/apps/emqx_connector/include/emqx_connector.hrl +++ b/apps/emqx_connector/include/emqx_connector.hrl @@ -24,13 +24,16 @@ -define(REDIS_DEFAULT_PORT, 6379). -define(PGSQL_DEFAULT_PORT, 5432). --define(SERVERS_DESC, "A Node list for Cluster to connect to. The nodes should be separated with commas, such as: `Node[,Node].` -For each Node should be: "). +-define(SERVERS_DESC, + "A Node list for Cluster to connect to. The nodes should be separated with commas, such as: `Node[,Node].`\n" + "For each Node should be: " +). --define(SERVER_DESC(TYPE, DEFAULT_PORT), " -The IPv4 or IPv6 address or the hostname to connect to.
-A host entry has the following form: `Host[:Port]`.
-The " ++ TYPE ++ " default port " ++ DEFAULT_PORT ++ " is used if `[:Port]` is not specified." +-define(SERVER_DESC(TYPE, DEFAULT_PORT), + "\n" + "The IPv4 or IPv6 address or the hostname to connect to.
\n" + "A host entry has the following form: `Host[:Port]`.
\n" + "The " ++ TYPE ++ " default port " ++ DEFAULT_PORT ++ " is used if `[:Port]` is not specified." ). -define(THROW_ERROR(Str), erlang:throw({error, Str})). diff --git a/apps/emqx_connector/rebar.config b/apps/emqx_connector/rebar.config index 5467c5261..a6fd0c77e 100644 --- a/apps/emqx_connector/rebar.config +++ b/apps/emqx_connector/rebar.config @@ -1,30 +1,32 @@ %% -*- mode: erlang -*- {erl_opts, [ - nowarn_unused_import, - debug_info + nowarn_unused_import, + debug_info ]}. {deps, [ - {emqx, {path, "../emqx"}}, - {eldap2, {git, "https://github.com/emqx/eldap2", {tag, "v0.2.2"}}}, - {mysql, {git, "https://github.com/emqx/mysql-otp", {tag, "1.7.1"}}}, - {epgsql, {git, "https://github.com/emqx/epgsql", {tag, "4.7-emqx.2"}}}, - %% NOTE: mind poolboy version when updating mongodb-erlang version - {mongodb, {git,"https://github.com/emqx/mongodb-erlang", {tag, "v3.0.13"}}}, - %% NOTE: mind poolboy version when updating eredis_cluster version - {eredis_cluster, {git, "https://github.com/emqx/eredis_cluster", {tag, "0.7.1"}}}, - %% mongodb-erlang uses a special fork https://github.com/comtihon/poolboy.git - %% (which has overflow_ttl feature added). - %% However, it references `{branch, "master}` (commit 9c06a9a on 2021-04-07). - %% By accident, We have always been using the upstream fork due to - %% eredis_cluster's dependency getting resolved earlier. - %% Here we pin 1.5.2 to avoid surprises in the future. 
- {poolboy, {git, "https://github.com/emqx/poolboy.git", {tag, "1.5.2"}}}, - {emqtt, {git, "https://github.com/emqx/emqtt", {tag, "1.5.0"}}} + {emqx, {path, "../emqx"}}, + {eldap2, {git, "https://github.com/emqx/eldap2", {tag, "v0.2.2"}}}, + {mysql, {git, "https://github.com/emqx/mysql-otp", {tag, "1.7.1"}}}, + {epgsql, {git, "https://github.com/emqx/epgsql", {tag, "4.7-emqx.2"}}}, + %% NOTE: mind poolboy version when updating mongodb-erlang version + {mongodb, {git, "https://github.com/emqx/mongodb-erlang", {tag, "v3.0.13"}}}, + %% NOTE: mind poolboy version when updating eredis_cluster version + {eredis_cluster, {git, "https://github.com/emqx/eredis_cluster", {tag, "0.7.1"}}}, + %% mongodb-erlang uses a special fork https://github.com/comtihon/poolboy.git + %% (which has overflow_ttl feature added). + %% However, it references `{branch, "master}` (commit 9c06a9a on 2021-04-07). + %% By accident, We have always been using the upstream fork due to + %% eredis_cluster's dependency getting resolved earlier. + %% Here we pin 1.5.2 to avoid surprises in the future. + {poolboy, {git, "https://github.com/emqx/poolboy.git", {tag, "1.5.2"}}}, + {emqtt, {git, "https://github.com/emqx/emqtt", {tag, "1.5.0"}}} ]}. {shell, [ - % {config, "config/sys.config"}, + % {config, "config/sys.config"}, {apps, [emqx_connector]} ]}. + +{project_plugins, [erlfmt]}. 
diff --git a/apps/emqx_connector/src/emqx_connector.app.src b/apps/emqx_connector/src/emqx_connector.app.src index d83d16764..fd804f6c5 100644 --- a/apps/emqx_connector/src/emqx_connector.app.src +++ b/apps/emqx_connector/src/emqx_connector.app.src @@ -1,27 +1,27 @@ %% -*- mode: erlang -*- -{application, emqx_connector, - [{description, "An OTP application"}, - {vsn, "0.1.1"}, - {registered, []}, - {mod, {emqx_connector_app, []}}, - {applications, - [kernel, - stdlib, - ecpool, - emqx_resource, - eredis_cluster, - eredis, - epgsql, - eldap2, - mysql, - mongodb, - ehttpc, - emqx, - emqtt - ]}, - {env,[]}, - {modules, []}, +{application, emqx_connector, [ + {description, "An OTP application"}, + {vsn, "0.1.1"}, + {registered, []}, + {mod, {emqx_connector_app, []}}, + {applications, [ + kernel, + stdlib, + ecpool, + emqx_resource, + eredis_cluster, + eredis, + epgsql, + eldap2, + mysql, + mongodb, + ehttpc, + emqx, + emqtt + ]}, + {env, []}, + {modules, []}, - {licenses, ["Apache 2.0"]}, - {links, []} - ]}. + {licenses, ["Apache 2.0"]}, + {links, []} +]}. diff --git a/apps/emqx_connector/src/emqx_connector.erl b/apps/emqx_connector/src/emqx_connector.erl index fbb89e8e7..aefff666a 100644 --- a/apps/emqx_connector/src/emqx_connector.erl +++ b/apps/emqx_connector/src/emqx_connector.erl @@ -15,24 +15,27 @@ %%-------------------------------------------------------------------- -module(emqx_connector). --export([ config_key_path/0 - , pre_config_update/3 - , post_config_update/5 - ]). +-export([ + config_key_path/0, + pre_config_update/3, + post_config_update/5 +]). --export([ parse_connector_id/1 - , connector_id/2 - ]). +-export([ + parse_connector_id/1, + connector_id/2 +]). --export([ list_raw/0 - , lookup_raw/1 - , lookup_raw/2 - , create_dry_run/2 - , update/2 - , update/3 - , delete/1 - , delete/2 - ]). +-export([ + list_raw/0, + lookup_raw/1, + lookup_raw/2, + create_dry_run/2, + update/2, + update/3, + delete/1, + delete/2 +]). config_key_path() -> [connectors]. 
@@ -53,19 +56,27 @@ post_config_update([connectors, Type, Name] = Path, '$remove', _, OldConf, _AppE throw({dependency_bridges_exist, emqx_bridge:bridge_id(BType, BName)}) end), _ = emqx_connector_ssl:clear_certs(filename:join(Path), OldConf) - catch throw:Error -> {error, Error} + catch + throw:Error -> {error, Error} end; post_config_update([connectors, Type, Name], _Req, NewConf, OldConf, _AppEnvs) -> ConnId = connector_id(Type, Name), - foreach_linked_bridges(ConnId, + foreach_linked_bridges( + ConnId, fun(#{type := BType, name := BName}) -> BridgeConf = emqx:get_config([bridges, BType, BName]), - case emqx_bridge:update(BType, BName, {BridgeConf#{connector => OldConf}, - BridgeConf#{connector => NewConf}}) of + case + emqx_bridge:update( + BType, + BName, + {BridgeConf#{connector => OldConf}, BridgeConf#{connector => NewConf}} + ) + of ok -> ok; {error, Reason} -> error({update_bridge_error, Reason}) end - end). + end + ). connector_id(Type0, Name0) -> Type = bin(Type0), @@ -80,13 +91,22 @@ parse_connector_id(ConnectorId) -> list_raw() -> case get_raw_connector_conf() of - not_found -> []; + not_found -> + []; Config -> - lists:foldl(fun({Type, NameAndConf}, Connectors) -> - lists:foldl(fun({Name, RawConf}, Acc) -> - [RawConf#{<<"type">> => Type, <<"name">> => Name} | Acc] - end, Connectors, maps:to_list(NameAndConf)) - end, [], maps:to_list(Config)) + lists:foldl( + fun({Type, NameAndConf}, Connectors) -> + lists:foldl( + fun({Name, RawConf}, Acc) -> + [RawConf#{<<"type">> => Type, <<"name">> => Name} | Acc] + end, + Connectors, + maps:to_list(NameAndConf) + ) + end, + [], + maps:to_list(Config) + ) end. 
lookup_raw(Id) when is_binary(Id) -> @@ -96,7 +116,8 @@ lookup_raw(Id) when is_binary(Id) -> lookup_raw(Type, Name) -> Path = [bin(P) || P <- [Type, Name]], case get_raw_connector_conf() of - not_found -> {error, not_found}; + not_found -> + {error, not_found}; Conf -> case emqx_map_lib:deep_get(Path, Conf, not_found) of not_found -> {error, not_found}; @@ -123,7 +144,8 @@ delete(Type, Name) -> get_raw_connector_conf() -> case emqx:get_raw_config(config_key_path(), not_found) of - not_found -> not_found; + not_found -> + not_found; RawConf -> #{<<"connectors">> := Conf} = emqx_config:fill_defaults(#{<<"connectors">> => RawConf}), @@ -135,8 +157,12 @@ bin(Str) when is_list(Str) -> list_to_binary(Str); bin(Atom) when is_atom(Atom) -> atom_to_binary(Atom, utf8). foreach_linked_bridges(ConnId, Do) -> - lists:foreach(fun - (#{raw_config := #{<<"connector">> := ConnId0}} = Bridge) when ConnId0 == ConnId -> - Do(Bridge); - (_) -> ok - end, emqx_bridge:list()). + lists:foreach( + fun + (#{raw_config := #{<<"connector">> := ConnId0}} = Bridge) when ConnId0 == ConnId -> + Do(Bridge); + (_) -> + ok + end, + emqx_bridge:list() + ). diff --git a/apps/emqx_connector/src/emqx_connector_api.erl b/apps/emqx_connector/src/emqx_connector_api.erl index 4aa710c79..8fb4f596d 100644 --- a/apps/emqx_connector/src/emqx_connector_api.erl +++ b/apps/emqx_connector/src/emqx_connector_api.erl @@ -40,9 +40,14 @@ EXPR catch error:{invalid_connector_id, Id0} -> - {400, #{code => 'INVALID_ID', message => <<"invalid_connector_id: ", Id0/binary, - ". Connector Ids must be of format {type}:{name}">>}} - end). + {400, #{ + code => 'INVALID_ID', + message => + <<"invalid_connector_id: ", Id0/binary, + ". Connector Ids must be of format {type}:{name}">> + }} + end +). namespace() -> "connector". 
@@ -58,21 +63,25 @@ error_schema(Codes, Message) when is_binary(Message) -> put_request_body_schema() -> emqx_dashboard_swagger:schema_with_examples( - emqx_connector_schema:put_request(), connector_info_examples(put)). + emqx_connector_schema:put_request(), connector_info_examples(put) + ). post_request_body_schema() -> emqx_dashboard_swagger:schema_with_examples( - emqx_connector_schema:post_request(), connector_info_examples(post)). + emqx_connector_schema:post_request(), connector_info_examples(post) + ). get_response_body_schema() -> emqx_dashboard_swagger:schema_with_examples( - emqx_connector_schema:get_response(), connector_info_examples(get)). + emqx_connector_schema:get_response(), connector_info_examples(get) + ). connector_info_array_example(Method) -> [Config || #{value := Config} <- maps:values(connector_info_examples(Method))]. connector_info_examples(Method) -> - lists:foldl(fun(Type, Acc) -> + lists:foldl( + fun(Type, Acc) -> SType = atom_to_list(Type), maps:merge(Acc, #{ Type => #{ @@ -80,11 +89,16 @@ connector_info_examples(Method) -> value => info_example(Type, Method) } }) - end, #{}, ?CONN_TYPES). + end, + #{}, + ?CONN_TYPES + ). info_example(Type, Method) -> - maps:merge(info_example_basic(Type), - method_example(Type, Method)). + maps:merge( + info_example_basic(Type), + method_example(Type, Method) + ). method_example(Type, Method) when Method == get; Method == post -> SType = atom_to_list(Type), @@ -115,11 +129,17 @@ info_example_basic(mqtt) -> }. param_path_id() -> - [{id, mk(binary(), - #{ in => path - , example => <<"mqtt:my_mqtt_connector">> - , desc => ?DESC("id") - })}]. + [ + {id, + mk( + binary(), + #{ + in => path, + example => <<"mqtt:my_mqtt_connector">>, + desc => ?DESC("id") + } + )} + ]. 
schema("/connectors_test") -> #{ @@ -135,7 +155,6 @@ schema("/connectors_test") -> } } }; - schema("/connectors") -> #{ 'operationId' => '/connectors', @@ -145,8 +164,9 @@ schema("/connectors") -> summary => <<"List connectors">>, responses => #{ 200 => emqx_dashboard_swagger:schema_with_example( - array(emqx_connector_schema:get_response()), - connector_info_array_example(get)) + array(emqx_connector_schema:get_response()), + connector_info_array_example(get) + ) } }, post => #{ @@ -160,7 +180,6 @@ schema("/connectors") -> } } }; - schema("/connectors/:id") -> #{ 'operationId' => '/connectors/:id', @@ -185,7 +204,8 @@ schema("/connectors/:id") -> 200 => get_response_body_schema(), 404 => error_schema(['NOT_FOUND'], "Connector not found"), 400 => error_schema(['INVALID_ID'], "Bad connector ID") - }}, + } + }, delete => #{ tags => [<<"connectors">>], desc => ?DESC("conn_id_delete"), @@ -196,7 +216,8 @@ schema("/connectors/:id") -> 403 => error_schema(['DEPENDENCY_EXISTS'], "Cannot remove dependent connector"), 404 => error_schema(['NOT_FOUND'], "Delete failed, not found"), 400 => error_schema(['INVALID_ID'], "Bad connector ID") - }} + } + } }. 
'/connectors_test'(post, #{body := #{<<"type">> := ConnType} = Params}) -> @@ -209,67 +230,83 @@ schema("/connectors/:id") -> '/connectors'(get, _Request) -> {200, [format_resp(Conn) || Conn <- emqx_connector:list_raw()]}; - '/connectors'(post, #{body := #{<<"type">> := ConnType, <<"name">> := ConnName} = Params}) -> case emqx_connector:lookup_raw(ConnType, ConnName) of {ok, _} -> {400, error_msg('ALREADY_EXISTS', <<"connector already exists">>)}; {error, not_found} -> - case emqx_connector:update(ConnType, ConnName, - filter_out_request_body(Params)) of + case + emqx_connector:update( + ConnType, + ConnName, + filter_out_request_body(Params) + ) + of {ok, #{raw_config := RawConf}} -> - {201, format_resp(RawConf#{<<"type">> => ConnType, - <<"name">> => ConnName})}; + {201, + format_resp(RawConf#{ + <<"type">> => ConnType, + <<"name">> => ConnName + })}; {error, Error} -> {400, error_msg('BAD_REQUEST', Error)} end end; - '/connectors'(post, _) -> {400, error_msg('BAD_REQUEST', <<"missing some required fields: [name, type]">>)}. 
'/connectors/:id'(get, #{bindings := #{id := Id}}) -> - ?TRY_PARSE_ID(Id, + ?TRY_PARSE_ID( + Id, case emqx_connector:lookup_raw(ConnType, ConnName) of {ok, Conf} -> {200, format_resp(Conf)}; {error, not_found} -> {404, error_msg('NOT_FOUND', <<"connector not found">>)} - end); - + end + ); '/connectors/:id'(put, #{bindings := #{id := Id}, body := Params0}) -> Params = filter_out_request_body(Params0), - ?TRY_PARSE_ID(Id, + ?TRY_PARSE_ID( + Id, case emqx_connector:lookup_raw(ConnType, ConnName) of {ok, _} -> case emqx_connector:update(ConnType, ConnName, Params) of {ok, #{raw_config := RawConf}} -> - {200, format_resp(RawConf#{<<"type">> => ConnType, - <<"name">> => ConnName})}; + {200, + format_resp(RawConf#{ + <<"type">> => ConnType, + <<"name">> => ConnName + })}; {error, Error} -> {500, error_msg('INTERNAL_ERROR', Error)} end; {error, not_found} -> {404, error_msg('NOT_FOUND', <<"connector not found">>)} - end); - + end + ); '/connectors/:id'(delete, #{bindings := #{id := Id}}) -> - ?TRY_PARSE_ID(Id, + ?TRY_PARSE_ID( + Id, case emqx_connector:lookup_raw(ConnType, ConnName) of {ok, _} -> case emqx_connector:delete(ConnType, ConnName) of {ok, _} -> {204}; {error, {post_config_update, _, {dependency_bridges_exist, BridgeID}}} -> - {403, error_msg('DEPENDENCY_EXISTS', - <<"Cannot remove the connector as it's in use by a bridge: ", - BridgeID/binary>>)}; + {403, + error_msg( + 'DEPENDENCY_EXISTS', + <<"Cannot remove the connector as it's in use by a bridge: ", + BridgeID/binary>> + )}; {error, Error} -> {500, error_msg('INTERNAL_ERROR', Error)} end; {error, not_found} -> {404, error_msg('NOT_FOUND', <<"connector not found">>)} - end). + end + ). error_msg(Code, Msg) when is_binary(Msg) -> #{code => Code, message => Msg}; @@ -277,8 +314,11 @@ error_msg(Code, Msg) -> #{code => Code, message => bin(io_lib:format("~p", [Msg]))}. 
format_resp(#{<<"type">> := ConnType, <<"name">> := ConnName} = RawConf) -> - NumOfBridges = length(emqx_bridge:list_bridges_by_connector( - emqx_connector:connector_id(ConnType, ConnName))), + NumOfBridges = length( + emqx_bridge:list_bridges_by_connector( + emqx_connector:connector_id(ConnType, ConnName) + ) + ), RawConf#{ <<"type">> => ConnType, <<"name">> => ConnName, diff --git a/apps/emqx_connector/src/emqx_connector_http.erl b/apps/emqx_connector/src/emqx_connector_http.erl index 641e70959..38ddd2c23 100644 --- a/apps/emqx_connector/src/emqx_connector_http.erl +++ b/apps/emqx_connector/src/emqx_connector_http.erl @@ -25,32 +25,34 @@ -behaviour(emqx_resource). %% callbacks of behaviour emqx_resource --export([ on_start/2 - , on_stop/2 - , on_query/4 - , on_health_check/2 - ]). +-export([ + on_start/2, + on_stop/2, + on_query/4, + on_health_check/2 +]). -type url() :: emqx_http_lib:uri_map(). -reflect_type([url/0]). -typerefl_from_string({url/0, emqx_http_lib, uri_parse}). --export([ roots/0 - , fields/1 - , desc/1 - , validations/0 - , namespace/0 - ]). +-export([ + roots/0, + fields/1, + desc/1, + validations/0, + namespace/0 +]). --export([ check_ssl_opts/2 - ]). +-export([check_ssl_opts/2]). -type connect_timeout() :: emqx_schema:duration() | infinity. -type pool_type() :: random | hash. --reflect_type([ connect_timeout/0 - , pool_type/0 - ]). +-reflect_type([ + connect_timeout/0, + pool_type/0 +]). %%===================================================================== %% Hocon schema @@ -61,63 +63,96 @@ roots() -> fields(config). 
fields(config) -> - [ {base_url, - sc(url(), - #{ required => true - , validator => fun(#{query := _Query}) -> + [ + {base_url, + sc( + url(), + #{ + required => true, + validator => fun + (#{query := _Query}) -> {error, "There must be no query in the base_url"}; - (_) -> ok - end - , desc => ?DESC("base_url") - })} - , {connect_timeout, - sc(emqx_schema:duration_ms(), - #{ default => "15s" - , desc => ?DESC("connect_timeout") - })} - , {max_retries, - sc(non_neg_integer(), - #{ default => 5 - , desc => ?DESC("max_retries") - })} - , {retry_interval, - sc(emqx_schema:duration(), - #{ default => "1s" - , desc => ?DESC("retry_interval") - })} - , {pool_type, - sc(pool_type(), - #{ default => random - , desc => ?DESC("pool_type") - })} - , {pool_size, - sc(pos_integer(), - #{ default => 8 - , desc => ?DESC("pool_size") - })} - , {enable_pipelining, - sc(boolean(), - #{ default => true - , desc => ?DESC("enable_pipelining") - })} - , {request, hoconsc:mk( - ref("request"), - #{ default => undefined - , required => false - , desc => ?DESC("request") - })} + (_) -> + ok + end, + desc => ?DESC("base_url") + } + )}, + {connect_timeout, + sc( + emqx_schema:duration_ms(), + #{ + default => "15s", + desc => ?DESC("connect_timeout") + } + )}, + {max_retries, + sc( + non_neg_integer(), + #{ + default => 5, + desc => ?DESC("max_retries") + } + )}, + {retry_interval, + sc( + emqx_schema:duration(), + #{ + default => "1s", + desc => ?DESC("retry_interval") + } + )}, + {pool_type, + sc( + pool_type(), + #{ + default => random, + desc => ?DESC("pool_type") + } + )}, + {pool_size, + sc( + pos_integer(), + #{ + default => 8, + desc => ?DESC("pool_size") + } + )}, + {enable_pipelining, + sc( + boolean(), + #{ + default => true, + desc => ?DESC("enable_pipelining") + } + )}, + {request, + hoconsc:mk( + ref("request"), + #{ + default => undefined, + required => false, + desc => ?DESC("request") + } + )} ] ++ emqx_connector_schema_lib:ssl_fields(); - fields("request") -> - [ {method, 
hoconsc:mk(hoconsc:enum([post, put, get, delete]), #{required => false, desc => ?DESC("method")})} - , {path, hoconsc:mk(binary(), #{required => false, desc => ?DESC("path")})} - , {body, hoconsc:mk(binary(), #{required => false, desc => ?DESC("body")})} - , {headers, hoconsc:mk(map(), #{required => false, desc => ?DESC("headers")})} - , {request_timeout, - sc(emqx_schema:duration_ms(), - #{ required => false - , desc => ?DESC("request_timeout") - })} + [ + {method, + hoconsc:mk(hoconsc:enum([post, put, get, delete]), #{ + required => false, desc => ?DESC("method") + })}, + {path, hoconsc:mk(binary(), #{required => false, desc => ?DESC("path")})}, + {body, hoconsc:mk(binary(), #{required => false, desc => ?DESC("body")})}, + {headers, hoconsc:mk(map(), #{required => false, desc => ?DESC("headers")})}, + {request_timeout, + sc( + emqx_schema:duration_ms(), + #{ + required => false, + desc => ?DESC("request_timeout") + } + )} ]. desc(config) -> @@ -128,24 +163,34 @@ desc(_) -> undefined. validations() -> - [ {check_ssl_opts, fun check_ssl_opts/1} ]. + [{check_ssl_opts, fun check_ssl_opts/1}]. sc(Type, Meta) -> hoconsc:mk(Type, Meta). ref(Field) -> hoconsc:ref(?MODULE, Field). 
%% =================================================================== -on_start(InstId, #{base_url := #{scheme := Scheme, - host := Host, - port := Port, - path := BasePath}, - connect_timeout := ConnectTimeout, - max_retries := MaxRetries, - retry_interval := RetryInterval, - pool_type := PoolType, - pool_size := PoolSize} = Config) -> - ?SLOG(info, #{msg => "starting_http_connector", - connector => InstId, config => Config}), +on_start( + InstId, + #{ + base_url := #{ + scheme := Scheme, + host := Host, + port := Port, + path := BasePath + }, + connect_timeout := ConnectTimeout, + max_retries := MaxRetries, + retry_interval := RetryInterval, + pool_type := PoolType, + pool_size := PoolSize + } = Config +) -> + ?SLOG(info, #{ + msg => "starting_http_connector", + connector => InstId, + config => Config + }), {Transport, TransportOpts} = case Scheme of http -> @@ -155,16 +200,18 @@ on_start(InstId, #{base_url := #{scheme := Scheme, {tls, SSLOpts} end, NTransportOpts = emqx_misc:ipv6_probe(TransportOpts), - PoolOpts = [ {host, Host} - , {port, Port} - , {connect_timeout, ConnectTimeout} - , {retry, MaxRetries} - , {retry_timeout, RetryInterval} - , {keepalive, 30000} - , {pool_type, PoolType} - , {pool_size, PoolSize} - , {transport, Transport} - , {transport_opts, NTransportOpts}], + PoolOpts = [ + {host, Host}, + {port, Port}, + {connect_timeout, ConnectTimeout}, + {retry, MaxRetries}, + {retry_timeout, RetryInterval}, + {keepalive, 30000}, + {pool_type, PoolType}, + {pool_size, PoolSize}, + {transport, Transport}, + {transport_opts, NTransportOpts} + ], PoolName = emqx_plugin_libs_pool:pool_name(InstId), State = #{ pool_name => PoolName, @@ -177,54 +224,84 @@ on_start(InstId, #{base_url := #{scheme := Scheme, case ehttpc_sup:start_pool(PoolName, PoolOpts) of {ok, _} -> {ok, State}; {error, {already_started, _}} -> {ok, State}; - {error, Reason} -> - {error, Reason} + {error, Reason} -> {error, Reason} end. 
on_stop(InstId, #{pool_name := PoolName}) -> - ?SLOG(info, #{msg => "stopping_http_connector", - connector => InstId}), + ?SLOG(info, #{ + msg => "stopping_http_connector", + connector => InstId + }), ehttpc_sup:stop_pool(PoolName). on_query(InstId, {send_message, Msg}, AfterQuery, State) -> case maps:get(request, State, undefined) of - undefined -> ?SLOG(error, #{msg => "request_not_found", connector => InstId}); + undefined -> + ?SLOG(error, #{msg => "request_not_found", connector => InstId}); Request -> - #{method := Method, path := Path, body := Body, headers := Headers, - request_timeout := Timeout} = process_request(Request, Msg), + #{ + method := Method, + path := Path, + body := Body, + headers := Headers, + request_timeout := Timeout + } = process_request(Request, Msg), on_query(InstId, {Method, {Path, Headers, Body}, Timeout}, AfterQuery, State) end; on_query(InstId, {Method, Request}, AfterQuery, State) -> on_query(InstId, {undefined, Method, Request, 5000}, AfterQuery, State); on_query(InstId, {Method, Request, Timeout}, AfterQuery, State) -> on_query(InstId, {undefined, Method, Request, Timeout}, AfterQuery, State); -on_query(InstId, {KeyOrNum, Method, Request, Timeout}, AfterQuery, - #{pool_name := PoolName, base_path := BasePath} = State) -> - ?TRACE("QUERY", "http_connector_received", - #{request => Request, connector => InstId, state => State}), +on_query( + InstId, + {KeyOrNum, Method, Request, Timeout}, + AfterQuery, + #{pool_name := PoolName, base_path := BasePath} = State +) -> + ?TRACE( + "QUERY", + "http_connector_received", + #{request => Request, connector => InstId, state => State} + ), NRequest = formalize_request(Method, BasePath, Request), - case Result = ehttpc:request(case KeyOrNum of - undefined -> PoolName; - _ -> {PoolName, KeyOrNum} - end, Method, NRequest, Timeout) of + case + Result = ehttpc:request( + case KeyOrNum of + undefined -> PoolName; + _ -> {PoolName, KeyOrNum} + end, + Method, + NRequest, + Timeout + ) + of {error, 
Reason} -> - ?SLOG(error, #{msg => "http_connector_do_reqeust_failed", - request => NRequest, reason => Reason, - connector => InstId}), + ?SLOG(error, #{ + msg => "http_connector_do_reqeust_failed", + request => NRequest, + reason => Reason, + connector => InstId + }), emqx_resource:query_failed(AfterQuery); {ok, StatusCode, _} when StatusCode >= 200 andalso StatusCode < 300 -> emqx_resource:query_success(AfterQuery); {ok, StatusCode, _, _} when StatusCode >= 200 andalso StatusCode < 300 -> emqx_resource:query_success(AfterQuery); {ok, StatusCode, _} -> - ?SLOG(error, #{msg => "http connector do request, received error response", - request => NRequest, connector => InstId, - status_code => StatusCode}), + ?SLOG(error, #{ + msg => "http connector do request, received error response", + request => NRequest, + connector => InstId, + status_code => StatusCode + }), emqx_resource:query_failed(AfterQuery); {ok, StatusCode, _, _} -> - ?SLOG(error, #{msg => "http connector do request, received error response", - request => NRequest, connector => InstId, - status_code => StatusCode}), + ?SLOG(error, #{ + msg => "http connector do request, received error response", + request => NRequest, + connector => InstId, + status_code => StatusCode + }), emqx_resource:query_failed(AfterQuery) end, Result. @@ -232,14 +309,16 @@ on_query(InstId, {KeyOrNum, Method, Request, Timeout}, AfterQuery, on_health_check(_InstId, #{host := Host, port := Port, connect_timeout := Timeout} = State) -> case do_health_check(Host, Port, Timeout) of ok -> {ok, State}; - {error, Reason} -> - {error, {http_health_check_failed, Reason}, State} + {error, Reason} -> {error, {http_health_check_failed, Reason}, State} end. do_health_check(Host, Port, Timeout) -> case gen_tcp:connect(Host, Port, emqx_misc:ipv6_probe([]), Timeout) of - {ok, Sock} -> gen_tcp:close(Sock), ok; - {error, Reason} -> {error, Reason} + {ok, Sock} -> + gen_tcp:close(Sock), + ok; + {error, Reason} -> + {error, Reason} end. 
%%-------------------------------------------------------------------- @@ -250,47 +329,64 @@ preprocess_request(undefined) -> undefined; preprocess_request(Req) when map_size(Req) == 0 -> undefined; -preprocess_request(#{ - method := Method, - path := Path, - body := Body, - headers := Headers - } = Req) -> - #{ method => emqx_plugin_libs_rule:preproc_tmpl(bin(Method)) - , path => emqx_plugin_libs_rule:preproc_tmpl(Path) - , body => emqx_plugin_libs_rule:preproc_tmpl(Body) - , headers => preproc_headers(Headers) - , request_timeout => maps:get(request_timeout, Req, 30000) - }. +preprocess_request( + #{ + method := Method, + path := Path, + body := Body, + headers := Headers + } = Req +) -> + #{ + method => emqx_plugin_libs_rule:preproc_tmpl(bin(Method)), + path => emqx_plugin_libs_rule:preproc_tmpl(Path), + body => emqx_plugin_libs_rule:preproc_tmpl(Body), + headers => preproc_headers(Headers), + request_timeout => maps:get(request_timeout, Req, 30000) + }. preproc_headers(Headers) when is_map(Headers) -> - maps:fold(fun(K, V, Acc) -> - [{ + maps:fold( + fun(K, V, Acc) -> + [ + { + emqx_plugin_libs_rule:preproc_tmpl(bin(K)), + emqx_plugin_libs_rule:preproc_tmpl(bin(V)) + } + | Acc + ] + end, + [], + Headers + ); +preproc_headers(Headers) when is_list(Headers) -> + lists:map( + fun({K, V}) -> + { emqx_plugin_libs_rule:preproc_tmpl(bin(K)), emqx_plugin_libs_rule:preproc_tmpl(bin(V)) - } | Acc] - end, [], Headers); -preproc_headers(Headers) when is_list(Headers) -> - lists:map(fun({K, V}) -> - { - emqx_plugin_libs_rule:preproc_tmpl(bin(K)), - emqx_plugin_libs_rule:preproc_tmpl(bin(V)) - } - end, Headers). + } + end, + Headers + ). 
-process_request(#{ - method := MethodTks, - path := PathTks, - body := BodyTks, - headers := HeadersTks, - request_timeout := ReqTimeout - } = Conf, Msg) -> - Conf#{ method => make_method(emqx_plugin_libs_rule:proc_tmpl(MethodTks, Msg)) - , path => emqx_plugin_libs_rule:proc_tmpl(PathTks, Msg) - , body => process_request_body(BodyTks, Msg) - , headers => proc_headers(HeadersTks, Msg) - , request_timeout => ReqTimeout - }. +process_request( + #{ + method := MethodTks, + path := PathTks, + body := BodyTks, + headers := HeadersTks, + request_timeout := ReqTimeout + } = Conf, + Msg +) -> + Conf#{ + method => make_method(emqx_plugin_libs_rule:proc_tmpl(MethodTks, Msg)), + path => emqx_plugin_libs_rule:proc_tmpl(PathTks, Msg), + body => process_request_body(BodyTks, Msg), + headers => proc_headers(HeadersTks, Msg), + request_timeout => ReqTimeout + }. process_request_body([], Msg) -> emqx_json:encode(Msg); @@ -298,12 +394,15 @@ process_request_body(BodyTks, Msg) -> emqx_plugin_libs_rule:proc_tmpl(BodyTks, Msg). proc_headers(HeaderTks, Msg) -> - lists:map(fun({K, V}) -> + lists:map( + fun({K, V}) -> { emqx_plugin_libs_rule:proc_tmpl(K, Msg), emqx_plugin_libs_rule:proc_tmpl(V, Msg) } - end, HeaderTks). + end, + HeaderTks + ). make_method(M) when M == <<"POST">>; M == <<"post">> -> post; make_method(M) when M == <<"PUT">>; M == <<"put">> -> put; @@ -315,19 +414,19 @@ check_ssl_opts(Conf) -> check_ssl_opts(URLFrom, Conf) -> #{scheme := Scheme} = hocon_maps:get(URLFrom, Conf), - SSL= hocon_maps:get("ssl", Conf), + SSL = hocon_maps:get("ssl", Conf), case {Scheme, maps:get(enable, SSL, false)} of {http, false} -> true; {https, true} -> true; {_, _} -> false end. 
-formalize_request(Method, BasePath, {Path, Headers, _Body}) - when Method =:= get; Method =:= delete -> +formalize_request(Method, BasePath, {Path, Headers, _Body}) when + Method =:= get; Method =:= delete +-> formalize_request(Method, BasePath, {Path, Headers}); formalize_request(_Method, BasePath, {Path, Headers, Body}) -> {filename:join(BasePath, Path), Headers, Body}; - formalize_request(_Method, BasePath, {Path, Headers}) -> {filename:join(BasePath, Path), Headers}. diff --git a/apps/emqx_connector/src/emqx_connector_ldap.erl b/apps/emqx_connector/src/emqx_connector_ldap.erl index 918338ea1..25798fae5 100644 --- a/apps/emqx_connector/src/emqx_connector_ldap.erl +++ b/apps/emqx_connector/src/emqx_connector_ldap.erl @@ -24,11 +24,12 @@ -behaviour(emqx_resource). %% callbacks of behaviour emqx_resource --export([ on_start/2 - , on_stop/2 - , on_query/4 - , on_health_check/2 - ]). +-export([ + on_start/2, + on_stop/2, + on_query/4, + on_health_check/2 +]). -export([do_health_check/1]). @@ -43,54 +44,84 @@ roots() -> fields(_) -> []. 
%% =================================================================== -on_start(InstId, #{servers := Servers0, - port := Port, - bind_dn := BindDn, - bind_password := BindPassword, - timeout := Timeout, - pool_size := PoolSize, - auto_reconnect := AutoReconn, - ssl := SSL} = Config) -> - ?SLOG(info, #{msg => "starting_ldap_connector", - connector => InstId, config => Config}), - Servers = [begin proplists:get_value(host, S) end || S <- Servers0], - SslOpts = case maps:get(enable, SSL) of - true -> - [{ssl, true}, - {sslopts, emqx_tls_lib:to_client_opts(SSL)} - ]; - false -> [{ssl, false}] - end, - Opts = [{servers, Servers}, - {port, Port}, - {bind_dn, BindDn}, - {bind_password, BindPassword}, - {timeout, Timeout}, - {pool_size, PoolSize}, - {auto_reconnect, reconn_interval(AutoReconn)}, - {servers, Servers}], +on_start( + InstId, + #{ + servers := Servers0, + port := Port, + bind_dn := BindDn, + bind_password := BindPassword, + timeout := Timeout, + pool_size := PoolSize, + auto_reconnect := AutoReconn, + ssl := SSL + } = Config +) -> + ?SLOG(info, #{ + msg => "starting_ldap_connector", + connector => InstId, + config => Config + }), + Servers = [ + begin + proplists:get_value(host, S) + end + || S <- Servers0 + ], + SslOpts = + case maps:get(enable, SSL) of + true -> + [ + {ssl, true}, + {sslopts, emqx_tls_lib:to_client_opts(SSL)} + ]; + false -> + [{ssl, false}] + end, + Opts = [ + {servers, Servers}, + {port, Port}, + {bind_dn, BindDn}, + {bind_password, BindPassword}, + {timeout, Timeout}, + {pool_size, PoolSize}, + {auto_reconnect, reconn_interval(AutoReconn)}, + {servers, Servers} + ], PoolName = emqx_plugin_libs_pool:pool_name(InstId), case emqx_plugin_libs_pool:start_pool(PoolName, ?MODULE, Opts ++ SslOpts) of - ok -> {ok, #{poolname => PoolName}}; + ok -> {ok, #{poolname => PoolName}}; {error, Reason} -> {error, Reason} end. 
on_stop(InstId, #{poolname := PoolName}) -> - ?SLOG(info, #{msg => "stopping_ldap_connector", - connector => InstId}), + ?SLOG(info, #{ + msg => "stopping_ldap_connector", + connector => InstId + }), emqx_plugin_libs_pool:stop_pool(PoolName). on_query(InstId, {search, Base, Filter, Attributes}, AfterQuery, #{poolname := PoolName} = State) -> Request = {Base, Filter, Attributes}, - ?TRACE("QUERY", "ldap_connector_received", - #{request => Request, connector => InstId, state => State}), - case Result = ecpool:pick_and_do( - PoolName, - {?MODULE, search, [Base, Filter, Attributes]}, - no_handover) of + ?TRACE( + "QUERY", + "ldap_connector_received", + #{request => Request, connector => InstId, state => State} + ), + case + Result = ecpool:pick_and_do( + PoolName, + {?MODULE, search, [Base, Filter, Attributes]}, + no_handover + ) + of {error, Reason} -> - ?SLOG(error, #{msg => "ldap_connector_do_request_failed", - request => Request, connector => InstId, reason => Reason}), + ?SLOG(error, #{ + msg => "ldap_connector_do_request_failed", + request => Request, + connector => InstId, + reason => Reason + }), emqx_resource:query_failed(AfterQuery); _ -> emqx_resource:query_success(AfterQuery) @@ -107,38 +138,45 @@ reconn_interval(true) -> 15; reconn_interval(false) -> false. search(Conn, Base, Filter, Attributes) -> - eldap2:search(Conn, [{base, Base}, - {filter, Filter}, - {attributes, Attributes}, - {deref, eldap2:'derefFindingBaseObj'()}]). + eldap2:search(Conn, [ + {base, Base}, + {filter, Filter}, + {attributes, Attributes}, + {deref, eldap2:'derefFindingBaseObj'()} + ]). 
%% =================================================================== connect(Opts) -> - Servers = proplists:get_value(servers, Opts, ["localhost"]), - Port = proplists:get_value(port, Opts, 389), - Timeout = proplists:get_value(timeout, Opts, 30), - BindDn = proplists:get_value(bind_dn, Opts), + Servers = proplists:get_value(servers, Opts, ["localhost"]), + Port = proplists:get_value(port, Opts, 389), + Timeout = proplists:get_value(timeout, Opts, 30), + BindDn = proplists:get_value(bind_dn, Opts), BindPassword = proplists:get_value(bind_password, Opts), - SslOpts = case proplists:get_value(ssl, Opts, false) of - true -> - [{sslopts, proplists:get_value(sslopts, Opts, [])}, {ssl, true}]; - false -> - [{ssl, false}] - end, - LdapOpts = [{port, Port}, - {timeout, Timeout}] ++ SslOpts, + SslOpts = + case proplists:get_value(ssl, Opts, false) of + true -> + [{sslopts, proplists:get_value(sslopts, Opts, [])}, {ssl, true}]; + false -> + [{ssl, false}] + end, + LdapOpts = + [ + {port, Port}, + {timeout, Timeout} + ] ++ SslOpts, {ok, LDAP} = eldap2:open(Servers, LdapOpts), ok = eldap2:simple_bind(LDAP, BindDn, BindPassword), {ok, LDAP}. ldap_fields() -> - [ {servers, fun servers/1} - , {port, fun port/1} - , {pool_size, fun emqx_connector_schema_lib:pool_size/1} - , {bind_dn, fun bind_dn/1} - , {bind_password, fun emqx_connector_schema_lib:password/1} - , {timeout, fun duration/1} - , {auto_reconnect, fun emqx_connector_schema_lib:auto_reconnect/1} + [ + {servers, fun servers/1}, + {port, fun port/1}, + {pool_size, fun emqx_connector_schema_lib:pool_size/1}, + {bind_dn, fun bind_dn/1}, + {bind_password, fun emqx_connector_schema_lib:password/1}, + {timeout, fun duration/1}, + {auto_reconnect, fun emqx_connector_schema_lib:auto_reconnect/1} ]. servers(type) -> list(); @@ -159,14 +197,18 @@ duration(type) -> emqx_schema:duration_ms(); duration(_) -> undefined. 
to_servers_raw(Servers) -> - {ok, lists:map( fun(Server) -> - case string:tokens(Server, ": ") of - [Ip] -> - [{host, Ip}]; - [Ip, Port] -> - [{host, Ip}, {port, list_to_integer(Port)}] - end - end, string:tokens(str(Servers), ", "))}. + {ok, + lists:map( + fun(Server) -> + case string:tokens(Server, ": ") of + [Ip] -> + [{host, Ip}]; + [Ip, Port] -> + [{host, Ip}, {port, list_to_integer(Port)}] + end + end, + string:tokens(str(Servers), ", ") + )}. str(A) when is_atom(A) -> atom_to_list(A); diff --git a/apps/emqx_connector/src/emqx_connector_mongo.erl b/apps/emqx_connector/src/emqx_connector_mongo.erl index d81aa04a4..170fa096b 100644 --- a/apps/emqx_connector/src/emqx_connector_mongo.erl +++ b/apps/emqx_connector/src/emqx_connector_mongo.erl @@ -24,11 +24,12 @@ -behaviour(emqx_resource). %% callbacks of behaviour emqx_resource --export([ on_start/2 - , on_stop/2 - , on_query/4 - , on_health_check/2 - ]). +-export([ + on_start/2, + on_stop/2, + on_query/4, + on_health_check/2 +]). %% ecpool callback -export([connect/1]). @@ -40,57 +41,73 @@ -define(HEALTH_CHECK_TIMEOUT, 10000). %% mongo servers don't need parse --define( MONGO_HOST_OPTIONS - , #{ host_type => hostname - , default_port => ?MONGO_DEFAULT_PORT}). +-define(MONGO_HOST_OPTIONS, #{ + host_type => hostname, + default_port => ?MONGO_DEFAULT_PORT +}). %%===================================================================== roots() -> - [ {config, #{type => hoconsc:union( - [ hoconsc:ref(?MODULE, single) - , hoconsc:ref(?MODULE, rs) - , hoconsc:ref(?MODULE, sharded) - ])}} + [ + {config, #{ + type => hoconsc:union( + [ + hoconsc:ref(?MODULE, single), + hoconsc:ref(?MODULE, rs), + hoconsc:ref(?MODULE, sharded) + ] + ) + }} ]. 
fields(single) -> - [ {mongo_type, #{type => single, - default => single, - required => true, - desc => ?DESC("single_mongo_type")}} - , {server, fun server/1} - , {w_mode, fun w_mode/1} + [ + {mongo_type, #{ + type => single, + default => single, + required => true, + desc => ?DESC("single_mongo_type") + }}, + {server, fun server/1}, + {w_mode, fun w_mode/1} ] ++ mongo_fields(); fields(rs) -> - [ {mongo_type, #{type => rs, - default => rs, - required => true, - desc => ?DESC("rs_mongo_type")}} - , {servers, fun servers/1} - , {w_mode, fun w_mode/1} - , {r_mode, fun r_mode/1} - , {replica_set_name, fun replica_set_name/1} + [ + {mongo_type, #{ + type => rs, + default => rs, + required => true, + desc => ?DESC("rs_mongo_type") + }}, + {servers, fun servers/1}, + {w_mode, fun w_mode/1}, + {r_mode, fun r_mode/1}, + {replica_set_name, fun replica_set_name/1} ] ++ mongo_fields(); fields(sharded) -> - [ {mongo_type, #{type => sharded, - default => sharded, - required => true, - desc => ?DESC("sharded_mongo_type")}} - , {servers, fun servers/1} - , {w_mode, fun w_mode/1} + [ + {mongo_type, #{ + type => sharded, + default => sharded, + required => true, + desc => ?DESC("sharded_mongo_type") + }}, + {servers, fun servers/1}, + {w_mode, fun w_mode/1} ] ++ mongo_fields(); fields(topology) -> - [ {pool_size, fun emqx_connector_schema_lib:pool_size/1} - , {max_overflow, fun max_overflow/1} - , {overflow_ttl, fun duration/1} - , {overflow_check_period, fun duration/1} - , {local_threshold_ms, fun duration/1} - , {connect_timeout_ms, fun duration/1} - , {socket_timeout_ms, fun duration/1} - , {server_selection_timeout_ms, fun duration/1} - , {wait_queue_timeout_ms, fun duration/1} - , {heartbeat_frequency_ms, fun duration/1} - , {min_heartbeat_frequency_ms, fun duration/1} + [ + {pool_size, fun emqx_connector_schema_lib:pool_size/1}, + {max_overflow, fun max_overflow/1}, + {overflow_ttl, fun duration/1}, + {overflow_check_period, fun duration/1}, + {local_threshold_ms, fun 
duration/1}, + {connect_timeout_ms, fun duration/1}, + {socket_timeout_ms, fun duration/1}, + {server_selection_timeout_ms, fun duration/1}, + {wait_queue_timeout_ms, fun duration/1}, + {heartbeat_frequency_ms, fun duration/1}, + {min_heartbeat_frequency_ms, fun duration/1} ]. desc(single) -> @@ -105,69 +122,96 @@ desc(_) -> undefined. mongo_fields() -> - [ {srv_record, fun srv_record/1} - , {pool_size, fun emqx_connector_schema_lib:pool_size/1} - , {username, fun emqx_connector_schema_lib:username/1} - , {password, fun emqx_connector_schema_lib:password/1} - , {auth_source, #{ type => binary() - , required => false - , desc => ?DESC("auth_source") - }} - , {database, fun emqx_connector_schema_lib:database/1} - , {topology, #{type => hoconsc:ref(?MODULE, topology), required => false}} + [ + {srv_record, fun srv_record/1}, + {pool_size, fun emqx_connector_schema_lib:pool_size/1}, + {username, fun emqx_connector_schema_lib:username/1}, + {password, fun emqx_connector_schema_lib:password/1}, + {auth_source, #{ + type => binary(), + required => false, + desc => ?DESC("auth_source") + }}, + {database, fun emqx_connector_schema_lib:database/1}, + {topology, #{type => hoconsc:ref(?MODULE, topology), required => false}} ] ++ - emqx_connector_schema_lib:ssl_fields(). + emqx_connector_schema_lib:ssl_fields(). 
%% =================================================================== -on_start(InstId, Config = #{mongo_type := Type, - pool_size := PoolSize, - ssl := SSL}) -> - Msg = case Type of - single -> "starting_mongodb_single_connector"; - rs -> "starting_mongodb_replica_set_connector"; - sharded -> "starting_mongodb_sharded_connector" - end, +on_start( + InstId, + Config = #{ + mongo_type := Type, + pool_size := PoolSize, + ssl := SSL + } +) -> + Msg = + case Type of + single -> "starting_mongodb_single_connector"; + rs -> "starting_mongodb_replica_set_connector"; + sharded -> "starting_mongodb_sharded_connector" + end, ?SLOG(info, #{msg => Msg, connector => InstId, config => Config}), NConfig = #{hosts := Hosts} = may_parse_srv_and_txt_records(Config), - SslOpts = case maps:get(enable, SSL) of - true -> - [{ssl, true}, - {ssl_opts, emqx_tls_lib:to_client_opts(SSL)} - ]; - false -> [{ssl, false}] - end, + SslOpts = + case maps:get(enable, SSL) of + true -> + [ + {ssl, true}, + {ssl_opts, emqx_tls_lib:to_client_opts(SSL)} + ]; + false -> + [{ssl, false}] + end, Topology = maps:get(topology, NConfig, #{}), - Opts = [{mongo_type, init_type(NConfig)}, - {hosts, Hosts}, - {pool_size, PoolSize}, - {options, init_topology_options(maps:to_list(Topology), [])}, - {worker_options, init_worker_options(maps:to_list(NConfig), SslOpts)}], + Opts = [ + {mongo_type, init_type(NConfig)}, + {hosts, Hosts}, + {pool_size, PoolSize}, + {options, init_topology_options(maps:to_list(Topology), [])}, + {worker_options, init_worker_options(maps:to_list(NConfig), SslOpts)} + ], PoolName = emqx_plugin_libs_pool:pool_name(InstId), case emqx_plugin_libs_pool:start_pool(PoolName, ?MODULE, Opts) of - ok -> {ok, #{poolname => PoolName, type => Type}}; + ok -> {ok, #{poolname => PoolName, type => Type}}; {error, Reason} -> {error, Reason} end. 
on_stop(InstId, #{poolname := PoolName}) -> - ?SLOG(info, #{msg => "stopping_mongodb_connector", - connector => InstId}), + ?SLOG(info, #{ + msg => "stopping_mongodb_connector", + connector => InstId + }), emqx_plugin_libs_pool:stop_pool(PoolName). -on_query(InstId, - {Action, Collection, Selector, Projector}, - AfterQuery, - #{poolname := PoolName} = State) -> +on_query( + InstId, + {Action, Collection, Selector, Projector}, + AfterQuery, + #{poolname := PoolName} = State +) -> Request = {Action, Collection, Selector, Projector}, - ?TRACE("QUERY", "mongodb_connector_received", - #{request => Request, connector => InstId, state => State}), - case ecpool:pick_and_do(PoolName, - {?MODULE, mongo_query, [Action, Collection, Selector, Projector]}, - no_handover) of + ?TRACE( + "QUERY", + "mongodb_connector_received", + #{request => Request, connector => InstId, state => State} + ), + case + ecpool:pick_and_do( + PoolName, + {?MODULE, mongo_query, [Action, Collection, Selector, Projector]}, + no_handover + ) + of {error, Reason} -> - ?SLOG(error, #{msg => "mongodb_connector_do_query_failed", - request => Request, reason => Reason, - connector => InstId}), + ?SLOG(error, #{ + msg => "mongodb_connector_do_query_failed", + request => Request, + reason => Reason, + connector => InstId + }), emqx_resource:query_failed(AfterQuery), {error, Reason}; {ok, Cursor} when is_pid(Cursor) -> @@ -182,12 +226,16 @@ on_query(InstId, on_health_check(InstId, #{poolname := PoolName} = State) -> case health_check(PoolName) of true -> - ?tp(debug, emqx_connector_mongo_health_check, #{instance_id => InstId, - status => ok}), + ?tp(debug, emqx_connector_mongo_health_check, #{ + instance_id => InstId, + status => ok + }), {ok, State}; false -> - ?tp(warning, emqx_connector_mongo_health_check, #{instance_id => InstId, - status => failed}), + ?tp(warning, emqx_connector_mongo_health_check, #{ + instance_id => InstId, + status => failed + }), {error, health_check_failed, State} end. 
@@ -204,36 +252,43 @@ check_worker_health(Worker) -> %% we don't care if this returns something or not, we just to test the connection try do_test_query(Conn) of {error, Reason} -> - ?SLOG(warning, #{msg => "mongo_connection_health_check_error", - worker => Worker, - reason => Reason}), + ?SLOG(warning, #{ + msg => "mongo_connection_health_check_error", + worker => Worker, + reason => Reason + }), false; _ -> true catch Class:Error -> - ?SLOG(warning, #{msg => "mongo_connection_health_check_exception", - worker => Worker, - class => Class, - error => Error}), + ?SLOG(warning, #{ + msg => "mongo_connection_health_check_exception", + worker => Worker, + class => Class, + error => Error + }), false end; _ -> - ?SLOG(warning, #{msg => "mongo_connection_health_check_error", - worker => Worker, - reason => worker_not_found}), + ?SLOG(warning, #{ + msg => "mongo_connection_health_check_error", + worker => Worker, + reason => worker_not_found + }), false end. do_test_query(Conn) -> mongoc:transaction_query( - Conn, - fun(Conf = #{pool := Worker}) -> - Query = mongoc:find_one_query(Conf, <<"foo">>, #{}, #{}, 0), - mc_worker_api:find_one(Worker, Query) - end, - #{}, - ?HEALTH_CHECK_TIMEOUT). + Conn, + fun(Conf = #{pool := Worker}) -> + Query = mongoc:find_one_query(Conf, <<"foo">>, #{}, #{}, 0), + mc_worker_api:find_one(Worker, Query) + end, + #{}, + ?HEALTH_CHECK_TIMEOUT + ). connect(Opts) -> Type = proplists:get_value(mongo_type, Opts, single), @@ -244,10 +299,8 @@ connect(Opts) -> mongo_query(Conn, find, Collection, Selector, Projector) -> mongo_api:find(Conn, Collection, Selector, Projector); - mongo_query(Conn, find_one, Collection, Selector, Projector) -> mongo_api:find_one(Conn, Collection, Selector, Projector); - %% Todo xxx mongo_query(_Conn, _Action, _Collection, _Selector, _Projector) -> ok. 
@@ -298,7 +351,8 @@ init_worker_options([{r_mode, V} | R], Acc) -> init_worker_options(R, [{r_mode, V} | Acc]); init_worker_options([_ | R], Acc) -> init_worker_options(R, Acc); -init_worker_options([], Acc) -> Acc. +init_worker_options([], Acc) -> + Acc. %% =================================================================== %% Schema funcs @@ -356,59 +410,76 @@ may_parse_srv_and_txt_records(#{server := Server} = Config) -> may_parse_srv_and_txt_records(Config) -> may_parse_srv_and_txt_records_(Config). -may_parse_srv_and_txt_records_(#{mongo_type := Type, - srv_record := false, - servers := Servers} = Config) -> +may_parse_srv_and_txt_records_( + #{ + mongo_type := Type, + srv_record := false, + servers := Servers + } = Config +) -> case Type =:= rs andalso maps:is_key(replica_set_name, Config) =:= false of true -> error({missing_parameter, replica_set_name}); false -> Config#{hosts => servers_to_bin(Servers)} end; -may_parse_srv_and_txt_records_(#{mongo_type := Type, - srv_record := true, - servers := Servers} = Config) -> +may_parse_srv_and_txt_records_( + #{ + mongo_type := Type, + srv_record := true, + servers := Servers + } = Config +) -> Hosts = parse_srv_records(Type, Servers), ExtraOpts = parse_txt_records(Type, Servers), maps:merge(Config#{hosts => Hosts}, ExtraOpts). parse_srv_records(Type, Servers) -> Fun = fun(AccIn, {IpOrHost, _Port}) -> - case inet_res:lookup("_mongodb._tcp." - ++ ip_or_host_to_string(IpOrHost), in, srv) of - [] -> - error(service_not_found); - Services -> - [ [server_to_bin({Host, Port}) || {_, _, Port, Host} <- Services] - | AccIn] - end - end, + case + inet_res:lookup( + "_mongodb._tcp." ++ + ip_or_host_to_string(IpOrHost), + in, + srv + ) + of + [] -> + error(service_not_found); + Services -> + [ + [server_to_bin({Host, Port}) || {_, _, Port, Host} <- Services] + | AccIn + ] + end + end, Res = lists:foldl(Fun, [], Servers), case Type of single -> lists:nth(1, Res); - _ -> Res + _ -> Res end. 
parse_txt_records(Type, Servers) -> - Fields = case Type of - rs -> ["authSource", "replicaSet"]; - _ -> ["authSource"] - end, + Fields = + case Type of + rs -> ["authSource", "replicaSet"]; + _ -> ["authSource"] + end, Fun = fun(AccIn, {IpOrHost, _Port}) -> - case inet_res:lookup(IpOrHost, in, txt) of - [] -> - #{}; - [[QueryString]] -> - case uri_string:dissect_query(QueryString) of - {error, _, _} -> - error({invalid_txt_record, invalid_query_string}); - Options -> - maps:merge(AccIn, take_and_convert(Fields, Options)) - end; - _ -> - error({invalid_txt_record, multiple_records}) - end - end, + case inet_res:lookup(IpOrHost, in, txt) of + [] -> + #{}; + [[QueryString]] -> + case uri_string:dissect_query(QueryString) of + {error, _, _} -> + error({invalid_txt_record, invalid_query_string}); + Options -> + maps:merge(AccIn, take_and_convert(Fields, Options)) + end; + _ -> + error({invalid_txt_record, multiple_records}) + end + end, lists:foldl(Fun, #{}, Servers). take_and_convert(Fields, Options) -> @@ -430,8 +501,8 @@ take_and_convert([Field | More], Options, Acc) -> take_and_convert(More, Options, Acc) end. --spec ip_or_host_to_string(binary() | string() | tuple()) - -> string(). +-spec ip_or_host_to_string(binary() | string() | tuple()) -> + string(). ip_or_host_to_string(Ip) when is_tuple(Ip) -> inet:ntoa(Ip); ip_or_host_to_string(Host) -> @@ -448,18 +519,20 @@ server_to_bin({IpOrHost, Port}) -> %% =================================================================== %% typereflt funcs --spec to_server_raw(string()) - -> {string(), pos_integer()}. +-spec to_server_raw(string()) -> + {string(), pos_integer()}. to_server_raw(Server) -> emqx_connector_schema_lib:parse_server(Server, ?MONGO_HOST_OPTIONS). --spec to_servers_raw(string()) - -> [{string(), pos_integer()}]. +-spec to_servers_raw(string()) -> + [{string(), pos_integer()}]. 
to_servers_raw(Servers) -> - lists:map( fun(Server) -> - emqx_connector_schema_lib:parse_server(Server, ?MONGO_HOST_OPTIONS) - end - , string:tokens(str(Servers), ", ")). + lists:map( + fun(Server) -> + emqx_connector_schema_lib:parse_server(Server, ?MONGO_HOST_OPTIONS) + end, + string:tokens(str(Servers), ", ") + ). str(A) when is_atom(A) -> atom_to_list(A); diff --git a/apps/emqx_connector/src/emqx_connector_mqtt.erl b/apps/emqx_connector/src/emqx_connector_mqtt.erl index d59cdf239..5f228027f 100644 --- a/apps/emqx_connector/src/emqx_connector_mqtt.erl +++ b/apps/emqx_connector/src/emqx_connector_mqtt.erl @@ -23,28 +23,32 @@ -behaviour(emqx_resource). %% API and callbacks for supervisor --export([ start_link/0 - , init/1 - , create_bridge/1 - , drop_bridge/1 - , bridges/0 - ]). +-export([ + start_link/0, + init/1, + create_bridge/1, + drop_bridge/1, + bridges/0 +]). -export([on_message_received/3]). %% callbacks of behaviour emqx_resource --export([ on_start/2 - , on_stop/2 - , on_query/4 - , on_health_check/2 - ]). +-export([ + on_start/2, + on_stop/2, + on_query/4, + on_health_check/2 +]). -behaviour(hocon_schema). -import(hoconsc, [mk/2]). --export([ roots/0 - , fields/1]). +-export([ + roots/0, + fields/1 +]). 
%%===================================================================== %% Hocon schema @@ -53,25 +57,34 @@ roots() -> fields("config") -> emqx_connector_mqtt_schema:fields("config"); - fields("get") -> - [ {num_of_bridges, mk(integer(), - #{ desc => ?DESC("num_of_bridges") - })} + [ + {num_of_bridges, + mk( + integer(), + #{desc => ?DESC("num_of_bridges")} + )} ] ++ fields("post"); - fields("put") -> emqx_connector_mqtt_schema:fields("connector"); - fields("post") -> - [ {type, mk(mqtt, - #{ required => true - , desc => ?DESC("type") - })} - , {name, mk(binary(), - #{ required => true - , desc => ?DESC("name") - })} + [ + {type, + mk( + mqtt, + #{ + required => true, + desc => ?DESC("type") + } + )}, + {name, + mk( + binary(), + #{ + required => true, + desc => ?DESC("name") + } + )} ] ++ fields("put"). %% =================================================================== @@ -80,23 +93,29 @@ start_link() -> supervisor:start_link({local, ?MODULE}, ?MODULE, []). init([]) -> - SupFlag = #{strategy => one_for_one, - intensity => 100, - period => 10}, + SupFlag = #{ + strategy => one_for_one, + intensity => 100, + period => 10 + }, {ok, {SupFlag, []}}. bridge_spec(Config) -> - #{id => maps:get(name, Config), - start => {emqx_connector_mqtt_worker, start_link, [Config]}, - restart => permanent, - shutdown => 5000, - type => worker, - modules => [emqx_connector_mqtt_worker]}. + #{ + id => maps:get(name, Config), + start => {emqx_connector_mqtt_worker, start_link, [Config]}, + restart => permanent, + shutdown => 5000, + type => worker, + modules => [emqx_connector_mqtt_worker] + }. --spec(bridges() -> [{node(), map()}]). +-spec bridges() -> [{node(), map()}]. bridges() -> - [{Name, emqx_connector_mqtt_worker:status(Name)} - || {Name, _Pid, _, _} <- supervisor:which_children(?MODULE)]. + [ + {Name, emqx_connector_mqtt_worker:status(Name)} + || {Name, _Pid, _, _} <- supervisor:which_children(?MODULE) + ]. 
create_bridge(Config) -> supervisor:start_child(?MODULE, bridge_spec(Config)). @@ -121,8 +140,11 @@ on_message_received(Msg, HookPoint, InstId) -> %% =================================================================== on_start(InstId, Conf) -> InstanceId = binary_to_atom(InstId, utf8), - ?SLOG(info, #{msg => "starting_mqtt_connector", - connector => InstanceId, config => Conf}), + ?SLOG(info, #{ + msg => "starting_mqtt_connector", + connector => InstanceId, + config => Conf + }), BasicConf = basic_config(Conf), BridgeConf = BasicConf#{ name => InstanceId, @@ -142,19 +164,25 @@ on_start(InstId, Conf) -> end. on_stop(_InstId, #{name := InstanceId}) -> - ?SLOG(info, #{msg => "stopping_mqtt_connector", - connector => InstanceId}), + ?SLOG(info, #{ + msg => "stopping_mqtt_connector", + connector => InstanceId + }), case ?MODULE:drop_bridge(InstanceId) of - ok -> ok; - {error, not_found} -> ok; + ok -> + ok; + {error, not_found} -> + ok; {error, Reason} -> - ?SLOG(error, #{msg => "stop_mqtt_connector", - connector => InstanceId, reason => Reason}) + ?SLOG(error, #{ + msg => "stop_mqtt_connector", + connector => InstanceId, + reason => Reason + }) end. on_query(_InstId, {message_received, _Msg}, AfterQuery, _State) -> emqx_resource:query_success(AfterQuery); - on_query(_InstId, {send_message, Msg}, AfterQuery, #{name := InstanceId}) -> ?TRACE("QUERY", "send_msg_to_remote_node", #{message => Msg, connector => InstanceId}), emqx_connector_mqtt_worker:send_to_remote(InstanceId, Msg), @@ -178,7 +206,8 @@ make_sub_confs(undefined, _) -> undefined; make_sub_confs(SubRemoteConf, InstId) -> case maps:take(hookpoint, SubRemoteConf) of - error -> SubRemoteConf; + error -> + SubRemoteConf; {HookPoint, SubConf} -> MFA = {?MODULE, on_message_received, [HookPoint, InstId]}, SubConf#{on_message_received => MFA} @@ -192,22 +221,24 @@ make_forward_confs(FrowardConf) -> FrowardConf. 
basic_config(#{ - server := Server, - reconnect_interval := ReconnIntv, - proto_ver := ProtoVer, - username := User, - password := Password, - clean_start := CleanStart, - keepalive := KeepAlive, - retry_interval := RetryIntv, - max_inflight := MaxInflight, - replayq := ReplayQ, - ssl := #{enable := EnableSsl} = Ssl}) -> + server := Server, + reconnect_interval := ReconnIntv, + proto_ver := ProtoVer, + username := User, + password := Password, + clean_start := CleanStart, + keepalive := KeepAlive, + retry_interval := RetryIntv, + max_inflight := MaxInflight, + replayq := ReplayQ, + ssl := #{enable := EnableSsl} = Ssl +}) -> #{ replayq => ReplayQ, %% connection opts server => Server, - connect_timeout => 30, %% 30s + %% 30s + connect_timeout => 30, reconnect_interval => ReconnIntv, proto_ver => ProtoVer, bridge_mode => true, diff --git a/apps/emqx_connector/src/emqx_connector_mysql.erl b/apps/emqx_connector/src/emqx_connector_mysql.erl index 7a5ff9130..5e42a2ee2 100644 --- a/apps/emqx_connector/src/emqx_connector_mysql.erl +++ b/apps/emqx_connector/src/emqx_connector_mysql.erl @@ -23,11 +23,12 @@ -behaviour(emqx_resource). %% callbacks of behaviour emqx_resource --export([ on_start/2 - , on_stop/2 - , on_query/4 - , on_health_check/2 - ]). +-export([ + on_start/2, + on_stop/2, + on_query/4, + on_health_check/2 +]). %% ecpool connect & reconnect -export([connect/1, prepare_sql_to_conn/2]). @@ -38,9 +39,10 @@ -export([do_health_check/1]). --define( MYSQL_HOST_OPTIONS - , #{ host_type => inet_addr - , default_port => ?MYSQL_DEFAULT_PORT}). +-define(MYSQL_HOST_OPTIONS, #{ + host_type => inet_addr, + default_port => ?MYSQL_DEFAULT_PORT +}). %%===================================================================== %% Hocon schema @@ -48,11 +50,10 @@ roots() -> [{config, #{type => hoconsc:ref(?MODULE, config)}}]. 
fields(config) -> - [ {server, fun server/1} - ] ++ - emqx_connector_schema_lib:relational_db_fields() ++ - emqx_connector_schema_lib:ssl_fields() ++ - emqx_connector_schema_lib:prepare_statement_fields(). + [{server, fun server/1}] ++ + emqx_connector_schema_lib:relational_db_fields() ++ + emqx_connector_schema_lib:ssl_fields() ++ + emqx_connector_schema_lib:prepare_statement_fields(). server(type) -> emqx_schema:ip_port(); server(required) -> true; @@ -62,47 +63,64 @@ server(desc) -> ?DESC("server"); server(_) -> undefined. %% =================================================================== -on_start(InstId, #{server := {Host, Port}, - database := DB, - username := User, - password := Password, - auto_reconnect := AutoReconn, - pool_size := PoolSize, - ssl := SSL } = Config) -> - ?SLOG(info, #{msg => "starting_mysql_connector", - connector => InstId, config => Config}), - SslOpts = case maps:get(enable, SSL) of - true -> - [{ssl, emqx_tls_lib:to_client_opts(SSL)}]; - false -> - [] - end, - Options = [{host, Host}, - {port, Port}, - {user, User}, - {password, Password}, - {database, DB}, - {auto_reconnect, reconn_interval(AutoReconn)}, - {pool_size, PoolSize}], +on_start( + InstId, + #{ + server := {Host, Port}, + database := DB, + username := User, + password := Password, + auto_reconnect := AutoReconn, + pool_size := PoolSize, + ssl := SSL + } = Config +) -> + ?SLOG(info, #{ + msg => "starting_mysql_connector", + connector => InstId, + config => Config + }), + SslOpts = + case maps:get(enable, SSL) of + true -> + [{ssl, emqx_tls_lib:to_client_opts(SSL)}]; + false -> + [] + end, + Options = [ + {host, Host}, + {port, Port}, + {user, User}, + {password, Password}, + {database, DB}, + {auto_reconnect, reconn_interval(AutoReconn)}, + {pool_size, PoolSize} + ], PoolName = emqx_plugin_libs_pool:pool_name(InstId), Prepares = maps:get(prepare_statement, Config, #{}), State = init_prepare(#{poolname => PoolName, prepare_statement => Prepares}), case 
emqx_plugin_libs_pool:start_pool(PoolName, ?MODULE, Options ++ SslOpts) of - ok -> {ok, State}; + ok -> {ok, State}; {error, Reason} -> {error, Reason} end. on_stop(InstId, #{poolname := PoolName}) -> - ?SLOG(info, #{msg => "stopping_mysql_connector", - connector => InstId}), + ?SLOG(info, #{ + msg => "stopping_mysql_connector", + connector => InstId + }), emqx_plugin_libs_pool:stop_pool(PoolName). on_query(InstId, {Type, SQLOrKey}, AfterQuery, State) -> on_query(InstId, {Type, SQLOrKey, [], default_timeout}, AfterQuery, State); on_query(InstId, {Type, SQLOrKey, Params}, AfterQuery, State) -> on_query(InstId, {Type, SQLOrKey, Params, default_timeout}, AfterQuery, State); -on_query(InstId, {Type, SQLOrKey, Params, Timeout}, AfterQuery, - #{poolname := PoolName, prepare_statement := Prepares} = State) -> +on_query( + InstId, + {Type, SQLOrKey, Params, Timeout}, + AfterQuery, + #{poolname := PoolName, prepare_statement := Prepares} = State +) -> LogMeta = #{connector => InstId, sql => SQLOrKey, state => State}, ?TRACE("QUERY", "mysql_connector_received", LogMeta), Worker = ecpool:get_client(PoolName), @@ -111,28 +129,36 @@ on_query(InstId, {Type, SQLOrKey, Params, Timeout}, AfterQuery, Result = erlang:apply(mysql, MySqlFunction, [Conn, SQLOrKey, Params, Timeout]), case Result of {error, disconnected} -> - ?SLOG(error, - LogMeta#{msg => "mysql_connector_do_sql_query_failed", reason => disconnected}), + ?SLOG( + error, + LogMeta#{msg => "mysql_connector_do_sql_query_failed", reason => disconnected} + ), %% kill the poll worker to trigger reconnection _ = exit(Conn, restart), emqx_resource:query_failed(AfterQuery), Result; {error, not_prepared} -> - ?SLOG(warning, - LogMeta#{msg => "mysql_connector_prepare_query_failed", reason => not_prepared}), + ?SLOG( + warning, + LogMeta#{msg => "mysql_connector_prepare_query_failed", reason => not_prepared} + ), case prepare_sql(Prepares, PoolName) of ok -> %% not return result, next loop will try again on_query(InstId, {Type, 
SQLOrKey, Params, Timeout}, AfterQuery, State); {error, Reason} -> - ?SLOG(error, - LogMeta#{msg => "mysql_connector_do_prepare_failed", reason => Reason}), + ?SLOG( + error, + LogMeta#{msg => "mysql_connector_do_prepare_failed", reason => Reason} + ), emqx_resource:query_failed(AfterQuery), {error, Reason} end; {error, Reason} -> - ?SLOG(error, - LogMeta#{msg => "mysql_connector_do_sql_query_failed", reason => Reason}), + ?SLOG( + error, + LogMeta#{msg => "mysql_connector_do_sql_query_failed", reason => Reason} + ), emqx_resource:query_failed(AfterQuery), Result; _ -> @@ -147,7 +173,7 @@ on_health_check(_InstId, #{poolname := PoolName} = State) -> case emqx_plugin_libs_pool:health_check(PoolName, fun ?MODULE:do_health_check/1, State) of {ok, State} -> case do_health_check_prepares(State) of - ok-> + ok -> {ok, State}; {ok, NState} -> {ok, NState}; @@ -161,7 +187,7 @@ on_health_check(_InstId, #{poolname := PoolName} = State) -> do_health_check(Conn) -> ok == element(1, mysql:query(Conn, <<"SELECT count(1) AS T">>)). -do_health_check_prepares(#{prepare_statement := Prepares})when is_map(Prepares) -> +do_health_check_prepares(#{prepare_statement := Prepares}) when is_map(Prepares) -> ok; do_health_check_prepares(State = #{poolname := PoolName, prepare_statement := {error, Prepares}}) -> %% retry to prepare @@ -180,8 +206,8 @@ reconn_interval(false) -> false. connect(Options) -> mysql:start_link(Options). --spec to_server(string()) - -> {inet:ip_address() | inet:hostname(), pos_integer()}. +-spec to_server(string()) -> + {inet:ip_address() | inet:hostname(), pos_integer()}. to_server(Str) -> emqx_connector_schema_lib:parse_server(Str, ?MYSQL_HOST_OPTIONS). 
@@ -215,20 +241,27 @@ prepare_sql(Prepares, PoolName) -> do_prepare_sql(Prepares, PoolName) -> Conns = - [begin - {ok, Conn} = ecpool_worker:client(Worker), - Conn - end || {_Name, Worker} <- ecpool:workers(PoolName)], + [ + begin + {ok, Conn} = ecpool_worker:client(Worker), + Conn + end + || {_Name, Worker} <- ecpool:workers(PoolName) + ], prepare_sql_to_conn_list(Conns, Prepares). -prepare_sql_to_conn_list([], _PrepareList) -> ok; +prepare_sql_to_conn_list([], _PrepareList) -> + ok; prepare_sql_to_conn_list([Conn | ConnList], PrepareList) -> case prepare_sql_to_conn(Conn, PrepareList) of ok -> prepare_sql_to_conn_list(ConnList, PrepareList); {error, R} -> %% rollback - Fun = fun({Key, _}) -> _ = unprepare_sql_to_conn(Conn, Key), ok end, + Fun = fun({Key, _}) -> + _ = unprepare_sql_to_conn(Conn, Key), + ok + end, lists:foreach(Fun, PrepareList), {error, R} end. diff --git a/apps/emqx_connector/src/emqx_connector_pgsql.erl b/apps/emqx_connector/src/emqx_connector_pgsql.erl index d7b6c1b14..00ec31849 100644 --- a/apps/emqx_connector/src/emqx_connector_pgsql.erl +++ b/apps/emqx_connector/src/emqx_connector_pgsql.erl @@ -26,24 +26,26 @@ -behaviour(emqx_resource). %% callbacks of behaviour emqx_resource --export([ on_start/2 - , on_stop/2 - , on_query/4 - , on_health_check/2 - ]). +-export([ + on_start/2, + on_stop/2, + on_query/4, + on_health_check/2 +]). -export([connect/1]). --export([ query/3 - , prepared_query/3 - ]). +-export([ + query/3, + prepared_query/3 +]). -export([do_health_check/1]). --define( PGSQL_HOST_OPTIONS - , #{ host_type => inet_addr - , default_port => ?PGSQL_DEFAULT_PORT}). - +-define(PGSQL_HOST_OPTIONS, #{ + host_type => inet_addr, + default_port => ?PGSQL_DEFAULT_PORT +}). 
%%===================================================================== @@ -52,9 +54,9 @@ roots() -> fields(config) -> [{server, fun server/1}] ++ - emqx_connector_schema_lib:relational_db_fields() ++ - emqx_connector_schema_lib:ssl_fields() ++ - emqx_connector_schema_lib:prepare_statement_fields(). + emqx_connector_schema_lib:relational_db_fields() ++ + emqx_connector_schema_lib:ssl_fields() ++ + emqx_connector_schema_lib:prepare_statement_fields(). server(type) -> emqx_schema:ip_port(); server(required) -> true; @@ -64,52 +66,73 @@ server(desc) -> ?DESC("server"); server(_) -> undefined. %% =================================================================== -on_start(InstId, #{server := {Host, Port}, - database := DB, - username := User, - password := Password, - auto_reconnect := AutoReconn, - pool_size := PoolSize, - ssl := SSL} = Config) -> - ?SLOG(info, #{msg => "starting_postgresql_connector", - connector => InstId, config => Config}), - SslOpts = case maps:get(enable, SSL) of - true -> - [{ssl, true}, - {ssl_opts, emqx_tls_lib:to_client_opts(SSL)}]; - false -> - [{ssl, false}] - end, - Options = [{host, Host}, - {port, Port}, - {username, User}, - {password, Password}, - {database, DB}, - {auto_reconnect, reconn_interval(AutoReconn)}, - {pool_size, PoolSize}, - {prepare_statement, maps:to_list(maps:get(prepare_statement, Config, #{}))}], +on_start( + InstId, + #{ + server := {Host, Port}, + database := DB, + username := User, + password := Password, + auto_reconnect := AutoReconn, + pool_size := PoolSize, + ssl := SSL + } = Config +) -> + ?SLOG(info, #{ + msg => "starting_postgresql_connector", + connector => InstId, + config => Config + }), + SslOpts = + case maps:get(enable, SSL) of + true -> + [ + {ssl, true}, + {ssl_opts, emqx_tls_lib:to_client_opts(SSL)} + ]; + false -> + [{ssl, false}] + end, + Options = [ + {host, Host}, + {port, Port}, + {username, User}, + {password, Password}, + {database, DB}, + {auto_reconnect, reconn_interval(AutoReconn)}, + 
{pool_size, PoolSize}, + {prepare_statement, maps:to_list(maps:get(prepare_statement, Config, #{}))} + ], PoolName = emqx_plugin_libs_pool:pool_name(InstId), case emqx_plugin_libs_pool:start_pool(PoolName, ?MODULE, Options ++ SslOpts) of - ok -> {ok, #{poolname => PoolName}}; + ok -> {ok, #{poolname => PoolName}}; {error, Reason} -> {error, Reason} end. on_stop(InstId, #{poolname := PoolName}) -> - ?SLOG(info, #{msg => "stopping postgresql connector", - connector => InstId}), + ?SLOG(info, #{ + msg => "stopping postgresql connector", + connector => InstId + }), emqx_plugin_libs_pool:stop_pool(PoolName). on_query(InstId, {Type, NameOrSQL}, AfterQuery, #{poolname := _PoolName} = State) -> on_query(InstId, {Type, NameOrSQL, []}, AfterQuery, State); - on_query(InstId, {Type, NameOrSQL, Params}, AfterQuery, #{poolname := PoolName} = State) -> - ?SLOG(debug, #{msg => "postgresql connector received sql query", - connector => InstId, sql => NameOrSQL, state => State}), + ?SLOG(debug, #{ + msg => "postgresql connector received sql query", + connector => InstId, + sql => NameOrSQL, + state => State + }), case Result = ecpool:pick_and_do(PoolName, {?MODULE, Type, [NameOrSQL, Params]}, no_handover) of {error, Reason} -> ?SLOG(error, #{ msg => "postgresql connector do sql query failed", - connector => InstId, sql => NameOrSQL, reason => Reason}), + connector => InstId, + sql => NameOrSQL, + reason => Reason + }), emqx_resource:query_failed(AfterQuery); _ -> emqx_resource:query_success(AfterQuery) @@ -127,7 +150,7 @@ reconn_interval(true) -> 15; reconn_interval(false) -> false. 
connect(Opts) -> - Host = proplists:get_value(host, Opts), + Host = proplists:get_value(host, Opts), Username = proplists:get_value(username, Opts), Password = proplists:get_value(password, Opts), PrepareStatement = proplists:get_value(prepare_statement, Opts), @@ -177,7 +200,7 @@ conn_opts([_Opt | Opts], Acc) -> %% =================================================================== %% typereflt funcs --spec to_server(string()) - -> {inet:ip_address() | inet:hostname(), pos_integer()}. +-spec to_server(string()) -> + {inet:ip_address() | inet:hostname(), pos_integer()}. to_server(Str) -> emqx_connector_schema_lib:parse_server(Str, ?PGSQL_HOST_OPTIONS). diff --git a/apps/emqx_connector/src/emqx_connector_redis.erl b/apps/emqx_connector/src/emqx_connector_redis.erl index 78607aac0..189d5e8c2 100644 --- a/apps/emqx_connector/src/emqx_connector_redis.erl +++ b/apps/emqx_connector/src/emqx_connector_redis.erl @@ -25,11 +25,12 @@ -behaviour(emqx_resource). %% callbacks of behaviour emqx_resource --export([ on_start/2 - , on_stop/2 - , on_query/4 - , on_health_check/2 - ]). +-export([ + on_start/2, + on_stop/2, + on_query/4, + on_health_check/2 +]). -export([do_health_check/1]). @@ -38,50 +39,59 @@ -export([cmd/3]). %% redis host don't need parse --define( REDIS_HOST_OPTIONS - , #{ host_type => hostname - , default_port => ?REDIS_DEFAULT_PORT}). - +-define(REDIS_HOST_OPTIONS, #{ + host_type => hostname, + default_port => ?REDIS_DEFAULT_PORT +}). %%===================================================================== roots() -> - [ {config, #{type => hoconsc:union( - [ hoconsc:ref(?MODULE, cluster) - , hoconsc:ref(?MODULE, single) - , hoconsc:ref(?MODULE, sentinel) - ])} - } + [ + {config, #{ + type => hoconsc:union( + [ + hoconsc:ref(?MODULE, cluster), + hoconsc:ref(?MODULE, single), + hoconsc:ref(?MODULE, sentinel) + ] + ) + }} ]. 
fields(single) -> - [ {server, fun server/1} - , {redis_type, #{type => hoconsc:enum([single]), - required => true, - desc => ?DESC("single") - }} + [ + {server, fun server/1}, + {redis_type, #{ + type => hoconsc:enum([single]), + required => true, + desc => ?DESC("single") + }} ] ++ - redis_fields() ++ - emqx_connector_schema_lib:ssl_fields(); + redis_fields() ++ + emqx_connector_schema_lib:ssl_fields(); fields(cluster) -> - [ {servers, fun servers/1} - , {redis_type, #{type => hoconsc:enum([cluster]), - required => true, - desc => ?DESC("cluster") - }} + [ + {servers, fun servers/1}, + {redis_type, #{ + type => hoconsc:enum([cluster]), + required => true, + desc => ?DESC("cluster") + }} ] ++ - redis_fields() ++ - emqx_connector_schema_lib:ssl_fields(); + redis_fields() ++ + emqx_connector_schema_lib:ssl_fields(); fields(sentinel) -> - [ {servers, fun servers/1} - , {redis_type, #{type => hoconsc:enum([sentinel]), - required => true, - desc => ?DESC("sentinel") - }} - , {sentinel, #{type => string(), desc => ?DESC("sentinel_desc") - }} + [ + {servers, fun servers/1}, + {redis_type, #{ + type => hoconsc:enum([sentinel]), + required => true, + desc => ?DESC("sentinel") + }}, + {sentinel, #{type => string(), desc => ?DESC("sentinel_desc")}} ] ++ - redis_fields() ++ - emqx_connector_schema_lib:ssl_fields(). + redis_fields() ++ + emqx_connector_schema_lib:ssl_fields(). server(type) -> emqx_schema:ip_port(); server(required) -> true; @@ -98,62 +108,89 @@ servers(desc) -> ?DESC("servers"); servers(_) -> undefined. 
%% =================================================================== -on_start(InstId, #{redis_type := Type, - database := Database, - pool_size := PoolSize, - auto_reconnect := AutoReconn, - ssl := SSL } = Config) -> - ?SLOG(info, #{msg => "starting_redis_connector", - connector => InstId, config => Config}), - Servers = case Type of - single -> [{servers, [maps:get(server, Config)]}]; - _ ->[{servers, maps:get(servers, Config)}] - end, - Opts = [{pool_size, PoolSize}, +on_start( + InstId, + #{ + redis_type := Type, + database := Database, + pool_size := PoolSize, + auto_reconnect := AutoReconn, + ssl := SSL + } = Config +) -> + ?SLOG(info, #{ + msg => "starting_redis_connector", + connector => InstId, + config => Config + }), + Servers = + case Type of + single -> [{servers, [maps:get(server, Config)]}]; + _ -> [{servers, maps:get(servers, Config)}] + end, + Opts = + [ + {pool_size, PoolSize}, {database, Database}, {password, maps:get(password, Config, "")}, {auto_reconnect, reconn_interval(AutoReconn)} - ] ++ Servers, - Options = case maps:get(enable, SSL) of - true -> - [{ssl, true}, - {ssl_options, emqx_tls_lib:to_client_opts(SSL)}]; - false -> [{ssl, false}] - end ++ [{sentinel, maps:get(sentinel, Config, undefined)}], + ] ++ Servers, + Options = + case maps:get(enable, SSL) of + true -> + [ + {ssl, true}, + {ssl_options, emqx_tls_lib:to_client_opts(SSL)} + ]; + false -> + [{ssl, false}] + end ++ [{sentinel, maps:get(sentinel, Config, undefined)}], PoolName = emqx_plugin_libs_pool:pool_name(InstId), case Type of cluster -> case eredis_cluster:start_pool(PoolName, Opts ++ [{options, Options}]) of - {ok, _} -> {ok, #{poolname => PoolName, type => Type}}; - {ok, _, _} -> {ok, #{poolname => PoolName, type => Type}}; + {ok, _} -> {ok, #{poolname => PoolName, type => Type}}; + {ok, _, _} -> {ok, #{poolname => PoolName, type => Type}}; {error, Reason} -> {error, Reason} end; _ -> - case emqx_plugin_libs_pool:start_pool(PoolName, ?MODULE, Opts ++ [{options, 
Options}]) of - ok -> {ok, #{poolname => PoolName, type => Type}}; + case + emqx_plugin_libs_pool:start_pool(PoolName, ?MODULE, Opts ++ [{options, Options}]) + of + ok -> {ok, #{poolname => PoolName, type => Type}}; {error, Reason} -> {error, Reason} end end. on_stop(InstId, #{poolname := PoolName, type := Type}) -> - ?SLOG(info, #{msg => "stopping_redis_connector", - connector => InstId}), + ?SLOG(info, #{ + msg => "stopping_redis_connector", + connector => InstId + }), case Type of cluster -> eredis_cluster:stop_pool(PoolName); _ -> emqx_plugin_libs_pool:stop_pool(PoolName) end. on_query(InstId, {cmd, Command}, AfterCommand, #{poolname := PoolName, type := Type} = State) -> - ?TRACE("QUERY", "redis_connector_received", - #{connector => InstId, sql => Command, state => State}), - Result = case Type of - cluster -> eredis_cluster:q(PoolName, Command); - _ -> ecpool:pick_and_do(PoolName, {?MODULE, cmd, [Type, Command]}, no_handover) - end, + ?TRACE( + "QUERY", + "redis_connector_received", + #{connector => InstId, sql => Command, state => State} + ), + Result = + case Type of + cluster -> eredis_cluster:q(PoolName, Command); + _ -> ecpool:pick_and_do(PoolName, {?MODULE, cmd, [Type, Command]}, no_handover) + end, case Result of {error, Reason} -> - ?SLOG(error, #{msg => "redis_connector_do_cmd_query_failed", - connector => InstId, sql => Command, reason => Reason}), + ?SLOG(error, #{ + msg => "redis_connector_do_cmd_query_failed", + connector => InstId, + sql => Command, + reason => Reason + }), emqx_resource:query_failed(AfterCommand); _ -> emqx_resource:query_success(AfterCommand) @@ -161,14 +198,19 @@ on_query(InstId, {cmd, Command}, AfterCommand, #{poolname := PoolName, type := T Result. extract_eredis_cluster_workers(PoolName) -> - lists:flatten([gen_server:call(PoolPid, get_all_workers) || - PoolPid <- eredis_cluster_monitor:get_all_pools(PoolName)]). 
+ lists:flatten([ + gen_server:call(PoolPid, get_all_workers) + || PoolPid <- eredis_cluster_monitor:get_all_pools(PoolName) + ]). eredis_cluster_workers_exist_and_are_connected(Workers) -> - length(Workers) > 0 andalso lists:all( - fun({_, Pid, _, _}) -> - eredis_cluster_pool_worker:is_connected(Pid) =:= true - end, Workers). + length(Workers) > 0 andalso + lists:all( + fun({_, Pid, _, _}) -> + eredis_cluster_pool_worker:is_connected(Pid) =:= true + end, + Workers + ). on_health_check(_InstId, #{type := cluster, poolname := PoolName} = State) -> case eredis_cluster:pool_exists(PoolName) of @@ -178,12 +220,9 @@ on_health_check(_InstId, #{type := cluster, poolname := PoolName} = State) -> true -> {ok, State}; false -> {error, health_check_failed, State} end; - false -> {error, health_check_failed, State} end; - - on_health_check(_InstId, #{poolname := PoolName} = State) -> emqx_plugin_libs_pool:health_check(PoolName, fun ?MODULE:do_health_check/1, State). @@ -206,28 +245,32 @@ connect(Opts) -> eredis:start_link(Opts). redis_fields() -> - [ {pool_size, fun emqx_connector_schema_lib:pool_size/1} - , {password, fun emqx_connector_schema_lib:password/1} - , {database, #{type => integer(), - default => 0, - required => true, - desc => ?DESC("database") - }} - , {auto_reconnect, fun emqx_connector_schema_lib:auto_reconnect/1} + [ + {pool_size, fun emqx_connector_schema_lib:pool_size/1}, + {password, fun emqx_connector_schema_lib:password/1}, + {database, #{ + type => integer(), + default => 0, + required => true, + desc => ?DESC("database") + }}, + {auto_reconnect, fun emqx_connector_schema_lib:auto_reconnect/1} ]. --spec to_server_raw(string()) - -> {string(), pos_integer()}. +-spec to_server_raw(string()) -> + {string(), pos_integer()}. to_server_raw(Server) -> emqx_connector_schema_lib:parse_server(Server, ?REDIS_HOST_OPTIONS). --spec to_servers_raw(string()) - -> [{string(), pos_integer()}]. +-spec to_servers_raw(string()) -> + [{string(), pos_integer()}]. 
to_servers_raw(Servers) -> - lists:map( fun(Server) -> - emqx_connector_schema_lib:parse_server(Server, ?REDIS_HOST_OPTIONS) - end - , string:tokens(str(Servers), ", ")). + lists:map( + fun(Server) -> + emqx_connector_schema_lib:parse_server(Server, ?REDIS_HOST_OPTIONS) + end, + string:tokens(str(Servers), ", ") + ). str(A) when is_atom(A) -> atom_to_list(A); diff --git a/apps/emqx_connector/src/emqx_connector_schema.erl b/apps/emqx_connector/src/emqx_connector_schema.erl index 2b1d026b1..27f982e74 100644 --- a/apps/emqx_connector/src/emqx_connector_schema.erl +++ b/apps/emqx_connector/src/emqx_connector_schema.erl @@ -24,10 +24,11 @@ -export([namespace/0, roots/0, fields/1, desc/1]). --export([ get_response/0 - , put_request/0 - , post_request/0 - ]). +-export([ + get_response/0, + put_request/0, + post_request/0 +]). %% the config for http bridges do not need connectors -define(CONN_TYPES, [mqtt]). @@ -55,18 +56,25 @@ namespace() -> connector. roots() -> ["connectors"]. -fields(connectors) -> fields("connectors"); +fields(connectors) -> + fields("connectors"); fields("connectors") -> - [ {mqtt, - mk(hoconsc:map(name, - hoconsc:union([ ref(emqx_connector_mqtt_schema, "connector") - ])), - #{ desc => ?DESC("mqtt") - })} + [ + {mqtt, + mk( + hoconsc:map( + name, + hoconsc:union([ref(emqx_connector_mqtt_schema, "connector")]) + ), + #{desc => ?DESC("mqtt")} + )} ]. -desc(Record) when Record =:= connectors; - Record =:= "connectors" -> ?DESC("desc_connector"); +desc(Record) when + Record =:= connectors; + Record =:= "connectors" +-> + ?DESC("desc_connector"); desc(_) -> undefined. diff --git a/apps/emqx_connector/src/emqx_connector_schema_lib.erl b/apps/emqx_connector/src/emqx_connector_schema_lib.erl index 8600253c6..86b12dcf3 100644 --- a/apps/emqx_connector/src/emqx_connector_schema_lib.erl +++ b/apps/emqx_connector/src/emqx_connector_schema_lib.erl @@ -19,32 +19,36 @@ -include_lib("typerefl/include/types.hrl"). -include_lib("hocon/include/hoconsc.hrl"). 
--export([ relational_db_fields/0 - , ssl_fields/0 - , prepare_statement_fields/0 - ]). +-export([ + relational_db_fields/0, + ssl_fields/0, + prepare_statement_fields/0 +]). --export([ ip_port_to_string/1 - , parse_server/2 - ]). +-export([ + ip_port_to_string/1, + parse_server/2 +]). --export([ pool_size/1 - , database/1 - , username/1 - , password/1 - , auto_reconnect/1 - ]). +-export([ + pool_size/1, + database/1, + username/1, + password/1, + auto_reconnect/1 +]). -type database() :: binary(). -type pool_size() :: pos_integer(). -type username() :: binary(). -type password() :: binary(). --reflect_type([ database/0 - , pool_size/0 - , username/0 - , password/0 - ]). +-reflect_type([ + database/0, + pool_size/0, + username/0, + password/0 +]). -export([roots/0, fields/1]). @@ -53,24 +57,25 @@ roots() -> []. fields(_) -> []. ssl_fields() -> - [ {ssl, #{type => hoconsc:ref(emqx_schema, "ssl_client_opts"), - default => #{<<"enable">> => false}, - desc => ?DESC("ssl") - } - } + [ + {ssl, #{ + type => hoconsc:ref(emqx_schema, "ssl_client_opts"), + default => #{<<"enable">> => false}, + desc => ?DESC("ssl") + }} ]. relational_db_fields() -> - [ {database, fun database/1} - , {pool_size, fun pool_size/1} - , {username, fun username/1} - , {password, fun password/1} - , {auto_reconnect, fun auto_reconnect/1} + [ + {database, fun database/1}, + {pool_size, fun pool_size/1}, + {username, fun username/1}, + {password, fun password/1}, + {auto_reconnect, fun auto_reconnect/1} ]. prepare_statement_fields() -> - [ {prepare_statement, fun prepare_statement/1} - ]. + [{prepare_statement, fun prepare_statement/1}]. 
prepare_statement(type) -> map(); prepare_statement(desc) -> ?DESC("prepare_statement"); @@ -113,16 +118,16 @@ parse_server(Str, #{host_type := inet_addr, default_port := DefaultPort}) -> try string:tokens(str(Str), ": ") of [Ip, Port] -> case parse_ip(Ip) of - {ok, R} -> {R, list_to_integer(Port)} + {ok, R} -> {R, list_to_integer(Port)} end; [Ip] -> case parse_ip(Ip) of - {ok, R} -> {R, DefaultPort} + {ok, R} -> {R, DefaultPort} end; _ -> ?THROW_ERROR("Bad server schema.") catch - error : Reason -> + error:Reason -> ?THROW_ERROR(Reason) end; parse_server(Str, #{host_type := hostname, default_port := DefaultPort}) -> @@ -134,7 +139,7 @@ parse_server(Str, #{host_type := hostname, default_port := DefaultPort}) -> _ -> ?THROW_ERROR("Bad server schema.") catch - error : Reason -> + error:Reason -> ?THROW_ERROR(Reason) end; parse_server(_, _) -> diff --git a/apps/emqx_connector/src/emqx_connector_ssl.erl b/apps/emqx_connector/src/emqx_connector_ssl.erl index 02d9a4070..131b6cbd8 100644 --- a/apps/emqx_connector/src/emqx_connector_ssl.erl +++ b/apps/emqx_connector/src/emqx_connector_ssl.erl @@ -1,4 +1,3 @@ - %%-------------------------------------------------------------------- %% Copyright (c) 2020-2022 EMQ Technologies Co., Ltd. All Rights Reserved. %% @@ -17,9 +16,10 @@ -module(emqx_connector_ssl). --export([ convert_certs/2 - , clear_certs/2 - ]). +-export([ + convert_certs/2, + clear_certs/2 +]). convert_certs(RltvDir, NewConfig) -> NewSSL = drop_invalid_certs(maps:get(<<"ssl">>, NewConfig, undefined)), @@ -40,7 +40,8 @@ new_ssl_config(Config, SSL) -> Config#{<<"ssl">> => SSL}. drop_invalid_certs(undefined) -> undefined; drop_invalid_certs(SSL) -> emqx_tls_lib:drop_invalid_certs(SSL). 
-map_get_oneof([], _Map, Default) -> Default; +map_get_oneof([], _Map, Default) -> + Default; map_get_oneof([Key | Keys], Map, Default) -> case maps:find(Key, Map) of error -> diff --git a/apps/emqx_connector/src/emqx_connector_sup.erl b/apps/emqx_connector/src/emqx_connector_sup.erl index 0824a68bf..29fc5b4b5 100644 --- a/apps/emqx_connector/src/emqx_connector_sup.erl +++ b/apps/emqx_connector/src/emqx_connector_sup.erl @@ -27,20 +27,24 @@ start_link() -> supervisor:start_link({local, ?SERVER}, ?MODULE, []). init([]) -> - SupFlags = #{strategy => one_for_all, - intensity => 5, - period => 20}, + SupFlags = #{ + strategy => one_for_all, + intensity => 5, + period => 20 + }, ChildSpecs = [ child_spec(emqx_connector_mqtt) ], {ok, {SupFlags, ChildSpecs}}. child_spec(Mod) -> - #{id => Mod, - start => {Mod, start_link, []}, - restart => permanent, - shutdown => 3000, - type => supervisor, - modules => [Mod]}. + #{ + id => Mod, + start => {Mod, start_link, []}, + restart => permanent, + shutdown => 3000, + type => supervisor, + modules => [Mod] + }. %% internal functions diff --git a/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_mod.erl b/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_mod.erl index 1a9c55ced..2f4f61043 100644 --- a/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_mod.erl +++ b/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_mod.erl @@ -18,21 +18,24 @@ -module(emqx_connector_mqtt_mod). --export([ start/1 - , send/2 - , stop/1 - , ping/1 - ]). +-export([ + start/1, + send/2, + stop/1, + ping/1 +]). --export([ ensure_subscribed/3 - , ensure_unsubscribed/2 - ]). +-export([ + ensure_subscribed/3, + ensure_unsubscribed/2 +]). %% callbacks for emqtt --export([ handle_puback/2 - , handle_publish/3 - , handle_disconnected/2 - ]). +-export([ + handle_puback/2, + handle_publish/3, + handle_disconnected/2 +]). -include_lib("emqx/include/logger.hrl"). -include_lib("emqx/include/emqx_mqtt.hrl"). 
@@ -69,7 +72,7 @@ start(Config) -> ok = sub_remote_topics(Pid, Subscriptions), {ok, #{client_pid => Pid, subscriptions => Subscriptions}} catch - throw : Reason -> + throw:Reason -> ok = stop(#{client_pid => Pid}), {error, error_reason(Reason, ServerStr)} end; @@ -90,13 +93,14 @@ stop(#{client_pid := Pid}) -> ping(undefined) -> pang; - ping(#{client_pid := Pid}) -> emqtt:ping(Pid). -ensure_subscribed(#{client_pid := Pid, subscriptions := Subs} = Conn, Topic, QoS) when is_pid(Pid) -> +ensure_subscribed(#{client_pid := Pid, subscriptions := Subs} = Conn, Topic, QoS) when + is_pid(Pid) +-> case emqtt:subscribe(Pid, Topic, QoS) of - {ok, _, _} -> Conn#{subscriptions => [{Topic, QoS}|Subs]}; + {ok, _, _} -> Conn#{subscriptions => [{Topic, QoS} | Subs]}; Error -> {error, Error} end; ensure_subscribed(_Conn, _Topic, _QoS) -> @@ -120,15 +124,14 @@ safe_stop(Pid, StopF, Timeout) -> try StopF() catch - _ : _ -> + _:_ -> ok end, receive {'DOWN', MRef, _, _, _} -> ok - after - Timeout -> - exit(Pid, kill) + after Timeout -> + exit(Pid, kill) end. send(Conn, Msgs) -> @@ -157,26 +160,38 @@ send(#{client_pid := ClientPid} = Conn, [Msg | Rest], PktIds) -> {error, Reason} end. -handle_puback(#{packet_id := PktId, reason_code := RC}, Parent) - when RC =:= ?RC_SUCCESS; - RC =:= ?RC_NO_MATCHING_SUBSCRIBERS -> - Parent ! {batch_ack, PktId}, ok; +handle_puback(#{packet_id := PktId, reason_code := RC}, Parent) when + RC =:= ?RC_SUCCESS; + RC =:= ?RC_NO_MATCHING_SUBSCRIBERS +-> + Parent ! {batch_ack, PktId}, + ok; handle_puback(#{packet_id := PktId, reason_code := RC}, _Parent) -> - ?SLOG(warning, #{msg => "publish_to_remote_node_falied", - packet_id => PktId, reason_code => RC}). + ?SLOG(warning, #{ + msg => "publish_to_remote_node_falied", + packet_id => PktId, + reason_code => RC + }). 
handle_publish(Msg, undefined, _Opts) -> - ?SLOG(error, #{msg => "cannot_publish_to_local_broker_as" - "_'ingress'_is_not_configured", - message => Msg}); + ?SLOG(error, #{ + msg => + "cannot_publish_to_local_broker_as" + "_'ingress'_is_not_configured", + message => Msg + }); handle_publish(#{properties := Props} = Msg0, Vars, Opts) -> Msg = format_msg_received(Msg0, Opts), - ?SLOG(debug, #{msg => "publish_to_local_broker", - message => Msg, vars => Vars}), + ?SLOG(debug, #{ + msg => "publish_to_local_broker", + message => Msg, + vars => Vars + }), case Vars of #{on_message_received := {Mod, Func, Args}} -> _ = erlang:apply(Mod, Func, [Msg | Args]); - _ -> ok + _ -> + ok end, maybe_publish_to_local_broker(Msg, Vars, Props). @@ -184,12 +199,14 @@ handle_disconnected(Reason, Parent) -> Parent ! {disconnected, self(), Reason}. make_hdlr(Parent, Vars, Opts) -> - #{puback => {fun ?MODULE:handle_puback/2, [Parent]}, - publish => {fun ?MODULE:handle_publish/3, [Vars, Opts]}, - disconnected => {fun ?MODULE:handle_disconnected/2, [Parent]} - }. + #{ + puback => {fun ?MODULE:handle_puback/2, [Parent]}, + publish => {fun ?MODULE:handle_publish/3, [Vars, Opts]}, + disconnected => {fun ?MODULE:handle_disconnected/2, [Parent]} + }. -sub_remote_topics(_ClientPid, undefined) -> ok; +sub_remote_topics(_ClientPid, undefined) -> + ok; sub_remote_topics(ClientPid, #{remote_topic := FromTopic, remote_qos := QoS}) -> case emqtt:subscribe(ClientPid, FromTopic, QoS) of {ok, _, _} -> ok; @@ -199,52 +216,82 @@ sub_remote_topics(ClientPid, #{remote_topic := FromTopic, remote_qos := QoS}) -> process_config(Config) -> maps:without([conn_type, address, receive_mountpoint, subscriptions, name], Config). 
-maybe_publish_to_local_broker(#{topic := Topic} = Msg, #{remote_topic := SubTopic} = Vars, - Props) -> +maybe_publish_to_local_broker( + #{topic := Topic} = Msg, + #{remote_topic := SubTopic} = Vars, + Props +) -> case maps:get(local_topic, Vars, undefined) of undefined -> - ok; %% local topic is not set, discard it + %% local topic is not set, discard it + ok; _ -> case emqx_topic:match(Topic, SubTopic) of true -> - _ = emqx_broker:publish(emqx_connector_mqtt_msg:to_broker_msg(Msg, Vars, Props)), + _ = emqx_broker:publish( + emqx_connector_mqtt_msg:to_broker_msg(Msg, Vars, Props) + ), ok; false -> - ?SLOG(warning, #{msg => "discard_message_as_topic_not_matched", - message => Msg, subscribed => SubTopic, got_topic => Topic}) + ?SLOG(warning, #{ + msg => "discard_message_as_topic_not_matched", + message => Msg, + subscribed => SubTopic, + got_topic => Topic + }) end end. -format_msg_received(#{dup := Dup, payload := Payload, properties := Props, - qos := QoS, retain := Retain, topic := Topic}, #{server := Server}) -> - #{ id => emqx_guid:to_hexstr(emqx_guid:gen()) - , server => Server - , payload => Payload - , topic => Topic - , qos => QoS - , dup => Dup - , retain => Retain - , pub_props => printable_maps(Props) - , message_received_at => erlang:system_time(millisecond) - }. +format_msg_received( + #{ + dup := Dup, + payload := Payload, + properties := Props, + qos := QoS, + retain := Retain, + topic := Topic + }, + #{server := Server} +) -> + #{ + id => emqx_guid:to_hexstr(emqx_guid:gen()), + server => Server, + payload => Payload, + topic => Topic, + qos => QoS, + dup => Dup, + retain => Retain, + pub_props => printable_maps(Props), + message_received_at => erlang:system_time(millisecond) + }. 
-printable_maps(undefined) -> #{}; +printable_maps(undefined) -> + #{}; printable_maps(Headers) -> maps:fold( - fun ('User-Property', V0, AccIn) when is_list(V0) -> + fun + ('User-Property', V0, AccIn) when is_list(V0) -> AccIn#{ 'User-Property' => maps:from_list(V0), - 'User-Property-Pairs' => [#{ - key => Key, - value => Value - } || {Key, Value} <- V0] + 'User-Property-Pairs' => [ + #{ + key => Key, + value => Value + } + || {Key, Value} <- V0 + ] }; - (K, V0, AccIn) -> AccIn#{K => V0} - end, #{}, Headers). + (K, V0, AccIn) -> + AccIn#{K => V0} + end, + #{}, + Headers + ). ip_port_to_server_str(Host, Port) -> - HostStr = case inet:ntoa(Host) of - {error, einval} -> Host; - IPStr -> IPStr - end, + HostStr = + case inet:ntoa(Host) of + {error, einval} -> Host; + IPStr -> IPStr + end, list_to_binary(io_lib:format("~s:~w", [HostStr, Port])). diff --git a/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_msg.erl b/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_msg.erl index e8e4580f4..8cc582512 100644 --- a/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_msg.erl +++ b/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_msg.erl @@ -16,17 +16,19 @@ -module(emqx_connector_mqtt_msg). --export([ to_binary/1 - , from_binary/1 - , make_pub_vars/2 - , to_remote_msg/2 - , to_broker_msg/3 - , estimate_size/1 - ]). +-export([ + to_binary/1, + from_binary/1, + make_pub_vars/2, + to_remote_msg/2, + to_broker_msg/3, + estimate_size/1 +]). --export([ replace_vars_in_str/2 - , replace_simple_var/2 - ]). +-export([ + replace_vars_in_str/2, + replace_simple_var/2 +]). -export_type([msg/0]). @@ -34,7 +36,6 @@ -include_lib("emqtt/include/emqtt.hrl"). - -type msg() :: emqx_types:message(). -type exp_msg() :: emqx_types:message() | #mqtt_msg{}. @@ -46,7 +47,8 @@ payload := binary() }. -make_pub_vars(_, undefined) -> undefined; +make_pub_vars(_, undefined) -> + undefined; make_pub_vars(Mountpoint, Conf) when is_map(Conf) -> Conf#{mountpoint => Mountpoint}. 
@@ -57,37 +59,56 @@ make_pub_vars(Mountpoint, Conf) when is_map(Conf) -> %% Shame that we have to know the callback module here %% would be great if we can get rid of #mqtt_msg{} record %% and use #message{} in all places. --spec to_remote_msg(msg() | map(), variables()) - -> exp_msg(). +-spec to_remote_msg(msg() | map(), variables()) -> + exp_msg(). to_remote_msg(#message{flags = Flags0} = Msg, Vars) -> Retain0 = maps:get(retain, Flags0, false), MapMsg = maps:put(retain, Retain0, emqx_rule_events:eventmsg_publish(Msg)), to_remote_msg(MapMsg, Vars); -to_remote_msg(MapMsg, #{remote_topic := TopicToken, payload := PayloadToken, - remote_qos := QoSToken, retain := RetainToken, mountpoint := Mountpoint}) when is_map(MapMsg) -> +to_remote_msg(MapMsg, #{ + remote_topic := TopicToken, + payload := PayloadToken, + remote_qos := QoSToken, + retain := RetainToken, + mountpoint := Mountpoint +}) when is_map(MapMsg) -> Topic = replace_vars_in_str(TopicToken, MapMsg), Payload = process_payload(PayloadToken, MapMsg), QoS = replace_simple_var(QoSToken, MapMsg), Retain = replace_simple_var(RetainToken, MapMsg), - #mqtt_msg{qos = QoS, - retain = Retain, - topic = topic(Mountpoint, Topic), - props = #{}, - payload = Payload}; + #mqtt_msg{ + qos = QoS, + retain = Retain, + topic = topic(Mountpoint, Topic), + props = #{}, + payload = Payload + }; to_remote_msg(#message{topic = Topic} = Msg, #{mountpoint := Mountpoint}) -> Msg#message{topic = topic(Mountpoint, Topic)}. 
%% published from remote node over a MQTT connection -to_broker_msg(#{dup := Dup} = MapMsg, - #{local_topic := TopicToken, payload := PayloadToken, - local_qos := QoSToken, retain := RetainToken, mountpoint := Mountpoint}, Props) -> +to_broker_msg( + #{dup := Dup} = MapMsg, + #{ + local_topic := TopicToken, + payload := PayloadToken, + local_qos := QoSToken, + retain := RetainToken, + mountpoint := Mountpoint + }, + Props +) -> Topic = replace_vars_in_str(TopicToken, MapMsg), Payload = process_payload(PayloadToken, MapMsg), QoS = replace_simple_var(QoSToken, MapMsg), Retain = replace_simple_var(RetainToken, MapMsg), - set_headers(Props, - emqx_message:set_flags(#{dup => Dup, retain => Retain}, - emqx_message:make(bridge, QoS, topic(Mountpoint, Topic), Payload))). + set_headers( + Props, + emqx_message:set_flags( + #{dup => Dup, retain => Retain}, + emqx_message:make(bridge, QoS, topic(Mountpoint, Topic), Payload) + ) + ). process_payload([], Msg) -> emqx_json:encode(Msg); diff --git a/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_schema.erl b/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_schema.erl index d913e1ecf..25dc4a50f 100644 --- a/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_schema.erl +++ b/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_schema.erl @@ -21,15 +21,17 @@ -behaviour(hocon_schema). --export([ namespace/0 - , roots/0 - , fields/1 - , desc/1 - ]). +-export([ + namespace/0, + roots/0, + fields/1, + desc/1 +]). --export([ ingress_desc/0 - , egress_desc/0 - ]). +-export([ + ingress_desc/0, + egress_desc/0 +]). -import(emqx_schema, [mk_duration/2]). 
@@ -40,146 +42,210 @@ roots() -> fields("config") -> fields("connector") ++ - topic_mappings(); - + topic_mappings(); fields("connector") -> - [ {mode, - sc(hoconsc:enum([cluster_shareload]), - #{ default => cluster_shareload - , desc => ?DESC("mode") - })} - , {server, - sc(emqx_schema:ip_port(), - #{ required => true - , desc => ?DESC("server") - })} - , {reconnect_interval, mk_duration( - "Reconnect interval. Delay for the MQTT bridge to retry establishing the connection " - "in case of transportation failure.", - #{default => "15s"})} - , {proto_ver, - sc(hoconsc:enum([v3, v4, v5]), - #{ default => v4 - , desc => ?DESC("proto_ver") - })} - , {username, - sc(binary(), - #{ default => "emqx" - , desc => ?DESC("username") - })} - , {password, - sc(binary(), - #{ default => "emqx" - , desc => ?DESC("password") - })} - , {clean_start, - sc(boolean(), - #{ default => true - , desc => ?DESC("clean_start") - })} - , {keepalive, mk_duration("MQTT Keepalive.", #{default => "300s"})} - , {retry_interval, mk_duration( - "Message retry interval. Delay for the MQTT bridge to retry sending the QoS1/QoS2 " - "messages in case of ACK not received.", - #{default => "15s"})} - , {max_inflight, - sc(non_neg_integer(), - #{ default => 32 - , desc => ?DESC("max_inflight") - })} - , {replayq, - sc(ref("replayq"), #{})} + [ + {mode, + sc( + hoconsc:enum([cluster_shareload]), + #{ + default => cluster_shareload, + desc => ?DESC("mode") + } + )}, + {server, + sc( + emqx_schema:ip_port(), + #{ + required => true, + desc => ?DESC("server") + } + )}, + {reconnect_interval, + mk_duration( + "Reconnect interval. 
Delay for the MQTT bridge to retry establishing the connection " + "in case of transportation failure.", + #{default => "15s"} + )}, + {proto_ver, + sc( + hoconsc:enum([v3, v4, v5]), + #{ + default => v4, + desc => ?DESC("proto_ver") + } + )}, + {username, + sc( + binary(), + #{ + default => "emqx", + desc => ?DESC("username") + } + )}, + {password, + sc( + binary(), + #{ + default => "emqx", + desc => ?DESC("password") + } + )}, + {clean_start, + sc( + boolean(), + #{ + default => true, + desc => ?DESC("clean_start") + } + )}, + {keepalive, mk_duration("MQTT Keepalive.", #{default => "300s"})}, + {retry_interval, + mk_duration( + "Message retry interval. Delay for the MQTT bridge to retry sending the QoS1/QoS2 " + "messages in case of ACK not received.", + #{default => "15s"} + )}, + {max_inflight, + sc( + non_neg_integer(), + #{ + default => 32, + desc => ?DESC("max_inflight") + } + )}, + {replayq, sc(ref("replayq"), #{})} ] ++ emqx_connector_schema_lib:ssl_fields(); - fields("ingress") -> %% the message maybe subscribed by rules, in this case 'local_topic' is not necessary - [ {remote_topic, - sc(binary(), - #{ required => true - , validator => fun emqx_schema:non_empty_string/1 - , desc => ?DESC("ingress_remote_topic") - })} - , {remote_qos, - sc(qos(), - #{ default => 1 - , desc => ?DESC("ingress_remote_qos") - })} - , {local_topic, - sc(binary(), - #{ validator => fun emqx_schema:non_empty_string/1 - , desc => ?DESC("ingress_local_topic") - })} - , {local_qos, - sc(qos(), - #{ default => <<"${qos}">> - , desc => ?DESC("ingress_local_qos") - })} - , {hookpoint, - sc(binary(), - #{ desc => ?DESC("ingress_hookpoint") - })} + [ + {remote_topic, + sc( + binary(), + #{ + required => true, + validator => fun emqx_schema:non_empty_string/1, + desc => ?DESC("ingress_remote_topic") + } + )}, + {remote_qos, + sc( + qos(), + #{ + default => 1, + desc => ?DESC("ingress_remote_qos") + } + )}, + {local_topic, + sc( + binary(), + #{ + validator => fun 
emqx_schema:non_empty_string/1, + desc => ?DESC("ingress_local_topic") + } + )}, + {local_qos, + sc( + qos(), + #{ + default => <<"${qos}">>, + desc => ?DESC("ingress_local_qos") + } + )}, + {hookpoint, + sc( + binary(), + #{desc => ?DESC("ingress_hookpoint")} + )}, - , {retain, - sc(hoconsc:union([boolean(), binary()]), - #{ default => <<"${retain}">> - , desc => ?DESC("retain") - })} + {retain, + sc( + hoconsc:union([boolean(), binary()]), + #{ + default => <<"${retain}">>, + desc => ?DESC("retain") + } + )}, - , {payload, - sc(binary(), - #{ default => <<"${payload}">> - , desc => ?DESC("payload") - })} + {payload, + sc( + binary(), + #{ + default => <<"${payload}">>, + desc => ?DESC("payload") + } + )} ]; - - fields("egress") -> %% the message maybe sent from rules, in this case 'local_topic' is not necessary - [ {local_topic, - sc(binary(), - #{ desc => ?DESC("egress_local_topic") - , validator => fun emqx_schema:non_empty_string/1 - })} - , {remote_topic, - sc(binary(), - #{ required => true - , validator => fun emqx_schema:non_empty_string/1 - , desc => ?DESC("egress_remote_topic") - })} - , {remote_qos, - sc(qos(), - #{ required => true - , desc => ?DESC("egress_remote_qos") - })} + [ + {local_topic, + sc( + binary(), + #{ + desc => ?DESC("egress_local_topic"), + validator => fun emqx_schema:non_empty_string/1 + } + )}, + {remote_topic, + sc( + binary(), + #{ + required => true, + validator => fun emqx_schema:non_empty_string/1, + desc => ?DESC("egress_remote_topic") + } + )}, + {remote_qos, + sc( + qos(), + #{ + required => true, + desc => ?DESC("egress_remote_qos") + } + )}, - , {retain, - sc(hoconsc:union([boolean(), binary()]), - #{ required => true - , desc => ?DESC("retain") - })} + {retain, + sc( + hoconsc:union([boolean(), binary()]), + #{ + required => true, + desc => ?DESC("retain") + } + )}, - , {payload, - sc(binary(), - #{ required => true - , desc => ?DESC("payload") - })} + {payload, + sc( + binary(), + #{ + required => true, + desc => 
?DESC("payload") + } + )} ]; - fields("replayq") -> - [ {dir, - sc(hoconsc:union([boolean(), string()]), - #{ desc => ?DESC("dir") - })} - , {seg_bytes, - sc(emqx_schema:bytesize(), - #{ default => "100MB" - , desc => ?DESC("seg_bytes") - })} - , {offload, - sc(boolean(), - #{ default => false - , desc => ?DESC("offload") - })} + [ + {dir, + sc( + hoconsc:union([boolean(), string()]), + #{desc => ?DESC("dir")} + )}, + {seg_bytes, + sc( + emqx_schema:bytesize(), + #{ + default => "100MB", + desc => ?DESC("seg_bytes") + } + )}, + {offload, + sc( + boolean(), + #{ + default => false, + desc => ?DESC("offload") + } + )} ]. desc("connector") -> @@ -194,34 +260,37 @@ desc(_) -> undefined. topic_mappings() -> - [ {ingress, - sc(ref("ingress"), - #{ default => #{} - })} - , {egress, - sc(ref("egress"), - #{ default => #{} - })} + [ + {ingress, + sc( + ref("ingress"), + #{default => #{}} + )}, + {egress, + sc( + ref("egress"), + #{default => #{}} + )} ]. -ingress_desc() -> " -The ingress config defines how this bridge receive messages from the remote MQTT broker, and then -send them to the local broker.
-Template with variables is allowed in 'local_topic', 'remote_qos', 'qos', 'retain', -'payload'.
-NOTE: if this bridge is used as the input of a rule (emqx rule engine), and also local_topic is -configured, then messages got from the remote broker will be sent to both the 'local_topic' and -the rule. -". +ingress_desc() -> + "\n" + "The ingress config defines how this bridge receive messages from the remote MQTT broker, and then\n" + "send them to the local broker.
\n" + "Template with variables is allowed in 'local_topic', 'remote_qos', 'qos', 'retain',\n" + "'payload'.
\n" + "NOTE: if this bridge is used as the input of a rule (emqx rule engine), and also local_topic is\n" + "configured, then messages got from the remote broker will be sent to both the 'local_topic' and\n" + "the rule.\n". -egress_desc() -> " -The egress config defines how this bridge forwards messages from the local broker to the remote -broker.
-Template with variables is allowed in 'remote_topic', 'qos', 'retain', 'payload'.
-NOTE: if this bridge is used as the output of a rule (emqx rule engine), and also local_topic -is configured, then both the data got from the rule and the MQTT messages that matches -local_topic will be forwarded. -". +egress_desc() -> + "\n" + "The egress config defines how this bridge forwards messages from the local broker to the remote\n" + "broker.
\n" + "Template with variables is allowed in 'remote_topic', 'qos', 'retain', 'payload'.
\n" + "NOTE: if this bridge is used as the output of a rule (emqx rule engine), and also local_topic\n" + "is configured, then both the data got from the rule and the MQTT messages that matches\n" + "local_topic will be forwarded.\n". qos() -> hoconsc:union([emqx_schema:qos(), binary()]). diff --git a/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_worker.erl b/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_worker.erl index 184c112ed..a434dd762 100644 --- a/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_worker.erl +++ b/apps/emqx_connector/src/mqtt/emqx_connector_mqtt_worker.erl @@ -66,43 +66,46 @@ -include_lib("emqx/include/logger.hrl"). %% APIs --export([ start_link/1 - , register_metrics/0 - , stop/1 - ]). +-export([ + start_link/1, + register_metrics/0, + stop/1 +]). %% gen_statem callbacks --export([ terminate/3 - , code_change/4 - , init/1 - , callback_mode/0 - ]). +-export([ + terminate/3, + code_change/4, + init/1, + callback_mode/0 +]). %% state functions --export([ idle/3 - , connected/3 - ]). +-export([ + idle/3, + connected/3 +]). %% management APIs --export([ ensure_started/1 - , ensure_stopped/1 - , status/1 - , ping/1 - , send_to_remote/2 - ]). +-export([ + ensure_started/1, + ensure_stopped/1, + status/1, + ping/1, + send_to_remote/2 +]). --export([ get_forwards/1 - ]). +-export([get_forwards/1]). --export([ get_subscriptions/1 - ]). +-export([get_subscriptions/1]). %% Internal -export([msg_marshaller/1]). --export_type([ config/0 - , ack_ref/0 - ]). +-export_type([ + config/0, + ack_ref/0 +]). -type id() :: atom() | string() | pid(). -type qos() :: emqx_types:qos(). @@ -113,7 +116,6 @@ -include_lib("emqx/include/logger.hrl"). -include_lib("emqx/include/emqx_mqtt.hrl"). - %% same as default in-flight limit for emqtt -define(DEFAULT_INFLIGHT_SIZE, 32). -define(DEFAULT_RECONNECT_DELAY_MS, timer:seconds(5)). @@ -188,8 +190,10 @@ callback_mode() -> [state_functions]. %% @doc Config should be a map(). 
init(#{name := Name} = ConnectOpts) -> - ?SLOG(debug, #{msg => "starting_bridge_worker", - name => Name}), + ?SLOG(debug, #{ + msg => "starting_bridge_worker", + name => Name + }), erlang:process_flag(trap_exit, true), Queue = open_replayq(Name, maps:get(replayq, ConnectOpts, #{})), State = init_state(ConnectOpts), @@ -205,31 +209,44 @@ init_state(Opts) -> Mountpoint = maps:get(forward_mountpoint, Opts, undefined), MaxInflightSize = maps:get(max_inflight, Opts, ?DEFAULT_INFLIGHT_SIZE), Name = maps:get(name, Opts, undefined), - #{start_type => StartType, - reconnect_interval => ReconnDelayMs, - mountpoint => format_mountpoint(Mountpoint), - inflight => [], - max_inflight => MaxInflightSize, - connection => undefined, - name => Name}. + #{ + start_type => StartType, + reconnect_interval => ReconnDelayMs, + mountpoint => format_mountpoint(Mountpoint), + inflight => [], + max_inflight => MaxInflightSize, + connection => undefined, + name => Name + }. open_replayq(Name, QCfg) -> Dir = maps:get(dir, QCfg, undefined), SegBytes = maps:get(seg_bytes, QCfg, ?DEFAULT_SEG_BYTES), MaxTotalSize = maps:get(max_total_size, QCfg, ?DEFAULT_MAX_TOTAL_SIZE), - QueueConfig = case Dir =:= undefined orelse Dir =:= "" of - true -> #{mem_only => true}; - false -> #{dir => filename:join([Dir, node(), Name]), - seg_bytes => SegBytes, max_total_size => MaxTotalSize} - end, - replayq:open(QueueConfig#{sizer => fun emqx_connector_mqtt_msg:estimate_size/1, - marshaller => fun ?MODULE:msg_marshaller/1}). + QueueConfig = + case Dir =:= undefined orelse Dir =:= "" of + true -> + #{mem_only => true}; + false -> + #{ + dir => filename:join([Dir, node(), Name]), + seg_bytes => SegBytes, + max_total_size => MaxTotalSize + } + end, + replayq:open(QueueConfig#{ + sizer => fun emqx_connector_mqtt_msg:estimate_size/1, + marshaller => fun ?MODULE:msg_marshaller/1 + }). 
pre_process_opts(#{subscriptions := InConf, forwards := OutConf} = ConnectOpts) -> - ConnectOpts#{subscriptions => pre_process_in_out(in, InConf), - forwards => pre_process_in_out(out, OutConf)}. + ConnectOpts#{ + subscriptions => pre_process_in_out(in, InConf), + forwards => pre_process_in_out(out, OutConf) + }. -pre_process_in_out(_, undefined) -> undefined; +pre_process_in_out(_, undefined) -> + undefined; pre_process_in_out(in, Conf) when is_map(Conf) -> Conf1 = pre_process_conf(local_topic, Conf), Conf2 = pre_process_conf(local_qos, Conf1), @@ -245,7 +262,8 @@ pre_process_in_out_common(Conf) -> pre_process_conf(Key, Conf) -> case maps:find(Key, Conf) of - error -> Conf; + error -> + Conf; {ok, Val} when is_binary(Val) -> Conf#{Key => emqx_plugin_libs_rule:preproc_tmpl(Val)}; {ok, Val} -> @@ -276,7 +294,6 @@ idle(info, idle, #{start_type := auto} = State) -> connecting(State); idle(state_timeout, reconnect, State) -> connecting(State); - idle(Type, Content, State) -> common(idle, Type, Content, State). 
@@ -298,13 +315,16 @@ connected(state_timeout, connected, #{inflight := Inflight} = State) -> connected(internal, maybe_send, State) -> {_, NewState} = pop_and_send(State), {keep_state, NewState}; - -connected(info, {disconnected, Conn, Reason}, - #{connection := Connection, name := Name, reconnect_interval := ReconnectDelayMs} = State) -> +connected( + info, + {disconnected, Conn, Reason}, + #{connection := Connection, name := Name, reconnect_interval := ReconnectDelayMs} = State +) -> ?tp(info, disconnected, #{name => Name, reason => Reason}), - case Conn =:= maps:get(client_pid, Connection, undefined) of + case Conn =:= maps:get(client_pid, Connection, undefined) of true -> - {next_state, idle, State#{connection => undefined}, {state_timeout, ReconnectDelayMs, reconnect}}; + {next_state, idle, State#{connection => undefined}, + {state_timeout, ReconnectDelayMs, reconnect}}; false -> keep_state_and_data end; @@ -317,7 +337,7 @@ connected(Type, Content, State) -> %% Common handlers common(StateName, {call, From}, status, _State) -> {keep_state_and_data, [{reply, From, StateName}]}; -common(_StateName, {call, From}, ping, #{connection := Conn} =_State) -> +common(_StateName, {call, From}, ping, #{connection := Conn} = _State) -> Reply = emqx_connector_mqtt_mod:ping(Conn), {keep_state_and_data, [{reply, From, Reply}]}; common(_StateName, {call, From}, ensure_stopped, #{connection := undefined} = _State) -> @@ -335,27 +355,39 @@ common(_StateName, cast, {send_to_remote, Msg}, #{replayq := Q} = State) -> NewQ = replayq:append(Q, [Msg]), {keep_state, State#{replayq => NewQ}, {next_event, internal, maybe_send}}; common(StateName, Type, Content, #{name := Name} = State) -> - ?SLOG(notice, #{msg => "bridge_discarded_event", - name => Name, type => Type, state_name => StateName, - content => Content}), + ?SLOG(notice, #{ + msg => "bridge_discarded_event", + name => Name, + type => Type, + state_name => StateName, + content => Content + }), {keep_state, State}. 
-do_connect(#{connect_opts := ConnectOpts, - inflight := Inflight, - name := Name} = State) -> +do_connect( + #{ + connect_opts := ConnectOpts, + inflight := Inflight, + name := Name + } = State +) -> case emqx_connector_mqtt_mod:start(ConnectOpts) of {ok, Conn} -> ?tp(info, connected, #{name => Name, inflight => length(Inflight)}), {ok, State#{connection => Conn}}; {error, Reason} -> ConnectOpts1 = obfuscate(ConnectOpts), - ?SLOG(error, #{msg => "failed_to_connect", - config => ConnectOpts1, reason => Reason}), + ?SLOG(error, #{ + msg => "failed_to_connect", + config => ConnectOpts1, + reason => Reason + }), {error, Reason, State} end. %% Retry all inflight (previously sent but not acked) batches. -retry_inflight(State, []) -> {ok, State}; +retry_inflight(State, []) -> + {ok, State}; retry_inflight(State, [#{q_ack_ref := QAckRef, msg := Msg} | Rest] = OldInf) -> case do_send(State, QAckRef, Msg) of {ok, State1} -> @@ -386,28 +418,49 @@ pop_and_send_loop(#{replayq := Q} = State, N) -> end. 
do_send(#{connect_opts := #{forwards := undefined}}, _QAckRef, Msg) -> - ?SLOG(error, #{msg => "cannot_forward_messages_to_remote_broker" - "_as_'egress'_is_not_configured", - messages => Msg}); -do_send(#{inflight := Inflight, - connection := Connection, - mountpoint := Mountpoint, - connect_opts := #{forwards := Forwards}} = State, QAckRef, Msg) -> + ?SLOG(error, #{ + msg => + "cannot_forward_messages_to_remote_broker" + "_as_'egress'_is_not_configured", + messages => Msg + }); +do_send( + #{ + inflight := Inflight, + connection := Connection, + mountpoint := Mountpoint, + connect_opts := #{forwards := Forwards} + } = State, + QAckRef, + Msg +) -> Vars = emqx_connector_mqtt_msg:make_pub_vars(Mountpoint, Forwards), ExportMsg = fun(Message) -> - emqx_metrics:inc('bridge.mqtt.message_sent_to_remote'), - emqx_connector_mqtt_msg:to_remote_msg(Message, Vars) - end, - ?SLOG(debug, #{msg => "publish_to_remote_broker", - message => Msg, vars => Vars}), + emqx_metrics:inc('bridge.mqtt.message_sent_to_remote'), + emqx_connector_mqtt_msg:to_remote_msg(Message, Vars) + end, + ?SLOG(debug, #{ + msg => "publish_to_remote_broker", + message => Msg, + vars => Vars + }), case emqx_connector_mqtt_mod:send(Connection, [ExportMsg(Msg)]) of {ok, Refs} -> - {ok, State#{inflight := Inflight ++ [#{q_ack_ref => QAckRef, - send_ack_ref => map_set(Refs), - msg => Msg}]}}; + {ok, State#{ + inflight := Inflight ++ + [ + #{ + q_ack_ref => QAckRef, + send_ack_ref => map_set(Refs), + msg => Msg + } + ] + }}; {error, Reason} -> - ?SLOG(info, #{msg => "mqtt_bridge_produce_failed", - reason => Reason}), + ?SLOG(info, #{ + msg => "mqtt_bridge_produce_failed", + reason => Reason + }), {error, State} end. @@ -427,8 +480,10 @@ handle_batch_ack(#{inflight := Inflight0, replayq := Q} = State, Ref) -> State#{inflight := Inflight}. 
do_ack([], Ref) -> - ?SLOG(debug, #{msg => "stale_batch_ack_reference", - ref => Ref}), + ?SLOG(debug, #{ + msg => "stale_batch_ack_reference", + ref => Ref + }), []; do_ack([#{send_ack_ref := Refs} = First | Rest], Ref) -> case maps:is_key(Ref, Refs) of @@ -443,8 +498,16 @@ do_ack([#{send_ack_ref := Refs} = First | Rest], Ref) -> drop_acked_batches(_Q, []) -> ?tp(debug, inflight_drained, #{}), []; -drop_acked_batches(Q, [#{send_ack_ref := Refs, - q_ack_ref := QAckRef} | Rest] = All) -> +drop_acked_batches( + Q, + [ + #{ + send_ack_ref := Refs, + q_ack_ref := QAckRef + } + | Rest + ] = All +) -> case maps:size(Refs) of 0 -> %% all messages are acked by bridge target @@ -475,18 +538,25 @@ format_mountpoint(Prefix) -> name(Id) -> list_to_atom(str(Id)). register_metrics() -> - lists:foreach(fun emqx_metrics:ensure/1, - ['bridge.mqtt.message_sent_to_remote', - 'bridge.mqtt.message_received_from_remote' - ]). + lists:foreach( + fun emqx_metrics:ensure/1, + [ + 'bridge.mqtt.message_sent_to_remote', + 'bridge.mqtt.message_received_from_remote' + ] + ). obfuscate(Map) -> - maps:fold(fun(K, V, Acc) -> - case is_sensitive(K) of - true -> [{K, '***'} | Acc]; - false -> [{K, V} | Acc] - end - end, [], Map). + maps:fold( + fun(K, V, Acc) -> + case is_sensitive(K) of + true -> [{K, '***'} | Acc]; + false -> [{K, V} | Acc] + end + end, + [], + Map + ). is_sensitive(password) -> true; is_sensitive(_) -> false. diff --git a/apps/emqx_connector/test/emqx_connector_api_SUITE.erl b/apps/emqx_connector/test/emqx_connector_api_SUITE.erl index 329bfe059..65a965d60 100644 --- a/apps/emqx_connector/test/emqx_connector_api_SUITE.erl +++ b/apps/emqx_connector/test/emqx_connector_api_SUITE.erl @@ -26,27 +26,23 @@ -include("emqx_dashboard/include/emqx_dashboard.hrl"). %% output functions --export([ inspect/3 - ]). +-export([inspect/3]). -define(BRIDGE_CONF_DEFAULT, <<"bridges: {}">>). -define(CONNECTR_TYPE, <<"mqtt">>). -define(CONNECTR_NAME, <<"test_connector">>). 
-define(BRIDGE_NAME_INGRESS, <<"ingress_test_bridge">>). -define(BRIDGE_NAME_EGRESS, <<"egress_test_bridge">>). --define(MQTT_CONNECTOR(Username), -#{ +-define(MQTT_CONNECTOR(Username), #{ <<"server">> => <<"127.0.0.1:1883">>, <<"username">> => Username, <<"password">> => <<"">>, <<"proto_ver">> => <<"v4">>, <<"ssl">> => #{<<"enable">> => false} }). --define(MQTT_CONNECTOR2(Server), - ?MQTT_CONNECTOR(<<"user1">>)#{<<"server">> => Server}). +-define(MQTT_CONNECTOR2(Server), ?MQTT_CONNECTOR(<<"user1">>)#{<<"server">> => Server}). --define(MQTT_BRIDGE_INGRESS(ID), -#{ +-define(MQTT_BRIDGE_INGRESS(ID), #{ <<"connector">> => ID, <<"direction">> => <<"ingress">>, <<"remote_topic">> => <<"remote_topic/#">>, @@ -57,8 +53,7 @@ <<"retain">> => <<"${retain}">> }). --define(MQTT_BRIDGE_EGRESS(ID), -#{ +-define(MQTT_BRIDGE_EGRESS(ID), #{ <<"connector">> => ID, <<"direction">> => <<"egress">>, <<"local_topic">> => <<"local_topic/#">>, @@ -68,10 +63,14 @@ <<"retain">> => <<"${retain}">> }). --define(metrics(MATCH, SUCC, FAILED, SPEED, SPEED5M, SPEEDMAX), - #{<<"matched">> := MATCH, <<"success">> := SUCC, - <<"failed">> := FAILED, <<"rate">> := SPEED, - <<"rate_last5m">> := SPEED5M, <<"rate_max">> := SPEEDMAX}). +-define(metrics(MATCH, SUCC, FAILED, SPEED, SPEED5M, SPEEDMAX), #{ + <<"matched">> := MATCH, + <<"success">> := SUCC, + <<"failed">> := FAILED, + <<"rate">> := SPEED, + <<"rate_last5m">> := SPEED5M, + <<"rate_max">> := SPEEDMAX +}). inspect(Selected, _Envs, _Args) -> persistent_term:put(?MODULE, #{inspect => Selected}). @@ -83,24 +82,37 @@ groups() -> []. suite() -> - [{timetrap,{seconds,30}}]. + [{timetrap, {seconds, 30}}]. 
init_per_suite(Config) -> _ = application:load(emqx_conf), %% some testcases (may from other app) already get emqx_connector started _ = application:stop(emqx_resource), _ = application:stop(emqx_connector), - ok = emqx_common_test_helpers:start_apps([emqx_rule_engine, emqx_connector, - emqx_bridge, emqx_dashboard], fun set_special_configs/1), + ok = emqx_common_test_helpers:start_apps( + [ + emqx_rule_engine, + emqx_connector, + emqx_bridge, + emqx_dashboard + ], + fun set_special_configs/1 + ), ok = emqx_common_test_helpers:load_config(emqx_connector_schema, <<"connectors: {}">>), - ok = emqx_common_test_helpers:load_config(emqx_rule_engine_schema, - <<"rule_engine {rules {}}">>), + ok = emqx_common_test_helpers:load_config( + emqx_rule_engine_schema, + <<"rule_engine {rules {}}">> + ), ok = emqx_common_test_helpers:load_config(emqx_bridge_schema, ?BRIDGE_CONF_DEFAULT), Config. end_per_suite(_Config) -> - emqx_common_test_helpers:stop_apps([emqx_rule_engine, emqx_connector, emqx_bridge, - emqx_dashboard]), + emqx_common_test_helpers:stop_apps([ + emqx_rule_engine, + emqx_connector, + emqx_bridge, + emqx_dashboard + ]), ok. set_special_configs(emqx_dashboard) -> @@ -116,15 +128,24 @@ end_per_testcase(_, _Config) -> ok. clear_resources() -> - lists:foreach(fun(#{id := Id}) -> + lists:foreach( + fun(#{id := Id}) -> ok = emqx_rule_engine:delete_rule(Id) - end, emqx_rule_engine:get_rules()), - lists:foreach(fun(#{type := Type, name := Name}) -> + end, + emqx_rule_engine:get_rules() + ), + lists:foreach( + fun(#{type := Type, name := Name}) -> ok = emqx_bridge:remove(Type, Name) - end, emqx_bridge:list()), - lists:foreach(fun(#{<<"type">> := Type, <<"name">> := Name}) -> + end, + emqx_bridge:list() + ), + lists:foreach( + fun(#{<<"type">> := Type, <<"name">> := Name}) -> ok = emqx_connector:delete(Type, Name) - end, emqx_connector:list_raw()). + end, + emqx_connector:list_raw() + ). 
%%------------------------------------------------------------------------------ %% Testcases @@ -137,103 +158,144 @@ t_mqtt_crud_apis(_) -> %% then we add a mqtt connector, using POST %% POST /connectors/ will create a connector User1 = <<"user1">>, - {ok, 400, <<"{\"code\":\"BAD_REQUEST\",\"message\"" - ":\"missing some required fields: [name, type]\"}">>} - = request(post, uri(["connectors"]), - ?MQTT_CONNECTOR(User1)#{ <<"type">> => ?CONNECTR_TYPE - }), - {ok, 201, Connector} = request(post, uri(["connectors"]), - ?MQTT_CONNECTOR(User1)#{ <<"type">> => ?CONNECTR_TYPE - , <<"name">> => ?CONNECTR_NAME - }), + {ok, 400, << + "{\"code\":\"BAD_REQUEST\",\"message\"" + ":\"missing some required fields: [name, type]\"}" + >>} = + request( + post, + uri(["connectors"]), + ?MQTT_CONNECTOR(User1)#{<<"type">> => ?CONNECTR_TYPE} + ), + {ok, 201, Connector} = request( + post, + uri(["connectors"]), + ?MQTT_CONNECTOR(User1)#{ + <<"type">> => ?CONNECTR_TYPE, + <<"name">> => ?CONNECTR_NAME + } + ), - #{ <<"type">> := ?CONNECTR_TYPE - , <<"name">> := ?CONNECTR_NAME - , <<"server">> := <<"127.0.0.1:1883">> - , <<"username">> := User1 - , <<"password">> := <<"">> - , <<"proto_ver">> := <<"v4">> - , <<"ssl">> := #{<<"enable">> := false} - } = jsx:decode(Connector), + #{ + <<"type">> := ?CONNECTR_TYPE, + <<"name">> := ?CONNECTR_NAME, + <<"server">> := <<"127.0.0.1:1883">>, + <<"username">> := User1, + <<"password">> := <<"">>, + <<"proto_ver">> := <<"v4">>, + <<"ssl">> := #{<<"enable">> := false} + } = jsx:decode(Connector), ConnctorID = emqx_connector:connector_id(?CONNECTR_TYPE, ?CONNECTR_NAME), %% update the request-path of the connector User2 = <<"user2">>, - {ok, 200, Connector2} = request(put, uri(["connectors", ConnctorID]), - ?MQTT_CONNECTOR(User2)), - ?assertMatch(#{ <<"type">> := ?CONNECTR_TYPE - , <<"name">> := ?CONNECTR_NAME - , <<"server">> := <<"127.0.0.1:1883">> - , <<"username">> := User2 - , <<"password">> := <<"">> - , <<"proto_ver">> := <<"v4">> - , <<"ssl">> := 
#{<<"enable">> := false} - }, jsx:decode(Connector2)), + {ok, 200, Connector2} = request( + put, + uri(["connectors", ConnctorID]), + ?MQTT_CONNECTOR(User2) + ), + ?assertMatch( + #{ + <<"type">> := ?CONNECTR_TYPE, + <<"name">> := ?CONNECTR_NAME, + <<"server">> := <<"127.0.0.1:1883">>, + <<"username">> := User2, + <<"password">> := <<"">>, + <<"proto_ver">> := <<"v4">>, + <<"ssl">> := #{<<"enable">> := false} + }, + jsx:decode(Connector2) + ), %% list all connectors again, assert Connector2 is in it {ok, 200, Connector2Str} = request(get, uri(["connectors"]), []), - ?assertMatch([#{ <<"type">> := ?CONNECTR_TYPE - , <<"name">> := ?CONNECTR_NAME - , <<"server">> := <<"127.0.0.1:1883">> - , <<"username">> := User2 - , <<"password">> := <<"">> - , <<"proto_ver">> := <<"v4">> - , <<"ssl">> := #{<<"enable">> := false} - }], jsx:decode(Connector2Str)), + ?assertMatch( + [ + #{ + <<"type">> := ?CONNECTR_TYPE, + <<"name">> := ?CONNECTR_NAME, + <<"server">> := <<"127.0.0.1:1883">>, + <<"username">> := User2, + <<"password">> := <<"">>, + <<"proto_ver">> := <<"v4">>, + <<"ssl">> := #{<<"enable">> := false} + } + ], + jsx:decode(Connector2Str) + ), %% get the connector by id {ok, 200, Connector3Str} = request(get, uri(["connectors", ConnctorID]), []), - ?assertMatch(#{ <<"type">> := ?CONNECTR_TYPE - , <<"name">> := ?CONNECTR_NAME - , <<"server">> := <<"127.0.0.1:1883">> - , <<"username">> := User2 - , <<"password">> := <<"">> - , <<"proto_ver">> := <<"v4">> - , <<"ssl">> := #{<<"enable">> := false} - }, jsx:decode(Connector3Str)), + ?assertMatch( + #{ + <<"type">> := ?CONNECTR_TYPE, + <<"name">> := ?CONNECTR_NAME, + <<"server">> := <<"127.0.0.1:1883">>, + <<"username">> := User2, + <<"password">> := <<"">>, + <<"proto_ver">> := <<"v4">>, + <<"ssl">> := #{<<"enable">> := false} + }, + jsx:decode(Connector3Str) + ), %% delete the connector {ok, 204, <<>>} = request(delete, uri(["connectors", ConnctorID]), []), {ok, 200, <<"[]">>} = request(get, uri(["connectors"]), []), %% 
update a deleted connector returns an error - {ok, 404, ErrMsg2} = request(put, uri(["connectors", ConnctorID]), - ?MQTT_CONNECTOR(User2)), + {ok, 404, ErrMsg2} = request( + put, + uri(["connectors", ConnctorID]), + ?MQTT_CONNECTOR(User2) + ), ?assertMatch( - #{ <<"code">> := _ - , <<"message">> := <<"connector not found">> - }, jsx:decode(ErrMsg2)), + #{ + <<"code">> := _, + <<"message">> := <<"connector not found">> + }, + jsx:decode(ErrMsg2) + ), ok. t_mqtt_conn_bridge_ingress(_) -> %% then we add a mqtt connector, using POST User1 = <<"user1">>, - {ok, 201, Connector} = request(post, uri(["connectors"]), - ?MQTT_CONNECTOR(User1)#{ <<"type">> => ?CONNECTR_TYPE - , <<"name">> => ?CONNECTR_NAME - }), + {ok, 201, Connector} = request( + post, + uri(["connectors"]), + ?MQTT_CONNECTOR(User1)#{ + <<"type">> => ?CONNECTR_TYPE, + <<"name">> => ?CONNECTR_NAME + } + ), - #{ <<"type">> := ?CONNECTR_TYPE - , <<"name">> := ?CONNECTR_NAME - , <<"server">> := <<"127.0.0.1:1883">> - , <<"num_of_bridges">> := 0 - , <<"username">> := User1 - , <<"password">> := <<"">> - , <<"proto_ver">> := <<"v4">> - , <<"ssl">> := #{<<"enable">> := false} - } = jsx:decode(Connector), + #{ + <<"type">> := ?CONNECTR_TYPE, + <<"name">> := ?CONNECTR_NAME, + <<"server">> := <<"127.0.0.1:1883">>, + <<"num_of_bridges">> := 0, + <<"username">> := User1, + <<"password">> := <<"">>, + <<"proto_ver">> := <<"v4">>, + <<"ssl">> := #{<<"enable">> := false} + } = jsx:decode(Connector), ConnctorID = emqx_connector:connector_id(?CONNECTR_TYPE, ?CONNECTR_NAME), %% ... 
and a MQTT bridge, using POST %% we bind this bridge to the connector created just now timer:sleep(50), - {ok, 201, Bridge} = request(post, uri(["bridges"]), + {ok, 201, Bridge} = request( + post, + uri(["bridges"]), ?MQTT_BRIDGE_INGRESS(ConnctorID)#{ <<"type">> => ?CONNECTR_TYPE, <<"name">> => ?BRIDGE_NAME_INGRESS - }), - #{ <<"type">> := ?CONNECTR_TYPE - , <<"name">> := ?BRIDGE_NAME_INGRESS - , <<"connector">> := ConnctorID - } = jsx:decode(Bridge), + } + ), + #{ + <<"type">> := ?CONNECTR_TYPE, + <<"name">> := ?BRIDGE_NAME_INGRESS, + <<"connector">> := ConnctorID + } = jsx:decode(Bridge), BridgeIDIngress = emqx_bridge:bridge_id(?CONNECTR_TYPE, ?BRIDGE_NAME_INGRESS), wait_for_resource_ready(BridgeIDIngress, 5), @@ -257,12 +319,12 @@ t_mqtt_conn_bridge_ingress(_) -> false after 100 -> false - end), + end + ), %% get the connector by id, verify the num_of_bridges now is 1 {ok, 200, Connector1Str} = request(get, uri(["connectors", ConnctorID]), []), - ?assertMatch(#{ <<"num_of_bridges">> := 1 - }, jsx:decode(Connector1Str)), + ?assertMatch(#{<<"num_of_bridges">> := 1}, jsx:decode(Connector1Str)), %% delete the bridge {ok, 204, <<>>} = request(delete, uri(["bridges", BridgeIDIngress]), []), @@ -276,30 +338,39 @@ t_mqtt_conn_bridge_ingress(_) -> t_mqtt_conn_bridge_egress(_) -> %% then we add a mqtt connector, using POST User1 = <<"user1">>, - {ok, 201, Connector} = request(post, uri(["connectors"]), - ?MQTT_CONNECTOR(User1)#{ <<"type">> => ?CONNECTR_TYPE - , <<"name">> => ?CONNECTR_NAME - }), + {ok, 201, Connector} = request( + post, + uri(["connectors"]), + ?MQTT_CONNECTOR(User1)#{ + <<"type">> => ?CONNECTR_TYPE, + <<"name">> => ?CONNECTR_NAME + } + ), %ct:pal("---connector: ~p", [Connector]), - #{ <<"server">> := <<"127.0.0.1:1883">> - , <<"username">> := User1 - , <<"password">> := <<"">> - , <<"proto_ver">> := <<"v4">> - , <<"ssl">> := #{<<"enable">> := false} - } = jsx:decode(Connector), + #{ + <<"server">> := <<"127.0.0.1:1883">>, + <<"username">> := User1, + 
<<"password">> := <<"">>, + <<"proto_ver">> := <<"v4">>, + <<"ssl">> := #{<<"enable">> := false} + } = jsx:decode(Connector), ConnctorID = emqx_connector:connector_id(?CONNECTR_TYPE, ?CONNECTR_NAME), %% ... and a MQTT bridge, using POST %% we bind this bridge to the connector created just now - {ok, 201, Bridge} = request(post, uri(["bridges"]), + {ok, 201, Bridge} = request( + post, + uri(["bridges"]), ?MQTT_BRIDGE_EGRESS(ConnctorID)#{ <<"type">> => ?CONNECTR_TYPE, <<"name">> => ?BRIDGE_NAME_EGRESS - }), - #{ <<"type">> := ?CONNECTR_TYPE - , <<"name">> := ?BRIDGE_NAME_EGRESS - , <<"connector">> := ConnctorID - } = jsx:decode(Bridge), + } + ), + #{ + <<"type">> := ?CONNECTR_TYPE, + <<"name">> := ?BRIDGE_NAME_EGRESS, + <<"connector">> := ConnctorID + } = jsx:decode(Bridge), BridgeIDEgress = emqx_bridge:bridge_id(?CONNECTR_TYPE, ?BRIDGE_NAME_EGRESS), wait_for_resource_ready(BridgeIDEgress, 5), @@ -324,14 +395,19 @@ t_mqtt_conn_bridge_egress(_) -> false after 100 -> false - end), + end + ), %% verify the metrics of the bridge {ok, 200, BridgeStr} = request(get, uri(["bridges", BridgeIDEgress]), []), - ?assertMatch(#{ <<"metrics">> := ?metrics(1, 1, 0, _, _, _) - , <<"node_metrics">> := - [#{<<"node">> := _, <<"metrics">> := ?metrics(1, 1, 0, _, _, _)}] - }, jsx:decode(BridgeStr)), + ?assertMatch( + #{ + <<"metrics">> := ?metrics(1, 1, 0, _, _, _), + <<"node_metrics">> := + [#{<<"node">> := _, <<"metrics">> := ?metrics(1, 1, 0, _, _, _)}] + }, + jsx:decode(BridgeStr) + ), %% delete the bridge {ok, 204, <<>>} = request(delete, uri(["bridges", BridgeIDEgress]), []), @@ -347,38 +423,50 @@ t_mqtt_conn_bridge_egress(_) -> %% - cannot delete a connector that is used by at least one bridge t_mqtt_conn_update(_) -> %% then we add a mqtt connector, using POST - {ok, 201, Connector} = request(post, uri(["connectors"]), - ?MQTT_CONNECTOR2(<<"127.0.0.1:1883">>) - #{ <<"type">> => ?CONNECTR_TYPE - , <<"name">> => ?CONNECTR_NAME - }), + {ok, 201, Connector} = request( + post, + 
uri(["connectors"]), + ?MQTT_CONNECTOR2(<<"127.0.0.1:1883">>)#{ + <<"type">> => ?CONNECTR_TYPE, + <<"name">> => ?CONNECTR_NAME + } + ), %ct:pal("---connector: ~p", [Connector]), - #{ <<"server">> := <<"127.0.0.1:1883">> - } = jsx:decode(Connector), + #{<<"server">> := <<"127.0.0.1:1883">>} = jsx:decode(Connector), ConnctorID = emqx_connector:connector_id(?CONNECTR_TYPE, ?CONNECTR_NAME), %% ... and a MQTT bridge, using POST %% we bind this bridge to the connector created just now - {ok, 201, Bridge} = request(post, uri(["bridges"]), + {ok, 201, Bridge} = request( + post, + uri(["bridges"]), ?MQTT_BRIDGE_EGRESS(ConnctorID)#{ <<"type">> => ?CONNECTR_TYPE, <<"name">> => ?BRIDGE_NAME_EGRESS - }), - #{ <<"type">> := ?CONNECTR_TYPE - , <<"name">> := ?BRIDGE_NAME_EGRESS - , <<"connector">> := ConnctorID - } = jsx:decode(Bridge), + } + ), + #{ + <<"type">> := ?CONNECTR_TYPE, + <<"name">> := ?BRIDGE_NAME_EGRESS, + <<"connector">> := ConnctorID + } = jsx:decode(Bridge), BridgeIDEgress = emqx_bridge:bridge_id(?CONNECTR_TYPE, ?BRIDGE_NAME_EGRESS), wait_for_resource_ready(BridgeIDEgress, 5), %% Then we try to update 'server' of the connector, to an unavailable IP address %% The update OK, we recreate the resource even if the resource is current connected, %% and the target resource we're going to update is unavailable. 
- {ok, 200, _} = request(put, uri(["connectors", ConnctorID]), - ?MQTT_CONNECTOR2(<<"127.0.0.1:2603">>)), + {ok, 200, _} = request( + put, + uri(["connectors", ConnctorID]), + ?MQTT_CONNECTOR2(<<"127.0.0.1:2603">>) + ), %% we fix the 'server' parameter to a normal one, it should work - {ok, 200, _} = request(put, uri(["connectors", ConnctorID]), - ?MQTT_CONNECTOR2(<<"127.0.0.1 : 1883">>)), + {ok, 200, _} = request( + put, + uri(["connectors", ConnctorID]), + ?MQTT_CONNECTOR2(<<"127.0.0.1 : 1883">>) + ), %% delete the bridge {ok, 204, <<>>} = request(delete, uri(["bridges", BridgeIDEgress]), []), {ok, 200, <<"[]">>} = request(get, uri(["bridges"]), []), @@ -390,40 +478,51 @@ t_mqtt_conn_update(_) -> t_mqtt_conn_update2(_) -> %% then we add a mqtt connector, using POST %% but this connector is point to a unreachable server "2603" - {ok, 201, Connector} = request(post, uri(["connectors"]), - ?MQTT_CONNECTOR2(<<"127.0.0.1:2603">>) - #{ <<"type">> => ?CONNECTR_TYPE - , <<"name">> => ?CONNECTR_NAME - }), + {ok, 201, Connector} = request( + post, + uri(["connectors"]), + ?MQTT_CONNECTOR2(<<"127.0.0.1:2603">>)#{ + <<"type">> => ?CONNECTR_TYPE, + <<"name">> => ?CONNECTR_NAME + } + ), - #{ <<"server">> := <<"127.0.0.1:2603">> - } = jsx:decode(Connector), + #{<<"server">> := <<"127.0.0.1:2603">>} = jsx:decode(Connector), ConnctorID = emqx_connector:connector_id(?CONNECTR_TYPE, ?CONNECTR_NAME), %% ... 
and a MQTT bridge, using POST %% we bind this bridge to the connector created just now - {ok, 201, Bridge} = request(post, uri(["bridges"]), + {ok, 201, Bridge} = request( + post, + uri(["bridges"]), ?MQTT_BRIDGE_EGRESS(ConnctorID)#{ <<"type">> => ?CONNECTR_TYPE, <<"name">> => ?BRIDGE_NAME_EGRESS - }), - #{ <<"type">> := ?CONNECTR_TYPE - , <<"name">> := ?BRIDGE_NAME_EGRESS - , <<"status">> := <<"disconnected">> - , <<"connector">> := ConnctorID - } = jsx:decode(Bridge), + } + ), + #{ + <<"type">> := ?CONNECTR_TYPE, + <<"name">> := ?BRIDGE_NAME_EGRESS, + <<"status">> := <<"disconnected">>, + <<"connector">> := ConnctorID + } = jsx:decode(Bridge), BridgeIDEgress = emqx_bridge:bridge_id(?CONNECTR_TYPE, ?BRIDGE_NAME_EGRESS), %% We try to fix the 'server' parameter, to another unavailable server.. %% The update should success: we don't check the connectivity of the new config %% if the resource is now disconnected. - {ok, 200, _} = request(put, uri(["connectors", ConnctorID]), - ?MQTT_CONNECTOR2(<<"127.0.0.1:2604">>)), + {ok, 200, _} = request( + put, + uri(["connectors", ConnctorID]), + ?MQTT_CONNECTOR2(<<"127.0.0.1:2604">>) + ), %% we fix the 'server' parameter to a normal one, it should work - {ok, 200, _} = request(put, uri(["connectors", ConnctorID]), - ?MQTT_CONNECTOR2(<<"127.0.0.1:1883">>)), + {ok, 200, _} = request( + put, + uri(["connectors", ConnctorID]), + ?MQTT_CONNECTOR2(<<"127.0.0.1:1883">>) + ), wait_for_resource_ready(BridgeIDEgress, 5), {ok, 200, BridgeStr} = request(get, uri(["bridges", BridgeIDEgress]), []), - ?assertMatch(#{ <<"status">> := <<"connected">> - }, jsx:decode(BridgeStr)), + ?assertMatch(#{<<"status">> := <<"connected">>}, jsx:decode(BridgeStr)), %% delete the bridge {ok, 204, <<>>} = request(delete, uri(["bridges", BridgeIDEgress]), []), {ok, 200, <<"[]">>} = request(get, uri(["bridges"]), []), @@ -434,21 +533,26 @@ t_mqtt_conn_update2(_) -> t_mqtt_conn_update3(_) -> %% we add a mqtt connector, using POST - {ok, 201, _} = request(post, 
uri(["connectors"]), - ?MQTT_CONNECTOR2(<<"127.0.0.1:1883">>) - #{ <<"type">> => ?CONNECTR_TYPE - , <<"name">> => ?CONNECTR_NAME - }), + {ok, 201, _} = request( + post, + uri(["connectors"]), + ?MQTT_CONNECTOR2(<<"127.0.0.1:1883">>)#{ + <<"type">> => ?CONNECTR_TYPE, + <<"name">> => ?CONNECTR_NAME + } + ), ConnctorID = emqx_connector:connector_id(?CONNECTR_TYPE, ?CONNECTR_NAME), %% ... and a MQTT bridge, using POST %% we bind this bridge to the connector created just now - {ok, 201, Bridge} = request(post, uri(["bridges"]), + {ok, 201, Bridge} = request( + post, + uri(["bridges"]), ?MQTT_BRIDGE_EGRESS(ConnctorID)#{ <<"type">> => ?CONNECTR_TYPE, <<"name">> => ?BRIDGE_NAME_EGRESS - }), - #{ <<"connector">> := ConnctorID - } = jsx:decode(Bridge), + } + ), + #{<<"connector">> := ConnctorID} = jsx:decode(Bridge), BridgeIDEgress = emqx_bridge:bridge_id(?CONNECTR_TYPE, ?BRIDGE_NAME_EGRESS), wait_for_resource_ready(BridgeIDEgress, 5), @@ -462,37 +566,54 @@ t_mqtt_conn_update3(_) -> t_mqtt_conn_testing(_) -> %% APIs for testing the connectivity %% then we add a mqtt connector, using POST - {ok, 204, <<>>} = request(post, uri(["connectors_test"]), + {ok, 204, <<>>} = request( + post, + uri(["connectors_test"]), ?MQTT_CONNECTOR2(<<"127.0.0.1:1883">>)#{ <<"type">> => ?CONNECTR_TYPE, <<"name">> => ?BRIDGE_NAME_EGRESS - }), - {ok, 400, _} = request(post, uri(["connectors_test"]), + } + ), + {ok, 400, _} = request( + post, + uri(["connectors_test"]), ?MQTT_CONNECTOR2(<<"127.0.0.1:2883">>)#{ <<"type">> => ?CONNECTR_TYPE, <<"name">> => ?BRIDGE_NAME_EGRESS - }). + } + ). 
t_ingress_mqtt_bridge_with_rules(_) -> - {ok, 201, _} = request(post, uri(["connectors"]), - ?MQTT_CONNECTOR(<<"user1">>)#{ <<"type">> => ?CONNECTR_TYPE - , <<"name">> => ?CONNECTR_NAME - }), + {ok, 201, _} = request( + post, + uri(["connectors"]), + ?MQTT_CONNECTOR(<<"user1">>)#{ + <<"type">> => ?CONNECTR_TYPE, + <<"name">> => ?CONNECTR_NAME + } + ), ConnctorID = emqx_connector:connector_id(?CONNECTR_TYPE, ?CONNECTR_NAME), - {ok, 201, _} = request(post, uri(["bridges"]), + {ok, 201, _} = request( + post, + uri(["bridges"]), ?MQTT_BRIDGE_INGRESS(ConnctorID)#{ <<"type">> => ?CONNECTR_TYPE, <<"name">> => ?BRIDGE_NAME_INGRESS - }), + } + ), BridgeIDIngress = emqx_bridge:bridge_id(?CONNECTR_TYPE, ?BRIDGE_NAME_INGRESS), - {ok, 201, Rule} = request(post, uri(["rules"]), - #{<<"name">> => <<"A rule get messages from a source mqtt bridge">>, - <<"enable">> => true, - <<"outputs">> => [#{<<"function">> => "emqx_connector_api_SUITE:inspect"}], - <<"sql">> => <<"SELECT * from \"$bridges/", BridgeIDIngress/binary, "\"">> - }), + {ok, 201, Rule} = request( + post, + uri(["rules"]), + #{ + <<"name">> => <<"A rule get messages from a source mqtt bridge">>, + <<"enable">> => true, + <<"outputs">> => [#{<<"function">> => "emqx_connector_api_SUITE:inspect"}], + <<"sql">> => <<"SELECT * from \"$bridges/", BridgeIDIngress/binary, "\"">> + } + ), #{<<"id">> := RuleId} = jsx:decode(Rule), %% we now test if the bridge works as expected @@ -517,63 +638,81 @@ t_ingress_mqtt_bridge_with_rules(_) -> false after 100 -> false - end), + end + ), %% and also the rule should be matched, with matched + 1: {ok, 200, Rule1} = request(get, uri(["rules", RuleId]), []), - #{ <<"id">> := RuleId - , <<"metrics">> := #{ - <<"sql.matched">> := 1, - <<"sql.passed">> := 1, - <<"sql.failed">> := 0, - <<"sql.failed.exception">> := 0, - <<"sql.failed.no_result">> := 0, - <<"sql.matched.rate">> := _, - <<"sql.matched.rate.max">> := _, - <<"sql.matched.rate.last5m">> := _, - <<"outputs.total">> := 1, - 
<<"outputs.success">> := 1, - <<"outputs.failed">> := 0, - <<"outputs.failed.out_of_service">> := 0, - <<"outputs.failed.unknown">> := 0 - } - } = jsx:decode(Rule1), + #{ + <<"id">> := RuleId, + <<"metrics">> := #{ + <<"sql.matched">> := 1, + <<"sql.passed">> := 1, + <<"sql.failed">> := 0, + <<"sql.failed.exception">> := 0, + <<"sql.failed.no_result">> := 0, + <<"sql.matched.rate">> := _, + <<"sql.matched.rate.max">> := _, + <<"sql.matched.rate.last5m">> := _, + <<"outputs.total">> := 1, + <<"outputs.success">> := 1, + <<"outputs.failed">> := 0, + <<"outputs.failed.out_of_service">> := 0, + <<"outputs.failed.unknown">> := 0 + } + } = jsx:decode(Rule1), %% we also check if the outputs of the rule is triggered - ?assertMatch(#{inspect := #{ - event := <<"$bridges/mqtt", _/binary>>, - id := MsgId, - payload := Payload, - topic := RemoteTopic, - qos := 0, - dup := false, - retain := false, - pub_props := #{}, - timestamp := _ - }} when is_binary(MsgId), persistent_term:get(?MODULE)), + ?assertMatch( + #{ + inspect := #{ + event := <<"$bridges/mqtt", _/binary>>, + id := MsgId, + payload := Payload, + topic := RemoteTopic, + qos := 0, + dup := false, + retain := false, + pub_props := #{}, + timestamp := _ + } + } when is_binary(MsgId), + persistent_term:get(?MODULE) + ), {ok, 204, <<>>} = request(delete, uri(["rules", RuleId]), []), {ok, 204, <<>>} = request(delete, uri(["bridges", BridgeIDIngress]), []), {ok, 204, <<>>} = request(delete, uri(["connectors", ConnctorID]), []). 
t_egress_mqtt_bridge_with_rules(_) -> - {ok, 201, _} = request(post, uri(["connectors"]), - ?MQTT_CONNECTOR(<<"user1">>)#{ <<"type">> => ?CONNECTR_TYPE - , <<"name">> => ?CONNECTR_NAME - }), + {ok, 201, _} = request( + post, + uri(["connectors"]), + ?MQTT_CONNECTOR(<<"user1">>)#{ + <<"type">> => ?CONNECTR_TYPE, + <<"name">> => ?CONNECTR_NAME + } + ), ConnctorID = emqx_connector:connector_id(?CONNECTR_TYPE, ?CONNECTR_NAME), - {ok, 201, Bridge} = request(post, uri(["bridges"]), + {ok, 201, Bridge} = request( + post, + uri(["bridges"]), ?MQTT_BRIDGE_EGRESS(ConnctorID)#{ <<"type">> => ?CONNECTR_TYPE, <<"name">> => ?BRIDGE_NAME_EGRESS - }), - #{ <<"type">> := ?CONNECTR_TYPE, <<"name">> := ?BRIDGE_NAME_EGRESS } = jsx:decode(Bridge), + } + ), + #{<<"type">> := ?CONNECTR_TYPE, <<"name">> := ?BRIDGE_NAME_EGRESS} = jsx:decode(Bridge), BridgeIDEgress = emqx_bridge:bridge_id(?CONNECTR_TYPE, ?BRIDGE_NAME_EGRESS), - {ok, 201, Rule} = request(post, uri(["rules"]), - #{<<"name">> => <<"A rule send messages to a sink mqtt bridge">>, - <<"enable">> => true, - <<"outputs">> => [BridgeIDEgress], - <<"sql">> => <<"SELECT * from \"t/1\"">> - }), + {ok, 201, Rule} = request( + post, + uri(["rules"]), + #{ + <<"name">> => <<"A rule send messages to a sink mqtt bridge">>, + <<"enable">> => true, + <<"outputs">> => [BridgeIDEgress], + <<"sql">> => <<"SELECT * from \"t/1\"">> + } + ), #{<<"id">> := RuleId} = jsx:decode(Rule), %% we now test if the bridge works as expected @@ -597,7 +736,8 @@ t_egress_mqtt_bridge_with_rules(_) -> false after 100 -> false - end), + end + ), emqx:unsubscribe(RemoteTopic), %% PUBLISH a message to the rule. 
@@ -609,23 +749,24 @@ t_egress_mqtt_bridge_with_rules(_) -> wait_for_resource_ready(BridgeIDEgress, 5), emqx:publish(emqx_message:make(RuleTopic, Payload2)), {ok, 200, Rule1} = request(get, uri(["rules", RuleId]), []), - #{ <<"id">> := RuleId - , <<"metrics">> := #{ - <<"sql.matched">> := 1, - <<"sql.passed">> := 1, - <<"sql.failed">> := 0, - <<"sql.failed.exception">> := 0, - <<"sql.failed.no_result">> := 0, - <<"sql.matched.rate">> := _, - <<"sql.matched.rate.max">> := _, - <<"sql.matched.rate.last5m">> := _, - <<"outputs.total">> := 1, - <<"outputs.success">> := 1, - <<"outputs.failed">> := 0, - <<"outputs.failed.out_of_service">> := 0, - <<"outputs.failed.unknown">> := 0 - } - } = jsx:decode(Rule1), + #{ + <<"id">> := RuleId, + <<"metrics">> := #{ + <<"sql.matched">> := 1, + <<"sql.passed">> := 1, + <<"sql.failed">> := 0, + <<"sql.failed.exception">> := 0, + <<"sql.failed.no_result">> := 0, + <<"sql.matched.rate">> := _, + <<"sql.matched.rate.max">> := _, + <<"sql.matched.rate.last5m">> := _, + <<"outputs.total">> := 1, + <<"outputs.success">> := 1, + <<"outputs.failed">> := 0, + <<"outputs.failed.out_of_service">> := 0, + <<"outputs.failed.unknown">> := 0 + } + } = jsx:decode(Rule1), %% we should receive a message on the "remote" broker, with specified topic ?assert( receive @@ -637,14 +778,19 @@ t_egress_mqtt_bridge_with_rules(_) -> false after 100 -> false - end), + end + ), %% verify the metrics of the bridge {ok, 200, BridgeStr} = request(get, uri(["bridges", BridgeIDEgress]), []), - ?assertMatch(#{ <<"metrics">> := ?metrics(2, 2, 0, _, _, _) - , <<"node_metrics">> := - [#{<<"node">> := _, <<"metrics">> := ?metrics(2, 2, 0, _, _, _)}] - }, jsx:decode(BridgeStr)), + ?assertMatch( + #{ + <<"metrics">> := ?metrics(2, 2, 0, _, _, _), + <<"node_metrics">> := + [#{<<"node">> := _, <<"metrics">> := ?metrics(2, 2, 0, _, _, _)}] + }, + jsx:decode(BridgeStr) + ), {ok, 204, <<>>} = request(delete, uri(["rules", RuleId]), []), {ok, 204, <<>>} = request(delete, 
uri(["bridges", BridgeIDEgress]), []), @@ -658,8 +804,9 @@ wait_for_resource_ready(InstId, 0) -> ct:fail(wait_resource_timeout); wait_for_resource_ready(InstId, Retry) -> case emqx_bridge:lookup(InstId) of - {ok, #{resource_data := #{status := connected}}} -> ok; + {ok, #{resource_data := #{status := connected}}} -> + ok; _ -> timer:sleep(100), - wait_for_resource_ready(InstId, Retry-1) + wait_for_resource_ready(InstId, Retry - 1) end. diff --git a/apps/emqx_connector/test/emqx_connector_mongo_SUITE.erl b/apps/emqx_connector/test/emqx_connector_mongo_SUITE.erl index fa24ae724..33a0397de 100644 --- a/apps/emqx_connector/test/emqx_connector_mongo_SUITE.erl +++ b/apps/emqx_connector/test/emqx_connector_mongo_SUITE.erl @@ -65,20 +65,24 @@ t_lifecycle(_Config) -> perform_lifecycle_check(PoolName, InitialConfig) -> {ok, #{config := CheckedConfig}} = emqx_resource:check_config(?MONGO_RESOURCE_MOD, InitialConfig), - {ok, #{state := #{poolname := ReturnedPoolName} = State, - status := InitialStatus}} - = emqx_resource:create_local( - PoolName, - ?CONNECTOR_RESOURCE_GROUP, - ?MONGO_RESOURCE_MOD, - CheckedConfig, - #{} - ), + {ok, #{ + state := #{poolname := ReturnedPoolName} = State, + status := InitialStatus + }} = + emqx_resource:create_local( + PoolName, + ?CONNECTOR_RESOURCE_GROUP, + ?MONGO_RESOURCE_MOD, + CheckedConfig, + #{} + ), ?assertEqual(InitialStatus, connected), % Instance should match the state and status of the just started resource - {ok, ?CONNECTOR_RESOURCE_GROUP, #{state := State, - status := InitialStatus}} - = emqx_resource:get_instance(PoolName), + {ok, ?CONNECTOR_RESOURCE_GROUP, #{ + state := State, + status := InitialStatus + }} = + emqx_resource:get_instance(PoolName), ?assertEqual(ok, emqx_resource:health_check(PoolName)), % % Perform query as further check that the resource is working as expected ?assertMatch([], emqx_resource:query(PoolName, test_query_find())), @@ -86,11 +90,13 @@ perform_lifecycle_check(PoolName, InitialConfig) -> 
?assertEqual(ok, emqx_resource:stop(PoolName)), % Resource will be listed still, but state will be changed and healthcheck will fail % as the worker no longer exists. - {ok, ?CONNECTOR_RESOURCE_GROUP, #{state := State, - status := StoppedStatus}} - = emqx_resource:get_instance(PoolName), + {ok, ?CONNECTOR_RESOURCE_GROUP, #{ + state := State, + status := StoppedStatus + }} = + emqx_resource:get_instance(PoolName), ?assertEqual(StoppedStatus, disconnected), - ?assertEqual({error,health_check_failed}, emqx_resource:health_check(PoolName)), + ?assertEqual({error, health_check_failed}, emqx_resource:health_check(PoolName)), % Resource healthcheck shortcuts things by checking ets. Go deeper by checking pool itself. ?assertEqual({error, not_found}, ecpool:stop_sup_pool(ReturnedPoolName)), % Can call stop/1 again on an already stopped instance @@ -99,8 +105,8 @@ perform_lifecycle_check(PoolName, InitialConfig) -> ?assertEqual(ok, emqx_resource:restart(PoolName)), % async restart, need to wait resource timer:sleep(500), - {ok, ?CONNECTOR_RESOURCE_GROUP, #{status := InitialStatus}} - = emqx_resource:get_instance(PoolName), + {ok, ?CONNECTOR_RESOURCE_GROUP, #{status := InitialStatus}} = + emqx_resource:get_instance(PoolName), ?assertEqual(ok, emqx_resource:health_check(PoolName)), ?assertMatch([], emqx_resource:query(PoolName, test_query_find())), ?assertMatch(undefined, emqx_resource:query(PoolName, test_query_find_one())), @@ -115,12 +121,19 @@ perform_lifecycle_check(PoolName, InitialConfig) -> % %%------------------------------------------------------------------------------ mongo_config() -> - RawConfig = list_to_binary(io_lib:format(""" - mongo_type = single - database = mqtt - pool_size = 8 - server = \"~s:~b\" - """, [?MONGO_HOST, ?MONGO_DEFAULT_PORT])), + RawConfig = list_to_binary( + io_lib:format( + "" + "\n" + " mongo_type = single\n" + " database = mqtt\n" + " pool_size = 8\n" + " server = \"~s:~b\"\n" + " " + "", + [?MONGO_HOST, ?MONGO_DEFAULT_PORT] + ) + ), 
{ok, Config} = hocon:binary(RawConfig), #{<<"config">> => Config}. diff --git a/apps/emqx_connector/test/emqx_connector_mqtt_tests.erl b/apps/emqx_connector/test/emqx_connector_mqtt_tests.erl index 550f264d3..2bb9abd84 100644 --- a/apps/emqx_connector/test/emqx_connector_mqtt_tests.erl +++ b/apps/emqx_connector/test/emqx_connector_mqtt_tests.erl @@ -22,23 +22,36 @@ send_and_ack_test() -> %% delegate from gen_rpc to rpc for unit test meck:new(emqtt, [passthrough, no_history]), - meck:expect(emqtt, start_link, 1, - fun(_) -> - {ok, spawn_link(fun() -> ok end)} - end), + meck:expect( + emqtt, + start_link, + 1, + fun(_) -> + {ok, spawn_link(fun() -> ok end)} + end + ), meck:expect(emqtt, connect, 1, {ok, dummy}), - meck:expect(emqtt, stop, 1, - fun(Pid) -> Pid ! stop end), - meck:expect(emqtt, publish, 2, - fun(Client, Msg) -> - Client ! {publish, Msg}, - {ok, Msg} %% as packet id - end), + meck:expect( + emqtt, + stop, + 1, + fun(Pid) -> Pid ! stop end + ), + meck:expect( + emqtt, + publish, + 2, + fun(Client, Msg) -> + Client ! {publish, Msg}, + %% as packet id + {ok, Msg} + end + ), try Max = 1, Batch = lists:seq(1, Max), - {ok, Conn} = emqx_connector_mqtt_mod:start(#{server => {{127,0,0,1}, 1883}}), - % %% return last packet id as batch reference + {ok, Conn} = emqx_connector_mqtt_mod:start(#{server => {{127, 0, 0, 1}, 1883}}), + % %% return last packet id as batch reference {ok, _AckRef} = emqx_connector_mqtt_mod:send(Conn, Batch), ok = emqx_connector_mqtt_mod:stop(Conn) diff --git a/apps/emqx_connector/test/emqx_connector_mqtt_worker_tests.erl b/apps/emqx_connector/test/emqx_connector_mqtt_worker_tests.erl index cf1a190d4..aff1a92a6 100644 --- a/apps/emqx_connector/test/emqx_connector_mqtt_worker_tests.erl +++ b/apps/emqx_connector/test/emqx_connector_mqtt_worker_tests.erl @@ -23,13 +23,13 @@ -define(BRIDGE_NAME, test). -define(BRIDGE_REG_NAME, emqx_connector_mqtt_worker_test). 
-define(WAIT(PATTERN, TIMEOUT), - receive - PATTERN -> - ok - after - TIMEOUT -> - error(timeout) - end). + receive + PATTERN -> + ok + after TIMEOUT -> + error(timeout) + end +). -export([start/1, send/2, stop/1]). @@ -125,7 +125,7 @@ manual_start_stop_test() -> Ref = make_ref(), TestPid = self(), BridgeName = manual_start_stop, - Config0 = make_config(Ref, TestPid, {ok, #{client_pid => TestPid}}), + Config0 = make_config(Ref, TestPid, {ok, #{client_pid => TestPid}}), Config = Config0#{start_type := manual}, {ok, Pid} = emqx_connector_mqtt_worker:start_link(Config#{name => BridgeName}), %% call ensure_started again should yield the same result diff --git a/apps/emqx_connector/test/emqx_connector_mysql_SUITE.erl b/apps/emqx_connector/test/emqx_connector_mysql_SUITE.erl index 29ba2c181..c039da168 100644 --- a/apps/emqx_connector/test/emqx_connector_mysql_SUITE.erl +++ b/apps/emqx_connector/test/emqx_connector_mysql_SUITE.erl @@ -64,9 +64,11 @@ t_lifecycle(_Config) -> perform_lifecycle_check(PoolName, InitialConfig) -> {ok, #{config := CheckedConfig}} = - emqx_resource:check_config(?MYSQL_RESOURCE_MOD, InitialConfig), - {ok, #{state := #{poolname := ReturnedPoolName} = State, - status := InitialStatus}} = emqx_resource:create_local( + emqx_resource:check_config(?MYSQL_RESOURCE_MOD, InitialConfig), + {ok, #{ + state := #{poolname := ReturnedPoolName} = State, + status := InitialStatus + }} = emqx_resource:create_local( PoolName, ?CONNECTOR_RESOURCE_GROUP, ?MYSQL_RESOURCE_MOD, @@ -75,23 +77,32 @@ perform_lifecycle_check(PoolName, InitialConfig) -> ), ?assertEqual(InitialStatus, connected), % Instance should match the state and status of the just started resource - {ok, ?CONNECTOR_RESOURCE_GROUP, #{state := State, - status := InitialStatus}} - = emqx_resource:get_instance(PoolName), + {ok, ?CONNECTOR_RESOURCE_GROUP, #{ + state := State, + status := InitialStatus + }} = + emqx_resource:get_instance(PoolName), ?assertEqual(ok, emqx_resource:health_check(PoolName)), % % 
Perform query as further check that the resource is working as expected ?assertMatch({ok, _, [[1]]}, emqx_resource:query(PoolName, test_query_no_params())), ?assertMatch({ok, _, [[1]]}, emqx_resource:query(PoolName, test_query_with_params())), - ?assertMatch({ok, _, [[1]]}, emqx_resource:query(PoolName, - test_query_with_params_and_timeout())), + ?assertMatch( + {ok, _, [[1]]}, + emqx_resource:query( + PoolName, + test_query_with_params_and_timeout() + ) + ), ?assertEqual(ok, emqx_resource:stop(PoolName)), % Resource will be listed still, but state will be changed and healthcheck will fail % as the worker no longer exists. - {ok, ?CONNECTOR_RESOURCE_GROUP, #{state := State, - status := StoppedStatus}} - = emqx_resource:get_instance(PoolName), + {ok, ?CONNECTOR_RESOURCE_GROUP, #{ + state := State, + status := StoppedStatus + }} = + emqx_resource:get_instance(PoolName), ?assertEqual(StoppedStatus, disconnected), - ?assertEqual({error,health_check_failed}, emqx_resource:health_check(PoolName)), + ?assertEqual({error, health_check_failed}, emqx_resource:health_check(PoolName)), % Resource healthcheck shortcuts things by checking ets. Go deeper by checking pool itself. ?assertEqual({error, not_found}, ecpool:stop_sup_pool(ReturnedPoolName)), % Can call stop/1 again on an already stopped instance @@ -105,8 +116,13 @@ perform_lifecycle_check(PoolName, InitialConfig) -> ?assertEqual(ok, emqx_resource:health_check(PoolName)), ?assertMatch({ok, _, [[1]]}, emqx_resource:query(PoolName, test_query_no_params())), ?assertMatch({ok, _, [[1]]}, emqx_resource:query(PoolName, test_query_with_params())), - ?assertMatch({ok, _, [[1]]}, emqx_resource:query(PoolName, - test_query_with_params_and_timeout())), + ?assertMatch( + {ok, _, [[1]]}, + emqx_resource:query( + PoolName, + test_query_with_params_and_timeout() + ) + ), % Stop and remove the resource in one go. 
?assertEqual(ok, emqx_resource:remove_local(PoolName)), ?assertEqual({error, not_found}, ecpool:stop_sup_pool(ReturnedPoolName)), @@ -118,14 +134,21 @@ perform_lifecycle_check(PoolName, InitialConfig) -> % %%------------------------------------------------------------------------------ mysql_config() -> - RawConfig = list_to_binary(io_lib:format(""" - auto_reconnect = true - database = mqtt - username= root - password = public - pool_size = 8 - server = \"~s:~b\" - """, [?MYSQL_HOST, ?MYSQL_DEFAULT_PORT])), + RawConfig = list_to_binary( + io_lib:format( + "" + "\n" + " auto_reconnect = true\n" + " database = mqtt\n" + " username= root\n" + " password = public\n" + " pool_size = 8\n" + " server = \"~s:~b\"\n" + " " + "", + [?MYSQL_HOST, ?MYSQL_DEFAULT_PORT] + ) + ), {ok, Config} = hocon:binary(RawConfig), #{<<"config">> => Config}. diff --git a/apps/emqx_connector/test/emqx_connector_pgsql_SUITE.erl b/apps/emqx_connector/test/emqx_connector_pgsql_SUITE.erl index 0252e0816..b7044ea38 100644 --- a/apps/emqx_connector/test/emqx_connector_pgsql_SUITE.erl +++ b/apps/emqx_connector/test/emqx_connector_pgsql_SUITE.erl @@ -65,20 +65,24 @@ t_lifecycle(_Config) -> perform_lifecycle_check(PoolName, InitialConfig) -> {ok, #{config := CheckedConfig}} = emqx_resource:check_config(?PGSQL_RESOURCE_MOD, InitialConfig), - {ok, #{state := #{poolname := ReturnedPoolName} = State, - status := InitialStatus}} - = emqx_resource:create_local( - PoolName, - ?CONNECTOR_RESOURCE_GROUP, - ?PGSQL_RESOURCE_MOD, - CheckedConfig, - #{} - ), + {ok, #{ + state := #{poolname := ReturnedPoolName} = State, + status := InitialStatus + }} = + emqx_resource:create_local( + PoolName, + ?CONNECTOR_RESOURCE_GROUP, + ?PGSQL_RESOURCE_MOD, + CheckedConfig, + #{} + ), ?assertEqual(InitialStatus, connected), % Instance should match the state and status of the just started resource - {ok, ?CONNECTOR_RESOURCE_GROUP, #{state := State, - status := InitialStatus}} - = emqx_resource:get_instance(PoolName), + {ok, 
?CONNECTOR_RESOURCE_GROUP, #{ + state := State, + status := InitialStatus + }} = + emqx_resource:get_instance(PoolName), ?assertEqual(ok, emqx_resource:health_check(PoolName)), % % Perform query as further check that the resource is working as expected ?assertMatch({ok, _, [{1}]}, emqx_resource:query(PoolName, test_query_no_params())), @@ -86,11 +90,13 @@ perform_lifecycle_check(PoolName, InitialConfig) -> ?assertEqual(ok, emqx_resource:stop(PoolName)), % Resource will be listed still, but state will be changed and healthcheck will fail % as the worker no longer exists. - {ok, ?CONNECTOR_RESOURCE_GROUP, #{state := State, - status := StoppedStatus}} - = emqx_resource:get_instance(PoolName), + {ok, ?CONNECTOR_RESOURCE_GROUP, #{ + state := State, + status := StoppedStatus + }} = + emqx_resource:get_instance(PoolName), ?assertEqual(StoppedStatus, disconnected), - ?assertEqual({error,health_check_failed}, emqx_resource:health_check(PoolName)), + ?assertEqual({error, health_check_failed}, emqx_resource:health_check(PoolName)), % Resource healthcheck shortcuts things by checking ets. Go deeper by checking pool itself. 
?assertEqual({error, not_found}, ecpool:stop_sup_pool(ReturnedPoolName)), % Can call stop/1 again on an already stopped instance @@ -99,8 +105,8 @@ perform_lifecycle_check(PoolName, InitialConfig) -> ?assertEqual(ok, emqx_resource:restart(PoolName)), % async restart, need to wait resource timer:sleep(500), - {ok, ?CONNECTOR_RESOURCE_GROUP, #{status := InitialStatus}} - = emqx_resource:get_instance(PoolName), + {ok, ?CONNECTOR_RESOURCE_GROUP, #{status := InitialStatus}} = + emqx_resource:get_instance(PoolName), ?assertEqual(ok, emqx_resource:health_check(PoolName)), ?assertMatch({ok, _, [{1}]}, emqx_resource:query(PoolName, test_query_no_params())), ?assertMatch({ok, _, [{1}]}, emqx_resource:query(PoolName, test_query_with_params())), @@ -115,14 +121,21 @@ perform_lifecycle_check(PoolName, InitialConfig) -> % %%------------------------------------------------------------------------------ pgsql_config() -> - RawConfig = list_to_binary(io_lib:format(""" - auto_reconnect = true - database = mqtt - username= root - password = public - pool_size = 8 - server = \"~s:~b\" - """, [?PGSQL_HOST, ?PGSQL_DEFAULT_PORT])), + RawConfig = list_to_binary( + io_lib:format( + "" + "\n" + " auto_reconnect = true\n" + " database = mqtt\n" + " username= root\n" + " password = public\n" + " pool_size = 8\n" + " server = \"~s:~b\"\n" + " " + "", + [?PGSQL_HOST, ?PGSQL_DEFAULT_PORT] + ) + ), {ok, Config} = hocon:binary(RawConfig), #{<<"config">> => Config}. 
diff --git a/apps/emqx_connector/test/emqx_connector_redis_SUITE.erl b/apps/emqx_connector/test/emqx_connector_redis_SUITE.erl index 8e473c397..64dd9e683 100644 --- a/apps/emqx_connector/test/emqx_connector_redis_SUITE.erl +++ b/apps/emqx_connector/test/emqx_connector_redis_SUITE.erl @@ -80,8 +80,10 @@ t_sentinel_lifecycle(_Config) -> perform_lifecycle_check(PoolName, InitialConfig, RedisCommand) -> {ok, #{config := CheckedConfig}} = emqx_resource:check_config(?REDIS_RESOURCE_MOD, InitialConfig), - {ok, #{state := #{poolname := ReturnedPoolName} = State, - status := InitialStatus}} = emqx_resource:create_local( + {ok, #{ + state := #{poolname := ReturnedPoolName} = State, + status := InitialStatus + }} = emqx_resource:create_local( PoolName, ?CONNECTOR_RESOURCE_GROUP, ?REDIS_RESOURCE_MOD, @@ -90,20 +92,24 @@ perform_lifecycle_check(PoolName, InitialConfig, RedisCommand) -> ), ?assertEqual(InitialStatus, connected), % Instance should match the state and status of the just started resource - {ok, ?CONNECTOR_RESOURCE_GROUP, #{state := State, - status := InitialStatus}} - = emqx_resource:get_instance(PoolName), + {ok, ?CONNECTOR_RESOURCE_GROUP, #{ + state := State, + status := InitialStatus + }} = + emqx_resource:get_instance(PoolName), ?assertEqual(ok, emqx_resource:health_check(PoolName)), % Perform query as further check that the resource is working as expected ?assertEqual({ok, <<"PONG">>}, emqx_resource:query(PoolName, {cmd, RedisCommand})), ?assertEqual(ok, emqx_resource:stop(PoolName)), % Resource will be listed still, but state will be changed and healthcheck will fail % as the worker no longer exists. 
- {ok, ?CONNECTOR_RESOURCE_GROUP, #{state := State, - status := StoppedStatus}} - = emqx_resource:get_instance(PoolName), + {ok, ?CONNECTOR_RESOURCE_GROUP, #{ + state := State, + status := StoppedStatus + }} = + emqx_resource:get_instance(PoolName), ?assertEqual(StoppedStatus, disconnected), - ?assertEqual({error,health_check_failed}, emqx_resource:health_check(PoolName)), + ?assertEqual({error, health_check_failed}, emqx_resource:health_check(PoolName)), % Resource healthcheck shortcuts things by checking ets. Go deeper by checking pool itself. ?assertEqual({error, not_found}, ecpool:stop_sup_pool(ReturnedPoolName)), % Can call stop/1 again on an already stopped instance @@ -112,8 +118,8 @@ perform_lifecycle_check(PoolName, InitialConfig, RedisCommand) -> ?assertEqual(ok, emqx_resource:restart(PoolName)), % async restart, need to wait resource timer:sleep(500), - {ok, ?CONNECTOR_RESOURCE_GROUP, #{status := InitialStatus}} - = emqx_resource:get_instance(PoolName), + {ok, ?CONNECTOR_RESOURCE_GROUP, #{status := InitialStatus}} = + emqx_resource:get_instance(PoolName), ?assertEqual(ok, emqx_resource:health_check(PoolName)), ?assertEqual({ok, <<"PONG">>}, emqx_resource:query(PoolName, {cmd, RedisCommand})), % Stop and remove the resource in one go. @@ -136,14 +142,21 @@ redis_config_sentinel() -> redis_config_base("sentinel", "servers"). redis_config_base(Type, ServerKey) -> - RawConfig = list_to_binary(io_lib:format(""" - auto_reconnect = true - database = 1 - pool_size = 8 - redis_type = ~s - password = public - ~s = \"~s:~b\" - """, [Type, ServerKey, ?REDIS_HOST, ?REDIS_PORT])), + RawConfig = list_to_binary( + io_lib:format( + "" + "\n" + " auto_reconnect = true\n" + " database = 1\n" + " pool_size = 8\n" + " redis_type = ~s\n" + " password = public\n" + " ~s = \"~s:~b\"\n" + " " + "", + [Type, ServerKey, ?REDIS_HOST, ?REDIS_PORT] + ) + ), {ok, Config} = hocon:binary(RawConfig), #{<<"config">> => Config}. 
diff --git a/apps/emqx_connector/test/emqx_connector_test_helpers.erl b/apps/emqx_connector/test/emqx_connector_test_helpers.erl index 9e3fc5257..ea3380e85 100644 --- a/apps/emqx_connector/test/emqx_connector_test_helpers.erl +++ b/apps/emqx_connector/test/emqx_connector_test_helpers.erl @@ -19,10 +19,11 @@ -include_lib("common_test/include/ct.hrl"). -include_lib("eunit/include/eunit.hrl"). --export([ check_fields/1 - , start_apps/1 - , stop_apps/1 - ]). +-export([ + check_fields/1, + start_apps/1, + stop_apps/1 +]). check_fields({FieldName, FieldValue}) -> ?assert(is_atom(FieldName)), @@ -30,10 +31,10 @@ check_fields({FieldName, FieldValue}) -> is_map(FieldValue) -> ct:pal("~p~n", [{FieldName, FieldValue}]), ?assert( - (maps:is_key(type, FieldValue) - andalso maps:is_key(default, FieldValue)) - orelse ((maps:is_key(required, FieldValue) - andalso maps:get(required, FieldValue) =:= false)) + (maps:is_key(type, FieldValue) andalso + maps:is_key(default, FieldValue)) orelse + (maps:is_key(required, FieldValue) andalso + maps:get(required, FieldValue) =:= false) ); true -> ?assert(is_function(FieldValue)) diff --git a/apps/emqx_plugins/rebar.config b/apps/emqx_plugins/rebar.config index 528efecb6..9f17b7657 100644 --- a/apps/emqx_plugins/rebar.config +++ b/apps/emqx_plugins/rebar.config @@ -1,4 +1,5 @@ %% -*- mode: erlang -*- -{deps, [ {emqx, {path, "../emqx"}} - ]}. +{deps, [{emqx, {path, "../emqx"}}]}. + +{project_plugins, [erlfmt]}. diff --git a/apps/emqx_plugins/src/emqx_plugins.app.src b/apps/emqx_plugins/src/emqx_plugins.app.src index cc5b846b1..1635bb516 100644 --- a/apps/emqx_plugins/src/emqx_plugins.app.src +++ b/apps/emqx_plugins/src/emqx_plugins.app.src @@ -1,9 +1,9 @@ %% -*- mode: erlang -*- -{application, emqx_plugins, - [{description, "EMQX Plugin Management"}, - {vsn, "0.1.0"}, - {modules, []}, - {mod, {emqx_plugins_app,[]}}, - {applications, [kernel,stdlib,emqx]}, - {env, []} - ]}. 
+{application, emqx_plugins, [ + {description, "EMQX Plugin Management"}, + {vsn, "0.1.0"}, + {modules, []}, + {mod, {emqx_plugins_app, []}}, + {applications, [kernel, stdlib, emqx]}, + {env, []} +]}. diff --git a/apps/emqx_plugins/src/emqx_plugins.erl b/apps/emqx_plugins/src/emqx_plugins.erl index 09b7a735e..0376eaf27 100644 --- a/apps/emqx_plugins/src/emqx_plugins.erl +++ b/apps/emqx_plugins/src/emqx_plugins.erl @@ -19,35 +19,37 @@ -include_lib("emqx/include/emqx.hrl"). -include_lib("emqx/include/logger.hrl"). --export([ ensure_installed/1 - , ensure_uninstalled/1 - , ensure_enabled/1 - , ensure_enabled/2 - , ensure_disabled/1 - , purge/1 - , delete_package/1 - ]). +-export([ + ensure_installed/1, + ensure_uninstalled/1, + ensure_enabled/1, + ensure_enabled/2, + ensure_disabled/1, + purge/1, + delete_package/1 +]). --export([ ensure_started/0 - , ensure_started/1 - , ensure_stopped/0 - , ensure_stopped/1 - , restart/1 - , list/0 - , describe/1 - , parse_name_vsn/1 - ]). +-export([ + ensure_started/0, + ensure_started/1, + ensure_stopped/0, + ensure_stopped/1, + restart/1, + list/0, + describe/1, + parse_name_vsn/1 +]). --export([ get_config/2 - , put_config/2 - ]). +-export([ + get_config/2, + put_config/2 +]). %% internal --export([ do_ensure_started/1 - ]). +-export([do_ensure_started/1]). -export([ - install_dir/0 - ]). + install_dir/0 +]). -ifdef(TEST). -compile(export_all). @@ -58,8 +60,10 @@ -include_lib("emqx/include/logger.hrl"). -include("emqx_plugins.hrl"). --type name_vsn() :: binary() | string(). %% "my_plugin-0.1.0" --type plugin() :: map(). %% the parse result of the JSON info file +%% "my_plugin-0.1.0" +-type name_vsn() :: binary() | string(). +%% the parse result of the JSON info file +-type plugin() :: map(). -type position() :: no_move | front | rear | {before, name_vsn()} | {behind, name_vsn()}. 
%%-------------------------------------------------------------------- @@ -86,22 +90,25 @@ do_ensure_installed(NameVsn) -> case erl_tar:extract(TarGz, [{cwd, install_dir()}, compressed]) of ok -> case read_plugin(NameVsn, #{}) of - {ok, _} -> ok; + {ok, _} -> + ok; {error, Reason} -> ?SLOG(warning, Reason#{msg => "failed_to_read_after_install"}), _ = ensure_uninstalled(NameVsn), {error, Reason} end; {error, {_, enoent}} -> - {error, #{ reason => "failed_to_extract_plugin_package" - , path => TarGz - , return => not_found - }}; + {error, #{ + reason => "failed_to_extract_plugin_package", + path => TarGz, + return => not_found + }}; {error, Reason} -> - {error, #{ reason => "bad_plugin_package" - , path => TarGz - , return => Reason - }} + {error, #{ + reason => "bad_plugin_package", + path => TarGz, + return => Reason + }} end. %% @doc Ensure files and directories for the given plugin are delete. @@ -110,13 +117,15 @@ do_ensure_installed(NameVsn) -> ensure_uninstalled(NameVsn) -> case read_plugin(NameVsn, #{}) of {ok, #{running_status := RunningSt}} when RunningSt =/= stopped -> - {error, #{reason => "bad_plugin_running_status", - hint => "stop_the_plugin_first" - }}; + {error, #{ + reason => "bad_plugin_running_status", + hint => "stop_the_plugin_first" + }}; {ok, #{config_status := enabled}} -> - {error, #{reason => "bad_plugin_config_status", - hint => "disable_the_plugin_first" - }}; + {error, #{ + reason => "bad_plugin_config_status", + hint => "disable_the_plugin_first" + }}; _ -> purge(NameVsn) end. 
@@ -141,9 +150,10 @@ ensure_state(NameVsn, Position, State) when is_binary(NameVsn) -> ensure_state(NameVsn, Position, State) -> case read_plugin(NameVsn, #{}) of {ok, _} -> - Item = #{ name_vsn => NameVsn - , enable => State - }, + Item = #{ + name_vsn => NameVsn, + enable => State + }, tryit("ensure_state", fun() -> ensure_configured(Item, Position) end); {error, Reason} -> {error, Reason} @@ -175,18 +185,19 @@ add_new_configured(Configured, {Action, NameVsn}, Item) -> SplitFun = fun(#{name_vsn := Nv}) -> bin(Nv) =/= bin(NameVsn) end, {Front, Rear} = lists:splitwith(SplitFun, Configured), Rear =:= [] andalso - throw(#{error => "position_anchor_plugin_not_configured", - hint => "maybe_install_and_configure", - name_vsn => NameVsn - }), + throw(#{ + error => "position_anchor_plugin_not_configured", + hint => "maybe_install_and_configure", + name_vsn => NameVsn + }), case Action of - before -> Front ++ [Item | Rear]; + before -> + Front ++ [Item | Rear]; behind -> [Anchor | Rear0] = Rear, Front ++ [Anchor, Item | Rear0] end. - %% @doc Delete the package file. -spec delete_package(name_vsn()) -> ok. delete_package(NameVsn) -> @@ -198,9 +209,11 @@ delete_package(NameVsn) -> {error, enoent} -> ok; {error, Reason} -> - ?SLOG(error, #{msg => "failed_to_delete_package_file", - path => File, - reason => Reason}), + ?SLOG(error, #{ + msg => "failed_to_delete_package_file", + path => File, + reason => Reason + }), {error, Reason} end. @@ -219,9 +232,11 @@ purge(NameVsn) -> {error, enoent} -> ok; {error, Reason} -> - ?SLOG(error, #{msg => "failed_to_purge_plugin_dir", - dir => Dir, - reason => Reason}), + ?SLOG(error, #{ + msg => "failed_to_purge_plugin_dir", + dir => Dir, + reason => Reason + }), {error, Reason} end. @@ -235,10 +250,13 @@ ensure_started() -> -spec ensure_started(name_vsn()) -> ok | {error, term()}. 
ensure_started(NameVsn) -> case do_ensure_started(NameVsn) of - ok -> ok; + ok -> + ok; {error, Reason} -> - ?SLOG(alert, #{msg => "failed_to_start_plugin", - reason => Reason}), + ?SLOG(alert, #{ + msg => "failed_to_start_plugin", + reason => Reason + }), {error, Reason} end. @@ -250,11 +268,13 @@ ensure_stopped() -> %% @doc Stop a plugin from Management API or CLI. -spec ensure_stopped(name_vsn()) -> ok | {error, term()}. ensure_stopped(NameVsn) -> - tryit("stop_plugin", - fun() -> - Plugin = do_read_plugin(NameVsn), - ensure_apps_stopped(Plugin) - end). + tryit( + "stop_plugin", + fun() -> + Plugin = do_read_plugin(NameVsn), + ensure_apps_stopped(Plugin) + end + ). %% @doc Stop and then start the plugin. restart(NameVsn) -> @@ -269,39 +289,45 @@ restart(NameVsn) -> list() -> Pattern = filename:join([install_dir(), "*", "release.json"]), All = lists:filtermap( - fun(JsonFile) -> - case read_plugin({file, JsonFile}, #{}) of - {ok, Info} -> - {true, Info}; - {error, Reason} -> - ?SLOG(warning, Reason), - false - end - end, filelib:wildcard(Pattern)), + fun(JsonFile) -> + case read_plugin({file, JsonFile}, #{}) of + {ok, Info} -> + {true, Info}; + {error, Reason} -> + ?SLOG(warning, Reason), + false + end + end, + filelib:wildcard(Pattern) + ), list(configured(), All). %% Make sure configured ones are ordered in front. -list([], All) -> All; +list([], All) -> + All; list([#{name_vsn := NameVsn} | Rest], All) -> SplitF = fun(#{<<"name">> := Name, <<"rel_vsn">> := Vsn}) -> - bin([Name, "-", Vsn]) =/= bin(NameVsn) - end, + bin([Name, "-", Vsn]) =/= bin(NameVsn) + end, case lists:splitwith(SplitF, All) of {_, []} -> - ?SLOG(warning, #{msg => "configured_plugin_not_installed", - name_vsn => NameVsn - }), + ?SLOG(warning, #{ + msg => "configured_plugin_not_installed", + name_vsn => NameVsn + }), list(Rest, All); {Front, [I | Rear]} -> [I | list(Rest, Front ++ Rear)] end. 
do_ensure_started(NameVsn) -> - tryit("start_plugins", - fun() -> - Plugin = do_read_plugin(NameVsn), - ok = load_code_start_apps(NameVsn, Plugin) - end). + tryit( + "start_plugins", + fun() -> + Plugin = do_read_plugin(NameVsn), + ok = load_code_start_apps(NameVsn, Plugin) + end + ). %% try the function, catch 'throw' exceptions as normal 'error' return %% other exceptions with stacktrace returned. @@ -309,25 +335,28 @@ tryit(WhichOp, F) -> try F() catch - throw : Reason -> + throw:Reason -> %% thrown exceptions are known errors %% translate to a return value without stacktrace {error, Reason}; - error : Reason : Stacktrace -> + error:Reason:Stacktrace -> %% unexpected errors, log stacktrace - ?SLOG(warning, #{ msg => "plugin_op_failed" - , which_op => WhichOp - , exception => Reason - , stacktrace => Stacktrace - }), + ?SLOG(warning, #{ + msg => "plugin_op_failed", + which_op => WhichOp, + exception => Reason, + stacktrace => Stacktrace + }), {error, {failed, WhichOp}} end. %% read plugin info from the JSON file %% returns {ok, Info} or {error, Reason} read_plugin(NameVsn, Options) -> - tryit("read_plugin_info", - fun() -> {ok, do_read_plugin(NameVsn, Options)} end). + tryit( + "read_plugin_info", + fun() -> {ok, do_read_plugin(NameVsn, Options)} end + ). do_read_plugin(Plugin) -> do_read_plugin(Plugin, #{}). @@ -339,10 +368,11 @@ do_read_plugin({file, InfoFile}, Options) -> Info1 = plugins_readme(NameVsn, Options, Info0), plugin_status(NameVsn, Info1); {error, Reason} -> - throw(#{error => "bad_info_file", - path => InfoFile, - return => Reason - }) + throw(#{ + error => "bad_info_file", + path => InfoFile, + return => Reason + }) end; do_read_plugin(NameVsn, Options) -> do_read_plugin({file, info_file(NameVsn)}, Options). @@ -352,7 +382,8 @@ plugins_readme(NameVsn, #{fill_readme := true}, Info) -> {ok, Bin} -> Info#{readme => Bin}; _ -> Info#{readme => <<>>} end; -plugins_readme(_NameVsn, _Options, Info) -> Info. 
+plugins_readme(_NameVsn, _Options, Info) -> + Info. plugin_status(NameVsn, Info) -> {AppName, _AppVsn} = parse_name_vsn(NameVsn), @@ -368,74 +399,91 @@ plugin_status(NameVsn, Info) -> end, Configured = lists:filtermap( fun(#{name_vsn := Nv, enable := St}) -> - case bin(Nv) =:= bin(NameVsn) of - true -> {true, St}; - false -> false - end - end, configured()), - ConfSt = case Configured of - [] -> not_configured; - [true] -> enabled; - [false] -> disabled - end, - Info#{ running_status => RunningSt - , config_status => ConfSt + case bin(Nv) =:= bin(NameVsn) of + true -> {true, St}; + false -> false + end + end, + configured() + ), + ConfSt = + case Configured of + [] -> not_configured; + [true] -> enabled; + [false] -> disabled + end, + Info#{ + running_status => RunningSt, + config_status => ConfSt }. bin(A) when is_atom(A) -> atom_to_binary(A, utf8); bin(L) when is_list(L) -> unicode:characters_to_binary(L, utf8); bin(B) when is_binary(B) -> B. -check_plugin(#{ <<"name">> := Name - , <<"rel_vsn">> := Vsn - , <<"rel_apps">> := Apps - , <<"description">> := _ - } = Info, NameVsn, File) -> +check_plugin( + #{ + <<"name">> := Name, + <<"rel_vsn">> := Vsn, + <<"rel_apps">> := Apps, + <<"description">> := _ + } = Info, + NameVsn, + File +) -> case bin(NameVsn) =:= bin([Name, "-", Vsn]) of true -> try - [_ | _ ] = Apps, %% assert + %% assert + [_ | _] = Apps, %% validate if the list is all - strings lists:foreach(fun parse_name_vsn/1, Apps) catch - _ : _ -> - throw(#{ error => "bad_rel_apps" - , rel_apps => Apps - , hint => "A non-empty string list of app_name-app_vsn format" - }) + _:_ -> + throw(#{ + error => "bad_rel_apps", + rel_apps => Apps, + hint => "A non-empty string list of app_name-app_vsn format" + }) end, Info; false -> - throw(#{ error => "name_vsn_mismatch" - , name_vsn => NameVsn - , path => File - , name => Name - , rel_vsn => Vsn - }) + throw(#{ + error => "name_vsn_mismatch", + name_vsn => NameVsn, + path => File, + name => Name, + rel_vsn => Vsn + }) 
end; check_plugin(_What, NameVsn, File) -> - throw(#{ error => "bad_info_file_content" - , mandatory_fields => [rel_vsn, name, rel_apps, description] - , name_vsn => NameVsn - , path => File - }). + throw(#{ + error => "bad_info_file_content", + mandatory_fields => [rel_vsn, name, rel_apps, description], + name_vsn => NameVsn, + path => File + }). load_code_start_apps(RelNameVsn, #{<<"rel_apps">> := Apps}) -> LibDir = filename:join([install_dir(), RelNameVsn]), RunningApps = running_apps(), %% load plugin apps and beam code AppNames = - lists:map(fun(AppNameVsn) -> - {AppName, AppVsn} = parse_name_vsn(AppNameVsn), - EbinDir = filename:join([LibDir, AppNameVsn, "ebin"]), - ok = load_plugin_app(AppName, AppVsn, EbinDir, RunningApps), - AppName - end, Apps), + lists:map( + fun(AppNameVsn) -> + {AppName, AppVsn} = parse_name_vsn(AppNameVsn), + EbinDir = filename:join([LibDir, AppNameVsn, "ebin"]), + ok = load_plugin_app(AppName, AppVsn, EbinDir, RunningApps), + AppName + end, + Apps + ), lists:foreach(fun start_app/1, AppNames). load_plugin_app(AppName, AppVsn, Ebin, RunningApps) -> case lists:keyfind(AppName, 1, RunningApps) of - false -> do_load_plugin_app(AppName, Ebin); + false -> + do_load_plugin_app(AppName, Ebin); {_, Vsn} -> case bin(Vsn) =:= bin(AppVsn) of true -> @@ -443,10 +491,12 @@ load_plugin_app(AppName, AppVsn, Ebin, RunningApps) -> ok; false -> %% running but a different version - ?SLOG(warning, #{msg => "plugin_app_already_running", name => AppName, - running_vsn => Vsn, - loading_vsn => AppVsn - }) + ?SLOG(warning, #{ + msg => "plugin_app_already_running", + name => AppName, + running_vsn => Vsn, + loading_vsn => AppVsn + }) end end. 
@@ -457,21 +507,31 @@ do_load_plugin_app(AppName, Ebin) -> Modules = filelib:wildcard(filename:join([Ebin, "*.beam"])), lists:foreach( fun(BeamFile) -> - Module = list_to_atom(filename:basename(BeamFile, ".beam")), - case code:load_file(Module) of - {module, _} -> ok; - {error, Reason} -> throw(#{error => "failed_to_load_plugin_beam", - path => BeamFile, - reason => Reason - }) - end - end, Modules), + Module = list_to_atom(filename:basename(BeamFile, ".beam")), + case code:load_file(Module) of + {module, _} -> + ok; + {error, Reason} -> + throw(#{ + error => "failed_to_load_plugin_beam", + path => BeamFile, + reason => Reason + }) + end + end, + Modules + ), case application:load(AppName) of - ok -> ok; - {error, {already_loaded, _}} -> ok; - {error, Reason} -> throw(#{error => "failed_to_load_plugin_app", - name => AppName, - reason => Reason}) + ok -> + ok; + {error, {already_loaded, _}} -> + ok; + {error, Reason} -> + throw(#{ + error => "failed_to_load_plugin_app", + name => AppName, + reason => Reason + }) end. start_app(App) -> @@ -484,11 +544,12 @@ start_app(App) -> ?SLOG(debug, #{msg => "started_plugin_app", app => App}), ok; {error, {ErrApp, Reason}} -> - throw(#{error => "failed_to_start_plugin_app", - app => App, - err_app => ErrApp, - reason => Reason - }) + throw(#{ + error => "failed_to_start_plugin_app", + app => App, + err_app => ErrApp, + reason => Reason + }) end. 
%% Stop all apps installed by the plugin package, @@ -496,18 +557,22 @@ start_app(App) -> ensure_apps_stopped(#{<<"rel_apps">> := Apps}) -> %% load plugin apps and beam code AppsToStop = - lists:map(fun(NameVsn) -> - {AppName, _AppVsn} = parse_name_vsn(NameVsn), - AppName - end, Apps), + lists:map( + fun(NameVsn) -> + {AppName, _AppVsn} = parse_name_vsn(NameVsn), + AppName + end, + Apps + ), case tryit("stop_apps", fun() -> stop_apps(AppsToStop) end) of {ok, []} -> %% all apps stopped ok; {ok, Left} -> - ?SLOG(warning, #{msg => "unabled_to_stop_plugin_apps", - apps => Left - }), + ?SLOG(warning, #{ + msg => "unabled_to_stop_plugin_apps", + apps => Left + }), ok; {error, Reason} -> {error, Reason} @@ -516,9 +581,12 @@ ensure_apps_stopped(#{<<"rel_apps">> := Apps}) -> stop_apps(Apps) -> RunningApps = running_apps(), case do_stop_apps(Apps, [], RunningApps) of - {ok, []} -> {ok, []}; %% all stopped - {ok, Remain} when Remain =:= Apps -> {ok, Apps}; %% no progress - {ok, Remain} -> stop_apps(Remain) %% try again + %% all stopped + {ok, []} -> {ok, []}; + %% no progress + {ok, Remain} when Remain =:= Apps -> {ok, Apps}; + %% try again + {ok, Remain} -> stop_apps(Remain) end. do_stop_apps([], Remain, _AllApps) -> @@ -553,11 +621,15 @@ unload_moudle_and_app(App) -> ok. is_needed_by_any(AppToStop, RunningApps) -> - lists:any(fun({RunningApp, _RunningAppVsn}) -> - is_needed_by(AppToStop, RunningApp) - end, RunningApps). + lists:any( + fun({RunningApp, _RunningAppVsn}) -> + is_needed_by(AppToStop, RunningApp) + end, + RunningApps + ). -is_needed_by(AppToStop, AppToStop) -> false; +is_needed_by(AppToStop, AppToStop) -> + false; is_needed_by(AppToStop, RunningApp) -> case application:get_key(RunningApp, applications) of {ok, Deps} -> lists:member(AppToStop, Deps); @@ -577,7 +649,8 @@ bin_key(Map) when is_map(Map) -> maps:fold(fun(K, V, Acc) -> Acc#{bin(K) => V} end, #{}, Map); bin_key(List = [#{} | _]) -> lists:map(fun(M) -> bin_key(M) end, List); -bin_key(Term) -> Term. 
+bin_key(Term) -> + Term. get_config(Key, Default) when is_atom(Key) -> get_config([Key], Default); @@ -604,8 +677,10 @@ for_plugin(#{name_vsn := NameVsn, enable := true}, Fun) -> {error, Reason} -> [{NameVsn, Reason}] end; for_plugin(#{name_vsn := NameVsn, enable := false}, _Fun) -> - ?SLOG(debug, #{msg => "plugin_disabled", - name_vsn => NameVsn}), + ?SLOG(debug, #{ + msg => "plugin_disabled", + name_vsn => NameVsn + }), []. parse_name_vsn(NameVsn) when is_binary(NameVsn) -> @@ -627,6 +702,9 @@ readme_file(NameVsn) -> filename:join([dir(NameVsn), "README.md"]). running_apps() -> - lists:map(fun({N, _, V}) -> - {N, V} - end, application:which_applications(infinity)). + lists:map( + fun({N, _, V}) -> + {N, V} + end, + application:which_applications(infinity) + ). diff --git a/apps/emqx_plugins/src/emqx_plugins_app.erl b/apps/emqx_plugins/src/emqx_plugins_app.erl index 70fab549f..5d3828fb5 100644 --- a/apps/emqx_plugins/src/emqx_plugins_app.erl +++ b/apps/emqx_plugins/src/emqx_plugins_app.erl @@ -18,12 +18,14 @@ -behaviour(application). --export([ start/2 - , stop/1 - ]). +-export([ + start/2, + stop/1 +]). start(_Type, _Args) -> - ok = emqx_plugins:ensure_started(), %% load all pre-configured + %% load all pre-configured + ok = emqx_plugins:ensure_started(), {ok, Sup} = emqx_plugins_sup:start_link(), {ok, Sup}. diff --git a/apps/emqx_plugins/src/emqx_plugins_cli.erl b/apps/emqx_plugins/src/emqx_plugins_cli.erl index 6e9f1dca2..2ea965ce7 100644 --- a/apps/emqx_plugins/src/emqx_plugins_cli.erl +++ b/apps/emqx_plugins/src/emqx_plugins_cli.erl @@ -16,21 +16,23 @@ -module(emqx_plugins_cli). --export([ list/1 - , describe/2 - , ensure_installed/2 - , ensure_uninstalled/2 - , ensure_started/2 - , ensure_stopped/2 - , restart/2 - , ensure_disabled/2 - , ensure_enabled/3 - ]). +-export([ + list/1, + describe/2, + ensure_installed/2, + ensure_uninstalled/2, + ensure_started/2, + ensure_stopped/2, + restart/2, + ensure_disabled/2, + ensure_enabled/3 +]). 
-include_lib("emqx/include/logger.hrl"). -define(PRINT(EXPR, LOG_FUN), - print(NameVsn, fun()-> EXPR end(), LOG_FUN, ?FUNCTION_NAME)). + print(NameVsn, fun() -> EXPR end(), LOG_FUN, ?FUNCTION_NAME) +). list(LogFun) -> LogFun("~ts~n", [to_json(emqx_plugins:list())]). @@ -43,9 +45,11 @@ describe(NameVsn, LogFun) -> %% this should not happen unless the package is manually installed %% corrupted packages installed from emqx_plugins:ensure_installed %% should not leave behind corrupted files - ?SLOG(error, #{msg => "failed_to_describe_plugin", - name_vsn => NameVsn, - cause => Reason}), + ?SLOG(error, #{ + msg => "failed_to_describe_plugin", + name_vsn => NameVsn, + cause => Reason + }), %% do nothing to the CLI console ok end. @@ -75,14 +79,18 @@ to_json(Input) -> emqx_logger_jsonfmt:best_effort_json(Input). print(NameVsn, Res, LogFun, Action) -> - Obj = #{action => Action, - name_vsn => NameVsn}, + Obj = #{ + action => Action, + name_vsn => NameVsn + }, JsonReady = case Res of ok -> Obj#{result => ok}; {error, Reason} -> - Obj#{result => not_ok, - cause => Reason} + Obj#{ + result => not_ok, + cause => Reason + } end, LogFun("~ts~n", [to_json(JsonReady)]). diff --git a/apps/emqx_plugins/src/emqx_plugins_schema.erl b/apps/emqx_plugins/src/emqx_plugins_schema.erl index eed85558c..ceb8b992f 100644 --- a/apps/emqx_plugins/src/emqx_plugins_schema.erl +++ b/apps/emqx_plugins/src/emqx_plugins_schema.erl @@ -18,10 +18,11 @@ -behaviour(hocon_schema). --export([ roots/0 - , fields/1 - , namespace/0 - ]). +-export([ + roots/0, + fields/1, + namespace/0 +]). -include_lib("hocon/include/hoconsc.hrl"). -include("emqx_plugins.hrl"). @@ -31,31 +32,41 @@ namespace() -> "plugin". roots() -> [?CONF_ROOT]. fields(?CONF_ROOT) -> - #{fields => root_fields(), - desc => ?DESC(?CONF_ROOT) - }; + #{ + fields => root_fields(), + desc => ?DESC(?CONF_ROOT) + }; fields(state) -> - #{ fields => state_fields(), - desc => ?DESC(state) - }. + #{ + fields => state_fields(), + desc => ?DESC(state) + }. 
state_fields() -> - [ {name_vsn, - hoconsc:mk(string(), - #{ desc => ?DESC(name_vsn) - , required => true - })} - , {enable, - hoconsc:mk(boolean(), - #{ desc => ?DESC(enable) - , required => true - })} + [ + {name_vsn, + hoconsc:mk( + string(), + #{ + desc => ?DESC(name_vsn), + required => true + } + )}, + {enable, + hoconsc:mk( + boolean(), + #{ + desc => ?DESC(enable), + required => true + } + )} ]. root_fields() -> - [ {states, fun states/1} - , {install_dir, fun install_dir/1} - , {check_interval, fun check_interval/1} + [ + {states, fun states/1}, + {install_dir, fun install_dir/1}, + {check_interval, fun check_interval/1} ]. states(type) -> hoconsc:array(hoconsc:ref(?MODULE, state)); @@ -66,7 +77,8 @@ states(_) -> undefined. install_dir(type) -> string(); install_dir(required) -> false; -install_dir(default) -> "plugins"; %% runner's root dir +%% runner's root dir +install_dir(default) -> "plugins"; install_dir(T) when T =/= desc -> undefined; install_dir(desc) -> ?DESC(install_dir). diff --git a/apps/emqx_plugins/src/emqx_plugins_sup.erl b/apps/emqx_plugins/src/emqx_plugins_sup.erl index 488372cc6..687e5d39e 100644 --- a/apps/emqx_plugins/src/emqx_plugins_sup.erl +++ b/apps/emqx_plugins/src/emqx_plugins_sup.erl @@ -29,7 +29,8 @@ init([]) -> %% TODO: Add monitor plugins change. 
Monitor = emqx_plugins_monitor, _Children = [ - #{id => Monitor, + #{ + id => Monitor, start => {Monitor, start_link, []}, restart => permanent, shutdown => brutal_kill, diff --git a/apps/emqx_plugins/test/emqx_plugins_SUITE.erl b/apps/emqx_plugins/test/emqx_plugins_SUITE.erl index 00c8b4226..317519124 100644 --- a/apps/emqx_plugins/test/emqx_plugins_SUITE.erl +++ b/apps/emqx_plugins/test/emqx_plugins_SUITE.erl @@ -48,9 +48,12 @@ end_per_suite(Config) -> init_per_testcase(TestCase, Config) -> emqx_plugins:put_configured([]), - lists:foreach(fun(#{<<"name">> := Name, <<"rel_vsn">> := Vsn}) -> - emqx_plugins:purge(bin([Name, "-", Vsn])) - end, emqx_plugins:list()), + lists:foreach( + fun(#{<<"name">> := Name, <<"rel_vsn">> := Vsn}) -> + emqx_plugins:purge(bin([Name, "-", Vsn])) + end, + emqx_plugins:list() + ), ?MODULE:TestCase({init, Config}). end_per_testcase(TestCase, Config) -> @@ -59,35 +62,46 @@ end_per_testcase(TestCase, Config) -> build_demo_plugin_package() -> build_demo_plugin_package( - #{ target_path => "_build/default/emqx_plugrel" - , release_name => "emqx_plugin_template" - , git_url => "https://github.com/emqx/emqx-plugin-template.git" - , vsn => ?EMQX_PLUGIN_TEMPLATE_VSN - , workdir => "demo_src" - , shdir => emqx_plugins:install_dir() - }). + #{ + target_path => "_build/default/emqx_plugrel", + release_name => "emqx_plugin_template", + git_url => "https://github.com/emqx/emqx-plugin-template.git", + vsn => ?EMQX_PLUGIN_TEMPLATE_VSN, + workdir => "demo_src", + shdir => emqx_plugins:install_dir() + } + ). 
-build_demo_plugin_package(#{ target_path := TargetPath - , release_name := ReleaseName - , git_url := GitUrl - , vsn := PluginVsn - , workdir := DemoWorkDir - , shdir := WorkDir - } = Opts) -> +build_demo_plugin_package( + #{ + target_path := TargetPath, + release_name := ReleaseName, + git_url := GitUrl, + vsn := PluginVsn, + workdir := DemoWorkDir, + shdir := WorkDir + } = Opts +) -> BuildSh = filename:join([WorkDir, "build-demo-plugin.sh"]), - Cmd = string:join([ BuildSh - , PluginVsn - , TargetPath - , ReleaseName - , GitUrl - , DemoWorkDir - ], - " "), + Cmd = string:join( + [ + BuildSh, + PluginVsn, + TargetPath, + ReleaseName, + GitUrl, + DemoWorkDir + ], + " " + ), case emqx_run_sh:do(Cmd, [{cd, WorkDir}]) of {ok, _} -> - Pkg = filename:join([WorkDir, ReleaseName ++ "-" ++ - PluginVsn ++ - ?PACKAGE_SUFFIX]), + Pkg = filename:join([ + WorkDir, + ReleaseName ++ "-" ++ + PluginVsn ++ + ?PACKAGE_SUFFIX + ]), case filelib:is_regular(Pkg) of true -> Opts#{package => Pkg}; false -> error(#{reason => unexpected_build_result, not_found => Pkg}) @@ -104,16 +118,19 @@ bin(B) when is_binary(B) -> B. 
t_demo_install_start_stop_uninstall({init, Config}) -> Opts = #{package := Package} = build_demo_plugin_package(), NameVsn = filename:basename(Package, ?PACKAGE_SUFFIX), - [ {name_vsn, NameVsn} - , {plugin_opts, Opts} - | Config + [ + {name_vsn, NameVsn}, + {plugin_opts, Opts} + | Config ]; -t_demo_install_start_stop_uninstall({'end', _Config}) -> ok; +t_demo_install_start_stop_uninstall({'end', _Config}) -> + ok; t_demo_install_start_stop_uninstall(Config) -> NameVsn = proplists:get_value(name_vsn, Config), - #{ release_name := ReleaseName - , vsn := PluginVsn - } = proplists:get_value(plugin_opts, Config), + #{ + release_name := ReleaseName, + vsn := PluginVsn + } = proplists:get_value(plugin_opts, Config), ok = emqx_plugins:ensure_installed(NameVsn), %% idempotent ok = emqx_plugins:ensure_installed(NameVsn), @@ -129,8 +146,10 @@ t_demo_install_start_stop_uninstall(Config) -> ok = assert_app_running(map_sets, true), %% running app can not be un-installed - ?assertMatch({error, _}, - emqx_plugins:ensure_uninstalled(NameVsn)), + ?assertMatch( + {error, _}, + emqx_plugins:ensure_uninstalled(NameVsn) + ), %% stop ok = emqx_plugins:ensure_stopped(NameVsn), @@ -143,9 +162,15 @@ t_demo_install_start_stop_uninstall(Config) -> %% still listed after stopped ReleaseNameBin = list_to_binary(ReleaseName), PluginVsnBin = list_to_binary(PluginVsn), - ?assertMatch([#{<<"name">> := ReleaseNameBin, - <<"rel_vsn">> := PluginVsnBin - }], emqx_plugins:list()), + ?assertMatch( + [ + #{ + <<"name">> := ReleaseNameBin, + <<"rel_vsn">> := PluginVsnBin + } + ], + emqx_plugins:list() + ), ok = emqx_plugins:ensure_uninstalled(NameVsn), ?assertEqual([], emqx_plugins:list()), ok. 
@@ -164,23 +189,29 @@ t_position({init, Config}) -> #{package := Package} = build_demo_plugin_package(), NameVsn = filename:basename(Package, ?PACKAGE_SUFFIX), [{name_vsn, NameVsn} | Config]; -t_position({'end', _Config}) -> ok; +t_position({'end', _Config}) -> + ok; t_position(Config) -> NameVsn = proplists:get_value(name_vsn, Config), ok = emqx_plugins:ensure_installed(NameVsn), ok = emqx_plugins:ensure_enabled(NameVsn), - FakeInfo = "name=position, rel_vsn=\"2\", rel_apps=[\"position-9\"]," - "description=\"desc fake position app\"", + FakeInfo = + "name=position, rel_vsn=\"2\", rel_apps=[\"position-9\"]," + "description=\"desc fake position app\"", PosApp2 = <<"position-2">>, ok = write_info_file(Config, PosApp2, FakeInfo), %% fake a disabled plugin in config ok = emqx_plugins:ensure_state(PosApp2, {before, NameVsn}, false), ListFun = fun() -> - lists:map(fun( - #{<<"name">> := Name, <<"rel_vsn">> := Vsn}) -> - <> - end, emqx_plugins:list()) - end, + lists:map( + fun( + #{<<"name">> := Name, <<"rel_vsn">> := Vsn} + ) -> + <> + end, + emqx_plugins:list() + ) + end, ?assertEqual([PosApp2, list_to_binary(NameVsn)], ListFun()), emqx_plugins:ensure_enabled(PosApp2, {behind, NameVsn}), ?assertEqual([list_to_binary(NameVsn), PosApp2], ListFun()), @@ -197,13 +228,15 @@ t_start_restart_and_stop({init, Config}) -> #{package := Package} = build_demo_plugin_package(), NameVsn = filename:basename(Package, ?PACKAGE_SUFFIX), [{name_vsn, NameVsn} | Config]; -t_start_restart_and_stop({'end', _Config}) -> ok; +t_start_restart_and_stop({'end', _Config}) -> + ok; t_start_restart_and_stop(Config) -> NameVsn = proplists:get_value(name_vsn, Config), ok = emqx_plugins:ensure_installed(NameVsn), ok = emqx_plugins:ensure_enabled(NameVsn), - FakeInfo = "name=bar, rel_vsn=\"2\", rel_apps=[\"bar-9\"]," - "description=\"desc bar\"", + FakeInfo = + "name=bar, rel_vsn=\"2\", rel_apps=[\"bar-9\"]," + "description=\"desc bar\"", Bar2 = <<"bar-2">>, ok = write_info_file(Config, Bar2, FakeInfo), 
%% fake a disabled plugin in config @@ -216,8 +249,10 @@ t_start_restart_and_stop(Config) -> %% fake enable bar-2 ok = emqx_plugins:ensure_state(Bar2, rear, true), %% should cause an error - ?assertError(#{function := _, errors := [_ | _]}, - emqx_plugins:ensure_started()), + ?assertError( + #{function := _, errors := [_ | _]}, + emqx_plugins:ensure_started() + ), %% but demo plugin should still be running assert_app_running(emqx_plugin_template, true), @@ -255,9 +290,13 @@ t_enable_disable(Config) -> ?assertEqual([#{name_vsn => NameVsn, enable => false}], emqx_plugins:configured()), ok = emqx_plugins:ensure_enabled(bin(NameVsn)), ?assertEqual([#{name_vsn => NameVsn, enable => true}], emqx_plugins:configured()), - ?assertMatch({error, #{reason := "bad_plugin_config_status", - hint := "disable_the_plugin_first" - }}, emqx_plugins:ensure_uninstalled(NameVsn)), + ?assertMatch( + {error, #{ + reason := "bad_plugin_config_status", + hint := "disable_the_plugin_first" + }}, + emqx_plugins:ensure_uninstalled(NameVsn) + ), ok = emqx_plugins:ensure_disabled(bin(NameVsn)), ok = emqx_plugins:ensure_uninstalled(NameVsn), ?assertMatch({error, _}, emqx_plugins:ensure_enabled(NameVsn)), @@ -271,20 +310,28 @@ assert_app_running(Name, false) -> AllApps = application:which_applications(), ?assertEqual(false, lists:keyfind(Name, 1, AllApps)). 
-t_bad_tar_gz({init, Config}) -> Config; -t_bad_tar_gz({'end', _Config}) -> ok; +t_bad_tar_gz({init, Config}) -> + Config; +t_bad_tar_gz({'end', _Config}) -> + ok; t_bad_tar_gz(Config) -> WorkDir = proplists:get_value(data_dir, Config), FakeTarTz = filename:join([WorkDir, "fake-vsn.tar.gz"]), ok = file:write_file(FakeTarTz, "a\n"), - ?assertMatch({error, #{reason := "bad_plugin_package", - return := eof - }}, - emqx_plugins:ensure_installed("fake-vsn")), - ?assertMatch({error, #{reason := "failed_to_extract_plugin_package", - return := not_found - }}, - emqx_plugins:ensure_installed("nonexisting")), + ?assertMatch( + {error, #{ + reason := "bad_plugin_package", + return := eof + }}, + emqx_plugins:ensure_installed("fake-vsn") + ), + ?assertMatch( + {error, #{ + reason := "failed_to_extract_plugin_package", + return := not_found + }}, + emqx_plugins:ensure_installed("nonexisting") + ), ?assertEqual([], emqx_plugins:list()), ok = emqx_plugins:delete_package("fake-vsn"), %% idempotent @@ -292,8 +339,10 @@ t_bad_tar_gz(Config) -> %% create a corrupted .tar.gz %% failed install attempts should not leave behind extracted dir -t_bad_tar_gz2({init, Config}) -> Config; -t_bad_tar_gz2({'end', _Config}) -> ok; +t_bad_tar_gz2({init, Config}) -> + Config; +t_bad_tar_gz2({'end', _Config}) -> + ok; t_bad_tar_gz2(Config) -> WorkDir = proplists:get_value(data_dir, Config), NameVsn = "foo-0.2", @@ -310,45 +359,57 @@ t_bad_tar_gz2(Config) -> ?assertEqual({error, enoent}, file:read_file_info(emqx_plugins:dir(NameVsn))), ok = emqx_plugins:delete_package(NameVsn). 
-t_bad_info_json({init, Config}) -> Config; -t_bad_info_json({'end', _}) -> ok; +t_bad_info_json({init, Config}) -> + Config; +t_bad_info_json({'end', _}) -> + ok; t_bad_info_json(Config) -> NameVsn = "test-2", ok = write_info_file(Config, NameVsn, "bad-syntax"), - ?assertMatch({error, #{error := "bad_info_file", - return := {parse_error, _} - }}, - emqx_plugins:describe(NameVsn)), + ?assertMatch( + {error, #{ + error := "bad_info_file", + return := {parse_error, _} + }}, + emqx_plugins:describe(NameVsn) + ), ok = write_info_file(Config, NameVsn, "{\"bad\": \"obj\"}"), - ?assertMatch({error, #{error := "bad_info_file_content", - mandatory_fields := _ - }}, - emqx_plugins:describe(NameVsn)), + ?assertMatch( + {error, #{ + error := "bad_info_file_content", + mandatory_fields := _ + }}, + emqx_plugins:describe(NameVsn) + ), ?assertEqual([], emqx_plugins:list()), emqx_plugins:purge(NameVsn), ok. t_elixir_plugin({init, Config}) -> Opts0 = - #{ target_path => "_build/prod/plugrelex/elixir_plugin_template" - , release_name => "elixir_plugin_template" - , git_url => "https://github.com/emqx/emqx-elixir-plugin.git" - , vsn => ?EMQX_ELIXIR_PLUGIN_TEMPLATE_VSN - , workdir => "demo_src_elixir" - , shdir => emqx_plugins:install_dir() - }, + #{ + target_path => "_build/prod/plugrelex/elixir_plugin_template", + release_name => "elixir_plugin_template", + git_url => "https://github.com/emqx/emqx-elixir-plugin.git", + vsn => ?EMQX_ELIXIR_PLUGIN_TEMPLATE_VSN, + workdir => "demo_src_elixir", + shdir => emqx_plugins:install_dir() + }, Opts = #{package := Package} = build_demo_plugin_package(Opts0), NameVsn = filename:basename(Package, ?PACKAGE_SUFFIX), - [ {name_vsn, NameVsn} - , {plugin_opts, Opts} - | Config + [ + {name_vsn, NameVsn}, + {plugin_opts, Opts} + | Config ]; -t_elixir_plugin({'end', _Config}) -> ok; +t_elixir_plugin({'end', _Config}) -> + ok; t_elixir_plugin(Config) -> NameVsn = proplists:get_value(name_vsn, Config), - #{ release_name := ReleaseName - , vsn := PluginVsn 
- } = proplists:get_value(plugin_opts, Config), + #{ + release_name := ReleaseName, + vsn := PluginVsn + } = proplists:get_value(plugin_opts, Config), ok = emqx_plugins:ensure_installed(NameVsn), %% idempotent ok = emqx_plugins:ensure_installed(NameVsn), @@ -368,8 +429,10 @@ t_elixir_plugin(Config) -> 3 = 'Elixir.Kernel':'+'(1, 2), %% running app can not be un-installed - ?assertMatch({error, _}, - emqx_plugins:ensure_uninstalled(NameVsn)), + ?assertMatch( + {error, _}, + emqx_plugins:ensure_uninstalled(NameVsn) + ), %% stop ok = emqx_plugins:ensure_stopped(NameVsn), @@ -382,9 +445,15 @@ t_elixir_plugin(Config) -> %% still listed after stopped ReleaseNameBin = list_to_binary(ReleaseName), PluginVsnBin = list_to_binary(PluginVsn), - ?assertMatch([#{<<"name">> := ReleaseNameBin, - <<"rel_vsn">> := PluginVsnBin - }], emqx_plugins:list()), + ?assertMatch( + [ + #{ + <<"name">> := ReleaseNameBin, + <<"rel_vsn">> := PluginVsnBin + } + ], + emqx_plugins:list() + ), ok = emqx_plugins:ensure_uninstalled(NameVsn), ?assertEqual([], emqx_plugins:list()), ok. diff --git a/apps/emqx_plugins/test/emqx_plugins_tests.erl b/apps/emqx_plugins/test/emqx_plugins_tests.erl index f118cbb28..cda6dbf0f 100644 --- a/apps/emqx_plugins/test/emqx_plugins_tests.erl +++ b/apps/emqx_plugins/test/emqx_plugins_tests.erl @@ -23,23 +23,26 @@ ensure_configured_test_todo() -> meck_emqx(), - try test_ensure_configured() - after emqx_plugins:put_configured([]) + try + test_ensure_configured() + after + emqx_plugins:put_configured([]) end, meck:unload(emqx). 
- test_ensure_configured() -> ok = emqx_plugins:put_configured([]), - P1 =#{name_vsn => "p-1", enable => true}, - P2 =#{name_vsn => "p-2", enable => true}, - P3 =#{name_vsn => "p-3", enable => false}, + P1 = #{name_vsn => "p-1", enable => true}, + P2 = #{name_vsn => "p-2", enable => true}, + P3 = #{name_vsn => "p-3", enable => false}, emqx_plugins:ensure_configured(P1, front), emqx_plugins:ensure_configured(P2, {before, <<"p-1">>}), emqx_plugins:ensure_configured(P3, {before, <<"p-1">>}), ?assertEqual([P2, P3, P1], emqx_plugins:configured()), - ?assertThrow(#{error := "position_anchor_plugin_not_configured"}, - emqx_plugins:ensure_configured(P3, {before, <<"unknown-x">>})). + ?assertThrow( + #{error := "position_anchor_plugin_not_configured"}, + emqx_plugins:ensure_configured(P3, {before, <<"unknown-x">>}) + ). read_plugin_test() -> meck_emqx(), @@ -47,16 +50,20 @@ read_plugin_test() -> fun(_Dir) -> NameVsn = "bar-5", InfoFile = emqx_plugins:info_file(NameVsn), - FakeInfo = "name=bar, rel_vsn=\"5\", rel_apps=[justname_no_vsn]," - "description=\"desc bar\"", + FakeInfo = + "name=bar, rel_vsn=\"5\", rel_apps=[justname_no_vsn]," + "description=\"desc bar\"", try ok = write_file(InfoFile, FakeInfo), - ?assertMatch({error, #{error := "bad_rel_apps"}}, - emqx_plugins:read_plugin(NameVsn, #{})) + ?assertMatch( + {error, #{error := "bad_rel_apps"}}, + emqx_plugins:read_plugin(NameVsn, #{}) + ) after emqx_plugins:purge(NameVsn) end - end), + end + ), meck:unload(emqx). with_rand_install_dir(F) -> @@ -91,7 +98,8 @@ delete_package_test() -> Dir = File, ok = filelib:ensure_dir(filename:join([Dir, "foo"])), ?assertMatch({error, _}, emqx_plugins:delete_package("a-1")) - end), + end + ), meck:unload(emqx). %% purge plugin's install dir should mostly work and return ok @@ -110,15 +118,19 @@ purge_test() -> %% write a file for the dir path ok = file:write_file(Dir, "a"), ?assertEqual(ok, emqx_plugins:purge("a-1")) - end), + end + ), meck:unload(emqx). 
meck_emqx() -> meck:new(emqx, [unstick, passthrough]), - meck:expect(emqx, update_config, + meck:expect( + emqx, + update_config, fun(Path, Values, _Opts) -> emqx_config:put(Path, Values) - end), + end + ), %meck:expect(emqx, get_config, % fun(KeyPath, Default) -> % Map = emqx:get_raw_config(KeyPath, Default), diff --git a/apps/emqx_prometheus/rebar.config b/apps/emqx_prometheus/rebar.config index 974192a41..910ee9f82 100644 --- a/apps/emqx_prometheus/rebar.config +++ b/apps/emqx_prometheus/rebar.config @@ -1,23 +1,32 @@ %% -*- mode: erlang -*- -{deps, - [ {emqx, {path, "../emqx"}}, - %% FIXME: tag this as v3.1.3 - {prometheus, {git, "https://github.com/deadtrickster/prometheus.erl", {tag, "v4.8.1"}}}, - {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.27.4"}}} - ]}. +{deps, [ + {emqx, {path, "../emqx"}}, + %% FIXME: tag this as v3.1.3 + {prometheus, {git, "https://github.com/deadtrickster/prometheus.erl", {tag, "v4.8.1"}}}, + {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.27.4"}}} +]}. {edoc_opts, [{preprocess, true}]}. -{erl_opts, [warn_unused_vars, - warn_shadow_vars, - warn_unused_import, - warn_obsolete_guard, - debug_info, - {parse_transform}]}. +{erl_opts, [ + warn_unused_vars, + warn_shadow_vars, + warn_unused_import, + warn_obsolete_guard, + debug_info, + {parse_transform} +]}. -{xref_checks, [undefined_function_calls, undefined_functions, - locals_not_used, deprecated_function_calls, - warnings_as_errors, deprecated_functions]}. +{xref_checks, [ + undefined_function_calls, + undefined_functions, + locals_not_used, + deprecated_function_calls, + warnings_as_errors, + deprecated_functions +]}. {cover_enabled, true}. {cover_opts, [verbose]}. {cover_export_enabled, true}. + +{project_plugins, [erlfmt]}. 
diff --git a/apps/emqx_prometheus/src/emqx_prometheus.app.src b/apps/emqx_prometheus/src/emqx_prometheus.app.src index 0edac7b69..de3089524 100644 --- a/apps/emqx_prometheus/src/emqx_prometheus.app.src +++ b/apps/emqx_prometheus/src/emqx_prometheus.app.src @@ -1,15 +1,17 @@ %% -*- mode: erlang -*- -{application, emqx_prometheus, - [{description, "Prometheus for EMQX"}, - {vsn, "5.0.0"}, % strict semver, bump manually! - {modules, []}, - {registered, [emqx_prometheus_sup]}, - {applications, [kernel,stdlib,prometheus,emqx]}, - {mod, {emqx_prometheus_app,[]}}, - {env, []}, - {licenses, ["Apache-2.0"]}, - {maintainers, ["EMQX Team "]}, - {links, [{"Homepage", "https://emqx.io/"}, - {"Github", "https://github.com/emqx/emqx-prometheus"} - ]} - ]}. +{application, emqx_prometheus, [ + {description, "Prometheus for EMQX"}, + % strict semver, bump manually! + {vsn, "5.0.0"}, + {modules, []}, + {registered, [emqx_prometheus_sup]}, + {applications, [kernel, stdlib, prometheus, emqx]}, + {mod, {emqx_prometheus_app, []}}, + {env, []}, + {licenses, ["Apache-2.0"]}, + {maintainers, ["EMQX Team "]}, + {links, [ + {"Homepage", "https://emqx.io/"}, + {"Github", "https://github.com/emqx/emqx-prometheus"} + ]} +]}. diff --git a/apps/emqx_prometheus/src/emqx_prometheus.erl b/apps/emqx_prometheus/src/emqx_prometheus.erl index 131e3fc12..4bbfbe524 100644 --- a/apps/emqx_prometheus/src/emqx_prometheus.erl +++ b/apps/emqx_prometheus/src/emqx_prometheus.erl @@ -28,38 +28,44 @@ -include_lib("prometheus/include/prometheus_model.hrl"). -include_lib("emqx/include/logger.hrl"). --import(prometheus_model_helpers, - [ create_mf/5 - , gauge_metric/1 - , counter_metric/1 - ]). +-import( + prometheus_model_helpers, + [ + create_mf/5, + gauge_metric/1, + counter_metric/1 + ] +). --export([ update/1 - , start/0 - , stop/0 - , restart/0 - % for rpc - , do_start/0 - , do_stop/0 - ]). +-export([ + update/1, + start/0, + stop/0, + restart/0, + % for rpc + do_start/0, + do_stop/0 +]). 
%% APIs -export([start_link/1]). %% gen_server callbacks --export([ init/1 - , handle_call/3 - , handle_cast/2 - , handle_info/2 - , code_change/3 - , terminate/2 - ]). +-export([ + init/1, + handle_call/3, + handle_cast/2, + handle_info/2, + code_change/3, + terminate/2 +]). %% prometheus_collector callback --export([ deregister_cleanup/1 - , collect_mf/2 - , collect_metrics/2 - ]). +-export([ + deregister_cleanup/1, + collect_mf/2, + collect_metrics/2 +]). -export([collect/1]). @@ -72,8 +78,13 @@ %%-------------------------------------------------------------------- %% update new config update(Config) -> - case emqx_conf:update([prometheus], Config, - #{rawconf_with_defaults => true, override_to => cluster}) of + case + emqx_conf:update( + [prometheus], + Config, + #{rawconf_with_defaults => true, override_to => cluster} + ) + of {ok, #{raw_config := NewConfigRows}} -> case maps:get(<<"enable">>, Config, true) of true -> @@ -131,13 +142,12 @@ handle_call(_Msg, _From, State) -> handle_cast(_Msg, State) -> {noreply, State}. -handle_info({timeout, R, ?TIMER_MSG}, State = #state{timer=R, push_gateway=Uri}) -> +handle_info({timeout, R, ?TIMER_MSG}, State = #state{timer = R, push_gateway = Uri}) -> [Name, Ip] = string:tokens(atom_to_list(node()), "@"), - Url = lists:concat([Uri, "/metrics/job/", Name, "/instance/",Name, "~", Ip]), + Url = lists:concat([Uri, "/metrics/job/", Name, "/instance/", Name, "~", Ip]), Data = prometheus_text_format:format(), httpc:request(post, {Url, [], "text/plain", Data}, [{autoredirect, true}], []), {noreply, ensure_timer(State)}; - handle_info(_Msg, State) -> {noreply, State}. 
@@ -176,14 +186,15 @@ collect(<<"json">>) -> Metrics = emqx_metrics:all(), Stats = emqx_stats:getstats(), VMData = emqx_vm_data(), - #{stats => maps:from_list([collect_stats(Name, Stats) || Name <- emqx_stats()]), - metrics => maps:from_list([collect_stats(Name, VMData) || Name <- emqx_vm()]), - packets => maps:from_list([collect_stats(Name, Metrics) || Name <- emqx_metrics_packets()]), - messages => maps:from_list([collect_stats(Name, Metrics) || Name <- emqx_metrics_messages()]), - delivery => maps:from_list([collect_stats(Name, Metrics) || Name <- emqx_metrics_delivery()]), - client => maps:from_list([collect_stats(Name, Metrics) || Name <- emqx_metrics_client()]), - session => maps:from_list([collect_stats(Name, Metrics) || Name <- emqx_metrics_session()])}; - + #{ + stats => maps:from_list([collect_stats(Name, Stats) || Name <- emqx_stats()]), + metrics => maps:from_list([collect_stats(Name, VMData) || Name <- emqx_vm()]), + packets => maps:from_list([collect_stats(Name, Metrics) || Name <- emqx_metrics_packets()]), + messages => maps:from_list([collect_stats(Name, Metrics) || Name <- emqx_metrics_messages()]), + delivery => maps:from_list([collect_stats(Name, Metrics) || Name <- emqx_metrics_delivery()]), + client => maps:from_list([collect_stats(Name, Metrics) || Name <- emqx_metrics_client()]), + session => maps:from_list([collect_stats(Name, Metrics) || Name <- emqx_metrics_session()]) + }; collect(<<"prometheus">>) -> prometheus_text_format:format(). 
@@ -219,13 +230,11 @@ emqx_collect(emqx_connections_count, Stats) -> gauge_metric(?C('connections.count', Stats)); emqx_collect(emqx_connections_max, Stats) -> gauge_metric(?C('connections.max', Stats)); - %% sessions emqx_collect(emqx_sessions_count, Stats) -> gauge_metric(?C('sessions.count', Stats)); emqx_collect(emqx_sessions_max, Stats) -> gauge_metric(?C('sessions.max', Stats)); - %% pub/sub stats emqx_collect(emqx_topics_count, Stats) -> gauge_metric(?C('topics.count', Stats)); @@ -247,13 +256,11 @@ emqx_collect(emqx_subscriptions_shared_count, Stats) -> gauge_metric(?C('subscriptions.shared.count', Stats)); emqx_collect(emqx_subscriptions_shared_max, Stats) -> gauge_metric(?C('subscriptions.shared.max', Stats)); - %% retained emqx_collect(emqx_retained_count, Stats) -> gauge_metric(?C('retained.count', Stats)); emqx_collect(emqx_retained_max, Stats) -> gauge_metric(?C('retained.max', Stats)); - %%-------------------------------------------------------------------- %% Metrics - packets & bytes @@ -262,13 +269,11 @@ emqx_collect(emqx_bytes_received, Metrics) -> counter_metric(?C('bytes.received', Metrics)); emqx_collect(emqx_bytes_sent, Metrics) -> counter_metric(?C('bytes.sent', Metrics)); - %% received.sent emqx_collect(emqx_packets_received, Metrics) -> counter_metric(?C('packets.received', Metrics)); emqx_collect(emqx_packets_sent, Metrics) -> counter_metric(?C('packets.sent', Metrics)); - %% connect emqx_collect(emqx_packets_connect, Metrics) -> counter_metric(?C('packets.connect.received', Metrics)); @@ -278,7 +283,6 @@ emqx_collect(emqx_packets_connack_error, Metrics) -> counter_metric(?C('packets.connack.error', Metrics)); emqx_collect(emqx_packets_connack_auth_error, Metrics) -> counter_metric(?C('packets.connack.auth_error', Metrics)); - %% sub.unsub emqx_collect(emqx_packets_subscribe_received, Metrics) -> counter_metric(?C('packets.subscribe.received', Metrics)); @@ -294,7 +298,6 @@ emqx_collect(emqx_packets_unsubscribe_error, Metrics) -> 
counter_metric(?C('packets.unsubscribe.error', Metrics)); emqx_collect(emqx_packets_unsuback_sent, Metrics) -> counter_metric(?C('packets.unsuback.sent', Metrics)); - %% publish.puback emqx_collect(emqx_packets_publish_received, Metrics) -> counter_metric(?C('packets.publish.received', Metrics)); @@ -308,7 +311,6 @@ emqx_collect(emqx_packets_publish_auth_error, Metrics) -> counter_metric(?C('packets.publish.auth_error', Metrics)); emqx_collect(emqx_packets_publish_dropped, Metrics) -> counter_metric(?C('packets.publish.dropped', Metrics)); - %% puback emqx_collect(emqx_packets_puback_received, Metrics) -> counter_metric(?C('packets.puback.received', Metrics)); @@ -318,7 +320,6 @@ emqx_collect(emqx_packets_puback_inuse, Metrics) -> counter_metric(?C('packets.puback.inuse', Metrics)); emqx_collect(emqx_packets_puback_missed, Metrics) -> counter_metric(?C('packets.puback.missed', Metrics)); - %% pubrec emqx_collect(emqx_packets_pubrec_received, Metrics) -> counter_metric(?C('packets.pubrec.received', Metrics)); @@ -328,7 +329,6 @@ emqx_collect(emqx_packets_pubrec_inuse, Metrics) -> counter_metric(?C('packets.pubrec.inuse', Metrics)); emqx_collect(emqx_packets_pubrec_missed, Metrics) -> counter_metric(?C('packets.pubrec.missed', Metrics)); - %% pubrel emqx_collect(emqx_packets_pubrel_received, Metrics) -> counter_metric(?C('packets.pubrel.received', Metrics)); @@ -336,7 +336,6 @@ emqx_collect(emqx_packets_pubrel_sent, Metrics) -> counter_metric(?C('packets.pubrel.sent', Metrics)); emqx_collect(emqx_packets_pubrel_missed, Metrics) -> counter_metric(?C('packets.pubrel.missed', Metrics)); - %% pubcomp emqx_collect(emqx_packets_pubcomp_received, Metrics) -> counter_metric(?C('packets.pubcomp.received', Metrics)); @@ -346,77 +345,59 @@ emqx_collect(emqx_packets_pubcomp_inuse, Metrics) -> counter_metric(?C('packets.pubcomp.inuse', Metrics)); emqx_collect(emqx_packets_pubcomp_missed, Metrics) -> counter_metric(?C('packets.pubcomp.missed', Metrics)); - %% pingreq 
emqx_collect(emqx_packets_pingreq_received, Metrics) -> counter_metric(?C('packets.pingreq.received', Metrics)); emqx_collect(emqx_packets_pingresp_sent, Metrics) -> counter_metric(?C('packets.pingresp.sent', Metrics)); - %% disconnect emqx_collect(emqx_packets_disconnect_received, Metrics) -> counter_metric(?C('packets.disconnect.received', Metrics)); emqx_collect(emqx_packets_disconnect_sent, Metrics) -> counter_metric(?C('packets.disconnect.sent', Metrics)); - %% auth emqx_collect(emqx_packets_auth_received, Metrics) -> counter_metric(?C('packets.auth.received', Metrics)); emqx_collect(emqx_packets_auth_sent, Metrics) -> counter_metric(?C('packets.auth.sent', Metrics)); - %%-------------------------------------------------------------------- %% Metrics - messages %% messages emqx_collect(emqx_messages_received, Metrics) -> counter_metric(?C('messages.received', Metrics)); - emqx_collect(emqx_messages_sent, Metrics) -> counter_metric(?C('messages.sent', Metrics)); - emqx_collect(emqx_messages_qos0_received, Metrics) -> counter_metric(?C('messages.qos0.received', Metrics)); emqx_collect(emqx_messages_qos0_sent, Metrics) -> counter_metric(?C('messages.qos0.sent', Metrics)); - emqx_collect(emqx_messages_qos1_received, Metrics) -> counter_metric(?C('messages.qos1.received', Metrics)); emqx_collect(emqx_messages_qos1_sent, Metrics) -> counter_metric(?C('messages.qos1.sent', Metrics)); - emqx_collect(emqx_messages_qos2_received, Metrics) -> counter_metric(?C('messages.qos2.received', Metrics)); emqx_collect(emqx_messages_qos2_sent, Metrics) -> counter_metric(?C('messages.qos2.sent', Metrics)); - emqx_collect(emqx_messages_publish, Metrics) -> counter_metric(?C('messages.publish', Metrics)); - emqx_collect(emqx_messages_dropped, Metrics) -> counter_metric(?C('messages.dropped', Metrics)); - emqx_collect(emqx_messages_dropped_expired, Metrics) -> counter_metric(?C('messages.dropped.await_pubrel_timeout', Metrics)); - emqx_collect(emqx_messages_dropped_no_subscribers, 
Metrics) -> counter_metric(?C('messages.dropped.no_subscribers', Metrics)); - emqx_collect(emqx_messages_forward, Metrics) -> counter_metric(?C('messages.forward', Metrics)); - emqx_collect(emqx_messages_retained, Metrics) -> counter_metric(?C('messages.retained', Metrics)); - emqx_collect(emqx_messages_delayed, Stats) -> counter_metric(?C('messages.delayed', Stats)); - emqx_collect(emqx_messages_delivered, Stats) -> counter_metric(?C('messages.delivered', Stats)); - emqx_collect(emqx_messages_acked, Stats) -> counter_metric(?C('messages.acked', Stats)); - %%-------------------------------------------------------------------- %% Metrics - delivery @@ -432,7 +413,6 @@ emqx_collect(emqx_delivery_dropped_queue_full, Stats) -> counter_metric(?C('delivery.dropped.queue_full', Stats)); emqx_collect(emqx_delivery_dropped_expired, Stats) -> counter_metric(?C('delivery.dropped.expired', Stats)); - %%-------------------------------------------------------------------- %% Metrics - client @@ -450,7 +430,6 @@ emqx_collect(emqx_client_unsubscribe, Stats) -> counter_metric(?C('client.unsubscribe', Stats)); emqx_collect(emqx_client_disconnected, Stats) -> counter_metric(?C('client.disconnected', Stats)); - %%-------------------------------------------------------------------- %% Metrics - session @@ -464,31 +443,23 @@ emqx_collect(emqx_session_discarded, Stats) -> counter_metric(?C('session.discarded', Stats)); emqx_collect(emqx_session_terminated, Stats) -> counter_metric(?C('session.terminated', Stats)); - %%-------------------------------------------------------------------- %% VM emqx_collect(emqx_vm_cpu_use, VMData) -> gauge_metric(?C(cpu_use, VMData)); - emqx_collect(emqx_vm_cpu_idle, VMData) -> gauge_metric(?C(cpu_idle, VMData)); - emqx_collect(emqx_vm_run_queue, VMData) -> gauge_metric(?C(run_queue, VMData)); - emqx_collect(emqx_vm_process_messages_in_queues, VMData) -> gauge_metric(?C(process_total_messages, VMData)); - emqx_collect(emqx_vm_total_memory, VMData) -> 
gauge_metric(?C(total_memory, VMData)); - emqx_collect(emqx_vm_used_memory, VMData) -> gauge_metric(?C(used_memory, VMData)); - emqx_collect(emqx_cluster_nodes_running, ClusterData) -> gauge_metric(?C(nodes_running, ClusterData)); - emqx_collect(emqx_cluster_nodes_stopped, ClusterData) -> gauge_metric(?C(nodes_stopped, ClusterData)). @@ -497,142 +468,157 @@ emqx_collect(emqx_cluster_nodes_stopped, ClusterData) -> %%-------------------------------------------------------------------- emqx_stats() -> - [ emqx_connections_count - , emqx_connections_max - , emqx_sessions_count - , emqx_sessions_max - , emqx_topics_count - , emqx_topics_max - , emqx_suboptions_count - , emqx_suboptions_max - , emqx_subscribers_count - , emqx_subscribers_max - , emqx_subscriptions_count - , emqx_subscriptions_max - , emqx_subscriptions_shared_count - , emqx_subscriptions_shared_max - , emqx_retained_count - , emqx_retained_max + [ + emqx_connections_count, + emqx_connections_max, + emqx_sessions_count, + emqx_sessions_max, + emqx_topics_count, + emqx_topics_max, + emqx_suboptions_count, + emqx_suboptions_max, + emqx_subscribers_count, + emqx_subscribers_max, + emqx_subscriptions_count, + emqx_subscriptions_max, + emqx_subscriptions_shared_count, + emqx_subscriptions_shared_max, + emqx_retained_count, + emqx_retained_max ]. 
emqx_metrics_packets() -> - [ emqx_bytes_received - , emqx_bytes_sent - , emqx_packets_received - , emqx_packets_sent - , emqx_packets_connect - , emqx_packets_connack_sent - , emqx_packets_connack_error - , emqx_packets_connack_auth_error - , emqx_packets_publish_received - , emqx_packets_publish_sent - , emqx_packets_publish_inuse - , emqx_packets_publish_error - , emqx_packets_publish_auth_error - , emqx_packets_publish_dropped - , emqx_packets_puback_received - , emqx_packets_puback_sent - , emqx_packets_puback_inuse - , emqx_packets_puback_missed - , emqx_packets_pubrec_received - , emqx_packets_pubrec_sent - , emqx_packets_pubrec_inuse - , emqx_packets_pubrec_missed - , emqx_packets_pubrel_received - , emqx_packets_pubrel_sent - , emqx_packets_pubrel_missed - , emqx_packets_pubcomp_received - , emqx_packets_pubcomp_sent - , emqx_packets_pubcomp_inuse - , emqx_packets_pubcomp_missed - , emqx_packets_subscribe_received - , emqx_packets_subscribe_error - , emqx_packets_subscribe_auth_error - , emqx_packets_suback_sent - , emqx_packets_unsubscribe_received - , emqx_packets_unsubscribe_error - , emqx_packets_unsuback_sent - , emqx_packets_pingreq_received - , emqx_packets_pingresp_sent - , emqx_packets_disconnect_received - , emqx_packets_disconnect_sent - , emqx_packets_auth_received - , emqx_packets_auth_sent + [ + emqx_bytes_received, + emqx_bytes_sent, + emqx_packets_received, + emqx_packets_sent, + emqx_packets_connect, + emqx_packets_connack_sent, + emqx_packets_connack_error, + emqx_packets_connack_auth_error, + emqx_packets_publish_received, + emqx_packets_publish_sent, + emqx_packets_publish_inuse, + emqx_packets_publish_error, + emqx_packets_publish_auth_error, + emqx_packets_publish_dropped, + emqx_packets_puback_received, + emqx_packets_puback_sent, + emqx_packets_puback_inuse, + emqx_packets_puback_missed, + emqx_packets_pubrec_received, + emqx_packets_pubrec_sent, + emqx_packets_pubrec_inuse, + emqx_packets_pubrec_missed, + 
emqx_packets_pubrel_received, + emqx_packets_pubrel_sent, + emqx_packets_pubrel_missed, + emqx_packets_pubcomp_received, + emqx_packets_pubcomp_sent, + emqx_packets_pubcomp_inuse, + emqx_packets_pubcomp_missed, + emqx_packets_subscribe_received, + emqx_packets_subscribe_error, + emqx_packets_subscribe_auth_error, + emqx_packets_suback_sent, + emqx_packets_unsubscribe_received, + emqx_packets_unsubscribe_error, + emqx_packets_unsuback_sent, + emqx_packets_pingreq_received, + emqx_packets_pingresp_sent, + emqx_packets_disconnect_received, + emqx_packets_disconnect_sent, + emqx_packets_auth_received, + emqx_packets_auth_sent ]. emqx_metrics_messages() -> - [ emqx_messages_received - , emqx_messages_sent - , emqx_messages_qos0_received - , emqx_messages_qos0_sent - , emqx_messages_qos1_received - , emqx_messages_qos1_sent - , emqx_messages_qos2_received - , emqx_messages_qos2_sent - , emqx_messages_publish - , emqx_messages_dropped - , emqx_messages_dropped_expired - , emqx_messages_dropped_no_subscribers - , emqx_messages_forward - , emqx_messages_retained - , emqx_messages_delayed - , emqx_messages_delivered - , emqx_messages_acked + [ + emqx_messages_received, + emqx_messages_sent, + emqx_messages_qos0_received, + emqx_messages_qos0_sent, + emqx_messages_qos1_received, + emqx_messages_qos1_sent, + emqx_messages_qos2_received, + emqx_messages_qos2_sent, + emqx_messages_publish, + emqx_messages_dropped, + emqx_messages_dropped_expired, + emqx_messages_dropped_no_subscribers, + emqx_messages_forward, + emqx_messages_retained, + emqx_messages_delayed, + emqx_messages_delivered, + emqx_messages_acked ]. 
emqx_metrics_delivery() -> - [ emqx_delivery_dropped - , emqx_delivery_dropped_no_local - , emqx_delivery_dropped_too_large - , emqx_delivery_dropped_qos0_msg - , emqx_delivery_dropped_queue_full - , emqx_delivery_dropped_expired + [ + emqx_delivery_dropped, + emqx_delivery_dropped_no_local, + emqx_delivery_dropped_too_large, + emqx_delivery_dropped_qos0_msg, + emqx_delivery_dropped_queue_full, + emqx_delivery_dropped_expired ]. emqx_metrics_client() -> - [ emqx_client_connected - , emqx_client_authenticate - , emqx_client_auth_anonymous - , emqx_client_authorize - , emqx_client_subscribe - , emqx_client_unsubscribe - , emqx_client_disconnected + [ + emqx_client_connected, + emqx_client_authenticate, + emqx_client_auth_anonymous, + emqx_client_authorize, + emqx_client_subscribe, + emqx_client_unsubscribe, + emqx_client_disconnected ]. emqx_metrics_session() -> - [ emqx_session_created - , emqx_session_resumed - , emqx_session_takenover - , emqx_session_discarded - , emqx_session_terminated + [ + emqx_session_created, + emqx_session_resumed, + emqx_session_takenover, + emqx_session_discarded, + emqx_session_terminated ]. emqx_vm() -> - [ emqx_vm_cpu_use - , emqx_vm_cpu_idle - , emqx_vm_run_queue - , emqx_vm_process_messages_in_queues - , emqx_vm_total_memory - , emqx_vm_used_memory + [ + emqx_vm_cpu_use, + emqx_vm_cpu_idle, + emqx_vm_run_queue, + emqx_vm_process_messages_in_queues, + emqx_vm_total_memory, + emqx_vm_used_memory ]. emqx_vm_data() -> - Idle = case cpu_sup:util([detailed]) of - {_, 0, 0, _} -> 0; %% Not support for Windows - {_Num, _Use, IdleList, _} -> ?C(idle, IdleList) - end, + Idle = + case cpu_sup:util([detailed]) of + %% Not support for Windows + {_, 0, 0, _} -> 0; + {_Num, _Use, IdleList, _} -> ?C(idle, IdleList) + end, RunQueue = erlang:statistics(run_queue), - [{run_queue, RunQueue}, - {process_total_messages, 0}, %% XXX: Plan removed at v5.0 - {cpu_idle, Idle}, - {cpu_use, 100 - Idle}] ++ emqx_vm:mem_info(). 
+ [ + {run_queue, RunQueue}, + %% XXX: Plan removed at v5.0 + {process_total_messages, 0}, + {cpu_idle, Idle}, + {cpu_use, 100 - Idle} + ] ++ emqx_vm:mem_info(). emqx_cluster() -> - [ emqx_cluster_nodes_running - , emqx_cluster_nodes_stopped + [ + emqx_cluster_nodes_running, + emqx_cluster_nodes_stopped ]. emqx_cluster_data() -> #{running_nodes := Running, stopped_nodes := Stopped} = mria_mnesia:cluster_info(), - [{nodes_running, length(Running)}, - {nodes_stopped, length(Stopped)}]. + [ + {nodes_running, length(Running)}, + {nodes_stopped, length(Stopped)} + ]. diff --git a/apps/emqx_prometheus/src/emqx_prometheus_api.erl b/apps/emqx_prometheus/src/emqx_prometheus_api.erl index 72611d5cd..01764e1b5 100644 --- a/apps/emqx_prometheus/src/emqx_prometheus_api.erl +++ b/apps/emqx_prometheus/src/emqx_prometheus_api.erl @@ -22,14 +22,16 @@ -import(hoconsc, [ref/2]). --export([ api_spec/0 - , paths/0 - , schema/1 - ]). +-export([ + api_spec/0, + paths/0, + schema/1 +]). --export([ prometheus/2 - , stats/2 - ]). +-export([ + prometheus/2, + stats/2 +]). -define(SCHEMA_MODULE, emqx_prometheus_schema). @@ -37,32 +39,38 @@ api_spec() -> emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true}). paths() -> - [ "/prometheus" - , "/prometheus/stats" + [ + "/prometheus", + "/prometheus/stats" ]. 
schema("/prometheus") -> - #{ 'operationId' => prometheus - , get => - #{ description => <<"Get Prometheus config info">> - , responses => - #{200 => prometheus_config_schema()} + #{ + 'operationId' => prometheus, + get => + #{ + description => <<"Get Prometheus config info">>, + responses => + #{200 => prometheus_config_schema()} + }, + put => + #{ + description => <<"Update Prometheus config">>, + 'requestBody' => prometheus_config_schema(), + responses => + #{200 => prometheus_config_schema()} } - , put => - #{ description => <<"Update Prometheus config">> - , 'requestBody' => prometheus_config_schema() - , responses => - #{200 => prometheus_config_schema()} - } - }; + }; schema("/prometheus/stats") -> - #{ 'operationId' => stats - , get => - #{ description => <<"Get Prometheus Data">> - , responses => - #{200 => prometheus_data_schema()} + #{ + 'operationId' => stats, + get => + #{ + description => <<"Get Prometheus Data">>, + responses => + #{200 => prometheus_data_schema()} } - }. + }. %%-------------------------------------------------------------------- %% API Handler funcs @@ -70,7 +78,6 @@ schema("/prometheus/stats") -> prometheus(get, _Params) -> {200, emqx:get_raw_config([<<"prometheus">>], #{})}; - prometheus(put, #{body := Body}) -> case emqx_prometheus:update(Body) of {ok, NewConfig} -> @@ -100,21 +107,25 @@ stats(get, #{headers := Headers}) -> prometheus_config_schema() -> emqx_dashboard_swagger:schema_with_example( - ref(?SCHEMA_MODULE, "prometheus"), - prometheus_config_example()). + ref(?SCHEMA_MODULE, "prometheus"), + prometheus_config_example() + ). prometheus_config_example() -> - #{ enable => true - , interval => "15s" - , push_gateway_server => <<"http://127.0.0.1:9091">> - }. + #{ + enable => true, + interval => "15s", + push_gateway_server => <<"http://127.0.0.1:9091">> + }. 
prometheus_data_schema() -> - #{ description => <<"Get Prometheus Data">> - , content => - #{ 'application/json' => - #{schema => #{type => object}} - , 'text/plain' => - #{schema => #{type => string}} + #{ + description => <<"Get Prometheus Data">>, + content => + #{ + 'application/json' => + #{schema => #{type => object}}, + 'text/plain' => + #{schema => #{type => string}} } - }. + }. diff --git a/apps/emqx_prometheus/src/emqx_prometheus_app.erl b/apps/emqx_prometheus/src/emqx_prometheus_app.erl index 5b34ba1df..b9dd9c466 100644 --- a/apps/emqx_prometheus/src/emqx_prometheus_app.erl +++ b/apps/emqx_prometheus/src/emqx_prometheus_app.erl @@ -21,9 +21,10 @@ -include("emqx_prometheus.hrl"). %% Application callbacks --export([ start/2 - , stop/1 - ]). +-export([ + start/2, + stop/1 +]). start(_StartType, _StartArgs) -> {ok, Sup} = emqx_prometheus_sup:start_link(), diff --git a/apps/emqx_prometheus/src/emqx_prometheus_mria.erl b/apps/emqx_prometheus/src/emqx_prometheus_mria.erl index a79611439..c81e0885e 100644 --- a/apps/emqx_prometheus/src/emqx_prometheus_mria.erl +++ b/apps/emqx_prometheus/src/emqx_prometheus_mria.erl @@ -15,9 +15,10 @@ %%-------------------------------------------------------------------- -module(emqx_prometheus_mria). --export([deregister_cleanup/1, - collect_mf/2 - ]). +-export([ + deregister_cleanup/1, + collect_mf/2 +]). -include_lib("prometheus/include/prometheus.hrl"). @@ -43,39 +44,45 @@ deregister_cleanup(_) -> ok. _Registry :: prometheus_registry:registry(), Callback :: prometheus_collector:callback(). collect_mf(_Registry, Callback) -> - case mria_rlog:backend() of - rlog -> - Metrics = metrics(), - _ = [add_metric_family(Metric, Callback) || Metric <- Metrics], - ok; - mnesia -> - ok - end. + case mria_rlog:backend() of + rlog -> + Metrics = metrics(), + _ = [add_metric_family(Metric, Callback) || Metric <- Metrics], + ok; + mnesia -> + ok + end. 
add_metric_family({Name, Metrics}, Callback) -> - Callback(prometheus_model_helpers:create_mf( ?METRIC_NAME(Name) - , <<"">> - , gauge - , catch_all(Metrics) - )). + Callback( + prometheus_model_helpers:create_mf( + ?METRIC_NAME(Name), + <<"">>, + gauge, + catch_all(Metrics) + ) + ). %%==================================================================== %% Internal functions %%==================================================================== metrics() -> - Metrics = case mria_rlog:role() of - replicant -> - [lag, bootstrap_time, bootstrap_num_keys, message_queue_len, replayq_len]; - core -> - [last_intercepted_trans, weight, replicants, server_mql] - end, + Metrics = + case mria_rlog:role() of + replicant -> + [lag, bootstrap_time, bootstrap_num_keys, message_queue_len, replayq_len]; + core -> + [last_intercepted_trans, weight, replicants, server_mql] + end, [{MetricId, fun() -> get_shard_metric(MetricId) end} || MetricId <- Metrics]. get_shard_metric(Metric) -> %% TODO: only report shards that are up - [{[{shard, Shard}], get_shard_metric(Metric, Shard)} || - Shard <- mria_schema:shards(), Shard =/= undefined]. + [ + {[{shard, Shard}], get_shard_metric(Metric, Shard)} + || Shard <- mria_schema:shards(), Shard =/= undefined + ]. get_shard_metric(replicants, Shard) -> length(mria_status:agents(Shard)); @@ -88,6 +95,8 @@ get_shard_metric(Metric, Shard) -> end. catch_all(DataFun) -> - try DataFun() - catch _:_ -> undefined + try + DataFun() + catch + _:_ -> undefined end. diff --git a/apps/emqx_prometheus/src/emqx_prometheus_schema.erl b/apps/emqx_prometheus/src/emqx_prometheus_schema.erl index 98f9a519b..300450260 100644 --- a/apps/emqx_prometheus/src/emqx_prometheus_schema.erl +++ b/apps/emqx_prometheus/src/emqx_prometheus_schema.erl @@ -20,11 +20,12 @@ -behaviour(hocon_schema). --export([ namespace/0 - , roots/0 - , fields/1 - , desc/1 - ]). +-export([ + namespace/0, + roots/0, + fields/1, + desc/1 +]). namespace() -> "prometheus". 
@@ -32,25 +33,36 @@ roots() -> ["prometheus"]. fields("prometheus") -> [ - {push_gateway_server, sc(string(), - #{ default => "http://127.0.0.1:9091" - , required => true - , desc => ?DESC(push_gateway_server) - })}, - {interval, sc(emqx_schema:duration_ms(), - #{ default => "15s" - , required => true - , desc => ?DESC(interval) - })}, - {enable, sc(boolean(), - #{ default => false - , required => true - , desc => ?DESC(enable) - })} + {push_gateway_server, + sc( + string(), + #{ + default => "http://127.0.0.1:9091", + required => true, + desc => ?DESC(push_gateway_server) + } + )}, + {interval, + sc( + emqx_schema:duration_ms(), + #{ + default => "15s", + required => true, + desc => ?DESC(interval) + } + )}, + {enable, + sc( + boolean(), + #{ + default => false, + required => true, + desc => ?DESC(enable) + } + )} ]. desc("prometheus") -> ?DESC(prometheus); -desc(_) -> - undefined. +desc(_) -> undefined. sc(Type, Meta) -> hoconsc:mk(Type, Meta). diff --git a/apps/emqx_prometheus/src/emqx_prometheus_sup.erl b/apps/emqx_prometheus/src/emqx_prometheus_sup.erl index 3766100d5..65023da14 100644 --- a/apps/emqx_prometheus/src/emqx_prometheus_sup.erl +++ b/apps/emqx_prometheus/src/emqx_prometheus_sup.erl @@ -18,21 +18,24 @@ -behaviour(supervisor). --export([ start_link/0 - , start_child/1 - , start_child/2 - , stop_child/1 - ]). +-export([ + start_link/0, + start_child/1, + start_child/2, + stop_child/1 +]). -export([init/1]). %% Helper macro for declaring children of supervisor --define(CHILD(Mod, Opts), #{id => Mod, - start => {Mod, start_link, [Opts]}, - restart => permanent, - shutdown => 5000, - type => worker, - modules => [Mod]}). +-define(CHILD(Mod, Opts), #{ + id => Mod, + start => {Mod, start_link, [Opts]}, + restart => permanent, + shutdown => 5000, + type => worker, + modules => [Mod] +}). start_link() -> supervisor:start_link({local, ?MODULE}, ?MODULE, []). 
@@ -45,7 +48,7 @@ start_child(ChildSpec) when is_map(ChildSpec) -> start_child(Mod, Opts) when is_atom(Mod) andalso is_map(Opts) -> assert_started(supervisor:start_child(?MODULE, ?CHILD(Mod, Opts))). --spec(stop_child(any()) -> ok | {error, term()}). +-spec stop_child(any()) -> ok | {error, term()}. stop_child(ChildId) -> case supervisor:terminate_child(?MODULE, ChildId) of ok -> supervisor:delete_child(?MODULE, ChildId); diff --git a/apps/emqx_prometheus/src/proto/emqx_prometheus_proto_v1.erl b/apps/emqx_prometheus/src/proto/emqx_prometheus_proto_v1.erl index 992c6e22b..c0529cabd 100644 --- a/apps/emqx_prometheus/src/proto/emqx_prometheus_proto_v1.erl +++ b/apps/emqx_prometheus/src/proto/emqx_prometheus_proto_v1.erl @@ -18,11 +18,12 @@ -behaviour(emqx_bpapi). --export([ introduced_in/0 +-export([ + introduced_in/0, - , start/1 - , stop/1 - ]). + start/1, + stop/1 +]). -include_lib("emqx/include/bpapi.hrl"). diff --git a/apps/emqx_prometheus/test/emqx_prometheus_SUITE.erl b/apps/emqx_prometheus/test/emqx_prometheus_SUITE.erl index f1c4b3800..03e8d6d78 100644 --- a/apps/emqx_prometheus/test/emqx_prometheus_SUITE.erl +++ b/apps/emqx_prometheus/test/emqx_prometheus_SUITE.erl @@ -22,13 +22,14 @@ -compile(export_all). -define(CLUSTER_RPC_SHARD, emqx_cluster_rpc_shard). --define(CONF_DEFAULT, <<" -prometheus { - push_gateway_server = \"http://127.0.0.1:9091\" - interval = \"1s\" - enable = true -} -">>). +-define(CONF_DEFAULT, + <<"\n" + "prometheus {\n" + " push_gateway_server = \"http://127.0.0.1:9091\"\n" + " interval = \"1s\"\n" + " enable = true\n" + "}\n">> +). 
%%-------------------------------------------------------------------- %% Setups diff --git a/apps/emqx_prometheus/test/emqx_prometheus_api_SUITE.erl b/apps/emqx_prometheus/test/emqx_prometheus_api_SUITE.erl index 69836f033..e72d7865a 100644 --- a/apps/emqx_prometheus/test/emqx_prometheus_api_SUITE.erl +++ b/apps/emqx_prometheus/test/emqx_prometheus_api_SUITE.erl @@ -67,9 +67,14 @@ t_prometheus_api(_) -> {ok, Response} = emqx_mgmt_api_test_util:request_api(get, Path, "", Auth), Conf = emqx_json:decode(Response, [return_maps]), - ?assertMatch(#{<<"push_gateway_server">> := _, - <<"interval">> := _, - <<"enable">> := _}, Conf), + ?assertMatch( + #{ + <<"push_gateway_server">> := _, + <<"interval">> := _, + <<"enable">> := _ + }, + Conf + ), NewConf = Conf#{<<"interval">> := <<"2s">>}, {ok, Response2} = emqx_mgmt_api_test_util:request_api(put, Path, "", Auth, NewConf), diff --git a/apps/emqx_resource/include/emqx_resource.hrl b/apps/emqx_resource/include/emqx_resource.hrl index fdddcdc87..f10f4b440 100644 --- a/apps/emqx_resource/include/emqx_resource.hrl +++ b/apps/emqx_resource/include/emqx_resource.hrl @@ -30,12 +30,13 @@ }. -type resource_group() :: binary(). -type create_opts() :: #{ - health_check_interval => integer(), - health_check_timeout => integer(), - waiting_connect_complete => integer() - }. --type after_query() :: {[OnSuccess :: after_query_fun()], [OnFailed :: after_query_fun()]} | - undefined. + health_check_interval => integer(), + health_check_timeout => integer(), + waiting_connect_complete => integer() +}. +-type after_query() :: + {[OnSuccess :: after_query_fun()], [OnFailed :: after_query_fun()]} + | undefined. 
%% the `after_query_fun()` is mainly for callbacks that increment counters or do some fallback %% actions upon query failure diff --git a/apps/emqx_resource/include/emqx_resource_utils.hrl b/apps/emqx_resource/include/emqx_resource_utils.hrl index 8d13036ce..8d94746eb 100644 --- a/apps/emqx_resource/include/emqx_resource_utils.hrl +++ b/apps/emqx_resource/include/emqx_resource_utils.hrl @@ -15,13 +15,17 @@ %%-------------------------------------------------------------------- -define(SAFE_CALL(_EXP_), - ?SAFE_CALL(_EXP_, ok)). + ?SAFE_CALL(_EXP_, ok) +). -define(SAFE_CALL(_EXP_, _EXP_ON_FAIL_), - fun() -> - try (_EXP_) - catch _EXCLASS_:_EXCPTION_:_ST_ -> + fun() -> + try + (_EXP_) + catch + _EXCLASS_:_EXCPTION_:_ST_ -> _EXP_ON_FAIL_, {error, {_EXCLASS_, _EXCPTION_, _ST_}} - end - end()). + end + end() +). diff --git a/apps/emqx_resource/rebar.config b/apps/emqx_resource/rebar.config index d5d608a71..e6857829f 100644 --- a/apps/emqx_resource/rebar.config +++ b/apps/emqx_resource/rebar.config @@ -1,9 +1,10 @@ %% -*- mode: erlang -*- -{erl_opts, [ debug_info - , nowarn_unused_import - %, {d, 'RESOURCE_DEBUG'} - ]}. +{erl_opts, [ + debug_info, + nowarn_unused_import + %, {d, 'RESOURCE_DEBUG'} +]}. {erl_first_files, ["src/emqx_resource_transform.erl"]}. @@ -11,9 +12,11 @@ %% try to override the dialyzer 'race_conditions' defined in the top-level dir, %% but it doesn't work -{dialyzer, [{warnings, [unmatched_returns, error_handling]} - ]}. +{dialyzer, [{warnings, [unmatched_returns, error_handling]}]}. -{deps, [ {jsx, {git, "https://github.com/talentdeficit/jsx", {tag, "v3.1.0"}}} - , {emqx, {path, "../emqx"}} - ]}. +{deps, [ + {jsx, {git, "https://github.com/talentdeficit/jsx", {tag, "v3.1.0"}}}, + {emqx, {path, "../emqx"}} +]}. + +{project_plugins, [erlfmt]}. 
diff --git a/apps/emqx_resource/src/emqx_resource.app.src b/apps/emqx_resource/src/emqx_resource.app.src index 9591c5718..56391713f 100644 --- a/apps/emqx_resource/src/emqx_resource.app.src +++ b/apps/emqx_resource/src/emqx_resource.app.src @@ -1,19 +1,19 @@ %% -*- mode: erlang -*- -{application, emqx_resource, - [{description, "An OTP application"}, - {vsn, "0.1.0"}, - {registered, []}, - {mod, {emqx_resource_app, []}}, - {applications, - [kernel, - stdlib, - gproc, - jsx, - emqx - ]}, - {env,[]}, - {modules, []}, +{application, emqx_resource, [ + {description, "An OTP application"}, + {vsn, "0.1.0"}, + {registered, []}, + {mod, {emqx_resource_app, []}}, + {applications, [ + kernel, + stdlib, + gproc, + jsx, + emqx + ]}, + {env, []}, + {modules, []}, - {licenses, ["Apache 2.0"]}, - {links, []} - ]}. + {licenses, ["Apache 2.0"]}, + {links, []} +]}. diff --git a/apps/emqx_resource/src/emqx_resource.erl b/apps/emqx_resource/src/emqx_resource.erl index f6bdf55d4..48615a6e3 100644 --- a/apps/emqx_resource/src/emqx_resource.erl +++ b/apps/emqx_resource/src/emqx_resource.erl @@ -25,66 +25,93 @@ %% APIs for behaviour implementations --export([ query_success/1 - , query_failed/1 - ]). +-export([ + query_success/1, + query_failed/1 +]). %% APIs for instances --export([ check_config/2 - , check_and_create/4 - , check_and_create/5 - , check_and_create_local/4 - , check_and_create_local/5 - , check_and_recreate/4 - , check_and_recreate_local/4 - ]). +-export([ + check_config/2, + check_and_create/4, + check_and_create/5, + check_and_create_local/4, + check_and_create_local/5, + check_and_recreate/4, + check_and_recreate_local/4 +]). 
%% Sync resource instances and files %% provisional solution: rpc:multicall to all the nodes for creating/updating/removing %% todo: replicate operations --export([ create/4 %% store the config and start the instance - , create/5 - , create_local/4 - , create_local/5 - , create_dry_run/2 %% run start/2, health_check/2 and stop/1 sequentially - , create_dry_run_local/2 - , recreate/4 %% this will do create_dry_run, stop the old instance and start a new one - , recreate_local/4 - , remove/1 %% remove the config and stop the instance - , remove_local/1 - , reset_metrics/1 - , reset_metrics_local/1 - ]). + +%% store the config and start the instance +-export([ + create/4, + create/5, + create_local/4, + create_local/5, + %% run start/2, health_check/2 and stop/1 sequentially + create_dry_run/2, + create_dry_run_local/2, + %% this will do create_dry_run, stop the old instance and start a new one + recreate/4, + recreate_local/4, + %% remove the config and stop the instance + remove/1, + remove_local/1, + reset_metrics/1, + reset_metrics_local/1 +]). %% Calls to the callback module with current resource state %% They also save the state after the call finished (except query/2,3). --export([ restart/1 %% restart the instance. - , restart/2 - , health_check/1 %% verify if the resource is working normally - , set_resource_status_connecting/1 %% set resource status to disconnected - , stop/1 %% stop the instance - , query/2 %% query the instance - , query/3 %% query the instance with after_query() - ]). + +%% restart the instance. +-export([ + restart/1, + restart/2, + %% verify if the resource is working normally + health_check/1, + %% set resource status to disconnected + set_resource_status_connecting/1, + %% stop the instance + stop/1, + %% query the instance + query/2, + %% query the instance with after_query() + query/3 +]). 
%% Direct calls to the callback module --export([ call_start/3 %% start the instance - , call_health_check/3 %% verify if the resource is working normally - , call_stop/3 %% stop the instance - ]). --export([ list_instances/0 %% list all the instances, id only. - , list_instances_verbose/0 %% list all the instances - , get_instance/1 %% return the data of the instance - , list_instances_by_type/1 %% return all the instances of the same resource type - , generate_id/1 - , list_group_instances/1 - ]). +%% start the instance +-export([ + call_start/3, + %% verify if the resource is working normally + call_health_check/3, + %% stop the instance + call_stop/3 +]). --optional_callbacks([ on_query/4 - , on_health_check/2 - ]). +%% list all the instances, id only. +-export([ + list_instances/0, + %% list all the instances + list_instances_verbose/0, + %% return the data of the instance + get_instance/1, + %% return all the instances of the same resource type + list_instances_by_type/1, + generate_id/1, + list_group_instances/1 +]). + +-optional_callbacks([ + on_query/4, + on_health_check/2 +]). %% when calling emqx_resource:start/1 -callback on_start(instance_id(), resource_config()) -> @@ -98,7 +125,7 @@ %% when calling emqx_resource:health_check/2 -callback on_health_check(instance_id(), resource_state()) -> - {ok, resource_state()} | {error, Reason:: term(), resource_state()}. + {ok, resource_state()} | {error, Reason :: term(), resource_state()}. -spec list_types() -> [module()]. list_types() -> @@ -111,24 +138,26 @@ discover_resource_mods() -> -spec is_resource_mod(module()) -> boolean(). is_resource_mod(Module) -> Info = Module:module_info(attributes), - Behaviour = proplists:get_value(behavior, Info, []) ++ - proplists:get_value(behaviour, Info, []), + Behaviour = + proplists:get_value(behavior, Info, []) ++ + proplists:get_value(behaviour, Info, []), lists:member(?MODULE, Behaviour). -spec query_success(after_query()) -> ok. 
query_success(undefined) -> ok; -query_success({OnSucc, _}) -> - apply_query_after_calls(OnSucc). +query_success({OnSucc, _}) -> apply_query_after_calls(OnSucc). -spec query_failed(after_query()) -> ok. query_failed(undefined) -> ok; -query_failed({_, OnFailed}) -> - apply_query_after_calls(OnFailed). +query_failed({_, OnFailed}) -> apply_query_after_calls(OnFailed). apply_query_after_calls(Funcs) -> - lists:foreach(fun({Fun, Args}) -> + lists:foreach( + fun({Fun, Args}) -> safe_apply(Fun, Args) - end, Funcs). + end, + Funcs + ). %% ================================================================================= %% APIs for resource instances @@ -149,11 +178,13 @@ create(InstId, Group, ResourceType, Config, Opts) -> create_local(InstId, Group, ResourceType, Config) -> create_local(InstId, Group, ResourceType, Config, #{}). --spec create_local(instance_id(), - resource_group(), - resource_type(), - resource_config(), - create_opts()) -> +-spec create_local( + instance_id(), + resource_group(), + resource_type(), + resource_config(), + create_opts() +) -> {ok, resource_data() | 'already_created'} | {error, Reason :: term()}. create_local(InstId, Group, ResourceType, Config, Opts) -> call_instance(InstId, {create, InstId, Group, ResourceType, Config, Opts}). 
@@ -206,19 +237,25 @@ query(InstId, Request) -> query(InstId, Request, AfterQuery) -> case get_instance(InstId) of {ok, _Group, #{status := connecting}} -> - query_error(connecting, <<"cannot serve query when the resource " - "instance is still connecting">>); + query_error(connecting, << + "cannot serve query when the resource " + "instance is still connecting" + >>); {ok, _Group, #{status := disconnected}} -> - query_error(disconnected, <<"cannot serve query when the resource " - "instance is disconnected">>); + query_error(disconnected, << + "cannot serve query when the resource " + "instance is disconnected" + >>); {ok, _Group, #{mod := Mod, state := ResourceState, status := connected}} -> %% the resource state is readonly to Module:on_query/4 %% and the `after_query()` functions should be thread safe ok = emqx_plugin_libs_metrics:inc(resource_metrics, InstId, matched), - try Mod:on_query(InstId, Request, AfterQuery, ResourceState) - catch Err:Reason:ST -> - emqx_plugin_libs_metrics:inc(resource_metrics, InstId, exception), - erlang:raise(Err, Reason, ST) + try + Mod:on_query(InstId, Request, AfterQuery, ResourceState) + catch + Err:Reason:ST -> + emqx_plugin_libs_metrics:inc(resource_metrics, InstId, exception), + erlang:raise(Err, Reason, ST) end; {error, not_found} -> query_error(not_found, <<"the resource id not exists">>) @@ -258,9 +295,10 @@ list_instances_verbose() -> -spec list_instances_by_type(module()) -> [instance_id()]. list_instances_by_type(ResourceType) -> - filter_instances(fun(_, RT) when RT =:= ResourceType -> true; - (_, _) -> false - end). + filter_instances(fun + (_, RT) when RT =:= ResourceType -> true; + (_, _) -> false + end). -spec generate_id(term()) -> instance_id(). generate_id(Name) when is_binary(Name) -> @@ -276,7 +314,9 @@ call_start(InstId, Mod, Config) -> ?SAFE_CALL(Mod:on_start(InstId, Config)). 
-spec call_health_check(instance_id(), module(), resource_state()) -> - {ok, resource_state()} | {error, Reason:: term()} | {error, Reason:: term(), resource_state()}. + {ok, resource_state()} + | {error, Reason :: term()} + | {error, Reason :: term(), resource_state()}. call_health_check(InstId, Mod, ResourceState) -> ?SAFE_CALL(Mod:on_health_check(InstId, ResourceState)). @@ -289,58 +329,82 @@ call_stop(InstId, Mod, ResourceState) -> check_config(ResourceType, Conf) -> emqx_hocon:check(ResourceType, Conf). --spec check_and_create(instance_id(), - resource_group(), - resource_type(), - raw_resource_config()) -> +-spec check_and_create( + instance_id(), + resource_group(), + resource_type(), + raw_resource_config() +) -> {ok, resource_data() | 'already_created'} | {error, term()}. check_and_create(InstId, Group, ResourceType, RawConfig) -> check_and_create(InstId, Group, ResourceType, RawConfig, #{}). --spec check_and_create(instance_id(), - resource_group(), - resource_type(), - raw_resource_config(), - create_opts()) -> +-spec check_and_create( + instance_id(), + resource_group(), + resource_type(), + raw_resource_config(), + create_opts() +) -> {ok, resource_data() | 'already_created'} | {error, term()}. check_and_create(InstId, Group, ResourceType, RawConfig, Opts) -> - check_and_do(ResourceType, RawConfig, - fun(InstConf) -> create(InstId, Group, ResourceType, InstConf, Opts) end). + check_and_do( + ResourceType, + RawConfig, + fun(InstConf) -> create(InstId, Group, ResourceType, InstConf, Opts) end + ). --spec check_and_create_local(instance_id(), - resource_group(), - resource_type(), - raw_resource_config()) -> +-spec check_and_create_local( + instance_id(), + resource_group(), + resource_type(), + raw_resource_config() +) -> {ok, resource_data()} | {error, term()}. check_and_create_local(InstId, Group, ResourceType, RawConfig) -> check_and_create_local(InstId, Group, ResourceType, RawConfig, #{}). 
--spec check_and_create_local(instance_id(), - resource_group(), - resource_type(), - raw_resource_config(), - create_opts()) -> {ok, resource_data()} | {error, term()}. +-spec check_and_create_local( + instance_id(), + resource_group(), + resource_type(), + raw_resource_config(), + create_opts() +) -> {ok, resource_data()} | {error, term()}. check_and_create_local(InstId, Group, ResourceType, RawConfig, Opts) -> - check_and_do(ResourceType, RawConfig, - fun(InstConf) -> create_local(InstId, Group, ResourceType, InstConf, Opts) end). + check_and_do( + ResourceType, + RawConfig, + fun(InstConf) -> create_local(InstId, Group, ResourceType, InstConf, Opts) end + ). --spec check_and_recreate(instance_id(), - resource_type(), - raw_resource_config(), - create_opts()) -> +-spec check_and_recreate( + instance_id(), + resource_type(), + raw_resource_config(), + create_opts() +) -> {ok, resource_data()} | {error, term()}. check_and_recreate(InstId, ResourceType, RawConfig, Opts) -> - check_and_do(ResourceType, RawConfig, - fun(InstConf) -> recreate(InstId, ResourceType, InstConf, Opts) end). + check_and_do( + ResourceType, + RawConfig, + fun(InstConf) -> recreate(InstId, ResourceType, InstConf, Opts) end + ). --spec check_and_recreate_local(instance_id(), - resource_type(), - raw_resource_config(), - create_opts()) -> +-spec check_and_recreate_local( + instance_id(), + resource_type(), + raw_resource_config(), + create_opts() +) -> {ok, resource_data()} | {error, term()}. check_and_recreate_local(InstId, ResourceType, RawConfig, Opts) -> - check_and_do(ResourceType, RawConfig, - fun(InstConf) -> recreate_local(InstId, ResourceType, InstConf, Opts) end). + check_and_do( + ResourceType, + RawConfig, + fun(InstConf) -> recreate_local(InstId, ResourceType, InstConf, Opts) end + ). 
check_and_do(ResourceType, RawConfig, Do) when is_function(Do) -> case check_config(ResourceType, RawConfig) of @@ -355,8 +419,7 @@ filter_instances(Filter) -> inc_metrics_funcs(InstId) -> OnFailed = [{fun emqx_plugin_libs_metrics:inc/3, [resource_metrics, InstId, failed]}], - OnSucc = [ {fun emqx_plugin_libs_metrics:inc/3, [resource_metrics, InstId, success]} - ], + OnSucc = [{fun emqx_plugin_libs_metrics:inc/3, [resource_metrics, InstId, success]}], {OnSucc, OnFailed}. call_instance(InstId, Query) -> diff --git a/apps/emqx_resource/src/emqx_resource_health_check.erl b/apps/emqx_resource/src/emqx_resource_health_check.erl index 0faaebabb..ab74296a5 100644 --- a/apps/emqx_resource/src/emqx_resource_health_check.erl +++ b/apps/emqx_resource/src/emqx_resource_health_check.erl @@ -15,23 +15,29 @@ %%-------------------------------------------------------------------- -module(emqx_resource_health_check). --export([ start_link/3 - , create_checker/3 - , delete_checker/1 - ]). +-export([ + start_link/3, + create_checker/3, + delete_checker/1 +]). --export([ start_health_check/3 - , health_check_timeout_checker/4 - ]). +-export([ + start_health_check/3, + health_check_timeout_checker/4 +]). -define(SUP, emqx_resource_health_check_sup). -define(ID(NAME), {resource_health_check, NAME}). child_spec(Name, Sleep, Timeout) -> - #{id => ?ID(Name), - start => {?MODULE, start_link, [Name, Sleep, Timeout]}, - restart => transient, - shutdown => 5000, type => worker, modules => [?MODULE]}. + #{ + id => ?ID(Name), + start => {?MODULE, start_link, [Name, Sleep, Timeout]}, + restart => transient, + shutdown => 5000, + type => worker, + modules => [?MODULE] + }. 
start_link(Name, Sleep, Timeout) ->
     Pid = proc_lib:spawn_link(?MODULE, start_health_check, [Name, Sleep, Timeout]),
@@ -42,19 +48,22 @@ create_checker(Name, Sleep, Timeout) ->

 create_checker(Name, Sleep, Retry, Timeout) ->
     case supervisor:start_child(?SUP, child_spec(Name, Sleep, Timeout)) of
-        {ok, _} -> ok;
-        {error, already_present} -> ok;
+        {ok, _} ->
+            ok;
+        {error, already_present} ->
+            ok;
         {error, {already_started, _}} when Retry == false ->
             ok = delete_checker(Name),
             create_checker(Name, Sleep, true, Timeout);
-        Error -> Error
+        Error ->
+            Error
     end.

 delete_checker(Name) ->
     case supervisor:terminate_child(?SUP, ?ID(Name)) of
         ok -> supervisor:delete_child(?SUP, ?ID(Name));
         Error -> Error
-    end.
+    end.

 start_health_check(Name, Sleep, Timeout) ->
     Pid = self(),
@@ -63,13 +72,16 @@ start_health_check(Name, Sleep, Timeout) ->

 health_check(Name) ->
     receive
-        {Pid, begin_health_check} ->
+        {Pid, begin_health_check} ->
             case emqx_resource:health_check(Name) of
                 ok ->
                     emqx_alarm:deactivate(Name);
                 {error, _} ->
-                    emqx_alarm:activate(Name, #{name => Name},
-                        <<Name/binary, " health check failed">>)
+                    emqx_alarm:activate(
+                        Name,
+                        #{name => Name},
+                        <<Name/binary, " health check failed">>
+                    )
             end,
             Pid ! health_check_finish
     end,
@@ -81,8 +93,11 @@ health_check_timeout_checker(Pid, Name, SleepTime, Timeout) ->
     receive
         health_check_finish -> timer:sleep(SleepTime)
     after Timeout ->
-            emqx_alarm:activate(Name, #{name => Name},
-                <<Name/binary, " health check timeout">>),
+            emqx_alarm:activate(
+                Name,
+                #{name => Name},
+                <<Name/binary, " health check timeout">>
+            ),
             emqx_resource:set_resource_status_connecting(Name),
             receive
                 health_check_finish -> timer:sleep(SleepTime)
diff --git a/apps/emqx_resource/src/emqx_resource_instance.erl b/apps/emqx_resource/src/emqx_resource_instance.erl
index 60b2babe5..97014e413 100644
--- a/apps/emqx_resource/src/emqx_resource_instance.erl
+++ b/apps/emqx_resource/src/emqx_resource_instance.erl
@@ -23,25 +23,28 @@
 -export([start_link/2]).

 %% load resource instances from *.conf files
--export([ lookup/1
-        , get_metrics/1
-        , reset_metrics/1
-        , list_all/0
-        , list_group/1
-        ]).
+-export([ + lookup/1, + get_metrics/1, + reset_metrics/1, + list_all/0, + list_group/1 +]). --export([ hash_call/2 - , hash_call/3 - ]). +-export([ + hash_call/2, + hash_call/3 +]). %% gen_server Callbacks --export([ init/1 - , handle_call/3 - , handle_cast/2 - , handle_info/2 - , terminate/2 - , code_change/3 - ]). +-export([ + init/1, + handle_call/3, + handle_cast/2, + handle_info/2, + terminate/2, + code_change/3 +]). -record(state, {worker_pool, worker_id}). @@ -52,8 +55,12 @@ %%------------------------------------------------------------------------------ start_link(Pool, Id) -> - gen_server:start_link({local, proc_name(?MODULE, Id)}, - ?MODULE, {Pool, Id}, []). + gen_server:start_link( + {local, proc_name(?MODULE, Id)}, + ?MODULE, + {Pool, Id}, + [] + ). %% call the worker by the hash of resource-instance-id, to make sure we always handle %% operations on the same instance in the same worker. @@ -67,8 +74,7 @@ hash_call(InstId, Request, Timeout) -> lookup(InstId) -> case ets:lookup(emqx_resource_instance, InstId) of [] -> {error, not_found}; - [{_, Group, Data}] -> - {ok, Group, Data#{id => InstId, metrics => get_metrics(InstId)}} + [{_, Group, Data}] -> {ok, Group, Data#{id => InstId, metrics => get_metrics(InstId)}} end. make_test_id() -> @@ -103,39 +109,32 @@ list_group(Group) -> %%------------------------------------------------------------------------------ -spec init({atom(), integer()}) -> - {ok, State :: state()} | {ok, State :: state(), timeout() | hibernate | {continue, term()}} | - {stop, Reason :: term()} | ignore. + {ok, State :: state()} + | {ok, State :: state(), timeout() | hibernate | {continue, term()}} + | {stop, Reason :: term()} + | ignore. init({Pool, Id}) -> true = gproc_pool:connect_worker(Pool, {Pool, Id}), {ok, #state{worker_pool = Pool, worker_id = Id}}. 
handle_call({create, InstId, Group, ResourceType, Config, Opts}, _From, State) -> {reply, do_create(InstId, Group, ResourceType, Config, Opts), State}; - handle_call({create_dry_run, ResourceType, Config}, _From, State) -> {reply, do_create_dry_run(ResourceType, Config), State}; - handle_call({recreate, InstId, ResourceType, Config, Opts}, _From, State) -> {reply, do_recreate(InstId, ResourceType, Config, Opts), State}; - handle_call({reset_metrics, InstId}, _From, State) -> {reply, do_reset_metrics(InstId), State}; - handle_call({remove, InstId}, _From, State) -> {reply, do_remove(InstId), State}; - handle_call({restart, InstId, Opts}, _From, State) -> {reply, do_restart(InstId, Opts), State}; - handle_call({stop, InstId}, _From, State) -> {reply, do_stop(InstId), State}; - handle_call({health_check, InstId}, _From, State) -> {reply, do_health_check(InstId), State}; - handle_call({set_resource_status_connecting, InstId}, _From, State) -> {reply, do_set_resource_status_connecting(InstId), State}; - handle_call(Req, _From, State) -> logger:error("Received unexpected call: ~p", [Req]), {reply, ignored, State}. @@ -155,14 +154,17 @@ code_change(_OldVsn, State, _Extra) -> %%------------------------------------------------------------------------------ %% suppress the race condition check, as these functions are protected in gproc workers --dialyzer({nowarn_function, [ do_recreate/4 - , do_create/5 - , do_restart/2 - , do_start/5 - , do_stop/1 - , do_health_check/1 - , start_and_check/6 - ]}). +-dialyzer( + {nowarn_function, [ + do_recreate/4, + do_create/5, + do_restart/2, + do_start/5, + do_stop/1, + do_health_check/1, + start_and_check/6 + ]} +). 
do_recreate(InstId, ResourceType, NewConfig, Opts) -> case lookup(InstId) of @@ -185,10 +187,11 @@ do_wait_for_resource_ready(_InstId, 0) -> timeout; do_wait_for_resource_ready(InstId, Retry) -> case force_lookup(InstId) of - #{status := connected} -> ok; + #{status := connected} -> + ok; _ -> timer:sleep(100), - do_wait_for_resource_ready(InstId, Retry-1) + do_wait_for_resource_ready(InstId, Retry - 1) end. do_create(InstId, Group, ResourceType, Config, Opts) -> @@ -197,8 +200,12 @@ do_create(InstId, Group, ResourceType, Config, Opts) -> {ok, already_created}; {error, not_found} -> ok = do_start(InstId, Group, ResourceType, Config, Opts), - ok = emqx_plugin_libs_metrics:create_metrics(resource_metrics, InstId, - [matched, success, failed, exception], [matched]), + ok = emqx_plugin_libs_metrics:create_metrics( + resource_metrics, + InstId, + [matched, success, failed, exception], + [matched] + ), {ok, force_lookup(InstId)} end. @@ -212,7 +219,8 @@ do_create_dry_run(ResourceType, Config) -> {error, _} = Error -> Error; _ -> ok end; - {error, Reason, _} -> {error, Reason} + {error, Reason, _} -> + {error, Reason} end; {error, Reason} -> {error, Reason} @@ -246,13 +254,18 @@ do_restart(InstId, Opts) -> end. do_start(InstId, Group, ResourceType, Config, Opts) when is_binary(InstId) -> - InitData = #{id => InstId, mod => ResourceType, config => Config, - status => connecting, state => undefined}, + InitData = #{ + id => InstId, + mod => ResourceType, + config => Config, + status => connecting, + state => undefined + }, %% The `emqx_resource:call_start/3` need the instance exist beforehand ets:insert(emqx_resource_instance, {InstId, Group, InitData}), spawn(fun() -> - start_and_check(InstId, Group, ResourceType, Config, Opts, InitData) - end), + start_and_check(InstId, Group, ResourceType, Config, Opts, InitData) + end), _ = wait_for_resource_ready(InstId, maps:get(wait_for_resource_ready, Opts, 5000)), ok. 
@@ -268,9 +281,11 @@ start_and_check(InstId, Group, ResourceType, Config, Opts, Data) -> end. create_default_checker(InstId, Opts) -> - emqx_resource_health_check:create_checker(InstId, + emqx_resource_health_check:create_checker( + InstId, maps:get(health_check_interval, Opts, 15000), - maps:get(health_check_timeout, Opts, 10000)). + maps:get(health_check_timeout, Opts, 10000) + ). do_stop(InstId) when is_binary(InstId) -> do_with_group_and_instance_data(InstId, fun do_stop/2, []). @@ -291,18 +306,24 @@ do_health_check(_Group, #{state := undefined}) -> do_health_check(Group, #{id := InstId, mod := Mod, state := ResourceState0} = Data) -> case emqx_resource:call_health_check(InstId, Mod, ResourceState0) of {ok, ResourceState1} -> - ets:insert(emqx_resource_instance, - {InstId, Group, Data#{status => connected, state => ResourceState1}}), + ets:insert( + emqx_resource_instance, + {InstId, Group, Data#{status => connected, state => ResourceState1}} + ), ok; {error, Reason} -> logger:error("health check for ~p failed: ~p", [InstId, Reason]), - ets:insert(emqx_resource_instance, - {InstId, Group, Data#{status => connecting}}), + ets:insert( + emqx_resource_instance, + {InstId, Group, Data#{status => connecting}} + ), {error, Reason}; {error, Reason, ResourceState1} -> logger:error("health check for ~p failed: ~p", [InstId, Reason]), - ets:insert(emqx_resource_instance, - {InstId, Group, Data#{status => connecting, state => ResourceState1}}), + ets:insert( + emqx_resource_instance, + {InstId, Group, Data#{status => connecting, state => ResourceState1}} + ), {error, Reason} end. @@ -311,7 +332,8 @@ do_set_resource_status_connecting(InstId) -> {ok, Group, #{id := InstId} = Data} -> logger:error("health check for ~p failed: timeout", [InstId]), ets:insert(emqx_resource_instance, {InstId, Group, Data#{status => connecting}}); - Error -> {error, Error} + Error -> + {error, Error} end. 
%%------------------------------------------------------------------------------ diff --git a/apps/emqx_resource/src/emqx_resource_sup.erl b/apps/emqx_resource/src/emqx_resource_sup.erl index a439c76bd..770ca1fed 100644 --- a/apps/emqx_resource/src/emqx_resource_sup.erl +++ b/apps/emqx_resource/src/emqx_resource_sup.erl @@ -22,7 +22,8 @@ -export([init/1]). -define(RESOURCE_INST_MOD, emqx_resource_instance). --define(POOL_SIZE, 64). %% set a very large pool size in case all the workers busy +%% set a very large pool size in case all the workers busy +-define(POOL_SIZE, 64). start_link() -> supervisor:start_link({local, ?MODULE}, ?MODULE, []). @@ -40,27 +41,39 @@ init([]) -> ResourceInsts = [ begin ensure_pool_worker(Pool, {Pool, Idx}, Idx), - #{id => {Mod, Idx}, - start => {Mod, start_link, [Pool, Idx]}, - restart => transient, - shutdown => 5000, type => worker, modules => [Mod]} - end || Idx <- lists:seq(1, ?POOL_SIZE)], - HealthCheck = - #{id => emqx_resource_health_check_sup, - start => {emqx_resource_health_check_sup, start_link, []}, - restart => transient, - shutdown => infinity, type => supervisor, modules => [emqx_resource_health_check_sup]}, + #{ + id => {Mod, Idx}, + start => {Mod, start_link, [Pool, Idx]}, + restart => transient, + shutdown => 5000, + type => worker, + modules => [Mod] + } + end + || Idx <- lists:seq(1, ?POOL_SIZE) + ], + HealthCheck = + #{ + id => emqx_resource_health_check_sup, + start => {emqx_resource_health_check_sup, start_link, []}, + restart => transient, + shutdown => infinity, + type => supervisor, + modules => [emqx_resource_health_check_sup] + }, {ok, {SupFlags, [HealthCheck, Metrics | ResourceInsts]}}. %% internal functions ensure_pool(Pool, Type, Opts) -> - try gproc_pool:new(Pool, Type, Opts) + try + gproc_pool:new(Pool, Type, Opts) catch error:exists -> ok end. ensure_pool_worker(Pool, Name, Slot) -> - try gproc_pool:add_worker(Pool, Name, Slot) + try + gproc_pool:add_worker(Pool, Name, Slot) catch error:exists -> ok end. 
diff --git a/apps/emqx_resource/src/emqx_resource_validator.erl b/apps/emqx_resource/src/emqx_resource_validator.erl index 4d745d1e3..7623ae7fa 100644 --- a/apps/emqx_resource/src/emqx_resource_validator.erl +++ b/apps/emqx_resource/src/emqx_resource_validator.erl @@ -16,10 +16,11 @@ -module(emqx_resource_validator). --export([ min/2 - , max/2 - , not_empty/1 - ]). +-export([ + min/2, + max/2, + not_empty/1 +]). max(Type, Max) -> limit(Type, '=<', Max). @@ -28,16 +29,19 @@ min(Type, Min) -> limit(Type, '>=', Min). not_empty(ErrMsg) -> - fun(<<>>) -> {error, ErrMsg}; - (_) -> ok + fun + (<<>>) -> {error, ErrMsg}; + (_) -> ok end. limit(Type, Op, Expected) -> L = len(Type), fun(Value) -> Got = L(Value), - return(erlang:Op(Got, Expected), - err_limit({Type, {Op, Expected}, {got, Got}})) + return( + erlang:Op(Got, Expected), + err_limit({Type, {Op, Expected}, {got, Got}}) + ) end. len(array) -> fun erlang:length/1; @@ -48,5 +52,4 @@ err_limit({Type, {Op, Expected}, {got, Got}}) -> io_lib:format("Expect the ~ts value ~ts ~p but got: ~p", [Type, Op, Expected, Got]). return(true, _) -> ok; -return(false, Error) -> - {error, Error}. +return(false, Error) -> {error, Error}. diff --git a/apps/emqx_resource/src/proto/emqx_resource_proto_v1.erl b/apps/emqx_resource/src/proto/emqx_resource_proto_v1.erl index f39533c82..0aa4fd40b 100644 --- a/apps/emqx_resource/src/proto/emqx_resource_proto_v1.erl +++ b/apps/emqx_resource/src/proto/emqx_resource_proto_v1.erl @@ -18,52 +18,58 @@ -behaviour(emqx_bpapi). --export([ introduced_in/0 +-export([ + introduced_in/0, - , create/5 - , create_dry_run/2 - , recreate/4 - , remove/1 - , reset_metrics/1 - ]). + create/5, + create_dry_run/2, + recreate/4, + remove/1, + reset_metrics/1 +]). -include_lib("emqx/include/bpapi.hrl"). introduced_in() -> "5.0.0". 
--spec create( emqx_resource:instance_id() - , emqx_resource:resource_group() - , emqx_resource:resource_type() - , emqx_resource:resource_config() - , emqx_resource:create_opts() - ) -> - emqx_cluster_rpc:multicall_return(emqx_resource:resource_data()). +-spec create( + emqx_resource:instance_id(), + emqx_resource:resource_group(), + emqx_resource:resource_type(), + emqx_resource:resource_config(), + emqx_resource:create_opts() +) -> + emqx_cluster_rpc:multicall_return(emqx_resource:resource_data()). create(InstId, Group, ResourceType, Config, Opts) -> - emqx_cluster_rpc:multicall(emqx_resource, create_local, [InstId, Group, ResourceType, Config, Opts]). + emqx_cluster_rpc:multicall(emqx_resource, create_local, [ + InstId, Group, ResourceType, Config, Opts + ]). --spec create_dry_run( emqx_resource:resource_type() - , emqx_resource:resource_config() - ) -> - emqx_cluster_rpc:multicall_return(emqx_resource:resource_data()). +-spec create_dry_run( + emqx_resource:resource_type(), + emqx_resource:resource_config() +) -> + emqx_cluster_rpc:multicall_return(emqx_resource:resource_data()). create_dry_run(ResourceType, Config) -> emqx_cluster_rpc:multicall(emqx_resource, create_dry_run_local, [ResourceType, Config]). --spec recreate( emqx_resource:instance_id() - , emqx_resource:resource_type() - , emqx_resource:resource_config() - , emqx_resource:create_opts() - ) -> - emqx_cluster_rpc:multicall_return(emqx_resource:resource_data()). +-spec recreate( + emqx_resource:instance_id(), + emqx_resource:resource_type(), + emqx_resource:resource_config(), + emqx_resource:create_opts() +) -> + emqx_cluster_rpc:multicall_return(emqx_resource:resource_data()). recreate(InstId, ResourceType, Config, Opts) -> emqx_cluster_rpc:multicall(emqx_resource, recreate_local, [InstId, ResourceType, Config, Opts]). -spec remove(emqx_resource:instance_id()) -> - emqx_cluster_rpc:multicall_return(ok). + emqx_cluster_rpc:multicall_return(ok). 
remove(InstId) -> emqx_cluster_rpc:multicall(emqx_resource, remove_local, [InstId]). -spec reset_metrics(emqx_resource:instance_id()) -> - emqx_cluster_rpc:multicall_return(ok). + emqx_cluster_rpc:multicall_return(ok). reset_metrics(InstId) -> emqx_cluster_rpc:multicall(emqx_resource, reset_metrics_local, [InstId]). diff --git a/apps/emqx_resource/test/emqx_resource_SUITE.erl b/apps/emqx_resource/test/emqx_resource_SUITE.erl index 8a952e036..973cf0ab7 100644 --- a/apps/emqx_resource/test/emqx_resource_SUITE.erl +++ b/apps/emqx_resource/test/emqx_resource_SUITE.erl @@ -60,22 +60,25 @@ t_check_config(_) -> t_create_remove(_) -> {error, _} = emqx_resource:check_and_create_local( - ?ID, - ?DEFAULT_RESOURCE_GROUP, - ?TEST_RESOURCE, - #{unknown => test_resource}), + ?ID, + ?DEFAULT_RESOURCE_GROUP, + ?TEST_RESOURCE, + #{unknown => test_resource} + ), {ok, _} = emqx_resource:create( - ?ID, - ?DEFAULT_RESOURCE_GROUP, - ?TEST_RESOURCE, - #{name => test_resource}), + ?ID, + ?DEFAULT_RESOURCE_GROUP, + ?TEST_RESOURCE, + #{name => test_resource} + ), emqx_resource:recreate( - ?ID, - ?TEST_RESOURCE, - #{name => test_resource}, - #{}), + ?ID, + ?TEST_RESOURCE, + #{name => test_resource}, + #{} + ), #{pid := Pid} = emqx_resource:query(?ID, get_state), ?assert(is_process_alive(Pid)), @@ -87,22 +90,25 @@ t_create_remove(_) -> t_create_remove_local(_) -> {error, _} = emqx_resource:check_and_create_local( - ?ID, - ?DEFAULT_RESOURCE_GROUP, - ?TEST_RESOURCE, - #{unknown => test_resource}), + ?ID, + ?DEFAULT_RESOURCE_GROUP, + ?TEST_RESOURCE, + #{unknown => test_resource} + ), {ok, _} = emqx_resource:create_local( - ?ID, - ?DEFAULT_RESOURCE_GROUP, - ?TEST_RESOURCE, - #{name => test_resource}), + ?ID, + ?DEFAULT_RESOURCE_GROUP, + ?TEST_RESOURCE, + #{name => test_resource} + ), emqx_resource:recreate_local( - ?ID, - ?TEST_RESOURCE, - #{name => test_resource}, - #{}), + ?ID, + ?TEST_RESOURCE, + #{name => test_resource}, + #{} + ), #{pid := Pid} = emqx_resource:query(?ID, get_state), 
?assert(is_process_alive(Pid)), @@ -110,10 +116,11 @@ t_create_remove_local(_) -> emqx_resource:set_resource_status_connecting(?ID), emqx_resource:recreate_local( - ?ID, - ?TEST_RESOURCE, - #{name => test_resource}, - #{}), + ?ID, + ?TEST_RESOURCE, + #{name => test_resource}, + #{} + ), ok = emqx_resource:remove_local(?ID), {error, _} = emqx_resource:remove_local(?ID), @@ -122,10 +129,11 @@ t_create_remove_local(_) -> t_query(_) -> {ok, _} = emqx_resource:create_local( - ?ID, - ?DEFAULT_RESOURCE_GROUP, - ?TEST_RESOURCE, - #{name => test_resource}), + ?ID, + ?DEFAULT_RESOURCE_GROUP, + ?TEST_RESOURCE, + #{name => test_resource} + ), Pid = self(), Success = fun() -> Pid ! success end, @@ -142,28 +150,32 @@ t_query(_) -> ?assert(false) end, - ?assertMatch({error, {emqx_resource, #{reason := not_found}}}, - emqx_resource:query(<<"unknown">>, get_state)), + ?assertMatch( + {error, {emqx_resource, #{reason := not_found}}}, + emqx_resource:query(<<"unknown">>, get_state) + ), ok = emqx_resource:remove_local(?ID). t_healthy_timeout(_) -> {ok, _} = emqx_resource:create_local( - ?ID, - ?DEFAULT_RESOURCE_GROUP, - ?TEST_RESOURCE, - #{name => <<"test_resource">>}, - #{health_check_timeout => 200}), + ?ID, + ?DEFAULT_RESOURCE_GROUP, + ?TEST_RESOURCE, + #{name => <<"test_resource">>}, + #{health_check_timeout => 200} + ), timer:sleep(500), ok = emqx_resource:remove_local(?ID). 
t_healthy(_) -> {ok, _} = emqx_resource:create_local( - ?ID, - ?DEFAULT_RESOURCE_GROUP, - ?TEST_RESOURCE, - #{name => <<"test_resource">>}), + ?ID, + ?DEFAULT_RESOURCE_GROUP, + ?TEST_RESOURCE, + #{name => <<"test_resource">>} + ), timer:sleep(400), emqx_resource_health_check:create_checker(?ID, 15000, 10000), @@ -175,38 +187,44 @@ t_healthy(_) -> ?assertMatch( [#{status := connected}], - emqx_resource:list_instances_verbose()), + emqx_resource:list_instances_verbose() + ), erlang:exit(Pid, shutdown), ?assertEqual( {error, dead}, - emqx_resource:health_check(?ID)), + emqx_resource:health_check(?ID) + ), ?assertMatch( [#{status := connecting}], - emqx_resource:list_instances_verbose()), + emqx_resource:list_instances_verbose() + ), ok = emqx_resource:remove_local(?ID). t_stop_start(_) -> {error, _} = emqx_resource:check_and_create( - ?ID, - ?DEFAULT_RESOURCE_GROUP, - ?TEST_RESOURCE, - #{unknown => test_resource}), + ?ID, + ?DEFAULT_RESOURCE_GROUP, + ?TEST_RESOURCE, + #{unknown => test_resource} + ), {ok, _} = emqx_resource:check_and_create( - ?ID, - ?DEFAULT_RESOURCE_GROUP, - ?TEST_RESOURCE, - #{<<"name">> => <<"test_resource">>}), + ?ID, + ?DEFAULT_RESOURCE_GROUP, + ?TEST_RESOURCE, + #{<<"name">> => <<"test_resource">>} + ), {ok, _} = emqx_resource:check_and_recreate( - ?ID, - ?TEST_RESOURCE, - #{<<"name">> => <<"test_resource">>}, - #{}), + ?ID, + ?TEST_RESOURCE, + #{<<"name">> => <<"test_resource">>}, + #{} + ), #{pid := Pid0} = emqx_resource:query(?ID, get_state), @@ -216,8 +234,10 @@ t_stop_start(_) -> ?assertNot(is_process_alive(Pid0)), - ?assertMatch({error, {emqx_resource, #{reason := disconnected}}}, - emqx_resource:query(?ID, get_state)), + ?assertMatch( + {error, {emqx_resource, #{reason := disconnected}}}, + emqx_resource:query(?ID, get_state) + ), ok = emqx_resource:restart(?ID), @@ -229,22 +249,25 @@ t_stop_start(_) -> t_stop_start_local(_) -> {error, _} = emqx_resource:check_and_create_local( - ?ID, - ?DEFAULT_RESOURCE_GROUP, - ?TEST_RESOURCE, - 
#{unknown => test_resource}), + ?ID, + ?DEFAULT_RESOURCE_GROUP, + ?TEST_RESOURCE, + #{unknown => test_resource} + ), {ok, _} = emqx_resource:check_and_create_local( - ?ID, - ?DEFAULT_RESOURCE_GROUP, - ?TEST_RESOURCE, - #{<<"name">> => <<"test_resource">>}), + ?ID, + ?DEFAULT_RESOURCE_GROUP, + ?TEST_RESOURCE, + #{<<"name">> => <<"test_resource">>} + ), {ok, _} = emqx_resource:check_and_recreate_local( - ?ID, - ?TEST_RESOURCE, - #{<<"name">> => <<"test_resource">>}, - #{}), + ?ID, + ?TEST_RESOURCE, + #{<<"name">> => <<"test_resource">>}, + #{} + ), #{pid := Pid0} = emqx_resource:query(?ID, get_state), @@ -254,8 +277,10 @@ t_stop_start_local(_) -> ?assertNot(is_process_alive(Pid0)), - ?assertMatch({error, {emqx_resource, #{reason := disconnected}}}, - emqx_resource:query(?ID, get_state)), + ?assertMatch( + {error, {emqx_resource, #{reason := disconnected}}}, + emqx_resource:query(?ID, get_state) + ), ok = emqx_resource:restart(?ID), @@ -265,60 +290,73 @@ t_stop_start_local(_) -> t_list_filter(_) -> {ok, _} = emqx_resource:create_local( - emqx_resource:generate_id(<<"a">>), - <<"group1">>, - ?TEST_RESOURCE, - #{name => a}), + emqx_resource:generate_id(<<"a">>), + <<"group1">>, + ?TEST_RESOURCE, + #{name => a} + ), {ok, _} = emqx_resource:create_local( - emqx_resource:generate_id(<<"a">>), - <<"group2">>, - ?TEST_RESOURCE, - #{name => grouped_a}), + emqx_resource:generate_id(<<"a">>), + <<"group2">>, + ?TEST_RESOURCE, + #{name => grouped_a} + ), [Id1] = emqx_resource:list_group_instances(<<"group1">>), ?assertMatch( {ok, <<"group1">>, #{config := #{name := a}}}, - emqx_resource:get_instance(Id1)), + emqx_resource:get_instance(Id1) + ), [Id2] = emqx_resource:list_group_instances(<<"group2">>), ?assertMatch( {ok, <<"group2">>, #{config := #{name := grouped_a}}}, - emqx_resource:get_instance(Id2)). + emqx_resource:get_instance(Id2) + ). 
t_create_dry_run_local(_) -> ?assertEqual( - ok, - emqx_resource:create_dry_run_local( - ?TEST_RESOURCE, - #{name => test_resource, register => true})), + ok, + emqx_resource:create_dry_run_local( + ?TEST_RESOURCE, + #{name => test_resource, register => true} + ) + ), ?assertEqual(undefined, whereis(test_resource)). t_create_dry_run_local_failed(_) -> - {Res, _} = emqx_resource:create_dry_run_local(?TEST_RESOURCE, - #{cteate_error => true}), + {Res, _} = emqx_resource:create_dry_run_local( + ?TEST_RESOURCE, + #{cteate_error => true} + ), ?assertEqual(error, Res), - {Res, _} = emqx_resource:create_dry_run_local(?TEST_RESOURCE, - #{name => test_resource, health_check_error => true}), + {Res, _} = emqx_resource:create_dry_run_local( + ?TEST_RESOURCE, + #{name => test_resource, health_check_error => true} + ), ?assertEqual(error, Res), - {Res, _} = emqx_resource:create_dry_run_local(?TEST_RESOURCE, - #{name => test_resource, stop_error => true}), + {Res, _} = emqx_resource:create_dry_run_local( + ?TEST_RESOURCE, + #{name => test_resource, stop_error => true} + ), ?assertEqual(error, Res). t_test_func(_) -> ?assertEqual(ok, erlang:apply(emqx_resource_validator:not_empty("not_empty"), [<<"someval">>])), ?assertEqual(ok, erlang:apply(emqx_resource_validator:min(int, 3), [4])), - ?assertEqual(ok, erlang:apply(emqx_resource_validator:max(array, 10), [[a,b,c,d]])), + ?assertEqual(ok, erlang:apply(emqx_resource_validator:max(array, 10), [[a, b, c, d]])), ?assertEqual(ok, erlang:apply(emqx_resource_validator:max(string, 10), ["less10"])). 
t_reset_metrics(_) -> {ok, _} = emqx_resource:create( - ?ID, - ?DEFAULT_RESOURCE_GROUP, - ?TEST_RESOURCE, - #{name => test_resource}), + ?ID, + ?DEFAULT_RESOURCE_GROUP, + ?TEST_RESOURCE, + #{name => test_resource} + ), #{pid := Pid} = emqx_resource:query(?ID, get_state), emqx_resource:reset_metrics(?ID), diff --git a/apps/emqx_resource/test/emqx_test_resource.erl b/apps/emqx_resource/test/emqx_test_resource.erl index c0c4b4ff5..783393e74 100644 --- a/apps/emqx_resource/test/emqx_test_resource.erl +++ b/apps/emqx_resource/test/emqx_test_resource.erl @@ -21,17 +21,21 @@ -behaviour(emqx_resource). %% callbacks of behaviour emqx_resource --export([ on_start/2 - , on_stop/2 - , on_query/4 - , on_health_check/2 - ]). +-export([ + on_start/2, + on_stop/2, + on_query/4, + on_health_check/2 +]). %% callbacks for emqx_resource config schema -export([roots/0]). -roots() -> [{name, fun name/1}, - {register, fun register/1}]. +roots() -> + [ + {name, fun name/1}, + {register, fun register/1} + ]. name(type) -> atom(); name(required) -> true; @@ -46,21 +50,27 @@ on_start(_InstId, #{create_error := true}) -> error("some error"); on_start(InstId, #{name := Name, stop_error := true} = Opts) -> Register = maps:get(register, Opts, false), - {ok, #{name => Name, - id => InstId, - stop_error => true, - pid => spawn_dummy_process(Name, Register)}}; + {ok, #{ + name => Name, + id => InstId, + stop_error => true, + pid => spawn_dummy_process(Name, Register) + }}; on_start(InstId, #{name := Name, health_check_error := true} = Opts) -> Register = maps:get(register, Opts, false), - {ok, #{name => Name, - id => InstId, - health_check_error => true, - pid => spawn_dummy_process(Name, Register)}}; + {ok, #{ + name => Name, + id => InstId, + health_check_error => true, + pid => spawn_dummy_process(Name, Register) + }}; on_start(InstId, #{name := Name} = Opts) -> Register = maps:get(register, Opts, false), - {ok, #{name => Name, - id => InstId, - pid => spawn_dummy_process(Name, Register)}}. 
+ {ok, #{ + name => Name, + id => InstId, + pid => spawn_dummy_process(Name, Register) + }}. on_stop(_InstId, #{stop_error := true}) -> {error, stop_error}; @@ -86,13 +96,15 @@ on_health_check(_InstId, State = #{pid := Pid}) -> spawn_dummy_process(Name, Register) -> spawn( - fun() -> - true = case Register of - true -> register(Name, self()); - _ -> true - end, - Ref = make_ref(), - receive - Ref -> ok - end - end). + fun() -> + true = + case Register of + true -> register(Name, self()); + _ -> true + end, + Ref = make_ref(), + receive + Ref -> ok + end + end + ). diff --git a/apps/emqx_rule_engine/include/rule_engine.hrl b/apps/emqx_rule_engine/include/rule_engine.hrl index a8b8ae1bb..20c339377 100644 --- a/apps/emqx_rule_engine/include/rule_engine.hrl +++ b/apps/emqx_rule_engine/include/rule_engine.hrl @@ -23,7 +23,7 @@ -type rule_id() :: binary(). -type rule_name() :: binary(). --type mf() :: {Module::atom(), Fun::atom()}. +-type mf() :: {Module :: atom(), Fun :: atom()}. -type hook() :: atom() | 'any'. -type topic() :: binary(). @@ -36,60 +36,73 @@ -type bridge_channel_id() :: binary(). -type output_fun_args() :: map(). --type output() :: #{ - mod := builtin_output_module() | module(), - func := builtin_output_func() | atom(), - args => output_fun_args() -} | bridge_channel_id(). +-type output() :: + #{ + mod := builtin_output_module() | module(), + func := builtin_output_func() | atom(), + args => output_fun_args() + } + | bridge_channel_id(). -type rule() :: - #{ id := rule_id() - , name := binary() - , sql := binary() - , outputs := [output()] - , enable := boolean() - , description => binary() - , created_at := integer() %% epoch in millisecond precision - , updated_at := integer() %% epoch in millisecond precision - , from := list(topic()) - , is_foreach := boolean() - , fields := list() - , doeach := term() - , incase := term() - , conditions := tuple() - }. 
+ #{ + id := rule_id(), + name := binary(), + sql := binary(), + outputs := [output()], + enable := boolean(), + description => binary(), + %% epoch in millisecond precision + created_at := integer(), + %% epoch in millisecond precision + updated_at := integer(), + from := list(topic()), + is_foreach := boolean(), + fields := list(), + doeach := term(), + incase := term(), + conditions := tuple() + }. %% Arithmetic operators --define(is_arith(Op), (Op =:= '+' orelse - Op =:= '-' orelse - Op =:= '*' orelse - Op =:= '/' orelse - Op =:= 'div')). +-define(is_arith(Op), + (Op =:= '+' orelse + Op =:= '-' orelse + Op =:= '*' orelse + Op =:= '/' orelse + Op =:= 'div') +). %% Compare operators --define(is_comp(Op), (Op =:= '=' orelse - Op =:= '=~' orelse - Op =:= '>' orelse - Op =:= '<' orelse - Op =:= '<=' orelse - Op =:= '>=' orelse - Op =:= '<>' orelse - Op =:= '!=')). +-define(is_comp(Op), + (Op =:= '=' orelse + Op =:= '=~' orelse + Op =:= '>' orelse + Op =:= '<' orelse + Op =:= '<=' orelse + Op =:= '>=' orelse + Op =:= '<>' orelse + Op =:= '!=') +). %% Logical operators -define(is_logical(Op), (Op =:= 'and' orelse Op =:= 'or')). -define(RAISE(_EXP_, _ERROR_), - ?RAISE(_EXP_, _ = do_nothing, _ERROR_)). + ?RAISE(_EXP_, _ = do_nothing, _ERROR_) +). -define(RAISE(_EXP_, _EXP_ON_FAIL_, _ERROR_), - fun() -> - try (_EXP_) - catch _EXCLASS_:_EXCPTION_:_ST_ -> + fun() -> + try + (_EXP_) + catch + _EXCLASS_:_EXCPTION_:_ST_ -> _EXP_ON_FAIL_, throw(_ERROR_) - end - end()). + end + end() +). %% Tables -define(RULE_TAB, emqx_rule_engine). diff --git a/apps/emqx_rule_engine/rebar.config b/apps/emqx_rule_engine/rebar.config index 72be78a1d..2bc40e977 100644 --- a/apps/emqx_rule_engine/rebar.config +++ b/apps/emqx_rule_engine/rebar.config @@ -1,28 +1,35 @@ %% -*- mode: erlang -*- -{deps, [ {emqx, {path, "../emqx"}} - ]}. +{deps, [{emqx, {path, "../emqx"}}]}. 
-{erl_opts, [warn_unused_vars, - warn_shadow_vars, - warn_unused_import, - warn_obsolete_guard, - no_debug_info, - compressed, %% for edge - {parse_transform} - ]}. +{erl_opts, [ + warn_unused_vars, + warn_shadow_vars, + warn_unused_import, + warn_obsolete_guard, + no_debug_info, + %% for edge + compressed, + {parse_transform} +]}. {overrides, [{add, [{erl_opts, [no_debug_info, compressed]}]}]}. {edoc_opts, [{preprocess, true}]}. -{xref_checks, [undefined_function_calls, undefined_functions, - locals_not_used, deprecated_function_calls, - warnings_as_errors, deprecated_functions - ]}. +{xref_checks, [ + undefined_function_calls, + undefined_functions, + locals_not_used, + deprecated_function_calls, + warnings_as_errors, + deprecated_functions +]}. {cover_enabled, true}. {cover_opts, [verbose]}. {cover_export_enabled, true}. {plugins, [rebar3_proper]}. + +{project_plugins, [erlfmt]}. diff --git a/apps/emqx_rule_engine/src/emqx_rule_api_schema.erl b/apps/emqx_rule_engine/src/emqx_rule_api_schema.erl index 9997043ff..2623dd0a7 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_api_schema.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_api_schema.erl @@ -6,8 +6,7 @@ -include_lib("hocon/include/hoconsc.hrl"). -include_lib("emqx/include/logger.hrl"). --export([ check_params/2 - ]). +-export([check_params/2]). -export([roots/0, fields/1]). @@ -20,10 +19,11 @@ check_params(Params, Tag) -> try hocon_tconf:check_plain(?MODULE, #{BTag => Params}, Opts, [Tag]) of #{Tag := Checked} -> {ok, Checked} catch - throw : Reason -> - ?SLOG(error, #{msg => "check_rule_params_failed", - reason => Reason - }), + throw:Reason -> + ?SLOG(error, #{ + msg => "check_rule_params_failed", + reason => Reason + }), {error, Reason} end. 
@@ -31,226 +31,285 @@ check_params(Params, Tag) -> %% Hocon Schema Definitions roots() -> - [ {"rule_creation", sc(ref("rule_creation"), #{desc => ?DESC("root_rule_creation")})} - , {"rule_info", sc(ref("rule_info"), #{desc => ?DESC("root_rule_info")})} - , {"rule_events", sc(ref("rule_events"), #{desc => ?DESC("root_rule_events")})} - , {"rule_test", sc(ref("rule_test"), #{desc => ?DESC("root_rule_test")})} + [ + {"rule_creation", sc(ref("rule_creation"), #{desc => ?DESC("root_rule_creation")})}, + {"rule_info", sc(ref("rule_info"), #{desc => ?DESC("root_rule_info")})}, + {"rule_events", sc(ref("rule_events"), #{desc => ?DESC("root_rule_events")})}, + {"rule_test", sc(ref("rule_test"), #{desc => ?DESC("root_rule_test")})} ]. fields("rule_creation") -> emqx_rule_engine_schema:fields("rules"); - fields("rule_info") -> - [ rule_id() - , {"metrics", sc(ref("metrics"), #{desc => ?DESC("ri_metrics")})} - , {"node_metrics", sc(hoconsc:array(ref("node_metrics")), - #{ desc => ?DESC("ri_node_metrics") - })} - , {"from", sc(hoconsc:array(binary()), - #{desc => ?DESC("ri_from"), example => "t/#"})} - , {"created_at", sc(binary(), - #{ desc => ?DESC("ri_created_at") - , example => "2021-12-01T15:00:43.153+08:00" - })} + [ + rule_id(), + {"metrics", sc(ref("metrics"), #{desc => ?DESC("ri_metrics")})}, + {"node_metrics", + sc( + hoconsc:array(ref("node_metrics")), + #{desc => ?DESC("ri_node_metrics")} + )}, + {"from", + sc( + hoconsc:array(binary()), + #{desc => ?DESC("ri_from"), example => "t/#"} + )}, + {"created_at", + sc( + binary(), + #{ + desc => ?DESC("ri_created_at"), + example => "2021-12-01T15:00:43.153+08:00" + } + )} ] ++ fields("rule_creation"); - %% TODO: we can delete this API if the Dashboard not depends on it fields("rule_events") -> - ETopics = [binary_to_atom(emqx_rule_events:event_topic(E)) || E <- emqx_rule_events:event_names()], - [ {"event", sc(hoconsc:enum(ETopics), #{desc => ?DESC("rs_event"), required => true})} - , {"title", sc(binary(), #{desc => 
?DESC("rs_title"), example => "some title"})} - , {"description", sc(binary(), #{desc => ?DESC("rs_description"), example => "some desc"})} - , {"columns", sc(map(), #{desc => ?DESC("rs_columns")})} - , {"test_columns", sc(map(), #{desc => ?DESC("rs_test_columns")})} - , {"sql_example", sc(binary(), #{desc => ?DESC("rs_sql_example")})} + ETopics = [ + binary_to_atom(emqx_rule_events:event_topic(E)) + || E <- emqx_rule_events:event_names() + ], + [ + {"event", sc(hoconsc:enum(ETopics), #{desc => ?DESC("rs_event"), required => true})}, + {"title", sc(binary(), #{desc => ?DESC("rs_title"), example => "some title"})}, + {"description", sc(binary(), #{desc => ?DESC("rs_description"), example => "some desc"})}, + {"columns", sc(map(), #{desc => ?DESC("rs_columns")})}, + {"test_columns", sc(map(), #{desc => ?DESC("rs_test_columns")})}, + {"sql_example", sc(binary(), #{desc => ?DESC("rs_sql_example")})} ]; - fields("rule_test") -> - [ {"context", sc(hoconsc:union([ ref("ctx_pub") - , ref("ctx_sub") - , ref("ctx_unsub") - , ref("ctx_delivered") - , ref("ctx_acked") - , ref("ctx_dropped") - , ref("ctx_connected") - , ref("ctx_disconnected") - , ref("ctx_connack") - , ref("ctx_check_authz_complete") - , ref("ctx_bridge_mqtt") - ]), - #{desc => ?DESC("test_context"), - default => #{}})} - , {"sql", sc(binary(), #{desc => ?DESC("test_sql"), required => true})} + [ + {"context", + sc( + hoconsc:union([ + ref("ctx_pub"), + ref("ctx_sub"), + ref("ctx_unsub"), + ref("ctx_delivered"), + ref("ctx_acked"), + ref("ctx_dropped"), + ref("ctx_connected"), + ref("ctx_disconnected"), + ref("ctx_connack"), + ref("ctx_check_authz_complete"), + ref("ctx_bridge_mqtt") + ]), + #{ + desc => ?DESC("test_context"), + default => #{} + } + )}, + {"sql", sc(binary(), #{desc => ?DESC("test_sql"), required => true})} ]; - fields("metrics") -> - [ {"sql.matched", sc(non_neg_integer(), #{ - desc => ?DESC("metrics_sql_matched") - })} - , {"sql.matched.rate", sc(float(), #{desc => 
?DESC("metrics_sql_matched_rate") })} - , {"sql.matched.rate.max", sc(float(), #{desc => ?DESC("metrics_sql_matched_rate_max") })} - , {"sql.matched.rate.last5m", sc(float(), - #{desc => ?DESC("metrics_sql_matched_rate_last5m") })} - , {"sql.passed", sc(non_neg_integer(), #{desc => ?DESC("metrics_sql_passed") })} - , {"sql.failed", sc(non_neg_integer(), #{desc => ?DESC("metrics_sql_failed") })} - , {"sql.failed.exception", sc(non_neg_integer(), #{ - desc => ?DESC("metrics_sql_failed_exception") - })} - , {"sql.failed.unknown", sc(non_neg_integer(), #{ - desc => ?DESC("metrics_sql_failed_unknown") - })} - , {"outputs.total", sc(non_neg_integer(), #{ - desc => ?DESC("metrics_outputs_total") - })} - , {"outputs.success", sc(non_neg_integer(), #{ - desc => ?DESC("metrics_outputs_success") - })} - , {"outputs.failed", sc(non_neg_integer(), #{ - desc => ?DESC("metrics_outputs_failed") - })} - , {"outputs.failed.out_of_service", sc(non_neg_integer(), #{ - desc => ?DESC("metrics_outputs_failed_out_of_service") - })} - , {"outputs.failed.unknown", sc(non_neg_integer(), #{ - desc => ?DESC("metrics_outputs_failed_unknown") - })} + [ + {"sql.matched", + sc(non_neg_integer(), #{ + desc => ?DESC("metrics_sql_matched") + })}, + {"sql.matched.rate", sc(float(), #{desc => ?DESC("metrics_sql_matched_rate")})}, + {"sql.matched.rate.max", sc(float(), #{desc => ?DESC("metrics_sql_matched_rate_max")})}, + {"sql.matched.rate.last5m", + sc( + float(), + #{desc => ?DESC("metrics_sql_matched_rate_last5m")} + )}, + {"sql.passed", sc(non_neg_integer(), #{desc => ?DESC("metrics_sql_passed")})}, + {"sql.failed", sc(non_neg_integer(), #{desc => ?DESC("metrics_sql_failed")})}, + {"sql.failed.exception", + sc(non_neg_integer(), #{ + desc => ?DESC("metrics_sql_failed_exception") + })}, + {"sql.failed.unknown", + sc(non_neg_integer(), #{ + desc => ?DESC("metrics_sql_failed_unknown") + })}, + {"outputs.total", + sc(non_neg_integer(), #{ + desc => ?DESC("metrics_outputs_total") + })}, + 
{"outputs.success", + sc(non_neg_integer(), #{ + desc => ?DESC("metrics_outputs_success") + })}, + {"outputs.failed", + sc(non_neg_integer(), #{ + desc => ?DESC("metrics_outputs_failed") + })}, + {"outputs.failed.out_of_service", + sc(non_neg_integer(), #{ + desc => ?DESC("metrics_outputs_failed_out_of_service") + })}, + {"outputs.failed.unknown", + sc(non_neg_integer(), #{ + desc => ?DESC("metrics_outputs_failed_unknown") + })} ]; - fields("node_metrics") -> - [ {"node", sc(binary(), #{desc => ?DESC("node_node"), example => "emqx@127.0.0.1"})} - ] ++ fields("metrics"); - + [{"node", sc(binary(), #{desc => ?DESC("node_node"), example => "emqx@127.0.0.1"})}] ++ + fields("metrics"); fields("ctx_pub") -> - [ {"event_type", sc(message_publish, #{desc => ?DESC("event_event_type"), required => true})} - , {"id", sc(binary(), #{desc => ?DESC("event_id")})} - , {"clientid", sc(binary(), #{desc => ?DESC("event_clientid")})} - , {"username", sc(binary(), #{desc => ?DESC("event_username")})} - , {"payload", sc(binary(), #{desc => ?DESC("event_payload")})} - , {"peerhost", sc(binary(), #{desc => ?DESC("event_peerhost")})} - , {"topic", sc(binary(), #{desc => ?DESC("event_topic")})} - , {"publish_received_at", sc(integer(), #{ - desc => ?DESC("event_publish_received_at")})} + [ + {"event_type", sc(message_publish, #{desc => ?DESC("event_event_type"), required => true})}, + {"id", sc(binary(), #{desc => ?DESC("event_id")})}, + {"clientid", sc(binary(), #{desc => ?DESC("event_clientid")})}, + {"username", sc(binary(), #{desc => ?DESC("event_username")})}, + {"payload", sc(binary(), #{desc => ?DESC("event_payload")})}, + {"peerhost", sc(binary(), #{desc => ?DESC("event_peerhost")})}, + {"topic", sc(binary(), #{desc => ?DESC("event_topic")})}, + {"publish_received_at", + sc(integer(), #{ + desc => ?DESC("event_publish_received_at") + })} ] ++ [qos()]; - fields("ctx_sub") -> - [ {"event_type", sc(session_subscribed, #{desc => ?DESC("event_event_type"), required => true})} - , 
{"clientid", sc(binary(), #{desc => ?DESC("event_clientid")})} - , {"username", sc(binary(), #{desc => ?DESC("event_username")})} - , {"payload", sc(binary(), #{desc => ?DESC("event_payload")})} - , {"peerhost", sc(binary(), #{desc => ?DESC("event_peerhost")})} - , {"topic", sc(binary(), #{desc => ?DESC("event_topic")})} - , {"publish_received_at", sc(integer(), #{ - desc => ?DESC("event_publish_received_at")})} + [ + {"event_type", + sc(session_subscribed, #{desc => ?DESC("event_event_type"), required => true})}, + {"clientid", sc(binary(), #{desc => ?DESC("event_clientid")})}, + {"username", sc(binary(), #{desc => ?DESC("event_username")})}, + {"payload", sc(binary(), #{desc => ?DESC("event_payload")})}, + {"peerhost", sc(binary(), #{desc => ?DESC("event_peerhost")})}, + {"topic", sc(binary(), #{desc => ?DESC("event_topic")})}, + {"publish_received_at", + sc(integer(), #{ + desc => ?DESC("event_publish_received_at") + })} ] ++ [qos()]; - fields("ctx_unsub") -> - [{"event_type", sc(session_unsubscribed, #{desc => ?DESC("event_event_type"), required => true})}] ++ - proplists:delete("event_type", fields("ctx_sub")); - + [ + {"event_type", + sc(session_unsubscribed, #{desc => ?DESC("event_event_type"), required => true})} + ] ++ + proplists:delete("event_type", fields("ctx_sub")); fields("ctx_delivered") -> - [ {"event_type", sc(message_delivered, #{desc => ?DESC("event_event_type"), required => true})} - , {"id", sc(binary(), #{desc => ?DESC("event_id")})} - , {"from_clientid", sc(binary(), #{desc => ?DESC("event_from_clientid")})} - , {"from_username", sc(binary(), #{desc => ?DESC("event_from_username")})} - , {"clientid", sc(binary(), #{desc => ?DESC("event_clientid")})} - , {"username", sc(binary(), #{desc => ?DESC("event_username")})} - , {"payload", sc(binary(), #{desc => ?DESC("event_payload")})} - , {"peerhost", sc(binary(), #{desc => ?DESC("event_peerhost")})} - , {"topic", sc(binary(), #{desc => ?DESC("event_topic")})} - , {"publish_received_at", 
sc(integer(), #{ - desc => ?DESC("event_publish_received_at")})} + [ + {"event_type", + sc(message_delivered, #{desc => ?DESC("event_event_type"), required => true})}, + {"id", sc(binary(), #{desc => ?DESC("event_id")})}, + {"from_clientid", sc(binary(), #{desc => ?DESC("event_from_clientid")})}, + {"from_username", sc(binary(), #{desc => ?DESC("event_from_username")})}, + {"clientid", sc(binary(), #{desc => ?DESC("event_clientid")})}, + {"username", sc(binary(), #{desc => ?DESC("event_username")})}, + {"payload", sc(binary(), #{desc => ?DESC("event_payload")})}, + {"peerhost", sc(binary(), #{desc => ?DESC("event_peerhost")})}, + {"topic", sc(binary(), #{desc => ?DESC("event_topic")})}, + {"publish_received_at", + sc(integer(), #{ + desc => ?DESC("event_publish_received_at") + })} ] ++ [qos()]; - fields("ctx_acked") -> [{"event_type", sc(message_acked, #{desc => ?DESC("event_event_type"), required => true})}] ++ - proplists:delete("event_type", fields("ctx_delivered")); - + proplists:delete("event_type", fields("ctx_delivered")); fields("ctx_dropped") -> - [ {"event_type", sc(message_dropped, #{desc => ?DESC("event_event_type"), required => true})} - , {"id", sc(binary(), #{desc => ?DESC("event_id")})} - , {"reason", sc(binary(), #{desc => ?DESC("event_ctx_dropped")})} - , {"clientid", sc(binary(), #{desc => ?DESC("event_clientid")})} - , {"username", sc(binary(), #{desc => ?DESC("event_username")})} - , {"payload", sc(binary(), #{desc => ?DESC("event_payload")})} - , {"peerhost", sc(binary(), #{desc => ?DESC("event_peerhost")})} - , {"topic", sc(binary(), #{desc => ?DESC("event_topic")})} - , {"publish_received_at", sc(integer(), #{ - desc => ?DESC("event_publish_received_at")})} + [ + {"event_type", sc(message_dropped, #{desc => ?DESC("event_event_type"), required => true})}, + {"id", sc(binary(), #{desc => ?DESC("event_id")})}, + {"reason", sc(binary(), #{desc => ?DESC("event_ctx_dropped")})}, + {"clientid", sc(binary(), #{desc => ?DESC("event_clientid")})}, + 
{"username", sc(binary(), #{desc => ?DESC("event_username")})}, + {"payload", sc(binary(), #{desc => ?DESC("event_payload")})}, + {"peerhost", sc(binary(), #{desc => ?DESC("event_peerhost")})}, + {"topic", sc(binary(), #{desc => ?DESC("event_topic")})}, + {"publish_received_at", + sc(integer(), #{ + desc => ?DESC("event_publish_received_at") + })} ] ++ [qos()]; - fields("ctx_connected") -> - [ {"event_type", sc(client_connected, #{desc => ?DESC("event_event_type"), required => true})} - , {"clientid", sc(binary(), #{desc => ?DESC("event_clientid")})} - , {"username", sc(binary(), #{desc => ?DESC("event_username")})} - , {"mountpoint", sc(binary(), #{desc => ?DESC("event_mountpoint")})} - , {"peername", sc(binary(), #{desc => ?DESC("event_peername")})} - , {"sockname", sc(binary(), #{desc => ?DESC("event_sockname")})} - , {"proto_name", sc(binary(), #{desc => ?DESC("event_proto_name")})} - , {"proto_ver", sc(binary(), #{desc => ?DESC("event_proto_ver")})} - , {"keepalive", sc(integer(), #{desc => ?DESC("event_keepalive")})} - , {"clean_start", sc(boolean(), #{desc => ?DESC("event_clean_start"), default => true})} - , {"expiry_interval", sc(integer(), #{desc => ?DESC("event_expiry_interval")})} - , {"is_bridge", sc(boolean(), #{desc => ?DESC("event_is_bridge"), default => false})} - , {"connected_at", sc(integer(), #{ - desc => ?DESC("event_connected_at")})} + [ + {"event_type", + sc(client_connected, #{desc => ?DESC("event_event_type"), required => true})}, + {"clientid", sc(binary(), #{desc => ?DESC("event_clientid")})}, + {"username", sc(binary(), #{desc => ?DESC("event_username")})}, + {"mountpoint", sc(binary(), #{desc => ?DESC("event_mountpoint")})}, + {"peername", sc(binary(), #{desc => ?DESC("event_peername")})}, + {"sockname", sc(binary(), #{desc => ?DESC("event_sockname")})}, + {"proto_name", sc(binary(), #{desc => ?DESC("event_proto_name")})}, + {"proto_ver", sc(binary(), #{desc => ?DESC("event_proto_ver")})}, + {"keepalive", sc(integer(), #{desc => 
?DESC("event_keepalive")})}, + {"clean_start", sc(boolean(), #{desc => ?DESC("event_clean_start"), default => true})}, + {"expiry_interval", sc(integer(), #{desc => ?DESC("event_expiry_interval")})}, + {"is_bridge", sc(boolean(), #{desc => ?DESC("event_is_bridge"), default => false})}, + {"connected_at", + sc(integer(), #{ + desc => ?DESC("event_connected_at") + })} ]; - fields("ctx_disconnected") -> - [ {"event_type", sc(client_disconnected, #{desc => ?DESC("event_event_type"), required => true})} - , {"clientid", sc(binary(), #{desc => ?DESC("event_clientid")})} - , {"username", sc(binary(), #{desc => ?DESC("event_username")})} - , {"reason", sc(binary(), #{desc => ?DESC("event_ctx_disconnected_reason")})} - , {"peername", sc(binary(), #{desc => ?DESC("event_peername")})} - , {"sockname", sc(binary(), #{desc => ?DESC("event_sockname")})} - , {"disconnected_at", sc(integer(), #{ - desc => ?DESC("event_ctx_disconnected_da")})} + [ + {"event_type", + sc(client_disconnected, #{desc => ?DESC("event_event_type"), required => true})}, + {"clientid", sc(binary(), #{desc => ?DESC("event_clientid")})}, + {"username", sc(binary(), #{desc => ?DESC("event_username")})}, + {"reason", sc(binary(), #{desc => ?DESC("event_ctx_disconnected_reason")})}, + {"peername", sc(binary(), #{desc => ?DESC("event_peername")})}, + {"sockname", sc(binary(), #{desc => ?DESC("event_sockname")})}, + {"disconnected_at", + sc(integer(), #{ + desc => ?DESC("event_ctx_disconnected_da") + })} ]; - fields("ctx_connack") -> - [ {"event_type", sc(client_connack, #{desc => ?DESC("event_event_type"), required => true})} - , {"reason_code", sc(binary(), #{desc => ?DESC("event_ctx_connack_reason_code")})} - , {"clientid", sc(binary(), #{desc => ?DESC("event_clientid")})} - , {"clean_start", sc(boolean(), #{desc => ?DESC("event_clean_start"), default => true})} - , {"username", sc(binary(), #{desc => ?DESC("event_username")})} - , {"peername", sc(binary(), #{desc => ?DESC("event_peername")})} - , {"sockname", 
sc(binary(), #{desc => ?DESC("event_sockname")})} - , {"proto_name", sc(binary(), #{desc => ?DESC("event_proto_name")})} - , {"proto_ver", sc(binary(), #{desc => ?DESC("event_proto_ver")})} - , {"keepalive", sc(integer(), #{desc => ?DESC("event_keepalive")})} - , {"expiry_interval", sc(integer(), #{desc => ?DESC("event_expiry_interval")})} - , {"connected_at", sc(integer(), #{ - desc => ?DESC("event_connected_at")})} + [ + {"event_type", sc(client_connack, #{desc => ?DESC("event_event_type"), required => true})}, + {"reason_code", sc(binary(), #{desc => ?DESC("event_ctx_connack_reason_code")})}, + {"clientid", sc(binary(), #{desc => ?DESC("event_clientid")})}, + {"clean_start", sc(boolean(), #{desc => ?DESC("event_clean_start"), default => true})}, + {"username", sc(binary(), #{desc => ?DESC("event_username")})}, + {"peername", sc(binary(), #{desc => ?DESC("event_peername")})}, + {"sockname", sc(binary(), #{desc => ?DESC("event_sockname")})}, + {"proto_name", sc(binary(), #{desc => ?DESC("event_proto_name")})}, + {"proto_ver", sc(binary(), #{desc => ?DESC("event_proto_ver")})}, + {"keepalive", sc(integer(), #{desc => ?DESC("event_keepalive")})}, + {"expiry_interval", sc(integer(), #{desc => ?DESC("event_expiry_interval")})}, + {"connected_at", + sc(integer(), #{ + desc => ?DESC("event_connected_at") + })} ]; fields("ctx_check_authz_complete") -> - [ {"event_type", sc(client_check_authz_complete, #{desc => ?DESC("event_event_type"), required => true})} - , {"clientid", sc(binary(), #{desc => ?DESC("event_clientid")})} - , {"username", sc(binary(), #{desc => ?DESC("event_username")})} - , {"peerhost", sc(binary(), #{desc => ?DESC("event_peerhost")})} - , {"topic", sc(binary(), #{desc => ?DESC("event_topic")})} - , {"action", sc(binary(), #{desc => ?DESC("event_action")})} - , {"authz_source", sc(binary(), #{desc => ?DESC("event_authz_source")})} - , {"result", sc(binary(), #{desc => ?DESC("event_result")})} + [ + {"event_type", + sc(client_check_authz_complete, 
#{desc => ?DESC("event_event_type"), required => true})}, + {"clientid", sc(binary(), #{desc => ?DESC("event_clientid")})}, + {"username", sc(binary(), #{desc => ?DESC("event_username")})}, + {"peerhost", sc(binary(), #{desc => ?DESC("event_peerhost")})}, + {"topic", sc(binary(), #{desc => ?DESC("event_topic")})}, + {"action", sc(binary(), #{desc => ?DESC("event_action")})}, + {"authz_source", sc(binary(), #{desc => ?DESC("event_authz_source")})}, + {"result", sc(binary(), #{desc => ?DESC("event_result")})} ]; fields("ctx_bridge_mqtt") -> - [ {"event_type", sc('$bridges/mqtt:*', #{desc => ?DESC("event_event_type"), required => true})} - , {"id", sc(binary(), #{desc => ?DESC("event_id")})} - , {"payload", sc(binary(), #{desc => ?DESC("event_payload")})} - , {"topic", sc(binary(), #{desc => ?DESC("event_topic")})} - , {"server", sc(binary(), #{desc => ?DESC("event_server")})} - , {"dup", sc(binary(), #{desc => ?DESC("event_dup")})} - , {"retain", sc(binary(), #{desc => ?DESC("event_retain")})} - , {"message_received_at", sc(integer(), #{ - desc => ?DESC("event_publish_received_at")})} + [ + {"event_type", + sc('$bridges/mqtt:*', #{desc => ?DESC("event_event_type"), required => true})}, + {"id", sc(binary(), #{desc => ?DESC("event_id")})}, + {"payload", sc(binary(), #{desc => ?DESC("event_payload")})}, + {"topic", sc(binary(), #{desc => ?DESC("event_topic")})}, + {"server", sc(binary(), #{desc => ?DESC("event_server")})}, + {"dup", sc(binary(), #{desc => ?DESC("event_dup")})}, + {"retain", sc(binary(), #{desc => ?DESC("event_retain")})}, + {"message_received_at", + sc(integer(), #{ + desc => ?DESC("event_publish_received_at") + })} ] ++ [qos()]. qos() -> {"qos", sc(emqx_schema:qos(), #{desc => ?DESC("event_qos")})}. rule_id() -> - {"id", sc(binary(), - #{ desc => ?DESC("rule_id"), required => true - , example => "293fb66f" - })}. + {"id", + sc( + binary(), + #{ + desc => ?DESC("rule_id"), + required => true, + example => "293fb66f" + } + )}. 
sc(Type, Meta) -> hoconsc:mk(Type, Meta). ref(Field) -> hoconsc:ref(?MODULE, Field). diff --git a/apps/emqx_rule_engine/src/emqx_rule_date.erl b/apps/emqx_rule_engine/src/emqx_rule_date.erl index a06571b64..83056754b 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_date.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_date.erl @@ -18,19 +18,27 @@ -export([date/3, date/4, parse_date/4]). --export([ is_int_char/1 - , is_symbol_char/1 - , is_m_char/1 - ]). +-export([ + is_int_char/1, + is_symbol_char/1, + is_m_char/1 +]). -record(result, { - year = "1970" :: string() %%year() - , month = "1" :: string() %%month() - , day = "1" :: string() %%day() - , hour = "0" :: string() %%hour() - , minute = "0" :: string() %%minute() %% epoch in millisecond precision - , second = "0" :: string() %%second() %% epoch in millisecond precision - , zone = "+00:00" :: string() %%integer() %% zone maybe some value + %%year() + year = "1970" :: string(), + %%month() + month = "1" :: string(), + %%day() + day = "1" :: string(), + %%hour() + hour = "0" :: string(), + %%minute() %% epoch in millisecond precision + minute = "0" :: string(), + %%second() %% epoch in millisecond precision + second = "0" :: string(), + %%integer() %% zone maybe some value + zone = "+00:00" :: string() }). %% -type time_unit() :: 'microsecond' @@ -42,43 +50,59 @@ date(TimeUnit, Offset, FormatString) -> date(TimeUnit, Offset, FormatString, erlang:system_time(TimeUnit)). date(TimeUnit, Offset, FormatString, TimeEpoch) -> - [Head|Other] = string:split(FormatString, "%", all), + [Head | Other] = string:split(FormatString, "%", all), R = create_tag([{st, Head}], Other), - Res = lists:map(fun(Expr) -> - eval_tag(rmap(make_time(TimeUnit, Offset, TimeEpoch)), Expr) end, R), + Res = lists:map( + fun(Expr) -> + eval_tag(rmap(make_time(TimeUnit, Offset, TimeEpoch)), Expr) + end, + R + ), lists:concat(Res). 
parse_date(TimeUnit, Offset, FormatString, InputString) -> - [Head|Other] = string:split(FormatString, "%", all), + [Head | Other] = string:split(FormatString, "%", all), R = create_tag([{st, Head}], Other), - IsZ = fun(V) -> case V of - {tag, $Z} -> true; - _ -> false - end end, + IsZ = fun(V) -> + case V of + {tag, $Z} -> true; + _ -> false + end + end, R1 = lists:filter(IsZ, R), IfFun = fun(Con, A, B) -> - case Con of - [] -> A; - _ -> B - end end, + case Con of + [] -> A; + _ -> B + end + end, Res = parse_input(FormatString, InputString), - Str = Res#result.year ++ "-" - ++ Res#result.month ++ "-" - ++ Res#result.day ++ "T" - ++ Res#result.hour ++ ":" - ++ Res#result.minute ++ ":" - ++ Res#result.second ++ - IfFun(R1, Offset, Res#result.zone), + Str = + Res#result.year ++ "-" ++ + Res#result.month ++ "-" ++ + Res#result.day ++ "T" ++ + Res#result.hour ++ ":" ++ + Res#result.minute ++ ":" ++ + Res#result.second ++ + IfFun(R1, Offset, Res#result.zone), calendar:rfc3339_to_system_time(Str, [{unit, TimeUnit}]). -mlist(R)-> - [ {$H, R#result.hour} %% %H Shows hour in 24-hour format [15] - , {$M, R#result.minute} %% %M Displays minutes [00-59] - , {$S, R#result.second} %% %S Displays seconds [00-59] - , {$y, R#result.year} %% %y Displays year YYYY [2021] - , {$m, R#result.month} %% %m Displays the number of the month [01-12] - , {$d, R#result.day} %% %d Displays the number of the month [01-12] - , {$Z, R#result.zone} %% %Z Displays Time zone +mlist(R) -> + %% %H Shows hour in 24-hour format [15] + [ + {$H, R#result.hour}, + %% %M Displays minutes [00-59] + {$M, R#result.minute}, + %% %S Displays seconds [00-59] + {$S, R#result.second}, + %% %y Displays year YYYY [2021] + {$y, R#result.year}, + %% %m Displays the number of the month [01-12] + {$m, R#result.month}, + %% %d Displays the number of the month [01-12] + {$d, R#result.day}, + %% %Z Displays Time zone + {$Z, R#result.zone} ]. rmap(Result) -> @@ -88,69 +112,95 @@ support_char() -> "HMSymdZ". 
create_tag(Head, []) -> Head; -create_tag(Head, [Val1|RVal]) -> +create_tag(Head, [Val1 | RVal]) -> case Val1 of - [] -> create_tag(Head ++ [{st, [$%]}], RVal); - [H| Other] -> + [] -> + create_tag(Head ++ [{st, [$%]}], RVal); + [H | Other] -> case lists:member(H, support_char()) of true -> create_tag(Head ++ [{tag, H}, {st, Other}], RVal); - false -> create_tag(Head ++ [{st, [$%|Val1]}], RVal) + false -> create_tag(Head ++ [{st, [$% | Val1]}], RVal) end end. -eval_tag(_,{st, Str}) -> +eval_tag(_, {st, Str}) -> Str; -eval_tag(Map,{tag, Char}) -> +eval_tag(Map, {tag, Char}) -> maps:get(Char, Map, "undefined"). %% make_time(TimeUnit, Offset) -> %% make_time(TimeUnit, Offset, erlang:system_time(TimeUnit)). make_time(TimeUnit, Offset, TimeEpoch) -> - Res = calendar:system_time_to_rfc3339(TimeEpoch, - [{unit, TimeUnit}, {offset, Offset}]), - [Y1, Y2, Y3, Y4, $-, Mon1, Mon2, $-, D1, D2, _T, - H1, H2, $:, Min1, Min2, $:, S1, S2 | TimeStr] = Res, + Res = calendar:system_time_to_rfc3339( + TimeEpoch, + [{unit, TimeUnit}, {offset, Offset}] + ), + [ + Y1, + Y2, + Y3, + Y4, + $-, + Mon1, + Mon2, + $-, + D1, + D2, + _T, + H1, + H2, + $:, + Min1, + Min2, + $:, + S1, + S2 + | TimeStr + ] = Res, IsFractionChar = fun(C) -> C >= $0 andalso C =< $9 orelse C =:= $. end, {FractionStr, UtcOffset} = lists:splitwith(IsFractionChar, TimeStr), #result{ - year = [Y1, Y2, Y3, Y4] - , month = [Mon1, Mon2] - , day = [D1, D2] - , hour = [H1, H2] - , minute = [Min1, Min2] - , second = [S1, S2] ++ FractionStr - , zone = UtcOffset - }. - + year = [Y1, Y2, Y3, Y4], + month = [Mon1, Mon2], + day = [D1, D2], + hour = [H1, H2], + minute = [Min1, Min2], + second = [S1, S2] ++ FractionStr, + zone = UtcOffset + }. is_int_char(C) -> - C >= $0 andalso C =< $9 . + C >= $0 andalso C =< $9. is_symbol_char(C) -> - C =:= $- orelse C =:= $+ . + C =:= $- orelse C =:= $+. is_m_char(C) -> C =:= $:. 
-parse_char_with_fun(_, []) -> error(null_input); -parse_char_with_fun(ValidFun, [C|Other]) -> - Res = case erlang:is_function(ValidFun) of - true -> ValidFun(C); - false -> erlang:apply(emqx_rule_date, ValidFun, [C]) - end, +parse_char_with_fun(_, []) -> + error(null_input); +parse_char_with_fun(ValidFun, [C | Other]) -> + Res = + case erlang:is_function(ValidFun) of + true -> ValidFun(C); + false -> erlang:apply(emqx_rule_date, ValidFun, [C]) + end, case Res of true -> {C, Other}; - false -> error({unexpected,[C|Other]}) + false -> error({unexpected, [C | Other]}) end. -parse_string([], Input) -> {[], Input}; -parse_string([C|Other], Input) -> +parse_string([], Input) -> + {[], Input}; +parse_string([C | Other], Input) -> {C1, Input1} = parse_char_with_fun(fun(V) -> V =:= C end, Input), {Res, Input2} = parse_string(Other, Input1), - {[C1|Res], Input2}. + {[C1 | Res], Input2}. -parse_times(0, _, Input) -> {[], Input}; +parse_times(0, _, Input) -> + {[], Input}; parse_times(Times, Fun, Input) -> {C1, Input1} = parse_char_with_fun(Fun, Input), {Res, Input2} = parse_times((Times - 1), Fun, Input1), - {[C1|Res], Input2}. + {[C1 | Res], Input2}. parse_int_times(Times, Input) -> parse_times(Times, is_int_char, Input). @@ -162,33 +212,42 @@ parse_fraction(Input) -> parse_second(Input) -> {M, Input1} = parse_int_times(2, Input), {M1, Input2} = parse_fraction(Input1), - {M++M1, Input2}. + {M ++ M1, Input2}. parse_zone(Input) -> {S, Input1} = parse_char_with_fun(is_symbol_char, Input), {M, Input2} = parse_int_times(2, Input1), {C, Input3} = parse_char_with_fun(is_m_char, Input2), {V, Input4} = parse_int_times(2, Input3), - {[S|M++[C|V]], Input4}. + {[S | M ++ [C | V]], Input4}. 
-mlist1()-> +mlist1() -> maps:from_list( - [ {$H, fun(Input) -> parse_int_times(2, Input) end} %% %H Shows hour in 24-hour format [15] - , {$M, fun(Input) -> parse_int_times(2, Input) end} %% %M Displays minutes [00-59] - , {$S, fun(Input) -> parse_second(Input) end} %% %S Displays seconds [00-59] - , {$y, fun(Input) -> parse_int_times(4, Input) end} %% %y Displays year YYYY [2021] - , {$m, fun(Input) -> parse_int_times(2, Input) end} %% %m Displays the number of the month [01-12] - , {$d, fun(Input) -> parse_int_times(2, Input) end} %% %d Displays the number of the month [01-12] - , {$Z, fun(Input) -> parse_zone(Input) end} %% %Z Displays Time zone - ]). + %% %H Shows hour in 24-hour format [15] + [ + {$H, fun(Input) -> parse_int_times(2, Input) end}, + %% %M Displays minutes [00-59] + {$M, fun(Input) -> parse_int_times(2, Input) end}, + %% %S Displays seconds [00-59] + {$S, fun(Input) -> parse_second(Input) end}, + %% %y Displays year YYYY [2021] + {$y, fun(Input) -> parse_int_times(4, Input) end}, + %% %m Displays the number of the month [01-12] + {$m, fun(Input) -> parse_int_times(2, Input) end}, + %% %d Displays the number of the month [01-12] + {$d, fun(Input) -> parse_int_times(2, Input) end}, + %% %Z Displays Time zone + {$Z, fun(Input) -> parse_zone(Input) end} + ] + ). -update_result($H, Res, Str) -> Res#result{hour=Str}; -update_result($M, Res, Str) -> Res#result{minute=Str}; -update_result($S, Res, Str) -> Res#result{second=Str}; -update_result($y, Res, Str) -> Res#result{year=Str}; -update_result($m, Res, Str) -> Res#result{month=Str}; -update_result($d, Res, Str) -> Res#result{day=Str}; -update_result($Z, Res, Str) -> Res#result{zone=Str}. 
+update_result($H, Res, Str) -> Res#result{hour = Str}; +update_result($M, Res, Str) -> Res#result{minute = Str}; +update_result($S, Res, Str) -> Res#result{second = Str}; +update_result($y, Res, Str) -> Res#result{year = Str}; +update_result($m, Res, Str) -> Res#result{month = Str}; +update_result($d, Res, Str) -> Res#result{day = Str}; +update_result($Z, Res, Str) -> Res#result{zone = Str}. parse_tag(Res, {st, St}, InputString) -> {_A, B} = parse_string(St, InputString), @@ -199,12 +258,13 @@ parse_tag(Res, {tag, St}, InputString) -> NRes = update_result(St, Res, A), {NRes, B}. -parse_tags(Res, [], _) -> Res; -parse_tags(Res, [Tag|Others], InputString) -> +parse_tags(Res, [], _) -> + Res; +parse_tags(Res, [Tag | Others], InputString) -> {NRes, B} = parse_tag(Res, Tag, InputString), parse_tags(NRes, Others, B). parse_input(FormatString, InputString) -> - [Head|Other] = string:split(FormatString, "%", all), + [Head | Other] = string:split(FormatString, "%", all), R = create_tag([{st, Head}], Other), parse_tags(#result{}, R, InputString). diff --git a/apps/emqx_rule_engine/src/emqx_rule_engine.app.src b/apps/emqx_rule_engine/src/emqx_rule_engine.app.src index bec85690f..ef6665e76 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_engine.app.src +++ b/apps/emqx_rule_engine/src/emqx_rule_engine.app.src @@ -1,15 +1,17 @@ %% -*- mode: erlang -*- -{application, emqx_rule_engine, - [{description, "EMQX Rule Engine"}, - {vsn, "5.0.0"}, % strict semver, bump manually! - {modules, []}, - {registered, [emqx_rule_engine_sup, emqx_rule_engine]}, - {applications, [kernel,stdlib,rulesql,getopt]}, - {mod, {emqx_rule_engine_app, []}}, - {env, []}, - {licenses, ["Apache-2.0"]}, - {maintainers, ["EMQX Team "]}, - {links, [{"Homepage", "https://emqx.io/"}, - {"Github", "https://github.com/emqx/emqx-rule-engine"} - ]} - ]}. +{application, emqx_rule_engine, [ + {description, "EMQX Rule Engine"}, + % strict semver, bump manually! 
+ {vsn, "5.0.0"}, + {modules, []}, + {registered, [emqx_rule_engine_sup, emqx_rule_engine]}, + {applications, [kernel, stdlib, rulesql, getopt]}, + {mod, {emqx_rule_engine_app, []}}, + {env, []}, + {licenses, ["Apache-2.0"]}, + {maintainers, ["EMQX Team "]}, + {links, [ + {"Homepage", "https://emqx.io/"}, + {"Github", "https://github.com/emqx/emqx-rule-engine"} + ]} +]}. diff --git a/apps/emqx_rule_engine/src/emqx_rule_engine.erl b/apps/emqx_rule_engine/src/emqx_rule_engine.erl index 597daa378..b08964646 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_engine.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_engine.erl @@ -25,51 +25,56 @@ -export([start_link/0]). --export([ post_config_update/5 - , config_key_path/0 - ]). +-export([ + post_config_update/5, + config_key_path/0 +]). %% Rule Management --export([ load_rules/0 - ]). +-export([load_rules/0]). --export([ create_rule/1 - , insert_rule/1 - , update_rule/1 - , delete_rule/1 - , get_rule/1 - ]). +-export([ + create_rule/1, + insert_rule/1, + update_rule/1, + delete_rule/1, + get_rule/1 +]). --export([ get_rules/0 - , get_rules_for_topic/1 - , get_rules_with_same_event/1 - , get_rules_ordered_by_ts/0 - ]). +-export([ + get_rules/0, + get_rules_for_topic/1, + get_rules_with_same_event/1, + get_rules_ordered_by_ts/0 +]). %% exported for cluster_call --export([ do_delete_rule/1 - , do_insert_rule/1 - ]). +-export([ + do_delete_rule/1, + do_insert_rule/1 +]). --export([ load_hooks_for_rule/1 - , unload_hooks_for_rule/1 - , maybe_add_metrics_for_rule/1 - , clear_metrics_for_rule/1 - , reset_metrics_for_rule/1 - ]). +-export([ + load_hooks_for_rule/1, + unload_hooks_for_rule/1, + maybe_add_metrics_for_rule/1, + clear_metrics_for_rule/1, + reset_metrics_for_rule/1 +]). %% exported for `emqx_telemetry' -export([get_basic_usage_info/0]). %% gen_server Callbacks --export([ init/1 - , handle_call/3 - , handle_cast/2 - , handle_info/2 - , terminate/2 - , code_change/3 - ]). 
+-export([ + init/1, + handle_call/3, + handle_cast/2, + handle_info/2, + terminate/2, + code_change/3 +]). -define(RULE_ENGINE, ?MODULE). @@ -77,24 +82,25 @@ %% NOTE: This order cannot be changed! This is to make the metric working during relup. %% Append elements to this list to add new metrics. --define(METRICS, [ 'sql.matched' - , 'sql.passed' - , 'sql.failed' - , 'sql.failed.exception' - , 'sql.failed.no_result' - , 'outputs.total' - , 'outputs.success' - , 'outputs.failed' - , 'outputs.failed.out_of_service' - , 'outputs.failed.unknown' - ]). +-define(METRICS, [ + 'sql.matched', + 'sql.passed', + 'sql.failed', + 'sql.failed.exception', + 'sql.failed.no_result', + 'outputs.total', + 'outputs.success', + 'outputs.failed', + 'outputs.failed.out_of_service', + 'outputs.failed.unknown' +]). -define(RATE_METRICS, ['sql.matched']). config_key_path() -> [rule_engine, rules]. --spec(start_link() -> {ok, pid()} | ignore | {error, Reason :: term()}). +-spec start_link() -> {ok, pid()} | ignore | {error, Reason :: term()}. start_link() -> gen_server:start_link({local, ?RULE_ENGINE}, ?MODULE, [], []). 
@@ -102,17 +108,26 @@ start_link() -> %% The config handler for emqx_rule_engine %%------------------------------------------------------------------------------ post_config_update(_, _Req, NewRules, OldRules, _AppEnvs) -> - #{added := Added, removed := Removed, changed := Updated} - = emqx_map_lib:diff_maps(NewRules, OldRules), - maps_foreach(fun({Id, {_Old, New}}) -> + #{added := Added, removed := Removed, changed := Updated} = + emqx_map_lib:diff_maps(NewRules, OldRules), + maps_foreach( + fun({Id, {_Old, New}}) -> {ok, _} = update_rule(New#{id => bin(Id)}) - end, Updated), - maps_foreach(fun({Id, _Rule}) -> + end, + Updated + ), + maps_foreach( + fun({Id, _Rule}) -> ok = delete_rule(bin(Id)) - end, Removed), - maps_foreach(fun({Id, Rule}) -> + end, + Removed + ), + maps_foreach( + fun({Id, Rule}) -> {ok, _} = create_rule(Rule#{id => bin(Id)}) - end, Added), + end, + Added + ), {ok, get_rules()}. %%------------------------------------------------------------------------------ @@ -121,9 +136,12 @@ post_config_update(_, _Req, NewRules, OldRules, _AppEnvs) -> -spec load_rules() -> ok. load_rules() -> - maps_foreach(fun({Id, Rule}) -> + maps_foreach( + fun({Id, Rule}) -> {ok, _} = create_rule(Rule#{id => bin(Id)}) - end, emqx:get_config([rule_engine, rules], #{})). + end, + emqx:get_config([rule_engine, rules], #{}) + ). -spec create_rule(map()) -> {ok, rule()} | {error, term()}. create_rule(Params = #{id := RuleId}) when is_binary(RuleId) -> @@ -141,11 +159,11 @@ update_rule(Params = #{id := RuleId}) when is_binary(RuleId) -> parse_and_insert(Params, CreatedAt) end. --spec(delete_rule(RuleId :: rule_id()) -> ok). +-spec delete_rule(RuleId :: rule_id()) -> ok. delete_rule(RuleId) when is_binary(RuleId) -> gen_server:call(?RULE_ENGINE, {delete_rule, RuleId}, ?T_CALL). --spec(insert_rule(Rule :: rule()) -> ok). +-spec insert_rule(Rule :: rule()) -> ok. insert_rule(Rule) -> gen_server:call(?RULE_ENGINE, {insert_rule, Rule}, ?T_CALL). 
@@ -153,30 +171,39 @@ insert_rule(Rule) -> %% Rule Management %%------------------------------------------------------------------------------ --spec(get_rules() -> [rule()]). +-spec get_rules() -> [rule()]. get_rules() -> get_all_records(?RULE_TAB). get_rules_ordered_by_ts() -> - lists:sort(fun(#{created_at := CreatedA}, #{created_at := CreatedB}) -> + lists:sort( + fun(#{created_at := CreatedA}, #{created_at := CreatedB}) -> CreatedA =< CreatedB - end, get_rules()). + end, + get_rules() + ). --spec(get_rules_for_topic(Topic :: binary()) -> [rule()]). +-spec get_rules_for_topic(Topic :: binary()) -> [rule()]. get_rules_for_topic(Topic) -> - [Rule || Rule = #{from := From} <- get_rules(), - emqx_plugin_libs_rule:can_topic_match_oneof(Topic, From)]. + [ + Rule + || Rule = #{from := From} <- get_rules(), + emqx_plugin_libs_rule:can_topic_match_oneof(Topic, From) + ]. --spec(get_rules_with_same_event(Topic :: binary()) -> [rule()]). +-spec get_rules_with_same_event(Topic :: binary()) -> [rule()]. get_rules_with_same_event(Topic) -> EventName = emqx_rule_events:event_name(Topic), - [Rule || Rule = #{from := From} <- get_rules(), - lists:any(fun(T) -> is_of_event_name(EventName, T) end, From)]. + [ + Rule + || Rule = #{from := From} <- get_rules(), + lists:any(fun(T) -> is_of_event_name(EventName, T) end, From) + ]. is_of_event_name(EventName, Topic) -> EventName =:= emqx_rule_events:event_name(Topic). --spec(get_rule(Id :: rule_id()) -> {ok, rule()} | not_found). +-spec get_rule(Id :: rule_id()) -> {ok, rule()} | not_found. get_rule(Id) -> case ets:lookup(?RULE_TAB, Id) of [{Id, Rule}] -> {ok, Rule#{id => Id}}; @@ -188,7 +215,8 @@ load_hooks_for_rule(#{from := Topics}) -> maybe_add_metrics_for_rule(Id) -> case emqx_plugin_libs_metrics:has_metrics(rule_metrics, Id) of - true -> ok; + true -> + ok; false -> ok = emqx_plugin_libs_metrics:create_metrics(rule_metrics, Id, ?METRICS, ?RATE_METRICS) end. 
@@ -196,86 +224,101 @@ maybe_add_metrics_for_rule(Id) -> clear_metrics_for_rule(Id) -> ok = emqx_plugin_libs_metrics:clear_metrics(rule_metrics, Id). --spec(reset_metrics_for_rule(rule_id()) -> ok). +-spec reset_metrics_for_rule(rule_id()) -> ok. reset_metrics_for_rule(Id) -> emqx_plugin_libs_metrics:reset_metrics(rule_metrics, Id). unload_hooks_for_rule(#{id := Id, from := Topics}) -> - lists:foreach(fun(Topic) -> - case get_rules_with_same_event(Topic) of - [#{id := Id0}] when Id0 == Id -> %% we are now deleting the last rule - emqx_rule_events:unload(Topic); - _ -> ok - end - end, Topics). + lists:foreach( + fun(Topic) -> + case get_rules_with_same_event(Topic) of + %% we are now deleting the last rule + [#{id := Id0}] when Id0 == Id -> + emqx_rule_events:unload(Topic); + _ -> + ok + end + end, + Topics + ). %%------------------------------------------------------------------------------ %% Telemetry helper functions %%------------------------------------------------------------------------------ --spec get_basic_usage_info() -> #{ num_rules => non_neg_integer() - , referenced_bridges => - #{ BridgeType => non_neg_integer() - } - } - when BridgeType :: atom(). +-spec get_basic_usage_info() -> + #{ + num_rules => non_neg_integer(), + referenced_bridges => + #{BridgeType => non_neg_integer()} + } +when + BridgeType :: atom(). 
get_basic_usage_info() -> try Rules = get_rules(), EnabledRules = lists:filter( - fun(#{enable := Enabled}) -> Enabled end, - Rules), + fun(#{enable := Enabled}) -> Enabled end, + Rules + ), NumRules = length(EnabledRules), ReferencedBridges = lists:foldl( - fun(#{outputs := Outputs}, Acc) -> - BridgeIDs = lists:filter(fun is_binary/1, Outputs), - tally_referenced_bridges(BridgeIDs, Acc) - end, - #{}, - EnabledRules), - #{ num_rules => NumRules - , referenced_bridges => ReferencedBridges - } + fun(#{outputs := Outputs}, Acc) -> + BridgeIDs = lists:filter(fun is_binary/1, Outputs), + tally_referenced_bridges(BridgeIDs, Acc) + end, + #{}, + EnabledRules + ), + #{ + num_rules => NumRules, + referenced_bridges => ReferencedBridges + } catch _:_ -> - #{ num_rules => 0 - , referenced_bridges => #{} - } + #{ + num_rules => 0, + referenced_bridges => #{} + } end. - tally_referenced_bridges(BridgeIDs, Acc0) -> lists:foldl( - fun(BridgeID, Acc) -> - {BridgeType, _BridgeName} = emqx_bridge:parse_bridge_id(BridgeID), - maps:update_with( - BridgeType, - fun(X) -> X + 1 end, - 1, - Acc) - end, - Acc0, - BridgeIDs). + fun(BridgeID, Acc) -> + {BridgeType, _BridgeName} = emqx_bridge:parse_bridge_id(BridgeID), + maps:update_with( + BridgeType, + fun(X) -> X + 1 end, + 1, + Acc + ) + end, + Acc0, + BridgeIDs + ). %%------------------------------------------------------------------------------ %% gen_server callbacks %%------------------------------------------------------------------------------ init([]) -> - _TableId = ets:new(?KV_TAB, [named_table, set, public, {write_concurrency, true}, - {read_concurrency, true}]), + _TableId = ets:new(?KV_TAB, [ + named_table, + set, + public, + {write_concurrency, true}, + {read_concurrency, true} + ]), {ok, #{}}. 
handle_call({insert_rule, Rule}, _From, State) -> do_insert_rule(Rule), {reply, ok, State}; - handle_call({delete_rule, Rule}, _From, State) -> do_delete_rule(Rule), {reply, ok, State}; - handle_call(Req, _From, State) -> ?SLOG(error, #{msg => "unexpected_call", request => Req}), {reply, ignored, State}. @@ -321,7 +364,8 @@ parse_and_insert(Params = #{id := RuleId, sql := Sql, outputs := Outputs}, Creat }, ok = insert_rule(Rule), {ok, Rule}; - {error, Reason} -> {error, Reason} + {error, Reason} -> + {error, Reason} end. do_insert_rule(#{id := Id} = Rule) -> @@ -337,7 +381,8 @@ do_delete_rule(RuleId) -> ok = clear_metrics_for_rule(RuleId), true = ets:delete(?RULE_TAB, RuleId), ok; - not_found -> ok + not_found -> + ok end. parse_outputs(Outputs) -> diff --git a/apps/emqx_rule_engine/src/emqx_rule_engine_api.erl b/apps/emqx_rule_engine/src/emqx_rule_engine_api.erl index 7d2e34f5b..32b098311 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_engine_api.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_engine_api.erl @@ -32,20 +32,33 @@ -export(['/rule_events'/2, '/rule_test'/2, '/rules'/2, '/rules/:id'/2, '/rules/:id/reset_metrics'/2]). -define(ERR_NO_RULE(ID), list_to_binary(io_lib:format("Rule ~ts Not Found", [(ID)]))). --define(ERR_BADARGS(REASON), - begin - R0 = err_msg(REASON), - <<"Bad Arguments: ", R0/binary>> - end). +-define(ERR_BADARGS(REASON), begin + R0 = err_msg(REASON), + <<"Bad Arguments: ", R0/binary>> +end). -define(CHECK_PARAMS(PARAMS, TAG, EXPR), case emqx_rule_api_schema:check_params(PARAMS, TAG) of {ok, CheckedParams} -> EXPR; {error, REASON} -> {400, #{code => 'BAD_REQUEST', message => ?ERR_BADARGS(REASON)}} - end). --define(METRICS(MATCH, PASS, FAIL, FAIL_EX, FAIL_NORES, O_TOTAL, O_FAIL, O_FAIL_OOS, - O_FAIL_UNKNOWN, O_SUCC, RATE, RATE_MAX, RATE_5), + end +). 
+-define(METRICS( + MATCH, + PASS, + FAIL, + FAIL_EX, + FAIL_NORES, + O_TOTAL, + O_FAIL, + O_FAIL_OOS, + O_FAIL_UNKNOWN, + O_SUCC, + RATE, + RATE_MAX, + RATE_5 +), #{ 'sql.matched' => MATCH, 'sql.passed' => PASS, @@ -60,9 +73,23 @@ 'sql.matched.rate' => RATE, 'sql.matched.rate.max' => RATE_MAX, 'sql.matched.rate.last5m' => RATE_5 - }). --define(metrics(MATCH, PASS, FAIL, FAIL_EX, FAIL_NORES, O_TOTAL, O_FAIL, O_FAIL_OOS, - O_FAIL_UNKNOWN, O_SUCC, RATE, RATE_MAX, RATE_5), + } +). +-define(metrics( + MATCH, + PASS, + FAIL, + FAIL_EX, + FAIL_NORES, + O_TOTAL, + O_FAIL, + O_FAIL_OOS, + O_FAIL_UNKNOWN, + O_SUCC, + RATE, + RATE_MAX, + RATE_5 +), #{ 'sql.matched' := MATCH, 'sql.passed' := PASS, @@ -77,7 +104,8 @@ 'sql.matched.rate' := RATE, 'sql.matched.rate.max' := RATE_MAX, 'sql.matched.rate.last5m' := RATE_5 - }). + } +). namespace() -> "rule". @@ -107,7 +135,8 @@ schema("/rules") -> summary => <<"List Rules">>, responses => #{ 200 => mk(array(rule_info_schema()), #{desc => ?DESC("desc9")}) - }}, + } + }, post => #{ tags => [<<"rules">>], description => ?DESC("api2"), @@ -116,9 +145,9 @@ schema("/rules") -> responses => #{ 400 => error_schema('BAD_REQUEST', "Invalid Parameters"), 201 => rule_info_schema() - }} + } + } }; - schema("/rule_events") -> #{ 'operationId' => '/rule_events', @@ -131,7 +160,6 @@ schema("/rule_events") -> } } }; - schema("/rules/:id") -> #{ 'operationId' => '/rules/:id', @@ -166,7 +194,6 @@ schema("/rules/:id") -> } } }; - schema("/rules/:id/reset_metrics") -> #{ 'operationId' => '/rules/:id/reset_metrics', @@ -181,7 +208,6 @@ schema("/rules/:id/reset_metrics") -> } } }; - schema("/rule_test") -> #{ 'operationId' => '/rule_test', @@ -206,7 +232,7 @@ param_path_id() -> %%------------------------------------------------------------------------------ %% To get around the hocon bug, we replace crlf with spaces -replace_sql_clrf(#{ <<"sql">> := SQL } = Params) -> +replace_sql_clrf(#{<<"sql">> := SQL} = Params) -> NewSQL = re:replace(SQL, "[\r\n]", " 
", [{return, binary}, global]), Params#{<<"sql">> => NewSQL}. @@ -216,7 +242,6 @@ replace_sql_clrf(#{ <<"sql">> := SQL } = Params) -> '/rules'(get, _Params) -> Records = emqx_rule_engine:get_rules_ordered_by_ts(), {200, format_rule_resp(Records)}; - '/rules'(post, #{body := Params0}) -> case maps:get(<<"id">>, Params0, list_to_binary(emqx_misc:gen_id(8))) of <<>> -> @@ -233,20 +258,29 @@ replace_sql_clrf(#{ <<"sql">> := SQL } = Params) -> [Rule] = get_one_rule(AllRules, Id), {201, format_rule_resp(Rule)}; {error, Reason} -> - ?SLOG(error, #{msg => "create_rule_failed", - id => Id, reason => Reason}), + ?SLOG(error, #{ + msg => "create_rule_failed", + id => Id, + reason => Reason + }), {400, #{code => 'BAD_REQUEST', message => ?ERR_BADARGS(Reason)}} end end end. '/rule_test'(post, #{body := Params}) -> - ?CHECK_PARAMS(Params, rule_test, case emqx_rule_sqltester:test(CheckedParams) of - {ok, Result} -> {200, Result}; - {error, {parse_error, Reason}} -> - {400, #{code => 'BAD_REQUEST', message => err_msg(Reason)}}; - {error, nomatch} -> {412, #{code => 'NOT_MATCH', message => <<"SQL Not Match">>}} - end). + ?CHECK_PARAMS( + Params, + rule_test, + case emqx_rule_sqltester:test(CheckedParams) of + {ok, Result} -> + {200, Result}; + {error, {parse_error, Reason}} -> + {400, #{code => 'BAD_REQUEST', message => err_msg(Reason)}}; + {error, nomatch} -> + {412, #{code => 'NOT_MATCH', message => <<"SQL Not Match">>}} + end + ). 
'/rules/:id'(get, #{bindings := #{id := Id}}) -> case emqx_rule_engine:get_rule(Id) of @@ -255,7 +289,6 @@ replace_sql_clrf(#{ <<"sql">> := SQL } = Params) -> not_found -> {404, #{code => 'NOT_FOUND', message => <<"Rule Id Not Found">>}} end; - '/rules/:id'(put, #{bindings := #{id := Id}, body := Params0}) -> Params = filter_out_request_body(Params0), ConfPath = emqx_rule_engine:config_key_path() ++ [Id], @@ -264,25 +297,35 @@ replace_sql_clrf(#{ <<"sql">> := SQL } = Params) -> [Rule] = get_one_rule(AllRules, Id), {200, format_rule_resp(Rule)}; {error, Reason} -> - ?SLOG(error, #{msg => "update_rule_failed", - id => Id, reason => Reason}), + ?SLOG(error, #{ + msg => "update_rule_failed", + id => Id, + reason => Reason + }), {400, #{code => 'BAD_REQUEST', message => ?ERR_BADARGS(Reason)}} end; - '/rules/:id'(delete, #{bindings := #{id := Id}}) -> ConfPath = emqx_rule_engine:config_key_path() ++ [Id], case emqx_conf:remove(ConfPath, #{override_to => cluster}) of - {ok, _} -> {204}; + {ok, _} -> + {204}; {error, Reason} -> - ?SLOG(error, #{msg => "delete_rule_failed", - id => Id, reason => Reason}), + ?SLOG(error, #{ + msg => "delete_rule_failed", + id => Id, + reason => Reason + }), {500, #{code => 'INTERNAL_ERROR', message => ?ERR_BADARGS(Reason)}} end. '/rules/:id/reset_metrics'(put, #{bindings := #{id := RuleId}}) -> case emqx_rule_engine_proto_v1:reset_metrics(RuleId) of - {ok, _TxnId, _Result} -> {200, <<"Reset Success">>}; - Failed -> {400, #{code => 'BAD_REQUEST', - message => err_msg(Failed)}} + {ok, _TxnId, _Result} -> + {200, <<"Reset Success">>}; + Failed -> + {400, #{ + code => 'BAD_REQUEST', + message => err_msg(Failed) + }} end. %%------------------------------------------------------------------------------ @@ -292,29 +335,31 @@ replace_sql_clrf(#{ <<"sql">> := SQL } = Params) -> err_msg(Msg) -> list_to_binary(io_lib:format("~0p", [Msg])). 
- format_rule_resp(Rules) when is_list(Rules) -> [format_rule_resp(R) || R <- Rules]; - -format_rule_resp(#{ id := Id, name := Name, - created_at := CreatedAt, - from := Topics, - outputs := Output, - sql := SQL, - enable := Enable, - description := Descr}) -> +format_rule_resp(#{ + id := Id, + name := Name, + created_at := CreatedAt, + from := Topics, + outputs := Output, + sql := SQL, + enable := Enable, + description := Descr +}) -> NodeMetrics = get_rule_metrics(Id), - #{id => Id, - name => Name, - from => Topics, - outputs => format_output(Output), - sql => SQL, - metrics => aggregate_metrics(NodeMetrics), - node_metrics => NodeMetrics, - enable => Enable, - created_at => format_datetime(CreatedAt, millisecond), - description => Descr - }. + #{ + id => Id, + name => Name, + from => Topics, + outputs => format_output(Output), + sql => SQL, + metrics => aggregate_metrics(NodeMetrics), + node_metrics => NodeMetrics, + enable => Enable, + created_at => format_datetime(CreatedAt, millisecond), + description => Descr + }. format_datetime(Timestamp, Unit) -> list_to_binary(calendar:system_time_to_rfc3339(Timestamp, [{unit, Unit}])). @@ -323,62 +368,133 @@ format_output(Outputs) -> [do_format_output(Out) || Out <- Outputs]. do_format_output(#{mod := Mod, func := Func, args := Args}) -> - #{function => printable_function_name(Mod, Func), - args => maps:remove(preprocessed_tmpl, Args)}; + #{ + function => printable_function_name(Mod, Func), + args => maps:remove(preprocessed_tmpl, Args) + }; do_format_output(BridgeChannelId) when is_binary(BridgeChannelId) -> BridgeChannelId. printable_function_name(emqx_rule_outputs, Func) -> Func; printable_function_name(Mod, Func) -> - list_to_binary(lists:concat([Mod,":",Func])). + list_to_binary(lists:concat([Mod, ":", Func])). 
get_rule_metrics(Id) -> - Format = fun (Node, #{ + Format = fun( + Node, + #{ counters := - #{'sql.matched' := Matched, 'sql.passed' := Passed, 'sql.failed' := Failed, - 'sql.failed.exception' := FailedEx, - 'sql.failed.no_result' := FailedNoRes, - 'outputs.total' := OTotal, - 'outputs.failed' := OFailed, - 'outputs.failed.out_of_service' := OFailedOOS, - 'outputs.failed.unknown' := OFailedUnknown, - 'outputs.success' := OFailedSucc - }, + #{ + 'sql.matched' := Matched, + 'sql.passed' := Passed, + 'sql.failed' := Failed, + 'sql.failed.exception' := FailedEx, + 'sql.failed.no_result' := FailedNoRes, + 'outputs.total' := OTotal, + 'outputs.failed' := OFailed, + 'outputs.failed.out_of_service' := OFailedOOS, + 'outputs.failed.unknown' := OFailedUnknown, + 'outputs.success' := OFailedSucc + }, rate := - #{'sql.matched' := - #{current := Current, max := Max, last5m := Last5M} - }}) -> - #{ metrics => ?METRICS(Matched, Passed, Failed, FailedEx, FailedNoRes, - OTotal, OFailed, OFailedOOS, OFailedUnknown, OFailedSucc, Current, Max, Last5M) - , node => Node - } + #{ + 'sql.matched' := + #{current := Current, max := Max, last5m := Last5M} + } + } + ) -> + #{ + metrics => ?METRICS( + Matched, + Passed, + Failed, + FailedEx, + FailedNoRes, + OTotal, + OFailed, + OFailedOOS, + OFailedUnknown, + OFailedSucc, + Current, + Max, + Last5M + ), + node => Node + } end, - [Format(Node, emqx_plugin_libs_proto_v1:get_metrics(Node, rule_metrics, Id)) - || Node <- mria_mnesia:running_nodes()]. + [ + Format(Node, emqx_plugin_libs_proto_v1:get_metrics(Node, rule_metrics, Id)) + || Node <- mria_mnesia:running_nodes() + ]. 
aggregate_metrics(AllMetrics) -> InitMetrics = ?METRICS(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), - lists:foldl(fun - (#{metrics := ?metrics(Match1, Passed1, Failed1, FailedEx1, FailedNoRes1, - OTotal1, OFailed1, OFailedOOS1, OFailedUnknown1, OFailedSucc1, - Rate1, RateMax1, Rate5m1)}, - ?metrics(Match0, Passed0, Failed0, FailedEx0, FailedNoRes0, - OTotal0, OFailed0, OFailedOOS0, OFailedUnknown0, OFailedSucc0, - Rate0, RateMax0, Rate5m0)) -> - ?METRICS(Match1 + Match0, Passed1 + Passed0, Failed1 + Failed0, - FailedEx1 + FailedEx0, FailedNoRes1 + FailedNoRes0, - OTotal1 + OTotal0, OFailed1 + OFailed0, - OFailedOOS1 + OFailedOOS0, - OFailedUnknown1 + OFailedUnknown0, - OFailedSucc1 + OFailedSucc0, - Rate1 + Rate0, RateMax1 + RateMax0, Rate5m1 + Rate5m0) - end, InitMetrics, AllMetrics). + lists:foldl( + fun( + #{ + metrics := ?metrics( + Match1, + Passed1, + Failed1, + FailedEx1, + FailedNoRes1, + OTotal1, + OFailed1, + OFailedOOS1, + OFailedUnknown1, + OFailedSucc1, + Rate1, + RateMax1, + Rate5m1 + ) + }, + ?metrics( + Match0, + Passed0, + Failed0, + FailedEx0, + FailedNoRes0, + OTotal0, + OFailed0, + OFailedOOS0, + OFailedUnknown0, + OFailedSucc0, + Rate0, + RateMax0, + Rate5m0 + ) + ) -> + ?METRICS( + Match1 + Match0, + Passed1 + Passed0, + Failed1 + Failed0, + FailedEx1 + FailedEx0, + FailedNoRes1 + FailedNoRes0, + OTotal1 + OTotal0, + OFailed1 + OFailed0, + OFailedOOS1 + OFailedOOS0, + OFailedUnknown1 + OFailedUnknown0, + OFailedSucc1 + OFailedSucc0, + Rate1 + Rate0, + RateMax1 + RateMax0, + Rate5m1 + Rate5m0 + ) + end, + InitMetrics, + AllMetrics + ). get_one_rule(AllRules, Id) -> [R || R = #{id := Id0} <- AllRules, Id0 == Id]. filter_out_request_body(Conf) -> - ExtraConfs = [<<"id">>, <<"status">>, <<"node_status">>, <<"node_metrics">>, - <<"metrics">>, <<"node">>], + ExtraConfs = [ + <<"id">>, + <<"status">>, + <<"node_status">>, + <<"node_metrics">>, + <<"metrics">>, + <<"node">> + ], maps:without(ExtraConfs, Conf). 
diff --git a/apps/emqx_rule_engine/src/emqx_rule_engine_schema.erl b/apps/emqx_rule_engine/src/emqx_rule_engine_schema.erl index 005d78c7f..fd4cb8e6e 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_engine_schema.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_engine_schema.erl @@ -21,96 +21,140 @@ -behaviour(hocon_schema). --export([ namespace/0 - , roots/0 - , fields/1 - , desc/1 - ]). +-export([ + namespace/0, + roots/0, + fields/1, + desc/1 +]). --export([ validate_sql/1 - ]). +-export([validate_sql/1]). namespace() -> rule_engine. roots() -> ["rule_engine"]. fields("rule_engine") -> - [ {ignore_sys_message, sc(boolean(), #{default => true, desc => ?DESC("rule_engine_ignore_sys_message") - })} - , {rules, sc(hoconsc:map("id", ref("rules")), #{desc => ?DESC("rule_engine_rules"), default => #{}})} + [ + {ignore_sys_message, + sc(boolean(), #{default => true, desc => ?DESC("rule_engine_ignore_sys_message")})}, + {rules, + sc(hoconsc:map("id", ref("rules")), #{ + desc => ?DESC("rule_engine_rules"), default => #{} + })} ]; - fields("rules") -> - [ rule_name() - , {"sql", sc(binary(), - #{ desc => ?DESC("rules_sql") - , example => "SELECT * FROM \"test/topic\" WHERE payload.x = 1" - , required => true - , validator => fun ?MODULE:validate_sql/1 - })} - , {"outputs", sc(hoconsc:array(hoconsc:union(outputs())), - #{ desc => ?DESC("rules_outputs") - , default => [] - , example => [ - <<"http:my_http_bridge">>, - #{function => republish, args => #{ - topic => <<"t/1">>, payload => <<"${payload}">>}}, - #{function => console} - ] - })} - , {"enable", sc(boolean(), #{desc => ?DESC("rules_enable"), default => true})} - , {"description", sc(binary(), - #{ desc => ?DESC("rules_description") - , example => "Some description" - , default => <<>> - })} + [ + rule_name(), + {"sql", + sc( + binary(), + #{ + desc => ?DESC("rules_sql"), + example => "SELECT * FROM \"test/topic\" WHERE payload.x = 1", + required => true, + validator => fun ?MODULE:validate_sql/1 + } + )}, + {"outputs", 
+ sc( + hoconsc:array(hoconsc:union(outputs())), + #{ + desc => ?DESC("rules_outputs"), + default => [], + example => [ + <<"http:my_http_bridge">>, + #{ + function => republish, + args => #{ + topic => <<"t/1">>, payload => <<"${payload}">> + } + }, + #{function => console} + ] + } + )}, + {"enable", sc(boolean(), #{desc => ?DESC("rules_enable"), default => true})}, + {"description", + sc( + binary(), + #{ + desc => ?DESC("rules_description"), + example => "Some description", + default => <<>> + } + )} ]; - fields("builtin_output_republish") -> - [ {function, sc(republish, #{desc => ?DESC("republish_function")})} - , {args, sc(ref("republish_args"), #{default => #{}})} + [ + {function, sc(republish, #{desc => ?DESC("republish_function")})}, + {args, sc(ref("republish_args"), #{default => #{}})} ]; - fields("builtin_output_console") -> - [ {function, sc(console, #{desc => ?DESC("console_function")})} - %% we may support some args for the console output in the future - %, {args, sc(map(), #{desc => "The arguments of the built-in 'console' output", - % default => #{}})} + [ + {function, sc(console, #{desc => ?DESC("console_function")})} + %% we may support some args for the console output in the future + %, {args, sc(map(), #{desc => "The arguments of the built-in 'console' output", + % default => #{}})} ]; - fields("user_provided_function") -> - [ {function, sc(binary(), - #{ desc => ?DESC("user_provided_function_function") - , required => true - , example => "module:function" - })} - , {args, sc(map(), - #{ desc => ?DESC("user_provided_function_args") - , default => #{} - })} + [ + {function, + sc( + binary(), + #{ + desc => ?DESC("user_provided_function_function"), + required => true, + example => "module:function" + } + )}, + {args, + sc( + map(), + #{ + desc => ?DESC("user_provided_function_args"), + default => #{} + } + )} ]; - fields("republish_args") -> - [ {topic, sc(binary(), - #{ desc => ?DESC("republish_args_topic") - , required => true - , example => 
<<"a/1">> - })} - , {qos, sc(qos(), - #{ desc => ?DESC("republish_args_qos") - , default => <<"${qos}">> - , example => <<"${qos}">> - })} - , {retain, sc(hoconsc:union([binary(), boolean()]), - #{ desc => ?DESC("republish_args_retain") - , default => <<"${retain}">> - , example => <<"${retain}">> - })} - , {payload, sc(binary(), - #{ desc => ?DESC("republish_args_payload") - , default => <<"${payload}">> - , example => <<"${payload}">> - })} + [ + {topic, + sc( + binary(), + #{ + desc => ?DESC("republish_args_topic"), + required => true, + example => <<"a/1">> + } + )}, + {qos, + sc( + qos(), + #{ + desc => ?DESC("republish_args_qos"), + default => <<"${qos}">>, + example => <<"${qos}">> + } + )}, + {retain, + sc( + hoconsc:union([binary(), boolean()]), + #{ + desc => ?DESC("republish_args_retain"), + default => <<"${retain}">>, + example => <<"${retain}">> + } + )}, + {payload, + sc( + binary(), + #{ + desc => ?DESC("republish_args_payload"), + default => <<"${payload}">>, + example => <<"${payload}">> + } + )} ]. desc("rule_engine") -> @@ -129,18 +173,23 @@ desc(_) -> undefined. rule_name() -> - {"name", sc(binary(), - #{ desc => ?DESC("rules_name") - , default => "" - , required => true - , example => "foo" - })}. + {"name", + sc( + binary(), + #{ + desc => ?DESC("rules_name"), + default => "", + required => true, + example => "foo" + } + )}. outputs() -> - [ binary() - , ref("builtin_output_republish") - , ref("builtin_output_console") - , ref("user_provided_function") + [ + binary(), + ref("builtin_output_republish"), + ref("builtin_output_console"), + ref("user_provided_function") ]. qos() -> diff --git a/apps/emqx_rule_engine/src/emqx_rule_engine_sup.erl b/apps/emqx_rule_engine/src/emqx_rule_engine_sup.erl index 033e8f04f..621766945 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_engine_sup.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_engine_sup.erl @@ -28,11 +28,13 @@ start_link() -> supervisor:start_link({local, ?MODULE}, ?MODULE, []). 
init([]) -> - Registry = #{id => emqx_rule_engine, - start => {emqx_rule_engine, start_link, []}, - restart => permanent, - shutdown => 5000, - type => worker, - modules => [emqx_rule_engine]}, + Registry = #{ + id => emqx_rule_engine, + start => {emqx_rule_engine, start_link, []}, + restart => permanent, + shutdown => 5000, + type => worker, + modules => [emqx_rule_engine] + }, Metrics = emqx_plugin_libs_metrics:child_spec(rule_metrics), {ok, {{one_for_one, 10, 10}, [Registry, Metrics]}}. diff --git a/apps/emqx_rule_engine/src/emqx_rule_events.erl b/apps/emqx_rule_engine/src/emqx_rule_events.erl index 91c90f3e2..3070c054e 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_events.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_events.erl @@ -20,72 +20,82 @@ -include_lib("emqx/include/emqx.hrl"). -include_lib("emqx/include/logger.hrl"). +-export([ + reload/0, + load/1, + unload/0, + unload/1, + event_names/0, + event_name/1, + event_topic/1, + eventmsg_publish/1 +]). --export([ reload/0 - , load/1 - , unload/0 - , unload/1 - , event_names/0 - , event_name/1 - , event_topic/1 - , eventmsg_publish/1 - ]). +-export([ + on_client_connected/3, + on_client_disconnected/4, + on_client_connack/4, + on_client_check_authz_complete/6, + on_session_subscribed/4, + on_session_unsubscribed/4, + on_message_publish/2, + on_message_dropped/4, + on_message_delivered/3, + on_message_acked/3, + on_delivery_dropped/4, + on_bridge_message_received/2 +]). --export([ on_client_connected/3 - , on_client_disconnected/4 - , on_client_connack/4 - , on_client_check_authz_complete/6 - , on_session_subscribed/4 - , on_session_unsubscribed/4 - , on_message_publish/2 - , on_message_dropped/4 - , on_message_delivered/3 - , on_message_acked/3 - , on_delivery_dropped/4 - , on_bridge_message_received/2 - ]). - --export([ event_info/0 - , columns/1 - , columns_with_exam/1 - ]). +-export([ + event_info/0, + columns/1, + columns_with_exam/1 +]). -ifdef(TEST). 
--export([ reason/1 - , hook_fun/1 - , printable_maps/1 - ]). +-export([ + reason/1, + hook_fun/1, + printable_maps/1 +]). -endif. -elvis([{elvis_style, dont_repeat_yourself, disable}]). event_names() -> - [ 'client.connected' - , 'client.disconnected' - , 'client.connack' - , 'client.check_authz_complete' - , 'session.subscribed' - , 'session.unsubscribed' - , 'message.publish' - , 'message.delivered' - , 'message.acked' - , 'message.dropped' - , 'delivery.dropped' + [ + 'client.connected', + 'client.disconnected', + 'client.connack', + 'client.check_authz_complete', + 'session.subscribed', + 'session.unsubscribed', + 'message.publish', + 'message.delivered', + 'message.acked', + 'message.dropped', + 'delivery.dropped' ]. reload() -> - lists:foreach(fun(Rule) -> + lists:foreach( + fun(Rule) -> ok = emqx_rule_engine:load_hooks_for_rule(Rule) - end, emqx_rule_engine:get_rules()). + end, + emqx_rule_engine:get_rules() + ). load(Topic) -> HookPoint = event_name(Topic), emqx_hooks:put(HookPoint, {?MODULE, hook_fun(HookPoint), [#{event_topic => Topic}]}). unload() -> - lists:foreach(fun(HookPoint) -> + lists:foreach( + fun(HookPoint) -> emqx_hooks:del(HookPoint, {?MODULE, hook_fun(HookPoint)}) - end, event_names()). + end, + event_names() + ). unload(Topic) -> HookPoint = event_name(Topic), @@ -96,7 +106,8 @@ unload(Topic) -> %%-------------------------------------------------------------------- on_message_publish(Message = #message{topic = Topic}, _Env) -> case ignore_sys_message(Message) of - true -> ok; + true -> + ok; false -> case emqx_rule_engine:get_rules_for_topic(Topic) of [] -> ok; @@ -109,70 +120,108 @@ on_bridge_message_received(Message, Env = #{event_topic := BridgeTopic}) -> apply_event(BridgeTopic, fun() -> with_basic_columns(BridgeTopic, Message) end, Env). on_client_connected(ClientInfo, ConnInfo, Env) -> - apply_event('client.connected', - fun() -> eventmsg_connected(ClientInfo, ConnInfo) end, Env). 
+ apply_event( + 'client.connected', + fun() -> eventmsg_connected(ClientInfo, ConnInfo) end, + Env + ). on_client_connack(ConnInfo, Reason, _, Env) -> - apply_event('client.connack', - fun() -> eventmsg_connack(ConnInfo, Reason) end, Env). + apply_event( + 'client.connack', + fun() -> eventmsg_connack(ConnInfo, Reason) end, + Env + ). on_client_check_authz_complete(ClientInfo, PubSub, Topic, Result, AuthzSource, Env) -> - apply_event('client.check_authz_complete', - fun() -> eventmsg_check_authz_complete(ClientInfo, - PubSub, - Topic, - Result, - AuthzSource) end, Env). + apply_event( + 'client.check_authz_complete', + fun() -> + eventmsg_check_authz_complete( + ClientInfo, + PubSub, + Topic, + Result, + AuthzSource + ) + end, + Env + ). on_client_disconnected(ClientInfo, Reason, ConnInfo, Env) -> - apply_event('client.disconnected', - fun() -> eventmsg_disconnected(ClientInfo, ConnInfo, Reason) end, Env). + apply_event( + 'client.disconnected', + fun() -> eventmsg_disconnected(ClientInfo, ConnInfo, Reason) end, + Env + ). on_session_subscribed(ClientInfo, Topic, SubOpts, Env) -> - apply_event('session.subscribed', + apply_event( + 'session.subscribed', fun() -> eventmsg_sub_or_unsub('session.subscribed', ClientInfo, Topic, SubOpts) - end, Env). + end, + Env + ). on_session_unsubscribed(ClientInfo, Topic, SubOpts, Env) -> - apply_event('session.unsubscribed', + apply_event( + 'session.unsubscribed', fun() -> eventmsg_sub_or_unsub('session.unsubscribed', ClientInfo, Topic, SubOpts) - end, Env). + end, + Env + ). on_message_dropped(Message, _, Reason, Env) -> case ignore_sys_message(Message) of - true -> ok; + true -> + ok; false -> - apply_event('message.dropped', - fun() -> eventmsg_dropped(Message, Reason) end, Env) + apply_event( + 'message.dropped', + fun() -> eventmsg_dropped(Message, Reason) end, + Env + ) end, {ok, Message}. 
on_message_delivered(ClientInfo, Message, Env) -> case ignore_sys_message(Message) of - true -> ok; + true -> + ok; false -> - apply_event('message.delivered', - fun() -> eventmsg_delivered(ClientInfo, Message) end, Env) + apply_event( + 'message.delivered', + fun() -> eventmsg_delivered(ClientInfo, Message) end, + Env + ) end, {ok, Message}. on_message_acked(ClientInfo, Message, Env) -> case ignore_sys_message(Message) of - true -> ok; + true -> + ok; false -> - apply_event('message.acked', - fun() -> eventmsg_acked(ClientInfo, Message) end, Env) + apply_event( + 'message.acked', + fun() -> eventmsg_acked(ClientInfo, Message) end, + Env + ) end, {ok, Message}. on_delivery_dropped(ClientInfo, Message, Reason, Env) -> case ignore_sys_message(Message) of - true -> ok; + true -> + ok; false -> - apply_event('delivery.dropped', - fun() -> eventmsg_delivery_dropped(ClientInfo, Message, Reason) end, Env) + apply_event( + 'delivery.dropped', + fun() -> eventmsg_delivery_dropped(ClientInfo, Message, Reason) end, + Env + ) end, {ok, Message}. @@ -180,235 +229,335 @@ on_delivery_dropped(ClientInfo, Message, Reason, Env) -> %% Event Messages %%-------------------------------------------------------------------- -eventmsg_publish(Message = #message{id = Id, from = ClientId, qos = QoS, flags = Flags, - topic = Topic, headers = Headers, payload = Payload, timestamp = Timestamp}) -> - with_basic_columns('message.publish', - #{id => emqx_guid:to_hexstr(Id), - clientid => ClientId, - username => emqx_message:get_header(username, Message, undefined), - payload => Payload, - peerhost => ntoa(emqx_message:get_header(peerhost, Message, undefined)), - topic => Topic, - qos => QoS, - flags => Flags, - pub_props => printable_maps(emqx_message:get_header(properties, Message, #{})), - %% the column 'headers' will be removed in the next major release - headers => printable_maps(Headers), - publish_received_at => Timestamp - }). 
+eventmsg_publish( + Message = #message{ + id = Id, + from = ClientId, + qos = QoS, + flags = Flags, + topic = Topic, + headers = Headers, + payload = Payload, + timestamp = Timestamp + } +) -> + with_basic_columns( + 'message.publish', + #{ + id => emqx_guid:to_hexstr(Id), + clientid => ClientId, + username => emqx_message:get_header(username, Message, undefined), + payload => Payload, + peerhost => ntoa(emqx_message:get_header(peerhost, Message, undefined)), + topic => Topic, + qos => QoS, + flags => Flags, + pub_props => printable_maps(emqx_message:get_header(properties, Message, #{})), + %% the column 'headers' will be removed in the next major release + headers => printable_maps(Headers), + publish_received_at => Timestamp + } + ). -eventmsg_connected(_ClientInfo = #{ - clientid := ClientId, - username := Username, - is_bridge := IsBridge, - mountpoint := Mountpoint - }, - ConnInfo = #{ - peername := PeerName, - sockname := SockName, - clean_start := CleanStart, - proto_name := ProtoName, - proto_ver := ProtoVer, - connected_at := ConnectedAt - }) -> +eventmsg_connected( + _ClientInfo = #{ + clientid := ClientId, + username := Username, + is_bridge := IsBridge, + mountpoint := Mountpoint + }, + ConnInfo = #{ + peername := PeerName, + sockname := SockName, + clean_start := CleanStart, + proto_name := ProtoName, + proto_ver := ProtoVer, + connected_at := ConnectedAt + } +) -> Keepalive = maps:get(keepalive, ConnInfo, 0), ConnProps = maps:get(conn_props, ConnInfo, #{}), RcvMax = maps:get(receive_maximum, ConnInfo, 0), ExpiryInterval = maps:get(expiry_interval, ConnInfo, 0), - with_basic_columns('client.connected', - #{clientid => ClientId, - username => Username, - mountpoint => Mountpoint, - peername => ntoa(PeerName), - sockname => ntoa(SockName), - proto_name => ProtoName, - proto_ver => ProtoVer, - keepalive => Keepalive, - clean_start => CleanStart, - receive_maximum => RcvMax, - expiry_interval => ExpiryInterval div 1000, - is_bridge => IsBridge, - 
conn_props => printable_maps(ConnProps), - connected_at => ConnectedAt - }). + with_basic_columns( + 'client.connected', + #{ + clientid => ClientId, + username => Username, + mountpoint => Mountpoint, + peername => ntoa(PeerName), + sockname => ntoa(SockName), + proto_name => ProtoName, + proto_ver => ProtoVer, + keepalive => Keepalive, + clean_start => CleanStart, + receive_maximum => RcvMax, + expiry_interval => ExpiryInterval div 1000, + is_bridge => IsBridge, + conn_props => printable_maps(ConnProps), + connected_at => ConnectedAt + } + ). -eventmsg_disconnected(_ClientInfo = #{ - clientid := ClientId, - username := Username - }, - ConnInfo = #{ - peername := PeerName, - sockname := SockName, - disconnected_at := DisconnectedAt - }, Reason) -> - with_basic_columns('client.disconnected', - #{reason => reason(Reason), - clientid => ClientId, - username => Username, - peername => ntoa(PeerName), - sockname => ntoa(SockName), - disconn_props => printable_maps(maps:get(disconn_props, ConnInfo, #{})), - disconnected_at => DisconnectedAt - }). +eventmsg_disconnected( + _ClientInfo = #{ + clientid := ClientId, + username := Username + }, + ConnInfo = #{ + peername := PeerName, + sockname := SockName, + disconnected_at := DisconnectedAt + }, + Reason +) -> + with_basic_columns( + 'client.disconnected', + #{ + reason => reason(Reason), + clientid => ClientId, + username => Username, + peername => ntoa(PeerName), + sockname => ntoa(SockName), + disconn_props => printable_maps(maps:get(disconn_props, ConnInfo, #{})), + disconnected_at => DisconnectedAt + } + ). 
-eventmsg_connack(ConnInfo = #{ - clientid := ClientId, - clean_start := CleanStart, - username := Username, - peername := PeerName, - sockname := SockName, - proto_name := ProtoName, - proto_ver := ProtoVer - }, Reason) -> +eventmsg_connack( + ConnInfo = #{ + clientid := ClientId, + clean_start := CleanStart, + username := Username, + peername := PeerName, + sockname := SockName, + proto_name := ProtoName, + proto_ver := ProtoVer + }, + Reason +) -> Keepalive = maps:get(keepalive, ConnInfo, 0), ConnProps = maps:get(conn_props, ConnInfo, #{}), ExpiryInterval = maps:get(expiry_interval, ConnInfo, 0), - with_basic_columns('client.connack', - #{reason_code => reason(Reason), - clientid => ClientId, - clean_start => CleanStart, - username => Username, - peername => ntoa(PeerName), - sockname => ntoa(SockName), - proto_name => ProtoName, - proto_ver => ProtoVer, - keepalive => Keepalive, - expiry_interval => ExpiryInterval, - conn_props => printable_maps(ConnProps) - }). + with_basic_columns( + 'client.connack', + #{ + reason_code => reason(Reason), + clientid => ClientId, + clean_start => CleanStart, + username => Username, + peername => ntoa(PeerName), + sockname => ntoa(SockName), + proto_name => ProtoName, + proto_ver => ProtoVer, + keepalive => Keepalive, + expiry_interval => ExpiryInterval, + conn_props => printable_maps(ConnProps) + } + ). -eventmsg_check_authz_complete(_ClientInfo = #{ - clientid := ClientId, - username := Username, - peerhost := PeerHost - }, PubSub, Topic, Result, AuthzSource) -> - with_basic_columns('client.check_authz_complete', - #{clientid => ClientId, - username => Username, - peerhost => ntoa(PeerHost), - topic => Topic, - action => PubSub, - authz_source => AuthzSource, - result => Result - }). 
+eventmsg_check_authz_complete( + _ClientInfo = #{ + clientid := ClientId, + username := Username, + peerhost := PeerHost + }, + PubSub, + Topic, + Result, + AuthzSource +) -> + with_basic_columns( + 'client.check_authz_complete', + #{ + clientid => ClientId, + username => Username, + peerhost => ntoa(PeerHost), + topic => Topic, + action => PubSub, + authz_source => AuthzSource, + result => Result + } + ). -eventmsg_sub_or_unsub(Event, _ClientInfo = #{ - clientid := ClientId, - username := Username, - peerhost := PeerHost - }, Topic, SubOpts = #{qos := QoS}) -> +eventmsg_sub_or_unsub( + Event, + _ClientInfo = #{ + clientid := ClientId, + username := Username, + peerhost := PeerHost + }, + Topic, + SubOpts = #{qos := QoS} +) -> PropKey = sub_unsub_prop_key(Event), - with_basic_columns(Event, - #{clientid => ClientId, - username => Username, - peerhost => ntoa(PeerHost), - PropKey => printable_maps(maps:get(PropKey, SubOpts, #{})), - topic => Topic, - qos => QoS - }). + with_basic_columns( + Event, + #{ + clientid => ClientId, + username => Username, + peerhost => ntoa(PeerHost), + PropKey => printable_maps(maps:get(PropKey, SubOpts, #{})), + topic => Topic, + qos => QoS + } + ). -eventmsg_dropped(Message = #message{id = Id, from = ClientId, qos = QoS, flags = Flags, - topic = Topic, headers = Headers, payload = Payload, timestamp = Timestamp}, Reason) -> - with_basic_columns('message.dropped', - #{id => emqx_guid:to_hexstr(Id), - reason => Reason, - clientid => ClientId, - username => emqx_message:get_header(username, Message, undefined), - payload => Payload, - peerhost => ntoa(emqx_message:get_header(peerhost, Message, undefined)), - topic => Topic, - qos => QoS, - flags => Flags, - %% the column 'headers' will be removed in the next major release - headers => printable_maps(Headers), - pub_props => printable_maps(emqx_message:get_header(properties, Message, #{})), - publish_received_at => Timestamp - }). 
+eventmsg_dropped( + Message = #message{ + id = Id, + from = ClientId, + qos = QoS, + flags = Flags, + topic = Topic, + headers = Headers, + payload = Payload, + timestamp = Timestamp + }, + Reason +) -> + with_basic_columns( + 'message.dropped', + #{ + id => emqx_guid:to_hexstr(Id), + reason => Reason, + clientid => ClientId, + username => emqx_message:get_header(username, Message, undefined), + payload => Payload, + peerhost => ntoa(emqx_message:get_header(peerhost, Message, undefined)), + topic => Topic, + qos => QoS, + flags => Flags, + %% the column 'headers' will be removed in the next major release + headers => printable_maps(Headers), + pub_props => printable_maps(emqx_message:get_header(properties, Message, #{})), + publish_received_at => Timestamp + } + ). -eventmsg_delivered(_ClientInfo = #{ - peerhost := PeerHost, - clientid := ReceiverCId, - username := ReceiverUsername - }, Message = #message{id = Id, from = ClientId, qos = QoS, flags = Flags, - topic = Topic, headers = Headers, payload = Payload, - timestamp = Timestamp}) -> - with_basic_columns('message.delivered', - #{id => emqx_guid:to_hexstr(Id), - from_clientid => ClientId, - from_username => emqx_message:get_header(username, Message, undefined), - clientid => ReceiverCId, - username => ReceiverUsername, - payload => Payload, - peerhost => ntoa(PeerHost), - topic => Topic, - qos => QoS, - flags => Flags, - %% the column 'headers' will be removed in the next major release - headers => printable_maps(Headers), - pub_props => printable_maps(emqx_message:get_header(properties, Message, #{})), - publish_received_at => Timestamp - }). 
+eventmsg_delivered( + _ClientInfo = #{ + peerhost := PeerHost, + clientid := ReceiverCId, + username := ReceiverUsername + }, + Message = #message{ + id = Id, + from = ClientId, + qos = QoS, + flags = Flags, + topic = Topic, + headers = Headers, + payload = Payload, + timestamp = Timestamp + } +) -> + with_basic_columns( + 'message.delivered', + #{ + id => emqx_guid:to_hexstr(Id), + from_clientid => ClientId, + from_username => emqx_message:get_header(username, Message, undefined), + clientid => ReceiverCId, + username => ReceiverUsername, + payload => Payload, + peerhost => ntoa(PeerHost), + topic => Topic, + qos => QoS, + flags => Flags, + %% the column 'headers' will be removed in the next major release + headers => printable_maps(Headers), + pub_props => printable_maps(emqx_message:get_header(properties, Message, #{})), + publish_received_at => Timestamp + } + ). -eventmsg_acked(_ClientInfo = #{ - peerhost := PeerHost, - clientid := ReceiverCId, - username := ReceiverUsername - }, - Message = #message{id = Id, from = ClientId, qos = QoS, flags = Flags, - topic = Topic, headers = Headers, payload = Payload, - timestamp = Timestamp}) -> - with_basic_columns('message.acked', - #{id => emqx_guid:to_hexstr(Id), - from_clientid => ClientId, - from_username => emqx_message:get_header(username, Message, undefined), - clientid => ReceiverCId, - username => ReceiverUsername, - payload => Payload, - peerhost => ntoa(PeerHost), - topic => Topic, - qos => QoS, - flags => Flags, - %% the column 'headers' will be removed in the next major release - headers => printable_maps(Headers), - pub_props => printable_maps(emqx_message:get_header(properties, Message, #{})), - puback_props => printable_maps(emqx_message:get_header(puback_props, Message, #{})), - publish_received_at => Timestamp - }). 
+eventmsg_acked( + _ClientInfo = #{ + peerhost := PeerHost, + clientid := ReceiverCId, + username := ReceiverUsername + }, + Message = #message{ + id = Id, + from = ClientId, + qos = QoS, + flags = Flags, + topic = Topic, + headers = Headers, + payload = Payload, + timestamp = Timestamp + } +) -> + with_basic_columns( + 'message.acked', + #{ + id => emqx_guid:to_hexstr(Id), + from_clientid => ClientId, + from_username => emqx_message:get_header(username, Message, undefined), + clientid => ReceiverCId, + username => ReceiverUsername, + payload => Payload, + peerhost => ntoa(PeerHost), + topic => Topic, + qos => QoS, + flags => Flags, + %% the column 'headers' will be removed in the next major release + headers => printable_maps(Headers), + pub_props => printable_maps(emqx_message:get_header(properties, Message, #{})), + puback_props => printable_maps(emqx_message:get_header(puback_props, Message, #{})), + publish_received_at => Timestamp + } + ). -eventmsg_delivery_dropped(_ClientInfo = #{ - peerhost := PeerHost, - clientid := ReceiverCId, - username := ReceiverUsername - }, - Message = #message{id = Id, from = ClientId, qos = QoS, flags = Flags, topic = Topic, - headers = Headers, payload = Payload, timestamp = Timestamp}, - Reason) -> - with_basic_columns('delivery.dropped', - #{id => emqx_guid:to_hexstr(Id), - reason => Reason, - from_clientid => ClientId, - from_username => emqx_message:get_header(username, Message, undefined), - clientid => ReceiverCId, - username => ReceiverUsername, - payload => Payload, - peerhost => ntoa(PeerHost), - topic => Topic, - qos => QoS, - flags => Flags, - %% the column 'headers' will be removed in the next major release - headers => printable_maps(Headers), - pub_props => printable_maps(emqx_message:get_header(properties, Message, #{})), - publish_received_at => Timestamp - }). 
+eventmsg_delivery_dropped( + _ClientInfo = #{ + peerhost := PeerHost, + clientid := ReceiverCId, + username := ReceiverUsername + }, + Message = #message{ + id = Id, + from = ClientId, + qos = QoS, + flags = Flags, + topic = Topic, + headers = Headers, + payload = Payload, + timestamp = Timestamp + }, + Reason +) -> + with_basic_columns( + 'delivery.dropped', + #{ + id => emqx_guid:to_hexstr(Id), + reason => Reason, + from_clientid => ClientId, + from_username => emqx_message:get_header(username, Message, undefined), + clientid => ReceiverCId, + username => ReceiverUsername, + payload => Payload, + peerhost => ntoa(PeerHost), + topic => Topic, + qos => QoS, + flags => Flags, + %% the column 'headers' will be removed in the next major release + headers => printable_maps(Headers), + pub_props => printable_maps(emqx_message:get_header(properties, Message, #{})), + publish_received_at => Timestamp + } + ). sub_unsub_prop_key('session.subscribed') -> sub_props; sub_unsub_prop_key('session.unsubscribed') -> unsub_props. with_basic_columns(EventName, Data) when is_map(Data) -> - Data#{event => EventName, - timestamp => erlang:system_time(millisecond), - node => node() - }. + Data#{ + event => EventName, + timestamp => erlang:system_time(millisecond), + node => node() + }. %%-------------------------------------------------------------------- %% rules applying @@ -416,7 +565,8 @@ with_basic_columns(EventName, Data) when is_map(Data) -> apply_event(EventName, GenEventMsg, _Env) -> EventTopic = event_topic(EventName), case emqx_rule_engine:get_rules_for_topic(EventTopic) of - [] -> ok; + [] -> + ok; Rules -> %% delay the generating of eventmsg after we have found some rules to apply emqx_rule_runtime:apply_rules(Rules, GenEventMsg()) @@ -429,18 +579,19 @@ columns(Event) -> [Key || {Key, _ExampleVal} <- columns_with_exam(Event)]. 
event_info() -> - [ event_info_message_publish() - , event_info_message_deliver() - , event_info_message_acked() - , event_info_message_dropped() - , event_info_client_connected() - , event_info_client_disconnected() - , event_info_client_connack() - , event_info_client_check_authz_complete() - , event_info_session_subscribed() - , event_info_session_unsubscribed() - , event_info_delivery_dropped() - , event_info_bridge_mqtt() + [ + event_info_message_publish(), + event_info_message_deliver(), + event_info_message_acked(), + event_info_message_dropped(), + event_info_client_connected(), + event_info_client_disconnected(), + event_info_client_connack(), + event_info_client_check_authz_complete(), + event_info_session_subscribed(), + event_info_session_unsubscribed(), + event_info_delivery_dropped(), + event_info_bridge_mqtt() ]. event_info_message_publish() -> @@ -469,7 +620,7 @@ event_info_message_dropped() -> 'message.dropped', {<<"message routing-drop">>, <<"消息转发丢弃"/utf8>>}, {<<"messages are discarded during routing, usually because there are no subscribers">>, - <<"消息在转发的过程中被丢弃,一般是由于没有订阅者"/utf8>>}, + <<"消息在转发的过程中被丢弃,一般是由于没有订阅者"/utf8>>}, <<"SELECT * FROM \"$events/message_dropped\" WHERE topic =~ 't/#'">> ). event_info_delivery_dropped() -> @@ -477,7 +628,7 @@ event_info_delivery_dropped() -> 'delivery.dropped', {<<"message delivery-drop">>, <<"消息投递丢弃"/utf8>>}, {<<"messages are discarded during delivery, i.e. because the message queue is full">>, - <<"消息在投递的过程中被丢弃,比如由于消息队列已满"/utf8>>}, + <<"消息在投递的过程中被丢弃,比如由于消息队列已满"/utf8>>}, <<"SELECT * FROM \"$events/delivery_dropped\" WHERE topic =~ 't/#'">> ). event_info_client_connected() -> @@ -496,18 +647,18 @@ event_info_client_disconnected() -> ). event_info_client_connack() -> event_info_common( - 'client.connack', - {<<"client connack">>, <<"连接确认"/utf8>>}, - {<<"client connack">>, <<"连接确认"/utf8>>}, - <<"SELECT * FROM \"$events/client_connack\"">> - ). 
+ 'client.connack', + {<<"client connack">>, <<"连接确认"/utf8>>}, + {<<"client connack">>, <<"连接确认"/utf8>>}, + <<"SELECT * FROM \"$events/client_connack\"">> + ). event_info_client_check_authz_complete() -> event_info_common( - 'client.check_authz_complete', - {<<"client check authz complete">>, <<"鉴权结果"/utf8>>}, - {<<"client check authz complete">>, <<"鉴权结果"/utf8>>}, - <<"SELECT * FROM \"$events/client_check_authz_complete\"">> - ). + 'client.check_authz_complete', + {<<"client check authz complete">>, <<"鉴权结果"/utf8>>}, + {<<"client check authz complete">>, <<"鉴权结果"/utf8>>}, + <<"SELECT * FROM \"$events/client_check_authz_complete\"">> + ). event_info_session_subscribed() -> event_info_common( 'session.subscribed', @@ -522,7 +673,7 @@ event_info_session_unsubscribed() -> {<<"session unsubscribed">>, <<"会话取消订阅完成"/utf8>>}, <<"SELECT * FROM \"$events/session_unsubscribed\" WHERE topic =~ 't/#'">> ). -event_info_bridge_mqtt()-> +event_info_bridge_mqtt() -> event_info_common( <<"$bridges/mqtt:*">>, {<<"MQTT bridge message">>, <<"MQTT 桥接消息"/utf8>>}, @@ -531,239 +682,255 @@ event_info_bridge_mqtt()-> ). event_info_common(Event, {TitleEN, TitleZH}, {DescrEN, DescrZH}, SqlExam) -> - #{event => event_topic(Event), - title => #{en => TitleEN, zh => TitleZH}, - description => #{en => DescrEN, zh => DescrZH}, - test_columns => test_columns(Event), - columns => columns(Event), - sql_example => SqlExam + #{ + event => event_topic(Event), + title => #{en => TitleEN, zh => TitleZH}, + description => #{en => DescrEN, zh => DescrZH}, + test_columns => test_columns(Event), + columns => columns(Event), + sql_example => SqlExam }. 
test_columns('message.dropped') -> - [ {<<"reason">>, [<<"no_subscribers">>, <<"the reason of dropping">>]} - ] ++ test_columns('message.publish'); + [{<<"reason">>, [<<"no_subscribers">>, <<"the reason of dropping">>]}] ++ + test_columns('message.publish'); test_columns('message.publish') -> - [ {<<"clientid">>, [<<"c_emqx">>, <<"the clientid of the sender">>]} - , {<<"username">>, [<<"u_emqx">>, <<"the username of the sender">>]} - , {<<"topic">>, [<<"t/a">>, <<"the topic of the MQTT message">>]} - , {<<"qos">>, [1, <<"the QoS of the MQTT message">>]} - , {<<"payload">>, [<<"{\"msg\": \"hello\"}">>, <<"the payload of the MQTT message">>]} + [ + {<<"clientid">>, [<<"c_emqx">>, <<"the clientid of the sender">>]}, + {<<"username">>, [<<"u_emqx">>, <<"the username of the sender">>]}, + {<<"topic">>, [<<"t/a">>, <<"the topic of the MQTT message">>]}, + {<<"qos">>, [1, <<"the QoS of the MQTT message">>]}, + {<<"payload">>, [<<"{\"msg\": \"hello\"}">>, <<"the payload of the MQTT message">>]} ]; test_columns('delivery.dropped') -> - [ {<<"reason">>, [<<"queue_full">>, <<"the reason of dropping">>]} - ] ++ test_columns('message.delivered'); + [{<<"reason">>, [<<"queue_full">>, <<"the reason of dropping">>]}] ++ + test_columns('message.delivered'); test_columns('message.acked') -> test_columns('message.delivered'); test_columns('message.delivered') -> - [ {<<"from_clientid">>, [<<"c_emqx_1">>, <<"the clientid of the sender">>]} - , {<<"from_username">>, [<<"u_emqx_1">>, <<"the username of the sender">>]} - , {<<"clientid">>, [<<"c_emqx_2">>, <<"the clientid of the receiver">>]} - , {<<"username">>, [<<"u_emqx_2">>, <<"the username of the receiver">>]} - , {<<"topic">>, [<<"t/a">>, <<"the topic of the MQTT message">>]} - , {<<"qos">>, [1, <<"the QoS of the MQTT message">>]} - , {<<"payload">>, [<<"{\"msg\": \"hello\"}">>, <<"the payload of the MQTT message">>]} + [ + {<<"from_clientid">>, [<<"c_emqx_1">>, <<"the clientid of the sender">>]}, + {<<"from_username">>, 
[<<"u_emqx_1">>, <<"the username of the sender">>]}, + {<<"clientid">>, [<<"c_emqx_2">>, <<"the clientid of the receiver">>]}, + {<<"username">>, [<<"u_emqx_2">>, <<"the username of the receiver">>]}, + {<<"topic">>, [<<"t/a">>, <<"the topic of the MQTT message">>]}, + {<<"qos">>, [1, <<"the QoS of the MQTT message">>]}, + {<<"payload">>, [<<"{\"msg\": \"hello\"}">>, <<"the payload of the MQTT message">>]} ]; test_columns('client.connected') -> - [ {<<"clientid">>, [<<"c_emqx">>, <<"the clientid if the client">>]} - , {<<"username">>, [<<"u_emqx">>, <<"the username if the client">>]} - , {<<"peername">>, [<<"127.0.0.1:52918">>, <<"the IP address and port of the client">>]} + [ + {<<"clientid">>, [<<"c_emqx">>, <<"the clientid if the client">>]}, + {<<"username">>, [<<"u_emqx">>, <<"the username if the client">>]}, + {<<"peername">>, [<<"127.0.0.1:52918">>, <<"the IP address and port of the client">>]} ]; test_columns('client.disconnected') -> - [ {<<"clientid">>, [<<"c_emqx">>, <<"the clientid if the client">>]} - , {<<"username">>, [<<"u_emqx">>, <<"the username if the client">>]} - , {<<"reason">>, [<<"normal">>, <<"the reason for shutdown">>]} + [ + {<<"clientid">>, [<<"c_emqx">>, <<"the clientid if the client">>]}, + {<<"username">>, [<<"u_emqx">>, <<"the username if the client">>]}, + {<<"reason">>, [<<"normal">>, <<"the reason for shutdown">>]} ]; test_columns('client.connack') -> - [ {<<"clientid">>, [<<"c_emqx">>, <<"the clientid if the client">>]} - , {<<"username">>, [<<"u_emqx">>, <<"the username if the client">>]} - , {<<"reason_code">>, [<<"sucess">>, <<"the reason code">>]} + [ + {<<"clientid">>, [<<"c_emqx">>, <<"the clientid if the client">>]}, + {<<"username">>, [<<"u_emqx">>, <<"the username if the client">>]}, + {<<"reason_code">>, [<<"sucess">>, <<"the reason code">>]} ]; test_columns('client.check_authz_complete') -> - [ {<<"clientid">>, [<<"c_emqx">>, <<"the clientid if the client">>]} - , {<<"username">>, [<<"u_emqx">>, <<"the username if the 
client">>]} - , {<<"topic">>, [<<"t/1">>, <<"the topic of the MQTT message">>]} - , {<<"action">>, [<<"publish">>, <<"the action of publish or subscribe">>]} - , {<<"result">>, [<<"allow">>,<<"the authz check complete result">>]} + [ + {<<"clientid">>, [<<"c_emqx">>, <<"the clientid if the client">>]}, + {<<"username">>, [<<"u_emqx">>, <<"the username if the client">>]}, + {<<"topic">>, [<<"t/1">>, <<"the topic of the MQTT message">>]}, + {<<"action">>, [<<"publish">>, <<"the action of publish or subscribe">>]}, + {<<"result">>, [<<"allow">>, <<"the authz check complete result">>]} ]; test_columns('session.unsubscribed') -> test_columns('session.subscribed'); test_columns('session.subscribed') -> - [ {<<"clientid">>, [<<"c_emqx">>, <<"the clientid if the client">>]} - , {<<"username">>, [<<"u_emqx">>, <<"the username if the client">>]} - , {<<"topic">>, [<<"t/a">>, <<"the topic of the MQTT message">>]} - , {<<"qos">>, [1, <<"the QoS of the MQTT message">>]} + [ + {<<"clientid">>, [<<"c_emqx">>, <<"the clientid if the client">>]}, + {<<"username">>, [<<"u_emqx">>, <<"the username if the client">>]}, + {<<"topic">>, [<<"t/a">>, <<"the topic of the MQTT message">>]}, + {<<"qos">>, [1, <<"the QoS of the MQTT message">>]} ]; test_columns(<<"$bridges/mqtt", _/binary>>) -> - [ {<<"topic">>, [<<"t/a">>, <<"the topic of the MQTT message">>]} - , {<<"qos">>, [1, <<"the QoS of the MQTT message">>]} - , {<<"payload">>, [<<"{\"msg\": \"hello\"}">>, <<"the payload of the MQTT message">>]} + [ + {<<"topic">>, [<<"t/a">>, <<"the topic of the MQTT message">>]}, + {<<"qos">>, [1, <<"the QoS of the MQTT message">>]}, + {<<"payload">>, [<<"{\"msg\": \"hello\"}">>, <<"the payload of the MQTT message">>]} ]. 
columns_with_exam('message.publish') -> - [ {<<"event">>, 'message.publish'} - , {<<"id">>, emqx_guid:to_hexstr(emqx_guid:gen())} - , {<<"clientid">>, <<"c_emqx">>} - , {<<"username">>, <<"u_emqx">>} - , {<<"payload">>, <<"{\"msg\": \"hello\"}">>} - , {<<"peerhost">>, <<"192.168.0.10">>} - , {<<"topic">>, <<"t/a">>} - , {<<"qos">>, 1} - , {<<"flags">>, #{}} - , {<<"headers">>, undefined} - , {<<"publish_received_at">>, erlang:system_time(millisecond)} - , columns_example_props(pub_props) - , {<<"timestamp">>, erlang:system_time(millisecond)} - , {<<"node">>, node()} + [ + {<<"event">>, 'message.publish'}, + {<<"id">>, emqx_guid:to_hexstr(emqx_guid:gen())}, + {<<"clientid">>, <<"c_emqx">>}, + {<<"username">>, <<"u_emqx">>}, + {<<"payload">>, <<"{\"msg\": \"hello\"}">>}, + {<<"peerhost">>, <<"192.168.0.10">>}, + {<<"topic">>, <<"t/a">>}, + {<<"qos">>, 1}, + {<<"flags">>, #{}}, + {<<"headers">>, undefined}, + {<<"publish_received_at">>, erlang:system_time(millisecond)}, + columns_example_props(pub_props), + {<<"timestamp">>, erlang:system_time(millisecond)}, + {<<"node">>, node()} ]; columns_with_exam('message.delivered') -> columns_message_ack_delivered('message.delivered'); columns_with_exam('message.acked') -> - [ columns_example_props(puback_props) - ] ++ - columns_message_ack_delivered('message.acked'); + [columns_example_props(puback_props)] ++ + columns_message_ack_delivered('message.acked'); columns_with_exam('message.dropped') -> - [ {<<"event">>, 'message.dropped'} - , {<<"id">>, emqx_guid:to_hexstr(emqx_guid:gen())} - , {<<"reason">>, no_subscribers} - , {<<"clientid">>, <<"c_emqx">>} - , {<<"username">>, <<"u_emqx">>} - , {<<"payload">>, <<"{\"msg\": \"hello\"}">>} - , {<<"peerhost">>, <<"192.168.0.10">>} - , {<<"topic">>, <<"t/a">>} - , {<<"qos">>, 1} - , {<<"flags">>, #{}} - , {<<"publish_received_at">>, erlang:system_time(millisecond)} - , columns_example_props(pub_props) - , {<<"timestamp">>, erlang:system_time(millisecond)} - , {<<"node">>, node()} + 
[ + {<<"event">>, 'message.dropped'}, + {<<"id">>, emqx_guid:to_hexstr(emqx_guid:gen())}, + {<<"reason">>, no_subscribers}, + {<<"clientid">>, <<"c_emqx">>}, + {<<"username">>, <<"u_emqx">>}, + {<<"payload">>, <<"{\"msg\": \"hello\"}">>}, + {<<"peerhost">>, <<"192.168.0.10">>}, + {<<"topic">>, <<"t/a">>}, + {<<"qos">>, 1}, + {<<"flags">>, #{}}, + {<<"publish_received_at">>, erlang:system_time(millisecond)}, + columns_example_props(pub_props), + {<<"timestamp">>, erlang:system_time(millisecond)}, + {<<"node">>, node()} ]; columns_with_exam('delivery.dropped') -> - [ {<<"event">>, 'delivery.dropped'} - , {<<"id">>, emqx_guid:to_hexstr(emqx_guid:gen())} - , {<<"reason">>, queue_full} - , {<<"from_clientid">>, <<"c_emqx_1">>} - , {<<"from_username">>, <<"u_emqx_1">>} - , {<<"clientid">>, <<"c_emqx_2">>} - , {<<"username">>, <<"u_emqx_2">>} - , {<<"payload">>, <<"{\"msg\": \"hello\"}">>} - , {<<"peerhost">>, <<"192.168.0.10">>} - , {<<"topic">>, <<"t/a">>} - , {<<"qos">>, 1} - , {<<"flags">>, #{}} - , columns_example_props(pub_props) - , {<<"publish_received_at">>, erlang:system_time(millisecond)} - , {<<"timestamp">>, erlang:system_time(millisecond)} - , {<<"node">>, node()} + [ + {<<"event">>, 'delivery.dropped'}, + {<<"id">>, emqx_guid:to_hexstr(emqx_guid:gen())}, + {<<"reason">>, queue_full}, + {<<"from_clientid">>, <<"c_emqx_1">>}, + {<<"from_username">>, <<"u_emqx_1">>}, + {<<"clientid">>, <<"c_emqx_2">>}, + {<<"username">>, <<"u_emqx_2">>}, + {<<"payload">>, <<"{\"msg\": \"hello\"}">>}, + {<<"peerhost">>, <<"192.168.0.10">>}, + {<<"topic">>, <<"t/a">>}, + {<<"qos">>, 1}, + {<<"flags">>, #{}}, + columns_example_props(pub_props), + {<<"publish_received_at">>, erlang:system_time(millisecond)}, + {<<"timestamp">>, erlang:system_time(millisecond)}, + {<<"node">>, node()} ]; columns_with_exam('client.connected') -> - [ {<<"event">>, 'client.connected'} - , {<<"clientid">>, <<"c_emqx">>} - , {<<"username">>, <<"u_emqx">>} - , {<<"mountpoint">>, undefined} - , 
{<<"peername">>, <<"192.168.0.10:56431">>} - , {<<"sockname">>, <<"0.0.0.0:1883">>} - , {<<"proto_name">>, <<"MQTT">>} - , {<<"proto_ver">>, 5} - , {<<"keepalive">>, 60} - , {<<"clean_start">>, true} - , {<<"expiry_interval">>, 3600} - , {<<"is_bridge">>, false} - , columns_example_props(conn_props) - , {<<"connected_at">>, erlang:system_time(millisecond)} - , {<<"timestamp">>, erlang:system_time(millisecond)} - , {<<"node">>, node()} + [ + {<<"event">>, 'client.connected'}, + {<<"clientid">>, <<"c_emqx">>}, + {<<"username">>, <<"u_emqx">>}, + {<<"mountpoint">>, undefined}, + {<<"peername">>, <<"192.168.0.10:56431">>}, + {<<"sockname">>, <<"0.0.0.0:1883">>}, + {<<"proto_name">>, <<"MQTT">>}, + {<<"proto_ver">>, 5}, + {<<"keepalive">>, 60}, + {<<"clean_start">>, true}, + {<<"expiry_interval">>, 3600}, + {<<"is_bridge">>, false}, + columns_example_props(conn_props), + {<<"connected_at">>, erlang:system_time(millisecond)}, + {<<"timestamp">>, erlang:system_time(millisecond)}, + {<<"node">>, node()} ]; columns_with_exam('client.disconnected') -> - [ {<<"event">>, 'client.disconnected'} - , {<<"reason">>, normal} - , {<<"clientid">>, <<"c_emqx">>} - , {<<"username">>, <<"u_emqx">>} - , {<<"peername">>, <<"192.168.0.10:56431">>} - , {<<"sockname">>, <<"0.0.0.0:1883">>} - , columns_example_props(disconn_props) - , {<<"disconnected_at">>, erlang:system_time(millisecond)} - , {<<"timestamp">>, erlang:system_time(millisecond)} - , {<<"node">>, node()} + [ + {<<"event">>, 'client.disconnected'}, + {<<"reason">>, normal}, + {<<"clientid">>, <<"c_emqx">>}, + {<<"username">>, <<"u_emqx">>}, + {<<"peername">>, <<"192.168.0.10:56431">>}, + {<<"sockname">>, <<"0.0.0.0:1883">>}, + columns_example_props(disconn_props), + {<<"disconnected_at">>, erlang:system_time(millisecond)}, + {<<"timestamp">>, erlang:system_time(millisecond)}, + {<<"node">>, node()} ]; columns_with_exam('client.connack') -> - [ {<<"event">>, 'client.connected'} - , {<<"reason_code">>, success} - , 
{<<"clientid">>, <<"c_emqx">>} - , {<<"username">>, <<"u_emqx">>} - , {<<"peername">>, <<"192.168.0.10:56431">>} - , {<<"sockname">>, <<"0.0.0.0:1883">>} - , {<<"proto_name">>, <<"MQTT">>} - , {<<"proto_ver">>, 5} - , {<<"keepalive">>, 60} - , {<<"clean_start">>, true} - , {<<"expiry_interval">>, 3600} - , {<<"connected_at">>, erlang:system_time(millisecond)} - , columns_example_props(conn_props) - , {<<"timestamp">>, erlang:system_time(millisecond)} - , {<<"node">>, node()} + [ + {<<"event">>, 'client.connected'}, + {<<"reason_code">>, success}, + {<<"clientid">>, <<"c_emqx">>}, + {<<"username">>, <<"u_emqx">>}, + {<<"peername">>, <<"192.168.0.10:56431">>}, + {<<"sockname">>, <<"0.0.0.0:1883">>}, + {<<"proto_name">>, <<"MQTT">>}, + {<<"proto_ver">>, 5}, + {<<"keepalive">>, 60}, + {<<"clean_start">>, true}, + {<<"expiry_interval">>, 3600}, + {<<"connected_at">>, erlang:system_time(millisecond)}, + columns_example_props(conn_props), + {<<"timestamp">>, erlang:system_time(millisecond)}, + {<<"node">>, node()} ]; columns_with_exam('client.check_authz_complete') -> - [ {<<"event">>, 'client.check_authz_complete'} - , {<<"clientid">>, <<"c_emqx">>} - , {<<"username">>, <<"u_emqx">>} - , {<<"peerhost">>, <<"192.168.0.10">>} - , {<<"topic">>, <<"t/a">>} - , {<<"action">>, <<"publish">>} - , {<<"authz_source">>, <<"cache">>} - , {<<"result">>, <<"allow">>} - , {<<"timestamp">>, erlang:system_time(millisecond)} - , {<<"node">>, node()} + [ + {<<"event">>, 'client.check_authz_complete'}, + {<<"clientid">>, <<"c_emqx">>}, + {<<"username">>, <<"u_emqx">>}, + {<<"peerhost">>, <<"192.168.0.10">>}, + {<<"topic">>, <<"t/a">>}, + {<<"action">>, <<"publish">>}, + {<<"authz_source">>, <<"cache">>}, + {<<"result">>, <<"allow">>}, + {<<"timestamp">>, erlang:system_time(millisecond)}, + {<<"node">>, node()} ]; columns_with_exam('session.subscribed') -> - [ columns_example_props(sub_props) - ] ++ columns_message_sub_unsub('session.subscribed'); + [columns_example_props(sub_props)] ++ 
columns_message_sub_unsub('session.subscribed'); columns_with_exam('session.unsubscribed') -> - [ columns_example_props(unsub_props) - ] ++ columns_message_sub_unsub('session.unsubscribed'); + [columns_example_props(unsub_props)] ++ columns_message_sub_unsub('session.unsubscribed'); columns_with_exam(<<"$bridges/mqtt", _/binary>> = EventName) -> - [ {<<"event">>, EventName} - , {<<"id">>, emqx_guid:to_hexstr(emqx_guid:gen())} - , {<<"payload">>, <<"{\"msg\": \"hello\"}">>} - , {<<"server">>, <<"192.168.0.10:1883">>} - , {<<"topic">>, <<"t/a">>} - , {<<"qos">>, 1} - , {<<"dup">>, false} - , {<<"retain">>, false} - , columns_example_props(pub_props) - %% the time that we receiced the message from remote broker - , {<<"message_received_at">>, erlang:system_time(millisecond)} - %% the time that the rule is triggered - , {<<"timestamp">>, erlang:system_time(millisecond)} - , {<<"node">>, node()} + [ + {<<"event">>, EventName}, + {<<"id">>, emqx_guid:to_hexstr(emqx_guid:gen())}, + {<<"payload">>, <<"{\"msg\": \"hello\"}">>}, + {<<"server">>, <<"192.168.0.10:1883">>}, + {<<"topic">>, <<"t/a">>}, + {<<"qos">>, 1}, + {<<"dup">>, false}, + {<<"retain">>, false}, + columns_example_props(pub_props), + %% the time that we receiced the message from remote broker + {<<"message_received_at">>, erlang:system_time(millisecond)}, + %% the time that the rule is triggered + {<<"timestamp">>, erlang:system_time(millisecond)}, + {<<"node">>, node()} ]. 
columns_message_sub_unsub(EventName) -> - [ {<<"event">>, EventName} - , {<<"clientid">>, <<"c_emqx">>} - , {<<"username">>, <<"u_emqx">>} - , {<<"peerhost">>, <<"192.168.0.10">>} - , {<<"topic">>, <<"t/a">>} - , {<<"qos">>, 1} - , {<<"timestamp">>, erlang:system_time(millisecond)} - , {<<"node">>, node()} + [ + {<<"event">>, EventName}, + {<<"clientid">>, <<"c_emqx">>}, + {<<"username">>, <<"u_emqx">>}, + {<<"peerhost">>, <<"192.168.0.10">>}, + {<<"topic">>, <<"t/a">>}, + {<<"qos">>, 1}, + {<<"timestamp">>, erlang:system_time(millisecond)}, + {<<"node">>, node()} ]. columns_message_ack_delivered(EventName) -> - [ {<<"event">>, EventName} - , {<<"id">>, emqx_guid:to_hexstr(emqx_guid:gen())} - , {<<"from_clientid">>, <<"c_emqx_1">>} - , {<<"from_username">>, <<"u_emqx_1">>} - , {<<"clientid">>, <<"c_emqx_2">>} - , {<<"username">>, <<"u_emqx_2">>} - , {<<"payload">>, <<"{\"msg\": \"hello\"}">>} - , {<<"peerhost">>, <<"192.168.0.10">>} - , {<<"topic">>, <<"t/a">>} - , {<<"qos">>, 1} - , {<<"flags">>, #{}} - , {<<"publish_received_at">>, erlang:system_time(millisecond)} - , columns_example_props(pub_props) - , {<<"timestamp">>, erlang:system_time(millisecond)} - , {<<"node">>, node()} + [ + {<<"event">>, EventName}, + {<<"id">>, emqx_guid:to_hexstr(emqx_guid:gen())}, + {<<"from_clientid">>, <<"c_emqx_1">>}, + {<<"from_username">>, <<"u_emqx_1">>}, + {<<"clientid">>, <<"c_emqx_2">>}, + {<<"username">>, <<"u_emqx_2">>}, + {<<"payload">>, <<"{\"msg\": \"hello\"}">>}, + {<<"peerhost">>, <<"192.168.0.10">>}, + {<<"topic">>, <<"t/a">>}, + {<<"qos">>, 1}, + {<<"flags">>, #{}}, + {<<"publish_received_at">>, erlang:system_time(millisecond)}, + columns_example_props(pub_props), + {<<"timestamp">>, erlang:system_time(millisecond)}, + {<<"node">>, node()} ]. columns_example_props(PropType) -> @@ -777,21 +944,23 @@ columns_example_props(PropType) -> {PropType, maps:merge(Props, UserProps)}. 
columns_example_props_specific(pub_props) -> - #{ 'Payload-Format-Indicator' => 0 - , 'Message-Expiry-Interval' => 30 - }; + #{ + 'Payload-Format-Indicator' => 0, + 'Message-Expiry-Interval' => 30 + }; columns_example_props_specific(puback_props) -> - #{ 'Reason-String' => <<"OK">> - }; + #{'Reason-String' => <<"OK">>}; columns_example_props_specific(conn_props) -> - #{ 'Session-Expiry-Interval' => 7200 - , 'Receive-Maximum' => 32 - }; + #{ + 'Session-Expiry-Interval' => 7200, + 'Receive-Maximum' => 32 + }; columns_example_props_specific(disconn_props) -> - #{ 'Session-Expiry-Interval' => 7200 - , 'Reason-String' => <<"Redirect to another server">> - , 'Server Reference' => <<"192.168.22.129">> - }; + #{ + 'Session-Expiry-Interval' => 7200, + 'Reason-String' => <<"Redirect to another server">>, + 'Server Reference' => <<"192.168.22.129">> + }; columns_example_props_specific(sub_props) -> #{}; columns_example_props_specific(unsub_props) -> @@ -817,19 +986,15 @@ reason({Error, _}) when is_atom(Error) -> Error; reason(_) -> internal_error. ntoa(undefined) -> undefined; -ntoa({IpAddr, Port}) -> - iolist_to_binary([inet:ntoa(IpAddr), ":", integer_to_list(Port)]); -ntoa(IpAddr) -> - iolist_to_binary(inet:ntoa(IpAddr)). +ntoa({IpAddr, Port}) -> iolist_to_binary([inet:ntoa(IpAddr), ":", integer_to_list(Port)]); +ntoa(IpAddr) -> iolist_to_binary(inet:ntoa(IpAddr)). 
event_name(<<"$events/client_connected", _/binary>>) -> 'client.connected'; event_name(<<"$events/client_disconnected", _/binary>>) -> 'client.disconnected'; event_name(<<"$events/client_connack", _/binary>>) -> 'client.connack'; -event_name(<<"$events/client_check_authz_complete", _/binary>>) -> - 'client.check_authz_complete'; +event_name(<<"$events/client_check_authz_complete", _/binary>>) -> 'client.check_authz_complete'; event_name(<<"$events/session_subscribed", _/binary>>) -> 'session.subscribed'; -event_name(<<"$events/session_unsubscribed", _/binary>>) -> - 'session.unsubscribed'; +event_name(<<"$events/session_unsubscribed", _/binary>>) -> 'session.unsubscribed'; event_name(<<"$events/message_delivered", _/binary>>) -> 'message.delivered'; event_name(<<"$events/message_acked", _/binary>>) -> 'message.acked'; event_name(<<"$events/message_dropped", _/binary>>) -> 'message.dropped'; @@ -840,8 +1005,7 @@ event_name(_) -> 'message.publish'. event_topic('client.connected') -> <<"$events/client_connected">>; event_topic('client.disconnected') -> <<"$events/client_disconnected">>; event_topic('client.connack') -> <<"$events/client_connack">>; -event_topic('client.check_authz_complete') -> - <<"$events/client_check_authz_complete">>; +event_topic('client.check_authz_complete') -> <<"$events/client_check_authz_complete">>; event_topic('session.subscribed') -> <<"$events/session_subscribed">>; event_topic('session.unsubscribed') -> <<"$events/session_unsubscribed">>; event_topic('message.delivered') -> <<"$events/message_delivered">>; @@ -851,10 +1015,12 @@ event_topic('delivery.dropped') -> <<"$events/delivery_dropped">>; event_topic('message.publish') -> <<"$events/message_publish">>; event_topic(<<"$bridges/", _/binary>> = Topic) -> Topic. 
-printable_maps(undefined) -> #{}; +printable_maps(undefined) -> + #{}; printable_maps(Headers) -> maps:fold( - fun (K, V0, AccIn) when K =:= peerhost; K =:= peername; K =:= sockname -> + fun + (K, V0, AccIn) when K =:= peerhost; K =:= peername; K =:= sockname -> AccIn#{K => ntoa(V0)}; ('User-Property', V0, AccIn) when is_list(V0) -> AccIn#{ @@ -863,15 +1029,22 @@ printable_maps(Headers) -> %% However, this does not allow duplicate property keys. To allow %% duplicate keys, we have to use the 'User-Property-Pairs' field instead. 'User-Property' => maps:from_list(V0), - 'User-Property-Pairs' => [#{ - key => Key, - value => Value - } || {Key, Value} <- V0] + 'User-Property-Pairs' => [ + #{ + key => Key, + value => Value + } + || {Key, Value} <- V0 + ] }; - (K, V0, AccIn) -> AccIn#{K => V0} - end, #{}, Headers). + (K, V0, AccIn) -> + AccIn#{K => V0} + end, + #{}, + Headers + ). ignore_sys_message(#message{flags = Flags}) -> ConfigRootKey = emqx_rule_engine_schema:namespace(), maps:get(sys, Flags, false) andalso - emqx:get_config([ConfigRootKey, ignore_sys_message]). + emqx:get_config([ConfigRootKey, ignore_sys_message]). diff --git a/apps/emqx_rule_engine/src/emqx_rule_funcs.erl b/apps/emqx_rule_engine/src/emqx_rule_funcs.erl index e7f5da52f..1c887327c 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_funcs.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_funcs.erl @@ -21,257 +21,303 @@ -include_lib("emqx/include/logger.hrl"). %% IoT Funcs --export([ msgid/0 - , qos/0 - , flags/0 - , flag/1 - , topic/0 - , topic/1 - , clientid/0 - , clientip/0 - , peerhost/0 - , username/0 - , payload/0 - , payload/1 - , contains_topic/2 - , contains_topic/3 - , contains_topic_match/2 - , contains_topic_match/3 - , null/0 - ]). +-export([ + msgid/0, + qos/0, + flags/0, + flag/1, + topic/0, + topic/1, + clientid/0, + clientip/0, + peerhost/0, + username/0, + payload/0, + payload/1, + contains_topic/2, + contains_topic/3, + contains_topic_match/2, + contains_topic_match/3, + null/0 +]). 
%% Arithmetic Funcs --export([ '+'/2 - , '-'/2 - , '*'/2 - , '/'/2 - , 'div'/2 - , mod/2 - , eq/2 - ]). +-export([ + '+'/2, + '-'/2, + '*'/2, + '/'/2, + 'div'/2, + mod/2, + eq/2 +]). %% Math Funcs --export([ abs/1 - , acos/1 - , acosh/1 - , asin/1 - , asinh/1 - , atan/1 - , atanh/1 - , ceil/1 - , cos/1 - , cosh/1 - , exp/1 - , floor/1 - , fmod/2 - , log/1 - , log10/1 - , log2/1 - , power/2 - , round/1 - , sin/1 - , sinh/1 - , sqrt/1 - , tan/1 - , tanh/1 - ]). +-export([ + abs/1, + acos/1, + acosh/1, + asin/1, + asinh/1, + atan/1, + atanh/1, + ceil/1, + cos/1, + cosh/1, + exp/1, + floor/1, + fmod/2, + log/1, + log10/1, + log2/1, + power/2, + round/1, + sin/1, + sinh/1, + sqrt/1, + tan/1, + tanh/1 +]). %% Bits Funcs --export([ bitnot/1 - , bitand/2 - , bitor/2 - , bitxor/2 - , bitsl/2 - , bitsr/2 - , bitsize/1 - , subbits/2 - , subbits/3 - , subbits/6 - ]). +-export([ + bitnot/1, + bitand/2, + bitor/2, + bitxor/2, + bitsl/2, + bitsr/2, + bitsize/1, + subbits/2, + subbits/3, + subbits/6 +]). %% Data Type Conversion --export([ str/1 - , str_utf8/1 - , bool/1 - , int/1 - , float/1 - , float/2 - , map/1 - , bin2hexstr/1 - , hexstr2bin/1 - ]). +-export([ + str/1, + str_utf8/1, + bool/1, + int/1, + float/1, + float/2, + map/1, + bin2hexstr/1, + hexstr2bin/1 +]). %% Data Type Validation Funcs --export([ is_null/1 - , is_not_null/1 - , is_str/1 - , is_bool/1 - , is_int/1 - , is_float/1 - , is_num/1 - , is_map/1 - , is_array/1 - ]). +-export([ + is_null/1, + is_not_null/1, + is_str/1, + is_bool/1, + is_int/1, + is_float/1, + is_num/1, + is_map/1, + is_array/1 +]). %% String Funcs --export([ lower/1 - , ltrim/1 - , reverse/1 - , rtrim/1 - , strlen/1 - , substr/2 - , substr/3 - , trim/1 - , upper/1 - , split/2 - , split/3 - , concat/2 - , tokens/2 - , tokens/3 - , sprintf_s/2 - , pad/2 - , pad/3 - , pad/4 - , replace/3 - , replace/4 - , regex_match/2 - , regex_replace/3 - , ascii/1 - , find/2 - , find/3 - ]). 
+-export([ + lower/1, + ltrim/1, + reverse/1, + rtrim/1, + strlen/1, + substr/2, + substr/3, + trim/1, + upper/1, + split/2, + split/3, + concat/2, + tokens/2, + tokens/3, + sprintf_s/2, + pad/2, + pad/3, + pad/4, + replace/3, + replace/4, + regex_match/2, + regex_replace/3, + ascii/1, + find/2, + find/3 +]). %% Map Funcs --export([ map_new/0 - ]). +-export([map_new/0]). --export([ map_get/2 - , map_get/3 - , map_put/3 - ]). +-export([ + map_get/2, + map_get/3, + map_put/3 +]). %% For backward compatibility --export([ mget/2 - , mget/3 - , mput/3 - ]). +-export([ + mget/2, + mget/3, + mput/3 +]). %% Array Funcs --export([ nth/2 - , length/1 - , sublist/2 - , sublist/3 - , first/1 - , last/1 - , contains/2 - ]). +-export([ + nth/2, + length/1, + sublist/2, + sublist/3, + first/1, + last/1, + contains/2 +]). %% Hash Funcs --export([ md5/1 - , sha/1 - , sha256/1 - ]). +-export([ + md5/1, + sha/1, + sha256/1 +]). %% Data encode and decode --export([ base64_encode/1 - , base64_decode/1 - , json_decode/1 - , json_encode/1 - , term_decode/1 - , term_encode/1 - ]). +-export([ + base64_encode/1, + base64_decode/1, + json_decode/1, + json_encode/1, + term_decode/1, + term_encode/1 +]). %% Date functions --export([ now_rfc3339/0 - , now_rfc3339/1 - , unix_ts_to_rfc3339/1 - , unix_ts_to_rfc3339/2 - , rfc3339_to_unix_ts/1 - , rfc3339_to_unix_ts/2 - , now_timestamp/0 - , now_timestamp/1 - , format_date/3 - , format_date/4 - , date_to_unix_ts/4 - ]). +-export([ + now_rfc3339/0, + now_rfc3339/1, + unix_ts_to_rfc3339/1, + unix_ts_to_rfc3339/2, + rfc3339_to_unix_ts/1, + rfc3339_to_unix_ts/2, + now_timestamp/0, + now_timestamp/1, + format_date/3, + format_date/4, + date_to_unix_ts/4 +]). %% Proc Dict Func - -export([ proc_dict_get/1 - , proc_dict_put/2 - , proc_dict_del/1 - , kv_store_get/1 - , kv_store_get/2 - , kv_store_put/2 - , kv_store_del/1 - ]). 
+-export([ + proc_dict_get/1, + proc_dict_put/2, + proc_dict_del/1, + kv_store_get/1, + kv_store_get/2, + kv_store_put/2, + kv_store_del/1 +]). -export(['$handle_undefined_function'/2]). --compile({no_auto_import, - [ abs/1 - , ceil/1 - , floor/1 - , round/1 - , map_get/2 - ]}). +-compile( + {no_auto_import, [ + abs/1, + ceil/1, + floor/1, + round/1, + map_get/2 + ]} +). -define(is_var(X), is_binary(X)). %% @doc "msgid()" Func msgid() -> - fun(#{id := MsgId}) -> MsgId; (_) -> undefined end. + fun + (#{id := MsgId}) -> MsgId; + (_) -> undefined + end. %% @doc "qos()" Func qos() -> - fun(#{qos := QoS}) -> QoS; (_) -> undefined end. + fun + (#{qos := QoS}) -> QoS; + (_) -> undefined + end. %% @doc "topic()" Func topic() -> - fun(#{topic := Topic}) -> Topic; (_) -> undefined end. + fun + (#{topic := Topic}) -> Topic; + (_) -> undefined + end. %% @doc "topic(N)" Func topic(I) when is_integer(I) -> - fun(#{topic := Topic}) -> + fun + (#{topic := Topic}) -> lists:nth(I, emqx_topic:tokens(Topic)); - (_) -> undefined + (_) -> + undefined end. %% @doc "flags()" Func flags() -> - fun(#{flags := Flags}) -> Flags; (_) -> #{} end. + fun + (#{flags := Flags}) -> Flags; + (_) -> #{} + end. %% @doc "flags(Name)" Func flag(Name) -> - fun(#{flags := Flags}) -> emqx_rule_maps:nested_get({var,Name}, Flags); (_) -> undefined end. + fun + (#{flags := Flags}) -> emqx_rule_maps:nested_get({var, Name}, Flags); + (_) -> undefined + end. %% @doc "clientid()" Func clientid() -> - fun(#{from := ClientId}) -> ClientId; (_) -> undefined end. + fun + (#{from := ClientId}) -> ClientId; + (_) -> undefined + end. %% @doc "username()" Func username() -> - fun(#{username := Username}) -> Username; (_) -> undefined end. + fun + (#{username := Username}) -> Username; + (_) -> undefined + end. %% @doc "clientip()" Func clientip() -> peerhost(). peerhost() -> - fun(#{peerhost := Addr}) -> Addr; (_) -> undefined end. + fun + (#{peerhost := Addr}) -> Addr; + (_) -> undefined + end. 
payload() -> - fun(#{payload := Payload}) -> Payload; (_) -> undefined end. + fun + (#{payload := Payload}) -> Payload; + (_) -> undefined + end. payload(Path) -> - fun(#{payload := Payload}) when erlang:is_map(Payload) -> + fun + (#{payload := Payload}) when erlang:is_map(Payload) -> emqx_rule_maps:nested_get(map_path(Path), Payload); - (_) -> undefined + (_) -> + undefined end. %% @doc Check if a topic_filter contains a specific topic %% TopicFilters = [{<<"t/a">>, #{qos => 0}]. --spec(contains_topic(emqx_types:topic_filters(), emqx_types:topic()) - -> true | false). +-spec contains_topic(emqx_types:topic_filters(), emqx_types:topic()) -> + true | false. contains_topic(TopicFilters, Topic) -> case find_topic_filter(Topic, TopicFilters, fun eq/2) of not_found -> false; @@ -283,8 +329,8 @@ contains_topic(TopicFilters, Topic, QoS) -> _ -> false end. --spec(contains_topic_match(emqx_types:topic_filters(), emqx_types:topic()) - -> true | false). +-spec contains_topic_match(emqx_types:topic_filters(), emqx_types:topic()) -> + true | false. contains_topic_match(TopicFilters, Topic) -> case find_topic_filter(Topic, TopicFilters, fun emqx_topic:match/2) of not_found -> false; @@ -298,10 +344,13 @@ contains_topic_match(TopicFilters, Topic, QoS) -> find_topic_filter(Filter, TopicFilters, Func) -> try - [case Func(Topic, Filter) of - true -> throw(Result); - false -> ok - end || Result = #{topic := Topic} <- TopicFilters], + [ + case Func(Topic, Filter) of + true -> throw(Result); + false -> ok + end + || Result = #{topic := Topic} <- TopicFilters + ], not_found catch throw:Result -> Result @@ -317,7 +366,6 @@ null() -> %% plus 2 numbers '+'(X, Y) when is_number(X), is_number(Y) -> X + Y; - %% string concatenation %% this requires one of the arguments is string, the other argument will be converted %% to string automatically (implicit conversion) @@ -355,7 +403,7 @@ acos(N) when is_number(N) -> acosh(N) when is_number(N) -> math:acosh(N). 
-asin(N) when is_number(N)-> +asin(N) when is_number(N) -> math:asin(N). asinh(N) when is_number(N) -> @@ -364,19 +412,19 @@ asinh(N) when is_number(N) -> atan(N) when is_number(N) -> math:atan(N). -atanh(N) when is_number(N)-> +atanh(N) when is_number(N) -> math:atanh(N). ceil(N) when is_number(N) -> erlang:ceil(N). -cos(N) when is_number(N)-> +cos(N) when is_number(N) -> math:cos(N). cosh(N) when is_number(N) -> math:cosh(N). -exp(N) when is_number(N)-> +exp(N) when is_number(N) -> math:exp(N). floor(N) when is_number(N) -> @@ -391,7 +439,7 @@ log(N) when is_number(N) -> log10(N) when is_number(N) -> math:log10(N). -log2(N) when is_number(N)-> +log2(N) when is_number(N) -> math:log2(N). power(X, Y) when is_number(X), is_number(Y) -> @@ -446,7 +494,9 @@ subbits(Bits, Len) when is_integer(Len), is_bitstring(Bits) -> subbits(Bits, Start, Len) when is_integer(Start), is_integer(Len), is_bitstring(Bits) -> get_subbits(Bits, Start, Len, <<"integer">>, <<"unsigned">>, <<"big">>). -subbits(Bits, Start, Len, Type, Signedness, Endianness) when is_integer(Start), is_integer(Len), is_bitstring(Bits) -> +subbits(Bits, Start, Len, Type, Signedness, Endianness) when + is_integer(Start), is_integer(Len), is_bitstring(Bits) +-> get_subbits(Bits, Start, Len, Type, Signedness, Endianness). get_subbits(Bits, Start, Len, Type, Signedness, Endianness) -> @@ -455,7 +505,8 @@ get_subbits(Bits, Start, Len, Type, Signedness, Endianness) -> <<_:Begin, Rem/bits>> when Rem =/= <<>> -> Sz = bit_size(Rem), do_get_subbits(Rem, Sz, Len, Type, Signedness, Endianness); - _ -> undefined + _ -> + undefined end. -define(match_bits(Bits0, Pattern, ElesePattern), @@ -464,46 +515,80 @@ get_subbits(Bits, Start, Len, Type, Signedness, Endianness) -> SubBits; ElesePattern -> SubBits - end). + end +). 
do_get_subbits(Bits, Sz, Len, <<"integer">>, <<"unsigned">>, <<"big">>) -> - ?match_bits(Bits, <>, - <>); + ?match_bits( + Bits, + <>, + <> + ); do_get_subbits(Bits, Sz, Len, <<"float">>, <<"unsigned">>, <<"big">>) -> - ?match_bits(Bits, <>, - <>); + ?match_bits( + Bits, + <>, + <> + ); do_get_subbits(Bits, Sz, Len, <<"bits">>, <<"unsigned">>, <<"big">>) -> - ?match_bits(Bits, <>, - <>); - + ?match_bits( + Bits, + <>, + <> + ); do_get_subbits(Bits, Sz, Len, <<"integer">>, <<"signed">>, <<"big">>) -> - ?match_bits(Bits, <>, - <>); + ?match_bits( + Bits, + <>, + <> + ); do_get_subbits(Bits, Sz, Len, <<"float">>, <<"signed">>, <<"big">>) -> - ?match_bits(Bits, <>, - <>); + ?match_bits( + Bits, + <>, + <> + ); do_get_subbits(Bits, Sz, Len, <<"bits">>, <<"signed">>, <<"big">>) -> - ?match_bits(Bits, <>, - <>); - + ?match_bits( + Bits, + <>, + <> + ); do_get_subbits(Bits, Sz, Len, <<"integer">>, <<"unsigned">>, <<"little">>) -> - ?match_bits(Bits, <>, - <>); + ?match_bits( + Bits, + <>, + <> + ); do_get_subbits(Bits, Sz, Len, <<"float">>, <<"unsigned">>, <<"little">>) -> - ?match_bits(Bits, <>, - <>); + ?match_bits( + Bits, + <>, + <> + ); do_get_subbits(Bits, Sz, Len, <<"bits">>, <<"unsigned">>, <<"little">>) -> - ?match_bits(Bits, <>, - <>); - + ?match_bits( + Bits, + <>, + <> + ); do_get_subbits(Bits, Sz, Len, <<"integer">>, <<"signed">>, <<"little">>) -> - ?match_bits(Bits, <>, - <>); + ?match_bits( + Bits, + <>, + <> + ); do_get_subbits(Bits, Sz, Len, <<"float">>, <<"signed">>, <<"little">>) -> - ?match_bits(Bits, <>, - <>); + ?match_bits( + Bits, + <>, + <> + ); do_get_subbits(Bits, Sz, Len, <<"bits">>, <<"signed">>, <<"little">>) -> - ?match_bits(Bits, <>, - <>). + ?match_bits( + Bits, + <>, + <> + ). %%------------------------------------------------------------------------------ %% Data Type Conversion Funcs @@ -590,9 +675,11 @@ strlen(S) when is_binary(S) -> substr(S, Start) when is_binary(S), is_integer(Start) -> string:slice(S, Start). 
-substr(S, Start, Length) when is_binary(S), - is_integer(Start), - is_integer(Length) -> +substr(S, Start, Length) when + is_binary(S), + is_integer(Start), + is_integer(Length) +-> string:slice(S, Start, Length). trim(S) when is_binary(S) -> @@ -601,26 +688,28 @@ trim(S) when is_binary(S) -> upper(S) when is_binary(S) -> string:uppercase(S). -split(S, P) when is_binary(S),is_binary(P) -> +split(S, P) when is_binary(S), is_binary(P) -> [R || R <- string:split(S, P, all), R =/= <<>> andalso R =/= ""]. split(S, P, <<"notrim">>) -> string:split(S, P, all); - split(S, P, <<"leading_notrim">>) -> string:split(S, P, leading); -split(S, P, <<"leading">>) when is_binary(S),is_binary(P) -> +split(S, P, <<"leading">>) when is_binary(S), is_binary(P) -> [R || R <- string:split(S, P, leading), R =/= <<>> andalso R =/= ""]; split(S, P, <<"trailing_notrim">>) -> string:split(S, P, trailing); -split(S, P, <<"trailing">>) when is_binary(S),is_binary(P) -> +split(S, P, <<"trailing">>) when is_binary(S), is_binary(P) -> [R || R <- string:split(S, P, trailing), R =/= <<>> andalso R =/= ""]. tokens(S, Separators) -> [list_to_binary(R) || R <- string:lexemes(binary_to_list(S), binary_to_list(Separators))]. tokens(S, Separators, <<"nocrlf">>) -> - [list_to_binary(R) || R <- string:lexemes(binary_to_list(S), binary_to_list(Separators) ++ [$\r,$\n,[$\r,$\n]])]. + [ + list_to_binary(R) + || R <- string:lexemes(binary_to_list(S), binary_to_list(Separators) ++ [$\r, $\n, [$\r, $\n]]) + ]. %% implicit convert args to strings, and then do concatenation concat(S1, S2) -> @@ -634,21 +723,17 @@ pad(S, Len) when is_binary(S), is_integer(Len) -> pad(S, Len, <<"trailing">>) when is_binary(S), is_integer(Len) -> iolist_to_binary(string:pad(S, Len, trailing)); - pad(S, Len, <<"both">>) when is_binary(S), is_integer(Len) -> iolist_to_binary(string:pad(S, Len, both)); - pad(S, Len, <<"leading">>) when is_binary(S), is_integer(Len) -> iolist_to_binary(string:pad(S, Len, leading)). 
pad(S, Len, <<"trailing">>, Char) when is_binary(S), is_integer(Len), is_binary(Char) -> Chars = unicode:characters_to_list(Char, utf8), iolist_to_binary(string:pad(S, Len, trailing, Chars)); - pad(S, Len, <<"both">>, Char) when is_binary(S), is_integer(Len), is_binary(Char) -> Chars = unicode:characters_to_list(Char, utf8), iolist_to_binary(string:pad(S, Len, both, Chars)); - pad(S, Len, <<"leading">>, Char) when is_binary(S), is_integer(Len), is_binary(Char) -> Chars = unicode:characters_to_list(Char, utf8), iolist_to_binary(string:pad(S, Len, leading, Chars)). @@ -658,24 +743,24 @@ replace(SrcStr, P, RepStr) when is_binary(SrcStr), is_binary(P), is_binary(RepSt replace(SrcStr, P, RepStr, <<"all">>) when is_binary(SrcStr), is_binary(P), is_binary(RepStr) -> iolist_to_binary(string:replace(SrcStr, P, RepStr, all)); - -replace(SrcStr, P, RepStr, <<"trailing">>) when is_binary(SrcStr), is_binary(P), is_binary(RepStr) -> +replace(SrcStr, P, RepStr, <<"trailing">>) when + is_binary(SrcStr), is_binary(P), is_binary(RepStr) +-> iolist_to_binary(string:replace(SrcStr, P, RepStr, trailing)); - replace(SrcStr, P, RepStr, <<"leading">>) when is_binary(SrcStr), is_binary(P), is_binary(RepStr) -> iolist_to_binary(string:replace(SrcStr, P, RepStr, leading)). regex_match(Str, RE) -> - case re:run(Str, RE, [global,{capture,none}]) of + case re:run(Str, RE, [global, {capture, none}]) of match -> true; nomatch -> false end. regex_replace(SrcStr, RE, RepStr) -> - re:replace(SrcStr, RE, RepStr, [global, {return,binary}]). + re:replace(SrcStr, RE, RepStr, [global, {return, binary}]). ascii(Char) when is_binary(Char) -> - [FirstC| _] = binary_to_list(Char), + [FirstC | _] = binary_to_list(Char), FirstC. find(S, P) when is_binary(S), is_binary(P) -> @@ -683,7 +768,6 @@ find(S, P) when is_binary(S), is_binary(P) -> find(S, P, <<"trailing">>) when is_binary(S), is_binary(P) -> find_s(S, P, trailing); - find(S, P, <<"leading">>) when is_binary(S), is_binary(P) -> find_s(S, P, leading). 
@@ -735,7 +819,8 @@ mget(Key, Map) -> mget(Key, Map, Default) -> case maps:find(Key, Map) of - {ok, Val} -> Val; + {ok, Val} -> + Val; error when is_atom(Key) -> %% the map may have an equivalent binary-form key BinKey = emqx_plugin_libs_rule:bin(Key), @@ -744,14 +829,16 @@ mget(Key, Map, Default) -> error -> Default end; error when is_binary(Key) -> - try %% the map may have an equivalent atom-form key + %% the map may have an equivalent atom-form key + try AtomKey = list_to_existing_atom(binary_to_list(Key)), case maps:find(AtomKey, Map) of {ok, Val} -> Val; error -> Default end - catch error:badarg -> - Default + catch + error:badarg -> + Default end; error -> Default @@ -759,7 +846,8 @@ mget(Key, Map, Default) -> mput(Key, Val, Map) -> case maps:find(Key, Map) of - {ok, _} -> maps:put(Key, Val, Map); + {ok, _} -> + maps:put(Key, Val, Map); error when is_atom(Key) -> %% the map may have an equivalent binary-form key BinKey = emqx_plugin_libs_rule:bin(Key), @@ -768,14 +856,16 @@ mput(Key, Val, Map) -> error -> maps:put(Key, Val, Map) end; error when is_binary(Key) -> - try %% the map may have an equivalent atom-form key + %% the map may have an equivalent atom-form key + try AtomKey = list_to_existing_atom(binary_to_list(Key)), case maps:find(AtomKey, Map) of {ok, _} -> maps:put(AtomKey, Val, Map); error -> maps:put(Key, Val, Map) end - catch error:badarg -> - maps:put(Key, Val, Map) + catch + error:badarg -> + maps:put(Key, Val, Map) end; error -> maps:put(Key, Val, Map) @@ -863,14 +953,18 @@ unix_ts_to_rfc3339(Epoch) -> unix_ts_to_rfc3339(Epoch, Unit) when is_integer(Epoch) -> emqx_plugin_libs_rule:bin( calendar:system_time_to_rfc3339( - Epoch, [{unit, time_unit(Unit)}])). + Epoch, [{unit, time_unit(Unit)}] + ) + ). rfc3339_to_unix_ts(DateTime) -> rfc3339_to_unix_ts(DateTime, <<"second">>). rfc3339_to_unix_ts(DateTime, Unit) when is_binary(DateTime) -> - calendar:rfc3339_to_system_time(binary_to_list(DateTime), - [{unit, time_unit(Unit)}]). 
+ calendar:rfc3339_to_system_time( + binary_to_list(DateTime), + [{unit, time_unit(Unit)}] + ). now_timestamp() -> erlang:system_time(second). @@ -885,22 +979,30 @@ time_unit(<<"nanosecond">>) -> nanosecond. format_date(TimeUnit, Offset, FormatString) -> emqx_plugin_libs_rule:bin( - emqx_rule_date:date(time_unit(TimeUnit), - emqx_plugin_libs_rule:str(Offset), - emqx_plugin_libs_rule:str(FormatString))). + emqx_rule_date:date( + time_unit(TimeUnit), + emqx_plugin_libs_rule:str(Offset), + emqx_plugin_libs_rule:str(FormatString) + ) + ). format_date(TimeUnit, Offset, FormatString, TimeEpoch) -> emqx_plugin_libs_rule:bin( - emqx_rule_date:date(time_unit(TimeUnit), - emqx_plugin_libs_rule:str(Offset), - emqx_plugin_libs_rule:str(FormatString), - TimeEpoch)). + emqx_rule_date:date( + time_unit(TimeUnit), + emqx_plugin_libs_rule:str(Offset), + emqx_plugin_libs_rule:str(FormatString), + TimeEpoch + ) + ). date_to_unix_ts(TimeUnit, Offset, FormatString, InputString) -> - emqx_rule_date:parse_date(time_unit(TimeUnit), - emqx_plugin_libs_rule:str(Offset), - emqx_plugin_libs_rule:str(FormatString), - emqx_plugin_libs_rule:str(InputString)). + emqx_rule_date:parse_date( + time_unit(TimeUnit), + emqx_plugin_libs_rule:str(Offset), + emqx_plugin_libs_rule:str(FormatString), + emqx_plugin_libs_rule:str(InputString) + ). %% @doc This is for sql funcs that should be handled in the specific modules. %% Here the emqx_rule_funcs module acts as a proxy, forwarding @@ -922,9 +1024,8 @@ date_to_unix_ts(TimeUnit, Offset, FormatString, InputString) -> % '$handle_undefined_function'(Fun, Args) -> % error({sql_function_not_supported, function_literal(Fun, Args)}). -'$handle_undefined_function'(sprintf, [Format|Args]) -> +'$handle_undefined_function'(sprintf, [Format | Args]) -> erlang:apply(fun sprintf_s/2, [Format, Args]); - '$handle_undefined_function'(Fun, Args) -> error({sql_function_not_supported, function_literal(Fun, Args)}). 
@@ -935,8 +1036,12 @@ function_literal(Fun, []) when is_atom(Fun) -> atom_to_list(Fun) ++ "()"; function_literal(Fun, [FArg | Args]) when is_atom(Fun), is_list(Args) -> WithFirstArg = io_lib:format("~ts(~0p", [atom_to_list(Fun), FArg]), - lists:foldl(fun(Arg, Literal) -> - io_lib:format("~ts, ~0p", [Literal, Arg]) - end, WithFirstArg, Args) ++ ")"; + lists:foldl( + fun(Arg, Literal) -> + io_lib:format("~ts, ~0p", [Literal, Arg]) + end, + WithFirstArg, + Args + ) ++ ")"; function_literal(Fun, Args) -> {invalid_func, {Fun, Args}}. diff --git a/apps/emqx_rule_engine/src/emqx_rule_maps.erl b/apps/emqx_rule_engine/src/emqx_rule_maps.erl index 31ae2eee2..5d887b68b 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_maps.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_maps.erl @@ -16,14 +16,15 @@ -module(emqx_rule_maps). --export([ nested_get/2 - , nested_get/3 - , nested_put/3 - , range_gen/2 - , range_get/3 - , atom_key_map/1 - , unsafe_atom_key_map/1 - ]). +-export([ + nested_get/2, + nested_get/3, + nested_put/3, + range_gen/2, + range_get/3, + atom_key_map/1, + unsafe_atom_key_map/1 +]). nested_get(Key, Data) -> nested_get(Key, Data, undefined). @@ -41,8 +42,10 @@ do_nested_get([Key | More], Data, OrgData, Default) -> do_nested_get([], Val, _OrgData, _Default) -> Val. -nested_put(Key, Val, Data) when not is_map(Data), - not is_list(Data) -> +nested_put(Key, Val, Data) when + not is_map(Data), + not is_list(Data) +-> nested_put(Key, Val, #{}); nested_put({var, Key}, Val, Map) -> general_map_put({key, Key}, Val, Map, Map); @@ -56,19 +59,27 @@ do_nested_put([], Val, _Map, _OrgData) -> Val. general_map_get(Key, Map, OrgData, Default) -> - general_find(Key, Map, OrgData, + general_find( + Key, + Map, + OrgData, fun ({equivalent, {_EquiKey, Val}}) -> Val; ({found, {_Key, Val}}) -> Val; (not_found) -> Default - end). + end + ). 
general_map_put(Key, Val, Map, OrgData) -> - general_find(Key, Map, OrgData, + general_find( + Key, + Map, + OrgData, fun ({equivalent, {EquiKey, _Val}}) -> do_put(EquiKey, Val, Map, OrgData); (_) -> do_put(Key, Val, Map, OrgData) - end). + end + ). general_find(KeyOrIndex, Data, OrgData, Handler) when is_binary(Data) -> try emqx_json:decode(Data, [return_maps]) of @@ -78,7 +89,8 @@ general_find(KeyOrIndex, Data, OrgData, Handler) when is_binary(Data) -> end; general_find({key, Key}, Map, _OrgData, Handler) when is_map(Map) -> case maps:find(Key, Map) of - {ok, Val} -> Handler({found, {{key, Key}, Val}}); + {ok, Val} -> + Handler({found, {{key, Key}, Val}}); error when is_atom(Key) -> %% the map may have an equivalent binary-form key BinKey = emqx_plugin_libs_rule:bin(Key), @@ -87,14 +99,16 @@ general_find({key, Key}, Map, _OrgData, Handler) when is_map(Map) -> error -> Handler(not_found) end; error when is_binary(Key) -> - try %% the map may have an equivalent atom-form key + %% the map may have an equivalent atom-form key + try AtomKey = list_to_existing_atom(binary_to_list(Key)), case maps:find(AtomKey, Map) of {ok, Val} -> Handler({equivalent, {{key, AtomKey}, Val}}); error -> Handler(not_found) end - catch error:badarg -> - Handler(not_found) + catch + error:badarg -> + Handler(not_found) end; error -> Handler(not_found) @@ -122,18 +136,21 @@ do_put({index, Index0}, Val, List, OrgData) -> setnth(_, Data, Val) when not is_list(Data) -> setnth(head, [], Val); setnth(head, List, Val) when is_list(List) -> [Val | List]; -setnth(head, _List, Val) -> [Val]; +setnth(head, _List, Val) -> + [Val]; setnth(tail, List, Val) when is_list(List) -> List ++ [Val]; -setnth(tail, _List, Val) -> [Val]; +setnth(tail, _List, Val) -> + [Val]; setnth(I, List, _Val) when not is_integer(I) -> List; -setnth(0, List, _Val) -> List; +setnth(0, List, _Val) -> + List; setnth(I, List, Val) when is_integer(I), I > 0 -> do_setnth(I, List, Val); setnth(I, List, Val) when is_integer(I), I < 0 
-> lists:reverse(do_setnth(-I, lists:reverse(List), Val)). do_setnth(1, [_ | Rest], Val) -> [Val | Rest]; -do_setnth(I, [E | Rest], Val) -> [E | setnth(I-1, Rest, Val)]; +do_setnth(I, [E | Rest], Val) -> [E | setnth(I - 1, Rest, Val)]; do_setnth(_, [], _Val) -> []. getnth(0, _) -> @@ -144,8 +161,10 @@ getnth(I, L) when I < 0 -> do_getnth(-I, lists:reverse(L)). do_getnth(I, L) -> - try {ok, lists:nth(I, L)} - catch error:_ -> {error, not_found} + try + {ok, lists:nth(I, L)} + catch + error:_ -> {error, not_found} end. handle_getnth(Index, List, IndexPattern, Handler) -> @@ -170,7 +189,8 @@ do_range_get(Begin, End, List) -> EndIndex = index(End, TotalLen), lists:sublist(List, BeginIndex, (EndIndex - BeginIndex + 1)). -index(0, _) -> error({invalid_index, 0}); +index(0, _) -> + error({invalid_index, 0}); index(Index, _) when Index > 0 -> Index; index(Index, Len) when Index < 0 -> Len + Index + 1. @@ -180,26 +200,36 @@ index(Index, Len) when Index < 0 -> %%%------------------------------------------------------------------- atom_key_map(BinKeyMap) when is_map(BinKeyMap) -> maps:fold( - fun(K, V, Acc) when is_binary(K) -> - Acc#{binary_to_existing_atom(K, utf8) => atom_key_map(V)}; - (K, V, Acc) when is_list(K) -> - Acc#{list_to_existing_atom(K) => atom_key_map(V)}; - (K, V, Acc) when is_atom(K) -> - Acc#{K => atom_key_map(V)} - end, #{}, BinKeyMap); + fun + (K, V, Acc) when is_binary(K) -> + Acc#{binary_to_existing_atom(K, utf8) => atom_key_map(V)}; + (K, V, Acc) when is_list(K) -> + Acc#{list_to_existing_atom(K) => atom_key_map(V)}; + (K, V, Acc) when is_atom(K) -> + Acc#{K => atom_key_map(V)} + end, + #{}, + BinKeyMap + ); atom_key_map(ListV) when is_list(ListV) -> [atom_key_map(V) || V <- ListV]; -atom_key_map(Val) -> Val. +atom_key_map(Val) -> + Val. 
unsafe_atom_key_map(BinKeyMap) when is_map(BinKeyMap) -> maps:fold( - fun(K, V, Acc) when is_binary(K) -> - Acc#{binary_to_atom(K, utf8) => unsafe_atom_key_map(V)}; - (K, V, Acc) when is_list(K) -> - Acc#{list_to_atom(K) => unsafe_atom_key_map(V)}; - (K, V, Acc) when is_atom(K) -> - Acc#{K => unsafe_atom_key_map(V)} - end, #{}, BinKeyMap); + fun + (K, V, Acc) when is_binary(K) -> + Acc#{binary_to_atom(K, utf8) => unsafe_atom_key_map(V)}; + (K, V, Acc) when is_list(K) -> + Acc#{list_to_atom(K) => unsafe_atom_key_map(V)}; + (K, V, Acc) when is_atom(K) -> + Acc#{K => unsafe_atom_key_map(V)} + end, + #{}, + BinKeyMap + ); unsafe_atom_key_map(ListV) when is_list(ListV) -> [unsafe_atom_key_map(V) || V <- ListV]; -unsafe_atom_key_map(Val) -> Val. +unsafe_atom_key_map(Val) -> + Val. diff --git a/apps/emqx_rule_engine/src/emqx_rule_outputs.erl b/apps/emqx_rule_engine/src/emqx_rule_outputs.erl index 6a858a73b..a0d06c978 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_outputs.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_outputs.erl @@ -22,20 +22,18 @@ -include_lib("emqx/include/emqx.hrl"). %% APIs --export([ parse_output/1 - ]). +-export([parse_output/1]). %% callbacks of emqx_rule_output --export([ pre_process_output_args/2 - ]). +-export([pre_process_output_args/2]). %% output functions --export([ console/3 - , republish/3 - ]). +-export([ + console/3, + republish/3 +]). --optional_callbacks([ pre_process_output_args/2 - ]). +-optional_callbacks([pre_process_output_args/2]). -callback pre_process_output_args(FuncName :: atom(), output_fun_args()) -> output_fun_args(). @@ -44,20 +42,32 @@ %%-------------------------------------------------------------------- parse_output(#{function := OutputFunc} = Output) -> {Mod, Func} = parse_output_func(OutputFunc), - #{mod => Mod, func => Func, - args => pre_process_args(Mod, Func, maps:get(args, Output, #{}))}. + #{ + mod => Mod, + func => Func, + args => pre_process_args(Mod, Func, maps:get(args, Output, #{})) + }. 
%%-------------------------------------------------------------------- %% callbacks of emqx_rule_output %%-------------------------------------------------------------------- -pre_process_output_args(republish, #{topic := Topic, qos := QoS, retain := Retain, - payload := Payload} = Args) -> - Args#{preprocessed_tmpl => #{ +pre_process_output_args( + republish, + #{ + topic := Topic, + qos := QoS, + retain := Retain, + payload := Payload + } = Args +) -> + Args#{ + preprocessed_tmpl => #{ topic => emqx_plugin_libs_rule:preproc_tmpl(Topic), qos => preproc_vars(QoS), retain => preproc_vars(Retain), payload => emqx_plugin_libs_rule:preproc_tmpl(Payload) - }}; + } + }; pre_process_output_args(_, Args) -> Args. @@ -66,35 +76,55 @@ pre_process_output_args(_, Args) -> %%-------------------------------------------------------------------- -spec console(map(), map(), map()) -> any(). console(Selected, #{metadata := #{rule_id := RuleId}} = Envs, _Args) -> - ?ULOG("[rule output] ~ts~n" - "\tOutput Data: ~p~n" - "\tEnvs: ~p~n", [RuleId, Selected, Envs]). + ?ULOG( + "[rule output] ~ts~n" + "\tOutput Data: ~p~n" + "\tEnvs: ~p~n", + [RuleId, Selected, Envs] + ). 
-republish(_Selected, #{topic := Topic, headers := #{republish_by := RuleId}, - metadata := #{rule_id := RuleId}}, _Args) -> +republish( + _Selected, + #{ + topic := Topic, + headers := #{republish_by := RuleId}, + metadata := #{rule_id := RuleId} + }, + _Args +) -> ?SLOG(error, #{msg => "recursive_republish_detected", topic => Topic}); - %% republish a PUBLISH message -republish(Selected, #{flags := Flags, metadata := #{rule_id := RuleId}}, - #{preprocessed_tmpl := #{ +republish( + Selected, + #{flags := Flags, metadata := #{rule_id := RuleId}}, + #{ + preprocessed_tmpl := #{ qos := QoSTks, retain := RetainTks, topic := TopicTks, - payload := PayloadTks}}) -> + payload := PayloadTks + } + } +) -> Topic = emqx_plugin_libs_rule:proc_tmpl(TopicTks, Selected), Payload = format_msg(PayloadTks, Selected), QoS = replace_simple_var(QoSTks, Selected, 0), Retain = replace_simple_var(RetainTks, Selected, false), ?TRACE("RULE", "republish_message", #{topic => Topic, payload => Payload}), safe_publish(RuleId, Topic, QoS, Flags#{retain => Retain}, Payload); - %% in case this is a "$events/" event -republish(Selected, #{metadata := #{rule_id := RuleId}}, - #{preprocessed_tmpl := #{ - qos := QoSTks, - retain := RetainTks, - topic := TopicTks, - payload := PayloadTks}}) -> +republish( + Selected, + #{metadata := #{rule_id := RuleId}}, + #{ + preprocessed_tmpl := #{ + qos := QoSTks, + retain := RetainTks, + topic := TopicTks, + payload := PayloadTks + } + } +) -> Topic = emqx_plugin_libs_rule:proc_tmpl(TopicTks, Selected), Payload = format_msg(PayloadTks, Selected), QoS = replace_simple_var(QoSTks, Selected, 0), @@ -114,8 +144,10 @@ get_output_mod_func(OutputFunc) when is_atom(OutputFunc) -> {emqx_rule_outputs, OutputFunc}; get_output_mod_func(OutputFunc) when is_binary(OutputFunc) -> ToAtom = fun(Bin) -> - try binary_to_existing_atom(Bin) of Atom -> Atom - catch error:badarg -> error({unknown_output_function, OutputFunc}) + try binary_to_existing_atom(Bin) of + Atom -> Atom + 
catch + error:badarg -> error({unknown_output_function, OutputFunc}) end end, case string:split(OutputFunc, ":", all) of @@ -158,7 +190,8 @@ preproc_vars(Data) -> replace_simple_var(Tokens, Data, Default) when is_list(Tokens) -> [Var] = emqx_plugin_libs_rule:proc_tmpl(Tokens, Data, #{return => rawlist}), case Var of - undefined -> Default; %% cannot find the variable from Data + %% cannot find the variable from Data + undefined -> Default; _ -> Var end; replace_simple_var(Val, _Data, _Default) -> diff --git a/apps/emqx_rule_engine/src/emqx_rule_runtime.erl b/apps/emqx_rule_engine/src/emqx_rule_runtime.erl index 8211426ab..81cabd40b 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_runtime.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_runtime.erl @@ -20,32 +20,37 @@ -include_lib("emqx/include/emqx.hrl"). -include_lib("emqx/include/logger.hrl"). --export([ apply_rule/2 - , apply_rules/2 - , clear_rule_payload/0 - ]). +-export([ + apply_rule/2, + apply_rules/2, + clear_rule_payload/0 +]). --import(emqx_rule_maps, - [ nested_get/2 - , range_gen/2 - , range_get/3 - ]). +-import( + emqx_rule_maps, + [ + nested_get/2, + range_gen/2, + range_get/3 + ] +). --compile({no_auto_import,[alias/1]}). +-compile({no_auto_import, [alias/1]}). -type input() :: map(). -type alias() :: atom(). -type collection() :: {alias(), [term()]}. -define(ephemeral_alias(TYPE, NAME), - iolist_to_binary(io_lib:format("_v_~ts_~p_~p", [TYPE, NAME, erlang:system_time()]))). + iolist_to_binary(io_lib:format("_v_~ts_~p_~p", [TYPE, NAME, erlang:system_time()])) +). -define(ActionMaxRetry, 3). %%------------------------------------------------------------------------------ %% Apply rules %%------------------------------------------------------------------------------ --spec(apply_rules(list(rule()), input()) -> ok). +-spec apply_rules(list(rule()), input()) -> ok. 
apply_rules([], _Input) -> ok; apply_rules([#{enable := false} | More], Input) -> @@ -61,54 +66,77 @@ apply_rule_discard_result(Rule, Input) -> apply_rule(Rule = #{id := RuleID}, Input) -> ok = emqx_plugin_libs_metrics:inc(rule_metrics, RuleID, 'sql.matched'), clear_rule_payload(), - try do_apply_rule(Rule, add_metadata(Input, #{rule_id => RuleID})) + try + do_apply_rule(Rule, add_metadata(Input, #{rule_id => RuleID})) catch %% ignore the errors if select or match failed _:Reason = {select_and_transform_error, Error} -> ok = emqx_plugin_libs_metrics:inc(rule_metrics, RuleID, 'sql.failed.exception'), - ?SLOG(warning, #{msg => "SELECT_clause_exception", - rule_id => RuleID, reason => Error}), + ?SLOG(warning, #{ + msg => "SELECT_clause_exception", + rule_id => RuleID, + reason => Error + }), {error, Reason}; _:Reason = {match_conditions_error, Error} -> ok = emqx_plugin_libs_metrics:inc(rule_metrics, RuleID, 'sql.failed.exception'), - ?SLOG(warning, #{msg => "WHERE_clause_exception", - rule_id => RuleID, reason => Error}), + ?SLOG(warning, #{ + msg => "WHERE_clause_exception", + rule_id => RuleID, + reason => Error + }), {error, Reason}; _:Reason = {select_and_collect_error, Error} -> ok = emqx_plugin_libs_metrics:inc(rule_metrics, RuleID, 'sql.failed.exception'), - ?SLOG(warning, #{msg => "FOREACH_clause_exception", - rule_id => RuleID, reason => Error}), + ?SLOG(warning, #{ + msg => "FOREACH_clause_exception", + rule_id => RuleID, + reason => Error + }), {error, Reason}; _:Reason = {match_incase_error, Error} -> ok = emqx_plugin_libs_metrics:inc(rule_metrics, RuleID, 'sql.failed.exception'), - ?SLOG(warning, #{msg => "INCASE_clause_exception", - rule_id => RuleID, reason => Error}), + ?SLOG(warning, #{ + msg => "INCASE_clause_exception", + rule_id => RuleID, + reason => Error + }), {error, Reason}; Class:Error:StkTrace -> ok = emqx_plugin_libs_metrics:inc(rule_metrics, RuleID, 'sql.failed.exception'), - ?SLOG(error, #{msg => "apply_rule_failed", - rule_id => 
RuleID, - exception => Class, - reason => Error, - stacktrace => StkTrace - }), + ?SLOG(error, #{ + msg => "apply_rule_failed", + rule_id => RuleID, + exception => Class, + reason => Error, + stacktrace => StkTrace + }), {error, {Error, StkTrace}} end. -do_apply_rule(#{ - id := RuleId, - is_foreach := true, - fields := Fields, - doeach := DoEach, - incase := InCase, - conditions := Conditions, - outputs := Outputs - }, Input) -> - {Selected, Collection} = ?RAISE(select_and_collect(Fields, Input), - {select_and_collect_error, {_EXCLASS_,_EXCPTION_,_ST_}}), +do_apply_rule( + #{ + id := RuleId, + is_foreach := true, + fields := Fields, + doeach := DoEach, + incase := InCase, + conditions := Conditions, + outputs := Outputs + }, + Input +) -> + {Selected, Collection} = ?RAISE( + select_and_collect(Fields, Input), + {select_and_collect_error, {_EXCLASS_, _EXCPTION_, _ST_}} + ), ColumnsAndSelected = maps:merge(Input, Selected), - case ?RAISE(match_conditions(Conditions, ColumnsAndSelected), - {match_conditions_error, {_EXCLASS_,_EXCPTION_,_ST_}}) of + case + ?RAISE( + match_conditions(Conditions, ColumnsAndSelected), + {match_conditions_error, {_EXCLASS_, _EXCPTION_, _ST_}} + ) + of true -> Collection2 = filter_collection(Input, InCase, DoEach, Collection), case Collection2 of @@ -122,17 +150,26 @@ do_apply_rule(#{ ok = emqx_plugin_libs_metrics:inc(rule_metrics, RuleId, 'sql.failed.no_result'), {error, nomatch} end; - -do_apply_rule(#{id := RuleId, - is_foreach := false, - fields := Fields, - conditions := Conditions, - outputs := Outputs - }, Input) -> - Selected = ?RAISE(select_and_transform(Fields, Input), - {select_and_transform_error, {_EXCLASS_,_EXCPTION_,_ST_}}), - case ?RAISE(match_conditions(Conditions, maps:merge(Input, Selected)), - {match_conditions_error, {_EXCLASS_,_EXCPTION_,_ST_}}) of +do_apply_rule( + #{ + id := RuleId, + is_foreach := false, + fields := Fields, + conditions := Conditions, + outputs := Outputs + }, + Input +) -> + Selected = ?RAISE( + 
select_and_transform(Fields, Input), + {select_and_transform_error, {_EXCLASS_, _EXCPTION_, _ST_}} + ), + case + ?RAISE( + match_conditions(Conditions, maps:merge(Input, Selected)), + {match_conditions_error, {_EXCLASS_, _EXCPTION_, _ST_}} + ) + of true -> ok = emqx_plugin_libs_metrics:inc(rule_metrics, RuleId, 'sql.passed'), {ok, handle_output_list(RuleId, Outputs, Selected, Input)}; @@ -154,15 +191,19 @@ select_and_transform(['*' | More], Input, Output) -> select_and_transform(More, Input, maps:merge(Output, Input)); select_and_transform([{as, Field, Alias} | More], Input, Output) -> Val = eval(Field, Input), - select_and_transform(More, + select_and_transform( + More, nested_put(Alias, Val, Input), - nested_put(Alias, Val, Output)); + nested_put(Alias, Val, Output) + ); select_and_transform([Field | More], Input, Output) -> Val = eval(Field, Input), Key = alias(Field), - select_and_transform(More, + select_and_transform( + More, nested_put(Key, Val, Input), - nested_put(Key, Val, Output)). + nested_put(Key, Val, Output) + ). %% FOREACH Clause -spec select_and_collect(list(), input()) -> {input(), collection()}. @@ -174,9 +215,11 @@ select_and_collect([{as, Field, {_, A} = Alias}], Input, {Output, _}) -> {nested_put(Alias, Val, Output), {A, ensure_list(Val)}}; select_and_collect([{as, Field, Alias} | More], Input, {Output, LastKV}) -> Val = eval(Field, Input), - select_and_collect(More, + select_and_collect( + More, nested_put(Alias, Val, Input), - {nested_put(Alias, Val, Output), LastKV}); + {nested_put(Alias, Val, Output), LastKV} + ); select_and_collect([Field], Input, {Output, _}) -> Val = eval(Field, Input), Key = alias(Field), @@ -184,24 +227,36 @@ select_and_collect([Field], Input, {Output, _}) -> select_and_collect([Field | More], Input, {Output, LastKV}) -> Val = eval(Field, Input), Key = alias(Field), - select_and_collect(More, + select_and_collect( + More, nested_put(Key, Val, Input), - {nested_put(Key, Val, Output), LastKV}). 
+ {nested_put(Key, Val, Output), LastKV} + ). %% Filter each item got from FOREACH filter_collection(Input, InCase, DoEach, {CollKey, CollVal}) -> lists:filtermap( fun(Item) -> InputAndItem = maps:merge(Input, #{CollKey => Item}), - case ?RAISE(match_conditions(InCase, InputAndItem), - {match_incase_error, {_EXCLASS_,_EXCPTION_,_ST_}}) of + case + ?RAISE( + match_conditions(InCase, InputAndItem), + {match_incase_error, {_EXCLASS_, _EXCPTION_, _ST_}} + ) + of true when DoEach == [] -> {true, InputAndItem}; true -> - {true, ?RAISE(select_and_transform(DoEach, InputAndItem), - {doeach_error, {_EXCLASS_,_EXCPTION_,_ST_}})}; - false -> false + {true, + ?RAISE( + select_and_transform(DoEach, InputAndItem), + {doeach_error, {_EXCLASS_, _EXCPTION_, _ST_}} + )}; + false -> + false end - end, CollVal). + end, + CollVal + ). %% Conditional Clauses such as WHERE, WHEN. match_conditions({'and', L, R}, Data) -> @@ -212,7 +267,8 @@ match_conditions({'not', Var}, Data) -> case eval(Var, Data) of Bool when is_boolean(Bool) -> not Bool; - _other -> false + _other -> + false end; match_conditions({in, Var, {list, Vals}}, Data) -> lists:member(eval(Var, Data), [eval(V, Data) || V <- Vals]); @@ -250,8 +306,10 @@ do_compare('!=', L, R) -> L /= R; do_compare('=~', T, F) -> emqx_topic:match(T, F). number(Bin) -> - try binary_to_integer(Bin) - catch error:badarg -> binary_to_float(Bin) + try + binary_to_integer(Bin) + catch + error:badarg -> binary_to_float(Bin) end. 
handle_output_list(RuleId, Outputs, Selected, Envs) -> @@ -266,13 +324,20 @@ handle_output(RuleId, OutId, Selected, Envs) -> catch throw:out_of_service -> ok = emqx_plugin_libs_metrics:inc(rule_metrics, RuleId, 'outputs.failed'), - ok = emqx_plugin_libs_metrics:inc(rule_metrics, RuleId, 'outputs.failed.out_of_service'), + ok = emqx_plugin_libs_metrics:inc( + rule_metrics, RuleId, 'outputs.failed.out_of_service' + ), ?SLOG(warning, #{msg => "out_of_service", output => OutId}); Err:Reason:ST -> ok = emqx_plugin_libs_metrics:inc(rule_metrics, RuleId, 'outputs.failed'), ok = emqx_plugin_libs_metrics:inc(rule_metrics, RuleId, 'outputs.failed.unknown'), - ?SLOG(error, #{msg => "output_failed", output => OutId, exception => Err, - reason => Reason, stacktrace => ST}) + ?SLOG(error, #{ + msg => "output_failed", + output => OutId, + exception => Err, + reason => Reason, + stacktrace => ST + }) end. do_handle_output(BridgeId, Selected, _Envs) when is_binary(BridgeId) -> @@ -280,7 +345,8 @@ do_handle_output(BridgeId, Selected, _Envs) when is_binary(BridgeId) -> case emqx_bridge:send_message(BridgeId, Selected) of {error, {Err, _}} when Err == bridge_not_found; Err == bridge_stopped -> throw(out_of_service); - Result -> Result + Result -> + Result end; do_handle_output(#{mod := Mod, func := Func, args := Args}, Selected, Envs) -> %% the function can also throw 'out_of_service' @@ -382,8 +448,10 @@ apply_func(Name, Args, Input) when is_atom(Name) -> do_apply_func(Name, Args, Input); apply_func(Name, Args, Input) when is_binary(Name) -> FunName = - try binary_to_existing_atom(Name, utf8) - catch error:badarg -> error({sql_function_not_supported, Name}) + try + binary_to_existing_atom(Name, utf8) + catch + error:badarg -> error({sql_function_not_supported, Name}) end, do_apply_func(FunName, Args, Input). 
@@ -391,7 +459,8 @@ do_apply_func(Name, Args, Input) -> case erlang:apply(emqx_rule_funcs, Name, Args) of Func when is_function(Func) -> erlang:apply(Func, [Input]); - Result -> Result + Result -> + Result end. add_metadata(Input, Metadata) when is_map(Input), is_map(Metadata) -> @@ -417,8 +486,10 @@ cache_payload(DecodedP) -> DecodedP. safe_decode_and_cache(MaybeJson) -> - try cache_payload(emqx_json:decode(MaybeJson, [return_maps])) - catch _:_ -> error({decode_json_failed, MaybeJson}) + try + cache_payload(emqx_json:decode(MaybeJson, [return_maps])) + catch + _:_ -> error({decode_json_failed, MaybeJson}) end. ensure_list(List) when is_list(List) -> List; diff --git a/apps/emqx_rule_engine/src/emqx_rule_sqlparser.erl b/apps/emqx_rule_engine/src/emqx_rule_sqlparser.erl index 973614a62..1435d2e60 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_sqlparser.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_sqlparser.erl @@ -20,84 +20,89 @@ -export([parse/1]). --export([ select_fields/1 - , select_is_foreach/1 - , select_doeach/1 - , select_incase/1 - , select_from/1 - , select_where/1 - ]). +-export([ + select_fields/1, + select_is_foreach/1, + select_doeach/1, + select_incase/1, + select_from/1, + select_where/1 +]). --import(proplists, [ get_value/2 - , get_value/3 - ]). +-import(proplists, [ + get_value/2, + get_value/3 +]). -record(select, {fields, from, where, is_foreach, doeach, incase}). --opaque(select() :: #select{}). +-opaque select() :: #select{}. --type const() :: {const, number()|binary()}. +-type const() :: {const, number() | binary()}. -type variable() :: binary() | list(binary()). -type alias() :: binary() | list(binary()). --type field() :: const() | variable() - | {as, field(), alias()} - | {'fun', atom(), list(field())}. +-type field() :: + const() + | variable() + | {as, field(), alias()} + | {'fun', atom(), list(field())}. -export_type([select/0]). %% Parse one select statement. --spec(parse(string() | binary()) -> {ok, select()} | {error, term()}). 
+-spec parse(string() | binary()) -> {ok, select()} | {error, term()}. parse(Sql) -> - try case rulesql:parsetree(Sql) of + try + case rulesql:parsetree(Sql) of {ok, {select, Clauses}} -> {ok, #select{ - is_foreach = false, - fields = get_value(fields, Clauses), - doeach = [], - incase = {}, - from = get_value(from, Clauses), - where = get_value(where, Clauses) - }}; + is_foreach = false, + fields = get_value(fields, Clauses), + doeach = [], + incase = {}, + from = get_value(from, Clauses), + where = get_value(where, Clauses) + }}; {ok, {foreach, Clauses}} -> {ok, #select{ - is_foreach = true, - fields = get_value(fields, Clauses), - doeach = get_value(do, Clauses, []), - incase = get_value(incase, Clauses, {}), - from = get_value(from, Clauses), - where = get_value(where, Clauses) - }}; - Error -> {error, Error} + is_foreach = true, + fields = get_value(fields, Clauses), + doeach = get_value(do, Clauses, []), + incase = get_value(incase, Clauses, {}), + from = get_value(from, Clauses), + where = get_value(where, Clauses) + }}; + Error -> + {error, Error} end catch _Error:Reason:StackTrace -> {error, {Reason, StackTrace}} end. --spec(select_fields(select()) -> list(field())). +-spec select_fields(select()) -> list(field()). select_fields(#select{fields = Fields}) -> Fields. --spec(select_is_foreach(select()) -> boolean()). +-spec select_is_foreach(select()) -> boolean(). select_is_foreach(#select{is_foreach = IsForeach}) -> IsForeach. --spec(select_doeach(select()) -> list(field())). +-spec select_doeach(select()) -> list(field()). select_doeach(#select{doeach = DoEach}) -> DoEach. --spec(select_incase(select()) -> list(field())). +-spec select_incase(select()) -> list(field()). select_incase(#select{incase = InCase}) -> InCase. --spec(select_from(select()) -> list(binary())). +-spec select_from(select()) -> list(binary()). select_from(#select{from = From}) -> From. --spec(select_where(select()) -> tuple()). +-spec select_where(select()) -> tuple(). 
select_where(#select{where = Where}) -> Where. - diff --git a/apps/emqx_rule_engine/src/emqx_rule_sqltester.erl b/apps/emqx_rule_engine/src/emqx_rule_sqltester.erl index a791863f6..cc545a96e 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_sqltester.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_sqltester.erl @@ -17,10 +17,11 @@ -include("rule_engine.hrl"). -include_lib("emqx/include/logger.hrl"). --export([ test/1 - , echo_action/2 - , get_selected_data/3 - ]). +-export([ + test/1, + echo_action/2, + get_selected_data/3 +]). -spec test(#{sql := binary(), context := map()}) -> {ok, map() | list()} | {error, term()}. test(#{sql := Sql, context := Context}) -> @@ -60,9 +61,7 @@ test_rule(Sql, Select, Context, EventTopics) -> created_at => erlang:system_time(millisecond) }, FullContext = fill_default_values(hd(EventTopics), emqx_rule_maps:atom_key_map(Context)), - try - emqx_rule_runtime:apply_rule(Rule, FullContext) - of + try emqx_rule_runtime:apply_rule(Rule, FullContext) of {ok, Data} -> {ok, flatten(Data)}; {error, Reason} -> {error, Reason} after @@ -76,8 +75,10 @@ is_publish_topic(<<"$events/", _/binary>>) -> false; is_publish_topic(<<"$bridges/", _/binary>>) -> false; is_publish_topic(_Topic) -> true. -flatten([]) -> []; -flatten([D1]) -> D1; +flatten([]) -> + []; +flatten([D1]) -> + D1; flatten([D1 | L]) when is_list(D1) -> D1 ++ flatten(L). @@ -92,4 +93,6 @@ envs_examp(EventTopic) -> EventName = emqx_rule_events:event_name(EventTopic), emqx_rule_maps:atom_key_map( maps:from_list( - emqx_rule_events:columns_with_exam(EventName))). + emqx_rule_events:columns_with_exam(EventName) + ) + ). diff --git a/apps/emqx_rule_engine/src/proto/emqx_rule_engine_proto_v1.erl b/apps/emqx_rule_engine/src/proto/emqx_rule_engine_proto_v1.erl index cc3a79a74..501a1d05c 100644 --- a/apps/emqx_rule_engine/src/proto/emqx_rule_engine_proto_v1.erl +++ b/apps/emqx_rule_engine/src/proto/emqx_rule_engine_proto_v1.erl @@ -18,10 +18,11 @@ -behaviour(emqx_bpapi). 
--export([ introduced_in/0 +-export([ + introduced_in/0, - , reset_metrics/1 - ]). + reset_metrics/1 +]). -include_lib("emqx/include/bpapi.hrl"). -include_lib("emqx_rule_engine/include/rule_engine.hrl"). @@ -30,6 +31,6 @@ introduced_in() -> "5.0.0". -spec reset_metrics(rule_id()) -> - emqx_cluster_rpc:multicall_return(ok). + emqx_cluster_rpc:multicall_return(ok). reset_metrics(RuleId) -> emqx_cluster_rpc:multicall(emqx_rule_engine, reset_metrics_for_rule, [RuleId]). diff --git a/apps/emqx_rule_engine/test/emqx_rule_engine_SUITE.erl b/apps/emqx_rule_engine/test/emqx_rule_engine_SUITE.erl index 0abd47a59..af19a24f1 100644 --- a/apps/emqx_rule_engine/test/emqx_rule_engine_SUITE.erl +++ b/apps/emqx_rule_engine/test/emqx_rule_engine_SUITE.erl @@ -29,75 +29,71 @@ -define(TMP_RULEID, atom_to_binary(?FUNCTION_NAME)). all() -> - [ {group, engine} - , {group, funcs} - , {group, registry} - , {group, runtime} - , {group, events} - , {group, bugs} + [ + {group, engine}, + {group, funcs}, + {group, registry}, + {group, runtime}, + {group, events}, + {group, bugs} ]. suite() -> [{ct_hooks, [cth_surefire]}, {timetrap, {seconds, 30}}]. 
groups() -> - [{engine, [sequence], - [t_create_rule - ]}, - {funcs, [], - [t_kv_store - ]}, - {registry, [sequence], - [t_add_get_remove_rule, - t_add_get_remove_rules, - t_create_existing_rule, - t_get_rules_for_topic, - t_get_rules_for_topic_2, - t_get_rules_with_same_event - ]}, - {runtime, [], - [t_match_atom_and_binary, - t_sqlselect_0, - t_sqlselect_00, - t_sqlselect_01, - t_sqlselect_02, - t_sqlselect_1, - t_sqlselect_2, - t_sqlselect_3, - t_sqlparse_event_1, - t_sqlparse_event_2, - t_sqlparse_event_3, - t_sqlparse_foreach_1, - t_sqlparse_foreach_2, - t_sqlparse_foreach_3, - t_sqlparse_foreach_4, - t_sqlparse_foreach_5, - t_sqlparse_foreach_6, - t_sqlparse_foreach_7, - t_sqlparse_foreach_8, - t_sqlparse_case_when_1, - t_sqlparse_case_when_2, - t_sqlparse_case_when_3, - t_sqlparse_array_index_1, - t_sqlparse_array_index_2, - t_sqlparse_array_index_3, - t_sqlparse_array_index_4, - t_sqlparse_array_index_5, - t_sqlparse_select_matadata_1, - t_sqlparse_array_range_1, - t_sqlparse_array_range_2, - t_sqlparse_true_false, - t_sqlparse_undefined_variable, - t_sqlparse_new_map, - t_sqlparse_invalid_json - ]}, - {events, [], - [t_events - ]}, - {bugs, [], - [t_sqlparse_payload_as, - t_sqlparse_nested_get - ]} + [ + {engine, [sequence], [t_create_rule]}, + {funcs, [], [t_kv_store]}, + {registry, [sequence], [ + t_add_get_remove_rule, + t_add_get_remove_rules, + t_create_existing_rule, + t_get_rules_for_topic, + t_get_rules_for_topic_2, + t_get_rules_with_same_event + ]}, + {runtime, [], [ + t_match_atom_and_binary, + t_sqlselect_0, + t_sqlselect_00, + t_sqlselect_01, + t_sqlselect_02, + t_sqlselect_1, + t_sqlselect_2, + t_sqlselect_3, + t_sqlparse_event_1, + t_sqlparse_event_2, + t_sqlparse_event_3, + t_sqlparse_foreach_1, + t_sqlparse_foreach_2, + t_sqlparse_foreach_3, + t_sqlparse_foreach_4, + t_sqlparse_foreach_5, + t_sqlparse_foreach_6, + t_sqlparse_foreach_7, + t_sqlparse_foreach_8, + t_sqlparse_case_when_1, + t_sqlparse_case_when_2, + t_sqlparse_case_when_3, + 
t_sqlparse_array_index_1, + t_sqlparse_array_index_2, + t_sqlparse_array_index_3, + t_sqlparse_array_index_4, + t_sqlparse_array_index_5, + t_sqlparse_select_matadata_1, + t_sqlparse_array_range_1, + t_sqlparse_array_range_2, + t_sqlparse_true_false, + t_sqlparse_undefined_variable, + t_sqlparse_new_map, + t_sqlparse_invalid_json + ]}, + {events, [], [t_events]}, + {bugs, [], [ + t_sqlparse_payload_as, + t_sqlparse_nested_get + ]} ]. %%------------------------------------------------------------------------------ @@ -107,8 +103,9 @@ groups() -> init_per_suite(Config) -> application:load(emqx_conf), ok = emqx_common_test_helpers:start_apps( - [emqx_conf, emqx_rule_engine, emqx_authz], - fun set_special_configs/1), + [emqx_conf, emqx_rule_engine, emqx_authz], + fun set_special_configs/1 + ), Config. end_per_suite(_Config) -> @@ -117,10 +114,13 @@ end_per_suite(_Config) -> set_special_configs(emqx_authz) -> {ok, _} = emqx:update_config( - [authorization], - #{<<"no_match">> => atom_to_binary(allow), - <<"cache">> => #{<<"enable">> => atom_to_binary(true)}, - <<"sources">> => []}), + [authorization], + #{ + <<"no_match">> => atom_to_binary(allow), + <<"cache">> => #{<<"enable">> => atom_to_binary(true)}, + <<"sources">> => [] + } + ), ok; set_special_configs(_) -> ok. 
@@ -149,25 +149,31 @@ end_per_group(_Groupname, _Config) -> init_per_testcase(t_events, Config) -> init_events_counters(), - SQL = "SELECT * FROM \"$events/client_connected\", " - "\"$events/client_disconnected\", " - "\"$events/client_connack\", " - "\"$events/client_check_authz_complete\", " - "\"$events/session_subscribed\", " - "\"$events/session_unsubscribed\", " - "\"$events/message_acked\", " - "\"$events/message_delivered\", " - "\"$events/message_dropped\", " - "\"$events/delivery_dropped\", " - "\"t1\"", + SQL = + "SELECT * FROM \"$events/client_connected\", " + "\"$events/client_disconnected\", " + "\"$events/client_connack\", " + "\"$events/client_check_authz_complete\", " + "\"$events/session_subscribed\", " + "\"$events/session_unsubscribed\", " + "\"$events/message_acked\", " + "\"$events/message_delivered\", " + "\"$events/message_dropped\", " + "\"$events/delivery_dropped\", " + "\"t1\"", {ok, Rule} = emqx_rule_engine:create_rule( - #{id => <<"rule:t_events">>, + #{ + id => <<"rule:t_events">>, sql => SQL, outputs => [ - #{function => <<"emqx_rule_engine_SUITE:output_record_triggered_events">>, - args => #{}} + #{ + function => <<"emqx_rule_engine_SUITE:output_record_triggered_events">>, + args => #{} + } ], - description => <<"to console and record triggered events">>}), + description => <<"to console and record triggered events">> + } + ), ?assertMatch(#{id := <<"rule:t_events">>}, Rule), [{hook_points_rules, Rule} | Config]; init_per_testcase(_TestCase, Config) -> @@ -184,13 +190,18 @@ end_per_testcase(_TestCase, _Config) -> %%------------------------------------------------------------------------------ t_create_rule(_Config) -> {ok, #{id := Id}} = emqx_rule_engine:create_rule( - #{sql => <<"select * from \"t/a\"">>, - id => <<"t_create_rule">>, - outputs => [#{function => console}], - description => <<"debug rule">>}), + #{ + sql => <<"select * from \"t/a\"">>, + id => <<"t_create_rule">>, + outputs => [#{function => console}], + description 
=> <<"debug rule">> + } + ), ct:pal("======== emqx_rule_engine:get_rules :~p", [emqx_rule_engine:get_rules()]), - ?assertMatch({ok, #{id := Id, from := [<<"t/a">>]}}, - emqx_rule_engine:get_rule(Id)), + ?assertMatch( + {ok, #{id := Id, from := [<<"t/a">>]}}, + emqx_rule_engine:get_rule(Id) + ), delete_rule(Id), ok. @@ -228,8 +239,11 @@ t_add_get_remove_rule(_Config) -> t_add_get_remove_rules(_Config) -> delete_rules_by_ids(emqx_rule_engine:get_rules()), ok = insert_rules( - [make_simple_rule(<<"rule-debug-1">>), - make_simple_rule(<<"rule-debug-2">>)]), + [ + make_simple_rule(<<"rule-debug-1">>), + make_simple_rule(<<"rule-debug-2">>) + ] + ), ?assertEqual(2, length(emqx_rule_engine:get_rules())), ok = delete_rules_by_ids([<<"rule-debug-1">>, <<"rule-debug-2">>]), ?assertEqual([], emqx_rule_engine:get_rules()), @@ -238,10 +252,12 @@ t_add_get_remove_rules(_Config) -> t_create_existing_rule(_Config) -> %% create a rule using given rule id {ok, _} = emqx_rule_engine:create_rule( - #{id => <<"an_existing_rule">>, - sql => <<"select * from \"t/#\"">>, - outputs => [#{function => console}] - }), + #{ + id => <<"an_existing_rule">>, + sql => <<"select * from \"t/#\"">>, + outputs => [#{function => console}] + } + ), {ok, #{sql := SQL}} = emqx_rule_engine:get_rule(<<"an_existing_rule">>), ?assertEqual(<<"select * from \"t/#\"">>, SQL), @@ -252,40 +268,64 @@ t_create_existing_rule(_Config) -> t_get_rules_for_topic(_Config) -> Len0 = length(emqx_rule_engine:get_rules_for_topic(<<"simple/topic">>)), ok = insert_rules( - [make_simple_rule(<<"rule-debug-1">>), - make_simple_rule(<<"rule-debug-2">>)]), - ?assertEqual(Len0+2, length(emqx_rule_engine:get_rules_for_topic(<<"simple/topic">>))), + [ + make_simple_rule(<<"rule-debug-1">>), + make_simple_rule(<<"rule-debug-2">>) + ] + ), + ?assertEqual(Len0 + 2, length(emqx_rule_engine:get_rules_for_topic(<<"simple/topic">>))), ok = delete_rules_by_ids([<<"rule-debug-1">>, <<"rule-debug-2">>]), ok. 
t_get_rules_ordered_by_ts(_Config) -> Now = fun() -> erlang:system_time(nanosecond) end, ok = insert_rules( - [make_simple_rule_with_ts(<<"rule-debug-0">>, Now()), - make_simple_rule_with_ts(<<"rule-debug-1">>, Now()), - make_simple_rule_with_ts(<<"rule-debug-2">>, Now()) - ]), - ?assertMatch([ - #{id := <<"rule-debug-0">>}, - #{id := <<"rule-debug-1">>}, - #{id := <<"rule-debug-2">>} - ], emqx_rule_engine:get_rules_ordered_by_ts()). + [ + make_simple_rule_with_ts(<<"rule-debug-0">>, Now()), + make_simple_rule_with_ts(<<"rule-debug-1">>, Now()), + make_simple_rule_with_ts(<<"rule-debug-2">>, Now()) + ] + ), + ?assertMatch( + [ + #{id := <<"rule-debug-0">>}, + #{id := <<"rule-debug-1">>}, + #{id := <<"rule-debug-2">>} + ], + emqx_rule_engine:get_rules_ordered_by_ts() + ). t_get_rules_for_topic_2(_Config) -> Len0 = length(emqx_rule_engine:get_rules_for_topic(<<"simple/1">>)), ok = insert_rules( - [make_simple_rule(<<"rule-debug-1">>, <<"select * from \"simple/#\"">>, [<<"simple/#">>]), - make_simple_rule(<<"rule-debug-2">>, <<"select * from \"simple/+\"">>, [<<"simple/+">>]), - make_simple_rule(<<"rule-debug-3">>, <<"select * from \"simple/+/1\"">>, [<<"simple/+/1">>]), - make_simple_rule(<<"rule-debug-4">>, <<"select * from \"simple/1\"">>, [<<"simple/1">>]), - make_simple_rule(<<"rule-debug-5">>, <<"select * from \"simple/2,simple/+,simple/3\"">>, - [<<"simple/2">>,<<"simple/+">>, <<"simple/3">>]), - make_simple_rule(<<"rule-debug-6">>, <<"select * from \"simple/2,simple/3,simple/4\"">>, - [<<"simple/2">>,<<"simple/3">>, <<"simple/4">>]) - ]), - ?assertEqual(Len0+4, length(emqx_rule_engine:get_rules_for_topic(<<"simple/1">>))), - ok = delete_rules_by_ids([<<"rule-debug-1">>, <<"rule-debug-2">>,<<"rule-debug-3">>, - <<"rule-debug-4">>,<<"rule-debug-5">>, <<"rule-debug-6">>]), + [ + make_simple_rule(<<"rule-debug-1">>, <<"select * from \"simple/#\"">>, [<<"simple/#">>]), + make_simple_rule(<<"rule-debug-2">>, <<"select * from \"simple/+\"">>, [<<"simple/+">>]), + 
make_simple_rule(<<"rule-debug-3">>, <<"select * from \"simple/+/1\"">>, [ + <<"simple/+/1">> + ]), + make_simple_rule(<<"rule-debug-4">>, <<"select * from \"simple/1\"">>, [<<"simple/1">>]), + make_simple_rule( + <<"rule-debug-5">>, + <<"select * from \"simple/2,simple/+,simple/3\"">>, + [<<"simple/2">>, <<"simple/+">>, <<"simple/3">>] + ), + make_simple_rule( + <<"rule-debug-6">>, + <<"select * from \"simple/2,simple/3,simple/4\"">>, + [<<"simple/2">>, <<"simple/3">>, <<"simple/4">>] + ) + ] + ), + ?assertEqual(Len0 + 4, length(emqx_rule_engine:get_rules_for_topic(<<"simple/1">>))), + ok = delete_rules_by_ids([ + <<"rule-debug-1">>, + <<"rule-debug-2">>, + <<"rule-debug-3">>, + <<"rule-debug-4">>, + <<"rule-debug-5">>, + <<"rule-debug-6">> + ]), ok. t_get_rules_with_same_event(_Config) -> @@ -294,41 +334,95 @@ t_get_rules_with_same_event(_Config) -> ?assertEqual([], emqx_rule_engine:get_rules_with_same_event(<<"$events/client_connected">>)), ?assertEqual([], emqx_rule_engine:get_rules_with_same_event(<<"$events/client_disconnected">>)), ?assertEqual([], emqx_rule_engine:get_rules_with_same_event(<<"$events/session_subscribed">>)), - ?assertEqual([], emqx_rule_engine:get_rules_with_same_event(<<"$events/session_unsubscribed">>)), + ?assertEqual( + [], emqx_rule_engine:get_rules_with_same_event(<<"$events/session_unsubscribed">>) + ), ?assertEqual([], emqx_rule_engine:get_rules_with_same_event(<<"$events/message_delivered">>)), ?assertEqual([], emqx_rule_engine:get_rules_with_same_event(<<"$events/message_acked">>)), ?assertEqual([], emqx_rule_engine:get_rules_with_same_event(<<"$events/message_dropped">>)), ok = insert_rules( - [make_simple_rule(<<"r1">>, <<"select * from \"simple/#\"">>, [<<"simple/#">>]), - make_simple_rule(<<"r2">>, <<"select * from \"abc/+\"">>, [<<"abc/+">>]), - make_simple_rule(<<"r3">>, <<"select * from \"$events/client_connected\"">>, - [<<"$events/client_connected">>]), - make_simple_rule(<<"r4">>, <<"select * from 
\"$events/client_disconnected\"">>, - [<<"$events/client_disconnected">>]), - make_simple_rule(<<"r5">>, <<"select * from \"$events/session_subscribed\"">>, - [<<"$events/session_subscribed">>]), - make_simple_rule(<<"r6">>, <<"select * from \"$events/session_unsubscribed\"">>, - [<<"$events/session_unsubscribed">>]), - make_simple_rule(<<"r7">>, <<"select * from \"$events/message_delivered\"">>, - [<<"$events/message_delivered">>]), - make_simple_rule(<<"r8">>, <<"select * from \"$events/message_acked\"">>, - [<<"$events/message_acked">>]), - make_simple_rule(<<"r9">>, <<"select * from \"$events/message_dropped\"">>, - [<<"$events/message_dropped">>]), - make_simple_rule(<<"r10">>, <<"select * from \"t/1, " - "$events/session_subscribed, $events/client_connected\"">>, - [<<"t/1">>, <<"$events/session_subscribed">>, <<"$events/client_connected">>]) - ]), + [ + make_simple_rule(<<"r1">>, <<"select * from \"simple/#\"">>, [<<"simple/#">>]), + make_simple_rule(<<"r2">>, <<"select * from \"abc/+\"">>, [<<"abc/+">>]), + make_simple_rule( + <<"r3">>, + <<"select * from \"$events/client_connected\"">>, + [<<"$events/client_connected">>] + ), + make_simple_rule( + <<"r4">>, + <<"select * from \"$events/client_disconnected\"">>, + [<<"$events/client_disconnected">>] + ), + make_simple_rule( + <<"r5">>, + <<"select * from \"$events/session_subscribed\"">>, + [<<"$events/session_subscribed">>] + ), + make_simple_rule( + <<"r6">>, + <<"select * from \"$events/session_unsubscribed\"">>, + [<<"$events/session_unsubscribed">>] + ), + make_simple_rule( + <<"r7">>, + <<"select * from \"$events/message_delivered\"">>, + [<<"$events/message_delivered">>] + ), + make_simple_rule( + <<"r8">>, + <<"select * from \"$events/message_acked\"">>, + [<<"$events/message_acked">>] + ), + make_simple_rule( + <<"r9">>, + <<"select * from \"$events/message_dropped\"">>, + [<<"$events/message_dropped">>] + ), + make_simple_rule( + <<"r10">>, + << + "select * from \"t/1, " + 
"$events/session_subscribed, $events/client_connected\"" + >>, + [<<"t/1">>, <<"$events/session_subscribed">>, <<"$events/client_connected">>] + ) + ] + ), ?assertEqual(PubN + 3, length(emqx_rule_engine:get_rules_with_same_event(PubT))), - ?assertEqual(2, length(emqx_rule_engine:get_rules_with_same_event(<<"$events/client_connected">>))), - ?assertEqual(1, length(emqx_rule_engine:get_rules_with_same_event(<<"$events/client_disconnected">>))), - ?assertEqual(2, length(emqx_rule_engine:get_rules_with_same_event(<<"$events/session_subscribed">>))), - ?assertEqual(1, length(emqx_rule_engine:get_rules_with_same_event(<<"$events/session_unsubscribed">>))), - ?assertEqual(1, length(emqx_rule_engine:get_rules_with_same_event(<<"$events/message_delivered">>))), - ?assertEqual(1, length(emqx_rule_engine:get_rules_with_same_event(<<"$events/message_acked">>))), - ?assertEqual(1, length(emqx_rule_engine:get_rules_with_same_event(<<"$events/message_dropped">>))), - ok = delete_rules_by_ids([<<"r1">>, <<"r2">>,<<"r3">>, <<"r4">>, - <<"r5">>, <<"r6">>, <<"r7">>, <<"r8">>, <<"r9">>, <<"r10">>]), + ?assertEqual( + 2, length(emqx_rule_engine:get_rules_with_same_event(<<"$events/client_connected">>)) + ), + ?assertEqual( + 1, length(emqx_rule_engine:get_rules_with_same_event(<<"$events/client_disconnected">>)) + ), + ?assertEqual( + 2, length(emqx_rule_engine:get_rules_with_same_event(<<"$events/session_subscribed">>)) + ), + ?assertEqual( + 1, length(emqx_rule_engine:get_rules_with_same_event(<<"$events/session_unsubscribed">>)) + ), + ?assertEqual( + 1, length(emqx_rule_engine:get_rules_with_same_event(<<"$events/message_delivered">>)) + ), + ?assertEqual( + 1, length(emqx_rule_engine:get_rules_with_same_event(<<"$events/message_acked">>)) + ), + ?assertEqual( + 1, length(emqx_rule_engine:get_rules_with_same_event(<<"$events/message_dropped">>)) + ), + ok = delete_rules_by_ids([ + <<"r1">>, + <<"r2">>, + <<"r3">>, + <<"r4">>, + <<"r5">>, + <<"r6">>, + <<"r7">>, + <<"r8">>, + 
<<"r9">>, + <<"r10">> + ]), ok. %%------------------------------------------------------------------------------ @@ -337,17 +431,21 @@ t_get_rules_with_same_event(_Config) -> t_events(_Config) -> {ok, Client} = emqtt:start_link( - [ {username, <<"u_event">>} - , {clientid, <<"c_event">>} - , {proto_ver, v5} - , {properties, #{'Session-Expiry-Interval' => 60}} - ]), + [ + {username, <<"u_event">>}, + {clientid, <<"c_event">>}, + {proto_ver, v5}, + {properties, #{'Session-Expiry-Interval' => 60}} + ] + ), {ok, Client2} = emqtt:start_link( - [ {username, <<"u_event2">>} - , {clientid, <<"c_event2">>} - , {proto_ver, v5} - , {properties, #{'Session-Expiry-Interval' => 60}} - ]), + [ + {username, <<"u_event2">>}, + {clientid, <<"c_event2">>}, + {proto_ver, v5}, + {properties, #{'Session-Expiry-Interval' => 60}} + ] + ), ct:pal("====== verify $events/client_connected, $events/client_connack"), client_connected(Client, Client2), ct:pal("====== verify $events/message_dropped"), @@ -372,15 +470,20 @@ t_events(_Config) -> client_connack_failed() -> {ok, Client} = emqtt:start_link( - [ {username, <<"u_event3">>} - , {clientid, <<"c_event3">>} - , {proto_ver, v5} - , {properties, #{'Session-Expiry-Interval' => 60}} - ]), + [ + {username, <<"u_event3">>}, + {clientid, <<"c_event3">>}, + {proto_ver, v5}, + {properties, #{'Session-Expiry-Interval' => 60}} + ] + ), try meck:new(emqx_access_control, [non_strict, passthrough]), - meck:expect(emqx_access_control, authenticate, - fun(_) -> {error, bad_username_or_password} end), + meck:expect( + emqx_access_control, + authenticate, + fun(_) -> {error, bad_username_or_password} end + ), process_flag(trap_exit, true), ?assertMatch({error, _}, emqtt:connect(Client)), timer:sleep(300), @@ -391,8 +494,13 @@ client_connack_failed() -> ok. 
message_publish(Client) -> - emqtt:publish(Client, <<"t1">>, #{'Message-Expiry-Interval' => 60}, - <<"{\"id\": 1, \"name\": \"ha\"}">>, [{qos, 1}]), + emqtt:publish( + Client, + <<"t1">>, + #{'Message-Expiry-Interval' => 60}, + <<"{\"id\": 1, \"name\": \"ha\"}">>, + [{qos, 1}] + ), verify_event('message.publish'), ok. client_connected(Client, Client2) -> @@ -407,12 +515,16 @@ client_disconnected(Client, Client2) -> verify_event('client.disconnected'), ok. session_subscribed(Client2) -> - {ok, _, _} = emqtt:subscribe(Client2, #{'User-Property' => {<<"topic_name">>, <<"t1">>}}, <<"t1">>, 1), + {ok, _, _} = emqtt:subscribe( + Client2, #{'User-Property' => {<<"topic_name">>, <<"t1">>}}, <<"t1">>, 1 + ), verify_event('session.subscribed'), verify_event('client.check_authz_complete'), ok. session_unsubscribed(Client2) -> - {ok, _, _} = emqtt:unsubscribe(Client2, #{'User-Property' => {<<"topic_name">>, <<"t1">>}}, <<"t1">>), + {ok, _, _} = emqtt:unsubscribe( + Client2, #{'User-Property' => {<<"topic_name">>, <<"t1">>}}, <<"t1">> + ), verify_event('session.unsubscribed'), ok. @@ -437,23 +549,29 @@ message_acked(_Client) -> ok. 
t_match_atom_and_binary(_Config) -> - SQL = "SELECT connected_at as ts, * " + SQL = + "SELECT connected_at as ts, * " "FROM \"$events/client_connected\" " "WHERE username = 'emqx2' ", Repub = republish_output(<<"t2">>, <<"user:${ts}">>), {ok, TopicRule} = emqx_rule_engine:create_rule( - #{sql => SQL, id => ?TMP_RULEID, - outputs => [Repub]}), + #{ + sql => SQL, + id => ?TMP_RULEID, + outputs => [Repub] + } + ), {ok, Client} = emqtt:start_link([{username, <<"emqx1">>}]), {ok, _} = emqtt:connect(Client), {ok, _, _} = emqtt:subscribe(Client, <<"t2">>, 0), ct:sleep(100), {ok, Client2} = emqtt:start_link([{username, <<"emqx2">>}]), {ok, _} = emqtt:connect(Client2), - receive {publish, #{topic := T, payload := Payload}} -> - ?assertEqual(<<"t2">>, T), - <<"user:", ConnAt/binary>> = Payload, - _ = binary_to_integer(ConnAt) + receive + {publish, #{topic := T, payload := Payload}} -> + ?assertEqual(<<"t2">>, T), + <<"user:", ConnAt/binary>> = Payload, + _ = binary_to_integer(ConnAt) after 1000 -> ct:fail(wait_for_t2) end, @@ -463,116 +581,192 @@ t_match_atom_and_binary(_Config) -> t_sqlselect_0(_Config) -> %% Verify SELECT with and without 'AS' - Sql = "select * " - "from \"t/#\" " - "where payload.cmd.info = 'tt'", - ?assertMatch({ok,#{payload := <<"{\"cmd\": {\"info\":\"tt\"}}">>}}, - emqx_rule_sqltester:test( - #{sql => Sql, - context => - #{payload => + Sql = + "select * " + "from \"t/#\" " + "where payload.cmd.info = 'tt'", + ?assertMatch( + {ok, #{payload := <<"{\"cmd\": {\"info\":\"tt\"}}">>}}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => + #{ + payload => <<"{\"cmd\": {\"info\":\"tt\"}}">>, - topic => <<"t/a">>}})), - Sql2 = "select payload.cmd as cmd " - "from \"t/#\" " - "where cmd.info = 'tt'", - ?assertMatch({ok,#{<<"cmd">> := #{<<"info">> := <<"tt">>}}}, - emqx_rule_sqltester:test( - #{sql => Sql2, - context => - #{payload => + topic => <<"t/a">> + } + } + ) + ), + Sql2 = + "select payload.cmd as cmd " + "from \"t/#\" " + "where cmd.info = 'tt'", 
+ ?assertMatch( + {ok, #{<<"cmd">> := #{<<"info">> := <<"tt">>}}}, + emqx_rule_sqltester:test( + #{ + sql => Sql2, + context => + #{ + payload => <<"{\"cmd\": {\"info\":\"tt\"}}">>, - topic => <<"t/a">>}})), - Sql3 = "select payload.cmd as cmd, cmd.info as info " - "from \"t/#\" " - "where cmd.info = 'tt' and info = 'tt'", - ?assertMatch({ok,#{<<"cmd">> := #{<<"info">> := <<"tt">>}, - <<"info">> := <<"tt">>}}, - emqx_rule_sqltester:test( - #{sql => Sql3, - context => - #{payload => + topic => <<"t/a">> + } + } + ) + ), + Sql3 = + "select payload.cmd as cmd, cmd.info as info " + "from \"t/#\" " + "where cmd.info = 'tt' and info = 'tt'", + ?assertMatch( + {ok, #{ + <<"cmd">> := #{<<"info">> := <<"tt">>}, + <<"info">> := <<"tt">> + }}, + emqx_rule_sqltester:test( + #{ + sql => Sql3, + context => + #{ + payload => <<"{\"cmd\": {\"info\":\"tt\"}}">>, - topic => <<"t/a">>}})), + topic => <<"t/a">> + } + } + ) + ), %% cascaded as - Sql4 = "select payload.cmd as cmd, cmd.info as meta.info " - "from \"t/#\" " - "where cmd.info = 'tt' and meta.info = 'tt'", - ?assertMatch({ok,#{<<"cmd">> := #{<<"info">> := <<"tt">>}, - <<"meta">> := #{<<"info">> := <<"tt">>}}}, - emqx_rule_sqltester:test( - #{sql => Sql4, - context => - #{payload => + Sql4 = + "select payload.cmd as cmd, cmd.info as meta.info " + "from \"t/#\" " + "where cmd.info = 'tt' and meta.info = 'tt'", + ?assertMatch( + {ok, #{ + <<"cmd">> := #{<<"info">> := <<"tt">>}, + <<"meta">> := #{<<"info">> := <<"tt">>} + }}, + emqx_rule_sqltester:test( + #{ + sql => Sql4, + context => + #{ + payload => <<"{\"cmd\": {\"info\":\"tt\"}}">>, - topic => <<"t/a">>}})). + topic => <<"t/a">> + } + } + ) + ). 
t_sqlselect_00(_Config) -> %% Verify plus/subtract and unary_add_or_subtract - Sql = "select 1-1 as a " - "from \"t/#\" ", - ?assertMatch({ok,#{<<"a">> := 0}}, - emqx_rule_sqltester:test( - #{sql => Sql, - context => - #{payload => <<"">>, - topic => <<"t/a">>}})), - Sql1 = "select -1 + 1 as a " - "from \"t/#\" ", - ?assertMatch({ok,#{<<"a">> := 0}}, - emqx_rule_sqltester:test( - #{sql => Sql1, - context => - #{payload => <<"">>, - topic => <<"t/a">>}})), - Sql2 = "select 1 + 1 as a " - "from \"t/#\" ", - ?assertMatch({ok,#{<<"a">> := 2}}, - emqx_rule_sqltester:test( - #{sql => Sql2, - context => - #{payload => <<"">>, - topic => <<"t/a">>}})), - Sql3 = "select +1 as a " - "from \"t/#\" ", - ?assertMatch({ok,#{<<"a">> := 1}}, - emqx_rule_sqltester:test( - #{sql => Sql3, - context => - #{payload => <<"">>, - topic => <<"t/a">>}})). + Sql = + "select 1-1 as a " + "from \"t/#\" ", + ?assertMatch( + {ok, #{<<"a">> := 0}}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => + #{ + payload => <<"">>, + topic => <<"t/a">> + } + } + ) + ), + Sql1 = + "select -1 + 1 as a " + "from \"t/#\" ", + ?assertMatch( + {ok, #{<<"a">> := 0}}, + emqx_rule_sqltester:test( + #{ + sql => Sql1, + context => + #{ + payload => <<"">>, + topic => <<"t/a">> + } + } + ) + ), + Sql2 = + "select 1 + 1 as a " + "from \"t/#\" ", + ?assertMatch( + {ok, #{<<"a">> := 2}}, + emqx_rule_sqltester:test( + #{ + sql => Sql2, + context => + #{ + payload => <<"">>, + topic => <<"t/a">> + } + } + ) + ), + Sql3 = + "select +1 as a " + "from \"t/#\" ", + ?assertMatch( + {ok, #{<<"a">> := 1}}, + emqx_rule_sqltester:test( + #{ + sql => Sql3, + context => + #{ + payload => <<"">>, + topic => <<"t/a">> + } + } + ) + ). 
t_sqlselect_01(_Config) -> - SQL = "SELECT json_decode(payload) as p, payload " + SQL = + "SELECT json_decode(payload) as p, payload " "FROM \"t3/#\", \"t1\" " "WHERE p.x = 1", Repub = republish_output(<<"t2">>), {ok, TopicRule1} = emqx_rule_engine:create_rule( - #{sql => SQL, id => ?TMP_RULEID, - outputs => [Repub]}), + #{ + sql => SQL, + id => ?TMP_RULEID, + outputs => [Repub] + } + ), {ok, Client} = emqtt:start_link([{username, <<"emqx">>}]), {ok, _} = emqtt:connect(Client), {ok, _, _} = emqtt:subscribe(Client, <<"t2">>, 0), emqtt:publish(Client, <<"t1">>, <<"{\"x\":1}">>, 0), ct:sleep(100), - receive {publish, #{topic := T, payload := Payload}} -> - ?assertEqual(<<"t2">>, T), - ?assertEqual(<<"{\"x\":1}">>, Payload) + receive + {publish, #{topic := T, payload := Payload}} -> + ?assertEqual(<<"t2">>, T), + ?assertEqual(<<"{\"x\":1}">>, Payload) after 1000 -> ct:fail(wait_for_t2) end, emqtt:publish(Client, <<"t1">>, <<"{\"x\":2}">>, 0), - receive {publish, #{topic := <<"t2">>, payload := _}} -> - ct:fail(unexpected_t2) + receive + {publish, #{topic := <<"t2">>, payload := _}} -> + ct:fail(unexpected_t2) after 1000 -> ok end, emqtt:publish(Client, <<"t3/a">>, <<"{\"x\":1}">>, 0), - receive {publish, #{topic := T3, payload := Payload3}} -> - ?assertEqual(<<"t2">>, T3), - ?assertEqual(<<"{\"x\":1}">>, Payload3) + receive + {publish, #{topic := T3, payload := Payload3}} -> + ?assertEqual(<<"t2">>, T3), + ?assertEqual(<<"{\"x\":1}">>, Payload3) after 1000 -> ct:fail(wait_for_t2) end, @@ -581,36 +775,44 @@ t_sqlselect_01(_Config) -> delete_rule(TopicRule1). 
t_sqlselect_02(_Config) -> - SQL = "SELECT * " + SQL = + "SELECT * " "FROM \"t3/#\", \"t1\" " "WHERE payload.x = 1", Repub = republish_output(<<"t2">>), {ok, TopicRule1} = emqx_rule_engine:create_rule( - #{sql => SQL, id => ?TMP_RULEID, - outputs => [Repub]}), + #{ + sql => SQL, + id => ?TMP_RULEID, + outputs => [Repub] + } + ), {ok, Client} = emqtt:start_link([{username, <<"emqx">>}]), {ok, _} = emqtt:connect(Client), {ok, _, _} = emqtt:subscribe(Client, <<"t2">>, 0), emqtt:publish(Client, <<"t1">>, <<"{\"x\":1}">>, 0), ct:sleep(100), - receive {publish, #{topic := T, payload := Payload}} -> - ?assertEqual(<<"t2">>, T), - ?assertEqual(<<"{\"x\":1}">>, Payload) + receive + {publish, #{topic := T, payload := Payload}} -> + ?assertEqual(<<"t2">>, T), + ?assertEqual(<<"{\"x\":1}">>, Payload) after 1000 -> ct:fail(wait_for_t2) end, emqtt:publish(Client, <<"t1">>, <<"{\"x\":2}">>, 0), - receive {publish, #{topic := <<"t2">>, payload := Payload0}} -> - ct:fail({unexpected_t2, Payload0}) + receive + {publish, #{topic := <<"t2">>, payload := Payload0}} -> + ct:fail({unexpected_t2, Payload0}) after 1000 -> ok end, emqtt:publish(Client, <<"t3/a">>, <<"{\"x\":1}">>, 0), - receive {publish, #{topic := T3, payload := Payload3}} -> - ?assertEqual(<<"t2">>, T3), - ?assertEqual(<<"{\"x\":1}">>, Payload3) + receive + {publish, #{topic := T3, payload := Payload3}} -> + ?assertEqual(<<"t2">>, T3), + ?assertEqual(<<"{\"x\":1}">>, Payload3) after 1000 -> ct:fail(wait_for_t2) end, @@ -619,28 +821,35 @@ t_sqlselect_02(_Config) -> delete_rule(TopicRule1). 
t_sqlselect_1(_Config) -> - SQL = "SELECT json_decode(payload) as p, payload " + SQL = + "SELECT json_decode(payload) as p, payload " "FROM \"t1\" " "WHERE p.x = 1 and p.y = 2", Repub = republish_output(<<"t2">>), {ok, TopicRule} = emqx_rule_engine:create_rule( - #{sql => SQL, id => ?TMP_RULEID, - outputs => [Repub]}), + #{ + sql => SQL, + id => ?TMP_RULEID, + outputs => [Repub] + } + ), {ok, Client} = emqtt:start_link([{username, <<"emqx">>}]), {ok, _} = emqtt:connect(Client), {ok, _, _} = emqtt:subscribe(Client, <<"t2">>, 0), ct:sleep(200), emqtt:publish(Client, <<"t1">>, <<"{\"x\":1,\"y\":2}">>, 0), - receive {publish, #{topic := T, payload := Payload}} -> - ?assertEqual(<<"t2">>, T), - ?assertEqual(<<"{\"x\":1,\"y\":2}">>, Payload) + receive + {publish, #{topic := T, payload := Payload}} -> + ?assertEqual(<<"t2">>, T), + ?assertEqual(<<"{\"x\":1,\"y\":2}">>, Payload) after 1000 -> ct:fail(wait_for_t2) end, emqtt:publish(Client, <<"t1">>, <<"{\"x\":1,\"y\":1}">>, 0), - receive {publish, #{topic := <<"t2">>, payload := _}} -> - ct:fail(unexpected_t2) + receive + {publish, #{topic := <<"t2">>, payload := _}} -> + ct:fail(unexpected_t2) after 1000 -> ok end, @@ -653,20 +862,25 @@ t_sqlselect_2(_Config) -> SQL = "SELECT * FROM \"t2\" ", Repub = republish_output(<<"t2">>), {ok, TopicRule} = emqx_rule_engine:create_rule( - #{sql => SQL, id => ?TMP_RULEID, - outputs => [Repub]}), + #{ + sql => SQL, + id => ?TMP_RULEID, + outputs => [Repub] + } + ), {ok, Client} = emqtt:start_link([{username, <<"emqx">>}]), {ok, _} = emqtt:connect(Client), {ok, _, _} = emqtt:subscribe(Client, <<"t2">>, 0), emqtt:publish(Client, <<"t2">>, <<"{\"x\":1,\"y\":144}">>, 0), Fun = fun() -> - receive {publish, #{topic := <<"t2">>, payload := _}} -> + receive + {publish, #{topic := <<"t2">>, payload := _}} -> received_t2 - after 500 -> - received_nothing - end - end, + after 500 -> + received_nothing + end + end, received_t2 = Fun(), received_t2 = Fun(), received_nothing = Fun(), @@ -676,29 
+890,36 @@ t_sqlselect_2(_Config) -> t_sqlselect_3(_Config) -> %% republish the client.connected msg - SQL = "SELECT * " + SQL = + "SELECT * " "FROM \"$events/client_connected\" " "WHERE username = 'emqx1'", Repub = republish_output(<<"t2">>, <<"clientid=${clientid}">>), {ok, TopicRule} = emqx_rule_engine:create_rule( - #{sql => SQL, id => ?TMP_RULEID, - outputs => [Repub]}), + #{ + sql => SQL, + id => ?TMP_RULEID, + outputs => [Repub] + } + ), {ok, Client} = emqtt:start_link([{clientid, <<"emqx0">>}, {username, <<"emqx0">>}]), {ok, _} = emqtt:connect(Client), {ok, _, _} = emqtt:subscribe(Client, <<"t2">>, 0), ct:sleep(200), {ok, Client1} = emqtt:start_link([{clientid, <<"c_emqx1">>}, {username, <<"emqx1">>}]), {ok, _} = emqtt:connect(Client1), - receive {publish, #{topic := T, payload := Payload}} -> - ?assertEqual(<<"t2">>, T), - ?assertEqual(<<"clientid=c_emqx1">>, Payload) + receive + {publish, #{topic := T, payload := Payload}} -> + ?assertEqual(<<"t2">>, T), + ?assertEqual(<<"clientid=c_emqx1">>, Payload) after 1000 -> ct:fail(wait_for_t2) end, emqtt:publish(Client, <<"t1">>, <<"{\"x\":1,\"y\":1}">>, 0), - receive {publish, #{topic := <<"t2">>, payload := _}} -> - ct:fail(unexpected_t2) + receive + {publish, #{topic := <<"t2">>, payload := _}} -> + ct:fail(unexpected_t2) after 1000 -> ok end, @@ -707,189 +928,337 @@ t_sqlselect_3(_Config) -> delete_rule(TopicRule). t_sqlparse_event_1(_Config) -> - Sql = "select topic as tp " - "from \"$events/session_subscribed\" ", - ?assertMatch({ok,#{<<"tp">> := <<"t/tt">>}}, + Sql = + "select topic as tp " + "from \"$events/session_subscribed\" ", + ?assertMatch( + {ok, #{<<"tp">> := <<"t/tt">>}}, emqx_rule_sqltester:test( - #{sql => Sql, - context => #{topic => <<"t/tt">>}})). + #{ + sql => Sql, + context => #{topic => <<"t/tt">>} + } + ) + ). 
t_sqlparse_event_2(_Config) -> - Sql = "select clientid " - "from \"$events/client_connected\" ", - ?assertMatch({ok,#{<<"clientid">> := <<"abc">>}}, + Sql = + "select clientid " + "from \"$events/client_connected\" ", + ?assertMatch( + {ok, #{<<"clientid">> := <<"abc">>}}, emqx_rule_sqltester:test( - #{sql => Sql, - context => #{clientid => <<"abc">>}})). + #{ + sql => Sql, + context => #{clientid => <<"abc">>} + } + ) + ). t_sqlparse_event_3(_Config) -> - Sql = "select clientid, topic as tp " - "from \"t/tt\", \"$events/client_connected\" ", - ?assertMatch({ok,#{<<"clientid">> := <<"abc">>, <<"tp">> := <<"t/tt">>}}, + Sql = + "select clientid, topic as tp " + "from \"t/tt\", \"$events/client_connected\" ", + ?assertMatch( + {ok, #{<<"clientid">> := <<"abc">>, <<"tp">> := <<"t/tt">>}}, emqx_rule_sqltester:test( - #{sql => Sql, - context => #{clientid => <<"abc">>, topic => <<"t/tt">>}})). + #{ + sql => Sql, + context => #{clientid => <<"abc">>, topic => <<"t/tt">>} + } + ) + ). t_sqlparse_foreach_1(_Config) -> %% Verify foreach with and without 'AS' - Sql = "foreach payload.sensors as s " - "from \"t/#\" ", - ?assertMatch({ok,[#{<<"s">> := 1}, #{<<"s">> := 2}]}, - emqx_rule_sqltester:test( - #{sql => Sql, - context => #{payload => <<"{\"sensors\": [1, 2]}">>, - topic => <<"t/a">>}})), - Sql2 = "foreach payload.sensors " - "from \"t/#\" ", - ?assertMatch({ok,[#{item := 1}, #{item := 2}]}, - emqx_rule_sqltester:test( - #{sql => Sql2, - context => #{payload => <<"{\"sensors\": [1, 2]}">>, - topic => <<"t/a">>}})), - Sql3 = "foreach payload.sensors " - "from \"t/#\" ", - ?assertMatch({ok,[#{item := #{<<"cmd">> := <<"1">>}, clientid := <<"c_a">>}, - #{item := #{<<"cmd">> := <<"2">>, <<"name">> := <<"ct">>}, clientid := <<"c_a">>}]}, - emqx_rule_sqltester:test( - #{sql => Sql3, - context => #{ - payload => <<"{\"sensors\": [{\"cmd\":\"1\"}, {\"cmd\":\"2\",\"name\":\"ct\"}]}">>, - clientid => <<"c_a">>, - topic => <<"t/a">>}})), - Sql4 = "foreach payload.sensors " - 
"from \"t/#\" ", - {ok,[#{metadata := #{rule_id := TRuleId}}, - #{metadata := #{rule_id := TRuleId}}]} = - emqx_rule_sqltester:test( - #{sql => Sql4, - context => #{ - payload => <<"{\"sensors\": [1, 2]}">>, - topic => <<"t/a">>}}), + Sql = + "foreach payload.sensors as s " + "from \"t/#\" ", + ?assertMatch( + {ok, [#{<<"s">> := 1}, #{<<"s">> := 2}]}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => #{ + payload => <<"{\"sensors\": [1, 2]}">>, + topic => <<"t/a">> + } + } + ) + ), + Sql2 = + "foreach payload.sensors " + "from \"t/#\" ", + ?assertMatch( + {ok, [#{item := 1}, #{item := 2}]}, + emqx_rule_sqltester:test( + #{ + sql => Sql2, + context => #{ + payload => <<"{\"sensors\": [1, 2]}">>, + topic => <<"t/a">> + } + } + ) + ), + Sql3 = + "foreach payload.sensors " + "from \"t/#\" ", + ?assertMatch( + {ok, [ + #{item := #{<<"cmd">> := <<"1">>}, clientid := <<"c_a">>}, + #{item := #{<<"cmd">> := <<"2">>, <<"name">> := <<"ct">>}, clientid := <<"c_a">>} + ]}, + emqx_rule_sqltester:test( + #{ + sql => Sql3, + context => #{ + payload => + <<"{\"sensors\": [{\"cmd\":\"1\"}, {\"cmd\":\"2\",\"name\":\"ct\"}]}">>, + clientid => <<"c_a">>, + topic => <<"t/a">> + } + } + ) + ), + Sql4 = + "foreach payload.sensors " + "from \"t/#\" ", + {ok, [ + #{metadata := #{rule_id := TRuleId}}, + #{metadata := #{rule_id := TRuleId}} + ]} = + emqx_rule_sqltester:test( + #{ + sql => Sql4, + context => #{ + payload => <<"{\"sensors\": [1, 2]}">>, + topic => <<"t/a">> + } + } + ), ?assert(is_binary(TRuleId)). 
t_sqlparse_foreach_2(_Config) -> %% Verify foreach-do with and without 'AS' - Sql = "foreach payload.sensors as s " - "do s.cmd as msg_type " - "from \"t/#\" ", - ?assertMatch({ok,[#{<<"msg_type">> := <<"1">>},#{<<"msg_type">> := <<"2">>}]}, - emqx_rule_sqltester:test( - #{sql => Sql, - context => - #{payload => + Sql = + "foreach payload.sensors as s " + "do s.cmd as msg_type " + "from \"t/#\" ", + ?assertMatch( + {ok, [#{<<"msg_type">> := <<"1">>}, #{<<"msg_type">> := <<"2">>}]}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => + #{ + payload => <<"{\"sensors\": [{\"cmd\":\"1\"}, {\"cmd\":\"2\"}]}">>, - topic => <<"t/a">>}})), - Sql2 = "foreach payload.sensors " - "do item.cmd as msg_type " - "from \"t/#\" ", - ?assertMatch({ok,[#{<<"msg_type">> := <<"1">>},#{<<"msg_type">> := <<"2">>}]}, - emqx_rule_sqltester:test( - #{sql => Sql2, - context => - #{payload => + topic => <<"t/a">> + } + } + ) + ), + Sql2 = + "foreach payload.sensors " + "do item.cmd as msg_type " + "from \"t/#\" ", + ?assertMatch( + {ok, [#{<<"msg_type">> := <<"1">>}, #{<<"msg_type">> := <<"2">>}]}, + emqx_rule_sqltester:test( + #{ + sql => Sql2, + context => + #{ + payload => <<"{\"sensors\": [{\"cmd\":\"1\"}, {\"cmd\":\"2\"}]}">>, - topic => <<"t/a">>}})), - Sql3 = "foreach payload.sensors " - "do item as item " - "from \"t/#\" ", - ?assertMatch({ok,[#{<<"item">> := 1},#{<<"item">> := 2}]}, - emqx_rule_sqltester:test( - #{sql => Sql3, - context => - #{payload => + topic => <<"t/a">> + } + } + ) + ), + Sql3 = + "foreach payload.sensors " + "do item as item " + "from \"t/#\" ", + ?assertMatch( + {ok, [#{<<"item">> := 1}, #{<<"item">> := 2}]}, + emqx_rule_sqltester:test( + #{ + sql => Sql3, + context => + #{ + payload => <<"{\"sensors\": [1, 2]}">>, - topic => <<"t/a">>}})). + topic => <<"t/a">> + } + } + ) + ). 
t_sqlparse_foreach_3(_Config) -> %% Verify foreach-incase with and without 'AS' - Sql = "foreach payload.sensors as s " - "incase s.cmd != 1 " - "from \"t/#\" ", - ?assertMatch({ok,[#{<<"s">> := #{<<"cmd">> := 2}}, - #{<<"s">> := #{<<"cmd">> := 3}} - ]}, - emqx_rule_sqltester:test( - #{sql => Sql, - context => - #{payload => + Sql = + "foreach payload.sensors as s " + "incase s.cmd != 1 " + "from \"t/#\" ", + ?assertMatch( + {ok, [ + #{<<"s">> := #{<<"cmd">> := 2}}, + #{<<"s">> := #{<<"cmd">> := 3}} + ]}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => + #{ + payload => <<"{\"sensors\": [{\"cmd\":1}, {\"cmd\":2}, {\"cmd\":3}]}">>, - topic => <<"t/a">>}})), - Sql2 = "foreach payload.sensors " - "incase item.cmd != 1 " - "from \"t/#\" ", - ?assertMatch({ok,[#{item := #{<<"cmd">> := 2}}, - #{item := #{<<"cmd">> := 3}} - ]}, - emqx_rule_sqltester:test( - #{sql => Sql2, - context => - #{payload => + topic => <<"t/a">> + } + } + ) + ), + Sql2 = + "foreach payload.sensors " + "incase item.cmd != 1 " + "from \"t/#\" ", + ?assertMatch( + {ok, [ + #{item := #{<<"cmd">> := 2}}, + #{item := #{<<"cmd">> := 3}} + ]}, + emqx_rule_sqltester:test( + #{ + sql => Sql2, + context => + #{ + payload => <<"{\"sensors\": [{\"cmd\":1}, {\"cmd\":2}, {\"cmd\":3}]}">>, - topic => <<"t/a">>}})). + topic => <<"t/a">> + } + } + ) + ). 
t_sqlparse_foreach_4(_Config) -> %% Verify foreach-do-incase - Sql = "foreach payload.sensors as s " - "do s.cmd as msg_type, s.name as name " - "incase is_not_null(s.cmd) " - "from \"t/#\" ", - ?assertMatch({ok,[#{<<"msg_type">> := <<"1">>},#{<<"msg_type">> := <<"2">>}]}, - emqx_rule_sqltester:test( - #{sql => Sql, - context => - #{payload => + Sql = + "foreach payload.sensors as s " + "do s.cmd as msg_type, s.name as name " + "incase is_not_null(s.cmd) " + "from \"t/#\" ", + ?assertMatch( + {ok, [#{<<"msg_type">> := <<"1">>}, #{<<"msg_type">> := <<"2">>}]}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => + #{ + payload => <<"{\"sensors\": [{\"cmd\":\"1\"}, {\"cmd\":\"2\"}]}">>, - topic => <<"t/a">>}})), - ?assertMatch({ok,[#{<<"msg_type">> := <<"1">>, <<"name">> := <<"n1">>}, #{<<"msg_type">> := <<"2">>}]}, - emqx_rule_sqltester:test( - #{sql => Sql, - context => - #{payload => + topic => <<"t/a">> + } + } + ) + ), + ?assertMatch( + {ok, [#{<<"msg_type">> := <<"1">>, <<"name">> := <<"n1">>}, #{<<"msg_type">> := <<"2">>}]}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => + #{ + payload => <<"{\"sensors\": [{\"cmd\":\"1\", \"name\":\"n1\"}, {\"cmd\":\"2\"}, {\"name\":\"n3\"}]}">>, - topic => <<"t/a">>}})), - ?assertMatch({ok,[]}, - emqx_rule_sqltester:test( - #{sql => Sql, - context => - #{payload => <<"{\"sensors\": [1, 2]}">>, - topic => <<"t/a">>}})). + topic => <<"t/a">> + } + } + ) + ), + ?assertMatch( + {ok, []}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => + #{ + payload => <<"{\"sensors\": [1, 2]}">>, + topic => <<"t/a">> + } + } + ) + ). 
t_sqlparse_foreach_5(_Config) -> %% Verify foreach on a empty-list or non-list variable - Sql = "foreach payload.sensors as s " - "do s.cmd as msg_type, s.name as name " - "from \"t/#\" ", - ?assertMatch({ok,[]}, emqx_rule_sqltester:test( - #{sql => Sql, - context => - #{payload => <<"{\"sensors\": 1}">>, - topic => <<"t/a">>}})), - ?assertMatch({ok,[]}, - emqx_rule_sqltester:test( - #{sql => Sql, - context => - #{payload => <<"{\"sensors\": []}">>, - topic => <<"t/a">>}})), - Sql2 = "foreach payload.sensors " - "from \"t/#\" ", - ?assertMatch({ok,[]}, emqx_rule_sqltester:test( - #{sql => Sql2, - context => - #{payload => <<"{\"sensors\": 1}">>, - topic => <<"t/a">>}})). + Sql = + "foreach payload.sensors as s " + "do s.cmd as msg_type, s.name as name " + "from \"t/#\" ", + ?assertMatch( + {ok, []}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => + #{ + payload => <<"{\"sensors\": 1}">>, + topic => <<"t/a">> + } + } + ) + ), + ?assertMatch( + {ok, []}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => + #{ + payload => <<"{\"sensors\": []}">>, + topic => <<"t/a">> + } + } + ) + ), + Sql2 = + "foreach payload.sensors " + "from \"t/#\" ", + ?assertMatch( + {ok, []}, + emqx_rule_sqltester:test( + #{ + sql => Sql2, + context => + #{ + payload => <<"{\"sensors\": 1}">>, + topic => <<"t/a">> + } + } + ) + ). 
t_sqlparse_foreach_6(_Config) -> %% Verify foreach on a empty-list or non-list variable - Sql = "foreach json_decode(payload) " - "do item.id as zid, timestamp as t " - "from \"t/#\" ", + Sql = + "foreach json_decode(payload) " + "do item.id as zid, timestamp as t " + "from \"t/#\" ", {ok, Res} = emqx_rule_sqltester:test( - #{sql => Sql, - context => - #{payload => <<"[{\"id\": 5},{\"id\": 15}]">>, - topic => <<"t/a">>}}), - [#{<<"t">> := Ts1, <<"zid">> := Zid1}, - #{<<"t">> := Ts2, <<"zid">> := Zid2}] = Res, + #{ + sql => Sql, + context => + #{ + payload => <<"[{\"id\": 5},{\"id\": 15}]">>, + topic => <<"t/a">> + } + } + ), + [ + #{<<"t">> := Ts1, <<"zid">> := Zid1}, + #{<<"t">> := Ts2, <<"zid">> := Zid2} + ] = Res, ?assertEqual(true, is_integer(Ts1)), ?assertEqual(true, is_integer(Ts2)), ?assert(Zid1 == 5 orelse Zid1 == 15), @@ -897,549 +1266,1004 @@ t_sqlparse_foreach_6(_Config) -> t_sqlparse_foreach_7(_Config) -> %% Verify foreach-do-incase and cascaded AS - Sql = "foreach json_decode(payload) as p, p.sensors as s, s.collection as c, c.info as info " - "do info.cmd as msg_type, info.name as name " - "incase is_not_null(info.cmd) " - "from \"t/#\" " - "where s.page = '2' ", - Payload = <<"{\"sensors\": {\"page\": 2, \"collection\": " - "{\"info\":[{\"name\":\"cmd1\", \"cmd\":\"1\"}, {\"cmd\":\"2\"}]} } }">>, - ?assertMatch({ok,[#{<<"name">> := <<"cmd1">>, <<"msg_type">> := <<"1">>}, #{<<"msg_type">> := <<"2">>}]}, - emqx_rule_sqltester:test( - #{sql => Sql, - context => - #{payload => Payload, - topic => <<"t/a">>}})), - Sql2 = "foreach json_decode(payload) as p, p.sensors as s, s.collection as c, c.info as info " - "do info.cmd as msg_type, info.name as name " - "incase is_not_null(info.cmd) " - "from \"t/#\" " - "where s.page = '3' ", - ?assertMatch({error, nomatch}, - emqx_rule_sqltester:test( - #{sql => Sql2, - context => - #{payload => Payload, - topic => <<"t/a">>}})). 
+ Sql = + "foreach json_decode(payload) as p, p.sensors as s, s.collection as c, c.info as info " + "do info.cmd as msg_type, info.name as name " + "incase is_not_null(info.cmd) " + "from \"t/#\" " + "where s.page = '2' ", + Payload = << + "{\"sensors\": {\"page\": 2, \"collection\": " + "{\"info\":[{\"name\":\"cmd1\", \"cmd\":\"1\"}, {\"cmd\":\"2\"}]} } }" + >>, + ?assertMatch( + {ok, [#{<<"name">> := <<"cmd1">>, <<"msg_type">> := <<"1">>}, #{<<"msg_type">> := <<"2">>}]}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => + #{ + payload => Payload, + topic => <<"t/a">> + } + } + ) + ), + Sql2 = + "foreach json_decode(payload) as p, p.sensors as s, s.collection as c, c.info as info " + "do info.cmd as msg_type, info.name as name " + "incase is_not_null(info.cmd) " + "from \"t/#\" " + "where s.page = '3' ", + ?assertMatch( + {error, nomatch}, + emqx_rule_sqltester:test( + #{ + sql => Sql2, + context => + #{ + payload => Payload, + topic => <<"t/a">> + } + } + ) + ). t_sqlparse_foreach_8(_Config) -> %% Verify foreach-do-incase and cascaded AS - Sql = "foreach json_decode(payload) as p, p.sensors as s, s.collection as c, c.info as info " - "do info.cmd as msg_type, info.name as name " - "incase is_map(info) " - "from \"t/#\" " - "where s.page = '2' ", - Payload = <<"{\"sensors\": {\"page\": 2, \"collection\": " - "{\"info\":[\"haha\", {\"name\":\"cmd1\", \"cmd\":\"1\"}]} } }">>, - ?assertMatch({ok,[#{<<"name">> := <<"cmd1">>, <<"msg_type">> := <<"1">>}]}, - emqx_rule_sqltester:test( - #{sql => Sql, - context => - #{payload => Payload, - topic => <<"t/a">>}})), + Sql = + "foreach json_decode(payload) as p, p.sensors as s, s.collection as c, c.info as info " + "do info.cmd as msg_type, info.name as name " + "incase is_map(info) " + "from \"t/#\" " + "where s.page = '2' ", + Payload = << + "{\"sensors\": {\"page\": 2, \"collection\": " + "{\"info\":[\"haha\", {\"name\":\"cmd1\", \"cmd\":\"1\"}]} } }" + >>, + ?assertMatch( + {ok, [#{<<"name">> := <<"cmd1">>, 
<<"msg_type">> := <<"1">>}]}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => + #{ + payload => Payload, + topic => <<"t/a">> + } + } + ) + ), - Sql3 = "foreach json_decode(payload) as p, p.sensors as s, s.collection as c, sublist(2,1,c.info) as info " - "do info.cmd as msg_type, info.name as name " - "from \"t/#\" " - "where s.page = '2' ", - [?assertMatch({ok,[#{<<"name">> := <<"cmd1">>, <<"msg_type">> := <<"1">>}]}, - emqx_rule_sqltester:test( - #{sql => SqlN, - context => - #{payload => Payload, - topic => <<"t/a">>}})) - || SqlN <- [Sql3]]. + Sql3 = + "foreach json_decode(payload) as p, p.sensors as s, s.collection as c, sublist(2,1,c.info) as info " + "do info.cmd as msg_type, info.name as name " + "from \"t/#\" " + "where s.page = '2' ", + [ + ?assertMatch( + {ok, [#{<<"name">> := <<"cmd1">>, <<"msg_type">> := <<"1">>}]}, + emqx_rule_sqltester:test( + #{ + sql => SqlN, + context => + #{ + payload => Payload, + topic => <<"t/a">> + } + } + ) + ) + || SqlN <- [Sql3] + ]. 
t_sqlparse_case_when_1(_Config) -> %% case-when-else clause - Sql = "select " - " case when payload.x < 0 then 0 " - " when payload.x > 7 then 7 " - " else payload.x " - " end as y " - "from \"t/#\" ", - ?assertMatch({ok, #{<<"y">> := 1}}, emqx_rule_sqltester:test( - #{sql => Sql, - context => #{payload => <<"{\"x\": 1}">>, - topic => <<"t/a">>}})), - ?assertMatch({ok, #{<<"y">> := 0}}, emqx_rule_sqltester:test( - #{sql => Sql, - context => #{payload => <<"{\"x\": 0}">>, - topic => <<"t/a">>}})), - ?assertMatch({ok, #{<<"y">> := 0}}, emqx_rule_sqltester:test( - #{sql => Sql, - context => #{payload => <<"{\"x\": -1}">>, - topic => <<"t/a">>}})), - ?assertMatch({ok, #{<<"y">> := 7}}, emqx_rule_sqltester:test( - #{sql => Sql, - context => #{payload => <<"{\"x\": 7}">>, - topic => <<"t/a">>}})), - ?assertMatch({ok, #{<<"y">> := 7}}, emqx_rule_sqltester:test( - #{sql => Sql, - context => #{payload => <<"{\"x\": 8}">>, - topic => <<"t/a">>}})), + Sql = + "select " + " case when payload.x < 0 then 0 " + " when payload.x > 7 then 7 " + " else payload.x " + " end as y " + "from \"t/#\" ", + ?assertMatch( + {ok, #{<<"y">> := 1}}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => #{ + payload => <<"{\"x\": 1}">>, + topic => <<"t/a">> + } + } + ) + ), + ?assertMatch( + {ok, #{<<"y">> := 0}}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => #{ + payload => <<"{\"x\": 0}">>, + topic => <<"t/a">> + } + } + ) + ), + ?assertMatch( + {ok, #{<<"y">> := 0}}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => #{ + payload => <<"{\"x\": -1}">>, + topic => <<"t/a">> + } + } + ) + ), + ?assertMatch( + {ok, #{<<"y">> := 7}}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => #{ + payload => <<"{\"x\": 7}">>, + topic => <<"t/a">> + } + } + ) + ), + ?assertMatch( + {ok, #{<<"y">> := 7}}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => #{ + payload => <<"{\"x\": 8}">>, + topic => <<"t/a">> + } + } + ) + ), ok. 
t_sqlparse_case_when_2(_Config) -> % switch clause - Sql = "select " - " case payload.x when 1 then 2 " - " when 2 then 3 " - " else 4 " - " end as y " - "from \"t/#\" ", - ?assertMatch({ok, #{<<"y">> := 2}}, emqx_rule_sqltester:test( - #{sql => Sql, - context => #{payload => <<"{\"x\": 1}">>, - topic => <<"t/a">>}})), - ?assertMatch({ok, #{<<"y">> := 3}}, emqx_rule_sqltester:test( - #{sql => Sql, - context => #{payload => <<"{\"x\": 2}">>, - topic => <<"t/a">>}})), - ?assertMatch({ok, #{<<"y">> := 4}}, emqx_rule_sqltester:test( - #{sql => Sql, - context => #{payload => <<"{\"x\": 4}">>, - topic => <<"t/a">>}})), - ?assertMatch({ok, #{<<"y">> := 4}}, emqx_rule_sqltester:test( - #{sql => Sql, - context => #{payload => <<"{\"x\": 7}">>, - topic => <<"t/a">>}})), - ?assertMatch({ok, #{<<"y">> := 4}}, emqx_rule_sqltester:test( - #{sql => Sql, - context => #{payload => <<"{\"x\": 8}">>, - topic => <<"t/a">>}})). + Sql = + "select " + " case payload.x when 1 then 2 " + " when 2 then 3 " + " else 4 " + " end as y " + "from \"t/#\" ", + ?assertMatch( + {ok, #{<<"y">> := 2}}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => #{ + payload => <<"{\"x\": 1}">>, + topic => <<"t/a">> + } + } + ) + ), + ?assertMatch( + {ok, #{<<"y">> := 3}}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => #{ + payload => <<"{\"x\": 2}">>, + topic => <<"t/a">> + } + } + ) + ), + ?assertMatch( + {ok, #{<<"y">> := 4}}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => #{ + payload => <<"{\"x\": 4}">>, + topic => <<"t/a">> + } + } + ) + ), + ?assertMatch( + {ok, #{<<"y">> := 4}}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => #{ + payload => <<"{\"x\": 7}">>, + topic => <<"t/a">> + } + } + ) + ), + ?assertMatch( + {ok, #{<<"y">> := 4}}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => #{ + payload => <<"{\"x\": 8}">>, + topic => <<"t/a">> + } + } + ) + ). 
t_sqlparse_case_when_3(_Config) -> %% case-when clause - Sql = "select " - " case when payload.x < 0 then 0 " - " when payload.x > 7 then 7 " - " end as y " - "from \"t/#\" ", - ?assertMatch({ok, #{}}, emqx_rule_sqltester:test( - #{sql => Sql, - context => #{payload => <<"{\"x\": 1}">>, - topic => <<"t/a">>}})), - ?assertMatch({ok, #{}}, emqx_rule_sqltester:test( - #{sql => Sql, - context => #{payload => <<"{\"x\": 5}">>, - topic => <<"t/a">>}})), - ?assertMatch({ok, #{}}, emqx_rule_sqltester:test( - #{sql => Sql, - context => #{payload => <<"{\"x\": 0}">>, - topic => <<"t/a">>}})), - ?assertMatch({ok, #{<<"y">> := 0}}, emqx_rule_sqltester:test( - #{sql => Sql, - context => #{payload => <<"{\"x\": -1}">>, - topic => <<"t/a">>}})), - ?assertMatch({ok, #{}}, emqx_rule_sqltester:test( - #{sql => Sql, - context => #{payload => <<"{\"x\": 7}">>, - topic => <<"t/a">>}})), - ?assertMatch({ok, #{<<"y">> := 7}}, emqx_rule_sqltester:test( - #{sql => Sql, - context => #{payload => <<"{\"x\": 8}">>, - topic => <<"t/a">>}})), + Sql = + "select " + " case when payload.x < 0 then 0 " + " when payload.x > 7 then 7 " + " end as y " + "from \"t/#\" ", + ?assertMatch( + {ok, #{}}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => #{ + payload => <<"{\"x\": 1}">>, + topic => <<"t/a">> + } + } + ) + ), + ?assertMatch( + {ok, #{}}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => #{ + payload => <<"{\"x\": 5}">>, + topic => <<"t/a">> + } + } + ) + ), + ?assertMatch( + {ok, #{}}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => #{ + payload => <<"{\"x\": 0}">>, + topic => <<"t/a">> + } + } + ) + ), + ?assertMatch( + {ok, #{<<"y">> := 0}}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => #{ + payload => <<"{\"x\": -1}">>, + topic => <<"t/a">> + } + } + ) + ), + ?assertMatch( + {ok, #{}}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => #{ + payload => <<"{\"x\": 7}">>, + topic => <<"t/a">> + } + } + ) + ), + ?assertMatch( + {ok, 
#{<<"y">> := 7}}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => #{ + payload => <<"{\"x\": 8}">>, + topic => <<"t/a">> + } + } + ) + ), ok. t_sqlparse_array_index_1(_Config) -> %% index get - Sql = "select " - " json_decode(payload) as p, " - " p[1] as a " - "from \"t/#\" ", - ?assertMatch({ok, #{<<"a">> := #{<<"x">> := 1}}}, emqx_rule_sqltester:test( - #{sql => Sql, - context => #{payload => <<"[{\"x\": 1}]">>, - topic => <<"t/a">>}})), - ?assertMatch({ok, #{}}, emqx_rule_sqltester:test( - #{sql => Sql, - context => #{payload => <<"{\"x\": 1}">>, - topic => <<"t/a">>}})), + Sql = + "select " + " json_decode(payload) as p, " + " p[1] as a " + "from \"t/#\" ", + ?assertMatch( + {ok, #{<<"a">> := #{<<"x">> := 1}}}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => #{ + payload => <<"[{\"x\": 1}]">>, + topic => <<"t/a">> + } + } + ) + ), + ?assertMatch( + {ok, #{}}, + emqx_rule_sqltester:test( + #{ + sql => Sql, + context => #{ + payload => <<"{\"x\": 1}">>, + topic => <<"t/a">> + } + } + ) + ), %% index get without 'as' - Sql2 = "select " - " payload.x[2] " - "from \"t/#\" ", - ?assertMatch({ok, #{<<"payload">> := #{<<"x">> := [3]}}}, emqx_rule_sqltester:test( - #{sql => Sql2, - context => #{payload => #{<<"x">> => [1,3,4]}, - topic => <<"t/a">>}})), + Sql2 = + "select " + " payload.x[2] " + "from \"t/#\" ", + ?assertMatch( + {ok, #{<<"payload">> := #{<<"x">> := [3]}}}, + emqx_rule_sqltester:test( + #{ + sql => Sql2, + context => #{ + payload => #{<<"x">> => [1, 3, 4]}, + topic => <<"t/a">> + } + } + ) + ), %% index get without 'as' again - Sql3 = "select " - " payload.x[2].y " - "from \"t/#\" ", - ?assertMatch({ok, #{<<"payload">> := #{<<"x">> := [#{<<"y">> := 3}]}}}, emqx_rule_sqltester:test( - #{sql => Sql3, - context => #{payload => #{<<"x">> => [1,#{y => 3},4]}, - topic => <<"t/a">>}})), + Sql3 = + "select " + " payload.x[2].y " + "from \"t/#\" ", + ?assertMatch( + {ok, #{<<"payload">> := #{<<"x">> := [#{<<"y">> := 3}]}}}, + 
emqx_rule_sqltester:test( + #{ + sql => Sql3, + context => #{ + payload => #{<<"x">> => [1, #{y => 3}, 4]}, + topic => <<"t/a">> + } + } + ) + ), %% index get with 'as' - Sql4 = "select " - " payload.x[2].y as b " - "from \"t/#\" ", - ?assertMatch({ok, #{<<"b">> := 3}}, emqx_rule_sqltester:test( - #{sql => Sql4, - context => #{payload => #{<<"x">> => [1,#{y => 3},4]}, - topic => <<"t/a">>}})). + Sql4 = + "select " + " payload.x[2].y as b " + "from \"t/#\" ", + ?assertMatch( + {ok, #{<<"b">> := 3}}, + emqx_rule_sqltester:test( + #{ + sql => Sql4, + context => #{ + payload => #{<<"x">> => [1, #{y => 3}, 4]}, + topic => <<"t/a">> + } + } + ) + ). t_sqlparse_array_index_2(_Config) -> %% array get with negative index - Sql1 = "select " - " payload.x[-2].y as b " - "from \"t/#\" ", - ?assertMatch({ok, #{<<"b">> := 3}}, emqx_rule_sqltester:test( - #{sql => Sql1, - context => #{payload => #{<<"x">> => [1,#{y => 3},4]}, - topic => <<"t/a">>}})), + Sql1 = + "select " + " payload.x[-2].y as b " + "from \"t/#\" ", + ?assertMatch( + {ok, #{<<"b">> := 3}}, + emqx_rule_sqltester:test( + #{ + sql => Sql1, + context => #{ + payload => #{<<"x">> => [1, #{y => 3}, 4]}, + topic => <<"t/a">> + } + } + ) + ), %% array append to head or tail of a list: - Sql2 = "select " - " payload.x as b, " - " 1 as c[-0], " - " 2 as c[-0], " - " b as c[0] " - "from \"t/#\" ", - ?assertMatch({ok, #{<<"b">> := 0, <<"c">> := [0,1,2]}}, emqx_rule_sqltester:test( - #{sql => Sql2, - context => #{payload => #{<<"x">> => 0}, - topic => <<"t/a">>}})), + Sql2 = + "select " + " payload.x as b, " + " 1 as c[-0], " + " 2 as c[-0], " + " b as c[0] " + "from \"t/#\" ", + ?assertMatch( + {ok, #{<<"b">> := 0, <<"c">> := [0, 1, 2]}}, + emqx_rule_sqltester:test( + #{ + sql => Sql2, + context => #{ + payload => #{<<"x">> => 0}, + topic => <<"t/a">> + } + } + ) + ), %% construct an empty list: - Sql3 = "select " - " [] as c, " - " 1 as c[-0], " - " 2 as c[-0], " - " 0 as c[0] " - "from \"t/#\" ", - ?assertMatch({ok, 
#{<<"c">> := [0,1,2]}}, emqx_rule_sqltester:test( - #{sql => Sql3, - context => #{payload => <<"">>, - topic => <<"t/a">>}})), + Sql3 = + "select " + " [] as c, " + " 1 as c[-0], " + " 2 as c[-0], " + " 0 as c[0] " + "from \"t/#\" ", + ?assertMatch( + {ok, #{<<"c">> := [0, 1, 2]}}, + emqx_rule_sqltester:test( + #{ + sql => Sql3, + context => #{ + payload => <<"">>, + topic => <<"t/a">> + } + } + ) + ), %% construct a list: - Sql4 = "select " - " [payload.a, \"topic\", 'c'] as c, " - " 1 as c[-0], " - " 2 as c[-0], " - " 0 as c[0] " - "from \"t/#\" ", - ?assertMatch({ok, #{<<"c">> := [0,11,<<"t/a">>,<<"c">>,1,2]}}, emqx_rule_sqltester:test( - #{sql => Sql4, - context => #{payload => <<"{\"a\":11}">>, - topic => <<"t/a">> - }})). + Sql4 = + "select " + " [payload.a, \"topic\", 'c'] as c, " + " 1 as c[-0], " + " 2 as c[-0], " + " 0 as c[0] " + "from \"t/#\" ", + ?assertMatch( + {ok, #{<<"c">> := [0, 11, <<"t/a">>, <<"c">>, 1, 2]}}, + emqx_rule_sqltester:test( + #{ + sql => Sql4, + context => #{ + payload => <<"{\"a\":11}">>, + topic => <<"t/a">> + } + } + ) + ). 
t_sqlparse_array_index_3(_Config) -> %% array with json string payload: - Sql0 = "select " - "payload," - "payload.x[2].y " - "from \"t/#\" ", - ?assertMatch({ok, #{<<"payload">> := #{<<"x">> := [1, #{<<"y">> := [1,2]}, 3]}}}, - emqx_rule_sqltester:test( - #{sql => Sql0, - context => #{payload => <<"{\"x\": [1,{\"y\": [1,2]},3]}">>, - topic => <<"t/a">>}})), + Sql0 = + "select " + "payload," + "payload.x[2].y " + "from \"t/#\" ", + ?assertMatch( + {ok, #{<<"payload">> := #{<<"x">> := [1, #{<<"y">> := [1, 2]}, 3]}}}, + emqx_rule_sqltester:test( + #{ + sql => Sql0, + context => #{ + payload => <<"{\"x\": [1,{\"y\": [1,2]},3]}">>, + topic => <<"t/a">> + } + } + ) + ), %% same as above but don't select payload: - Sql1 = "select " - "payload.x[2].y as b " - "from \"t/#\" ", - ?assertMatch({ok, #{<<"b">> := [1,2]}}, emqx_rule_sqltester:test( - #{sql => Sql1, - context => #{payload => <<"{\"x\": [1,{\"y\": [1,2]},3]}">>, - topic => <<"t/a">>}})), + Sql1 = + "select " + "payload.x[2].y as b " + "from \"t/#\" ", + ?assertMatch( + {ok, #{<<"b">> := [1, 2]}}, + emqx_rule_sqltester:test( + #{ + sql => Sql1, + context => #{ + payload => <<"{\"x\": [1,{\"y\": [1,2]},3]}">>, + topic => <<"t/a">> + } + } + ) + ), %% same as above but add 'as' clause: - Sql2 = "select " - "payload.x[2].y as b.c " - "from \"t/#\" ", - ?assertMatch({ok, #{<<"b">> := #{<<"c">> := [1,2]}}}, emqx_rule_sqltester:test( - #{sql => Sql2, - context => #{payload => <<"{\"x\": [1,{\"y\": [1,2]},3]}">>, - topic => <<"t/a">>}})). + Sql2 = + "select " + "payload.x[2].y as b.c " + "from \"t/#\" ", + ?assertMatch( + {ok, #{<<"b">> := #{<<"c">> := [1, 2]}}}, + emqx_rule_sqltester:test( + #{ + sql => Sql2, + context => #{ + payload => <<"{\"x\": [1,{\"y\": [1,2]},3]}">>, + topic => <<"t/a">> + } + } + ) + ). 
t_sqlparse_array_index_4(_Config) -> %% array with json string payload: - Sql0 = "select " - "0 as payload.x[2].y " - "from \"t/#\" ", - ?assertMatch({ok, #{<<"payload">> := #{<<"x">> := [#{<<"y">> := 0}]}}}, - emqx_rule_sqltester:test( - #{sql => Sql0, - context => #{payload => <<"{\"x\": [1,{\"y\": [1,2]},3]}">>, - topic => <<"t/a">>}})), + Sql0 = + "select " + "0 as payload.x[2].y " + "from \"t/#\" ", + ?assertMatch( + {ok, #{<<"payload">> := #{<<"x">> := [#{<<"y">> := 0}]}}}, + emqx_rule_sqltester:test( + #{ + sql => Sql0, + context => #{ + payload => <<"{\"x\": [1,{\"y\": [1,2]},3]}">>, + topic => <<"t/a">> + } + } + ) + ), %% array with json string payload, and also select payload.x: - Sql1 = "select " - "payload.x, " - "0 as payload.x[2].y " - "from \"t/#\" ", - ?assertMatch({ok, #{<<"payload">> := #{<<"x">> := [1, #{<<"y">> := 0}, 3]}}}, - emqx_rule_sqltester:test( - #{sql => Sql1, - context => #{payload => <<"{\"x\": [1,{\"y\": [1,2]},3]}">>, - topic => <<"t/a">>}})). + Sql1 = + "select " + "payload.x, " + "0 as payload.x[2].y " + "from \"t/#\" ", + ?assertMatch( + {ok, #{<<"payload">> := #{<<"x">> := [1, #{<<"y">> := 0}, 3]}}}, + emqx_rule_sqltester:test( + #{ + sql => Sql1, + context => #{ + payload => <<"{\"x\": [1,{\"y\": [1,2]},3]}">>, + topic => <<"t/a">> + } + } + ) + ). t_sqlparse_array_index_5(_Config) -> - Sql00 = "select " - " [1,2,3,4] " - "from \"t/#\" ", + Sql00 = + "select " + " [1,2,3,4] " + "from \"t/#\" ", {ok, Res00} = emqx_rule_sqltester:test( - #{sql => Sql00, - context => #{payload => <<"">>, - topic => <<"t/a">>}}), - ?assert(lists:any(fun({_K, V}) -> - V =:= [1,2,3,4] - end, maps:to_list(Res00))). + #{ + sql => Sql00, + context => #{ + payload => <<"">>, + topic => <<"t/a">> + } + } + ), + ?assert( + lists:any( + fun({_K, V}) -> + V =:= [1, 2, 3, 4] + end, + maps:to_list(Res00) + ) + ). 
t_sqlparse_select_matadata_1(_Config) -> %% array with json string payload: - Sql0 = "select " - "payload " - "from \"t/#\" ", - ?assertNotMatch({ok, #{<<"payload">> := <<"abc">>, metadata := _}}, - emqx_rule_sqltester:test( - #{sql => Sql0, - context => #{payload => <<"abc">>, - topic => <<"t/a">>}})), - Sql1 = "select " - "payload, metadata " - "from \"t/#\" ", - ?assertMatch({ok, #{<<"payload">> := <<"abc">>, <<"metadata">> := _}}, - emqx_rule_sqltester:test( - #{sql => Sql1, - context => #{payload => <<"abc">>, - topic => <<"t/a">>}})). + Sql0 = + "select " + "payload " + "from \"t/#\" ", + ?assertNotMatch( + {ok, #{<<"payload">> := <<"abc">>, metadata := _}}, + emqx_rule_sqltester:test( + #{ + sql => Sql0, + context => #{ + payload => <<"abc">>, + topic => <<"t/a">> + } + } + ) + ), + Sql1 = + "select " + "payload, metadata " + "from \"t/#\" ", + ?assertMatch( + {ok, #{<<"payload">> := <<"abc">>, <<"metadata">> := _}}, + emqx_rule_sqltester:test( + #{ + sql => Sql1, + context => #{ + payload => <<"abc">>, + topic => <<"t/a">> + } + } + ) + ). 
t_sqlparse_array_range_1(_Config) -> %% get a range of list - Sql0 = "select " - " payload.a[1..4] as c " - "from \"t/#\" ", - ?assertMatch({ok, #{<<"c">> := [0,1,2,3]}}, emqx_rule_sqltester:test( - #{sql => Sql0, - context => #{payload => <<"{\"a\":[0,1,2,3,4,5]}">>, - topic => <<"t/a">>}})), - %% get a range from non-list data - Sql02 = "select " - " payload.a[1..4] as c " - "from \"t/#\" ", - ?assertMatch({error, {select_and_transform_error, {error,{range_get,non_list_data},_}}}, + Sql0 = + "select " + " payload.a[1..4] as c " + "from \"t/#\" ", + ?assertMatch( + {ok, #{<<"c">> := [0, 1, 2, 3]}}, emqx_rule_sqltester:test( - #{sql => Sql02, + #{ + sql => Sql0, + context => #{ + payload => <<"{\"a\":[0,1,2,3,4,5]}">>, + topic => <<"t/a">> + } + } + ) + ), + %% get a range from non-list data + Sql02 = + "select " + " payload.a[1..4] as c " + "from \"t/#\" ", + ?assertMatch( + {error, {select_and_transform_error, {error, {range_get, non_list_data}, _}}}, + emqx_rule_sqltester:test( + #{ + sql => Sql02, context => - #{payload => <<"{\"x\":[0,1,2,3,4,5]}">>, - topic => <<"t/a">>}})), + #{ + payload => <<"{\"x\":[0,1,2,3,4,5]}">>, + topic => <<"t/a">> + } + } + ) + ), %% construct a range: - Sql1 = "select " - " [1..4] as c, " - " 5 as c[-0], " - " 6 as c[-0], " - " 0 as c[0] " - "from \"t/#\" ", - ?assertMatch({ok, #{<<"c">> := [0,1,2,3,4,5,6]}}, emqx_rule_sqltester:test( - #{sql => Sql1, - context => #{payload => <<"">>, - topic => <<"t/a">>}})). + Sql1 = + "select " + " [1..4] as c, " + " 5 as c[-0], " + " 6 as c[-0], " + " 0 as c[0] " + "from \"t/#\" ", + ?assertMatch( + {ok, #{<<"c">> := [0, 1, 2, 3, 4, 5, 6]}}, + emqx_rule_sqltester:test( + #{ + sql => Sql1, + context => #{ + payload => <<"">>, + topic => <<"t/a">> + } + } + ) + ). 
t_sqlparse_array_range_2(_Config) -> %% construct a range without 'as' - Sql00 = "select " - " [1..4] " - "from \"t/#\" ", + Sql00 = + "select " + " [1..4] " + "from \"t/#\" ", {ok, Res00} = emqx_rule_sqltester:test( - #{sql => Sql00, - context => #{payload => <<"">>, - topic => <<"t/a">>}}), - ?assert(lists:any(fun({_K, V}) -> - V =:= [1,2,3,4] - end, maps:to_list(Res00))), + #{ + sql => Sql00, + context => #{ + payload => <<"">>, + topic => <<"t/a">> + } + } + ), + ?assert( + lists:any( + fun({_K, V}) -> + V =:= [1, 2, 3, 4] + end, + maps:to_list(Res00) + ) + ), %% construct a range without 'as' - Sql01 = "select " - " a[2..4] " - "from \"t/#\" ", - ?assertMatch({ok, #{<<"a">> := [2,3,4]}}, + Sql01 = + "select " + " a[2..4] " + "from \"t/#\" ", + ?assertMatch( + {ok, #{<<"a">> := [2, 3, 4]}}, emqx_rule_sqltester:test( - #{sql => Sql01, - context => #{<<"a">> => [1,2,3,4,5], - topic => <<"t/a">>}})), + #{ + sql => Sql01, + context => #{ + <<"a">> => [1, 2, 3, 4, 5], + topic => <<"t/a">> + } + } + ) + ), %% get a range of list without 'as' - Sql02 = "select " - " payload.a[1..4] " - "from \"t/#\" ", - ?assertMatch({ok, #{<<"payload">> := #{<<"a">> := [0,1,2,3]}}}, emqx_rule_sqltester:test( - #{sql => Sql02, - context => #{payload => <<"{\"a\":[0,1,2,3,4,5]}">>, - topic => <<"t/a">>}})). + Sql02 = + "select " + " payload.a[1..4] " + "from \"t/#\" ", + ?assertMatch( + {ok, #{<<"payload">> := #{<<"a">> := [0, 1, 2, 3]}}}, + emqx_rule_sqltester:test( + #{ + sql => Sql02, + context => #{ + payload => <<"{\"a\":[0,1,2,3,4,5]}">>, + topic => <<"t/a">> + } + } + ) + ). 
t_sqlparse_true_false(_Config) -> %% construct a range without 'as' - Sql00 = "select " - " true as a, false as b, " - " false as x.y, true as c[-0] " - "from \"t/#\" ", + Sql00 = + "select " + " true as a, false as b, " + " false as x.y, true as c[-0] " + "from \"t/#\" ", {ok, Res00} = emqx_rule_sqltester:test( - #{sql => Sql00, - context => #{payload => <<"">>, - topic => <<"t/a">>}}), - ?assertMatch(#{<<"a">> := true, <<"b">> := false, - <<"x">> := #{<<"y">> := false}, - <<"c">> := [true] - }, Res00). + #{ + sql => Sql00, + context => #{ + payload => <<"">>, + topic => <<"t/a">> + } + } + ), + ?assertMatch( + #{ + <<"a">> := true, + <<"b">> := false, + <<"x">> := #{<<"y">> := false}, + <<"c">> := [true] + }, + Res00 + ). t_sqlparse_undefined_variable(_Config) -> %% undefined == undefined - Sql00 = "select " - "a, b " - "from \"t/#\" " - "where a = b" - , + Sql00 = + "select " + "a, b " + "from \"t/#\" " + "where a = b", + {ok, Res00} = emqx_rule_sqltester:test( - #{sql => Sql00, context => #{payload => <<"">>, topic => <<"t/a">>}}), + #{sql => Sql00, context => #{payload => <<"">>, topic => <<"t/a">>}} + ), ?assertEqual(#{<<"a">> => undefined, <<"b">> => undefined}, Res00), ?assertEqual(2, map_size(Res00)), %% undefined compare to non-undefined variables should return false - Sql01 = "select " - "a, b " - "from \"t/#\" " - "where a > b" - , + Sql01 = + "select " + "a, b " + "from \"t/#\" " + "where a > b", + {error, nomatch} = emqx_rule_sqltester:test( - #{sql => Sql01, - context => #{payload => <<"{\"b\":1}">>, topic => <<"t/a">>}}), - Sql02 = "select " - "a < b as c " - "from \"t/#\" " - , + #{ + sql => Sql01, + context => #{payload => <<"{\"b\":1}">>, topic => <<"t/a">>} + } + ), + Sql02 = + "select " + "a < b as c " + "from \"t/#\" ", + {ok, Res02} = emqx_rule_sqltester:test( - #{sql => Sql02, - context => #{payload => <<"{\"b\":1}">>, topic => <<"t/a">>}}), + #{ + sql => Sql02, + context => #{payload => <<"{\"b\":1}">>, topic => <<"t/a">>} + } + ), 
?assertMatch(#{<<"c">> := false}, Res02). t_sqlparse_new_map(_Config) -> %% construct a range without 'as' - Sql00 = "select " - " map_new() as a, map_new() as b, " - " map_new() as x.y, map_new() as c[-0] " - "from \"t/#\" ", + Sql00 = + "select " + " map_new() as a, map_new() as b, " + " map_new() as x.y, map_new() as c[-0] " + "from \"t/#\" ", {ok, Res00} = emqx_rule_sqltester:test( - #{sql => Sql00, - context => #{payload => <<"">>, - topic => <<"t/a">>}}), - ?assertMatch(#{<<"a">> := #{}, <<"b">> := #{}, - <<"x">> := #{<<"y">> := #{}}, - <<"c">> := [#{}] - }, Res00). + #{ + sql => Sql00, + context => #{ + payload => <<"">>, + topic => <<"t/a">> + } + } + ), + ?assertMatch( + #{ + <<"a">> := #{}, + <<"b">> := #{}, + <<"x">> := #{<<"y">> := #{}}, + <<"c">> := [#{}] + }, + Res00 + ). t_sqlparse_payload_as(_Config) -> %% https://github.com/emqx/emqx/issues/3866 - Sql00 = "SELECT " - " payload, map_get('engineWorkTime', payload.params, -1) as payload.params.engineWorkTime, " - " map_get('hydOilTem', payload.params, -1) as payload.params.hydOilTem " - "FROM \"t/#\" ", - Payload1 = <<"{ \"msgId\": 1002, \"params\": { \"convertTemp\": 20, \"engineSpeed\": 42, \"hydOilTem\": 30 } }">>, + Sql00 = + "SELECT " + " payload, map_get('engineWorkTime', payload.params, -1) as payload.params.engineWorkTime, " + " map_get('hydOilTem', payload.params, -1) as payload.params.hydOilTem " + "FROM \"t/#\" ", + Payload1 = + <<"{ \"msgId\": 1002, \"params\": { \"convertTemp\": 20, \"engineSpeed\": 42, \"hydOilTem\": 30 } }">>, {ok, Res01} = emqx_rule_sqltester:test( - #{sql => Sql00, - context => #{payload => Payload1, - topic => <<"t/a">>}}), - ?assertMatch(#{ - <<"payload">> := #{ - <<"params">> := #{ - <<"convertTemp">> := 20, - <<"engineSpeed">> := 42, - <<"engineWorkTime">> := -1, - <<"hydOilTem">> := 30 + #{ + sql => Sql00, + context => #{ + payload => Payload1, + topic => <<"t/a">> } } - }, Res01), + ), + ?assertMatch( + #{ + <<"payload">> := #{ + <<"params">> := #{ + 
<<"convertTemp">> := 20, + <<"engineSpeed">> := 42, + <<"engineWorkTime">> := -1, + <<"hydOilTem">> := 30 + } + } + }, + Res01 + ), Payload2 = <<"{ \"msgId\": 1002, \"params\": { \"convertTemp\": 20, \"engineSpeed\": 42 } }">>, {ok, Res02} = emqx_rule_sqltester:test( - #{sql => Sql00, - context => #{payload => Payload2, - topic => <<"t/a">>}}), - ?assertMatch(#{ - <<"payload">> := #{ - <<"params">> := #{ - <<"convertTemp">> := 20, - <<"engineSpeed">> := 42, - <<"engineWorkTime">> := -1, - <<"hydOilTem">> := -1 + #{ + sql => Sql00, + context => #{ + payload => Payload2, + topic => <<"t/a">> } } - }, Res02). + ), + ?assertMatch( + #{ + <<"payload">> := #{ + <<"params">> := #{ + <<"convertTemp">> := 20, + <<"engineSpeed">> := 42, + <<"engineWorkTime">> := -1, + <<"hydOilTem">> := -1 + } + } + }, + Res02 + ). t_sqlparse_nested_get(_Config) -> - Sql = "select payload as p, p.a.b as c " - "from \"t/#\" ", - ?assertMatch({ok,#{<<"c">> := 0}}, + Sql = + "select payload as p, p.a.b as c " + "from \"t/#\" ", + ?assertMatch( + {ok, #{<<"c">> := 0}}, emqx_rule_sqltester:test( - #{sql => Sql, - context => #{ - topic => <<"t/1">>, - payload => <<"{\"a\": {\"b\": 0}}">> - }})). + #{ + sql => Sql, + context => #{ + topic => <<"t/1">>, + payload => <<"{\"a\": {\"b\": 0}}">> + } + } + ) + ). 
t_sqlparse_invalid_json(_Config) -> - Sql02 = "select " + Sql02 = + "select " " payload.a[1..4] as c " "from \"t/#\" ", - ?assertMatch({error, {select_and_transform_error, {error,{decode_json_failed,_},_}}}, - emqx_rule_sqltester:test( - #{sql => Sql02, - context => - #{payload => <<"{\"x\":[0,1,2,3,}">>, - topic => <<"t/a">>}})), + ?assertMatch( + {error, {select_and_transform_error, {error, {decode_json_failed, _}, _}}}, + emqx_rule_sqltester:test( + #{ + sql => Sql02, + context => + #{ + payload => <<"{\"x\":[0,1,2,3,}">>, + topic => <<"t/a">> + } + } + ) + ), - - Sql2 = "foreach payload.sensors " + Sql2 = + "foreach payload.sensors " "do item.cmd as msg_type " "from \"t/#\" ", - ?assertMatch({error, {select_and_collect_error, {error,{decode_json_failed,_},_}}}, - emqx_rule_sqltester:test( - #{sql => Sql2, - context => - #{payload => - <<"{\"sensors\": [{\"cmd\":\"1\"} {\"cmd\":}]}">>, - topic => <<"t/a">>}})). + ?assertMatch( + {error, {select_and_collect_error, {error, {decode_json_failed, _}, _}}}, + emqx_rule_sqltester:test( + #{ + sql => Sql2, + context => + #{ + payload => + <<"{\"sensors\": [{\"cmd\":\"1\"} {\"cmd\":}]}">>, + topic => <<"t/a">> + } + } + ) + ). %%------------------------------------------------------------------------------ %% Test cases for telemetry functions %%------------------------------------------------------------------------------ t_get_basic_usage_info_0(_Config) -> ?assertEqual( - #{ num_rules => 0 - , referenced_bridges => #{} - }, - emqx_rule_engine:get_basic_usage_info()), + #{ + num_rules => 0, + referenced_bridges => #{} + }, + emqx_rule_engine:get_basic_usage_info() + ), ok. 
t_get_basic_usage_info_1(_Config) -> {ok, _} = emqx_rule_engine:create_rule( - #{id => <<"rule:t_get_basic_usage_info:1">>, - sql => <<"select 1 from topic">>, - outputs => - [ #{function => <<"erlang:hibernate">>, args => #{}} - , #{function => console} - , <<"http:my_http_bridge">> - , <<"http:my_http_bridge">> - ]}), + #{ + id => <<"rule:t_get_basic_usage_info:1">>, + sql => <<"select 1 from topic">>, + outputs => + [ + #{function => <<"erlang:hibernate">>, args => #{}}, + #{function => console}, + <<"http:my_http_bridge">>, + <<"http:my_http_bridge">> + ] + } + ), {ok, _} = emqx_rule_engine:create_rule( - #{id => <<"rule:t_get_basic_usage_info:2">>, - sql => <<"select 1 from topic">>, - outputs => - [ <<"mqtt:my_mqtt_bridge">> - , <<"http:my_http_bridge">> - ]}), + #{ + id => <<"rule:t_get_basic_usage_info:2">>, + sql => <<"select 1 from topic">>, + outputs => + [ + <<"mqtt:my_mqtt_bridge">>, + <<"http:my_http_bridge">> + ] + } + ), ?assertEqual( - #{ num_rules => 2 - , referenced_bridges => - #{ mqtt => 1 - , http => 3 - } - }, - emqx_rule_engine:get_basic_usage_info()), + #{ + num_rules => 2, + referenced_bridges => + #{ + mqtt => 1, + http => 3 + } + }, + emqx_rule_engine:get_basic_usage_info() + ), ok. %%------------------------------------------------------------------------------ @@ -1449,8 +2273,10 @@ t_get_basic_usage_info_1(_Config) -> republish_output(Topic) -> republish_output(Topic, <<"${payload}">>). republish_output(Topic, Payload) -> - #{function => republish, - args => #{payload => Payload, topic => Topic, qos => 0, retain => false}}. + #{ + function => republish, + args => #{payload => Payload, topic => Topic, qos => 0, retain => false} + }. 
make_simple_rule_with_ts(RuleId, Ts) when is_binary(RuleId) -> SQL = <<"select * from \"simple/topic\"">>, @@ -1467,15 +2293,15 @@ make_simple_rule(RuleId, SQL, Topics) when is_binary(RuleId) -> make_simple_rule(RuleId, SQL, Topics, Ts) when is_binary(RuleId) -> #{ - id => RuleId, - sql => SQL, - from => Topics, - fields => [<<"*">>], - is_foreach => false, - conditions => {}, - outputs => [#{mod => emqx_rule_outputs, func => console, args => #{}}], - description => <<"simple rule">>, - created_at => Ts + id => RuleId, + sql => SQL, + from => Topics, + fields => [<<"*">>], + is_foreach => false, + conditions => {}, + outputs => [#{mod => emqx_rule_outputs, func => console, args => #{}}], + description => <<"simple rule">>, + created_at => Ts }. output_record_triggered_events(Data = #{event := EventName}, _Envs, _Args) -> @@ -1488,34 +2314,39 @@ verify_event(EventName) -> [] -> ct:fail({no_such_event, EventName, ets:tab2list(events_record_tab)}); Records -> - [begin - %% verify fields can be formatted to JSON string - _ = emqx_json:encode(Fields), - %% verify metadata fields - verify_metadata_fields(EventName, Fields), - %% verify available fields for each event name - verify_event_fields(EventName, Fields) - end || {_Name, Fields} <- Records] + [ + begin + %% verify fields can be formatted to JSON string + _ = emqx_json:encode(Fields), + %% verify metadata fields + verify_metadata_fields(EventName, Fields), + %% verify available fields for each event name + verify_event_fields(EventName, Fields) + end + || {_Name, Fields} <- Records + ] end. verify_metadata_fields(_EventName, #{metadata := Metadata}) -> ?assertMatch( #{rule_id := <<"rule:t_events">>}, - Metadata). + Metadata + ). 
verify_event_fields('message.publish', Fields) -> - #{id := ID, - clientid := ClientId, - username := Username, - payload := Payload, - peerhost := PeerHost, - topic := Topic, - qos := QoS, - flags := Flags, - headers := Headers, - pub_props := Properties, - timestamp := Timestamp, - publish_received_at := EventAt + #{ + id := ID, + clientid := ClientId, + username := Username, + payload := Payload, + peerhost := PeerHost, + topic := Topic, + qos := QoS, + flags := Flags, + headers := Headers, + pub_props := Properties, + timestamp := Timestamp, + publish_received_at := EventAt } = Fields, Now = erlang:system_time(millisecond), TimestampElapse = Now - Timestamp, @@ -1530,25 +2361,25 @@ verify_event_fields('message.publish', Fields) -> ?assert(is_map(Flags)), ?assert(is_map(Headers)), ?assertMatch(#{'Message-Expiry-Interval' := 60}, Properties), - ?assert(0 =< TimestampElapse andalso TimestampElapse =< 60*1000), - ?assert(0 =< RcvdAtElapse andalso RcvdAtElapse =< 60*1000), + ?assert(0 =< TimestampElapse andalso TimestampElapse =< 60 * 1000), + ?assert(0 =< RcvdAtElapse andalso RcvdAtElapse =< 60 * 1000), ?assert(EventAt =< Timestamp); - verify_event_fields('client.connected', Fields) -> - #{clientid := ClientId, - username := Username, - mountpoint := MountPoint, - peername := PeerName, - sockname := SockName, - proto_name := ProtoName, - proto_ver := ProtoVer, - keepalive := Keepalive, - clean_start := CleanStart, - expiry_interval := ExpiryInterval, - is_bridge := IsBridge, - conn_props := Properties, - timestamp := Timestamp, - connected_at := EventAt + #{ + clientid := ClientId, + username := Username, + mountpoint := MountPoint, + peername := PeerName, + sockname := SockName, + proto_name := ProtoName, + proto_ver := ProtoVer, + keepalive := Keepalive, + clean_start := CleanStart, + expiry_interval := ExpiryInterval, + is_bridge := IsBridge, + conn_props := Properties, + timestamp := Timestamp, + connected_at := EventAt } = Fields, Now = 
erlang:system_time(millisecond), TimestampElapse = Now - Timestamp, @@ -1565,19 +2396,19 @@ verify_event_fields('client.connected', Fields) -> ?assertEqual(60, ExpiryInterval), ?assertEqual(false, IsBridge), ?assertMatch(#{'Session-Expiry-Interval' := 60}, Properties), - ?assert(0 =< TimestampElapse andalso TimestampElapse =< 60*1000), - ?assert(0 =< RcvdAtElapse andalso RcvdAtElapse =< 60*1000), + ?assert(0 =< TimestampElapse andalso TimestampElapse =< 60 * 1000), + ?assert(0 =< RcvdAtElapse andalso RcvdAtElapse =< 60 * 1000), ?assert(EventAt =< Timestamp); - verify_event_fields('client.disconnected', Fields) -> - #{reason := Reason, - clientid := ClientId, - username := Username, - peername := PeerName, - sockname := SockName, - disconn_props := Properties, - timestamp := Timestamp, - disconnected_at := EventAt + #{ + reason := Reason, + clientid := ClientId, + username := Username, + peername := PeerName, + sockname := SockName, + disconn_props := Properties, + timestamp := Timestamp, + disconnected_at := EventAt } = Fields, Now = erlang:system_time(millisecond), TimestampElapse = Now - Timestamp, @@ -1588,18 +2419,20 @@ verify_event_fields('client.disconnected', Fields) -> verify_peername(PeerName), verify_peername(SockName), ?assertMatch(#{'User-Property' := #{<<"reason">> := <<"normal">>}}, Properties), - ?assert(0 =< TimestampElapse andalso TimestampElapse =< 60*1000), - ?assert(0 =< RcvdAtElapse andalso RcvdAtElapse =< 60*1000), + ?assert(0 =< TimestampElapse andalso TimestampElapse =< 60 * 1000), + ?assert(0 =< RcvdAtElapse andalso RcvdAtElapse =< 60 * 1000), ?assert(EventAt =< Timestamp); - -verify_event_fields(SubUnsub, Fields) when SubUnsub == 'session.subscribed' - ; SubUnsub == 'session.unsubscribed' -> - #{clientid := ClientId, - username := Username, - peerhost := PeerHost, - topic := Topic, - qos := QoS, - timestamp := Timestamp +verify_event_fields(SubUnsub, Fields) when + SubUnsub == 'session.subscribed'; + SubUnsub == 'session.unsubscribed' +-> 
+ #{ + clientid := ClientId, + username := Username, + peerhost := PeerHost, + topic := Topic, + qos := QoS, + timestamp := Timestamp } = Fields, Now = erlang:system_time(millisecond), TimestampElapse = Now - Timestamp, @@ -1614,28 +2447,31 @@ verify_event_fields(SubUnsub, Fields) when SubUnsub == 'session.subscribed' 'session.subscribed' -> sub_props; 'session.unsubscribed' -> unsub_props end, - ?assertMatch(#{'User-Property' := #{<<"topic_name">> := <<"t1">>}}, - maps:get(PropKey, Fields)), - ?assert(0 =< TimestampElapse andalso TimestampElapse =< 60*1000); - + ?assertMatch( + #{'User-Property' := #{<<"topic_name">> := <<"t1">>}}, + maps:get(PropKey, Fields) + ), + ?assert(0 =< TimestampElapse andalso TimestampElapse =< 60 * 1000); verify_event_fields('delivery.dropped', Fields) -> - #{event := 'delivery.dropped', - id := ID, - metadata := #{rule_id := RuleId}, - reason := Reason, - clientid := ClientId, - username := Username, - from_clientid := FromClientId, - from_username := FromUsername, - node := Node, - payload := Payload, - peerhost := PeerHost, - pub_props := Properties, - publish_received_at := EventAt, - qos := QoS, - flags := Flags, - timestamp := Timestamp, - topic := Topic} = Fields, + #{ + event := 'delivery.dropped', + id := ID, + metadata := #{rule_id := RuleId}, + reason := Reason, + clientid := ClientId, + username := Username, + from_clientid := FromClientId, + from_username := FromUsername, + node := Node, + payload := Payload, + peerhost := PeerHost, + pub_props := Properties, + publish_received_at := EventAt, + qos := QoS, + flags := Flags, + timestamp := Timestamp, + topic := Topic + } = Fields, Now = erlang:system_time(millisecond), TimestampElapse = Now - Timestamp, RcvdAtElapse = Now - EventAt, @@ -1653,23 +2489,23 @@ verify_event_fields('delivery.dropped', Fields) -> ?assertEqual(1, QoS), ?assert(is_map(Flags)), ?assertMatch(#{'Message-Expiry-Interval' := 60}, Properties), - ?assert(0 =< TimestampElapse andalso TimestampElapse =< 
60*1000), - ?assert(0 =< RcvdAtElapse andalso RcvdAtElapse =< 60*1000), + ?assert(0 =< TimestampElapse andalso TimestampElapse =< 60 * 1000), + ?assert(0 =< RcvdAtElapse andalso RcvdAtElapse =< 60 * 1000), ?assert(EventAt =< Timestamp); - verify_event_fields('message.dropped', Fields) -> - #{id := ID, - reason := Reason, - clientid := ClientId, - username := Username, - payload := Payload, - peerhost := PeerHost, - topic := Topic, - qos := QoS, - flags := Flags, - pub_props := Properties, - timestamp := Timestamp, - publish_received_at := EventAt + #{ + id := ID, + reason := Reason, + clientid := ClientId, + username := Username, + payload := Payload, + peerhost := PeerHost, + topic := Topic, + qos := QoS, + flags := Flags, + pub_props := Properties, + timestamp := Timestamp, + publish_received_at := EventAt } = Fields, Now = erlang:system_time(millisecond), TimestampElapse = Now - Timestamp, @@ -1684,24 +2520,24 @@ verify_event_fields('message.dropped', Fields) -> ?assertEqual(1, QoS), ?assert(is_map(Flags)), ?assertMatch(#{'Message-Expiry-Interval' := 60}, Properties), - ?assert(0 =< TimestampElapse andalso TimestampElapse =< 60*1000), - ?assert(0 =< RcvdAtElapse andalso RcvdAtElapse =< 60*1000), + ?assert(0 =< TimestampElapse andalso TimestampElapse =< 60 * 1000), + ?assert(0 =< RcvdAtElapse andalso RcvdAtElapse =< 60 * 1000), ?assert(EventAt =< Timestamp); - verify_event_fields('message.delivered', Fields) -> - #{id := ID, - clientid := ClientId, - username := Username, - from_clientid := FromClientId, - from_username := FromUsername, - payload := Payload, - peerhost := PeerHost, - topic := Topic, - qos := QoS, - flags := Flags, - pub_props := Properties, - timestamp := Timestamp, - publish_received_at := EventAt + #{ + id := ID, + clientid := ClientId, + username := Username, + from_clientid := FromClientId, + from_username := FromUsername, + payload := Payload, + peerhost := PeerHost, + topic := Topic, + qos := QoS, + flags := Flags, + pub_props := 
Properties, + timestamp := Timestamp, + publish_received_at := EventAt } = Fields, Now = erlang:system_time(millisecond), TimestampElapse = Now - Timestamp, @@ -1717,25 +2553,25 @@ verify_event_fields('message.delivered', Fields) -> ?assertEqual(1, QoS), ?assert(is_map(Flags)), ?assertMatch(#{'Message-Expiry-Interval' := 60}, Properties), - ?assert(0 =< TimestampElapse andalso TimestampElapse =< 60*1000), - ?assert(0 =< RcvdAtElapse andalso RcvdAtElapse =< 60*1000), + ?assert(0 =< TimestampElapse andalso TimestampElapse =< 60 * 1000), + ?assert(0 =< RcvdAtElapse andalso RcvdAtElapse =< 60 * 1000), ?assert(EventAt =< Timestamp); - verify_event_fields('message.acked', Fields) -> - #{id := ID, - clientid := ClientId, - username := Username, - from_clientid := FromClientId, - from_username := FromUsername, - payload := Payload, - peerhost := PeerHost, - topic := Topic, - qos := QoS, - flags := Flags, - pub_props := PubProps, - puback_props := PubAckProps, - timestamp := Timestamp, - publish_received_at := EventAt + #{ + id := ID, + clientid := ClientId, + username := Username, + from_clientid := FromClientId, + from_username := FromUsername, + payload := Payload, + peerhost := PeerHost, + topic := Topic, + qos := QoS, + flags := Flags, + pub_props := PubProps, + puback_props := PubAckProps, + timestamp := Timestamp, + publish_received_at := EventAt } = Fields, Now = erlang:system_time(millisecond), TimestampElapse = Now - Timestamp, @@ -1752,23 +2588,23 @@ verify_event_fields('message.acked', Fields) -> ?assert(is_map(Flags)), ?assertMatch(#{'Message-Expiry-Interval' := 60}, PubProps), ?assert(is_map(PubAckProps)), - ?assert(0 =< TimestampElapse andalso TimestampElapse =< 60*1000), - ?assert(0 =< RcvdAtElapse andalso RcvdAtElapse =< 60*1000), + ?assert(0 =< TimestampElapse andalso TimestampElapse =< 60 * 1000), + ?assert(0 =< RcvdAtElapse andalso RcvdAtElapse =< 60 * 1000), ?assert(EventAt =< Timestamp); - verify_event_fields('client.connack', Fields) -> - #{clientid 
:= ClientId, - clean_start := CleanStart, - username := Username, - peername := PeerName, - sockname := SockName, - proto_name := ProtoName, - proto_ver := ProtoVer, - keepalive := Keepalive, - expiry_interval := ExpiryInterval, - conn_props := Properties, - reason_code := Reason, - timestamp := Timestamp + #{ + clientid := ClientId, + clean_start := CleanStart, + username := Username, + peername := PeerName, + sockname := SockName, + proto_name := ProtoName, + proto_ver := ProtoVer, + keepalive := Keepalive, + expiry_interval := ExpiryInterval, + conn_props := Properties, + reason_code := Reason, + timestamp := Timestamp } = Fields, Now = erlang:system_time(millisecond), TimestampElapse = Now - Timestamp, @@ -1783,22 +2619,32 @@ verify_event_fields('client.connack', Fields) -> ?assert(is_boolean(CleanStart)), ?assertEqual(60000, ExpiryInterval), ?assertMatch(#{'Session-Expiry-Interval' := 60}, Properties), - ?assert(0 =< TimestampElapse andalso TimestampElapse =< 60*1000); - + ?assert(0 =< TimestampElapse andalso TimestampElapse =< 60 * 1000); verify_event_fields('client.check_authz_complete', Fields) -> - #{clientid := ClientId, - action := Action, - result := Result, - topic := Topic, - authz_source := AuthzSource, - username := Username - } = Fields, + #{ + clientid := ClientId, + action := Action, + result := Result, + topic := Topic, + authz_source := AuthzSource, + username := Username + } = Fields, ?assertEqual(<<"t1">>, Topic), ?assert(lists:member(Action, [subscribe, publish])), ?assert(lists:member(Result, [allow, deny])), - ?assert(lists:member(AuthzSource, [cache, default, file, - http, mongodb, mysql, redis, - postgresql, built_in_database])), + ?assert( + lists:member(AuthzSource, [ + cache, + default, + file, + http, + mongodb, + mysql, + redis, + postgresql, + built_in_database + ]) + ), ?assert(lists:member(ClientId, [<<"c_event">>, <<"c_event2">>])), ?assert(lists:member(Username, [<<"u_event">>, <<"u_event2">>])). 
@@ -1807,7 +2653,8 @@ verify_peername(PeerName) -> [IPAddrS, PortS] -> verify_ipaddr(IPAddrS), _ = binary_to_integer(PortS); - _ -> ct:fail({invalid_peername, PeerName}) + _ -> + ct:fail({invalid_peername, PeerName}) end. verify_ipaddr(IPAddrS) -> @@ -1821,24 +2668,31 @@ init_events_counters() -> %%------------------------------------------------------------------------------ deps_path(App, RelativePath) -> Path0 = code:lib_dir(App), - Path = case file:read_link(Path0) of - {ok, Resolved} -> Resolved; - {error, _} -> Path0 - end, + Path = + case file:read_link(Path0) of + {ok, Resolved} -> Resolved; + {error, _} -> Path0 + end, filename:join([Path, RelativePath]). local_path(RelativePath) -> deps_path(emqx_rule_engine, RelativePath). insert_rules(Rules) -> - lists:foreach(fun(Rule) -> - ok = emqx_rule_engine:insert_rule(Rule) - end, Rules). + lists:foreach( + fun(Rule) -> + ok = emqx_rule_engine:insert_rule(Rule) + end, + Rules + ). delete_rules_by_ids(Ids) -> - lists:foreach(fun(Id) -> - ok = emqx_rule_engine:delete_rule(Id) - end, Ids). + lists:foreach( + fun(Id) -> + ok = emqx_rule_engine:delete_rule(Id) + end, + Ids + ). 
delete_rule(#{id := Id}) -> ok = emqx_rule_engine:delete_rule(Id); diff --git a/apps/emqx_rule_engine/test/emqx_rule_engine_api_SUITE.erl b/apps/emqx_rule_engine/test/emqx_rule_engine_api_SUITE.erl index 498e34b4b..79a83ebaa 100644 --- a/apps/emqx_rule_engine/test/emqx_rule_engine_api_SUITE.erl +++ b/apps/emqx_rule_engine/test/emqx_rule_engine_api_SUITE.erl @@ -38,15 +38,19 @@ t_crud_rule_api(_Config) -> }, {201, Rule} = emqx_rule_engine_api:'/rules'(post, #{body => Params0}), %% if we post again with the same params, it return with 400 "rule id already exists" - ?assertMatch({400, #{code := _, message := _Message}}, - emqx_rule_engine_api:'/rules'(post, #{body => Params0})), + ?assertMatch( + {400, #{code := _, message := _Message}}, + emqx_rule_engine_api:'/rules'(post, #{body => Params0}) + ), ?assertEqual(RuleID, maps:get(id, Rule)), {200, Rules} = emqx_rule_engine_api:'/rules'(get, #{}), ct:pal("RList : ~p", [Rules]), ?assert(length(Rules) > 0), - {200, Rule0} = emqx_rule_engine_api:'/rules/:id/reset_metrics'(put, #{bindings => #{id => RuleID}}), + {200, Rule0} = emqx_rule_engine_api:'/rules/:id/reset_metrics'(put, #{ + bindings => #{id => RuleID} + }), ?assertEqual(<<"Reset Success">>, Rule0), {200, Rule1} = emqx_rule_engine_api:'/rules/:id'(get, #{bindings => #{id => RuleID}}), @@ -54,19 +58,26 @@ t_crud_rule_api(_Config) -> ?assertEqual(Rule, Rule1), {200, Rule2} = emqx_rule_engine_api:'/rules/:id'(put, #{ - bindings => #{id => RuleID}, - body => Params0#{<<"sql">> => <<"select * from \"t/b\"">>} - }), + bindings => #{id => RuleID}, + body => Params0#{<<"sql">> => <<"select * from \"t/b\"">>} + }), {200, Rule3} = emqx_rule_engine_api:'/rules/:id'(get, #{bindings => #{id => RuleID}}), %ct:pal("RShow : ~p", [Rule3]), ?assertEqual(Rule3, Rule2), ?assertEqual(<<"select * from \"t/b\"">>, maps:get(sql, Rule3)), - ?assertMatch({204}, emqx_rule_engine_api:'/rules/:id'(delete, - #{bindings => #{id => RuleID}})), + ?assertMatch( + {204}, + 
emqx_rule_engine_api:'/rules/:id'( + delete, + #{bindings => #{id => RuleID}} + ) + ), %ct:pal("Show After Deleted: ~p", [NotFound]), - ?assertMatch({404, #{code := _, message := _Message}}, - emqx_rule_engine_api:'/rules/:id'(get, #{bindings => #{id => RuleID}})), + ?assertMatch( + {404, #{code := _, message := _Message}}, + emqx_rule_engine_api:'/rules/:id'(get, #{bindings => #{id => RuleID}}) + ), ok. diff --git a/apps/emqx_rule_engine/test/emqx_rule_events_SUITE.erl b/apps/emqx_rule_engine/test/emqx_rule_events_SUITE.erl index 0226066b3..ad8d28159 100644 --- a/apps/emqx_rule_engine/test/emqx_rule_events_SUITE.erl +++ b/apps/emqx_rule_engine/test/emqx_rule_events_SUITE.erl @@ -9,23 +9,30 @@ all() -> emqx_common_test_helpers:all(?MODULE). t_mod_hook_fun(_) -> Funcs = emqx_rule_events:module_info(exports), - [?assert(lists:keymember(emqx_rule_events:hook_fun(Event), 1, Funcs)) || - Event <- ['client.connected', - 'client.disconnected', - 'session.subscribed', - 'session.unsubscribed', - 'message.acked', - 'message.dropped', - 'message.delivered' - ]]. + [ + ?assert(lists:keymember(emqx_rule_events:hook_fun(Event), 1, Funcs)) + || Event <- [ + 'client.connected', + 'client.disconnected', + 'session.subscribed', + 'session.unsubscribed', + 'message.acked', + 'message.dropped', + 'message.delivered' + ] + ]. t_printable_maps(_) -> - Headers = #{peerhost => {127,0,0,1}, - peername => {{127,0,0,1}, 9980}, - sockname => {{127,0,0,1}, 1883} - }, + Headers = #{ + peerhost => {127, 0, 0, 1}, + peername => {{127, 0, 0, 1}, 9980}, + sockname => {{127, 0, 0, 1}, 1883} + }, ?assertMatch( - #{peerhost := <<"127.0.0.1">>, - peername := <<"127.0.0.1:9980">>, - sockname := <<"127.0.0.1:1883">> - }, emqx_rule_events:printable_maps(Headers)). + #{ + peerhost := <<"127.0.0.1">>, + peername := <<"127.0.0.1:9980">>, + sockname := <<"127.0.0.1:1883">> + }, + emqx_rule_events:printable_maps(Headers) + ). 
diff --git a/apps/emqx_rule_engine/test/emqx_rule_funcs_SUITE.erl b/apps/emqx_rule_engine/test/emqx_rule_funcs_SUITE.erl index a795e2cda..132f874e4 100644 --- a/apps/emqx_rule_engine/test/emqx_rule_funcs_SUITE.erl +++ b/apps/emqx_rule_engine/test/emqx_rule_funcs_SUITE.erl @@ -35,7 +35,9 @@ t_msgid(_) -> Msg = message(), ?assertEqual(undefined, apply_func(msgid, [], #{})), - ?assertEqual(emqx_guid:to_hexstr(emqx_message:id(Msg)), apply_func(msgid, [], eventmsg_publish(Msg))). + ?assertEqual( + emqx_guid:to_hexstr(emqx_message:id(Msg)), apply_func(msgid, [], eventmsg_publish(Msg)) + ). t_qos(_) -> ?assertEqual(undefined, apply_func(qos, [], #{})), @@ -61,12 +63,12 @@ t_clientid(_) -> ?assertEqual(<<"clientid">>, apply_func(clientid, [], Msg)). t_clientip(_) -> - Msg = emqx_message:set_header(peerhost, {127,0,0,1}, message()), + Msg = emqx_message:set_header(peerhost, {127, 0, 0, 1}, message()), ?assertEqual(undefined, apply_func(clientip, [], #{})), ?assertEqual(<<"127.0.0.1">>, apply_func(clientip, [], eventmsg_publish(Msg))). t_peerhost(_) -> - Msg = emqx_message:set_header(peerhost, {127,0,0,1}, message()), + Msg = emqx_message:set_header(peerhost, {127, 0, 0, 1}, message()), ?assertEqual(undefined, apply_func(peerhost, [], #{})), ?assertEqual(<<"127.0.0.1">>, apply_func(peerhost, [], eventmsg_publish(Msg))). 
@@ -87,7 +89,7 @@ t_str(_) -> ?assertEqual(<<"abc">>, emqx_rule_funcs:str("abc")), ?assertEqual(<<"abc">>, emqx_rule_funcs:str(abc)), ?assertEqual(<<"{\"a\":1}">>, emqx_rule_funcs:str(#{a => 1})), - ?assertEqual(<<"[{\"a\":1},{\"b\":1}]">>, emqx_rule_funcs:str([#{a => 1},#{b => 1}])), + ?assertEqual(<<"[{\"a\":1},{\"b\":1}]">>, emqx_rule_funcs:str([#{a => 1}, #{b => 1}])), ?assertEqual(<<"1">>, emqx_rule_funcs:str(1)), ?assertEqual(<<"2.0">>, emqx_rule_funcs:str(2.0)), ?assertEqual(<<"true">>, emqx_rule_funcs:str(true)), @@ -97,7 +99,9 @@ t_str(_) -> ?assertEqual(<<"abc 你好"/utf8>>, emqx_rule_funcs:str_utf8("abc 你好")), ?assertEqual(<<"abc 你好"/utf8>>, emqx_rule_funcs:str_utf8(<<"abc 你好"/utf8>>)), ?assertEqual(<<"abc">>, emqx_rule_funcs:str_utf8(abc)), - ?assertEqual(<<"{\"a\":\"abc 你好\"}"/utf8>>, emqx_rule_funcs:str_utf8(#{a => <<"abc 你好"/utf8>>})), + ?assertEqual( + <<"{\"a\":\"abc 你好\"}"/utf8>>, emqx_rule_funcs:str_utf8(#{a => <<"abc 你好"/utf8>>}) + ), ?assertEqual(<<"1">>, emqx_rule_funcs:str_utf8(1)), ?assertEqual(<<"2.0">>, emqx_rule_funcs:str_utf8(2.0)), ?assertEqual(<<"true">>, emqx_rule_funcs:str_utf8(true)), @@ -126,7 +130,9 @@ t_float(_) -> ?assertError(_, emqx_rule_funcs:float("a")). t_map(_) -> - ?assertEqual(#{ver => <<"1.0">>, name => "emqx"}, emqx_rule_funcs:map([{ver, <<"1.0">>}, {name, "emqx"}])), + ?assertEqual( + #{ver => <<"1.0">>, name => "emqx"}, emqx_rule_funcs:map([{ver, <<"1.0">>}, {name, "emqx"}]) + ), ?assertEqual(#{<<"a">> => 1}, emqx_rule_funcs:map(<<"{\"a\":1}">>)), ?assertError(_, emqx_rule_funcs:map(<<"a">>)), ?assertError(_, emqx_rule_funcs:map("a")), @@ -151,31 +157,42 @@ t_proc_dict_put_get_del(_) -> ?assertEqual(undefined, emqx_rule_funcs:proc_dict_get(<<"abc">>)). 
t_term_encode(_) -> - TestData = [<<"abc">>, #{a => 1}, #{<<"3">> => [1,2,4]}], - lists:foreach(fun(Data) -> - ?assertEqual(Data, + TestData = [<<"abc">>, #{a => 1}, #{<<"3">> => [1, 2, 4]}], + lists:foreach( + fun(Data) -> + ?assertEqual( + Data, emqx_rule_funcs:term_decode( - emqx_rule_funcs:term_encode(Data))) - end, TestData). + emqx_rule_funcs:term_encode(Data) + ) + ) + end, + TestData + ). t_hexstr2bin(_) -> - ?assertEqual(<<1,2>>, emqx_rule_funcs:hexstr2bin(<<"0102">>)), - ?assertEqual(<<17,33>>, emqx_rule_funcs:hexstr2bin(<<"1121">>)). + ?assertEqual(<<1, 2>>, emqx_rule_funcs:hexstr2bin(<<"0102">>)), + ?assertEqual(<<17, 33>>, emqx_rule_funcs:hexstr2bin(<<"1121">>)). t_bin2hexstr(_) -> - ?assertEqual(<<"0102">>, emqx_rule_funcs:bin2hexstr(<<1,2>>)), - ?assertEqual(<<"1121">>, emqx_rule_funcs:bin2hexstr(<<17,33>>)). + ?assertEqual(<<"0102">>, emqx_rule_funcs:bin2hexstr(<<1, 2>>)), + ?assertEqual(<<"1121">>, emqx_rule_funcs:bin2hexstr(<<17, 33>>)). t_hex_convert(_) -> ?PROPTEST(hex_convert). hex_convert() -> - ?FORALL(L, list(range(0, 255)), - begin - AbitraryBin = list_to_binary(L), - AbitraryBin == emqx_rule_funcs:hexstr2bin( - emqx_rule_funcs:bin2hexstr(AbitraryBin)) - end). + ?FORALL( + L, + list(range(0, 255)), + begin + AbitraryBin = list_to_binary(L), + AbitraryBin == + emqx_rule_funcs:hexstr2bin( + emqx_rule_funcs:bin2hexstr(AbitraryBin) + ) + end + ). t_is_null(_) -> ?assertEqual(true, emqx_rule_funcs:is_null(undefined)), @@ -184,50 +201,80 @@ t_is_null(_) -> ?assertEqual(false, emqx_rule_funcs:is_null(<<"a">>)). t_is_not_null(_) -> - [?assertEqual(emqx_rule_funcs:is_not_null(T), not emqx_rule_funcs:is_null(T)) - || T <- [undefined, a, <<"a">>, <<>>]]. + [ + ?assertEqual(emqx_rule_funcs:is_not_null(T), not emqx_rule_funcs:is_null(T)) + || T <- [undefined, a, <<"a">>, <<>>] + ]. 
t_is_str(_) -> - [?assertEqual(true, emqx_rule_funcs:is_str(T)) - || T <- [<<"a">>, <<>>, <<"abc">>]], - [?assertEqual(false, emqx_rule_funcs:is_str(T)) - || T <- ["a", a, 1]]. + [ + ?assertEqual(true, emqx_rule_funcs:is_str(T)) + || T <- [<<"a">>, <<>>, <<"abc">>] + ], + [ + ?assertEqual(false, emqx_rule_funcs:is_str(T)) + || T <- ["a", a, 1] + ]. t_is_bool(_) -> - [?assertEqual(true, emqx_rule_funcs:is_bool(T)) - || T <- [true, false]], - [?assertEqual(false, emqx_rule_funcs:is_bool(T)) - || T <- ["a", <<>>, a, 2]]. + [ + ?assertEqual(true, emqx_rule_funcs:is_bool(T)) + || T <- [true, false] + ], + [ + ?assertEqual(false, emqx_rule_funcs:is_bool(T)) + || T <- ["a", <<>>, a, 2] + ]. t_is_int(_) -> - [?assertEqual(true, emqx_rule_funcs:is_int(T)) - || T <- [1, 2, -1]], - [?assertEqual(false, emqx_rule_funcs:is_int(T)) - || T <- [1.1, "a", a]]. + [ + ?assertEqual(true, emqx_rule_funcs:is_int(T)) + || T <- [1, 2, -1] + ], + [ + ?assertEqual(false, emqx_rule_funcs:is_int(T)) + || T <- [1.1, "a", a] + ]. t_is_float(_) -> - [?assertEqual(true, emqx_rule_funcs:is_float(T)) - || T <- [1.1, 2.0, -1.2]], - [?assertEqual(false, emqx_rule_funcs:is_float(T)) - || T <- [1, "a", a, <<>>]]. + [ + ?assertEqual(true, emqx_rule_funcs:is_float(T)) + || T <- [1.1, 2.0, -1.2] + ], + [ + ?assertEqual(false, emqx_rule_funcs:is_float(T)) + || T <- [1, "a", a, <<>>] + ]. t_is_num(_) -> - [?assertEqual(true, emqx_rule_funcs:is_num(T)) - || T <- [1.1, 2.0, -1.2, 1]], - [?assertEqual(false, emqx_rule_funcs:is_num(T)) - || T <- ["a", a, <<>>]]. + [ + ?assertEqual(true, emqx_rule_funcs:is_num(T)) + || T <- [1.1, 2.0, -1.2, 1] + ], + [ + ?assertEqual(false, emqx_rule_funcs:is_num(T)) + || T <- ["a", a, <<>>] + ]. t_is_map(_) -> - [?assertEqual(true, emqx_rule_funcs:is_map(T)) - || T <- [#{}, #{a =>1}]], - [?assertEqual(false, emqx_rule_funcs:is_map(T)) - || T <- ["a", a, <<>>]]. 
+ [ + ?assertEqual(true, emqx_rule_funcs:is_map(T)) + || T <- [#{}, #{a => 1}] + ], + [ + ?assertEqual(false, emqx_rule_funcs:is_map(T)) + || T <- ["a", a, <<>>] + ]. t_is_array(_) -> - [?assertEqual(true, emqx_rule_funcs:is_array(T)) - || T <- [[], [1,2]]], - [?assertEqual(false, emqx_rule_funcs:is_array(T)) - || T <- [<<>>, a]]. + [ + ?assertEqual(true, emqx_rule_funcs:is_array(T)) + || T <- [[], [1, 2]] + ], + [ + ?assertEqual(false, emqx_rule_funcs:is_array(T)) + || T <- [<<>>, a] + ]. %%------------------------------------------------------------------------------ %% Test cases for arith op @@ -237,22 +284,30 @@ t_arith_op(_) -> ?PROPTEST(prop_arith_op). prop_arith_op() -> - ?FORALL({X, Y}, {number(), number()}, - begin - (X + Y) == apply_func('+', [X, Y]) andalso + ?FORALL( + {X, Y}, + {number(), number()}, + begin + (X + Y) == apply_func('+', [X, Y]) andalso (X - Y) == apply_func('-', [X, Y]) andalso (X * Y) == apply_func('*', [X, Y]) andalso - (if Y =/= 0 -> + (if + Y =/= 0 -> (X / Y) == apply_func('/', [X, Y]); - true -> true - end) andalso - (case is_integer(X) - andalso is_pos_integer(Y) of - true -> - (X rem Y) == apply_func('mod', [X, Y]); - false -> true + true -> + true + end) andalso + (case + is_integer(X) andalso + is_pos_integer(Y) + of + true -> + (X rem Y) == apply_func('mod', [X, Y]); + false -> + true end) - end). + end + ). is_pos_integer(X) -> is_integer(X) andalso X > 0. @@ -266,29 +321,45 @@ t_math_fun(_) -> prop_math_fun() -> Excluded = [module_info, atanh, asin, acos], - MathFuns = [{F, A} || {F, A} <- math:module_info(exports), - not lists:member(F, Excluded), - erlang:function_exported(emqx_rule_funcs, F, A)], - ?FORALL({X, Y}, {pos_integer(), pos_integer()}, - begin - lists:foldl(fun({F, 1}, True) -> - True andalso comp_with_math(F, X); - ({F = fmod, 2}, True) -> - True andalso (if Y =/= 0 -> - comp_with_math(F, X, Y); - true -> true - end); - ({F, 2}, True) -> - True andalso comp_with_math(F, X, Y) - end, true, MathFuns) - end). 
+ MathFuns = [ + {F, A} + || {F, A} <- math:module_info(exports), + not lists:member(F, Excluded), + erlang:function_exported(emqx_rule_funcs, F, A) + ], + ?FORALL( + {X, Y}, + {pos_integer(), pos_integer()}, + begin + lists:foldl( + fun + ({F, 1}, True) -> + True andalso comp_with_math(F, X); + ({F = fmod, 2}, True) -> + True andalso + (if + Y =/= 0 -> + comp_with_math(F, X, Y); + true -> + true + end); + ({F, 2}, True) -> + True andalso comp_with_math(F, X, Y) + end, + true, + MathFuns + ) + end + ). -comp_with_math(Fun, X) - when Fun =:= exp; - Fun =:= sinh; - Fun =:= cosh -> - if X < 710 -> math:Fun(X) == apply_func(Fun, [X]); - true -> true +comp_with_math(Fun, X) when + Fun =:= exp; + Fun =:= sinh; + Fun =:= cosh +-> + if + X < 710 -> math:Fun(X) == apply_func(Fun, [X]); + true -> true end; comp_with_math(F, X) -> math:F(X) == apply_func(F, [X]). @@ -304,15 +375,18 @@ t_bits_op(_) -> ?PROPTEST(prop_bits_op). prop_bits_op() -> - ?FORALL({X, Y}, {integer(), integer()}, - begin - (bnot X) == apply_func(bitnot, [X]) andalso + ?FORALL( + {X, Y}, + {integer(), integer()}, + begin + (bnot X) == apply_func(bitnot, [X]) andalso (X band Y) == apply_func(bitand, [X, Y]) andalso (X bor Y) == apply_func(bitor, [X, Y]) andalso (X bxor Y) == apply_func(bitxor, [X, Y]) andalso (X bsl Y) == apply_func(bitsl, [X, Y]) andalso (X bsr Y) == apply_func(bitsr, [X, Y]) - end). + end + ). 
%%------------------------------------------------------------------------------ %% Test cases for string @@ -346,77 +420,140 @@ t_trim(_) -> t_split_all(_) -> ?assertEqual([], apply_func(split, [<<>>, <<"/">>])), ?assertEqual([], apply_func(split, [<<"/">>, <<"/">>])), - ?assertEqual([<<"a">>,<<"b">>,<<"c">>], apply_func(split, [<<"/a/b/c">>, <<"/">>])), - ?assertEqual([<<"a">>,<<"b">>,<<"c">>], apply_func(split, [<<"/a/b//c/">>, <<"/">>])), - ?assertEqual([<<"a">>,<<"b">>,<<"c">>], apply_func(split, [<<"a,b,c">>, <<",">>])), - ?assertEqual([<<"a">>,<<" b ">>,<<"c">>], apply_func(split, [<<"a, b ,c">>, <<",">>])), - ?assertEqual([<<"a">>,<<"b">>,<<"c\n">>], apply_func(split, [<<"a,b,c\n">>, <<",">>])), - ?assertEqual([<<"a">>,<<"b">>,<<"c\r\n">>], apply_func(split, [<<"a,b,c\r\n">>, <<",">>])). + ?assertEqual([<<"a">>, <<"b">>, <<"c">>], apply_func(split, [<<"/a/b/c">>, <<"/">>])), + ?assertEqual([<<"a">>, <<"b">>, <<"c">>], apply_func(split, [<<"/a/b//c/">>, <<"/">>])), + ?assertEqual([<<"a">>, <<"b">>, <<"c">>], apply_func(split, [<<"a,b,c">>, <<",">>])), + ?assertEqual([<<"a">>, <<" b ">>, <<"c">>], apply_func(split, [<<"a, b ,c">>, <<",">>])), + ?assertEqual([<<"a">>, <<"b">>, <<"c\n">>], apply_func(split, [<<"a,b,c\n">>, <<",">>])), + ?assertEqual([<<"a">>, <<"b">>, <<"c\r\n">>], apply_func(split, [<<"a,b,c\r\n">>, <<",">>])). 
t_split_notrim_all(_) -> ?assertEqual([<<>>], apply_func(split, [<<>>, <<"/">>, <<"notrim">>])), - ?assertEqual([<<>>,<<>>], apply_func(split, [<<"/">>, <<"/">>, <<"notrim">>])), - ?assertEqual([<<>>, <<"a">>,<<"b">>,<<"c">>], apply_func(split, [<<"/a/b/c">>, <<"/">>, <<"notrim">>])), - ?assertEqual([<<>>, <<"a">>,<<"b">>, <<>>, <<"c">>, <<>>], apply_func(split, [<<"/a/b//c/">>, <<"/">>, <<"notrim">>])), - ?assertEqual([<<>>, <<"a">>,<<"b">>,<<"c\n">>], apply_func(split, [<<",a,b,c\n">>, <<",">>, <<"notrim">>])), - ?assertEqual([<<"a">>,<<" b">>,<<"c\r\n">>], apply_func(split, [<<"a, b,c\r\n">>, <<",">>, <<"notrim">>])), - ?assertEqual([<<"哈哈"/utf8>>,<<" 你好"/utf8>>,<<" 是的\r\n"/utf8>>], apply_func(split, [<<"哈哈, 你好, 是的\r\n"/utf8>>, <<",">>, <<"notrim">>])). + ?assertEqual([<<>>, <<>>], apply_func(split, [<<"/">>, <<"/">>, <<"notrim">>])), + ?assertEqual( + [<<>>, <<"a">>, <<"b">>, <<"c">>], apply_func(split, [<<"/a/b/c">>, <<"/">>, <<"notrim">>]) + ), + ?assertEqual( + [<<>>, <<"a">>, <<"b">>, <<>>, <<"c">>, <<>>], + apply_func(split, [<<"/a/b//c/">>, <<"/">>, <<"notrim">>]) + ), + ?assertEqual( + [<<>>, <<"a">>, <<"b">>, <<"c\n">>], + apply_func(split, [<<",a,b,c\n">>, <<",">>, <<"notrim">>]) + ), + ?assertEqual( + [<<"a">>, <<" b">>, <<"c\r\n">>], + apply_func(split, [<<"a, b,c\r\n">>, <<",">>, <<"notrim">>]) + ), + ?assertEqual( + [<<"哈哈"/utf8>>, <<" 你好"/utf8>>, <<" 是的\r\n"/utf8>>], + apply_func(split, [<<"哈哈, 你好, 是的\r\n"/utf8>>, <<",">>, <<"notrim">>]) + ). 
t_split_leading(_) -> ?assertEqual([], apply_func(split, [<<>>, <<"/">>, <<"leading">>])), ?assertEqual([], apply_func(split, [<<"/">>, <<"/">>, <<"leading">>])), ?assertEqual([<<"a/b/c">>], apply_func(split, [<<"/a/b/c">>, <<"/">>, <<"leading">>])), - ?assertEqual([<<"a">>,<<"b//c/">>], apply_func(split, [<<"a/b//c/">>, <<"/">>, <<"leading">>])), - ?assertEqual([<<"a">>,<<"b,c\n">>], apply_func(split, [<<"a,b,c\n">>, <<",">>, <<"leading">>])), - ?assertEqual([<<"a b">>,<<"c\r\n">>], apply_func(split, [<<"a b,c\r\n">>, <<",">>, <<"leading">>])), - ?assertEqual([<<"哈哈"/utf8>>,<<" 你好, 是的\r\n"/utf8>>], apply_func(split, [<<"哈哈, 你好, 是的\r\n"/utf8>>, <<",">>, <<"leading">>])). + ?assertEqual( + [<<"a">>, <<"b//c/">>], apply_func(split, [<<"a/b//c/">>, <<"/">>, <<"leading">>]) + ), + ?assertEqual( + [<<"a">>, <<"b,c\n">>], apply_func(split, [<<"a,b,c\n">>, <<",">>, <<"leading">>]) + ), + ?assertEqual( + [<<"a b">>, <<"c\r\n">>], apply_func(split, [<<"a b,c\r\n">>, <<",">>, <<"leading">>]) + ), + ?assertEqual( + [<<"哈哈"/utf8>>, <<" 你好, 是的\r\n"/utf8>>], + apply_func(split, [<<"哈哈, 你好, 是的\r\n"/utf8>>, <<",">>, <<"leading">>]) + ). t_split_leading_notrim(_) -> ?assertEqual([<<>>], apply_func(split, [<<>>, <<"/">>, <<"leading_notrim">>])), - ?assertEqual([<<>>,<<>>], apply_func(split, [<<"/">>, <<"/">>, <<"leading_notrim">>])), - ?assertEqual([<<>>, <<"a/b/c">>], apply_func(split, [<<"/a/b/c">>, <<"/">>, <<"leading_notrim">>])), - ?assertEqual([<<"a">>,<<"b//c/">>], apply_func(split, [<<"a/b//c/">>, <<"/">>, <<"leading_notrim">>])), - ?assertEqual([<<"a">>,<<"b,c\n">>], apply_func(split, [<<"a,b,c\n">>, <<",">>, <<"leading_notrim">>])), - ?assertEqual([<<"a b">>,<<"c\r\n">>], apply_func(split, [<<"a b,c\r\n">>, <<",">>, <<"leading_notrim">>])), - ?assertEqual([<<"哈哈"/utf8>>,<<" 你好, 是的\r\n"/utf8>>], apply_func(split, [<<"哈哈, 你好, 是的\r\n"/utf8>>, <<",">>, <<"leading_notrim">>])). 
+ ?assertEqual([<<>>, <<>>], apply_func(split, [<<"/">>, <<"/">>, <<"leading_notrim">>])), + ?assertEqual( + [<<>>, <<"a/b/c">>], apply_func(split, [<<"/a/b/c">>, <<"/">>, <<"leading_notrim">>]) + ), + ?assertEqual( + [<<"a">>, <<"b//c/">>], apply_func(split, [<<"a/b//c/">>, <<"/">>, <<"leading_notrim">>]) + ), + ?assertEqual( + [<<"a">>, <<"b,c\n">>], apply_func(split, [<<"a,b,c\n">>, <<",">>, <<"leading_notrim">>]) + ), + ?assertEqual( + [<<"a b">>, <<"c\r\n">>], + apply_func(split, [<<"a b,c\r\n">>, <<",">>, <<"leading_notrim">>]) + ), + ?assertEqual( + [<<"哈哈"/utf8>>, <<" 你好, 是的\r\n"/utf8>>], + apply_func(split, [<<"哈哈, 你好, 是的\r\n"/utf8>>, <<",">>, <<"leading_notrim">>]) + ). t_split_trailing(_) -> ?assertEqual([], apply_func(split, [<<>>, <<"/">>, <<"trailing">>])), ?assertEqual([], apply_func(split, [<<"/">>, <<"/">>, <<"trailing">>])), ?assertEqual([<<"/a/b">>, <<"c">>], apply_func(split, [<<"/a/b/c">>, <<"/">>, <<"trailing">>])), ?assertEqual([<<"a/b//c">>], apply_func(split, [<<"a/b//c/">>, <<"/">>, <<"trailing">>])), - ?assertEqual([<<"a,b">>,<<"c\n">>], apply_func(split, [<<"a,b,c\n">>, <<",">>, <<"trailing">>])), - ?assertEqual([<<"a b">>,<<"c\r\n">>], apply_func(split, [<<"a b,c\r\n">>, <<",">>, <<"trailing">>])), - ?assertEqual([<<"哈哈, 你好"/utf8>>,<<" 是的\r\n"/utf8>>], apply_func(split, [<<"哈哈, 你好, 是的\r\n"/utf8>>, <<",">>, <<"trailing">>])). + ?assertEqual( + [<<"a,b">>, <<"c\n">>], apply_func(split, [<<"a,b,c\n">>, <<",">>, <<"trailing">>]) + ), + ?assertEqual( + [<<"a b">>, <<"c\r\n">>], apply_func(split, [<<"a b,c\r\n">>, <<",">>, <<"trailing">>]) + ), + ?assertEqual( + [<<"哈哈, 你好"/utf8>>, <<" 是的\r\n"/utf8>>], + apply_func(split, [<<"哈哈, 你好, 是的\r\n"/utf8>>, <<",">>, <<"trailing">>]) + ). 
t_split_trailing_notrim(_) -> ?assertEqual([<<>>], apply_func(split, [<<>>, <<"/">>, <<"trailing_notrim">>])), ?assertEqual([<<>>, <<>>], apply_func(split, [<<"/">>, <<"/">>, <<"trailing_notrim">>])), - ?assertEqual([<<"/a/b">>, <<"c">>], apply_func(split, [<<"/a/b/c">>, <<"/">>, <<"trailing_notrim">>])), - ?assertEqual([<<"a/b//c">>, <<>>], apply_func(split, [<<"a/b//c/">>, <<"/">>, <<"trailing_notrim">>])), - ?assertEqual([<<"a,b">>,<<"c\n">>], apply_func(split, [<<"a,b,c\n">>, <<",">>, <<"trailing_notrim">>])), - ?assertEqual([<<"a b">>,<<"c\r\n">>], apply_func(split, [<<"a b,c\r\n">>, <<",">>, <<"trailing_notrim">>])), - ?assertEqual([<<"哈哈, 你好"/utf8>>,<<" 是的\r\n"/utf8>>], apply_func(split, [<<"哈哈, 你好, 是的\r\n"/utf8>>, <<",">>, <<"trailing_notrim">>])). + ?assertEqual( + [<<"/a/b">>, <<"c">>], apply_func(split, [<<"/a/b/c">>, <<"/">>, <<"trailing_notrim">>]) + ), + ?assertEqual( + [<<"a/b//c">>, <<>>], apply_func(split, [<<"a/b//c/">>, <<"/">>, <<"trailing_notrim">>]) + ), + ?assertEqual( + [<<"a,b">>, <<"c\n">>], apply_func(split, [<<"a,b,c\n">>, <<",">>, <<"trailing_notrim">>]) + ), + ?assertEqual( + [<<"a b">>, <<"c\r\n">>], + apply_func(split, [<<"a b,c\r\n">>, <<",">>, <<"trailing_notrim">>]) + ), + ?assertEqual( + [<<"哈哈, 你好"/utf8>>, <<" 是的\r\n"/utf8>>], + apply_func(split, [<<"哈哈, 你好, 是的\r\n"/utf8>>, <<",">>, <<"trailing_notrim">>]) + ). 
t_tokens(_) -> ?assertEqual([], apply_func(tokens, [<<>>, <<"/">>])), ?assertEqual([], apply_func(tokens, [<<"/">>, <<"/">>])), - ?assertEqual([<<"a">>,<<"b">>,<<"c">>], apply_func(tokens, [<<"/a/b/c">>, <<"/">>])), - ?assertEqual([<<"a">>,<<"b">>,<<"c">>], apply_func(tokens, [<<"/a/b//c/">>, <<"/">>])), - ?assertEqual([<<"a">>,<<"b">>,<<"c">>], apply_func(tokens, [<<" /a/ b /c">>, <<" /">>])), - ?assertEqual([<<"a">>,<<"\nb">>,<<"c\n">>], apply_func(tokens, [<<"a ,\nb,c\n">>, <<", ">>])), - ?assertEqual([<<"a">>,<<"b">>,<<"c\r\n">>], apply_func(tokens, [<<"a ,b,c\r\n">>, <<", ">>])), - ?assertEqual([<<"a">>,<<"b">>,<<"c">>], apply_func(tokens, [<<"a,b, c\n">>, <<", ">>, <<"nocrlf">>])), - ?assertEqual([<<"a">>,<<"b">>,<<"c">>], apply_func(tokens, [<<"a,b,c\r\n">>, <<",">>, <<"nocrlf">>])), - ?assertEqual([<<"a">>,<<"b">>,<<"c">>], apply_func(tokens, [<<"a,b\r\n,c\n">>, <<",">>, <<"nocrlf">>])), + ?assertEqual([<<"a">>, <<"b">>, <<"c">>], apply_func(tokens, [<<"/a/b/c">>, <<"/">>])), + ?assertEqual([<<"a">>, <<"b">>, <<"c">>], apply_func(tokens, [<<"/a/b//c/">>, <<"/">>])), + ?assertEqual([<<"a">>, <<"b">>, <<"c">>], apply_func(tokens, [<<" /a/ b /c">>, <<" /">>])), + ?assertEqual([<<"a">>, <<"\nb">>, <<"c\n">>], apply_func(tokens, [<<"a ,\nb,c\n">>, <<", ">>])), + ?assertEqual([<<"a">>, <<"b">>, <<"c\r\n">>], apply_func(tokens, [<<"a ,b,c\r\n">>, <<", ">>])), + ?assertEqual( + [<<"a">>, <<"b">>, <<"c">>], apply_func(tokens, [<<"a,b, c\n">>, <<", ">>, <<"nocrlf">>]) + ), + ?assertEqual( + [<<"a">>, <<"b">>, <<"c">>], apply_func(tokens, [<<"a,b,c\r\n">>, <<",">>, <<"nocrlf">>]) + ), + ?assertEqual( + [<<"a">>, <<"b">>, <<"c">>], apply_func(tokens, [<<"a,b\r\n,c\n">>, <<",">>, <<"nocrlf">>]) + ), ?assertEqual([], apply_func(tokens, [<<"\r\n">>, <<",">>, <<"nocrlf">>])), ?assertEqual([], apply_func(tokens, [<<"\r\n">>, <<",">>, <<"nocrlf">>])), - ?assertEqual([<<"哈哈"/utf8>>,<<"你好"/utf8>>,<<"是的"/utf8>>], apply_func(tokens, [<<"哈哈, 你好, 是的\r\n"/utf8>>, <<", ">>, 
<<"nocrlf">>])). + ?assertEqual( + [<<"哈哈"/utf8>>, <<"你好"/utf8>>, <<"是的"/utf8>>], + apply_func(tokens, [<<"哈哈, 你好, 是的\r\n"/utf8>>, <<", ">>, <<"nocrlf">>]) + ). t_concat(_) -> ?assertEqual(<<"ab">>, apply_func(concat, [<<"a">>, <<"b">>])), ?assertEqual(<<"ab">>, apply_func('+', [<<"a">>, <<"b">>])), - ?assertEqual(<<"哈哈你好"/utf8>>, apply_func(concat, [<<"哈哈"/utf8>>,<<"你好"/utf8>>])), + ?assertEqual(<<"哈哈你好"/utf8>>, apply_func(concat, [<<"哈哈"/utf8>>, <<"你好"/utf8>>])), ?assertEqual(<<"abc">>, apply_func(concat, [apply_func(concat, [<<"a">>, <<"b">>]), <<"c">>])), ?assertEqual(<<"a">>, apply_func(concat, [<<"">>, <<"a">>])), ?assertEqual(<<"a">>, apply_func(concat, [<<"a">>, <<"">>])), @@ -424,8 +561,13 @@ t_concat(_) -> t_sprintf(_) -> ?assertEqual(<<"Hello Shawn!">>, apply_func(sprintf, [<<"Hello ~ts!">>, <<"Shawn">>])), - ?assertEqual(<<"Name: ABC, Count: 2">>, apply_func(sprintf, [<<"Name: ~ts, Count: ~p">>, <<"ABC">>, 2])), - ?assertEqual(<<"Name: ABC, Count: 2, Status: {ok,running}">>, apply_func(sprintf, [<<"Name: ~ts, Count: ~p, Status: ~p">>, <<"ABC">>, 2, {ok, running}])). + ?assertEqual( + <<"Name: ABC, Count: 2">>, apply_func(sprintf, [<<"Name: ~ts, Count: ~p">>, <<"ABC">>, 2]) + ), + ?assertEqual( + <<"Name: ABC, Count: 2, Status: {ok,running}">>, + apply_func(sprintf, [<<"Name: ~ts, Count: ~p, Status: ~p">>, <<"ABC">>, 2, {ok, running}]) + ). t_pad(_) -> ?assertEqual(<<"abc ">>, apply_func(pad, [<<"abc">>, 5])), @@ -449,8 +591,12 @@ t_replace(_) -> ?assertEqual(<<"ab-c--">>, apply_func(replace, [<<"ab c ">>, <<" ">>, <<"-">>])), ?assertEqual(<<"ab::c::::">>, apply_func(replace, [<<"ab c ">>, <<" ">>, <<"::">>])), ?assertEqual(<<"ab-c--">>, apply_func(replace, [<<"ab c ">>, <<" ">>, <<"-">>, <<"all">>])), - ?assertEqual(<<"ab-c ">>, apply_func(replace, [<<"ab c ">>, <<" ">>, <<"-">>, <<"leading">>])), - ?assertEqual(<<"ab c -">>, apply_func(replace, [<<"ab c ">>, <<" ">>, <<"-">>, <<"trailing">>])). 
+ ?assertEqual( + <<"ab-c ">>, apply_func(replace, [<<"ab c ">>, <<" ">>, <<"-">>, <<"leading">>]) + ), + ?assertEqual( + <<"ab c -">>, apply_func(replace, [<<"ab c ">>, <<" ">>, <<"-">>, <<"trailing">>]) + ). t_ascii(_) -> ?assertEqual(97, apply_func(ascii, [<<"a">>])), @@ -483,7 +629,7 @@ t_regex_replace(_) -> ?assertEqual(<<"aebed">>, apply_func(regex_replace, [<<"accbcd">>, <<"c+">>, <<"e">>])), ?assertEqual(<<"a[cc]b[c]d">>, apply_func(regex_replace, [<<"accbcd">>, <<"c+">>, <<"[&]">>])). -ascii_string() -> list(range(0,127)). +ascii_string() -> list(range(0, 127)). bin(S) -> iolist_to_binary(S). @@ -492,34 +638,34 @@ bin(S) -> iolist_to_binary(S). %%------------------------------------------------------------------------------ t_nth(_) -> - ?assertEqual(2, apply_func(nth, [2, [1,2,3,4]])), - ?assertEqual(4, apply_func(nth, [4, [1,2,3,4]])). + ?assertEqual(2, apply_func(nth, [2, [1, 2, 3, 4]])), + ?assertEqual(4, apply_func(nth, [4, [1, 2, 3, 4]])). t_length(_) -> - ?assertEqual(4, apply_func(length, [[1,2,3,4]])), + ?assertEqual(4, apply_func(length, [[1, 2, 3, 4]])), ?assertEqual(0, apply_func(length, [[]])). t_slice(_) -> - ?assertEqual([1,2,3,4], apply_func(sublist, [4, [1,2,3,4]])), - ?assertEqual([1,2], apply_func(sublist, [2, [1,2,3,4]])), - ?assertEqual([4], apply_func(sublist, [4, 1, [1,2,3,4]])), - ?assertEqual([4], apply_func(sublist, [4, 2, [1,2,3,4]])), - ?assertEqual([], apply_func(sublist, [5, 2, [1,2,3,4]])), - ?assertEqual([2,3], apply_func(sublist, [2, 2, [1,2,3,4]])), - ?assertEqual([1], apply_func(sublist, [1, 1, [1,2,3,4]])). 
+ ?assertEqual([1, 2, 3, 4], apply_func(sublist, [4, [1, 2, 3, 4]])), + ?assertEqual([1, 2], apply_func(sublist, [2, [1, 2, 3, 4]])), + ?assertEqual([4], apply_func(sublist, [4, 1, [1, 2, 3, 4]])), + ?assertEqual([4], apply_func(sublist, [4, 2, [1, 2, 3, 4]])), + ?assertEqual([], apply_func(sublist, [5, 2, [1, 2, 3, 4]])), + ?assertEqual([2, 3], apply_func(sublist, [2, 2, [1, 2, 3, 4]])), + ?assertEqual([1], apply_func(sublist, [1, 1, [1, 2, 3, 4]])). t_first_last(_) -> - ?assertEqual(1, apply_func(first, [[1,2,3,4]])), - ?assertEqual(4, apply_func(last, [[1,2,3,4]])). + ?assertEqual(1, apply_func(first, [[1, 2, 3, 4]])), + ?assertEqual(4, apply_func(last, [[1, 2, 3, 4]])). t_contains(_) -> - ?assertEqual(true, apply_func(contains, [1, [1,2,3,4]])), - ?assertEqual(true, apply_func(contains, [3, [1,2,3,4]])), - ?assertEqual(true, apply_func(contains, [<<"a">>, [<<>>,<<"ab">>,3,<<"a">>]])), - ?assertEqual(true, apply_func(contains, [#{a=>b}, [#{a=>1}, #{a=>b}]])), - ?assertEqual(false, apply_func(contains, [#{a=>b}, [#{a=>1}]])), + ?assertEqual(true, apply_func(contains, [1, [1, 2, 3, 4]])), + ?assertEqual(true, apply_func(contains, [3, [1, 2, 3, 4]])), + ?assertEqual(true, apply_func(contains, [<<"a">>, [<<>>, <<"ab">>, 3, <<"a">>]])), + ?assertEqual(true, apply_func(contains, [#{a => b}, [#{a => 1}, #{a => b}]])), + ?assertEqual(false, apply_func(contains, [#{a => b}, [#{a => 1}]])), ?assertEqual(false, apply_func(contains, [3, [1, 2]])), - ?assertEqual(false, apply_func(contains, [<<"c">>, [<<>>,<<"ab">>,3,<<"a">>]])). + ?assertEqual(false, apply_func(contains, [<<"c">>, [<<>>, <<"ab">>, 3, <<"a">>]])). 
t_map_get(_) -> ?assertEqual(1, apply_func(map_get, [<<"a">>, #{a => 1}])), @@ -532,7 +678,9 @@ t_map_put(_) -> ?assertEqual(#{<<"a">> => 1}, apply_func(map_put, [<<"a">>, 1, #{}])), ?assertEqual(#{a => 2}, apply_func(map_put, [<<"a">>, 2, #{a => 1}])), ?assertEqual(#{<<"a">> => #{<<"b">> => 1}}, apply_func(map_put, [<<"a.b">>, 1, #{}])), - ?assertEqual(#{a => #{b => 1, <<"c">> => 1}}, apply_func(map_put, [<<"a.c">>, 1, #{a => #{b => 1}}])), + ?assertEqual( + #{a => #{b => 1, <<"c">> => 1}}, apply_func(map_put, [<<"a.c">>, 1, #{a => #{b => 1}}]) + ), ?assertEqual(#{a => 2}, apply_func(map_put, [<<"a">>, 2, #{a => 1}])). t_mget(_) -> @@ -579,20 +727,26 @@ t_subbits2_1(_) -> ?assertEqual(127, apply_func(subbits, [<<255:8>>, 2, 7])), ?assertEqual(127, apply_func(subbits, [<<255:8>>, 2, 8])). t_subbits2_integer(_) -> - ?assertEqual(456, apply_func(subbits, [<<456:32/integer>>, 1, 32, <<"integer">>, <<"signed">>, <<"big">>])), - ?assertEqual(-456, apply_func(subbits, [<<-456:32/integer>>, 1, 32, <<"integer">>, <<"signed">>, <<"big">>])). + ?assertEqual( + 456, + apply_func(subbits, [<<456:32/integer>>, 1, 32, <<"integer">>, <<"signed">>, <<"big">>]) + ), + ?assertEqual( + -456, + apply_func(subbits, [<<-456:32/integer>>, 1, 32, <<"integer">>, <<"signed">>, <<"big">>]) + ). t_subbits2_float(_) -> R = apply_func(subbits, [<<5.3:64/float>>, 1, 64, <<"float">>, <<"unsigned">>, <<"big">>]), RL = (5.3 - R), ct:pal(";;;;~p", [R]), - ?assert( (RL >= 0 andalso RL < 0.0001) orelse (RL =< 0 andalso RL > -0.0001)), + ?assert((RL >= 0 andalso RL < 0.0001) orelse (RL =< 0 andalso RL > -0.0001)), R2 = apply_func(subbits, [<<-5.3:64/float>>, 1, 64, <<"float">>, <<"signed">>, <<"big">>]), RL2 = (5.3 + R2), ct:pal(";;;;~p", [R2]), - ?assert( (RL2 >= 0 andalso RL2 < 0.0001) orelse (RL2 =< 0 andalso RL2 > -0.0001)). + ?assert((RL2 >= 0 andalso RL2 < 0.0001) orelse (RL2 =< 0 andalso RL2 > -0.0001)). 
%%------------------------------------------------------------------------------ %% Test cases for Hash funcs @@ -602,12 +756,15 @@ t_hash_funcs(_) -> ?PROPTEST(prop_hash_fun). prop_hash_fun() -> - ?FORALL(S, binary(), - begin - (32 == byte_size(apply_func(md5, [S]))) andalso + ?FORALL( + S, + binary(), + begin + (32 == byte_size(apply_func(md5, [S]))) andalso (40 == byte_size(apply_func(sha, [S]))) andalso (64 == byte_size(apply_func(sha256, [S]))) - end). + end + ). %%------------------------------------------------------------------------------ %% Test cases for base64 @@ -617,72 +774,131 @@ t_base64_encode(_) -> ?PROPTEST(prop_base64_encode). prop_base64_encode() -> - ?FORALL(S, list(range(0, 255)), - begin - Bin = iolist_to_binary(S), - Bin == base64:decode(apply_func(base64_encode, [Bin])) - end). + ?FORALL( + S, + list(range(0, 255)), + begin + Bin = iolist_to_binary(S), + Bin == base64:decode(apply_func(base64_encode, [Bin])) + end + ). %%-------------------------------------------------------------------- %% Date functions %%-------------------------------------------------------------------- t_now_rfc3339(_) -> - ?assert(is_integer( - calendar:rfc3339_to_system_time( - binary_to_list(apply_func(now_rfc3339, []))))). + ?assert( + is_integer( + calendar:rfc3339_to_system_time( + binary_to_list(apply_func(now_rfc3339, [])) + ) + ) + ). t_now_rfc3339_1(_) -> - [?assert(is_integer( - calendar:rfc3339_to_system_time( - binary_to_list(apply_func(now_rfc3339, [atom_to_binary(Unit, utf8)])), - [{unit, Unit}]))) - || Unit <- [second,millisecond,microsecond,nanosecond]]. + [ + ?assert( + is_integer( + calendar:rfc3339_to_system_time( + binary_to_list(apply_func(now_rfc3339, [atom_to_binary(Unit, utf8)])), + [{unit, Unit}] + ) + ) + ) + || Unit <- [second, millisecond, microsecond, nanosecond] + ]. t_now_timestamp(_) -> ?assert(is_integer(apply_func(now_timestamp, []))). 
t_now_timestamp_1(_) -> - [?assert(is_integer( - apply_func(now_timestamp, [atom_to_binary(Unit, utf8)]))) - || Unit <- [second,millisecond,microsecond,nanosecond]]. + [ + ?assert( + is_integer( + apply_func(now_timestamp, [atom_to_binary(Unit, utf8)]) + ) + ) + || Unit <- [second, millisecond, microsecond, nanosecond] + ]. t_unix_ts_to_rfc3339(_) -> - [begin - BUnit = atom_to_binary(Unit, utf8), - Epoch = apply_func(now_timestamp, [BUnit]), - DateTime = apply_func(unix_ts_to_rfc3339, [Epoch, BUnit]), - ?assertEqual(Epoch, - calendar:rfc3339_to_system_time(binary_to_list(DateTime), [{unit, Unit}])) - end || Unit <- [second,millisecond,microsecond,nanosecond]]. + [ + begin + BUnit = atom_to_binary(Unit, utf8), + Epoch = apply_func(now_timestamp, [BUnit]), + DateTime = apply_func(unix_ts_to_rfc3339, [Epoch, BUnit]), + ?assertEqual( + Epoch, + calendar:rfc3339_to_system_time(binary_to_list(DateTime), [{unit, Unit}]) + ) + end + || Unit <- [second, millisecond, microsecond, nanosecond] + ]. t_rfc3339_to_unix_ts(_) -> - [begin - BUnit = atom_to_binary(Unit, utf8), - Epoch = apply_func(now_timestamp, [BUnit]), - DateTime = apply_func(unix_ts_to_rfc3339, [Epoch, BUnit]), - ?assertEqual(Epoch, emqx_rule_funcs:rfc3339_to_unix_ts(DateTime, BUnit)) - end || Unit <- [second,millisecond,microsecond,nanosecond]]. + [ + begin + BUnit = atom_to_binary(Unit, utf8), + Epoch = apply_func(now_timestamp, [BUnit]), + DateTime = apply_func(unix_ts_to_rfc3339, [Epoch, BUnit]), + ?assertEqual(Epoch, emqx_rule_funcs:rfc3339_to_unix_ts(DateTime, BUnit)) + end + || Unit <- [second, millisecond, microsecond, nanosecond] + ]. t_format_date_funcs(_) -> ?PROPTEST(prop_format_date_fun). 
prop_format_date_fun() -> Args1 = [<<"second">>, <<"+07:00">>, <<"%m--%d--%y---%H:%M:%S%Z">>], - ?FORALL(S, erlang:system_time(second), - S == apply_func(date_to_unix_ts, - Args1 ++ [apply_func(format_date, - Args1 ++ [S])])), + ?FORALL( + S, + erlang:system_time(second), + S == + apply_func( + date_to_unix_ts, + Args1 ++ + [ + apply_func( + format_date, + Args1 ++ [S] + ) + ] + ) + ), Args2 = [<<"millisecond">>, <<"+04:00">>, <<"--%m--%d--%y---%H:%M:%S%Z">>], - ?FORALL(S, erlang:system_time(millisecond), - S == apply_func(date_to_unix_ts, - Args2 ++ [apply_func(format_date, - Args2 ++ [S])])), + ?FORALL( + S, + erlang:system_time(millisecond), + S == + apply_func( + date_to_unix_ts, + Args2 ++ + [ + apply_func( + format_date, + Args2 ++ [S] + ) + ] + ) + ), Args = [<<"second">>, <<"+08:00">>, <<"%y-%m-%d-%H:%M:%S%Z">>], - ?FORALL(S, erlang:system_time(second), - S == apply_func(date_to_unix_ts, - Args ++ [apply_func(format_date, - Args ++ [S])])). + ?FORALL( + S, + erlang:system_time(second), + S == + apply_func( + date_to_unix_ts, + Args ++ + [ + apply_func( + format_date, + Args ++ [S] + ) + ] + ) + ). %%------------------------------------------------------------------------------ %% Utility functions @@ -699,8 +915,10 @@ apply_func(Name, Args, Msg) -> apply_func(Name, Args, emqx_message:to_map(Msg)). message() -> - emqx_message:set_flags(#{dup => false}, - emqx_message:make(<<"clientid">>, 1, <<"topic/#">>, <<"payload">>)). + emqx_message:set_flags( + #{dup => false}, + emqx_message:make(<<"clientid">>, 1, <<"topic/#">>, <<"payload">>) + ). % t_contains_topic(_) -> % error('TODO'). @@ -831,13 +1049,15 @@ message() -> % t_json_decode(_) -> % error('TODO'). 
- %%------------------------------------------------------------------------------ %% CT functions %%------------------------------------------------------------------------------ all() -> - IsTestCase = fun("t_" ++ _) -> true; (_) -> false end, + IsTestCase = fun + ("t_" ++ _) -> true; + (_) -> false + end, [F || {F, _A} <- module_info(exports), IsTestCase(atom_to_list(F))]. suite() -> diff --git a/apps/emqx_rule_engine/test/emqx_rule_maps_SUITE.erl b/apps/emqx_rule_engine/test/emqx_rule_maps_SUITE.erl index a25145acd..965173b08 100644 --- a/apps/emqx_rule_engine/test/emqx_rule_maps_SUITE.erl +++ b/apps/emqx_rule_engine/test/emqx_rule_maps_SUITE.erl @@ -22,20 +22,27 @@ -compile(export_all). -compile(nowarn_export_all). --import(emqx_rule_maps, - [ nested_get/2 - , nested_get/3 - , nested_put/3 - , atom_key_map/1 - ]). +-import( + emqx_rule_maps, + [ + nested_get/2, + nested_get/3, + nested_put/3, + atom_key_map/1 + ] +). --define(path(Path), {path, - [case K of - {ic, Key} -> {index, {const, Key}}; - {iv, Key} -> {index, {var, Key}}; - {i, Path1} -> {index, Path1}; - _ -> {key, K} - end || K <- Path]}). +-define(path(Path), + {path, [ + case K of + {ic, Key} -> {index, {const, Key}}; + {iv, Key} -> {index, {var, Key}}; + {i, Path1} -> {index, Path1}; + _ -> {key, K} + end + || K <- Path + ]} +). -define(PROPTEST(Prop), true = proper:quickcheck(Prop)). 
@@ -44,8 +51,8 @@ t_nested_put_map(_) -> ?assertEqual(#{a => a}, nested_put(?path([a]), a, #{})), ?assertEqual(#{a => undefined}, nested_put(?path([a]), undefined, #{})), ?assertEqual(#{a => 1}, nested_put(?path([a]), 1, not_map)), - ?assertEqual(#{a => #{b => b}}, nested_put(?path([a,b]), b, #{})), - ?assertEqual(#{a => #{b => #{c => c}}}, nested_put(?path([a,b,c]), c, #{})), + ?assertEqual(#{a => #{b => b}}, nested_put(?path([a, b]), b, #{})), + ?assertEqual(#{a => #{b => #{c => c}}}, nested_put(?path([a, b, c]), c, #{})), ?assertEqual(#{<<"k">> => v1}, nested_put(?path([k]), v1, #{<<"k">> => v0})), ?assertEqual(#{k => v1}, nested_put(?path([k]), v1, #{k => v0})), ?assertEqual(#{<<"k">> => v1, a => b}, nested_put(?path([k]), v1, #{<<"k">> => v0, a => b})), @@ -53,122 +60,194 @@ t_nested_put_map(_) -> ?assertEqual(#{k => v1}, nested_put(?path([k]), v1, #{k => v0})), ?assertEqual(#{k => v1, a => b}, nested_put(?path([k]), v1, #{k => v0, a => b})), ?assertEqual(#{<<"k">> => v1, a => b}, nested_put(?path([k]), v1, #{<<"k">> => v0, a => b})), - ?assertEqual(#{<<"k">> => #{<<"t">> => v1}}, nested_put(?path([k,t]), v1, #{<<"k">> => #{<<"t">> => v0}})), - ?assertEqual(#{<<"k">> => #{t => v1}}, nested_put(?path([k,t]), v1, #{<<"k">> => #{t => v0}})), - ?assertEqual(#{k => #{<<"t">> => #{a => v1}}}, nested_put(?path([k,t,a]), v1, #{k => #{<<"t">> => v0}})), - ?assertEqual(#{k => #{<<"t">> => #{<<"a">> => v1}}}, nested_put(?path([k,t,<<"a">>]), v1, #{k => #{<<"t">> => v0}})). + ?assertEqual( + #{<<"k">> => #{<<"t">> => v1}}, + nested_put(?path([k, t]), v1, #{<<"k">> => #{<<"t">> => v0}}) + ), + ?assertEqual(#{<<"k">> => #{t => v1}}, nested_put(?path([k, t]), v1, #{<<"k">> => #{t => v0}})), + ?assertEqual( + #{k => #{<<"t">> => #{a => v1}}}, nested_put(?path([k, t, a]), v1, #{k => #{<<"t">> => v0}}) + ), + ?assertEqual( + #{k => #{<<"t">> => #{<<"a">> => v1}}}, + nested_put(?path([k, t, <<"a">>]), v1, #{k => #{<<"t">> => v0}}) + ). 
t_nested_put_index(_) -> - ?assertEqual([1,a,3], nested_put(?path([{ic,2}]), a, [1,2,3])), - ?assertEqual([1,2,3], nested_put(?path([{ic,0}]), a, [1,2,3])), - ?assertEqual([1,2,3], nested_put(?path([{ic,4}]), a, [1,2,3])), - ?assertEqual([1,[a],3], nested_put(?path([{ic,2}, {ic,1}]), a, [1,[2],3])), - ?assertEqual([1,[[a]],3], nested_put(?path([{ic,2}, {ic,1}, {ic,1}]), a, [1,[[2]],3])), - ?assertEqual([1,[[2]],3], nested_put(?path([{ic,2}, {ic,1}, {ic,2}]), a, [1,[[2]],3])), - ?assertEqual([1,[a],1], nested_put(?path([{ic,2}, {i,?path([{ic,3}])}]), a, [1,[2],1])), + ?assertEqual([1, a, 3], nested_put(?path([{ic, 2}]), a, [1, 2, 3])), + ?assertEqual([1, 2, 3], nested_put(?path([{ic, 0}]), a, [1, 2, 3])), + ?assertEqual([1, 2, 3], nested_put(?path([{ic, 4}]), a, [1, 2, 3])), + ?assertEqual([1, [a], 3], nested_put(?path([{ic, 2}, {ic, 1}]), a, [1, [2], 3])), + ?assertEqual([1, [[a]], 3], nested_put(?path([{ic, 2}, {ic, 1}, {ic, 1}]), a, [1, [[2]], 3])), + ?assertEqual([1, [[2]], 3], nested_put(?path([{ic, 2}, {ic, 1}, {ic, 2}]), a, [1, [[2]], 3])), + ?assertEqual([1, [a], 1], nested_put(?path([{ic, 2}, {i, ?path([{ic, 3}])}]), a, [1, [2], 1])), %% nested_put to the first or tail of a list: - ?assertEqual([a], nested_put(?path([{ic,head}]), a, not_list)), - ?assertEqual([a], nested_put(?path([{ic,head}]), a, [])), - ?assertEqual([a,1,2,3], nested_put(?path([{ic,head}]), a, [1,2,3])), - ?assertEqual([a], nested_put(?path([{ic,tail}]), a, not_list)), - ?assertEqual([a], nested_put(?path([{ic,tail}]), a, [])), - ?assertEqual([1,2,3,a], nested_put(?path([{ic,tail}]), a, [1,2,3])). 
+ ?assertEqual([a], nested_put(?path([{ic, head}]), a, not_list)), + ?assertEqual([a], nested_put(?path([{ic, head}]), a, [])), + ?assertEqual([a, 1, 2, 3], nested_put(?path([{ic, head}]), a, [1, 2, 3])), + ?assertEqual([a], nested_put(?path([{ic, tail}]), a, not_list)), + ?assertEqual([a], nested_put(?path([{ic, tail}]), a, [])), + ?assertEqual([1, 2, 3, a], nested_put(?path([{ic, tail}]), a, [1, 2, 3])). t_nested_put_negative_index(_) -> - ?assertEqual([1,2,a], nested_put(?path([{ic,-1}]), a, [1,2,3])), - ?assertEqual([1,a,3], nested_put(?path([{ic,-2}]), a, [1,2,3])), - ?assertEqual([a,2,3], nested_put(?path([{ic,-3}]), a, [1,2,3])), - ?assertEqual([1,2,3], nested_put(?path([{ic,-4}]), a, [1,2,3])). + ?assertEqual([1, 2, a], nested_put(?path([{ic, -1}]), a, [1, 2, 3])), + ?assertEqual([1, a, 3], nested_put(?path([{ic, -2}]), a, [1, 2, 3])), + ?assertEqual([a, 2, 3], nested_put(?path([{ic, -3}]), a, [1, 2, 3])), + ?assertEqual([1, 2, 3], nested_put(?path([{ic, -4}]), a, [1, 2, 3])). t_nested_put_mix_map_index(_) -> - ?assertEqual(#{a => [a]}, nested_put(?path([a, {ic,2}]), a, #{})), - ?assertEqual(#{a => [#{b => 0}]}, nested_put(?path([a, {ic,2}, b]), 0, #{})), - ?assertEqual(#{a => [1,a,3]}, nested_put(?path([a, {ic,2}]), a, #{a => [1,2,3]})), - ?assertEqual([1,#{a => c},3], nested_put(?path([{ic,2}, a]), c, [1,#{a => b},3])), - ?assertEqual([1,#{a => [c]},3], nested_put(?path([{ic,2}, a, {ic, 1}]), c, [1,#{a => [b]},3])), - ?assertEqual(#{a => [1,a,3], b => 2}, nested_put(?path([a, {iv,b}]), a, #{a => [1,2,3], b => 2})), - ?assertEqual(#{a => [1,2,3], b => 2}, nested_put(?path([a, {iv,c}]), a, #{a => [1,2,3], b => 2})), - ?assertEqual(#{a => [#{c => a},1,2,3]}, nested_put(?path([a, {ic,head}, c]), a, #{a => [1,2,3]})). 
+ ?assertEqual(#{a => [a]}, nested_put(?path([a, {ic, 2}]), a, #{})), + ?assertEqual(#{a => [#{b => 0}]}, nested_put(?path([a, {ic, 2}, b]), 0, #{})), + ?assertEqual(#{a => [1, a, 3]}, nested_put(?path([a, {ic, 2}]), a, #{a => [1, 2, 3]})), + ?assertEqual([1, #{a => c}, 3], nested_put(?path([{ic, 2}, a]), c, [1, #{a => b}, 3])), + ?assertEqual( + [1, #{a => [c]}, 3], nested_put(?path([{ic, 2}, a, {ic, 1}]), c, [1, #{a => [b]}, 3]) + ), + ?assertEqual( + #{a => [1, a, 3], b => 2}, nested_put(?path([a, {iv, b}]), a, #{a => [1, 2, 3], b => 2}) + ), + ?assertEqual( + #{a => [1, 2, 3], b => 2}, nested_put(?path([a, {iv, c}]), a, #{a => [1, 2, 3], b => 2}) + ), + ?assertEqual( + #{a => [#{c => a}, 1, 2, 3]}, nested_put(?path([a, {ic, head}, c]), a, #{a => [1, 2, 3]}) + ). t_nested_get_map(_) -> ?assertEqual(undefined, nested_get(?path([a]), not_map)), ?assertEqual(#{a => 1}, nested_get(?path([]), #{a => 1})), ?assertEqual(#{b => c}, nested_get(?path([a]), #{a => #{b => c}})), - ?assertEqual(undefined, nested_get(?path([a,b,c]), not_map)), - ?assertEqual(undefined, nested_get(?path([a,b,c]), #{})), - ?assertEqual(undefined, nested_get(?path([a,b,c]), #{a => #{}})), - ?assertEqual(undefined, nested_get(?path([a,b,c]), #{a => #{b => #{}}})), - ?assertEqual(v1, nested_get(?path([p,x]), #{p => #{x => v1}})), - ?assertEqual(v1, nested_get(?path([<<"p">>,<<"x">>]), #{p => #{x => v1}})), - ?assertEqual(c, nested_get(?path([a,b,c]), #{a => #{b => #{c => c}}})). + ?assertEqual(undefined, nested_get(?path([a, b, c]), not_map)), + ?assertEqual(undefined, nested_get(?path([a, b, c]), #{})), + ?assertEqual(undefined, nested_get(?path([a, b, c]), #{a => #{}})), + ?assertEqual(undefined, nested_get(?path([a, b, c]), #{a => #{b => #{}}})), + ?assertEqual(v1, nested_get(?path([p, x]), #{p => #{x => v1}})), + ?assertEqual(v1, nested_get(?path([<<"p">>, <<"x">>]), #{p => #{x => v1}})), + ?assertEqual(c, nested_get(?path([a, b, c]), #{a => #{b => #{c => c}}})). 
t_nested_get_map_1(_) -> ?assertEqual(1, nested_get(?path([a]), <<"{\"a\": 1}">>)), ?assertEqual(<<"{\"b\": 1}">>, nested_get(?path([a]), #{a => <<"{\"b\": 1}">>})), - ?assertEqual(1, nested_get(?path([a,b]), #{a => <<"{\"b\": 1}">>})). + ?assertEqual(1, nested_get(?path([a, b]), #{a => <<"{\"b\": 1}">>})). t_nested_get_index(_) -> %% single index get - ?assertEqual(1, nested_get(?path([{ic,1}]), [1,2,3])), - ?assertEqual(2, nested_get(?path([{ic,2}]), [1,2,3])), - ?assertEqual(3, nested_get(?path([{ic,3}]), [1,2,3])), - ?assertEqual(undefined, nested_get(?path([{ic,0}]), [1,2,3])), - ?assertEqual("not_found", nested_get(?path([{ic,0}]), [1,2,3], "not_found")), - ?assertEqual(undefined, nested_get(?path([{ic,4}]), [1,2,3])), - ?assertEqual("not_found", nested_get(?path([{ic,4}]), [1,2,3], "not_found")), + ?assertEqual(1, nested_get(?path([{ic, 1}]), [1, 2, 3])), + ?assertEqual(2, nested_get(?path([{ic, 2}]), [1, 2, 3])), + ?assertEqual(3, nested_get(?path([{ic, 3}]), [1, 2, 3])), + ?assertEqual(undefined, nested_get(?path([{ic, 0}]), [1, 2, 3])), + ?assertEqual("not_found", nested_get(?path([{ic, 0}]), [1, 2, 3], "not_found")), + ?assertEqual(undefined, nested_get(?path([{ic, 4}]), [1, 2, 3])), + ?assertEqual("not_found", nested_get(?path([{ic, 4}]), [1, 2, 3], "not_found")), %% multiple index get - ?assertEqual(c, nested_get(?path([{ic,2}, {ic,3}]), [1,[a,b,c],3])), - ?assertEqual("I", nested_get(?path([{ic,2}, {ic,3}, {ic,1}]), [1,[a,b,["I","II","III"]],3])), - ?assertEqual(undefined, nested_get(?path([{ic,2}, {ic,1}, {ic,1}]), [1,[a,b,["I","II","III"]],3])), - ?assertEqual(default, nested_get(?path([{ic,2}, {ic,1}, {ic,1}]), [1,[a,b,["I","II","III"]],3], default)). 
+ ?assertEqual(c, nested_get(?path([{ic, 2}, {ic, 3}]), [1, [a, b, c], 3])), + ?assertEqual( + "I", nested_get(?path([{ic, 2}, {ic, 3}, {ic, 1}]), [1, [a, b, ["I", "II", "III"]], 3]) + ), + ?assertEqual( + undefined, + nested_get(?path([{ic, 2}, {ic, 1}, {ic, 1}]), [1, [a, b, ["I", "II", "III"]], 3]) + ), + ?assertEqual( + default, + nested_get(?path([{ic, 2}, {ic, 1}, {ic, 1}]), [1, [a, b, ["I", "II", "III"]], 3], default) + ). t_nested_get_negative_index(_) -> - ?assertEqual(3, nested_get(?path([{ic,-1}]), [1,2,3])), - ?assertEqual(2, nested_get(?path([{ic,-2}]), [1,2,3])), - ?assertEqual(1, nested_get(?path([{ic,-3}]), [1,2,3])), - ?assertEqual(undefined, nested_get(?path([{ic,-4}]), [1,2,3])). + ?assertEqual(3, nested_get(?path([{ic, -1}]), [1, 2, 3])), + ?assertEqual(2, nested_get(?path([{ic, -2}]), [1, 2, 3])), + ?assertEqual(1, nested_get(?path([{ic, -3}]), [1, 2, 3])), + ?assertEqual(undefined, nested_get(?path([{ic, -4}]), [1, 2, 3])). t_nested_get_mix_map_index(_) -> %% index const - ?assertEqual(1, nested_get(?path([a, {ic,1}]), #{a => [1,2,3]})), - ?assertEqual(2, nested_get(?path([{ic,2}, a]), [1,#{a => 2},3])), - ?assertEqual(undefined, nested_get(?path([a, {ic,0}]), #{a => [1,2,3]})), - ?assertEqual("not_found", nested_get(?path([a, {ic,0}]), #{a => [1,2,3]}, "not_found")), - ?assertEqual("not_found", nested_get(?path([b, {ic,1}]), #{a => [1,2,3]}, "not_found")), - ?assertEqual(undefined, nested_get(?path([{ic,4}, a]), [1,2,3,4])), - ?assertEqual("not_found", nested_get(?path([{ic,4}, a]), [1,2,3,4], "not_found")), - ?assertEqual(c, nested_get(?path([a, {ic,2}, {ic,3}]), #{a => [1,[a,b,c],3]})), - ?assertEqual("I", nested_get(?path([{ic,2}, c, {ic,1}]), [1,#{a => a, b => b, c => ["I","II","III"]},3])), - ?assertEqual("I", nested_get(?path([{ic,2}, c, d]), [1,#{a => a, b => b, c => #{d => "I"}},3])), - ?assertEqual(undefined, nested_get(?path([{ic,2}, c, e]), [1,#{a => a, b => b, c => #{d => "I"}},3])), - ?assertEqual(default, 
nested_get(?path([{ic,2}, c, e]), [1,#{a => a, b => b, c => #{d => "I"}},3], default)), + ?assertEqual(1, nested_get(?path([a, {ic, 1}]), #{a => [1, 2, 3]})), + ?assertEqual(2, nested_get(?path([{ic, 2}, a]), [1, #{a => 2}, 3])), + ?assertEqual(undefined, nested_get(?path([a, {ic, 0}]), #{a => [1, 2, 3]})), + ?assertEqual("not_found", nested_get(?path([a, {ic, 0}]), #{a => [1, 2, 3]}, "not_found")), + ?assertEqual("not_found", nested_get(?path([b, {ic, 1}]), #{a => [1, 2, 3]}, "not_found")), + ?assertEqual(undefined, nested_get(?path([{ic, 4}, a]), [1, 2, 3, 4])), + ?assertEqual("not_found", nested_get(?path([{ic, 4}, a]), [1, 2, 3, 4], "not_found")), + ?assertEqual(c, nested_get(?path([a, {ic, 2}, {ic, 3}]), #{a => [1, [a, b, c], 3]})), + ?assertEqual( + "I", + nested_get(?path([{ic, 2}, c, {ic, 1}]), [1, #{a => a, b => b, c => ["I", "II", "III"]}, 3]) + ), + ?assertEqual( + "I", nested_get(?path([{ic, 2}, c, d]), [1, #{a => a, b => b, c => #{d => "I"}}, 3]) + ), + ?assertEqual( + undefined, nested_get(?path([{ic, 2}, c, e]), [1, #{a => a, b => b, c => #{d => "I"}}, 3]) + ), + ?assertEqual( + default, + nested_get(?path([{ic, 2}, c, e]), [1, #{a => a, b => b, c => #{d => "I"}}, 3], default) + ), %% index var - ?assertEqual(1, nested_get(?path([a, {iv,<<"b">>}]), #{a => [1,2,3], b => 1})), - ?assertEqual(1, nested_get(?path([a, {iv,b}]), #{a => [1,2,3], b => 1})), - ?assertEqual(undefined, nested_get(?path([a, {iv,c}]), #{a => [1,2,3], b => 1})), - ?assertEqual(undefined, nested_get(?path([a, {iv,b}]), #{a => [1,2,3], b => 4})), - ?assertEqual("I", nested_get(?path([{i,?path([{ic, 3}])}, c, d]), - [1,#{a => a, b => b, c => #{d => "I"}},2], default)), - ?assertEqual(3, nested_get(?path([a, {i,?path([b,{ic,1},c])}]), - #{a => [1,2,3], b => [#{c => 3}]})), - ?assertEqual(3, nested_get(?path([a, {i,?path([b,{ic,1},c])}]), - #{a => [1,2,3], b => [#{c => 3}]}, default)), - ?assertEqual(default, nested_get(?path([a, {i,?path([b,{ic,1},c])}]), - #{a => [1,2,3], b => [#{c 
=> 4}]}, default)), - ?assertEqual(default, nested_get(?path([a, {i,?path([b,{ic,2},c])}]), - #{a => [1,2,3], b => [#{c => 3}]}, default)). + ?assertEqual(1, nested_get(?path([a, {iv, <<"b">>}]), #{a => [1, 2, 3], b => 1})), + ?assertEqual(1, nested_get(?path([a, {iv, b}]), #{a => [1, 2, 3], b => 1})), + ?assertEqual(undefined, nested_get(?path([a, {iv, c}]), #{a => [1, 2, 3], b => 1})), + ?assertEqual(undefined, nested_get(?path([a, {iv, b}]), #{a => [1, 2, 3], b => 4})), + ?assertEqual( + "I", + nested_get( + ?path([{i, ?path([{ic, 3}])}, c, d]), + [1, #{a => a, b => b, c => #{d => "I"}}, 2], + default + ) + ), + ?assertEqual( + 3, + nested_get( + ?path([a, {i, ?path([b, {ic, 1}, c])}]), + #{a => [1, 2, 3], b => [#{c => 3}]} + ) + ), + ?assertEqual( + 3, + nested_get( + ?path([a, {i, ?path([b, {ic, 1}, c])}]), + #{a => [1, 2, 3], b => [#{c => 3}]}, + default + ) + ), + ?assertEqual( + default, + nested_get( + ?path([a, {i, ?path([b, {ic, 1}, c])}]), + #{a => [1, 2, 3], b => [#{c => 4}]}, + default + ) + ), + ?assertEqual( + default, + nested_get( + ?path([a, {i, ?path([b, {ic, 2}, c])}]), + #{a => [1, 2, 3], b => [#{c => 3}]}, + default + ) + ). t_atom_key_map(_) -> ?assertEqual(#{a => 1}, atom_key_map(#{<<"a">> => 1})), - ?assertEqual(#{a => 1, b => #{a => 2}}, - atom_key_map(#{<<"a">> => 1, <<"b">> => #{<<"a">> => 2}})), - ?assertEqual([#{a => 1}, #{b => #{a => 2}}], - atom_key_map([#{<<"a">> => 1}, #{<<"b">> => #{<<"a">> => 2}}])), - ?assertEqual(#{a => 1, b => [#{a => 2}, #{c => 2}]}, - atom_key_map(#{<<"a">> => 1, <<"b">> => [#{<<"a">> => 2}, #{<<"c">> => 2}]})). + ?assertEqual( + #{a => 1, b => #{a => 2}}, + atom_key_map(#{<<"a">> => 1, <<"b">> => #{<<"a">> => 2}}) + ), + ?assertEqual( + [#{a => 1}, #{b => #{a => 2}}], + atom_key_map([#{<<"a">> => 1}, #{<<"b">> => #{<<"a">> => 2}}]) + ), + ?assertEqual( + #{a => 1, b => [#{a => 2}, #{c => 2}]}, + atom_key_map(#{<<"a">> => 1, <<"b">> => [#{<<"a">> => 2}, #{<<"c">> => 2}]}) + ). 
all() -> - IsTestCase = fun("t_" ++ _) -> true; (_) -> false end, + IsTestCase = fun + ("t_" ++ _) -> true; + (_) -> false + end, [F || {F, _A} <- module_info(exports), IsTestCase(atom_to_list(F))]. suite() -> diff --git a/apps/emqx_rule_engine/test/prop_rule_maps.erl b/apps/emqx_rule_engine/test/prop_rule_maps.erl index 659a423e1..15eb4ab6d 100644 --- a/apps/emqx_rule_engine/test/prop_rule_maps.erl +++ b/apps/emqx_rule_engine/test/prop_rule_maps.erl @@ -3,8 +3,14 @@ -include_lib("proper/include/proper.hrl"). prop_get_put_single_key() -> - ?FORALL({Key, Val}, {term(), term()}, - begin - Val =:= emqx_rule_maps:nested_get({var, Key}, - emqx_rule_maps:nested_put({var, Key}, Val, #{})) - end). + ?FORALL( + {Key, Val}, + {term(), term()}, + begin + Val =:= + emqx_rule_maps:nested_get( + {var, Key}, + emqx_rule_maps:nested_put({var, Key}, Val, #{}) + ) + end + ).