Merge pull request #3347 from emqx/master
Auto-pull-request-by-2020-03-27
commit 2ddda28821
@@ -18,6 +18,11 @@ jobs:
        make eunit
        make ct
        make cover
    - name: Coveralls
      env:
        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      run: |
        make coveralls
    - uses: actions/upload-artifact@v1
      if: always()
      with:
@@ -26,4 +31,4 @@ jobs:
    - uses: actions/upload-artifact@v1
      with:
        name: cover
        path: _build/test/cover
        path: _build/test/cover
.travis.yml (24 deleted lines)
@@ -1,24 +0,0 @@
language: erlang

otp_release:
  - 22.1

before_install:
  - git clone https://github.com/erlang/rebar3.git; cd rebar3; ./bootstrap; sudo mv rebar3 /usr/local/bin/; cd ..

script:
  - make compile
  - rm -rf rebar.lock
  - make xref
  - rm -rf rebar.lock
  - make eunit
  - rm -rf rebar.lock
  - make ct
  - rm -rf rebar.lock
  - make cover
  - rm -rf rebar.lock

after_success:
  - make coveralls

sudo: false
Makefile (2 changed lines)
@@ -65,7 +65,7 @@ cover:

.PHONY: coveralls
coveralls:
    @rebar3 coveralls send
    @rebar3 as test coveralls send

.PHONY: xref
xref:
@@ -1853,12 +1853,36 @@ module.presence.qos = 1
module.subscription = off

## Subscribe the Topics automatically when client connected.
## module.subscription.1.topic = $client/%c
## Qos of the subscription: 0 | 1 | 2
## module.subscription.1.qos = 1
##
## Value: String
## module.subscription.1.topic = connected/%c/%u

## module.subscription.2.topic = $user/%u
## module.subscription.2.qos = 1
## Qos of the proxy subscription.
##
## Value: 0 | 1 | 2
## Default: 0
## module.subscription.1.qos = 0

## No Local of the proxy subscription options.
## This configuration only takes effect in the MQTT V5 protocol.
##
## Value: 0 | 1
## Default: 0
## module.subscription.1.nl = 0

## Retain As Published of the proxy subscription options.
## This configuration only takes effect in the MQTT V5 protocol.
##
## Value: 0 | 1
## Default: 0
## module.subscription.1.rap = 0

## Retain Handling of the proxy subscription options.
## This configuration only takes effect in the MQTT V5 protocol.
##
## Value: 0 | 1 | 2
## Default: 0
## module.subscription.1.rh = 0

##--------------------------------------------------------------------
## Rewrite Module
@@ -1872,6 +1896,22 @@ module.rewrite = off
## module.rewrite.rule.1 = x/# ^x/y/(.+)$ z/y/$1
## module.rewrite.rule.2 = y/+/z/# ^y/(.+)/z/(.+)$ y/z/$2

##--------------------------------------------------------------------
## Topic Metrics Module

## Enable Topic Metrics Module.
##
## Value: on | off
module.topic_metrics = off

##--------------------------------------------------------------------
## Delayed Module

## Enable Delayed Module.
##
## Value: on | off
module.delayed = off

##-------------------------------------------------------------------
## Plugins
##-------------------------------------------------------------------
@@ -1909,11 +1949,6 @@ broker.sys_interval = 1m
## Default: 30s
broker.sys_heartbeat = 30s

## Enable global session registry.
##
## Value: on | off
broker.enable_session_registry = on

## Session locking strategy in a cluster.
##
## Value: Enum
@@ -128,7 +128,6 @@

-record(plugin, {
          name :: atom(),
          version :: string(),
          dir :: string(),
          descr :: string(),
          vendor :: string(),
@@ -1,5 +1,5 @@
%%-*- mode: erlang -*-
%% EMQ X R3.0 config mapping
%% EMQ X R4.0 config mapping

%%--------------------------------------------------------------------
%% Cluster
@@ -1814,6 +1814,24 @@ end}.
  {validators, ["range:0-2"]}
]}.

{mapping, "module.subscription.$id.nl", "emqx.modules", [
  {default, 0},
  {datatype, integer},
  {validators, ["range:0-1"]}
]}.

{mapping, "module.subscription.$id.rap", "emqx.modules", [
  {default, 0},
  {datatype, integer},
  {validators, ["range:0-1"]}
]}.

{mapping, "module.subscription.$id.rh", "emqx.modules", [
  {default, 0},
  {datatype, integer},
  {validators, ["range:0-2"]}
]}.

{mapping, "module.rewrite", "emqx.modules", [
  {default, off},
  {datatype, flag}
@@ -1823,13 +1841,25 @@ end}.
  {datatype, string}
]}.

{mapping, "module.topic_metrics", "emqx.modules", [
  {default, off},
  {datatype, flag}
]}.

{mapping, "module.delayed", "emqx.modules", [
  {default, off},
  {datatype, flag}
]}.

{translation, "emqx.modules", fun(Conf) ->
    Subscriptions = fun() ->
        List = cuttlefish_variable:filter_by_prefix("module.subscription", Conf),
        QosList = [Qos || {_, Qos} <- lists:sort([{I, Qos} || {[_,"subscription", I,"qos"], Qos} <- List])],
        TopicList = [iolist_to_binary(Topic) || {_, Topic} <-
            lists:sort([{I, Topic} || {[_,"subscription", I, "topic"], Topic} <- List])],
        lists:zip(TopicList, QosList)
        TopicList = [{N, Topic}|| {[_,"subscription",N,"topic"], Topic} <- List],
        [{iolist_to_binary(T), #{ qos => cuttlefish:conf_get("module.subscription." ++ N ++ ".qos", Conf, 0),
                                  nl => cuttlefish:conf_get("module.subscription." ++ N ++ ".nl", Conf, 0),
                                  rap => cuttlefish:conf_get("module.subscription." ++ N ++ ".rap", Conf, 0),
                                  rh => cuttlefish:conf_get("module.subscription." ++ N ++ ".rh", Conf, 0)
                                }} || {N, T} <- TopicList]
    end,
    Rewrites = fun() ->
        Rules = cuttlefish_variable:filter_by_prefix("module.rewrite.rule", Conf),
@@ -1850,6 +1880,14 @@ end}.
        case cuttlefish:conf_get("module.rewrite", Conf) of %% Rewrite
            true -> [{emqx_mod_rewrite, Rewrites()}];
            false -> []
        end,
        case cuttlefish:conf_get("module.topic_metrics", Conf) of %% Topic Metrics
            true -> [{emqx_mod_topic_metrics, []}];
            false -> []
        end,
        case cuttlefish:conf_get("module.delayed", Conf) of %% Delayed
            true -> [{emqx_mod_delayed, []}];
            false -> []
        end
    ])
end}.
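For reference, with a configuration like module.subscription.1.topic = connected/%c/%u and module.subscription.1.qos = 1 (both shown in the emqx.conf hunk above), the new Subscriptions fun in this translation builds one entry per topic roughly as sketched below; any of qos/nl/rap/rh that is not set falls back to the default of 0.

%% Sketch of the value produced for a single module.subscription.$id entry:
[{<<"connected/%c/%u">>, #{qos => 1, nl => 0, rap => 0, rh => 0}}]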
@@ -31,13 +31,12 @@
{cover_opts, [verbose]}.
{cover_export_enabled, true}.

{plugins, [coveralls]}.

{erl_first_files, ["src/emqx_logger.erl"]}.

{profiles,
 [{test,
   [{deps,
   [{plugins, [{coveralls, {git, "https://github.com/emqx/coveralls-erl", {branch, "github"}}}]},
    {deps,
     [{bbmustache, "1.7.0"},
      {emqtt, {git, "https://github.com/emqx/emqtt", {tag, "1.2.0"}}},
      {emqx_ct_helpers, {git, "https://github.com/emqx/emqx-ct-helpers", {tag, "1.2.2"}}}
@@ -1,11 +1,20 @@
CONFIG1 = case os:getenv("TRAVIS") of
    "true" ->
        JobId = os:getenv("TRAVIS_JOB_ID"),
        [{coveralls_service_job_id, JobId},
         {coveralls_coverdata, "_build/test/cover/*.coverdata"},
         {coveralls_service_name , "travis-ci"} | CONFIG];
    _ ->
        CONFIG
end,
%% -*-: erlang -*-

CONFIG1.
case {os:getenv("GITHUB_ACTIONS"), os:getenv("GITHUB_TOKEN")} of
    {"true", Token} when is_list(Token) ->
        CONFIG1 = [{coveralls_repo_token, Token},
                   {coveralls_service_job_id, os:getenv("GITHUB_RUN_ID")},
                   {coveralls_commit_sha, os:getenv("GITHUB_SHA")},
                   {coveralls_service_number, os:getenv("GITHUB_RUN_NUMBER")},
                   {coveralls_coverdata, "_build/test/cover/*.coverdata"},
                   {coveralls_service_name, "github"} | CONFIG],
        case os:getenv("GITHUB_EVENT_NAME") =:= "pull_request"
             andalso string:tokens(os:getenv("GITHUB_REF"), "/") of
            [_, "pull", PRNO, _] ->
                [{coveralls_service_pull_request, PRNO} | CONFIG1];
            _ ->
                CONFIG1
        end;
    _ ->
        CONFIG
end.
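The pull-request detection above relies on GITHUB_REF having the form refs/pull/<N>/merge, so string:tokens/2 with "/" yields four tokens and the third one is the PR number. A quick shell check (the ref value is illustrative, using this PR's number):

%% In an Erlang shell:
1> string:tokens("refs/pull/3347/merge", "/").
["refs","pull","3347","merge"]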
@@ -214,7 +214,7 @@ safe_publish(Msg) when is_record(Msg, message) ->
        publish(Msg)
    catch
        _:Error:Stk->
            ?LOG(error, "Publish error: ~p~n~s~n~p",
            ?LOG(error, "Publish error: ~0p~n~s~n~0p",
                 [Error, emqx_message:format(Msg), Stk])
    after
        []
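Most of the logging changes in this pull request swap ~p for ~0p. For the p control sequence the field width is the line length used by the pretty printer, and a width of 0 keeps the term from being wrapped across lines, so each log entry stays on a single line. A minimal shell comparison (the list is arbitrary):

%% ~p may wrap a long term over several lines; ~0p prints it on one line.
io:format("~p~n",  [lists:seq(1, 50)]),
io:format("~0p~n", [lists:seq(1, 50)]).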
@@ -68,7 +68,7 @@
          %% MQTT Will Msg
          will_msg :: maybe(emqx_types:message()),
          %% MQTT Topic Aliases
          topic_aliases :: maybe(map()),
          topic_aliases :: emqx_types:topic_aliases(),
          %% MQTT Topic Alias Maximum
          alias_maximum :: maybe(map()),
          %% Timers
@@ -94,6 +94,8 @@

-type(replies() :: emqx_types:packet() | reply() | [reply()]).

-define(IS_MQTT_V5, #channel{conninfo = #{proto_ver := ?MQTT_PROTO_V5}}).

-define(TIMER_TABLE, #{
          alive_timer => keepalive,
          retry_timer => retry_delivery,
@@ -180,6 +182,9 @@ init(ConnInfo = #{peername := {PeerHost, _Port},
                  },
    #channel{conninfo = ConnInfo,
             clientinfo = ClientInfo,
             topic_aliases = #{inbound => #{},
                               outbound => #{}
                              },
             timers = #{},
             conn_state = idle,
             takeover = false,
@@ -599,8 +604,8 @@ handle_out(publish, [], Channel) ->
    {ok, Channel};

handle_out(publish, Publishes, Channel) ->
    Packets = do_deliver(Publishes, Channel),
    {ok, {outgoing, Packets}, Channel};
    {Packets, NChannel} = do_deliver(Publishes, Channel),
    {ok, {outgoing, Packets}, NChannel};

handle_out(puback, {PacketId, ReasonCode}, Channel) ->
    {ok, ?PUBACK_PACKET(PacketId, ReasonCode), Channel};
@@ -614,16 +619,14 @@ handle_out(pubrel, {PacketId, ReasonCode}, Channel) ->
handle_out(pubcomp, {PacketId, ReasonCode}, Channel) ->
    {ok, ?PUBCOMP_PACKET(PacketId, ReasonCode), Channel};

handle_out(suback, {PacketId, ReasonCodes},
           Channel = #channel{conninfo = #{proto_ver := ?MQTT_PROTO_V5}}) ->
handle_out(suback, {PacketId, ReasonCodes}, Channel = ?IS_MQTT_V5) ->
    return_suback(?SUBACK_PACKET(PacketId, ReasonCodes), Channel);

handle_out(suback, {PacketId, ReasonCodes}, Channel) ->
    ReasonCodes1 = [emqx_reason_codes:compat(suback, RC) || RC <- ReasonCodes],
    return_suback(?SUBACK_PACKET(PacketId, ReasonCodes1), Channel);

handle_out(unsuback, {PacketId, ReasonCodes},
           Channel = #channel{conninfo = #{proto_ver := ?MQTT_PROTO_V5}}) ->
handle_out(unsuback, {PacketId, ReasonCodes}, Channel = ?IS_MQTT_V5) ->
    return_unsuback(?UNSUBACK_PACKET(PacketId, ReasonCodes), Channel);

handle_out(unsuback, {PacketId, _ReasonCodes}, Channel) ->
@@ -633,8 +636,7 @@ handle_out(disconnect, ReasonCode, Channel) when is_integer(ReasonCode) ->
    ReasonName = disconnect_reason(ReasonCode),
    handle_out(disconnect, {ReasonCode, ReasonName}, Channel);

handle_out(disconnect, {ReasonCode, ReasonName}, Channel =
           #channel{conninfo = #{proto_ver := ?MQTT_PROTO_V5}}) ->
handle_out(disconnect, {ReasonCode, ReasonName}, Channel = ?IS_MQTT_V5) ->
    Packet = ?DISCONNECT_PACKET(ReasonCode),
    {ok, [{outgoing, Packet}, {close, ReasonName}], Channel};

@@ -658,9 +660,9 @@ return_connack(AckPacket, Channel) ->
                                   resuming = false,
                                   pendings = []
                                  },
            Packets = do_deliver(Publishes, NChannel),
            {Packets, NChannel1} = do_deliver(Publishes, NChannel),
            Outgoing = [{outgoing, Packets} || length(Packets) > 0],
            {ok, Replies ++ Outgoing, NChannel}
            {ok, Replies ++ Outgoing, NChannel1}
    end.

%%--------------------------------------------------------------------
@@ -668,16 +670,16 @@ return_connack(AckPacket, Channel) ->
%%--------------------------------------------------------------------

%% return list(emqx_types:packet())
do_deliver({pubrel, PacketId}, _Channel) ->
    [?PUBREL_PACKET(PacketId, ?RC_SUCCESS)];
do_deliver({pubrel, PacketId}, Channel) ->
    {[?PUBREL_PACKET(PacketId, ?RC_SUCCESS)], Channel};

do_deliver({PacketId, Msg}, #channel{clientinfo = ClientInfo =
do_deliver({PacketId, Msg}, Channel = #channel{clientinfo = ClientInfo =
                                     #{mountpoint := MountPoint}}) ->
    case ignore_local(Msg, ClientInfo) of
        true ->
            ok = emqx_metrics:inc('delivery.dropped'),
            ok = emqx_metrics:inc('delivery.dropped.no_local'),
            [];
            {[], Channel};
        false ->
            ok = emqx_metrics:inc('messages.delivered'),
            Msg1 = emqx_hooks:run_fold('message.delivered',
@@ -685,18 +687,21 @@ do_deliver({PacketId, Msg}, #channel{clientinfo = ClientInfo =
                       emqx_message:update_expiry(Msg)
                      ),
            Msg2 = emqx_mountpoint:unmount(MountPoint, Msg1),
            [emqx_message:to_packet(PacketId, Msg2)]
            Packet = emqx_message:to_packet(PacketId, Msg2),
            {NPacket, NChannel} = packing_alias(Packet, Channel),
            {[NPacket], NChannel}
    end;

do_deliver([Publish], Channel) ->
    do_deliver(Publish, Channel);

do_deliver(Publishes, Channel) when is_list(Publishes) ->
    lists:reverse(
      lists:foldl(
        fun(Publish, Acc) ->
            lists:append(do_deliver(Publish, Channel), Acc)
        end, [], Publishes)).
    {Packets, NChannel} =
        lists:foldl(fun(Publish, {Acc, Chann}) ->
                        {Packets, NChann} = do_deliver(Publish, Chann),
                        {Packets ++ Acc, NChann}
                    end, {[], Channel}, Publishes),
    {lists:reverse(Packets), NChannel}.

ignore_local(#message{flags = #{nl := true}, from = ClientId},
             #{clientid := ClientId}) ->
@@ -1072,8 +1077,8 @@ process_alias(Packet = #mqtt_packet{
                 properties = #{'Topic-Alias' := AliasId}
                } = Publish
             },
              Channel = #channel{topic_aliases = Aliases}) ->
    case find_alias(AliasId, Aliases) of
              Channel = ?IS_MQTT_V5 = #channel{topic_aliases = TopicAliases}) ->
    case find_alias(inbound, AliasId, TopicAliases) of
        {ok, Topic} ->
            NPublish = Publish#mqtt_packet_publish{topic_name = Topic},
            {ok, Packet#mqtt_packet{variable = NPublish}, Channel};
@@ -1085,12 +1090,44 @@ process_alias(#mqtt_packet{
                 properties = #{'Topic-Alias' := AliasId}
                }
             },
              Channel = #channel{topic_aliases = Aliases}) ->
    NAliases = save_alias(AliasId, Topic, Aliases),
    {ok, Channel#channel{topic_aliases = NAliases}};
              Channel = ?IS_MQTT_V5 = #channel{topic_aliases = TopicAliases}) ->
    NTopicAliases = save_alias(inbound, AliasId, Topic, TopicAliases),
    {ok, Channel#channel{topic_aliases = NTopicAliases}};

process_alias(_Packet, Channel) -> {ok, Channel}.

%%--------------------------------------------------------------------
%% Packing Topic Alias

packing_alias(Packet = #mqtt_packet{
                          variable = #mqtt_packet_publish{topic_name = Topic} = Publish
                         },
              Channel = ?IS_MQTT_V5 = #channel{topic_aliases = TopicAliases, alias_maximum = Limits}) ->
    case find_alias(outbound, Topic, TopicAliases) of
        {ok, AliasId} ->
            NPublish = Publish#mqtt_packet_publish{
                         topic_name = <<>>,
                         properties = #{'Topic-Alias' => AliasId}
                        },
            {Packet#mqtt_packet{variable = NPublish}, Channel};
        error ->
            #{outbound := Aliases} = TopicAliases,
            AliasId = maps:size(Aliases) + 1,
            case (Limits =:= undefined) orelse
                 (AliasId =< maps:get(outbound, Limits, 0)) of
                true ->
                    NTopicAliases = save_alias(outbound, AliasId, Topic, TopicAliases),
                    NChannel = Channel#channel{topic_aliases = NTopicAliases},
                    NPublish = Publish#mqtt_packet_publish{
                                 topic_name = Topic,
                                 properties = #{'Topic-Alias' => AliasId}
                                },
                    {Packet#mqtt_packet{variable = NPublish}, NChannel};
                false -> {Packet, Channel}
            end
    end;
packing_alias(Packet, Channel) -> {Packet, Channel}.

%%--------------------------------------------------------------------
%% Check Pub Alias

@@ -1164,7 +1201,7 @@ enrich_subid(_Properties, TopicFilters) -> TopicFilters.
%%--------------------------------------------------------------------
%% Enrich SubOpts

enrich_subopts(SubOpts, #channel{conninfo = #{proto_ver := ?MQTT_PROTO_V5}}) ->
enrich_subopts(SubOpts, _Channel = ?IS_MQTT_V5) ->
    SubOpts;
enrich_subopts(SubOpts, #channel{clientinfo = #{zone := Zone, is_bridge := IsBridge}}) ->
    NL = flag(emqx_zone:ignore_loop_deliver(Zone)),
@@ -1173,8 +1210,7 @@ enrich_subopts(SubOpts, #channel{clientinfo = #{zone := Zone, is_bridge := IsBri
%%--------------------------------------------------------------------
%% Enrich ConnAck Caps

enrich_connack_caps(AckProps, #channel{conninfo = #{proto_ver := ?MQTT_PROTO_V5},
                               clientinfo = #{zone := Zone}}) ->
enrich_connack_caps(AckProps, ?IS_MQTT_V5 = #channel{clientinfo = #{zone := Zone}}) ->
    #{max_packet_size := MaxPktSize,
      max_qos_allowed := MaxQoS,
      retain_available := Retain,
@@ -1346,16 +1382,21 @@ run_hooks(Name, Args) ->
run_hooks(Name, Args, Acc) ->
    ok = emqx_metrics:inc(Name), emqx_hooks:run_fold(Name, Args, Acc).

-compile({inline, [find_alias/2, save_alias/3]}).
-compile({inline, [find_alias/3, save_alias/4]}).

find_alias(_AliasId, undefined) -> false;
find_alias(AliasId, Aliases) ->
    maps:find(AliasId, Aliases).
find_alias(_, _ ,undefined) -> false;
find_alias(inbound, AliasId, _TopicAliases = #{inbound := Aliases}) ->
    maps:find(AliasId, Aliases);
find_alias(outbound, Topic, _TopicAliases = #{outbound := Aliases}) ->
    maps:find(Topic, Aliases).

save_alias(AliasId, Topic, undefined) ->
    #{AliasId => Topic};
save_alias(AliasId, Topic, Aliases) ->
    maps:put(AliasId, Topic, Aliases).
save_alias(_, _, _, undefined) -> false;
save_alias(inbound, AliasId, Topic, TopicAliases = #{inbound := Aliases}) ->
    NAliases = maps:put(AliasId, Topic, Aliases),
    TopicAliases#{inbound => NAliases};
save_alias(outbound, AliasId, Topic, TopicAliases = #{outbound := Aliases}) ->
    NAliases = maps:put(Topic, AliasId, Aliases),
    TopicAliases#{outbound => NAliases}.

-compile({inline, [reply/2, shutdown/2, shutdown/3, sp/1, flag/1]}).
@@ -1377,8 +1418,8 @@ shutdown(success, Reply, Packet, Channel) ->
shutdown(Reason, Reply, Packet, Channel) ->
    {shutdown, Reason, Reply, Packet, Channel}.

disconnect_and_shutdown(Reason, Reply, Channel = #channel{conn_state = connected,
                                                          conninfo = #{proto_ver := ?MQTT_PROTO_V5}}) ->
disconnect_and_shutdown(Reason, Reply, Channel = ?IS_MQTT_V5
                        = #channel{conn_state = connected}) ->
    shutdown(Reason, Reply, ?DISCONNECT_PACKET(reason_code(Reason)), Channel);

disconnect_and_shutdown(Reason, Reply, Channel) ->
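As a quick illustration of the new two-sided alias bookkeeping introduced above (topic value made up): the inbound map is keyed by alias id, while the outbound map is keyed by topic, which is what find_alias/3 and save_alias/4 encode.

%% Hypothetical round trip through the new helpers:
Aliases0 = #{inbound => #{}, outbound => #{}},
Aliases1 = save_alias(inbound, 1, <<"sensor/temp">>, Aliases0),
%% Aliases1 = #{inbound => #{1 => <<"sensor/temp">>}, outbound => #{}}
{ok, <<"sensor/temp">>} = find_alias(inbound, 1, Aliases1),
Aliases2 = save_alias(outbound, 1, <<"sensor/temp">>, Aliases1),
%% the outbound side is keyed by topic: #{<<"sensor/temp">> => 1}
{ok, 1} = find_alias(outbound, <<"sensor/temp">>, Aliases2).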
@@ -276,7 +276,7 @@ discard_session(ClientId) when is_binary(ClientId) ->
                      _:{noproc,_}:_Stk -> ok;
                      _:{{shutdown,_},_}:_Stk -> ok;
                      _:Error:_Stk ->
                          ?LOG(error, "Failed to discard ~p: ~p", [ChanPid, Error])
                          ?LOG(error, "Failed to discard ~0p: ~0p", [ChanPid, Error])
                  end
              end, ChanPids)
    end.
@@ -62,7 +62,7 @@ start_link() ->
%% @doc Is the global registry enabled?
-spec(is_enabled() -> boolean()).
is_enabled() ->
    emqx:get_env(enable_channel_registry, true).
    emqx:get_env(enable_session_registry, true).

%% @doc Register a global channel.
-spec(register_channel(emqx_types:clientid()
@@ -319,7 +319,7 @@ handle_msg({'$gen_call', From, Req}, State) ->
    end;

handle_msg({Inet, _Sock, Data}, State) when Inet == tcp; Inet == ssl ->
    ?LOG(debug, "RECV ~p", [Data]),
    ?LOG(debug, "RECV ~0p", [Data]),
    Oct = iolist_size(Data),
    emqx_pd:inc_counter(incoming_bytes, Oct),
    ok = emqx_metrics:inc('bytes.received', Oct),
@@ -513,7 +513,7 @@ parse_incoming(Data, Packets, State = #state{parse_state = ParseState}) ->
            parse_incoming(Rest, [Packet|Packets], NState)
    catch
        error:Reason:Stk ->
            ?LOG(error, "~nParse failed for ~p~n~p~nFrame data:~p",
            ?LOG(error, "~nParse failed for ~0p~n~0p~nFrame data:~0p",
                 [Reason, Stk, Data]),
            {[{frame_error, Reason}|Packets], State}
    end.
@@ -106,7 +106,7 @@ run_command(Cmd, Args) when is_atom(Cmd) ->
                _ -> ok
            catch
                _:Reason:Stacktrace ->
                    ?ERROR("CMD Error:~p, Stacktrace:~p", [Reason, Stacktrace]),
                    ?ERROR("CMD Error:~0p, Stacktrace:~0p", [Reason, Stacktrace]),
                    {error, Reason}
            end;
        [] ->
@@ -165,7 +165,7 @@ safe_execute(Fun, Args) ->
        Result -> Result
    catch
        _:Reason:Stacktrace ->
            ?LOG(error, "Failed to execute ~p: ~p", [Fun, {Reason, Stacktrace}]),
            ?LOG(error, "Failed to execute ~0p: ~0p", [Fun, {Reason, Stacktrace}]),
            ok
    end.

@@ -55,7 +55,7 @@ encode(Term) ->

-spec(encode(json_term(), encode_options()) -> json_text()).
encode(Term, Opts) ->
    jiffy:encode(to_ejson(Term), Opts).
    to_binary(jiffy:encode(to_ejson(Term), Opts)).

-spec(safe_encode(json_term())
      -> {ok, json_text()} | {error, Reason :: term()}).
@@ -118,3 +118,7 @@ from_ejson({L}) ->
    [{Name, from_ejson(Value)} || {Name, Value} <- L];
from_ejson(T) -> T.

to_binary(B) when is_binary(B) -> B;
to_binary(L) when is_list(L) ->
    iolist_to_binary(L).

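The added to_binary/1 wrapper means encode/2 now always hands back a binary even when jiffy produces an iolist; a minimal sketch of the helper's behaviour:

%% Both input shapes normalize to the same binary:
<<"{\"k\":1}">> = to_binary(<<"{\"k\":1}">>),
<<"{\"k\":1}">> = to_binary(["{\"k\":", "1}"]).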
@@ -51,7 +51,7 @@ start_listener({Proto, ListenOn, Options}) ->
        {ok, _} -> io:format("Start mqtt:~s listener on ~s successfully.~n",
                             [Proto, format(ListenOn)]);
        {error, Reason} ->
            io:format(standard_error, "Failed to start mqtt:~s listener on ~s - ~p~n!",
            io:format(standard_error, "Failed to start mqtt:~s listener on ~s - ~0p~n!",
                      [Proto, format(ListenOn), Reason])
    end,
    StartRet.
@@ -101,6 +101,12 @@ rules_from_file(AclFile) ->
            Rules = [emqx_access_rule:compile(Term) || Term <- Terms],
            #{publish => [Rule || Rule <- Rules, filter(publish, Rule)],
              subscribe => [Rule || Rule <- Rules, filter(subscribe, Rule)]};
        {error, eacces} ->
            ?LOG(alert, "Insufficient permissions to read the ~s file", [AclFile]),
            #{};
        {error, enoent} ->
            ?LOG(alert, "The ~s file does not exist", [AclFile]),
            #{};
        {error, Reason} ->
            ?LOG(alert, "Failed to read ~s: ~p", [AclFile, Reason]),
            #{}
@@ -0,0 +1,195 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------

-module(emqx_mod_delayed).

-behaviour(gen_server).

-include_lib("emqx/include/emqx.hrl").
-include_lib("emqx/include/logger.hrl").

%% emqx_gen_mod callbacks
-export([ load/1
        , unload/1
        ]).

-export([ start_link/0
        , on_message_publish/1
        ]).

%% gen_server callbacks
-export([ init/1
        , handle_call/3
        , handle_cast/2
        , handle_info/2
        , terminate/2
        , code_change/3
        ]).

-record(delayed_message,
        { key
        , msg
        }).

-define(TAB, ?MODULE).
-define(SERVER, ?MODULE).
-define(MAX_INTERVAL, 4294967).

%%--------------------------------------------------------------------
%% Load/Unload
%%--------------------------------------------------------------------

-spec(load(list()) -> ok).
load(_Env) ->
    emqx_mod_sup:start_child(?MODULE, worker),
    emqx:hook('message.publish', {?MODULE, on_message_publish, []}).

-spec(unload(list()) -> ok).
unload(_Env) ->
    emqx:unhook('message.publish', {?MODULE, on_message_publish, []}),
    emqx_mod_sup:stop_child(?MODULE).

%%--------------------------------------------------------------------
%% Hooks
%%--------------------------------------------------------------------

on_message_publish(Msg = #message{id = Id, topic = <<"$delayed/", Topic/binary>>, timestamp = Ts}) ->
    [Delay, Topic1] = binary:split(Topic, <<"/">>),
    PubAt = case binary_to_integer(Delay) of
                Interval when Interval < ?MAX_INTERVAL ->
                    Interval + erlang:round(Ts / 1000);
                Timestamp ->
                    %% Check malicious timestamp?
                    case (Timestamp - erlang:round(Ts / 1000)) > ?MAX_INTERVAL of
                        true -> error(invalid_delayed_timestamp);
                        false -> Timestamp
                    end
            end,
    PubMsg = Msg#message{topic = Topic1},
    Headers = case PubMsg#message.headers of
                  undefined -> #{};
                  Headers0 -> Headers0
              end,
    ok = store(#delayed_message{key = {PubAt, delayed_mid(Id)}, msg = PubMsg}),
    {stop, PubMsg#message{headers = Headers#{allow_publish => false}}};

on_message_publish(Msg) ->
    {ok, Msg}.

%% @private
delayed_mid(undefined) ->
    emqx_guid:gen();
delayed_mid(MsgId) -> MsgId.

%%--------------------------------------------------------------------
%% Start delayed publish server
%%--------------------------------------------------------------------

-spec(start_link() -> emqx_types:startlink_ret()).
start_link() ->
    gen_server:start_link({local, ?SERVER}, ?MODULE, [], []).

-spec(store(#delayed_message{}) -> ok).
store(DelayedMsg) ->
    gen_server:call(?SERVER, {store, DelayedMsg}, infinity).

%%--------------------------------------------------------------------
%% gen_server callback
%%--------------------------------------------------------------------

init([]) ->
    ok = ekka_mnesia:create_table(?TAB, [
                {type, ordered_set},
                {disc_copies, [node()]},
                {local_content, true},
                {record_name, delayed_message},
                {attributes, record_info(fields, delayed_message)}]),
    ok = ekka_mnesia:copy_table(?TAB, disc_copies),
    {ok, ensure_publish_timer(#{timer => undefined, publish_at => 0})}.

handle_call({store, DelayedMsg = #delayed_message{key = Key}}, _From, State) ->
    ok = mnesia:dirty_write(?TAB, DelayedMsg),
    emqx_metrics:set('messages.delayed', delayed_count()),
    {reply, ok, ensure_publish_timer(Key, State)};

handle_call(Req, _From, State) ->
    ?LOG(error, "[Delayed] Unexpected call: ~p", [Req]),
    {reply, ignored, State}.

handle_cast(Msg, State) ->
    ?LOG(error, "[Delayed] Unexpected cast: ~p", [Msg]),
    {noreply, State}.

%% Do Publish...
handle_info({timeout, TRef, do_publish}, State = #{timer := TRef}) ->
    DeletedKeys = do_publish(mnesia:dirty_first(?TAB), os:system_time(seconds)),
    lists:foreach(fun(Key) -> mnesia:dirty_delete(?TAB, Key) end, DeletedKeys),
    emqx_metrics:set('messages.delayed', delayed_count()),
    {noreply, ensure_publish_timer(State#{timer := undefined, publish_at := 0})};

handle_info(Info, State) ->
    ?LOG(error, "[Delayed] Unexpected info: ~p", [Info]),
    {noreply, State}.

terminate(_Reason, #{timer := TRef}) ->
    emqx_misc:cancel_timer(TRef).

code_change(_OldVsn, State, _Extra) ->
    {ok, State}.

%%--------------------------------------------------------------------
%% Internal functions
%%--------------------------------------------------------------------

%% Ensure publish timer
ensure_publish_timer(State) ->
    ensure_publish_timer(mnesia:dirty_first(?TAB), State).

ensure_publish_timer('$end_of_table', State) ->
    State#{timer := undefined, publish_at := 0};
ensure_publish_timer({Ts, _Id}, State = #{timer := undefined}) ->
    ensure_publish_timer(Ts, os:system_time(seconds), State);
ensure_publish_timer({Ts, _Id}, State = #{timer := TRef, publish_at := PubAt})
  when Ts < PubAt ->
    ok = emqx_misc:cancel_timer(TRef),
    ensure_publish_timer(Ts, os:system_time(seconds), State);
ensure_publish_timer(_Key, State) ->
    State.

ensure_publish_timer(Ts, Now, State) ->
    Interval = max(1, Ts - Now),
    TRef = emqx_misc:start_timer(timer:seconds(Interval), do_publish),
    State#{timer := TRef, publish_at := Now + Interval}.

do_publish(Key, Now) ->
    do_publish(Key, Now, []).

%% Do publish
do_publish('$end_of_table', _Now, Acc) ->
    Acc;
do_publish({Ts, _Id}, Now, Acc) when Ts > Now ->
    Acc;
do_publish(Key = {Ts, _Id}, Now, Acc) when Ts =< Now ->
    case mnesia:dirty_read(?TAB, Key) of
        [] -> ok;
        [#delayed_message{msg = Msg}] ->
            emqx_pool:async_submit(fun emqx_broker:publish/1, [Msg])
    end,
    do_publish(mnesia:dirty_next(?TAB, Key), Now, [Key|Acc]).

-spec(delayed_count() -> non_neg_integer()).
delayed_count() -> mnesia:table_info(?TAB, size).
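Usage note (topic, payload, and interval are made up): a message published to $delayed/<Seconds>/<Topic> is stored and republished to <Topic> once the interval elapses; an absolute Unix timestamp may be given instead of an interval, as long as it is no more than ?MAX_INTERVAL seconds in the future. A hedged sketch with an already-connected emqtt client C:

%% Deliver <<"ping">> to <<"devices/alarm">> roughly 15 seconds from now.
_ = emqtt:publish(C, <<"$delayed/15/devices/alarm">>, <<"ping">>, 0).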
@@ -36,11 +36,14 @@
load(Topics) ->
    emqx_hooks:add('client.connected', {?MODULE, on_client_connected, [Topics]}).

on_client_connected(#{clientid := ClientId, username := Username}, _ConnInfo, Topics) ->
on_client_connected(#{clientid := ClientId, username := Username}, _ConnInfo = #{proto_ver := ProtoVer}, Topics) ->
    Replace = fun(Topic) ->
                  rep(<<"%u">>, Username, rep(<<"%c">>, ClientId, Topic))
              end,
    TopicFilters = [{Replace(Topic), #{qos => QoS}} || {Topic, QoS} <- Topics],
    TopicFilters = case ProtoVer of
        ?MQTT_PROTO_V5 -> [{Replace(Topic), SubOpts} || {Topic, SubOpts} <- Topics];
        _ -> [{Replace(Topic), #{qos => Qos}} || {Topic, #{qos := Qos}} <- Topics]
    end,
    self() ! {subscribe, TopicFilters}.

unload(_) ->
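For example (client id and username made up), a configured topic of connected/%c/%u is expanded per client before the proxy subscription is made; MQTT v5 clients keep the full SubOpts map while older clients fall back to the qos-only form. The module's rep/3 helper is not shown in this diff, but binary:replace/4 gives the same effect for a single topic:

%% Approximation of the %c/%u expansion for ClientId <<"c1">>, Username <<"u1">>:
Topic0 = <<"connected/%c/%u">>,
T1 = binary:replace(Topic0, <<"%c">>, <<"c1">>, [global]),
<<"connected/c1/u1">> = binary:replace(T1, <<"%u">>, <<"u1">>, [global]).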
@@ -0,0 +1,369 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------

-module(emqx_mod_topic_metrics).

-behaviour(gen_server).

-include("emqx.hrl").
-include("logger.hrl").
-include("emqx_mqtt.hrl").

-logger_header("[TOPIC_METRICS]").

-export([ load/1
        , unload/1
        ]).

-export([ on_message_publish/1
        , on_message_delivered/2
        , on_message_dropped/3
        ]).

%% API functions
-export([ start_link/0
        , stop/0
        ]).

-export([ inc/2
        , inc/3
        , val/2
        , rate/2
        , metrics/1
        , register/1
        , unregister/1
        , unregister_all/0
        , is_registered/1
        ]).

%% gen_server callbacks
-export([ init/1
        , handle_call/3
        , handle_info/2
        , handle_cast/2
        , terminate/2
        ]).

-define(CRefID(Topic), {?MODULE, Topic}).

-define(MAX_TOPICS, 512).
-define(TAB, ?MODULE).

-define(TOPIC_METRICS,
        ['messages.in',
         'messages.out',
         'messages.qos0.in',
         'messages.qos0.out',
         'messages.qos1.in',
         'messages.qos1.out',
         'messages.qos2.in',
         'messages.qos2.out',
         'messages.dropped'
        ]).

-define(TICKING_INTERVAL, 1).

-record(speed, {
            last = 0 :: number(),
            tick = 1 :: number(),
            last_v = 0 :: number(),
            acc = 0 :: number(),
            samples = [] :: list()
        }).

-record(state, {
            speeds :: #{{binary(), atom()} => #speed{}}
        }).

%%------------------------------------------------------------------------------
%% APIs
%%------------------------------------------------------------------------------

load(_Env) ->
    emqx_mod_sup:start_child(?MODULE, worker),
    emqx:hook('message.publish', fun ?MODULE:on_message_publish/1, []),
    emqx:hook('message.dropped', fun ?MODULE:on_message_dropped/3, []),
    emqx:hook('message.delivered', fun ?MODULE:on_message_delivered/2, []).

unload(_Env) ->
    emqx:unhook('message.publish', fun ?MODULE:on_message_publish/1),
    emqx:unhook('message.dropped', fun ?MODULE:on_message_dropped/3),
    emqx:unhook('message.delivered', fun ?MODULE:on_message_delivered/2),
    emqx_mod_sup:stop_child(?MODULE).

on_message_publish(#message{topic = Topic, qos = QoS}) ->
    case is_registered(Topic) of
        true ->
            inc(Topic, 'messages.in'),
            case QoS of
                ?QOS_0 -> inc(Topic, 'messages.qos0.in');
                ?QOS_1 -> inc(Topic, 'messages.qos1.in');
                ?QOS_2 -> inc(Topic, 'messages.qos2.in')
            end;
        false ->
            ok
    end.

on_message_delivered(_, #message{topic = Topic, qos = QoS}) ->
    case is_registered(Topic) of
        true ->
            inc(Topic, 'messages.out'),
            case QoS of
                ?QOS_0 -> inc(Topic, 'messages.qos0.out');
                ?QOS_1 -> inc(Topic, 'messages.qos1.out');
                ?QOS_2 -> inc(Topic, 'messages.qos2.out')
            end;
        false ->
            ok
    end.

on_message_dropped(#message{topic = Topic}, _, _) ->
    case is_registered(Topic) of
        true ->
            inc(Topic, 'messages.dropped');
        false ->
            ok
    end.

start_link() ->
    gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).

stop() ->
    gen_server:stop(?MODULE).

inc(Topic, Metric) ->
    inc(Topic, Metric, 1).

inc(Topic, Metric, Val) ->
    case get_counters(Topic) of
        {error, not_found} ->
            {error, not_found};
        CRef ->
            counters:add(CRef, metrics_idx(Metric), Val)
    end.

val(Topic, Metric) ->
    case ets:lookup(?TAB, Topic) of
        [] ->
            {error, not_found};
        [{Topic, CRef}] ->
            counters:get(CRef, metrics_idx(Metric))
    end.

rate(Topic, Metric) ->
    gen_server:call(?MODULE, {get_rate, Topic, Metric}).

metrics(Topic) ->
    case ets:lookup(?TAB, Topic) of
        [] ->
            {error, not_found};
        [{Topic, CRef}] ->
            lists:foldl(fun(Metric, Acc) ->
                            [{to_count(Metric), counters:get(CRef, metrics_idx(Metric))},
                             {to_rate(Metric), rate(Topic, Metric)} | Acc]
                        end, [], ?TOPIC_METRICS)
    end.

register(Topic) when is_binary(Topic) ->
    gen_server:call(?MODULE, {register, Topic}).

unregister(Topic) when is_binary(Topic) ->
    gen_server:call(?MODULE, {unregister, Topic}).

unregister_all() ->
    gen_server:call(?MODULE, {unregister, all}).

is_registered(Topic) ->
    ets:member(?TAB, Topic).

%%--------------------------------------------------------------------
%% gen_server callbacks
%%--------------------------------------------------------------------

init([]) ->
    erlang:process_flag(trap_exit, true),
    ok = emqx_tables:new(?TAB, [{read_concurrency, true}]),
    erlang:send_after(timer:seconds(?TICKING_INTERVAL), self(), ticking),
    {ok, #state{speeds = #{}}, hibernate}.

handle_call({register, Topic}, _From, State = #state{speeds = Speeds}) ->
    case is_registered(Topic) of
        true ->
            {reply, {error, already_existed}, State};
        false ->
            case number_of_registered_topics() < ?MAX_TOPICS of
                true ->
                    CRef = counters:new(counters_size(), [write_concurrency]),
                    true = ets:insert(?TAB, {Topic, CRef}),
                    [counters:put(CRef, Idx, 0) || Idx <- lists:seq(1, counters_size())],
                    NSpeeds = lists:foldl(fun(Metric, Acc) ->
                                              maps:put({Topic, Metric}, #speed{}, Acc)
                                          end, Speeds, ?TOPIC_METRICS),
                    {reply, ok, State#state{speeds = NSpeeds}};
                false ->
                    {reply, {error, quota_exceeded}, State}
            end
    end;

handle_call({unregister, all}, _From, State) ->
    [delete_counters(Topic) || {Topic, _CRef} <- ets:tab2list(?TAB)],
    {reply, ok, State#state{speeds = #{}}};

handle_call({unregister, Topic}, _From, State = #state{speeds = Speeds}) ->
    case is_registered(Topic) of
        false ->
            {reply, ok, State};
        true ->
            ok = delete_counters(Topic),
            {reply, ok, State#state{speeds = maps:remove(Topic, Speeds)}}
    end;

handle_call({get_rate, Topic}, _From, State = #state{speeds = Speeds}) ->
    case is_registered(Topic) of
        false ->
            {reply, {error, not_found}, State};
        true ->
            lists:foldl(fun(Metric, Acc) ->
                            Speed = maps:get({Topic, Metric}, Speeds),
                            [{Metric, Speed#speed.last} | Acc]
                        end, [], ?TOPIC_METRICS)
    end;

handle_call({get_rate, Topic, Metric}, _From, State = #state{speeds = Speeds}) ->
    case is_registered(Topic) of
        false ->
            {reply, {error, not_found}, State};
        true ->
            case maps:get({Topic, Metric}, Speeds, undefined) of
                undefined ->
                    {reply, {error, not_found}, State};
                #speed{last = Last} ->
                    {reply, Last, State}
            end
    end.

handle_cast(Msg, State) ->
    ?LOG(error, "Unexpected cast: ~p", [Msg]),
    {noreply, State}.

handle_info(ticking, State = #state{speeds = Speeds}) ->
    NSpeeds = maps:map(
            fun({Topic, Metric}, Speed) ->
                case val(Topic, Metric) of
                    {error, not_found} -> maps:remove(Topic, Speeds);
                    Val -> calculate_speed(Val, Speed)
                end
            end, Speeds),
    erlang:send_after(timer:seconds(5), self(), ticking),
    {noreply, State#state{speeds = NSpeeds}};

handle_info(Info, State) ->
    ?LOG(error, "Unexpected info: ~p", [Info]),
    {noreply, State}.

terminate(_Reason, _State) ->
    ok.

%%------------------------------------------------------------------------------
%% Internal Functions
%%------------------------------------------------------------------------------

metrics_idx('messages.in') -> 01;
metrics_idx('messages.out') -> 02;
metrics_idx('messages.qos0.in') -> 03;
metrics_idx('messages.qos0.out') -> 04;
metrics_idx('messages.qos1.in') -> 05;
metrics_idx('messages.qos1.out') -> 06;
metrics_idx('messages.qos2.in') -> 07;
metrics_idx('messages.qos2.out') -> 08;
metrics_idx('messages.dropped') -> 09.

to_count('messages.in') ->
    'messages.in.count';
to_count('messages.out') ->
    'messages.out.count';
to_count('messages.qos0.in') ->
    'messages.qos0.in.count';
to_count('messages.qos0.out') ->
    'messages.qos0.out.count';
to_count('messages.qos1.in') ->
    'messages.qos1.in.count';
to_count('messages.qos1.out') ->
    'messages.qos1.out.count';
to_count('messages.qos2.in') ->
    'messages.qos2.in.count';
to_count('messages.qos2.out') ->
    'messages.qos2.out.count';
to_count('messages.dropped') ->
    'messages.dropped.count'.

to_rate('messages.in') ->
    'messages.in.rate';
to_rate('messages.out') ->
    'messages.out.rate';
to_rate('messages.qos0.in') ->
    'messages.qos0.in.rate';
to_rate('messages.qos0.out') ->
    'messages.qos0.out.rate';
to_rate('messages.qos1.in') ->
    'messages.qos1.in.rate';
to_rate('messages.qos1.out') ->
    'messages.qos1.out.rate';
to_rate('messages.qos2.in') ->
    'messages.qos2.in.rate';
to_rate('messages.qos2.out') ->
    'messages.qos2.out.rate';
to_rate('messages.dropped') ->
    'messages.dropped.rate'.

delete_counters(Topic) ->
    true = ets:delete(?TAB, Topic),
    ok.

get_counters(Topic) ->
    case ets:lookup(?TAB, Topic) of
        [] -> {error, not_found};
        [{Topic, CRef}] -> CRef
    end.

counters_size() ->
    length(?TOPIC_METRICS).

number_of_registered_topics() ->
    proplists:get_value(size, ets:info(?TAB)).

calculate_speed(CurVal, #speed{last_v = LastVal, tick = Tick, acc = Acc, samples = Samples}) ->
    %% calculate the current speed based on the last value of the counter
    CurSpeed = (CurVal - LastVal) / ?TICKING_INTERVAL,

    %% calculate the average speed in last 5 seconds
    case Tick =< 5 of
        true ->
            Acc1 = Acc + CurSpeed,
            #speed{last = Acc1 / Tick,
                   last_v = CurVal,
                   acc = Acc1,
                   samples = Samples ++ [CurSpeed],
                   tick = Tick + 1};
        false ->
            [FirstSpeed | Speeds] = Samples,
            Acc1 = Acc + CurSpeed - FirstSpeed,
            #speed{last = Acc1 / Tick,
                   last_v = CurVal,
                   acc = Acc1,
                   samples = Speeds ++ [CurSpeed],
                   tick = Tick}
    end.
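A short usage sketch of the API exported by this new module (topic name made up). Metrics are only collected for explicitly registered topics, up to ?MAX_TOPICS of them; metrics/1 returns both the .count and .rate views for every metric in ?TOPIC_METRICS.

%% Register a topic, bump a counter, then read it back:
ok = emqx_mod_topic_metrics:register(<<"sensors/temp">>),
ok = emqx_mod_topic_metrics:inc(<<"sensors/temp">>, 'messages.in'),
1  = emqx_mod_topic_metrics:val(<<"sensors/temp">>, 'messages.in'),
Metrics = emqx_mod_topic_metrics:metrics(<<"sensors/temp">>).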
@@ -442,7 +442,7 @@ format_variable(undefined, _) ->
format_variable(Variable, undefined) ->
    format_variable(Variable);
format_variable(Variable, Payload) ->
    io_lib:format("~s, Payload=~p", [format_variable(Variable), Payload]).
    io_lib:format("~s, Payload=~0p", [format_variable(Variable), Payload]).

format_variable(#mqtt_packet_connect{
                 proto_ver = ProtoVer,
@@ -460,7 +460,7 @@ format_variable(#mqtt_packet_connect{
    Format = "ClientId=~s, ProtoName=~s, ProtoVsn=~p, CleanStart=~s, KeepAlive=~p, Username=~s, Password=~s",
    Args = [ClientId, ProtoName, ProtoVer, CleanStart, KeepAlive, Username, format_password(Password)],
    {Format1, Args1} = if
                           WillFlag -> {Format ++ ", Will(Q~p, R~p, Topic=~s, Payload=~p)",
                           WillFlag -> {Format ++ ", Will(Q~p, R~p, Topic=~s, Payload=~0p)",
                                        Args ++ [WillQoS, i(WillRetain), WillTopic, WillPayload]};
                           true -> {Format, Args}
                       end,
@@ -27,9 +27,11 @@
        , load/1
        , unload/0
        , unload/1
        , reload/1
        , list/0
        , find_plugin/1
        , load_expand_plugin/1
        , generate_configs/1
        , apply_configs/1
        ]).

-ifdef(TEST).
@@ -51,12 +53,6 @@ init() ->
            lists:foreach(fun init_config/1, CfgFiles)
    end.

init_config(CfgFile) ->
    {ok, [AppsEnv]} = file:consult(CfgFile),
    lists:foreach(fun({App, Envs}) ->
                      [application:set_env(App, Par, Val) || {Par, Val} <- Envs]
                  end, AppsEnv).

%% @doc Load all plugins when the broker started.
-spec(load() -> list() | {error, term()}).
load() ->
@@ -68,6 +64,85 @@ load() ->
            with_loaded_file(File, fun(Names) -> load_plugins(Names, false) end)
    end.

%% @doc Load a Plugin
-spec(load(atom()) -> ok | {error, term()}).
load(PluginName) when is_atom(PluginName) ->
    case {lists:member(PluginName, names(plugin)), lists:member(PluginName, names(started_app))} of
        {false, _} ->
            ?LOG(alert, "Plugin ~s not found, cannot load it", [PluginName]),
            {error, not_found};
        {_, true} ->
            ?LOG(notice, "Plugin ~s is already started", [PluginName]),
            {error, already_started};
        {_, false} ->
            load_plugin(PluginName, true)
    end.

%% @doc Unload all plugins before broker stopped.
-spec(unload() -> list() | {error, term()}).
unload() ->
    case emqx:get_env(plugins_loaded_file) of
        undefined -> ignore;
        File ->
            with_loaded_file(File, fun stop_plugins/1)
    end.

%% @doc UnLoad a Plugin
-spec(unload(atom()) -> ok | {error, term()}).
unload(PluginName) when is_atom(PluginName) ->
    case {lists:member(PluginName, names(plugin)), lists:member(PluginName, names(started_app))} of
        {false, _} ->
            ?LOG(error, "Plugin ~s is not found, cannot unload it", [PluginName]),
            {error, not_found};
        {_, false} ->
            ?LOG(error, "Plugin ~s is not started", [PluginName]),
            {error, not_started};
        {_, _} ->
            unload_plugin(PluginName, true)
    end.

reload(PluginName) when is_atom(PluginName)->
    case {lists:member(PluginName, names(plugin)), lists:member(PluginName, names(started_app))} of
        {false, _} ->
            ?LOG(error, "Plugin ~s is not found, cannot reload it", [PluginName]),
            {error, not_found};
        {_, false} ->
            load(PluginName);
        {_, true} ->
            case unload(PluginName) of
                ok -> load(PluginName);
                {error, Reason} -> {error, Reason}
            end
    end.

%% @doc List all available plugins
-spec(list() -> [emqx_types:plugin()]).
list() ->
    StartedApps = names(started_app),
    lists:map(fun({Name, _, [Type| _]}) ->
                  Plugin = plugin(Name, Type),
                  case lists:member(Name, StartedApps) of
                      true -> Plugin#plugin{active = true};
                      false -> Plugin
                  end
              end, lists:sort(ekka_boot:all_module_attributes(emqx_plugin))).

find_plugin(Name) ->
    find_plugin(Name, list()).

find_plugin(Name, Plugins) ->
    lists:keyfind(Name, 2, Plugins).

%%--------------------------------------------------------------------
%% Internal functions
%%--------------------------------------------------------------------

init_config(CfgFile) ->
    {ok, [AppsEnv]} = file:consult(CfgFile),
    lists:foreach(fun({App, Envs}) ->
                      [application:set_env(App, Par, Val) || {Par, Val} <- Envs]
                  end, AppsEnv).

load_expand_plugins() ->
    case emqx:get_env(expand_plugins_dir) of
        undefined -> ok;
@@ -136,65 +211,60 @@ load_plugins(Names, Persistent) ->
        NotFound -> ?LOG(alert, "Cannot find plugins: ~p", [NotFound])
    end,
    NeedToLoad = Names -- NotFound -- names(started_app),
    [load_plugin(find_plugin(Name, Plugins), Persistent) || Name <- NeedToLoad].
    lists:foreach(fun(Name) ->
                      Plugin = find_plugin(Name, Plugins),
                      load_plugin(Plugin#plugin.name, Persistent)
                  end, NeedToLoad).

%% @doc Unload all plugins before broker stopped.
-spec(unload() -> list() | {error, term()}).
unload() ->
    case emqx:get_env(plugins_loaded_file) of
        undefined -> ignore;
        File ->
            with_loaded_file(File, fun stop_plugins/1)
generate_configs(App) ->
    ConfigFile = filename:join([emqx:get_env(plugins_etc_dir), App]) ++ ".config",
    ConfFile = filename:join([emqx:get_env(plugins_etc_dir), App]) ++ ".conf",
    SchemaFile = filename:join([code:priv_dir(App), App]) ++ ".schema",
    case {filelib:is_file(ConfigFile), filelib:is_file(ConfFile) andalso filelib:is_file(SchemaFile)} of
        {true, _} ->
            {ok, [Configs]} = file:consult(ConfigFile),
            Configs;
        {_, true} ->
            Schema = cuttlefish_schema:files([SchemaFile]),
            Conf = cuttlefish_conf:file(ConfFile),
            cuttlefish_generator:map(Schema, Conf);
        {false, false} ->
            error(no_avaliable_configuration)
    end.

apply_configs([]) ->
    ok;
apply_configs([{App, Config} | More]) ->
    lists:foreach(fun({Key, _}) -> application:unset_env(App, Key) end, application:get_all_env(App)),
    lists:foreach(fun({Key, Val}) -> application:set_env(App, Key, Val) end, Config),
    apply_configs(More).

%% Stop plugins
stop_plugins(Names) ->
    [stop_app(App) || App <- Names].

%% @doc List all available plugins
-spec(list() -> [emqx_types:plugin()]).
list() ->
    StartedApps = names(started_app),
    lists:map(fun({Name, _, [Type| _]}) ->
                  Plugin = plugin(Name, Type),
                  case lists:member(Name, StartedApps) of
                      true -> Plugin#plugin{active = true};
                      false -> Plugin
                  end
              end, lists:sort(ekka_boot:all_module_attributes(emqx_plugin))).
    [stop_app(App) || App <- Names],
    ok.

plugin(AppName, Type) ->
    case application:get_all_key(AppName) of
        {ok, Attrs} ->
            Ver = proplists:get_value(vsn, Attrs, "0"),
            Descr = proplists:get_value(description, Attrs, ""),
            #plugin{name = AppName, version = Ver, descr = Descr, type = plugin_type(Type)};
            #plugin{name = AppName, descr = Descr, type = plugin_type(Type)};
        undefined -> error({plugin_not_found, AppName})
    end.

%% @doc Load a Plugin
-spec(load(atom()) -> ok | {error, term()}).
load(PluginName) when is_atom(PluginName) ->
    case lists:member(PluginName, names(started_app)) of
        true ->
            ?LOG(notice, "Plugin ~s is already started", [PluginName]),
            {error, already_started};
        false ->
            case find_plugin(PluginName) of
                false ->
                    ?LOG(alert, "Plugin ~s not found", [PluginName]),
                    {error, not_found};
                Plugin ->
                    load_plugin(Plugin, true)
            end
    end.

load_plugin(#plugin{name = Name}, Persistent) ->
    case load_app(Name) of
        ok ->
            start_app(Name, fun(App) -> plugin_loaded(App, Persistent) end);
        {error, Error} ->
            {error, Error}
load_plugin(Name, Persistent) ->
    try
        Configs = ?MODULE:generate_configs(Name),
        ?MODULE:apply_configs(Configs),
        case load_app(Name) of
            ok ->
                start_app(Name, fun(App) -> plugin_loaded(App, Persistent) end);
            {error, Error0} ->
                {error, Error0}
        end
    catch _ : Error : Stacktrace ->
        ?LOG(alert, "Plugin ~s load failed with ~p", [Name, {Error, Stacktrace}]),
        {error, parse_config_file_failed}
    end.

load_app(App) ->
@@ -215,30 +285,10 @@ start_app(App, SuccFun) ->
            SuccFun(App),
            ok;
        {error, {ErrApp, Reason}} ->
            ?LOG(error, "Load plugin ~s failed, cannot start plugin ~s for ~p", [App, ErrApp, Reason]),
            ?LOG(error, "Load plugin ~s failed, cannot start plugin ~s for ~0p", [App, ErrApp, Reason]),
            {error, {ErrApp, Reason}}
    end.

find_plugin(Name) ->
    find_plugin(Name, list()).

find_plugin(Name, Plugins) ->
    lists:keyfind(Name, 2, Plugins).

%% @doc UnLoad a Plugin
-spec(unload(atom()) -> ok | {error, term()}).
unload(PluginName) when is_atom(PluginName) ->
    case {lists:member(PluginName, names(started_app)), lists:member(PluginName, names(plugin))} of
        {true, true} ->
            unload_plugin(PluginName, true);
        {false, _} ->
            ?LOG(error, "Plugin ~s is not started", [PluginName]),
            {error, not_started};
        {true, false} ->
            ?LOG(error, "~s is not a plugin, cannot unload it", [PluginName]),
            {error, not_found}
    end.

unload_plugin(App, Persistent) ->
    case stop_app(App) of
        ok ->
@@ -257,9 +307,6 @@ stop_app(App) ->
            ?LOG(error, "Stop plugin ~s error: ~p", [App]), {error, Reason}
    end.

%%--------------------------------------------------------------------
%% Internal functions
%%--------------------------------------------------------------------
names(plugin) ->
    names(list());
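The reworked load path reads either a pre-generated <plugin>.config or a <plugin>.conf plus its cuttlefish schema before the plugin application starts. A hedged manual round trip from the console (the plugin name is only an example): load/1 itself calls generate_configs/1 and apply_configs/1, and reload/1 unloads first when the plugin is already running.

emqx_plugins:load(emqx_auth_username),
emqx_plugins:reload(emqx_auth_username),
emqx_plugins:unload(emqx_auth_username).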
@@ -107,7 +107,7 @@ handle_call(Req, _From, State) ->
handle_cast({async_submit, Task}, State) ->
    try run(Task)
    catch _:Error:Stacktrace ->
        ?LOG(error, "Error: ~p, ~p", [Error, Stacktrace])
        ?LOG(error, "Error: ~0p, ~0p", [Error, Stacktrace])
    end,
    {noreply, State};

@@ -35,6 +35,6 @@ lookup(psk, ClientPSKID, _UserState) ->
        error
    catch
        Except:Error:Stacktrace ->
            ?LOG(error, "Lookup PSK failed, ~p: ~p", [{Except,Error}, Stacktrace]),
            ?LOG(error, "Lookup PSK failed, ~0p: ~0p", [{Except,Error}, Stacktrace]),
            error
    end.
@@ -243,7 +243,7 @@ handle_info({timeout, TRef, tick}, State = #state{timer = TRef, updates = Update
                         try UpFun()
                         catch
                             _:Error ->
                                 ?LOG(error, "Update ~s failed: ~p", [Name, Error])
                                 ?LOG(error, "Update ~s failed: ~0p", [Name, Error])
                         end,
                         [Update#update{countdown = I} | Acc];
                    (Update = #update{countdown = C}, Acc) ->
@@ -272,6 +272,6 @@ safe_update_element(Key, Val) ->
        true -> true
    catch
        error:badarg ->
            ?LOG(warning, "Failed to update ~p to ~p", [Key, Val])
            ?LOG(warning, "Failed to update ~0p to ~0p", [Key, Val])
    end.

@@ -67,11 +67,12 @@ init([]) ->
    BrokerSup = child_spec(emqx_broker_sup, supervisor),
    CMSup = child_spec(emqx_cm_sup, supervisor),
    SysSup = child_spec(emqx_sys_sup, supervisor),
    ModSup = child_spec(emqx_mod_sup, supervisor),
    Childs = [KernelSup] ++
             [RouterSup || emqx_boot:is_enabled(router)] ++
             [BrokerSup || emqx_boot:is_enabled(broker)] ++
             [CMSup || emqx_boot:is_enabled(broker)] ++
             [SysSup],
             [SysSup] ++ [ModSup],
    SupFlags = #{strategy => one_for_all,
                 intensity => 0,
                 period => 1
@@ -65,7 +65,7 @@ trace(publish, #message{topic = <<"$SYS/", _/binary>>}) ->
    ignore;
trace(publish, #message{from = From, topic = Topic, payload = Payload})
    when is_binary(From); is_atom(From) ->
    emqx_logger:info(#{topic => Topic, mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY} }, "PUBLISH to ~s: ~p", [Topic, Payload]).
    emqx_logger:info(#{topic => Topic, mfa => {?MODULE, ?FUNCTION_NAME, ?FUNCTION_ARITY} }, "PUBLISH to ~s: ~0p", [Topic, Payload]).

%% @doc Start to trace clientid or topic.
-spec(start_trace(trace_who(), logger:level(), string()) -> ok | {error, term()}).
@@ -47,6 +47,7 @@
        , subopts/0
        , reason_code/0
        , alias_id/0
        , topic_aliases/0
        , properties/0
        ]).

@@ -165,6 +166,8 @@
-type(reason_code() :: 0..16#FF).
-type(packet_id() :: 1..16#FFFF).
-type(alias_id() :: 0..16#FFFF).
-type(topic_aliases() :: #{inbound => maybe(map()),
                           outbound => maybe(map())}).
-type(properties() :: #{atom() => term()}).
-type(topic_filters() :: list({topic(), subopts()})).
-type(packet() :: #mqtt_packet{}).
@@ -200,7 +200,7 @@ websocket_init([Req, Opts]) ->
            ?LOG(error, "Illegal cookie"),
            undefined;
        Error:Reason ->
            ?LOG(error, "Failed to parse cookie, Error: ~p, Reason ~p",
            ?LOG(error, "Failed to parse cookie, Error: ~0p, Reason ~0p",
                 [Error, Reason]),
            undefined
    end,
@@ -245,7 +245,7 @@ websocket_handle({binary, Data}, State) when is_list(Data) ->
    websocket_handle({binary, iolist_to_binary(Data)}, State);

websocket_handle({binary, Data}, State) ->
    ?LOG(debug, "RECV ~p", [Data]),
    ?LOG(debug, "RECV ~0p", [Data]),
    ok = inc_recv_stats(1, iolist_size(Data)),
    NState = ensure_stats_timer(State),
    return(parse_incoming(Data, NState));
@@ -458,7 +458,7 @@ parse_incoming(Data, State = #state{parse_state = ParseState}) ->
        parse_incoming(Rest, postpone({incoming, Packet}, NState))
    catch
        error:Reason:Stk ->
            ?LOG(error, "~nParse failed for ~p~n~p~nFrame data: ~p",
            ?LOG(error, "~nParse failed for ~0p~n~0p~nFrame data: ~0p",
                 [Reason, Stk, Data]),
            FrameError = {frame_error, Reason},
            postpone({incoming, FrameError}, State)

@@ -58,6 +58,7 @@ t_subopts(_) ->
    ?assertEqual(undefined, emqx_broker:get_subopts(self(), <<"topic">>)),
    ?assertEqual(undefined, emqx_broker:get_subopts(<<"clientid">>, <<"topic">>)),
    emqx_broker:subscribe(<<"topic">>, <<"clientid">>, #{qos => 1}),
    timer:sleep(200),
    ?assertEqual(#{qos => 1, subid => <<"clientid">>}, emqx_broker:get_subopts(self(), <<"topic">>)),
    ?assertEqual(#{qos => 1, subid => <<"clientid">>}, emqx_broker:get_subopts(<<"clientid">>, <<"topic">>)),
    emqx_broker:subscribe(<<"topic">>, <<"clientid">>, #{qos => 2}),

@@ -482,10 +482,26 @@ t_auth_connect(_) ->

t_process_alias(_) ->
    Publish = #mqtt_packet_publish{topic_name = <<>>, properties = #{'Topic-Alias' => 1}},
    Channel = emqx_channel:set_field(topic_aliases, #{1 => <<"t">>}, channel()),
    Channel = emqx_channel:set_field(topic_aliases, #{inbound => #{1 => <<"t">>}}, channel()),
    {ok, #mqtt_packet{variable = #mqtt_packet_publish{topic_name = <<"t">>}}, _Chan} =
        emqx_channel:process_alias(#mqtt_packet{variable = Publish}, Channel).

t_packing_alias(_) ->
    Packet1 = #mqtt_packet{variable = #mqtt_packet_publish{topic_name = <<"x">>}},
    Packet2 = #mqtt_packet{variable = #mqtt_packet_publish{topic_name = <<"y">>}},
    Channel = emqx_channel:set_field(alias_maximum, #{outbound => 1}, channel()),

    {RePacket1, NChannel1} = emqx_channel:packing_alias(Packet1, Channel),
    ?assertEqual(#mqtt_packet{variable = #mqtt_packet_publish{topic_name = <<"x">>, properties = #{'Topic-Alias' => 1}}}, RePacket1),

    {RePacket2, NChannel2} = emqx_channel:packing_alias(Packet1, NChannel1),
    ?assertEqual(#mqtt_packet{variable = #mqtt_packet_publish{topic_name = <<>>, properties = #{'Topic-Alias' => 1}}}, RePacket2),

    {RePacket3, _} = emqx_channel:packing_alias(Packet2, NChannel2),
    ?assertEqual(#mqtt_packet{variable = #mqtt_packet_publish{topic_name = <<"y">>, properties = undefined}}, RePacket3),

    ?assertMatch({#mqtt_packet{variable = #mqtt_packet_publish{topic_name = <<"z">>}}, _},
                 emqx_channel:packing_alias(#mqtt_packet{variable = #mqtt_packet_publish{topic_name = <<"z">>}}, channel())).

t_check_pub_acl(_) ->
    ok = meck:new(emqx_zone, [passthrough, no_history]),
    ok = meck:expect(emqx_zone, enable_acl, fun(_) -> true end),

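The two channel tests above exercise MQTT v5 topic aliases: inbound, an alias id is resolved back to the topic it was registered for; outbound, the channel assigns alias ids up to the peer's Topic-Alias-Maximum and elides the topic name once an alias is established. A rough sketch of the outbound bookkeeping idea (illustration only, not emqx_channel's actual code; a real implementation also tracks properties differently):

%% Aliases is a map Topic => AliasId; Max is the peer's Topic-Alias-Maximum.
pick_alias(Topic, Aliases, Max) ->
    case maps:find(Topic, Aliases) of
        {ok, Id} ->
            %% Alias already registered: the topic name can be elided.
            {<<>>, #{'Topic-Alias' => Id}, Aliases};
        error when map_size(Aliases) < Max ->
            %% Alias space left: allocate one and send it with the full topic.
            Id = map_size(Aliases) + 1,
            {Topic, #{'Topic-Alias' => Id}, Aliases#{Topic => Id}};
        error ->
            %% No alias space: fall back to the plain topic name.
            {Topic, #{}, Aliases}
    end.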
@@ -42,26 +42,26 @@ end_per_testcase(_TestCase, Config) ->
    Config.

t_is_enabled(_) ->
    application:set_env(emqx, enable_channel_registry, false),
    application:set_env(emqx, enable_session_registry, false),
    ?assertEqual(false, emqx_cm_registry:is_enabled()),
    application:set_env(emqx, enable_channel_registry, true),
    application:set_env(emqx, enable_session_registry, true),
    ?assertEqual(true, emqx_cm_registry:is_enabled()).

t_register_unregister_channel(_) ->
    ClientId = <<"clientid">>,
    application:set_env(emqx, enable_channel_registry, false),
    application:set_env(emqx, enable_session_registry, false),
    emqx_cm_registry:register_channel(ClientId),
    ?assertEqual([], emqx_cm_registry:lookup_channels(ClientId)),

    application:set_env(emqx, enable_channel_registry, true),
    application:set_env(emqx, enable_session_registry, true),
    emqx_cm_registry:register_channel(ClientId),
    ?assertEqual([self()], emqx_cm_registry:lookup_channels(ClientId)),

    application:set_env(emqx, enable_channel_registry, false),
    application:set_env(emqx, enable_session_registry, false),
    emqx_cm_registry:unregister_channel(ClientId),
    ?assertEqual([self()], emqx_cm_registry:lookup_channels(ClientId)),

    application:set_env(emqx, enable_channel_registry, true),
    application:set_env(emqx, enable_session_registry, true),
    emqx_cm_registry:unregister_channel(ClientId),
    ?assertEqual([], emqx_cm_registry:lookup_channels(ClientId)).

@@ -0,0 +1,80 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2020 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------

-module(emqx_mod_delayed_SUITE).

-import(emqx_mod_delayed, [on_message_publish/1]).

-compile(export_all).
-compile(nowarn_export_all).

-record(delayed_message, {key, msg}).

-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("emqx/include/emqx.hrl").

%%--------------------------------------------------------------------
%% Setups
%%--------------------------------------------------------------------

all() ->
    emqx_ct:all(?MODULE).

init_per_suite(Config) ->
    emqx_ct_helpers:start_apps([], fun set_special_configs/1),
    Config.

end_per_suite(_) ->
    emqx_ct_helpers:stop_apps([]).

set_special_configs(emqx) ->
    application:set_env(emqx, modules, [{emqx_mod_delayed, []}]),
    application:set_env(emqx, allow_anonymous, false),
    application:set_env(emqx, enable_acl_cache, false),
    application:set_env(emqx, plugins_loaded_file,
                        emqx_ct_helpers:deps_path(emqx, "test/emqx_SUITE_data/loaded_plugins"));
set_special_configs(_App) ->
    ok.

%%--------------------------------------------------------------------
%% Test cases
%%--------------------------------------------------------------------

t_load_case(_) ->
    ok = emqx_mod_delayed:unload([]),
    timer:sleep(100),
    UnHooks = emqx_hooks:lookup('message.publish'),
    ?assertEqual([], UnHooks),
    ok = emqx_mod_delayed:load([]),
    Hooks = emqx_hooks:lookup('message.publish'),
    ?assertEqual(1, length(Hooks)),
    ok.

t_delayed_message(_) ->
    DelayedMsg = emqx_message:make(?MODULE, 1, <<"$delayed/1/publish">>, <<"delayed_m">>),
    ?assertEqual({stop, DelayedMsg#message{topic = <<"publish">>, headers = #{allow_publish => false}}}, on_message_publish(DelayedMsg)),

    Msg = emqx_message:make(?MODULE, 1, <<"no_delayed_msg">>, <<"no_delayed">>),
    ?assertEqual({ok, Msg}, on_message_publish(Msg)),

    [Key] = mnesia:dirty_all_keys(emqx_mod_delayed),
    [#delayed_message{msg = #message{payload = Payload}}] = mnesia:dirty_read({emqx_mod_delayed, Key}),
    ?assertEqual(<<"delayed_m">>, Payload),
    timer:sleep(5000),

    EmptyKey = mnesia:dirty_all_keys(emqx_mod_delayed),
    ?assertEqual([], EmptyKey).
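The new suite above relies on the delayed-publish topic convention: a message published to $delayed/{DelaySeconds}/{Topic} is intercepted by emqx_mod_delayed (allow_publish is set to false and the prefix is stripped), stored in the emqx_mod_delayed Mnesia table, and republished to {Topic} once the delay elapses. A hedged usage sketch, assuming an already-connected emqtt client C (illustrative only):

%% Ask the broker to deliver the payload to <<"sensor/temp">> roughly
%% 10 seconds from now instead of immediately.
publish_in_10_seconds(C) ->
    emqtt:publish(C, <<"$delayed/10/sensor/temp">>, <<"21.5">>, 0).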
@@ -33,7 +33,7 @@ end_per_suite(_Config) ->
    emqx_ct_helpers:stop_apps([]).

t_on_client_connected(_) ->
    ?assertEqual(ok, emqx_mod_subscription:load([{<<"connected/%c/%u">>, ?QOS_0}])),
    ?assertEqual(ok, emqx_mod_subscription:load([{<<"connected/%c/%u">>, #{qos => ?QOS_0}}])),
    {ok, C} = emqtt:start_link([{host, "localhost"},
                                {clientid, "myclient"},
                                {username, "admin"}]),
@@ -43,18 +43,42 @@ t_on_client_connected(_) ->
    ?assertEqual(<<"connected/myclient/admin">>, Topic),
    ?assertEqual(<<"Hello world">>, Payload),
    ok = emqtt:disconnect(C),
    ?assertEqual(ok, emqx_mod_subscription:unload([{<<"connected/%c/%u">>, ?QOS_0}])).
    ?assertEqual(ok, emqx_mod_subscription:unload([{<<"connected/%c/%u">>, #{qos => ?QOS_0}}])).

t_on_undefined_client_connected(_) ->
    ?assertEqual(ok, emqx_mod_subscription:load([{<<"connected/undefined">>, ?QOS_0}])),
    ?assertEqual(ok, emqx_mod_subscription:load([{<<"connected/undefined">>, #{qos => ?QOS_1}}])),
    {ok, C} = emqtt:start_link([{host, "localhost"}]),
    {ok, _} = emqtt:connect(C),
    emqtt:publish(C, <<"connected/undefined">>, <<"Hello world">>, ?QOS_0),
    emqtt:publish(C, <<"connected/undefined">>, <<"Hello world">>, ?QOS_1),
    {ok, #{topic := Topic, payload := Payload}} = receive_publish(100),
    ?assertEqual(<<"connected/undefined">>, Topic),
    ?assertEqual(<<"Hello world">>, Payload),
    ok = emqtt:disconnect(C),
    ?assertEqual(ok, emqx_mod_subscription:unload([{<<"connected/undefined">>, ?QOS_0}])).
    ?assertEqual(ok, emqx_mod_subscription:unload([{<<"connected/undefined">>, #{qos => ?QOS_1}}])).

t_suboption(_) ->
    Client_info = fun(Key, Client) -> maps:get(Key, maps:from_list(emqtt:info(Client)), undefined) end,
    Suboption = #{qos => ?QOS_2, nl => 1, rap => 1, rh => 2},
    ?assertEqual(ok, emqx_mod_subscription:load([{<<"connected/%c/%u">>, Suboption}])),
    {ok, C1} = emqtt:start_link([{proto_ver, v5}]),
    {ok, _} = emqtt:connect(C1),
    timer:sleep(200),
    [CPid1] = emqx_cm:lookup_channels(Client_info(clientid, C1)),
    [Sub1 | _] = ets:lookup(emqx_subscription, CPid1),
    [Suboption1 | _] = ets:lookup(emqx_suboption, Sub1),
    ?assertMatch({Sub1, #{qos := 2, nl := 1, rap := 1, rh := 2, subid := _}}, Suboption1),
    ok = emqtt:disconnect(C1),
    %% The subscription option is not valid for MQTT V3.1.1
    {ok, C2} = emqtt:start_link([{proto_ver, v4}]),
    {ok, _} = emqtt:connect(C2),
    timer:sleep(200),
    [CPid2] = emqx_cm:lookup_channels(Client_info(clientid, C2)),
    [Sub2 | _] = ets:lookup(emqx_subscription, CPid2),
    [Suboption2 | _] = ets:lookup(emqx_suboption, Sub2),
    ok = emqtt:disconnect(C2),
    ?assertMatch({Sub2, #{qos := 2, nl := 0, rap := 0, rh := 0, subid := _}}, Suboption2),

    ?assertEqual(ok, emqx_mod_subscription:unload([{<<"connected/undefined">>, Suboption}])).

%%--------------------------------------------------------------------
%% Internal functions

@@ -66,4 +90,3 @@ receive_publish(Timeout) ->
    after
        Timeout -> {error, timeout}
    end.

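The connected/%c/%u topics used throughout this suite depend on emqx_mod_subscription's placeholder expansion: %c is replaced by the client id and %u by the username before the proxy subscription is made, which is why the test expects connected/myclient/admin. A simplified sketch of that substitution (hypothetical helper, not the module's real implementation):

%% expand(<<"connected/%c/%u">>, <<"myclient">>, <<"admin">>)
%% => <<"connected/myclient/admin">>
expand(TopicTemplate, ClientId, Username) ->
    T = binary:replace(TopicTemplate, <<"%c">>, ClientId, [global]),
    binary:replace(T, <<"%u">>, Username, [global]).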
@@ -24,7 +24,6 @@

all() -> emqx_ct:all(?MODULE).

init_per_suite(Config) ->
    %% Compile extra plugin code

@@ -55,13 +54,11 @@ end_per_suite(_Config) ->
    emqx_ct_helpers:stop_apps([]).

t_load(_) ->
    ?assertEqual([], emqx_plugins:load()),
    ?assertEqual([], emqx_plugins:unload()),
    ?assertEqual(ok, emqx_plugins:load()),
    ?assertEqual(ok, emqx_plugins:unload()),

    ?assertEqual({error, not_found}, emqx_plugins:load(not_existed_plugin)),
    ?assertMatch(ok, emqx_plugins:load(emqx_mini_plugin)),
    ?assertEqual({error, already_started}, emqx_plugins:load(emqx_mini_plugin)),
    ?assertEqual(ok, emqx_plugins:unload(emqx_mini_plugin)),
    ?assertEqual({error, parse_config_file_failed}, emqx_plugins:load(emqx_mini_plugin)),
    ?assertEqual({error, not_started}, emqx_plugins:unload(emqx_mini_plugin)),

    application:set_env(emqx, expand_plugins_dir, undefined),

@@ -82,10 +79,10 @@ t_load_expand_plugin(_) ->
    ?assertEqual({error, load_app_fail}, emqx_plugins:load_expand_plugin("./not_existed_path/")).

t_list(_) ->
    ?assertMatch([{plugin, _, _, _, _, _, _, _, _} | _], emqx_plugins:list()).
    ?assertMatch([{plugin, _, _, _, _, _, _, _} | _], emqx_plugins:list()).

t_find_plugin(_) ->
    ?assertMatch({plugin, emqx_mini_plugin, _, _, _, _, _, _, _}, emqx_plugins:find_plugin(emqx_mini_plugin)).
    ?assertMatch({plugin, emqx_mini_plugin, _, _, _, _, _, _}, emqx_plugins:find_plugin(emqx_mini_plugin)).

t_plugin_type(_) ->
    ?assertEqual(auth, emqx_plugins:plugin_type(auth)),

@@ -112,7 +109,7 @@ t_plugin(_) ->
        _Error:Reason:_Stacktrace ->
            ?assertEqual({plugin_not_found, not_existed_plugin}, Reason)
    end,
    ?assertMatch({plugin, emqx_mini_plugin, _, _, _, _, _, _, _}, emqx_plugins:plugin(emqx_mini_plugin, undefined)).
    ?assertMatch({plugin, emqx_mini_plugin, _, _, _, _, _, _}, emqx_plugins:plugin(emqx_mini_plugin, undefined)).

t_filter_plugins(_) ->
    ?assertEqual([name1, name2], emqx_plugins:filter_plugins([name1, {name2, true}, {name3, false}])).

@@ -120,27 +117,29 @@ t_filter_plugins(_) ->

t_load_plugin(_) ->
    ok = meck:new(application, [unstick, non_strict, passthrough, no_history]),
    ok = meck:expect(application, load, fun(already_loaded_app) -> {error, {already_loaded, already_loaded_app}};
                                           (error_app) -> {error, error};
                                           (_) -> ok end),
                                           (error_app) -> {error, error};
                                           (_) -> ok end),
    ok = meck:expect(application, ensure_all_started, fun(already_loaded_app) -> {error, {already_loaded_app, already_loaded}};
                                                         (error_app) -> {error, error};
                                                         (App) -> {ok, App} end),

    ?assertMatch({error, _}, emqx_plugins:load_plugin(#plugin{name = already_loaded_app}, true)),
    ?assertMatch(ok, emqx_plugins:load_plugin(#plugin{name = normal}, true)),
    ?assertMatch({error, _}, emqx_plugins:load_plugin(#plugin{name = error_app}, true)),
                                                         (error_app) -> {error, error};
                                                         (App) -> {ok, App} end),
    ok = meck:new(emqx_plugins, [unstick, non_strict, passthrough, no_history]),
    ok = meck:expect(emqx_plugins, generate_configs, fun(_) -> ok end),
    ok = meck:expect(emqx_plugins, apply_configs, fun(_) -> ok end),
    ?assertMatch({error, _}, emqx_plugins:load_plugin(already_loaded_app, true)),
    ?assertMatch(ok, emqx_plugins:load_plugin(normal, true)),
    ?assertMatch({error, _}, emqx_plugins:load_plugin(error_app, true)),

    ok = meck:unload(emqx_plugins),
    ok = meck:unload(application).

t_unload_plugin(_) ->
    ok = meck:new(application, [unstick, non_strict, passthrough, no_history]),
    ok = meck:expect(application, stop, fun(not_started_app) -> {error, {not_started, not_started_app}};
                                           (error_app) -> {error, error};
                                           (_) -> ok end),
                                           (error_app) -> {error, error};
                                           (_) -> ok end),

    ?assertEqual(ok, emqx_plugins:unload_plugin(not_started_app, true)),
    ?assertEqual(ok, emqx_plugins:unload_plugin(normal, true)),
    ?assertEqual({error, error}, emqx_plugins:unload_plugin(error_app, true)),

    ok = meck:unload(application).

@@ -62,7 +62,7 @@ receive_messages(Count, Msgs) ->
            receive_messages(Count-1, [Msg|Msgs]);
        _Other ->
            receive_messages(Count, Msgs)
    after 100 ->
    after 1000 ->
        Msgs
    end.

@@ -605,6 +605,33 @@ t_publish_overlapping_subscriptions(_) ->
%% Subscribe
%%--------------------------------------------------------------------

t_subscribe_topic_alias(_) ->
    Topic1 = nth(1, ?TOPICS),
    Topic2 = nth(2, ?TOPICS),
    {ok, Client1} = emqtt:start_link([{proto_ver, v5},
                                      {properties, #{'Topic-Alias-Maximum' => 1}}
                                     ]),
    {ok, _} = emqtt:connect(Client1),
    {ok, _, [2]} = emqtt:subscribe(Client1, Topic1, qos2),
    {ok, _, [2]} = emqtt:subscribe(Client1, Topic2, qos2),

    ok = emqtt:publish(Client1, Topic1, #{}, <<"Topic-Alias">>, [{qos, ?QOS_0}]),
    [Msg1] = receive_messages(1),
    ?assertEqual({ok, #{'Topic-Alias' => 1}}, maps:find(properties, Msg1)),
    ?assertEqual({ok, Topic1}, maps:find(topic, Msg1)),

    ok = emqtt:publish(Client1, Topic1, #{}, <<"Topic-Alias">>, [{qos, ?QOS_0}]),
    [Msg2] = receive_messages(1),
    ?assertEqual({ok, #{'Topic-Alias' => 1}}, maps:find(properties, Msg2)),
    ?assertEqual({ok, <<>>}, maps:find(topic, Msg2)),

    ok = emqtt:publish(Client1, Topic2, #{}, <<"Topic-Alias">>, [{qos, ?QOS_0}]),
    [Msg3] = receive_messages(1),
    ?assertEqual({ok, #{}}, maps:find(properties, Msg3)),
    ?assertEqual({ok, Topic2}, maps:find(topic, Msg3)),

    ok = emqtt:disconnect(Client1).

t_subscribe_no_local(_) ->
    Topic = nth(1, ?TOPICS),