Merge remote-tracking branch 'upstream/master' into cassa
Commit 8cbbc9f271
@@ -0,0 +1,34 @@
+version: '3.9'
+
+services:
+  mqnamesrv:
+    image: apache/rocketmq:4.9.4
+    container_name: rocketmq_namesrv
+    # ports:
+    #   - 9876:9876
+    volumes:
+      - ./rocketmq/logs:/opt/logs
+      - ./rocketmq/store:/opt/store
+    command: ./mqnamesrv
+    networks:
+      - emqx_bridge
+
+  mqbroker:
+    image: apache/rocketmq:4.9.4
+    container_name: rocketmq_broker
+    # ports:
+    #   - 10909:10909
+    #   - 10911:10911
+    volumes:
+      - ./rocketmq/logs:/opt/logs
+      - ./rocketmq/store:/opt/store
+      - ./rocketmq/conf/broker.conf:/etc/rocketmq/broker.conf
+    environment:
+      NAMESRV_ADDR: "rocketmq_namesrv:9876"
+      JAVA_OPTS: " -Duser.home=/opt"
+      JAVA_OPT_EXT: "-server -Xms1024m -Xmx1024m -Xmn1024m"
+    command: ./mqbroker -c /etc/rocketmq/broker.conf
+    depends_on:
+      - mqnamesrv
+    networks:
+      - emqx_bridge
@@ -22,6 +22,7 @@ services:
       - 15433:5433
       - 16041:6041
       - 18000:8000
+      - 19876:9876
       - 19042:9042
       - 19142:9142
     command:
@@ -0,0 +1,22 @@
+brokerClusterName=DefaultCluster
+brokerName=broker-a
+brokerId=0
+
+brokerIP1=rocketmq_broker
+
+defaultTopicQueueNums=4
+autoCreateTopicEnable=true
+autoCreateSubscriptionGroup=true
+
+listenPort=10911
+deleteWhen=04
+
+fileReservedTime=120
+mapedFileSizeCommitLog=1073741824
+mapedFileSizeConsumeQueue=300000
+diskMaxUsedSpaceRatio=100
+maxMessageSize=65536
+
+brokerRole=ASYNC_MASTER
+
+flushDiskType=ASYNC_FLUSH
@@ -78,6 +78,12 @@
     "upstream": "kafka-1.emqx.net:9295",
     "enabled": true
   },
+  {
+    "name": "rocketmq",
+    "listen": "0.0.0.0:9876",
+    "upstream": "rocketmq_namesrv:9876",
+    "enabled": true
+  },
   {
     "name": "cassa_tcp",
     "listen": "0.0.0.0:9042",
@@ -26,7 +26,7 @@
     {gproc, {git, "https://github.com/uwiger/gproc", {tag, "0.8.0"}}},
     {jiffy, {git, "https://github.com/emqx/jiffy", {tag, "1.0.5"}}},
     {cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.9.0"}}},
-    {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.4"}}},
+    {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.6"}}},
    {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.14.5"}}},
    {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.8.1"}}},
    {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.37.0"}}},
@@ -2128,17 +2128,23 @@ publish_will_msg(
     ClientInfo = #{mountpoint := MountPoint},
     Msg = #message{topic = Topic}
 ) ->
-    case emqx_access_control:authorize(ClientInfo, publish, Topic) of
-        allow ->
-            NMsg = emqx_mountpoint:mount(MountPoint, Msg),
-            _ = emqx_broker:publish(NMsg),
-            ok;
-        deny ->
+    PublishingDisallowed = emqx_access_control:authorize(ClientInfo, publish, Topic) =/= allow,
+    ClientBanned = emqx_banned:check(ClientInfo),
+    case PublishingDisallowed orelse ClientBanned of
+        true ->
             ?tp(
                 warning,
                 last_will_testament_publish_denied,
-                #{topic => Topic}
+                #{
+                    topic => Topic,
+                    client_banned => ClientBanned,
+                    publishing_disallowed => PublishingDisallowed
+                }
             ),
-            ok
+            ok;
+        false ->
+            NMsg = emqx_mountpoint:mount(MountPoint, Msg),
+            _ = emqx_broker:publish(NMsg),
+            ok
     end.
@@ -26,6 +26,8 @@
 -include_lib("emqx/include/emqx_placeholder.hrl").
 -include_lib("snabbkaffe/include/snabbkaffe.hrl").
 
+-import(emqx_common_test_helpers, [on_exit/1]).
+
 all() ->
     emqx_common_test_helpers:all(?MODULE).
 
@@ -65,6 +67,7 @@ end_per_suite(_Config) ->
 
 init_per_testcase(TestCase, Config) when
     TestCase =:= t_subscribe_deny_disconnect_publishes_last_will_testament;
+    TestCase =:= t_publish_last_will_testament_banned_client_connecting;
     TestCase =:= t_publish_deny_disconnect_publishes_last_will_testament
 ->
     {ok, _} = emqx_authz:update(?CMD_REPLACE, []),
@@ -76,11 +79,15 @@ init_per_testcase(_, Config) ->
 
 end_per_testcase(TestCase, _Config) when
     TestCase =:= t_subscribe_deny_disconnect_publishes_last_will_testament;
+    TestCase =:= t_publish_last_will_testament_banned_client_connecting;
     TestCase =:= t_publish_deny_disconnect_publishes_last_will_testament
 ->
     {ok, _} = emqx:update_config([authorization, deny_action], ignore),
+    {ok, _} = emqx_authz:update(?CMD_REPLACE, []),
+    emqx_common_test_helpers:call_janitor(),
     ok;
 end_per_testcase(_TestCase, _Config) ->
+    emqx_common_test_helpers:call_janitor(),
     ok.
 
 set_special_configs(emqx_authz) ->
@@ -396,5 +403,63 @@ t_publish_last_will_testament_denied_topic(_Config) ->
 
     ok.
 
+%% client is allowed by ACL to publish to its LWT topic, is connected,
+%% and then gets banned and kicked out while connected. Should not
+%% publish LWT.
+t_publish_last_will_testament_banned_client_connecting(_Config) ->
+    {ok, _} = emqx_authz:update(?CMD_REPLACE, [?SOURCE7]),
+    Username = <<"some_client">>,
+    ClientId = <<"some_clientid">>,
+    LWTPayload = <<"should not be published">>,
+    LWTTopic = <<"some_client/lwt">>,
+    ok = emqx:subscribe(<<"some_client/lwt">>),
+    {ok, C} = emqtt:start_link([
+        {clientid, ClientId},
+        {username, Username},
+        {will_topic, LWTTopic},
+        {will_payload, LWTPayload}
+    ]),
+    ?assertMatch({ok, _}, emqtt:connect(C)),
+
+    %% Now we ban the client while it is connected.
+    Now = erlang:system_time(second),
+    Who = {username, Username},
+    emqx_banned:create(#{
+        who => Who,
+        by => <<"test">>,
+        reason => <<"test">>,
+        at => Now,
+        until => Now + 120
+    }),
+    on_exit(fun() -> emqx_banned:delete(Who) end),
+    %% Now kick it as we do in the ban API.
+    process_flag(trap_exit, true),
+    ?check_trace(
+        begin
+            ok = emqx_cm:kick_session(ClientId),
+            receive
+                {deliver, LWTTopic, #message{payload = LWTPayload}} ->
+                    error(lwt_should_not_be_published_to_forbidden_topic)
+            after 2_000 -> ok
+            end,
+            ok
+        end,
+        fun(Trace) ->
+            ?assertMatch(
+                [
+                    #{
+                        client_banned := true,
+                        publishing_disallowed := false
+                    }
+                ],
+                ?of_kind(last_will_testament_publish_denied, Trace)
+            ),
+            ok
+        end
+    ),
+    ok = snabbkaffe:stop(),
+
+    ok.
+
 stop_apps(Apps) ->
     lists:foreach(fun application:stop/1, Apps).
@@ -99,13 +99,3 @@
         received := Rcvd
     }
 ).
-
--define(METRICS_EXAMPLE, #{
-    metrics => ?EMPTY_METRICS,
-    node_metrics => [
-        #{
-            node => node(),
-            metrics => ?EMPTY_METRICS
-        }
-    ]
-}).
@@ -68,6 +68,7 @@
     T == matrix;
     T == tdengine;
     T == dynamo;
+    T == rocketmq;
     T == cassandra
 ).
 
@@ -176,22 +176,19 @@ param_path_enable() ->
         }
     )}.
 
-bridge_info_array_example(Method, WithMetrics) ->
-    [Config || #{value := Config} <- maps:values(bridge_info_examples(Method, WithMetrics))].
+bridge_info_array_example(Method) ->
+    lists:map(fun(#{value := Config}) -> Config end, maps:values(bridge_info_examples(Method))).
 
 bridge_info_examples(Method) ->
-    bridge_info_examples(Method, false).
-
-bridge_info_examples(Method, WithMetrics) ->
     maps:merge(
         #{
             <<"webhook_example">> => #{
                 summary => <<"WebHook">>,
-                value => info_example(webhook, Method, WithMetrics)
+                value => info_example(webhook, Method)
             },
             <<"mqtt_example">> => #{
                 summary => <<"MQTT Bridge">>,
-                value => info_example(mqtt, Method, WithMetrics)
+                value => info_example(mqtt, Method)
             }
         },
         ee_bridge_examples(Method)
@@ -204,35 +201,21 @@ ee_bridge_examples(Method) ->
 ee_bridge_examples(_Method) -> #{}.
 -endif.
 
-info_example(Type, Method, WithMetrics) ->
+info_example(Type, Method) ->
     maps:merge(
         info_example_basic(Type),
-        method_example(Type, Method, WithMetrics)
+        method_example(Type, Method)
     ).
 
-method_example(Type, Method, WithMetrics) when Method == get; Method == post ->
+method_example(Type, Method) when Method == get; Method == post ->
     SType = atom_to_list(Type),
     SName = SType ++ "_example",
-    TypeNameExam = #{
+    #{
         type => bin(SType),
         name => bin(SName)
-    },
-    maybe_with_metrics_example(TypeNameExam, Method, WithMetrics);
-method_example(_Type, put, _WithMetrics) ->
-    #{}.
-
-maybe_with_metrics_example(TypeNameExam, get, true) ->
-    TypeNameExam#{
-        metrics => ?EMPTY_METRICS,
-        node_metrics => [
-            #{
-                node => node(),
-                metrics => ?EMPTY_METRICS
-            }
-        ]
     };
-maybe_with_metrics_example(TypeNameExam, _, _) ->
-    TypeNameExam.
+method_example(_Type, put) ->
+    #{}.
 
 info_example_basic(webhook) ->
     #{
@@ -321,7 +304,7 @@ schema("/bridges") ->
             responses => #{
                 200 => emqx_dashboard_swagger:schema_with_example(
                     array(emqx_bridge_schema:get_response()),
-                    bridge_info_array_example(get, true)
+                    bridge_info_array_example(get)
                 )
             }
         },
@@ -602,7 +585,7 @@ maybe_deobfuscate_bridge_probe(Params) ->
     Params.
 
 lookup_from_all_nodes(BridgeType, BridgeName, SuccCode) ->
-    FormatFun = fun format_bridge_info_without_metrics/1,
+    FormatFun = fun format_bridge_info/1,
     do_lookup_from_all_nodes(BridgeType, BridgeName, SuccCode, FormatFun).
 
 lookup_from_all_nodes_metrics(BridgeType, BridgeName, SuccCode) ->
@@ -727,7 +710,7 @@ zip_bridges([BridgesFirstNode | _] = BridgesAllNodes) ->
     lists:foldl(
         fun(#{type := Type, name := Name}, Acc) ->
             Bridges = pick_bridges_by_id(Type, Name, BridgesAllNodes),
-            [format_bridge_info_with_metrics(Bridges) | Acc]
+            [format_bridge_info(Bridges) | Acc]
         end,
         [],
         BridgesFirstNode
@@ -761,24 +744,20 @@ pick_bridges_by_id(Type, Name, BridgesAllNodes) ->
         BridgesAllNodes
     ).
 
-format_bridge_info_with_metrics([FirstBridge | _] = Bridges) ->
-    Res = maps:remove(node, FirstBridge),
+format_bridge_info([FirstBridge | _] = Bridges) ->
+    Res = maps:without([node, metrics], FirstBridge),
     NodeStatus = node_status(Bridges),
-    NodeMetrics = collect_metrics(Bridges),
     redact(Res#{
         status => aggregate_status(NodeStatus),
-        node_status => NodeStatus,
-        metrics => aggregate_metrics(NodeMetrics),
-        node_metrics => NodeMetrics
+        node_status => NodeStatus
     }).
 
-format_bridge_info_without_metrics(Bridges) ->
-    Res = format_bridge_info_with_metrics(Bridges),
-    maps:without([metrics, node_metrics], Res).
-
 format_bridge_metrics(Bridges) ->
-    Res = format_bridge_info_with_metrics(Bridges),
-    maps:with([metrics, node_metrics], Res).
+    NodeMetrics = collect_metrics(Bridges),
+    #{
+        metrics => aggregate_metrics(NodeMetrics),
+        node_metrics => NodeMetrics
+    }.
 
 node_status(Bridges) ->
     [maps:with([node, status, status_reason], B) || B <- Bridges].
@@ -288,8 +288,6 @@ t_http_crud_apis(Config) ->
                 <<"enable">> := true,
                 <<"status">> := _,
                 <<"node_status">> := [_ | _],
-                <<"metrics">> := _,
-                <<"node_metrics">> := [_ | _],
                 <<"url">> := URL2
             }
         ],
@@ -945,6 +943,7 @@ t_metrics(Config) ->
     ),
 
     %ct:pal("---bridge: ~p", [Bridge]),
+    Decoded = emqx_json:decode(Bridge, [return_maps]),
     #{
         <<"type">> := ?BRIDGE_TYPE_HTTP,
         <<"name">> := Name,
@@ -952,7 +951,11 @@ t_metrics(Config) ->
         <<"status">> := _,
         <<"node_status">> := [_ | _],
         <<"url">> := URL1
-    } = emqx_json:decode(Bridge, [return_maps]),
+    } = Decoded,
+
+    %% assert that the bridge return doesn't contain metrics anymore
+    ?assertNot(maps:is_key(<<"metrics">>, Decoded)),
+    ?assertNot(maps:is_key(<<"node_metrics">>, Decoded)),
 
     BridgeID = emqx_bridge_resource:bridge_id(?BRIDGE_TYPE_HTTP, Name),
 
@@ -968,9 +971,9 @@ t_metrics(Config) ->
 
     %% check that the bridge doesn't contain metrics anymore
     {ok, 200, Bridge2Str} = request(get, uri(["bridges", BridgeID]), []),
-    Decoded = emqx_json:decode(Bridge2Str, [return_maps]),
-    ?assertNot(maps:is_key(<<"metrics">>, Decoded)),
-    ?assertNot(maps:is_key(<<"node_metrics">>, Decoded)),
+    Decoded2 = emqx_json:decode(Bridge2Str, [return_maps]),
+    ?assertNot(maps:is_key(<<"metrics">>, Decoded2)),
+    ?assertNot(maps:is_key(<<"node_metrics">>, Decoded2)),
 
     %% send an message to emqx and the message should be forwarded to the HTTP server
     Body = <<"my msg">>,
@@ -1001,16 +1004,13 @@ t_metrics(Config) ->
         emqx_json:decode(Bridge3Str, [return_maps])
     ),
 
-    %% check for non-empty metrics when listing all bridges
+    %% check that metrics isn't returned when listing all bridges
     {ok, 200, BridgesStr} = request(get, uri(["bridges"]), []),
-    ?assertMatch(
-        [
-            #{
-                <<"metrics">> := #{<<"success">> := _},
-                <<"node_metrics">> := [_ | _]
-            }
-        ],
-        emqx_json:decode(BridgesStr, [return_maps])
+    ?assert(
+        lists:all(
+            fun(E) -> not maps:is_key(<<"metrics">>, E) end,
+            emqx_json:decode(BridgesStr, [return_maps])
+        )
     ),
     ok.
 
@@ -45,6 +45,17 @@ For bridges only have ingress direction data flow, it can be set to 0 otherwise
         }
     }
 
+    resume_interval {
+        desc {
+            en: """The interval at which the buffer worker attempts to resend failed requests in the inflight window."""
+            zh: """在发送失败后尝试重传飞行窗口中的请求的时间间隔。"""
+        }
+        label {
+            en: """Resume Interval"""
+            zh: """重试时间间隔"""
+        }
+    }
+
     start_after_created {
         desc {
             en: """Whether start the resource right after created."""
@@ -88,6 +88,8 @@
 -type queue_query() :: ?QUERY(reply_fun(), request(), HasBeenSent :: boolean(), expire_at()).
 -type request() :: term().
 -type request_from() :: undefined | gen_statem:from().
+-type request_timeout() :: infinity | timer:time().
+-type health_check_interval() :: timer:time().
 -type state() :: blocked | running.
 -type inflight_key() :: integer().
 -type data() :: #{
@@ -199,6 +201,8 @@ init({Id, Index, Opts}) ->
     RequestTimeout = maps:get(request_timeout, Opts, ?DEFAULT_REQUEST_TIMEOUT),
     BatchTime0 = maps:get(batch_time, Opts, ?DEFAULT_BATCH_TIME),
     BatchTime = adjust_batch_time(Id, RequestTimeout, BatchTime0),
+    DefaultResumeInterval = default_resume_interval(RequestTimeout, HealthCheckInterval),
+    ResumeInterval = maps:get(resume_interval, Opts, DefaultResumeInterval),
     Data = #{
         id => Id,
         index => Index,
@@ -207,7 +211,7 @@ init({Id, Index, Opts}) ->
         batch_size => BatchSize,
         batch_time => BatchTime,
         queue => Queue,
-        resume_interval => maps:get(resume_interval, Opts, HealthCheckInterval),
+        resume_interval => ResumeInterval,
         tref => undefined
     },
     ?tp(buffer_worker_init, #{id => Id, index => Index}),
@@ -1679,6 +1683,17 @@ adjust_batch_time(Id, RequestTimeout, BatchTime0) ->
     end,
     BatchTime.
 
+%% The request timeout should be greater than the resume interval, as
+%% it defines how often the buffer worker tries to unblock. If request
+%% timeout is <= resume interval and the buffer worker is ever
+%% blocked, then all queued requests will basically fail without being
+%% attempted.
+-spec default_resume_interval(request_timeout(), health_check_interval()) -> timer:time().
+default_resume_interval(_RequestTimeout = infinity, HealthCheckInterval) ->
+    max(1, HealthCheckInterval);
+default_resume_interval(RequestTimeout, HealthCheckInterval) ->
+    max(1, min(HealthCheckInterval, RequestTimeout div 3)).
+
 -ifdef(TEST).
 -include_lib("eunit/include/eunit.hrl").
 adjust_batch_time_test_() ->
@@ -388,6 +388,7 @@ handle_event(state_timeout, health_check, connecting, Data) ->
 handle_event(enter, _OldState, connected = State, Data) ->
     ok = log_state_consistency(State, Data),
     _ = emqx_alarm:deactivate(Data#data.id),
+    ?tp(resource_connected_enter, #{}),
     {keep_state_and_data, health_check_actions(Data)};
 handle_event(state_timeout, health_check, connected, Data) ->
     handle_connected_health_check(Data);
@@ -55,6 +55,7 @@ fields("creation_opts") ->
     [
         {worker_pool_size, fun worker_pool_size/1},
         {health_check_interval, fun health_check_interval/1},
+        {resume_interval, fun resume_interval/1},
         {start_after_created, fun start_after_created/1},
         {start_timeout, fun start_timeout/1},
         {auto_restart_interval, fun auto_restart_interval/1},
@@ -81,6 +82,12 @@ worker_pool_size(default) -> ?WORKER_POOL_SIZE;
 worker_pool_size(required) -> false;
 worker_pool_size(_) -> undefined.
 
+resume_interval(type) -> emqx_schema:duration_ms();
+resume_interval(hidden) -> true;
+resume_interval(desc) -> ?DESC("resume_interval");
+resume_interval(required) -> false;
+resume_interval(_) -> undefined.
+
 health_check_interval(type) -> emqx_schema:duration_ms();
 health_check_interval(desc) -> ?DESC("health_check_interval");
 health_check_interval(default) -> ?HEALTHCHECK_INTERVAL_RAW;
@@ -0,0 +1 @@
+Metrics are now only exposed via the /bridges/:id/metrics endpoint. Metrics are no longer returned in other API operations such as getting the list of all bridges, or in the response when a bridge has been created.
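A minimal sketch of what this split looks like in practice, not part of the diff: it assumes the test suite's `request/3` and `uri/1` helpers shown above, an existing `BridgeID`, and that the metrics endpoint returns the `metrics`/`node_metrics` keys produced by `format_bridge_metrics/1`; the function name is hypothetical.

    %% Sketch only: the bridge object no longer embeds metrics; they are
    %% fetched from the dedicated /bridges/:id/metrics endpoint instead.
    metrics_split_example(BridgeID) ->
        {ok, 200, BridgeStr} = request(get, uri(["bridges", BridgeID]), []),
        Bridge = emqx_json:decode(BridgeStr, [return_maps]),
        false = maps:is_key(<<"metrics">>, Bridge),
        false = maps:is_key(<<"node_metrics">>, Bridge),
        {ok, 200, MetricsStr} = request(get, uri(["bridges", BridgeID, "metrics"]), []),
        #{<<"metrics">> := _, <<"node_metrics">> := _} =
            emqx_json:decode(MetricsStr, [return_maps]),
        ok.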
@@ -0,0 +1 @@
+现在只有显式调用 `/bridges/:id/metrics` 接口时才可以获得指标数据,而其他 API 接口将不再返回相关数据。
@@ -0,0 +1,8 @@
+Change the default `resume_interval` for bridges and connectors to be
+the minimum of `health_check_interval` and `request_timeout / 3`.
+Also exposes it as a hidden configuration to allow fine tuning.
+
+Before this change, the default values for `resume_interval` meant
+that, if a buffer ever got blocked due to resource errors or high
+message volumes, then, by the time the buffer would try to resume its
+normal operations, almost all requests would have timed out.
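For concreteness, a small eunit-style sketch of how the new default falls out of the `default_resume_interval/2` clauses added above; the 15-second figures and the test name are illustrative, and such a check would sit in the buffer worker's existing `-ifdef(TEST)` section, which already includes eunit.

    %% With request_timeout = 15_000 ms and health_check_interval = 15_000 ms,
    %% the default resume interval is max(1, min(15_000, 15_000 div 3)) = 5_000 ms.
    %% An infinite request timeout falls back to the health check interval.
    default_resume_interval_example_test() ->
        ?assertEqual(5000, default_resume_interval(15000, 15000)),
        ?assertEqual(15000, default_resume_interval(infinity, 15000)).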
@@ -0,0 +1,2 @@
+Upgrade library `esockd` from 5.9.4 to 5.9.6.
+Fix an unnecessary error level logging when a connection is closed before proxy protocol header is sent by the proxy.
@@ -0,0 +1,2 @@
+依赖库 `esockd` 从 5.9.4 升级到 5.9.6。
+修复了一个不必要的错误日志。如果连接在 proxy protocol 包头还没有发送前就关闭了, 则不打印错误日志。
@@ -1 +0,0 @@
-Fix the issue where nodes responses to the list bridges RPC were incorrectly flattened, which caused List Bridges API HTTP handler to crash when there was more than 1 node in the cluster.
@@ -0,0 +1,2 @@
+Fix bug where a last will testament (LWT) message could be published
+when kicking out a banned client.
@@ -0,0 +1 @@
+Add `RocketMQ` data integration bridge.
@@ -0,0 +1 @@
+为数据桥接增加 `RocketMQ` 支持。
@@ -75,7 +75,7 @@ spec:
           secretName: {{ .Values.emqxLicenseSecretName }}
       {{- end }}
       {{- if .Values.extraVolumes }}
-      {{- toYaml .Values.extraVolumes | nindent 8 }}
+      {{- toYaml .Values.extraVolumes | nindent 6 }}
       {{- end }}
       {{- if .Values.podSecurityContext.enabled }}
       securityContext: {{- omit .Values.podSecurityContext "enabled" | toYaml | nindent 8 }}
@@ -142,7 +142,7 @@ spec:
             readOnly: true
           {{- end }}
          {{- if .Values.extraVolumeMounts }}
-         {{- toYaml .Values.extraVolumeMounts | nindent 12 }}
+         {{- toYaml .Values.extraVolumeMounts | nindent 10 }}
          {{- end }}
          readinessProbe:
            httpGet:
@@ -75,7 +75,7 @@ spec:
           secretName: {{ .Values.emqxLicenseSecretName }}
       {{- end }}
      {{- if .Values.extraVolumes }}
-      {{- toYaml .Values.extraVolumes | nindent 8 }}
+      {{- toYaml .Values.extraVolumes | nindent 6 }}
      {{- end }}
      {{- if .Values.podSecurityContext.enabled }}
      securityContext: {{- omit .Values.podSecurityContext "enabled" | toYaml | nindent 8 }}
@@ -142,7 +142,7 @@ spec:
             readOnly: true
          {{- end }}
          {{- if .Values.extraVolumeMounts }}
-         {{- toYaml .Values.extraVolumeMounts | nindent 12 }}
+         {{- toYaml .Values.extraVolumeMounts | nindent 10 }}
          {{- end }}
          readinessProbe:
            httpGet:
@@ -10,4 +10,5 @@ pgsql
 tdengine
 clickhouse
 dynamo
+rocketmq
 cassandra
@@ -0,0 +1,70 @@
+emqx_ee_bridge_rocketmq {
+
+    local_topic {
+        desc {
+            en: """The MQTT topic filter to be forwarded to RocketMQ. All MQTT `PUBLISH` messages with the topic
+matching the `local_topic` will be forwarded.</br>
+NOTE: if the bridge is used as a rule action, `local_topic` should be left empty otherwise the messages will be duplicated."""
+            zh: """发送到 'local_topic' 的消息都会转发到 RocketMQ。 </br>
+注意:如果这个 Bridge 被用作规则(EMQX 规则引擎)的输出,同时也配置了 'local_topic' ,那么这两部分的消息都会被转发。"""
+        }
+        label {
+            en: "Local Topic"
+            zh: "本地 Topic"
+        }
+    }
+
+    template {
+        desc {
+            en: """Template, the default value is empty. When this value is empty the whole message will be stored in the RocketMQ"""
+            zh: """模板, 默认为空,为空时将会将整个消息转发给 RocketMQ"""
+        }
+        label {
+            en: "Template"
+            zh: "模板"
+        }
+    }
+    config_enable {
+        desc {
+            en: """Enable or disable this bridge"""
+            zh: """启用/禁用桥接"""
+        }
+        label {
+            en: "Enable Or Disable Bridge"
+            zh: "启用/禁用桥接"
+        }
+    }
+
+    desc_config {
+        desc {
+            en: """Configuration for a RocketMQ bridge."""
+            zh: """RocketMQ 桥接配置"""
+        }
+        label: {
+            en: "RocketMQ Bridge Configuration"
+            zh: "RocketMQ 桥接配置"
+        }
+    }
+
+    desc_type {
+        desc {
+            en: """The Bridge Type"""
+            zh: """Bridge 类型"""
+        }
+        label {
+            en: "Bridge Type"
+            zh: "桥接类型"
+        }
+    }
+
+    desc_name {
+        desc {
+            en: """Bridge name."""
+            zh: """桥接名字"""
+        }
+        label {
+            en: "Bridge Name"
+            zh: "桥接名字"
+        }
+    }
+}
@@ -33,6 +33,7 @@ api_schemas(Method) ->
         ref(emqx_ee_bridge_tdengine, Method),
         ref(emqx_ee_bridge_clickhouse, Method),
         ref(emqx_ee_bridge_dynamo, Method),
+        ref(emqx_ee_bridge_rocketmq, Method),
         ref(emqx_ee_bridge_cassa, Method)
     ].
 
@@ -51,6 +52,7 @@ schema_modules() ->
         emqx_ee_bridge_tdengine,
         emqx_ee_bridge_clickhouse,
         emqx_ee_bridge_dynamo,
+        emqx_ee_bridge_rocketmq,
         emqx_ee_bridge_cassa
     ].
 
@@ -88,6 +90,7 @@ resource_type(matrix) -> emqx_connector_pgsql;
 resource_type(tdengine) -> emqx_ee_connector_tdengine;
 resource_type(clickhouse) -> emqx_ee_connector_clickhouse;
 resource_type(dynamo) -> emqx_ee_connector_dynamo;
+resource_type(rocketmq) -> emqx_ee_connector_rocketmq;
 resource_type(cassandra) -> emqx_ee_connector_cassa.
 
 fields(bridges) ->
@@ -132,6 +135,14 @@ fields(bridges) ->
                     required => false
                 }
             )},
+        {rocketmq,
+            mk(
+                hoconsc:map(name, ref(emqx_ee_bridge_rocketmq, "config")),
+                #{
+                    desc => <<"RocketMQ Bridge Config">>,
+                    required => false
+                }
+            )},
         {cassandra,
             mk(
                 hoconsc:map(name, ref(emqx_ee_bridge_cassa, "config")),
@@ -41,9 +41,7 @@ conn_bridge_examples(Method) ->
         }
     ].
 
-values(get, Type) ->
-    maps:merge(values(post, Type), ?METRICS_EXAMPLE);
-values(post, Type) ->
+values(_Method, Type) ->
     #{
         enable => true,
         type => Type,
@@ -65,9 +63,7 @@ values(post, Type) ->
         query_mode => async,
         max_queue_bytes => ?DEFAULT_QUEUE_SIZE
     }
-};
-values(put, Type) ->
-    values(post, Type).
+}.
 
 %% -------------------------------------------------------------------------------------------------
 %% Hocon Schema Definitions
@@ -37,9 +37,7 @@ conn_bridge_examples(Method) ->
         }
     ].
 
-values(get) ->
-    maps:merge(values(post), ?METRICS_EXAMPLE);
-values(post) ->
+values(_Method) ->
     #{
         enable => true,
         type => dynamo,
@@ -60,9 +58,7 @@ values(post) ->
         query_mode => sync,
         max_queue_bytes => ?DEFAULT_QUEUE_SIZE
     }
-};
-values(put) ->
-    values(post).
+}.
 
 %% -------------------------------------------------------------------------------------------------
 %% Hocon Schema Definitions
@@ -4,7 +4,6 @@
 
 -module(emqx_ee_bridge_gcp_pubsub).
 
--include_lib("emqx_bridge/include/emqx_bridge.hrl").
 -include_lib("typerefl/include/types.hrl").
 -include_lib("hocon/include/hoconsc.hrl").
 
|
||||||
}
|
}
|
||||||
].
|
].
|
||||||
|
|
||||||
values(get) ->
|
values(_Method) ->
|
||||||
maps:merge(values(post), ?METRICS_EXAMPLE);
|
|
||||||
values(post) ->
|
|
||||||
#{
|
#{
|
||||||
pubsub_topic => <<"mytopic">>,
|
pubsub_topic => <<"mytopic">>,
|
||||||
service_account_json =>
|
service_account_json =>
|
||||||
|
@@ -176,9 +173,7 @@ values(post) ->
             <<"https://oauth2.googleapis.com/token">>,
         type => <<"service_account">>
     }
-};
-values(put) ->
-    values(post).
+}.
 
 %%-------------------------------------------------------------------------------------------------
 %% Helper fns
@@ -5,7 +5,6 @@
 
 -include_lib("typerefl/include/types.hrl").
 -include_lib("hocon/include/hoconsc.hrl").
--include_lib("emqx_bridge/include/emqx_bridge.hrl").
 
 -import(hoconsc, [mk/2, enum/1, ref/2]).
 
@@ -33,9 +32,7 @@ conn_bridge_examples(Method) ->
         }
     ].
 
-values(get) ->
-    maps:merge(values(post), ?METRICS_EXAMPLE);
-values(post) ->
+values(_Method) ->
     #{
         type => hstreamdb,
         name => <<"demo">>,
@@ -44,9 +41,7 @@ values(post) ->
         direction => egress,
         local_topic => <<"local/topic/#">>,
         payload => <<"${payload}">>
-    };
-values(put) ->
-    values(post).
+    }.
 
 %% -------------------------------------------------------------------------------------------------
 %% Hocon Schema Definitions
@@ -4,7 +4,6 @@
 -module(emqx_ee_bridge_influxdb).
 
 -include_lib("emqx/include/logger.hrl").
--include_lib("emqx_bridge/include/emqx_bridge.hrl").
 -include_lib("emqx_connector/include/emqx_connector.hrl").
 -include_lib("typerefl/include/types.hrl").
 -include_lib("hocon/include/hoconsc.hrl").
@@ -47,7 +46,7 @@ conn_bridge_examples(Method) ->
     ].
 
 values(Protocol, get) ->
-    maps:merge(values(Protocol, post), ?METRICS_EXAMPLE);
+    values(Protocol, post);
 values("influxdb_api_v2", post) ->
     SupportUint = <<"uint_value=${payload.uint_key}u,">>,
     TypeOpts = #{
@@ -3,7 +3,6 @@
 %%--------------------------------------------------------------------
 -module(emqx_ee_bridge_kafka).
 
--include_lib("emqx_bridge/include/emqx_bridge.hrl").
 -include_lib("emqx_connector/include/emqx_connector.hrl").
 -include_lib("typerefl/include/types.hrl").
 -include_lib("hocon/include/hoconsc.hrl").
@@ -55,7 +54,7 @@ conn_bridge_examples(Method) ->
     ].
 
 values({get, KafkaType}) ->
-    maps:merge(values({post, KafkaType}), ?METRICS_EXAMPLE);
+    values({post, KafkaType});
 values({post, KafkaType}) ->
     maps:merge(values(common_config), values(KafkaType));
 values({put, KafkaType}) ->
@@ -5,7 +5,6 @@
 
 -include_lib("typerefl/include/types.hrl").
 -include_lib("hocon/include/hoconsc.hrl").
--include_lib("emqx_bridge/include/emqx_bridge.hrl").
 
 -import(hoconsc, [mk/2, enum/1, ref/2]).
 
@@ -156,9 +155,6 @@ values(common, MongoType, Method, TypeOpts) ->
     Vals0 = maps:merge(MethodVals, Common),
     maps:merge(Vals0, TypeOpts).
 
-method_values(MongoType, get) ->
-    Vals = method_values(MongoType, post),
-    maps:merge(?METRICS_EXAMPLE, Vals);
 method_values(MongoType, _) ->
     ConnectorType =
         case MongoType of
@@ -5,7 +5,6 @@
 
 -include_lib("typerefl/include/types.hrl").
 -include_lib("hocon/include/hoconsc.hrl").
--include_lib("emqx_bridge/include/emqx_bridge.hrl").
 -include_lib("emqx_resource/include/emqx_resource.hrl").
 
 -import(hoconsc, [mk/2, enum/1, ref/2]).
@@ -39,9 +38,7 @@ conn_bridge_examples(Method) ->
         }
     ].
 
-values(get) ->
-    maps:merge(values(post), ?METRICS_EXAMPLE);
-values(post) ->
+values(_Method) ->
     #{
         enable => true,
         type => mysql,
|
@ -62,9 +59,7 @@ values(post) ->
|
||||||
query_mode => async,
|
query_mode => async,
|
||||||
max_queue_bytes => ?DEFAULT_QUEUE_SIZE
|
max_queue_bytes => ?DEFAULT_QUEUE_SIZE
|
||||||
}
|
}
|
||||||
};
|
}.
|
||||||
values(put) ->
|
|
||||||
values(post).
|
|
||||||
|
|
||||||
%% -------------------------------------------------------------------------------------------------
|
%% -------------------------------------------------------------------------------------------------
|
||||||
%% Hocon Schema Definitions
|
%% Hocon Schema Definitions
|
||||||
|
|
|
@@ -5,7 +5,6 @@
 
 -include_lib("typerefl/include/types.hrl").
 -include_lib("hocon/include/hoconsc.hrl").
--include_lib("emqx_bridge/include/emqx_bridge.hrl").
 -include_lib("emqx_resource/include/emqx_resource.hrl").
 
 -import(hoconsc, [mk/2, enum/1, ref/2]).
@@ -41,9 +40,7 @@ conn_bridge_examples(Method) ->
         }
     ].
 
-values(get, Type) ->
-    maps:merge(values(post, Type), ?METRICS_EXAMPLE);
-values(post, Type) ->
+values(_Method, Type) ->
     #{
         enable => true,
         type => Type,
|
@ -64,9 +61,7 @@ values(post, Type) ->
|
||||||
query_mode => async,
|
query_mode => async,
|
||||||
max_queue_bytes => ?DEFAULT_QUEUE_SIZE
|
max_queue_bytes => ?DEFAULT_QUEUE_SIZE
|
||||||
}
|
}
|
||||||
};
|
}.
|
||||||
values(put, Type) ->
|
|
||||||
values(post, Type).
|
|
||||||
|
|
||||||
%% -------------------------------------------------------------------------------------------------
|
%% -------------------------------------------------------------------------------------------------
|
||||||
%% Hocon Schema Definitions
|
%% Hocon Schema Definitions
|
||||||
|
|
|
@@ -3,7 +3,6 @@
 %%--------------------------------------------------------------------
 -module(emqx_ee_bridge_redis).
 
--include_lib("emqx_bridge/include/emqx_bridge.hrl").
 -include_lib("typerefl/include/types.hrl").
 -include_lib("hocon/include/hoconsc.hrl").
 
@@ -46,7 +45,7 @@ conn_bridge_examples(Method) ->
     ].
 
 values(Protocol, get) ->
-    maps:merge(values(Protocol, post), ?METRICS_EXAMPLE);
+    values(Protocol, post);
 values("single", post) ->
     SpecificOpts = #{
         server => <<"127.0.0.1:6379">>,
@@ -0,0 +1,120 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2022 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%--------------------------------------------------------------------
+-module(emqx_ee_bridge_rocketmq).
+
+-include_lib("typerefl/include/types.hrl").
+-include_lib("hocon/include/hoconsc.hrl").
+-include_lib("emqx_bridge/include/emqx_bridge.hrl").
+-include_lib("emqx_resource/include/emqx_resource.hrl").
+
+-import(hoconsc, [mk/2, enum/1, ref/2]).
+
+-export([
+    conn_bridge_examples/1,
+    values/1
+]).
+
+-export([
+    namespace/0,
+    roots/0,
+    fields/1,
+    desc/1
+]).
+
+-define(DEFAULT_TEMPLATE, <<>>).
+-define(DEFFAULT_REQ_TIMEOUT, <<"15s">>).
+
+%% -------------------------------------------------------------------------------------------------
+%% api
+
+conn_bridge_examples(Method) ->
+    [
+        #{
+            <<"rocketmq">> => #{
+                summary => <<"RocketMQ Bridge">>,
+                value => values(Method)
+            }
+        }
+    ].
+
+values(get) ->
+    values(post);
+values(post) ->
+    #{
+        enable => true,
+        type => rocketmq,
+        name => <<"foo">>,
+        server => <<"127.0.0.1:9876">>,
+        topic => <<"TopicTest">>,
+        template => ?DEFAULT_TEMPLATE,
+        local_topic => <<"local/topic/#">>,
+        resource_opts => #{
+            worker_pool_size => 1,
+            health_check_interval => ?HEALTHCHECK_INTERVAL_RAW,
+            auto_restart_interval => ?AUTO_RESTART_INTERVAL_RAW,
+            batch_size => ?DEFAULT_BATCH_SIZE,
+            batch_time => ?DEFAULT_BATCH_TIME,
+            query_mode => sync,
+            max_queue_bytes => ?DEFAULT_QUEUE_SIZE
+        }
+    };
+values(put) ->
+    values(post).
+
+%% -------------------------------------------------------------------------------------------------
+%% Hocon Schema Definitions
+namespace() -> "bridge_rocketmq".
+
+roots() -> [].
+
+fields("config") ->
+    [
+        {enable, mk(boolean(), #{desc => ?DESC("config_enable"), default => true})},
+        {template,
+            mk(
+                binary(),
+                #{desc => ?DESC("template"), default => ?DEFAULT_TEMPLATE}
+            )},
+        {local_topic,
+            mk(
+                binary(),
+                #{desc => ?DESC("local_topic"), required => false}
+            )},
+        {resource_opts,
+            mk(
+                ref(?MODULE, "creation_opts"),
+                #{
+                    required => false,
+                    default => #{<<"request_timeout">> => ?DEFFAULT_REQ_TIMEOUT},
+                    desc => ?DESC(emqx_resource_schema, <<"resource_opts">>)
+                }
+            )}
+    ] ++
+        (emqx_ee_connector_rocketmq:fields(config) --
+            emqx_connector_schema_lib:prepare_statement_fields());
+fields("creation_opts") ->
+    emqx_resource_schema:fields("creation_opts_sync_only");
+fields("post") ->
+    [type_field(), name_field() | fields("config")];
+fields("put") ->
+    fields("config");
+fields("get") ->
+    emqx_bridge_schema:status_fields() ++ fields("post").
+
+desc("config") ->
+    ?DESC("desc_config");
+desc(Method) when Method =:= "get"; Method =:= "put"; Method =:= "post" ->
+    ["Configuration for RocketMQ using `", string:to_upper(Method), "` method."];
+desc("creation_opts" = Name) ->
+    emqx_resource_schema:desc(Name);
+desc(_) ->
+    undefined.
+
+%% -------------------------------------------------------------------------------------------------
+
+type_field() ->
+    {type, mk(enum([rocketmq]), #{required => true, desc => ?DESC("desc_type")})}.
+
+name_field() ->
+    {name, mk(binary(), #{required => true, desc => ?DESC("desc_name")})}.
@@ -5,7 +5,6 @@
 
 -include_lib("typerefl/include/types.hrl").
 -include_lib("hocon/include/hoconsc.hrl").
--include_lib("emqx_bridge/include/emqx_bridge.hrl").
 -include_lib("emqx_resource/include/emqx_resource.hrl").
 
 -import(hoconsc, [mk/2, enum/1, ref/2]).
@@ -40,9 +39,7 @@ conn_bridge_examples(Method) ->
         }
     ].
 
-values(get) ->
-    maps:merge(values(post), ?METRICS_EXAMPLE);
-values(post) ->
+values(_Method) ->
     #{
         enable => true,
         type => tdengine,
@@ -63,9 +60,7 @@ values(post) ->
         query_mode => sync,
         max_queue_bytes => ?DEFAULT_QUEUE_SIZE
     }
-};
-values(put) ->
-    values(post).
+}.
 
 %% -------------------------------------------------------------------------------------------------
 %% Hocon Schema Definitions
@@ -1623,7 +1623,11 @@ t_bridge_rule_action_source(Config) ->
                 },
                 emqx_json:decode(RawPayload, [return_maps])
             ),
-            ?assertEqual(1, emqx_resource_metrics:received_get(ResourceId)),
+            ?retry(
+                _Interval = 200,
+                _NAttempts = 20,
+                ?assertEqual(1, emqx_resource_metrics:received_get(ResourceId))
+            ),
             ok
         end
     ),
@@ -83,9 +83,10 @@ end_per_suite(_Config) ->
     ok = emqx_common_test_helpers:stop_apps([emqx_bridge, emqx_conf]),
     ok.
 
-init_per_testcase(_Testcase, Config) ->
+init_per_testcase(TestCase, Config) ->
     create_table(Config),
-    Config.
+    ok = snabbkaffe:start_trace(),
+    [{dynamo_name, atom_to_binary(TestCase)} | Config].
 
 end_per_testcase(_Testcase, Config) ->
     ProxyHost = ?config(proxy_host, Config),
@@ -93,7 +94,7 @@ end_per_testcase(_Testcase, Config) ->
     emqx_common_test_helpers:reset_proxy(ProxyHost, ProxyPort),
     ok = snabbkaffe:stop(),
     delete_table(Config),
-    delete_bridge(Config),
+    delete_all_bridges(),
     ok.
 
 %%------------------------------------------------------------------------------
@@ -186,15 +187,22 @@ parse_and_check(ConfigString, BridgeType, Name) ->
     Config.
 
 create_bridge(Config) ->
-    BridgeType = ?config(dynamo_bridge_type, Config),
-    Name = ?config(dynamo_name, Config),
-    TDConfig = ?config(dynamo_config, Config),
-    emqx_bridge:create(BridgeType, Name, TDConfig).
+    create_bridge(Config, _Overrides = #{}).
 
-delete_bridge(Config) ->
+create_bridge(Config, Overrides) ->
     BridgeType = ?config(dynamo_bridge_type, Config),
     Name = ?config(dynamo_name, Config),
-    emqx_bridge:remove(BridgeType, Name).
+    DynamoConfig0 = ?config(dynamo_config, Config),
+    DynamoConfig = emqx_map_lib:deep_merge(DynamoConfig0, Overrides),
+    emqx_bridge:create(BridgeType, Name, DynamoConfig).
+
+delete_all_bridges() ->
+    lists:foreach(
+        fun(#{name := Name, type := Type}) ->
+            emqx_bridge:remove(Type, Name)
+        end,
+        emqx_bridge:list()
+    ).
 
 create_bridge_http(Params) ->
     Path = emqx_mgmt_api_test_util:api_path(["bridges"]),
|
@ -327,9 +335,11 @@ t_setup_via_http_api_and_publish(Config) ->
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
t_get_status(Config) ->
|
t_get_status(Config) ->
|
||||||
?assertMatch(
|
{{ok, _}, {ok, _}} =
|
||||||
{ok, _},
|
?wait_async_action(
|
||||||
create_bridge(Config)
|
create_bridge(Config),
|
||||||
|
#{?snk_kind := resource_connected_enter},
|
||||||
|
20_000
|
||||||
),
|
),
|
||||||
|
|
||||||
ProxyPort = ?config(proxy_port, Config),
|
ProxyPort = ?config(proxy_port, Config),
|
||||||
|
@ -359,7 +369,12 @@ t_write_failure(Config) ->
|
||||||
ProxyName = ?config(proxy_name, Config),
|
ProxyName = ?config(proxy_name, Config),
|
||||||
ProxyPort = ?config(proxy_port, Config),
|
ProxyPort = ?config(proxy_port, Config),
|
||||||
ProxyHost = ?config(proxy_host, Config),
|
ProxyHost = ?config(proxy_host, Config),
|
||||||
{ok, _} = create_bridge(Config),
|
{{ok, _}, {ok, _}} =
|
||||||
|
?wait_async_action(
|
||||||
|
create_bridge(Config),
|
||||||
|
#{?snk_kind := resource_connected_enter},
|
||||||
|
20_000
|
||||||
|
),
|
||||||
SentData = #{id => emqx_misc:gen_id(), payload => ?PAYLOAD},
|
SentData = #{id => emqx_misc:gen_id(), payload => ?PAYLOAD},
|
||||||
emqx_common_test_helpers:with_failure(down, ProxyName, ProxyHost, ProxyPort, fun() ->
|
emqx_common_test_helpers:with_failure(down, ProxyName, ProxyHost, ProxyPort, fun() ->
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
|
@ -372,7 +387,12 @@ t_write_timeout(Config) ->
|
||||||
ProxyName = ?config(proxy_name, Config),
|
ProxyName = ?config(proxy_name, Config),
|
||||||
ProxyPort = ?config(proxy_port, Config),
|
ProxyPort = ?config(proxy_port, Config),
|
||||||
ProxyHost = ?config(proxy_host, Config),
|
ProxyHost = ?config(proxy_host, Config),
|
||||||
{ok, _} = create_bridge(Config),
|
{{ok, _}, {ok, _}} =
|
||||||
|
?wait_async_action(
|
||||||
|
create_bridge(Config),
|
||||||
|
#{?snk_kind := resource_connected_enter},
|
||||||
|
20_000
|
||||||
|
),
|
||||||
SentData = #{id => emqx_misc:gen_id(), payload => ?PAYLOAD},
|
SentData = #{id => emqx_misc:gen_id(), payload => ?PAYLOAD},
|
||||||
emqx_common_test_helpers:with_failure(timeout, ProxyName, ProxyHost, ProxyPort, fun() ->
|
emqx_common_test_helpers:with_failure(timeout, ProxyName, ProxyHost, ProxyPort, fun() ->
|
||||||
?assertMatch(
|
?assertMatch(
|
||||||
|
|
|
@@ -520,6 +520,7 @@ wait_until_gauge_is(GaugeName, ExpectedValue, Timeout) ->
         #{measurements := #{gauge_set := ExpectedValue}} ->
             ok;
         #{measurements := #{gauge_set := Value}} ->
+            ct:pal("events: ~p", [Events]),
             ct:fail(
                 "gauge ~p didn't reach expected value ~p; last value: ~p",
                 [GaugeName, ExpectedValue, Value]
@@ -972,7 +973,13 @@ t_publish_econnrefused(Config) ->
     ResourceId = ?config(resource_id, Config),
     %% set pipelining to 1 so that one of the 2 requests is `pending'
     %% in ehttpc.
-    {ok, _} = create_bridge(Config, #{<<"pipelining">> => 1}),
+    {ok, _} = create_bridge(
+        Config,
+        #{
+            <<"pipelining">> => 1,
+            <<"resource_opts">> => #{<<"resume_interval">> => <<"15s">>}
+        }
+    ),
     {ok, #{<<"id">> := RuleId}} = create_rule_and_action_http(Config),
     on_exit(fun() -> ok = emqx_rule_engine:delete_rule(RuleId) end),
     assert_empty_metrics(ResourceId),
@ -986,7 +993,10 @@ t_publish_timeout(Config) ->
    %% requests are done separately.
    {ok, _} = create_bridge(Config, #{
        <<"pipelining">> => 1,
-        <<"resource_opts">> => #{<<"batch_size">> => 1}
+        <<"resource_opts">> => #{
+            <<"batch_size">> => 1,
+            <<"resume_interval">> => <<"15s">>
+        }
    }),
    {ok, #{<<"id">> := RuleId}} = create_rule_and_action_http(Config),
    on_exit(fun() -> ok = emqx_rule_engine:delete_rule(RuleId) end),
@ -210,8 +210,7 @@ t_check_values(_Config) ->
    lists:foreach(
        fun(Method) ->
            lists:foreach(
-                fun({RedisType, #{value := Value0}}) ->
-                    Value = maps:without(maps:keys(?METRICS_EXAMPLE), Value0),
+                fun({RedisType, #{value := Value}}) ->
                    MethodBin = atom_to_binary(Method),
                    Type = string:slice(RedisType, length("redis_")),
                    RefName = binary_to_list(<<MethodBin/binary, "_", Type/binary>>),
@ -0,0 +1,267 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%--------------------------------------------------------------------

-module(emqx_ee_bridge_rocketmq_SUITE).

-compile(nowarn_export_all).
-compile(export_all).

-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
-include_lib("snabbkaffe/include/snabbkaffe.hrl").

% Bridge defaults
-define(TOPIC, "TopicTest").
-define(BATCH_SIZE, 10).
-define(PAYLOAD, <<"HELLO">>).

-define(GET_CONFIG(KEY__, CFG__), proplists:get_value(KEY__, CFG__)).

%%------------------------------------------------------------------------------
%% CT boilerplate
%%------------------------------------------------------------------------------

all() ->
    [
        {group, with_batch},
        {group, without_batch}
    ].

groups() ->
    TCs = emqx_common_test_helpers:all(?MODULE),
    [
        {with_batch, TCs},
        {without_batch, TCs}
    ].

init_per_group(with_batch, Config0) ->
    Config = [{batch_size, ?BATCH_SIZE} | Config0],
    common_init(Config);
init_per_group(without_batch, Config0) ->
    Config = [{batch_size, 1} | Config0],
    common_init(Config);
init_per_group(_Group, Config) ->
    Config.

end_per_group(Group, Config) when Group =:= with_batch; Group =:= without_batch ->
    ProxyHost = ?config(proxy_host, Config),
    ProxyPort = ?config(proxy_port, Config),
    emqx_common_test_helpers:reset_proxy(ProxyHost, ProxyPort),
    ok;
end_per_group(_Group, _Config) ->
    ok.

init_per_suite(Config) ->
    Config.

end_per_suite(_Config) ->
    emqx_mgmt_api_test_util:end_suite(),
    ok = emqx_common_test_helpers:stop_apps([emqx_bridge, emqx_conf]),
    ok.

init_per_testcase(_Testcase, Config) ->
    delete_bridge(Config),
    Config.

end_per_testcase(_Testcase, Config) ->
    ProxyHost = ?config(proxy_host, Config),
    ProxyPort = ?config(proxy_port, Config),
    emqx_common_test_helpers:reset_proxy(ProxyHost, ProxyPort),
    ok = snabbkaffe:stop(),
    delete_bridge(Config),
    ok.

%%------------------------------------------------------------------------------
%% Helper fns
%%------------------------------------------------------------------------------

common_init(ConfigT) ->
    BridgeType = <<"rocketmq">>,
    Host = os:getenv("ROCKETMQ_HOST", "toxiproxy"),
    Port = list_to_integer(os:getenv("ROCKETMQ_PORT", "9876")),

    Config0 = [
        {host, Host},
        {port, Port},
        {query_mode, sync},
        {proxy_name, "rocketmq"}
        | ConfigT
    ],

    case emqx_common_test_helpers:is_tcp_server_available(Host, Port) of
        true ->
            % Setup toxiproxy
            ProxyHost = os:getenv("PROXY_HOST", "toxiproxy"),
            ProxyPort = list_to_integer(os:getenv("PROXY_PORT", "8474")),
            emqx_common_test_helpers:reset_proxy(ProxyHost, ProxyPort),
            % Ensure EE bridge module is loaded
            _ = application:load(emqx_ee_bridge),
            _ = emqx_ee_bridge:module_info(),
            ok = emqx_common_test_helpers:start_apps([emqx_conf, emqx_bridge]),
            emqx_mgmt_api_test_util:init_suite(),
            {Name, RocketMQConf} = rocketmq_config(BridgeType, Config0),
            Config =
                [
                    {rocketmq_config, RocketMQConf},
                    {rocketmq_bridge_type, BridgeType},
                    {rocketmq_name, Name},
                    {proxy_host, ProxyHost},
                    {proxy_port, ProxyPort}
                    | Config0
                ],
            Config;
        false ->
            case os:getenv("IS_CI") of
                false ->
                    {skip, no_rocketmq};
                _ ->
                    throw(no_rocketmq)
            end
    end.

rocketmq_config(BridgeType, Config) ->
    Port = integer_to_list(?GET_CONFIG(port, Config)),
    Server = ?GET_CONFIG(host, Config) ++ ":" ++ Port,
    Name = atom_to_binary(?MODULE),
    BatchSize = ?config(batch_size, Config),
    QueryMode = ?config(query_mode, Config),
    ConfigString =
        io_lib:format(
            "bridges.~s.~s {\n"
            " enable = true\n"
            " server = ~p\n"
            " topic = ~p\n"
            " resource_opts = {\n"
            " request_timeout = 1500ms\n"
            " batch_size = ~b\n"
            " query_mode = ~s\n"
            " }\n"
            "}",
            [
                BridgeType,
                Name,
                Server,
                ?TOPIC,
                BatchSize,
                QueryMode
            ]
        ),
    {Name, parse_and_check(ConfigString, BridgeType, Name)}.

parse_and_check(ConfigString, BridgeType, Name) ->
    {ok, RawConf} = hocon:binary(ConfigString, #{format => map}),
    hocon_tconf:check_plain(emqx_bridge_schema, RawConf, #{required => false, atom_key => false}),
    #{<<"bridges">> := #{BridgeType := #{Name := Config}}} = RawConf,
    Config.

create_bridge(Config) ->
    BridgeType = ?GET_CONFIG(rocketmq_bridge_type, Config),
    Name = ?GET_CONFIG(rocketmq_name, Config),
    RocketMQConf = ?GET_CONFIG(rocketmq_config, Config),
    emqx_bridge:create(BridgeType, Name, RocketMQConf).

delete_bridge(Config) ->
    BridgeType = ?GET_CONFIG(rocketmq_bridge_type, Config),
    Name = ?GET_CONFIG(rocketmq_name, Config),
    emqx_bridge:remove(BridgeType, Name).

create_bridge_http(Params) ->
    Path = emqx_mgmt_api_test_util:api_path(["bridges"]),
    AuthHeader = emqx_mgmt_api_test_util:auth_header_(),
    case emqx_mgmt_api_test_util:request_api(post, Path, "", AuthHeader, Params) of
        {ok, Res} -> {ok, emqx_json:decode(Res, [return_maps])};
        Error -> Error
    end.

send_message(Config, Payload) ->
    Name = ?GET_CONFIG(rocketmq_name, Config),
    BridgeType = ?GET_CONFIG(rocketmq_bridge_type, Config),
    BridgeID = emqx_bridge_resource:bridge_id(BridgeType, Name),
    emqx_bridge:send_message(BridgeID, Payload).

query_resource(Config, Request) ->
    Name = ?GET_CONFIG(rocketmq_name, Config),
    BridgeType = ?GET_CONFIG(rocketmq_bridge_type, Config),
    ResourceID = emqx_bridge_resource:resource_id(BridgeType, Name),
    emqx_resource:query(ResourceID, Request, #{timeout => 500}).

%%------------------------------------------------------------------------------
%% Testcases
%%------------------------------------------------------------------------------

t_setup_via_config_and_publish(Config) ->
    ?assertMatch(
        {ok, _},
        create_bridge(Config)
    ),
    SentData = #{payload => ?PAYLOAD},
    ?check_trace(
        begin
            ?wait_async_action(
                ?assertEqual(ok, send_message(Config, SentData)),
                #{?snk_kind := rocketmq_connector_query_return},
                10_000
            ),
            ok
        end,
        fun(Trace0) ->
            Trace = ?of_kind(rocketmq_connector_query_return, Trace0),
            ?assertMatch([#{result := ok}], Trace),
            ok
        end
    ),
    ok.

t_setup_via_http_api_and_publish(Config) ->
    BridgeType = ?GET_CONFIG(rocketmq_bridge_type, Config),
    Name = ?GET_CONFIG(rocketmq_name, Config),
    RocketMQConf = ?GET_CONFIG(rocketmq_config, Config),
    RocketMQConf2 = RocketMQConf#{
        <<"name">> => Name,
        <<"type">> => BridgeType
    },
    ?assertMatch(
        {ok, _},
        create_bridge_http(RocketMQConf2)
    ),
    SentData = #{payload => ?PAYLOAD},
    ?check_trace(
        begin
            ?wait_async_action(
                ?assertEqual(ok, send_message(Config, SentData)),
                #{?snk_kind := rocketmq_connector_query_return},
                10_000
            ),
            ok
        end,
        fun(Trace0) ->
            Trace = ?of_kind(rocketmq_connector_query_return, Trace0),
            ?assertMatch([#{result := ok}], Trace),
            ok
        end
    ),
    ok.

t_get_status(Config) ->
    ?assertMatch(
        {ok, _},
        create_bridge(Config)
    ),

    Name = ?GET_CONFIG(rocketmq_name, Config),
    BridgeType = ?GET_CONFIG(rocketmq_bridge_type, Config),
    ResourceID = emqx_bridge_resource:resource_id(BridgeType, Name),

    ?assertEqual({ok, connected}, emqx_resource_manager:health_check(ResourceID)),
    ok.

t_simple_query(Config) ->
    ?assertMatch(
        {ok, _},
        create_bridge(Config)
    ),
    Request = {send_message, #{message => <<"Hello">>}},
    Result = query_resource(Config, Request),
    ?assertEqual(ok, Result),
    ok.
@ -0,0 +1,66 @@
emqx_ee_connector_rocketmq {

    server {
        desc {
            en: """
The IPv4 or IPv6 address or the hostname to connect to.<br/>
A host entry has the following form: `Host[:Port]`.<br/>
The RocketMQ default port 9876 is used if `[:Port]` is not specified.
"""
            zh: """
将要连接的 IPv4 或 IPv6 地址,或者主机名。<br/>
主机名具有以下形式:`Host[:Port]`。<br/>
如果未指定 `[:Port]`,则使用 RocketMQ 默认端口 9876。
"""
        }
        label: {
            en: "Server Host"
            zh: "服务器地址"
        }
    }

    topic {
        desc {
            en: """RocketMQ Topic"""
            zh: """RocketMQ 主题"""
        }
        label: {
            en: "RocketMQ Topic"
            zh: "RocketMQ 主题"
        }
    }

    refresh_interval {
        desc {
            en: """RocketMQ Topic Route Refresh Interval."""
            zh: """RocketMQ 主题路由更新间隔。"""
        }
        label: {
            en: "Topic Route Refresh Interval"
            zh: "主题路由更新间隔"
        }
    }

    send_buffer {
        desc {
            en: """The socket send buffer size of the RocketMQ driver client."""
            zh: """RocketMQ 驱动的套接字发送消息的缓冲区大小"""
        }
        label: {
            en: "Send Buffer Size"
            zh: "发送消息的缓冲区大小"
        }
    }

    security_token {
        desc {
            en: """RocketMQ Server Security Token"""
            zh: """RocketMQ 服务器安全令牌"""
        }
        label: {
            en: "Security Token"
            zh: "安全令牌"
        }
    }

}
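Taken together, the fields documented above describe a RocketMQ bridge configuration; the test suite additionally sets `resource_opts`. An illustrative HOCON sketch, assuming the bridge name `my_rocketmq_bridge` and the server address as placeholders (defaults are taken from the connector schema further below):

    bridges.rocketmq.my_rocketmq_bridge {
      enable = true
      # Host[:Port]; the default port 9876 is used when [:Port] is omitted
      server = "rocketmq_namesrv:9876"
      topic = "TopicTest"
      refresh_interval = "3s"
      send_buffer = "1024KB"
      security_token = ""
      resource_opts = {
        query_mode = sync
        batch_size = 1
        request_timeout = 1500ms
      }
    }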
@ -5,6 +5,7 @@
    {tdengine, {git, "https://github.com/emqx/tdengine-client-erl", {tag, "0.1.5"}}},
    {clickhouse, {git, "https://github.com/emqx/clickhouse-client-erl", {tag, "0.2"}}},
    {erlcloud, {git, "https://github.com/emqx/erlcloud.git", {tag,"3.5.16-emqx-1"}}},
+    {rocketmq, {git, "https://github.com/emqx/rocketmq-client-erl.git", {tag, "v0.5.1"}}},
    {emqx, {path, "../../apps/emqx"}}
]}.
@ -12,6 +12,7 @@
        brod,
        clickhouse,
        erlcloud,
+        rocketmq,
        ecql
    ]},
    {env, []},
@ -0,0 +1,338 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%--------------------------------------------------------------------

-module(emqx_ee_connector_rocketmq).

-behaviour(emqx_resource).

-include_lib("emqx_resource/include/emqx_resource.hrl").
-include_lib("typerefl/include/types.hrl").
-include_lib("emqx/include/logger.hrl").
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
-include_lib("hocon/include/hoconsc.hrl").

-export([roots/0, fields/1]).

%% `emqx_resource' API
-export([
    callback_mode/0,
    is_buffer_supported/0,
    on_start/2,
    on_stop/2,
    on_query/3,
    on_batch_query/3,
    on_get_status/2
]).

-import(hoconsc, [mk/2, enum/1, ref/2]).

-define(ROCKETMQ_HOST_OPTIONS, #{
    default_port => 9876
}).

%%=====================================================================
%% Hocon schema
roots() ->
    [{config, #{type => hoconsc:ref(?MODULE, config)}}].

fields(config) ->
    [
        {server, server()},
        {topic,
            mk(
                binary(),
                #{default => <<"TopicTest">>, desc => ?DESC(topic)}
            )},
        {refresh_interval,
            mk(
                emqx_schema:duration(),
                #{default => <<"3s">>, desc => ?DESC(refresh_interval)}
            )},
        {send_buffer,
            mk(
                emqx_schema:bytesize(),
                #{default => <<"1024KB">>, desc => ?DESC(send_buffer)}
            )},
        {security_token, mk(binary(), #{default => <<>>, desc => ?DESC(security_token)})}
        | relational_fields()
    ].

add_default_username(Fields) ->
    lists:map(
        fun
            ({username, OrigUsernameFn}) ->
                {username, add_default_fn(OrigUsernameFn, <<"">>)};
            (Field) ->
                Field
        end,
        Fields
    ).

add_default_fn(OrigFn, Default) ->
    fun
        (default) -> Default;
        (Field) -> OrigFn(Field)
    end.

server() ->
    Meta = #{desc => ?DESC("server")},
    emqx_schema:servers_sc(Meta, ?ROCKETMQ_HOST_OPTIONS).

relational_fields() ->
    Fields = [username, password, auto_reconnect],
    Values = lists:filter(
        fun({E, _}) -> lists:member(E, Fields) end,
        emqx_connector_schema_lib:relational_db_fields()
    ),
    add_default_username(Values).

%%========================================================================================
%% `emqx_resource' API
%%========================================================================================

callback_mode() -> always_sync.

is_buffer_supported() -> false.

on_start(
    InstanceId,
    #{server := Server, topic := Topic} = Config1
) ->
    ?SLOG(info, #{
        msg => "starting_rocketmq_connector",
        connector => InstanceId,
        config => redact(Config1)
    }),
    Config = maps:merge(default_security_info(), Config1),
    {Host, Port} = emqx_schema:parse_server(Server, ?ROCKETMQ_HOST_OPTIONS),

    Server1 = [{Host, Port}],
    ClientId = client_id(InstanceId),
    ClientCfg = #{acl_info => #{}},

    TopicTks = emqx_plugin_libs_rule:preproc_tmpl(Topic),
    ProducerOpts = make_producer_opts(Config),
    Templates = parse_template(Config),
    ProducersMapPID = create_producers_map(ClientId),
    State = #{
        client_id => ClientId,
        topic_tokens => TopicTks,
        config => Config,
        templates => Templates,
        producers_map_pid => ProducersMapPID,
        producers_opts => ProducerOpts
    },

    case rocketmq:ensure_supervised_client(ClientId, Server1, ClientCfg) of
        {ok, _Pid} ->
            {ok, State};
        {error, _Reason} = Error ->
            ?tp(
                rocketmq_connector_start_failed,
                #{error => _Reason}
            ),
            Error
    end.

on_stop(InstanceId, #{client_id := ClientId, producers_map_pid := Pid} = _State) ->
    ?SLOG(info, #{
        msg => "stopping_rocketmq_connector",
        connector => InstanceId
    }),
    Pid ! ok,
    ok = rocketmq:stop_and_delete_supervised_client(ClientId).

on_query(InstanceId, Query, State) ->
    do_query(InstanceId, Query, send_sync, State).

%% We only support batch inserts and all messages must have the same topic
on_batch_query(InstanceId, [{send_message, _Msg} | _] = Query, State) ->
    do_query(InstanceId, Query, batch_send_sync, State);
on_batch_query(_InstanceId, Query, _State) ->
    {error, {unrecoverable_error, {invalid_request, Query}}}.

on_get_status(_InstanceId, #{client_id := ClientId}) ->
    case rocketmq_client_sup:find_client(ClientId) of
        {ok, _Pid} ->
            connected;
        _ ->
            connecting
    end.

%%========================================================================================
%% Helper fns
%%========================================================================================

do_query(
    InstanceId,
    Query,
    QueryFunc,
    #{
        templates := Templates,
        client_id := ClientId,
        topic_tokens := TopicTks,
        producers_opts := ProducerOpts,
        config := #{topic := RawTopic, resource_opts := #{request_timeout := RequestTimeout}}
    } = State
) ->
    ?TRACE(
        "QUERY",
        "rocketmq_connector_received",
        #{connector => InstanceId, query => Query, state => State}
    ),

    TopicKey = get_topic_key(Query, RawTopic, TopicTks),
    Data = apply_template(Query, Templates),

    Result = safe_do_produce(
        InstanceId, QueryFunc, ClientId, TopicKey, Data, ProducerOpts, RequestTimeout
    ),
    case Result of
        {error, Reason} ->
            ?tp(
                rocketmq_connector_query_return,
                #{error => Reason}
            ),
            ?SLOG(error, #{
                msg => "rocketmq_connector_do_query_failed",
                connector => InstanceId,
                query => Query,
                reason => Reason
            }),
            Result;
        _ ->
            ?tp(
                rocketmq_connector_query_return,
                #{result => Result}
            ),
            Result
    end.

safe_do_produce(InstanceId, QueryFunc, ClientId, TopicKey, Data, ProducerOpts, RequestTimeout) ->
    try
        Producers = get_producers(ClientId, TopicKey, ProducerOpts),
        produce(InstanceId, QueryFunc, Producers, Data, RequestTimeout)
    catch
        _Type:Reason ->
            {error, {unrecoverable_error, Reason}}
    end.

produce(_InstanceId, QueryFunc, Producers, Data, RequestTimeout) ->
    rocketmq:QueryFunc(Producers, Data, RequestTimeout).

parse_template(Config) ->
    Templates =
        case maps:get(template, Config, undefined) of
            undefined -> #{};
            <<>> -> #{};
            Template -> #{send_message => Template}
        end,

    parse_template(maps:to_list(Templates), #{}).

parse_template([{Key, H} | T], Templates) ->
    ParamsTks = emqx_plugin_libs_rule:preproc_tmpl(H),
    parse_template(
        T,
        Templates#{Key => ParamsTks}
    );
parse_template([], Templates) ->
    Templates.

get_topic_key({_, Msg}, RawTopic, TopicTks) ->
    {RawTopic, emqx_plugin_libs_rule:proc_tmpl(TopicTks, Msg)};
get_topic_key([Query | _], RawTopic, TopicTks) ->
    get_topic_key(Query, RawTopic, TopicTks).

apply_template({Key, Msg} = _Req, Templates) ->
    case maps:get(Key, Templates, undefined) of
        undefined ->
            emqx_json:encode(Msg);
        Template ->
            emqx_plugin_libs_rule:proc_tmpl(Template, Msg)
    end;
apply_template([{Key, _} | _] = Reqs, Templates) ->
    case maps:get(Key, Templates, undefined) of
        undefined ->
            [emqx_json:encode(Msg) || {_, Msg} <- Reqs];
        Template ->
            [emqx_plugin_libs_rule:proc_tmpl(Template, Msg) || {_, Msg} <- Reqs]
    end.

client_id(InstanceId) ->
    Name = emqx_resource_manager:manager_id_to_resource_id(InstanceId),
    erlang:binary_to_atom(Name, utf8).

redact(Msg) ->
    emqx_misc:redact(Msg, fun is_sensitive_key/1).

is_sensitive_key(security_token) ->
    true;
is_sensitive_key(_) ->
    false.

make_producer_opts(
    #{
        username := Username,
        password := Password,
        security_token := SecurityToken,
        send_buffer := SendBuff,
        refresh_interval := RefreshInterval
    }
) ->
    ACLInfo = acl_info(Username, Password, SecurityToken),
    #{
        tcp_opts => [{sndbuf, SendBuff}],
        ref_topic_route_interval => RefreshInterval,
        acl_info => ACLInfo
    }.

acl_info(<<>>, <<>>, <<>>) ->
    #{};
acl_info(Username, Password, <<>>) when is_binary(Username), is_binary(Password) ->
    #{
        access_key => Username,
        secret_key => Password
    };
acl_info(Username, Password, SecurityToken) when
    is_binary(Username), is_binary(Password), is_binary(SecurityToken)
->
    #{
        access_key => Username,
        secret_key => Password,
        security_token => SecurityToken
    };
acl_info(_, _, _) ->
    #{}.

create_producers_map(ClientId) ->
    erlang:spawn(fun() ->
        case ets:whereis(ClientId) of
            undefined ->
                _ = ets:new(ClientId, [public, named_table]),
                ok;
            _ ->
                ok
        end,
        receive
            _Msg ->
                ok
        end
    end).

get_producers(ClientId, {_, Topic1} = TopicKey, ProducerOpts) ->
    case ets:lookup(ClientId, TopicKey) of
        [{_, Producers0}] ->
            Producers0;
        _ ->
            ProducerGroup = iolist_to_binary([atom_to_list(ClientId), "_", Topic1]),
            {ok, Producers0} = rocketmq:ensure_supervised_producers(
                ClientId, ProducerGroup, Topic1, ProducerOpts
            ),
            ets:insert(ClientId, {TopicKey, Producers0}),
            Producers0
    end.

default_security_info() ->
    #{username => <<>>, password => <<>>, security_token => <<>>}.
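As the clause heads of on_query/3 and on_batch_query/3 show, a single request has the shape {send_message, Msg}, and a batch is a list of such requests grouped by the resource buffer worker, all resolving to the same topic. A minimal illustrative sketch (the resource id, bridge name, and payloads are placeholders; emqx_resource:query/3 with a timeout option is the same call the test suite's query_resource/2 helper makes):

    %% Illustrative only: how a caller can reach on_query/3 for this connector.
    ResourceID = emqx_bridge_resource:resource_id(<<"rocketmq">>, <<"my_rocketmq_bridge">>),
    ok = emqx_resource:query(ResourceID, {send_message, #{message => <<"Hello">>}}, #{timeout => 500}),

    %% When resource_opts.batch_size > 1, the buffer worker hands on_batch_query/3
    %% a list of the same tuples, for example:
    _BatchShape = [
        {send_message, #{message => <<"m1">>}},
        {send_message, #{message => <<"m2">>}}
    ].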
mix.exs
@ -53,7 +53,7 @@ defmodule EMQXUmbrella.MixProject do
      {:gproc, github: "uwiger/gproc", tag: "0.8.0", override: true},
      {:jiffy, github: "emqx/jiffy", tag: "1.0.5", override: true},
      {:cowboy, github: "emqx/cowboy", tag: "2.9.0", override: true},
-      {:esockd, github: "emqx/esockd", tag: "5.9.4", override: true},
+      {:esockd, github: "emqx/esockd", tag: "5.9.6", override: true},
      {:rocksdb, github: "emqx/erlang-rocksdb", tag: "1.7.2-emqx-9", override: true},
      {:ekka, github: "emqx/ekka", tag: "0.14.5", override: true},
      {:gen_rpc, github: "emqx/gen_rpc", tag: "2.8.1", override: true},
@ -60,7 +60,7 @@
    , {gproc, {git, "https://github.com/uwiger/gproc", {tag, "0.8.0"}}}
    , {jiffy, {git, "https://github.com/emqx/jiffy", {tag, "1.0.5"}}}
    , {cowboy, {git, "https://github.com/emqx/cowboy", {tag, "2.9.0"}}}
-    , {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.4"}}}
+    , {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.6"}}}
    , {rocksdb, {git, "https://github.com/emqx/erlang-rocksdb", {tag, "1.7.2-emqx-9"}}}
    , {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.14.5"}}}
    , {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.8.1"}}}
@ -170,6 +170,9 @@ for dep in ${CT_DEPS}; do
        dynamo)
            FILES+=( '.ci/docker-compose-file/docker-compose-dynamo.yaml' )
            ;;
+        rocketmq)
+            FILES+=( '.ci/docker-compose-file/docker-compose-rocketmq.yaml' )
+            ;;
        cassandra)
            FILES+=( '.ci/docker-compose-file/docker-compose-cassandra.yaml' )
            ;;
@ -20,7 +20,7 @@ case "$VERSION" in
|
||||||
esac
|
esac
|
||||||
|
|
||||||
DASHBOARD_PATH='apps/emqx_dashboard/priv'
|
DASHBOARD_PATH='apps/emqx_dashboard/priv'
|
||||||
DASHBOARD_REPO='emqx-dashboard-web-new'
|
DASHBOARD_REPO='emqx-dashboard5'
|
||||||
DIRECT_DOWNLOAD_URL="https://github.com/emqx/${DASHBOARD_REPO}/releases/download/${VERSION}/${RELEASE_ASSET_FILE}"
|
DIRECT_DOWNLOAD_URL="https://github.com/emqx/${DASHBOARD_REPO}/releases/download/${VERSION}/${RELEASE_ASSET_FILE}"
|
||||||
|
|
||||||
case $(uname) in
|
case $(uname) in
|
||||||
|
|
|
@ -271,4 +271,5 @@ nif
TDengine
clickhouse
FormatType
+RocketMQ
Keyspace