Merge pull request #9932 from lafirest/feat/tdengine
feat(bridges): add TDengine
commit 3108b6f9dc
@@ -4,5 +4,6 @@ MONGO_TAG=5
PGSQL_TAG=13
LDAP_TAG=2.4.50
INFLUXDB_TAG=2.5.0
TDENGINE_TAG=3.0.2.4

TARGET=emqx/emqx
@@ -16,6 +16,7 @@ up:
REDIS_TAG=7.0 \
MONGO_TAG=5 \
PGSQL_TAG=13 \
TDENGINE_TAG=3.0.2.4 \
docker-compose \
-f .ci/docker-compose-file/docker-compose.yaml \
-f .ci/docker-compose-file/docker-compose-mongo-single-tcp.yaml \
@@ -31,6 +32,7 @@ up:
-f .ci/docker-compose-file/docker-compose-redis-cluster-tcp.yaml \
-f .ci/docker-compose-file/docker-compose-redis-cluster-tls.yaml \
-f .ci/docker-compose-file/docker-compose-toxiproxy.yaml \
-f .ci/docker-compose-file/docker-compose-tdengine-restful.yaml \
up -d --build --remove-orphans

down:

@@ -49,6 +51,7 @@ down:
-f .ci/docker-compose-file/docker-compose-redis-cluster-tcp.yaml \
-f .ci/docker-compose-file/docker-compose-redis-cluster-tls.yaml \
-f .ci/docker-compose-file/docker-compose-toxiproxy.yaml \
-f .ci/docker-compose-file/docker-compose-tdengine-restful.yaml \
down --remove-orphans

ct:
@@ -0,0 +1,11 @@
version: '3.9'

services:
  tdengine_server:
    container_name: tdengine
    image: tdengine/tdengine:${TDENGINE_TAG}
    restart: always
    ports:
      - "6041:6041"
    networks:
      - emqx_bridge
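For orientation, the test suite added later in this commit talks to this container over its REST port. A minimal sketch of that direct connection, with the options copied from connect_direct_tdengine/1 (the literal host is an assumption for a local run, not part of this file):

    %% Sketch only: direct client connection against the service defined above.
    {ok, Con} = tdengine:start_link([
        {host, <<"127.0.0.1">>},
        {port, 6041},
        {username, <<"root">>},
        {password, <<"taosdata">>},
        {pool_size, 8}
    ]),
    {ok, _} = tdengine:insert(Con, "select server_version()", []),
    ok = tdengine:stop(Con).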
@@ -17,6 +17,7 @@ services:
      - 13307:3307
      - 15432:5432
      - 15433:5433
      - 16041:6041
    command:
      - "-host=0.0.0.0"
      - "-config=/config/toxiproxy.json"
@@ -41,5 +41,11 @@
    "listen": "0.0.0.0:5433",
    "upstream": "pgsql-tls:5432",
    "enabled": true
  },
  {
    "name": "tdengine_restful",
    "listen": "0.0.0.0:6041",
    "upstream": "tdengine:6041",
    "enabled": true
  }
]
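The new proxy route is what the connectivity test cases toggle. A hedged sketch of that usage, lifted from the suite added later in this commit (host and port fall back to the CI defaults):

    %% Sketch only: cut the TDengine REST route through toxiproxy and assert the
    %% bridge reports the failure, as t_write_failure/1 below does.
    ProxyHost = os:getenv("PROXY_HOST", "toxiproxy"),
    ProxyPort = list_to_integer(os:getenv("PROXY_PORT", "8474")),
    emqx_common_test_helpers:with_failure(down, "tdengine_restful", ProxyHost, ProxyPort, fun() ->
        ?assertMatch({error, econnrefused}, send_message(Config, SentData))
    end).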
@@ -161,6 +161,7 @@ jobs:
          PGSQL_TAG: "13"
          REDIS_TAG: "7.0"
          INFLUXDB_TAG: "2.5.0"
          TDENGINE_TAG: "3.0.2.4"
          PROFILE: ${{ matrix.profile }}
          CT_COVER_EXPORT_PREFIX: ${{ matrix.profile }}-${{ matrix.otp }}
        run: ./scripts/ct/run.sh --ci --app ${{ matrix.app }}
@@ -1,7 +1,7 @@
%% -*- mode: erlang -*-
{application, emqx_connector, [
    {description, "EMQX Data Integration Connectors"},
    {vsn, "0.1.14"},
    {vsn, "0.1.15"},
    {registered, []},
    {mod, {emqx_connector_app, []}},
    {applications, [
@@ -391,22 +391,7 @@ proc_sql_params(TypeOrKey, SQLOrData, Params, #{params_tokens := ParamsTokens})
    end.

on_batch_insert(InstId, BatchReqs, InsertPart, Tokens, State) ->
    JoinFun = fun
        ([Msg]) ->
            emqx_plugin_libs_rule:proc_sql_param_str(Tokens, Msg);
        ([H | T]) ->
            lists:foldl(
                fun(Msg, Acc) ->
                    Value = emqx_plugin_libs_rule:proc_sql_param_str(Tokens, Msg),
                    <<Acc/binary, ", ", Value/binary>>
                end,
                emqx_plugin_libs_rule:proc_sql_param_str(Tokens, H),
                T
            )
    end,
    {_, Msgs} = lists:unzip(BatchReqs),
    JoinPart = JoinFun(Msgs),
    SQL = <<InsertPart/binary, " values ", JoinPart/binary>>,
    SQL = emqx_plugin_libs_rule:proc_batch_sql(BatchReqs, InsertPart, Tokens),
    on_sql_query(InstId, query, SQL, [], default_timeout, State).

on_sql_query(
@@ -1,7 +1,7 @@
%% -*- mode: erlang -*-
{application, emqx_plugin_libs, [
    {description, "EMQX Plugin utility libs"},
    {vsn, "4.3.5"},
    {vsn, "4.3.6"},
    {modules, []},
    {applications, [kernel, stdlib]},
    {env, []}
@@ -31,7 +31,8 @@
    proc_sql_param_str/2,
    proc_cql_param_str/2,
    split_insert_sql/1,
    detect_sql_type/1
    detect_sql_type/1,
    proc_batch_sql/3
]).

%% type converting

@@ -164,6 +165,20 @@ detect_sql_type(SQL) ->
            {error, invalid_sql}
    end.

-spec proc_batch_sql(
    BatchReqs :: list({atom(), map()}),
    InsertPart :: binary(),
    Tokens :: tmpl_token()
) -> InsertSQL :: binary().
proc_batch_sql(BatchReqs, InsertPart, Tokens) ->
    ValuesPart = erlang:iolist_to_binary(
        lists:join(", ", [
            emqx_plugin_libs_rule:proc_sql_param_str(Tokens, Msg)
         || {_, Msg} <- BatchReqs
        ])
    ),
    <<InsertPart/binary, " values ", ValuesPart/binary>>.

unsafe_atom_key(Key) when is_atom(Key) ->
    Key;
unsafe_atom_key(Key) when is_binary(Key) ->
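To make the new helper concrete, a hedged example of what proc_batch_sql/3 returns; the template, tokens, and values here are invented for illustration, and the exact quoting is whatever proc_sql_param_str/2 applies:

    %% Hypothetical usage of proc_batch_sql/3 (values invented for this sketch).
    Tokens = emqx_plugin_libs_rule:preproc_tmpl(<<"(${ts}, ${payload})">>),
    Batch = [
        {send_message, #{ts => 1668602148000, payload => <<"HELLO">>}},
        {send_message, #{ts => 1668602148001, payload => <<"WORLD">>}}
    ],
    SQL = emqx_plugin_libs_rule:proc_batch_sql(
        Batch, <<"insert into t_mqtt_msg(ts, payload)">>, Tokens
    ),
    %% SQL is now roughly:
    %% <<"insert into t_mqtt_msg(ts, payload) values (1668602148000, 'HELLO'), (1668602148001, 'WORLD')">>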
@@ -7,3 +7,4 @@ mysql
redis
redis_cluster
pgsql
tdengine
@@ -0,0 +1,74 @@
emqx_ee_bridge_tdengine {

    local_topic {
        desc {
            en: """The MQTT topic filter to be forwarded to TDengine. All MQTT 'PUBLISH' messages with the topic
matching the local_topic will be forwarded.<br/>
NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and local_topic is also
configured, then both the data from the rule and the MQTT messages that match local_topic
will be forwarded.
"""
            zh: """发送到 'local_topic' 的消息都会转发到 TDengine。 <br/>
注意:如果这个 Bridge 被用作规则(EMQX 规则引擎)的输出,同时也配置了 'local_topic' ,那么这两部分的消息都会被转发。
"""
        }
        label {
            en: "Local Topic"
            zh: "本地 Topic"
        }
    }

    sql_template {
        desc {
            en: """SQL Template"""
            zh: """SQL 模板"""
        }
        label {
            en: "SQL Template"
            zh: "SQL 模板"
        }
    }
    config_enable {
        desc {
            en: """Enable or disable this bridge"""
            zh: """启用/禁用桥接"""
        }
        label {
            en: "Enable Or Disable Bridge"
            zh: "启用/禁用桥接"
        }
    }

    desc_config {
        desc {
            en: """Configuration for a TDengine bridge."""
            zh: """TDengine 桥接配置"""
        }
        label: {
            en: "TDengine Bridge Configuration"
            zh: "TDengine 桥接配置"
        }
    }

    desc_type {
        desc {
            en: """The Bridge Type"""
            zh: """Bridge 类型"""
        }
        label {
            en: "Bridge Type"
            zh: "桥接类型"
        }
    }

    desc_name {
        desc {
            en: """Bridge name."""
            zh: """桥接名字"""
        }
        label {
            en: "Bridge Name"
            zh: "桥接名字"
        }
    }
}
@@ -1,6 +1,6 @@
{application, emqx_ee_bridge, [
    {description, "EMQX Enterprise data bridges"},
    {vsn, "0.1.4"},
    {vsn, "0.1.5"},
    {registered, []},
    {applications, [
        kernel,
@@ -28,7 +28,8 @@ api_schemas(Method) ->
        ref(emqx_ee_bridge_redis, Method ++ "_sentinel"),
        ref(emqx_ee_bridge_redis, Method ++ "_cluster"),
        ref(emqx_ee_bridge_timescale, Method),
        ref(emqx_ee_bridge_matrix, Method)
        ref(emqx_ee_bridge_matrix, Method),
        ref(emqx_ee_bridge_tdengine, Method)
    ].

schema_modules() ->

@@ -42,7 +43,8 @@ schema_modules() ->
        emqx_ee_bridge_redis,
        emqx_ee_bridge_pgsql,
        emqx_ee_bridge_timescale,
        emqx_ee_bridge_matrix
        emqx_ee_bridge_matrix,
        emqx_ee_bridge_tdengine
    ].

examples(Method) ->

@@ -72,7 +74,8 @@ resource_type(redis_sentinel) -> emqx_ee_connector_redis;
resource_type(redis_cluster) -> emqx_ee_connector_redis;
resource_type(pgsql) -> emqx_connector_pgsql;
resource_type(timescale) -> emqx_connector_pgsql;
resource_type(matrix) -> emqx_connector_pgsql.
resource_type(matrix) -> emqx_connector_pgsql;
resource_type(tdengine) -> emqx_ee_connector_tdengine.

fields(bridges) ->
    [

@@ -107,6 +110,14 @@ fields(bridges) ->
                    desc => <<"MySQL Bridge Config">>,
                    required => false
                }
            )},
        {tdengine,
            mk(
                hoconsc:map(name, ref(emqx_ee_bridge_tdengine, "config")),
                #{
                    desc => <<"TDengine Bridge Config">>,
                    required => false
                }
            )}
    ] ++ mongodb_structs() ++ influxdb_structs() ++ redis_structs() ++ pgsql_structs().
@@ -0,0 +1,123 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%--------------------------------------------------------------------
-module(emqx_ee_bridge_tdengine).

-include_lib("typerefl/include/types.hrl").
-include_lib("hocon/include/hoconsc.hrl").
-include_lib("emqx_bridge/include/emqx_bridge.hrl").
-include_lib("emqx_resource/include/emqx_resource.hrl").

-import(hoconsc, [mk/2, enum/1, ref/2]).

-export([
    conn_bridge_examples/1,
    values/1
]).

-export([
    namespace/0,
    roots/0,
    fields/1,
    desc/1
]).

-define(DEFAULT_SQL, <<
    "insert into mqtt.t_mqtt_msg(ts, msgid, mqtt_topic, qos, payload, arrived) "
    "values (${ts}, ${id}, ${topic}, ${qos}, ${payload}, ${timestamp})"
>>).

%% -------------------------------------------------------------------------------------------------
%% api

conn_bridge_examples(Method) ->
    [
        #{
            <<"tdengine">> => #{
                summary => <<"TDengine Bridge">>,
                value => values(Method)
            }
        }
    ].

values(get) ->
    maps:merge(values(post), ?METRICS_EXAMPLE);
values(post) ->
    #{
        enable => true,
        type => tdengine,
        name => <<"foo">>,
        server => <<"127.0.0.1:6041">>,
        database => <<"mqtt">>,
        pool_size => 8,
        username => <<"root">>,
        password => <<"taosdata">>,
        sql => ?DEFAULT_SQL,
        local_topic => <<"local/topic/#">>,
        resource_opts => #{
            worker_pool_size => 8,
            health_check_interval => ?HEALTHCHECK_INTERVAL_RAW,
            auto_restart_interval => ?AUTO_RESTART_INTERVAL_RAW,
            batch_size => ?DEFAULT_BATCH_SIZE,
            batch_time => ?DEFAULT_BATCH_TIME,
            query_mode => sync,
            max_queue_bytes => ?DEFAULT_QUEUE_SIZE
        }
    };
values(put) ->
    values(post).

%% -------------------------------------------------------------------------------------------------
%% Hocon Schema Definitions
namespace() -> "bridge_tdengine".

roots() -> [].

fields("config") ->
    [
        {enable, mk(boolean(), #{desc => ?DESC("config_enable"), default => true})},
        {sql,
            mk(
                binary(),
                #{desc => ?DESC("sql_template"), default => ?DEFAULT_SQL, format => <<"sql">>}
            )},
        {local_topic,
            mk(
                binary(),
                #{desc => ?DESC("local_topic"), default => undefined}
            )},
        {resource_opts,
            mk(
                ref(?MODULE, "creation_opts"),
                #{
                    required => false,
                    default => #{},
                    desc => ?DESC(emqx_resource_schema, <<"resource_opts">>)
                }
            )}
    ] ++ emqx_ee_connector_tdengine:fields(config);
fields("creation_opts") ->
    emqx_resource_schema:fields("creation_opts_sync_only");
fields("post") ->
    [type_field(), name_field() | fields("config")];
fields("put") ->
    fields("config");
fields("get") ->
    emqx_bridge_schema:status_fields() ++ fields("post").

desc("config") ->
    ?DESC("desc_config");
desc(Method) when Method =:= "get"; Method =:= "put"; Method =:= "post" ->
    ["Configuration for TDengine using `", string:to_upper(Method), "` method."];
desc("creation_opts" = Name) ->
    emqx_resource_schema:desc(Name);
desc(_) ->
    undefined.

%% -------------------------------------------------------------------------------------------------

type_field() ->
    {type, mk(enum([tdengine]), #{required => true, desc => ?DESC("desc_type")})}.

name_field() ->
    {name, mk(binary(), #{required => true, desc => ?DESC("desc_name")})}.
@@ -0,0 +1,432 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%--------------------------------------------------------------------

-module(emqx_ee_bridge_tdengine_SUITE).

-compile(nowarn_export_all).
-compile(export_all).

-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
-include_lib("snabbkaffe/include/snabbkaffe.hrl").

% SQL definitions
-define(SQL_BRIDGE,
    "insert into mqtt.t_mqtt_msg(ts, payload) values (${timestamp}, ${payload})"
).

-define(SQL_CREATE_DATABASE, "CREATE DATABASE IF NOT EXISTS mqtt; USE mqtt;").
-define(SQL_CREATE_TABLE,
    "CREATE TABLE t_mqtt_msg (\n"
    " ts timestamp,\n"
    " payload BINARY(1024)\n"
    ");"
).
-define(SQL_DROP_TABLE, "DROP TABLE t_mqtt_msg").
-define(SQL_DELETE, "DELETE from t_mqtt_msg").
-define(SQL_SELECT, "SELECT payload FROM t_mqtt_msg").

% DB defaults
-define(TD_DATABASE, "mqtt").
-define(TD_USERNAME, "root").
-define(TD_PASSWORD, "taosdata").
-define(BATCH_SIZE, 10).
-define(PAYLOAD, <<"HELLO">>).

-define(WITH_CON(Process),
    Con = connect_direct_tdengine(Config),
    Process,
    ok = tdengine:stop(Con)
).

%%------------------------------------------------------------------------------
%% CT boilerplate
%%------------------------------------------------------------------------------

all() ->
    [
        {group, with_batch},
        {group, without_batch}
    ].

groups() ->
    TCs = emqx_common_test_helpers:all(?MODULE),
    NonBatchCases = [t_write_timeout],
    [
        {with_batch, TCs -- NonBatchCases},
        {without_batch, TCs}
    ].

init_per_group(with_batch, Config0) ->
    Config = [{enable_batch, true} | Config0],
    common_init(Config);
init_per_group(without_batch, Config0) ->
    Config = [{enable_batch, false} | Config0],
    common_init(Config);
init_per_group(_Group, Config) ->
    Config.

end_per_group(Group, Config) when Group =:= with_batch; Group =:= without_batch ->
    connect_and_drop_table(Config),
    ProxyHost = ?config(proxy_host, Config),
    ProxyPort = ?config(proxy_port, Config),
    emqx_common_test_helpers:reset_proxy(ProxyHost, ProxyPort),
    ok;
end_per_group(_Group, _Config) ->
    ok.

init_per_suite(Config) ->
    Config.

end_per_suite(_Config) ->
    emqx_mgmt_api_test_util:end_suite(),
    ok = emqx_common_test_helpers:stop_apps([emqx_bridge, emqx_conf]),
    ok.

init_per_testcase(_Testcase, Config) ->
    connect_and_clear_table(Config),
    delete_bridge(Config),
    Config.

end_per_testcase(_Testcase, Config) ->
    ProxyHost = ?config(proxy_host, Config),
    ProxyPort = ?config(proxy_port, Config),
    emqx_common_test_helpers:reset_proxy(ProxyHost, ProxyPort),
    connect_and_clear_table(Config),
    ok = snabbkaffe:stop(),
    delete_bridge(Config),
    ok.

%%------------------------------------------------------------------------------
%% Helper fns
%%------------------------------------------------------------------------------

common_init(ConfigT) ->
    Host = os:getenv("TDENGINE_HOST", "toxiproxy"),
    Port = list_to_integer(os:getenv("TDENGINE_PORT", "6041")),

    Config0 = [
        {td_host, Host},
        {td_port, Port},
        {query_mode, sync},
        {proxy_name, "tdengine_restful"}
        | ConfigT
    ],

    BridgeType = proplists:get_value(bridge_type, Config0, <<"tdengine">>),
    case emqx_common_test_helpers:is_tcp_server_available(Host, Port) of
        true ->
            % Setup toxiproxy
            ProxyHost = os:getenv("PROXY_HOST", "toxiproxy"),
            ProxyPort = list_to_integer(os:getenv("PROXY_PORT", "8474")),
            emqx_common_test_helpers:reset_proxy(ProxyHost, ProxyPort),
            % Ensure EE bridge module is loaded
            _ = application:load(emqx_ee_bridge),
            _ = emqx_ee_bridge:module_info(),
            ok = emqx_common_test_helpers:start_apps([emqx_conf, emqx_bridge]),
            emqx_mgmt_api_test_util:init_suite(),
            % Connect to tdengine directly and create the table
            connect_and_create_table(Config0),
            {Name, TDConf} = tdengine_config(BridgeType, Config0),
            Config =
                [
                    {tdengine_config, TDConf},
                    {tdengine_bridge_type, BridgeType},
                    {tdengine_name, Name},
                    {proxy_host, ProxyHost},
                    {proxy_port, ProxyPort}
                    | Config0
                ],
            Config;
        false ->
            case os:getenv("IS_CI") of
                "yes" ->
                    throw(no_tdengine);
                _ ->
                    {skip, no_tdengine}
            end
    end.

tdengine_config(BridgeType, Config) ->
    Port = integer_to_list(?config(td_port, Config)),
    Server = ?config(td_host, Config) ++ ":" ++ Port,
    Name = atom_to_binary(?MODULE),
    BatchSize =
        case ?config(enable_batch, Config) of
            true -> ?BATCH_SIZE;
            false -> 1
        end,
    QueryMode = ?config(query_mode, Config),
    ConfigString =
        io_lib:format(
            "bridges.~s.~s {\n"
            " enable = true\n"
            " server = ~p\n"
            " database = ~p\n"
            " username = ~p\n"
            " password = ~p\n"
            " sql = ~p\n"
            " resource_opts = {\n"
            " request_timeout = 500ms\n"
            " batch_size = ~b\n"
            " query_mode = ~s\n"
            " }\n"
            "}",
            [
                BridgeType,
                Name,
                Server,
                ?TD_DATABASE,
                ?TD_USERNAME,
                ?TD_PASSWORD,
                ?SQL_BRIDGE,
                BatchSize,
                QueryMode
            ]
        ),
    {Name, parse_and_check(ConfigString, BridgeType, Name)}.

parse_and_check(ConfigString, BridgeType, Name) ->
    {ok, RawConf} = hocon:binary(ConfigString, #{format => map}),
    hocon_tconf:check_plain(emqx_bridge_schema, RawConf, #{required => false, atom_key => false}),
    #{<<"bridges">> := #{BridgeType := #{Name := Config}}} = RawConf,
    Config.

create_bridge(Config) ->
    BridgeType = ?config(tdengine_bridge_type, Config),
    Name = ?config(tdengine_name, Config),
    TDConfig = ?config(tdengine_config, Config),
    emqx_bridge:create(BridgeType, Name, TDConfig).

delete_bridge(Config) ->
    BridgeType = ?config(tdengine_bridge_type, Config),
    Name = ?config(tdengine_name, Config),
    emqx_bridge:remove(BridgeType, Name).

create_bridge_http(Params) ->
    Path = emqx_mgmt_api_test_util:api_path(["bridges"]),
    AuthHeader = emqx_mgmt_api_test_util:auth_header_(),
    case emqx_mgmt_api_test_util:request_api(post, Path, "", AuthHeader, Params) of
        {ok, Res} -> {ok, emqx_json:decode(Res, [return_maps])};
        Error -> Error
    end.

send_message(Config, Payload) ->
    Name = ?config(tdengine_name, Config),
    BridgeType = ?config(tdengine_bridge_type, Config),
    BridgeID = emqx_bridge_resource:bridge_id(BridgeType, Name),
    emqx_bridge:send_message(BridgeID, Payload).

query_resource(Config, Request) ->
    Name = ?config(tdengine_name, Config),
    BridgeType = ?config(tdengine_bridge_type, Config),
    ResourceID = emqx_bridge_resource:resource_id(BridgeType, Name),
    emqx_resource:query(ResourceID, Request, #{timeout => 1_000}).

connect_direct_tdengine(Config) ->
    Opts = [
        {host, to_bin(?config(td_host, Config))},
        {port, ?config(td_port, Config)},
        {username, to_bin(?TD_USERNAME)},
        {password, to_bin(?TD_PASSWORD)},
        {pool_size, 8}
    ],

    {ok, Con} = tdengine:start_link(Opts),
    Con.

% These funs connect and then stop the tdengine connection
connect_and_create_table(Config) ->
    ?WITH_CON(begin
        {ok, _} = directly_query(Con, ?SQL_CREATE_DATABASE, []),
        {ok, _} = directly_query(Con, ?SQL_CREATE_TABLE)
    end).

connect_and_drop_table(Config) ->
    ?WITH_CON({ok, _} = directly_query(Con, ?SQL_DROP_TABLE)).

connect_and_clear_table(Config) ->
    ?WITH_CON({ok, _} = directly_query(Con, ?SQL_DELETE)).

connect_and_get_payload(Config) ->
    ?WITH_CON(
        {ok, #{<<"code">> := 0, <<"data">> := [[Result]]}} = directly_query(Con, ?SQL_SELECT)
    ),
    Result.

directly_query(Con, Query) ->
    directly_query(Con, Query, [{db_name, ?TD_DATABASE}]).

directly_query(Con, Query, QueryOpts) ->
    tdengine:insert(Con, Query, QueryOpts).

%%------------------------------------------------------------------------------
%% Testcases
%%------------------------------------------------------------------------------

t_setup_via_config_and_publish(Config) ->
    ?assertMatch(
        {ok, _},
        create_bridge(Config)
    ),
    SentData = #{payload => ?PAYLOAD, timestamp => 1668602148000},
    ?check_trace(
        begin
            ?wait_async_action(
                ?assertMatch(
                    {ok, #{<<"code">> := 0, <<"rows">> := 1}}, send_message(Config, SentData)
                ),
                #{?snk_kind := tdengine_connector_query_return},
                10_000
            ),
            ?assertMatch(
                ?PAYLOAD,
                connect_and_get_payload(Config)
            ),
            ok
        end,
        fun(Trace0) ->
            Trace = ?of_kind(tdengine_connector_query_return, Trace0),
            ?assertMatch([#{result := {ok, #{<<"code">> := 0, <<"rows">> := 1}}}], Trace),
            ok
        end
    ),
    ok.

t_setup_via_http_api_and_publish(Config) ->
    BridgeType = ?config(tdengine_bridge_type, Config),
    Name = ?config(tdengine_name, Config),
    PgsqlConfig0 = ?config(tdengine_config, Config),
    PgsqlConfig = PgsqlConfig0#{
        <<"name">> => Name,
        <<"type">> => BridgeType
    },
    ?assertMatch(
        {ok, _},
        create_bridge_http(PgsqlConfig)
    ),
    SentData = #{payload => ?PAYLOAD, timestamp => 1668602148000},
    ?check_trace(
        begin
            ?wait_async_action(
                ?assertMatch(
                    {ok, #{<<"code">> := 0, <<"rows">> := 1}}, send_message(Config, SentData)
                ),
                #{?snk_kind := tdengine_connector_query_return},
                10_000
            ),
            ?assertMatch(
                ?PAYLOAD,
                connect_and_get_payload(Config)
            ),
            ok
        end,
        fun(Trace0) ->
            Trace = ?of_kind(tdengine_connector_query_return, Trace0),
            ?assertMatch([#{result := {ok, #{<<"code">> := 0, <<"rows">> := 1}}}], Trace),
            ok
        end
    ),
    ok.

t_get_status(Config) ->
    ?assertMatch(
        {ok, _},
        create_bridge(Config)
    ),
    ProxyPort = ?config(proxy_port, Config),
    ProxyHost = ?config(proxy_host, Config),
    ProxyName = ?config(proxy_name, Config),

    Name = ?config(tdengine_name, Config),
    BridgeType = ?config(tdengine_bridge_type, Config),
    ResourceID = emqx_bridge_resource:resource_id(BridgeType, Name),

    ?assertEqual({ok, connected}, emqx_resource_manager:health_check(ResourceID)),
    emqx_common_test_helpers:with_failure(down, ProxyName, ProxyHost, ProxyPort, fun() ->
        ?assertMatch(
            {ok, Status} when Status =:= disconnected orelse Status =:= connecting,
            emqx_resource_manager:health_check(ResourceID)
        )
    end),
    ok.

t_write_failure(Config) ->
    ProxyName = ?config(proxy_name, Config),
    ProxyPort = ?config(proxy_port, Config),
    ProxyHost = ?config(proxy_host, Config),
    {ok, _} = create_bridge(Config),
    SentData = #{payload => ?PAYLOAD, timestamp => 1668602148000},
    emqx_common_test_helpers:with_failure(down, ProxyName, ProxyHost, ProxyPort, fun() ->
        ?assertMatch({error, econnrefused}, send_message(Config, SentData))
    end),
    ok.

% This test doesn't work with batch enabled since it is not possible
% to set the timeout directly for batch queries
t_write_timeout(Config) ->
    ProxyName = ?config(proxy_name, Config),
    ProxyPort = ?config(proxy_port, Config),
    ProxyHost = ?config(proxy_host, Config),
    {ok, _} = create_bridge(Config),
    SentData = #{payload => ?PAYLOAD, timestamp => 1668602148000},
    emqx_common_test_helpers:with_failure(timeout, ProxyName, ProxyHost, ProxyPort, fun() ->
        ?assertMatch(
            {error, {resource_error, #{reason := timeout}}},
            query_resource(Config, {send_message, SentData})
        )
    end),
    ok.

t_simple_sql_query(Config) ->
    ?assertMatch(
        {ok, _},
        create_bridge(Config)
    ),
    Request = {query, <<"SELECT count(1) AS T">>},
    Result = query_resource(Config, Request),
    case ?config(enable_batch, Config) of
        true ->
            ?assertEqual({error, {unrecoverable_error, batch_prepare_not_implemented}}, Result);
        false ->
            ?assertMatch({ok, #{<<"code">> := 0, <<"data">> := [[1]]}}, Result)
    end,
    ok.

t_missing_data(Config) ->
    ?assertMatch(
        {ok, _},
        create_bridge(Config)
    ),
    Result = send_message(Config, #{}),
    ?assertMatch(
        {error, #{
            <<"code">> := 534,
            <<"desc">> := _
        }},
        Result
    ),
    ok.

t_bad_sql_parameter(Config) ->
    ?assertMatch(
        {ok, _},
        create_bridge(Config)
    ),
    Request = {sql, <<"">>, [bad_parameter]},
    Result = query_resource(Config, Request),
    case ?config(enable_batch, Config) of
        true ->
            ?assertEqual({error, {unrecoverable_error, invalid_request}}, Result);
        false ->
            ?assertMatch(
                {error, {unrecoverable_error, _}}, Result
            )
    end,
    ok.

to_bin(List) when is_list(List) ->
    unicode:characters_to_binary(List, utf8);
to_bin(Bin) when is_binary(Bin) ->
    Bin.
@@ -0,0 +1,22 @@
emqx_ee_connector_tdengine {

    server {
        desc {
            en: """
The IPv4 or IPv6 address or the hostname to connect to.<br/>
A host entry has the following form: `Host[:Port]`.<br/>
The TDengine default port 6041 is used if `[:Port]` is not specified.
"""
            zh: """
将要连接的 IPv4 或 IPv6 地址,或者主机名。<br/>
主机名具有以下形式:`Host[:Port]`。<br/>
如果未指定 `[:Port]`,则使用 TDengine 默认端口 6041。
"""
        }
        label: {
            en: "Server Host"
            zh: "服务器地址"
        }
    }

}
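The `Host[:Port]` form described above is resolved by the connector added below; a hedged sketch of that resolution (function and default taken from emqx_ee_connector_tdengine, the sample value is invented):

    %% Sketch only: a value without an explicit port picks up the 6041 default.
    {Host, Port} = emqx_schema:parse_server(<<"td.example.local">>, #{default_port => 6041}),
    %% Port is 6041 here; Host is the bare host part, which on_start/2 then hands to the client.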
@@ -2,6 +2,7 @@
{deps, [
    {hstreamdb_erl, {git, "https://github.com/hstreamdb/hstreamdb_erl.git", {tag, "0.2.5"}}},
    {influxdb, {git, "https://github.com/emqx/influxdb-client-erl", {tag, "1.1.8"}}},
    {tdengine, {git, "https://github.com/emqx/tdengine-client-erl", {tag, "0.1.5"}}},
    {emqx, {path, "../../apps/emqx"}}
]}.
@@ -1,12 +1,13 @@
{application, emqx_ee_connector, [
    {description, "EMQX Enterprise connectors"},
    {vsn, "0.1.4"},
    {vsn, "0.1.5"},
    {registered, []},
    {applications, [
        kernel,
        stdlib,
        hstreamdb_erl,
        influxdb,
        tdengine,
        wolff,
        brod
    ]},
@@ -0,0 +1,241 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%--------------------------------------------------------------------

-module(emqx_ee_connector_tdengine).

-behaviour(emqx_resource).

-include_lib("emqx_resource/include/emqx_resource.hrl").
-include_lib("typerefl/include/types.hrl").
-include_lib("emqx/include/logger.hrl").
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
-include_lib("hocon/include/hoconsc.hrl").

-export([roots/0, fields/1]).

%% `emqx_resource' API
-export([
    callback_mode/0,
    is_buffer_supported/0,
    on_start/2,
    on_stop/2,
    on_query/3,
    on_batch_query/3,
    on_get_status/2
]).

-export([connect/1, do_get_status/1, execute/3]).

-import(hoconsc, [mk/2, enum/1, ref/2]).

-define(TD_HOST_OPTIONS, #{
    default_port => 6041
}).

%%=====================================================================
%% Hocon schema
roots() ->
    [{config, #{type => hoconsc:ref(?MODULE, config)}}].

fields(config) ->
    [
        {server, server()}
        | add_default_username(emqx_connector_schema_lib:relational_db_fields())
    ].

add_default_username(Fields) ->
    lists:map(
        fun
            ({username, OrigUsernameFn}) ->
                {username, add_default_fn(OrigUsernameFn, <<"root">>)};
            (Field) ->
                Field
        end,
        Fields
    ).

add_default_fn(OrigFn, Default) ->
    fun
        (default) -> Default;
        (Field) -> OrigFn(Field)
    end.

server() ->
    Meta = #{desc => ?DESC("server")},
    emqx_schema:servers_sc(Meta, ?TD_HOST_OPTIONS).

%%========================================================================================
%% `emqx_resource' API
%%========================================================================================

callback_mode() -> always_sync.

is_buffer_supported() -> false.

on_start(
    InstanceId,
    #{
        server := Server,
        username := Username,
        password := Password,
        pool_size := PoolSize
    } = Config
) ->
    ?SLOG(info, #{
        msg => "starting_tdengine_connector",
        connector => InstanceId,
        config => emqx_misc:redact(Config)
    }),

    {Host, Port} = emqx_schema:parse_server(Server, ?TD_HOST_OPTIONS),
    Options = [
        {host, to_bin(Host)},
        {port, Port},
        {username, Username},
        {password, Password},
        {pool_size, PoolSize},
        {pool, binary_to_atom(InstanceId, utf8)}
    ],

    Prepares = parse_prepare_sql(Config),
    State = maps:merge(Prepares, #{poolname => InstanceId, query_opts => query_opts(Config)}),
    case emqx_plugin_libs_pool:start_pool(InstanceId, ?MODULE, Options) of
        ok ->
            {ok, State};
        Error ->
            Error
    end.

on_stop(InstanceId, #{poolname := PoolName} = _State) ->
    ?SLOG(info, #{
        msg => "stopping_tdengine_connector",
        connector => InstanceId
    }),
    emqx_plugin_libs_pool:stop_pool(PoolName).

on_query(InstanceId, {query, SQL}, State) ->
    do_query(InstanceId, SQL, State);
on_query(InstanceId, Request, State) ->
    %% The `emqx-tdengine` client only supports a single SQL command, so
    %% `on_query` and `on_batch_query` take the same path: collect all data
    %% into one SQL command and then call the insert API.
    on_batch_query(InstanceId, [Request], State).

on_batch_query(
    InstanceId,
    BatchReq,
    #{batch_inserts := Inserts, batch_params_tokens := ParamsTokens} = State
) ->
    case hd(BatchReq) of
        {Key, _} ->
            case maps:get(Key, Inserts, undefined) of
                undefined ->
                    {error, {unrecoverable_error, batch_prepare_not_implemented}};
                InsertSQL ->
                    Tokens = maps:get(Key, ParamsTokens),
                    do_batch_insert(InstanceId, BatchReq, InsertSQL, Tokens, State)
            end;
        Request ->
            LogMeta = #{connector => InstanceId, first_request => Request, state => State},
            ?SLOG(error, LogMeta#{msg => "invalid request"}),
            {error, {unrecoverable_error, invalid_request}}
    end.

on_get_status(_InstanceId, #{poolname := Pool}) ->
    Health = emqx_plugin_libs_pool:health_check_ecpool_workers(Pool, fun ?MODULE:do_get_status/1),
    status_result(Health).

do_get_status(Conn) ->
    case tdengine:insert(Conn, "select server_version()", []) of
        {ok, _} -> true;
        _ -> false
    end.

status_result(_Status = true) -> connected;
status_result(_Status = false) -> connecting.

%%========================================================================================
%% Helper fns
%%========================================================================================

do_batch_insert(InstanceId, BatchReqs, InsertPart, Tokens, State) ->
    SQL = emqx_plugin_libs_rule:proc_batch_sql(BatchReqs, InsertPart, Tokens),
    do_query(InstanceId, SQL, State).

do_query(InstanceId, Query, #{poolname := PoolName, query_opts := Opts} = State) ->
    ?TRACE(
        "QUERY",
        "tdengine_connector_received",
        #{connector => InstanceId, query => Query, state => State}
    ),
    Result = ecpool:pick_and_do(PoolName, {?MODULE, execute, [Query, Opts]}, no_handover),

    case Result of
        {error, Reason} ->
            ?tp(
                tdengine_connector_query_return,
                #{error => Reason}
            ),
            ?SLOG(error, #{
                msg => "tdengine_connector_do_query_failed",
                connector => InstanceId,
                query => Query,
                reason => Reason
            }),
            Result;
        _ ->
            ?tp(
                tdengine_connector_query_return,
                #{result => Result}
            ),
            Result
    end.

execute(Conn, Query, Opts) ->
    tdengine:insert(Conn, Query, Opts).

connect(Opts) ->
    tdengine:start_link(Opts).

query_opts(#{database := Database} = _Opts) ->
    [{db_name, Database}].

parse_prepare_sql(Config) ->
    SQL =
        case maps:get(sql, Config, undefined) of
            undefined -> #{};
            Template -> #{send_message => Template}
        end,

    parse_batch_prepare_sql(maps:to_list(SQL), #{}, #{}).

parse_batch_prepare_sql([{Key, H} | T], BatchInserts, BatchTks) ->
    case emqx_plugin_libs_rule:detect_sql_type(H) of
        {ok, select} ->
            parse_batch_prepare_sql(T, BatchInserts, BatchTks);
        {ok, insert} ->
            case emqx_plugin_libs_rule:split_insert_sql(H) of
                {ok, {InsertSQL, Params}} ->
                    ParamsTks = emqx_plugin_libs_rule:preproc_tmpl(Params),
                    parse_batch_prepare_sql(
                        T,
                        BatchInserts#{Key => InsertSQL},
                        BatchTks#{Key => ParamsTks}
                    );
                {error, Reason} ->
                    ?SLOG(error, #{msg => "split sql failed", sql => H, reason => Reason}),
                    parse_batch_prepare_sql(T, BatchInserts, BatchTks)
            end;
        {error, Reason} ->
            ?SLOG(error, #{msg => "detect sql type failed", sql => H, reason => Reason}),
            parse_batch_prepare_sql(T, BatchInserts, BatchTks)
    end;
parse_batch_prepare_sql([], BatchInserts, BatchTks) ->
    #{
        batch_inserts => BatchInserts,
        batch_params_tokens => BatchTks
    }.

to_bin(List) when is_list(List) ->
    unicode:characters_to_binary(List, utf8).
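For readers of on_batch_query/3 above, a hedged sketch of the state parse_prepare_sql/1 produces for the default send_message template (the field names come from the code; the literal insert part and the token value are illustrative):

    %% Sketch of the batching-related part of the connector state (illustrative only).
    #{
        batch_inserts => #{
            send_message =>
                <<"insert into mqtt.t_mqtt_msg(ts, msgid, mqtt_topic, qos, payload, arrived)">>
        },
        batch_params_tokens => #{send_message => Tokens},
        poolname => InstanceId,
        query_opts => [{db_name, <<"mqtt">>}]
    }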
@@ -153,6 +153,9 @@ for dep in ${CT_DEPS}; do
            NEED_ROOT=yes
            FILES+=( '.ci/docker-compose-file/docker-compose-kafka.yaml' )
            ;;
        tdengine)
            FILES+=( '.ci/docker-compose-file/docker-compose-tdengine-restful.yaml' )
            ;;
        *)
            echo "unknown_ct_dependency $dep"
            exit 1
@@ -265,3 +265,4 @@ GSSAPI
keytab
jq
nif
TDengine