Merge pull request #11472 from thalesmg/sync-r52-master-20230817
sync `release-52` to `master`
Commit 908721afce
@@ -10,6 +10,8 @@ CASSANDRA_TAG=3.11.6
 MINIO_TAG=RELEASE.2023-03-20T20-16-18Z
 OPENTS_TAG=9aa7f88
 KINESIS_TAG=2.1
+HSTREAMDB_TAG=v0.15.0
+HSTREAMDB_ZK_TAG=3.8.1

 MS_IMAGE_ADDR=mcr.microsoft.com/mssql/server
 SQLSERVER_TAG=2019-CU19-ubuntu-20.04
@@ -2,11 +2,13 @@ version: "3.5"

 services:
   hserver:
-    image: hstreamdb/hstream:v0.15.0
+    image: hstreamdb/hstream:${HSTREAMDB_TAG}
     container_name: hstreamdb
     depends_on:
-      - zookeeper
-      - hstore
+      zookeeper:
+        condition: service_started
+      hstore:
+        condition: service_healthy
     # ports:
     #   - "127.0.0.1:6570:6570"
     expose:

@@ -37,7 +39,7 @@ services:
       --io-tasks-network emqx_bridge

   hstore:
-    image: hstreamdb/hstream:v0.15.0
+    image: hstreamdb/hstream:${HSTREAMDB_TAG}
     networks:
       - emqx_bridge
     volumes:

@@ -53,10 +55,17 @@ services:
       --use-tcp --tcp-host $$(hostname -I | awk '{print $$1}') \
       --user-admin-port 6440 \
       --param enable-dscp-reflection=false \
-      --no-interactive
+      --no-interactive \
+      > /data/store/hstore.log 2>&1
+    healthcheck:
+      test: ["CMD", "grep", "LogDevice Cluster running", "/data/store/hstore.log"]
+      interval: 10s
+      timeout: 10s
+      retries: 60
+      start_period: 60s

   zookeeper:
-    image: zookeeper
+    image: zookeeper:${HSTREAMDB_ZK_TAG}
     expose:
       - 2181
     networks:
Makefile (+2 -2)

@@ -16,7 +16,7 @@ endif
 # Dashboard version
 # from https://github.com/emqx/emqx-dashboard5
 export EMQX_DASHBOARD_VERSION ?= v1.3.2
-export EMQX_EE_DASHBOARD_VERSION ?= e1.1.1
+export EMQX_EE_DASHBOARD_VERSION ?= e1.2.0-beta.4

 # `:=` should be used here, otherwise the `$(shell ...)` will be executed every time when the variable is used
 # In make 4.4+, for backward-compatibility the value from the original environment is used.
@@ -35,7 +35,7 @@
 -define(EMQX_RELEASE_CE, "5.1.5-build.3").

 %% Enterprise edition
--define(EMQX_RELEASE_EE, "5.2.0-alpha.1").
+-define(EMQX_RELEASE_EE, "5.2.0-alpha.3").

 %% The HTTP API version
 -define(EMQX_API_VERSION, "5.0").
@@ -0,0 +1,242 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------

%% @doc Topic index for matching topics to topic filters.
%%
%% Works on top of ETS ordered_set table. Keys are tuples constructed from
%% parsed topic filters and record IDs, wrapped in a tuple to order them
%% strictly greater than unit tuple (`{}`). Existing table may be used if
%% existing keys will not collide with index keys.
%%
%% Designed to effectively answer questions like:
%% 1. Does any topic filter match given topic?
%% 2. Which records are associated with topic filters matching given topic?
%% 3. Which topic filters match given topic?
%% 4. Which record IDs are associated with topic filters matching given topic?

-module(emqx_topic_index).

-export([new/0]).
-export([insert/4]).
-export([delete/3]).
-export([match/2]).
-export([matches/3]).

-export([get_id/1]).
-export([get_topic/1]).
-export([get_record/2]).

-type word() :: binary() | '+' | '#'.
-type key(ID) :: {[word()], {ID}}.
-type match(ID) :: key(ID).

%% @doc Create a new ETS table suitable for topic index.
%% Usable mostly for testing purposes.
-spec new() -> ets:table().
new() ->
    ets:new(?MODULE, [public, ordered_set, {read_concurrency, true}]).

%% @doc Insert a new entry into the index that associates given topic filter to given
%% record ID, and attaches arbitrary record to the entry. This allows users to choose
%% between regular and "materialized" indexes, for example.
-spec insert(emqx_types:topic(), _ID, _Record, ets:table()) -> true.
insert(Filter, ID, Record, Tab) ->
    ets:insert(Tab, {{words(Filter), {ID}}, Record}).

%% @doc Delete an entry from the index that associates given topic filter to given
%% record ID. Deleting non-existing entry is not an error.
-spec delete(emqx_types:topic(), _ID, ets:table()) -> true.
delete(Filter, ID, Tab) ->
    ets:delete(Tab, {words(Filter), {ID}}).

%% @doc Match given topic against the index and return the first match, or `false` if
%% no match is found.
-spec match(emqx_types:topic(), ets:table()) -> match(_ID) | false.
match(Topic, Tab) ->
    {Words, RPrefix} = match_init(Topic),
    match(Words, RPrefix, Tab).

match(Words, RPrefix, Tab) ->
    Prefix = lists:reverse(RPrefix),
    match(ets:next(Tab, {Prefix, {}}), Prefix, Words, RPrefix, Tab).

match(K, Prefix, Words, RPrefix, Tab) ->
    case match_next(Prefix, K, Words) of
        true ->
            K;
        skip ->
            match(ets:next(Tab, K), Prefix, Words, RPrefix, Tab);
        stop ->
            false;
        Matched ->
            match_rest(Matched, Words, RPrefix, Tab)
    end.

match_rest([W1 | [W2 | _] = SLast], [W1 | [W2 | _] = Rest], RPrefix, Tab) ->
    % NOTE
    % Fast-forward through identical words in the topic and the last key suffixes.
    % This should save us a few redundant `ets:next` calls at the cost of slightly
    % more complex match patterns.
    match_rest(SLast, Rest, [W1 | RPrefix], Tab);
match_rest(SLast, [W | Rest], RPrefix, Tab) when is_list(SLast) ->
    match(Rest, [W | RPrefix], Tab);
match_rest(plus, [W | Rest], RPrefix, Tab) ->
    % NOTE
    % There's '+' in the key suffix, meaning we should consider 2 alternatives:
    % 1. Match the rest of the topic as if there was '+' in the current position.
    % 2. Skip this key and try to match the topic as it is.
    case match(Rest, ['+' | RPrefix], Tab) of
        Match = {_, _} ->
            Match;
        false ->
            match(Rest, [W | RPrefix], Tab)
    end;
match_rest(_, [], _RPrefix, _Tab) ->
    false.

%% @doc Match given topic against the index and return _all_ matches.
%% If `unique` option is given, return only unique matches by record ID.
-spec matches(emqx_types:topic(), ets:table(), _Opts :: [unique]) -> [match(_ID)].
matches(Topic, Tab, Opts) ->
    {Words, RPrefix} = match_init(Topic),
    AccIn =
        case Opts of
            [unique | _] -> #{};
            [] -> []
        end,
    Matches = matches(Words, RPrefix, AccIn, Tab),
    case Matches of
        #{} -> maps:values(Matches);
        _ -> Matches
    end.

matches(Words, RPrefix, Acc, Tab) ->
    Prefix = lists:reverse(RPrefix),
    matches(ets:next(Tab, {Prefix, {}}), Prefix, Words, RPrefix, Acc, Tab).

matches(Words, RPrefix, K = {Filter, _}, Acc, Tab) ->
    Prefix = lists:reverse(RPrefix),
    case Prefix > Filter of
        true ->
            % NOTE: Prefix already greater than the last key seen, need to `ets:next/2`.
            matches(ets:next(Tab, {Prefix, {}}), Prefix, Words, RPrefix, Acc, Tab);
        false ->
            % NOTE: Prefix is still less than or equal to the last key seen, reuse it.
            matches(K, Prefix, Words, RPrefix, Acc, Tab)
    end.

matches(K, Prefix, Words, RPrefix, Acc, Tab) ->
    case match_next(Prefix, K, Words) of
        true ->
            matches(ets:next(Tab, K), Prefix, Words, RPrefix, match_add(K, Acc), Tab);
        skip ->
            matches(ets:next(Tab, K), Prefix, Words, RPrefix, Acc, Tab);
        stop ->
            Acc;
        Matched ->
            % NOTE: Preserve next key on the stack to save on `ets:next/2` calls.
            matches_rest(Matched, Words, RPrefix, K, Acc, Tab)
    end.

matches_rest([W1 | [W2 | _] = SLast], [W1 | [W2 | _] = Rest], RPrefix, K, Acc, Tab) ->
    % NOTE
    % Fast-forward through identical words in the topic and the last key suffixes.
    % This should save us a few redundant `ets:next` calls at the cost of slightly
    % more complex match patterns.
    matches_rest(SLast, Rest, [W1 | RPrefix], K, Acc, Tab);
matches_rest(SLast, [W | Rest], RPrefix, K, Acc, Tab) when is_list(SLast) ->
    matches(Rest, [W | RPrefix], K, Acc, Tab);
matches_rest(plus, [W | Rest], RPrefix, K, Acc, Tab) ->
    % NOTE
    % There's '+' in the key suffix, meaning we should accumulate all matches from
    % each of 2 branches:
    % 1. Match the rest of the topic as if there was '+' in the current position.
    % 2. Skip this key and try to match the topic as it is.
    NAcc = matches(Rest, ['+' | RPrefix], K, Acc, Tab),
    matches(Rest, [W | RPrefix], K, NAcc, Tab);
matches_rest(_, [], _RPrefix, _K, Acc, _Tab) ->
    Acc.

match_add(K = {_Filter, ID}, Acc = #{}) ->
    % NOTE: ensuring uniqueness by record ID
    Acc#{ID => K};
match_add(K, Acc) ->
    [K | Acc].

match_next(Prefix, {Filter, _ID}, Suffix) ->
    match_filter(Prefix, Filter, Suffix);
match_next(_, '$end_of_table', _) ->
    stop.

match_filter([], [], []) ->
    % NOTE: we matched the topic exactly
    true;
match_filter([], [], _Suffix) ->
    % NOTE: we matched the prefix, but there may be more matches next
    skip;
match_filter([], ['#'], _Suffix) ->
    % NOTE: naturally, '#' < '+', so this is already optimal for `match/2`
    true;
match_filter([], ['+' | _], _Suffix) ->
    plus;
match_filter([], [_H | _] = Rest, _Suffix) ->
    Rest;
match_filter([H | T1], [H | T2], Suffix) ->
    match_filter(T1, T2, Suffix);
match_filter([H1 | _], [H2 | _], _Suffix) when H2 > H1 ->
    % NOTE: we're strictly past the prefix, no need to continue
    stop.

match_init(Topic) ->
    case words(Topic) of
        [W = <<"$", _/bytes>> | Rest] ->
            % NOTE
            % This will effectively skip attempts to match special topics to `#` or `+/...`.
            {Rest, [W]};
        Words ->
            {Words, []}
    end.

%% @doc Extract record ID from the match.
-spec get_id(match(ID)) -> ID.
get_id({_Filter, {ID}}) ->
    ID.

%% @doc Extract topic (or topic filter) from the match.
-spec get_topic(match(_ID)) -> emqx_types:topic().
get_topic({Filter, _ID}) ->
    emqx_topic:join(Filter).

%% @doc Fetch the record associated with the match.
%% NOTE: Only really useful for ETS tables where the record ID is the first element.
-spec get_record(match(_ID), ets:table()) -> _Record.
get_record(K, Tab) ->
    ets:lookup_element(Tab, K, 2).

%%

-spec words(emqx_types:topic()) -> [word()].
words(Topic) when is_binary(Topic) ->
    % NOTE
    % This is almost identical to `emqx_topic:words/1`, but it doesn't convert empty
    % tokens to ''. This is needed to keep ordering of words consistent with what
    % `match_filter/3` expects.
    [word(W) || W <- emqx_topic:tokens(Topic)].

-spec word(binary()) -> word().
word(<<"+">>) -> '+';
word(<<"#">>) -> '#';
word(Bin) -> Bin.
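The module above is self-contained, so a short usage sketch may help review. This is illustrative only (the topics and IDs are made up, not taken from the PR); it also demonstrates the key-ordering property the module doc relies on, namely that a 1-tuple such as {ID} compares strictly greater than the unit tuple {}, which is what makes ets:next(Tab, {Prefix, {}}) land on the first real key sharing that prefix:

    %% Usage sketch for the new index (illustrative names, runnable in an Erlang shell).
    Tab = emqx_topic_index:new(),
    true = emqx_topic_index:insert(<<"t/+/stats">>, id_a, <<"rec_a">>, Tab),
    true = emqx_topic_index:insert(<<"t/#">>, id_b, <<"rec_b">>, Tab),
    %% First match: '#' sorts before '+' in Erlang term order, so <<"t/#">> wins.
    id_b = emqx_topic_index:get_id(emqx_topic_index:match(<<"t/1/stats">>, Tab)),
    %% All matches, deduplicated by record ID.
    Matches = emqx_topic_index:matches(<<"t/1/stats">>, Tab, [unique]),
    [<<"t/#">>, <<"t/+/stats">>] =
        lists:sort([emqx_topic_index:get_topic(M) || M <- Matches]),
    %% The ordering property behind ets:next(Tab, {Prefix, {}}): {} < {id_a}.
    true = {[<<"t">>], {}} < {[<<"t">>], {id_a}}.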
@@ -0,0 +1,331 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------

-module(emqx_topic_index_SUITE).

-compile(export_all).
-compile(nowarn_export_all).

-include_lib("proper/include/proper.hrl").
-include_lib("eunit/include/eunit.hrl").

-import(emqx_proper_types, [scaled/2]).

all() ->
    emqx_common_test_helpers:all(?MODULE).

t_insert(_) ->
    Tab = emqx_topic_index:new(),
    true = emqx_topic_index:insert(<<"sensor/1/metric/2">>, t_insert_1, <<>>, Tab),
    true = emqx_topic_index:insert(<<"sensor/+/#">>, t_insert_2, <<>>, Tab),
    true = emqx_topic_index:insert(<<"sensor/#">>, t_insert_3, <<>>, Tab),
    ?assertEqual(<<"sensor/#">>, topic(match(<<"sensor">>, Tab))),
    ?assertEqual(t_insert_3, id(match(<<"sensor">>, Tab))).

t_match(_) ->
    Tab = emqx_topic_index:new(),
    true = emqx_topic_index:insert(<<"sensor/1/metric/2">>, t_match_1, <<>>, Tab),
    true = emqx_topic_index:insert(<<"sensor/+/#">>, t_match_2, <<>>, Tab),
    true = emqx_topic_index:insert(<<"sensor/#">>, t_match_3, <<>>, Tab),
    ?assertMatch(
        [<<"sensor/#">>, <<"sensor/+/#">>],
        [topic(M) || M <- matches(<<"sensor/1">>, Tab)]
    ).

t_match2(_) ->
    Tab = emqx_topic_index:new(),
    true = emqx_topic_index:insert(<<"#">>, t_match2_1, <<>>, Tab),
    true = emqx_topic_index:insert(<<"+/#">>, t_match2_2, <<>>, Tab),
    true = emqx_topic_index:insert(<<"+/+/#">>, t_match2_3, <<>>, Tab),
    ?assertEqual(
        [<<"#">>, <<"+/#">>, <<"+/+/#">>],
        [topic(M) || M <- matches(<<"a/b/c">>, Tab)]
    ),
    ?assertEqual(
        false,
        emqx_topic_index:match(<<"$SYS/broker/zenmq">>, Tab)
    ).

t_match3(_) ->
    Tab = emqx_topic_index:new(),
    Records = [
        {<<"d/#">>, t_match3_1},
        {<<"a/b/+">>, t_match3_2},
        {<<"a/#">>, t_match3_3},
        {<<"#">>, t_match3_4},
        {<<"$SYS/#">>, t_match3_sys}
    ],
    lists:foreach(
        fun({Topic, ID}) -> emqx_topic_index:insert(Topic, ID, <<>>, Tab) end,
        Records
    ),
    Matched = matches(<<"a/b/c">>, Tab),
    case length(Matched) of
        3 -> ok;
        _ -> error({unexpected, Matched})
    end,
    ?assertEqual(
        t_match3_sys,
        id(match(<<"$SYS/a/b/c">>, Tab))
    ).

t_match4(_) ->
    Tab = emqx_topic_index:new(),
    Records = [{<<"/#">>, t_match4_1}, {<<"/+">>, t_match4_2}, {<<"/+/a/b/c">>, t_match4_3}],
    lists:foreach(
        fun({Topic, ID}) -> emqx_topic_index:insert(Topic, ID, <<>>, Tab) end,
        Records
    ),
    ?assertEqual(
        [<<"/#">>, <<"/+">>],
        [topic(M) || M <- matches(<<"/">>, Tab)]
    ),
    ?assertEqual(
        [<<"/#">>, <<"/+/a/b/c">>],
        [topic(M) || M <- matches(<<"/0/a/b/c">>, Tab)]
    ).

t_match5(_) ->
    Tab = emqx_topic_index:new(),
    T = <<"a/b/c/d/e/f/g/h/i/j/k/l/m/n/o/p/q/r/s/t/u/v/w/x/y/z">>,
    Records = [
        {<<"#">>, t_match5_1},
        {<<T/binary, "/#">>, t_match5_2},
        {<<T/binary, "/+">>, t_match5_3}
    ],
    lists:foreach(
        fun({Topic, ID}) -> emqx_topic_index:insert(Topic, ID, <<>>, Tab) end,
        Records
    ),
    ?assertEqual(
        [<<"#">>, <<T/binary, "/#">>],
        [topic(M) || M <- matches(T, Tab)]
    ),
    ?assertEqual(
        [<<"#">>, <<T/binary, "/#">>, <<T/binary, "/+">>],
        [topic(M) || M <- matches(<<T/binary, "/1">>, Tab)]
    ).

t_match6(_) ->
    Tab = emqx_topic_index:new(),
    T = <<"a/b/c/d/e/f/g/h/i/j/k/l/m/n/o/p/q/r/s/t/u/v/w/x/y/z">>,
    W = <<"+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/#">>,
    emqx_topic_index:insert(W, ID = t_match6, <<>>, Tab),
    ?assertEqual(ID, id(match(T, Tab))).

t_match7(_) ->
    Tab = emqx_topic_index:new(),
    T = <<"a/b/c/d/e/f/g/h/i/j/k/l/m/n/o/p/q/r/s/t/u/v/w/x/y/z">>,
    W = <<"a/+/c/+/e/+/g/+/i/+/k/+/m/+/o/+/q/+/s/+/u/+/w/+/y/+/#">>,
    emqx_topic_index:insert(W, t_match7, <<>>, Tab),
    ?assertEqual(W, topic(match(T, Tab))).

t_match_fast_forward(_) ->
    Tab = emqx_topic_index:new(),
    emqx_topic_index:insert(<<"a/b/1/2/3/4/5/6/7/8/9/#">>, id1, <<>>, Tab),
    emqx_topic_index:insert(<<"z/y/x/+/+">>, id2, <<>>, Tab),
    emqx_topic_index:insert(<<"a/b/c/+">>, id3, <<>>, Tab),
    % dbg:tracer(),
    % dbg:p(all, c),
    % dbg:tpl({ets, next, '_'}, x),
    ?assertEqual(id1, id(match(<<"a/b/1/2/3/4/5/6/7/8/9/0">>, Tab))),
    ?assertEqual([id1], [id(M) || M <- matches(<<"a/b/1/2/3/4/5/6/7/8/9/0">>, Tab)]).

t_match_unique(_) ->
    Tab = emqx_topic_index:new(),
    emqx_topic_index:insert(<<"a/b/c">>, t_match_id1, <<>>, Tab),
    emqx_topic_index:insert(<<"a/b/+">>, t_match_id1, <<>>, Tab),
    emqx_topic_index:insert(<<"a/b/c/+">>, t_match_id2, <<>>, Tab),
    ?assertEqual(
        [t_match_id1, t_match_id1],
        [id(M) || M <- emqx_topic_index:matches(<<"a/b/c">>, Tab, [])]
    ),
    ?assertEqual(
        [t_match_id1],
        [id(M) || M <- emqx_topic_index:matches(<<"a/b/c">>, Tab, [unique])]
    ).

t_match_wildcard_edge_cases(_) ->
    CommonTopics = [
        <<"a/b">>,
        <<"a/b/#">>,
        <<"a/b/#">>,
        <<"a/b/c">>,
        <<"a/b/+">>,
        <<"a/b/d">>,
        <<"a/+/+">>,
        <<"a/+/#">>
    ],
    Datasets =
        [
            %% Topics, TopicName, Results
            {CommonTopics, <<"a/b/c">>, [2, 3, 4, 5, 7, 8]},
            {CommonTopics, <<"a/b">>, [1, 2, 3, 8]},
            {[<<"+/b/c">>, <<"/">>], <<"a/b/c">>, [1]},
            {[<<"#">>, <<"/">>], <<"a">>, [1]},
            {[<<"/">>, <<"+">>], <<"a">>, [2]}
        ],
    F = fun({Topics, TopicName, Expected}) ->
        Tab = emqx_topic_index:new(),
        _ = [emqx_topic_index:insert(T, N, <<>>, Tab) || {N, T} <- lists:enumerate(Topics)],
        ?assertEqual(
            lists:last(Expected),
            id(emqx_topic_index:match(TopicName, Tab)),
            #{"Base topics" => Topics, "Topic name" => TopicName}
        ),
        ?assertEqual(
            Expected,
            [id(M) || M <- emqx_topic_index:matches(TopicName, Tab, [unique])],
            #{"Base topics" => Topics, "Topic name" => TopicName}
        )
    end,
    lists:foreach(F, Datasets).

t_prop_matches(_) ->
    ?assert(
        proper:quickcheck(
            topic_matches_prop(),
            [{max_size, 100}, {numtests, 100}]
        )
    ),
    Statistics = [{C, account(C)} || C <- [filters, topics, matches, maxhits]],
    ct:pal("Statistics: ~p", [maps:from_list(Statistics)]).

topic_matches_prop() ->
    ?FORALL(
        % Generate a longer list of topics and a shorter list of topic filter patterns.
        #{
            topics := TTopics,
            patterns := Pats
        },
        emqx_proper_types:fixedmap(#{
            % NOTE
            % Beware adding non-empty constraint, proper will have a hard time with `topic_t/1`
            % for some reason.
            topics => scaled(4, list(topic_t([1, 2, 3, 4]))),
            patterns => list(topic_filter_pattern_t())
        }),
        begin
            Tab = emqx_topic_index:new(),
            Topics = [emqx_topic:join(T) || T <- TTopics],
            % Produce topic filters from generated topics and patterns.
            % Number of filters is equal to the number of patterns, most of the time.
            Filters = lists:enumerate(mk_filters(Pats, TTopics)),
            _ = [emqx_topic_index:insert(F, N, <<>>, Tab) || {N, F} <- Filters],
            % Gather some basic statistics
            _ = account(filters, length(Filters)),
            _ = account(topics, NTopics = length(Topics)),
            _ = account(maxhits, NTopics * NTopics),
            % Verify that matching each topic against index returns the same results as
            % matching it against the list of filters one by one.
            lists:all(
                fun(Topic) ->
                    Ids1 = [id(M) || M <- emqx_topic_index:matches(Topic, Tab, [unique])],
                    Ids2 = lists:filtermap(
                        fun({N, F}) ->
                            case emqx_topic:match(Topic, F) of
                                true -> {true, N};
                                false -> false
                            end
                        end,
                        Filters
                    ),
                    % Account a number of matches to compute hitrate later
                    _ = account(matches, length(Ids1)),
                    case (Ids1 -- Ids2) ++ (Ids2 -- Ids1) of
                        [] ->
                            true;
                        [_ | _] = _Differences ->
                            ct:pal(
                                "Topic name: ~p~n"
                                "Index results: ~p~n"
                                "Topic match results: ~p~n",
                                [Topic, Ids1, Ids2]
                            ),
                            false
                    end
                end,
                Topics
            )
        end
    ).

mk_filters([Pat | PRest], [Topic | TRest]) ->
    [emqx_topic:join(mk_topic_filter(Pat, Topic)) | mk_filters(PRest, TRest)];
mk_filters(_, _) ->
    [].

account(Counter, N) ->
    put({?MODULE, Counter}, account(Counter) + N).

account(Counter) ->
    emqx_maybe:define(get({?MODULE, Counter}), 0).

%%

match(T, Tab) ->
    emqx_topic_index:match(T, Tab).

matches(T, Tab) ->
    lists:sort(emqx_topic_index:matches(T, Tab, [])).

id(Match) ->
    emqx_topic_index:get_id(Match).

topic(Match) ->
    emqx_topic_index:get_topic(Match).

%%

topic_t(EntropyWeights) ->
    EWLast = lists:last(EntropyWeights),
    ?LET(L, scaled(1 / 4, list(EWLast)), begin
        EWs = lists:sublist(EntropyWeights ++ L, length(L)),
        ?SIZED(S, [oneof([topic_level_t(S * EW), topic_level_fixed_t()]) || EW <- EWs])
    end).

topic_level_t(Entropy) ->
    S = floor(1 + math:log2(Entropy) / 4),
    ?LET(I, range(1, Entropy), iolist_to_binary(io_lib:format("~*.16.0B", [S, I]))).

topic_level_fixed_t() ->
    oneof([
        <<"foo">>,
        <<"bar">>,
        <<"baz">>,
        <<"xyzzy">>
    ]).

topic_filter_pattern_t() ->
    list(topic_level_pattern_t()).

topic_level_pattern_t() ->
    frequency([
        {5, level},
        {2, '+'},
        {1, '#'}
    ]).

mk_topic_filter([], _) ->
    [];
mk_topic_filter(_, []) ->
    [];
mk_topic_filter(['#' | _], _) ->
    ['#'];
mk_topic_filter(['+' | Rest], [_ | Levels]) ->
    ['+' | mk_topic_filter(Rest, Levels)];
mk_topic_filter([level | Rest], [L | Levels]) ->
    [L | mk_topic_filter(Rest, Levels)].
@@ -205,7 +205,7 @@ get_topic(Topic, ConnectorState) ->
     Path = <<"/v1/projects/", ProjectId/binary, "/topics/", Topic/binary>>,
     Body = <<>>,
     PreparedRequest = {prepared_request, {Method, Path, Body}},
-    query_sync(PreparedRequest, ConnectorState).
+    ?MODULE:query_sync(PreparedRequest, ConnectorState).

 %%-------------------------------------------------------------------------------------------------
 %% Helper fns
@@ -217,7 +217,9 @@ handle_continue(?ensure_subscription, State0) ->
             {noreply, State0, {continue, ?ensure_subscription}};
         not_found ->
             %% there's nothing much to do if the topic suddenly doesn't exist anymore.
-            {stop, {error, topic_not_found}, State0}
+            {stop, {error, topic_not_found}, State0};
+        permission_denied ->
+            {stop, {error, permission_denied}, State0}
     end;
 handle_continue(?patch_subscription, State0) ->
     ?tp(gcp_pubsub_consumer_worker_patch_subscription_enter, #{}),

@@ -291,14 +293,17 @@ handle_info(Msg, State0) ->
     }),
     {noreply, State0}.

-terminate({error, topic_not_found} = _Reason, State) ->
+terminate({error, Reason}, State) when
+    Reason =:= topic_not_found;
+    Reason =:= permission_denied
+->
     #{
         instance_id := InstanceId,
         topic := _Topic
     } = State,
     optvar:unset(?OPTVAR_SUB_OK(self())),
-    emqx_bridge_gcp_pubsub_impl_consumer:mark_topic_as_nonexistent(InstanceId),
-    ?tp(gcp_pubsub_consumer_worker_terminate, #{reason => _Reason, topic => _Topic}),
+    emqx_bridge_gcp_pubsub_impl_consumer:mark_as_unhealthy(InstanceId, Reason),
+    ?tp(gcp_pubsub_consumer_worker_terminate, #{reason => {error, Reason}, topic => _Topic}),
     ok;
 terminate(_Reason, _State) ->
     optvar:unset(?OPTVAR_SUB_OK(self())),

@@ -329,7 +334,8 @@ ensure_pull_timer(State = #{pull_timer := TRef}) when is_reference(TRef) ->
 ensure_pull_timer(State = #{pull_retry_interval := PullRetryInterval}) ->
     State#{pull_timer := emqx_utils:start_timer(PullRetryInterval, pull)}.

--spec ensure_subscription_exists(state()) -> continue | retry | not_found | already_exists.
+-spec ensure_subscription_exists(state()) ->
+    continue | retry | not_found | permission_denied | already_exists.
 ensure_subscription_exists(State) ->
     ?tp(gcp_pubsub_consumer_worker_create_subscription_enter, #{}),
     #{

@@ -367,6 +373,17 @@ ensure_subscription_exists(State) ->
                 }
             ),
             not_found;
+        {error, #{status_code := 403}} ->
+            %% permission denied
+            ?tp(
+                warning,
+                "gcp_pubsub_consumer_worker_permission_denied",
+                #{
+                    instance_id => InstanceId,
+                    topic => Topic
+                }
+            ),
+            permission_denied;
         {ok, #{status_code := 200}} ->
             ?tp(
                 debug,
@@ -17,9 +17,9 @@

 %% health check API
 -export([
-    mark_topic_as_nonexistent/1,
-    unset_nonexistent_topic/1,
-    is_nonexistent_topic/1
+    mark_as_unhealthy/2,
+    clear_unhealthy/1,
+    check_if_unhealthy/1
 ]).

 -include_lib("emqx/include/logger.hrl").

@@ -47,11 +47,15 @@

 -define(AUTO_RECONNECT_S, 2).
 -define(DEFAULT_FORGET_INTERVAL, timer:seconds(60)).
--define(OPTVAR_TOPIC_NOT_FOUND(INSTANCE_ID), {?MODULE, topic_not_found, INSTANCE_ID}).
+-define(OPTVAR_UNHEALTHY(INSTANCE_ID), {?MODULE, topic_not_found, INSTANCE_ID}).
 -define(TOPIC_MESSAGE,
     "GCP PubSub topics are invalid. Please check the logs, check if the "
     "topics exist in GCP and if the service account has permissions to use them."
 ).
+-define(PERMISSION_MESSAGE,
+    "Permission denied while verifying topic existence. Please check that the "
+    "provided service account has the correct permissions configured."
+).

 %%-------------------------------------------------------------------------------------------------
 %% `emqx_resource' API

@@ -77,7 +81,7 @@ on_start(InstanceId, Config0) ->
 -spec on_stop(resource_id(), state()) -> ok | {error, term()}.
 on_stop(InstanceId, _State) ->
     ?tp(gcp_pubsub_consumer_stop_enter, #{}),
-    unset_nonexistent_topic(InstanceId),
+    clear_unhealthy(InstanceId),
     ok = stop_consumers(InstanceId),
     emqx_bridge_gcp_pubsub_client:stop(InstanceId).

@@ -85,10 +89,12 @@ on_stop(InstanceId, _State) ->
 on_get_status(InstanceId, State) ->
     %% We need to check this flag separately because the workers might be gone when we
     %% check them.
-    case is_nonexistent_topic(InstanceId) of
-        true ->
+    case check_if_unhealthy(InstanceId) of
+        {error, topic_not_found} ->
             {disconnected, State, {unhealthy_target, ?TOPIC_MESSAGE}};
-        false ->
+        {error, permission_denied} ->
+            {disconnected, State, {unhealthy_target, ?PERMISSION_MESSAGE}};
+        ok ->
             #{client := Client} = State,
             check_workers(InstanceId, Client)
     end.

@@ -97,24 +103,24 @@ on_get_status(InstanceId, State) ->
 %% Health check API (signalled by consumer worker)
 %%-------------------------------------------------------------------------------------------------

--spec mark_topic_as_nonexistent(resource_id()) -> ok.
-mark_topic_as_nonexistent(InstanceId) ->
-    optvar:set(?OPTVAR_TOPIC_NOT_FOUND(InstanceId), true),
+-spec mark_as_unhealthy(resource_id(), topic_not_found | permission_denied) -> ok.
+mark_as_unhealthy(InstanceId, Reason) ->
+    optvar:set(?OPTVAR_UNHEALTHY(InstanceId), Reason),
     ok.

--spec unset_nonexistent_topic(resource_id()) -> ok.
-unset_nonexistent_topic(InstanceId) ->
-    optvar:unset(?OPTVAR_TOPIC_NOT_FOUND(InstanceId)),
-    ?tp(gcp_pubsub_consumer_unset_nonexistent_topic, #{}),
+-spec clear_unhealthy(resource_id()) -> ok.
+clear_unhealthy(InstanceId) ->
+    optvar:unset(?OPTVAR_UNHEALTHY(InstanceId)),
+    ?tp(gcp_pubsub_consumer_clear_unhealthy, #{}),
     ok.

--spec is_nonexistent_topic(resource_id()) -> boolean().
-is_nonexistent_topic(InstanceId) ->
-    case optvar:peek(?OPTVAR_TOPIC_NOT_FOUND(InstanceId)) of
-        {ok, true} ->
-            true;
-        _ ->
-            false
+-spec check_if_unhealthy(resource_id()) -> ok | {error, topic_not_found | permission_denied}.
+check_if_unhealthy(InstanceId) ->
+    case optvar:peek(?OPTVAR_UNHEALTHY(InstanceId)) of
+        {ok, Reason} ->
+            {error, Reason};
+        undefined ->
+            ok
     end.

 %%-------------------------------------------------------------------------------------------------
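Taken together, the renamed API forms a small cross-process protocol: a consumer worker records why the resource is unhealthy, the resource's health check reads the reason, and on_stop/2 clears it. A minimal sketch of that lifecycle, using only the calls defined above (the instance id is a placeholder):

    %% Sketch of the unhealthy-flag lifecycle (placeholder instance id).
    InstanceId = <<"bridge:gcp_pubsub_consumer:example">>,
    %% Worker side, e.g. after a 403 from GCP:
    ok = emqx_bridge_gcp_pubsub_impl_consumer:mark_as_unhealthy(InstanceId, permission_denied),
    %% Health check side:
    {error, permission_denied} =
        emqx_bridge_gcp_pubsub_impl_consumer:check_if_unhealthy(InstanceId),
    %% On bridge stop:
    ok = emqx_bridge_gcp_pubsub_impl_consumer:clear_unhealthy(InstanceId),
    ok = emqx_bridge_gcp_pubsub_impl_consumer:check_if_unhealthy(InstanceId).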
@@ -153,6 +159,11 @@ start_consumers(InstanceId, Client, Config) ->
             throw(
                 {unhealthy_target, ?TOPIC_MESSAGE}
             );
+        {error, permission_denied} ->
+            _ = emqx_bridge_gcp_pubsub_client:stop(InstanceId),
+            throw(
+                {unhealthy_target, ?PERMISSION_MESSAGE}
+            );
         {error, _} ->
             %% connection might be down; we'll have to check topic existence during health
             %% check, or the workers will kill themselves when they realized there's no

@@ -229,6 +240,8 @@ check_for_topic_existence(Topic, Client) ->
             ok;
         {error, #{status_code := 404}} ->
             {error, not_found};
+        {error, #{status_code := 403}} ->
+            {error, permission_denied};
         {error, Reason} ->
             ?tp(warning, "gcp_pubsub_consumer_check_topic_error", #{reason => Reason}),
             {error, Reason}
|
||||||
),
|
),
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
|
permission_denied_response() ->
|
||||||
|
Link =
|
||||||
|
<<"https://console.developers.google.com/project/9999/apiui/credential">>,
|
||||||
|
{error, #{
|
||||||
|
status_code => 403,
|
||||||
|
headers =>
|
||||||
|
[
|
||||||
|
{<<"vary">>, <<"X-Origin">>},
|
||||||
|
{<<"vary">>, <<"Referer">>},
|
||||||
|
{<<"content-type">>, <<"application/json; charset=UTF-8">>},
|
||||||
|
{<<"date">>, <<"Tue, 15 Aug 2023 13:59:09 GMT">>},
|
||||||
|
{<<"server">>, <<"ESF">>},
|
||||||
|
{<<"cache-control">>, <<"private">>},
|
||||||
|
{<<"x-xss-protection">>, <<"0">>},
|
||||||
|
{<<"x-frame-options">>, <<"SAMEORIGIN">>},
|
||||||
|
{<<"x-content-type-options">>, <<"nosniff">>},
|
||||||
|
{<<"alt-svc">>, <<"h3=\":443\"; ma=2592000,h3-29=\":443\"; ma=2592000">>},
|
||||||
|
{<<"accept-ranges">>, <<"none">>},
|
||||||
|
{<<"vary">>, <<"Origin,Accept-Encoding">>},
|
||||||
|
{<<"transfer-encoding">>, <<"chunked">>}
|
||||||
|
],
|
||||||
|
body => emqx_utils_json:encode(
|
||||||
|
#{
|
||||||
|
<<"error">> =>
|
||||||
|
#{
|
||||||
|
<<"code">> => 403,
|
||||||
|
<<"details">> =>
|
||||||
|
[
|
||||||
|
#{
|
||||||
|
<<"@type">> => <<"type.googleapis.com/google.rpc.Help">>,
|
||||||
|
<<"links">> =>
|
||||||
|
[
|
||||||
|
#{
|
||||||
|
<<"description">> =>
|
||||||
|
<<"Google developer console API key">>,
|
||||||
|
<<"url">> =>
|
||||||
|
Link
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
#{
|
||||||
|
<<"@type">> => <<"type.googleapis.com/google.rpc.ErrorInfo">>,
|
||||||
|
<<"domain">> => <<"googleapis.com">>,
|
||||||
|
<<"metadata">> =>
|
||||||
|
#{
|
||||||
|
<<"consumer">> => <<"projects/9999">>,
|
||||||
|
<<"service">> => <<"pubsub.googleapis.com">>
|
||||||
|
},
|
||||||
|
<<"reason">> => <<"CONSUMER_INVALID">>
|
||||||
|
}
|
||||||
|
],
|
||||||
|
<<"message">> => <<"Project #9999 has been deleted.">>,
|
||||||
|
<<"status">> => <<"PERMISSION_DENIED">>
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}}.
|
||||||
|
|
||||||
%%------------------------------------------------------------------------------
|
%%------------------------------------------------------------------------------
|
||||||
%% Testcases
|
%% Testcases
|
||||||
%%------------------------------------------------------------------------------
|
%%------------------------------------------------------------------------------
|
||||||
|
@ -785,7 +843,7 @@ t_start_stop(Config) ->
|
||||||
prop_client_stopped(),
|
prop_client_stopped(),
|
||||||
prop_workers_stopped(PubSubTopic),
|
prop_workers_stopped(PubSubTopic),
|
||||||
fun(Trace) ->
|
fun(Trace) ->
|
||||||
?assertMatch([_], ?of_kind(gcp_pubsub_consumer_unset_nonexistent_topic, Trace)),
|
?assertMatch([_], ?of_kind(gcp_pubsub_consumer_clear_unhealthy, Trace)),
|
||||||
ok
|
ok
|
||||||
end
|
end
|
||||||
]
|
]
|
||||||
|
@ -1992,6 +2050,81 @@ t_get_subscription(Config) ->
|
||||||
),
|
),
|
||||||
ok.
|
ok.
|
||||||
|
|
||||||
|
t_permission_denied_topic_check(Config) ->
|
||||||
|
[#{pubsub_topic := PubSubTopic}] = ?config(topic_mapping, Config),
|
||||||
|
ResourceId = resource_id(Config),
|
||||||
|
?check_trace(
|
||||||
|
begin
|
||||||
|
%% the emulator does not check any credentials
|
||||||
|
emqx_common_test_helpers:with_mock(
|
||||||
|
emqx_bridge_gcp_pubsub_client,
|
||||||
|
query_sync,
|
||||||
|
fun(PreparedRequest = {prepared_request, {Method, Path, _Body}}, Client) ->
|
||||||
|
RE = iolist_to_binary(["/topics/", PubSubTopic, "$"]),
|
||||||
|
case {Method =:= get, re:run(Path, RE)} of
|
||||||
|
{true, {match, _}} ->
|
||||||
|
permission_denied_response();
|
||||||
|
_ ->
|
||||||
|
meck:passthrough([PreparedRequest, Client])
|
||||||
|
end
|
||||||
|
end,
|
||||||
|
fun() ->
|
||||||
|
{{ok, _}, {ok, _}} =
|
||||||
|
?wait_async_action(
|
||||||
|
create_bridge(Config),
|
||||||
|
#{?snk_kind := gcp_pubsub_stop},
|
||||||
|
5_000
|
||||||
|
),
|
||||||
|
?assertMatch(
|
||||||
|
{ok, disconnected},
|
||||||
|
emqx_resource_manager:health_check(ResourceId)
|
||||||
|
),
|
||||||
|
?assertMatch(
|
||||||
|
{ok, _Group, #{error := {unhealthy_target, "Permission denied" ++ _}}},
|
||||||
|
emqx_resource_manager:lookup_cached(ResourceId)
|
||||||
|
),
|
||||||
|
ok
|
||||||
|
end
|
||||||
|
),
|
||||||
|
ok
|
||||||
|
end,
|
||||||
|
[]
|
||||||
|
),
|
||||||
|
ok.
|
||||||
|
|
||||||
|
t_permission_denied_worker(Config) ->
|
||||||
|
?check_trace(
|
||||||
|
begin
|
||||||
|
emqx_common_test_helpers:with_mock(
|
||||||
|
emqx_bridge_gcp_pubsub_client,
|
||||||
|
query_sync,
|
||||||
|
fun(PreparedRequest = {prepared_request, {Method, _Path, _Body}}, Client) ->
|
||||||
|
case Method =:= put of
|
||||||
|
true ->
|
||||||
|
permission_denied_response();
|
||||||
|
false ->
|
||||||
|
meck:passthrough([PreparedRequest, Client])
|
||||||
|
end
|
||||||
|
end,
|
||||||
|
fun() ->
|
||||||
|
{{ok, _}, {ok, _}} =
|
||||||
|
?wait_async_action(
|
||||||
|
create_bridge(
|
||||||
|
Config
|
||||||
|
),
|
||||||
|
#{?snk_kind := gcp_pubsub_consumer_worker_terminate},
|
||||||
|
10_000
|
||||||
|
),
|
||||||
|
|
||||||
|
ok
|
||||||
|
end
|
||||||
|
),
|
||||||
|
ok
|
||||||
|
end,
|
||||||
|
[]
|
||||||
|
),
|
||||||
|
ok.
|
||||||
|
|
||||||
t_cluster_subscription(Config) ->
|
t_cluster_subscription(Config) ->
|
||||||
[
|
[
|
||||||
#{
|
#{
|
||||||
|
|
|
@@ -1,6 +1,6 @@
 {application, emqx_bridge_influxdb, [
     {description, "EMQX Enterprise InfluxDB Bridge"},
-    {vsn, "0.1.3"},
+    {vsn, "0.1.4"},
     {registered, []},
     {applications, [
         kernel,
@@ -168,6 +168,9 @@ write_syntax(format) ->
 write_syntax(_) ->
     undefined.

+to_influx_lines(Lines = [#{} | _]) ->
+    %% already parsed/converted (e.g.: bridge_probe, after hocon_tconf:check_plain)
+    Lines;
 to_influx_lines(RawLines) ->
     try
         influx_lines(str(RawLines), [])
@@ -66,7 +66,9 @@ on_start(InstId, Config) ->
 on_stop(InstId, _State) ->
     case emqx_resource:get_allocated_resources(InstId) of
         #{?influx_client := Client} ->
-            influxdb:stop_client(Client);
+            Res = influxdb:stop_client(Client),
+            ?tp(influxdb_client_stopped, #{instance_id => InstId}),
+            Res;
         _ ->
             ok
     end.
@@ -124,6 +124,9 @@ init_per_group(InfluxDBType, Config0) when
         {influxdb_config, InfluxDBConfig},
         {influxdb_config_string, ConfigString},
         {ehttpc_pool_name, EHttpcPoolName},
+        {bridge_type, influxdb_api_v1},
+        {bridge_name, Name},
+        {bridge_config, InfluxDBConfig},
         {influxdb_name, Name}
         | Config
     ];

@@ -193,6 +196,9 @@ init_per_group(InfluxDBType, Config0) when
         {influxdb_config, InfluxDBConfig},
         {influxdb_config_string, ConfigString},
         {ehttpc_pool_name, EHttpcPoolName},
+        {bridge_type, influxdb_api_v2},
+        {bridge_name, Name},
+        {bridge_config, InfluxDBConfig},
         {influxdb_name, Name}
         | Config
     ];

@@ -570,6 +576,10 @@ t_start_ok(Config) ->
     ),
     ok.

+t_start_stop(Config) ->
+    ok = emqx_bridge_testlib:t_start_stop(Config, influxdb_client_stopped),
+    ok.
+
 t_start_already_started(Config) ->
     Type = influxdb_type_bin(?config(influxdb_type, Config)),
     Name = ?config(influxdb_name, Config),
@@ -1,7 +1,7 @@
 %% -*- mode: erlang -*-
 {application, emqx_bridge_kafka, [
     {description, "EMQX Enterprise Kafka Bridge"},
-    {vsn, "0.1.7"},
+    {vsn, "0.1.8"},
     {registered, [emqx_bridge_kafka_consumer_sup]},
     {applications, [
         kernel,
@@ -268,7 +268,8 @@ fields(producer_opts) ->
                 required => true,
                 desc => ?DESC(producer_kafka_opts),
                 validator => fun producer_strategy_key_validator/1
-            })}
+            })},
+        {resource_opts, mk(ref(resource_opts), #{default => #{}})}
     ];
 fields(producer_kafka_opts) ->
     [

@@ -425,7 +426,8 @@ fields(consumer_opts) ->
         {value_encoding_mode,
             mk(enum([none, base64]), #{
                 default => none, desc => ?DESC(consumer_value_encoding_mode)
-            })}
+            })},
+        {resource_opts, mk(ref(resource_opts), #{default => #{}})}
     ];
 fields(consumer_topic_mapping) ->
     [

@@ -460,10 +462,16 @@ fields(consumer_kafka_opts) ->
             emqx_schema:timeout_duration_s(),
             #{default => <<"5s">>, desc => ?DESC(consumer_offset_commit_interval_seconds)}
         )}
-    ].
+    ];
+fields(resource_opts) ->
+    SupportedFields = [health_check_interval],
+    CreationOpts = emqx_resource_schema:create_opts(_Overrides = []),
+    lists:filter(fun({Field, _}) -> lists:member(Field, SupportedFields) end, CreationOpts).

 desc("config") ->
     ?DESC("desc_config");
+desc(resource_opts) ->
+    ?DESC(emqx_resource_schema, "resource_opts");
 desc("get_" ++ Type) when Type =:= "consumer"; Type =:= "producer" ->
     ["Configuration for Kafka using `GET` method."];
 desc("put_" ++ Type) when Type =:= "consumer"; Type =:= "producer" ->
@@ -596,7 +596,6 @@ t_send_message_with_headers(Config) ->
         },
         KafkaMsg
     ),
-    ?assertMatch(#kafka_message{key = BinTime}, KafkaMsg),
     %% TODO: refactor those into init/end per testcase
     ok = ?PRODUCER:on_stop(ResourceId, State),
     ?assertEqual([], supervisor:which_children(wolff_client_sup)),
@@ -306,6 +306,9 @@ kafka_producer_new_hocon() ->
     " sndbuf = \"1024KB\"\n"
     " }\n"
     " ssl {enable = false, verify = \"verify_peer\"}\n"
+    " resource_opts {\n"
+    "   health_check_interval = 10s\n"
+    " }\n"
     " }\n"
     "}\n"
     "".

@@ -351,5 +354,8 @@ bridges.kafka_consumer.my_consumer {
     verify = verify_none
     server_name_indication = \"auto\"
   }
+  resource_opts {
+    health_check_interval = 10s
+  }
 }
 """.
@@ -109,6 +109,7 @@

 %% Tables
 -define(RULE_TAB, emqx_rule_engine).
+-define(RULE_TOPIC_INDEX, emqx_rule_engine_topic_index).

 %% Allowed sql function provider modules
 -define(DEFAULT_SQL_FUNC_PROVIDER, emqx_rule_funcs).
@@ -176,7 +176,7 @@ create_rule(Params) ->

 create_rule(Params = #{id := RuleId}, CreatedAt) when is_binary(RuleId) ->
     case get_rule(RuleId) of
-        not_found -> parse_and_insert(Params, CreatedAt);
+        not_found -> with_parsed_rule(Params, CreatedAt, fun insert_rule/1);
         {ok, _} -> {error, already_exists}
     end.

@@ -185,18 +185,27 @@ update_rule(Params = #{id := RuleId}) when is_binary(RuleId) ->
     case get_rule(RuleId) of
         not_found ->
             {error, not_found};
-        {ok, #{created_at := CreatedAt}} ->
-            parse_and_insert(Params, CreatedAt)
+        {ok, RulePrev = #{created_at := CreatedAt}} ->
+            with_parsed_rule(Params, CreatedAt, fun(Rule) -> update_rule(Rule, RulePrev) end)
     end.

 -spec delete_rule(RuleId :: rule_id()) -> ok.
 delete_rule(RuleId) when is_binary(RuleId) ->
-    gen_server:call(?RULE_ENGINE, {delete_rule, RuleId}, ?T_CALL).
+    case get_rule(RuleId) of
+        not_found ->
+            ok;
+        {ok, Rule} ->
+            gen_server:call(?RULE_ENGINE, {delete_rule, Rule}, ?T_CALL)
+    end.

 -spec insert_rule(Rule :: rule()) -> ok.
 insert_rule(Rule) ->
     gen_server:call(?RULE_ENGINE, {insert_rule, Rule}, ?T_CALL).

+-spec update_rule(Rule :: rule(), RulePrev :: rule()) -> ok.
+update_rule(Rule, RulePrev) ->
+    gen_server:call(?RULE_ENGINE, {update_rule, Rule, RulePrev}, ?T_CALL).

 %%----------------------------------------------------------------------------------------
 %% Rule Management
 %%----------------------------------------------------------------------------------------

@@ -216,9 +225,8 @@ get_rules_ordered_by_ts() ->
 -spec get_rules_for_topic(Topic :: binary()) -> [rule()].
 get_rules_for_topic(Topic) ->
     [
-        Rule
-     || Rule = #{from := From} <- get_rules(),
-        emqx_topic:match_any(Topic, From)
+        emqx_topic_index:get_record(M, ?RULE_TOPIC_INDEX)
+     || M <- emqx_topic_index:matches(Topic, ?RULE_TOPIC_INDEX, [unique])
     ].

 -spec get_rules_with_same_event(Topic :: binary()) -> [rule()].

@@ -411,10 +419,17 @@ init([]) ->
     {ok, #{}}.

 handle_call({insert_rule, Rule}, _From, State) ->
-    do_insert_rule(Rule),
+    ok = do_insert_rule(Rule),
+    ok = do_update_rule_index(Rule),
+    {reply, ok, State};
+handle_call({update_rule, Rule, RulePrev}, _From, State) ->
+    ok = do_delete_rule_index(RulePrev),
+    ok = do_insert_rule(Rule),
+    ok = do_update_rule_index(Rule),
     {reply, ok, State};
 handle_call({delete_rule, Rule}, _From, State) ->
-    do_delete_rule(Rule),
+    ok = do_delete_rule_index(Rule),
+    ok = do_delete_rule(Rule),
     {reply, ok, State};
 handle_call(Req, _From, State) ->
     ?SLOG(error, #{msg => "unexpected_call", request => Req}),

@@ -438,7 +453,7 @@ code_change(_OldVsn, State, _Extra) ->
 %% Internal Functions
 %%----------------------------------------------------------------------------------------

-parse_and_insert(Params = #{id := RuleId, sql := Sql, actions := Actions}, CreatedAt) ->
+with_parsed_rule(Params = #{id := RuleId, sql := Sql, actions := Actions}, CreatedAt, Fun) ->
     case emqx_rule_sqlparser:parse(Sql) of
         {ok, Select} ->
             Rule = #{

@@ -459,7 +474,7 @@ with_parsed_rule(Params = #{id := RuleId, sql := Sql, actions := Actions}, Creat
                 conditions => emqx_rule_sqlparser:select_where(Select)
                 %% -- calculated fields end
             },
-            ok = insert_rule(Rule),
+            ok = Fun(Rule),
             {ok, Rule};
         {error, Reason} ->
             {error, Reason}

@@ -471,16 +486,27 @@ do_insert_rule(#{id := Id} = Rule) ->
     true = ets:insert(?RULE_TAB, {Id, maps:remove(id, Rule)}),
     ok.

-do_delete_rule(RuleId) ->
-    case get_rule(RuleId) of
-        {ok, Rule} ->
-            ok = unload_hooks_for_rule(Rule),
-            ok = clear_metrics_for_rule(RuleId),
-            true = ets:delete(?RULE_TAB, RuleId),
-            ok;
-        not_found ->
-            ok
-    end.
+do_delete_rule(#{id := Id} = Rule) ->
+    ok = unload_hooks_for_rule(Rule),
+    ok = clear_metrics_for_rule(Id),
+    true = ets:delete(?RULE_TAB, Id),
+    ok.

+do_update_rule_index(#{id := Id, from := From} = Rule) ->
+    ok = lists:foreach(
+        fun(Topic) ->
+            true = emqx_topic_index:insert(Topic, Id, Rule, ?RULE_TOPIC_INDEX)
+        end,
+        From
+    ).

+do_delete_rule_index(#{id := Id, from := From}) ->
+    ok = lists:foreach(
+        fun(Topic) ->
+            true = emqx_topic_index:delete(Topic, Id, ?RULE_TOPIC_INDEX)
+        end,
+        From
+    ).

 parse_actions(Actions) ->
     [do_parse_action(Act) || Act <- Actions].
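To summarize the rule-engine changes above: every topic filter in a rule's `from` list now gets one entry in ?RULE_TOPIC_INDEX, and lookup goes through the index instead of scanning every rule. A sketch of the resulting data flow (the rule map is trimmed to the fields the index touches; the id and topics are made up):

    %% Sketch: one index entry per `from` filter, then index-driven lookup.
    Rule = #{id => <<"rule_example">>, from => [<<"t/#">>, <<"s/+/metrics">>]},
    #{id := Id, from := From} = Rule,
    ok = lists:foreach(
        fun(Topic) -> true = emqx_topic_index:insert(Topic, Id, Rule, ?RULE_TOPIC_INDEX) end,
        From
    ),
    %% get_rules_for_topic/1 now resolves a published topic in one index scan:
    [Rule] = [
        emqx_topic_index:get_record(M, ?RULE_TOPIC_INDEX)
     || M <- emqx_topic_index:matches(<<"t/1">>, ?RULE_TOPIC_INDEX, [unique])
    ].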
@@ -26,6 +26,7 @@

 start(_Type, _Args) ->
     _ = ets:new(?RULE_TAB, [named_table, public, ordered_set, {read_concurrency, true}]),
+    _ = ets:new(?RULE_TOPIC_INDEX, [named_table, public, ordered_set, {read_concurrency, true}]),
     ok = emqx_rule_events:reload(),
     SupRet = emqx_rule_engine_sup:start_link(),
     ok = emqx_rule_engine:load_rules(),
@@ -0,0 +1 @@
Introduce topic index for the rule engine runtime that significantly improves the performance of EMQX with a non-trivial number of rules consuming messages matching different topic filters.

@@ -0,0 +1 @@
Added the option to configure health check interval for Kafka bridges.

@@ -0,0 +1 @@
Fixed an issue which would yield false negatives when testing the connectivity of InfluxDB bridges.
@@ -14,8 +14,8 @@ type: application

 # This is the chart version. This version number should be incremented each time you make changes
 # to the chart and its templates, including the app version.
-version: 5.2.0-alpha.1
+version: 5.2.0-alpha.3

 # This is the version number of the application being deployed. This version number should be
 # incremented each time you make changes to the application.
-appVersion: 5.2.0-alpha.1
+appVersion: 5.2.0-alpha.3
@@ -62,12 +62,13 @@ def test_log(driver, login, dashboard_url):
     ensure_current_url(driver, dest_url)
     title = wait_title(driver)
     assert "Logging" == title.text
-    label = driver.find_element(By.XPATH, "//div[@id='app']//form//label[./label/span[text()='Enable Log Handler']]")
+
+    label = driver.find_element(By.XPATH, "//div[@id='app']//form//label[contains(., 'Enable Log Handler')]")
     assert driver.find_elements(By.ID, label.get_attribute("for"))
-    label = driver.find_element(By.XPATH, "//div[@id='app']//form//label[./label/span[text()='Log Level']]")
+    label = driver.find_element(By.XPATH, "//div[@id='app']//form//label[contains(., 'Log Level')]")
     assert driver.find_elements(By.ID, label.get_attribute("for"))
-    label = driver.find_element(By.XPATH, "//div[@id='app']//form//label[./label/span[text()='Log Formatter']]")
+    label = driver.find_element(By.XPATH, "//div[@id='app']//form//label[contains(., 'Log Formatter')]")
    assert driver.find_elements(By.ID, label.get_attribute("for"))
-    label = driver.find_element(By.XPATH, "//div[@id='app']//form//label[./label/span[text()='Time Offset']]")
+    label = driver.find_element(By.XPATH, "//div[@id='app']//form//label[contains(., 'Time Offset')]")
     assert driver.find_elements(By.ID, label.get_attribute("for"))