Merge remote-tracking branch 'origin/release-52' into 0906-sync-release-52-to-master

This commit is contained in:
Zaiming (Stone) Shi 2023-09-06 09:08:22 +02:00
commit e794143ae1
216 changed files with 5124 additions and 1954 deletions

View File

@ -10,7 +10,7 @@ CASSANDRA_TAG=3.11.6
MINIO_TAG=RELEASE.2023-03-20T20-16-18Z
OPENTS_TAG=9aa7f88
KINESIS_TAG=2.1
HSTREAMDB_TAG=v0.15.0
HSTREAMDB_TAG=v0.16.1
HSTREAMDB_ZK_TAG=3.8.1
MS_IMAGE_ADDR=mcr.microsoft.com/mssql/server

View File

@ -11,6 +11,8 @@ services:
image: openldap
#ports:
# - 389:389
volumes:
- ./certs/ca.crt:/etc/certs/ca.crt
restart: always
networks:
- emqx_bridge

View File

@ -4,12 +4,11 @@ services:
redis_server:
container_name: redis
image: redis:${REDIS_TAG}
volumes:
- ./redis/single-tcp:/usr/local/etc/redis/
ports:
- "6379:6379"
command:
- redis-server
- "--bind 0.0.0.0 ::"
- --requirepass public
command: redis-server /usr/local/etc/redis/redis.conf
restart: always
networks:
- emqx_bridge

View File

@ -8,18 +8,10 @@ services:
- ./certs/server.crt:/etc/certs/redis.crt
- ./certs/server.key:/etc/certs/redis.key
- ./certs/ca.crt:/etc/certs/ca.crt
- ./redis/single-tls:/usr/local/etc/redis
ports:
- "6380:6380"
command:
- redis-server
- "--bind 0.0.0.0 ::"
- --requirepass public
- --tls-port 6380
- --tls-cert-file /etc/certs/redis.crt
- --tls-key-file /etc/certs/redis.key
- --tls-ca-cert-file /etc/certs/ca.crt
- --tls-protocols "TLSv1.3"
- --tls-ciphersuites "TLS_CHACHA20_POLY1305_SHA256"
command: redis-server /usr/local/etc/redis/redis.conf
restart: always
networks:
emqx_bridge:

View File

@ -1,10 +1,11 @@
bind :: 0.0.0.0
port 6379
requirepass public
cluster-enabled yes
masteruser default
masterauth public
aclfile /usr/local/etc/redis/users.acl
protected-mode no
daemonize no

View File

@ -0,0 +1,2 @@
user default on >public ~* &* +@all
user test_user on >test_passwd ~* &* +@all

View File

@ -1,10 +1,11 @@
bind :: 0.0.0.0
port 6379
requirepass public
cluster-enabled yes
masteruser default
masterauth public
aclfile /usr/local/etc/redis/users.acl
tls-port 6389
tls-cert-file /etc/certs/cert.pem

View File

@ -0,0 +1,2 @@
user default on >public ~* &* +@all
user test_user on >test_passwd ~* &* +@all

View File

@ -1,6 +1,6 @@
bind :: 0.0.0.0
port 6379
requirepass public
aclfile /usr/local/etc/redis/users.acl
protected-mode no
daemonize no

View File

@ -1,9 +1,10 @@
bind :: 0.0.0.0
port 6379
requirepass public
replicaof redis-sentinel-master 6379
masteruser default
masterauth public
aclfile /usr/local/etc/redis/users.acl
protected-mode no
daemonize no

View File

@ -0,0 +1,2 @@
user default on >public ~* &* +@all
user test_user on >test_passwd ~* &* +@all

View File

@ -1,6 +1,6 @@
bind :: 0.0.0.0
port 6379
requirepass public
aclfile /usr/local/etc/redis/users.acl
tls-port 6389
tls-cert-file /etc/certs/cert.pem

View File

@ -1,9 +1,10 @@
bind :: 0.0.0.0
port 6379
requirepass public
replicaof redis-sentinel-tls-master 6389
masteruser default
masterauth public
aclfile /usr/local/etc/redis/users.acl
tls-port 6389
tls-replication yes

View File

@ -0,0 +1,2 @@
user default on >public ~* &* +@all
user test_user on >test_passwd ~* &* +@all

View File

@ -0,0 +1,3 @@
bind :: 0.0.0.0
port 6379
aclfile /usr/local/etc/redis/users.acl

View File

@ -0,0 +1,2 @@
user default on >public ~* &* +@all
user test_user on >test_passwd ~* &* +@all

View File

@ -0,0 +1,9 @@
bind :: 0.0.0.0
aclfile /usr/local/etc/redis/users.acl
tls-port 6380
tls-cert-file /etc/certs/redis.crt
tls-key-file /etc/certs/redis.key
tls-ca-cert-file /etc/certs/ca.crt
tls-protocols "TLSv1.3"
tls-ciphersuites "TLS_CHACHA20_POLY1305_SHA256"

View File

@ -0,0 +1,2 @@
user default on >public ~* &* +@all
user test_user on >test_passwd ~* &* +@all

View File

@ -179,5 +179,17 @@
"listen": "0.0.0.0:4566",
"upstream": "kinesis:4566",
"enabled": true
},
{
"name": "ldap_tcp",
"listen": "0.0.0.0:389",
"upstream": "ldap:389",
"enabled": true
},
{
"name": "ldap_ssl",
"listen": "0.0.0.0:636",
"upstream": "ldap:636",
"enabled": true
}
]

View File

@ -25,7 +25,7 @@ jobs:
runs-on: ubuntu-latest
env:
EMQX_NAME: ${{ matrix.profile }}
PKG_VSN: ${{ matrix.profile == 'emqx-enterprise' && inputs.version-emqx-enterprise || inputs.version-emqx }}
PKG_VSN: ${{ startsWith(matrix.profile, 'emqx-enterprise') && inputs.version-emqx-enterprise || inputs.version-emqx }}
OTP_VSN: ${{ inputs.otp_vsn }}
ELIXIR_VSN: ${{ inputs.elixir_vsn }}
@ -36,6 +36,7 @@ jobs:
- emqx
- emqx-enterprise
- emqx-elixir
- emqx-enterprise-elixir
steps:
- uses: actions/checkout@v3
@ -58,4 +59,3 @@ jobs:
name: "${{ env.EMQX_NAME }}-docker"
path: "${{ env.EMQX_NAME }}-docker-${{ env.PKG_VSN }}.tar.gz"
retention-days: 3

View File

@ -16,7 +16,7 @@ endif
# Dashboard version
# from https://github.com/emqx/emqx-dashboard5
export EMQX_DASHBOARD_VERSION ?= v1.3.2
export EMQX_EE_DASHBOARD_VERSION ?= e1.2.0-beta.4
export EMQX_EE_DASHBOARD_VERSION ?= e1.2.0-beta.9
# `:=` should be used here, otherwise the `$(shell ...)` will be executed every time the variable is used
# In make 4.4+, for backward-compatibility the value from the original environment is used.

View File

@ -122,20 +122,4 @@
until :: integer()
}).
%%--------------------------------------------------------------------
%% Authentication
%%--------------------------------------------------------------------
-record(authenticator, {
id :: binary(),
provider :: module(),
enable :: boolean(),
state :: map()
}).
-record(chain, {
name :: atom(),
authenticators :: [#authenticator{}]
}).
-endif.

View File

@ -14,7 +14,9 @@
%% limitations under the License.
%%--------------------------------------------------------------------
%% config root name all auth providers have to agree on.
-ifndef(EMQX_ACCESS_CONTROL_HRL).
-define(EMQX_ACCESS_CONTROL_HRL, true).
-define(EMQX_AUTHORIZATION_CONFIG_ROOT_NAME, "authorization").
-define(EMQX_AUTHORIZATION_CONFIG_ROOT_NAME_ATOM, authorization).
-define(EMQX_AUTHORIZATION_CONFIG_ROOT_NAME_BINARY, <<"authorization">>).
@ -32,3 +34,7 @@
-define(authz_action(PUBSUB, QOS), #{action_type := PUBSUB, qos := QOS}).
-define(authz_action(PUBSUB), ?authz_action(PUBSUB, _)).
-define(authz_action, ?authz_action(_)).
-define(AUTHN_TRACE_TAG, "AUTHN").
-endif.

View File

@ -35,7 +35,7 @@
-define(EMQX_RELEASE_CE, "5.1.6").
%% Enterprise edition
-define(EMQX_RELEASE_EE, "5.2.0-alpha.3").
-define(EMQX_RELEASE_EE, "5.2.0-alpha.4").
%% The HTTP API version
-define(EMQX_API_VERSION, "5.0").

View File

@ -17,8 +17,9 @@
-ifndef(EMQX_ROUTER_HRL).
-define(EMQX_ROUTER_HRL, true).
%% ETS table for message routing
%% ETS tables for message routing
-define(ROUTE_TAB, emqx_route).
-define(ROUTE_TAB_FILTERS, emqx_route_filters).
%% Mnesia table for message routing
-define(ROUTING_NODE, emqx_routing_node).

View File

@ -19,6 +19,7 @@
%% API
-export([add_handler/0, remove_handler/0, pre_config_update/3]).
-export([is_olp_enabled/0]).
-define(ZONES, [zones]).
@ -33,3 +34,13 @@ remove_handler() ->
%% replace the old config with the new config
pre_config_update(?ZONES, NewRaw, _OldRaw) ->
{ok, NewRaw}.
is_olp_enabled() ->
maps:fold(
fun
(_, #{overload_protection := #{enable := true}}, _Acc) -> true;
(_, _, Acc) -> Acc
end,
false,
emqx_config:get([zones], #{})
).
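%% For illustration: the fold above returns true if any zone enables overload
%% protection, e.g. with zones configured as
%%   #{default => #{overload_protection => #{enable => true}}}
%% and false when no zone sets `enable = true` under overload_protection.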

View File

@ -17,6 +17,7 @@
-module(emqx_access_control).
-include("emqx.hrl").
-include("emqx_access_control.hrl").
-include("logger.hrl").
-export([
@ -29,6 +30,14 @@
-compile(nowarn_export_all).
-endif.
-define(TRACE_RESULT(Label, Result, Reason), begin
?TRACE(Label, ?AUTHN_TRACE_TAG, #{
result => (Result),
reason => (Reason)
}),
Result
end).
%%--------------------------------------------------------------------
%% APIs
%%--------------------------------------------------------------------
@ -44,7 +53,7 @@ authenticate(Credential) ->
%% if the auth backend returns nothing but just 'ok',
%% it means the client is not a superuser, or there is no way to tell.
NotSuperUser = #{is_superuser => false},
case emqx_authentication:pre_hook_authenticate(Credential) of
case pre_hook_authenticate(Credential) of
ok ->
inc_authn_metrics(anonymous),
{ok, NotSuperUser};
@ -99,6 +108,29 @@ authorize(ClientInfo, Action, Topic) ->
inc_authz_metrics(Result),
Result.
%%--------------------------------------------------------------------
%% Internal Functions
%%--------------------------------------------------------------------
-spec pre_hook_authenticate(emqx_types:clientinfo()) ->
ok | continue | {error, not_authorized}.
pre_hook_authenticate(#{enable_authn := false}) ->
?TRACE_RESULT("pre_hook_authenticate", ok, enable_authn_false);
pre_hook_authenticate(#{enable_authn := quick_deny_anonymous} = Credential) ->
case is_username_defined(Credential) of
true ->
continue;
false ->
?TRACE_RESULT("pre_hook_authenticate", {error, not_authorized}, enable_authn_false)
end;
pre_hook_authenticate(_) ->
continue.
is_username_defined(#{username := undefined}) -> false;
is_username_defined(#{username := <<>>}) -> false;
is_username_defined(#{username := _Username}) -> true;
is_username_defined(_) -> false.
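%% For illustration: with the clauses above, the pre-hook behaves roughly as
%% follows for sketched clientinfo maps (values are illustrative):
%%   pre_hook_authenticate(#{enable_authn => false}) -> ok
%%   pre_hook_authenticate(#{enable_authn => quick_deny_anonymous, username => <<>>}) -> {error, not_authorized}
%%   pre_hook_authenticate(#{enable_authn => quick_deny_anonymous, username => <<"u1">>}) -> continue
%%   pre_hook_authenticate(#{enable_authn => true, username => undefined}) -> continue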
check_authorization_cache(ClientInfo, Action, Topic) ->
case emqx_authz_cache:get_authz_cache(Action, Topic) of
not_found ->

View File

@ -55,7 +55,9 @@ prep_stop(_State) ->
emqx_boot:is_enabled(listeners) andalso
emqx_listeners:stop().
stop(_State) -> ok.
stop(_State) ->
ok = emqx_router:deinit_schema(),
ok.
-define(CONFIG_LOADER, config_loader).
-define(DEFAULT_LOADER, emqx).

View File

@ -49,16 +49,6 @@ init([]) ->
modules => [emqx_shared_sub]
},
%% Authentication
AuthNSup = #{
id => emqx_authentication_sup,
start => {emqx_authentication_sup, start_link, []},
restart => permanent,
shutdown => infinity,
type => supervisor,
modules => [emqx_authentication_sup]
},
%% Broker helper
Helper = #{
id => helper,
@ -69,4 +59,4 @@ init([]) ->
modules => [emqx_broker_helper]
},
{ok, {{one_for_all, 0, 1}, [BrokerPool, SharedSub, AuthNSup, Helper]}}.
{ok, {{one_for_all, 0, 1}, [BrokerPool, SharedSub, Helper]}}.

View File

@ -2228,6 +2228,7 @@ disconnect_and_shutdown(Reason, Reply, Channel) ->
NChannel = ensure_disconnected(Reason, Channel),
shutdown(Reason, Reply, NChannel).
-compile({inline, [sp/1, flag/1]}).
sp(true) -> 1;
sp(false) -> 0.

View File

@ -36,8 +36,6 @@
insert_channel_info/3
]).
-export([connection_closed/1]).
-export([
get_chan_info/1,
get_chan_info/2,
@ -194,14 +192,6 @@ do_unregister_channel({_ClientId, ChanPid} = Chan) ->
ok = emqx_hooks:run('cm.channel.unregistered', [ChanPid]),
true.
-spec connection_closed(emqx_types:clientid()) -> true.
connection_closed(ClientId) ->
connection_closed(ClientId, self()).
-spec connection_closed(emqx_types:clientid(), chan_pid()) -> true.
connection_closed(ClientId, ChanPid) ->
ets:delete_object(?CHAN_CONN_TAB, {ClientId, ChanPid}).
%% @doc Get info of a channel.
-spec get_chan_info(emqx_types:clientid()) -> maybe(emqx_types:infos()).
get_chan_info(ClientId) ->

View File

@ -53,11 +53,17 @@
-optional_callbacks([
pre_config_update/3,
post_config_update/5
propagated_pre_config_update/3,
post_config_update/5,
propagated_post_config_update/5
]).
-callback pre_config_update([atom()], emqx_config:update_request(), emqx_config:raw_config()) ->
{ok, emqx_config:update_request()} | {error, term()}.
ok | {ok, emqx_config:update_request()} | {error, term()}.
-callback propagated_pre_config_update(
[binary()], emqx_config:update_request(), emqx_config:raw_config()
) ->
ok | {ok, emqx_config:update_request()} | {error, term()}.
-callback post_config_update(
[atom()],
@ -68,6 +74,15 @@
) ->
ok | {ok, Result :: any()} | {error, Reason :: term()}.
-callback propagated_post_config_update(
[atom()],
emqx_config:update_request(),
emqx_config:config(),
emqx_config:config(),
emqx_config:app_envs()
) ->
ok | {ok, Result :: any()} | {error, Reason :: term()}.
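%% For illustration, a minimal handler sketch (module and key path are
%% hypothetical; the behaviour module is assumed to be emqx_config_handler)
%% implementing the new optional propagated_* callback alongside the existing
%% ones; returning `ok` keeps the incoming update request unchanged:
%%
%%   -module(my_conf_handler).
%%   -behaviour(emqx_config_handler).
%%   -export([pre_config_update/3, propagated_pre_config_update/3, post_config_update/5]).
%%
%%   pre_config_update(_ConfKeyPath, UpdateReq, _OldRawConf) ->
%%       {ok, UpdateReq}.
%%
%%   %% invoked when an update on a parent path is propagated down to this subtree
%%   propagated_pre_config_update(_BinKeyPath, UpdateReq, _OldRawConf) ->
%%       {ok, UpdateReq}.
%%
%%   post_config_update(_ConfKeyPath, _UpdateReq, _NewConf, _OldConf, _AppEnvs) ->
%%       ok.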
-type state() :: #{handlers := any()}.
start_link() ->
@ -244,7 +259,14 @@ do_update_config(ConfKeyPath, Handlers, OldRawConf, UpdateReq) ->
do_update_config(ConfKeyPath, Handlers, OldRawConf, UpdateReq, []).
do_update_config([], Handlers, OldRawConf, UpdateReq, ConfKeyPath) ->
call_pre_config_update(Handlers, OldRawConf, UpdateReq, ConfKeyPath);
call_pre_config_update(#{
handlers => Handlers,
old_raw_conf => OldRawConf,
update_req => UpdateReq,
conf_key_path => ConfKeyPath,
callback => pre_config_update,
is_propagated => false
});
do_update_config(
[ConfKey | SubConfKeyPath],
Handlers,
@ -331,15 +353,16 @@ do_post_config_update(
Result,
ConfKeyPath
) ->
call_post_config_update(
Handlers,
OldConf,
NewConf,
AppEnvs,
up_req(UpdateArgs),
Result,
ConfKeyPath
);
call_post_config_update(#{
handlers => Handlers,
old_conf => OldConf,
new_conf => NewConf,
app_envs => AppEnvs,
update_req => up_req(UpdateArgs),
result => Result,
conf_key_path => ConfKeyPath,
callback => post_config_update
});
do_post_config_update(
[ConfKey | SubConfKeyPath],
Handlers,
@ -365,10 +388,16 @@ do_post_config_update(
ConfKeyPath
).
get_sub_handlers(ConfKey, Handlers) ->
get_sub_handlers(ConfKey, Handlers) when is_atom(ConfKey) ->
case maps:find(ConfKey, Handlers) of
error -> maps:get(?WKEY, Handlers, #{});
{ok, SubHandlers} -> SubHandlers
end;
get_sub_handlers(ConfKey, Handlers) when is_binary(ConfKey) ->
ConcreteHandlerKeys = maps:keys(Handlers) -- [?MOD, ?WKEY],
case lists:search(fun(K) -> bin(K) =:= ConfKey end, ConcreteHandlerKeys) of
{value, Key} -> maps:get(Key, Handlers);
false -> maps:get(?WKEY, Handlers, #{})
end.
get_sub_config(ConfKey, Conf) when is_map(Conf) ->
@ -377,56 +406,246 @@ get_sub_config(ConfKey, Conf) when is_map(Conf) ->
get_sub_config(_, _Conf) ->
undefined.
call_pre_config_update(#{?MOD := HandlerName}, OldRawConf, UpdateReq, ConfKeyPath) ->
case erlang:function_exported(HandlerName, pre_config_update, 3) of
call_pre_config_update(Ctx) ->
case call_proper_pre_config_update(Ctx) of
{ok, NewUpdateReq0} ->
case
propagate_pre_config_updates_to_subconf(Ctx#{
update_req => NewUpdateReq0
})
of
{ok, #{update_req := NewUpdateReq1}} ->
{ok, NewUpdateReq1};
{error, _} = Error ->
Error
end;
{error, _} = Error ->
Error
end.
call_proper_pre_config_update(
#{
handlers := #{?MOD := Module},
callback := Callback,
update_req := UpdateReq,
old_raw_conf := OldRawConf
} = Ctx
) ->
case erlang:function_exported(Module, Callback, 3) of
true ->
case HandlerName:pre_config_update(ConfKeyPath, UpdateReq, OldRawConf) of
{ok, NewUpdateReq} -> {ok, NewUpdateReq};
{error, Reason} -> {error, {pre_config_update, HandlerName, Reason}}
case apply_pre_config_update(Module, Ctx) of
{ok, NewUpdateReq} ->
{ok, NewUpdateReq};
ok ->
{ok, UpdateReq};
{error, Reason} ->
{error, {pre_config_update, Module, Reason}}
end;
false ->
merge_to_old_config(UpdateReq, OldRawConf)
end;
call_pre_config_update(_Handlers, OldRawConf, UpdateReq, _ConfKeyPath) ->
merge_to_old_config(UpdateReq, OldRawConf).
call_post_config_update(
#{?MOD := HandlerName},
OldConf,
NewConf,
AppEnvs,
UpdateReq,
Result,
ConfKeyPath
call_proper_pre_config_update(
#{update_req := UpdateReq}
) ->
case erlang:function_exported(HandlerName, post_config_update, 5) of
true ->
{ok, UpdateReq}.
apply_pre_config_update(Module, #{
conf_key_path := ConfKeyPath,
update_req := UpdateReq,
old_raw_conf := OldRawConf,
callback := Callback
}) ->
Module:Callback(
ConfKeyPath, UpdateReq, OldRawConf
).
propagate_pre_config_updates_to_subconf(
#{handlers := #{?WKEY := _}} = Ctx
) ->
propagate_pre_config_updates_to_subconf_wkey(Ctx);
propagate_pre_config_updates_to_subconf(
#{handlers := Handlers} = Ctx
) ->
Keys = maps:keys(maps:without([?MOD], Handlers)),
propagate_pre_config_updates_to_subconf_keys(Keys, Ctx).
propagate_pre_config_updates_to_subconf_wkey(
#{
update_req := UpdateReq,
old_raw_conf := OldRawConf
} = Ctx
) ->
Keys = propagate_keys(UpdateReq, OldRawConf),
propagate_pre_config_updates_to_subconf_keys(Keys, Ctx).
propagate_pre_config_updates_to_subconf_keys([], Ctx) ->
{ok, Ctx};
propagate_pre_config_updates_to_subconf_keys([Key | Keys], Ctx0) ->
case propagate_pre_config_updates_to_subconf_key(Key, Ctx0) of
{ok, Ctx1} ->
propagate_pre_config_updates_to_subconf_keys(Keys, Ctx1);
{error, _} = Error ->
Error
end.
propagate_pre_config_updates_to_subconf_key(
Key,
#{
handlers := Handlers,
old_raw_conf := OldRawConf,
update_req := UpdateReq,
conf_key_path := ConfKeyPath,
is_propagated := IsPropagated
} = Ctx
) ->
BinKey = bin(Key),
SubHandlers = get_sub_handlers(BinKey, Handlers),
SubUpdateReq = get_sub_config(BinKey, UpdateReq),
SubOldConf = get_sub_config(BinKey, OldRawConf),
SubConfKeyPath =
case IsPropagated of
true -> ConfKeyPath ++ [BinKey];
false -> bin_path(ConfKeyPath) ++ [BinKey]
end,
case {SubOldConf, SubUpdateReq} of
%% we have a handler, but no relevant keys in either config (new or old),
%% so we don't need to go further
{undefined, undefined} ->
{ok, Ctx};
{_, _} ->
case
HandlerName:post_config_update(
call_pre_config_update(Ctx#{
handlers := SubHandlers,
old_raw_conf := SubOldConf,
update_req := SubUpdateReq,
conf_key_path := SubConfKeyPath,
is_propagated := true,
callback := propagated_pre_config_update
})
of
{ok, SubNewConf1} ->
%% we update only if the new config is not to be removed
%% i.e. SubUpdateReq is not undefined
case SubUpdateReq of
undefined ->
{ok, Ctx};
_ ->
{ok, Ctx#{
update_req := maps:put(BinKey, SubNewConf1, UpdateReq)
}}
end;
{error, _} = Error ->
Error
end
end.
call_post_config_update(#{handlers := Handlers} = Ctx) ->
case call_proper_post_config_update(Ctx) of
{ok, Result} ->
SubHandlers = maps:without([?MOD], Handlers),
propagate_post_config_updates_to_subconf(Ctx#{
handlers := SubHandlers,
callback := propagated_post_config_update,
result := Result
});
{error, _} = Error ->
Error
end.
call_proper_post_config_update(
#{
handlers := #{?MOD := Module},
callback := Callback,
result := Result
} = Ctx
) ->
case erlang:function_exported(Module, Callback, 5) of
true ->
case apply_post_config_update(Module, Ctx) of
ok -> {ok, Result};
{ok, Result1} -> {ok, Result#{Module => Result1}};
{error, Reason} -> {error, {post_config_update, Module, Reason}}
end;
false ->
{ok, Result}
end;
call_proper_post_config_update(
#{result := Result} = _Ctx
) ->
{ok, Result}.
apply_post_config_update(Module, #{
conf_key_path := ConfKeyPath,
update_req := UpdateReq,
new_conf := NewConf,
old_conf := OldConf,
app_envs := AppEnvs,
callback := Callback
}) ->
Module:Callback(
ConfKeyPath,
UpdateReq,
NewConf,
OldConf,
AppEnvs
)
of
ok -> {ok, Result};
{ok, Result1} -> {ok, Result#{HandlerName => Result1}};
{error, Reason} -> {error, {post_config_update, HandlerName, Reason}}
end;
false ->
{ok, Result}
end;
call_post_config_update(
_Handlers,
_OldConf,
_NewConf,
_AppEnvs,
_UpdateReq,
Result,
_ConfKeyPath
).
propagate_post_config_updates_to_subconf(
#{handlers := #{?WKEY := _}} = Ctx
) ->
{ok, Result}.
propagate_post_config_updates_to_subconf_wkey(Ctx);
propagate_post_config_updates_to_subconf(
#{handlers := Handlers} = Ctx
) ->
Keys = maps:keys(Handlers),
propagate_post_config_updates_to_subconf_keys(Keys, Ctx).
propagate_post_config_updates_to_subconf_wkey(
#{
old_conf := OldConf,
new_conf := NewConf
} = Ctx
) ->
Keys = propagate_keys(OldConf, NewConf),
propagate_post_config_updates_to_subconf_keys(Keys, Ctx).
propagate_post_config_updates_to_subconf_keys([], #{result := Result}) ->
{ok, Result};
propagate_post_config_updates_to_subconf_keys([Key | Keys], Ctx) ->
case propagate_post_config_updates_to_subconf_key(Key, Ctx) of
{ok, Result1} ->
propagate_post_config_updates_to_subconf_keys(Keys, Ctx#{result := Result1});
Error ->
Error
end.
propagate_keys(OldConf, NewConf) ->
sets:to_list(sets:union(propagate_keys(OldConf), propagate_keys(NewConf))).
propagate_keys(Conf) when is_map(Conf) -> sets:from_list(maps:keys(Conf), [{version, 2}]);
propagate_keys(_) -> sets:new([{version, 2}]).
propagate_post_config_updates_to_subconf_key(
Key,
#{
handlers := Handlers,
new_conf := NewConf,
old_conf := OldConf,
result := Result,
conf_key_path := ConfKeyPath
} = Ctx
) ->
SubHandlers = maps:get(Key, Handlers, maps:get(?WKEY, Handlers, undefined)),
SubNewConf = get_sub_config(Key, NewConf),
SubOldConf = get_sub_config(Key, OldConf),
SubConfKeyPath = ConfKeyPath ++ [Key],
call_post_config_update(Ctx#{
handlers := SubHandlers,
new_conf := SubNewConf,
old_conf := SubOldConf,
result := Result,
conf_key_path := SubConfKeyPath,
callback := propagated_post_config_update
}).
%% The default callback of config handlers
%% the behaviour is overwriting the old config if:
@ -517,6 +736,7 @@ remove_empty_leaf(KeyPath, Handlers) ->
end.
assert_callback_function(Mod) ->
_ = Mod:module_info(),
case
erlang:function_exported(Mod, pre_config_update, 3) orelse
erlang:function_exported(Mod, post_config_update, 5)

View File

@ -636,7 +636,6 @@ handle_msg(
handle_msg({event, disconnected}, State = #state{channel = Channel}) ->
ClientId = emqx_channel:info(clientid, Channel),
emqx_cm:set_chan_info(ClientId, info(State)),
emqx_cm:connection_closed(ClientId),
{ok, State};
handle_msg({event, _Other}, State = #state{channel = Channel}) ->
ClientId = emqx_channel:info(clientid, Channel),
@ -1217,9 +1216,9 @@ inc_counter(Key, Inc) ->
set_tcp_keepalive({quic, _Listener}) ->
ok;
set_tcp_keepalive({Type, Id}) ->
Conf = emqx_config:get_listener_conf(Type, Id, [tcp_options, keepalive], <<"none">>),
case iolist_to_binary(Conf) of
<<"none">> ->
Conf = emqx_config:get_listener_conf(Type, Id, [tcp_options, keepalive], "none"),
case Conf of
"none" ->
ok;
Value ->
%% the value is already validated by schema, so we do not validate it again.

View File

@ -531,41 +531,15 @@ post_config_update(_Path, _Request, _NewConf, _OldConf, _AppEnvs) ->
ok.
create_listener(Type, Name, NewConf) ->
Res = start_listener(Type, Name, NewConf),
recreate_authenticators(Res, Type, Name, NewConf).
recreate_authenticators(ok, Type, Name, Conf) ->
Chain = listener_id(Type, Name),
_ = emqx_authentication:delete_chain(Chain),
do_create_authneticators(Chain, maps:get(authentication, Conf, []));
recreate_authenticators(Error, _Type, _Name, _NewConf) ->
Error.
do_create_authneticators(Chain, [AuthN | T]) ->
case emqx_authentication:create_authenticator(Chain, AuthN) of
{ok, _} ->
do_create_authneticators(Chain, T);
Error ->
_ = emqx_authentication:delete_chain(Chain),
Error
end;
do_create_authneticators(_Chain, []) ->
ok.
start_listener(Type, Name, NewConf).
remove_listener(Type, Name, OldConf) ->
ok = unregister_ocsp_stapling_refresh(Type, Name),
case stop_listener(Type, Name, OldConf) of
ok ->
_ = emqx_authentication:delete_chain(listener_id(Type, Name)),
ok;
Err ->
Err
end.
stop_listener(Type, Name, OldConf).
update_listener(Type, Name, {OldConf, NewConf}) ->
ok = maybe_unregister_ocsp_stapling_refresh(Type, Name, NewConf),
Res = restart_listener(Type, Name, {OldConf, NewConf}),
recreate_authenticators(Res, Type, Name, NewConf).
restart_listener(Type, Name, {OldConf, NewConf}).
perform_listener_changes([]) ->
ok;
@ -847,10 +821,9 @@ convert_certs(ListenerConf) ->
fun(Type, Listeners0, Acc) ->
Listeners1 =
maps:fold(
fun(Name, Conf, Acc1) ->
Conf1 = convert_certs(Type, Name, Conf),
Conf2 = convert_authn_certs(Type, Name, Conf1),
Acc1#{Name => Conf2}
fun(Name, Conf0, Acc1) ->
Conf1 = convert_certs(Type, Name, Conf0),
Acc1#{Name => Conf1}
end,
#{},
Listeners0
@ -873,19 +846,6 @@ convert_certs(Type, Name, Conf) ->
throw({bad_ssl_config, Reason})
end.
convert_authn_certs(Type, Name, #{<<"authentication">> := AuthNList} = Conf) ->
ChainName = listener_id(Type, Name),
AuthNList1 = lists:map(
fun(AuthN) ->
CertsDir = emqx_authentication_config:certs_dir(ChainName, AuthN),
emqx_authentication_config:convert_certs(CertsDir, AuthN)
end,
AuthNList
),
Conf#{<<"authentication">> => AuthNList1};
convert_authn_certs(_Type, _Name, Conf) ->
Conf.
filter_stacktrace({Reason, _Stacktrace}) -> Reason;
filter_stacktrace(Reason) -> Reason.

View File

@ -67,6 +67,7 @@
terminate/2,
code_change/3
]).
-export([olp_metrics/0]).
%% BACKW: v4.3.0
-export([upgrade_retained_delayed_counter_type/0]).
@ -267,15 +268,18 @@
{counter, 'authentication.failure'}
]).
%% Overload protetion counters
%% Overload protection counters
-define(OLP_METRICS, [
{counter, 'olp.delay.ok'},
{counter, 'olp.delay.timeout'},
{counter, 'olp.hbn'},
{counter, 'olp.gc'},
{counter, 'olp.new_conn'}
{counter, 'overload_protection.delay.ok'},
{counter, 'overload_protection.delay.timeout'},
{counter, 'overload_protection.hibernation'},
{counter, 'overload_protection.gc'},
{counter, 'overload_protection.new_conn'}
]).
olp_metrics() ->
lists:map(fun({_, Metric}) -> Metric end, ?OLP_METRICS).
-record(state, {next_idx = 1}).
-record(metric, {name, type, idx}).
@ -489,7 +493,7 @@ inc_sent(Packet) ->
inc('packets.sent'),
do_inc_sent(Packet).
do_inc_sent(?CONNACK_PACKET(ReasonCode)) ->
do_inc_sent(?CONNACK_PACKET(ReasonCode, _SessPresent)) ->
(ReasonCode == ?RC_SUCCESS) orelse inc('packets.connack.error'),
((ReasonCode == ?RC_NOT_AUTHORIZED) orelse
(ReasonCode == ?CONNACK_AUTH)) andalso
@ -701,9 +705,9 @@ reserved_idx('authorization.cache_hit') -> 302;
reserved_idx('authentication.success') -> 310;
reserved_idx('authentication.success.anonymous') -> 311;
reserved_idx('authentication.failure') -> 312;
reserved_idx('olp.delay.ok') -> 400;
reserved_idx('olp.delay.timeout') -> 401;
reserved_idx('olp.hbn') -> 402;
reserved_idx('olp.gc') -> 403;
reserved_idx('olp.new_conn') -> 404;
reserved_idx('overload_protection.delay.ok') -> 400;
reserved_idx('overload_protection.delay.timeout') -> 401;
reserved_idx('overload_protection.hibernation') -> 402;
reserved_idx('overload_protection.gc') -> 403;
reserved_idx('overload_protection.new_conn') -> 404;
reserved_idx(_) -> undefined.

View File

@ -495,7 +495,7 @@ terminate(_Reason, #state{metric_ids = MIDs}) ->
stop(Name) ->
try
gen_server:stop(Name)
gen_server:stop(Name, normal, 10000)
catch
exit:noproc ->
ok;

View File

@ -38,11 +38,11 @@
| backoff_new_conn.
-type cnt_name() ::
'olp.delay.ok'
| 'olp.delay.timeout'
| 'olp.hbn'
| 'olp.gc'
| 'olp.new_conn'.
'overload_protection.delay.ok'
| 'overload_protection.delay.timeout'
| 'overload_protection.hibernation'
| 'overload_protection.gc'
| 'overload_protection.new_conn'.
-define(overload_protection, overload_protection).
@ -63,10 +63,10 @@ backoff(Zone) ->
false ->
false;
ok ->
emqx_metrics:inc('olp.delay.ok'),
emqx_metrics:inc('overload_protection.delay.ok'),
ok;
timeout ->
emqx_metrics:inc('olp.delay.timeout'),
emqx_metrics:inc('overload_protection.delay.timeout'),
timeout
end;
_ ->
@ -76,18 +76,18 @@ backoff(Zone) ->
%% @doc If forceful GC should be skipped when the system is overloaded.
-spec backoff_gc(Zone :: atom()) -> boolean().
backoff_gc(Zone) ->
do_check(Zone, ?FUNCTION_NAME, 'olp.gc').
do_check(Zone, ?FUNCTION_NAME, 'overload_protection.gc').
%% @doc If hibernation should be skipped when the system is overloaded.
-spec backoff_hibernation(Zone :: atom()) -> boolean().
backoff_hibernation(Zone) ->
do_check(Zone, ?FUNCTION_NAME, 'olp.hbn').
do_check(Zone, ?FUNCTION_NAME, 'overload_protection.hibernation').
%% @doc Returns {error, overloaded} if new connection should be
%% closed when system is overloaded.
-spec backoff_new_conn(Zone :: atom()) -> ok | {error, overloaded}.
backoff_new_conn(Zone) ->
case do_check(Zone, ?FUNCTION_NAME, 'olp.new_conn') of
case do_check(Zone, ?FUNCTION_NAME, 'overload_protection.new_conn') of
true ->
{error, overloaded};
false ->

View File

@ -118,7 +118,7 @@ new_conn(
{stop, stream_accept_error, S}
end;
true ->
emqx_metrics:inc('olp.new_conn'),
emqx_metrics:inc('overload_protection.new_conn'),
_ = quicer:async_shutdown_connection(
Conn,
?QUIC_CONNECTION_SHUTDOWN_FLAG_NONE,

View File

@ -21,7 +21,6 @@
-include("emqx.hrl").
-include("logger.hrl").
-include("types.hrl").
-include_lib("mria/include/mria.hrl").
-include_lib("emqx/include/emqx_router.hrl").
%% Mnesia bootstrap
@ -46,16 +45,25 @@
do_delete_route/2
]).
-export([cleanup_routes/1]).
-export([
match_routes/1,
lookup_routes/1,
has_routes/1
lookup_routes/1
]).
-export([print_routes/1]).
-export([
foldl_routes/2,
foldr_routes/2
]).
-export([topics/0]).
%% Exported for tests
-export([has_route/2]).
%% gen_server callbacks
-export([
init/1,
@ -66,10 +74,21 @@
code_change/3
]).
-export([
get_schema_vsn/0,
init_schema/0,
deinit_schema/0
]).
-type group() :: binary().
-type dest() :: node() | {group(), node()}.
-record(routeidx, {
entry :: emqx_topic_index:key(dest()),
unused = [] :: nil()
}).
%%--------------------------------------------------------------------
%% Mnesia bootstrap
%%--------------------------------------------------------------------
@ -88,6 +107,19 @@ mnesia(boot) ->
{write_concurrency, true}
]}
]}
]),
ok = mria:create_table(?ROUTE_TAB_FILTERS, [
{type, ordered_set},
{rlog_shard, ?ROUTE_SHARD},
{storage, ram_copies},
{record_name, routeidx},
{attributes, record_info(fields, routeidx)},
{storage_properties, [
{ets, [
{read_concurrency, true},
{write_concurrency, auto}
]}
]}
]).
%%--------------------------------------------------------------------
@ -121,43 +153,49 @@ do_add_route(Topic) when is_binary(Topic) ->
-spec do_add_route(emqx_types:topic(), dest()) -> ok | {error, term()}.
do_add_route(Topic, Dest) when is_binary(Topic) ->
Route = #route{topic = Topic, dest = Dest},
case lists:member(Route, lookup_routes(Topic)) of
case has_route(Topic, Dest) of
true ->
ok;
false ->
ok = emqx_router_helper:monitor(Dest),
case emqx_topic:wildcard(Topic) of
true ->
Fun = fun emqx_router_utils:insert_trie_route/2,
emqx_router_utils:maybe_trans(Fun, [?ROUTE_TAB, Route], ?ROUTE_SHARD);
false ->
emqx_router_utils:insert_direct_route(?ROUTE_TAB, Route)
end
mria_insert_route(get_schema_vsn(), Topic, Dest)
end.
%% @doc Match routes
mria_insert_route(v2, Topic, Dest) ->
mria_insert_route_v2(Topic, Dest);
mria_insert_route(v1, Topic, Dest) ->
mria_insert_route_v1(Topic, Dest).
%% @doc Take a real topic (not filter) as input, return the matching topics and topic
%% filters associated with route destination.
-spec match_routes(emqx_types:topic()) -> [emqx_types:route()].
match_routes(Topic) when is_binary(Topic) ->
case match_trie(Topic) of
[] -> lookup_routes(Topic);
Matched -> lists:append([lookup_routes(To) || To <- [Topic | Matched]])
end.
match_routes(get_schema_vsn(), Topic).
%% Optimize: routing table will be replicated to all router nodes.
match_trie(Topic) ->
case emqx_trie:empty() of
true -> [];
false -> emqx_trie:match(Topic)
end.
match_routes(v2, Topic) ->
match_routes_v2(Topic);
match_routes(v1, Topic) ->
match_routes_v1(Topic).
%% @doc Take a topic or filter as input, and return the existing routes with exactly
%% this topic or filter.
-spec lookup_routes(emqx_types:topic()) -> [emqx_types:route()].
lookup_routes(Topic) ->
ets:lookup(?ROUTE_TAB, Topic).
lookup_routes(get_schema_vsn(), Topic).
-spec has_routes(emqx_types:topic()) -> boolean().
has_routes(Topic) when is_binary(Topic) ->
ets:member(?ROUTE_TAB, Topic).
lookup_routes(v2, Topic) ->
lookup_routes_v2(Topic);
lookup_routes(v1, Topic) ->
lookup_routes_v1(Topic).
-spec has_route(emqx_types:topic(), dest()) -> boolean().
has_route(Topic, Dest) ->
has_route(get_schema_vsn(), Topic, Dest).
has_route(v2, Topic, Dest) ->
has_route_v2(Topic, Dest);
has_route(v1, Topic, Dest) ->
has_route_v1(Topic, Dest).
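%% For illustration (node names are hypothetical): after
%%   ok = emqx_router:do_add_route(<<"t/#">>, 'n1@host'),
%%   ok = emqx_router:do_add_route(<<"t/1">>, 'n2@host'),
%% match_routes(<<"t/1">>) returns both entries (the exact topic plus the
%% matching filter), lookup_routes(<<"t/#">>) returns only the entry stored for
%% exactly <<"t/#">>, and has_route(<<"t/#">>, 'n1@host') is true, whichever
%% schema version is in use.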
-spec delete_route(emqx_types:topic()) -> ok | {error, term()}.
delete_route(Topic) when is_binary(Topic) ->
@ -173,18 +211,21 @@ do_delete_route(Topic) when is_binary(Topic) ->
-spec do_delete_route(emqx_types:topic(), dest()) -> ok | {error, term()}.
do_delete_route(Topic, Dest) ->
Route = #route{topic = Topic, dest = Dest},
case emqx_topic:wildcard(Topic) of
true ->
Fun = fun emqx_router_utils:delete_trie_route/2,
emqx_router_utils:maybe_trans(Fun, [?ROUTE_TAB, Route], ?ROUTE_SHARD);
false ->
emqx_router_utils:delete_direct_route(?ROUTE_TAB, Route)
end.
mria_delete_route(get_schema_vsn(), Topic, Dest).
mria_delete_route(v2, Topic, Dest) ->
mria_delete_route_v2(Topic, Dest);
mria_delete_route(v1, Topic, Dest) ->
mria_delete_route_v1(Topic, Dest).
-spec topics() -> list(emqx_types:topic()).
topics() ->
mnesia:dirty_all_keys(?ROUTE_TAB).
topics(get_schema_vsn()).
topics(v2) ->
list_topics_v2();
topics(v1) ->
list_topics_v1().
%% @doc Print routes to a topic
-spec print_routes(emqx_types:topic()) -> ok.
@ -196,12 +237,290 @@ print_routes(Topic) ->
match_routes(Topic)
).
-spec cleanup_routes(node()) -> ok.
cleanup_routes(Node) ->
cleanup_routes(get_schema_vsn(), Node).
cleanup_routes(v2, Node) ->
cleanup_routes_v2(Node);
cleanup_routes(v1, Node) ->
cleanup_routes_v1(Node).
-spec foldl_routes(fun((emqx_types:route(), Acc) -> Acc), Acc) -> Acc.
foldl_routes(FoldFun, AccIn) ->
fold_routes(get_schema_vsn(), foldl, FoldFun, AccIn).
-spec foldr_routes(fun((emqx_types:route(), Acc) -> Acc), Acc) -> Acc.
foldr_routes(FoldFun, AccIn) ->
fold_routes(get_schema_vsn(), foldr, FoldFun, AccIn).
fold_routes(v2, FunName, FoldFun, AccIn) ->
fold_routes_v2(FunName, FoldFun, AccIn);
fold_routes(v1, FunName, FoldFun, AccIn) ->
fold_routes_v1(FunName, FoldFun, AccIn).
call(Router, Msg) ->
gen_server:call(Router, Msg, infinity).
pick(Topic) ->
gproc_pool:pick_worker(router_pool, Topic).
%%--------------------------------------------------------------------
%% Schema v1
%% --------------------------------------------------------------------
-dialyzer({nowarn_function, [cleanup_routes_v1/1]}).
mria_insert_route_v1(Topic, Dest) ->
Route = #route{topic = Topic, dest = Dest},
case emqx_topic:wildcard(Topic) of
true ->
mria_route_tab_insert_update_trie(Route);
false ->
mria_route_tab_insert(Route)
end.
mria_route_tab_insert_update_trie(Route) ->
emqx_router_utils:maybe_trans(
fun emqx_router_utils:insert_trie_route/2,
[?ROUTE_TAB, Route],
?ROUTE_SHARD
).
mria_route_tab_insert(Route) ->
mria:dirty_write(?ROUTE_TAB, Route).
mria_delete_route_v1(Topic, Dest) ->
Route = #route{topic = Topic, dest = Dest},
case emqx_topic:wildcard(Topic) of
true ->
mria_route_tab_delete_update_trie(Route);
false ->
mria_route_tab_delete(Route)
end.
mria_route_tab_delete_update_trie(Route) ->
emqx_router_utils:maybe_trans(
fun emqx_router_utils:delete_trie_route/2,
[?ROUTE_TAB, Route],
?ROUTE_SHARD
).
mria_route_tab_delete(Route) ->
mria:dirty_delete_object(?ROUTE_TAB, Route).
match_routes_v1(Topic) ->
lookup_route_tab(Topic) ++
lists:flatmap(fun lookup_route_tab/1, match_global_trie(Topic)).
match_global_trie(Topic) ->
case emqx_trie:empty() of
true -> [];
false -> emqx_trie:match(Topic)
end.
lookup_routes_v1(Topic) ->
lookup_route_tab(Topic).
lookup_route_tab(Topic) ->
ets:lookup(?ROUTE_TAB, Topic).
has_route_v1(Topic, Dest) ->
has_route_tab_entry(Topic, Dest).
has_route_tab_entry(Topic, Dest) ->
[] =/= ets:match(?ROUTE_TAB, #route{topic = Topic, dest = Dest}).
cleanup_routes_v1(Node) ->
Patterns = [
#route{_ = '_', dest = Node},
#route{_ = '_', dest = {'_', Node}}
],
mria:transaction(?ROUTE_SHARD, fun() ->
[
mnesia:delete_object(?ROUTE_TAB, Route, write)
|| Pat <- Patterns,
Route <- mnesia:match_object(?ROUTE_TAB, Pat, write)
]
end).
list_topics_v1() ->
list_route_tab_topics().
list_route_tab_topics() ->
mnesia:dirty_all_keys(?ROUTE_TAB).
fold_routes_v1(FunName, FoldFun, AccIn) ->
ets:FunName(FoldFun, AccIn, ?ROUTE_TAB).
%%--------------------------------------------------------------------
%% Schema v2
%% One bag table exclusively for regular, non-filter subscription
%% topics, and one `emqx_topic_index` table exclusively for wildcard
%% topics. Writes go to only one of the two tables at a time.
%% --------------------------------------------------------------------
mria_insert_route_v2(Topic, Dest) ->
case emqx_trie_search:filter(Topic) of
Words when is_list(Words) ->
K = emqx_topic_index:make_key(Words, Dest),
mria:dirty_write(?ROUTE_TAB_FILTERS, #routeidx{entry = K});
false ->
mria_route_tab_insert(#route{topic = Topic, dest = Dest})
end.
mria_delete_route_v2(Topic, Dest) ->
case emqx_trie_search:filter(Topic) of
Words when is_list(Words) ->
K = emqx_topic_index:make_key(Words, Dest),
mria:dirty_delete(?ROUTE_TAB_FILTERS, K);
false ->
mria_route_tab_delete(#route{topic = Topic, dest = Dest})
end.
match_routes_v2(Topic) ->
lookup_route_tab(Topic) ++
[match_to_route(M) || M <- match_filters(Topic)].
match_filters(Topic) ->
emqx_topic_index:matches(Topic, ?ROUTE_TAB_FILTERS, []).
lookup_routes_v2(Topic) ->
case emqx_topic:wildcard(Topic) of
true ->
Pat = #routeidx{entry = emqx_topic_index:make_key(Topic, '$1')},
[Dest || [Dest] <- ets:match(?ROUTE_TAB_FILTERS, Pat)];
false ->
lookup_route_tab(Topic)
end.
has_route_v2(Topic, Dest) ->
case emqx_topic:wildcard(Topic) of
true ->
ets:member(?ROUTE_TAB_FILTERS, emqx_topic_index:make_key(Topic, Dest));
false ->
has_route_tab_entry(Topic, Dest)
end.
cleanup_routes_v2(Node) ->
% NOTE
% No point in transaction here because all the operations on filters table are dirty.
ok = ets:foldl(
fun(#routeidx{entry = K}, ok) ->
case get_dest_node(emqx_topic_index:get_id(K)) of
Node ->
mria:dirty_delete(?ROUTE_TAB_FILTERS, K);
_ ->
ok
end
end,
ok,
?ROUTE_TAB_FILTERS
),
ok = ets:foldl(
fun(#route{dest = Dest} = Route, ok) ->
case get_dest_node(Dest) of
Node ->
mria:dirty_delete_object(?ROUTE_TAB, Route);
_ ->
ok
end
end,
ok,
?ROUTE_TAB
).
get_dest_node({_, Node}) ->
Node;
get_dest_node(Node) ->
Node.
list_topics_v2() ->
Pat = #routeidx{entry = '$1'},
Filters = [emqx_topic_index:get_topic(K) || [K] <- ets:match(?ROUTE_TAB_FILTERS, Pat)],
list_route_tab_topics() ++ Filters.
fold_routes_v2(FunName, FoldFun, AccIn) ->
FilterFoldFun = mk_filtertab_fold_fun(FoldFun),
Acc = ets:FunName(FoldFun, AccIn, ?ROUTE_TAB),
ets:FunName(FilterFoldFun, Acc, ?ROUTE_TAB_FILTERS).
mk_filtertab_fold_fun(FoldFun) ->
fun(#routeidx{entry = K}, Acc) -> FoldFun(match_to_route(K), Acc) end.
match_to_route(M) ->
#route{topic = emqx_topic_index:get_topic(M), dest = emqx_topic_index:get_id(M)}.
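%% For illustration (node name is hypothetical): under schema v2,
%%   do_add_route(<<"t/+/x">>, 'n1@host') stores a #routeidx{} key in
%%   ?ROUTE_TAB_FILTERS (built with emqx_topic_index:make_key/2), while
%%   do_add_route(<<"t/1/x">>, 'n1@host') stores a #route{} in the ?ROUTE_TAB bag.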
%%--------------------------------------------------------------------
%% Routing table type
%% --------------------------------------------------------------------
-define(PT_SCHEMA_VSN, {?MODULE, schemavsn}).
-type schemavsn() :: v1 | v2.
-spec get_schema_vsn() -> schemavsn().
get_schema_vsn() ->
persistent_term:get(?PT_SCHEMA_VSN).
-spec init_schema() -> ok.
init_schema() ->
ok = mria:wait_for_tables([?ROUTE_TAB, ?ROUTE_TAB_FILTERS]),
ok = emqx_trie:wait_for_tables(),
ConfSchema = emqx_config:get([broker, routing, storage_schema]),
Schema = choose_schema_vsn(ConfSchema),
ok = persistent_term:put(?PT_SCHEMA_VSN, Schema),
case Schema of
ConfSchema ->
?SLOG(info, #{
msg => "routing_schema_used",
schema => Schema
});
_ ->
?SLOG(notice, #{
msg => "configured_routing_schema_ignored",
schema_in_use => Schema,
configured => ConfSchema,
reason =>
"Could not use configured routing storage schema because "
"there are already non-empty routing tables pertaining to "
"another schema."
})
end.
-spec deinit_schema() -> ok.
deinit_schema() ->
_ = persistent_term:erase(?PT_SCHEMA_VSN),
ok.
-spec choose_schema_vsn(schemavsn()) -> schemavsn().
choose_schema_vsn(ConfType) ->
IsEmptyIndex = emqx_trie:empty(),
IsEmptyFilters = is_empty(?ROUTE_TAB_FILTERS),
case {IsEmptyIndex, IsEmptyFilters} of
{true, true} ->
ConfType;
{false, true} ->
v1;
{true, false} ->
v2;
{false, false} ->
?SLOG(critical, #{
msg => "conflicting_routing_schemas_detected_in_cluster",
configured => ConfType,
reason =>
"There are records in the routing tables related to both v1 "
"and v2 storage schemas. This probably means that some nodes "
"in the cluster use v1 schema and some use v2, independently "
"of each other. The routing is likely broken. Manual intervention "
"and full cluster restart is required. This node will shut down."
}),
error(conflicting_routing_schemas_detected_in_cluster)
end.
is_empty(Tab) ->
ets:first(Tab) =:= '$end_of_table'.
%%--------------------------------------------------------------------
%% gen_server callbacks
%%--------------------------------------------------------------------

View File

@ -148,11 +148,12 @@ handle_info({mnesia_table_event, Event}, State) ->
handle_info({nodedown, Node}, State = #{nodes := Nodes}) ->
case mria_rlog:role() of
core ->
% TODO
% Node may flap, do we need to wait for any pending cleanups in `init/1`
% on the flapping node?
global:trans(
{?LOCK, self()},
fun() ->
mria:transaction(?ROUTE_SHARD, fun ?MODULE:cleanup_routes/1, [Node])
end
fun() -> cleanup_routes(Node) end
),
ok = mria:dirty_delete(?ROUTING_NODE, Node);
replicant ->
@ -197,11 +198,4 @@ stats_fun() ->
end.
cleanup_routes(Node) ->
Patterns = [
#route{_ = '_', dest = Node},
#route{_ = '_', dest = {'_', Node}}
],
[
mnesia:delete_object(?ROUTE_TAB, Route, write)
|| Pat <- Patterns, Route <- mnesia:match_object(?ROUTE_TAB, Pat, write)
].
emqx_router:cleanup_routes(Node).

View File

@ -23,6 +23,8 @@
-export([init/1]).
start_link() ->
%% Init and log routing table type
ok = emqx_router:init_schema(),
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
init([]) ->

View File

@ -24,7 +24,6 @@
-elvis([{elvis_style, invalid_dynamic_call, disable}]).
-include("emqx_schema.hrl").
-include("emqx_authentication.hrl").
-include("emqx_access_control.hrl").
-include_lib("typerefl/include/types.hrl").
-include_lib("hocon/include/hoconsc.hrl").
@ -213,8 +212,10 @@ roots(high) ->
desc => ?DESC(zones),
importance => ?IMPORTANCE_HIDDEN
}
)},
{?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME, authentication(global)},
)}
] ++
emqx_schema_hooks:injection_point('roots.high') ++
[
%% NOTE: authorization schema here is only to keep emqx app pure
%% the full schema for EMQX node is injected in emqx_conf_schema.
{?EMQX_AUTHORIZATION_CONFIG_ROOT_NAME,
@ -1357,6 +1358,11 @@ fields("broker") ->
ref("broker_perf"),
#{importance => ?IMPORTANCE_HIDDEN}
)},
{"routing",
sc(
ref("broker_routing"),
#{importance => ?IMPORTANCE_HIDDEN}
)},
%% FIXME: Need new design for shared subscription group
{"shared_subscription_group",
sc(
@ -1368,6 +1374,18 @@ fields("broker") ->
}
)}
];
fields("broker_routing") ->
[
{"storage_schema",
sc(
hoconsc:enum([v1, v2]),
#{
default => v1,
'readOnly' => true,
desc => ?DESC(broker_routing_storage_schema)
}
)}
];
fields("shared_subscription_group") ->
[
{"strategy",
@ -1748,11 +1766,8 @@ mqtt_listener(Bind) ->
desc => ?DESC(mqtt_listener_proxy_protocol_timeout),
default => <<"3s">>
}
)},
{?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME, (authentication(listener))#{
importance => ?IMPORTANCE_HIDDEN
}}
].
)}
] ++ emqx_schema_hooks:injection_point('mqtt.listener').
base_listener(Bind) ->
[
@ -2316,18 +2331,7 @@ ciphers_schema(Default) ->
hoconsc:array(string()),
#{
default => default_ciphers(Default),
converter => fun
(undefined) ->
[];
(<<>>) ->
[];
("") ->
[];
(Ciphers) when is_binary(Ciphers) ->
binary:split(Ciphers, <<",">>, [global]);
(Ciphers) when is_list(Ciphers) ->
Ciphers
end,
converter => fun converter_ciphers/2,
validator =>
case Default =:= quic of
%% quic has openssl statically linked
@ -2338,6 +2342,15 @@ ciphers_schema(Default) ->
}
).
converter_ciphers(undefined, _Opts) ->
[];
converter_ciphers(<<>>, _Opts) ->
[];
converter_ciphers(Ciphers, _Opts) when is_list(Ciphers) -> Ciphers;
converter_ciphers(Ciphers, _Opts) when is_binary(Ciphers) ->
{ok, List} = to_comma_separated_binary(binary_to_list(Ciphers)),
List.
default_ciphers(Which) ->
lists:map(
fun erlang:iolist_to_binary/1,
@ -2654,7 +2667,7 @@ validate_tcp_keepalive(Value) ->
%% @doc This function is used as value validator and also run-time parser.
parse_tcp_keepalive(Str) ->
try
[Idle, Interval, Probes] = binary:split(iolist_to_binary(Str), <<",">>, [global]),
{ok, [Idle, Interval, Probes]} = to_comma_separated_binary(Str),
%% use 10 times the Linux defaults as range limit
IdleInt = parse_ka_int(Idle, "Idle", 1, 7200_0),
IntervalInt = parse_ka_int(Interval, "Interval", 1, 75_0),
@ -2770,41 +2783,6 @@ str(B) when is_binary(B) ->
str(S) when is_list(S) ->
S.
authentication(Which) ->
{Importance, Desc} =
case Which of
global ->
%% For root level authentication, it is recommended to configure
%% from the dashboard or API.
%% Hence it's considered a low-importance when it comes to
%% configuration importance.
{?IMPORTANCE_LOW, ?DESC(global_authentication)};
listener ->
{?IMPORTANCE_HIDDEN, ?DESC(listener_authentication)}
end,
%% poor man's dependency injection
%% this is due to the fact that authn is implemented outside of 'emqx' app.
%% so it can not be a part of emqx_schema since 'emqx' app is supposed to
%% work standalone.
Type =
case persistent_term:get(?EMQX_AUTHENTICATION_SCHEMA_MODULE_PT_KEY, undefined) of
undefined ->
hoconsc:array(typerefl:map());
Module ->
Module:root_type()
end,
hoconsc:mk(Type, #{
desc => Desc,
converter => fun ensure_array/2,
default => [],
importance => Importance
}).
%% the older version schema allows individual element (instead of a chain) in config
ensure_array(undefined, _) -> undefined;
ensure_array(L, _) when is_list(L) -> L;
ensure_array(M, _) -> [M].
-spec qos() -> typerefl:type().
qos() ->
typerefl:alias("qos", typerefl:union([0, 1, 2])).
@ -3162,9 +3140,10 @@ quic_feature_toggle(Desc) ->
importance => ?IMPORTANCE_HIDDEN,
required => false,
converter => fun
(true) -> 1;
(false) -> 0;
(Other) -> Other
(Val, #{make_serializable := true}) -> Val;
(true, _Opts) -> 1;
(false, _Opts) -> 0;
(Other, _Opts) -> Other
end
}
).

View File

@ -0,0 +1,118 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2017-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_schema_hooks).
-type hookpoint() :: atom().
-callback injected_fields() ->
#{
hookpoint() => [hocon_schema:field()]
}.
-optional_callbacks([injected_fields/0]).
-export_type([hookpoint/0]).
-define(HOOKPOINT_PT_KEY(POINT_NAME), {?MODULE, fields, POINT_NAME}).
-export([
injection_point/1,
inject_from_modules/1
]).
%% for tests
-export([
erase_injections/0,
any_injections/0
]).
%%--------------------------------------------------------------------
%% API
%%--------------------------------------------------------------------
injection_point(PointName) ->
persistent_term:get(?HOOKPOINT_PT_KEY(PointName), []).
erase_injections() ->
lists:foreach(
fun
({?HOOKPOINT_PT_KEY(_) = Key, _}) ->
persistent_term:erase(Key);
(_) ->
ok
end,
persistent_term:get()
).
any_injections() ->
lists:any(
fun
({?HOOKPOINT_PT_KEY(_), _}) ->
true;
(_) ->
false
end,
persistent_term:get()
).
inject_from_modules(Modules) ->
Injections =
lists:foldl(
fun append_module_injections/2,
#{},
Modules
),
ok = inject_fields(maps:to_list(Injections)).
%%--------------------------------------------------------------------
%% Internal functions
%%--------------------------------------------------------------------
append_module_injections(Module, AllInjections) when is_atom(Module) ->
append_module_injections(Module:injected_fields(), AllInjections);
append_module_injections(ModuleInjections, AllInjections) when is_map(ModuleInjections) ->
maps:fold(
fun(PointName, Fields, Acc) ->
maps:update_with(
PointName,
fun(Fields0) ->
Fields0 ++ Fields
end,
Fields,
Acc
)
end,
AllInjections,
ModuleInjections
).
inject_fields([]) ->
ok;
inject_fields([{PointName, Fields} | Rest]) ->
case any_injections(PointName) of
true ->
inject_fields(Rest);
false ->
ok = inject_fields(PointName, Fields),
inject_fields(Rest)
end.
inject_fields(PointName, Fields) ->
Key = ?HOOKPOINT_PT_KEY(PointName),
persistent_term:put(Key, Fields).
any_injections(PointName) ->
persistent_term:get(?HOOKPOINT_PT_KEY(PointName), undefined) =/= undefined.
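%% For illustration, a sketch (module and field names are hypothetical) of a
%% module providing fields for an injection point such as 'roots.high', which
%% emqx_schema consumes earlier in this change:
%%
%%   -module(my_schema_hook).
%%   -behaviour(emqx_schema_hooks).
%%   -export([injected_fields/0]).
%%
%%   injected_fields() ->
%%       #{'roots.high' => [{"my_root", hoconsc:mk(typerefl:map(), #{default => #{}})}]}.
%%
%% registered at startup with:
%%   ok = emqx_schema_hooks:inject_from_modules([my_schema_hook]).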

View File

@ -177,7 +177,9 @@ names() ->
emqx_subscriptions_shared_count,
emqx_subscriptions_shared_max,
emqx_retained_count,
emqx_retained_max
emqx_retained_max,
emqx_delayed_count,
emqx_delayed_max
].
%% @doc Get stats by name.

View File

@ -0,0 +1,120 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
%% @doc Topic index implementation with gb_trees stored in persistent_term.
%% This is only suitable for a static set of topics or topic filters.
-module(emqx_topic_gbt).
-export([new/0, new/1]).
-export([insert/4]).
-export([delete/3]).
-export([match/2]).
-export([matches/3]).
-export([get_id/1]).
-export([get_topic/1]).
-export([get_record/2]).
-type key(ID) :: emqx_trie_search:key(ID).
-type words() :: emqx_trie_search:words().
-type match(ID) :: key(ID).
-type name() :: any().
%% @private Only for testing.
-spec new() -> name().
new() ->
new(test).
%% @doc Create a new gb_tree and store it in the persistent_term with the
%% given name.
-spec new(name()) -> name().
new(Name) ->
T = gb_trees:from_orddict([]),
true = gbt_update(Name, T),
Name.
%% @doc Insert a new entry into the index that associates given topic filter to given
%% record ID, and attaches arbitrary record to the entry. This allows users to choose
%% between regular and "materialized" indexes, for example.
-spec insert(emqx_types:topic() | words(), _ID, _Record, name()) -> true.
insert(Filter, ID, Record, Name) ->
Tree = gbt(Name),
Key = key(Filter, ID),
NewTree = gb_trees:enter(Key, Record, Tree),
true = gbt_update(Name, NewTree).
%% @doc Delete an entry from the index that associates given topic filter to given
%% record ID. Deleting non-existing entry is not an error.
-spec delete(emqx_types:topic() | words(), _ID, name()) -> true.
delete(Filter, ID, Name) ->
Tree = gbt(Name),
Key = key(Filter, ID),
NewTree = gb_trees:delete_any(Key, Tree),
true = gbt_update(Name, NewTree).
%% @doc Match given topic against the index and return the first match, or `false` if
%% no match is found.
-spec match(emqx_types:topic(), name()) -> match(_ID) | false.
match(Topic, Name) ->
emqx_trie_search:match(Topic, make_nextf(Name)).
%% @doc Match given topic against the index and return _all_ matches.
%% If `unique` option is given, return only unique matches by record ID.
matches(Topic, Name, Opts) ->
emqx_trie_search:matches(Topic, make_nextf(Name), Opts).
%% @doc Extract record ID from the match.
-spec get_id(match(ID)) -> ID.
get_id(Key) ->
emqx_trie_search:get_id(Key).
%% @doc Extract topic (or topic filter) from the match.
-spec get_topic(match(_ID)) -> emqx_types:topic().
get_topic(Key) ->
emqx_trie_search:get_topic(Key).
%% @doc Fetch the record associated with the match.
-spec get_record(match(_ID), name()) -> _Record.
get_record(Key, Name) ->
Gbt = gbt(Name),
gb_trees:get(Key, Gbt).
key(TopicOrFilter, ID) ->
emqx_trie_search:make_key(TopicOrFilter, ID).
gbt(Name) ->
persistent_term:get({?MODULE, Name}).
gbt_update(Name, Tree) ->
persistent_term:put({?MODULE, Name}, Tree),
true.
gbt_next(nil, _Input) ->
'$end_of_table';
gbt_next({P, _V, _Smaller, Bigger}, K) when K >= P ->
gbt_next(Bigger, K);
gbt_next({P, _V, Smaller, _Bigger}, K) ->
case gbt_next(Smaller, K) of
'$end_of_table' ->
P;
NextKey ->
NextKey
end.
make_nextf(Name) ->
{_SizeWeDontCare, TheTree} = gbt(Name),
fun(Key) -> gbt_next(TheTree, Key) end.
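%% For illustration, a usage sketch (the index name, ids and records are
%% hypothetical):
%%
%%   Name = emqx_topic_gbt:new(my_static_index),
%%   true = emqx_topic_gbt:insert(<<"t/+/x">>, id1, rec1, Name),
%%   Key = emqx_topic_gbt:match(<<"t/1/x">>, Name),
%%   id1 = emqx_topic_gbt:get_id(Key),
%%   rec1 = emqx_topic_gbt:get_record(Key, Name).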

View File

@ -14,18 +14,7 @@
%% limitations under the License.
%%--------------------------------------------------------------------
%% @doc Topic index for matching topics to topic filters.
%%
%% Works on top of ETS ordered_set table. Keys are tuples constructed from
%% parsed topic filters and record IDs, wrapped in a tuple to order them
%% strictly greater than unit tuple (`{}`). Existing table may be used if
%% existing keys will not collide with index keys.
%%
%% Designed to effectively answer questions like:
%% 1. Does any topic filter match given topic?
%% 2. Which records are associated with topic filters matching given topic?
%% 3. Which topic filters match given topic?
%% 4. Which record IDs are associated with topic filters matching given topic?
%% @doc Topic index implementation with an ETS ordered_set table as storage.
-module(emqx_topic_index).
@ -35,13 +24,15 @@
-export([match/2]).
-export([matches/3]).
-export([make_key/2]).
-export([get_id/1]).
-export([get_topic/1]).
-export([get_record/2]).
-type word() :: binary() | '+' | '#'.
-type key(ID) :: {[word()], {ID}}.
-type key(ID) :: emqx_trie_search:key(ID).
-type match(ID) :: key(ID).
-type words() :: emqx_trie_search:words().
%% @doc Create a new ETS table suitable for topic index.
%% Usable mostly for testing purposes.
@ -52,191 +43,53 @@ new() ->
%% @doc Insert a new entry into the index that associates given topic filter to given
%% record ID, and attaches arbitrary record to the entry. This allows users to choose
%% between regular and "materialized" indexes, for example.
-spec insert(emqx_types:topic(), _ID, _Record, ets:table()) -> true.
-spec insert(emqx_types:topic() | words(), _ID, _Record, ets:table()) -> true.
insert(Filter, ID, Record, Tab) ->
ets:insert(Tab, {{words(Filter), {ID}}, Record}).
Key = make_key(Filter, ID),
true = ets:insert(Tab, {Key, Record}).
%% @doc Delete an entry from the index that associates given topic filter to given
%% record ID. Deleting non-existing entry is not an error.
-spec delete(emqx_types:topic(), _ID, ets:table()) -> true.
-spec delete(emqx_types:topic() | words(), _ID, ets:table()) -> true.
delete(Filter, ID, Tab) ->
ets:delete(Tab, {words(Filter), {ID}}).
ets:delete(Tab, make_key(Filter, ID)).
-spec make_key(emqx_types:topic() | words(), ID) -> key(ID).
make_key(TopicOrFilter, ID) ->
emqx_trie_search:make_key(TopicOrFilter, ID).
%% @doc Match given topic against the index and return the first match, or `false` if
%% no match is found.
-spec match(emqx_types:topic(), ets:table()) -> match(_ID) | false.
match(Topic, Tab) ->
{Words, RPrefix} = match_init(Topic),
match(Words, RPrefix, Tab).
match(Words, RPrefix, Tab) ->
Prefix = lists:reverse(RPrefix),
match(ets:next(Tab, {Prefix, {}}), Prefix, Words, RPrefix, Tab).
match(K, Prefix, Words, RPrefix, Tab) ->
case match_next(Prefix, K, Words) of
true ->
K;
skip ->
match(ets:next(Tab, K), Prefix, Words, RPrefix, Tab);
stop ->
false;
Matched ->
match_rest(Matched, Words, RPrefix, Tab)
end.
match_rest([W1 | [W2 | _] = SLast], [W1 | [W2 | _] = Rest], RPrefix, Tab) ->
% NOTE
% Fast-forward through identical words in the topic and the last key suffixes.
% This should save us a few redundant `ets:next` calls at the cost of slightly
% more complex match patterns.
match_rest(SLast, Rest, [W1 | RPrefix], Tab);
match_rest(SLast, [W | Rest], RPrefix, Tab) when is_list(SLast) ->
match(Rest, [W | RPrefix], Tab);
match_rest(plus, [W | Rest], RPrefix, Tab) ->
% NOTE
% There's '+' in the key suffix, meaning we should consider 2 alternatives:
% 1. Match the rest of the topic as if there was '+' in the current position.
% 2. Skip this key and try to match the topic as it is.
case match(Rest, ['+' | RPrefix], Tab) of
Match = {_, _} ->
Match;
false ->
match(Rest, [W | RPrefix], Tab)
end;
match_rest(_, [], _RPrefix, _Tab) ->
false.
emqx_trie_search:match(Topic, make_nextf(Tab)).
%% @doc Match given topic against the index and return _all_ matches.
%% If `unique` option is given, return only unique matches by record ID.
-spec matches(emqx_types:topic(), ets:table(), _Opts :: [unique]) -> [match(_ID)].
matches(Topic, Tab, Opts) ->
{Words, RPrefix} = match_init(Topic),
AccIn =
case Opts of
[unique | _] -> #{};
[] -> []
end,
Matches = matches(Words, RPrefix, AccIn, Tab),
case Matches of
#{} -> maps:values(Matches);
_ -> Matches
end.
matches(Words, RPrefix, Acc, Tab) ->
Prefix = lists:reverse(RPrefix),
matches(ets:next(Tab, {Prefix, {}}), Prefix, Words, RPrefix, Acc, Tab).
matches(Words, RPrefix, K = {Filter, _}, Acc, Tab) ->
Prefix = lists:reverse(RPrefix),
case Prefix > Filter of
true ->
% NOTE: Prefix already greater than the last key seen, need to `ets:next/2`.
matches(ets:next(Tab, {Prefix, {}}), Prefix, Words, RPrefix, Acc, Tab);
false ->
% NOTE: Prefix is still less than or equal to the last key seen, reuse it.
matches(K, Prefix, Words, RPrefix, Acc, Tab)
end.
matches(K, Prefix, Words, RPrefix, Acc, Tab) ->
case match_next(Prefix, K, Words) of
true ->
matches(ets:next(Tab, K), Prefix, Words, RPrefix, match_add(K, Acc), Tab);
skip ->
matches(ets:next(Tab, K), Prefix, Words, RPrefix, Acc, Tab);
stop ->
Acc;
Matched ->
% NOTE: Preserve next key on the stack to save on `ets:next/2` calls.
matches_rest(Matched, Words, RPrefix, K, Acc, Tab)
end.
matches_rest([W1 | [W2 | _] = SLast], [W1 | [W2 | _] = Rest], RPrefix, K, Acc, Tab) ->
% NOTE
% Fast-forward through identical words in the topic and the last key suffixes.
% This should save us a few redundant `ets:next` calls at the cost of slightly
% more complex match patterns.
matches_rest(SLast, Rest, [W1 | RPrefix], K, Acc, Tab);
matches_rest(SLast, [W | Rest], RPrefix, K, Acc, Tab) when is_list(SLast) ->
matches(Rest, [W | RPrefix], K, Acc, Tab);
matches_rest(plus, [W | Rest], RPrefix, K, Acc, Tab) ->
% NOTE
% There's '+' in the key suffix, meaning we should accumulate all matches from
% each of 2 branches:
% 1. Match the rest of the topic as if there was '+' in the current position.
% 2. Skip this key and try to match the topic as it is.
NAcc = matches(Rest, ['+' | RPrefix], K, Acc, Tab),
matches(Rest, [W | RPrefix], K, NAcc, Tab);
matches_rest(_, [], _RPrefix, _K, Acc, _Tab) ->
Acc.
match_add(K = {_Filter, ID}, Acc = #{}) ->
% NOTE: ensuring uniqueness by record ID
Acc#{ID => K};
match_add(K, Acc) ->
[K | Acc].
match_next(Prefix, {Filter, _ID}, Suffix) ->
match_filter(Prefix, Filter, Suffix);
match_next(_, '$end_of_table', _) ->
stop.
match_filter([], [], []) ->
% NOTE: we matched the topic exactly
true;
match_filter([], [], _Suffix) ->
% NOTE: we matched the prefix, but there may be more matches next
skip;
match_filter([], ['#'], _Suffix) ->
% NOTE: naturally, '#' < '+', so this is already optimal for `match/2`
true;
match_filter([], ['+' | _], _Suffix) ->
plus;
match_filter([], [_H | _] = Rest, _Suffix) ->
Rest;
match_filter([H | T1], [H | T2], Suffix) ->
match_filter(T1, T2, Suffix);
match_filter([H1 | _], [H2 | _], _Suffix) when H2 > H1 ->
% NOTE: we're strictly past the prefix, no need to continue
stop.
match_init(Topic) ->
case words(Topic) of
[W = <<"$", _/bytes>> | Rest] ->
% NOTE
% This will effectively skip attempts to match special topics to `#` or `+/...`.
{Rest, [W]};
Words ->
{Words, []}
end.
emqx_trie_search:matches(Topic, make_nextf(Tab), Opts).
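%% Sketch of the `unique` option (IDs below are arbitrary example terms):
%%   true = emqx_topic_index:insert(<<"a/b/+">>, id1, <<>>, Tab),
%%   true = emqx_topic_index:insert(<<"a/+/c">>, id1, <<>>, Tab),
%%   %% both filters match "a/b/c", but `unique` keeps only one match per record ID:
%%   [_, _] = emqx_topic_index:matches(<<"a/b/c">>, Tab, []),
%%   [_] = emqx_topic_index:matches(<<"a/b/c">>, Tab, [unique]).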
%% @doc Extract record ID from the match.
-spec get_id(match(ID)) -> ID.
get_id({_Filter, {ID}}) ->
ID.
get_id(Key) ->
emqx_trie_search:get_id(Key).
%% @doc Extract topic (or topic filter) from the match.
-spec get_topic(match(_ID)) -> emqx_types:topic().
get_topic({Filter, _ID}) ->
emqx_topic:join(Filter).
get_topic(Key) ->
emqx_trie_search:get_topic(Key).
%% @doc Fetch the record associated with the match.
%% NOTE: Only really useful for ETS tables where the record ID is the first element.
-spec get_record(match(_ID), ets:table()) -> _Record.
%% May return an empty list if the index entry was deleted in the meantime.
%% NOTE: Only really useful for ETS tables where the record data is the last element.
-spec get_record(match(_ID), ets:table()) -> [_Record].
get_record(K, Tab) ->
ets:lookup_element(Tab, K, 2).
case ets:lookup(Tab, K) of
[Entry] ->
[erlang:element(tuple_size(Entry), Entry)];
[] ->
[]
end.
%%
-spec words(emqx_types:topic()) -> [word()].
words(Topic) when is_binary(Topic) ->
% NOTE
% This is almost identical to `emqx_topic:words/1`, but it doesn't convert empty
% tokens to ''. This is needed to keep ordering of words consistent with what
% `match_filter/3` expects.
[word(W) || W <- emqx_topic:tokens(Topic)].
-spec word(binary()) -> word().
word(<<"+">>) -> '+';
word(<<"#">>) -> '#';
word(Bin) -> Bin.
make_nextf(Tab) ->
fun(Key) -> ets:next(Tab, Key) end.

View File

@ -21,6 +21,7 @@
%% Mnesia bootstrap
-export([
mnesia/1,
wait_for_tables/0,
create_session_trie/1
]).
@ -105,6 +106,10 @@ create_session_trie(Type) ->
]
).
-spec wait_for_tables() -> ok | {error, _Reason}.
wait_for_tables() ->
mria:wait_for_tables([?TRIE]).
%%--------------------------------------------------------------------
%% Topics APIs
%%--------------------------------------------------------------------

View File

@ -0,0 +1,355 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
%% @doc Topic index for matching topics to topic filters.
%%
%% Works on top of an ordered collection data set, such as an ETS ordered_set table.
%% Keys are tuples constructed from parsed topic filters and record IDs, with the
%% record ID wrapped in a tuple so that it orders strictly greater than the unit tuple (`{}`).
%% An existing table may be used if its existing keys will not collide with the index keys.
%%
%% Designed to effectively answer questions like:
%% 1. Does any topic filter match given topic?
%% 2. Which records are associated with topic filters matching given topic?
%% 3. Which topic filters match given topic?
%% 4. Which record IDs are associated with topic filters matching given topic?
%%
%% Trie-search algorithm:
%%
%% Given a 3-level topic (e.g. a/b/c), if we leave out '#' for now,
%% all possible subscriptions of a/b/c can be enumerated as below:
%%
%% a/b/c
%% a/b/+
%% a/+/c <--- subscribed
%% a/+/+
%% +/b/c <--- subscribed
%% +/b/+
%% +/+/c
%% +/+/+ <--- start searching upward from here
%%
%% Let's name this search space "Space1".
%% If we brute-force it, the scope would be 8 (2^3).
%% Meaning this has O(2^N) complexity (N being the level of topics).
%%
%% This clearly isn't going to work.
%% Should we then try to enumerate all subscribers instead?
%% If there are also other subscriptions, e.g. "+/x/y" and "+/b/0"
%%
%% a/+/c <--- match of a/b/c
%% +/x/n
%% ...
%% +/x/2
%% +/x/1
%% +/b/c <--- match of a/b/c
%% +/b/1
%% +/b/0
%%
%% Let's name it "Space2".
%%
%% This has O(M * L) complexity (M being the total number of subscriptions,
%% and L being the number of topic levels).
%% This is usually a lot smaller than "Space1", but still not very effective
%% if the collection size is e.g. 1 million.
%%
%% To make it more effective, we'll need to combine the two algorithms:
%% Use the ordered subscription topics' prefixes as starting points to make
%% guesses about whether or not the next word can be a '+', and skip over
%% to the next possible match.
%%
%% NOTE: A prerequisite of the ordered collection is that it should be able
%% to find the *immediate-next* topic/filter with a given prefix.
%%
%% In the above example, we start from "+/b/0". When comparing "+/b/0"
%% with "a/b/c", we know the matching prefix is "+/b", meaning we can
%% start guessing if the next word is '+' or 'c':
%% * It can't be '+' because '+' < '0'
%% * It might be 'c' because 'c' > '0'
%%
%% So, we try to jump to the next topic which has a prefix of "+/b/c"
%% (this effectively means skipping over "+/b/1").
%%
%% After "+/b/c" is found to be a matching filter, we move up:
%% * The next possible match is "a/+/+" according to Space1
%% * The next subscription is "+/x/1" according to Space2
%%
%% "a/+/+" is lexicographically greater than "+/x/+", so let's jump to
%% the immediate-next of 'a/+/+', which is "a/+/c", allowing us to skip
%% over all the ones starting with "+/x".
%%
%% If we take '#' into consideration, it's only one extra comparison to see
%% if a filter ends with '#'.
%%
%% In summary, the complexity of this algorithm is O(N * L),
%% N being the total number of matches, and L being the number of topic levels.
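%%
%% Worked example (an illustration of the walk above, not part of the implementation):
%% suppose the ordered collection holds the filters
%%   +/b/0 < +/b/1 < +/b/c < +/x/1 < ... < +/x/n < a/+/c
%% and we search for the topic a/b/c.
%%   1. Start at the smallest key and land on +/b/0; the matched prefix is "+/b",
%%      and since 'c' > '0' we seek to the first key >= +/b/c, skipping +/b/1.
%%   2. +/b/c matches; collect it and continue from there.
%%   3. The next candidate per Space1 is a/+/+, which is greater than every +/x/...
%%      entry, so we seek straight to the first key >= a/+/+ and land on a/+/c,
%%      which matches as well.
%% Only a handful of keys are visited instead of the whole collection.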
-module(emqx_trie_search).
-export([make_key/2, filter/1]).
-export([match/2, matches/3, get_id/1, get_topic/1]).
-export_type([key/1, word/0, words/0, nextf/0, opts/0]).
-define(END, '$end_of_table').
-type word() :: binary() | '+' | '#'.
-type words() :: [word()].
-type base_key() :: {binary() | [word()], {}}.
-type key(ID) :: {binary() | [word()], {ID}}.
-type nextf() :: fun((key(_) | base_key()) -> ?END | key(_)).
-type opts() :: [unique | return_first].
%% @doc Make a search-key for the given topic.
-spec make_key(emqx_types:topic() | words(), ID) -> key(ID).
make_key(Topic, ID) when is_binary(Topic) ->
case filter(Topic) of
Words when is_list(Words) ->
%% it's a wildcard
{Words, {ID}};
false ->
%% Not a wildcard. We do not split the topic
%% because they can be found with direct lookups.
%% it is also more compact in memory.
{Topic, {ID}}
end;
make_key(Words, ID) when is_list(Words) ->
{Words, {ID}}.
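%% For example (the IDs below are arbitrary terms): wildcard filters are split into
%% words while plain topics are kept whole:
%%   make_key(<<"sensor/+/temp">>, id1) -> {[<<"sensor">>, '+', <<"temp">>], {id1}}
%%   make_key(<<"sensor/1/temp">>, id2) -> {<<"sensor/1/temp">>, {id2}}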
%% @doc Parse a topic filter into a list of words. Returns `false` if it's not a filter.
-spec filter(emqx_types:topic()) -> words() | false.
filter(Topic) ->
Words = filter_words(Topic),
emqx_topic:wildcard(Words) andalso Words.
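%% E.g. filter(<<"a/+/c">>) -> [<<"a">>, '+', <<"c">>], while filter(<<"a/b/c">>)
%% returns `false` since the topic contains no wildcard.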
%% @doc Extract record ID from the match.
-spec get_id(key(ID)) -> ID.
get_id({_Filter, {ID}}) ->
ID.
%% @doc Extract topic (or topic filter) from the match.
-spec get_topic(key(_ID)) -> emqx_types:topic().
get_topic({Filter, _ID}) when is_list(Filter) ->
emqx_topic:join(Filter);
get_topic({Topic, _ID}) ->
Topic.
-compile({inline, [base/1, move_up/2, match_add/2, compare/3]}).
%% Make the base-key which can be used to locate the desired search target.
base(Prefix) ->
{Prefix, {}}.
base_init([W = <<"$", _/bytes>> | _]) ->
base([W]);
base_init(_) ->
base([]).
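%% E.g. base_init([<<"$SYS">>, <<"brokers">>]) -> {[<<"$SYS">>], {}}, so the search
%% for a "$"-prefixed topic starts past all `#` and `+/...` filters, which therefore
%% can never match it.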
%% Move the search target to the key next to the given Base.
move_up(NextF, Base) ->
NextF(Base).
%% @doc Match given topic against the index and return the first match, or `false` if
%% no match is found.
-spec match(emqx_types:topic(), nextf()) -> false | key(_).
match(Topic, NextF) ->
try search(Topic, NextF, [return_first]) of
_ -> false
catch
throw:{first, Res} ->
Res
end.
%% @doc Match given topic against the index and return _all_ matches.
%% If `unique` option is given, return only unique matches by record ID.
-spec matches(emqx_types:topic(), nextf(), opts()) -> [key(_)].
matches(Topic, NextF, Opts) ->
search(Topic, NextF, Opts).
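%% A minimal sketch of driving the search over a plain `ordered_set` ETS table
%% (the table name and entries below are made up for illustration):
%%   Tab = ets:new(example_index, [ordered_set]),
%%   true = ets:insert(Tab, {emqx_trie_search:make_key(<<"a/+/c">>, id1), record}),
%%   NextF = fun(Key) -> ets:next(Tab, Key) end,
%%   Key = emqx_trie_search:match(<<"a/b/c">>, NextF),
%%   id1 = emqx_trie_search:get_id(Key),
%%   [Key] = emqx_trie_search:matches(<<"a/b/c">>, NextF, [unique]).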
%% @doc Entrypoint of the search for a given topic.
search(Topic, NextF, Opts) ->
Words = topic_words(Topic),
Base = base_init(Words),
ORetFirst = proplists:get_bool(return_first, Opts),
OUnique = proplists:get_bool(unique, Opts),
Acc0 =
case ORetFirst of
true ->
first;
false when OUnique ->
#{};
false ->
[]
end,
Matches =
case search_new(Words, Base, NextF, Acc0) of
{Cursor, Acc} ->
match_topics(Topic, Cursor, NextF, Acc);
Acc ->
Acc
end,
case is_map(Matches) of
true ->
maps:values(Matches);
false ->
Matches
end.
%% The recursive entrypoint of the trie-search algorithm.
%% Always start from the initial prefix and words.
search_new(Words0, NewBase, NextF, Acc) ->
case move_up(NextF, NewBase) of
?END ->
Acc;
Cursor ->
search_up(Words0, Cursor, NextF, Acc)
end.
%% Search to the bigger end of ordered collection of topics and topic-filters.
search_up(Words, {Filter, _} = Cursor, NextF, Acc) ->
case compare(Filter, Words, 0) of
match_full ->
search_new(Words, Cursor, NextF, match_add(Cursor, Acc));
match_prefix ->
search_new(Words, Cursor, NextF, Acc);
lower ->
{Cursor, Acc};
{Pos, SeekWord} ->
% NOTE
% This is a seek instruction. It means we need to take `Pos` words
% from the current topic filter and attach `SeekWord` to the end of it.
NewBase = base(seek(Pos, SeekWord, Filter)),
search_new(Words, NewBase, NextF, Acc)
end.
seek(_Pos = 0, SeekWord, _FilterTail) ->
[SeekWord];
seek(Pos, SeekWord, [FilterWord | Rest]) ->
[FilterWord | seek(Pos - 1, SeekWord, Rest)].
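%% E.g. seek(2, <<"c">>, ['+', <<"b">>, <<"0">>]) -> ['+', <<"b">>, <<"c">>], i.e.
%% keep the first 2 filter words and substitute the seek word for the rest.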
compare(NotFilter, _, _) when is_binary(NotFilter) ->
lower;
compare([], [], _) ->
% NOTE
% Topic: a/b/c/d
% Filter: a/+/+/d
% We matched the topic to a topic filter exactly (possibly with pluses).
% We include it in the result set, and now need to try next entry in the table.
% Closest possible next entries that we must not miss:
% * a/+/+/d (same topic but a different ID)
% * a/+/+/d/# (also a match)
match_full;
compare([], _Words, _) ->
% NOTE
% Topic: a/b/c/d
% Filter: a/+/c
% We found out that a topic filter is a prefix of the topic (possibly with pluses).
% We discard it, and now need to try next entry in the table.
% Closest possible next entries that we must not miss:
% * a/+/c/# (which is a match)
% * a/+/c/+ (also a match)
match_prefix;
compare(['#'], _Words, _) ->
% NOTE
% Topic: a/b/c/d
% Filter: a/+/+/d/# or just a/#
% We matched the topic to a topic filter with wildcard (possibly with pluses).
% We include it in the result set, and now need to try next entry in the table.
% Closest possible next entries that we must not miss:
% * a/+/+/d/# (same topic but a different ID)
match_full;
compare(['+' | TF], [HW | TW], Pos) ->
case compare(TF, TW, Pos + 1) of
lower ->
% NOTE
% Topic: a/b/c/d
% Filter: a/+/+/e/1 or a/b/+/d/1
% The topic is lower than a topic filter. But we're at the `+` position,
% so we emit a backtrack point to seek to:
% Seek: {2, c}
% We skip over part of search space, and seek to the next possible match:
% Next: a/+/c
{Pos, HW};
Other ->
% NOTE
% It's either already a backtrack point, emitted from the last `+`
% position or just a seek / match. In both cases we just pass it
% through.
Other
end;
compare([HW | TF], [HW | TW], Pos) ->
% NOTE
% Skip over the same word in both topic and filter, keeping the last backtrack point.
compare(TF, TW, Pos + 1);
compare([HF | _], [HW | _], _) when HF > HW ->
% NOTE
% Topic: a/b/c/d
% Filter: a/b/c/e/1 or a/b/+/e
% The topic is lower than a topic filter. In the first case there's nowhere to
% backtrack to, we're out of the search space. In the second case there's a `+`
% on 3rd level, we'll seek up from there.
lower;
compare([_ | _], [], _) ->
% NOTE
% Topic: a/b/c/d
% Filter: a/b/c/d/1 or a/+/c/d/1
% The topic is lower than a topic filter (since it's shorter). In the first case
% there's nowhere to backtrack to, we're out of the search space. In the second case
% there's a `+` on 2nd level, we'll seek up from there.
lower;
compare([_ | _], [HW | _], Pos) ->
% NOTE
% Topic: a/b/c/d
% Filter: a/+/+/0/1/2
% Topic is higher than the filter, we need to skip over to the next possible filter.
% Seek: {3, d}
% Next: a/+/+/d
{Pos, HW}.
match_add(K = {_Filter, ID}, Acc = #{}) ->
% NOTE: ensuring uniqueness by record ID
Acc#{ID => K};
match_add(K, Acc) when is_list(Acc) ->
[K | Acc];
match_add(K, first) ->
throw({first, K}).
-spec filter_words(emqx_types:topic()) -> [word()].
filter_words(Topic) when is_binary(Topic) ->
% NOTE
% This is almost identical to `emqx_topic:words/1`, but it doesn't convert empty
% tokens to ''. This is needed to keep ordering of words consistent with what
% `compare/3` expects.
[word(W, filter) || W <- emqx_topic:tokens(Topic)].
-spec topic_words(emqx_types:topic()) -> [binary()].
topic_words(Topic) when is_binary(Topic) ->
[word(W, topic) || W <- emqx_topic:tokens(Topic)].
word(<<"+">>, topic) -> error(badarg);
word(<<"#">>, topic) -> error(badarg);
word(<<"+">>, filter) -> '+';
word(<<"#">>, filter) -> '#';
word(Bin, _) -> Bin.
%% match non-wildcard topics
match_topics(Topic, {Topic, _} = Key, NextF, Acc) ->
%% found a topic match
match_topics(Topic, NextF(Key), NextF, match_add(Key, Acc));
match_topics(Topic, {F, _}, NextF, Acc) when F < Topic ->
%% the last key is a filter, try to jump to the topic
match_topics(Topic, NextF(base(Topic)), NextF, Acc);
match_topics(_Topic, _Key, _NextF, Acc) ->
%% gone past the topic
Acc.

View File

@ -531,7 +531,6 @@ handle_info({event, connected}, State = #state{channel = Channel}) ->
handle_info({event, disconnected}, State = #state{channel = Channel}) ->
ClientId = emqx_channel:info(clientid, Channel),
emqx_cm:set_chan_info(ClientId, info(State)),
emqx_cm:connection_closed(ClientId),
return(State);
handle_info({event, _Other}, State = #state{channel = Channel}) ->
ClientId = emqx_channel:info(clientid, Channel),

View File

@ -26,6 +26,7 @@
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
-include_lib("emqx/include/emqx.hrl").
-include_lib("emqx/include/emqx_hooks.hrl").
-include_lib("emqx/include/emqx_mqtt.hrl").
all() ->
@ -695,28 +696,17 @@ t_connect_client_never_negative({'end', _Config}) ->
t_connack_auth_error({init, Config}) ->
process_flag(trap_exit, true),
ChainName = 'mqtt:global',
AuthenticatorConfig = #{
enable => true,
mechanism => password_based,
backend => built_in_database,
user_id_type => username,
password_hash_algorithm => #{
name => plain,
salt_position => disable
},
user_group => <<"global:mqtt">>
},
ok = emqx_authentication:register_providers(
[{{password_based, built_in_database}, emqx_authentication_SUITE}]
emqx_hooks:put(
'client.authenticate',
{?MODULE, authenticate_deny, []},
?HP_AUTHN
),
emqx_authentication:initialize_authentication(ChainName, AuthenticatorConfig),
Config;
t_connack_auth_error({'end', _Config}) ->
ChainName = 'mqtt:global',
AuthenticatorID = <<"password_based:built_in_database">>,
ok = emqx_authentication:deregister_provider({password_based, built_in_database}),
ok = emqx_authentication:delete_authenticator(ChainName, AuthenticatorID),
emqx_hooks:del(
'client.authenticate',
{?MODULE, authenticate_deny, []}
),
ok;
t_connack_auth_error(Config) when is_list(Config) ->
%% MQTT 3.1
@ -748,6 +738,9 @@ t_handle_in_empty_client_subscribe_hook(Config) when is_list(Config) ->
emqtt:disconnect(C)
end.
authenticate_deny(_Credentials, _Default) ->
{stop, {error, bad_username_or_password}}.
wait_for_events(Action, Kinds) ->
wait_for_events(Action, Kinds, 500).

View File

@ -16,8 +16,6 @@
-module(emqx_common_test_helpers).
-include_lib("emqx/include/emqx_authentication.hrl").
-type special_config_handler() :: fun().
-type apps() :: list(atom()).
@ -351,7 +349,7 @@ stop_apps(Apps, Opts) ->
%% to avoid inter-suite flakiness
application:unset_env(emqx, config_loader),
application:unset_env(emqx, boot_modules),
persistent_term:erase(?EMQX_AUTHENTICATION_SCHEMA_MODULE_PT_KEY),
emqx_schema_hooks:erase_injections(),
case Opts of
#{erase_all_configs := false} ->
%% FIXME: this means inter-suite or inter-test dependencies

View File

@ -26,7 +26,8 @@
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
all() -> emqx_common_test_helpers:all(?MODULE).
all() ->
emqx_common_test_helpers:all(?MODULE).
init_per_suite(Config) ->
emqx_common_test_helpers:boot_modules(all),
@ -223,8 +224,8 @@ t_callback_crash(_Config) ->
ok = emqx_config_handler:remove_handler(CrashPath),
ok.
t_pre_callback_error(_Config) ->
callback_error(
t_pre_assert_update_result(_Config) ->
assert_update_result(
[sysmon, os, mem_check_interval],
<<"100s">>,
{error, {pre_config_update, ?MODULE, pre_config_update_error}}
@ -232,13 +233,88 @@ t_pre_callback_error(_Config) ->
ok.
t_post_update_error(_Config) ->
callback_error(
assert_update_result(
[sysmon, os, sysmem_high_watermark],
<<"60%">>,
{error, {post_config_update, ?MODULE, post_config_update_error}}
),
ok.
t_post_update_propagate_error_wkey(_Config) ->
Conf0 = emqx_config:get_raw([sysmon]),
Conf1 = emqx_utils_maps:deep_put([<<"os">>, <<"sysmem_high_watermark">>], Conf0, <<"60%">>),
assert_update_result(
[
[sysmon, '?', sysmem_high_watermark],
[sysmon]
],
[sysmon],
Conf1,
{error, {post_config_update, ?MODULE, post_config_update_error}}
),
ok.
t_post_update_propagate_error_key(_Config) ->
Conf0 = emqx_config:get_raw([sysmon]),
Conf1 = emqx_utils_maps:deep_put([<<"os">>, <<"sysmem_high_watermark">>], Conf0, <<"60%">>),
assert_update_result(
[
[sysmon, os, sysmem_high_watermark],
[sysmon]
],
[sysmon],
Conf1,
{error, {post_config_update, ?MODULE, post_config_update_error}}
),
ok.
t_pre_update_propagate_error_wkey(_Config) ->
Conf0 = emqx_config:get_raw([sysmon]),
Conf1 = emqx_utils_maps:deep_put([<<"os">>, <<"mem_check_interval">>], Conf0, <<"70s">>),
assert_update_result(
[
[sysmon, '?', mem_check_interval],
[sysmon]
],
[sysmon],
Conf1,
{error, {pre_config_update, ?MODULE, pre_config_update_error}}
),
ok.
t_pre_update_propagate_error_key(_Config) ->
Conf0 = emqx_config:get_raw([sysmon]),
Conf1 = emqx_utils_maps:deep_put([<<"os">>, <<"mem_check_interval">>], Conf0, <<"70s">>),
assert_update_result(
[
[sysmon, os, mem_check_interval],
[sysmon]
],
[sysmon],
Conf1,
{error, {pre_config_update, ?MODULE, pre_config_update_error}}
),
ok.
t_pre_update_propagate_key_rewrite(_Config) ->
Conf0 = emqx_config:get_raw([sysmon]),
Conf1 = emqx_utils_maps:deep_put([<<"os">>, <<"cpu_check_interval">>], Conf0, <<"333s">>),
with_update_result(
[
[sysmon, '?', cpu_check_interval],
[sysmon]
],
[sysmon],
Conf1,
fun(_, Result) ->
?assertMatch(
{ok, #{config := #{os := #{cpu_check_interval := 444000}}}},
Result
)
end
),
ok.
t_handler_root() ->
%% Don't rely on default emqx_config_handler's merge behaviour.
RootKey = [],
@ -295,6 +371,17 @@ pre_config_update([sysmon, os, sysmem_high_watermark], UpdateReq, _RawConf) ->
pre_config_update([sysmon, os, mem_check_interval], _UpdateReq, _RawConf) ->
{error, pre_config_update_error}.
propagated_pre_config_update(
[<<"sysmon">>, <<"os">>, <<"cpu_check_interval">>], <<"333s">>, _RawConf
) ->
{ok, <<"444s">>};
propagated_pre_config_update(
[<<"sysmon">>, <<"os">>, <<"mem_check_interval">>], _UpdateReq, _RawConf
) ->
{error, pre_config_update_error};
propagated_pre_config_update(_ConfKeyPath, _UpdateReq, _RawConf) ->
ok.
post_config_update([sysmon], _UpdateReq, _NewConf, _OldConf, _AppEnvs) ->
{ok, ok};
post_config_update([sysmon, os], _UpdateReq, _NewConf, _OldConf, _AppEnvs) ->
@ -308,6 +395,13 @@ post_config_update([sysmon, os, cpu_high_watermark], _UpdateReq, _NewConf, _OldC
post_config_update([sysmon, os, sysmem_high_watermark], _UpdateReq, _NewConf, _OldConf, _AppEnvs) ->
{error, post_config_update_error}.
propagated_post_config_update(
[sysmon, os, sysmem_high_watermark], _UpdateReq, _NewConf, _OldConf, _AppEnvs
) ->
{error, post_config_update_error};
propagated_post_config_update(_ConfKeyPath, _UpdateReq, _NewConf, _OldConf, _AppEnvs) ->
ok.
wait_for_new_pid() ->
case erlang:whereis(emqx_config_handler) of
undefined ->
@ -317,20 +411,34 @@ wait_for_new_pid() ->
Pid
end.
callback_error(FailedPath, Update, ExpectError) ->
Opts = #{rawconf_with_defaults => true},
ok = emqx_config_handler:add_handler(FailedPath, ?MODULE),
Old = emqx:get_raw_config(FailedPath, undefined),
Error = emqx:update_config(FailedPath, Update, Opts),
case ExpectError of
assert_update_result(FailedPath, Update, Expect) ->
assert_update_result([FailedPath], FailedPath, Update, Expect).
assert_update_result(Paths, UpdatePath, Update, Expect) ->
with_update_result(Paths, UpdatePath, Update, fun(Old, Result) ->
case Expect of
{error, {post_config_update, ?MODULE, post_config_update_error}} ->
?assertMatch(
{error, {post_config_update, ?MODULE, {post_config_update_error, _}}}, Error
{error, {post_config_update, ?MODULE, {post_config_update_error, _}}}, Result
);
_ ->
?assertEqual(ExpectError, Error)
?assertEqual(Expect, Result)
end,
New = emqx:get_raw_config(FailedPath, undefined),
?assertEqual(Old, New),
ok = emqx_config_handler:remove_handler(FailedPath),
New = emqx:get_raw_config(UpdatePath, undefined),
?assertEqual(Old, New)
end).
with_update_result(Paths, UpdatePath, Update, Fun) ->
ok = lists:foreach(
fun(Path) -> emqx_config_handler:add_handler(Path, ?MODULE) end,
Paths
),
Opts = #{rawconf_with_defaults => true},
Old = emqx:get_raw_config(UpdatePath, undefined),
Result = emqx:update_config(UpdatePath, Update, Opts),
_ = Fun(Old, Result),
ok = lists:foreach(
fun(Path) -> emqx_config_handler:remove_handler(Path) end,
Paths
),
ok.

View File

@ -274,7 +274,6 @@ t_handle_msg_event(_) ->
ok = meck:expect(emqx_cm, register_channel, fun(_, _, _) -> ok end),
ok = meck:expect(emqx_cm, insert_channel_info, fun(_, _, _) -> ok end),
ok = meck:expect(emqx_cm, set_chan_info, fun(_, _) -> ok end),
ok = meck:expect(emqx_cm, connection_closed, fun(_) -> ok end),
?assertEqual(ok, handle_msg({event, connected}, st())),
?assertMatch({ok, _St}, handle_msg({event, disconnected}, st())),
?assertMatch({ok, _St}, handle_msg({event, undefined}, st())).

View File

@ -41,6 +41,8 @@
-export([start/2]).
-export([stop/1, stop_node/1]).
-export([start_bare_node/2]).
-export([share_load_module/2]).
-export([node_name/1, mk_nodespecs/2]).
-export([start_apps/2, set_node_opts/2]).
@ -282,9 +284,6 @@ allocate_listener_ports(Types, Spec) ->
start_node_init(Spec = #{name := Node}) ->
Node = start_bare_node(Node, Spec),
pong = net_adm:ping(Node),
% Preserve node spec right on the remote node
ok = set_node_opts(Node, Spec),
% Make it possible to call `ct:pal` and friends (if running under rebar3)
_ = share_load_module(Node, cthr),
% Enable snabbkaffe trace forwarding
@ -392,7 +391,8 @@ listener_port(BasePort, wss) ->
%%
start_bare_node(Name, #{driver := ct_slave}) ->
-spec start_bare_node(atom(), map()) -> node().
start_bare_node(Name, Spec = #{driver := ct_slave}) ->
{ok, Node} = ct_slave:start(
node_name(Name),
[
@ -404,9 +404,15 @@ start_bare_node(Name, #{driver := ct_slave}) ->
{env, []}
]
),
Node;
start_bare_node(Name, #{driver := slave}) ->
init_bare_node(Node, Spec);
start_bare_node(Name, Spec = #{driver := slave}) ->
{ok, Node} = slave:start_link(host(), Name, ebin_path()),
init_bare_node(Node, Spec).
init_bare_node(Node, Spec) ->
pong = net_adm:ping(Node),
% Preserve node spec right on the remote node
ok = set_node_opts(Node, Spec),
Node.
erl_flags() ->
@ -429,6 +435,7 @@ share_load_module(Node, Module) ->
error
end.
-spec node_name(atom()) -> node().
node_name(Name) ->
case string:tokens(atom_to_list(Name), "@") of
[_Name, _Host] ->

View File

@ -58,7 +58,7 @@
-module(emqx_cth_suite).
-include_lib("common_test/include/ct.hrl").
-include_lib("emqx/include/emqx_authentication.hrl").
-include_lib("emqx/include/emqx_access_control.hrl").
-export([start/2]).
-export([stop/1]).
@ -306,7 +306,7 @@ merge_envs(false, E2) ->
merge_envs(_E, false) ->
[];
merge_envs(E1, E2) ->
E1 ++ E2.
lists:foldl(fun({K, _} = Opt, EAcc) -> lists:keystore(K, 1, EAcc, Opt) end, E1, E2).
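%% For example (hypothetical app envs): merging [{a, 1}, {b, 2}] with [{b, 3}] now
%% yields [{a, 1}, {b, 3}]; later specs override earlier ones key by key instead of
%% the lists simply being concatenated.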
merge_config(false, C2) ->
C2;
@ -444,12 +444,12 @@ stop_apps(Apps) ->
verify_clean_suite_state(#{work_dir := WorkDir}) ->
{ok, []} = file:list_dir(WorkDir),
none = persistent_term:get(?EMQX_AUTHENTICATION_SCHEMA_MODULE_PT_KEY, none),
false = emqx_schema_hooks:any_injections(),
[] = emqx_config:get_root_names(),
ok.
clean_suite_state() ->
_ = persistent_term:erase(?EMQX_AUTHENTICATION_SCHEMA_MODULE_PT_KEY),
_ = emqx_schema_hooks:erase_injections(),
_ = emqx_config:erase_all(),
ok.

View File

@ -116,6 +116,172 @@ t_update_conf(_Conf) ->
?assert(is_running('wss:default')),
ok.
t_update_tcp_keepalive_conf(_Conf) ->
Keepalive = <<"240,30,5">>,
KeepaliveStr = binary_to_list(Keepalive),
Raw = emqx:get_raw_config(?LISTENERS),
Raw1 = emqx_utils_maps:deep_put(
[<<"tcp">>, <<"default">>, <<"bind">>], Raw, <<"127.0.0.1:1883">>
),
Raw2 = emqx_utils_maps:deep_put(
[<<"tcp">>, <<"default">>, <<"tcp_options">>, <<"keepalive">>], Raw1, Keepalive
),
?assertMatch({ok, _}, emqx:update_config(?LISTENERS, Raw2)),
?assertMatch(
#{
<<"tcp">> := #{
<<"default">> := #{
<<"bind">> := <<"127.0.0.1:1883">>,
<<"tcp_options">> := #{<<"keepalive">> := Keepalive}
}
}
},
emqx:get_raw_config(?LISTENERS)
),
?assertMatch(
#{tcp := #{default := #{tcp_options := #{keepalive := KeepaliveStr}}}},
emqx:get_config(?LISTENERS)
),
Keepalive2 = <<" 241, 31, 6 ">>,
KeepaliveStr2 = binary_to_list(Keepalive2),
Raw3 = emqx_utils_maps:deep_put(
[<<"tcp">>, <<"default">>, <<"tcp_options">>, <<"keepalive">>], Raw1, Keepalive2
),
?assertMatch({ok, _}, emqx:update_config(?LISTENERS, Raw3)),
?assertMatch(
#{
<<"tcp">> := #{
<<"default">> := #{
<<"bind">> := <<"127.0.0.1:1883">>,
<<"tcp_options">> := #{<<"keepalive">> := Keepalive2}
}
}
},
emqx:get_raw_config(?LISTENERS)
),
?assertMatch(
#{tcp := #{default := #{tcp_options := #{keepalive := KeepaliveStr2}}}},
emqx:get_config(?LISTENERS)
),
ok.
t_update_empty_ssl_options_conf(_Conf) ->
Raw = emqx:get_raw_config(?LISTENERS),
Raw1 = emqx_utils_maps:deep_put(
[<<"tcp">>, <<"default">>, <<"bind">>], Raw, <<"127.0.0.1:1883">>
),
Raw2 = emqx_utils_maps:deep_put(
[<<"ssl">>, <<"default">>, <<"bind">>], Raw1, <<"127.0.0.1:8883">>
),
Raw3 = emqx_utils_maps:deep_put(
[<<"ws">>, <<"default">>, <<"bind">>], Raw2, <<"0.0.0.0:8083">>
),
Raw4 = emqx_utils_maps:deep_put(
[<<"wss">>, <<"default">>, <<"bind">>], Raw3, <<"127.0.0.1:8084">>
),
Raw5 = emqx_utils_maps:deep_put(
[<<"ssl">>, <<"default">>, <<"ssl_options">>, <<"cacertfile">>], Raw4, <<"">>
),
Raw6 = emqx_utils_maps:deep_put(
[<<"wss">>, <<"default">>, <<"ssl_options">>, <<"cacertfile">>], Raw5, <<"">>
),
Raw7 = emqx_utils_maps:deep_put(
[<<"wss">>, <<"default">>, <<"ssl_options">>, <<"ciphers">>], Raw6, <<"">>
),
Ciphers = <<"TLS_AES_256_GCM_SHA384, TLS_AES_128_GCM_SHA256 ">>,
Raw8 = emqx_utils_maps:deep_put(
[<<"ssl">>, <<"default">>, <<"ssl_options">>, <<"ciphers">>],
Raw7,
Ciphers
),
?assertMatch({ok, _}, emqx:update_config(?LISTENERS, Raw8)),
?assertMatch(
#{
<<"tcp">> := #{<<"default">> := #{<<"bind">> := <<"127.0.0.1:1883">>}},
<<"ssl">> := #{
<<"default">> := #{
<<"bind">> := <<"127.0.0.1:8883">>,
<<"ssl_options">> := #{
<<"cacertfile">> := <<"">>,
<<"ciphers">> := Ciphers
}
}
},
<<"ws">> := #{<<"default">> := #{<<"bind">> := <<"0.0.0.0:8083">>}},
<<"wss">> := #{
<<"default">> := #{
<<"bind">> := <<"127.0.0.1:8084">>,
<<"ssl_options">> := #{
<<"cacertfile">> := <<"">>,
<<"ciphers">> := <<"">>
}
}
}
},
emqx:get_raw_config(?LISTENERS)
),
BindTcp = {{127, 0, 0, 1}, 1883},
BindSsl = {{127, 0, 0, 1}, 8883},
BindWs = {{0, 0, 0, 0}, 8083},
BindWss = {{127, 0, 0, 1}, 8084},
?assertMatch(
#{
tcp := #{default := #{bind := BindTcp}},
ssl := #{
default := #{
bind := BindSsl,
ssl_options := #{
cacertfile := <<"">>,
ciphers := ["TLS_AES_256_GCM_SHA384", "TLS_AES_128_GCM_SHA256"]
}
}
},
ws := #{default := #{bind := BindWs}},
wss := #{
default := #{
bind := BindWss,
ssl_options := #{
cacertfile := <<"">>,
ciphers := []
}
}
}
},
emqx:get_config(?LISTENERS)
),
?assertError(not_found, current_conns(<<"tcp:default">>, {{0, 0, 0, 0}, 1883})),
?assertError(not_found, current_conns(<<"ssl:default">>, {{0, 0, 0, 0}, 8883})),
?assertEqual(0, current_conns(<<"tcp:default">>, BindTcp)),
?assertEqual(0, current_conns(<<"ssl:default">>, BindSsl)),
?assertEqual({0, 0, 0, 0}, proplists:get_value(ip, ranch:info('ws:default'))),
?assertEqual({127, 0, 0, 1}, proplists:get_value(ip, ranch:info('wss:default'))),
?assert(is_running('ws:default')),
?assert(is_running('wss:default')),
Raw9 = emqx_utils_maps:deep_put(
[<<"ssl">>, <<"default">>, <<"ssl_options">>, <<"ciphers">>], Raw7, [
"TLS_AES_256_GCM_SHA384",
"TLS_AES_128_GCM_SHA256",
"TLS_CHACHA20_POLY1305_SHA256"
]
),
?assertMatch({ok, _}, emqx:update_config(?LISTENERS, Raw9)),
BadRaw = emqx_utils_maps:deep_put(
[<<"ssl">>, <<"default">>, <<"ssl_options">>, <<"keyfile">>], Raw4, <<"">>
),
?assertMatch(
{error,
{bad_ssl_config, #{
reason := pem_file_path_or_string_is_required,
which_options := [[<<"keyfile">>]]
}}},
emqx:update_config(?LISTENERS, BadRaw)
),
ok.
t_add_delete_conf(_Conf) ->
Raw = emqx:get_raw_config(?LISTENERS),
%% add

View File

@ -122,6 +122,17 @@ t_inc_sent(_) ->
with_metrics_server(
fun() ->
ok = emqx_metrics:inc_sent(?CONNACK_PACKET(0)),
ok = emqx_metrics:inc_sent(?CONNACK_PACKET(0, 1)),
ok = emqx_metrics:inc_sent(
?CONNACK_PACKET(0, 1, #{
'Maximum-Packet-Size' => 1048576,
'Retain-Available' => 1,
'Shared-Subscription-Available' => 1,
'Subscription-Identifier-Available' => 1,
'Topic-Alias-Maximum' => 65535,
'Wildcard-Subscription-Available' => 1
})
),
ok = emqx_metrics:inc_sent(?PUBLISH_PACKET(0, 0)),
ok = emqx_metrics:inc_sent(?PUBLISH_PACKET(1, 0)),
ok = emqx_metrics:inc_sent(?PUBLISH_PACKET(2, 0)),
@ -134,8 +145,8 @@ t_inc_sent(_) ->
ok = emqx_metrics:inc_sent(?PACKET(?PINGRESP)),
ok = emqx_metrics:inc_sent(?PACKET(?DISCONNECT)),
ok = emqx_metrics:inc_sent(?PACKET(?AUTH)),
?assertEqual(13, emqx_metrics:val('packets.sent')),
?assertEqual(1, emqx_metrics:val('packets.connack.sent')),
?assertEqual(15, emqx_metrics:val('packets.sent')),
?assertEqual(3, emqx_metrics:val('packets.connack.sent')),
?assertEqual(3, emqx_metrics:val('messages.sent')),
?assertEqual(1, emqx_metrics:val('messages.qos0.sent')),
?assertEqual(1, emqx_metrics:val('messages.qos1.sent')),

View File

@ -1094,7 +1094,7 @@ t_multi_streams_unsub(Config) ->
?retry(
_Sleep2 = 100,
_Attempts2 = 50,
false = emqx_router:has_routes(Topic)
[] = emqx_router:lookup_routes(Topic)
),
case emqtt:publish_via(C, PubVia, Topic, #{}, <<6, 7, 8, 9>>, [{qos, PubQos}]) of

View File

@ -26,24 +26,37 @@
-define(R, emqx_router).
all() -> emqx_common_test_helpers:all(?MODULE).
init_per_suite(Config) ->
PrevBootModules = application:get_env(emqx, boot_modules),
emqx_common_test_helpers:boot_modules([router]),
emqx_common_test_helpers:start_apps([]),
all() ->
[
{prev_boot_modules, PrevBootModules}
| Config
{group, routing_schema_v1},
{group, routing_schema_v2}
].
end_per_suite(Config) ->
PrevBootModules = ?config(prev_boot_modules, Config),
case PrevBootModules of
undefined -> ok;
{ok, Mods} -> emqx_common_test_helpers:boot_modules(Mods)
end,
emqx_common_test_helpers:stop_apps([]).
groups() ->
TCs = emqx_common_test_helpers:all(?MODULE),
[
{routing_schema_v1, [], TCs},
{routing_schema_v2, [], TCs}
].
init_per_group(GroupName, Config) ->
WorkDir = filename:join([?config(priv_dir, Config), ?MODULE, GroupName]),
AppSpecs = [
{emqx, #{
config => mk_config(GroupName),
override_env => [{boot_modules, [router]}]
}}
],
Apps = emqx_cth_suite:start(AppSpecs, #{work_dir => WorkDir}),
[{group_apps, Apps}, {group_name, GroupName} | Config].
end_per_group(_GroupName, Config) ->
ok = emqx_cth_suite:stop(?config(group_apps, Config)).
mk_config(routing_schema_v1) ->
"broker.routing.storage_schema = v1";
mk_config(routing_schema_v2) ->
"broker.routing.storage_schema = v2".
init_per_testcase(_TestCase, Config) ->
clear_tables(),
@ -52,23 +65,16 @@ init_per_testcase(_TestCase, Config) ->
end_per_testcase(_TestCase, _Config) ->
clear_tables().
% t_add_route(_) ->
% error('TODO').
% t_do_add_route(_) ->
% error('TODO').
% t_lookup_routes(_) ->
% error('TODO').
% t_delete_route(_) ->
% error('TODO').
% t_do_delete_route(_) ->
% error('TODO').
% t_topics(_) ->
% error('TODO').
t_verify_type(Config) ->
case ?config(group_name, Config) of
routing_schema_v1 ->
?assertEqual(v1, ?R:get_schema_vsn());
routing_schema_v2 ->
?assertEqual(v2, ?R:get_schema_vsn())
end.
t_add_delete(_) ->
?R:add_route(<<"a/b/c">>),
@ -79,6 +85,55 @@ t_add_delete(_) ->
?R:delete_route(<<"a/+/b">>, node()),
?assertEqual([], ?R:topics()).
t_add_delete_incremental(_) ->
?R:add_route(<<"a/b/c">>),
?R:add_route(<<"a/+/c">>, node()),
?R:add_route(<<"a/+/+">>, node()),
?R:add_route(<<"a/b/#">>, node()),
?R:add_route(<<"#">>, node()),
?assertEqual(
[
#route{topic = <<"#">>, dest = node()},
#route{topic = <<"a/+/+">>, dest = node()},
#route{topic = <<"a/+/c">>, dest = node()},
#route{topic = <<"a/b/#">>, dest = node()},
#route{topic = <<"a/b/c">>, dest = node()}
],
lists:sort(?R:match_routes(<<"a/b/c">>))
),
?R:delete_route(<<"a/+/c">>, node()),
?assertEqual(
[
#route{topic = <<"#">>, dest = node()},
#route{topic = <<"a/+/+">>, dest = node()},
#route{topic = <<"a/b/#">>, dest = node()},
#route{topic = <<"a/b/c">>, dest = node()}
],
lists:sort(?R:match_routes(<<"a/b/c">>))
),
?R:delete_route(<<"a/+/+">>, node()),
?assertEqual(
[
#route{topic = <<"#">>, dest = node()},
#route{topic = <<"a/b/#">>, dest = node()},
#route{topic = <<"a/b/c">>, dest = node()}
],
lists:sort(?R:match_routes(<<"a/b/c">>))
),
?R:delete_route(<<"a/b/#">>, node()),
?assertEqual(
[
#route{topic = <<"#">>, dest = node()},
#route{topic = <<"a/b/c">>, dest = node()}
],
lists:sort(?R:match_routes(<<"a/b/c">>))
),
?R:delete_route(<<"a/b/c">>, node()),
?assertEqual(
[#route{topic = <<"#">>, dest = node()}],
lists:sort(?R:match_routes(<<"a/b/c">>))
).
t_do_add_delete(_) ->
?R:do_add_route(<<"a/b/c">>),
?R:do_add_route(<<"a/b/c">>, node()),
@ -114,9 +169,9 @@ t_print_routes(_) ->
?R:add_route(<<"+/+">>),
?R:print_routes(<<"a/b">>).
t_has_routes(_) ->
t_has_route(_) ->
?R:add_route(<<"devices/+/messages">>, node()),
?assert(?R:has_routes(<<"devices/+/messages">>)),
?assert(?R:has_route(<<"devices/+/messages">>, node())),
?R:delete_route(<<"devices/+/messages">>).
t_unexpected(_) ->
@ -128,5 +183,5 @@ t_unexpected(_) ->
clear_tables() ->
lists:foreach(
fun mnesia:clear_table/1,
[?ROUTE_TAB, ?TRIE, emqx_trie_node]
[?ROUTE_TAB, ?ROUTE_TAB_FILTERS, ?TRIE]
).

View File

@ -26,55 +26,45 @@
-define(ROUTER_HELPER, emqx_router_helper).
all() -> emqx_common_test_helpers:all(?MODULE).
all() ->
[
{group, routing_schema_v1},
{group, routing_schema_v2}
].
init_per_suite(Config) ->
DistPid =
case net_kernel:nodename() of
ignored ->
%% calling `net_kernel:start' without `epmd'
%% running will result in a failure.
emqx_common_test_helpers:start_epmd(),
{ok, Pid} = net_kernel:start(['test@127.0.0.1', longnames]),
Pid;
_ ->
undefined
end,
emqx_common_test_helpers:start_apps([]),
[{dist_pid, DistPid} | Config].
groups() ->
TCs = emqx_common_test_helpers:all(?MODULE),
[
{routing_schema_v1, [], TCs},
{routing_schema_v2, [], TCs}
].
end_per_suite(Config) ->
DistPid = ?config(dist_pid, Config),
case DistPid of
Pid when is_pid(Pid) ->
net_kernel:stop();
_ ->
ok
end,
emqx_common_test_helpers:stop_apps([]).
init_per_group(GroupName, Config) ->
WorkDir = filename:join([?config(priv_dir, Config), ?MODULE, GroupName]),
AppSpecs = [{emqx, mk_config(GroupName)}],
Apps = emqx_cth_suite:start(AppSpecs, #{work_dir => WorkDir}),
[{group_name, GroupName}, {group_apps, Apps} | Config].
end_per_group(_GroupName, Config) ->
ok = emqx_cth_suite:stop(?config(group_apps, Config)).
mk_config(routing_schema_v1) ->
#{
config => "broker.routing.storage_schema = v1",
override_env => [{boot_modules, [router]}]
};
mk_config(routing_schema_v2) ->
#{
config => "broker.routing.storage_schema = v2",
override_env => [{boot_modules, [router]}]
}.
init_per_testcase(TestCase, Config) when
TestCase =:= t_cleanup_membership_mnesia_down;
TestCase =:= t_cleanup_membership_node_down;
TestCase =:= t_cleanup_monitor_node_down
->
ok = snabbkaffe:start_trace(),
Slave = emqx_common_test_helpers:start_slave(some_node, []),
[{slave, Slave} | Config];
init_per_testcase(_TestCase, Config) ->
ok = snabbkaffe:start_trace(),
Config.
end_per_testcase(TestCase, Config) when
TestCase =:= t_cleanup_membership_mnesia_down;
TestCase =:= t_cleanup_membership_node_down;
TestCase =:= t_cleanup_monitor_node_down
->
Slave = ?config(slave, Config),
emqx_common_test_helpers:stop_slave(Slave),
mria:clear_table(?ROUTE_TAB),
snabbkaffe:stop(),
ok;
end_per_testcase(_TestCase, _Config) ->
ok = snabbkaffe:stop(),
ok.
t_monitor(_) ->
@ -89,8 +79,8 @@ t_mnesia(_) ->
?ROUTER_HELPER ! {membership, {mnesia, down, node()}},
ct:sleep(200).
t_cleanup_membership_mnesia_down(Config) ->
Slave = ?config(slave, Config),
t_cleanup_membership_mnesia_down(_Config) ->
Slave = emqx_cth_cluster:node_name(?FUNCTION_NAME),
emqx_router:add_route(<<"a/b/c">>, Slave),
emqx_router:add_route(<<"d/e/f">>, node()),
?assertMatch([_, _], emqx_router:topics()),
@ -101,8 +91,8 @@ t_cleanup_membership_mnesia_down(Config) ->
),
?assertEqual([<<"d/e/f">>], emqx_router:topics()).
t_cleanup_membership_node_down(Config) ->
Slave = ?config(slave, Config),
t_cleanup_membership_node_down(_Config) ->
Slave = emqx_cth_cluster:node_name(?FUNCTION_NAME),
emqx_router:add_route(<<"a/b/c">>, Slave),
emqx_router:add_route(<<"d/e/f">>, node()),
?assertMatch([_, _], emqx_router:topics()),
@ -113,13 +103,13 @@ t_cleanup_membership_node_down(Config) ->
),
?assertEqual([<<"d/e/f">>], emqx_router:topics()).
t_cleanup_monitor_node_down(Config) ->
Slave = ?config(slave, Config),
t_cleanup_monitor_node_down(_Config) ->
Slave = emqx_cth_cluster:start_bare_node(?FUNCTION_NAME, #{driver => ct_slave}),
emqx_router:add_route(<<"a/b/c">>, Slave),
emqx_router:add_route(<<"d/e/f">>, node()),
?assertMatch([_, _], emqx_router:topics()),
?wait_async_action(
emqx_common_test_helpers:stop_slave(Slave),
emqx_cth_cluster:stop([Slave]),
#{?snk_kind := emqx_router_helper_cleanup_done, node := Slave},
1_000
),

View File

@ -0,0 +1,258 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2017-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_routing_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
-include_lib("emqx/include/asserts.hrl").
all() ->
[
{group, routing_schema_v1},
{group, routing_schema_v2},
t_routing_schema_switch_v1,
t_routing_schema_switch_v2
].
groups() ->
TCs = [
t_cluster_routing
],
[
{routing_schema_v1, [], TCs},
{routing_schema_v2, [], TCs}
].
init_per_group(GroupName, Config) ->
WorkDir = filename:join([?config(priv_dir, Config), ?MODULE, GroupName]),
NodeSpecs = [
{emqx_routing_SUITE1, #{apps => [mk_emqx_appspec(GroupName, 1)], role => core}},
{emqx_routing_SUITE2, #{apps => [mk_emqx_appspec(GroupName, 2)], role => core}},
{emqx_routing_SUITE3, #{apps => [mk_emqx_appspec(GroupName, 3)], role => replicant}}
],
Nodes = emqx_cth_cluster:start(NodeSpecs, #{work_dir => WorkDir}),
[{cluster, Nodes} | Config].
end_per_group(_GroupName, Config) ->
emqx_cth_cluster:stop(?config(cluster, Config)).
init_per_testcase(TC, Config) ->
WorkDir = filename:join([?config(priv_dir, Config), ?MODULE, TC]),
[{work_dir, WorkDir} | Config].
end_per_testcase(_TC, _Config) ->
ok.
mk_emqx_appspec(GroupName, N) ->
{emqx, #{
config => mk_config(GroupName, N),
after_start => fun() ->
% NOTE
% This one is actually defined on `emqx_conf_schema` level, but used
% in `emqx_broker`. Thus we have to resort to this ugly hack.
emqx_config:force_put([rpc, mode], async)
end
}}.
mk_genrpc_appspec() ->
{gen_rpc, #{
override_env => [{port_discovery, stateless}]
}}.
mk_config(GroupName, N) ->
#{
broker => mk_config_broker(GroupName),
listeners => mk_config_listeners(N)
}.
mk_config_broker(Vsn) when Vsn == routing_schema_v1; Vsn == v1 ->
#{routing => #{storage_schema => v1}};
mk_config_broker(Vsn) when Vsn == routing_schema_v2; Vsn == v2 ->
#{routing => #{storage_schema => v2}}.
mk_config_listeners(N) ->
Port = 1883 + N,
#{
tcp => #{default => #{bind => "127.0.0.1:" ++ integer_to_list(Port)}},
ssl => #{default => #{enable => false}},
ws => #{default => #{enable => false}},
wss => #{default => #{enable => false}}
}.
%%
t_cluster_routing(Config) ->
Cluster = ?config(cluster, Config),
Clients = [C1, C2, C3] = [start_client(N) || N <- Cluster],
Commands = [
{fun publish/3, [C1, <<"a/b/c">>, <<"wontsee">>]},
{fun publish/3, [C2, <<"a/b/d">>, <<"wontsee">>]},
{fun subscribe/2, [C3, <<"a/+/c/#">>]},
{fun publish/3, [C1, <<"a/b/c">>, <<"01">>]},
{fun publish/3, [C2, <<"a/b/d">>, <<"wontsee">>]},
{fun subscribe/2, [C1, <<"a/b/c">>]},
{fun subscribe/2, [C2, <<"a/b/+">>]},
{fun publish/3, [C3, <<"a/b/c">>, <<"02">>]},
{fun publish/3, [C2, <<"a/b/d">>, <<"03">>]},
{fun publish/3, [C2, <<"a/b/c/d">>, <<"04">>]},
{fun subscribe/2, [C3, <<"a/b/d">>]},
{fun publish/3, [C1, <<"a/b/d">>, <<"05">>]},
{fun unsubscribe/2, [C3, <<"a/+/c/#">>]},
{fun publish/3, [C1, <<"a/b/c">>, <<"06">>]},
{fun publish/3, [C2, <<"a/b/d">>, <<"07">>]},
{fun publish/3, [C2, <<"a/b/c/d">>, <<"08">>]},
{fun unsubscribe/2, [C2, <<"a/b/+">>]},
{fun publish/3, [C1, <<"a/b/c">>, <<"09">>]},
{fun publish/3, [C2, <<"a/b/d">>, <<"10">>]},
{fun publish/3, [C2, <<"a/b/c/d">>, <<"11">>]},
{fun unsubscribe/2, [C3, <<"a/b/d">>]},
{fun unsubscribe/2, [C1, <<"a/b/c">>]},
{fun publish/3, [C1, <<"a/b/c">>, <<"wontsee">>]},
{fun publish/3, [C2, <<"a/b/d">>, <<"wontsee">>]}
],
ok = lists:foreach(fun({F, Args}) -> erlang:apply(F, Args) end, Commands),
_ = [emqtt:stop(C) || C <- Clients],
Deliveries = ?drainMailbox(),
?assertMatch(
[
{pub, C1, #{topic := <<"a/b/c">>, payload := <<"02">>}},
{pub, C1, #{topic := <<"a/b/c">>, payload := <<"06">>}},
{pub, C1, #{topic := <<"a/b/c">>, payload := <<"09">>}},
{pub, C2, #{topic := <<"a/b/c">>, payload := <<"02">>}},
{pub, C2, #{topic := <<"a/b/d">>, payload := <<"03">>}},
{pub, C2, #{topic := <<"a/b/d">>, payload := <<"05">>}},
{pub, C2, #{topic := <<"a/b/c">>, payload := <<"06">>}},
{pub, C2, #{topic := <<"a/b/d">>, payload := <<"07">>}},
{pub, C3, #{topic := <<"a/b/c">>, payload := <<"01">>}},
{pub, C3, #{topic := <<"a/b/c">>, payload := <<"02">>}},
{pub, C3, #{topic := <<"a/b/c/d">>, payload := <<"04">>}},
{pub, C3, #{topic := <<"a/b/d">>, payload := <<"05">>}},
{pub, C3, #{topic := <<"a/b/d">>, payload := <<"07">>}},
{pub, C3, #{topic := <<"a/b/d">>, payload := <<"10">>}}
],
lists:sort(
fun({pub, CL, #{payload := PL}}, {pub, CR, #{payload := PR}}) ->
{CL, PL} < {CR, PR}
end,
Deliveries
)
).
start_client(Node) ->
Self = self(),
{ok, C} = emqtt:start_link(#{
port => get_mqtt_tcp_port(Node),
msg_handler => #{
publish => fun(Msg) -> Self ! {pub, self(), Msg} end
}
}),
{ok, _Props} = emqtt:connect(C),
C.
publish(C, Topic, Payload) ->
{ok, #{reason_code := 0}} = emqtt:publish(C, Topic, Payload, 1).
subscribe(C, Topic) ->
% NOTE: sleeping here as a lazy way to wait for subscribe to replicate
{ok, _Props, [0]} = emqtt:subscribe(C, Topic),
ok = timer:sleep(200).
unsubscribe(C, Topic) ->
% NOTE: sleeping here as a lazy way to wait for unsubscribe to replicate
{ok, _Props, undefined} = emqtt:unsubscribe(C, Topic),
ok = timer:sleep(200).
%%
t_routing_schema_switch_v1(Config) ->
t_routing_schema_switch(_From = v2, _To = v1, Config).
t_routing_schema_switch_v2(Config) ->
t_routing_schema_switch(_From = v1, _To = v2, Config).
t_routing_schema_switch(VFrom, VTo, Config) ->
% Start first node with routing schema VTo (e.g. v1)
WorkDir = ?config(work_dir, Config),
[Node1] = emqx_cth_cluster:start(
[
{routing_schema_switch1, #{
apps => [mk_genrpc_appspec(), mk_emqx_appspec(VTo, 1)]
}}
],
#{work_dir => WorkDir}
),
% Ensure there's at least 1 route on Node1
C1 = start_client(Node1),
ok = subscribe(C1, <<"a/+/c">>),
ok = subscribe(C1, <<"d/e/f/#">>),
% Start rest of nodes with routing schema VFrom (e.g. v2)
[Node2, Node3] = emqx_cth_cluster:start(
[
{routing_schema_switch2, #{
apps => [mk_genrpc_appspec(), mk_emqx_appspec(VFrom, 2)],
base_port => 20000,
join_to => Node1
}},
{routing_schema_switch3, #{
apps => [mk_genrpc_appspec(), mk_emqx_appspec(VFrom, 3)],
base_port => 20100,
join_to => Node1
}}
],
#{work_dir => WorkDir}
),
% Verify that new nodes switched to schema v1/v2 in presence of v1/v2 routes respectively
Nodes = [Node1, Node2, Node3],
?assertEqual(
[{ok, VTo}, {ok, VTo}, {ok, VTo}],
erpc:multicall(Nodes, emqx_router, get_schema_vsn, [])
),
% Wait for all nodes to agree on cluster state
?retry(
500,
10,
?assertMatch(
[{ok, [Node1, Node2, Node3]}],
lists:usort(erpc:multicall(Nodes, emqx, running_nodes, []))
)
),
% Verify that routing works as expected
C2 = start_client(Node2),
ok = subscribe(C2, <<"a/+/d">>),
C3 = start_client(Node3),
ok = subscribe(C3, <<"d/e/f/#">>),
{ok, _} = publish(C1, <<"a/b/d">>, <<"hey-newbies">>),
{ok, _} = publish(C2, <<"a/b/c">>, <<"hi">>),
{ok, _} = publish(C3, <<"d/e/f/42">>, <<"hello">>),
?assertReceive({pub, C2, #{topic := <<"a/b/d">>, payload := <<"hey-newbies">>}}),
?assertReceive({pub, C1, #{topic := <<"a/b/c">>, payload := <<"hi">>}}),
?assertReceive({pub, C1, #{topic := <<"d/e/f/42">>, payload := <<"hello">>}}),
?assertReceive({pub, C3, #{topic := <<"d/e/f/42">>, payload := <<"hello">>}}),
?assertNotReceive(_),
ok = emqtt:stop(C1),
ok = emqtt:stop(C2),
ok = emqtt:stop(C3),
ok = emqx_cth_cluster:stop(Nodes).
%%
get_mqtt_tcp_port(Node) ->
{_, Port} = erpc:call(Node, emqx_config, get, [[listeners, tcp, default, bind]]),
Port.

View File

@ -1054,7 +1054,7 @@ t_queue_subscription(Config) when is_list(Config) ->
begin
ct:pal("routes: ~p", [ets:tab2list(emqx_route)]),
%% FIXME: should ensure we have 2 subscriptions
true = emqx_router:has_routes(Topic)
[_] = emqx_router:lookup_routes(Topic)
end
),
@ -1081,7 +1081,7 @@ t_queue_subscription(Config) when is_list(Config) ->
%% _Attempts0 = 50,
%% begin
%% ct:pal("routes: ~p", [ets:tab2list(emqx_route)]),
%% false = emqx_router:has_routes(Topic)
%% [] = emqx_router:lookup_routes(Topic)
%% end
%% ),
ct:sleep(500),

View File

@ -25,42 +25,82 @@
-import(emqx_proper_types, [scaled/2]).
all() ->
emqx_common_test_helpers:all(?MODULE).
[
{group, ets},
{group, gb_tree}
].
t_insert(_) ->
Tab = emqx_topic_index:new(),
true = emqx_topic_index:insert(<<"sensor/1/metric/2">>, t_insert_1, <<>>, Tab),
true = emqx_topic_index:insert(<<"sensor/+/#">>, t_insert_2, <<>>, Tab),
true = emqx_topic_index:insert(<<"sensor/#">>, t_insert_3, <<>>, Tab),
?assertEqual(<<"sensor/#">>, topic(match(<<"sensor">>, Tab))),
?assertEqual(t_insert_3, id(match(<<"sensor">>, Tab))).
groups() ->
All = emqx_common_test_helpers:all(?MODULE),
[
{ets, All},
{gb_tree, All}
].
t_match(_) ->
Tab = emqx_topic_index:new(),
true = emqx_topic_index:insert(<<"sensor/1/metric/2">>, t_match_1, <<>>, Tab),
true = emqx_topic_index:insert(<<"sensor/+/#">>, t_match_2, <<>>, Tab),
true = emqx_topic_index:insert(<<"sensor/#">>, t_match_3, <<>>, Tab),
?assertMatch(
[<<"sensor/#">>, <<"sensor/+/#">>],
[topic(M) || M <- matches(<<"sensor/1">>, Tab)]
init_per_group(ets, Config) ->
[{index_module, emqx_topic_index} | Config];
init_per_group(gb_tree, Config) ->
[{index_module, emqx_topic_gbt} | Config].
end_per_group(_Group, _Config) ->
ok.
get_module(Config) ->
proplists:get_value(index_module, Config).
t_insert(Config) ->
M = get_module(Config),
Tab = M:new(),
true = M:insert(<<"sensor/1/metric/2">>, t_insert_1, <<>>, Tab),
true = M:insert(<<"sensor/+/#">>, t_insert_2, <<>>, Tab),
true = M:insert(<<"sensor/#">>, t_insert_3, <<>>, Tab),
?assertEqual(<<"sensor/#">>, topic(match(M, <<"sensor">>, Tab))),
?assertEqual(t_insert_3, id(match(M, <<"sensor">>, Tab))).
t_insert_filter(Config) ->
M = get_module(Config),
Tab = M:new(),
Topic = <<"sensor/+/metric//#">>,
true = M:insert(Topic, 1, <<>>, Tab),
true = M:insert(emqx_trie_search:filter(Topic), 2, <<>>, Tab),
?assertEqual(
[Topic, Topic],
[topic(X) || X <- matches(M, <<"sensor/1/metric//2">>, Tab)]
).
t_match2(_) ->
Tab = emqx_topic_index:new(),
true = emqx_topic_index:insert(<<"#">>, t_match2_1, <<>>, Tab),
true = emqx_topic_index:insert(<<"+/#">>, t_match2_2, <<>>, Tab),
true = emqx_topic_index:insert(<<"+/+/#">>, t_match2_3, <<>>, Tab),
t_match(Config) ->
M = get_module(Config),
Tab = M:new(),
true = M:insert(<<"sensor/1/metric/2">>, t_match_1, <<>>, Tab),
true = M:insert(<<"sensor/+/#">>, t_match_2, <<>>, Tab),
true = M:insert(<<"sensor/#">>, t_match_3, <<>>, Tab),
?assertMatch(
[<<"sensor/#">>, <<"sensor/+/#">>],
[topic(X) || X <- matches(M, <<"sensor/1">>, Tab)]
).
t_match2(Config) ->
M = get_module(Config),
Tab = M:new(),
true = M:insert(<<"#">>, t_match2_1, <<>>, Tab),
true = M:insert(<<"+/#">>, t_match2_2, <<>>, Tab),
true = M:insert(<<"+/+/#">>, t_match2_3, <<>>, Tab),
?assertEqual(
[<<"#">>, <<"+/#">>, <<"+/+/#">>],
[topic(M) || M <- matches(<<"a/b/c">>, Tab)]
[topic(X) || X <- matches(M, <<"a/b/c">>, Tab)]
),
?assertEqual(
false,
emqx_topic_index:match(<<"$SYS/broker/zenmq">>, Tab)
M:match(<<"$SYS/broker/zenmq">>, Tab)
),
?assertEqual(
[],
matches(M, <<"$SYS/broker/zenmq">>, Tab)
).
t_match3(_) ->
Tab = emqx_topic_index:new(),
t_match3(Config) ->
M = get_module(Config),
Tab = M:new(),
Records = [
{<<"d/#">>, t_match3_1},
{<<"a/b/+">>, t_match3_2},
@ -69,37 +109,39 @@ t_match3(_) ->
{<<"$SYS/#">>, t_match3_sys}
],
lists:foreach(
fun({Topic, ID}) -> emqx_topic_index:insert(Topic, ID, <<>>, Tab) end,
fun({Topic, ID}) -> M:insert(Topic, ID, <<>>, Tab) end,
Records
),
Matched = matches(<<"a/b/c">>, Tab),
Matched = matches(M, <<"a/b/c">>, Tab),
case length(Matched) of
3 -> ok;
_ -> error({unexpected, Matched})
end,
?assertEqual(
t_match3_sys,
id(match(<<"$SYS/a/b/c">>, Tab))
id(match(M, <<"$SYS/a/b/c">>, Tab))
).
t_match4(_) ->
Tab = emqx_topic_index:new(),
t_match4(Config) ->
M = get_module(Config),
Tab = M:new(),
Records = [{<<"/#">>, t_match4_1}, {<<"/+">>, t_match4_2}, {<<"/+/a/b/c">>, t_match4_3}],
lists:foreach(
fun({Topic, ID}) -> emqx_topic_index:insert(Topic, ID, <<>>, Tab) end,
fun({Topic, ID}) -> M:insert(Topic, ID, <<>>, Tab) end,
Records
),
?assertEqual(
[<<"/#">>, <<"/+">>],
[topic(M) || M <- matches(<<"/">>, Tab)]
[topic(X) || X <- matches(M, <<"/">>, Tab)]
),
?assertEqual(
[<<"/#">>, <<"/+/a/b/c">>],
[topic(M) || M <- matches(<<"/0/a/b/c">>, Tab)]
[topic(X) || X <- matches(M, <<"/0/a/b/c">>, Tab)]
).
t_match5(_) ->
Tab = emqx_topic_index:new(),
t_match5(Config) ->
M = get_module(Config),
Tab = M:new(),
T = <<"a/b/c/d/e/f/g/h/i/j/k/l/m/n/o/p/q/r/s/t/u/v/w/x/y/z">>,
Records = [
{<<"#">>, t_match5_1},
@ -107,58 +149,89 @@ t_match5(_) ->
{<<T/binary, "/+">>, t_match5_3}
],
lists:foreach(
fun({Topic, ID}) -> emqx_topic_index:insert(Topic, ID, <<>>, Tab) end,
fun({Topic, ID}) -> M:insert(Topic, ID, <<>>, Tab) end,
Records
),
?assertEqual(
[<<"#">>, <<T/binary, "/#">>],
[topic(M) || M <- matches(T, Tab)]
[topic(X) || X <- matches(M, T, Tab)]
),
?assertEqual(
[<<"#">>, <<T/binary, "/#">>, <<T/binary, "/+">>],
[topic(M) || M <- matches(<<T/binary, "/1">>, Tab)]
[topic(X) || X <- matches(M, <<T/binary, "/1">>, Tab)]
).
t_match6(_) ->
Tab = emqx_topic_index:new(),
t_match6(Config) ->
M = get_module(Config),
Tab = M:new(),
T = <<"a/b/c/d/e/f/g/h/i/j/k/l/m/n/o/p/q/r/s/t/u/v/w/x/y/z">>,
W = <<"+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/#">>,
emqx_topic_index:insert(W, ID = t_match6, <<>>, Tab),
?assertEqual(ID, id(match(T, Tab))).
M:insert(W, ID = t_match6, <<>>, Tab),
?assertEqual(ID, id(match(M, T, Tab))).
t_match7(_) ->
Tab = emqx_topic_index:new(),
t_match7(Config) ->
M = get_module(Config),
Tab = M:new(),
T = <<"a/b/c/d/e/f/g/h/i/j/k/l/m/n/o/p/q/r/s/t/u/v/w/x/y/z">>,
W = <<"a/+/c/+/e/+/g/+/i/+/k/+/m/+/o/+/q/+/s/+/u/+/w/+/y/+/#">>,
emqx_topic_index:insert(W, t_match7, <<>>, Tab),
?assertEqual(W, topic(match(T, Tab))).
M:insert(W, t_match7, <<>>, Tab),
?assertEqual(W, topic(match(M, T, Tab))).
t_match_fast_forward(_) ->
Tab = emqx_topic_index:new(),
emqx_topic_index:insert(<<"a/b/1/2/3/4/5/6/7/8/9/#">>, id1, <<>>, Tab),
emqx_topic_index:insert(<<"z/y/x/+/+">>, id2, <<>>, Tab),
emqx_topic_index:insert(<<"a/b/c/+">>, id3, <<>>, Tab),
t_match8(Config) ->
M = get_module(Config),
Tab = M:new(),
Filters = [<<"+">>, <<"dev/global/sensor">>, <<"dev/+/sensor/#">>],
IDs = [1, 2, 3],
Keys = [{F, ID} || F <- Filters, ID <- IDs],
lists:foreach(
fun({F, ID}) ->
M:insert(F, ID, <<>>, Tab)
end,
Keys
),
Topic = <<"dev/global/sensor">>,
Matches = lists:sort(matches(M, Topic, Tab)),
?assertEqual(
[
<<"dev/+/sensor/#">>,
<<"dev/+/sensor/#">>,
<<"dev/+/sensor/#">>,
<<"dev/global/sensor">>,
<<"dev/global/sensor">>,
<<"dev/global/sensor">>
],
[emqx_topic_index:get_topic(Match) || Match <- Matches]
).
t_match_fast_forward(Config) ->
M = get_module(Config),
Tab = M:new(),
M:insert(<<"a/b/1/2/3/4/5/6/7/8/9/#">>, id1, <<>>, Tab),
M:insert(<<"z/y/x/+/+">>, id2, <<>>, Tab),
M:insert(<<"a/b/c/+">>, id3, <<>>, Tab),
% dbg:tracer(),
% dbg:p(all, c),
% dbg:tpl({ets, next, '_'}, x),
?assertEqual(id1, id(match(<<"a/b/1/2/3/4/5/6/7/8/9/0">>, Tab))),
?assertEqual([id1], [id(M) || M <- matches(<<"a/b/1/2/3/4/5/6/7/8/9/0">>, Tab)]).
?assertEqual(id1, id(match(M, <<"a/b/1/2/3/4/5/6/7/8/9/0">>, Tab))),
?assertEqual([id1], [id(X) || X <- matches(M, <<"a/b/1/2/3/4/5/6/7/8/9/0">>, Tab)]).
t_match_unique(_) ->
Tab = emqx_topic_index:new(),
emqx_topic_index:insert(<<"a/b/c">>, t_match_id1, <<>>, Tab),
emqx_topic_index:insert(<<"a/b/+">>, t_match_id1, <<>>, Tab),
emqx_topic_index:insert(<<"a/b/c/+">>, t_match_id2, <<>>, Tab),
t_match_unique(Config) ->
M = get_module(Config),
Tab = M:new(),
M:insert(<<"a/b/c">>, t_match_id1, <<>>, Tab),
M:insert(<<"a/b/+">>, t_match_id1, <<>>, Tab),
M:insert(<<"a/b/c/+">>, t_match_id2, <<>>, Tab),
?assertEqual(
[t_match_id1, t_match_id1],
[id(M) || M <- emqx_topic_index:matches(<<"a/b/c">>, Tab, [])]
[id(X) || X <- matches(M, <<"a/b/c">>, Tab, [])]
),
?assertEqual(
[t_match_id1],
[id(M) || M <- emqx_topic_index:matches(<<"a/b/c">>, Tab, [unique])]
[id(X) || X <- matches(M, <<"a/b/c">>, Tab, [unique])]
).
t_match_wildcard_edge_cases(_) ->
t_match_wildcard_edge_cases(Config) ->
M = get_module(Config),
CommonTopics = [
<<"a/b">>,
<<"a/b/#">>,
@ -179,32 +252,46 @@ t_match_wildcard_edge_cases(_) ->
{[<<"/">>, <<"+">>], <<"a">>, [2]}
],
F = fun({Topics, TopicName, Expected}) ->
Tab = emqx_topic_index:new(),
_ = [emqx_topic_index:insert(T, N, <<>>, Tab) || {N, T} <- lists:enumerate(Topics)],
Tab = M:new(),
_ = [M:insert(T, N, <<>>, Tab) || {N, T} <- lists:enumerate(Topics)],
?assertEqual(
lists:last(Expected),
id(emqx_topic_index:match(TopicName, Tab)),
id(M:match(TopicName, Tab)),
#{"Base topics" => Topics, "Topic name" => TopicName}
),
?assertEqual(
Expected,
[id(M) || M <- emqx_topic_index:matches(TopicName, Tab, [unique])],
[id(X) || X <- matches(M, TopicName, Tab, [unique])],
#{"Base topics" => Topics, "Topic name" => TopicName}
)
end,
lists:foreach(F, Datasets).
t_prop_matches(_) ->
t_prop_edgecase(Config) ->
M = get_module(Config),
Tab = M:new(),
Topic = <<"01/01">>,
Filters = [
{1, <<>>},
{2, <<"+/01">>},
{3, <<>>},
{4, <<"+/+/01">>}
],
_ = [M:insert(F, N, <<>>, Tab) || {N, F} <- Filters],
?assertMatch([2], [id(X) || X <- matches(M, Topic, Tab, [unique])]).
t_prop_matches(Config) ->
M = get_module(Config),
?assert(
proper:quickcheck(
topic_matches_prop(),
topic_matches_prop(M),
[{max_size, 100}, {numtests, 100}]
)
),
Statistics = [{C, account(C)} || C <- [filters, topics, matches, maxhits]],
ct:pal("Statistics: ~p", [maps:from_list(Statistics)]).
topic_matches_prop() ->
topic_matches_prop(M) ->
?FORALL(
% Generate a longer list of topics and a shorter list of topic filter patterns.
#{
@ -219,12 +306,12 @@ topic_matches_prop() ->
patterns => list(topic_filter_pattern_t())
}),
begin
Tab = emqx_topic_index:new(),
Tab = M:new(),
Topics = [emqx_topic:join(T) || T <- TTopics],
% Produce topic filters from generated topics and patterns.
% Number of filters is equal to the number of patterns, most of the time.
Filters = lists:enumerate(mk_filters(Pats, TTopics)),
_ = [emqx_topic_index:insert(F, N, <<>>, Tab) || {N, F} <- Filters],
_ = [M:insert(F, N, <<>>, Tab) || {N, F} <- Filters],
% Gather some basic statistics
_ = account(filters, length(Filters)),
_ = account(topics, NTopics = length(Topics)),
@ -233,7 +320,7 @@ topic_matches_prop() ->
% matching it against the list of filters one by one.
lists:all(
fun(Topic) ->
Ids1 = [id(M) || M <- emqx_topic_index:matches(Topic, Tab, [unique])],
Ids1 = [id(X) || X <- matches(M, Topic, Tab, [unique])],
Ids2 = lists:filtermap(
fun({N, F}) ->
case emqx_topic:match(Topic, F) of
@ -252,8 +339,9 @@ topic_matches_prop() ->
ct:pal(
"Topic name: ~p~n"
"Index results: ~p~n"
"Topic match results:: ~p~n",
[Topic, Ids1, Ids2]
"Topic match results: ~p~n"
"Filters: ~p~n",
[Topic, Ids1, Ids2, Filters]
),
false
end
@ -276,17 +364,20 @@ account(Counter) ->
%%
match(T, Tab) ->
emqx_topic_index:match(T, Tab).
match(M, T, Tab) ->
M:match(T, Tab).
matches(T, Tab) ->
lists:sort(emqx_topic_index:matches(T, Tab, [])).
matches(M, T, Tab) ->
lists:sort(M:matches(T, Tab, [])).
matches(M, T, Tab, Opts) ->
M:matches(T, Tab, Opts).
id(Match) ->
emqx_topic_index:get_id(Match).
emqx_trie_search:get_id(Match).
topic(Match) ->
emqx_topic_index:get_topic(Match).
emqx_trie_search:get_topic(Match).
%%
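%% A minimal usage sketch (not part of this change set): it reuses only the
%% calls already exercised by this suite (new/0, insert/4, matches/3 and the
%% emqx_trie_search accessors); the topic and ID values below are illustrative.
usage_sketch() ->
    Tab = emqx_topic_index:new(),
    _ = emqx_topic_index:insert(<<"sensor/+/temp">>, id1, <<>>, Tab),
    _ = emqx_topic_index:insert(<<"sensor/#">>, id2, <<>>, Tab),
    Matches = emqx_topic_index:matches(<<"sensor/room1/temp">>, Tab, [unique]),
    [{emqx_trie_search:get_id(M), emqx_trie_search:get_topic(M)} || M <- Matches].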

View File

@ -0,0 +1,47 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_trie_search_tests).
-include_lib("eunit/include/eunit.hrl").
-import(emqx_trie_search, [filter/1]).
filter_test_() ->
[
?_assertEqual(
[<<"sensor">>, '+', <<"metric">>, <<>>, '#'],
filter(<<"sensor/+/metric//#">>)
),
?_assertEqual(
false,
filter(<<"sensor/1/metric//42">>)
)
].
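%% A reading of the two cases above, inferred from the expected values rather
%% than from the implementation: filter/1 splits a topic filter on "/" into a
%% word list, turning "+" and "#" levels into the atoms '+' and '#' and empty
%% levels into <<>>; for a topic name without wildcards it returns false instead.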
topic_validation_test_() ->
NextF = fun(_) -> '$end_of_table' end,
Call = fun(Topic) ->
emqx_trie_search:match(Topic, NextF)
end,
[
?_assertError(badarg, Call(<<"+">>)),
?_assertError(badarg, Call(<<"#">>)),
?_assertError(badarg, Call(<<"a/+/b">>)),
?_assertError(badarg, Call(<<"a/b/#">>)),
?_assertEqual(false, Call(<<"a/b/b+">>)),
?_assertEqual(false, Call(<<"a/b/c#">>))
].
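%% A reading of the validation cases above, inferred from the assertions:
%% matching a topic name that contains '+' or '#' as a whole level is a badarg
%% error, while '+'/'#' characters embedded inside a level (e.g. <<"b+">>,
%% <<"c#">>) are treated as literals and simply fail to match here.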

View File

@ -483,7 +483,6 @@ t_handle_info_close(_) ->
t_handle_info_event(_) ->
ok = meck:expect(emqx_cm, register_channel, fun(_, _, _) -> ok end),
ok = meck:expect(emqx_cm, insert_channel_info, fun(_, _, _) -> ok end),
ok = meck:expect(emqx_cm, connection_closed, fun(_) -> true end),
{ok, _} = ?ws_conn:handle_info({event, connected}, st()),
{ok, _} = ?ws_conn:handle_info({event, disconnected}, st()),
{ok, _} = ?ws_conn:handle_info({event, updated}, st()).

View File

@ -18,8 +18,8 @@
-define(EMQX_AUTHENTICATION_HRL, true).
-include_lib("emqx/include/logger.hrl").
-include_lib("emqx/include/emqx_access_control.hrl").
-define(AUTHN_TRACE_TAG, "AUTHN").
-define(GLOBAL, 'mqtt:global').
-define(TRACE_AUTHN_PROVIDER(Msg), ?TRACE_AUTHN_PROVIDER(Msg, #{})).
@ -36,12 +36,6 @@
-define(EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_ATOM, authentication).
-define(EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_BINARY, <<"authentication">>).
%% key to a persistent term which stores a module name in order to inject
%% schema module at run-time to keep emqx app's compile time purity.
%% see emqx_schema.erl for more details
%% and emqx_conf_schema for an example
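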
-define(EMQX_AUTHENTICATION_SCHEMA_MODULE_PT_KEY, emqx_authentication_schema_module).
%% authentication move cmd
-define(CMD_MOVE_FRONT, front).
-define(CMD_MOVE_REAR, rear).

View File

@ -17,7 +17,7 @@
-ifndef(EMQX_AUTHN_HRL).
-define(EMQX_AUTHN_HRL, true).
-include_lib("emqx/include/emqx_authentication.hrl").
-include_lib("emqx_authentication.hrl").
-define(APP, emqx_authn).

View File

@ -34,4 +34,6 @@
{cover_opts, [verbose]}.
{cover_export_enabled, true}.
{erl_first_files, ["src/emqx_authentication.erl"]}.
{project_plugins, [erlfmt]}.

View File

@ -22,18 +22,27 @@
-behaviour(gen_server).
-include("emqx.hrl").
-include("logger.hrl").
-include("emqx_authentication.hrl").
-include_lib("emqx/include/logger.hrl").
-include_lib("emqx/include/emqx_hooks.hrl").
-include_lib("stdlib/include/ms_transform.hrl").
-define(CONF_ROOT, ?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_ATOM).
-define(IS_UNDEFINED(X), (X =:= undefined orelse X =:= <<>>)).
-record(authenticator, {
id :: binary(),
provider :: module(),
enable :: boolean(),
state :: map()
}).
-record(chain, {
name :: atom(),
authenticators :: [#authenticator{}]
}).
%% The authentication entrypoint.
-export([
pre_hook_authenticate/1,
authenticate/2
]).
@ -220,21 +229,6 @@ when
%%------------------------------------------------------------------------------
%% Authenticate
%%------------------------------------------------------------------------------
-spec pre_hook_authenticate(emqx_types:clientinfo()) ->
ok | continue | {error, not_authorized}.
pre_hook_authenticate(#{enable_authn := false}) ->
?TRACE_RESULT("authentication_result", ok, enable_authn_false);
pre_hook_authenticate(#{enable_authn := quick_deny_anonymous} = Credential) ->
case maps:get(username, Credential, undefined) of
U when ?IS_UNDEFINED(U) ->
?TRACE_RESULT(
"authentication_result", {error, not_authorized}, enable_authn_false
);
_ ->
continue
end;
pre_hook_authenticate(_) ->
continue.
authenticate(#{listener := Listener, protocol := Protocol} = Credential, AuthResult) ->
case get_authenticators(Listener, global_chain(Protocol)) of
@ -271,6 +265,7 @@ get_enabled(Authenticators) ->
%%------------------------------------------------------------------------------
%% @doc Get all registered authentication providers.
-spec get_providers() -> #{authn_type() => module()}.
get_providers() ->
call(get_providers).

View File

@ -21,7 +21,9 @@
-export([
pre_config_update/3,
post_config_update/5
post_config_update/5,
propagated_pre_config_update/3,
propagated_post_config_update/5
]).
-export([
@ -37,7 +39,7 @@
-export_type([config/0]).
-include("logger.hrl").
-include_lib("emqx/include/logger.hrl").
-include("emqx_authentication.hrl").
-type parsed_config() :: #{
@ -65,8 +67,8 @@
-spec pre_config_update(list(atom()), update_request(), emqx_config:raw_config()) ->
{ok, map() | list()} | {error, term()}.
pre_config_update(Paths, UpdateReq, OldConfig) ->
try do_pre_config_update(Paths, UpdateReq, to_list(OldConfig)) of
pre_config_update(ConfPath, UpdateReq, OldConfig) ->
try do_pre_config_update(ConfPath, UpdateReq, to_list(OldConfig)) of
{error, Reason} -> {error, Reason};
{ok, NewConfig} -> {ok, NewConfig}
catch
@ -130,31 +132,33 @@ do_pre_config_update(_, {move_authenticator, _ChainName, AuthenticatorID, Positi
end
end
end;
do_pre_config_update(Paths, {merge_authenticators, NewConfig}, OldConfig) ->
do_pre_config_update(ConfPath, {merge_authenticators, NewConfig}, OldConfig) ->
MergeConfig = merge_authenticators(OldConfig, NewConfig),
do_pre_config_update(Paths, MergeConfig, OldConfig);
do_pre_config_update(ConfPath, MergeConfig, OldConfig);
do_pre_config_update(_, OldConfig, OldConfig) ->
{ok, OldConfig};
do_pre_config_update(Paths, NewConfig, _OldConfig) ->
ChainName = chain_name(Paths),
{ok, [
begin
CertsDir = certs_dir(ChainName, New),
convert_certs(CertsDir, New)
end
|| New <- to_list(NewConfig)
]}.
do_pre_config_update(ConfPath, NewConfig, _OldConfig) ->
convert_certs_for_conf_path(ConfPath, NewConfig).
%% @doc Handle listener config changes made at a higher level.
-spec propagated_pre_config_update(list(binary()), update_request(), emqx_config:raw_config()) ->
{ok, map() | list()} | {error, term()}.
propagated_pre_config_update(_, OldConfig, OldConfig) ->
{ok, OldConfig};
propagated_pre_config_update(ConfPath, NewConfig, _OldConfig) ->
convert_certs_for_conf_path(ConfPath, NewConfig).
-spec post_config_update(
list(atom()),
update_request(),
map() | list(),
map() | list() | undefined,
emqx_config:raw_config(),
emqx_config:app_envs()
) ->
ok | {ok, map()} | {error, term()}.
post_config_update(Paths, UpdateReq, NewConfig, OldConfig, AppEnvs) ->
do_post_config_update(Paths, UpdateReq, to_list(NewConfig), OldConfig, AppEnvs).
post_config_update(ConfPath, UpdateReq, NewConfig, OldConfig, AppEnvs) ->
do_post_config_update(ConfPath, UpdateReq, to_list(NewConfig), OldConfig, AppEnvs).
do_post_config_update(
_, {create_authenticator, ChainName, Config}, NewConfig, _OldConfig, _AppEnvs
@ -192,8 +196,8 @@ do_post_config_update(
emqx_authentication:move_authenticator(ChainName, AuthenticatorID, Position);
do_post_config_update(_, _UpdateReq, OldConfig, OldConfig, _AppEnvs) ->
ok;
do_post_config_update(Paths, _UpdateReq, NewConfig0, OldConfig0, _AppEnvs) ->
ChainName = chain_name(Paths),
do_post_config_update(ConfPath, _UpdateReq, NewConfig0, OldConfig0, _AppEnvs) ->
ChainName = chain_name(ConfPath),
OldConfig = to_list(OldConfig0),
NewConfig = to_list(NewConfig0),
OldIds = lists:map(fun authenticator_id/1, OldConfig),
@ -203,6 +207,20 @@ do_post_config_update(Paths, _UpdateReq, NewConfig0, OldConfig0, _AppEnvs) ->
ok = emqx_authentication:reorder_authenticator(ChainName, NewIds),
ok.
%% @doc Handle listener config changes made at a higher level.
-spec propagated_post_config_update(
list(atom()),
update_request(),
map() | list() | undefined,
emqx_config:raw_config(),
emqx_config:app_envs()
) ->
ok.
propagated_post_config_update(ConfPath, UpdateReq, NewConfig, OldConfig, AppEnvs) ->
ok = post_config_update(ConfPath, UpdateReq, NewConfig, OldConfig, AppEnvs),
ok.
%% create new authenticators and update existing ones
create_or_update_authenticators(OldIds, ChainName, NewConfig) ->
lists:foreach(
@ -238,6 +256,17 @@ to_list(M) when M =:= #{} -> [];
to_list(M) when is_map(M) -> [M];
to_list(L) when is_list(L) -> L.
convert_certs_for_conf_path(ConfPath, NewConfig) ->
ChainName = chain_name_for_filepath(ConfPath),
ConvertedConfs = lists:map(
fun(Conf) ->
CertsDir = certs_dir(ChainName, Conf),
convert_certs(CertsDir, Conf)
end,
to_list(NewConfig)
),
{ok, ConvertedConfs}.
convert_certs(CertsDir, NewConfig) ->
NewSSL = maps:get(<<"ssl">>, NewConfig, undefined),
case emqx_tls_lib:ensure_ssl_files(CertsDir, NewSSL) of
@ -331,7 +360,16 @@ dir(ChainName, Config) when is_map(Config) ->
chain_name([authentication]) ->
?GLOBAL;
chain_name([listeners, Type, Name, authentication]) ->
binary_to_existing_atom(<<(atom_to_binary(Type))/binary, ":", (atom_to_binary(Name))/binary>>).
%% Type, Name atoms exist, so let 'Type:Name' exist too.
binary_to_atom(<<(atom_to_binary(Type))/binary, ":", (atom_to_binary(Name))/binary>>).
chain_name_for_filepath(Path) ->
do_chain_name_for_filepath([to_bin(Key) || Key <- Path]).
do_chain_name_for_filepath([<<"authentication">>]) ->
to_bin(?GLOBAL);
do_chain_name_for_filepath([<<"listeners">>, Type, Name, <<"authentication">>]) ->
<<(to_bin(Type))/binary, ":", (to_bin(Name))/binary>>.
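%% Illustrative mapping implied by the clauses above (path values made up;
%% ?GLOBAL is the atom 'mqtt:global'):
%%   chain_name_for_filepath([<<"authentication">>]) -> <<"mqtt:global">>
%%   chain_name_for_filepath([listeners, tcp, default, authentication]) -> <<"tcp:default">>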
merge_authenticators(OriginConf0, NewConf0) ->
{OriginConf1, NewConf1} =

View File

@ -21,7 +21,6 @@
-include("emqx_authn.hrl").
-include_lib("emqx/include/logger.hrl").
-include_lib("emqx/include/emqx_placeholder.hrl").
-include_lib("emqx/include/emqx_authentication.hrl").
-include_lib("hocon/include/hoconsc.hrl").
-import(hoconsc, [mk/2, ref/1, ref/2]).

View File

@ -26,7 +26,7 @@
stop/1
]).
-include_lib("emqx/include/emqx_authentication.hrl").
-include_lib("emqx_authentication.hrl").
-dialyzer({nowarn_function, [start/2]}).
@ -35,8 +35,7 @@
%%------------------------------------------------------------------------------
start(_StartType, _StartArgs) ->
%% required by test cases, ensure the injection of
%% EMQX_AUTHENTICATION_SCHEMA_MODULE_PT_KEY
%% Required by test cases: ensure the schema is injected.
_ = emqx_conf_schema:roots(),
ok = mria_rlog:wait_for_shards([?AUTH_SHARD], infinity),
{ok, Sup} = emqx_authn_sup:start_link(),

View File

@ -19,6 +19,12 @@
-elvis([{elvis_style, invalid_dynamic_call, disable}]).
-include_lib("hocon/include/hoconsc.hrl").
-include("emqx_authn.hrl").
-include("emqx_authentication.hrl").
-behaviour(emqx_schema_hooks).
-export([
injected_fields/0
]).
-export([
common_fields/0,
@ -28,13 +34,18 @@
fields/1,
authenticator_type/0,
authenticator_type_without_scram/0,
root_type/0,
mechanism/1,
backend/1
]).
roots() -> [].
injected_fields() ->
#{
'mqtt.listener' => global_auth_fields(),
'roots.high' => mqtt_listener_auth_fields()
}.
tags() ->
[<<"Authentication">>].
@ -121,12 +132,36 @@ try_select_union_member(Module, Value) ->
Module:refs()
end.
%% Authentication is core functionality, but it is implemented outside of the
%% emqx app. In emqx_schema, 'authentication' is a map() type, which keeps
%% EMQX pluggable.
root_type() ->
hoconsc:array(authenticator_type()).
global_auth_fields() ->
[
{?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_ATOM,
hoconsc:mk(root_type(), #{
desc => ?DESC(global_authentication),
converter => fun ensure_array/2,
default => [],
importance => ?IMPORTANCE_LOW
})}
].
mqtt_listener_auth_fields() ->
[
{?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_ATOM,
hoconsc:mk(root_type(), #{
desc => ?DESC(listener_authentication),
converter => fun ensure_array/2,
default => [],
importance => ?IMPORTANCE_HIDDEN
})}
].
%% Older versions of the schema allow an individual element (instead of a chain) in the config.
ensure_array(undefined, _) -> undefined;
ensure_array(L, _) when is_list(L) -> L;
ensure_array(M, _) -> [M].
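%% A tiny restatement of the converter above with made-up values, showing that
%% both a single authenticator map and an existing chain normalize to a list:
%%   ensure_array(#{<<"mechanism">> => <<"password_based">>}, Opts)
%%     -> [#{<<"mechanism">> => <<"password_based">>}]
%%   ensure_array([A, B], Opts) -> [A, B]
%%   ensure_array(undefined, Opts) -> undefined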
mechanism(Name) ->
?HOCON(
Name,

View File

@ -27,5 +27,15 @@ start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
init([]) ->
ChildSpecs = [],
AuthNSup = #{
id => emqx_authentication_sup,
start => {emqx_authentication_sup, start_link, []},
restart => permanent,
shutdown => infinity,
type => supervisor,
modules => [emqx_authentication_sup]
},
ChildSpecs = [AuthNSup],
{ok, {{one_for_one, 10, 10}, ChildSpecs}}.

View File

@ -20,7 +20,6 @@
-include("emqx_authn.hrl").
-include_lib("emqx/include/logger.hrl").
-include_lib("emqx/include/emqx_authentication.hrl").
-include_lib("hocon/include/hoconsc.hrl").
-import(emqx_dashboard_swagger, [error_codes/2]).

View File

@ -173,6 +173,8 @@ update(Config, _State) ->
authenticate(#{auth_method := _}, _) ->
ignore;
authenticate(#{password := undefined}, _) ->
{error, bad_username_or_password};
authenticate(
#{password := Password} = Credential,
#{

View File

@ -160,6 +160,8 @@ destroy(#{resource_id := ResourceId}) ->
authenticate(#{auth_method := _}, _) ->
ignore;
authenticate(#{password := undefined}, _) ->
{error, bad_username_or_password};
authenticate(
#{password := Password} = Credential,
#{

View File

@ -110,6 +110,8 @@ destroy(#{resource_id := ResourceId}) ->
authenticate(#{auth_method := _}, _) ->
ignore;
authenticate(#{password := undefined}, _) ->
{error, bad_username_or_password};
authenticate(
#{password := Password} = Credential,
#{

View File

@ -113,6 +113,8 @@ destroy(#{resource_id := ResourceId}) ->
authenticate(#{auth_method := _}, _) ->
ignore;
authenticate(#{password := undefined}, _) ->
{error, bad_username_or_password};
authenticate(
#{password := Password} = Credential,
#{

View File

@ -148,6 +148,8 @@ destroy(#{resource_id := ResourceId}) ->
authenticate(#{auth_method := _}, _) ->
ignore;
authenticate(#{password := undefined}, _) ->
{error, bad_username_or_password};
authenticate(
#{password := Password} = Credential,
#{

View File

@ -94,19 +94,19 @@ all() ->
emqx_common_test_helpers:all(?MODULE).
init_per_suite(Config) ->
LogLevel = emqx_logger:get_primary_log_level(),
ok = emqx_logger:set_log_level(debug),
application:set_env(ekka, strict_mode, true),
emqx_config:erase_all(),
emqx_common_test_helpers:stop_apps([]),
emqx_common_test_helpers:boot_modules(all),
emqx_common_test_helpers:start_apps([]),
[{log_level, LogLevel} | Config].
Apps = emqx_cth_suite:start(
[
emqx,
emqx_conf,
emqx_authn
],
#{work_dir => ?config(priv_dir)}
),
ok = deregister_providers(),
[{apps, Apps} | Config].
end_per_suite(Config) ->
emqx_common_test_helpers:stop_apps([]),
LogLevel = ?config(log_level),
emqx_logger:set_log_level(LogLevel),
emqx_cth_suite:stop(?config(apps)),
ok.
init_per_testcase(Case, Config) ->
@ -302,15 +302,20 @@ t_update_config(Config) when is_list(Config) ->
ok = register_provider(?config("auth1"), ?MODULE),
ok = register_provider(?config("auth2"), ?MODULE),
Global = ?config(global),
%% We mocked the provider implementation but didn't mock the schema,
%% so we must provide a full config
AuthenticatorConfig1 = #{
mechanism => password_based,
backend => built_in_database,
enable => true
<<"mechanism">> => <<"password_based">>,
<<"backend">> => <<"built_in_database">>,
<<"enable">> => true
},
AuthenticatorConfig2 = #{
mechanism => password_based,
backend => mysql,
enable => true
<<"mechanism">> => <<"password_based">>,
<<"backend">> => <<"mysql">>,
<<"query">> => <<"SELECT password_hash, salt FROM users WHERE username = ?">>,
<<"server">> => <<"127.0.0.1:5432">>,
<<"database">> => <<"emqx">>,
<<"enable">> => true
},
ID1 = <<"password_based:built_in_database">>,
ID2 = <<"password_based:mysql">>,
@ -580,3 +585,11 @@ certs(Certs) ->
register_provider(Type, Module) ->
ok = ?AUTHN:register_providers([{Type, Module}]).
deregister_providers() ->
lists:foreach(
fun({Type, _Module}) ->
ok = ?AUTHN:deregister_provider(Type)
end,
maps:to_list(?AUTHN:get_providers())
).

View File

@ -102,7 +102,7 @@ t_will_message_connection_denied(Config) when is_list(Config) ->
{error, _} = emqtt:connect(Publisher),
receive
{'DOWN', Ref, process, Publisher, Reason} ->
?assertEqual({shutdown, unauthorized_client}, Reason)
?assertEqual({shutdown, malformed_username_or_password}, Reason)
after 2000 ->
error(timeout)
end,
@ -151,7 +151,7 @@ t_password_undefined(Config) when is_list(Config) ->
header = #mqtt_packet_header{type = ?CONNACK},
variable = #mqtt_packet_connack{
ack_flags = 0,
reason_code = ?CONNACK_AUTH
reason_code = ?CONNACK_CREDENTIALS
},
payload = undefined
},

View File

@ -23,6 +23,7 @@
-include("emqx_authn.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
-define(TCP_DEFAULT, 'tcp:default').
@ -43,7 +44,6 @@ init_per_testcase(t_authenticator_fail, Config) ->
meck:expect(emqx_authn_proto_v1, lookup_from_all_nodes, 3, [{error, {exception, badarg}}]),
init_per_testcase(default, Config);
init_per_testcase(_Case, Config) ->
{ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000),
emqx_authn_test_lib:delete_authenticators(
[?CONF_NS_ATOM],
?GLOBAL
@ -64,19 +64,27 @@ end_per_testcase(_, Config) ->
Config.
init_per_suite(Config) ->
emqx_config:erase(?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_BINARY),
_ = application:load(emqx_conf),
ok = emqx_mgmt_api_test_util:init_suite(
[emqx_conf, emqx_authn]
Apps = emqx_cth_suite:start(
[
emqx,
emqx_conf,
emqx_authn,
emqx_management,
{emqx_dashboard, "dashboard.listeners.http { enable = true, bind = 18083 }"}
],
#{
work_dir => ?config(priv_dir, Config)
}
),
_ = emqx_common_test_http:create_default_app(),
?AUTHN:delete_chain(?GLOBAL),
{ok, Chains} = ?AUTHN:list_chains(),
?assertEqual(length(Chains), 0),
Config.
[{apps, Apps} | Config].
end_per_suite(_Config) ->
emqx_mgmt_api_test_util:end_suite([emqx_authn]),
end_per_suite(Config) ->
_ = emqx_common_test_http:delete_default_app(),
ok = emqx_cth_suite:stop(?config(apps, Config)),
ok.
%%------------------------------------------------------------------------------
@ -351,7 +359,7 @@ test_authenticator_users(PathPrefix) ->
<<"metrics">> := #{
<<"total">> := 1,
<<"success">> := 0,
<<"nomatch">> := 1
<<"failed">> := 1
}
} = emqx_utils_json:decode(PageData0, [return_maps]);
["listeners", 'tcp:default'] ->
@ -409,7 +417,7 @@ test_authenticator_users(PathPrefix) ->
<<"metrics">> := #{
<<"total">> := 2,
<<"success">> := 1,
<<"nomatch">> := 1
<<"failed">> := 1
}
} = emqx_utils_json:decode(PageData01, [return_maps]);
["listeners", 'tcp:default'] ->

View File

@ -24,16 +24,19 @@
-define(PATH, [?CONF_NS_ATOM]).
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
all() ->
emqx_common_test_helpers:all(?MODULE).
init_per_suite(Config) ->
emqx_common_test_helpers:start_apps([emqx_conf, emqx_authn]),
Config.
Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn], #{
work_dir => ?config(priv_dir, Config)
}),
[{apps, Apps} | Config].
end_per_suite(_) ->
emqx_common_test_helpers:stop_apps([emqx_authn, emqx_conf]),
end_per_suite(Config) ->
ok = emqx_cth_suite:stop(?config(apps, Config)),
ok.
init_per_testcase(_Case, Config) ->
@ -42,9 +45,10 @@ init_per_testcase(_Case, Config) ->
<<"backend">> => <<"built_in_database">>,
<<"user_id_type">> => <<"clientid">>
},
{ok, _} = emqx:update_config(
{ok, _} = emqx_conf:update(
?PATH,
{create_authenticator, ?GLOBAL, AuthnConfig}
{create_authenticator, ?GLOBAL, AuthnConfig},
#{}
),
{ok, _} = emqx_conf:update(
[listeners, tcp, listener_authn_enabled],
@ -98,7 +102,7 @@ t_enable_authn(_Config) ->
%% enable_authn set to true, we go to the set up authn and fail
{ok, ConnPid1} = emqtt:start_link([{port, 18830}, {clientid, <<"clientid">>}]),
?assertMatch(
{error, {unauthorized_client, _}},
{error, {malformed_username_or_password, _}},
emqtt:connect(ConnPid1)
),
ok.

View File

@ -65,18 +65,17 @@ all() ->
emqx_common_test_helpers:all(?MODULE).
init_per_suite(Config) ->
_ = application:load(emqx_conf),
emqx_common_test_helpers:start_apps([emqx_authn]),
application:ensure_all_started(cowboy),
Config.
Apps = emqx_cth_suite:start([cowboy, emqx, emqx_conf, emqx_authn], #{
work_dir => ?config(priv_dir, Config)
}),
[{apps, Apps} | Config].
end_per_suite(_) ->
end_per_suite(Config) ->
emqx_authn_test_lib:delete_authenticators(
[authentication],
?GLOBAL
),
emqx_common_test_helpers:stop_apps([emqx_authn]),
application:stop(cowboy),
ok = emqx_cth_suite:stop(?config(apps, Config)),
ok.
init_per_testcase(_Case, Config) ->

View File

@ -39,18 +39,17 @@ all() ->
emqx_common_test_helpers:all(?MODULE).
init_per_suite(Config) ->
_ = application:load(emqx_conf),
emqx_common_test_helpers:start_apps([emqx_authn]),
application:ensure_all_started(cowboy),
Config.
Apps = emqx_cth_suite:start([cowboy, emqx, emqx_conf, emqx_authn], #{
work_dir => ?config(priv_dir, Config)
}),
[{apps, Apps} | Config].
end_per_suite(_) ->
end_per_suite(Config) ->
emqx_authn_test_lib:delete_authenticators(
[authentication],
?GLOBAL
),
emqx_common_test_helpers:stop_apps([emqx_authn]),
application:stop(cowboy),
ok = emqx_cth_suite:stop(?config(apps, Config)),
ok.
init_per_testcase(_Case, Config) ->

View File

@ -31,21 +31,14 @@
all() ->
emqx_common_test_helpers:all(?MODULE).
init_per_testcase(_, Config) ->
{ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000),
Config.
init_per_suite(Config) ->
_ = application:load(emqx_conf),
emqx_common_test_helpers:start_apps([emqx_conf, emqx_authn]),
application:ensure_all_started(emqx_resource),
application:ensure_all_started(emqx_connector),
Config.
Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn], #{
work_dir => ?config(priv_dir, Config)
}),
[{apps, Apps} | Config].
end_per_suite(_) ->
application:stop(emqx_connector),
application:stop(emqx_resource),
emqx_common_test_helpers:stop_apps([emqx_authn]),
end_per_suite(Config) ->
ok = emqx_cth_suite:stop(?config(apps, Config)),
ok.
%%------------------------------------------------------------------------------

View File

@ -0,0 +1,242 @@
%%--------------------------------------------------------------------
%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_authn_listeners_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
-include("emqx_authn.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
all() ->
emqx_common_test_helpers:all(?MODULE).
init_per_suite(Config) ->
Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn], #{
work_dir => ?config(priv_dir, Config)
}),
[{apps, Apps} | Config].
end_per_suite(Config) ->
ok = emqx_cth_suite:stop(?config(apps, Config)),
ok.
init_per_testcase(_Case, Config) ->
Port = emqx_common_test_helpers:select_free_port(tcp),
[{port, Port} | Config].
end_per_testcase(_Case, _Config) ->
ok.
t_create_update_delete(Config) ->
ListenerConf = listener_mqtt_tcp_conf(Config),
AuthnConfig0 = #{
<<"mechanism">> => <<"password_based">>,
<<"backend">> => <<"built_in_database">>,
<<"user_id_type">> => <<"clientid">>
},
%% Create
{ok, _} = emqx_conf:update(
[listeners],
#{
<<"tcp">> => #{
<<"listener0">> => ListenerConf#{
?CONF_NS_BINARY => AuthnConfig0
}
}
},
#{}
),
?assertMatch(
{ok, [
#{
authenticators := [
#{
id := <<"password_based:built_in_database">>,
state := #{
user_id_type := clientid
}
}
],
name := 'tcp:listener0'
}
]},
emqx_authentication:list_chains()
),
%% Drop old, create new
{ok, _} = emqx_conf:update(
[listeners],
#{
<<"tcp">> => #{
<<"listener1">> => ListenerConf#{
?CONF_NS_BINARY => AuthnConfig0
}
}
},
#{}
),
?assertMatch(
{ok, [
#{
authenticators := [
#{
id := <<"password_based:built_in_database">>,
state := #{
user_id_type := clientid
}
}
],
name := 'tcp:listener1'
}
]},
emqx_authentication:list_chains()
),
%% Update
{ok, _} = emqx_conf:update(
[listeners],
#{
<<"tcp">> => #{
<<"listener1">> => ListenerConf#{
?CONF_NS_BINARY => AuthnConfig0#{<<"user_id_type">> => <<"username">>}
}
}
},
#{}
),
?assertMatch(
{ok, [
#{
authenticators := [
#{
id := <<"password_based:built_in_database">>,
state := #{
user_id_type := username
}
}
],
name := 'tcp:listener1'
}
]},
emqx_authentication:list_chains()
),
%% Update by listener path
{ok, _} = emqx_conf:update(
[listeners, tcp, listener1],
{update, ListenerConf#{
?CONF_NS_BINARY => AuthnConfig0#{<<"user_id_type">> => <<"clientid">>}
}},
#{}
),
?assertMatch(
{ok, [
#{
authenticators := [
#{
id := <<"password_based:built_in_database">>,
state := #{
user_id_type := clientid
}
}
],
name := 'tcp:listener1'
}
]},
emqx_authentication:list_chains()
),
%% Delete
{ok, _} = emqx_conf:tombstone(
[listeners, tcp, listener1],
#{}
),
?assertMatch(
{ok, []},
emqx_authentication:list_chains()
).
t_convert_certs(Config) ->
ListenerConf = listener_mqtt_tcp_conf(Config),
AuthnConfig0 = #{
<<"mechanism">> => <<"password_based">>,
<<"password_hash_algorithm">> => #{
<<"name">> => <<"plain">>,
<<"salt_position">> => <<"suffix">>
},
<<"enable">> => <<"true">>,
<<"backend">> => <<"redis">>,
<<"cmd">> => <<"HMGET mqtt_user:${username} password_hash salt is_superuser">>,
<<"database">> => <<"1">>,
<<"password">> => <<"public">>,
<<"server">> => <<"127.0.0.1:55555">>,
<<"redis_type">> => <<"single">>,
<<"ssl">> => #{
<<"enable">> => true,
<<"cacertfile">> => some_pem(),
<<"certfile">> => some_pem(),
<<"keyfile">> => some_pem()
}
},
{ok, _} = emqx_conf:update(
[listeners],
#{
<<"tcp">> => #{
<<"listener0">> => ListenerConf#{
?CONF_NS_BINARY => AuthnConfig0
}
}
},
#{}
),
lists:foreach(
fun(Key) ->
[#{ssl := #{Key := FilePath}}] = emqx_config:get([
listeners, tcp, listener0, authentication
]),
?assert(filelib:is_regular(FilePath))
end,
[cacertfile, certfile, keyfile]
).
%%--------------------------------------------------------------------
%% Helper Functions
%%--------------------------------------------------------------------
listener_mqtt_tcp_conf(Config) ->
Port = ?config(port, Config),
PortS = integer_to_binary(Port),
#{
<<"acceptors">> => 16,
<<"access_rules">> => [<<"allow all">>],
<<"bind">> => <<"0.0.0.0:", PortS/binary>>,
<<"max_connections">> => 1024000,
<<"mountpoint">> => <<>>,
<<"proxy_protocol">> => false,
<<"proxy_protocol_timeout">> => <<"3s">>,
<<"enable_authn">> => true
}.
some_pem() ->
Dir = code:lib_dir(emqx_authn, test),
Path = filename:join([Dir, "data", "private_key.pem"]),
{ok, Pem} = file:read_file(Path),
Pem.

View File

@ -20,8 +20,7 @@
-compile(nowarn_export_all).
-include_lib("eunit/include/eunit.hrl").
-include("emqx_authn.hrl").
-include_lib("common_test/include/ct.hrl").
-define(AUTHN_ID, <<"mechanism:backend">>).
@ -29,16 +28,16 @@ all() ->
emqx_common_test_helpers:all(?MODULE).
init_per_suite(Config) ->
_ = application:load(emqx_conf),
emqx_common_test_helpers:start_apps([emqx_authn]),
Config.
Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn], #{
work_dir => ?config(priv_dir, Config)
}),
[{apps, Apps} | Config].
end_per_suite(_) ->
emqx_common_test_helpers:stop_apps([emqx_authn]),
end_per_suite(Config) ->
ok = emqx_cth_suite:stop(?config(apps, Config)),
ok.
init_per_testcase(_Case, Config) ->
{ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000),
mria:clear_table(emqx_authn_mnesia),
Config.

View File

@ -33,7 +33,6 @@ all() ->
emqx_common_test_helpers:all(?MODULE).
init_per_testcase(_TestCase, Config) ->
{ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000),
emqx_authentication:initialize_authentication(?GLOBAL, []),
emqx_authn_test_lib:delete_authenticators(
[authentication],
@ -46,23 +45,23 @@ end_per_testcase(_TestCase, _Config) ->
ok = mc_worker_api:disconnect(?MONGO_CLIENT).
init_per_suite(Config) ->
_ = application:load(emqx_conf),
case emqx_common_test_helpers:is_tcp_server_available(?MONGO_HOST, ?MONGO_DEFAULT_PORT) of
true ->
ok = emqx_common_test_helpers:start_apps([emqx_authn]),
ok = start_apps([emqx_resource]),
Config;
Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn], #{
work_dir => ?config(priv_dir, Config)
}),
[{apps, Apps} | Config];
false ->
{skip, no_mongo}
end.
end_per_suite(_Config) ->
end_per_suite(Config) ->
emqx_authn_test_lib:delete_authenticators(
[authentication],
?GLOBAL
),
ok = stop_apps([emqx_resource]),
ok = emqx_common_test_helpers:stop_apps([emqx_authn]).
ok = emqx_cth_suite:stop(?config(apps, Config)),
ok.
%%------------------------------------------------------------------------------
%% Tests

View File

@ -33,7 +33,6 @@ all() ->
emqx_common_test_helpers:all(?MODULE).
init_per_testcase(_TestCase, Config) ->
{ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000),
emqx_authentication:initialize_authentication(?GLOBAL, []),
emqx_authn_test_lib:delete_authenticators(
[authentication],
@ -42,23 +41,23 @@ init_per_testcase(_TestCase, Config) ->
Config.
init_per_suite(Config) ->
_ = application:load(emqx_conf),
case emqx_common_test_helpers:is_tcp_server_available(?MONGO_HOST, ?MONGO_DEFAULT_PORT) of
true ->
ok = emqx_common_test_helpers:start_apps([emqx_authn]),
ok = start_apps([emqx_resource]),
Config;
Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn], #{
work_dir => ?config(priv_dir, Config)
}),
[{apps, Apps} | Config];
false ->
{skip, no_mongo}
end.
end_per_suite(_Config) ->
end_per_suite(Config) ->
emqx_authn_test_lib:delete_authenticators(
[authentication],
?GLOBAL
),
ok = stop_apps([emqx_resource]),
ok = emqx_common_test_helpers:stop_apps([emqx_authn]).
ok = emqx_cth_suite:stop(?config(apps, Config)),
ok.
%%------------------------------------------------------------------------------
%% Tests

View File

@ -37,7 +37,6 @@ groups() ->
[{require_seeds, [], [t_authenticate, t_update, t_destroy]}].
init_per_testcase(_, Config) ->
{ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000),
emqx_authentication:initialize_authentication(?GLOBAL, []),
emqx_authn_test_lib:delete_authenticators(
[authentication],
@ -54,11 +53,11 @@ end_per_group(require_seeds, Config) ->
Config.
init_per_suite(Config) ->
_ = application:load(emqx_conf),
case emqx_common_test_helpers:is_tcp_server_available(?MYSQL_HOST, ?MYSQL_DEFAULT_PORT) of
true ->
ok = emqx_common_test_helpers:start_apps([emqx_authn]),
ok = start_apps([emqx_resource]),
Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn], #{
work_dir => ?config(priv_dir, Config)
}),
{ok, _} = emqx_resource:create_local(
?MYSQL_RESOURCE,
?RESOURCE_GROUP,
@ -66,19 +65,19 @@ init_per_suite(Config) ->
mysql_config(),
#{}
),
Config;
[{apps, Apps} | Config];
false ->
{skip, no_mysql}
end.
end_per_suite(_Config) ->
end_per_suite(Config) ->
emqx_authn_test_lib:delete_authenticators(
[authentication],
?GLOBAL
),
ok = emqx_resource:remove_local(?MYSQL_RESOURCE),
ok = stop_apps([emqx_resource]),
ok = emqx_common_test_helpers:stop_apps([emqx_authn]).
ok = emqx_cth_suite:stop(?config(apps, Config)),
ok.
%%------------------------------------------------------------------------------
%% Tests

View File

@ -36,7 +36,6 @@ groups() ->
[].
init_per_testcase(_, Config) ->
{ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000),
emqx_authentication:initialize_authentication(?GLOBAL, []),
emqx_authn_test_lib:delete_authenticators(
[authentication],
@ -45,23 +44,23 @@ init_per_testcase(_, Config) ->
Config.
init_per_suite(Config) ->
_ = application:load(emqx_conf),
case emqx_common_test_helpers:is_tcp_server_available(?MYSQL_HOST, ?MYSQL_DEFAULT_PORT) of
true ->
ok = emqx_common_test_helpers:start_apps([emqx_authn]),
ok = start_apps([emqx_resource]),
Config;
Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn], #{
work_dir => ?config(priv_dir, Config)
}),
[{apps, Apps} | Config];
false ->
{skip, no_mysql_tls}
end.
end_per_suite(_Config) ->
end_per_suite(Config) ->
emqx_authn_test_lib:delete_authenticators(
[authentication],
?GLOBAL
),
ok = stop_apps([emqx_resource]),
ok = emqx_common_test_helpers:stop_apps([emqx_authn]).
ok = emqx_cth_suite:stop(?config(apps, Config)),
ok.
%%------------------------------------------------------------------------------
%% Tests

View File

@ -23,7 +23,6 @@
-include_lib("emqx_authn/include/emqx_authn.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
-include_lib("emqx/include/emqx_placeholder.hrl").
-define(PGSQL_HOST, "pgsql").
-define(PGSQL_RESOURCE, <<"emqx_authn_pgsql_SUITE">>).
@ -42,7 +41,6 @@ groups() ->
[{require_seeds, [], [t_create, t_authenticate, t_update, t_destroy, t_is_superuser]}].
init_per_testcase(_, Config) ->
{ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000),
emqx_authentication:initialize_authentication(?GLOBAL, []),
emqx_authn_test_lib:delete_authenticators(
[authentication],
@ -59,11 +57,11 @@ end_per_group(require_seeds, Config) ->
Config.
init_per_suite(Config) ->
_ = application:load(emqx_conf),
case emqx_common_test_helpers:is_tcp_server_available(?PGSQL_HOST, ?PGSQL_DEFAULT_PORT) of
true ->
ok = emqx_common_test_helpers:start_apps([emqx_authn]),
ok = start_apps([emqx_resource]),
Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn], #{
work_dir => ?config(priv_dir, Config)
}),
{ok, _} = emqx_resource:create_local(
?PGSQL_RESOURCE,
?RESOURCE_GROUP,
@ -71,19 +69,19 @@ init_per_suite(Config) ->
pgsql_config(),
#{}
),
Config;
[{apps, Apps} | Config];
false ->
{skip, no_pgsql}
end.
end_per_suite(_Config) ->
end_per_suite(Config) ->
emqx_authn_test_lib:delete_authenticators(
[authentication],
?GLOBAL
),
ok = emqx_resource:remove_local(?PGSQL_RESOURCE),
ok = stop_apps([emqx_resource]),
ok = emqx_common_test_helpers:stop_apps([emqx_authn]).
ok = emqx_cth_suite:stop(?config(apps, Config)),
ok.
%%------------------------------------------------------------------------------
%% Tests

View File

@ -48,20 +48,21 @@ init_per_suite(Config) ->
_ = application:load(emqx_conf),
case emqx_common_test_helpers:is_tcp_server_available(?PGSQL_HOST, ?PGSQL_DEFAULT_PORT) of
true ->
ok = emqx_common_test_helpers:start_apps([emqx_authn]),
ok = start_apps([emqx_resource]),
Config;
Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn], #{
work_dir => ?config(priv_dir, Config)
}),
[{apps, Apps} | Config];
false ->
{skip, no_pgsql_tls}
end.
end_per_suite(_Config) ->
end_per_suite(Config) ->
emqx_authn_test_lib:delete_authenticators(
[authentication],
?GLOBAL
),
ok = stop_apps([emqx_resource]),
ok = emqx_common_test_helpers:stop_apps([emqx_authn]).
ok = emqx_cth_suite:stop(?config(apps, Config)),
ok.
%%------------------------------------------------------------------------------
%% Tests

View File

@ -42,7 +42,6 @@ groups() ->
[{require_seeds, [], [t_authenticate, t_update, t_destroy]}].
init_per_testcase(_, Config) ->
{ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000),
emqx_authentication:initialize_authentication(?GLOBAL, []),
emqx_authn_test_lib:delete_authenticators(
[authentication],
@ -59,11 +58,11 @@ end_per_group(require_seeds, Config) ->
Config.
init_per_suite(Config) ->
_ = application:load(emqx_conf),
case emqx_common_test_helpers:is_tcp_server_available(?REDIS_HOST, ?REDIS_DEFAULT_PORT) of
true ->
ok = emqx_common_test_helpers:start_apps([emqx_authn]),
ok = start_apps([emqx_resource]),
Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn], #{
work_dir => ?config(priv_dir, Config)
}),
{ok, _} = emqx_resource:create_local(
?REDIS_RESOURCE,
?RESOURCE_GROUP,
@ -71,19 +70,19 @@ init_per_suite(Config) ->
redis_config(),
#{}
),
Config;
[{apps, Apps} | Config];
false ->
{skip, no_redis}
end.
end_per_suite(_Config) ->
end_per_suite(Config) ->
emqx_authn_test_lib:delete_authenticators(
[authentication],
?GLOBAL
),
ok = emqx_resource:remove_local(?REDIS_RESOURCE),
ok = stop_apps([emqx_resource]),
ok = emqx_common_test_helpers:stop_apps([emqx_authn]).
ok = emqx_cth_suite:stop(?config(apps, Config)),
ok.
%%------------------------------------------------------------------------------
%% Tests

View File

@ -19,7 +19,6 @@
-compile(nowarn_export_all).
-compile(export_all).
-include_lib("emqx_connector/include/emqx_connector.hrl").
-include_lib("emqx_authn/include/emqx_authn.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
@ -36,7 +35,6 @@ groups() ->
[].
init_per_testcase(_, Config) ->
{ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000),
emqx_authentication:initialize_authentication(?GLOBAL, []),
emqx_authn_test_lib:delete_authenticators(
[authentication],
@ -45,23 +43,23 @@ init_per_testcase(_, Config) ->
Config.
init_per_suite(Config) ->
_ = application:load(emqx_conf),
case emqx_common_test_helpers:is_tcp_server_available(?REDIS_HOST, ?REDIS_TLS_PORT) of
true ->
ok = emqx_common_test_helpers:start_apps([emqx_authn]),
ok = start_apps([emqx_resource]),
Config;
Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn], #{
work_dir => ?config(priv_dir, Config)
}),
[{apps, Apps} | Config];
false ->
{skip, no_redis}
end.
end_per_suite(_Config) ->
end_per_suite(Config) ->
emqx_authn_test_lib:delete_authenticators(
[authentication],
?GLOBAL
),
ok = stop_apps([emqx_resource]),
ok = emqx_common_test_helpers:stop_apps([emqx_authn]).
ok = emqx_cth_suite:stop(?config(apps, Config)),
ok.
%%------------------------------------------------------------------------------
%% Tests

View File

@ -4,6 +4,7 @@
-compile(nowarn_export_all).
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
-include("emqx_authn.hrl").
@ -11,16 +12,16 @@ all() ->
emqx_common_test_helpers:all(?MODULE).
init_per_suite(Config) ->
_ = application:load(emqx_conf),
emqx_common_test_helpers:start_apps([emqx_authn]),
Config.
Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn], #{
work_dir => ?config(priv_dir, Config)
}),
[{apps, Apps} | Config].
end_per_suite(_) ->
emqx_common_test_helpers:stop_apps([emqx_authn]),
end_per_suite(Config) ->
ok = emqx_cth_suite:stop(?config(apps, Config)),
ok.
init_per_testcase(_Case, Config) ->
{ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000),
mria:clear_table(emqx_authn_mnesia),
Config.

View File

@ -36,17 +36,18 @@ all() ->
emqx_common_test_helpers:all(?MODULE).
init_per_suite(Config) ->
_ = application:load(emqx_conf),
ok = emqx_common_test_helpers:start_apps([emqx_authn]),
Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn], #{
work_dir => ?config(priv_dir, Config)
}),
IdleTimeout = emqx_config:get([mqtt, idle_timeout]),
[{idle_timeout, IdleTimeout} | Config].
[{apps, Apps}, {idle_timeout, IdleTimeout} | Config].
end_per_suite(Config) ->
ok = emqx_config:put([mqtt, idle_timeout], ?config(idle_timeout, Config)),
ok = emqx_common_test_helpers:stop_apps([emqx_authn]).
ok = emqx_cth_suite:stop(?config(apps, Config)),
ok.
init_per_testcase(_Case, Config) ->
{ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000),
mria:clear_table(emqx_enhanced_authn_scram_mnesia),
emqx_authn_test_lib:delete_authenticators(
[authentication],

Some files were not shown because too many files have changed in this diff.