diff --git a/.ci/docker-compose-file/.env b/.ci/docker-compose-file/.env index b7033caae..e65600f06 100644 --- a/.ci/docker-compose-file/.env +++ b/.ci/docker-compose-file/.env @@ -10,7 +10,7 @@ CASSANDRA_TAG=3.11.6 MINIO_TAG=RELEASE.2023-03-20T20-16-18Z OPENTS_TAG=9aa7f88 KINESIS_TAG=2.1 -HSTREAMDB_TAG=v0.15.0 +HSTREAMDB_TAG=v0.16.1 HSTREAMDB_ZK_TAG=3.8.1 MS_IMAGE_ADDR=mcr.microsoft.com/mssql/server diff --git a/.ci/docker-compose-file/docker-compose-ldap.yaml b/.ci/docker-compose-file/docker-compose-ldap.yaml index e6c8ba2d8..f92df47a0 100644 --- a/.ci/docker-compose-file/docker-compose-ldap.yaml +++ b/.ci/docker-compose-file/docker-compose-ldap.yaml @@ -11,6 +11,8 @@ services: image: openldap #ports: # - 389:389 + volumes: + - ./certs/ca.crt:/etc/certs/ca.crt restart: always networks: - emqx_bridge diff --git a/.ci/docker-compose-file/docker-compose-redis-single-tcp.yaml b/.ci/docker-compose-file/docker-compose-redis-single-tcp.yaml index 6706fe84f..ec7283219 100644 --- a/.ci/docker-compose-file/docker-compose-redis-single-tcp.yaml +++ b/.ci/docker-compose-file/docker-compose-redis-single-tcp.yaml @@ -4,12 +4,11 @@ services: redis_server: container_name: redis image: redis:${REDIS_TAG} + volumes: + - ./redis/single-tcp:/usr/local/etc/redis/ ports: - "6379:6379" - command: - - redis-server - - "--bind 0.0.0.0 ::" - - --requirepass public + command: redis-server /usr/local/etc/redis/redis.conf restart: always networks: - emqx_bridge diff --git a/.ci/docker-compose-file/docker-compose-redis-single-tls.yaml b/.ci/docker-compose-file/docker-compose-redis-single-tls.yaml index 8f59e7a9e..2ea36affd 100644 --- a/.ci/docker-compose-file/docker-compose-redis-single-tls.yaml +++ b/.ci/docker-compose-file/docker-compose-redis-single-tls.yaml @@ -8,18 +8,10 @@ services: - ./certs/server.crt:/etc/certs/redis.crt - ./certs/server.key:/etc/certs/redis.key - ./certs/ca.crt:/etc/certs/ca.crt + - ./redis/single-tls:/usr/local/etc/redis ports: - "6380:6380" - command: - - 
redis-server - - "--bind 0.0.0.0 ::" - - --requirepass public - - --tls-port 6380 - - --tls-cert-file /etc/certs/redis.crt - - --tls-key-file /etc/certs/redis.key - - --tls-ca-cert-file /etc/certs/ca.crt - - --tls-protocols "TLSv1.3" - - --tls-ciphersuites "TLS_CHACHA20_POLY1305_SHA256" + command: redis-server /usr/local/etc/redis/redis.conf restart: always networks: emqx_bridge: diff --git a/.ci/docker-compose-file/redis/cluster-tcp/redis.conf b/.ci/docker-compose-file/redis/cluster-tcp/redis.conf index 79a0d8a73..6930bde1c 100644 --- a/.ci/docker-compose-file/redis/cluster-tcp/redis.conf +++ b/.ci/docker-compose-file/redis/cluster-tcp/redis.conf @@ -1,10 +1,11 @@ bind :: 0.0.0.0 port 6379 -requirepass public cluster-enabled yes +masteruser default masterauth public +aclfile /usr/local/etc/redis/users.acl protected-mode no daemonize no diff --git a/.ci/docker-compose-file/redis/cluster-tcp/users.acl b/.ci/docker-compose-file/redis/cluster-tcp/users.acl new file mode 100644 index 000000000..5bafe9f6d --- /dev/null +++ b/.ci/docker-compose-file/redis/cluster-tcp/users.acl @@ -0,0 +1,2 @@ +user default on >public ~* &* +@all +user test_user on >test_passwd ~* &* +@all diff --git a/.ci/docker-compose-file/redis/cluster-tls/redis.conf b/.ci/docker-compose-file/redis/cluster-tls/redis.conf index 3020f46a7..5d203de80 100644 --- a/.ci/docker-compose-file/redis/cluster-tls/redis.conf +++ b/.ci/docker-compose-file/redis/cluster-tls/redis.conf @@ -1,10 +1,11 @@ bind :: 0.0.0.0 port 6379 -requirepass public cluster-enabled yes +masteruser default masterauth public +aclfile /usr/local/etc/redis/users.acl tls-port 6389 tls-cert-file /etc/certs/cert.pem diff --git a/.ci/docker-compose-file/redis/cluster-tls/users.acl b/.ci/docker-compose-file/redis/cluster-tls/users.acl new file mode 100644 index 000000000..5bafe9f6d --- /dev/null +++ b/.ci/docker-compose-file/redis/cluster-tls/users.acl @@ -0,0 +1,2 @@ +user default on >public ~* &* +@all +user test_user on >test_passwd ~* &* 
+@all diff --git a/.ci/docker-compose-file/redis/sentinel-tcp/master.conf b/.ci/docker-compose-file/redis/sentinel-tcp/master.conf index 25940c887..a531d1b40 100644 --- a/.ci/docker-compose-file/redis/sentinel-tcp/master.conf +++ b/.ci/docker-compose-file/redis/sentinel-tcp/master.conf @@ -1,6 +1,6 @@ bind :: 0.0.0.0 port 6379 -requirepass public +aclfile /usr/local/etc/redis/users.acl protected-mode no daemonize no diff --git a/.ci/docker-compose-file/redis/sentinel-tcp/slave.conf b/.ci/docker-compose-file/redis/sentinel-tcp/slave.conf index 2c61aeb6c..4a7e240fc 100644 --- a/.ci/docker-compose-file/redis/sentinel-tcp/slave.conf +++ b/.ci/docker-compose-file/redis/sentinel-tcp/slave.conf @@ -1,9 +1,10 @@ bind :: 0.0.0.0 port 6379 -requirepass public replicaof redis-sentinel-master 6379 +masteruser default masterauth public +aclfile /usr/local/etc/redis/users.acl protected-mode no daemonize no diff --git a/.ci/docker-compose-file/redis/sentinel-tcp/users.acl b/.ci/docker-compose-file/redis/sentinel-tcp/users.acl new file mode 100644 index 000000000..5bafe9f6d --- /dev/null +++ b/.ci/docker-compose-file/redis/sentinel-tcp/users.acl @@ -0,0 +1,2 @@ +user default on >public ~* &* +@all +user test_user on >test_passwd ~* &* +@all diff --git a/.ci/docker-compose-file/redis/sentinel-tls/master.conf b/.ci/docker-compose-file/redis/sentinel-tls/master.conf index f55433f79..68e01f323 100644 --- a/.ci/docker-compose-file/redis/sentinel-tls/master.conf +++ b/.ci/docker-compose-file/redis/sentinel-tls/master.conf @@ -1,6 +1,6 @@ bind :: 0.0.0.0 port 6379 -requirepass public +aclfile /usr/local/etc/redis/users.acl tls-port 6389 tls-cert-file /etc/certs/cert.pem diff --git a/.ci/docker-compose-file/redis/sentinel-tls/slave.conf b/.ci/docker-compose-file/redis/sentinel-tls/slave.conf index d8758da51..25102d5ed 100644 --- a/.ci/docker-compose-file/redis/sentinel-tls/slave.conf +++ b/.ci/docker-compose-file/redis/sentinel-tls/slave.conf @@ -1,9 +1,10 @@ bind :: 0.0.0.0 port 6379 
-requirepass public replicaof redis-sentinel-tls-master 6389 +masteruser default masterauth public +aclfile /usr/local/etc/redis/users.acl tls-port 6389 tls-replication yes diff --git a/.ci/docker-compose-file/redis/sentinel-tls/users.acl b/.ci/docker-compose-file/redis/sentinel-tls/users.acl new file mode 100644 index 000000000..5bafe9f6d --- /dev/null +++ b/.ci/docker-compose-file/redis/sentinel-tls/users.acl @@ -0,0 +1,2 @@ +user default on >public ~* &* +@all +user test_user on >test_passwd ~* &* +@all diff --git a/.ci/docker-compose-file/redis/single-tcp/redis.conf b/.ci/docker-compose-file/redis/single-tcp/redis.conf new file mode 100644 index 000000000..1e5f629d2 --- /dev/null +++ b/.ci/docker-compose-file/redis/single-tcp/redis.conf @@ -0,0 +1,3 @@ +bind :: 0.0.0.0 +port 6379 +aclfile /usr/local/etc/redis/users.acl diff --git a/.ci/docker-compose-file/redis/single-tcp/users.acl b/.ci/docker-compose-file/redis/single-tcp/users.acl new file mode 100644 index 000000000..5bafe9f6d --- /dev/null +++ b/.ci/docker-compose-file/redis/single-tcp/users.acl @@ -0,0 +1,2 @@ +user default on >public ~* &* +@all +user test_user on >test_passwd ~* &* +@all diff --git a/.ci/docker-compose-file/redis/single-tls/redis.conf b/.ci/docker-compose-file/redis/single-tls/redis.conf new file mode 100644 index 000000000..e2d40bca6 --- /dev/null +++ b/.ci/docker-compose-file/redis/single-tls/redis.conf @@ -0,0 +1,9 @@ +bind :: 0.0.0.0 +aclfile /usr/local/etc/redis/users.acl + +tls-port 6380 +tls-cert-file /etc/certs/redis.crt +tls-key-file /etc/certs/redis.key +tls-ca-cert-file /etc/certs/ca.crt +tls-protocols "TLSv1.3" +tls-ciphersuites "TLS_CHACHA20_POLY1305_SHA256" diff --git a/.ci/docker-compose-file/redis/single-tls/users.acl b/.ci/docker-compose-file/redis/single-tls/users.acl new file mode 100644 index 000000000..5bafe9f6d --- /dev/null +++ b/.ci/docker-compose-file/redis/single-tls/users.acl @@ -0,0 +1,2 @@ +user default on >public ~* &* +@all +user test_user on >test_passwd 
~* &* +@all diff --git a/.ci/docker-compose-file/toxiproxy.json b/.ci/docker-compose-file/toxiproxy.json index f5df5a853..4b2b6ccf2 100644 --- a/.ci/docker-compose-file/toxiproxy.json +++ b/.ci/docker-compose-file/toxiproxy.json @@ -179,5 +179,17 @@ "listen": "0.0.0.0:4566", "upstream": "kinesis:4566", "enabled": true + }, + { + "name": "ldap_tcp", + "listen": "0.0.0.0:389", + "upstream": "ldap:389", + "enabled": true + }, + { + "name": "ldap_ssl", + "listen": "0.0.0.0:636", + "upstream": "ldap:636", + "enabled": true } ] diff --git a/.github/workflows/build_docker_for_test.yaml b/.github/workflows/build_docker_for_test.yaml index 548d5e2cd..758cdd1cf 100644 --- a/.github/workflows/build_docker_for_test.yaml +++ b/.github/workflows/build_docker_for_test.yaml @@ -25,7 +25,7 @@ jobs: runs-on: ubuntu-latest env: EMQX_NAME: ${{ matrix.profile }} - PKG_VSN: ${{ matrix.profile == 'emqx-enterprise' && inputs.version-emqx-enterprise || inputs.version-emqx }} + PKG_VSN: ${{ startsWith(matrix.profile, 'emqx-enterprise') && inputs.version-emqx-enterprise || inputs.version-emqx }} OTP_VSN: ${{ inputs.otp_vsn }} ELIXIR_VSN: ${{ inputs.elixir_vsn }} @@ -36,6 +36,7 @@ jobs: - emqx - emqx-enterprise - emqx-elixir + - emqx-enterprise-elixir steps: - uses: actions/checkout@v3 @@ -58,4 +59,3 @@ jobs: name: "${{ env.EMQX_NAME }}-docker" path: "${{ env.EMQX_NAME }}-docker-${{ env.PKG_VSN }}.tar.gz" retention-days: 3 - diff --git a/Makefile b/Makefile index 435e4958a..93b059922 100644 --- a/Makefile +++ b/Makefile @@ -16,7 +16,7 @@ endif # Dashboard version # from https://github.com/emqx/emqx-dashboard5 export EMQX_DASHBOARD_VERSION ?= v1.3.2 -export EMQX_EE_DASHBOARD_VERSION ?= e1.2.0-beta.4 +export EMQX_EE_DASHBOARD_VERSION ?= e1.2.0-beta.9 # `:=` should be used here, otherwise the `$(shell ...)` will be executed every time when the variable is used # In make 4.4+, for backward-compatibility the value from the original environment is used. 
diff --git a/apps/emqx/include/emqx.hrl b/apps/emqx/include/emqx.hrl index e0d1685e8..bc1d66ca2 100644 --- a/apps/emqx/include/emqx.hrl +++ b/apps/emqx/include/emqx.hrl @@ -122,20 +122,4 @@ until :: integer() }). -%%-------------------------------------------------------------------- -%% Authentication -%%-------------------------------------------------------------------- - --record(authenticator, { - id :: binary(), - provider :: module(), - enable :: boolean(), - state :: map() -}). - --record(chain, { - name :: atom(), - authenticators :: [#authenticator{}] -}). - -endif. diff --git a/apps/emqx/include/emqx_access_control.hrl b/apps/emqx/include/emqx_access_control.hrl index e840d2b4a..65a159dd6 100644 --- a/apps/emqx/include/emqx_access_control.hrl +++ b/apps/emqx/include/emqx_access_control.hrl @@ -14,7 +14,9 @@ %% limitations under the License. %%-------------------------------------------------------------------- -%% config root name all auth providers have to agree on. +-ifndef(EMQX_ACCESS_CONTROL_HRL). +-define(EMQX_ACCESS_CONTROL_HRL, true). + -define(EMQX_AUTHORIZATION_CONFIG_ROOT_NAME, "authorization"). -define(EMQX_AUTHORIZATION_CONFIG_ROOT_NAME_ATOM, authorization). -define(EMQX_AUTHORIZATION_CONFIG_ROOT_NAME_BINARY, <<"authorization">>). @@ -32,3 +34,7 @@ -define(authz_action(PUBSUB, QOS), #{action_type := PUBSUB, qos := QOS}). -define(authz_action(PUBSUB), ?authz_action(PUBSUB, _)). -define(authz_action, ?authz_action(_)). + +-define(AUTHN_TRACE_TAG, "AUTHN"). + +-endif. diff --git a/apps/emqx/include/emqx_release.hrl b/apps/emqx/include/emqx_release.hrl index d66c7982a..6df3b7df7 100644 --- a/apps/emqx/include/emqx_release.hrl +++ b/apps/emqx/include/emqx_release.hrl @@ -35,7 +35,7 @@ -define(EMQX_RELEASE_CE, "5.1.6"). %% Enterprise edition --define(EMQX_RELEASE_EE, "5.2.0-alpha.3"). +-define(EMQX_RELEASE_EE, "5.2.0-alpha.4"). %% The HTTP API version -define(EMQX_API_VERSION, "5.0"). 
diff --git a/apps/emqx/include/emqx_router.hrl b/apps/emqx/include/emqx_router.hrl index 035ff5455..35a267aa7 100644 --- a/apps/emqx/include/emqx_router.hrl +++ b/apps/emqx/include/emqx_router.hrl @@ -17,8 +17,9 @@ -ifndef(EMQX_ROUTER_HRL). -define(EMQX_ROUTER_HRL, true). -%% ETS table for message routing +%% ETS tables for message routing -define(ROUTE_TAB, emqx_route). +-define(ROUTE_TAB_FILTERS, emqx_route_filters). %% Mnesia table for message routing -define(ROUTING_NODE, emqx_routing_node). diff --git a/apps/emqx/src/config/emqx_config_zones.erl b/apps/emqx/src/config/emqx_config_zones.erl index 57e2824ff..c367e2198 100644 --- a/apps/emqx/src/config/emqx_config_zones.erl +++ b/apps/emqx/src/config/emqx_config_zones.erl @@ -19,6 +19,7 @@ %% API -export([add_handler/0, remove_handler/0, pre_config_update/3]). +-export([is_olp_enabled/0]). -define(ZONES, [zones]). @@ -33,3 +34,13 @@ remove_handler() -> %% replace the old config with the new config pre_config_update(?ZONES, NewRaw, _OldRaw) -> {ok, NewRaw}. + +is_olp_enabled() -> + maps:fold( + fun + (_, #{overload_protection := #{enable := true}}, _Acc) -> true; + (_, _, Acc) -> Acc + end, + false, + emqx_config:get([zones], #{}) + ). diff --git a/apps/emqx/src/emqx_access_control.erl b/apps/emqx/src/emqx_access_control.erl index 43669bf6c..82604710a 100644 --- a/apps/emqx/src/emqx_access_control.erl +++ b/apps/emqx/src/emqx_access_control.erl @@ -17,6 +17,7 @@ -module(emqx_access_control). -include("emqx.hrl"). +-include("emqx_access_control.hrl"). -include("logger.hrl"). -export([ @@ -29,6 +30,14 @@ -compile(nowarn_export_all). -endif. +-define(TRACE_RESULT(Label, Result, Reason), begin + ?TRACE(Label, ?AUTHN_TRACE_TAG, #{ + result => (Result), + reason => (Reason) + }), + Result +end). 
+ %%-------------------------------------------------------------------- %% APIs %%-------------------------------------------------------------------- @@ -44,7 +53,7 @@ authenticate(Credential) -> %% if auth backend returning nothing but just 'ok' %% it means it's not a superuser, or there is no way to tell. NotSuperUser = #{is_superuser => false}, - case emqx_authentication:pre_hook_authenticate(Credential) of + case pre_hook_authenticate(Credential) of ok -> inc_authn_metrics(anonymous), {ok, NotSuperUser}; @@ -99,6 +108,29 @@ authorize(ClientInfo, Action, Topic) -> inc_authz_metrics(Result), Result. +%%-------------------------------------------------------------------- +%% Internal Functions +%%-------------------------------------------------------------------- + +-spec pre_hook_authenticate(emqx_types:clientinfo()) -> + ok | continue | {error, not_authorized}. +pre_hook_authenticate(#{enable_authn := false}) -> + ?TRACE_RESULT("pre_hook_authenticate", ok, enable_authn_false); +pre_hook_authenticate(#{enable_authn := quick_deny_anonymous} = Credential) -> + case is_username_defined(Credential) of + true -> + continue; + false -> + ?TRACE_RESULT("pre_hook_authenticate", {error, not_authorized}, enable_authn_false) + end; +pre_hook_authenticate(_) -> + continue. + +is_username_defined(#{username := undefined}) -> false; +is_username_defined(#{username := <<>>}) -> false; +is_username_defined(#{username := _Username}) -> true; +is_username_defined(_) -> false. + check_authorization_cache(ClientInfo, Action, Topic) -> case emqx_authz_cache:get_authz_cache(Action, Topic) of not_found -> diff --git a/apps/emqx/src/emqx_app.erl b/apps/emqx/src/emqx_app.erl index 59a397836..0f4987085 100644 --- a/apps/emqx/src/emqx_app.erl +++ b/apps/emqx/src/emqx_app.erl @@ -55,7 +55,9 @@ prep_stop(_State) -> emqx_boot:is_enabled(listeners) andalso emqx_listeners:stop(). -stop(_State) -> ok. +stop(_State) -> + ok = emqx_router:deinit_schema(), + ok. 
-define(CONFIG_LOADER, config_loader). -define(DEFAULT_LOADER, emqx). diff --git a/apps/emqx/src/emqx_broker_sup.erl b/apps/emqx/src/emqx_broker_sup.erl index ac2fe587c..74baf5674 100644 --- a/apps/emqx/src/emqx_broker_sup.erl +++ b/apps/emqx/src/emqx_broker_sup.erl @@ -49,16 +49,6 @@ init([]) -> modules => [emqx_shared_sub] }, - %% Authentication - AuthNSup = #{ - id => emqx_authentication_sup, - start => {emqx_authentication_sup, start_link, []}, - restart => permanent, - shutdown => infinity, - type => supervisor, - modules => [emqx_authentication_sup] - }, - %% Broker helper Helper = #{ id => helper, @@ -69,4 +59,4 @@ init([]) -> modules => [emqx_broker_helper] }, - {ok, {{one_for_all, 0, 1}, [BrokerPool, SharedSub, AuthNSup, Helper]}}. + {ok, {{one_for_all, 0, 1}, [BrokerPool, SharedSub, Helper]}}. diff --git a/apps/emqx/src/emqx_channel.erl b/apps/emqx/src/emqx_channel.erl index d5941c8e9..2b20ee709 100644 --- a/apps/emqx/src/emqx_channel.erl +++ b/apps/emqx/src/emqx_channel.erl @@ -2228,6 +2228,7 @@ disconnect_and_shutdown(Reason, Reply, Channel) -> NChannel = ensure_disconnected(Reason, Channel), shutdown(Reason, Reply, NChannel). +-compile({inline, [sp/1, flag/1]}). sp(true) -> 1; sp(false) -> 0. diff --git a/apps/emqx/src/emqx_cm.erl b/apps/emqx/src/emqx_cm.erl index b46a140c8..e3c126629 100644 --- a/apps/emqx/src/emqx_cm.erl +++ b/apps/emqx/src/emqx_cm.erl @@ -36,8 +36,6 @@ insert_channel_info/3 ]). --export([connection_closed/1]). - -export([ get_chan_info/1, get_chan_info/2, @@ -194,14 +192,6 @@ do_unregister_channel({_ClientId, ChanPid} = Chan) -> ok = emqx_hooks:run('cm.channel.unregistered', [ChanPid]), true. --spec connection_closed(emqx_types:clientid()) -> true. -connection_closed(ClientId) -> - connection_closed(ClientId, self()). - --spec connection_closed(emqx_types:clientid(), chan_pid()) -> true. -connection_closed(ClientId, ChanPid) -> - ets:delete_object(?CHAN_CONN_TAB, {ClientId, ChanPid}). - %% @doc Get info of a channel. 
-spec get_chan_info(emqx_types:clientid()) -> maybe(emqx_types:infos()). get_chan_info(ClientId) -> diff --git a/apps/emqx/src/emqx_config_handler.erl b/apps/emqx/src/emqx_config_handler.erl index 96690c26e..a189fc9e5 100644 --- a/apps/emqx/src/emqx_config_handler.erl +++ b/apps/emqx/src/emqx_config_handler.erl @@ -53,11 +53,17 @@ -optional_callbacks([ pre_config_update/3, - post_config_update/5 + propagated_pre_config_update/3, + post_config_update/5, + propagated_post_config_update/5 ]). -callback pre_config_update([atom()], emqx_config:update_request(), emqx_config:raw_config()) -> - {ok, emqx_config:update_request()} | {error, term()}. + ok | {ok, emqx_config:update_request()} | {error, term()}. +-callback propagated_pre_config_update( + [binary()], emqx_config:update_request(), emqx_config:raw_config() +) -> + ok | {ok, emqx_config:update_request()} | {error, term()}. -callback post_config_update( [atom()], @@ -68,6 +74,15 @@ ) -> ok | {ok, Result :: any()} | {error, Reason :: term()}. +-callback propagated_post_config_update( + [atom()], + emqx_config:update_request(), + emqx_config:config(), + emqx_config:config(), + emqx_config:app_envs() +) -> + ok | {ok, Result :: any()} | {error, Reason :: term()}. + -type state() :: #{handlers := any()}. start_link() -> @@ -244,7 +259,14 @@ do_update_config(ConfKeyPath, Handlers, OldRawConf, UpdateReq) -> do_update_config(ConfKeyPath, Handlers, OldRawConf, UpdateReq, []). 
do_update_config([], Handlers, OldRawConf, UpdateReq, ConfKeyPath) -> - call_pre_config_update(Handlers, OldRawConf, UpdateReq, ConfKeyPath); + call_pre_config_update(#{ + handlers => Handlers, + old_raw_conf => OldRawConf, + update_req => UpdateReq, + conf_key_path => ConfKeyPath, + callback => pre_config_update, + is_propagated => false + }); do_update_config( [ConfKey | SubConfKeyPath], Handlers, @@ -331,15 +353,16 @@ do_post_config_update( Result, ConfKeyPath ) -> - call_post_config_update( - Handlers, - OldConf, - NewConf, - AppEnvs, - up_req(UpdateArgs), - Result, - ConfKeyPath - ); + call_post_config_update(#{ + handlers => Handlers, + old_conf => OldConf, + new_conf => NewConf, + app_envs => AppEnvs, + update_req => up_req(UpdateArgs), + result => Result, + conf_key_path => ConfKeyPath, + callback => post_config_update + }); do_post_config_update( [ConfKey | SubConfKeyPath], Handlers, @@ -365,10 +388,16 @@ do_post_config_update( ConfKeyPath ). -get_sub_handlers(ConfKey, Handlers) -> +get_sub_handlers(ConfKey, Handlers) when is_atom(ConfKey) -> case maps:find(ConfKey, Handlers) of error -> maps:get(?WKEY, Handlers, #{}); {ok, SubHandlers} -> SubHandlers + end; +get_sub_handlers(ConfKey, Handlers) when is_binary(ConfKey) -> + ConcreteHandlerKeys = maps:keys(Handlers) -- [?MOD, ?WKEY], + case lists:search(fun(K) -> bin(K) =:= ConfKey end, ConcreteHandlerKeys) of + {value, Key} -> maps:get(Key, Handlers); + false -> maps:get(?WKEY, Handlers, #{}) end. get_sub_config(ConfKey, Conf) when is_map(Conf) -> @@ -377,57 +406,247 @@ get_sub_config(ConfKey, Conf) when is_map(Conf) -> get_sub_config(_, _Conf) -> undefined. 
-call_pre_config_update(#{?MOD := HandlerName}, OldRawConf, UpdateReq, ConfKeyPath) -> - case erlang:function_exported(HandlerName, pre_config_update, 3) of +call_pre_config_update(Ctx) -> + case call_proper_pre_config_update(Ctx) of + {ok, NewUpdateReq0} -> + case + propagate_pre_config_updates_to_subconf(Ctx#{ + update_req => NewUpdateReq0 + }) + of + {ok, #{update_req := NewUpdateReq1}} -> + {ok, NewUpdateReq1}; + {error, _} = Error -> + Error + end; + {error, _} = Error -> + Error + end. + +call_proper_pre_config_update( + #{ + handlers := #{?MOD := Module}, + callback := Callback, + update_req := UpdateReq, + old_raw_conf := OldRawConf + } = Ctx +) -> + case erlang:function_exported(Module, Callback, 3) of true -> - case HandlerName:pre_config_update(ConfKeyPath, UpdateReq, OldRawConf) of - {ok, NewUpdateReq} -> {ok, NewUpdateReq}; - {error, Reason} -> {error, {pre_config_update, HandlerName, Reason}} + case apply_pre_config_update(Module, Ctx) of + {ok, NewUpdateReq} -> + {ok, NewUpdateReq}; + ok -> + {ok, UpdateReq}; + {error, Reason} -> + {error, {pre_config_update, Module, Reason}} end; false -> merge_to_old_config(UpdateReq, OldRawConf) end; -call_pre_config_update(_Handlers, OldRawConf, UpdateReq, _ConfKeyPath) -> - merge_to_old_config(UpdateReq, OldRawConf). - -call_post_config_update( - #{?MOD := HandlerName}, - OldConf, - NewConf, - AppEnvs, - UpdateReq, - Result, - ConfKeyPath +call_proper_pre_config_update( + #{update_req := UpdateReq} ) -> - case erlang:function_exported(HandlerName, post_config_update, 5) of - true -> + {ok, UpdateReq}. + +apply_pre_config_update(Module, #{ + conf_key_path := ConfKeyPath, + update_req := UpdateReq, + old_raw_conf := OldRawConf, + callback := Callback +}) -> + Module:Callback( + ConfKeyPath, UpdateReq, OldRawConf + ). 
+ +propagate_pre_config_updates_to_subconf( + #{handlers := #{?WKEY := _}} = Ctx +) -> + propagate_pre_config_updates_to_subconf_wkey(Ctx); +propagate_pre_config_updates_to_subconf( + #{handlers := Handlers} = Ctx +) -> + Keys = maps:keys(maps:without([?MOD], Handlers)), + propagate_pre_config_updates_to_subconf_keys(Keys, Ctx). + +propagate_pre_config_updates_to_subconf_wkey( + #{ + update_req := UpdateReq, + old_raw_conf := OldRawConf + } = Ctx +) -> + Keys = propagate_keys(UpdateReq, OldRawConf), + propagate_pre_config_updates_to_subconf_keys(Keys, Ctx). + +propagate_pre_config_updates_to_subconf_keys([], Ctx) -> + {ok, Ctx}; +propagate_pre_config_updates_to_subconf_keys([Key | Keys], Ctx0) -> + case propagate_pre_config_updates_to_subconf_key(Key, Ctx0) of + {ok, Ctx1} -> + propagate_pre_config_updates_to_subconf_keys(Keys, Ctx1); + {error, _} = Error -> + Error + end. + +propagate_pre_config_updates_to_subconf_key( + Key, + #{ + handlers := Handlers, + old_raw_conf := OldRawConf, + update_req := UpdateReq, + conf_key_path := ConfKeyPath, + is_propagated := IsPropagated + } = Ctx +) -> + BinKey = bin(Key), + SubHandlers = get_sub_handlers(BinKey, Handlers), + SubUpdateReq = get_sub_config(BinKey, UpdateReq), + SubOldConf = get_sub_config(BinKey, OldRawConf), + SubConfKeyPath = + case IsPropagated of + true -> ConfKeyPath ++ [BinKey]; + false -> bin_path(ConfKeyPath) ++ [BinKey] + end, + case {SubOldConf, SubUpdateReq} of + %% we have handler, but no relevant keys in both configs (new and old), + %% so we don't need to go further + {undefined, undefined} -> + {ok, Ctx}; + {_, _} -> case - HandlerName:post_config_update( - ConfKeyPath, - UpdateReq, - NewConf, - OldConf, - AppEnvs - ) + call_pre_config_update(Ctx#{ + handlers := SubHandlers, + old_raw_conf := SubOldConf, + update_req := SubUpdateReq, + conf_key_path := SubConfKeyPath, + is_propagated := true, + callback := propagated_pre_config_update + }) of + {ok, SubNewConf1} -> + %% we update only if the new 
config is not to be removed + %% i.e. SubUpdateReq is not undefined + case SubUpdateReq of + undefined -> + {ok, Ctx}; + _ -> + {ok, Ctx#{ + update_req := maps:put(BinKey, SubNewConf1, UpdateReq) + }} + end; + {error, _} = Error -> + Error + end + end. + +call_post_config_update(#{handlers := Handlers} = Ctx) -> + case call_proper_post_config_update(Ctx) of + {ok, Result} -> + SubHandlers = maps:without([?MOD], Handlers), + propagate_post_config_updates_to_subconf(Ctx#{ + handlers := SubHandlers, + callback := propagated_post_config_update, + result := Result + }); + {error, _} = Error -> + Error + end. + +call_proper_post_config_update( + #{ + handlers := #{?MOD := Module}, + callback := Callback, + result := Result + } = Ctx +) -> + case erlang:function_exported(Module, Callback, 5) of + true -> + case apply_post_config_update(Module, Ctx) of ok -> {ok, Result}; - {ok, Result1} -> {ok, Result#{HandlerName => Result1}}; - {error, Reason} -> {error, {post_config_update, HandlerName, Reason}} + {ok, Result1} -> {ok, Result#{Module => Result1}}; + {error, Reason} -> {error, {post_config_update, Module, Reason}} end; false -> {ok, Result} end; -call_post_config_update( - _Handlers, - _OldConf, - _NewConf, - _AppEnvs, - _UpdateReq, - Result, - _ConfKeyPath +call_proper_post_config_update( + #{result := Result} = _Ctx ) -> {ok, Result}. +apply_post_config_update(Module, #{ + conf_key_path := ConfKeyPath, + update_req := UpdateReq, + new_conf := NewConf, + old_conf := OldConf, + app_envs := AppEnvs, + callback := Callback +}) -> + Module:Callback( + ConfKeyPath, + UpdateReq, + NewConf, + OldConf, + AppEnvs + ). + +propagate_post_config_updates_to_subconf( + #{handlers := #{?WKEY := _}} = Ctx +) -> + propagate_post_config_updates_to_subconf_wkey(Ctx); +propagate_post_config_updates_to_subconf( + #{handlers := Handlers} = Ctx +) -> + Keys = maps:keys(Handlers), + propagate_post_config_updates_to_subconf_keys(Keys, Ctx). 
+ +propagate_post_config_updates_to_subconf_wkey( + #{ + old_conf := OldConf, + new_conf := NewConf + } = Ctx +) -> + Keys = propagate_keys(OldConf, NewConf), + propagate_post_config_updates_to_subconf_keys(Keys, Ctx). +propagate_post_config_updates_to_subconf_keys([], #{result := Result}) -> + {ok, Result}; +propagate_post_config_updates_to_subconf_keys([Key | Keys], Ctx) -> + case propagate_post_config_updates_to_subconf_key(Key, Ctx) of + {ok, Result1} -> + propagate_post_config_updates_to_subconf_keys(Keys, Ctx#{result := Result1}); + Error -> + Error + end. + +propagate_keys(OldConf, NewConf) -> + sets:to_list(sets:union(propagate_keys(OldConf), propagate_keys(NewConf))). + +propagate_keys(Conf) when is_map(Conf) -> sets:from_list(maps:keys(Conf), [{version, 2}]); +propagate_keys(_) -> sets:new([{version, 2}]). + +propagate_post_config_updates_to_subconf_key( + Key, + #{ + handlers := Handlers, + new_conf := NewConf, + old_conf := OldConf, + result := Result, + conf_key_path := ConfKeyPath + } = Ctx +) -> + SubHandlers = maps:get(Key, Handlers, maps:get(?WKEY, Handlers, undefined)), + SubNewConf = get_sub_config(Key, NewConf), + SubOldConf = get_sub_config(Key, OldConf), + SubConfKeyPath = ConfKeyPath ++ [Key], + call_post_config_update(Ctx#{ + handlers := SubHandlers, + new_conf := SubNewConf, + old_conf := SubOldConf, + result := Result, + conf_key_path := SubConfKeyPath, + callback := propagated_post_config_update + }). + %% The default callback of config handlers %% the behaviour is overwriting the old config if: %% 1. the old config is undefined @@ -517,6 +736,7 @@ remove_empty_leaf(KeyPath, Handlers) -> end. 
assert_callback_function(Mod) -> + _ = Mod:module_info(), case erlang:function_exported(Mod, pre_config_update, 3) orelse erlang:function_exported(Mod, post_config_update, 5) diff --git a/apps/emqx/src/emqx_connection.erl b/apps/emqx/src/emqx_connection.erl index 70eb0d1e4..db36fbea9 100644 --- a/apps/emqx/src/emqx_connection.erl +++ b/apps/emqx/src/emqx_connection.erl @@ -636,7 +636,6 @@ handle_msg( handle_msg({event, disconnected}, State = #state{channel = Channel}) -> ClientId = emqx_channel:info(clientid, Channel), emqx_cm:set_chan_info(ClientId, info(State)), - emqx_cm:connection_closed(ClientId), {ok, State}; handle_msg({event, _Other}, State = #state{channel = Channel}) -> ClientId = emqx_channel:info(clientid, Channel), @@ -1217,9 +1216,9 @@ inc_counter(Key, Inc) -> set_tcp_keepalive({quic, _Listener}) -> ok; set_tcp_keepalive({Type, Id}) -> - Conf = emqx_config:get_listener_conf(Type, Id, [tcp_options, keepalive], <<"none">>), - case iolist_to_binary(Conf) of - <<"none">> -> + Conf = emqx_config:get_listener_conf(Type, Id, [tcp_options, keepalive], "none"), + case Conf of + "none" -> ok; Value -> %% the value is already validated by schema, so we do not validate it again. diff --git a/apps/emqx/src/emqx_listeners.erl b/apps/emqx/src/emqx_listeners.erl index 964873e53..b95169d3c 100644 --- a/apps/emqx/src/emqx_listeners.erl +++ b/apps/emqx/src/emqx_listeners.erl @@ -531,41 +531,15 @@ post_config_update(_Path, _Request, _NewConf, _OldConf, _AppEnvs) -> ok. create_listener(Type, Name, NewConf) -> - Res = start_listener(Type, Name, NewConf), - recreate_authenticators(Res, Type, Name, NewConf). - -recreate_authenticators(ok, Type, Name, Conf) -> - Chain = listener_id(Type, Name), - _ = emqx_authentication:delete_chain(Chain), - do_create_authneticators(Chain, maps:get(authentication, Conf, [])); -recreate_authenticators(Error, _Type, _Name, _NewConf) -> - Error. 
- -do_create_authneticators(Chain, [AuthN | T]) -> - case emqx_authentication:create_authenticator(Chain, AuthN) of - {ok, _} -> - do_create_authneticators(Chain, T); - Error -> - _ = emqx_authentication:delete_chain(Chain), - Error - end; -do_create_authneticators(_Chain, []) -> - ok. + start_listener(Type, Name, NewConf). remove_listener(Type, Name, OldConf) -> ok = unregister_ocsp_stapling_refresh(Type, Name), - case stop_listener(Type, Name, OldConf) of - ok -> - _ = emqx_authentication:delete_chain(listener_id(Type, Name)), - ok; - Err -> - Err - end. + stop_listener(Type, Name, OldConf). update_listener(Type, Name, {OldConf, NewConf}) -> ok = maybe_unregister_ocsp_stapling_refresh(Type, Name, NewConf), - Res = restart_listener(Type, Name, {OldConf, NewConf}), - recreate_authenticators(Res, Type, Name, NewConf). + restart_listener(Type, Name, {OldConf, NewConf}). perform_listener_changes([]) -> ok; @@ -847,10 +821,9 @@ convert_certs(ListenerConf) -> fun(Type, Listeners0, Acc) -> Listeners1 = maps:fold( - fun(Name, Conf, Acc1) -> - Conf1 = convert_certs(Type, Name, Conf), - Conf2 = convert_authn_certs(Type, Name, Conf1), - Acc1#{Name => Conf2} + fun(Name, Conf0, Acc1) -> + Conf1 = convert_certs(Type, Name, Conf0), + Acc1#{Name => Conf1} end, #{}, Listeners0 @@ -873,19 +846,6 @@ convert_certs(Type, Name, Conf) -> throw({bad_ssl_config, Reason}) end. -convert_authn_certs(Type, Name, #{<<"authentication">> := AuthNList} = Conf) -> - ChainName = listener_id(Type, Name), - AuthNList1 = lists:map( - fun(AuthN) -> - CertsDir = emqx_authentication_config:certs_dir(ChainName, AuthN), - emqx_authentication_config:convert_certs(CertsDir, AuthN) - end, - AuthNList - ), - Conf#{<<"authentication">> => AuthNList1}; -convert_authn_certs(_Type, _Name, Conf) -> - Conf. - filter_stacktrace({Reason, _Stacktrace}) -> Reason; filter_stacktrace(Reason) -> Reason. 
diff --git a/apps/emqx/src/emqx_metrics.erl b/apps/emqx/src/emqx_metrics.erl index 21a114c0f..ee86457b0 100644 --- a/apps/emqx/src/emqx_metrics.erl +++ b/apps/emqx/src/emqx_metrics.erl @@ -67,6 +67,7 @@ terminate/2, code_change/3 ]). +-export([olp_metrics/0]). %% BACKW: v4.3.0 -export([upgrade_retained_delayed_counter_type/0]). @@ -267,15 +268,18 @@ {counter, 'authentication.failure'} ]). -%% Overload protetion counters +%% Overload protection counters -define(OLP_METRICS, [ - {counter, 'olp.delay.ok'}, - {counter, 'olp.delay.timeout'}, - {counter, 'olp.hbn'}, - {counter, 'olp.gc'}, - {counter, 'olp.new_conn'} + {counter, 'overload_protection.delay.ok'}, + {counter, 'overload_protection.delay.timeout'}, + {counter, 'overload_protection.hibernation'}, + {counter, 'overload_protection.gc'}, + {counter, 'overload_protection.new_conn'} ]). +olp_metrics() -> + lists:map(fun({_, Metric}) -> Metric end, ?OLP_METRICS). + -record(state, {next_idx = 1}). -record(metric, {name, type, idx}). @@ -489,7 +493,7 @@ inc_sent(Packet) -> inc('packets.sent'), do_inc_sent(Packet). 
-do_inc_sent(?CONNACK_PACKET(ReasonCode)) -> +do_inc_sent(?CONNACK_PACKET(ReasonCode, _SessPresent)) -> (ReasonCode == ?RC_SUCCESS) orelse inc('packets.connack.error'), ((ReasonCode == ?RC_NOT_AUTHORIZED) orelse (ReasonCode == ?CONNACK_AUTH)) andalso @@ -701,9 +705,9 @@ reserved_idx('authorization.cache_hit') -> 302; reserved_idx('authentication.success') -> 310; reserved_idx('authentication.success.anonymous') -> 311; reserved_idx('authentication.failure') -> 312; -reserved_idx('olp.delay.ok') -> 400; -reserved_idx('olp.delay.timeout') -> 401; -reserved_idx('olp.hbn') -> 402; -reserved_idx('olp.gc') -> 403; -reserved_idx('olp.new_conn') -> 404; +reserved_idx('overload_protection.delay.ok') -> 400; +reserved_idx('overload_protection.delay.timeout') -> 401; +reserved_idx('overload_protection.hibernation') -> 402; +reserved_idx('overload_protection.gc') -> 403; +reserved_idx('overload_protection.new_conn') -> 404; reserved_idx(_) -> undefined. diff --git a/apps/emqx/src/emqx_metrics_worker.erl b/apps/emqx/src/emqx_metrics_worker.erl index 5f41346cb..a57a2f2f0 100644 --- a/apps/emqx/src/emqx_metrics_worker.erl +++ b/apps/emqx/src/emqx_metrics_worker.erl @@ -495,7 +495,7 @@ terminate(_Reason, #state{metric_ids = MIDs}) -> stop(Name) -> try - gen_server:stop(Name) + gen_server:stop(Name, normal, 10000) catch exit:noproc -> ok; diff --git a/apps/emqx/src/emqx_olp.erl b/apps/emqx/src/emqx_olp.erl index 5a4775896..5ca35d8b4 100644 --- a/apps/emqx/src/emqx_olp.erl +++ b/apps/emqx/src/emqx_olp.erl @@ -38,11 +38,11 @@ | backoff_new_conn. -type cnt_name() :: - 'olp.delay.ok' - | 'olp.delay.timeout' - | 'olp.hbn' - | 'olp.gc' - | 'olp.new_conn'. + 'overload_protection.delay.ok' + | 'overload_protection.delay.timeout' + | 'overload_protection.hibernation' + | 'overload_protection.gc' + | 'overload_protection.new_conn'. -define(overload_protection, overload_protection). 
@@ -63,10 +63,10 @@ backoff(Zone) -> false -> false; ok -> - emqx_metrics:inc('olp.delay.ok'), + emqx_metrics:inc('overload_protection.delay.ok'), ok; timeout -> - emqx_metrics:inc('olp.delay.timeout'), + emqx_metrics:inc('overload_protection.delay.timeout'), timeout end; _ -> @@ -76,18 +76,18 @@ backoff(Zone) -> %% @doc If forceful GC should be skipped when the system is overloaded. -spec backoff_gc(Zone :: atom()) -> boolean(). backoff_gc(Zone) -> - do_check(Zone, ?FUNCTION_NAME, 'olp.gc'). + do_check(Zone, ?FUNCTION_NAME, 'overload_protection.gc'). %% @doc If hibernation should be skipped when the system is overloaded. -spec backoff_hibernation(Zone :: atom()) -> boolean(). backoff_hibernation(Zone) -> - do_check(Zone, ?FUNCTION_NAME, 'olp.hbn'). + do_check(Zone, ?FUNCTION_NAME, 'overload_protection.hibernation'). %% @doc Returns {error, overloaded} if new connection should be %% closed when system is overloaded. -spec backoff_new_conn(Zone :: atom()) -> ok | {error, overloaded}. backoff_new_conn(Zone) -> - case do_check(Zone, ?FUNCTION_NAME, 'olp.new_conn') of + case do_check(Zone, ?FUNCTION_NAME, 'overload_protection.new_conn') of true -> {error, overloaded}; false -> diff --git a/apps/emqx/src/emqx_quic_connection.erl b/apps/emqx/src/emqx_quic_connection.erl index a77ec28f2..7ddf05af3 100644 --- a/apps/emqx/src/emqx_quic_connection.erl +++ b/apps/emqx/src/emqx_quic_connection.erl @@ -118,7 +118,7 @@ new_conn( {stop, stream_accept_error, S} end; true -> - emqx_metrics:inc('olp.new_conn'), + emqx_metrics:inc('overload_protection.new_conn'), _ = quicer:async_shutdown_connection( Conn, ?QUIC_CONNECTION_SHUTDOWN_FLAG_NONE, diff --git a/apps/emqx/src/emqx_router.erl b/apps/emqx/src/emqx_router.erl index 95b6136a7..464852ceb 100644 --- a/apps/emqx/src/emqx_router.erl +++ b/apps/emqx/src/emqx_router.erl @@ -21,7 +21,6 @@ -include("emqx.hrl"). -include("logger.hrl"). -include("types.hrl"). --include_lib("mria/include/mria.hrl"). 
-include_lib("emqx/include/emqx_router.hrl"). %% Mnesia bootstrap @@ -46,16 +45,25 @@ do_delete_route/2 ]). +-export([cleanup_routes/1]). + -export([ match_routes/1, - lookup_routes/1, - has_routes/1 + lookup_routes/1 ]). -export([print_routes/1]). +-export([ + foldl_routes/2, + foldr_routes/2 +]). + -export([topics/0]). +%% Exported for tests +-export([has_route/2]). + %% gen_server callbacks -export([ init/1, @@ -66,10 +74,21 @@ code_change/3 ]). +-export([ + get_schema_vsn/0, + init_schema/0, + deinit_schema/0 +]). + -type group() :: binary(). -type dest() :: node() | {group(), node()}. +-record(routeidx, { + entry :: emqx_topic_index:key(dest()), + unused = [] :: nil() +}). + %%-------------------------------------------------------------------- %% Mnesia bootstrap %%-------------------------------------------------------------------- @@ -88,6 +107,19 @@ mnesia(boot) -> {write_concurrency, true} ]} ]} + ]), + ok = mria:create_table(?ROUTE_TAB_FILTERS, [ + {type, ordered_set}, + {rlog_shard, ?ROUTE_SHARD}, + {storage, ram_copies}, + {record_name, routeidx}, + {attributes, record_info(fields, routeidx)}, + {storage_properties, [ + {ets, [ + {read_concurrency, true}, + {write_concurrency, auto} + ]} + ]} ]). %%-------------------------------------------------------------------- @@ -121,43 +153,49 @@ do_add_route(Topic) when is_binary(Topic) -> -spec do_add_route(emqx_types:topic(), dest()) -> ok | {error, term()}. do_add_route(Topic, Dest) when is_binary(Topic) -> - Route = #route{topic = Topic, dest = Dest}, - case lists:member(Route, lookup_routes(Topic)) of + case has_route(Topic, Dest) of true -> ok; false -> ok = emqx_router_helper:monitor(Dest), - case emqx_topic:wildcard(Topic) of - true -> - Fun = fun emqx_router_utils:insert_trie_route/2, - emqx_router_utils:maybe_trans(Fun, [?ROUTE_TAB, Route], ?ROUTE_SHARD); - false -> - emqx_router_utils:insert_direct_route(?ROUTE_TAB, Route) - end + mria_insert_route(get_schema_vsn(), Topic, Dest) end. 
-%% @doc Match routes +mria_insert_route(v2, Topic, Dest) -> + mria_insert_route_v2(Topic, Dest); +mria_insert_route(v1, Topic, Dest) -> + mria_insert_route_v1(Topic, Dest). + +%% @doc Take a real topic (not filter) as input, return the matching topics and topic +%% filters associated with route destination. -spec match_routes(emqx_types:topic()) -> [emqx_types:route()]. match_routes(Topic) when is_binary(Topic) -> - case match_trie(Topic) of - [] -> lookup_routes(Topic); - Matched -> lists:append([lookup_routes(To) || To <- [Topic | Matched]]) - end. + match_routes(get_schema_vsn(), Topic). -%% Optimize: routing table will be replicated to all router nodes. -match_trie(Topic) -> - case emqx_trie:empty() of - true -> []; - false -> emqx_trie:match(Topic) - end. +match_routes(v2, Topic) -> + match_routes_v2(Topic); +match_routes(v1, Topic) -> + match_routes_v1(Topic). +%% @doc Take a topic or filter as input, and return the existing routes with exactly +%% this topic or filter. -spec lookup_routes(emqx_types:topic()) -> [emqx_types:route()]. lookup_routes(Topic) -> - ets:lookup(?ROUTE_TAB, Topic). + lookup_routes(get_schema_vsn(), Topic). --spec has_routes(emqx_types:topic()) -> boolean(). -has_routes(Topic) when is_binary(Topic) -> - ets:member(?ROUTE_TAB, Topic). +lookup_routes(v2, Topic) -> + lookup_routes_v2(Topic); +lookup_routes(v1, Topic) -> + lookup_routes_v1(Topic). + +-spec has_route(emqx_types:topic(), dest()) -> boolean(). +has_route(Topic, Dest) -> + has_route(get_schema_vsn(), Topic, Dest). + +has_route(v2, Topic, Dest) -> + has_route_v2(Topic, Dest); +has_route(v1, Topic, Dest) -> + has_route_v1(Topic, Dest). -spec delete_route(emqx_types:topic()) -> ok | {error, term()}. delete_route(Topic) when is_binary(Topic) -> @@ -173,18 +211,21 @@ do_delete_route(Topic) when is_binary(Topic) -> -spec do_delete_route(emqx_types:topic(), dest()) -> ok | {error, term()}. 
do_delete_route(Topic, Dest) -> - Route = #route{topic = Topic, dest = Dest}, - case emqx_topic:wildcard(Topic) of - true -> - Fun = fun emqx_router_utils:delete_trie_route/2, - emqx_router_utils:maybe_trans(Fun, [?ROUTE_TAB, Route], ?ROUTE_SHARD); - false -> - emqx_router_utils:delete_direct_route(?ROUTE_TAB, Route) - end. + mria_delete_route(get_schema_vsn(), Topic, Dest). + +mria_delete_route(v2, Topic, Dest) -> + mria_delete_route_v2(Topic, Dest); +mria_delete_route(v1, Topic, Dest) -> + mria_delete_route_v1(Topic, Dest). -spec topics() -> list(emqx_types:topic()). topics() -> - mnesia:dirty_all_keys(?ROUTE_TAB). + topics(get_schema_vsn()). + +topics(v2) -> + list_topics_v2(); +topics(v1) -> + list_topics_v1(). %% @doc Print routes to a topic -spec print_routes(emqx_types:topic()) -> ok. @@ -196,12 +237,290 @@ print_routes(Topic) -> match_routes(Topic) ). +-spec cleanup_routes(node()) -> ok. +cleanup_routes(Node) -> + cleanup_routes(get_schema_vsn(), Node). + +cleanup_routes(v2, Node) -> + cleanup_routes_v2(Node); +cleanup_routes(v1, Node) -> + cleanup_routes_v1(Node). + +-spec foldl_routes(fun((emqx_types:route(), Acc) -> Acc), Acc) -> Acc. +foldl_routes(FoldFun, AccIn) -> + fold_routes(get_schema_vsn(), foldl, FoldFun, AccIn). + +-spec foldr_routes(fun((emqx_types:route(), Acc) -> Acc), Acc) -> Acc. +foldr_routes(FoldFun, AccIn) -> + fold_routes(get_schema_vsn(), foldr, FoldFun, AccIn). + +fold_routes(v2, FunName, FoldFun, AccIn) -> + fold_routes_v2(FunName, FoldFun, AccIn); +fold_routes(v1, FunName, FoldFun, AccIn) -> + fold_routes_v1(FunName, FoldFun, AccIn). + call(Router, Msg) -> gen_server:call(Router, Msg, infinity). pick(Topic) -> gproc_pool:pick_worker(router_pool, Topic). +%%-------------------------------------------------------------------- +%% Schema v1 +%% -------------------------------------------------------------------- + +-dialyzer({nowarn_function, [cleanup_routes_v1/1]}). 
+ +mria_insert_route_v1(Topic, Dest) -> + Route = #route{topic = Topic, dest = Dest}, + case emqx_topic:wildcard(Topic) of + true -> + mria_route_tab_insert_update_trie(Route); + false -> + mria_route_tab_insert(Route) + end. + +mria_route_tab_insert_update_trie(Route) -> + emqx_router_utils:maybe_trans( + fun emqx_router_utils:insert_trie_route/2, + [?ROUTE_TAB, Route], + ?ROUTE_SHARD + ). + +mria_route_tab_insert(Route) -> + mria:dirty_write(?ROUTE_TAB, Route). + +mria_delete_route_v1(Topic, Dest) -> + Route = #route{topic = Topic, dest = Dest}, + case emqx_topic:wildcard(Topic) of + true -> + mria_route_tab_delete_update_trie(Route); + false -> + mria_route_tab_delete(Route) + end. + +mria_route_tab_delete_update_trie(Route) -> + emqx_router_utils:maybe_trans( + fun emqx_router_utils:delete_trie_route/2, + [?ROUTE_TAB, Route], + ?ROUTE_SHARD + ). + +mria_route_tab_delete(Route) -> + mria:dirty_delete_object(?ROUTE_TAB, Route). + +match_routes_v1(Topic) -> + lookup_route_tab(Topic) ++ + lists:flatmap(fun lookup_route_tab/1, match_global_trie(Topic)). + +match_global_trie(Topic) -> + case emqx_trie:empty() of + true -> []; + false -> emqx_trie:match(Topic) + end. + +lookup_routes_v1(Topic) -> + lookup_route_tab(Topic). + +lookup_route_tab(Topic) -> + ets:lookup(?ROUTE_TAB, Topic). + +has_route_v1(Topic, Dest) -> + has_route_tab_entry(Topic, Dest). + +has_route_tab_entry(Topic, Dest) -> + [] =/= ets:match(?ROUTE_TAB, #route{topic = Topic, dest = Dest}). + +cleanup_routes_v1(Node) -> + Patterns = [ + #route{_ = '_', dest = Node}, + #route{_ = '_', dest = {'_', Node}} + ], + mria:transaction(?ROUTE_SHARD, fun() -> + [ + mnesia:delete_object(?ROUTE_TAB, Route, write) + || Pat <- Patterns, + Route <- mnesia:match_object(?ROUTE_TAB, Pat, write) + ] + end). + +list_topics_v1() -> + list_route_tab_topics(). + +list_route_tab_topics() -> + mnesia:dirty_all_keys(?ROUTE_TAB). + +fold_routes_v1(FunName, FoldFun, AccIn) -> + ets:FunName(FoldFun, AccIn, ?ROUTE_TAB). 
+ +%%-------------------------------------------------------------------- +%% Schema v2 +%% One bag table exclusively for regular, non-filter subscription +%% topics, and one `emqx_topic_index` table exclusively for wildcard +%% topics. Writes go to only one of the two tables at a time. +%% -------------------------------------------------------------------- + +mria_insert_route_v2(Topic, Dest) -> + case emqx_trie_search:filter(Topic) of + Words when is_list(Words) -> + K = emqx_topic_index:make_key(Words, Dest), + mria:dirty_write(?ROUTE_TAB_FILTERS, #routeidx{entry = K}); + false -> + mria_route_tab_insert(#route{topic = Topic, dest = Dest}) + end. + +mria_delete_route_v2(Topic, Dest) -> + case emqx_trie_search:filter(Topic) of + Words when is_list(Words) -> + K = emqx_topic_index:make_key(Words, Dest), + mria:dirty_delete(?ROUTE_TAB_FILTERS, K); + false -> + mria_route_tab_delete(#route{topic = Topic, dest = Dest}) + end. + +match_routes_v2(Topic) -> + lookup_route_tab(Topic) ++ + [match_to_route(M) || M <- match_filters(Topic)]. + +match_filters(Topic) -> + emqx_topic_index:matches(Topic, ?ROUTE_TAB_FILTERS, []). + +lookup_routes_v2(Topic) -> + case emqx_topic:wildcard(Topic) of + true -> + Pat = #routeidx{entry = emqx_topic_index:make_key(Topic, '$1')}, + [Dest || [Dest] <- ets:match(?ROUTE_TAB_FILTERS, Pat)]; + false -> + lookup_route_tab(Topic) + end. + +has_route_v2(Topic, Dest) -> + case emqx_topic:wildcard(Topic) of + true -> + ets:member(?ROUTE_TAB_FILTERS, emqx_topic_index:make_key(Topic, Dest)); + false -> + has_route_tab_entry(Topic, Dest) + end. + +cleanup_routes_v2(Node) -> + % NOTE + % No point in transaction here because all the operations on filters table are dirty. 
+ ok = ets:foldl( + fun(#routeidx{entry = K}, ok) -> + case get_dest_node(emqx_topic_index:get_id(K)) of + Node -> + mria:dirty_delete(?ROUTE_TAB_FILTERS, K); + _ -> + ok + end + end, + ok, + ?ROUTE_TAB_FILTERS + ), + ok = ets:foldl( + fun(#route{dest = Dest} = Route, ok) -> + case get_dest_node(Dest) of + Node -> + mria:dirty_delete_object(?ROUTE_TAB, Route); + _ -> + ok + end + end, + ok, + ?ROUTE_TAB + ). + +get_dest_node({_, Node}) -> + Node; +get_dest_node(Node) -> + Node. + +list_topics_v2() -> + Pat = #routeidx{entry = '$1'}, + Filters = [emqx_topic_index:get_topic(K) || [K] <- ets:match(?ROUTE_TAB_FILTERS, Pat)], + list_route_tab_topics() ++ Filters. + +fold_routes_v2(FunName, FoldFun, AccIn) -> + FilterFoldFun = mk_filtertab_fold_fun(FoldFun), + Acc = ets:FunName(FoldFun, AccIn, ?ROUTE_TAB), + ets:FunName(FilterFoldFun, Acc, ?ROUTE_TAB_FILTERS). + +mk_filtertab_fold_fun(FoldFun) -> + fun(#routeidx{entry = K}, Acc) -> FoldFun(match_to_route(K), Acc) end. + +match_to_route(M) -> + #route{topic = emqx_topic_index:get_topic(M), dest = emqx_topic_index:get_id(M)}. + +%%-------------------------------------------------------------------- +%% Routing table type +%% -------------------------------------------------------------------- + +-define(PT_SCHEMA_VSN, {?MODULE, schemavsn}). + +-type schemavsn() :: v1 | v2. + +-spec get_schema_vsn() -> schemavsn(). +get_schema_vsn() -> + persistent_term:get(?PT_SCHEMA_VSN). + +-spec init_schema() -> ok. 
+init_schema() -> + ok = mria:wait_for_tables([?ROUTE_TAB, ?ROUTE_TAB_FILTERS]), + ok = emqx_trie:wait_for_tables(), + ConfSchema = emqx_config:get([broker, routing, storage_schema]), + Schema = choose_schema_vsn(ConfSchema), + ok = persistent_term:put(?PT_SCHEMA_VSN, Schema), + case Schema of + ConfSchema -> + ?SLOG(info, #{ + msg => "routing_schema_used", + schema => Schema + }); + _ -> + ?SLOG(notice, #{ + msg => "configured_routing_schema_ignored", + schema_in_use => Schema, + configured => ConfSchema, + reason => + "Could not use configured routing storage schema because " + "there are already non-empty routing tables pertaining to " + "another schema." + }) + end. + +-spec deinit_schema() -> ok. +deinit_schema() -> + _ = persistent_term:erase(?PT_SCHEMA_VSN), + ok. + +-spec choose_schema_vsn(schemavsn()) -> schemavsn(). +choose_schema_vsn(ConfType) -> + IsEmptyIndex = emqx_trie:empty(), + IsEmptyFilters = is_empty(?ROUTE_TAB_FILTERS), + case {IsEmptyIndex, IsEmptyFilters} of + {true, true} -> + ConfType; + {false, true} -> + v1; + {true, false} -> + v2; + {false, false} -> + ?SLOG(critical, #{ + msg => "conflicting_routing_schemas_detected_in_cluster", + configured => ConfType, + reason => + "There are records in the routing tables related to both v1 " + "and v2 storage schemas. This probably means that some nodes " + "in the cluster use v1 schema and some use v2, independently " + "of each other. The routing is likely broken. Manual intervention " + "and full cluster restart is required. This node will shut down." + }), + error(conflicting_routing_schemas_detected_in_cluster) + end. + +is_empty(Tab) -> + ets:first(Tab) =:= '$end_of_table'. 
+ %%-------------------------------------------------------------------- %% gen_server callbacks %%-------------------------------------------------------------------- diff --git a/apps/emqx/src/emqx_router_helper.erl b/apps/emqx/src/emqx_router_helper.erl index 8d96bf81d..b9cdbae4b 100644 --- a/apps/emqx/src/emqx_router_helper.erl +++ b/apps/emqx/src/emqx_router_helper.erl @@ -148,11 +148,12 @@ handle_info({mnesia_table_event, Event}, State) -> handle_info({nodedown, Node}, State = #{nodes := Nodes}) -> case mria_rlog:role() of core -> + % TODO + % Node may flap, do we need to wait for any pending cleanups in `init/1` + % on the flapping node? global:trans( {?LOCK, self()}, - fun() -> - mria:transaction(?ROUTE_SHARD, fun ?MODULE:cleanup_routes/1, [Node]) - end + fun() -> cleanup_routes(Node) end ), ok = mria:dirty_delete(?ROUTING_NODE, Node); replicant -> @@ -197,11 +198,4 @@ stats_fun() -> end. cleanup_routes(Node) -> - Patterns = [ - #route{_ = '_', dest = Node}, - #route{_ = '_', dest = {'_', Node}} - ], - [ - mnesia:delete_object(?ROUTE_TAB, Route, write) - || Pat <- Patterns, Route <- mnesia:match_object(?ROUTE_TAB, Pat, write) - ]. + emqx_router:cleanup_routes(Node). diff --git a/apps/emqx/src/emqx_router_sup.erl b/apps/emqx/src/emqx_router_sup.erl index 0fa48d9d2..588b0de8e 100644 --- a/apps/emqx/src/emqx_router_sup.erl +++ b/apps/emqx/src/emqx_router_sup.erl @@ -23,6 +23,8 @@ -export([init/1]). start_link() -> + %% Init and log routing table type + ok = emqx_router:init_schema(), supervisor:start_link({local, ?MODULE}, ?MODULE, []). init([]) -> diff --git a/apps/emqx/src/emqx_schema.erl b/apps/emqx/src/emqx_schema.erl index 4fdec3179..04bd397ec 100644 --- a/apps/emqx/src/emqx_schema.erl +++ b/apps/emqx/src/emqx_schema.erl @@ -24,7 +24,6 @@ -elvis([{elvis_style, invalid_dynamic_call, disable}]). -include("emqx_schema.hrl"). --include("emqx_authentication.hrl"). -include("emqx_access_control.hrl"). -include_lib("typerefl/include/types.hrl"). 
-include_lib("hocon/include/hoconsc.hrl"). @@ -213,16 +212,18 @@ roots(high) -> desc => ?DESC(zones), importance => ?IMPORTANCE_HIDDEN } - )}, - {?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME, authentication(global)}, - %% NOTE: authorization schema here is only to keep emqx app pure - %% the full schema for EMQX node is injected in emqx_conf_schema. - {?EMQX_AUTHORIZATION_CONFIG_ROOT_NAME, - sc( - ref(?EMQX_AUTHORIZATION_CONFIG_ROOT_NAME), - #{importance => ?IMPORTANCE_HIDDEN} )} - ]; + ] ++ + emqx_schema_hooks:injection_point('roots.high') ++ + [ + %% NOTE: authorization schema here is only to keep emqx app pure + %% the full schema for EMQX node is injected in emqx_conf_schema. + {?EMQX_AUTHORIZATION_CONFIG_ROOT_NAME, + sc( + ref(?EMQX_AUTHORIZATION_CONFIG_ROOT_NAME), + #{importance => ?IMPORTANCE_HIDDEN} + )} + ]; roots(medium) -> [ {"broker", @@ -1357,6 +1358,11 @@ fields("broker") -> ref("broker_perf"), #{importance => ?IMPORTANCE_HIDDEN} )}, + {"routing", + sc( + ref("broker_routing"), + #{importance => ?IMPORTANCE_HIDDEN} + )}, %% FIXME: Need new design for shared subscription group {"shared_subscription_group", sc( @@ -1368,6 +1374,18 @@ fields("broker") -> } )} ]; +fields("broker_routing") -> + [ + {"storage_schema", + sc( + hoconsc:enum([v1, v2]), + #{ + default => v1, + 'readOnly' => true, + desc => ?DESC(broker_routing_storage_schema) + } + )} + ]; fields("shared_subscription_group") -> [ {"strategy", @@ -1748,11 +1766,8 @@ mqtt_listener(Bind) -> desc => ?DESC(mqtt_listener_proxy_protocol_timeout), default => <<"3s">> } - )}, - {?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME, (authentication(listener))#{ - importance => ?IMPORTANCE_HIDDEN - }} - ]. + )} + ] ++ emqx_schema_hooks:injection_point('mqtt.listener'). 
base_listener(Bind) -> [ @@ -2316,18 +2331,7 @@ ciphers_schema(Default) -> hoconsc:array(string()), #{ default => default_ciphers(Default), - converter => fun - (undefined) -> - []; - (<<>>) -> - []; - ("") -> - []; - (Ciphers) when is_binary(Ciphers) -> - binary:split(Ciphers, <<",">>, [global]); - (Ciphers) when is_list(Ciphers) -> - Ciphers - end, + converter => fun converter_ciphers/2, validator => case Default =:= quic of %% quic has openssl statically linked @@ -2338,6 +2342,15 @@ ciphers_schema(Default) -> } ). +converter_ciphers(undefined, _Opts) -> + []; +converter_ciphers(<<>>, _Opts) -> + []; +converter_ciphers(Ciphers, _Opts) when is_list(Ciphers) -> Ciphers; +converter_ciphers(Ciphers, _Opts) when is_binary(Ciphers) -> + {ok, List} = to_comma_separated_binary(binary_to_list(Ciphers)), + List. + default_ciphers(Which) -> lists:map( fun erlang:iolist_to_binary/1, @@ -2654,7 +2667,7 @@ validate_tcp_keepalive(Value) -> %% @doc This function is used as value validator and also run-time parser. parse_tcp_keepalive(Str) -> try - [Idle, Interval, Probes] = binary:split(iolist_to_binary(Str), <<",">>, [global]), + {ok, [Idle, Interval, Probes]} = to_comma_separated_binary(Str), %% use 10 times the Linux defaults as range limit IdleInt = parse_ka_int(Idle, "Idle", 1, 7200_0), IntervalInt = parse_ka_int(Interval, "Interval", 1, 75_0), @@ -2770,41 +2783,6 @@ str(B) when is_binary(B) -> str(S) when is_list(S) -> S. -authentication(Which) -> - {Importance, Desc} = - case Which of - global -> - %% For root level authentication, it is recommended to configure - %% from the dashboard or API. - %% Hence it's considered a low-importance when it comes to - %% configuration importance. - {?IMPORTANCE_LOW, ?DESC(global_authentication)}; - listener -> - {?IMPORTANCE_HIDDEN, ?DESC(listener_authentication)} - end, - %% poor man's dependency injection - %% this is due to the fact that authn is implemented outside of 'emqx' app. 
- %% so it can not be a part of emqx_schema since 'emqx' app is supposed to - %% work standalone. - Type = - case persistent_term:get(?EMQX_AUTHENTICATION_SCHEMA_MODULE_PT_KEY, undefined) of - undefined -> - hoconsc:array(typerefl:map()); - Module -> - Module:root_type() - end, - hoconsc:mk(Type, #{ - desc => Desc, - converter => fun ensure_array/2, - default => [], - importance => Importance - }). - -%% the older version schema allows individual element (instead of a chain) in config -ensure_array(undefined, _) -> undefined; -ensure_array(L, _) when is_list(L) -> L; -ensure_array(M, _) -> [M]. - -spec qos() -> typerefl:type(). qos() -> typerefl:alias("qos", typerefl:union([0, 1, 2])). @@ -3162,9 +3140,10 @@ quic_feature_toggle(Desc) -> importance => ?IMPORTANCE_HIDDEN, required => false, converter => fun - (true) -> 1; - (false) -> 0; - (Other) -> Other + (Val, #{make_serializable := true}) -> Val; + (true, _Opts) -> 1; + (false, _Opts) -> 0; + (Other, _Opts) -> Other end } ). diff --git a/apps/emqx/src/emqx_schema_hooks.erl b/apps/emqx/src/emqx_schema_hooks.erl new file mode 100644 index 000000000..e704af6cc --- /dev/null +++ b/apps/emqx/src/emqx_schema_hooks.erl @@ -0,0 +1,118 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2017-2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. 
+%%-------------------------------------------------------------------- + +-module(emqx_schema_hooks). + +-type hookpoint() :: atom(). + +-callback injected_fields() -> + #{ + hookpoint() => [hocon_schema:field()] + }. +-optional_callbacks([injected_fields/0]). + +-export_type([hookpoint/0]). + +-define(HOOKPOINT_PT_KEY(POINT_NAME), {?MODULE, fields, POINT_NAME}). + +-export([ + injection_point/1, + inject_from_modules/1 +]). + +%% for tests +-export([ + erase_injections/0, + any_injections/0 +]). + +%%-------------------------------------------------------------------- +%% API +%%-------------------------------------------------------------------- + +injection_point(PointName) -> + persistent_term:get(?HOOKPOINT_PT_KEY(PointName), []). + +erase_injections() -> + lists:foreach( + fun + ({?HOOKPOINT_PT_KEY(_) = Key, _}) -> + persistent_term:erase(Key); + (_) -> + ok + end, + persistent_term:get() + ). + +any_injections() -> + lists:any( + fun + ({?HOOKPOINT_PT_KEY(_), _}) -> + true; + (_) -> + false + end, + persistent_term:get() + ). + +inject_from_modules(Modules) -> + Injections = + lists:foldl( + fun append_module_injections/2, + #{}, + Modules + ), + ok = inject_fields(maps:to_list(Injections)). + +%%-------------------------------------------------------------------- +%% Internal functions +%%-------------------------------------------------------------------- + +append_module_injections(Module, AllInjections) when is_atom(Module) -> + append_module_injections(Module:injected_fields(), AllInjections); +append_module_injections(ModuleInjections, AllInjections) when is_map(ModuleInjections) -> + maps:fold( + fun(PointName, Fields, Acc) -> + maps:update_with( + PointName, + fun(Fields0) -> + Fields0 ++ Fields + end, + Fields, + Acc + ) + end, + AllInjections, + ModuleInjections + ). 
+
+inject_fields([]) ->
+    ok;
+inject_fields([{PointName, Fields} | Rest]) ->
+    case any_injections(PointName) of
+        true ->
+            inject_fields(Rest);
+        false ->
+            ok = inject_fields(PointName, Fields),
+            inject_fields(Rest)
+    end.
+
+inject_fields(PointName, Fields) ->
+    Key = ?HOOKPOINT_PT_KEY(PointName),
+    persistent_term:put(Key, Fields).
+
+any_injections(PointName) ->
+    persistent_term:get(?HOOKPOINT_PT_KEY(PointName), undefined) =/= undefined.
diff --git a/apps/emqx/src/emqx_stats.erl b/apps/emqx/src/emqx_stats.erl
index e590577da..dfd3115f0 100644
--- a/apps/emqx/src/emqx_stats.erl
+++ b/apps/emqx/src/emqx_stats.erl
@@ -177,7 +177,9 @@ names() ->
         emqx_subscriptions_shared_count,
         emqx_subscriptions_shared_max,
         emqx_retained_count,
-        emqx_retained_max
+        emqx_retained_max,
+        emqx_delayed_count,
+        emqx_delayed_max
     ].
 
 %% @doc Get stats by name.
diff --git a/apps/emqx/src/emqx_topic_gbt.erl b/apps/emqx/src/emqx_topic_gbt.erl
new file mode 100644
index 000000000..063cba21d
--- /dev/null
+++ b/apps/emqx/src/emqx_topic_gbt.erl
@@ -0,0 +1,120 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%%     http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%--------------------------------------------------------------------
+
+%% @doc Topic index implementation with gb_trees stored in persistent_term.
+%% This is only suitable for a static set of topic or topic-filters.
+
+-module(emqx_topic_gbt).
+
+-export([new/0, new/1]).
+-export([insert/4]).
+-export([delete/3]).
+-export([match/2]).
+-export([matches/3]).
+
+-export([get_id/1]).
+-export([get_topic/1]).
+-export([get_record/2]).
+
+-type key(ID) :: emqx_trie_search:key(ID).
+-type words() :: emqx_trie_search:words().
+-type match(ID) :: key(ID).
+-type name() :: any().
+
+%% @private Only for testing.
+-spec new() -> name().
+new() ->
+    new(test).
+
+%% @doc Create a new gb_tree and store it in the persistent_term with the
+%% given name.
+-spec new(name()) -> name().
+new(Name) ->
+    T = gb_trees:from_orddict([]),
+    true = gbt_update(Name, T),
+    Name.
+
+%% @doc Insert a new entry into the index that associates given topic filter to given
+%% record ID, and attaches arbitrary record to the entry. This allows users to choose
+%% between regular and "materialized" indexes, for example.
+-spec insert(emqx_types:topic() | words(), _ID, _Record, name()) -> true.
+insert(Filter, ID, Record, Name) ->
+    Tree = gbt(Name),
+    Key = key(Filter, ID),
+    NewTree = gb_trees:enter(Key, Record, Tree),
+    true = gbt_update(Name, NewTree).
+
+%% @doc Delete an entry from the index that associates given topic filter to given
+%% record ID. Deleting non-existing entry is not an error.
+-spec delete(emqx_types:topic() | words(), _ID, name()) -> true.
+delete(Filter, ID, Name) ->
+    Tree = gbt(Name),
+    Key = key(Filter, ID),
+    NewTree = gb_trees:delete_any(Key, Tree),
+    true = gbt_update(Name, NewTree).
+
+%% @doc Match given topic against the index and return the first match, or `false` if
+%% no match is found.
+-spec match(emqx_types:topic(), name()) -> match(_ID) | false.
+match(Topic, Name) ->
+    emqx_trie_search:match(Topic, make_nextf(Name)).
+
+%% @doc Match given topic against the index and return _all_ matches.
+%% If `unique` option is given, return only unique matches by record ID.
+matches(Topic, Name, Opts) ->
+    emqx_trie_search:matches(Topic, make_nextf(Name), Opts).
+ +%% @doc Extract record ID from the match. +-spec get_id(match(ID)) -> ID. +get_id(Key) -> + emqx_trie_search:get_id(Key). + +%% @doc Extract topic (or topic filter) from the match. +-spec get_topic(match(_ID)) -> emqx_types:topic(). +get_topic(Key) -> + emqx_trie_search:get_topic(Key). + +%% @doc Fetch the record associated with the match. +-spec get_record(match(_ID), name()) -> _Record. +get_record(Key, Name) -> + Gbt = gbt(Name), + gb_trees:get(Key, Gbt). + +key(TopicOrFilter, ID) -> + emqx_trie_search:make_key(TopicOrFilter, ID). + +gbt(Name) -> + persistent_term:get({?MODULE, Name}). + +gbt_update(Name, Tree) -> + persistent_term:put({?MODULE, Name}, Tree), + true. + +gbt_next(nil, _Input) -> + '$end_of_table'; +gbt_next({P, _V, _Smaller, Bigger}, K) when K >= P -> + gbt_next(Bigger, K); +gbt_next({P, _V, Smaller, _Bigger}, K) -> + case gbt_next(Smaller, K) of + '$end_of_table' -> + P; + NextKey -> + NextKey + end. + +make_nextf(Name) -> + {_SizeWeDontCare, TheTree} = gbt(Name), + fun(Key) -> gbt_next(TheTree, Key) end. diff --git a/apps/emqx/src/emqx_topic_index.erl b/apps/emqx/src/emqx_topic_index.erl index a6f662f74..59dfdfeab 100644 --- a/apps/emqx/src/emqx_topic_index.erl +++ b/apps/emqx/src/emqx_topic_index.erl @@ -14,18 +14,7 @@ %% limitations under the License. %%-------------------------------------------------------------------- -%% @doc Topic index for matching topics to topic filters. -%% -%% Works on top of ETS ordered_set table. Keys are tuples constructed from -%% parsed topic filters and record IDs, wrapped in a tuple to order them -%% strictly greater than unit tuple (`{}`). Existing table may be used if -%% existing keys will not collide with index keys. -%% -%% Designed to effectively answer questions like: -%% 1. Does any topic filter match given topic? -%% 2. Which records are associated with topic filters matching given topic? -%% 3. Which topic filters match given topic? -%% 4. 
Which record IDs are associated with topic filters matching given topic?
+%% @doc Topic index implementation with ETS table as ordered-set storage.
 
 -module(emqx_topic_index).
 
@@ -35,13 +24,15 @@
 -export([match/2]).
 -export([matches/3]).
 
+-export([make_key/2]).
+
 -export([get_id/1]).
 -export([get_topic/1]).
 -export([get_record/2]).
 
--type word() :: binary() | '+' | '#'.
--type key(ID) :: {[word()], {ID}}.
+-type key(ID) :: emqx_trie_search:key(ID).
 -type match(ID) :: key(ID).
+-type words() :: emqx_trie_search:words().
 
 %% @doc Create a new ETS table suitable for topic index.
 %% Usable mostly for testing purposes.
@@ -52,191 +43,53 @@ new() ->
 %% @doc Insert a new entry into the index that associates given topic filter to given
 %% record ID, and attaches arbitrary record to the entry. This allows users to choose
 %% between regular and "materialized" indexes, for example.
--spec insert(emqx_types:topic(), _ID, _Record, ets:table()) -> true.
+-spec insert(emqx_types:topic() | words(), _ID, _Record, ets:table()) -> true.
 insert(Filter, ID, Record, Tab) ->
-    ets:insert(Tab, {{words(Filter), {ID}}, Record}).
+    Key = make_key(Filter, ID),
+    true = ets:insert(Tab, {Key, Record}).
 
 %% @doc Delete an entry from the index that associates given topic filter to given
 %% record ID. Deleting non-existing entry is not an error.
--spec delete(emqx_types:topic(), _ID, ets:table()) -> true.
+-spec delete(emqx_types:topic() | words(), _ID, ets:table()) -> true.
 delete(Filter, ID, Tab) ->
-    ets:delete(Tab, {words(Filter), {ID}}).
+    ets:delete(Tab, make_key(Filter, ID)).
+
+-spec make_key(emqx_types:topic() | words(), ID) -> key(ID).
+make_key(TopicOrFilter, ID) ->
+    emqx_trie_search:make_key(TopicOrFilter, ID).
 
 %% @doc Match given topic against the index and return the first match, or `false` if
 %% no match is found.
 -spec match(emqx_types:topic(), ets:table()) -> match(_ID) | false.
 match(Topic, Tab) ->
-    {Words, RPrefix} = match_init(Topic),
-    match(Words, RPrefix, Tab).
- -match(Words, RPrefix, Tab) -> - Prefix = lists:reverse(RPrefix), - match(ets:next(Tab, {Prefix, {}}), Prefix, Words, RPrefix, Tab). - -match(K, Prefix, Words, RPrefix, Tab) -> - case match_next(Prefix, K, Words) of - true -> - K; - skip -> - match(ets:next(Tab, K), Prefix, Words, RPrefix, Tab); - stop -> - false; - Matched -> - match_rest(Matched, Words, RPrefix, Tab) - end. - -match_rest([W1 | [W2 | _] = SLast], [W1 | [W2 | _] = Rest], RPrefix, Tab) -> - % NOTE - % Fast-forward through identical words in the topic and the last key suffixes. - % This should save us a few redundant `ets:next` calls at the cost of slightly - % more complex match patterns. - match_rest(SLast, Rest, [W1 | RPrefix], Tab); -match_rest(SLast, [W | Rest], RPrefix, Tab) when is_list(SLast) -> - match(Rest, [W | RPrefix], Tab); -match_rest(plus, [W | Rest], RPrefix, Tab) -> - % NOTE - % There's '+' in the key suffix, meaning we should consider 2 alternatives: - % 1. Match the rest of the topic as if there was '+' in the current position. - % 2. Skip this key and try to match the topic as it is. - case match(Rest, ['+' | RPrefix], Tab) of - Match = {_, _} -> - Match; - false -> - match(Rest, [W | RPrefix], Tab) - end; -match_rest(_, [], _RPrefix, _Tab) -> - false. + emqx_trie_search:match(Topic, make_nextf(Tab)). %% @doc Match given topic against the index and return _all_ matches. %% If `unique` option is given, return only unique matches by record ID. --spec matches(emqx_types:topic(), ets:table(), _Opts :: [unique]) -> [match(_ID)]. matches(Topic, Tab, Opts) -> - {Words, RPrefix} = match_init(Topic), - AccIn = - case Opts of - [unique | _] -> #{}; - [] -> [] - end, - Matches = matches(Words, RPrefix, AccIn, Tab), - case Matches of - #{} -> maps:values(Matches); - _ -> Matches - end. - -matches(Words, RPrefix, Acc, Tab) -> - Prefix = lists:reverse(RPrefix), - matches(ets:next(Tab, {Prefix, {}}), Prefix, Words, RPrefix, Acc, Tab). 
- -matches(Words, RPrefix, K = {Filter, _}, Acc, Tab) -> - Prefix = lists:reverse(RPrefix), - case Prefix > Filter of - true -> - % NOTE: Prefix already greater than the last key seen, need to `ets:next/2`. - matches(ets:next(Tab, {Prefix, {}}), Prefix, Words, RPrefix, Acc, Tab); - false -> - % NOTE: Prefix is still less than or equal to the last key seen, reuse it. - matches(K, Prefix, Words, RPrefix, Acc, Tab) - end. - -matches(K, Prefix, Words, RPrefix, Acc, Tab) -> - case match_next(Prefix, K, Words) of - true -> - matches(ets:next(Tab, K), Prefix, Words, RPrefix, match_add(K, Acc), Tab); - skip -> - matches(ets:next(Tab, K), Prefix, Words, RPrefix, Acc, Tab); - stop -> - Acc; - Matched -> - % NOTE: Prserve next key on the stack to save on `ets:next/2` calls. - matches_rest(Matched, Words, RPrefix, K, Acc, Tab) - end. - -matches_rest([W1 | [W2 | _] = SLast], [W1 | [W2 | _] = Rest], RPrefix, K, Acc, Tab) -> - % NOTE - % Fast-forward through identical words in the topic and the last key suffixes. - % This should save us a few redundant `ets:next` calls at the cost of slightly - % more complex match patterns. - matches_rest(SLast, Rest, [W1 | RPrefix], K, Acc, Tab); -matches_rest(SLast, [W | Rest], RPrefix, K, Acc, Tab) when is_list(SLast) -> - matches(Rest, [W | RPrefix], K, Acc, Tab); -matches_rest(plus, [W | Rest], RPrefix, K, Acc, Tab) -> - % NOTE - % There's '+' in the key suffix, meaning we should accumulate all matches from - % each of 2 branches: - % 1. Match the rest of the topic as if there was '+' in the current position. - % 2. Skip this key and try to match the topic as it is. - NAcc = matches(Rest, ['+' | RPrefix], K, Acc, Tab), - matches(Rest, [W | RPrefix], K, NAcc, Tab); -matches_rest(_, [], _RPrefix, _K, Acc, _Tab) -> - Acc. - -match_add(K = {_Filter, ID}, Acc = #{}) -> - % NOTE: ensuring uniqueness by record ID - Acc#{ID => K}; -match_add(K, Acc) -> - [K | Acc]. 
- -match_next(Prefix, {Filter, _ID}, Suffix) -> - match_filter(Prefix, Filter, Suffix); -match_next(_, '$end_of_table', _) -> - stop. - -match_filter([], [], []) -> - % NOTE: we matched the topic exactly - true; -match_filter([], [], _Suffix) -> - % NOTE: we matched the prefix, but there may be more matches next - skip; -match_filter([], ['#'], _Suffix) -> - % NOTE: naturally, '#' < '+', so this is already optimal for `match/2` - true; -match_filter([], ['+' | _], _Suffix) -> - plus; -match_filter([], [_H | _] = Rest, _Suffix) -> - Rest; -match_filter([H | T1], [H | T2], Suffix) -> - match_filter(T1, T2, Suffix); -match_filter([H1 | _], [H2 | _], _Suffix) when H2 > H1 -> - % NOTE: we're strictly past the prefix, no need to continue - stop. - -match_init(Topic) -> - case words(Topic) of - [W = <<"$", _/bytes>> | Rest] -> - % NOTE - % This will effectively skip attempts to match special topics to `#` or `+/...`. - {Rest, [W]}; - Words -> - {Words, []} - end. + emqx_trie_search:matches(Topic, make_nextf(Tab), Opts). %% @doc Extract record ID from the match. -spec get_id(match(ID)) -> ID. -get_id({_Filter, {ID}}) -> - ID. +get_id(Key) -> + emqx_trie_search:get_id(Key). %% @doc Extract topic (or topic filter) from the match. -spec get_topic(match(_ID)) -> emqx_types:topic(). -get_topic({Filter, _ID}) -> - emqx_topic:join(Filter). +get_topic(Key) -> + emqx_trie_search:get_topic(Key). %% @doc Fetch the record associated with the match. -%% NOTE: Only really useful for ETS tables where the record ID is the first element. --spec get_record(match(_ID), ets:table()) -> _Record. +%% May return empty list if the index entry was deleted in the meantime. +%% NOTE: Only really useful for ETS tables where the record data is the last element. +-spec get_record(match(_ID), ets:table()) -> [_Record]. get_record(K, Tab) -> - ets:lookup_element(Tab, K, 2). + case ets:lookup(Tab, K) of + [Entry] -> + [erlang:element(tuple_size(Entry), Entry)]; + [] -> + [] + end. 
-%% - --spec words(emqx_types:topic()) -> [word()]. -words(Topic) when is_binary(Topic) -> - % NOTE - % This is almost identical to `emqx_topic:words/1`, but it doesn't convert empty - % tokens to ''. This is needed to keep ordering of words consistent with what - % `match_filter/3` expects. - [word(W) || W <- emqx_topic:tokens(Topic)]. - --spec word(binary()) -> word(). -word(<<"+">>) -> '+'; -word(<<"#">>) -> '#'; -word(Bin) -> Bin. +make_nextf(Tab) -> + fun(Key) -> ets:next(Tab, Key) end. diff --git a/apps/emqx/src/emqx_trie.erl b/apps/emqx/src/emqx_trie.erl index 229a0e3f4..76be97d3e 100644 --- a/apps/emqx/src/emqx_trie.erl +++ b/apps/emqx/src/emqx_trie.erl @@ -21,6 +21,7 @@ %% Mnesia bootstrap -export([ mnesia/1, + wait_for_tables/0, create_session_trie/1 ]). @@ -105,6 +106,10 @@ create_session_trie(Type) -> ] ). +-spec wait_for_tables() -> ok | {error, _Reason}. +wait_for_tables() -> + mria:wait_for_tables([?TRIE]). + %%-------------------------------------------------------------------- %% Topics APIs %%-------------------------------------------------------------------- diff --git a/apps/emqx/src/emqx_trie_search.erl b/apps/emqx/src/emqx_trie_search.erl new file mode 100644 index 000000000..c8c088b58 --- /dev/null +++ b/apps/emqx/src/emqx_trie_search.erl @@ -0,0 +1,355 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +%% @doc Topic index for matching topics to topic filters. +%% +%% Works on top of a ordered collection data set, such as ETS ordered_set table. +%% Keys are tuples constructed from parsed topic filters and record IDs, +%% wrapped in a tuple to order them strictly greater than unit tuple (`{}`). +%% Existing table may be used if existing keys will not collide with index keys. +%% +%% Designed to effectively answer questions like: +%% 1. Does any topic filter match given topic? +%% 2. Which records are associated with topic filters matching given topic? +%% 3. Which topic filters match given topic? +%% 4. Which record IDs are associated with topic filters matching given topic? +%% +%% Trie-search algorithm: +%% +%% Given a 3-level topic (e.g. a/b/c), if we leave out '#' for now, +%% all possible subscriptions of a/b/c can be enumerated as below: +%% +%% a/b/c +%% a/b/+ +%% a/+/c <--- subscribed +%% a/+/+ +%% +/b/c <--- subscribed +%% +/b/+ +%% +/+/c +%% +/+/+ <--- start searching upward from here +%% +%% Let's name this search space "Space1". +%% If we brute-force it, the scope would be 8 (2^3). +%% Meaning this has O(2^N) complexity (N being the level of topics). +%% +%% This clearly isn't going to work. +%% Should we then try to enumerate all subscribers instead? +%% If there are also other subscriptions, e.g. "+/x/y" and "+/b/0" +%% +%% a/+/c <--- match of a/b/c +%% +/x/n +%% ... +%% +/x/2 +%% +/x/1 +%% +/b/c <--- match of a/b/c +%% +/b/1 +%% +/b/0 +%% +%% Let's name it "Space2". +%% +%% This has O(M * L) complexity (M being the total number of subscriptions, +%% and L being the number of topic levels). +%% This is usually a lot smaller than "Space1", but still not very effective +%% if the collection size is e.g. 1 million. 
+%% +%% To make it more effective, we'll need to combine the two algorithms: +%% Use the ordered subscription topics' prefixes as starting points to make +%% guesses about whether or not the next word can be a '+', and skip-over +%% to the next possible match. +%% +%% NOTE: A prerequisite of the ordered collection is, it should be able +%% to find the *immediate-next* topic/filter with a given prefix. +%% +%% In the above example, we start from "+/b/0". When comparing "+/b/0" +%% with "a/b/c", we know the matching prefix is "+/b", meaning we can +%% start guessing if the next word is '+' or 'c': +%% * It can't be '+' because '+' < '0' +%% * It might be 'c' because 'c' > '0' +%% +%% So, we try to jump to the next topic which has a prefix of "+/b/c" +%% (this effectively means skipping over "+/b/1"). +%% +%% After "+/b/c" is found to be a matching filter, we move up: +%% * The next possible match is "a/+/+" according to Space1 +%% * The next subscription is "+/x/1" according to Space2 +%% +%% "a/+/+" is lexicographically greater than "+/x/+", so let's jump to +%% the immediate-next of 'a/+/+', which is "a/+/c", allowing us to skip +%% over all the ones starting with "+/x". +%% +%% If we take '#' into consideration, it's only one extra comparison to see +%% if a filter ends with '#'. +%% +%% In summary, the complexity of this algorithm is O(N * L) +%% N being the number of total matches, and L being the level of the topic. + +-module(emqx_trie_search). + +-export([make_key/2, filter/1]). +-export([match/2, matches/3, get_id/1, get_topic/1]). +-export_type([key/1, word/0, words/0, nextf/0, opts/0]). + +-define(END, '$end_of_table'). + +-type word() :: binary() | '+' | '#'. +-type words() :: [word()]. +-type base_key() :: {binary() | [word()], {}}. +-type key(ID) :: {binary() | [word()], {ID}}. +-type nextf() :: fun((key(_) | base_key()) -> ?END | key(_)). +-type opts() :: [unique | return_first]. + +%% @doc Make a search-key for the given topic. 
+-spec make_key(emqx_types:topic() | words(), ID) -> key(ID). +make_key(Topic, ID) when is_binary(Topic) -> + case filter(Topic) of + Words when is_list(Words) -> + %% it's a wildcard + {Words, {ID}}; + false -> + %% Not a wildcard. We do not split the topic + %% because they can be found with direct lookups. + %% it is also more compact in memory. + {Topic, {ID}} + end; +make_key(Words, ID) when is_list(Words) -> + {Words, {ID}}. + +%% @doc Parse a topic filter into a list of words. Returns `false` if it's not a filter. +-spec filter(emqx_types:topic()) -> words() | false. +filter(Topic) -> + Words = filter_words(Topic), + emqx_topic:wildcard(Words) andalso Words. + +%% @doc Extract record ID from the match. +-spec get_id(key(ID)) -> ID. +get_id({_Filter, {ID}}) -> + ID. + +%% @doc Extract topic (or topic filter) from the match. +-spec get_topic(key(_ID)) -> emqx_types:topic(). +get_topic({Filter, _ID}) when is_list(Filter) -> + emqx_topic:join(Filter); +get_topic({Topic, _ID}) -> + Topic. + +-compile({inline, [base/1, move_up/2, match_add/2, compare/3]}). + +%% Make the base-key which can be used to locate the desired search target. +base(Prefix) -> + {Prefix, {}}. + +base_init([W = <<"$", _/bytes>> | _]) -> + base([W]); +base_init(_) -> + base([]). + +%% Move the search target to the key next to the given Base. +move_up(NextF, Base) -> + NextF(Base). + +%% @doc Match given topic against the index and return the first match, or `false` if +%% no match is found. +-spec match(emqx_types:topic(), nextf()) -> false | key(_). +match(Topic, NextF) -> + try search(Topic, NextF, [return_first]) of + _ -> false + catch + throw:{first, Res} -> + Res + end. + +%% @doc Match given topic against the index and return _all_ matches. +%% If `unique` option is given, return only unique matches by record ID. +-spec matches(emqx_types:topic(), nextf(), opts()) -> [key(_)]. +matches(Topic, NextF, Opts) -> + search(Topic, NextF, Opts). 
+ +%% @doc Entrypoint of the search for a given topic. +search(Topic, NextF, Opts) -> + Words = topic_words(Topic), + Base = base_init(Words), + ORetFirst = proplists:get_bool(return_first, Opts), + OUnique = proplists:get_bool(unique, Opts), + Acc0 = + case ORetFirst of + true -> + first; + false when OUnique -> + #{}; + false -> + [] + end, + Matches = + case search_new(Words, Base, NextF, Acc0) of + {Cursor, Acc} -> + match_topics(Topic, Cursor, NextF, Acc); + Acc -> + Acc + end, + case is_map(Matches) of + true -> + maps:values(Matches); + false -> + Matches + end. + +%% The recursive entrypoint of the trie-search algorithm. +%% Always start from the initial prefix and words. +search_new(Words0, NewBase, NextF, Acc) -> + case move_up(NextF, NewBase) of + ?END -> + Acc; + Cursor -> + search_up(Words0, Cursor, NextF, Acc) + end. + +%% Search to the bigger end of ordered collection of topics and topic-filters. +search_up(Words, {Filter, _} = Cursor, NextF, Acc) -> + case compare(Filter, Words, 0) of + match_full -> + search_new(Words, Cursor, NextF, match_add(Cursor, Acc)); + match_prefix -> + search_new(Words, Cursor, NextF, Acc); + lower -> + {Cursor, Acc}; + {Pos, SeekWord} -> + % NOTE + % This is a seek instruction. It means we need to take `Pos` words + % from the current topic filter and attach `SeekWord` to the end of it. + NewBase = base(seek(Pos, SeekWord, Filter)), + search_new(Words, NewBase, NextF, Acc) + end. + +seek(_Pos = 0, SeekWord, _FilterTail) -> + [SeekWord]; +seek(Pos, SeekWord, [FilterWord | Rest]) -> + [FilterWord | seek(Pos - 1, SeekWord, Rest)]. + +compare(NotFilter, _, _) when is_binary(NotFilter) -> + lower; +compare([], [], _) -> + % NOTE + % Topic: a/b/c/d + % Filter: a/+/+/d + % We matched the topic to a topic filter exactly (possibly with pluses). + % We include it in the result set, and now need to try next entry in the table. 
+ % Closest possible next entries that we must not miss: + % * a/+/+/d (same topic but a different ID) + % * a/+/+/d/# (also a match) + match_full; +compare([], _Words, _) -> + % NOTE + % Topic: a/b/c/d + % Filter: a/+/c + % We found out that a topic filter is a prefix of the topic (possibly with pluses). + % We discard it, and now need to try next entry in the table. + % Closest possible next entries that we must not miss: + % * a/+/c/# (which is a match) + % * a/+/c/+ (also a match) + match_prefix; +compare(['#'], _Words, _) -> + % NOTE + % Topic: a/b/c/d + % Filter: a/+/+/d/# or just a/# + % We matched the topic to a topic filter with wildcard (possibly with pluses). + % We include it in the result set, and now need to try next entry in the table. + % Closest possible next entries that we must not miss: + % * a/+/+/d/# (same topic but a different ID) + match_full; +compare(['+' | TF], [HW | TW], Pos) -> + case compare(TF, TW, Pos + 1) of + lower -> + % NOTE + % Topic: a/b/c/d + % Filter: a/+/+/e/1 or a/b/+/d/1 + % The topic is lower than a topic filter. But we're at the `+` position, + % so we emit a backtrack point to seek to: + % Seek: {2, c} + % We skip over part of search space, and seek to the next possible match: + % Next: a/+/c + {Pos, HW}; + Other -> + % NOTE + % It's either already a backtrack point, emitted from the last `+` + % position or just a seek / match. In both cases we just pass it + % through. + Other + end; +compare([HW | TF], [HW | TW], Pos) -> + % NOTE + % Skip over the same word in both topic and filter, keeping the last backtrack point. + compare(TF, TW, Pos + 1); +compare([HF | _], [HW | _], _) when HF > HW -> + % NOTE + % Topic: a/b/c/d + % Filter: a/b/c/e/1 or a/b/+/e + % The topic is lower than a topic filter. In the first case there's nowhere to + % backtrack to, we're out of the search space. In the second case there's a `+` + % on 3rd level, we'll seek up from there. 
+ lower; +compare([_ | _], [], _) -> + % NOTE + % Topic: a/b/c/d + % Filter: a/b/c/d/1 or a/+/c/d/1 + % The topic is lower than a topic filter (since it's shorter). In the first case + % there's nowhere to backtrack to, we're out of the search space. In the second case + % there's a `+` on 2nd level, we'll seek up from there. + lower; +compare([_ | _], [HW | _], Pos) -> + % NOTE + % Topic: a/b/c/d + % Filter: a/+/+/0/1/2 + % Topic is higher than the filter, we need to skip over to the next possible filter. + % Seek: {3, d} + % Next: a/+/+/d + {Pos, HW}. + +match_add(K = {_Filter, ID}, Acc = #{}) -> + % NOTE: ensuring uniqueness by record ID + Acc#{ID => K}; +match_add(K, Acc) when is_list(Acc) -> + [K | Acc]; +match_add(K, first) -> + throw({first, K}). + +-spec filter_words(emqx_types:topic()) -> [word()]. +filter_words(Topic) when is_binary(Topic) -> + % NOTE + % This is almost identical to `emqx_topic:words/1`, but it doesn't convert empty + % tokens to ''. This is needed to keep ordering of words consistent with what + % `match_filter/3` expects. + [word(W, filter) || W <- emqx_topic:tokens(Topic)]. + +-spec topic_words(emqx_types:topic()) -> [binary()]. +topic_words(Topic) when is_binary(Topic) -> + [word(W, topic) || W <- emqx_topic:tokens(Topic)]. + +word(<<"+">>, topic) -> error(badarg); +word(<<"#">>, topic) -> error(badarg); +word(<<"+">>, filter) -> '+'; +word(<<"#">>, filter) -> '#'; +word(Bin, _) -> Bin. + +%% match non-wildcard topics +match_topics(Topic, {Topic, _} = Key, NextF, Acc) -> + %% found a topic match + match_topics(Topic, NextF(Key), NextF, match_add(Key, Acc)); +match_topics(Topic, {F, _}, NextF, Acc) when F < Topic -> + %% the last key is a filter, try jump to the topic + match_topics(Topic, NextF(base(Topic)), NextF, Acc); +match_topics(_Topic, _Key, _NextF, Acc) -> + %% gone pass the topic + Acc. 
diff --git a/apps/emqx/src/emqx_ws_connection.erl b/apps/emqx/src/emqx_ws_connection.erl index 93c29bf4f..37ce72d74 100644 --- a/apps/emqx/src/emqx_ws_connection.erl +++ b/apps/emqx/src/emqx_ws_connection.erl @@ -531,7 +531,6 @@ handle_info({event, connected}, State = #state{channel = Channel}) -> handle_info({event, disconnected}, State = #state{channel = Channel}) -> ClientId = emqx_channel:info(clientid, Channel), emqx_cm:set_chan_info(ClientId, info(State)), - emqx_cm:connection_closed(ClientId), return(State); handle_info({event, _Other}, State = #state{channel = Channel}) -> ClientId = emqx_channel:info(clientid, Channel), diff --git a/apps/emqx/test/emqx_broker_SUITE.erl b/apps/emqx/test/emqx_broker_SUITE.erl index 52cf230ff..ca464ee27 100644 --- a/apps/emqx/test/emqx_broker_SUITE.erl +++ b/apps/emqx/test/emqx_broker_SUITE.erl @@ -26,6 +26,7 @@ -include_lib("snabbkaffe/include/snabbkaffe.hrl"). -include_lib("emqx/include/emqx.hrl"). +-include_lib("emqx/include/emqx_hooks.hrl"). -include_lib("emqx/include/emqx_mqtt.hrl"). 
all() -> @@ -695,28 +696,17 @@ t_connect_client_never_negative({'end', _Config}) -> t_connack_auth_error({init, Config}) -> process_flag(trap_exit, true), - ChainName = 'mqtt:global', - AuthenticatorConfig = #{ - enable => true, - mechanism => password_based, - backend => built_in_database, - user_id_type => username, - password_hash_algorithm => #{ - name => plain, - salt_position => disable - }, - user_group => <<"global:mqtt">> - }, - ok = emqx_authentication:register_providers( - [{{password_based, built_in_database}, emqx_authentication_SUITE}] + emqx_hooks:put( + 'client.authenticate', + {?MODULE, authenticate_deny, []}, + ?HP_AUTHN ), - emqx_authentication:initialize_authentication(ChainName, AuthenticatorConfig), Config; t_connack_auth_error({'end', _Config}) -> - ChainName = 'mqtt:global', - AuthenticatorID = <<"password_based:built_in_database">>, - ok = emqx_authentication:deregister_provider({password_based, built_in_database}), - ok = emqx_authentication:delete_authenticator(ChainName, AuthenticatorID), + emqx_hooks:del( + 'client.authenticate', + {?MODULE, authenticate_deny, []} + ), ok; t_connack_auth_error(Config) when is_list(Config) -> %% MQTT 3.1 @@ -748,6 +738,9 @@ t_handle_in_empty_client_subscribe_hook(Config) when is_list(Config) -> emqtt:disconnect(C) end. +authenticate_deny(_Credentials, _Default) -> + {stop, {error, bad_username_or_password}}. + wait_for_events(Action, Kinds) -> wait_for_events(Action, Kinds, 500). diff --git a/apps/emqx/test/emqx_common_test_helpers.erl b/apps/emqx/test/emqx_common_test_helpers.erl index 7f1fe4628..3645fa06b 100644 --- a/apps/emqx/test/emqx_common_test_helpers.erl +++ b/apps/emqx/test/emqx_common_test_helpers.erl @@ -16,8 +16,6 @@ -module(emqx_common_test_helpers). --include_lib("emqx/include/emqx_authentication.hrl"). - -type special_config_handler() :: fun(). -type apps() :: list(atom()). 
@@ -351,7 +349,7 @@ stop_apps(Apps, Opts) -> %% to avoid inter-suite flakiness application:unset_env(emqx, config_loader), application:unset_env(emqx, boot_modules), - persistent_term:erase(?EMQX_AUTHENTICATION_SCHEMA_MODULE_PT_KEY), + emqx_schema_hooks:erase_injections(), case Opts of #{erase_all_configs := false} -> %% FIXME: this means inter-suite or inter-test dependencies diff --git a/apps/emqx/test/emqx_config_handler_SUITE.erl b/apps/emqx/test/emqx_config_handler_SUITE.erl index b13da79f6..bb91bcbe4 100644 --- a/apps/emqx/test/emqx_config_handler_SUITE.erl +++ b/apps/emqx/test/emqx_config_handler_SUITE.erl @@ -26,7 +26,8 @@ -include_lib("eunit/include/eunit.hrl"). -include_lib("common_test/include/ct.hrl"). -all() -> emqx_common_test_helpers:all(?MODULE). +all() -> + emqx_common_test_helpers:all(?MODULE). init_per_suite(Config) -> emqx_common_test_helpers:boot_modules(all), @@ -223,8 +224,8 @@ t_callback_crash(_Config) -> ok = emqx_config_handler:remove_handler(CrashPath), ok. -t_pre_callback_error(_Config) -> - callback_error( +t_pre_assert_update_result(_Config) -> + assert_update_result( [sysmon, os, mem_check_interval], <<"100s">>, {error, {pre_config_update, ?MODULE, pre_config_update_error}} @@ -232,13 +233,88 @@ t_pre_callback_error(_Config) -> ok. t_post_update_error(_Config) -> - callback_error( + assert_update_result( [sysmon, os, sysmem_high_watermark], <<"60%">>, {error, {post_config_update, ?MODULE, post_config_update_error}} ), ok. +t_post_update_propagate_error_wkey(_Config) -> + Conf0 = emqx_config:get_raw([sysmon]), + Conf1 = emqx_utils_maps:deep_put([<<"os">>, <<"sysmem_high_watermark">>], Conf0, <<"60%">>), + assert_update_result( + [ + [sysmon, '?', sysmem_high_watermark], + [sysmon] + ], + [sysmon], + Conf1, + {error, {post_config_update, ?MODULE, post_config_update_error}} + ), + ok. 
+ +t_post_update_propagate_error_key(_Config) -> + Conf0 = emqx_config:get_raw([sysmon]), + Conf1 = emqx_utils_maps:deep_put([<<"os">>, <<"sysmem_high_watermark">>], Conf0, <<"60%">>), + assert_update_result( + [ + [sysmon, os, sysmem_high_watermark], + [sysmon] + ], + [sysmon], + Conf1, + {error, {post_config_update, ?MODULE, post_config_update_error}} + ), + ok. + +t_pre_update_propagate_error_wkey(_Config) -> + Conf0 = emqx_config:get_raw([sysmon]), + Conf1 = emqx_utils_maps:deep_put([<<"os">>, <<"mem_check_interval">>], Conf0, <<"70s">>), + assert_update_result( + [ + [sysmon, '?', mem_check_interval], + [sysmon] + ], + [sysmon], + Conf1, + {error, {pre_config_update, ?MODULE, pre_config_update_error}} + ), + ok. + +t_pre_update_propagate_error_key(_Config) -> + Conf0 = emqx_config:get_raw([sysmon]), + Conf1 = emqx_utils_maps:deep_put([<<"os">>, <<"mem_check_interval">>], Conf0, <<"70s">>), + assert_update_result( + [ + [sysmon, os, mem_check_interval], + [sysmon] + ], + [sysmon], + Conf1, + {error, {pre_config_update, ?MODULE, pre_config_update_error}} + ), + ok. + +t_pre_update_propagate_key_rewrite(_Config) -> + Conf0 = emqx_config:get_raw([sysmon]), + Conf1 = emqx_utils_maps:deep_put([<<"os">>, <<"cpu_check_interval">>], Conf0, <<"333s">>), + with_update_result( + [ + [sysmon, '?', cpu_check_interval], + [sysmon] + ], + [sysmon], + Conf1, + fun(_, Result) -> + ?assertMatch( + {ok, #{config := #{os := #{cpu_check_interval := 444000}}}}, + Result + ) + end + ), + ok. + t_handler_root() -> %% Don't rely on default emqx_config_handler's merge behaviour. RootKey = [], @@ -295,6 +371,17 @@ pre_config_update([sysmon, os, sysmem_high_watermark], UpdateReq, _RawConf) -> pre_config_update([sysmon, os, mem_check_interval], _UpdateReq, _RawConf) -> {error, pre_config_update_error}. 
+propagated_pre_config_update( + [<<"sysmon">>, <<"os">>, <<"cpu_check_interval">>], <<"333s">>, _RawConf +) -> + {ok, <<"444s">>}; +propagated_pre_config_update( + [<<"sysmon">>, <<"os">>, <<"mem_check_interval">>], _UpdateReq, _RawConf +) -> + {error, pre_config_update_error}; +propagated_pre_config_update(_ConfKeyPath, _UpdateReq, _RawConf) -> + ok. + post_config_update([sysmon], _UpdateReq, _NewConf, _OldConf, _AppEnvs) -> {ok, ok}; post_config_update([sysmon, os], _UpdateReq, _NewConf, _OldConf, _AppEnvs) -> @@ -308,6 +395,13 @@ post_config_update([sysmon, os, cpu_high_watermark], _UpdateReq, _NewConf, _OldC post_config_update([sysmon, os, sysmem_high_watermark], _UpdateReq, _NewConf, _OldConf, _AppEnvs) -> {error, post_config_update_error}. +propagated_post_config_update( + [sysmon, os, sysmem_high_watermark], _UpdateReq, _NewConf, _OldConf, _AppEnvs +) -> + {error, post_config_update_error}; +propagated_post_config_update(_ConfKeyPath, _UpdateReq, _NewConf, _OldConf, _AppEnvs) -> + ok. + wait_for_new_pid() -> case erlang:whereis(emqx_config_handler) of undefined -> @@ -317,20 +411,34 @@ wait_for_new_pid() -> Pid end. -callback_error(FailedPath, Update, ExpectError) -> +assert_update_result(FailedPath, Update, Expect) -> + assert_update_result([FailedPath], FailedPath, Update, Expect). + +assert_update_result(Paths, UpdatePath, Update, Expect) -> + with_update_result(Paths, UpdatePath, Update, fun(Old, Result) -> + case Expect of + {error, {post_config_update, ?MODULE, post_config_update_error}} -> + ?assertMatch( + {error, {post_config_update, ?MODULE, {post_config_update_error, _}}}, Result + ); + _ -> + ?assertEqual(Expect, Result) + end, + New = emqx:get_raw_config(UpdatePath, undefined), + ?assertEqual(Old, New) + end). 
+ +with_update_result(Paths, UpdatePath, Update, Fun) -> + ok = lists:foreach( + fun(Path) -> emqx_config_handler:add_handler(Path, ?MODULE) end, + Paths + ), Opts = #{rawconf_with_defaults => true}, - ok = emqx_config_handler:add_handler(FailedPath, ?MODULE), - Old = emqx:get_raw_config(FailedPath, undefined), - Error = emqx:update_config(FailedPath, Update, Opts), - case ExpectError of - {error, {post_config_update, ?MODULE, post_config_update_error}} -> - ?assertMatch( - {error, {post_config_update, ?MODULE, {post_config_update_error, _}}}, Error - ); - _ -> - ?assertEqual(ExpectError, Error) - end, - New = emqx:get_raw_config(FailedPath, undefined), - ?assertEqual(Old, New), - ok = emqx_config_handler:remove_handler(FailedPath), + Old = emqx:get_raw_config(UpdatePath, undefined), + Result = emqx:update_config(UpdatePath, Update, Opts), + _ = Fun(Old, Result), + ok = lists:foreach( + fun(Path) -> emqx_config_handler:remove_handler(Path) end, + Paths + ), ok. diff --git a/apps/emqx/test/emqx_connection_SUITE.erl b/apps/emqx/test/emqx_connection_SUITE.erl index 2a96594e1..01fe3c3db 100644 --- a/apps/emqx/test/emqx_connection_SUITE.erl +++ b/apps/emqx/test/emqx_connection_SUITE.erl @@ -274,7 +274,6 @@ t_handle_msg_event(_) -> ok = meck:expect(emqx_cm, register_channel, fun(_, _, _) -> ok end), ok = meck:expect(emqx_cm, insert_channel_info, fun(_, _, _) -> ok end), ok = meck:expect(emqx_cm, set_chan_info, fun(_, _) -> ok end), - ok = meck:expect(emqx_cm, connection_closed, fun(_) -> ok end), ?assertEqual(ok, handle_msg({event, connected}, st())), ?assertMatch({ok, _St}, handle_msg({event, disconnected}, st())), ?assertMatch({ok, _St}, handle_msg({event, undefined}, st())). diff --git a/apps/emqx/test/emqx_cth_cluster.erl b/apps/emqx/test/emqx_cth_cluster.erl index 3f8ea9a89..b41586518 100644 --- a/apps/emqx/test/emqx_cth_cluster.erl +++ b/apps/emqx/test/emqx_cth_cluster.erl @@ -41,6 +41,8 @@ -export([start/2]). -export([stop/1, stop_node/1]). 
+-export([start_bare_node/2]). + -export([share_load_module/2]). -export([node_name/1, mk_nodespecs/2]). -export([start_apps/2, set_node_opts/2]). @@ -282,9 +284,6 @@ allocate_listener_ports(Types, Spec) -> start_node_init(Spec = #{name := Node}) -> Node = start_bare_node(Node, Spec), - pong = net_adm:ping(Node), - % Preserve node spec right on the remote node - ok = set_node_opts(Node, Spec), % Make it possible to call `ct:pal` and friends (if running under rebar3) _ = share_load_module(Node, cthr), % Enable snabbkaffe trace forwarding @@ -392,7 +391,8 @@ listener_port(BasePort, wss) -> %% -start_bare_node(Name, #{driver := ct_slave}) -> +-spec start_bare_node(atom(), map()) -> node(). +start_bare_node(Name, Spec = #{driver := ct_slave}) -> {ok, Node} = ct_slave:start( node_name(Name), [ @@ -404,9 +404,15 @@ start_bare_node(Name, #{driver := ct_slave}) -> {env, []} ] ), - Node; -start_bare_node(Name, #{driver := slave}) -> + init_bare_node(Node, Spec); +start_bare_node(Name, Spec = #{driver := slave}) -> {ok, Node} = slave:start_link(host(), Name, ebin_path()), + init_bare_node(Node, Spec). + +init_bare_node(Node, Spec) -> + pong = net_adm:ping(Node), + % Preserve node spec right on the remote node + ok = set_node_opts(Node, Spec), Node. erl_flags() -> @@ -429,6 +435,7 @@ share_load_module(Node, Module) -> error end. +-spec node_name(atom()) -> node(). node_name(Name) -> case string:tokens(atom_to_list(Name), "@") of [_Name, _Host] -> diff --git a/apps/emqx/test/emqx_cth_suite.erl b/apps/emqx/test/emqx_cth_suite.erl index 090bca762..dddd096fa 100644 --- a/apps/emqx/test/emqx_cth_suite.erl +++ b/apps/emqx/test/emqx_cth_suite.erl @@ -58,7 +58,7 @@ -module(emqx_cth_suite). -include_lib("common_test/include/ct.hrl"). --include_lib("emqx/include/emqx_authentication.hrl"). +-include_lib("emqx/include/emqx_access_control.hrl"). -export([start/2]). -export([stop/1]). 
@@ -306,7 +306,7 @@ merge_envs(false, E2) -> merge_envs(_E, false) -> []; merge_envs(E1, E2) -> - E1 ++ E2. + lists:foldl(fun({K, _} = Opt, EAcc) -> lists:keystore(K, 1, EAcc, Opt) end, E1, E2). merge_config(false, C2) -> C2; @@ -444,12 +444,12 @@ stop_apps(Apps) -> verify_clean_suite_state(#{work_dir := WorkDir}) -> {ok, []} = file:list_dir(WorkDir), - none = persistent_term:get(?EMQX_AUTHENTICATION_SCHEMA_MODULE_PT_KEY, none), + false = emqx_schema_hooks:any_injections(), [] = emqx_config:get_root_names(), ok. clean_suite_state() -> - _ = persistent_term:erase(?EMQX_AUTHENTICATION_SCHEMA_MODULE_PT_KEY), + _ = emqx_schema_hooks:erase_injections(), _ = emqx_config:erase_all(), ok. diff --git a/apps/emqx/test/emqx_listeners_update_SUITE.erl b/apps/emqx/test/emqx_listeners_update_SUITE.erl index c16a26f3a..c1fdff2e2 100644 --- a/apps/emqx/test/emqx_listeners_update_SUITE.erl +++ b/apps/emqx/test/emqx_listeners_update_SUITE.erl @@ -116,6 +116,172 @@ t_update_conf(_Conf) -> ?assert(is_running('wss:default')), ok. 
+t_update_tcp_keepalive_conf(_Conf) -> + Keepalive = <<"240,30,5">>, + KeepaliveStr = binary_to_list(Keepalive), + Raw = emqx:get_raw_config(?LISTENERS), + Raw1 = emqx_utils_maps:deep_put( + [<<"tcp">>, <<"default">>, <<"bind">>], Raw, <<"127.0.0.1:1883">> + ), + Raw2 = emqx_utils_maps:deep_put( + [<<"tcp">>, <<"default">>, <<"tcp_options">>, <<"keepalive">>], Raw1, Keepalive + ), + ?assertMatch({ok, _}, emqx:update_config(?LISTENERS, Raw2)), + ?assertMatch( + #{ + <<"tcp">> := #{ + <<"default">> := #{ + <<"bind">> := <<"127.0.0.1:1883">>, + <<"tcp_options">> := #{<<"keepalive">> := Keepalive} + } + } + }, + emqx:get_raw_config(?LISTENERS) + ), + ?assertMatch( + #{tcp := #{default := #{tcp_options := #{keepalive := KeepaliveStr}}}}, + emqx:get_config(?LISTENERS) + ), + Keepalive2 = <<" 241, 31, 6 ">>, + KeepaliveStr2 = binary_to_list(Keepalive2), + Raw3 = emqx_utils_maps:deep_put( + [<<"tcp">>, <<"default">>, <<"tcp_options">>, <<"keepalive">>], Raw1, Keepalive2 + ), + ?assertMatch({ok, _}, emqx:update_config(?LISTENERS, Raw3)), + ?assertMatch( + #{ + <<"tcp">> := #{ + <<"default">> := #{ + <<"bind">> := <<"127.0.0.1:1883">>, + <<"tcp_options">> := #{<<"keepalive">> := Keepalive2} + } + } + }, + emqx:get_raw_config(?LISTENERS) + ), + ?assertMatch( + #{tcp := #{default := #{tcp_options := #{keepalive := KeepaliveStr2}}}}, + emqx:get_config(?LISTENERS) + ), + ok. 
+ +t_update_empty_ssl_options_conf(_Conf) -> + Raw = emqx:get_raw_config(?LISTENERS), + Raw1 = emqx_utils_maps:deep_put( + [<<"tcp">>, <<"default">>, <<"bind">>], Raw, <<"127.0.0.1:1883">> + ), + Raw2 = emqx_utils_maps:deep_put( + [<<"ssl">>, <<"default">>, <<"bind">>], Raw1, <<"127.0.0.1:8883">> + ), + Raw3 = emqx_utils_maps:deep_put( + [<<"ws">>, <<"default">>, <<"bind">>], Raw2, <<"0.0.0.0:8083">> + ), + Raw4 = emqx_utils_maps:deep_put( + [<<"wss">>, <<"default">>, <<"bind">>], Raw3, <<"127.0.0.1:8084">> + ), + Raw5 = emqx_utils_maps:deep_put( + [<<"ssl">>, <<"default">>, <<"ssl_options">>, <<"cacertfile">>], Raw4, <<"">> + ), + Raw6 = emqx_utils_maps:deep_put( + [<<"wss">>, <<"default">>, <<"ssl_options">>, <<"cacertfile">>], Raw5, <<"">> + ), + Raw7 = emqx_utils_maps:deep_put( + [<<"wss">>, <<"default">>, <<"ssl_options">>, <<"ciphers">>], Raw6, <<"">> + ), + Ciphers = <<"TLS_AES_256_GCM_SHA384, TLS_AES_128_GCM_SHA256 ">>, + Raw8 = emqx_utils_maps:deep_put( + [<<"ssl">>, <<"default">>, <<"ssl_options">>, <<"ciphers">>], + Raw7, + Ciphers + ), + ?assertMatch({ok, _}, emqx:update_config(?LISTENERS, Raw8)), + ?assertMatch( + #{ + <<"tcp">> := #{<<"default">> := #{<<"bind">> := <<"127.0.0.1:1883">>}}, + <<"ssl">> := #{ + <<"default">> := #{ + <<"bind">> := <<"127.0.0.1:8883">>, + <<"ssl_options">> := #{ + <<"cacertfile">> := <<"">>, + <<"ciphers">> := Ciphers + } + } + }, + <<"ws">> := #{<<"default">> := #{<<"bind">> := <<"0.0.0.0:8083">>}}, + <<"wss">> := #{ + <<"default">> := #{ + <<"bind">> := <<"127.0.0.1:8084">>, + <<"ssl_options">> := #{ + <<"cacertfile">> := <<"">>, + <<"ciphers">> := <<"">> + } + } + } + }, + emqx:get_raw_config(?LISTENERS) + ), + BindTcp = {{127, 0, 0, 1}, 1883}, + BindSsl = {{127, 0, 0, 1}, 8883}, + BindWs = {{0, 0, 0, 0}, 8083}, + BindWss = {{127, 0, 0, 1}, 8084}, + ?assertMatch( + #{ + tcp := #{default := #{bind := BindTcp}}, + ssl := #{ + default := #{ + bind := BindSsl, + ssl_options := #{ + cacertfile := <<"">>, + ciphers := 
["TLS_AES_256_GCM_SHA384", "TLS_AES_128_GCM_SHA256"] + } + } + }, + ws := #{default := #{bind := BindWs}}, + wss := #{ + default := #{ + bind := BindWss, + ssl_options := #{ + cacertfile := <<"">>, + ciphers := [] + } + } + } + }, + emqx:get_config(?LISTENERS) + ), + ?assertError(not_found, current_conns(<<"tcp:default">>, {{0, 0, 0, 0}, 1883})), + ?assertError(not_found, current_conns(<<"ssl:default">>, {{0, 0, 0, 0}, 8883})), + + ?assertEqual(0, current_conns(<<"tcp:default">>, BindTcp)), + ?assertEqual(0, current_conns(<<"ssl:default">>, BindSsl)), + + ?assertEqual({0, 0, 0, 0}, proplists:get_value(ip, ranch:info('ws:default'))), + ?assertEqual({127, 0, 0, 1}, proplists:get_value(ip, ranch:info('wss:default'))), + ?assert(is_running('ws:default')), + ?assert(is_running('wss:default')), + + Raw9 = emqx_utils_maps:deep_put( + [<<"ssl">>, <<"default">>, <<"ssl_options">>, <<"ciphers">>], Raw7, [ + "TLS_AES_256_GCM_SHA384", + "TLS_AES_128_GCM_SHA256", + "TLS_CHACHA20_POLY1305_SHA256" + ] + ), + ?assertMatch({ok, _}, emqx:update_config(?LISTENERS, Raw9)), + + BadRaw = emqx_utils_maps:deep_put( + [<<"ssl">>, <<"default">>, <<"ssl_options">>, <<"keyfile">>], Raw4, <<"">> + ), + ?assertMatch( + {error, + {bad_ssl_config, #{ + reason := pem_file_path_or_string_is_required, + which_options := [[<<"keyfile">>]] + }}}, + emqx:update_config(?LISTENERS, BadRaw) + ), + ok. 
+ t_add_delete_conf(_Conf) -> Raw = emqx:get_raw_config(?LISTENERS), %% add diff --git a/apps/emqx/test/emqx_metrics_SUITE.erl b/apps/emqx/test/emqx_metrics_SUITE.erl index 45a14a6f7..c9c384b44 100644 --- a/apps/emqx/test/emqx_metrics_SUITE.erl +++ b/apps/emqx/test/emqx_metrics_SUITE.erl @@ -122,6 +122,17 @@ t_inc_sent(_) -> with_metrics_server( fun() -> ok = emqx_metrics:inc_sent(?CONNACK_PACKET(0)), + ok = emqx_metrics:inc_sent(?CONNACK_PACKET(0, 1)), + ok = emqx_metrics:inc_sent( + ?CONNACK_PACKET(0, 1, #{ + 'Maximum-Packet-Size' => 1048576, + 'Retain-Available' => 1, + 'Shared-Subscription-Available' => 1, + 'Subscription-Identifier-Available' => 1, + 'Topic-Alias-Maximum' => 65535, + 'Wildcard-Subscription-Available' => 1 + }) + ), ok = emqx_metrics:inc_sent(?PUBLISH_PACKET(0, 0)), ok = emqx_metrics:inc_sent(?PUBLISH_PACKET(1, 0)), ok = emqx_metrics:inc_sent(?PUBLISH_PACKET(2, 0)), @@ -134,8 +145,8 @@ t_inc_sent(_) -> ok = emqx_metrics:inc_sent(?PACKET(?PINGRESP)), ok = emqx_metrics:inc_sent(?PACKET(?DISCONNECT)), ok = emqx_metrics:inc_sent(?PACKET(?AUTH)), - ?assertEqual(13, emqx_metrics:val('packets.sent')), - ?assertEqual(1, emqx_metrics:val('packets.connack.sent')), + ?assertEqual(15, emqx_metrics:val('packets.sent')), + ?assertEqual(3, emqx_metrics:val('packets.connack.sent')), ?assertEqual(3, emqx_metrics:val('messages.sent')), ?assertEqual(1, emqx_metrics:val('messages.qos0.sent')), ?assertEqual(1, emqx_metrics:val('messages.qos1.sent')), diff --git a/apps/emqx/test/emqx_quic_multistreams_SUITE.erl b/apps/emqx/test/emqx_quic_multistreams_SUITE.erl index 1b45cb669..267782ff9 100644 --- a/apps/emqx/test/emqx_quic_multistreams_SUITE.erl +++ b/apps/emqx/test/emqx_quic_multistreams_SUITE.erl @@ -1094,7 +1094,7 @@ t_multi_streams_unsub(Config) -> ?retry( _Sleep2 = 100, _Attempts2 = 50, - false = emqx_router:has_routes(Topic) + [] = emqx_router:lookup_routes(Topic) ), case emqtt:publish_via(C, PubVia, Topic, #{}, <<6, 7, 8, 9>>, [{qos, PubQos}]) of diff --git 
a/apps/emqx/test/emqx_router_SUITE.erl b/apps/emqx/test/emqx_router_SUITE.erl index 067f11634..1128112ff 100644 --- a/apps/emqx/test/emqx_router_SUITE.erl +++ b/apps/emqx/test/emqx_router_SUITE.erl @@ -26,24 +26,37 @@ -define(R, emqx_router). -all() -> emqx_common_test_helpers:all(?MODULE). - -init_per_suite(Config) -> - PrevBootModules = application:get_env(emqx, boot_modules), - emqx_common_test_helpers:boot_modules([router]), - emqx_common_test_helpers:start_apps([]), +all() -> [ - {prev_boot_modules, PrevBootModules} - | Config + {group, routing_schema_v1}, + {group, routing_schema_v2} ]. -end_per_suite(Config) -> - PrevBootModules = ?config(prev_boot_modules, Config), - case PrevBootModules of - undefined -> ok; - {ok, Mods} -> emqx_common_test_helpers:boot_modules(Mods) - end, - emqx_common_test_helpers:stop_apps([]). +groups() -> + TCs = emqx_common_test_helpers:all(?MODULE), + [ + {routing_schema_v1, [], TCs}, + {routing_schema_v2, [], TCs} + ]. + +init_per_group(GroupName, Config) -> + WorkDir = filename:join([?config(priv_dir, Config), ?MODULE, GroupName]), + AppSpecs = [ + {emqx, #{ + config => mk_config(GroupName), + override_env => [{boot_modules, [router]}] + }} + ], + Apps = emqx_cth_suite:start(AppSpecs, #{work_dir => WorkDir}), + [{group_apps, Apps}, {group_name, GroupName} | Config]. + +end_per_group(_GroupName, Config) -> + ok = emqx_cth_suite:stop(?config(group_apps, Config)). + +mk_config(routing_schema_v1) -> + "broker.routing.storage_schema = v1"; +mk_config(routing_schema_v2) -> + "broker.routing.storage_schema = v2". init_per_testcase(_TestCase, Config) -> clear_tables(), @@ -52,23 +65,16 @@ init_per_testcase(_TestCase, Config) -> end_per_testcase(_TestCase, _Config) -> clear_tables(). -% t_add_route(_) -> -% error('TODO'). - -% t_do_add_route(_) -> -% error('TODO'). - % t_lookup_routes(_) -> % error('TODO'). -% t_delete_route(_) -> -% error('TODO'). - -% t_do_delete_route(_) -> -% error('TODO'). - -% t_topics(_) -> -% error('TODO'). 
+t_verify_type(Config) -> + case ?config(group_name, Config) of + routing_schema_v1 -> + ?assertEqual(v1, ?R:get_schema_vsn()); + routing_schema_v2 -> + ?assertEqual(v2, ?R:get_schema_vsn()) + end. t_add_delete(_) -> ?R:add_route(<<"a/b/c">>), @@ -79,6 +85,55 @@ t_add_delete(_) -> ?R:delete_route(<<"a/+/b">>, node()), ?assertEqual([], ?R:topics()). +t_add_delete_incremental(_) -> + ?R:add_route(<<"a/b/c">>), + ?R:add_route(<<"a/+/c">>, node()), + ?R:add_route(<<"a/+/+">>, node()), + ?R:add_route(<<"a/b/#">>, node()), + ?R:add_route(<<"#">>, node()), + ?assertEqual( + [ + #route{topic = <<"#">>, dest = node()}, + #route{topic = <<"a/+/+">>, dest = node()}, + #route{topic = <<"a/+/c">>, dest = node()}, + #route{topic = <<"a/b/#">>, dest = node()}, + #route{topic = <<"a/b/c">>, dest = node()} + ], + lists:sort(?R:match_routes(<<"a/b/c">>)) + ), + ?R:delete_route(<<"a/+/c">>, node()), + ?assertEqual( + [ + #route{topic = <<"#">>, dest = node()}, + #route{topic = <<"a/+/+">>, dest = node()}, + #route{topic = <<"a/b/#">>, dest = node()}, + #route{topic = <<"a/b/c">>, dest = node()} + ], + lists:sort(?R:match_routes(<<"a/b/c">>)) + ), + ?R:delete_route(<<"a/+/+">>, node()), + ?assertEqual( + [ + #route{topic = <<"#">>, dest = node()}, + #route{topic = <<"a/b/#">>, dest = node()}, + #route{topic = <<"a/b/c">>, dest = node()} + ], + lists:sort(?R:match_routes(<<"a/b/c">>)) + ), + ?R:delete_route(<<"a/b/#">>, node()), + ?assertEqual( + [ + #route{topic = <<"#">>, dest = node()}, + #route{topic = <<"a/b/c">>, dest = node()} + ], + lists:sort(?R:match_routes(<<"a/b/c">>)) + ), + ?R:delete_route(<<"a/b/c">>, node()), + ?assertEqual( + [#route{topic = <<"#">>, dest = node()}], + lists:sort(?R:match_routes(<<"a/b/c">>)) + ). + t_do_add_delete(_) -> ?R:do_add_route(<<"a/b/c">>), ?R:do_add_route(<<"a/b/c">>, node()), @@ -114,9 +169,9 @@ t_print_routes(_) -> ?R:add_route(<<"+/+">>), ?R:print_routes(<<"a/b">>). 
-t_has_routes(_) -> +t_has_route(_) -> ?R:add_route(<<"devices/+/messages">>, node()), - ?assert(?R:has_routes(<<"devices/+/messages">>)), + ?assert(?R:has_route(<<"devices/+/messages">>, node())), ?R:delete_route(<<"devices/+/messages">>). t_unexpected(_) -> @@ -128,5 +183,5 @@ t_unexpected(_) -> clear_tables() -> lists:foreach( fun mnesia:clear_table/1, - [?ROUTE_TAB, ?TRIE, emqx_trie_node] + [?ROUTE_TAB, ?ROUTE_TAB_FILTERS, ?TRIE] ). diff --git a/apps/emqx/test/emqx_router_helper_SUITE.erl b/apps/emqx/test/emqx_router_helper_SUITE.erl index c0796288e..889c8293c 100644 --- a/apps/emqx/test/emqx_router_helper_SUITE.erl +++ b/apps/emqx/test/emqx_router_helper_SUITE.erl @@ -26,55 +26,45 @@ -define(ROUTER_HELPER, emqx_router_helper). -all() -> emqx_common_test_helpers:all(?MODULE). +all() -> + [ + {group, routing_schema_v1}, + {group, routing_schema_v2} + ]. -init_per_suite(Config) -> - DistPid = - case net_kernel:nodename() of - ignored -> - %% calling `net_kernel:start' without `epmd' - %% running will result in a failure. - emqx_common_test_helpers:start_epmd(), - {ok, Pid} = net_kernel:start(['test@127.0.0.1', longnames]), - Pid; - _ -> - undefined - end, - emqx_common_test_helpers:start_apps([]), - [{dist_pid, DistPid} | Config]. +groups() -> + TCs = emqx_common_test_helpers:all(?MODULE), + [ + {routing_schema_v1, [], TCs}, + {routing_schema_v2, [], TCs} + ]. -end_per_suite(Config) -> - DistPid = ?config(dist_pid, Config), - case DistPid of - Pid when is_pid(Pid) -> - net_kernel:stop(); - _ -> - ok - end, - emqx_common_test_helpers:stop_apps([]). +init_per_group(GroupName, Config) -> + WorkDir = filename:join([?config(priv_dir, Config), ?MODULE, GroupName]), + AppSpecs = [{emqx, mk_config(GroupName)}], + Apps = emqx_cth_suite:start(AppSpecs, #{work_dir => WorkDir}), + [{group_name, GroupName}, {group_apps, Apps} | Config]. + +end_per_group(_GroupName, Config) -> + ok = emqx_cth_suite:stop(?config(group_apps, Config)). 
+ +mk_config(routing_schema_v1) -> + #{ + config => "broker.routing.storage_schema = v1", + override_env => [{boot_modules, [router]}] + }; +mk_config(routing_schema_v2) -> + #{ + config => "broker.routing.storage_schema = v2", + override_env => [{boot_modules, [router]}] + }. -init_per_testcase(TestCase, Config) when - TestCase =:= t_cleanup_membership_mnesia_down; - TestCase =:= t_cleanup_membership_node_down; - TestCase =:= t_cleanup_monitor_node_down --> - ok = snabbkaffe:start_trace(), - Slave = emqx_common_test_helpers:start_slave(some_node, []), - [{slave, Slave} | Config]; init_per_testcase(_TestCase, Config) -> + ok = snabbkaffe:start_trace(), Config. -end_per_testcase(TestCase, Config) when - TestCase =:= t_cleanup_membership_mnesia_down; - TestCase =:= t_cleanup_membership_node_down; - TestCase =:= t_cleanup_monitor_node_down --> - Slave = ?config(slave, Config), - emqx_common_test_helpers:stop_slave(Slave), - mria:clear_table(?ROUTE_TAB), - snabbkaffe:stop(), - ok; end_per_testcase(_TestCase, _Config) -> + ok = snabbkaffe:stop(), ok. t_monitor(_) -> @@ -89,8 +79,8 @@ t_mnesia(_) -> ?ROUTER_HELPER ! {membership, {mnesia, down, node()}}, ct:sleep(200). -t_cleanup_membership_mnesia_down(Config) -> - Slave = ?config(slave, Config), +t_cleanup_membership_mnesia_down(_Config) -> + Slave = emqx_cth_cluster:node_name(?FUNCTION_NAME), emqx_router:add_route(<<"a/b/c">>, Slave), emqx_router:add_route(<<"d/e/f">>, node()), ?assertMatch([_, _], emqx_router:topics()), @@ -101,8 +91,8 @@ t_cleanup_membership_mnesia_down(Config) -> ), ?assertEqual([<<"d/e/f">>], emqx_router:topics()). 
-t_cleanup_membership_node_down(Config) -> - Slave = ?config(slave, Config), +t_cleanup_membership_node_down(_Config) -> + Slave = emqx_cth_cluster:node_name(?FUNCTION_NAME), emqx_router:add_route(<<"a/b/c">>, Slave), emqx_router:add_route(<<"d/e/f">>, node()), ?assertMatch([_, _], emqx_router:topics()), @@ -113,13 +103,13 @@ t_cleanup_membership_node_down(Config) -> ), ?assertEqual([<<"d/e/f">>], emqx_router:topics()). -t_cleanup_monitor_node_down(Config) -> - Slave = ?config(slave, Config), +t_cleanup_monitor_node_down(_Config) -> + Slave = emqx_cth_cluster:start_bare_node(?FUNCTION_NAME, #{driver => ct_slave}), emqx_router:add_route(<<"a/b/c">>, Slave), emqx_router:add_route(<<"d/e/f">>, node()), ?assertMatch([_, _], emqx_router:topics()), ?wait_async_action( - emqx_common_test_helpers:stop_slave(Slave), + emqx_cth_cluster:stop([Slave]), #{?snk_kind := emqx_router_helper_cleanup_done, node := Slave}, 1_000 ), diff --git a/apps/emqx/test/emqx_routing_SUITE.erl b/apps/emqx/test/emqx_routing_SUITE.erl new file mode 100644 index 000000000..6966ac56a --- /dev/null +++ b/apps/emqx/test/emqx_routing_SUITE.erl @@ -0,0 +1,258 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2017-2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_routing_SUITE). + +-compile(export_all). 
+-compile(nowarn_export_all). + +-include_lib("eunit/include/eunit.hrl"). +-include_lib("common_test/include/ct.hrl"). +-include_lib("emqx/include/asserts.hrl"). + +all() -> + [ + {group, routing_schema_v1}, + {group, routing_schema_v2}, + t_routing_schema_switch_v1, + t_routing_schema_switch_v2 + ]. + +groups() -> + TCs = [ + t_cluster_routing + ], + [ + {routing_schema_v1, [], TCs}, + {routing_schema_v2, [], TCs} + ]. + +init_per_group(GroupName, Config) -> + WorkDir = filename:join([?config(priv_dir, Config), ?MODULE, GroupName]), + NodeSpecs = [ + {emqx_routing_SUITE1, #{apps => [mk_emqx_appspec(GroupName, 1)], role => core}}, + {emqx_routing_SUITE2, #{apps => [mk_emqx_appspec(GroupName, 2)], role => core}}, + {emqx_routing_SUITE3, #{apps => [mk_emqx_appspec(GroupName, 3)], role => replicant}} + ], + Nodes = emqx_cth_cluster:start(NodeSpecs, #{work_dir => WorkDir}), + [{cluster, Nodes} | Config]. + +end_per_group(_GroupName, Config) -> + emqx_cth_cluster:stop(?config(cluster, Config)). + +init_per_testcase(TC, Config) -> + WorkDir = filename:join([?config(priv_dir, Config), ?MODULE, TC]), + [{work_dir, WorkDir} | Config]. + +end_per_testcase(_TC, _Config) -> + ok. + +mk_emqx_appspec(GroupName, N) -> + {emqx, #{ + config => mk_config(GroupName, N), + after_start => fun() -> + % NOTE + % This one is actually defined on `emqx_conf_schema` level, but used + % in `emqx_broker`. Thus we have to resort to this ugly hack. + emqx_config:force_put([rpc, mode], async) + end + }}. + +mk_genrpc_appspec() -> + {gen_rpc, #{ + override_env => [{port_discovery, stateless}] + }}. + +mk_config(GroupName, N) -> + #{ + broker => mk_config_broker(GroupName), + listeners => mk_config_listeners(N) + }. + +mk_config_broker(Vsn) when Vsn == routing_schema_v1; Vsn == v1 -> + #{routing => #{storage_schema => v1}}; +mk_config_broker(Vsn) when Vsn == routing_schema_v2; Vsn == v2 -> + #{routing => #{storage_schema => v2}}. 
+ +mk_config_listeners(N) -> + Port = 1883 + N, + #{ + tcp => #{default => #{bind => "127.0.0.1:" ++ integer_to_list(Port)}}, + ssl => #{default => #{enable => false}}, + ws => #{default => #{enable => false}}, + wss => #{default => #{enable => false}} + }. + +%% + +t_cluster_routing(Config) -> + Cluster = ?config(cluster, Config), + Clients = [C1, C2, C3] = [start_client(N) || N <- Cluster], + Commands = [ + {fun publish/3, [C1, <<"a/b/c">>, <<"wontsee">>]}, + {fun publish/3, [C2, <<"a/b/d">>, <<"wontsee">>]}, + {fun subscribe/2, [C3, <<"a/+/c/#">>]}, + {fun publish/3, [C1, <<"a/b/c">>, <<"01">>]}, + {fun publish/3, [C2, <<"a/b/d">>, <<"wontsee">>]}, + {fun subscribe/2, [C1, <<"a/b/c">>]}, + {fun subscribe/2, [C2, <<"a/b/+">>]}, + {fun publish/3, [C3, <<"a/b/c">>, <<"02">>]}, + {fun publish/3, [C2, <<"a/b/d">>, <<"03">>]}, + {fun publish/3, [C2, <<"a/b/c/d">>, <<"04">>]}, + {fun subscribe/2, [C3, <<"a/b/d">>]}, + {fun publish/3, [C1, <<"a/b/d">>, <<"05">>]}, + {fun unsubscribe/2, [C3, <<"a/+/c/#">>]}, + {fun publish/3, [C1, <<"a/b/c">>, <<"06">>]}, + {fun publish/3, [C2, <<"a/b/d">>, <<"07">>]}, + {fun publish/3, [C2, <<"a/b/c/d">>, <<"08">>]}, + {fun unsubscribe/2, [C2, <<"a/b/+">>]}, + {fun publish/3, [C1, <<"a/b/c">>, <<"09">>]}, + {fun publish/3, [C2, <<"a/b/d">>, <<"10">>]}, + {fun publish/3, [C2, <<"a/b/c/d">>, <<"11">>]}, + {fun unsubscribe/2, [C3, <<"a/b/d">>]}, + {fun unsubscribe/2, [C1, <<"a/b/c">>]}, + {fun publish/3, [C1, <<"a/b/c">>, <<"wontsee">>]}, + {fun publish/3, [C2, <<"a/b/d">>, <<"wontsee">>]} + ], + ok = lists:foreach(fun({F, Args}) -> erlang:apply(F, Args) end, Commands), + _ = [emqtt:stop(C) || C <- Clients], + Deliveries = ?drainMailbox(), + ?assertMatch( + [ + {pub, C1, #{topic := <<"a/b/c">>, payload := <<"02">>}}, + {pub, C1, #{topic := <<"a/b/c">>, payload := <<"06">>}}, + {pub, C1, #{topic := <<"a/b/c">>, payload := <<"09">>}}, + {pub, C2, #{topic := <<"a/b/c">>, payload := <<"02">>}}, + {pub, C2, #{topic := <<"a/b/d">>, payload := 
<<"03">>}}, + {pub, C2, #{topic := <<"a/b/d">>, payload := <<"05">>}}, + {pub, C2, #{topic := <<"a/b/c">>, payload := <<"06">>}}, + {pub, C2, #{topic := <<"a/b/d">>, payload := <<"07">>}}, + {pub, C3, #{topic := <<"a/b/c">>, payload := <<"01">>}}, + {pub, C3, #{topic := <<"a/b/c">>, payload := <<"02">>}}, + {pub, C3, #{topic := <<"a/b/c/d">>, payload := <<"04">>}}, + {pub, C3, #{topic := <<"a/b/d">>, payload := <<"05">>}}, + {pub, C3, #{topic := <<"a/b/d">>, payload := <<"07">>}}, + {pub, C3, #{topic := <<"a/b/d">>, payload := <<"10">>}} + ], + lists:sort( + fun({pub, CL, #{payload := PL}}, {pub, CR, #{payload := PR}}) -> + {CL, PL} < {CR, PR} + end, + Deliveries + ) + ). + +start_client(Node) -> + Self = self(), + {ok, C} = emqtt:start_link(#{ + port => get_mqtt_tcp_port(Node), + msg_handler => #{ + publish => fun(Msg) -> Self ! {pub, self(), Msg} end + } + }), + {ok, _Props} = emqtt:connect(C), + C. + +publish(C, Topic, Payload) -> + {ok, #{reason_code := 0}} = emqtt:publish(C, Topic, Payload, 1). + +subscribe(C, Topic) -> + % NOTE: sleeping here as lazy way to wait for subscribe to replicate + {ok, _Props, [0]} = emqtt:subscribe(C, Topic), + ok = timer:sleep(200). + +unsubscribe(C, Topic) -> + % NOTE: sleeping here as lazy way to wait for unsubscribe to replicate + {ok, _Props, undefined} = emqtt:unsubscribe(C, Topic), + ok = timer:sleep(200). + +%% + +t_routing_schema_switch_v1(Config) -> + t_routing_schema_switch(_From = v2, _To = v1, Config). + +t_routing_schema_switch_v2(Config) -> + t_routing_schema_switch(_From = v1, _To = v2, Config). + +t_routing_schema_switch(VFrom, VTo, Config) -> + % Start first node with routing schema VTo (e.g. 
v1) + WorkDir = ?config(work_dir, Config), + [Node1] = emqx_cth_cluster:start( + [ + {routing_schema_switch1, #{ + apps => [mk_genrpc_appspec(), mk_emqx_appspec(VTo, 1)] + }} + ], + #{work_dir => WorkDir} + ), + % Ensure there's at least 1 route on Node1 + C1 = start_client(Node1), + ok = subscribe(C1, <<"a/+/c">>), + ok = subscribe(C1, <<"d/e/f/#">>), + % Start rest of nodes with routing schema VFrom (e.g. v2) + [Node2, Node3] = emqx_cth_cluster:start( + [ + {routing_schema_switch2, #{ + apps => [mk_genrpc_appspec(), mk_emqx_appspec(VFrom, 2)], + base_port => 20000, + join_to => Node1 + }}, + {routing_schema_switch3, #{ + apps => [mk_genrpc_appspec(), mk_emqx_appspec(VFrom, 3)], + base_port => 20100, + join_to => Node1 + }} + ], + #{work_dir => WorkDir} + ), + % Verify that new nodes switched to schema v1/v2 in presence of v1/v2 routes respectively + Nodes = [Node1, Node2, Node3], + ?assertEqual( + [{ok, VTo}, {ok, VTo}, {ok, VTo}], + erpc:multicall(Nodes, emqx_router, get_schema_vsn, []) + ), + % Wait for all nodes to agree on cluster state + ?retry( + 500, + 10, + ?assertMatch( + [{ok, [Node1, Node2, Node3]}], + lists:usort(erpc:multicall(Nodes, emqx, running_nodes, [])) + ) + ), + % Verify that routing works as expected + C2 = start_client(Node2), + ok = subscribe(C2, <<"a/+/d">>), + C3 = start_client(Node3), + ok = subscribe(C3, <<"d/e/f/#">>), + {ok, _} = publish(C1, <<"a/b/d">>, <<"hey-newbies">>), + {ok, _} = publish(C2, <<"a/b/c">>, <<"hi">>), + {ok, _} = publish(C3, <<"d/e/f/42">>, <<"hello">>), + ?assertReceive({pub, C2, #{topic := <<"a/b/d">>, payload := <<"hey-newbies">>}}), + ?assertReceive({pub, C1, #{topic := <<"a/b/c">>, payload := <<"hi">>}}), + ?assertReceive({pub, C1, #{topic := <<"d/e/f/42">>, payload := <<"hello">>}}), + ?assertReceive({pub, C3, #{topic := <<"d/e/f/42">>, payload := <<"hello">>}}), + ?assertNotReceive(_), + ok = emqtt:stop(C1), + ok = emqtt:stop(C2), + ok = emqtt:stop(C3), + ok = emqx_cth_cluster:stop(Nodes). 
+ +%% + +get_mqtt_tcp_port(Node) -> + {_, Port} = erpc:call(Node, emqx_config, get, [[listeners, tcp, default, bind]]), + Port. diff --git a/apps/emqx/test/emqx_shared_sub_SUITE.erl b/apps/emqx/test/emqx_shared_sub_SUITE.erl index 6439981f6..7a7729878 100644 --- a/apps/emqx/test/emqx_shared_sub_SUITE.erl +++ b/apps/emqx/test/emqx_shared_sub_SUITE.erl @@ -1054,7 +1054,7 @@ t_queue_subscription(Config) when is_list(Config) -> begin ct:pal("routes: ~p", [ets:tab2list(emqx_route)]), %% FIXME: should ensure we have 2 subscriptions - true = emqx_router:has_routes(Topic) + [_] = emqx_router:lookup_routes(Topic) end ), @@ -1081,7 +1081,7 @@ t_queue_subscription(Config) when is_list(Config) -> %% _Attempts0 = 50, %% begin %% ct:pal("routes: ~p", [ets:tab2list(emqx_route)]), - %% false = emqx_router:has_routes(Topic) + %% [] = emqx_router:lookup_routes(Topic) %% end %% ), ct:sleep(500), diff --git a/apps/emqx/test/emqx_topic_index_SUITE.erl b/apps/emqx/test/emqx_topic_index_SUITE.erl index ade98acec..9df9743f1 100644 --- a/apps/emqx/test/emqx_topic_index_SUITE.erl +++ b/apps/emqx/test/emqx_topic_index_SUITE.erl @@ -25,42 +25,82 @@ -import(emqx_proper_types, [scaled/2]). all() -> - emqx_common_test_helpers:all(?MODULE). + [ + {group, ets}, + {group, gb_tree} + ]. -t_insert(_) -> - Tab = emqx_topic_index:new(), - true = emqx_topic_index:insert(<<"sensor/1/metric/2">>, t_insert_1, <<>>, Tab), - true = emqx_topic_index:insert(<<"sensor/+/#">>, t_insert_2, <<>>, Tab), - true = emqx_topic_index:insert(<<"sensor/#">>, t_insert_3, <<>>, Tab), - ?assertEqual(<<"sensor/#">>, topic(match(<<"sensor">>, Tab))), - ?assertEqual(t_insert_3, id(match(<<"sensor">>, Tab))). +groups() -> + All = emqx_common_test_helpers:all(?MODULE), + [ + {ets, All}, + {gb_tree, All} + ]. 
-t_match(_) -> - Tab = emqx_topic_index:new(), - true = emqx_topic_index:insert(<<"sensor/1/metric/2">>, t_match_1, <<>>, Tab), - true = emqx_topic_index:insert(<<"sensor/+/#">>, t_match_2, <<>>, Tab), - true = emqx_topic_index:insert(<<"sensor/#">>, t_match_3, <<>>, Tab), - ?assertMatch( - [<<"sensor/#">>, <<"sensor/+/#">>], - [topic(M) || M <- matches(<<"sensor/1">>, Tab)] +init_per_group(ets, Config) -> + [{index_module, emqx_topic_index} | Config]; +init_per_group(gb_tree, Config) -> + [{index_module, emqx_topic_gbt} | Config]. + +end_per_group(_Group, _Config) -> + ok. + +get_module(Config) -> + proplists:get_value(index_module, Config). + +t_insert(Config) -> + M = get_module(Config), + Tab = M:new(), + true = M:insert(<<"sensor/1/metric/2">>, t_insert_1, <<>>, Tab), + true = M:insert(<<"sensor/+/#">>, t_insert_2, <<>>, Tab), + true = M:insert(<<"sensor/#">>, t_insert_3, <<>>, Tab), + ?assertEqual(<<"sensor/#">>, topic(match(M, <<"sensor">>, Tab))), + ?assertEqual(t_insert_3, id(match(M, <<"sensor">>, Tab))). + +t_insert_filter(Config) -> + M = get_module(Config), + Tab = M:new(), + Topic = <<"sensor/+/metric//#">>, + true = M:insert(Topic, 1, <<>>, Tab), + true = M:insert(emqx_trie_search:filter(Topic), 2, <<>>, Tab), + ?assertEqual( + [Topic, Topic], + [topic(X) || X <- matches(M, <<"sensor/1/metric//2">>, Tab)] ). -t_match2(_) -> - Tab = emqx_topic_index:new(), - true = emqx_topic_index:insert(<<"#">>, t_match2_1, <<>>, Tab), - true = emqx_topic_index:insert(<<"+/#">>, t_match2_2, <<>>, Tab), - true = emqx_topic_index:insert(<<"+/+/#">>, t_match2_3, <<>>, Tab), +t_match(Config) -> + M = get_module(Config), + Tab = M:new(), + true = M:insert(<<"sensor/1/metric/2">>, t_match_1, <<>>, Tab), + true = M:insert(<<"sensor/+/#">>, t_match_2, <<>>, Tab), + true = M:insert(<<"sensor/#">>, t_match_3, <<>>, Tab), + ?assertMatch( + [<<"sensor/#">>, <<"sensor/+/#">>], + [topic(X) || X <- matches(M, <<"sensor/1">>, Tab)] + ). 
+ +t_match2(Config) -> + M = get_module(Config), + Tab = M:new(), + true = M:insert(<<"#">>, t_match2_1, <<>>, Tab), + true = M:insert(<<"+/#">>, t_match2_2, <<>>, Tab), + true = M:insert(<<"+/+/#">>, t_match2_3, <<>>, Tab), ?assertEqual( [<<"#">>, <<"+/#">>, <<"+/+/#">>], - [topic(M) || M <- matches(<<"a/b/c">>, Tab)] + [topic(X) || X <- matches(M, <<"a/b/c">>, Tab)] ), ?assertEqual( false, - emqx_topic_index:match(<<"$SYS/broker/zenmq">>, Tab) + M:match(<<"$SYS/broker/zenmq">>, Tab) + ), + ?assertEqual( + [], + matches(M, <<"$SYS/broker/zenmq">>, Tab) ). -t_match3(_) -> - Tab = emqx_topic_index:new(), +t_match3(Config) -> + M = get_module(Config), + Tab = M:new(), Records = [ {<<"d/#">>, t_match3_1}, {<<"a/b/+">>, t_match3_2}, @@ -69,37 +109,39 @@ t_match3(_) -> {<<"$SYS/#">>, t_match3_sys} ], lists:foreach( - fun({Topic, ID}) -> emqx_topic_index:insert(Topic, ID, <<>>, Tab) end, + fun({Topic, ID}) -> M:insert(Topic, ID, <<>>, Tab) end, Records ), - Matched = matches(<<"a/b/c">>, Tab), + Matched = matches(M, <<"a/b/c">>, Tab), case length(Matched) of 3 -> ok; _ -> error({unexpected, Matched}) end, ?assertEqual( t_match3_sys, - id(match(<<"$SYS/a/b/c">>, Tab)) + id(match(M, <<"$SYS/a/b/c">>, Tab)) ). -t_match4(_) -> - Tab = emqx_topic_index:new(), +t_match4(Config) -> + M = get_module(Config), + Tab = M:new(), Records = [{<<"/#">>, t_match4_1}, {<<"/+">>, t_match4_2}, {<<"/+/a/b/c">>, t_match4_3}], lists:foreach( - fun({Topic, ID}) -> emqx_topic_index:insert(Topic, ID, <<>>, Tab) end, + fun({Topic, ID}) -> M:insert(Topic, ID, <<>>, Tab) end, Records ), ?assertEqual( [<<"/#">>, <<"/+">>], - [topic(M) || M <- matches(<<"/">>, Tab)] + [topic(X) || X <- matches(M, <<"/">>, Tab)] ), ?assertEqual( [<<"/#">>, <<"/+/a/b/c">>], - [topic(M) || M <- matches(<<"/0/a/b/c">>, Tab)] + [topic(X) || X <- matches(M, <<"/0/a/b/c">>, Tab)] ). 
-t_match5(_) -> - Tab = emqx_topic_index:new(), +t_match5(Config) -> + M = get_module(Config), + Tab = M:new(), T = <<"a/b/c/d/e/f/g/h/i/j/k/l/m/n/o/p/q/r/s/t/u/v/w/x/y/z">>, Records = [ {<<"#">>, t_match5_1}, @@ -107,58 +149,89 @@ t_match5(_) -> {<>, t_match5_3} ], lists:foreach( - fun({Topic, ID}) -> emqx_topic_index:insert(Topic, ID, <<>>, Tab) end, + fun({Topic, ID}) -> M:insert(Topic, ID, <<>>, Tab) end, Records ), ?assertEqual( [<<"#">>, <>], - [topic(M) || M <- matches(T, Tab)] + [topic(X) || X <- matches(M, T, Tab)] ), ?assertEqual( [<<"#">>, <>, <>], - [topic(M) || M <- matches(<>, Tab)] + [topic(X) || X <- matches(M, <>, Tab)] ). -t_match6(_) -> - Tab = emqx_topic_index:new(), +t_match6(Config) -> + M = get_module(Config), + Tab = M:new(), T = <<"a/b/c/d/e/f/g/h/i/j/k/l/m/n/o/p/q/r/s/t/u/v/w/x/y/z">>, W = <<"+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/+/#">>, - emqx_topic_index:insert(W, ID = t_match6, <<>>, Tab), - ?assertEqual(ID, id(match(T, Tab))). + M:insert(W, ID = t_match6, <<>>, Tab), + ?assertEqual(ID, id(match(M, T, Tab))). -t_match7(_) -> - Tab = emqx_topic_index:new(), +t_match7(Config) -> + M = get_module(Config), + Tab = M:new(), T = <<"a/b/c/d/e/f/g/h/i/j/k/l/m/n/o/p/q/r/s/t/u/v/w/x/y/z">>, W = <<"a/+/c/+/e/+/g/+/i/+/k/+/m/+/o/+/q/+/s/+/u/+/w/+/y/+/#">>, - emqx_topic_index:insert(W, t_match7, <<>>, Tab), - ?assertEqual(W, topic(match(T, Tab))). + M:insert(W, t_match7, <<>>, Tab), + ?assertEqual(W, topic(match(M, T, Tab))). 
-t_match_fast_forward(_) -> - Tab = emqx_topic_index:new(), - emqx_topic_index:insert(<<"a/b/1/2/3/4/5/6/7/8/9/#">>, id1, <<>>, Tab), - emqx_topic_index:insert(<<"z/y/x/+/+">>, id2, <<>>, Tab), - emqx_topic_index:insert(<<"a/b/c/+">>, id3, <<>>, Tab), +t_match8(Config) -> + M = get_module(Config), + Tab = M:new(), + Filters = [<<"+">>, <<"dev/global/sensor">>, <<"dev/+/sensor/#">>], + IDs = [1, 2, 3], + Keys = [{F, ID} || F <- Filters, ID <- IDs], + lists:foreach( + fun({F, ID}) -> + M:insert(F, ID, <<>>, Tab) + end, + Keys + ), + Topic = <<"dev/global/sensor">>, + Matches = lists:sort(matches(M, Topic, Tab)), + ?assertEqual( + [ + <<"dev/+/sensor/#">>, + <<"dev/+/sensor/#">>, + <<"dev/+/sensor/#">>, + <<"dev/global/sensor">>, + <<"dev/global/sensor">>, + <<"dev/global/sensor">> + ], + [emqx_topic_index:get_topic(Match) || Match <- Matches] + ). + +t_match_fast_forward(Config) -> + M = get_module(Config), + Tab = M:new(), + M:insert(<<"a/b/1/2/3/4/5/6/7/8/9/#">>, id1, <<>>, Tab), + M:insert(<<"z/y/x/+/+">>, id2, <<>>, Tab), + M:insert(<<"a/b/c/+">>, id3, <<>>, Tab), % dbg:tracer(), % dbg:p(all, c), % dbg:tpl({ets, next, '_'}, x), - ?assertEqual(id1, id(match(<<"a/b/1/2/3/4/5/6/7/8/9/0">>, Tab))), - ?assertEqual([id1], [id(M) || M <- matches(<<"a/b/1/2/3/4/5/6/7/8/9/0">>, Tab)]). + ?assertEqual(id1, id(match(M, <<"a/b/1/2/3/4/5/6/7/8/9/0">>, Tab))), + ?assertEqual([id1], [id(X) || X <- matches(M, <<"a/b/1/2/3/4/5/6/7/8/9/0">>, Tab)]). 
-t_match_unique(_) -> - Tab = emqx_topic_index:new(), - emqx_topic_index:insert(<<"a/b/c">>, t_match_id1, <<>>, Tab), - emqx_topic_index:insert(<<"a/b/+">>, t_match_id1, <<>>, Tab), - emqx_topic_index:insert(<<"a/b/c/+">>, t_match_id2, <<>>, Tab), +t_match_unique(Config) -> + M = get_module(Config), + Tab = M:new(), + M:insert(<<"a/b/c">>, t_match_id1, <<>>, Tab), + M:insert(<<"a/b/+">>, t_match_id1, <<>>, Tab), + M:insert(<<"a/b/c/+">>, t_match_id2, <<>>, Tab), ?assertEqual( [t_match_id1, t_match_id1], - [id(M) || M <- emqx_topic_index:matches(<<"a/b/c">>, Tab, [])] + [id(X) || X <- matches(M, <<"a/b/c">>, Tab, [])] ), ?assertEqual( [t_match_id1], - [id(M) || M <- emqx_topic_index:matches(<<"a/b/c">>, Tab, [unique])] + [id(X) || X <- matches(M, <<"a/b/c">>, Tab, [unique])] ). -t_match_wildcard_edge_cases(_) -> +t_match_wildcard_edge_cases(Config) -> + M = get_module(Config), CommonTopics = [ <<"a/b">>, <<"a/b/#">>, @@ -179,32 +252,46 @@ t_match_wildcard_edge_cases(_) -> {[<<"/">>, <<"+">>], <<"a">>, [2]} ], F = fun({Topics, TopicName, Expected}) -> - Tab = emqx_topic_index:new(), - _ = [emqx_topic_index:insert(T, N, <<>>, Tab) || {N, T} <- lists:enumerate(Topics)], + Tab = M:new(), + _ = [M:insert(T, N, <<>>, Tab) || {N, T} <- lists:enumerate(Topics)], ?assertEqual( lists:last(Expected), - id(emqx_topic_index:match(TopicName, Tab)), + id(M:match(TopicName, Tab)), #{"Base topics" => Topics, "Topic name" => TopicName} ), ?assertEqual( Expected, - [id(M) || M <- emqx_topic_index:matches(TopicName, Tab, [unique])], + [id(X) || X <- matches(M, TopicName, Tab, [unique])], #{"Base topics" => Topics, "Topic name" => TopicName} ) end, lists:foreach(F, Datasets). 
-t_prop_matches(_) -> +t_prop_edgecase(Config) -> + M = get_module(Config), + Tab = M:new(), + Topic = <<"01/01">>, + Filters = [ + {1, <<>>}, + {2, <<"+/01">>}, + {3, <<>>}, + {4, <<"+/+/01">>} + ], + _ = [M:insert(F, N, <<>>, Tab) || {N, F} <- Filters], + ?assertMatch([2], [id(X) || X <- matches(M, Topic, Tab, [unique])]). + +t_prop_matches(Config) -> + M = get_module(Config), ?assert( proper:quickcheck( - topic_matches_prop(), + topic_matches_prop(M), [{max_size, 100}, {numtests, 100}] ) ), Statistics = [{C, account(C)} || C <- [filters, topics, matches, maxhits]], ct:pal("Statistics: ~p", [maps:from_list(Statistics)]). -topic_matches_prop() -> +topic_matches_prop(M) -> ?FORALL( % Generate a longer list of topics and a shorter list of topic filter patterns. #{ @@ -219,12 +306,12 @@ topic_matches_prop() -> patterns => list(topic_filter_pattern_t()) }), begin - Tab = emqx_topic_index:new(), + Tab = M:new(), Topics = [emqx_topic:join(T) || T <- TTopics], % Produce topic filters from generated topics and patterns. % Number of filters is equal to the number of patterns, most of the time. Filters = lists:enumerate(mk_filters(Pats, TTopics)), - _ = [emqx_topic_index:insert(F, N, <<>>, Tab) || {N, F} <- Filters], + _ = [M:insert(F, N, <<>>, Tab) || {N, F} <- Filters], % Gather some basic statistics _ = account(filters, length(Filters)), _ = account(topics, NTopics = length(Topics)), @@ -233,7 +320,7 @@ topic_matches_prop() -> % matching it against the list of filters one by one. 
lists:all( fun(Topic) -> - Ids1 = [id(M) || M <- emqx_topic_index:matches(Topic, Tab, [unique])], + Ids1 = [id(X) || X <- matches(M, Topic, Tab, [unique])], Ids2 = lists:filtermap( fun({N, F}) -> case emqx_topic:match(Topic, F) of @@ -252,8 +339,9 @@ topic_matches_prop() -> ct:pal( "Topic name: ~p~n" "Index results: ~p~n" - "Topic match results:: ~p~n", - [Topic, Ids1, Ids2] + "Topic match results: ~p~n" + "Filters: ~p~n", + [Topic, Ids1, Ids2, Filters] ), false end @@ -276,17 +364,20 @@ account(Counter) -> %% -match(T, Tab) -> - emqx_topic_index:match(T, Tab). +match(M, T, Tab) -> + M:match(T, Tab). -matches(T, Tab) -> - lists:sort(emqx_topic_index:matches(T, Tab, [])). +matches(M, T, Tab) -> + lists:sort(M:matches(T, Tab, [])). + +matches(M, T, Tab, Opts) -> + M:matches(T, Tab, Opts). id(Match) -> - emqx_topic_index:get_id(Match). + emqx_trie_search:get_id(Match). topic(Match) -> - emqx_topic_index:get_topic(Match). + emqx_trie_search:get_topic(Match). %% diff --git a/apps/emqx/test/emqx_trie_search_tests.erl b/apps/emqx/test/emqx_trie_search_tests.erl new file mode 100644 index 000000000..d78347de6 --- /dev/null +++ b/apps/emqx/test/emqx_trie_search_tests.erl @@ -0,0 +1,47 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. 
+%%-------------------------------------------------------------------- + +-module(emqx_trie_search_tests). + +-include_lib("eunit/include/eunit.hrl"). + +-import(emqx_trie_search, [filter/1]). + +filter_test_() -> + [ + ?_assertEqual( + [<<"sensor">>, '+', <<"metric">>, <<>>, '#'], + filter(<<"sensor/+/metric//#">>) + ), + ?_assertEqual( + false, + filter(<<"sensor/1/metric//42">>) + ) + ]. + +topic_validation_test_() -> + NextF = fun(_) -> '$end_of_table' end, + Call = fun(Topic) -> + emqx_trie_search:match(Topic, NextF) + end, + [ + ?_assertError(badarg, Call(<<"+">>)), + ?_assertError(badarg, Call(<<"#">>)), + ?_assertError(badarg, Call(<<"a/+/b">>)), + ?_assertError(badarg, Call(<<"a/b/#">>)), + ?_assertEqual(false, Call(<<"a/b/b+">>)), + ?_assertEqual(false, Call(<<"a/b/c#">>)) + ]. diff --git a/apps/emqx/test/emqx_ws_connection_SUITE.erl b/apps/emqx/test/emqx_ws_connection_SUITE.erl index 37faef12b..3a26afec6 100644 --- a/apps/emqx/test/emqx_ws_connection_SUITE.erl +++ b/apps/emqx/test/emqx_ws_connection_SUITE.erl @@ -483,7 +483,6 @@ t_handle_info_close(_) -> t_handle_info_event(_) -> ok = meck:expect(emqx_cm, register_channel, fun(_, _, _) -> ok end), ok = meck:expect(emqx_cm, insert_channel_info, fun(_, _, _) -> ok end), - ok = meck:expect(emqx_cm, connection_closed, fun(_) -> true end), {ok, _} = ?ws_conn:handle_info({event, connected}, st()), {ok, _} = ?ws_conn:handle_info({event, disconnected}, st()), {ok, _} = ?ws_conn:handle_info({event, updated}, st()). diff --git a/apps/emqx/include/emqx_authentication.hrl b/apps/emqx_authn/include/emqx_authentication.hrl similarity index 84% rename from apps/emqx/include/emqx_authentication.hrl rename to apps/emqx_authn/include/emqx_authentication.hrl index 70b35a474..c294b8d99 100644 --- a/apps/emqx/include/emqx_authentication.hrl +++ b/apps/emqx_authn/include/emqx_authentication.hrl @@ -18,8 +18,8 @@ -define(EMQX_AUTHENTICATION_HRL, true). -include_lib("emqx/include/logger.hrl"). 
+-include_lib("emqx/include/emqx_access_control.hrl"). --define(AUTHN_TRACE_TAG, "AUTHN"). -define(GLOBAL, 'mqtt:global'). -define(TRACE_AUTHN_PROVIDER(Msg), ?TRACE_AUTHN_PROVIDER(Msg, #{})). @@ -36,12 +36,6 @@ -define(EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_ATOM, authentication). -define(EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_BINARY, <<"authentication">>). -%% key to a persistent term which stores a module name in order to inject -%% schema module at run-time to keep emqx app's compile time purity. -%% see emqx_schema.erl for more details -%% and emqx_conf_schema for an examples --define(EMQX_AUTHENTICATION_SCHEMA_MODULE_PT_KEY, emqx_authentication_schema_module). - %% authentication move cmd -define(CMD_MOVE_FRONT, front). -define(CMD_MOVE_REAR, rear). diff --git a/apps/emqx_authn/include/emqx_authn.hrl b/apps/emqx_authn/include/emqx_authn.hrl index 601b161d5..9574d092f 100644 --- a/apps/emqx_authn/include/emqx_authn.hrl +++ b/apps/emqx_authn/include/emqx_authn.hrl @@ -17,7 +17,7 @@ -ifndef(EMQX_AUTHN_HRL). -define(EMQX_AUTHN_HRL, true). --include_lib("emqx/include/emqx_authentication.hrl"). +-include_lib("emqx_authentication.hrl"). -define(APP, emqx_authn). diff --git a/apps/emqx_authn/rebar.config b/apps/emqx_authn/rebar.config index 932a1ff77..5bc8d3e91 100644 --- a/apps/emqx_authn/rebar.config +++ b/apps/emqx_authn/rebar.config @@ -34,4 +34,6 @@ {cover_opts, [verbose]}. {cover_export_enabled, true}. +{erl_first_files, ["src/emqx_authentication.erl"]}. + {project_plugins, [erlfmt]}. diff --git a/apps/emqx/src/emqx_authentication.erl b/apps/emqx_authn/src/emqx_authentication.erl similarity index 97% rename from apps/emqx/src/emqx_authentication.erl rename to apps/emqx_authn/src/emqx_authentication.erl index cce789f24..8f055e049 100644 --- a/apps/emqx/src/emqx_authentication.erl +++ b/apps/emqx_authn/src/emqx_authentication.erl @@ -22,18 +22,27 @@ -behaviour(gen_server). --include("emqx.hrl"). --include("logger.hrl"). -include("emqx_authentication.hrl"). 
+-include_lib("emqx/include/logger.hrl"). -include_lib("emqx/include/emqx_hooks.hrl"). -include_lib("stdlib/include/ms_transform.hrl"). -define(CONF_ROOT, ?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_ATOM). --define(IS_UNDEFINED(X), (X =:= undefined orelse X =:= <<>>)). + +-record(authenticator, { + id :: binary(), + provider :: module(), + enable :: boolean(), + state :: map() +}). + +-record(chain, { + name :: atom(), + authenticators :: [#authenticator{}] +}). %% The authentication entrypoint. -export([ - pre_hook_authenticate/1, authenticate/2 ]). @@ -220,21 +229,6 @@ when %%------------------------------------------------------------------------------ %% Authenticate %%------------------------------------------------------------------------------ --spec pre_hook_authenticate(emqx_types:clientinfo()) -> - ok | continue | {error, not_authorized}. -pre_hook_authenticate(#{enable_authn := false}) -> - ?TRACE_RESULT("authentication_result", ok, enable_authn_false); -pre_hook_authenticate(#{enable_authn := quick_deny_anonymous} = Credential) -> - case maps:get(username, Credential, undefined) of - U when ?IS_UNDEFINED(U) -> - ?TRACE_RESULT( - "authentication_result", {error, not_authorized}, enable_authn_false - ); - _ -> - continue - end; -pre_hook_authenticate(_) -> - continue. authenticate(#{listener := Listener, protocol := Protocol} = Credential, AuthResult) -> case get_authenticators(Listener, global_chain(Protocol)) of @@ -271,6 +265,7 @@ get_enabled(Authenticators) -> %%------------------------------------------------------------------------------ %% @doc Get all registered authentication providers. +-spec get_providers() -> #{authn_type() => module()}. get_providers() -> call(get_providers). 
diff --git a/apps/emqx/src/emqx_authentication_config.erl b/apps/emqx_authn/src/emqx_authentication_config.erl similarity index 84% rename from apps/emqx/src/emqx_authentication_config.erl rename to apps/emqx_authn/src/emqx_authentication_config.erl index 96718d611..95140a0e8 100644 --- a/apps/emqx/src/emqx_authentication_config.erl +++ b/apps/emqx_authn/src/emqx_authentication_config.erl @@ -21,7 +21,9 @@ -export([ pre_config_update/3, - post_config_update/5 + post_config_update/5, + propagated_pre_config_update/3, + propagated_post_config_update/5 ]). -export([ @@ -37,7 +39,7 @@ -export_type([config/0]). --include("logger.hrl"). +-include_lib("emqx/include/logger.hrl"). -include("emqx_authentication.hrl"). -type parsed_config() :: #{ @@ -65,8 +67,8 @@ -spec pre_config_update(list(atom()), update_request(), emqx_config:raw_config()) -> {ok, map() | list()} | {error, term()}. -pre_config_update(Paths, UpdateReq, OldConfig) -> - try do_pre_config_update(Paths, UpdateReq, to_list(OldConfig)) of +pre_config_update(ConfPath, UpdateReq, OldConfig) -> + try do_pre_config_update(ConfPath, UpdateReq, to_list(OldConfig)) of {error, Reason} -> {error, Reason}; {ok, NewConfig} -> {ok, NewConfig} catch @@ -130,31 +132,33 @@ do_pre_config_update(_, {move_authenticator, _ChainName, AuthenticatorID, Positi end end end; -do_pre_config_update(Paths, {merge_authenticators, NewConfig}, OldConfig) -> +do_pre_config_update(ConfPath, {merge_authenticators, NewConfig}, OldConfig) -> MergeConfig = merge_authenticators(OldConfig, NewConfig), - do_pre_config_update(Paths, MergeConfig, OldConfig); + do_pre_config_update(ConfPath, MergeConfig, OldConfig); do_pre_config_update(_, OldConfig, OldConfig) -> {ok, OldConfig}; -do_pre_config_update(Paths, NewConfig, _OldConfig) -> - ChainName = chain_name(Paths), - {ok, [ - begin - CertsDir = certs_dir(ChainName, New), - convert_certs(CertsDir, New) - end - || New <- to_list(NewConfig) - ]}. 
+do_pre_config_update(ConfPath, NewConfig, _OldConfig) -> + convert_certs_for_conf_path(ConfPath, NewConfig). + +%% @doc Handle listener config changes made at higher level. + +-spec propagated_pre_config_update(list(binary()), update_request(), emqx_config:raw_config()) -> + {ok, map() | list()} | {error, term()}. +propagated_pre_config_update(_, OldConfig, OldConfig) -> + {ok, OldConfig}; +propagated_pre_config_update(ConfPath, NewConfig, _OldConfig) -> + convert_certs_for_conf_path(ConfPath, NewConfig). -spec post_config_update( list(atom()), update_request(), - map() | list(), + map() | list() | undefined, emqx_config:raw_config(), emqx_config:app_envs() ) -> ok | {ok, map()} | {error, term()}. -post_config_update(Paths, UpdateReq, NewConfig, OldConfig, AppEnvs) -> - do_post_config_update(Paths, UpdateReq, to_list(NewConfig), OldConfig, AppEnvs). +post_config_update(ConfPath, UpdateReq, NewConfig, OldConfig, AppEnvs) -> + do_post_config_update(ConfPath, UpdateReq, to_list(NewConfig), OldConfig, AppEnvs). do_post_config_update( _, {create_authenticator, ChainName, Config}, NewConfig, _OldConfig, _AppEnvs @@ -192,8 +196,8 @@ do_post_config_update( emqx_authentication:move_authenticator(ChainName, AuthenticatorID, Position); do_post_config_update(_, _UpdateReq, OldConfig, OldConfig, _AppEnvs) -> ok; -do_post_config_update(Paths, _UpdateReq, NewConfig0, OldConfig0, _AppEnvs) -> - ChainName = chain_name(Paths), +do_post_config_update(ConfPath, _UpdateReq, NewConfig0, OldConfig0, _AppEnvs) -> + ChainName = chain_name(ConfPath), OldConfig = to_list(OldConfig0), NewConfig = to_list(NewConfig0), OldIds = lists:map(fun authenticator_id/1, OldConfig), @@ -203,6 +207,20 @@ do_post_config_update(Paths, _UpdateReq, NewConfig0, OldConfig0, _AppEnvs) -> ok = emqx_authentication:reorder_authenticator(ChainName, NewIds), ok. +%% @doc Handle listener config changes made at higher level. 
+ +-spec propagated_post_config_update( + list(atom()), + update_request(), + map() | list() | undefined, + emqx_config:raw_config(), + emqx_config:app_envs() +) -> + ok. +propagated_post_config_update(ConfPath, UpdateReq, NewConfig, OldConfig, AppEnvs) -> + ok = post_config_update(ConfPath, UpdateReq, NewConfig, OldConfig, AppEnvs), + ok. + %% create new authenticators and update existing ones create_or_update_authenticators(OldIds, ChainName, NewConfig) -> lists:foreach( @@ -238,6 +256,17 @@ to_list(M) when M =:= #{} -> []; to_list(M) when is_map(M) -> [M]; to_list(L) when is_list(L) -> L. +convert_certs_for_conf_path(ConfPath, NewConfig) -> + ChainName = chain_name_for_filepath(ConfPath), + CovertedConfs = lists:map( + fun(Conf) -> + CertsDir = certs_dir(ChainName, Conf), + convert_certs(CertsDir, Conf) + end, + to_list(NewConfig) + ), + {ok, CovertedConfs}. + convert_certs(CertsDir, NewConfig) -> NewSSL = maps:get(<<"ssl">>, NewConfig, undefined), case emqx_tls_lib:ensure_ssl_files(CertsDir, NewSSL) of @@ -331,7 +360,16 @@ dir(ChainName, Config) when is_map(Config) -> chain_name([authentication]) -> ?GLOBAL; chain_name([listeners, Type, Name, authentication]) -> - binary_to_existing_atom(<<(atom_to_binary(Type))/binary, ":", (atom_to_binary(Name))/binary>>). + %% Type, Name atoms exist, so let 'Type:Name' exist too. + binary_to_atom(<<(atom_to_binary(Type))/binary, ":", (atom_to_binary(Name))/binary>>). + +chain_name_for_filepath(Path) -> + do_chain_name_for_filepath([to_bin(Key) || Key <- Path]). + +do_chain_name_for_filepath([<<"authentication">>]) -> + to_bin(?GLOBAL); +do_chain_name_for_filepath([<<"listeners">>, Type, Name, <<"authentication">>]) -> + <<(to_bin(Type))/binary, ":", (to_bin(Name))/binary>>. 
merge_authenticators(OriginConf0, NewConf0) -> {OriginConf1, NewConf1} = diff --git a/apps/emqx/src/emqx_authentication_sup.erl b/apps/emqx_authn/src/emqx_authentication_sup.erl similarity index 100% rename from apps/emqx/src/emqx_authentication_sup.erl rename to apps/emqx_authn/src/emqx_authentication_sup.erl diff --git a/apps/emqx_authn/src/emqx_authn_api.erl b/apps/emqx_authn/src/emqx_authn_api.erl index fa9f6c820..ce4647110 100644 --- a/apps/emqx_authn/src/emqx_authn_api.erl +++ b/apps/emqx_authn/src/emqx_authn_api.erl @@ -21,7 +21,6 @@ -include("emqx_authn.hrl"). -include_lib("emqx/include/logger.hrl"). -include_lib("emqx/include/emqx_placeholder.hrl"). --include_lib("emqx/include/emqx_authentication.hrl"). -include_lib("hocon/include/hoconsc.hrl"). -import(hoconsc, [mk/2, ref/1, ref/2]). diff --git a/apps/emqx_authn/src/emqx_authn_app.erl b/apps/emqx_authn/src/emqx_authn_app.erl index 5d4be5f41..689f6619a 100644 --- a/apps/emqx_authn/src/emqx_authn_app.erl +++ b/apps/emqx_authn/src/emqx_authn_app.erl @@ -26,7 +26,7 @@ stop/1 ]). --include_lib("emqx/include/emqx_authentication.hrl"). +-include_lib("emqx_authentication.hrl"). -dialyzer({nowarn_function, [start/2]}). @@ -35,8 +35,7 @@ %%------------------------------------------------------------------------------ start(_StartType, _StartArgs) -> - %% required by test cases, ensure the injection of - %% EMQX_AUTHENTICATION_SCHEMA_MODULE_PT_KEY + %% required by test cases, ensure the injection of schema _ = emqx_conf_schema:roots(), ok = mria_rlog:wait_for_shards([?AUTH_SHARD], infinity), {ok, Sup} = emqx_authn_sup:start_link(), diff --git a/apps/emqx_authn/src/emqx_authn_schema.erl b/apps/emqx_authn/src/emqx_authn_schema.erl index a7cdaac5f..b0a68e702 100644 --- a/apps/emqx_authn/src/emqx_authn_schema.erl +++ b/apps/emqx_authn/src/emqx_authn_schema.erl @@ -19,6 +19,12 @@ -elvis([{elvis_style, invalid_dynamic_call, disable}]). -include_lib("hocon/include/hoconsc.hrl"). -include("emqx_authn.hrl"). 
+-include("emqx_authentication.hrl"). + +-behaviour(emqx_schema_hooks). +-export([ + injected_fields/0 +]). -export([ common_fields/0, @@ -28,13 +34,18 @@ fields/1, authenticator_type/0, authenticator_type_without_scram/0, - root_type/0, mechanism/1, backend/1 ]). roots() -> []. +injected_fields() -> + #{ + 'mqtt.listener' => global_auth_fields(), + 'roots.high' => mqtt_listener_auth_fields() + }. + tags() -> [<<"Authentication">>]. @@ -121,12 +132,36 @@ try_select_union_member(Module, Value) -> Module:refs() end. -%% authn is a core functionality however implemented outside of emqx app -%% in emqx_schema, 'authentication' is a map() type which is to allow -%% EMQX more pluggable. root_type() -> hoconsc:array(authenticator_type()). +global_auth_fields() -> + [ + {?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_ATOM, + hoconsc:mk(root_type(), #{ + desc => ?DESC(global_authentication), + converter => fun ensure_array/2, + default => [], + importance => ?IMPORTANCE_LOW + })} + ]. + +mqtt_listener_auth_fields() -> + [ + {?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_ATOM, + hoconsc:mk(root_type(), #{ + desc => ?DESC(listener_authentication), + converter => fun ensure_array/2, + default => [], + importance => ?IMPORTANCE_HIDDEN + })} + ]. + +%% the older version schema allows individual element (instead of a chain) in config +ensure_array(undefined, _) -> undefined; +ensure_array(L, _) when is_list(L) -> L; +ensure_array(M, _) -> [M]. + mechanism(Name) -> ?HOCON( Name, diff --git a/apps/emqx_authn/src/emqx_authn_sup.erl b/apps/emqx_authn/src/emqx_authn_sup.erl index 635bd7323..211ebd518 100644 --- a/apps/emqx_authn/src/emqx_authn_sup.erl +++ b/apps/emqx_authn/src/emqx_authn_sup.erl @@ -27,5 +27,15 @@ start_link() -> supervisor:start_link({local, ?MODULE}, ?MODULE, []). 
init([]) -> - ChildSpecs = [], + AuthNSup = #{ + id => emqx_authentication_sup, + start => {emqx_authentication_sup, start_link, []}, + restart => permanent, + shutdown => infinity, + type => supervisor, + modules => [emqx_authentication_sup] + }, + + ChildSpecs = [AuthNSup], + {ok, {{one_for_one, 10, 10}, ChildSpecs}}. diff --git a/apps/emqx_authn/src/emqx_authn_user_import_api.erl b/apps/emqx_authn/src/emqx_authn_user_import_api.erl index 30836d3ba..f9d4208e6 100644 --- a/apps/emqx_authn/src/emqx_authn_user_import_api.erl +++ b/apps/emqx_authn/src/emqx_authn_user_import_api.erl @@ -20,7 +20,6 @@ -include("emqx_authn.hrl"). -include_lib("emqx/include/logger.hrl"). --include_lib("emqx/include/emqx_authentication.hrl"). -include_lib("hocon/include/hoconsc.hrl"). -import(emqx_dashboard_swagger, [error_codes/2]). diff --git a/apps/emqx_authn/src/simple_authn/emqx_authn_mnesia.erl b/apps/emqx_authn/src/simple_authn/emqx_authn_mnesia.erl index bf0b04d04..2a124ae98 100644 --- a/apps/emqx_authn/src/simple_authn/emqx_authn_mnesia.erl +++ b/apps/emqx_authn/src/simple_authn/emqx_authn_mnesia.erl @@ -173,6 +173,8 @@ update(Config, _State) -> authenticate(#{auth_method := _}, _) -> ignore; +authenticate(#{password := undefined}, _) -> + {error, bad_username_or_password}; authenticate( #{password := Password} = Credential, #{ diff --git a/apps/emqx_authn/src/simple_authn/emqx_authn_mongodb.erl b/apps/emqx_authn/src/simple_authn/emqx_authn_mongodb.erl index 4498d3d8d..9cbd1f2dc 100644 --- a/apps/emqx_authn/src/simple_authn/emqx_authn_mongodb.erl +++ b/apps/emqx_authn/src/simple_authn/emqx_authn_mongodb.erl @@ -160,6 +160,8 @@ destroy(#{resource_id := ResourceId}) -> authenticate(#{auth_method := _}, _) -> ignore; +authenticate(#{password := undefined}, _) -> + {error, bad_username_or_password}; authenticate( #{password := Password} = Credential, #{ diff --git a/apps/emqx_authn/src/simple_authn/emqx_authn_mysql.erl b/apps/emqx_authn/src/simple_authn/emqx_authn_mysql.erl index 
dc4e0d163..49471eb23 100644 --- a/apps/emqx_authn/src/simple_authn/emqx_authn_mysql.erl +++ b/apps/emqx_authn/src/simple_authn/emqx_authn_mysql.erl @@ -110,6 +110,8 @@ destroy(#{resource_id := ResourceId}) -> authenticate(#{auth_method := _}, _) -> ignore; +authenticate(#{password := undefined}, _) -> + {error, bad_username_or_password}; authenticate( #{password := Password} = Credential, #{ diff --git a/apps/emqx_authn/src/simple_authn/emqx_authn_pgsql.erl b/apps/emqx_authn/src/simple_authn/emqx_authn_pgsql.erl index d9526cc7b..b9ce9db8d 100644 --- a/apps/emqx_authn/src/simple_authn/emqx_authn_pgsql.erl +++ b/apps/emqx_authn/src/simple_authn/emqx_authn_pgsql.erl @@ -113,6 +113,8 @@ destroy(#{resource_id := ResourceId}) -> authenticate(#{auth_method := _}, _) -> ignore; +authenticate(#{password := undefined}, _) -> + {error, bad_username_or_password}; authenticate( #{password := Password} = Credential, #{ diff --git a/apps/emqx_authn/src/simple_authn/emqx_authn_redis.erl b/apps/emqx_authn/src/simple_authn/emqx_authn_redis.erl index f6f02c1bc..a5312e41b 100644 --- a/apps/emqx_authn/src/simple_authn/emqx_authn_redis.erl +++ b/apps/emqx_authn/src/simple_authn/emqx_authn_redis.erl @@ -148,6 +148,8 @@ destroy(#{resource_id := ResourceId}) -> authenticate(#{auth_method := _}, _) -> ignore; +authenticate(#{password := undefined}, _) -> + {error, bad_username_or_password}; authenticate( #{password := Password} = Credential, #{ diff --git a/apps/emqx/test/emqx_authentication_SUITE.erl b/apps/emqx_authn/test/emqx_authentication_SUITE.erl similarity index 94% rename from apps/emqx/test/emqx_authentication_SUITE.erl rename to apps/emqx_authn/test/emqx_authentication_SUITE.erl index fb73a3fc1..a15f22c41 100644 --- a/apps/emqx/test/emqx_authentication_SUITE.erl +++ b/apps/emqx_authn/test/emqx_authentication_SUITE.erl @@ -94,19 +94,19 @@ all() -> emqx_common_test_helpers:all(?MODULE). 
init_per_suite(Config) -> - LogLevel = emqx_logger:get_primary_log_level(), - ok = emqx_logger:set_log_level(debug), - application:set_env(ekka, strict_mode, true), - emqx_config:erase_all(), - emqx_common_test_helpers:stop_apps([]), - emqx_common_test_helpers:boot_modules(all), - emqx_common_test_helpers:start_apps([]), - [{log_level, LogLevel} | Config]. + Apps = emqx_cth_suite:start( + [ + emqx, + emqx_conf, + emqx_authn + ], + #{work_dir => ?config(priv_dir)} + ), + ok = deregister_providers(), + [{apps, Apps} | Config]. end_per_suite(Config) -> - emqx_common_test_helpers:stop_apps([]), - LogLevel = ?config(log_level), - emqx_logger:set_log_level(LogLevel), + emqx_cth_suite:stop(?config(apps)), ok. init_per_testcase(Case, Config) -> @@ -302,15 +302,20 @@ t_update_config(Config) when is_list(Config) -> ok = register_provider(?config("auth1"), ?MODULE), ok = register_provider(?config("auth2"), ?MODULE), Global = ?config(global), + %% We mocked provider implementation, but did't mock the schema + %% so we should provide full config AuthenticatorConfig1 = #{ - mechanism => password_based, - backend => built_in_database, - enable => true + <<"mechanism">> => <<"password_based">>, + <<"backend">> => <<"built_in_database">>, + <<"enable">> => true }, AuthenticatorConfig2 = #{ - mechanism => password_based, - backend => mysql, - enable => true + <<"mechanism">> => <<"password_based">>, + <<"backend">> => <<"mysql">>, + <<"query">> => <<"SELECT password_hash, salt FROM users WHERE username = ?">>, + <<"server">> => <<"127.0.0.1:5432">>, + <<"database">> => <<"emqx">>, + <<"enable">> => true }, ID1 = <<"password_based:built_in_database">>, ID2 = <<"password_based:mysql">>, @@ -580,3 +585,11 @@ certs(Certs) -> register_provider(Type, Module) -> ok = ?AUTHN:register_providers([{Type, Module}]). + +deregister_providers() -> + lists:foreach( + fun({Type, _Module}) -> + ok = ?AUTHN:deregister_provider(Type) + end, + maps:to_list(?AUTHN:get_providers()) + ). 
diff --git a/apps/emqx_authn/test/emqx_authn_SUITE.erl b/apps/emqx_authn/test/emqx_authn_SUITE.erl index d5df4add3..b3c786875 100644 --- a/apps/emqx_authn/test/emqx_authn_SUITE.erl +++ b/apps/emqx_authn/test/emqx_authn_SUITE.erl @@ -102,7 +102,7 @@ t_will_message_connection_denied(Config) when is_list(Config) -> {error, _} = emqtt:connect(Publisher), receive {'DOWN', Ref, process, Publisher, Reason} -> - ?assertEqual({shutdown, unauthorized_client}, Reason) + ?assertEqual({shutdown, malformed_username_or_password}, Reason) after 2000 -> error(timeout) end, @@ -151,7 +151,7 @@ t_password_undefined(Config) when is_list(Config) -> header = #mqtt_packet_header{type = ?CONNACK}, variable = #mqtt_packet_connack{ ack_flags = 0, - reason_code = ?CONNACK_AUTH + reason_code = ?CONNACK_CREDENTIALS }, payload = undefined }, diff --git a/apps/emqx_authn/test/emqx_authn_api_SUITE.erl b/apps/emqx_authn/test/emqx_authn_api_SUITE.erl index c0b3fe22f..4056f7f84 100644 --- a/apps/emqx_authn/test/emqx_authn_api_SUITE.erl +++ b/apps/emqx_authn/test/emqx_authn_api_SUITE.erl @@ -23,6 +23,7 @@ -include("emqx_authn.hrl"). -include_lib("eunit/include/eunit.hrl"). +-include_lib("common_test/include/ct.hrl"). -define(TCP_DEFAULT, 'tcp:default'). @@ -43,7 +44,6 @@ init_per_testcase(t_authenticator_fail, Config) -> meck:expect(emqx_authn_proto_v1, lookup_from_all_nodes, 3, [{error, {exception, badarg}}]), init_per_testcase(default, Config); init_per_testcase(_Case, Config) -> - {ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000), emqx_authn_test_lib:delete_authenticators( [?CONF_NS_ATOM], ?GLOBAL @@ -64,19 +64,27 @@ end_per_testcase(_, Config) -> Config. 
init_per_suite(Config) -> - emqx_config:erase(?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_BINARY), - _ = application:load(emqx_conf), - ok = emqx_mgmt_api_test_util:init_suite( - [emqx_conf, emqx_authn] + Apps = emqx_cth_suite:start( + [ + emqx, + emqx_conf, + emqx_authn, + emqx_management, + {emqx_dashboard, "dashboard.listeners.http { enable = true, bind = 18083 }"} + ], + #{ + work_dir => ?config(priv_dir, Config) + } ), - + _ = emqx_common_test_http:create_default_app(), ?AUTHN:delete_chain(?GLOBAL), {ok, Chains} = ?AUTHN:list_chains(), ?assertEqual(length(Chains), 0), - Config. + [{apps, Apps} | Config]. -end_per_suite(_Config) -> - emqx_mgmt_api_test_util:end_suite([emqx_authn]), +end_per_suite(Config) -> + _ = emqx_common_test_http:delete_default_app(), + ok = emqx_cth_suite:stop(?config(apps, Config)), ok. %%------------------------------------------------------------------------------ @@ -351,7 +359,7 @@ test_authenticator_users(PathPrefix) -> <<"metrics">> := #{ <<"total">> := 1, <<"success">> := 0, - <<"nomatch">> := 1 + <<"failed">> := 1 } } = emqx_utils_json:decode(PageData0, [return_maps]); ["listeners", 'tcp:default'] -> @@ -409,7 +417,7 @@ test_authenticator_users(PathPrefix) -> <<"metrics">> := #{ <<"total">> := 2, <<"success">> := 1, - <<"nomatch">> := 1 + <<"failed">> := 1 } } = emqx_utils_json:decode(PageData01, [return_maps]); ["listeners", 'tcp:default'] -> diff --git a/apps/emqx_authn/test/emqx_authn_enable_flag_SUITE.erl b/apps/emqx_authn/test/emqx_authn_enable_flag_SUITE.erl index cc2785b1e..63cdb3f5f 100644 --- a/apps/emqx_authn/test/emqx_authn_enable_flag_SUITE.erl +++ b/apps/emqx_authn/test/emqx_authn_enable_flag_SUITE.erl @@ -24,16 +24,19 @@ -define(PATH, [?CONF_NS_ATOM]). -include_lib("eunit/include/eunit.hrl"). +-include_lib("common_test/include/ct.hrl"). all() -> emqx_common_test_helpers:all(?MODULE). init_per_suite(Config) -> - emqx_common_test_helpers:start_apps([emqx_conf, emqx_authn]), - Config. 
+ Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn], #{ + work_dir => ?config(priv_dir, Config) + }), + [{apps, Apps} | Config]. -end_per_suite(_) -> - emqx_common_test_helpers:stop_apps([emqx_authn, emqx_conf]), +end_per_suite(Config) -> + ok = emqx_cth_suite:stop(?config(apps, Config)), ok. init_per_testcase(_Case, Config) -> @@ -42,9 +45,10 @@ init_per_testcase(_Case, Config) -> <<"backend">> => <<"built_in_database">>, <<"user_id_type">> => <<"clientid">> }, - {ok, _} = emqx:update_config( + {ok, _} = emqx_conf:update( ?PATH, - {create_authenticator, ?GLOBAL, AuthnConfig} + {create_authenticator, ?GLOBAL, AuthnConfig}, + #{} ), {ok, _} = emqx_conf:update( [listeners, tcp, listener_authn_enabled], @@ -98,7 +102,7 @@ t_enable_authn(_Config) -> %% enable_authn set to true, we go to the set up authn and fail {ok, ConnPid1} = emqtt:start_link([{port, 18830}, {clientid, <<"clientid">>}]), ?assertMatch( - {error, {unauthorized_client, _}}, + {error, {malformed_username_or_password, _}}, emqtt:connect(ConnPid1) ), ok. diff --git a/apps/emqx_authn/test/emqx_authn_http_SUITE.erl b/apps/emqx_authn/test/emqx_authn_http_SUITE.erl index b08167a5b..dcd41a28d 100644 --- a/apps/emqx_authn/test/emqx_authn_http_SUITE.erl +++ b/apps/emqx_authn/test/emqx_authn_http_SUITE.erl @@ -65,18 +65,17 @@ all() -> emqx_common_test_helpers:all(?MODULE). init_per_suite(Config) -> - _ = application:load(emqx_conf), - emqx_common_test_helpers:start_apps([emqx_authn]), - application:ensure_all_started(cowboy), - Config. + Apps = emqx_cth_suite:start([cowboy, emqx, emqx_conf, emqx_authn], #{ + work_dir => ?config(priv_dir, Config) + }), + [{apps, Apps} | Config]. -end_per_suite(_) -> +end_per_suite(Config) -> emqx_authn_test_lib:delete_authenticators( [authentication], ?GLOBAL ), - emqx_common_test_helpers:stop_apps([emqx_authn]), - application:stop(cowboy), + ok = emqx_cth_suite:stop(?config(apps, Config)), ok. 
init_per_testcase(_Case, Config) -> diff --git a/apps/emqx_authn/test/emqx_authn_https_SUITE.erl b/apps/emqx_authn/test/emqx_authn_https_SUITE.erl index c4315b69f..6fb8de294 100644 --- a/apps/emqx_authn/test/emqx_authn_https_SUITE.erl +++ b/apps/emqx_authn/test/emqx_authn_https_SUITE.erl @@ -39,18 +39,17 @@ all() -> emqx_common_test_helpers:all(?MODULE). init_per_suite(Config) -> - _ = application:load(emqx_conf), - emqx_common_test_helpers:start_apps([emqx_authn]), - application:ensure_all_started(cowboy), - Config. + Apps = emqx_cth_suite:start([cowboy, emqx, emqx_conf, emqx_authn], #{ + work_dir => ?config(priv_dir, Config) + }), + [{apps, Apps} | Config]. -end_per_suite(_) -> +end_per_suite(Config) -> emqx_authn_test_lib:delete_authenticators( [authentication], ?GLOBAL ), - emqx_common_test_helpers:stop_apps([emqx_authn]), - application:stop(cowboy), + ok = emqx_cth_suite:stop(?config(apps, Config)), ok. init_per_testcase(_Case, Config) -> diff --git a/apps/emqx_authn/test/emqx_authn_jwt_SUITE.erl b/apps/emqx_authn/test/emqx_authn_jwt_SUITE.erl index bd18367b6..75dfcbc6f 100644 --- a/apps/emqx_authn/test/emqx_authn_jwt_SUITE.erl +++ b/apps/emqx_authn/test/emqx_authn_jwt_SUITE.erl @@ -31,21 +31,14 @@ all() -> emqx_common_test_helpers:all(?MODULE). -init_per_testcase(_, Config) -> - {ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000), - Config. - init_per_suite(Config) -> - _ = application:load(emqx_conf), - emqx_common_test_helpers:start_apps([emqx_conf, emqx_authn]), - application:ensure_all_started(emqx_resource), - application:ensure_all_started(emqx_connector), - Config. + Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn], #{ + work_dir => ?config(priv_dir, Config) + }), + [{apps, Apps} | Config]. -end_per_suite(_) -> - application:stop(emqx_connector), - application:stop(emqx_resource), - emqx_common_test_helpers:stop_apps([emqx_authn]), +end_per_suite(Config) -> + ok = emqx_cth_suite:stop(?config(apps, Config)), ok. 
%%------------------------------------------------------------------------------ diff --git a/apps/emqx_authn/test/emqx_authn_listeners_SUITE.erl b/apps/emqx_authn/test/emqx_authn_listeners_SUITE.erl new file mode 100644 index 000000000..9708bf1bb --- /dev/null +++ b/apps/emqx_authn/test/emqx_authn_listeners_SUITE.erl @@ -0,0 +1,242 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_authn_listeners_SUITE). + +-compile(export_all). +-compile(nowarn_export_all). + +-include("emqx_authn.hrl"). + +-include_lib("eunit/include/eunit.hrl"). +-include_lib("common_test/include/ct.hrl"). + +all() -> + emqx_common_test_helpers:all(?MODULE). + +init_per_suite(Config) -> + Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn], #{ + work_dir => ?config(priv_dir, Config) + }), + [{apps, Apps} | Config]. + +end_per_suite(Config) -> + ok = emqx_cth_suite:stop(?config(apps, Config)), + ok. + +init_per_testcase(_Case, Config) -> + Port = emqx_common_test_helpers:select_free_port(tcp), + [{port, Port} | Config]. + +end_per_testcase(_Case, _Config) -> + ok. 
+ +t_create_update_delete(Config) -> + ListenerConf = listener_mqtt_tcp_conf(Config), + AuthnConfig0 = #{ + <<"mechanism">> => <<"password_based">>, + <<"backend">> => <<"built_in_database">>, + <<"user_id_type">> => <<"clientid">> + }, + %% Create + {ok, _} = emqx_conf:update( + [listeners], + #{ + <<"tcp">> => #{ + <<"listener0">> => ListenerConf#{ + ?CONF_NS_BINARY => AuthnConfig0 + } + } + }, + #{} + ), + ?assertMatch( + {ok, [ + #{ + authenticators := [ + #{ + id := <<"password_based:built_in_database">>, + state := #{ + user_id_type := clientid + } + } + ], + name := 'tcp:listener0' + } + ]}, + emqx_authentication:list_chains() + ), + + %% Drop old, create new + {ok, _} = emqx_conf:update( + [listeners], + #{ + <<"tcp">> => #{ + <<"listener1">> => ListenerConf#{ + ?CONF_NS_BINARY => AuthnConfig0 + } + } + }, + #{} + ), + ?assertMatch( + {ok, [ + #{ + authenticators := [ + #{ + id := <<"password_based:built_in_database">>, + state := #{ + user_id_type := clientid + } + } + ], + name := 'tcp:listener1' + } + ]}, + emqx_authentication:list_chains() + ), + + %% Update + {ok, _} = emqx_conf:update( + [listeners], + #{ + <<"tcp">> => #{ + <<"listener1">> => ListenerConf#{ + ?CONF_NS_BINARY => AuthnConfig0#{<<"user_id_type">> => <<"username">>} + } + } + }, + #{} + ), + ?assertMatch( + {ok, [ + #{ + authenticators := [ + #{ + id := <<"password_based:built_in_database">>, + state := #{ + user_id_type := username + } + } + ], + name := 'tcp:listener1' + } + ]}, + emqx_authentication:list_chains() + ), + + %% Update by listener path + {ok, _} = emqx_conf:update( + [listeners, tcp, listener1], + {update, ListenerConf#{ + ?CONF_NS_BINARY => AuthnConfig0#{<<"user_id_type">> => <<"clientid">>} + }}, + #{} + ), + ?assertMatch( + {ok, [ + #{ + authenticators := [ + #{ + id := <<"password_based:built_in_database">>, + state := #{ + user_id_type := clientid + } + } + ], + name := 'tcp:listener1' + } + ]}, + emqx_authentication:list_chains() + ), + + %% Delete + {ok, _} = 
emqx_conf:tombstone( + [listeners, tcp, listener1], + #{} + ), + ?assertMatch( + {ok, []}, + emqx_authentication:list_chains() + ). + +t_convert_certs(Config) -> + ListenerConf = listener_mqtt_tcp_conf(Config), + AuthnConfig0 = #{ + <<"mechanism">> => <<"password_based">>, + <<"password_hash_algorithm">> => #{ + <<"name">> => <<"plain">>, + <<"salt_position">> => <<"suffix">> + }, + <<"enable">> => <<"true">>, + + <<"backend">> => <<"redis">>, + <<"cmd">> => <<"HMGET mqtt_user:${username} password_hash salt is_superuser">>, + <<"database">> => <<"1">>, + <<"password">> => <<"public">>, + <<"server">> => <<"127.0.0.1:55555">>, + <<"redis_type">> => <<"single">>, + <<"ssl">> => #{ + <<"enable">> => true, + <<"cacertfile">> => some_pem(), + <<"certfile">> => some_pem(), + <<"keyfile">> => some_pem() + } + }, + {ok, _} = emqx_conf:update( + [listeners], + #{ + <<"tcp">> => #{ + <<"listener0">> => ListenerConf#{ + ?CONF_NS_BINARY => AuthnConfig0 + } + } + }, + #{} + ), + lists:foreach( + fun(Key) -> + [#{ssl := #{Key := FilePath}}] = emqx_config:get([ + listeners, tcp, listener0, authentication + ]), + ?assert(filelib:is_regular(FilePath)) + end, + [cacertfile, certfile, keyfile] + ). + +%%-------------------------------------------------------------------- +%% Helper Functions +%%-------------------------------------------------------------------- + +listener_mqtt_tcp_conf(Config) -> + Port = ?config(port, Config), + PortS = integer_to_binary(Port), + #{ + <<"acceptors">> => 16, + <<"access_rules">> => [<<"allow all">>], + <<"bind">> => <<"0.0.0.0:", PortS/binary>>, + <<"max_connections">> => 1024000, + <<"mountpoint">> => <<>>, + <<"proxy_protocol">> => false, + <<"proxy_protocol_timeout">> => <<"3s">>, + <<"enable_authn">> => true + }. + +some_pem() -> + Dir = code:lib_dir(emqx_authn, test), + Path = filename:join([Dir, "data", "private_key.pem"]), + {ok, Pem} = file:read_file(Path), + Pem. 
diff --git a/apps/emqx_authn/test/emqx_authn_mnesia_SUITE.erl b/apps/emqx_authn/test/emqx_authn_mnesia_SUITE.erl index 599eae92e..9781b8ca7 100644 --- a/apps/emqx_authn/test/emqx_authn_mnesia_SUITE.erl +++ b/apps/emqx_authn/test/emqx_authn_mnesia_SUITE.erl @@ -20,8 +20,7 @@ -compile(nowarn_export_all). -include_lib("eunit/include/eunit.hrl"). - --include("emqx_authn.hrl"). +-include_lib("common_test/include/ct.hrl"). -define(AUTHN_ID, <<"mechanism:backend">>). @@ -29,16 +28,16 @@ all() -> emqx_common_test_helpers:all(?MODULE). init_per_suite(Config) -> - _ = application:load(emqx_conf), - emqx_common_test_helpers:start_apps([emqx_authn]), - Config. + Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn], #{ + work_dir => ?config(priv_dir, Config) + }), + [{apps, Apps} | Config]. -end_per_suite(_) -> - emqx_common_test_helpers:stop_apps([emqx_authn]), +end_per_suite(Config) -> + ok = emqx_cth_suite:stop(?config(apps, Config)), ok. init_per_testcase(_Case, Config) -> - {ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000), mria:clear_table(emqx_authn_mnesia), Config. diff --git a/apps/emqx_authn/test/emqx_authn_mongo_SUITE.erl b/apps/emqx_authn/test/emqx_authn_mongo_SUITE.erl index 07933031e..9ea7f9eb2 100644 --- a/apps/emqx_authn/test/emqx_authn_mongo_SUITE.erl +++ b/apps/emqx_authn/test/emqx_authn_mongo_SUITE.erl @@ -33,7 +33,6 @@ all() -> emqx_common_test_helpers:all(?MODULE). init_per_testcase(_TestCase, Config) -> - {ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000), emqx_authentication:initialize_authentication(?GLOBAL, []), emqx_authn_test_lib:delete_authenticators( [authentication], @@ -46,23 +45,23 @@ end_per_testcase(_TestCase, _Config) -> ok = mc_worker_api:disconnect(?MONGO_CLIENT). 
init_per_suite(Config) -> - _ = application:load(emqx_conf), case emqx_common_test_helpers:is_tcp_server_available(?MONGO_HOST, ?MONGO_DEFAULT_PORT) of true -> - ok = emqx_common_test_helpers:start_apps([emqx_authn]), - ok = start_apps([emqx_resource]), - Config; + Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn], #{ + work_dir => ?config(priv_dir, Config) + }), + [{apps, Apps} | Config]; false -> {skip, no_mongo} end. -end_per_suite(_Config) -> +end_per_suite(Config) -> emqx_authn_test_lib:delete_authenticators( [authentication], ?GLOBAL ), - ok = stop_apps([emqx_resource]), - ok = emqx_common_test_helpers:stop_apps([emqx_authn]). + ok = emqx_cth_suite:stop(?config(apps, Config)), + ok. %%------------------------------------------------------------------------------ %% Tests diff --git a/apps/emqx_authn/test/emqx_authn_mongo_tls_SUITE.erl b/apps/emqx_authn/test/emqx_authn_mongo_tls_SUITE.erl index 34f906dd9..af550379b 100644 --- a/apps/emqx_authn/test/emqx_authn_mongo_tls_SUITE.erl +++ b/apps/emqx_authn/test/emqx_authn_mongo_tls_SUITE.erl @@ -33,7 +33,6 @@ all() -> emqx_common_test_helpers:all(?MODULE). init_per_testcase(_TestCase, Config) -> - {ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000), emqx_authentication:initialize_authentication(?GLOBAL, []), emqx_authn_test_lib:delete_authenticators( [authentication], @@ -42,23 +41,23 @@ init_per_testcase(_TestCase, Config) -> Config. init_per_suite(Config) -> - _ = application:load(emqx_conf), case emqx_common_test_helpers:is_tcp_server_available(?MONGO_HOST, ?MONGO_DEFAULT_PORT) of true -> - ok = emqx_common_test_helpers:start_apps([emqx_authn]), - ok = start_apps([emqx_resource]), - Config; + Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn], #{ + work_dir => ?config(priv_dir, Config) + }), + [{apps, Apps} | Config]; false -> {skip, no_mongo} end. 
-end_per_suite(_Config) -> +end_per_suite(Config) -> emqx_authn_test_lib:delete_authenticators( [authentication], ?GLOBAL ), - ok = stop_apps([emqx_resource]), - ok = emqx_common_test_helpers:stop_apps([emqx_authn]). + ok = emqx_cth_suite:stop(?config(apps, Config)), + ok. %%------------------------------------------------------------------------------ %% Tests diff --git a/apps/emqx_authn/test/emqx_authn_mysql_SUITE.erl b/apps/emqx_authn/test/emqx_authn_mysql_SUITE.erl index 914ce4dd1..2173b943b 100644 --- a/apps/emqx_authn/test/emqx_authn_mysql_SUITE.erl +++ b/apps/emqx_authn/test/emqx_authn_mysql_SUITE.erl @@ -37,7 +37,6 @@ groups() -> [{require_seeds, [], [t_authenticate, t_update, t_destroy]}]. init_per_testcase(_, Config) -> - {ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000), emqx_authentication:initialize_authentication(?GLOBAL, []), emqx_authn_test_lib:delete_authenticators( [authentication], @@ -54,11 +53,11 @@ end_per_group(require_seeds, Config) -> Config. init_per_suite(Config) -> - _ = application:load(emqx_conf), case emqx_common_test_helpers:is_tcp_server_available(?MYSQL_HOST, ?MYSQL_DEFAULT_PORT) of true -> - ok = emqx_common_test_helpers:start_apps([emqx_authn]), - ok = start_apps([emqx_resource]), + Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn], #{ + work_dir => ?config(priv_dir, Config) + }), {ok, _} = emqx_resource:create_local( ?MYSQL_RESOURCE, ?RESOURCE_GROUP, @@ -66,19 +65,19 @@ init_per_suite(Config) -> mysql_config(), #{} ), - Config; + [{apps, Apps} | Config]; false -> {skip, no_mysql} end. -end_per_suite(_Config) -> +end_per_suite(Config) -> emqx_authn_test_lib:delete_authenticators( [authentication], ?GLOBAL ), ok = emqx_resource:remove_local(?MYSQL_RESOURCE), - ok = stop_apps([emqx_resource]), - ok = emqx_common_test_helpers:stop_apps([emqx_authn]). + ok = emqx_cth_suite:stop(?config(apps, Config)), + ok. 
%%------------------------------------------------------------------------------ %% Tests diff --git a/apps/emqx_authn/test/emqx_authn_mysql_tls_SUITE.erl b/apps/emqx_authn/test/emqx_authn_mysql_tls_SUITE.erl index 5d5a3f7ac..888ff5e6b 100644 --- a/apps/emqx_authn/test/emqx_authn_mysql_tls_SUITE.erl +++ b/apps/emqx_authn/test/emqx_authn_mysql_tls_SUITE.erl @@ -36,7 +36,6 @@ groups() -> []. init_per_testcase(_, Config) -> - {ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000), emqx_authentication:initialize_authentication(?GLOBAL, []), emqx_authn_test_lib:delete_authenticators( [authentication], @@ -45,23 +44,23 @@ init_per_testcase(_, Config) -> Config. init_per_suite(Config) -> - _ = application:load(emqx_conf), case emqx_common_test_helpers:is_tcp_server_available(?MYSQL_HOST, ?MYSQL_DEFAULT_PORT) of true -> - ok = emqx_common_test_helpers:start_apps([emqx_authn]), - ok = start_apps([emqx_resource]), - Config; + Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn], #{ + work_dir => ?config(priv_dir, Config) + }), + [{apps, Apps} | Config]; false -> {skip, no_mysql_tls} end. -end_per_suite(_Config) -> +end_per_suite(Config) -> emqx_authn_test_lib:delete_authenticators( [authentication], ?GLOBAL ), - ok = stop_apps([emqx_resource]), - ok = emqx_common_test_helpers:stop_apps([emqx_authn]). + ok = emqx_cth_suite:stop(?config(apps, Config)), + ok. %%------------------------------------------------------------------------------ %% Tests diff --git a/apps/emqx_authn/test/emqx_authn_pgsql_SUITE.erl b/apps/emqx_authn/test/emqx_authn_pgsql_SUITE.erl index 075ae5cb7..1c9f0f86b 100644 --- a/apps/emqx_authn/test/emqx_authn_pgsql_SUITE.erl +++ b/apps/emqx_authn/test/emqx_authn_pgsql_SUITE.erl @@ -23,7 +23,6 @@ -include_lib("emqx_authn/include/emqx_authn.hrl"). -include_lib("eunit/include/eunit.hrl"). -include_lib("common_test/include/ct.hrl"). --include_lib("emqx/include/emqx_placeholder.hrl"). -define(PGSQL_HOST, "pgsql"). 
-define(PGSQL_RESOURCE, <<"emqx_authn_pgsql_SUITE">>). @@ -42,7 +41,6 @@ groups() -> [{require_seeds, [], [t_create, t_authenticate, t_update, t_destroy, t_is_superuser]}]. init_per_testcase(_, Config) -> - {ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000), emqx_authentication:initialize_authentication(?GLOBAL, []), emqx_authn_test_lib:delete_authenticators( [authentication], @@ -59,11 +57,11 @@ end_per_group(require_seeds, Config) -> Config. init_per_suite(Config) -> - _ = application:load(emqx_conf), case emqx_common_test_helpers:is_tcp_server_available(?PGSQL_HOST, ?PGSQL_DEFAULT_PORT) of true -> - ok = emqx_common_test_helpers:start_apps([emqx_authn]), - ok = start_apps([emqx_resource]), + Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn], #{ + work_dir => ?config(priv_dir, Config) + }), {ok, _} = emqx_resource:create_local( ?PGSQL_RESOURCE, ?RESOURCE_GROUP, @@ -71,19 +69,19 @@ init_per_suite(Config) -> pgsql_config(), #{} ), - Config; + [{apps, Apps} | Config]; false -> {skip, no_pgsql} end. -end_per_suite(_Config) -> +end_per_suite(Config) -> emqx_authn_test_lib:delete_authenticators( [authentication], ?GLOBAL ), ok = emqx_resource:remove_local(?PGSQL_RESOURCE), - ok = stop_apps([emqx_resource]), - ok = emqx_common_test_helpers:stop_apps([emqx_authn]). + ok = emqx_cth_suite:stop(?config(apps, Config)), + ok. 
%%------------------------------------------------------------------------------ %% Tests diff --git a/apps/emqx_authn/test/emqx_authn_pgsql_tls_SUITE.erl b/apps/emqx_authn/test/emqx_authn_pgsql_tls_SUITE.erl index ae0a01572..4862572e6 100644 --- a/apps/emqx_authn/test/emqx_authn_pgsql_tls_SUITE.erl +++ b/apps/emqx_authn/test/emqx_authn_pgsql_tls_SUITE.erl @@ -48,20 +48,21 @@ init_per_suite(Config) -> _ = application:load(emqx_conf), case emqx_common_test_helpers:is_tcp_server_available(?PGSQL_HOST, ?PGSQL_DEFAULT_PORT) of true -> - ok = emqx_common_test_helpers:start_apps([emqx_authn]), - ok = start_apps([emqx_resource]), - Config; + Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn], #{ + work_dir => ?config(priv_dir, Config) + }), + [{apps, Apps} | Config]; false -> {skip, no_pgsql_tls} end. -end_per_suite(_Config) -> +end_per_suite(Config) -> emqx_authn_test_lib:delete_authenticators( [authentication], ?GLOBAL ), - ok = stop_apps([emqx_resource]), - ok = emqx_common_test_helpers:stop_apps([emqx_authn]). + ok = emqx_cth_suite:stop(?config(apps, Config)), + ok. %%------------------------------------------------------------------------------ %% Tests diff --git a/apps/emqx_authn/test/emqx_authn_redis_SUITE.erl b/apps/emqx_authn/test/emqx_authn_redis_SUITE.erl index 31602ecec..c8ae3d2a2 100644 --- a/apps/emqx_authn/test/emqx_authn_redis_SUITE.erl +++ b/apps/emqx_authn/test/emqx_authn_redis_SUITE.erl @@ -42,7 +42,6 @@ groups() -> [{require_seeds, [], [t_authenticate, t_update, t_destroy]}]. init_per_testcase(_, Config) -> - {ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000), emqx_authentication:initialize_authentication(?GLOBAL, []), emqx_authn_test_lib:delete_authenticators( [authentication], @@ -59,11 +58,11 @@ end_per_group(require_seeds, Config) -> Config. 
init_per_suite(Config) -> - _ = application:load(emqx_conf), case emqx_common_test_helpers:is_tcp_server_available(?REDIS_HOST, ?REDIS_DEFAULT_PORT) of true -> - ok = emqx_common_test_helpers:start_apps([emqx_authn]), - ok = start_apps([emqx_resource]), + Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn], #{ + work_dir => ?config(priv_dir, Config) + }), {ok, _} = emqx_resource:create_local( ?REDIS_RESOURCE, ?RESOURCE_GROUP, @@ -71,19 +70,19 @@ init_per_suite(Config) -> redis_config(), #{} ), - Config; + [{apps, Apps} | Config]; false -> {skip, no_redis} end. -end_per_suite(_Config) -> +end_per_suite(Config) -> emqx_authn_test_lib:delete_authenticators( [authentication], ?GLOBAL ), ok = emqx_resource:remove_local(?REDIS_RESOURCE), - ok = stop_apps([emqx_resource]), - ok = emqx_common_test_helpers:stop_apps([emqx_authn]). + ok = emqx_cth_suite:stop(?config(apps, Config)), + ok. %%------------------------------------------------------------------------------ %% Tests diff --git a/apps/emqx_authn/test/emqx_authn_redis_tls_SUITE.erl b/apps/emqx_authn/test/emqx_authn_redis_tls_SUITE.erl index 8df54ebce..291caed1b 100644 --- a/apps/emqx_authn/test/emqx_authn_redis_tls_SUITE.erl +++ b/apps/emqx_authn/test/emqx_authn_redis_tls_SUITE.erl @@ -19,7 +19,6 @@ -compile(nowarn_export_all). -compile(export_all). --include_lib("emqx_connector/include/emqx_connector.hrl"). -include_lib("emqx_authn/include/emqx_authn.hrl"). -include_lib("eunit/include/eunit.hrl"). -include_lib("common_test/include/ct.hrl"). @@ -36,7 +35,6 @@ groups() -> []. init_per_testcase(_, Config) -> - {ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000), emqx_authentication:initialize_authentication(?GLOBAL, []), emqx_authn_test_lib:delete_authenticators( [authentication], @@ -45,23 +43,23 @@ init_per_testcase(_, Config) -> Config. 
init_per_suite(Config) -> - _ = application:load(emqx_conf), case emqx_common_test_helpers:is_tcp_server_available(?REDIS_HOST, ?REDIS_TLS_PORT) of true -> - ok = emqx_common_test_helpers:start_apps([emqx_authn]), - ok = start_apps([emqx_resource]), - Config; + Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn], #{ + work_dir => ?config(priv_dir, Config) + }), + [{apps, Apps} | Config]; false -> {skip, no_redis} end. -end_per_suite(_Config) -> +end_per_suite(Config) -> emqx_authn_test_lib:delete_authenticators( [authentication], ?GLOBAL ), - ok = stop_apps([emqx_resource]), - ok = emqx_common_test_helpers:stop_apps([emqx_authn]). + ok = emqx_cth_suite:stop(?config(apps, Config)), + ok. %%------------------------------------------------------------------------------ %% Tests diff --git a/apps/emqx_authn/test/emqx_authn_schema_SUITE.erl b/apps/emqx_authn/test/emqx_authn_schema_SUITE.erl index 3afb8e973..8266ade10 100644 --- a/apps/emqx_authn/test/emqx_authn_schema_SUITE.erl +++ b/apps/emqx_authn/test/emqx_authn_schema_SUITE.erl @@ -4,6 +4,7 @@ -compile(nowarn_export_all). -include_lib("eunit/include/eunit.hrl"). +-include_lib("common_test/include/ct.hrl"). -include("emqx_authn.hrl"). @@ -11,16 +12,16 @@ all() -> emqx_common_test_helpers:all(?MODULE). init_per_suite(Config) -> - _ = application:load(emqx_conf), - emqx_common_test_helpers:start_apps([emqx_authn]), - Config. + Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn], #{ + work_dir => ?config(priv_dir, Config) + }), + [{apps, Apps} | Config]. -end_per_suite(_) -> - emqx_common_test_helpers:stop_apps([emqx_authn]), +end_per_suite(Config) -> + ok = emqx_cth_suite:stop(?config(apps, Config)), ok. init_per_testcase(_Case, Config) -> - {ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000), mria:clear_table(emqx_authn_mnesia), Config. 
diff --git a/apps/emqx_authn/test/emqx_enhanced_authn_scram_mnesia_SUITE.erl b/apps/emqx_authn/test/emqx_enhanced_authn_scram_mnesia_SUITE.erl index f52e895cc..baaf15175 100644 --- a/apps/emqx_authn/test/emqx_enhanced_authn_scram_mnesia_SUITE.erl +++ b/apps/emqx_authn/test/emqx_enhanced_authn_scram_mnesia_SUITE.erl @@ -36,17 +36,18 @@ all() -> emqx_common_test_helpers:all(?MODULE). init_per_suite(Config) -> - _ = application:load(emqx_conf), - ok = emqx_common_test_helpers:start_apps([emqx_authn]), + Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn], #{ + work_dir => ?config(priv_dir, Config) + }), IdleTimeout = emqx_config:get([mqtt, idle_timeout]), - [{idle_timeout, IdleTimeout} | Config]. + [{apps, Apps}, {idle_timeout, IdleTimeout} | Config]. end_per_suite(Config) -> ok = emqx_config:put([mqtt, idle_timeout], ?config(idle_timeout, Config)), - ok = emqx_common_test_helpers:stop_apps([emqx_authn]). + ok = emqx_cth_suite:stop(?config(apps, Config)), + ok. init_per_testcase(_Case, Config) -> - {ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000), mria:clear_table(emqx_enhanced_authn_scram_mnesia), emqx_authn_test_lib:delete_authenticators( [authentication], diff --git a/apps/emqx_bridge/src/emqx_bridge.app.src b/apps/emqx_bridge/src/emqx_bridge.app.src index 96d953e34..3a76843a1 100644 --- a/apps/emqx_bridge/src/emqx_bridge.app.src +++ b/apps/emqx_bridge/src/emqx_bridge.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_bridge, [ {description, "EMQX bridges"}, - {vsn, "0.1.26"}, + {vsn, "0.1.27"}, {registered, [emqx_bridge_sup]}, {mod, {emqx_bridge_app, []}}, {applications, [ diff --git a/apps/emqx_bridge/src/emqx_bridge.erl b/apps/emqx_bridge/src/emqx_bridge.erl index edd21a239..4fa6cd346 100644 --- a/apps/emqx_bridge/src/emqx_bridge.erl +++ b/apps/emqx_bridge/src/emqx_bridge.erl @@ -55,6 +55,7 @@ ]). -export([config_key_path/0]). +-export([validate_bridge_name/1]). 
%% exported for `emqx_telemetry' -export([get_basic_usage_info/0]). @@ -96,6 +97,9 @@ -define(ROOT_KEY, bridges). +%% See `hocon_tconf` +-define(MAP_KEY_RE, <<"^[A-Za-z0-9]+[A-Za-z0-9-_]*$">>). + load() -> Bridges = emqx:get_config([?ROOT_KEY], #{}), lists:foreach( @@ -580,3 +584,19 @@ get_basic_usage_info() -> _:_ -> InitialAcc end. + +validate_bridge_name(BridgeName0) -> + BridgeName = to_bin(BridgeName0), + case re:run(BridgeName, ?MAP_KEY_RE, [{capture, none}]) of + match -> + ok; + nomatch -> + {error, #{ + kind => validation_error, + reason => bad_bridge_name, + value => BridgeName + }} + end. + +to_bin(A) when is_atom(A) -> atom_to_binary(A, utf8); +to_bin(B) when is_binary(B) -> B. diff --git a/apps/emqx_bridge/src/emqx_bridge_api.erl b/apps/emqx_bridge/src/emqx_bridge_api.erl index 3190a2ef9..e49b54d67 100644 --- a/apps/emqx_bridge/src/emqx_bridge_api.erl +++ b/apps/emqx_bridge/src/emqx_bridge_api.erl @@ -609,6 +609,8 @@ create_or_update_bridge(BridgeType, BridgeName, Conf, HttpStatusCode) -> case emqx_bridge:create(BridgeType, BridgeName, Conf) of {ok, _} -> lookup_from_all_nodes(BridgeType, BridgeName, HttpStatusCode); + {error, {pre_config_update, _HandlerMod, Reason}} when is_map(Reason) -> + ?BAD_REQUEST(map_to_json(redact(Reason))); {error, Reason} when is_map(Reason) -> ?BAD_REQUEST(map_to_json(redact(Reason))) end. 
diff --git a/apps/emqx_bridge/src/emqx_bridge_app.erl b/apps/emqx_bridge/src/emqx_bridge_app.erl index 3bae55090..d0dd7da2b 100644 --- a/apps/emqx_bridge/src/emqx_bridge_app.erl +++ b/apps/emqx_bridge/src/emqx_bridge_app.erl @@ -62,11 +62,16 @@ pre_config_update(_, {Oper, _Type, _Name}, OldConfig) -> %% to save the 'enable' to the config files {ok, OldConfig#{<<"enable">> => operation_to_enable(Oper)}}; pre_config_update(Path, Conf, _OldConfig) when is_map(Conf) -> - case emqx_connector_ssl:convert_certs(filename:join(Path), Conf) of - {error, Reason} -> - {error, Reason}; - {ok, ConfNew} -> - {ok, ConfNew} + case validate_bridge_name(Path) of + ok -> + case emqx_connector_ssl:convert_certs(filename:join(Path), Conf) of + {error, Reason} -> + {error, Reason}; + {ok, ConfNew} -> + {ok, ConfNew} + end; + Error -> + Error end. post_config_update([bridges, BridgeType, BridgeName], '$remove', _, _OldConf, _AppEnvs) -> @@ -97,3 +102,12 @@ post_config_update([bridges, BridgeType, BridgeName], _Req, NewConf, OldConf, _A %% internal functions operation_to_enable(disable) -> false; operation_to_enable(enable) -> true. + +validate_bridge_name(Path) -> + [RootKey] = emqx_bridge:config_key_path(), + case Path of + [RootKey, _BridgeType, BridgeName] -> + emqx_bridge:validate_bridge_name(BridgeName); + _ -> + ok + end. diff --git a/apps/emqx_bridge/src/emqx_bridge_resource.erl b/apps/emqx_bridge/src/emqx_bridge_resource.erl index a48d0294e..0756e1382 100644 --- a/apps/emqx_bridge/src/emqx_bridge_resource.erl +++ b/apps/emqx_bridge/src/emqx_bridge_resource.erl @@ -49,11 +49,8 @@ update/4 ]). --callback connector_config(ParsedConfig, BridgeName :: atom() | binary()) -> - ParsedConfig -when - ParsedConfig :: #{atom() => any()}. --optional_callbacks([connector_config/2]). +-callback connector_config(ParsedConfig) -> ParsedConfig when ParsedConfig :: #{atom() => any()}. +-optional_callbacks([connector_config/1]). 
%% bi-directional bridge with producer/consumer or ingress/egress configs -define(IS_BI_DIR_BRIDGE(TYPE), @@ -175,14 +172,15 @@ create(BridgeId, Conf) -> create(Type, Name, Conf) -> create(Type, Name, Conf, #{}). -create(Type, Name, Conf, Opts) -> +create(Type, Name, Conf0, Opts) -> ?SLOG(info, #{ msg => "create bridge", type => Type, name => Name, - config => emqx_utils:redact(Conf) + config => emqx_utils:redact(Conf0) }), TypeBin = bin(Type), + Conf = Conf0#{bridge_type => TypeBin, bridge_name => Name}, {ok, _Data} = emqx_resource:create_local( resource_id(Type, Name), <<"emqx_bridge">>, @@ -249,8 +247,9 @@ recreate(Type, Name) -> recreate(Type, Name, Conf) -> recreate(Type, Name, Conf, #{}). -recreate(Type, Name, Conf, Opts) -> +recreate(Type, Name, Conf0, Opts) -> TypeBin = bin(Type), + Conf = Conf0#{bridge_type => TypeBin, bridge_name => Name}, emqx_resource:recreate_local( resource_id(Type, Name), bridge_to_resource_type(Type), @@ -267,17 +266,18 @@ create_dry_run(Type, Conf0) -> Conf1 = maps:without([<<"name">>], Conf0), RawConf = #{<<"bridges">> => #{TypeBin => #{<<"temp_name">> => Conf1}}}, try - #{bridges := #{TypeAtom := #{temp_name := Conf}}} = + #{bridges := #{TypeAtom := #{temp_name := Conf2}}} = hocon_tconf:check_plain( emqx_bridge_schema, RawConf, #{atom_key => true, required => false} ), + Conf = Conf2#{bridge_type => TypeBin, bridge_name => TmpName}, case emqx_connector_ssl:convert_certs(TmpPath, Conf) of {error, Reason} -> {error, Reason}; {ok, ConfNew} -> - ParseConf = parse_confs(bin(Type), TmpName, ConfNew), + ParseConf = parse_confs(TypeBin, TmpName, ConfNew), emqx_resource:create_dry_run_local(bridge_to_resource_type(Type), ParseConf) end catch @@ -387,23 +387,15 @@ parse_confs(Type, Name, Conf) when ?IS_INGRESS_BRIDGE(Type) -> %% receives a message from the external database. 
BId = bridge_id(Type, Name), BridgeHookpoint = bridge_hookpoint(BId), - Conf#{hookpoint => BridgeHookpoint, bridge_name => Name}; -%% TODO: rename this to `kafka_producer' after alias support is added -%% to hocon; keeping this as just `kafka' for backwards compatibility. -parse_confs(<<"kafka">> = _Type, Name, Conf) -> - Conf#{bridge_name => Name}; -parse_confs(<<"pulsar_producer">> = _Type, Name, Conf) -> - Conf#{bridge_name => Name}; -parse_confs(<<"kinesis_producer">> = _Type, Name, Conf) -> - Conf#{bridge_name => Name}; -parse_confs(BridgeType, BridgeName, Config) -> - connector_config(BridgeType, BridgeName, Config). + Conf#{hookpoint => BridgeHookpoint}; +parse_confs(BridgeType, _BridgeName, Config) -> + connector_config(BridgeType, Config). -connector_config(BridgeType, BridgeName, Config) -> +connector_config(BridgeType, Config) -> Mod = bridge_impl_module(BridgeType), - case erlang:function_exported(Mod, connector_config, 2) of + case erlang:function_exported(Mod, connector_config, 1) of true -> - Mod:connector_config(Config, BridgeName); + Mod:connector_config(Config); false -> Config end. diff --git a/apps/emqx_bridge/src/schema/emqx_bridge_enterprise.erl b/apps/emqx_bridge/src/schema/emqx_bridge_enterprise.erl index 07f858979..a6bd4a754 100644 --- a/apps/emqx_bridge/src/schema/emqx_bridge_enterprise.erl +++ b/apps/emqx_bridge/src/schema/emqx_bridge_enterprise.erl @@ -32,8 +32,7 @@ api_schemas(Method) -> api_ref(emqx_bridge_mongodb, <<"mongodb_rs">>, Method ++ "_rs"), api_ref(emqx_bridge_mongodb, <<"mongodb_sharded">>, Method ++ "_sharded"), api_ref(emqx_bridge_mongodb, <<"mongodb_single">>, Method ++ "_single"), - %% TODO: un-hide for e5.2.0... 
- %%api_ref(emqx_bridge_hstreamdb, <<"hstreamdb">>, Method), + api_ref(emqx_bridge_hstreamdb, <<"hstreamdb">>, Method), api_ref(emqx_bridge_influxdb, <<"influxdb_api_v1">>, Method ++ "_api_v1"), api_ref(emqx_bridge_influxdb, <<"influxdb_api_v2">>, Method ++ "_api_v2"), api_ref(emqx_bridge_redis, <<"redis_single">>, Method ++ "_single"), @@ -147,8 +146,7 @@ fields(bridges) -> hoconsc:map(name, ref(emqx_bridge_hstreamdb, "config")), #{ desc => <<"HStreamDB Bridge Config">>, - required => false, - importance => ?IMPORTANCE_HIDDEN + required => false } )}, {mysql, diff --git a/apps/emqx_bridge/test/emqx_bridge_SUITE.erl b/apps/emqx_bridge/test/emqx_bridge_SUITE.erl index 1cbc94d24..c9157d9e6 100644 --- a/apps/emqx_bridge/test/emqx_bridge_SUITE.erl +++ b/apps/emqx_bridge/test/emqx_bridge_SUITE.erl @@ -26,19 +26,20 @@ all() -> emqx_common_test_helpers:all(?MODULE). init_per_suite(Config) -> - _ = application:load(emqx_conf), - %% to avoid inter-suite dependencies - application:stop(emqx_connector), - ok = emqx_common_test_helpers:start_apps([emqx, emqx_bridge]), - Config. + Apps = emqx_cth_suite:start( + [ + emqx, + emqx_conf, + emqx_bridge + ], + #{work_dir => ?config(priv_dir, Config)} + ), + [{apps, Apps} | Config]. -end_per_suite(_Config) -> - emqx_common_test_helpers:stop_apps([ - emqx, - emqx_bridge, - emqx_resource, - emqx_connector - ]). +end_per_suite(Config) -> + Apps = ?config(apps, Config), + ok = emqx_cth_suite:stop(Apps), + ok. init_per_testcase(t_get_basic_usage_info_1, Config) -> {ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000), @@ -180,6 +181,31 @@ t_update_ssl_conf(Config) -> ?assertMatch({error, enoent}, file:list_dir(CertDir)), ok. 
+t_create_with_bad_name(_Config) -> + Path = [bridges, mqtt, 'test_哈哈'], + Conf = #{ + <<"bridge_mode">> => false, + <<"clean_start">> => true, + <<"keepalive">> => <<"60s">>, + <<"proto_ver">> => <<"v4">>, + <<"server">> => <<"127.0.0.1:1883">>, + <<"ssl">> => + #{ + %% needed to trigger pre_config_update + <<"certfile">> => cert_file("certfile"), + <<"enable">> => true + } + }, + ?assertMatch( + {error, + {pre_config_update, emqx_bridge_app, #{ + reason := bad_bridge_name, + kind := validation_error + }}}, + emqx:update_config(Path, Conf) + ), + ok. + data_file(Name) -> Dir = code:lib_dir(emqx_bridge, test), {ok, Bin} = file:read_file(filename:join([Dir, "data", Name])), diff --git a/apps/emqx_bridge/test/emqx_bridge_api_SUITE.erl b/apps/emqx_bridge/test/emqx_bridge_api_SUITE.erl index d08953682..f6129c09d 100644 --- a/apps/emqx_bridge/test/emqx_bridge_api_SUITE.erl +++ b/apps/emqx_bridge/test/emqx_bridge_api_SUITE.erl @@ -1335,6 +1335,35 @@ t_cluster_later_join_metrics(Config) -> ), ok. +t_create_with_bad_name(Config) -> + Port = ?config(port, Config), + URL1 = ?URL(Port, "path1"), + Name = <<"test_哈哈">>, + BadBridgeParams = + emqx_utils_maps:deep_merge( + ?HTTP_BRIDGE(URL1, Name), + #{ + <<"ssl">> => + #{ + <<"enable">> => true, + <<"certfile">> => cert_file("certfile") + } + } + ), + {ok, 400, #{ + <<"code">> := <<"BAD_REQUEST">>, + <<"message">> := Msg0 + }} = + request_json( + post, + uri(["bridges"]), + BadBridgeParams, + Config + ), + Msg = emqx_utils_json:decode(Msg0, [return_maps]), + ?assertMatch(#{<<"reason">> := <<"bad_bridge_name">>}, Msg), + ok. + validate_resource_request_ttl(single, Timeout, Name) -> SentData = #{payload => <<"Hello EMQX">>, timestamp => 1668602148000}, BridgeID = emqx_bridge_resource:bridge_id(?BRIDGE_TYPE_HTTP, Name), @@ -1418,3 +1447,11 @@ str(S) when is_binary(S) -> binary_to_list(S). json(B) when is_binary(B) -> emqx_utils_json:decode(B, [return_maps]). 
+ +data_file(Name) -> + Dir = code:lib_dir(emqx_bridge, test), + {ok, Bin} = file:read_file(filename:join([Dir, "data", Name])), + Bin. + +cert_file(Name) -> + data_file(filename:join(["certs", Name])). diff --git a/apps/emqx_bridge_azure_event_hub/src/emqx_bridge_azure_event_hub.app.src b/apps/emqx_bridge_azure_event_hub/src/emqx_bridge_azure_event_hub.app.src index e29e9c83a..43033b657 100644 --- a/apps/emqx_bridge_azure_event_hub/src/emqx_bridge_azure_event_hub.app.src +++ b/apps/emqx_bridge_azure_event_hub/src/emqx_bridge_azure_event_hub.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_azure_event_hub, [ {description, "EMQX Enterprise Azure Event Hub Bridge"}, - {vsn, "0.1.1"}, + {vsn, "0.1.2"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_azure_event_hub/src/emqx_bridge_azure_event_hub.erl b/apps/emqx_bridge_azure_event_hub/src/emqx_bridge_azure_event_hub.erl index 2d6343b74..abdc6a265 100644 --- a/apps/emqx_bridge_azure_event_hub/src/emqx_bridge_azure_event_hub.erl +++ b/apps/emqx_bridge_azure_event_hub/src/emqx_bridge_azure_event_hub.erl @@ -20,7 +20,7 @@ %% emqx_bridge_enterprise "unofficial" API -export([conn_bridge_examples/1]). --export([connector_config/2]). +-export([connector_config/1]). -export([producer_converter/2, host_opts/0]). @@ -64,6 +64,10 @@ fields(producer_kafka_opts) -> kafka_producer_overrides() ), override_documentations(Fields); +fields(kafka_message) -> + Fields0 = emqx_bridge_kafka:fields(kafka_message), + Fields = proplists:delete(timestamp, Fields0), + override_documentations(Fields); fields(Method) -> Fields = emqx_bridge_kafka:fields(Method), override_documentations(Fields). @@ -85,6 +89,7 @@ desc(Name) -> struct_names() -> [ auth_username_password, + kafka_message, producer_kafka_opts ]. 
@@ -161,14 +166,14 @@ values(producer) -> %% `emqx_bridge_resource' API %%------------------------------------------------------------------------------------------------- -connector_config(Config, BridgeName) -> +connector_config(Config) -> %% Default port for AEH is 9093 BootstrapHosts0 = maps:get(bootstrap_hosts, Config), BootstrapHosts = emqx_schema:parse_servers( BootstrapHosts0, emqx_bridge_azure_event_hub:host_opts() ), - Config#{bridge_name => BridgeName, bootstrap_hosts := BootstrapHosts}. + Config#{bootstrap_hosts := BootstrapHosts}. %%------------------------------------------------------------------------------------------------- %% Internal fns @@ -245,6 +250,7 @@ kafka_producer_overrides() -> default => no_compression, importance => ?IMPORTANCE_HIDDEN }), + message => mk(ref(kafka_message), #{}), required_acks => mk(enum([all_isr, leader_only]), #{default => all_isr}) }. diff --git a/apps/emqx_bridge_azure_event_hub/test/emqx_bridge_azure_event_hub_producer_SUITE.erl b/apps/emqx_bridge_azure_event_hub/test/emqx_bridge_azure_event_hub_producer_SUITE.erl index af4b87718..87c2127c2 100644 --- a/apps/emqx_bridge_azure_event_hub/test/emqx_bridge_azure_event_hub_producer_SUITE.erl +++ b/apps/emqx_bridge_azure_event_hub/test/emqx_bridge_azure_event_hub_producer_SUITE.erl @@ -12,6 +12,7 @@ -define(BRIDGE_TYPE, azure_event_hub_producer). -define(BRIDGE_TYPE_BIN, <<"azure_event_hub_producer">>). +-define(KAFKA_BRIDGE_TYPE, kafka). -define(APPS, [emqx_resource, emqx_bridge, emqx_rule_engine]). -import(emqx_common_test_helpers, [on_exit/1]). @@ -65,10 +66,6 @@ init_per_suite(Config) -> end. end_per_suite(Config) -> - %% emqx_mgmt_api_test_util:end_suite(), - %% ok = emqx_common_test_helpers:stop_apps([emqx_conf]), - %% ok = emqx_connector_test_helpers:stop_apps([emqx_bridge, emqx_resource, emqx_rule_engine]), - %% _ = application:stop(emqx_connector), Apps = ?config(tc_apps, Config), emqx_cth_suite:stop(Apps), ok. 
@@ -145,7 +142,6 @@ bridge_config(TestCase, Config) -> <<"message">> => #{ <<"key">> => <<"${.clientid}">>, - <<"timestamp">> => <<"${.timestamp}">>, <<"value">> => <<"${.}">> }, <<"partition_count_refresh_interval">> => <<"60s">>, @@ -281,3 +277,42 @@ t_sync_query(Config) -> emqx_bridge_kafka_impl_producer_sync_query ), ok. + +t_same_name_azure_kafka_bridges(AehConfig) -> + ConfigKafka = lists:keyreplace(bridge_type, 1, AehConfig, {bridge_type, ?KAFKA_BRIDGE_TYPE}), + BridgeName = ?config(bridge_name, AehConfig), + AehResourceId = emqx_bridge_testlib:resource_id(AehConfig), + KafkaResourceId = emqx_bridge_testlib:resource_id(ConfigKafka), + TracePoint = emqx_bridge_kafka_impl_producer_sync_query, + %% creates the AEH bridge and check it's working + ok = emqx_bridge_testlib:t_sync_query( + AehConfig, + fun make_message/0, + fun(Res) -> ?assertEqual(ok, Res) end, + TracePoint + ), + %% than creates a Kafka bridge with same name and delete it after creation + ok = emqx_bridge_testlib:t_create_via_http(ConfigKafka), + %% check that both bridges are healthy + ?assertEqual({ok, connected}, emqx_resource_manager:health_check(AehResourceId)), + ?assertEqual({ok, connected}, emqx_resource_manager:health_check(KafkaResourceId)), + ?assertMatch( + {{ok, _}, {ok, _}}, + ?wait_async_action( + emqx_bridge:disable_enable(disable, ?KAFKA_BRIDGE_TYPE, BridgeName), + #{?snk_kind := kafka_producer_stopped}, + 5_000 + ) + ), + % check that AEH bridge is still working + ?check_trace( + begin + Message = {send_message, make_message()}, + ?assertEqual(ok, emqx_resource:simple_sync_query(AehResourceId, Message)), + ok + end, + fun(Trace) -> + ?assertMatch([#{instance_id := AehResourceId}], ?of_kind(TracePoint, Trace)) + end + ), + ok. 
diff --git a/apps/emqx_bridge_azure_event_hub/test/emqx_bridge_azure_event_hub_tests.erl b/apps/emqx_bridge_azure_event_hub/test/emqx_bridge_azure_event_hub_tests.erl index d624421c6..92d268d20 100644 --- a/apps/emqx_bridge_azure_event_hub/test/emqx_bridge_azure_event_hub_tests.erl +++ b/apps/emqx_bridge_azure_event_hub/test/emqx_bridge_azure_event_hub_tests.erl @@ -33,7 +33,6 @@ bridges.azure_event_hub_producer.my_producer { max_inflight = 10 message { key = \"${.clientid}\" - timestamp = \"${.timestamp}\" value = \"${.}\" } partition_count_refresh_interval = 60s diff --git a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.erl b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.erl index d1e827d84..685fd3397 100644 --- a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.erl +++ b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.erl @@ -18,7 +18,7 @@ ]). -export([ service_account_json_validator/1, - service_account_json_converter/1 + service_account_json_converter/2 ]). %% emqx_bridge_enterprise "unofficial" API @@ -105,7 +105,7 @@ fields(connector_config) -> #{ required => true, validator => fun ?MODULE:service_account_json_validator/1, - converter => fun ?MODULE:service_account_json_converter/1, + converter => fun ?MODULE:service_account_json_converter/2, sensitive => true, desc => ?DESC("service_account_json") } @@ -207,13 +207,13 @@ fields(consumer_topic_mapping) -> )} ]; fields("consumer_resource_opts") -> - ResourceFields = emqx_resource_schema:fields("creation_opts"), + ResourceFields = + emqx_resource_schema:create_opts( + [{health_check_interval, #{default => <<"30s">>}}] + ), SupportedFields = [ - auto_restart_interval, health_check_interval, - request_ttl, - resume_interval, - worker_pool_size + request_ttl ], lists:filter( fun({Field, _Sc}) -> lists:member(Field, SupportedFields) end, @@ -398,7 +398,9 @@ service_account_json_validator(Map) -> {error, #{missing_keys => MissingKeys}} end. 
-service_account_json_converter(Map) when is_map(Map) -> +service_account_json_converter(Val, #{make_serializable := true}) -> + Val; +service_account_json_converter(Map, _Opts) when is_map(Map) -> ExpectedKeys = [ <<"type">>, <<"project_id">>, @@ -407,7 +409,7 @@ service_account_json_converter(Map) when is_map(Map) -> <<"client_email">> ], maps:with(ExpectedKeys, Map); -service_account_json_converter(Val) -> +service_account_json_converter(Val, _Opts) -> Val. consumer_topic_mapping_validator(_TopicMapping = []) -> diff --git a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_consumer_worker.erl b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_consumer_worker.erl index d984b42ed..291ace7e0 100644 --- a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_consumer_worker.erl +++ b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_consumer_worker.erl @@ -218,6 +218,8 @@ handle_continue(?ensure_subscription, State0) -> not_found -> %% there's nothing much to do if the topic suddenly doesn't exist anymore. {stop, {error, topic_not_found}, State0}; + bad_credentials -> + {stop, {error, bad_credentials}, State0}; permission_denied -> {stop, {error, permission_denied}, State0} end; @@ -295,6 +297,7 @@ handle_info(Msg, State0) -> terminate({error, Reason}, State) when Reason =:= topic_not_found; + Reason =:= bad_credentials; Reason =:= permission_denied -> #{ @@ -335,7 +338,7 @@ ensure_pull_timer(State = #{pull_retry_interval := PullRetryInterval}) -> State#{pull_timer := emqx_utils:start_timer(PullRetryInterval, pull)}. -spec ensure_subscription_exists(state()) -> - continue | retry | not_found | permission_denied | already_exists. + continue | retry | not_found | permission_denied | bad_credentials | already_exists. 
ensure_subscription_exists(State) -> ?tp(gcp_pubsub_consumer_worker_create_subscription_enter, #{}), #{ @@ -384,6 +387,17 @@ ensure_subscription_exists(State) -> } ), permission_denied; + {error, #{status_code := 401}} -> + %% bad credentials + ?tp( + warning, + "gcp_pubsub_consumer_worker_bad_credentials", + #{ + instance_id => InstanceId, + topic => Topic + } + ), + bad_credentials; {ok, #{status_code := 200}} -> ?tp( debug, diff --git a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_impl_consumer.erl b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_impl_consumer.erl index 998a95a48..5c726ef9b 100644 --- a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_impl_consumer.erl +++ b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub_impl_consumer.erl @@ -94,6 +94,8 @@ on_get_status(InstanceId, State) -> {disconnected, State, {unhealthy_target, ?TOPIC_MESSAGE}}; {error, permission_denied} -> {disconnected, State, {unhealthy_target, ?PERMISSION_MESSAGE}}; + {error, bad_credentials} -> + {disconnected, State, {unhealthy_target, ?PERMISSION_MESSAGE}}; ok -> #{client := Client} = State, check_workers(InstanceId, Client) @@ -103,7 +105,12 @@ on_get_status(InstanceId, State) -> %% Health check API (signalled by consumer worker) %%------------------------------------------------------------------------------------------------- --spec mark_as_unhealthy(resource_id(), topic_not_found | permission_denied) -> ok. +-spec mark_as_unhealthy( + resource_id(), + topic_not_found + | permission_denied + | bad_credentials +) -> ok. mark_as_unhealthy(InstanceId, Reason) -> optvar:set(?OPTVAR_UNHEALTHY(InstanceId), Reason), ok. @@ -114,7 +121,12 @@ clear_unhealthy(InstanceId) -> ?tp(gcp_pubsub_consumer_clear_unhealthy, #{}), ok. --spec check_if_unhealthy(resource_id()) -> ok | {error, topic_not_found | permission_denied}. +-spec check_if_unhealthy(resource_id()) -> + ok + | {error, + topic_not_found + | permission_denied + | bad_credentials}. 
check_if_unhealthy(InstanceId) -> case optvar:peek(?OPTVAR_UNHEALTHY(InstanceId)) of {ok, Reason} -> @@ -164,6 +176,11 @@ start_consumers(InstanceId, Client, Config) -> throw( {unhealthy_target, ?PERMISSION_MESSAGE} ); + {error, bad_credentials} -> + _ = emqx_bridge_gcp_pubsub_client:stop(InstanceId), + throw( + {unhealthy_target, ?PERMISSION_MESSAGE} + ); {error, _} -> %% connection might be down; we'll have to check topic existence during health %% check, or the workers will kill themselves when they realized there's no @@ -242,6 +259,8 @@ check_for_topic_existence(Topic, Client) -> {error, not_found}; {error, #{status_code := 403}} -> {error, permission_denied}; + {error, #{status_code := 401}} -> + {error, bad_credentials}; {error, Reason} -> ?tp(warning, "gcp_pubsub_consumer_check_topic_error", #{reason => Reason}), {error, Reason} diff --git a/apps/emqx_bridge_gcp_pubsub/test/emqx_bridge_gcp_pubsub_consumer_SUITE.erl b/apps/emqx_bridge_gcp_pubsub/test/emqx_bridge_gcp_pubsub_consumer_SUITE.erl index 681e5fed7..8dc6cd7c4 100644 --- a/apps/emqx_bridge_gcp_pubsub/test/emqx_bridge_gcp_pubsub_consumer_SUITE.erl +++ b/apps/emqx_bridge_gcp_pubsub/test/emqx_bridge_gcp_pubsub_consumer_SUITE.erl @@ -818,6 +818,61 @@ permission_denied_response() -> ) }}. +unauthenticated_response() -> + Msg = << + "Request had invalid authentication credentials. Expected OAuth 2 access token," + " login cookie or other valid authentication credential. " + "See https://developers.google.com/identity/sign-in/web/devconsole-project." 
+ >>, + {error, #{ + body => + #{ + <<"error">> => + #{ + <<"code">> => 401, + <<"details">> => + [ + #{ + <<"@type">> => + <<"type.googleapis.com/google.rpc.ErrorInfo">>, + <<"domain">> => <<"googleapis.com">>, + <<"metadata">> => + #{ + <<"email">> => + <<"test-516@emqx-cloud-pubsub.iam.gserviceaccount.com">>, + <<"method">> => + <<"google.pubsub.v1.Publisher.CreateTopic">>, + <<"service">> => + <<"pubsub.googleapis.com">> + }, + <<"reason">> => <<"ACCOUNT_STATE_INVALID">> + } + ], + <<"message">> => Msg, + + <<"status">> => <<"UNAUTHENTICATED">> + } + }, + headers => + [ + {<<"www-authenticate">>, <<"Bearer realm=\"https://accounts.google.com/\"">>}, + {<<"vary">>, <<"X-Origin">>}, + {<<"vary">>, <<"Referer">>}, + {<<"content-type">>, <<"application/json; charset=UTF-8">>}, + {<<"date">>, <<"Wed, 23 Aug 2023 12:41:40 GMT">>}, + {<<"server">>, <<"ESF">>}, + {<<"cache-control">>, <<"private">>}, + {<<"x-xss-protection">>, <<"0">>}, + {<<"x-frame-options">>, <<"SAMEORIGIN">>}, + {<<"x-content-type-options">>, <<"nosniff">>}, + {<<"alt-svc">>, <<"h3=\":443\"; ma=2592000,h3-29=\":443\"; ma=2592000">>}, + {<<"accept-ranges">>, <<"none">>}, + {<<"vary">>, <<"Origin,Accept-Encoding">>}, + {<<"transfer-encoding">>, <<"chunked">>} + ], + status_code => 401 + }}. + %%------------------------------------------------------------------------------ %% Testcases %%------------------------------------------------------------------------------ @@ -2125,6 +2180,81 @@ t_permission_denied_worker(Config) -> ), ok. 
+t_unauthenticated_topic_check(Config) -> + [#{pubsub_topic := PubSubTopic}] = ?config(topic_mapping, Config), + ResourceId = resource_id(Config), + ?check_trace( + begin + %% the emulator does not check any credentials + emqx_common_test_helpers:with_mock( + emqx_bridge_gcp_pubsub_client, + query_sync, + fun(PreparedRequest = {prepared_request, {Method, Path, _Body}}, Client) -> + RE = iolist_to_binary(["/topics/", PubSubTopic, "$"]), + case {Method =:= get, re:run(Path, RE)} of + {true, {match, _}} -> + unauthenticated_response(); + _ -> + meck:passthrough([PreparedRequest, Client]) + end + end, + fun() -> + {{ok, _}, {ok, _}} = + ?wait_async_action( + create_bridge(Config), + #{?snk_kind := gcp_pubsub_stop}, + 5_000 + ), + ?assertMatch( + {ok, disconnected}, + emqx_resource_manager:health_check(ResourceId) + ), + ?assertMatch( + {ok, _Group, #{error := {unhealthy_target, "Permission denied" ++ _}}}, + emqx_resource_manager:lookup_cached(ResourceId) + ), + ok + end + ), + ok + end, + [] + ), + ok. + +t_unauthenticated_worker(Config) -> + ?check_trace( + begin + emqx_common_test_helpers:with_mock( + emqx_bridge_gcp_pubsub_client, + query_sync, + fun(PreparedRequest = {prepared_request, {Method, _Path, _Body}}, Client) -> + case Method =:= put of + true -> + unauthenticated_response(); + false -> + meck:passthrough([PreparedRequest, Client]) + end + end, + fun() -> + {{ok, _}, {ok, _}} = + ?wait_async_action( + create_bridge( + Config + ), + #{?snk_kind := gcp_pubsub_consumer_worker_terminate}, + 10_000 + ), + + ok + end + ), + ok + end, + [] + ), + ok. 
+ t_cluster_subscription(Config) -> [ #{ diff --git a/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.app.src b/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.app.src index fa7e0d4af..c3e3d34e2 100644 --- a/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.app.src +++ b/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_greptimedb, [ {description, "EMQX GreptimeDB Bridge"}, - {vsn, "0.1.1"}, + {vsn, "0.1.2"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.erl b/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.erl index 877e464dd..d63103e2e 100644 --- a/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.erl +++ b/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.erl @@ -21,11 +21,6 @@ desc/1 ]). --type write_syntax() :: list(). --reflect_type([write_syntax/0]). --typerefl_from_string({write_syntax/0, ?MODULE, to_influx_lines}). --export([to_influx_lines/1]). - %% ------------------------------------------------------------------------------------------------- %% api @@ -131,169 +126,16 @@ desc(_) -> undefined. write_syntax(type) -> - ?MODULE:write_syntax(); + emqx_bridge_influxdb:write_syntax(); write_syntax(required) -> true; write_syntax(validator) -> [?NOT_EMPTY("the value of the field 'write_syntax' cannot be empty")]; write_syntax(converter) -> - fun to_influx_lines/1; + fun emqx_bridge_influxdb:to_influx_lines/1; write_syntax(desc) -> ?DESC("write_syntax"); write_syntax(format) -> <<"sql">>; write_syntax(_) -> undefined. - -to_influx_lines(RawLines) -> - try - influx_lines(str(RawLines), []) - catch - _:Reason:Stacktrace -> - Msg = lists:flatten( - io_lib:format("Unable to parse Greptimedb line protocol: ~p", [RawLines]) - ), - ?SLOG(error, #{msg => Msg, error_reason => Reason, stacktrace => Stacktrace}), - throw(Msg) - end. - --define(MEASUREMENT_ESC_CHARS, [$,, $\s]). 
--define(TAG_FIELD_KEY_ESC_CHARS, [$,, $=, $\s]). --define(FIELD_VAL_ESC_CHARS, [$", $\\]). -% Common separator for both tags and fields --define(SEP, $\s). --define(MEASUREMENT_TAG_SEP, $,). --define(KEY_SEP, $=). --define(VAL_SEP, $,). --define(NON_EMPTY, [_ | _]). - -influx_lines([] = _RawLines, Acc) -> - ?NON_EMPTY = lists:reverse(Acc); -influx_lines(RawLines, Acc) -> - {Acc1, RawLines1} = influx_line(string:trim(RawLines, leading, "\s\n"), Acc), - influx_lines(RawLines1, Acc1). - -influx_line([], Acc) -> - {Acc, []}; -influx_line(Line, Acc) -> - {?NON_EMPTY = Measurement, Line1} = measurement(Line), - {Tags, Line2} = tags(Line1), - {?NON_EMPTY = Fields, Line3} = influx_fields(Line2), - {Timestamp, Line4} = timestamp(Line3), - { - [ - #{ - measurement => Measurement, - tags => Tags, - fields => Fields, - timestamp => Timestamp - } - | Acc - ], - Line4 - }. - -measurement(Line) -> - unescape(?MEASUREMENT_ESC_CHARS, [?MEASUREMENT_TAG_SEP, ?SEP], Line, []). - -tags([?MEASUREMENT_TAG_SEP | Line]) -> - tags1(Line, []); -tags(Line) -> - {[], Line}. - -%% Empty line is invalid as fields are required after tags, -%% need to break recursion here and fail later on parsing fields -tags1([] = Line, Acc) -> - {lists:reverse(Acc), Line}; -%% Matching non empty Acc treats lines like "m, field=field_val" invalid -tags1([?SEP | _] = Line, ?NON_EMPTY = Acc) -> - {lists:reverse(Acc), Line}; -tags1(Line, Acc) -> - {Tag, Line1} = tag(Line), - tags1(Line1, [Tag | Acc]). - -tag(Line) -> - {?NON_EMPTY = Key, Line1} = key(Line), - {?NON_EMPTY = Val, Line2} = tag_val(Line1), - {{Key, Val}, Line2}. - -tag_val(Line) -> - {Val, Line1} = unescape(?TAG_FIELD_KEY_ESC_CHARS, [?VAL_SEP, ?SEP], Line, []), - {Val, strip_l(Line1, ?VAL_SEP)}. - -influx_fields([?SEP | Line]) -> - fields1(string:trim(Line, leading, "\s"), []). 
- -%% Timestamp is optional, so fields may be at the very end of the line -fields1([Ch | _] = Line, Acc) when Ch =:= ?SEP; Ch =:= $\n -> - {lists:reverse(Acc), Line}; -fields1([] = Line, Acc) -> - {lists:reverse(Acc), Line}; -fields1(Line, Acc) -> - {Field, Line1} = field(Line), - fields1(Line1, [Field | Acc]). - -field(Line) -> - {?NON_EMPTY = Key, Line1} = key(Line), - {Val, Line2} = field_val(Line1), - {{Key, Val}, Line2}. - -field_val([$" | Line]) -> - {Val, [$" | Line1]} = unescape(?FIELD_VAL_ESC_CHARS, [$"], Line, []), - %% Quoted val can be empty - {Val, strip_l(Line1, ?VAL_SEP)}; -field_val(Line) -> - %% Unquoted value should not be un-escaped according to Greptimedb protocol, - %% as it can only hold float, integer, uinteger or boolean value. - %% However, as templates are possible, un-escaping is applied here, - %% which also helps to detect some invalid lines, e.g.: "m,tag=1 field= ${timestamp}" - {Val, Line1} = unescape(?TAG_FIELD_KEY_ESC_CHARS, [?VAL_SEP, ?SEP, $\n], Line, []), - {?NON_EMPTY = Val, strip_l(Line1, ?VAL_SEP)}. - -timestamp([?SEP | Line]) -> - Line1 = string:trim(Line, leading, "\s"), - %% Similarly to unquoted field value, un-escape a timestamp to validate and handle - %% potentially escaped characters in a template - {T, Line2} = unescape(?TAG_FIELD_KEY_ESC_CHARS, [?SEP, $\n], Line1, []), - {timestamp1(T), Line2}; -timestamp(Line) -> - {undefined, Line}. - -timestamp1(?NON_EMPTY = Ts) -> Ts; -timestamp1(_Ts) -> undefined. - -%% Common for both tag and field keys -key(Line) -> - {Key, Line1} = unescape(?TAG_FIELD_KEY_ESC_CHARS, [?KEY_SEP], Line, []), - {Key, strip_l(Line1, ?KEY_SEP)}. - -%% Only strip a character between pairs, don't strip it(and let it fail) -%% if the char to be stripped is at the end, e.g.: m,tag=val, field=val -strip_l([Ch, Ch1 | Str], Ch) when Ch1 =/= ?SEP -> - [Ch1 | Str]; -strip_l(Str, _Ch) -> - Str. 
- -unescape(EscapeChars, SepChars, [$\\, Char | T], Acc) -> - ShouldEscapeBackslash = lists:member($\\, EscapeChars), - Acc1 = - case lists:member(Char, EscapeChars) of - true -> [Char | Acc]; - false when not ShouldEscapeBackslash -> [Char, $\\ | Acc] - end, - unescape(EscapeChars, SepChars, T, Acc1); -unescape(EscapeChars, SepChars, [Char | T] = L, Acc) -> - IsEscapeChar = lists:member(Char, EscapeChars), - case lists:member(Char, SepChars) of - true -> {lists:reverse(Acc), L}; - false when not IsEscapeChar -> unescape(EscapeChars, SepChars, T, [Char | Acc]) - end; -unescape(_EscapeChars, _SepChars, [] = L, Acc) -> - {lists:reverse(Acc), L}. - -str(A) when is_atom(A) -> - atom_to_list(A); -str(B) when is_binary(B) -> - binary_to_list(B); -str(S) when is_list(S) -> - S. diff --git a/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb_connector.erl b/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb_connector.erl index 89fad78d2..d474db58c 100644 --- a/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb_connector.erl +++ b/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb_connector.erl @@ -68,7 +68,9 @@ on_start(InstId, Config) -> on_stop(InstId, _State) -> case emqx_resource:get_allocated_resources(InstId) of #{?greptime_client := Client} -> - greptimedb:stop_client(Client); + Res = greptimedb:stop_client(Client), + ?tp(greptimedb_client_stopped, #{instance_id => InstId}), + Res; _ -> ok end. 
diff --git a/apps/emqx_bridge_greptimedb/test/emqx_bridge_greptimedb_SUITE.erl b/apps/emqx_bridge_greptimedb/test/emqx_bridge_greptimedb_SUITE.erl index d4bc5b01e..15133a1ad 100644 --- a/apps/emqx_bridge_greptimedb/test/emqx_bridge_greptimedb_SUITE.erl +++ b/apps/emqx_bridge_greptimedb/test/emqx_bridge_greptimedb_SUITE.erl @@ -112,6 +112,9 @@ init_per_group(GreptimedbType, Config0) when {proxy_host, ProxyHost}, {proxy_port, ProxyPort}, {proxy_name, ProxyName}, + {bridge_type, greptimedb}, + {bridge_name, Name}, + {bridge_config, GreptimedbConfig}, {greptimedb_host, GreptimedbHost}, {greptimedb_port, GreptimedbPort}, {greptimedb_http_port, GreptimedbHttpPort}, @@ -457,6 +460,97 @@ t_start_ok(Config) -> ), ok. +t_start_stop(Config) -> + %% we can't use this test case directly because `greptimedb_worker' apparently leaks + %% atoms... + %% ok = emqx_bridge_testlib:t_start_stop(Config, greptimedb_client_stopped), + BridgeType = ?config(bridge_type, Config), + BridgeName = ?config(bridge_name, Config), + BridgeConfig = ?config(bridge_config, Config), + StopTracePoint = greptimedb_client_stopped, + ResourceId = emqx_bridge_resource:resource_id(BridgeType, BridgeName), + ?check_trace( + begin + ProbeRes0 = emqx_bridge_testlib:probe_bridge_api( + BridgeType, + BridgeName, + BridgeConfig + ), + ?assertMatch({ok, {{_, 204, _}, _Headers, _Body}}, ProbeRes0), + ?assertMatch({ok, _}, emqx_bridge:create(BridgeType, BridgeName, BridgeConfig)), + + %% Since the connection process is async, we give it some time to + %% stabilize and avoid flakiness. 
+ ?retry( + _Sleep = 1_000, + _Attempts = 20, + ?assertEqual({ok, connected}, emqx_resource_manager:health_check(ResourceId)) + ), + + %% `start` bridge to trigger `already_started` + ?assertMatch( + {ok, {{_, 204, _}, _Headers, []}}, + emqx_bridge_testlib:op_bridge_api("start", BridgeType, BridgeName) + ), + + ?assertEqual({ok, connected}, emqx_resource_manager:health_check(ResourceId)), + + ?assertMatch( + {{ok, _}, {ok, _}}, + ?wait_async_action( + emqx_bridge_testlib:op_bridge_api("stop", BridgeType, BridgeName), + #{?snk_kind := StopTracePoint}, + 5_000 + ) + ), + + ?assertEqual( + {error, resource_is_stopped}, emqx_resource_manager:health_check(ResourceId) + ), + + ?assertMatch( + {ok, {{_, 204, _}, _Headers, []}}, + emqx_bridge_testlib:op_bridge_api("stop", BridgeType, BridgeName) + ), + + ?assertEqual( + {error, resource_is_stopped}, emqx_resource_manager:health_check(ResourceId) + ), + + ?assertMatch( + {ok, {{_, 204, _}, _Headers, []}}, + emqx_bridge_testlib:op_bridge_api("start", BridgeType, BridgeName) + ), + + ?retry( + _Sleep = 1_000, + _Attempts = 20, + ?assertEqual({ok, connected}, emqx_resource_manager:health_check(ResourceId)) + ), + + %% Disable the bridge, which will also stop it. + ?assertMatch( + {{ok, _}, {ok, _}}, + ?wait_async_action( + emqx_bridge:disable_enable(disable, BridgeType, BridgeName), + #{?snk_kind := StopTracePoint}, + 5_000 + ) + ), + + ok + end, + fun(Trace) -> + %% one for probe, two for real + ?assertMatch( + [_, #{instance_id := ResourceId}, #{instance_id := ResourceId}], + ?of_kind(StopTracePoint, Trace) + ), + ok + end + ), + ok. 
+ t_start_already_started(Config) -> Type = greptimedb_type_bin(?config(greptimedb_type, Config)), Name = ?config(greptimedb_name, Config), diff --git a/apps/emqx_bridge_greptimedb/test/emqx_bridge_greptimedb_tests.erl b/apps/emqx_bridge_greptimedb/test/emqx_bridge_greptimedb_tests.erl deleted file mode 100644 index a07ccd92d..000000000 --- a/apps/emqx_bridge_greptimedb/test/emqx_bridge_greptimedb_tests.erl +++ /dev/null @@ -1,348 +0,0 @@ -%%-------------------------------------------------------------------- -%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. -%%-------------------------------------------------------------------- --module(emqx_bridge_greptimedb_tests). - --include_lib("eunit/include/eunit.hrl"). - --define(INVALID_LINES, [ - " ", - " \n", - " \n\n\n ", - "\n", - " \n\n \n \n", - "measurement", - "measurement ", - "measurement,tag", - "measurement field", - "measurement,tag field", - "measurement,tag field ${timestamp}", - "measurement,tag=", - "measurement,tag=tag1", - "measurement,tag =", - "measurement field=", - "measurement field= ", - "measurement field = ", - "measurement, tag = field = ", - "measurement, tag = field = ", - "measurement, tag = tag_val field = field_val", - "measurement, tag = tag_val field = field_val ${timestamp}", - "measurement,= = ${timestamp}", - "measurement,t=a, f=a, ${timestamp}", - "measurement,t=a,t1=b, f=a,f1=b, ${timestamp}", - "measurement,t=a,t1=b, f=a,f1=b,", - "measurement,t=a, t1=b, f=a,f1=b,", - "measurement,t=a,,t1=b, f=a,f1=b,", - "measurement,t=a,,t1=b f=a,,f1=b", - "measurement,t=a,,t1=b f=a,f1=b ${timestamp}", - "measurement, f=a,f1=b", - "measurement, f=a,f1=b ${timestamp}", - "measurement,, f=a,f1=b ${timestamp}", - "measurement,, f=a,f1=b", - "measurement,, f=a,f1=b,, ${timestamp}", - "measurement f=a,f1=b,, ${timestamp}", - "measurement,t=a f=a,f1=b,, ${timestamp}", - "measurement,t=a f=a,f1=b,, ", - "measurement,t=a f=a,f1=b,,", - "measurement, t=a f=a,f1=b", - "measurement,t=a 
f=a, f1=b", - "measurement,t=a f=a, f1=b ${timestamp}", - "measurement, t=a f=a, f1=b ${timestamp}", - "measurement,t= a f=a,f1=b ${timestamp}", - "measurement,t= a f=a,f1 =b ${timestamp}", - "measurement, t = a f = a,f1 = b ${timestamp}", - "measurement,t=a f=a,f1=b \n ${timestamp}", - "measurement,t=a \n f=a,f1=b \n ${timestamp}", - "measurement,t=a \n f=a,f1=b \n ", - "\n measurement,t=a \n f=a,f1=b \n ${timestamp}", - "\n measurement,t=a \n f=a,f1=b \n", - %% not escaped backslash in a quoted field value is invalid - "measurement,tag=1 field=\"val\\1\"" -]). - --define(VALID_LINE_PARSED_PAIRS, [ - {"m1,tag=tag1 field=field1 ${timestamp1}", #{ - measurement => "m1", - tags => [{"tag", "tag1"}], - fields => [{"field", "field1"}], - timestamp => "${timestamp1}" - }}, - {"m2,tag=tag2 field=field2", #{ - measurement => "m2", - tags => [{"tag", "tag2"}], - fields => [{"field", "field2"}], - timestamp => undefined - }}, - {"m3 field=field3 ${timestamp3}", #{ - measurement => "m3", - tags => [], - fields => [{"field", "field3"}], - timestamp => "${timestamp3}" - }}, - {"m4 field=field4", #{ - measurement => "m4", - tags => [], - fields => [{"field", "field4"}], - timestamp => undefined - }}, - {"m5,tag=tag5,tag_a=tag5a,tag_b=tag5b field=field5,field_a=field5a,field_b=field5b ${timestamp5}", - #{ - measurement => "m5", - tags => [{"tag", "tag5"}, {"tag_a", "tag5a"}, {"tag_b", "tag5b"}], - fields => [{"field", "field5"}, {"field_a", "field5a"}, {"field_b", "field5b"}], - timestamp => "${timestamp5}" - }}, - {"m6,tag=tag6,tag_a=tag6a,tag_b=tag6b field=field6,field_a=field6a,field_b=field6b", #{ - measurement => "m6", - tags => [{"tag", "tag6"}, {"tag_a", "tag6a"}, {"tag_b", "tag6b"}], - fields => [{"field", "field6"}, {"field_a", "field6a"}, {"field_b", "field6b"}], - timestamp => undefined - }}, - {"m7,tag=tag7,tag_a=\"tag7a\",tag_b=tag7b field=\"field7\",field_a=field7a,field_b=\"field7b\"", - #{ - measurement => "m7", - tags => [{"tag", "tag7"}, {"tag_a", "\"tag7a\""}, 
{"tag_b", "tag7b"}], - fields => [{"field", "field7"}, {"field_a", "field7a"}, {"field_b", "field7b"}], - timestamp => undefined - }}, - {"m8,tag=tag8,tag_a=\"tag8a\",tag_b=tag8b field=\"field8\",field_a=field8a,field_b=\"field8b\" ${timestamp8}", - #{ - measurement => "m8", - tags => [{"tag", "tag8"}, {"tag_a", "\"tag8a\""}, {"tag_b", "tag8b"}], - fields => [{"field", "field8"}, {"field_a", "field8a"}, {"field_b", "field8b"}], - timestamp => "${timestamp8}" - }}, - {"m9,tag=tag9,tag_a=\"tag9a\",tag_b=tag9b field=\"field9\",field_a=field9a,field_b=\"\" ${timestamp9}", - #{ - measurement => "m9", - tags => [{"tag", "tag9"}, {"tag_a", "\"tag9a\""}, {"tag_b", "tag9b"}], - fields => [{"field", "field9"}, {"field_a", "field9a"}, {"field_b", ""}], - timestamp => "${timestamp9}" - }}, - {"m10 field=\"\" ${timestamp10}", #{ - measurement => "m10", - tags => [], - fields => [{"field", ""}], - timestamp => "${timestamp10}" - }} -]). - --define(VALID_LINE_EXTRA_SPACES_PARSED_PAIRS, [ - {"\n m1,tag=tag1 field=field1 ${timestamp1} \n", #{ - measurement => "m1", - tags => [{"tag", "tag1"}], - fields => [{"field", "field1"}], - timestamp => "${timestamp1}" - }}, - {" m2,tag=tag2 field=field2 ", #{ - measurement => "m2", - tags => [{"tag", "tag2"}], - fields => [{"field", "field2"}], - timestamp => undefined - }}, - {" m3 field=field3 ${timestamp3} ", #{ - measurement => "m3", - tags => [], - fields => [{"field", "field3"}], - timestamp => "${timestamp3}" - }}, - {" \n m4 field=field4\n ", #{ - measurement => "m4", - tags => [], - fields => [{"field", "field4"}], - timestamp => undefined - }}, - {" \n m5,tag=tag5,tag_a=tag5a,tag_b=tag5b field=field5,field_a=field5a,field_b=field5b ${timestamp5} \n", - #{ - measurement => "m5", - tags => [{"tag", "tag5"}, {"tag_a", "tag5a"}, {"tag_b", "tag5b"}], - fields => [{"field", "field5"}, {"field_a", "field5a"}, {"field_b", "field5b"}], - timestamp => "${timestamp5}" - }}, - {" m6,tag=tag6,tag_a=tag6a,tag_b=tag6b 
field=field6,field_a=field6a,field_b=field6b\n ", #{ - measurement => "m6", - tags => [{"tag", "tag6"}, {"tag_a", "tag6a"}, {"tag_b", "tag6b"}], - fields => [{"field", "field6"}, {"field_a", "field6a"}, {"field_b", "field6b"}], - timestamp => undefined - }} -]). - --define(VALID_LINE_PARSED_ESCAPED_CHARS_PAIRS, [ - {"m\\ =1\\,,\\,tag\\ \\==\\=tag\\ 1\\, \\,fie\\ ld\\ =\\ field\\,1 ${timestamp1}", #{ - measurement => "m =1,", - tags => [{",tag =", "=tag 1,"}], - fields => [{",fie ld ", " field,1"}], - timestamp => "${timestamp1}" - }}, - {"m2,tag=tag2 field=\"field \\\"2\\\",\n\"", #{ - measurement => "m2", - tags => [{"tag", "tag2"}], - fields => [{"field", "field \"2\",\n"}], - timestamp => undefined - }}, - {"m\\ 3 field=\"field3\" ${payload.timestamp\\ 3}", #{ - measurement => "m 3", - tags => [], - fields => [{"field", "field3"}], - timestamp => "${payload.timestamp 3}" - }}, - {"m4 field=\"\\\"field\\\\4\\\"\"", #{ - measurement => "m4", - tags => [], - fields => [{"field", "\"field\\4\""}], - timestamp => undefined - }}, - { - "m5\\,mA,tag=\\=tag5\\=,\\,tag_a\\,=tag\\ 5a,tag_b=tag5b \\ field\\ =field5," - "field\\ _\\ a=field5a,\\,field_b\\ =\\=\\,\\ field5b ${timestamp5}", - #{ - measurement => "m5,mA", - tags => [{"tag", "=tag5="}, {",tag_a,", "tag 5a"}, {"tag_b", "tag5b"}], - fields => [ - {" field ", "field5"}, {"field _ a", "field5a"}, {",field_b ", "=, field5b"} - ], - timestamp => "${timestamp5}" - } - }, - {"m6,tag=tag6,tag_a=tag6a,tag_b=tag6b field=\"field6\",field_a=\"field6a\",field_b=\"field6b\"", - #{ - measurement => "m6", - tags => [{"tag", "tag6"}, {"tag_a", "tag6a"}, {"tag_b", "tag6b"}], - fields => [{"field", "field6"}, {"field_a", "field6a"}, {"field_b", "field6b"}], - timestamp => undefined - }}, - { - "\\ \\ m7\\ \\ ,tag=\\ tag\\,7\\ ,tag_a=\"tag7a\",tag_b\\,tag1=tag7b field=\"field7\"," - "field_a=field7a,field_b=\"field7b\\\\\n\"", - #{ - measurement => " m7 ", - tags => [{"tag", " tag,7 "}, {"tag_a", "\"tag7a\""}, {"tag_b,tag1", 
"tag7b"}], - fields => [{"field", "field7"}, {"field_a", "field7a"}, {"field_b", "field7b\\\n"}], - timestamp => undefined - } - }, - { - "m8,tag=tag8,tag_a=\"tag8a\",tag_b=tag8b field=\"field8\",field_a=field8a," - "field_b=\"\\\"field\\\" = 8b\" ${timestamp8}", - #{ - measurement => "m8", - tags => [{"tag", "tag8"}, {"tag_a", "\"tag8a\""}, {"tag_b", "tag8b"}], - fields => [{"field", "field8"}, {"field_a", "field8a"}, {"field_b", "\"field\" = 8b"}], - timestamp => "${timestamp8}" - } - }, - {"m\\9,tag=tag9,tag_a=\"tag9a\",tag_b=tag9b field\\=field=\"field9\",field_a=field9a,field_b=\"\" ${timestamp9}", - #{ - measurement => "m\\9", - tags => [{"tag", "tag9"}, {"tag_a", "\"tag9a\""}, {"tag_b", "tag9b"}], - fields => [{"field=field", "field9"}, {"field_a", "field9a"}, {"field_b", ""}], - timestamp => "${timestamp9}" - }}, - {"m\\,10 \"field\\\\\"=\"\" ${timestamp10}", #{ - measurement => "m,10", - tags => [], - %% backslash should not be un-escaped in tag key - fields => [{"\"field\\\\\"", ""}], - timestamp => "${timestamp10}" - }} -]). 
- --define(VALID_LINE_PARSED_ESCAPED_CHARS_EXTRA_SPACES_PAIRS, [ - {" \n m\\ =1\\,,\\,tag\\ \\==\\=tag\\ 1\\, \\,fie\\ ld\\ =\\ field\\,1 ${timestamp1} ", #{ - measurement => "m =1,", - tags => [{",tag =", "=tag 1,"}], - fields => [{",fie ld ", " field,1"}], - timestamp => "${timestamp1}" - }}, - {" m2,tag=tag2 field=\"field \\\"2\\\",\n\" ", #{ - measurement => "m2", - tags => [{"tag", "tag2"}], - fields => [{"field", "field \"2\",\n"}], - timestamp => undefined - }}, - {" m\\ 3 field=\"field3\" ${payload.timestamp\\ 3} ", #{ - measurement => "m 3", - tags => [], - fields => [{"field", "field3"}], - timestamp => "${payload.timestamp 3}" - }}, - {" m4 field=\"\\\"field\\\\4\\\"\" ", #{ - measurement => "m4", - tags => [], - fields => [{"field", "\"field\\4\""}], - timestamp => undefined - }}, - { - " m5\\,mA,tag=\\=tag5\\=,\\,tag_a\\,=tag\\ 5a,tag_b=tag5b \\ field\\ =field5," - "field\\ _\\ a=field5a,\\,field_b\\ =\\=\\,\\ field5b ${timestamp5} ", - #{ - measurement => "m5,mA", - tags => [{"tag", "=tag5="}, {",tag_a,", "tag 5a"}, {"tag_b", "tag5b"}], - fields => [ - {" field ", "field5"}, {"field _ a", "field5a"}, {",field_b ", "=, field5b"} - ], - timestamp => "${timestamp5}" - } - }, - {" m6,tag=tag6,tag_a=tag6a,tag_b=tag6b field=\"field6\",field_a=\"field6a\",field_b=\"field6b\" ", - #{ - measurement => "m6", - tags => [{"tag", "tag6"}, {"tag_a", "tag6a"}, {"tag_b", "tag6b"}], - fields => [{"field", "field6"}, {"field_a", "field6a"}, {"field_b", "field6b"}], - timestamp => undefined - }} -]). - -invalid_write_syntax_line_test_() -> - [?_assertThrow(_, to_influx_lines(L)) || L <- ?INVALID_LINES]. - -invalid_write_syntax_multiline_test_() -> - LinesList = [ - join("\n", ?INVALID_LINES), - join("\n\n\n", ?INVALID_LINES), - join("\n\n", lists:reverse(?INVALID_LINES)) - ], - [?_assertThrow(_, to_influx_lines(Lines)) || Lines <- LinesList]. - -valid_write_syntax_test_() -> - test_pairs(?VALID_LINE_PARSED_PAIRS). 
- -valid_write_syntax_with_extra_spaces_test_() -> - test_pairs(?VALID_LINE_EXTRA_SPACES_PARSED_PAIRS). - -valid_write_syntax_escaped_chars_test_() -> - test_pairs(?VALID_LINE_PARSED_ESCAPED_CHARS_PAIRS). - -valid_write_syntax_escaped_chars_with_extra_spaces_test_() -> - test_pairs(?VALID_LINE_PARSED_ESCAPED_CHARS_EXTRA_SPACES_PAIRS). - -test_pairs(PairsList) -> - {Lines, AllExpected} = lists:unzip(PairsList), - JoinedLines = join("\n", Lines), - JoinedLines1 = join("\n\n\n", Lines), - JoinedLines2 = join("\n\n", lists:reverse(Lines)), - SingleLineTests = - [ - ?_assertEqual([Expected], to_influx_lines(Line)) - || {Line, Expected} <- PairsList - ], - JoinedLinesTests = - [ - ?_assertEqual(AllExpected, to_influx_lines(JoinedLines)), - ?_assertEqual(AllExpected, to_influx_lines(JoinedLines1)), - ?_assertEqual(lists:reverse(AllExpected), to_influx_lines(JoinedLines2)) - ], - SingleLineTests ++ JoinedLinesTests. - -join(Sep, LinesList) -> - lists:flatten(lists:join(Sep, LinesList)). - -to_influx_lines(RawLines) -> - OldLevel = emqx_logger:get_primary_log_level(), - try - %% mute error logs from this call - emqx_logger:set_primary_log_level(none), - emqx_bridge_greptimedb:to_influx_lines(RawLines) - after - emqx_logger:set_primary_log_level(OldLevel) - end. diff --git a/apps/emqx_bridge_hstreamdb/rebar.config b/apps/emqx_bridge_hstreamdb/rebar.config index 9a70b55f9..fb99cd627 100644 --- a/apps/emqx_bridge_hstreamdb/rebar.config +++ b/apps/emqx_bridge_hstreamdb/rebar.config @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {erl_opts, [debug_info]}. {deps, [ - {hstreamdb_erl, {git, "https://github.com/hstreamdb/hstreamdb_erl.git", {tag, "0.3.1+v0.12.0"}}}, + {hstreamdb_erl, {git, "https://github.com/hstreamdb/hstreamdb_erl.git", {tag, "0.4.5+v0.16.1"}}}, {emqx, {path, "../../apps/emqx"}}, {emqx_utils, {path, "../../apps/emqx_utils"}} ]}. 
diff --git a/apps/emqx_bridge_hstreamdb/src/emqx_bridge_hstreamdb.app.src b/apps/emqx_bridge_hstreamdb/src/emqx_bridge_hstreamdb.app.src index 2a800baca..39c8c3258 100644 --- a/apps/emqx_bridge_hstreamdb/src/emqx_bridge_hstreamdb.app.src +++ b/apps/emqx_bridge_hstreamdb/src/emqx_bridge_hstreamdb.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_hstreamdb, [ {description, "EMQX Enterprise HStreamDB Bridge"}, - {vsn, "0.1.1"}, + {vsn, "0.1.2"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_hstreamdb/src/emqx_bridge_hstreamdb_connector.erl b/apps/emqx_bridge_hstreamdb/src/emqx_bridge_hstreamdb_connector.erl index 1763b252a..727ea4ad8 100644 --- a/apps/emqx_bridge_hstreamdb/src/emqx_bridge_hstreamdb_connector.erl +++ b/apps/emqx_bridge_hstreamdb/src/emqx_bridge_hstreamdb_connector.erl @@ -75,7 +75,7 @@ on_query( } ) -> try to_record(PartitionKey, HRecordTemplate, Data) of - Record -> append_record(Producer, Record) + Record -> append_record(Producer, Record, false) catch _:_ -> ?FAILED_TO_APPLY_HRECORD_TEMPLATE end. @@ -88,7 +88,7 @@ on_batch_query( } ) -> try to_multi_part_records(PartitionKey, HRecordTemplate, BatchList) of - Records -> append_record(Producer, Records) + Records -> append_record(Producer, Records, true) catch _:_ -> ?FAILED_TO_APPLY_HRECORD_TEMPLATE end. @@ -156,16 +156,29 @@ start_client(InstId, Config) -> {error, Error} end. 
-do_start_client(InstId, Config = #{url := Server, pool_size := PoolSize}) -> +do_start_client(InstId, Config = #{url := Server, pool_size := PoolSize, ssl := SSL}) -> ?SLOG(info, #{ msg => "starting hstreamdb connector: client", connector => InstId, config => Config }), ClientName = client_name(InstId), + RpcOpts = + case maps:get(enable, SSL) of + false -> + #{pool_size => PoolSize}; + true -> + #{ + pool_size => PoolSize, + gun_opts => #{ + transport => tls, + transport_opts => emqx_tls_lib:to_client_opts(SSL) + } + } + end, ClientOptions = [ {url, binary_to_list(Server)}, - {rpc_options, #{pool_size => PoolSize}} + {rpc_options, RpcOpts} ], case hstreamdb:start_client(ClientName, ClientOptions) of {ok, Client} -> @@ -206,12 +219,7 @@ do_start_client(InstId, Config = #{url := Server, pool_size := PoolSize}) -> end. is_alive(Client) -> - case hstreamdb:echo(Client) of - {ok, _Echo} -> - true; - _ErrorEcho -> - false - end. + hstreamdb_client:echo(Client) =:= ok. start_producer( InstId, @@ -280,54 +288,52 @@ to_record(PartitionKey, RawRecord) -> hstreamdb:to_record(PartitionKey, raw, RawRecord). to_multi_part_records(PartitionKeyTmpl, HRecordTmpl, BatchList) -> - Records0 = lists:map( + lists:map( fun({send_message, Data}) -> to_record(PartitionKeyTmpl, HRecordTmpl, Data) end, BatchList - ), - PartitionKeys = proplists:get_keys(Records0), - [ - {PartitionKey, proplists:get_all_values(PartitionKey, Records0)} - || PartitionKey <- PartitionKeys - ]. + ). -append_record(Producer, MultiPartsRecords) when is_list(MultiPartsRecords) -> - lists:foreach(fun(Record) -> append_record(Producer, Record) end, MultiPartsRecords); -append_record(Producer, Record) when is_tuple(Record) -> - do_append_records(false, Producer, Record). 
+append_record(Producer, MultiPartsRecords, MaybeBatch) when is_list(MultiPartsRecords) -> + lists:foreach( + fun(Record) -> append_record(Producer, Record, MaybeBatch) end, MultiPartsRecords + ); +append_record(Producer, Record, MaybeBatch) when is_tuple(Record) -> + do_append_records(Producer, Record, MaybeBatch). %% TODO: only sync request supported. implement async request later. -do_append_records(false, Producer, Record) -> - case hstreamdb:append_flush(Producer, Record) of - {ok, _Result} -> - ?tp( - hstreamdb_connector_query_return, - #{result => _Result} - ), - ?SLOG(debug, #{ - msg => "HStreamDB producer sync append success", - record => Record - }); - %% the HStream is warming up or buzy, something are not ready yet, retry after a while - {error, {unavailable, _} = Reason} -> - {error, - {recoverable_error, #{ - msg => "HStreamDB is warming up or buzy, will retry after a moment", - reason => Reason - }}}; - {error, Reason} = Err -> - ?tp( - hstreamdb_connector_query_return, - #{error => Reason} - ), - ?SLOG(error, #{ - msg => "HStreamDB producer sync append failed", - reason => Reason, - record => Record - }), - Err - end. +do_append_records(Producer, Record, true = IsBatch) -> + Result = hstreamdb:append(Producer, Record), + handle_result(Result, Record, IsBatch); +do_append_records(Producer, Record, false = IsBatch) -> + Result = hstreamdb:append_flush(Producer, Record), + handle_result(Result, Record, IsBatch). 
+ +handle_result(ok = Result, Record, IsBatch) -> + handle_result({ok, Result}, Record, IsBatch); +handle_result({ok, Result}, Record, IsBatch) -> + ?tp( + hstreamdb_connector_query_append_return, + #{result => Result, is_batch => IsBatch} + ), + ?SLOG(debug, #{ + msg => "HStreamDB producer sync append success", + record => Record, + is_batch => IsBatch + }); +handle_result({error, Reason} = Err, Record, IsBatch) -> + ?tp( + hstreamdb_connector_query_append_return, + #{error => Reason, is_batch => IsBatch} + ), + ?SLOG(error, #{ + msg => "HStreamDB producer sync append failed", + reason => Reason, + record => Record, + is_batch => IsBatch + }), + Err. client_name(InstId) -> "client:" ++ to_string(InstId). diff --git a/apps/emqx_bridge_hstreamdb/test/emqx_bridge_hstreamdb_SUITE.erl b/apps/emqx_bridge_hstreamdb/test/emqx_bridge_hstreamdb_SUITE.erl index 430343274..14ea202be 100644 --- a/apps/emqx_bridge_hstreamdb/test/emqx_bridge_hstreamdb_SUITE.erl +++ b/apps/emqx_bridge_hstreamdb/test/emqx_bridge_hstreamdb_SUITE.erl @@ -13,8 +13,13 @@ -include_lib("snabbkaffe/include/snabbkaffe.hrl"). % SQL definitions --define(STREAM, "stream"). + +-define(STREAM, "demo_stream"). +%% could not be "stream" in Production Environment +%% especially not in hstreamdb_sql CLI client + -define(REPLICATION_FACTOR, 1). + %% in seconds -define(BACKLOG_RETENTION_SECOND, (24 * 60 * 60)). -define(SHARD_COUNT, 1). 
@@ -146,16 +151,23 @@ t_setup_via_config_and_publish(Config) -> begin ?wait_async_action( ?assertEqual(ok, send_message(Config, Data)), - #{?snk_kind := hstreamdb_connector_query_return}, + #{?snk_kind := hstreamdb_connector_query_append_return}, 10_000 ), ok end, fun(Trace0) -> - Trace = ?of_kind(hstreamdb_connector_query_return, Trace0), + Trace = ?of_kind(hstreamdb_connector_query_append_return, Trace0), lists:foreach( fun(EachTrace) -> - ?assertMatch(#{result := #{streamName := <>}}, EachTrace) + case ?config(enable_batch, Config) of + true -> + ?assertMatch(#{result := ok, is_batch := true}, EachTrace); + false -> + ?assertMatch( + #{result := #{'batchId' := _}, is_batch := false}, EachTrace + ) + end end, Trace ), @@ -181,16 +193,26 @@ t_setup_via_http_api_and_publish(Config) -> begin ?wait_async_action( ?assertEqual(ok, send_message(Config, Data)), - #{?snk_kind := hstreamdb_connector_query_return}, + #{?snk_kind := hstreamdb_connector_query_append_return}, 10_000 ), ok end, fun(Trace) -> - ?assertMatch( - [#{result := #{streamName := <>}}], - ?of_kind(hstreamdb_connector_query_return, Trace) - ) + lists:foreach( + fun(EachTrace) -> + case ?config(enable_batch, Config) of + true -> + ?assertMatch(#{result := ok, is_batch := true}, EachTrace); + false -> + ?assertMatch( + #{result := #{'batchId' := _}, is_batch := false}, EachTrace + ) + end + end, + ?of_kind(hstreamdb_connector_query_append_return, Trace) + ), + ok end ), ok. 
@@ -240,6 +262,7 @@ t_write_failure(Config) -> ProxyPort = ?config(proxy_port, Config), ProxyHost = ?config(proxy_host, Config), QueryMode = ?config(query_mode, Config), + EnableBatch = ?config(enable_batch, Config), Data = rand_data(), {{ok, _}, {ok, _}} = ?wait_async_action( @@ -251,10 +274,16 @@ t_write_failure(Config) -> health_check_resource_down(Config), case QueryMode of sync -> - ?assertMatch( - {error, {resource_error, #{msg := "call resource timeout", reason := timeout}}}, - send_message(Config, Data) - ); + case EnableBatch of + true -> + %% append to batch always returns ok + ?assertMatch(ok, send_message(Config, Data)); + false -> + ?assertMatch( + {error, {cannot_list_shards, {<>, econnrefused}}}, + send_message(Config, Data) + ) + end; async -> %% TODO: async mode is not supported yet, %% but it will return ok if calling emqx_resource_buffer_worker:async_query/3, @@ -282,17 +311,23 @@ t_simple_query(Config) -> end, Requests ), - #{?snk_kind := hstreamdb_connector_query_return}, + #{?snk_kind := hstreamdb_connector_query_append_return}, 10_000 ) end, - fun(Trace0) -> - Trace = ?of_kind(hstreamdb_connector_query_return, Trace0), + fun(Trace) -> lists:foreach( fun(EachTrace) -> - ?assertMatch(#{result := #{streamName := <>}}, EachTrace) + case ?config(enable_batch, Config) of + true -> + ?assertMatch(#{result := ok, is_batch := true}, EachTrace); + false -> + ?assertMatch( + #{result := #{'batchId' := _}, is_batch := false}, EachTrace + ) + end end, - Trace + ?of_kind(hstreamdb_connector_query_append_return, Trace) ), ok end @@ -432,7 +467,7 @@ client(Name, Config, N) -> try _ = hstreamdb:stop_client(Name), {ok, Client} = hstreamdb:start_client(Name, default_options(Config)), - {ok, echo} = hstreamdb:echo(Client), + ok = hstreamdb_client:echo(Client), Client catch Class:Error -> @@ -509,7 +544,7 @@ health_check_resource_down(Config) -> % These funs start and then stop the hstreamdb connection connect_and_create_stream(Config) -> ?WITH_CLIENT( - _ = 
hstreamdb:create_stream( + _ = hstreamdb_client:create_stream( Client, ?STREAM, ?REPLICATION_FACTOR, ?BACKLOG_RETENTION_SECOND, ?SHARD_COUNT ) ), @@ -531,7 +566,7 @@ connect_and_create_stream(Config) -> connect_and_delete_stream(Config) -> ?WITH_CLIENT( - _ = hstreamdb:delete_stream(Client, ?STREAM) + _ = hstreamdb_client:delete_stream(Client, ?STREAM) ). %%-------------------------------------------------------------------- diff --git a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl.erl b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl.erl index bbdb4f3c7..7532ba963 100644 --- a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl.erl +++ b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl.erl @@ -24,10 +24,10 @@ hosts(Hosts) when is_list(Hosts) -> kpro:parse_endpoints(Hosts). %% Client ID is better to be unique to make it easier for Kafka side trouble shooting. -make_client_id(KafkaType0, BridgeName0) -> - KafkaType = to_bin(KafkaType0), +make_client_id(BridgeType0, BridgeName0) -> + BridgeType = to_bin(BridgeType0), BridgeName = to_bin(BridgeName0), - iolist_to_binary([KafkaType, ":", BridgeName, ":", atom_to_list(node())]). + iolist_to_binary([BridgeType, ":", BridgeName, ":", atom_to_list(node())]). sasl(none) -> undefined; diff --git a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl_consumer.erl b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl_consumer.erl index b8abb928c..b16f163fb 100644 --- a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl_consumer.erl +++ b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl_consumer.erl @@ -121,6 +121,7 @@ on_start(ResourceId, Config) -> #{ authentication := Auth, bootstrap_hosts := BootstrapHosts0, + bridge_type := BridgeType, bridge_name := BridgeName, hookpoint := _, kafka := #{ @@ -134,9 +135,8 @@ on_start(ResourceId, Config) -> topic_mapping := _ } = Config, BootstrapHosts = emqx_bridge_kafka_impl:hosts(BootstrapHosts0), - KafkaType = kafka_consumer, %% Note: this is distinct per node. 
- ClientID = make_client_id(ResourceId, KafkaType, BridgeName), + ClientID = make_client_id(ResourceId, BridgeType, BridgeName), ClientOpts0 = case Auth of none -> []; @@ -517,11 +517,11 @@ is_dry_run(ResourceId) -> string:equal(TestIdStart, ResourceId) end. --spec make_client_id(resource_id(), kafka_consumer, atom() | binary()) -> atom(). -make_client_id(ResourceId, KafkaType, KafkaName) -> +-spec make_client_id(resource_id(), binary(), atom() | binary()) -> atom(). +make_client_id(ResourceId, BridgeType, BridgeName) -> case is_dry_run(ResourceId) of false -> - ClientID0 = emqx_bridge_kafka_impl:make_client_id(KafkaType, KafkaName), + ClientID0 = emqx_bridge_kafka_impl:make_client_id(BridgeType, BridgeName), binary_to_atom(ClientID0); true -> %% It is a dry run and we don't want to leak too many diff --git a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl_producer.erl b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl_producer.erl index ea6666ea0..3485ac752 100644 --- a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl_producer.erl +++ b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl_producer.erl @@ -29,10 +29,6 @@ -define(kafka_client_id, kafka_client_id). -define(kafka_producers, kafka_producers). -%% TODO: rename this to `kafka_producer' after alias support is added -%% to hocon; keeping this as just `kafka' for backwards compatibility. --define(BRIDGE_TYPE, kafka). 
- query_mode(#{kafka := #{query_mode := sync}}) -> simple_sync; query_mode(_) -> @@ -46,6 +42,7 @@ on_start(InstId, Config) -> authentication := Auth, bootstrap_hosts := Hosts0, bridge_name := BridgeName, + bridge_type := BridgeType, connect_timeout := ConnTimeout, kafka := KafkaConfig = #{ message := MessageTemplate, @@ -60,7 +57,6 @@ on_start(InstId, Config) -> KafkaHeadersTokens = preproc_kafka_headers(maps:get(kafka_headers, KafkaConfig, undefined)), KafkaExtHeadersTokens = preproc_ext_headers(maps:get(kafka_ext_headers, KafkaConfig, [])), KafkaHeadersValEncodeMode = maps:get(kafka_header_value_encode_mode, KafkaConfig, none), - BridgeType = ?BRIDGE_TYPE, ResourceId = emqx_bridge_resource:resource_id(BridgeType, BridgeName), ok = emqx_resource:allocate_resource(InstId, ?kafka_resource_id, ResourceId), _ = maybe_install_wolff_telemetry_handlers(ResourceId), @@ -107,7 +103,7 @@ on_start(InstId, Config) -> _ -> string:equal(TestIdStart, InstId) end, - WolffProducerConfig = producers_config(BridgeName, ClientId, KafkaConfig, IsDryRun), + WolffProducerConfig = producers_config(BridgeType, BridgeName, ClientId, KafkaConfig, IsDryRun), case wolff:ensure_supervised_producers(ClientId, KafkaTopic, WolffProducerConfig) of {ok, Producers} -> ok = emqx_resource:allocate_resource(InstId, ?kafka_producers, Producers), @@ -213,7 +209,7 @@ on_stop(InstanceId, _State) -> ok. 
on_query( - _InstId, + InstId, {send_message, Message}, #{ message_template := Template, @@ -229,19 +225,34 @@ on_query( ext_headers_tokens => KafkaExtHeadersTokens, headers_val_encode_mode => KafkaHeadersValEncodeMode }, - KafkaMessage = render_message(Template, KafkaHeaders, Message), - ?tp( - emqx_bridge_kafka_impl_producer_sync_query, - #{headers_config => KafkaHeaders, instance_id => _InstId} - ), try - {_Partition, _Offset} = wolff:send_sync(Producers, [KafkaMessage], SyncTimeout), - ok + KafkaMessage = render_message(Template, KafkaHeaders, Message), + ?tp( + emqx_bridge_kafka_impl_producer_sync_query, + #{headers_config => KafkaHeaders, instance_id => InstId} + ), + do_send_msg(sync, KafkaMessage, Producers, SyncTimeout) catch - error:{producer_down, _} = Reason -> - {error, Reason}; - error:timeout -> - {error, timeout} + throw:{bad_kafka_header, _} = Error -> + ?tp( + emqx_bridge_kafka_impl_producer_sync_query_failed, + #{ + headers_config => KafkaHeaders, + instance_id => InstId, + reason => Error + } + ), + {error, {unrecoverable_error, Error}}; + throw:{bad_kafka_headers, _} = Error -> + ?tp( + emqx_bridge_kafka_impl_producer_sync_query_failed, + #{ + headers_config => KafkaHeaders, + instance_id => InstId, + reason => Error + } + ), + {error, {unrecoverable_error, Error}} end. %% @doc The callback API for rule-engine (or bridge without rules) @@ -251,7 +262,7 @@ on_query( %% E.g. the output of rule-engine process chain %% or the direct mapping from an MQTT message. 
on_query_async( - _InstId, + InstId, {send_message, Message}, AsyncReplyFn, #{ @@ -267,26 +278,40 @@ on_query_async( ext_headers_tokens => KafkaExtHeadersTokens, headers_val_encode_mode => KafkaHeadersValEncodeMode }, - KafkaMessage = render_message(Template, KafkaHeaders, Message), - ?tp( - emqx_bridge_kafka_impl_producer_async_query, - #{headers_config => KafkaHeaders, instance_id => _InstId} - ), - %% * Must be a batch because wolff:send and wolff:send_sync are batch APIs - %% * Must be a single element batch because wolff books calls, but not batch sizes - %% for counters and gauges. - Batch = [KafkaMessage], - %% The retuned information is discarded here. - %% If the producer process is down when sending, this function would - %% raise an error exception which is to be caught by the caller of this callback - {_Partition, Pid} = wolff:send(Producers, Batch, {fun ?MODULE:on_kafka_ack/3, [AsyncReplyFn]}), - %% this Pid is so far never used because Kafka producer is by-passing the buffer worker - {ok, Pid}. + try + KafkaMessage = render_message(Template, KafkaHeaders, Message), + ?tp( + emqx_bridge_kafka_impl_producer_async_query, + #{headers_config => KafkaHeaders, instance_id => InstId} + ), + do_send_msg(async, KafkaMessage, Producers, AsyncReplyFn) + catch + throw:{bad_kafka_header, _} = Error -> + ?tp( + emqx_bridge_kafka_impl_producer_async_query_failed, + #{ + headers_config => KafkaHeaders, + instance_id => InstId, + reason => Error + } + ), + {error, {unrecoverable_error, Error}}; + throw:{bad_kafka_headers, _} = Error -> + ?tp( + emqx_bridge_kafka_impl_producer_async_query_failed, + #{ + headers_config => KafkaHeaders, + instance_id => InstId, + reason => Error + } + ), + {error, {unrecoverable_error, Error}} + end. 
compile_message_template(T) -> KeyTemplate = maps:get(key, T, <<"${.clientid}">>), ValueTemplate = maps:get(value, T, <<"${.}">>), - TimestampTemplate = maps:get(value, T, <<"${.timestamp}">>), + TimestampTemplate = maps:get(timestamp, T, <<"${.timestamp}">>), #{ key => preproc_tmpl(KeyTemplate), value => preproc_tmpl(ValueTemplate), @@ -337,6 +362,28 @@ render_timestamp(Template, Message) -> erlang:system_time(millisecond) end. +do_send_msg(sync, KafkaMessage, Producers, SyncTimeout) -> + try + {_Partition, _Offset} = wolff:send_sync(Producers, [KafkaMessage], SyncTimeout), + ok + catch + error:{producer_down, _} = Reason -> + {error, Reason}; + error:timeout -> + {error, timeout} + end; +do_send_msg(async, KafkaMessage, Producers, AsyncReplyFn) -> + %% * Must be a batch because wolff:send and wolff:send_sync are batch APIs + %% * Must be a single element batch because wolff books calls, but not batch sizes + %% for counters and gauges. + Batch = [KafkaMessage], + %% The retuned information is discarded here. + %% If the producer process is down when sending, this function would + %% raise an error exception which is to be caught by the caller of this callback + {_Partition, Pid} = wolff:send(Producers, Batch, {fun ?MODULE:on_kafka_ack/3, [AsyncReplyFn]}), + %% this Pid is so far never used because Kafka producer is by-passing the buffer worker + {ok, Pid}. + %% Wolff producer never gives up retrying %% so there can only be 'ok' results. on_kafka_ack(_Partition, Offset, {ReplyFn, Args}) when is_integer(Offset) -> @@ -411,7 +458,7 @@ ssl(#{enable := true} = SSL) -> ssl(_) -> []. 
-producers_config(BridgeName, ClientId, Input, IsDryRun) -> +producers_config(BridgeType, BridgeName, ClientId, Input, IsDryRun) -> #{ max_batch_bytes := MaxBatchBytes, compression := Compression, @@ -437,10 +484,9 @@ producers_config(BridgeName, ClientId, Input, IsDryRun) -> disk -> {false, replayq_dir(ClientId)}; hybrid -> {true, replayq_dir(ClientId)} end, - BridgeType = ?BRIDGE_TYPE, ResourceID = emqx_bridge_resource:resource_id(BridgeType, BridgeName), #{ - name => make_producer_name(BridgeName, IsDryRun), + name => make_producer_name(BridgeType, BridgeName, IsDryRun), partitioner => partitioner(PartitionStrategy), partition_count_refresh_interval_seconds => PCntRefreshInterval, replayq_dir => ReplayqDir, @@ -465,20 +511,15 @@ replayq_dir(ClientId) -> %% Producer name must be an atom which will be used as a ETS table name for %% partition worker lookup. -make_producer_name(BridgeName, IsDryRun) when is_atom(BridgeName) -> - make_producer_name(atom_to_list(BridgeName), IsDryRun); -make_producer_name(BridgeName, IsDryRun) -> +make_producer_name(_BridgeType, _BridgeName, true = _IsDryRun) -> + %% It is a dry run and we don't want to leak too many atoms + %% so we use the default producer name instead of creating + %% an unique name. + probing_wolff_producers; +make_producer_name(BridgeType, BridgeName, _IsDryRun) -> %% Woff needs an atom for ets table name registration. The assumption here is %% that bridges with new names are not often created. - case IsDryRun of - true -> - %% It is a dry run and we don't want to leak too many atoms - %% so we use the default producer name instead of creating - %% an unique name. - probing_wolff_producers; - false -> - binary_to_atom(iolist_to_binary(["kafka_producer_", BridgeName])) - end. + binary_to_atom(iolist_to_binary([BridgeType, "_", bin(BridgeName)])). 
with_log_at_error(Fun, Log) -> try @@ -613,6 +654,9 @@ kvlist_headers([#{<<"key">> := K, <<"value">> := V} | Headers], Acc) -> kvlist_headers(Headers, [{K, V} | Acc]); kvlist_headers([{K, V} | Headers], Acc) -> kvlist_headers(Headers, [{K, V} | Acc]); +kvlist_headers([KVList | Headers], Acc) when is_list(KVList) -> + %% for instance, when user sets a json list as headers like '[{"foo":"bar"}, {"foo2":"bar2"}]'. + kvlist_headers(KVList ++ Headers, Acc); kvlist_headers([BadHeader | _], _) -> throw({bad_kafka_header, BadHeader}). @@ -643,7 +687,7 @@ merge_kafka_headers(HeadersTks, ExtHeaders, Msg) -> [undefined] -> ExtHeaders; [MaybeJson] when is_binary(MaybeJson) -> - case emqx_utils_json:safe_decode(MaybeJson) of + case emqx_utils_json:safe_decode(MaybeJson, [return_maps]) of {ok, JsonTerm} when is_map(JsonTerm) -> maps:to_list(JsonTerm) ++ ExtHeaders; {ok, JsonTerm} when is_list(JsonTerm) -> diff --git a/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_impl_consumer_SUITE.erl b/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_impl_consumer_SUITE.erl index 2d8355e8e..1691fa6a4 100644 --- a/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_impl_consumer_SUITE.erl +++ b/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_impl_consumer_SUITE.erl @@ -66,7 +66,7 @@ only_once_tests() -> ]. init_per_suite(Config) -> - Config. + [{bridge_type, <<"kafka_consumer">>} | Config]. end_per_suite(_Config) -> emqx_mgmt_api_test_util:end_suite(), @@ -898,8 +898,9 @@ ensure_connected(Config) -> ok. consumer_clientid(Config) -> + BridgeType = ?config(bridge_type, Config), KafkaName = ?config(kafka_name, Config), - binary_to_atom(emqx_bridge_kafka_impl:make_client_id(kafka_consumer, KafkaName)). + binary_to_atom(emqx_bridge_kafka_impl:make_client_id(BridgeType, KafkaName)). 
get_client_connection(Config) -> KafkaHost = ?config(kafka_host, Config), @@ -1928,7 +1929,7 @@ t_node_joins_existing_cluster(Config) -> ?retry( _Sleep2 = 100, _Attempts2 = 50, - true = erpc:call(N2, emqx_router, has_routes, [MQTTTopic]) + [] =/= erpc:call(N2, emqx_router, lookup_routes, [MQTTTopic]) ), {ok, SRef1} = snabbkaffe:subscribe( diff --git a/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_impl_producer_SUITE.erl b/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_impl_producer_SUITE.erl index d93b6dd7d..432ce8697 100644 --- a/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_impl_producer_SUITE.erl +++ b/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_impl_producer_SUITE.erl @@ -40,6 +40,7 @@ %% TODO: rename this to `kafka_producer' after alias support is added %% to hocon; keeping this as just `kafka' for backwards compatibility. -define(BRIDGE_TYPE, "kafka"). +-define(BRIDGE_TYPE_BIN, <<"kafka">>). -define(APPS, [emqx_resource, emqx_bridge, emqx_rule_engine, emqx_bridge_kafka]). @@ -438,7 +439,7 @@ t_failed_creation_then_fix(Config) -> {ok, #{config := WrongConfigAtom1}} = emqx_bridge:create( Type, erlang:list_to_atom(Name), WrongConf ), - WrongConfigAtom = WrongConfigAtom1#{bridge_name => Name}, + WrongConfigAtom = WrongConfigAtom1#{bridge_name => Name, bridge_type => ?BRIDGE_TYPE_BIN}, ?assertThrow(Reason when is_list(Reason), ?PRODUCER:on_start(ResourceId, WrongConfigAtom)), %% before throwing, it should cleanup the client process. 
we %% retry because the supervisor might need some time to really @@ -448,7 +449,7 @@ t_failed_creation_then_fix(Config) -> {ok, #{config := ValidConfigAtom1}} = emqx_bridge:create( Type, erlang:list_to_atom(Name), ValidConf ), - ValidConfigAtom = ValidConfigAtom1#{bridge_name => Name}, + ValidConfigAtom = ValidConfigAtom1#{bridge_name => Name, bridge_type => ?BRIDGE_TYPE_BIN}, {ok, State} = ?PRODUCER:on_start(ResourceId, ValidConfigAtom), Time = erlang:unique_integer(), BinTime = integer_to_binary(Time), @@ -470,7 +471,51 @@ t_failed_creation_then_fix(Config) -> delete_all_bridges(), ok. -t_table_removed(_Config) -> +t_custom_timestamp(_Config) -> + HostsString = kafka_hosts_string_sasl(), + AuthSettings = valid_sasl_plain_settings(), + Hash = erlang:phash2([HostsString, ?FUNCTION_NAME]), + Type = ?BRIDGE_TYPE, + Name = "kafka_bridge_name_" ++ erlang:integer_to_list(Hash), + ResourceId = emqx_bridge_resource:resource_id(Type, Name), + KafkaTopic = "test-topic-one-partition", + MQTTTopic = <<"t/local/kafka">>, + emqx:subscribe(MQTTTopic), + Conf0 = config(#{ + "authentication" => AuthSettings, + "kafka_hosts_string" => HostsString, + "local_topic" => MQTTTopic, + "kafka_topic" => KafkaTopic, + "instance_id" => ResourceId, + "ssl" => #{} + }), + Conf = emqx_utils_maps:deep_put( + [<<"kafka">>, <<"message">>, <<"timestamp">>], + Conf0, + <<"123">> + ), + {ok, _} = emqx_bridge:create(Type, erlang:list_to_atom(Name), Conf), + {ok, Offset} = resolve_kafka_offset(kafka_hosts(), KafkaTopic, 0), + ct:pal("base offset before testing ~p", [Offset]), + Time = erlang:unique_integer(), + BinTime = integer_to_binary(Time), + Msg = #{ + clientid => BinTime, + payload => <<"payload">>, + timestamp => Time + }, + emqx:publish(emqx_message:make(MQTTTopic, emqx_utils_json:encode(Msg))), + {ok, {_, [KafkaMsg]}} = + ?retry( + _Interval = 500, + _NAttempts = 20, + {ok, {_, [_]}} = brod:fetch(kafka_hosts(), KafkaTopic, _Partition = 0, Offset) + ), + ?assertMatch(#kafka_message{ts = 123, 
ts_type = create}, KafkaMsg), + delete_all_bridges(), + ok. + +t_nonexistent_topic(_Config) -> HostsString = kafka_hosts_string_sasl(), AuthSettings = valid_sasl_plain_settings(), Hash = erlang:phash2([HostsString, ?FUNCTION_NAME]), @@ -496,7 +541,7 @@ t_table_removed(_Config) -> {ok, #{config := ValidConfigAtom1}} = emqx_bridge:create( Type, erlang:list_to_atom(Name), Conf ), - ValidConfigAtom = ValidConfigAtom1#{bridge_name => Name}, + ValidConfigAtom = ValidConfigAtom1#{bridge_name => Name, bridge_type => ?BRIDGE_TYPE_BIN}, ?assertThrow(_, ?PRODUCER:on_start(ResourceId, ValidConfigAtom)), ok = emqx_bridge_resource:remove(BridgeId), delete_all_bridges(), @@ -516,7 +561,7 @@ t_send_message_with_headers(Config) -> "kafka_hosts_string" => HostsString, "kafka_topic" => KafkaTopic, "instance_id" => ResourceId, - "kafka_headers" => <<"${pub_props}">>, + "kafka_headers" => <<"${payload.header}">>, "kafka_ext_headers" => emqx_utils_json:encode( [ #{ @@ -524,8 +569,8 @@ t_send_message_with_headers(Config) -> <<"kafka_ext_header_value">> => <<"${clientid}">> }, #{ - <<"kafka_ext_header_key">> => <<"payload">>, - <<"kafka_ext_header_value">> => <<"${payload}">> + <<"kafka_ext_header_key">> => <<"ext_header_val">>, + <<"kafka_ext_header_value">> => <<"${payload.ext_header_val}">> } ] ), @@ -541,14 +586,44 @@ t_send_message_with_headers(Config) -> {ok, #{config := ConfigAtom1}} = emqx_bridge:create( Type, erlang:list_to_atom(Name), Conf ), - ConfigAtom = ConfigAtom1#{bridge_name => Name}, + ConfigAtom = ConfigAtom1#{bridge_name => Name, bridge_type => ?BRIDGE_TYPE_BIN}, {ok, State} = ?PRODUCER:on_start(ResourceId, ConfigAtom), - Time = erlang:unique_integer(), - BinTime = integer_to_binary(Time), - Msg = #{ - clientid => BinTime, - payload => <<"payload">>, - timestamp => Time + Time1 = erlang:unique_integer(), + BinTime1 = integer_to_binary(Time1), + Payload1 = emqx_utils_json:encode( + #{ + <<"header">> => #{ + <<"foo">> => <<"bar">> + }, + <<"ext_header_val">> => <<"ext 
header ok">> + } + ), + Msg1 = #{ + clientid => BinTime1, + payload => Payload1, + timestamp => Time1 + }, + Time2 = erlang:unique_integer(), + BinTime2 = integer_to_binary(Time2), + Payload2 = emqx_utils_json:encode( + #{ + <<"header">> => [ + #{ + <<"key">> => <<"foo1">>, + <<"value">> => <<"bar1">> + }, + #{ + <<"key">> => <<"foo2">>, + <<"value">> => <<"bar2">> + } + ], + <<"ext_header_val">> => <<"ext header ok">> + } + ), + Msg2 = #{ + clientid => BinTime2, + payload => Payload2, + timestamp => Time2 }, {ok, Offset} = resolve_kafka_offset(kafka_hosts(), KafkaTopic, 0), ct:pal("base offset before testing ~p", [Offset]), @@ -559,7 +634,8 @@ t_send_message_with_headers(Config) -> end, ?check_trace( begin - ok = send(Config, ResourceId, Msg, State) + ok = send(Config, ResourceId, Msg1, State), + ok = send(Config, ResourceId, Msg2, State) end, fun(Trace) -> ?assertMatch( @@ -572,11 +648,27 @@ t_send_message_with_headers(Config) -> [{var, [<<"clientid">>]}] }, { - [{str, <<"payload">>}], - [{var, [<<"payload">>]}] + [{str, <<"ext_header_val">>}], + [{var, [<<"payload">>, <<"ext_header_val">>]}] } ], - headers_tokens := [{var, [<<"pub_props">>]}], + headers_tokens := [{var, [<<"payload">>, <<"header">>]}], + headers_val_encode_mode := json + } + }, + #{ + headers_config := #{ + ext_headers_tokens := [ + { + [{str, <<"clientid">>}], + [{var, [<<"clientid">>]}] + }, + { + [{str, <<"ext_header_val">>}], + [{var, [<<"payload">>, <<"ext_header_val">>]}] + } + ], + headers_tokens := [{var, [<<"payload">>, <<"header">>]}], headers_val_encode_mode := json } } @@ -585,16 +677,28 @@ t_send_message_with_headers(Config) -> ) end ), - {ok, {_, [KafkaMsg]}} = brod:fetch(kafka_hosts(), KafkaTopic, 0, Offset), + {ok, {_, KafkaMsgs}} = brod:fetch(kafka_hosts(), KafkaTopic, 0, Offset), ?assertMatch( - #kafka_message{ - headers = [ - {<<"clientid">>, _}, - {<<"payload">>, <<"\"payload\"">>} - ], - key = BinTime - }, - KafkaMsg + [ + #kafka_message{ + headers = [ + {<<"foo">>, 
<<"\"bar\"">>}, + {<<"clientid">>, _}, + {<<"ext_header_val">>, <<"\"ext header ok\"">>} + ], + key = BinTime1 + }, + #kafka_message{ + headers = [ + {<<"foo1">>, <<"\"bar1\"">>}, + {<<"foo2">>, <<"\"bar2\"">>}, + {<<"clientid">>, _}, + {<<"ext_header_val">>, <<"\"ext header ok\"">>} + ], + key = BinTime2 + } + ], + KafkaMsgs ), %% TODO: refactor those into init/end per testcase ok = ?PRODUCER:on_stop(ResourceId, State), @@ -677,6 +781,76 @@ t_wrong_headers(_Config) -> ), ok. +t_wrong_headers_from_message(Config) -> + HostsString = kafka_hosts_string_sasl(), + AuthSettings = valid_sasl_plain_settings(), + Hash = erlang:phash2([HostsString, ?FUNCTION_NAME]), + Type = ?BRIDGE_TYPE, + Name = "kafka_bridge_name_" ++ erlang:integer_to_list(Hash), + ResourceId = emqx_bridge_resource:resource_id(Type, Name), + BridgeId = emqx_bridge_resource:bridge_id(Type, Name), + KafkaTopic = "test-topic-one-partition", + Conf = config_with_headers(#{ + "authentication" => AuthSettings, + "kafka_hosts_string" => HostsString, + "kafka_topic" => KafkaTopic, + "instance_id" => ResourceId, + "kafka_headers" => <<"${payload}">>, + "producer" => #{ + "kafka" => #{ + "buffer" => #{ + "memory_overload_protection" => false + } + } + }, + "ssl" => #{} + }), + {ok, #{config := ConfigAtom1}} = emqx_bridge:create( + Type, erlang:list_to_atom(Name), Conf + ), + ConfigAtom = ConfigAtom1#{bridge_name => Name, bridge_type => ?BRIDGE_TYPE_BIN}, + {ok, State} = ?PRODUCER:on_start(ResourceId, ConfigAtom), + Time1 = erlang:unique_integer(), + Payload1 = <<"wrong_header">>, + Msg1 = #{ + clientid => integer_to_binary(Time1), + payload => Payload1, + timestamp => Time1 + }, + ?assertError( + {badmatch, {error, {unrecoverable_error, {bad_kafka_headers, Payload1}}}}, + send(Config, ResourceId, Msg1, State) + ), + Time2 = erlang:unique_integer(), + Payload2 = <<"[{\"foo\":\"bar\"}, {\"foo2\":\"bar2\"}]">>, + Msg2 = #{ + clientid => integer_to_binary(Time2), + payload => Payload2, + timestamp => Time2 + }, + 
?assertError( + {badmatch, {error, {unrecoverable_error, {bad_kafka_header, #{<<"foo">> := <<"bar">>}}}}}, + send(Config, ResourceId, Msg2, State) + ), + Time3 = erlang:unique_integer(), + Payload3 = <<"[{\"key\":\"foo\"}, {\"value\":\"bar\"}]">>, + Msg3 = #{ + clientid => integer_to_binary(Time3), + payload => Payload3, + timestamp => Time3 + }, + ?assertError( + {badmatch, {error, {unrecoverable_error, {bad_kafka_header, #{<<"key">> := <<"foo">>}}}}}, + send(Config, ResourceId, Msg3, State) + ), + %% TODO: refactor those into init/end per testcase + ok = ?PRODUCER:on_stop(ResourceId, State), + ?assertEqual([], supervisor:which_children(wolff_client_sup)), + ?assertEqual([], supervisor:which_children(wolff_producers_sup)), + ok = emqx_bridge_resource:remove(BridgeId), + delete_all_bridges(), + ok. + %%------------------------------------------------------------------------------ %% Helper functions %%------------------------------------------------------------------------------ diff --git a/apps/emqx_bridge_kinesis/src/emqx_bridge_kinesis.erl b/apps/emqx_bridge_kinesis/src/emqx_bridge_kinesis.erl index cb3cd3788..d98e7ab11 100644 --- a/apps/emqx_bridge_kinesis/src/emqx_bridge_kinesis.erl +++ b/apps/emqx_bridge_kinesis/src/emqx_bridge_kinesis.erl @@ -30,8 +30,27 @@ roots() -> fields("config_producer") -> emqx_bridge_schema:common_bridge_fields() ++ - emqx_resource_schema:fields("resource_opts") ++ - fields(connector_config) ++ fields(producer); + fields("resource_opts") ++ + fields(connector_config) ++ + fields(producer); +fields("resource_opts") -> + [ + {resource_opts, + mk( + ref(?MODULE, "creation_opts"), + #{ + required => false, + default => #{}, + desc => ?DESC(emqx_resource_schema, "creation_opts") + } + )} + ]; +fields("creation_opts") -> + emqx_resource_schema:create_opts([ + {batch_size, #{ + validator => emqx_resource_validator:max(int, 500) + }} + ]); fields(connector_config) -> [ {aws_access_key_id, @@ -55,7 +74,8 @@ fields(connector_config) -> mk( 
binary(), #{ - default => <<"https://kinesis.us-east-1.amazonaws.com">>, + required => true, + example => <<"https://kinesis.us-east-1.amazonaws.com">>, desc => ?DESC("endpoint") } )}, @@ -83,7 +103,7 @@ fields(producer) -> sc( binary(), #{ - default => <<>>, + default => <<"${.}">>, desc => ?DESC("payload_template") } )}, @@ -120,6 +140,8 @@ fields("put_producer") -> desc("config_producer") -> ?DESC("desc_config"); +desc("creation_opts") -> + ?DESC(emqx_resource_schema, "creation_opts"); desc(_) -> undefined. @@ -160,6 +182,8 @@ mk(Type, Meta) -> hoconsc:mk(Type, Meta). enum(OfSymbols) -> hoconsc:enum(OfSymbols). +ref(Module, Name) -> hoconsc:ref(Module, Name). + type_field_producer() -> {type, mk(enum([kinesis_producer]), #{required => true, desc => ?DESC("desc_type")})}. diff --git a/apps/emqx_bridge_kinesis/test/emqx_bridge_kinesis_impl_producer_SUITE.erl b/apps/emqx_bridge_kinesis/test/emqx_bridge_kinesis_impl_producer_SUITE.erl index d0fe4a1b4..ea926fc33 100644 --- a/apps/emqx_bridge_kinesis/test/emqx_bridge_kinesis_impl_producer_SUITE.erl +++ b/apps/emqx_bridge_kinesis/test/emqx_bridge_kinesis_impl_producer_SUITE.erl @@ -228,13 +228,17 @@ create_bridge_http(Config, KinesisConfigOverrides) -> Res. create_bridge(Config) -> - create_bridge(Config, _KinesisConfigOverrides = #{}). + create_bridge(Config, #{}, []). create_bridge(Config, KinesisConfigOverrides) -> + create_bridge(Config, KinesisConfigOverrides, []). 
+ +create_bridge(Config, KinesisConfigOverrides, Removes) -> TypeBin = ?BRIDGE_TYPE_BIN, Name = ?config(kinesis_name, Config), KinesisConfig0 = ?config(kinesis_config, Config), - KinesisConfig = emqx_utils_maps:deep_merge(KinesisConfig0, KinesisConfigOverrides), + KinesisConfig1 = emqx_utils_maps:deep_merge(KinesisConfig0, KinesisConfigOverrides), + KinesisConfig = emqx_utils_maps:deep_remove(Removes, KinesisConfig1), ct:pal("creating bridge: ~p", [KinesisConfig]), Res = emqx_bridge:create(TypeBin, Name, KinesisConfig), ct:pal("bridge creation result: ~p", [Res]), @@ -862,3 +866,53 @@ t_access_denied(Config) -> end ), ok. + +t_empty_payload_template(Config) -> + ResourceId = ?config(resource_id, Config), + TelemetryTable = ?config(telemetry_table, Config), + Removes = [<<"payload_template">>], + ?assertMatch({ok, _}, create_bridge(Config, #{}, Removes)), + {ok, #{<<"id">> := RuleId}} = create_rule_and_action_http(Config), + emqx_common_test_helpers:on_exit(fun() -> ok = emqx_rule_engine:delete_rule(RuleId) end), + assert_empty_metrics(ResourceId), + ShardIt = get_shard_iterator(Config), + Payload = <<"payload">>, + Message = emqx_message:make(?TOPIC, Payload), + emqx:publish(Message), + %% to avoid test flakiness + wait_telemetry_event(TelemetryTable, success, ResourceId), + wait_until_gauge_is(queuing, 0, 500), + wait_until_gauge_is(inflight, 0, 500), + assert_metrics( + #{ + dropped => 0, + failed => 0, + inflight => 0, + matched => 1, + queuing => 0, + retried => 0, + success => 1 + }, + ResourceId + ), + Record = wait_record(Config, ShardIt, 100, 10), + Data = proplists:get_value(<<"Data">>, Record), + ?assertMatch( + #{<<"payload">> := <<"payload">>, <<"topic">> := ?TOPIC}, + emqx_utils_json:decode(Data, [return_maps]) + ), + ok. + +t_validate_static_constraints(Config) -> + % From : + % "Each PutRecords request can support up to 500 records. 
+ % Each record in the request can be as large as 1 MiB, + % up to a limit of 5 MiB for the entire request, including partition keys." + % + % Message size and request size shall be controlled by user, so there is no validators + % for them - if exceeded, it will fail like on `t_publish_big_msg` test. + ?assertThrow( + {emqx_bridge_schema, [#{kind := validation_error, value := 501}]}, + generate_config([{batch_size, 501} | Config]) + ), + ok. diff --git a/apps/emqx_bridge_redis/src/emqx_bridge_redis.app.src b/apps/emqx_bridge_redis/src/emqx_bridge_redis.app.src index b380bc86d..5b6163969 100644 --- a/apps/emqx_bridge_redis/src/emqx_bridge_redis.app.src +++ b/apps/emqx_bridge_redis/src/emqx_bridge_redis.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_redis, [ {description, "EMQX Enterprise Redis Bridge"}, - {vsn, "0.1.2"}, + {vsn, "0.1.3"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_redis/src/emqx_bridge_redis_connector.erl b/apps/emqx_bridge_redis/src/emqx_bridge_redis_connector.erl index 38a80048e..696947726 100644 --- a/apps/emqx_bridge_redis/src/emqx_bridge_redis_connector.erl +++ b/apps/emqx_bridge_redis/src/emqx_bridge_redis_connector.erl @@ -35,6 +35,12 @@ on_start(InstId, #{command_template := CommandTemplate} = Config) -> conn_st => RedisConnSt, command_template => preproc_command_template(CommandTemplate) }}; + {error, {start_pool_failed, _, #{type := authentication_error, reason := Reason}}} = Error -> + ?tp( + redis_bridge_connector_start_error, + #{error => Error} + ), + throw({unhealthy_target, Reason}); {error, _} = Error -> ?tp( redis_bridge_connector_start_error, diff --git a/apps/emqx_bridge_redis/test/emqx_bridge_redis_SUITE.erl b/apps/emqx_bridge_redis/test/emqx_bridge_redis_SUITE.erl index 6a0248b67..c4089323b 100644 --- a/apps/emqx_bridge_redis/test/emqx_bridge_redis_SUITE.erl +++ b/apps/emqx_bridge_redis/test/emqx_bridge_redis_SUITE.erl @@ -30,6 +30,11 @@ <<"local_topic">> => <<"local_topic/#">> }). 
+-define(USERNAME_PASSWORD_AUTH_OPTS, #{ + <<"username">> => <<"test_user">>, + <<"password">> => <<"test_passwd">> +}). + -define(BATCH_SIZE, 5). -define(PROXY_HOST, "toxiproxy"). @@ -319,6 +324,63 @@ t_permanent_error(_Config) -> ), {ok, _} = emqx_bridge:remove(Type, Name). +t_auth_username_password(_Config) -> + Name = <<"mybridge">>, + Type = <<"redis_single">>, + ResourceId = emqx_bridge_resource:resource_id(Type, Name), + BridgeConfig = username_password_redis_bridge_config(), + ?assertMatch( + {ok, _}, + emqx_bridge:create(Type, Name, BridgeConfig) + ), + ?WAIT( + {ok, connected}, + emqx_resource:health_check(ResourceId), + 5 + ), + {ok, _} = emqx_bridge:remove(Type, Name). + +t_auth_error_username_password(_Config) -> + Name = <<"mybridge">>, + Type = <<"redis_single">>, + ResourceId = emqx_bridge_resource:resource_id(Type, Name), + BridgeConfig0 = username_password_redis_bridge_config(), + BridgeConfig = maps:merge(BridgeConfig0, #{<<"password">> => <<"wrong_password">>}), + ?assertMatch( + {ok, _}, + emqx_bridge:create(Type, Name, BridgeConfig) + ), + ?WAIT( + {ok, disconnected}, + emqx_resource:health_check(ResourceId), + 5 + ), + ?assertMatch( + {ok, _, #{error := {unhealthy_target, _Msg}}}, + emqx_resource_manager:lookup(ResourceId) + ), + {ok, _} = emqx_bridge:remove(Type, Name). + +t_auth_error_password_only(_Config) -> + Name = <<"mybridge">>, + Type = <<"redis_single">>, + ResourceId = emqx_bridge_resource:resource_id(Type, Name), + BridgeConfig0 = toxiproxy_redis_bridge_config(), + BridgeConfig = maps:merge(BridgeConfig0, #{<<"password">> => <<"wrong_password">>}), + ?assertMatch( + {ok, _}, + emqx_bridge:create(Type, Name, BridgeConfig) + ), + ?assertEqual( + {ok, disconnected}, + emqx_resource:health_check(ResourceId) + ), + ?assertMatch( + {ok, _, #{error := {unhealthy_target, _Msg}}}, + emqx_resource_manager:lookup(ResourceId) + ), + {ok, _} = emqx_bridge:remove(Type, Name). 
+ t_create_disconnected(Config) -> Name = <<"toxic_bridge">>, Type = <<"redis_single">>, @@ -528,6 +590,19 @@ toxiproxy_redis_bridge_config() -> }, maps:merge(Conf0, ?COMMON_REDIS_OPTS). +username_password_redis_bridge_config() -> + Conf0 = ?REDIS_TOXYPROXY_CONNECT_CONFIG#{ + <<"resource_opts">> => #{ + <<"query_mode">> => <<"sync">>, + <<"worker_pool_size">> => <<"1">>, + <<"batch_size">> => integer_to_binary(?BATCH_SIZE), + <<"health_check_interval">> => <<"1s">>, + <<"start_timeout">> => <<"15s">> + } + }, + Conf1 = maps:merge(Conf0, ?COMMON_REDIS_OPTS), + maps:merge(Conf1, ?USERNAME_PASSWORD_AUTH_OPTS). + invalid_command_bridge_config() -> #{redis_single := #{tcp := Conf0}} = redis_connect_configs(), Conf1 = maps:merge(Conf0, ?COMMON_REDIS_OPTS), diff --git a/apps/emqx_conf/rebar.config b/apps/emqx_conf/rebar.config index c947932a0..1d2f23bd0 100644 --- a/apps/emqx_conf/rebar.config +++ b/apps/emqx_conf/rebar.config @@ -1,7 +1,10 @@ %% -*- mode: erlang -*- {erl_opts, [debug_info]}. -{deps, [{emqx, {path, "../emqx"}}]}. +{deps, [ + {emqx, {path, "../emqx"}}, + {emqx_authn, {path, "../emqx_authn"}} +]}. {shell, [ % {config, "config/sys.config"}, diff --git a/apps/emqx_conf/src/emqx_cluster_rpc.erl b/apps/emqx_conf/src/emqx_cluster_rpc.erl index bb154f8b5..934d7ef7a 100644 --- a/apps/emqx_conf/src/emqx_cluster_rpc.erl +++ b/apps/emqx_conf/src/emqx_cluster_rpc.erl @@ -649,7 +649,7 @@ do_wait_for_emqx_ready(N) -> ok -> ok; timeout -> - ?SLOG(warning, #{msg => "stil_waiting_for_emqx_app_to_be_ready"}), + ?SLOG(warning, #{msg => "still_waiting_for_emqx_app_to_be_ready"}), do_wait_for_emqx_ready(N - 1) end. 
diff --git a/apps/emqx_conf/src/emqx_conf.app.src b/apps/emqx_conf/src/emqx_conf.app.src index ab65c03c8..a4781e6fb 100644 --- a/apps/emqx_conf/src/emqx_conf.app.src +++ b/apps/emqx_conf/src/emqx_conf.app.src @@ -1,6 +1,6 @@ {application, emqx_conf, [ {description, "EMQX configuration management"}, - {vsn, "0.1.26"}, + {vsn, "0.1.27"}, {registered, []}, {mod, {emqx_conf_app, []}}, {applications, [kernel, stdlib, emqx_ctl]}, diff --git a/apps/emqx_conf/src/emqx_conf_cli.erl b/apps/emqx_conf/src/emqx_conf_cli.erl index fde3059d3..cdf188001 100644 --- a/apps/emqx_conf/src/emqx_conf_cli.erl +++ b/apps/emqx_conf/src/emqx_conf_cli.erl @@ -16,8 +16,8 @@ -module(emqx_conf_cli). -include("emqx_conf.hrl"). --include_lib("emqx/include/emqx_access_control.hrl"). --include_lib("emqx/include/emqx_authentication.hrl"). +-include_lib("emqx_authn/include/emqx_authentication.hrl"). +-include_lib("emqx/include/logger.hrl"). -export([ load/0, @@ -50,17 +50,17 @@ conf(["show"]) -> conf(["show", Key]) -> print_hocon(get_config(list_to_binary(Key))); conf(["load", "--replace", Path]) -> - load_config(Path, replace); + load_config(Path, #{mode => replace}); conf(["load", "--merge", Path]) -> - load_config(Path, merge); + load_config(Path, #{mode => merge}); conf(["load", Path]) -> - load_config(Path, merge); + load_config(Path, #{mode => merge}); conf(["cluster_sync" | Args]) -> admins(Args); conf(["reload", "--merge"]) -> - reload_etc_conf_on_local_node(merge); + reload_etc_conf_on_local_node(#{mode => merge}); conf(["reload", "--replace"]) -> - reload_etc_conf_on_local_node(replace); + reload_etc_conf_on_local_node(#{mode => replace}); conf(["reload"]) -> conf(["reload", "--merge"]); conf(_) -> @@ -191,32 +191,32 @@ get_config(Key) -> end. -define(OPTIONS, #{rawconf_with_defaults => true, override_to => cluster}). 
-load_config(Path, ReplaceOrMerge) when is_list(Path) -> +load_config(Path, Opts) when is_list(Path) -> case hocon:files([Path]) of {ok, RawConf} when RawConf =:= #{} -> emqx_ctl:warning("load ~ts is empty~n", [Path]), {error, empty_hocon_file}; {ok, RawConf} -> - load_config_from_raw(RawConf, ReplaceOrMerge); + load_config_from_raw(RawConf, Opts); {error, Reason} -> emqx_ctl:warning("load ~ts failed~n~p~n", [Path, Reason]), {error, bad_hocon_file} end; -load_config(Bin, ReplaceOrMerge) when is_binary(Bin) -> +load_config(Bin, Opts) when is_binary(Bin) -> case hocon:binary(Bin) of {ok, RawConf} -> - load_config_from_raw(RawConf, ReplaceOrMerge); + load_config_from_raw(RawConf, Opts); {error, Reason} -> {error, Reason} end. -load_config_from_raw(RawConf, ReplaceOrMerge) -> +load_config_from_raw(RawConf, Opts) -> case check_config(RawConf) of ok -> Error = lists:filtermap( fun({K, V}) -> - case update_config_cluster(K, V, ReplaceOrMerge) of + case update_config_cluster(K, V, Opts) of ok -> false; {error, Msg} -> {true, Msg} end @@ -228,53 +228,70 @@ load_config_from_raw(RawConf, ReplaceOrMerge) -> ErrorBin -> {error, ErrorBin} end; {error, ?UPDATE_READONLY_KEYS_PROHIBITED = Reason} -> - emqx_ctl:warning("load config failed~n~ts~n", [Reason]), - emqx_ctl:warning( - "Maybe try `emqx_ctl conf reload` to reload etc/emqx.conf on local node~n" + warning(Opts, "load config failed~n~ts~n", [Reason]), + warning( + Opts, + "Maybe try `emqx_ctl conf reload` to reload etc/emqx.conf on local node~n", + [] ), {error, Reason}; {error, Errors} -> - emqx_ctl:warning("load schema check failed~n"), + warning(Opts, "load schema check failed~n", []), lists:foreach( fun({Key, Error}) -> - emqx_ctl:warning("~ts: ~p~n", [Key, Error]) + warning(Opts, "~ts: ~p~n", [Key, Error]) end, Errors ), {error, Errors} end. 
-update_config_cluster(?EMQX_AUTHORIZATION_CONFIG_ROOT_NAME_BINARY = Key, Conf, merge = Mode) -> - check_res(Key, emqx_authz:merge(Conf), Conf, Mode); -update_config_cluster(?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_BINARY = Key, Conf, merge = Mode) -> - check_res(Key, emqx_authn:merge_config(Conf), Conf, Mode); -update_config_cluster(Key, NewConf, merge = Mode) -> +update_config_cluster( + ?EMQX_AUTHORIZATION_CONFIG_ROOT_NAME_BINARY = Key, + Conf, + #{mode := merge} = Opts +) -> + check_res(Key, emqx_authz:merge(Conf), Conf, Opts); +update_config_cluster( + ?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_BINARY = Key, + Conf, + #{mode := merge} = Opts +) -> + check_res(Key, emqx_authn:merge_config(Conf), Conf, Opts); +update_config_cluster(Key, NewConf, #{mode := merge} = Opts) -> Merged = merge_conf(Key, NewConf), - check_res(Key, emqx_conf:update([Key], Merged, ?OPTIONS), NewConf, Mode); -update_config_cluster(Key, Value, replace = Mode) -> - check_res(Key, emqx_conf:update([Key], Value, ?OPTIONS), Value, Mode). + check_res(Key, emqx_conf:update([Key], Merged, ?OPTIONS), NewConf, Opts); +update_config_cluster(Key, Value, #{mode := replace} = Opts) -> + check_res(Key, emqx_conf:update([Key], Value, ?OPTIONS), Value, Opts). -define(LOCAL_OPTIONS, #{rawconf_with_defaults => true, persistent => false}). 
-update_config_local(?EMQX_AUTHORIZATION_CONFIG_ROOT_NAME_BINARY = Key, Conf, merge = Mode) -> - check_res(node(), Key, emqx_authz:merge_local(Conf, ?LOCAL_OPTIONS), Conf, Mode); -update_config_local(?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_BINARY = Key, Conf, merge = Mode) -> - check_res(node(), Key, emqx_authn:merge_config_local(Conf, ?LOCAL_OPTIONS), Conf, Mode); -update_config_local(Key, NewConf, merge = Mode) -> +update_config_local( + ?EMQX_AUTHORIZATION_CONFIG_ROOT_NAME_BINARY = Key, + Conf, + #{mode := merge} = Opts +) -> + check_res(node(), Key, emqx_authz:merge_local(Conf, ?LOCAL_OPTIONS), Conf, Opts); +update_config_local( + ?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_BINARY = Key, + Conf, + #{mode := merge} = Opts +) -> + check_res(node(), Key, emqx_authn:merge_config_local(Conf, ?LOCAL_OPTIONS), Conf, Opts); +update_config_local(Key, NewConf, #{mode := merge} = Opts) -> Merged = merge_conf(Key, NewConf), - check_res(node(), Key, emqx:update_config([Key], Merged, ?LOCAL_OPTIONS), NewConf, Mode); -update_config_local(Key, Value, replace = Mode) -> - check_res(node(), Key, emqx:update_config([Key], Value, ?LOCAL_OPTIONS), Value, Mode). + check_res(node(), Key, emqx:update_config([Key], Merged, ?LOCAL_OPTIONS), NewConf, Opts); +update_config_local(Key, Value, #{mode := replace} = Opts) -> + check_res(node(), Key, emqx:update_config([Key], Value, ?LOCAL_OPTIONS), Value, Opts). -check_res(Key, Res, Conf, Mode) -> check_res(cluster, Key, Res, Conf, Mode). -check_res(Node, Key, {ok, _}, _Conf, _Mode) -> - emqx_ctl:print("load ~ts on ~p ok~n", [Key, Node]), +check_res(Key, Res, Conf, Opts) -> check_res(cluster, Key, Res, Conf, Opts). 
+check_res(Node, Key, {ok, _}, _Conf, Opts) -> + print(Opts, "load ~ts on ~p ok~n", [Key, Node]), ok; -check_res(_Node, Key, {error, Reason}, Conf, Mode) -> +check_res(_Node, Key, {error, Reason}, Conf, Opts = #{mode := Mode}) -> Warning = "Can't ~ts the new configurations!~n" "Root key: ~ts~n" "Reason: ~p~n", - emqx_ctl:warning(Warning, [Mode, Key, Reason]), ActiveMsg0 = "The effective configurations:~n" "```~n" @@ -285,12 +302,14 @@ check_res(_Node, Key, {error, Reason}, Conf, Mode) -> "```~n" "~ts```~n", FailedMsg = io_lib:format(FailedMsg0, [Mode, hocon_pp:do(#{Key => Conf}, #{})]), - SuggestMsg = suggest_msg(Mode), + SuggestMsg = suggest_msg(Reason, Mode), Msg = iolist_to_binary([ActiveMsg, FailedMsg, SuggestMsg]), - emqx_ctl:print("~ts", [Msg]), - {error, iolist_to_binary([Warning, Msg])}. + print(Opts, "~ts~n", [Msg]), + warning(Opts, Warning, [Mode, Key, Reason]), + {error, iolist_to_binary([Msg, "\n", io_lib:format(Warning, [Mode, Key, Reason])])}. -suggest_msg(Mode) when Mode == merge orelse Mode == replace -> +%% The mix data failed validation, suggest the user to retry with another mode. +suggest_msg(#{kind := validation_error, reason := unknown_fields}, Mode) -> RetryMode = case Mode of merge -> "replace"; @@ -298,9 +317,11 @@ suggest_msg(Mode) when Mode == merge orelse Mode == replace -> end, io_lib:format( "Tips: There may be some conflicts in the new configuration under `~ts` mode,~n" - "Please retry with the `~ts` mode.~n", + "Please retry with the `~ts` mode.", [Mode, RetryMode] - ). + ); +suggest_msg(_, _) -> + <<"">>. check_config(Conf) -> case check_keys_is_not_readonly(Conf) of @@ -327,19 +348,19 @@ check_config_schema(Conf) -> sorted_fold(Fold, Conf). %% @doc Reload etc/emqx.conf to runtime config except for the readonly config --spec reload_etc_conf_on_local_node(replace | merge) -> ok | {error, term()}. -reload_etc_conf_on_local_node(ReplaceOrMerge) -> +-spec reload_etc_conf_on_local_node(#{mode => replace | merge}) -> ok | {error, term()}. 
+reload_etc_conf_on_local_node(Opts) -> case load_etc_config_file() of {ok, RawConf} -> case filter_readonly_config(RawConf) of {ok, Reloaded} -> - reload_config(Reloaded, ReplaceOrMerge); + reload_config(Reloaded, Opts); {error, Error} -> - emqx_ctl:warning("check config failed~n~p~n", [Error]), + warning(Opts, "check config failed~n~p~n", [Error]), {error, Error} end; {error, Error} -> - emqx_ctl:warning("bad_hocon_file~n ~p~n", [Error]), + warning(Opts, "bad_hocon_file~n ~p~n", [Error]), {error, bad_hocon_file} end. @@ -385,9 +406,9 @@ filter_readonly_config(Raw) -> {error, Error} end. -reload_config(AllConf, ReplaceOrMerge) -> +reload_config(AllConf, Opts) -> Fold = fun({Key, Conf}, Acc) -> - case update_config_local(Key, Conf, ReplaceOrMerge) of + case update_config_local(Key, Conf, Opts) of ok -> Acc; Error -> @@ -441,3 +462,9 @@ check_config(SchemaMod, Key, Value) -> throw:Error -> {error, Error} end. + +warning(#{log := none}, _, _) -> ok; +warning(_, Format, Args) -> emqx_ctl:warning(Format, Args). + +print(#{log := none}, _, _) -> ok; +print(_, Format, Args) -> emqx_ctl:print(Format, Args). diff --git a/apps/emqx_conf/src/emqx_conf_schema.erl b/apps/emqx_conf/src/emqx_conf_schema.erl index 246f36f41..94cdd2ecd 100644 --- a/apps/emqx_conf/src/emqx_conf_schema.erl +++ b/apps/emqx_conf/src/emqx_conf_schema.erl @@ -22,9 +22,9 @@ -dialyzer(no_unused). -dialyzer(no_fail_call). +-include_lib("emqx/include/emqx_access_control.hrl"). -include_lib("typerefl/include/types.hrl"). -include_lib("hocon/include/hoconsc.hrl"). --include_lib("emqx/include/emqx_authentication.hrl"). -type log_level() :: debug | info | notice | warning | error | critical | alert | emergency | all. -type file() :: string(). @@ -66,6 +66,10 @@ emqx_otel_schema, emqx_mgmt_api_key_schema ]). +-define(INJECTING_CONFIGS, [ + emqx_authn_schema +]). + %% 1 million default ports counter -define(DEFAULT_MAX_PORTS, 1024 * 1024). @@ -76,11 +80,7 @@ tags() -> [<<"EMQX">>]. 
roots() -> - PtKey = ?EMQX_AUTHENTICATION_SCHEMA_MODULE_PT_KEY, - case persistent_term:get(PtKey, undefined) of - undefined -> persistent_term:put(PtKey, emqx_authn_schema); - _ -> ok - end, + ok = emqx_schema_hooks:inject_from_modules(?INJECTING_CONFIGS), emqx_schema_high_prio_roots() ++ [ {"node", @@ -1105,12 +1105,7 @@ translation("gen_rpc") -> [{"default_client_driver", fun tr_default_config_driver/1}]; translation("prometheus") -> [ - {"vm_dist_collector_metrics", fun tr_vm_dist_collector/1}, - {"mnesia_collector_metrics", fun tr_mnesia_collector/1}, - {"vm_statistics_collector_metrics", fun tr_vm_statistics_collector/1}, - {"vm_system_info_collector_metrics", fun tr_vm_system_info_collector/1}, - {"vm_memory_collector_metrics", fun tr_vm_memory_collector/1}, - {"vm_msacc_collector_metrics", fun tr_vm_msacc_collector/1} + {"collectors", fun tr_prometheus_collectors/1} ]; translation("vm_args") -> [ @@ -1120,26 +1115,53 @@ translation("vm_args") -> tr_vm_args_process_limit(Conf) -> 2 * conf_get("node.max_ports", Conf, ?DEFAULT_MAX_PORTS). +tr_prometheus_collectors(Conf) -> + [ + %% builtin collectors + prometheus_boolean, + prometheus_counter, + prometheus_gauge, + prometheus_histogram, + prometheus_quantile_summary, + prometheus_summary, + %% emqx collectors + emqx_prometheus, + emqx_prometheus_mria + %% builtin vm collectors + | tr_vm_dist_collector(Conf) ++ + tr_mnesia_collector(Conf) ++ + tr_vm_statistics_collector(Conf) ++ + tr_vm_system_info_collector(Conf) ++ + tr_vm_memory_collector(Conf) ++ + tr_vm_msacc_collector(Conf) + ]. + tr_vm_dist_collector(Conf) -> - metrics_enabled(conf_get("prometheus.vm_dist_collector", Conf, enabled)). + Enabled = conf_get("prometheus.vm_dist_collector", Conf, disabled), + collector_enabled(Enabled, prometheus_vm_dist_collector). tr_mnesia_collector(Conf) -> - metrics_enabled(conf_get("prometheus.mnesia_collector", Conf, enabled)). 
+ Enabled = conf_get("prometheus.mnesia_collector", Conf, disabled), + collector_enabled(Enabled, prometheus_mnesia_collector). tr_vm_statistics_collector(Conf) -> - metrics_enabled(conf_get("prometheus.vm_statistics_collector", Conf, enabled)). + Enabled = conf_get("prometheus.vm_statistics_collector", Conf, disabled), + collector_enabled(Enabled, prometheus_vm_statistics_collector). tr_vm_system_info_collector(Conf) -> - metrics_enabled(conf_get("prometheus.vm_system_info_collector", Conf, enabled)). + Enabled = conf_get("prometheus.vm_system_info_collector", Conf, disabled), + collector_enabled(Enabled, prometheus_vm_system_info_collector). tr_vm_memory_collector(Conf) -> - metrics_enabled(conf_get("prometheus.vm_memory_collector", Conf, enabled)). + Enabled = conf_get("prometheus.vm_memory_collector", Conf, disabled), + collector_enabled(Enabled, prometheus_vm_memory_collector). tr_vm_msacc_collector(Conf) -> - metrics_enabled(conf_get("prometheus.vm_msacc_collector", Conf, enabled)). + Enabled = conf_get("prometheus.vm_msacc_collector", Conf, disabled), + collector_enabled(Enabled, prometheus_vm_msacc_collector). -metrics_enabled(enabled) -> all; -metrics_enabled(disabled) -> []. +collector_enabled(enabled, Collector) -> [Collector]; +collector_enabled(disabled, _) -> []. tr_default_config_driver(Conf) -> conf_get("rpc.driver", Conf). diff --git a/apps/emqx_conf/test/emqx_conf_cli_SUITE.erl b/apps/emqx_conf/test/emqx_conf_cli_SUITE.erl index c7701b431..e5356c2ea 100644 --- a/apps/emqx_conf/test/emqx_conf_cli_SUITE.erl +++ b/apps/emqx_conf/test/emqx_conf_cli_SUITE.erl @@ -27,11 +27,11 @@ all() -> emqx_common_test_helpers:all(?MODULE). init_per_suite(Config) -> - emqx_mgmt_api_test_util:init_suite([emqx_conf, emqx_authz]), + emqx_mgmt_api_test_util:init_suite([emqx_conf, emqx_authz, emqx_authn]), Config. end_per_suite(_Config) -> - emqx_mgmt_api_test_util:end_suite([emqx_conf, emqx_authz]). 
+ emqx_mgmt_api_test_util:end_suite([emqx_conf, emqx_authz, emqx_authn]). t_load_config(Config) -> Authz = authorization, @@ -64,6 +64,88 @@ t_load_config(Config) -> ?assertEqual({error, empty_hocon_file}, emqx_conf_cli:conf(["load", "non-exist-file"])), ok. +t_conflict_mix_conf(Config) -> + case emqx_release:edition() of + ce -> + %% Don't fail if the test is run with emqx profile + ok; + ee -> + AuthNInit = emqx_conf:get_raw([authentication]), + Redis = #{ + <<"backend">> => <<"redis">>, + <<"cmd">> => <<"HMGET mqtt_user:${username} password_hash salt">>, + <<"enable">> => false, + <<"mechanism">> => <<"password_based">>, + %% password_hash_algorithm {name = sha256, salt_position = suffix} + <<"redis_type">> => <<"single">>, + <<"server">> => <<"127.0.0.1:6379">> + }, + AuthN = #{<<"authentication">> => [Redis]}, + ConfBin = hocon_pp:do(AuthN, #{}), + ConfFile = prepare_conf_file(?FUNCTION_NAME, ConfBin, Config), + %% init with redis sources + ok = emqx_conf_cli:conf(["load", "--replace", ConfFile]), + ?assertMatch([Redis], emqx_conf:get_raw([authentication])), + %% change redis type from single to cluster + %% the server field will become servers field + RedisCluster = maps:remove(<<"server">>, Redis#{ + <<"redis_type">> => cluster, + <<"servers">> => [<<"127.0.0.1:6379">>] + }), + AuthN1 = AuthN#{<<"authentication">> => [RedisCluster]}, + ConfBin1 = hocon_pp:do(AuthN1, #{}), + ConfFile1 = prepare_conf_file(?FUNCTION_NAME, ConfBin1, Config), + {error, Reason} = emqx_conf_cli:conf(["load", "--merge", ConfFile1]), + ?assertNotEqual( + nomatch, + binary:match( + Reason, + [<<"Tips: There may be some conflicts in the new configuration under">>] + ), + Reason + ), + %% use replace to change redis type from single to cluster + ?assertMatch(ok, emqx_conf_cli:conf(["load", "--replace", ConfFile1])), + %% clean up + ConfBinInit = hocon_pp:do(#{<<"authentication">> => AuthNInit}, #{}), + ConfFileInit = prepare_conf_file(?FUNCTION_NAME, ConfBinInit, Config), + ok = 
emqx_conf_cli:conf(["load", "--replace", ConfFileInit]), + ok + end. + +t_config_handler_hook_failed(Config) -> + Listeners = + #{ + <<"listeners">> => #{ + <<"ssl">> => #{ + <<"default">> => #{ + <<"ssl_options">> => #{ + <<"keyfile">> => <<"">> + } + } + } + } + }, + ConfBin = hocon_pp:do(Listeners, #{}), + ConfFile = prepare_conf_file(?FUNCTION_NAME, ConfBin, Config), + {error, Reason} = emqx_conf_cli:conf(["load", "--merge", ConfFile]), + %% the hook failed with empty keyfile + ?assertEqual( + nomatch, + binary:match(Reason, [ + <<"Tips: There may be some conflicts in the new configuration under">> + ]), + Reason + ), + ?assertNotEqual( + nomatch, + binary:match(Reason, [ + <<"{bad_ssl_config,#{reason => pem_file_path_or_string_is_required">> + ]), + Reason + ), + ok. + t_load_readonly(Config) -> Base0 = base_conf(), Base1 = Base0#{<<"mqtt">> => emqx_conf:get_raw([mqtt])}, diff --git a/apps/emqx_dashboard/test/emqx_dashboard_api_test_helpers.erl b/apps/emqx_dashboard/test/emqx_dashboard_api_test_helpers.erl index e233eaaa2..5e8c61e15 100644 --- a/apps/emqx_dashboard/test/emqx_dashboard_api_test_helpers.erl +++ b/apps/emqx_dashboard/test/emqx_dashboard_api_test_helpers.erl @@ -28,7 +28,8 @@ multipart_formdata_request/4, host/0, uri/0, - uri/1 + uri/1, + uri/2 ]). -define(HOST, "http://127.0.0.1:18083"). @@ -96,10 +97,15 @@ request(Username, Method, Url, Body) -> host() -> ?HOST. -uri() -> uri([]). +uri() -> + uri([]). + uri(Parts) when is_list(Parts) -> + uri(host(), Parts). + +uri(Host, Parts) when is_list(Host), is_list(Parts) -> NParts = [E || E <- Parts], - host() ++ "/" ++ to_list(filename:join([?BASE_PATH, ?API_VERSION | NParts])). + Host ++ "/" ++ to_list(filename:join([?BASE_PATH, ?API_VERSION | NParts])). 
auth_header(Username) -> Password = <<"public">>, diff --git a/apps/emqx_eviction_agent/test/emqx_eviction_agent_test_helpers.erl b/apps/emqx_eviction_agent/test/emqx_eviction_agent_test_helpers.erl index 130a2628a..860436f67 100644 --- a/apps/emqx_eviction_agent/test/emqx_eviction_agent_test_helpers.erl +++ b/apps/emqx_eviction_agent/test/emqx_eviction_agent_test_helpers.erl @@ -85,7 +85,7 @@ start_cluster(NamesWithPorts, Apps, Env) -> NamesWithPorts ), Opts0 = [ - {env, [{emqx, boot_modules, [broker, listeners]}] ++ Env}, + {env, Env}, {apps, Apps}, {conf, [{[listeners, Proto, default, enable], false} || Proto <- [ssl, ws, wss]] ++ diff --git a/apps/emqx_gateway/rebar.config b/apps/emqx_gateway/rebar.config index 2340a2dd8..e78c8a44b 100644 --- a/apps/emqx_gateway/rebar.config +++ b/apps/emqx_gateway/rebar.config @@ -2,5 +2,6 @@ {erl_opts, [debug_info]}. {deps, [ {emqx, {path, "../emqx"}}, - {emqx_utils, {path, "../emqx_utils"}} + {emqx_utils, {path, "../emqx_utils"}}, + {emqx_authn, {path, "../emqx_authn"}} ]}. diff --git a/apps/emqx_gateway/src/emqx_gateway_conf.erl b/apps/emqx_gateway/src/emqx_gateway_conf.erl index 2a64a6914..480633652 100644 --- a/apps/emqx_gateway/src/emqx_gateway_conf.erl +++ b/apps/emqx_gateway/src/emqx_gateway_conf.erl @@ -71,7 +71,7 @@ ]). -include_lib("emqx/include/logger.hrl"). --include_lib("emqx/include/emqx_authentication.hrl"). +-include_lib("emqx_authn/include/emqx_authentication.hrl"). -define(AUTHN_BIN, ?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_BINARY). -type atom_or_bin() :: atom() | binary(). diff --git a/apps/emqx_gateway/src/emqx_gateway_http.erl b/apps/emqx_gateway/src/emqx_gateway_http.erl index 2186ac3d7..997539e7d 100644 --- a/apps/emqx_gateway/src/emqx_gateway_http.erl +++ b/apps/emqx_gateway/src/emqx_gateway_http.erl @@ -19,7 +19,7 @@ -include("include/emqx_gateway.hrl"). -include_lib("emqx/include/logger.hrl"). --include_lib("emqx/include/emqx_authentication.hrl"). 
+-include_lib("emqx_authn/include/emqx_authentication.hrl"). -define(AUTHN, ?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_ATOM). diff --git a/apps/emqx_gateway/src/emqx_gateway_schema.erl b/apps/emqx_gateway/src/emqx_gateway_schema.erl index b43f4ba98..8d9cc5a11 100644 --- a/apps/emqx_gateway/src/emqx_gateway_schema.erl +++ b/apps/emqx_gateway/src/emqx_gateway_schema.erl @@ -24,9 +24,9 @@ -dialyzer(no_unused). -dialyzer(no_fail_call). --include_lib("emqx/include/emqx_authentication.hrl"). -include_lib("hocon/include/hoconsc.hrl"). -include_lib("typerefl/include/types.hrl"). +-include_lib("emqx_authn/include/emqx_authentication.hrl"). -type ip_port() :: tuple() | integer(). -type duration() :: non_neg_integer(). diff --git a/apps/emqx_gcp_device/test/emqx_gcp_device_SUITE.erl b/apps/emqx_gcp_device/test/emqx_gcp_device_SUITE.erl index 5f286d629..4c8e89551 100644 --- a/apps/emqx_gcp_device/test/emqx_gcp_device_SUITE.erl +++ b/apps/emqx_gcp_device/test/emqx_gcp_device_SUITE.erl @@ -17,15 +17,19 @@ all() -> emqx_common_test_helpers:all(?MODULE). init_per_suite(Config) -> - ok = emqx_common_test_helpers:start_apps([emqx_conf, emqx_authn, emqx_retainer, emqx_gcp_device]), - Config. + Apps = emqx_cth_suite:start( + [emqx, emqx_conf, emqx_authn, emqx_gcp_device, {emqx_retainer, "retainer {enable = true}"}], + #{ + work_dir => ?config(priv_dir, Config) + } + ), + [{apps, Apps} | Config]. end_per_suite(Config) -> - _ = emqx_common_test_helpers:stop_apps([emqx_authn, emqx_retainer, emqx_gcp_device]), - Config. + ok = emqx_cth_suite:stop(?config(apps, Config)), + ok. 
init_per_testcase(_TestCase, Config) -> - {ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000), emqx_authn_test_lib:delete_authenticators( [authentication], ?GLOBAL diff --git a/apps/emqx_gcp_device/test/emqx_gcp_device_api_SUITE.erl b/apps/emqx_gcp_device/test/emqx_gcp_device_api_SUITE.erl index 238f99445..4ed34344e 100644 --- a/apps/emqx_gcp_device/test/emqx_gcp_device_api_SUITE.erl +++ b/apps/emqx_gcp_device/test/emqx_gcp_device_api_SUITE.erl @@ -14,32 +14,34 @@ -include_lib("emqx/include/emqx.hrl"). -define(PATH, [authentication]). --define(BASE_CONF, << - "" - "\n" - "retainer {\n" - " enable = true\n" - "}" - "" ->>). all() -> emqx_common_test_helpers:all(?MODULE). init_per_suite(Config) -> - ok = emqx_config:init_load(emqx_retainer_schema, ?BASE_CONF), - ok = emqx_common_test_helpers:start_apps([emqx_gcp_device, emqx_authn, emqx_conf, emqx_retainer]), - emqx_dashboard_api_test_helpers:set_default_config(), - emqx_mgmt_api_test_util:init_suite(), - Config. + Apps = emqx_cth_suite:start( + [ + emqx, + emqx_conf, + emqx_authn, + {emqx_retainer, "retainer {enable = true}"}, + emqx_management, + {emqx_dashboard, "dashboard.listeners.http { enable = true, bind = 18083 }"}, + emqx_gcp_device + ], + #{ + work_dir => ?config(priv_dir, Config) + } + ), + _ = emqx_common_test_http:create_default_app(), + [{apps, Apps} | Config]. end_per_suite(Config) -> - emqx_mgmt_api_test_util:end_suite(), - _ = emqx_common_test_helpers:stop_apps([emqx_authn, emqx_retainer, emqx_gcp_device]), + _ = emqx_common_test_http:delete_default_app(), + ok = emqx_cth_suite:stop(?config(apps, Config)), Config. 
init_per_testcase(_TestCase, Config) -> - {ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000), emqx_authn_test_lib:delete_authenticators( [authentication], ?GLOBAL diff --git a/apps/emqx_gcp_device/test/emqx_gcp_device_authn_SUITE.erl b/apps/emqx_gcp_device/test/emqx_gcp_device_authn_SUITE.erl index 8c3f8e0fa..23e69f4c5 100644 --- a/apps/emqx_gcp_device/test/emqx_gcp_device_authn_SUITE.erl +++ b/apps/emqx_gcp_device/test/emqx_gcp_device_authn_SUITE.erl @@ -23,7 +23,9 @@ all() -> init_per_suite(Config0) -> ok = snabbkaffe:start_trace(), - emqx_common_test_helpers:start_apps([emqx_conf, emqx_authn, emqx_gcp_device]), + Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn, emqx_gcp_device], #{ + work_dir => ?config(priv_dir, Config0) + }), ValidExpirationTime = erlang:system_time(second) + 3600, ValidJWT = generate_jws(ValidExpirationTime), ExpiredJWT = generate_jws(0), @@ -35,16 +37,16 @@ init_per_suite(Config0) -> {valid_jwt, ValidJWT}, {expired_jwt, ExpiredJWT}, {valid_client, ValidClient}, - {expired_client, ExpiredClient} + {expired_client, ExpiredClient}, + {apps, Apps} | Config0 ]. -end_per_suite(_) -> - _ = emqx_common_test_helpers:stop_apps([emqx_authn, emqx_gcp_device]), +end_per_suite(Config) -> + ok = emqx_cth_suite:stop(?config(apps, Config)), ok. init_per_testcase(_, Config) -> - {ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000), Config. end_per_testcase(_Case, Config) -> diff --git a/apps/emqx_ldap/src/emqx_ldap.erl b/apps/emqx_ldap/src/emqx_ldap.erl index 85ba73df9..d14ddf97d 100644 --- a/apps/emqx_ldap/src/emqx_ldap.erl +++ b/apps/emqx_ldap/src/emqx_ldap.erl @@ -70,7 +70,12 @@ fields(config) -> example => <<"(& (objectClass=mqttUser) (uid=${username}))">>, validator => fun emqx_schema:non_empty_string/1 } - )} + )}, + {request_timeout, + ?HOCON(emqx_schema:timeout_duration_ms(), #{ + desc => ?DESC(request_timeout), + default => <<"5s">> + })} ] ++ emqx_connector_schema_lib:ssl_fields(). 
server() -> @@ -145,19 +150,31 @@ on_get_status(_InstId, #{pool_name := PoolName} = _State) -> true -> connected; false -> - connecting + %% Note: here can only return `disconnected` not `connecting` + %% because the LDAP socket/connection can't be reused + %% searching on a died socket will never return until timeout + disconnected end. do_get_status(Conn) -> - erlang:is_process_alive(Conn). + %% search with an invalid base object + %% if the server is down, the result is {error, ldap_closed} + %% otherwise is {error, invalidDNSyntax/timeout} + {error, ldap_closed} =/= + eldap:search(Conn, [{base, "checkalive"}, {filter, eldap:'approxMatch'("", "")}]). %% =================================================================== connect(Options) -> - #{hostname := Host, username := Username, password := Password} = + #{ + hostname := Host, + username := Username, + password := Password, + request_timeout := RequestTimeout + } = Conf = proplists:get_value(options, Options), OpenOpts = maps:to_list(maps:with([port, sslopts], Conf)), - case eldap:open([Host], [{log, fun log/3} | OpenOpts]) of + case eldap:open([Host], [{log, fun log/3}, {timeout, RequestTimeout} | OpenOpts]) of {ok, Handle} = Ret -> case eldap:simple_bind(Handle, Username, Password) of ok -> Ret; diff --git a/apps/emqx_ldap/src/emqx_ldap_authn.erl b/apps/emqx_ldap/src/emqx_ldap_authn.erl index d814e2aae..c18ce3fc7 100644 --- a/apps/emqx_ldap/src/emqx_ldap_authn.erl +++ b/apps/emqx_ldap/src/emqx_ldap_authn.erl @@ -109,6 +109,8 @@ destroy(#{resource_id := ResourceId}) -> authenticate(#{auth_method := _}, _) -> ignore; +authenticate(#{password := undefined}, _) -> + {error, bad_username_or_password}; authenticate( #{password := Password} = Credential, #{ @@ -249,7 +251,7 @@ verify_password(Algorithm, LDAPPasswordType, LDAPPassword, Salt, Position, Passw true -> {ok, is_superuser(Entry, State)}; _ -> - {error, invalid_password} + {error, bad_username_or_password} end. 
is_superuser(Entry, #{is_superuser_attribute := Attr} = _State) -> diff --git a/apps/emqx_ldap/test/emqx_ldap_SUITE.erl b/apps/emqx_ldap/test/emqx_ldap_SUITE.erl index 8a90f6f02..79c549c22 100644 --- a/apps/emqx_ldap/test/emqx_ldap_SUITE.erl +++ b/apps/emqx_ldap/test/emqx_ldap_SUITE.erl @@ -9,12 +9,13 @@ -include_lib("emqx_connector/include/emqx_connector.hrl"). -include_lib("eunit/include/eunit.hrl"). --include_lib("emqx/include/emqx.hrl"). -include_lib("stdlib/include/assert.hrl"). -include_lib("eldap/include/eldap.hrl"). --define(LDAP_HOST, "ldap"). -define(LDAP_RESOURCE_MOD, emqx_ldap). +-define(PROXY_HOST, "toxiproxy"). +-define(PROXY_PORT, 8474). +-define(LDAP_HOST, ?PROXY_HOST). all() -> [ @@ -53,9 +54,11 @@ end_per_suite(_Config) -> _ = application:stop(emqx_connector). init_per_testcase(_, Config) -> + emqx_common_test_helpers:reset_proxy(?PROXY_HOST, ?PROXY_PORT), Config. end_per_testcase(_, _Config) -> + emqx_common_test_helpers:reset_proxy(?PROXY_HOST, ?PROXY_PORT), ok. % %%------------------------------------------------------------------------------ @@ -142,6 +145,31 @@ perform_lifecycle_check(ResourceId, InitialConfig) -> % Should not even be able to get the resource data out of ets now unlike just stopping. ?assertEqual({error, not_found}, emqx_resource:get_instance(ResourceId)). 
+t_get_status(Config) -> + ResourceId = <<"emqx_ldap_status">>, + ProxyName = proxy_name(Config), + + {ok, #{config := CheckedConfig}} = emqx_resource:check_config( + ?LDAP_RESOURCE_MOD, ldap_config(Config) + ), + {ok, _} = emqx_resource:create_local( + ResourceId, + ?CONNECTOR_RESOURCE_GROUP, + ?LDAP_RESOURCE_MOD, + CheckedConfig, + #{} + ), + + ?assertEqual({ok, connected}, emqx_resource:health_check(ResourceId)), + emqx_common_test_helpers:with_failure(down, ProxyName, ?PROXY_HOST, ?PROXY_PORT, fun() -> + ?assertMatch( + {ok, Status} when Status =:= disconnected, + emqx_resource:health_check(ResourceId) + ) + end), + ?assertEqual(ok, emqx_resource:remove_local(ResourceId)), + ok. + % %%------------------------------------------------------------------------------ % %% Helpers % %%------------------------------------------------------------------------------ @@ -190,5 +218,12 @@ ssl(Config) -> "ssl.enable=false"; ssl -> "ssl.enable=true\n" - "ssl.cacertfile=\"etc/openldap/cacert.pem\"" + "ssl.cacertfile=\"/etc/certs/ca.crt\"" end. + +proxy_name(tcp) -> + "ldap_tcp"; +proxy_name(ssl) -> + "ldap_ssl"; +proxy_name(Config) -> + proxy_name(proplists:get_value(group, Config, tcp)). diff --git a/apps/emqx_ldap/test/emqx_ldap_authn_SUITE.erl b/apps/emqx_ldap/test/emqx_ldap_authn_SUITE.erl index d3b7a90f4..40501456e 100644 --- a/apps/emqx_ldap/test/emqx_ldap_authn_SUITE.erl +++ b/apps/emqx_ldap/test/emqx_ldap_authn_SUITE.erl @@ -8,6 +8,7 @@ -include_lib("emqx_authn/include/emqx_authn.hrl"). -include_lib("eunit/include/eunit.hrl"). +-include_lib("common_test/include/ct.hrl"). -define(LDAP_HOST, "ldap"). -define(LDAP_DEFAULT_PORT, 389). @@ -20,7 +21,6 @@ all() -> emqx_common_test_helpers:all(?MODULE). 
init_per_testcase(_, Config) -> - {ok, _} = emqx_cluster_rpc:start_link(node(), emqx_cluster_rpc, 1000), emqx_authentication:initialize_authentication(?GLOBAL, []), emqx_authn_test_lib:delete_authenticators( [authentication], @@ -32,8 +32,9 @@ init_per_suite(Config) -> _ = application:load(emqx_conf), case emqx_common_test_helpers:is_tcp_server_available(?LDAP_HOST, ?LDAP_DEFAULT_PORT) of true -> - ok = emqx_common_test_helpers:start_apps([emqx_authn]), - ok = start_apps([emqx_resource]), + Apps = emqx_cth_suite:start([emqx, emqx_conf, emqx_authn], #{ + work_dir => ?config(priv_dir, Config) + }), {ok, _} = emqx_resource:create_local( ?LDAP_RESOURCE, ?RESOURCE_GROUP, @@ -41,19 +42,18 @@ init_per_suite(Config) -> ldap_config(), #{} ), - Config; + [{apps, Apps} | Config]; false -> {skip, no_ldap} end. -end_per_suite(_Config) -> +end_per_suite(Config) -> emqx_authn_test_lib:delete_authenticators( [authentication], ?GLOBAL ), ok = emqx_resource:remove_local(?LDAP_RESOURCE), - ok = stop_apps([emqx_resource]), - ok = emqx_common_test_helpers:stop_apps([emqx_authn]). + ok = emqx_cth_suite:stop(?config(apps, Config)). %%------------------------------------------------------------------------------ %% Tests @@ -237,7 +237,7 @@ user_seeds() -> %% Not exists New(<<"notexists">>, <<"notexists">>, {error, not_authorized}), %% Wrong Password - New(<<"mqttuser0001">>, <<"wrongpassword">>, {error, invalid_password}), + New(<<"mqttuser0001">>, <<"wrongpassword">>, {error, bad_username_or_password}), %% Disabled New(<<"mqttuser0006">>, <<"mqttuser0006">>, {error, user_disabled}), %% IsSuperuser diff --git a/apps/emqx_machine/rebar.config b/apps/emqx_machine/rebar.config index 53b7bec13..8953b54a7 100644 --- a/apps/emqx_machine/rebar.config +++ b/apps/emqx_machine/rebar.config @@ -3,6 +3,7 @@ {deps, [ {emqx, {path, "../emqx"}}, {emqx_dashboard, {path, "../emqx_dashboard"}}, + {emqx_conf, {path, "../emqx_conf"}}, {emqx_utils, {path, "../emqx_utils"}} ]}. 
diff --git a/apps/emqx_machine/src/emqx_machine_boot.erl b/apps/emqx_machine/src/emqx_machine_boot.erl index 481927765..a27c2156d 100644 --- a/apps/emqx_machine/src/emqx_machine_boot.erl +++ b/apps/emqx_machine/src/emqx_machine_boot.erl @@ -69,6 +69,8 @@ stop_apps() -> ?SLOG(notice, #{msg => "stopping_emqx_apps"}), _ = emqx_alarm_handler:unload(), ok = emqx_conf_app:unset_config_loaded(), + %% Mute otel deps application. + _ = emqx_otel:stop_otel(), lists:foreach(fun stop_one_app/1, lists:reverse(sorted_reboot_apps())). %% Those port apps are terminated after the main apps diff --git a/apps/emqx_machine/test/emqx_machine_SUITE.erl b/apps/emqx_machine/test/emqx_machine_SUITE.erl index bd18c67aa..224732d1f 100644 --- a/apps/emqx_machine/test/emqx_machine_SUITE.erl +++ b/apps/emqx_machine/test/emqx_machine_SUITE.erl @@ -42,7 +42,7 @@ init_per_suite(Config) -> %% Unload emqx_authz to avoid reboot this application %% application:unload(emqx_authz), - emqx_common_test_helpers:start_apps([emqx_conf]), + emqx_common_test_helpers:start_apps([emqx_conf, emqx_opentelemetry]), application:set_env(emqx_machine, applications, [ emqx_prometheus, emqx_modules, @@ -56,12 +56,13 @@ init_per_suite(Config) -> emqx_exhook, emqx_authn, emqx_authz, - emqx_plugin + emqx_plugin, + emqx_opentelemetry ]), Config. end_per_suite(_Config) -> - emqx_common_test_helpers:stop_apps([]). + emqx_common_test_helpers:stop_apps([emqx_opentelemetry, emqx_conf]). 
init_per_testcase(t_custom_shard_transports, Config) -> OldConfig = application:get_env(emqx_machine, custom_shard_transports), diff --git a/apps/emqx_management/src/emqx_mgmt_api_configs.erl b/apps/emqx_management/src/emqx_mgmt_api_configs.erl index 5edf8c564..5fe08a0e4 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_configs.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_configs.erl @@ -344,7 +344,7 @@ configs(get, #{query_string := QueryStr, headers := Headers}, _Req) -> {error, _} = Error -> {400, #{code => 'INVALID_ACCEPT', message => ?ERR_MSG(Error)}} end; configs(put, #{body := Conf, query_string := #{<<"mode">> := Mode}}, _Req) -> - case emqx_conf_cli:load_config(Conf, Mode) of + case emqx_conf_cli:load_config(Conf, #{mode => Mode, log => none}) of ok -> {200}; {error, Msg} -> {400, #{<<"content-type">> => <<"text/plain">>}, Msg} end. diff --git a/apps/emqx_management/src/emqx_mgmt_api_plugins.erl b/apps/emqx_management/src/emqx_mgmt_api_plugins.erl index 3db0c42fb..c89ee202e 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_plugins.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_plugins.erl @@ -166,7 +166,10 @@ schema("/plugins/:name/move") -> tags => ?TAGS, parameters => [hoconsc:ref(name)], 'requestBody' => move_request_body(), - responses => #{200 => <<"OK">>} + responses => #{ + 200 => <<"OK">>, + 400 => emqx_dashboard_swagger:error_codes(['MOVE_FAILED'], <<"Move failed">>) + } } }. 
@@ -420,7 +423,7 @@ update_boot_order(post, #{bindings := #{name := Name}, body := Body}) -> {error, Reason} -> {400, #{code => 'BAD_POSITION', message => Reason}}; Position -> - case emqx_plugins:ensure_enabled(Name, Position) of + case emqx_plugins:ensure_enabled(Name, Position, _ConfLocation = global) of ok -> {200}; {error, Reason} -> diff --git a/apps/emqx_management/src/emqx_mgmt_cli.erl b/apps/emqx_management/src/emqx_mgmt_cli.erl index 8564653a4..72498fe12 100644 --- a/apps/emqx_management/src/emqx_mgmt_cli.erl +++ b/apps/emqx_management/src/emqx_mgmt_cli.erl @@ -22,9 +22,6 @@ -include_lib("emqx/include/emqx_mqtt.hrl"). -include_lib("emqx/include/logger.hrl"). --include("emqx_mgmt.hrl"). - --define(PRINT_CMD(Cmd, Descr), io:format("~-48s# ~ts~n", [Cmd, Descr])). -define(DATA_BACKUP_OPTS, #{print_fun => fun emqx_ctl:print/2}). -export([load/0]). @@ -49,20 +46,6 @@ data/1 ]). --define(PROC_INFOKEYS, [ - status, - memory, - message_queue_len, - total_heap_size, - heap_size, - stack_size, - reductions -]). - --define(MAX_LIMIT, 10000). - --define(APP, emqx). - -spec load() -> ok. load() -> Cmds = [Fun || {Fun, _} <- ?MODULE:module_info(exports), is_cmd(Fun)], @@ -197,9 +180,12 @@ if_client(ClientId, Fun) -> %% @doc Topics Command topics(["list"]) -> - dump(?ROUTE_TAB, emqx_topic); + emqx_router:foldr_routes( + fun(Route, Acc) -> [print({emqx_topic, Route}) | Acc] end, + [] + ); topics(["show", Topic]) -> - Routes = ets:lookup(?ROUTE_TAB, bin(Topic)), + Routes = emqx_router:lookup_routes(Topic), [print({emqx_topic, Route}) || Route <- Routes]; topics(_) -> emqx_ctl:usage([ diff --git a/apps/emqx_management/test/emqx_mgmt_api_plugins_SUITE.erl b/apps/emqx_management/test/emqx_mgmt_api_plugins_SUITE.erl index ba613abc4..61ed94bdc 100644 --- a/apps/emqx_management/test/emqx_mgmt_api_plugins_SUITE.erl +++ b/apps/emqx_management/test/emqx_mgmt_api_plugins_SUITE.erl @@ -19,11 +19,14 @@ -compile(nowarn_export_all). -include_lib("eunit/include/eunit.hrl"). 
+-include_lib("common_test/include/ct.hrl"). -define(EMQX_PLUGIN_TEMPLATE_NAME, "emqx_plugin_template"). -define(EMQX_PLUGIN_TEMPLATE_VSN, "5.0.0"). -define(PACKAGE_SUFFIX, ".tar.gz"). +-define(CLUSTER_API_SERVER(PORT), ("http://127.0.0.1:" ++ (integer_to_list(PORT)))). + all() -> emqx_common_test_helpers:all(?MODULE). @@ -48,6 +51,25 @@ end_per_suite(Config) -> emqx_mgmt_api_test_util:end_suite([emqx_plugins, emqx_conf]), ok. +init_per_testcase(t_cluster_update_order = TestCase, Config0) -> + Config = [{api_port, 18085} | Config0], + Cluster = [Node1 | _] = cluster(TestCase, Config), + {ok, API} = init_api(Node1), + [ + {api, API}, + {cluster, Cluster} + | Config + ]; +init_per_testcase(_TestCase, Config) -> + Config. + +end_per_testcase(t_cluster_update_order, Config) -> + Cluster = ?config(cluster, Config), + emqx_cth_cluster:stop(Cluster), + ok; +end_per_testcase(_TestCase, _Config) -> + ok. + t_plugins(Config) -> DemoShDir = proplists:get_value(demo_sh_dir, Config), PackagePath = get_demo_plugin_package(DemoShDir), @@ -141,9 +163,83 @@ t_delete_non_existing(_Config) -> ), ok. -list_plugins() -> - Path = emqx_mgmt_api_test_util:api_path(["plugins"]), - case emqx_mgmt_api_test_util:request_api(get, Path) of +t_cluster_update_order(Config) -> + DemoShDir = proplists:get_value(demo_sh_dir, Config), + PackagePath1 = get_demo_plugin_package(DemoShDir), + NameVsn1 = filename:basename(PackagePath1, ?PACKAGE_SUFFIX), + Name2Str = ?EMQX_PLUGIN_TEMPLATE_NAME ++ "_a", + NameVsn2 = Name2Str ++ "-" ++ ?EMQX_PLUGIN_TEMPLATE_VSN, + PackagePath2 = create_renamed_package(PackagePath1, NameVsn2), + Name1 = list_to_binary(?EMQX_PLUGIN_TEMPLATE_NAME), + Name2 = list_to_binary(Name2Str), + + ok = install_plugin(Config, PackagePath1), + ok = install_plugin(Config, PackagePath2), + %% to get them configured... 
+ {ok, _} = update_plugin(Config, NameVsn1, "start"), + {ok, _} = update_plugin(Config, NameVsn2, "start"), + + ?assertMatch( + {ok, [ + #{<<"name">> := Name1}, + #{<<"name">> := Name2} + ]}, + list_plugins(Config) + ), + + ct:pal("moving to rear"), + ?assertMatch({ok, _}, update_boot_order(NameVsn1, #{position => rear}, Config)), + ?assertMatch( + {ok, [ + #{<<"name">> := Name2}, + #{<<"name">> := Name1} + ]}, + list_plugins(Config) + ), + + ct:pal("moving to front"), + ?assertMatch({ok, _}, update_boot_order(NameVsn1, #{position => front}, Config)), + ?assertMatch( + {ok, [ + #{<<"name">> := Name1}, + #{<<"name">> := Name2} + ]}, + list_plugins(Config) + ), + + ct:pal("moving after"), + NameVsn2Bin = list_to_binary(NameVsn2), + ?assertMatch( + {ok, _}, + update_boot_order(NameVsn1, #{position => <<"after:", NameVsn2Bin/binary>>}, Config) + ), + ?assertMatch( + {ok, [ + #{<<"name">> := Name2}, + #{<<"name">> := Name1} + ]}, + list_plugins(Config) + ), + + ct:pal("moving before"), + ?assertMatch( + {ok, _}, + update_boot_order(NameVsn1, #{position => <<"before:", NameVsn2Bin/binary>>}, Config) + ), + ?assertMatch( + {ok, [ + #{<<"name">> := Name1}, + #{<<"name">> := Name2} + ]}, + list_plugins(Config) + ), + + ok. + +list_plugins(Config) -> + #{host := Host, auth := Auth} = get_host_and_auth(Config), + Path = emqx_mgmt_api_test_util:api_path(Host, ["plugins"]), + case emqx_mgmt_api_test_util:request_api(get, Path, Auth) of {ok, Apps} -> {ok, emqx_utils_json:decode(Apps, [return_maps])}; Error -> Error end. @@ -172,16 +268,47 @@ install_plugin(FilePath) -> Error -> Error end. +install_plugin(Config, FilePath) -> + #{host := Host, auth := Auth} = get_host_and_auth(Config), + Path = emqx_mgmt_api_test_util:api_path(Host, ["plugins", "install"]), + case + emqx_mgmt_api_test_util:upload_request( + Path, + FilePath, + "plugin", + <<"application/gzip">>, + [], + Auth + ) + of + {ok, {{"HTTP/1.1", 200, "OK"}, _Headers, <<>>}} -> ok; + Error -> Error + end. 
+ update_plugin(Name, Action) -> Path = emqx_mgmt_api_test_util:api_path(["plugins", Name, Action]), emqx_mgmt_api_test_util:request_api(put, Path). -update_boot_order(Name, MoveBody) -> - Auth = emqx_mgmt_api_test_util:auth_header_(), - Path = emqx_mgmt_api_test_util:api_path(["plugins", Name, "move"]), - case emqx_mgmt_api_test_util:request_api(post, Path, "", Auth, MoveBody) of - {ok, Res} -> {ok, emqx_utils_json:decode(Res, [return_maps])}; - Error -> Error +update_plugin(Config, Name, Action) when is_list(Config) -> + #{host := Host, auth := Auth} = get_host_and_auth(Config), + Path = emqx_mgmt_api_test_util:api_path(Host, ["plugins", Name, Action]), + emqx_mgmt_api_test_util:request_api(put, Path, Auth). + +update_boot_order(Name, MoveBody, Config) -> + #{host := Host, auth := Auth} = get_host_and_auth(Config), + Path = emqx_mgmt_api_test_util:api_path(Host, ["plugins", Name, "move"]), + Opts = #{return_all => true}, + case emqx_mgmt_api_test_util:request_api(post, Path, "", Auth, MoveBody, Opts) of + {ok, Res} -> + Resp = + case emqx_utils_json:safe_decode(Res, [return_maps]) of + {ok, Decoded} -> Decoded; + {error, _} -> Res + end, + ct:pal("update_boot_order response:\n ~p", [Resp]), + {ok, Resp}; + Error -> + Error end. uninstall_plugin(Name) -> @@ -218,3 +345,51 @@ update_release_json(["release.json"], FileContent, NewName) -> emqx_utils_json:encode(ContentMap#{<<"name">> => NewName}); update_release_json(_FileName, FileContent, _NewName) -> FileContent. 
+ +cluster(TestCase, Config) -> + APIPort = ?config(api_port, Config), + AppSpecs = app_specs(Config), + Node1Apps = AppSpecs ++ [app_spec_dashboard(APIPort)], + Node2Apps = AppSpecs, + Node1Name = emqx_mgmt_api_plugins_SUITE1, + Node1 = emqx_cth_cluster:node_name(Node1Name), + emqx_cth_cluster:start( + [ + {Node1Name, #{role => core, apps => Node1Apps, join_to => Node1}}, + {emqx_mgmt_api_plugins_SUITE2, #{role => core, apps => Node2Apps, join_to => Node1}} + ], + #{work_dir => filename:join(?config(priv_dir, Config), TestCase)} + ). + +app_specs(_Config) -> + [ + emqx, + emqx_conf, + emqx_management, + emqx_plugins + ]. + +app_spec_dashboard(APIPort) -> + {emqx_dashboard, #{ + config => + #{ + dashboard => + #{ + listeners => + #{ + http => + #{bind => APIPort} + } + } + } + }}. + +init_api(Node) -> + erpc:call(Node, emqx_common_test_http, create_default_app, []). + +get_host_and_auth(Config) when is_list(Config) -> + API = ?config(api, Config), + APIPort = ?config(api_port, Config), + Host = ?CLUSTER_API_SERVER(APIPort), + Auth = emqx_common_test_http:auth_header(API), + #{host => Host, auth => Auth}. diff --git a/apps/emqx_management/test/emqx_mgmt_api_test_util.erl b/apps/emqx_management/test/emqx_mgmt_api_test_util.erl index d511acf4d..e6c3bb3d6 100644 --- a/apps/emqx_management/test/emqx_mgmt_api_test_util.erl +++ b/apps/emqx_management/test/emqx_mgmt_api_test_util.erl @@ -61,6 +61,9 @@ request(Method, Url, Body) -> uri(Parts) -> emqx_dashboard_api_test_helpers:uri(Parts). +uri(Host, Parts) -> + emqx_dashboard_api_test_helpers:uri(Host, Parts). + %% compatible_mode will return as same as 'emqx_dashboard_api_test_helpers:request' request_api_with_body(Method, Url, Body) -> Opts = #{compatible_mode => true, httpc_req_opts => [{body_format, binary}]}, @@ -144,9 +147,15 @@ build_http_header(X) when is_list(X) -> build_http_header(X) -> [X]. +default_server() -> + ?SERVER. + api_path(Parts) -> join_http_path([?SERVER, ?BASE_PATH | Parts]). 
+api_path(Host, Parts) -> + join_http_path([Host, ?BASE_PATH | Parts]). + api_path_without_base_path(Parts) -> join_http_path([?SERVER | Parts]). @@ -193,9 +202,13 @@ upload_request(URL, FilePath, Name, MimeType, RequestData, AuthorizationToken) - ContentLength = integer_to_list(length(binary_to_list(RequestBody))), Headers = [ {"Content-Length", ContentLength}, - case AuthorizationToken =/= undefined of - true -> {"Authorization", "Bearer " ++ binary_to_list(AuthorizationToken)}; - false -> {} + case AuthorizationToken of + _ when is_tuple(AuthorizationToken) -> + AuthorizationToken; + _ when is_binary(AuthorizationToken) -> + {"Authorization", "Bearer " ++ binary_to_list(AuthorizationToken)}; + _ -> + {} end ], HTTPOptions = [], diff --git a/apps/emqx_opentelemetry/src/emqx_otel.erl b/apps/emqx_opentelemetry/src/emqx_otel.erl index 0b0e16cab..aa0c7d2d4 100644 --- a/apps/emqx_opentelemetry/src/emqx_otel.erl +++ b/apps/emqx_opentelemetry/src/emqx_otel.erl @@ -17,15 +17,34 @@ -module(emqx_otel). -include_lib("emqx/include/logger.hrl"). --export([start_link/1]). +-export([start_otel/1, stop_otel/0]). -export([get_cluster_gauge/1, get_stats_gauge/1, get_vm_gauge/1, get_metric_counter/1]). +-export([start_link/1]). -export([init/1, handle_continue/2, handle_call/3, handle_cast/2, handle_info/2, terminate/2]). +-define(SUPERVISOR, emqx_otel_sup). + +start_otel(Conf) -> + Spec = emqx_otel_sup:worker_spec(?MODULE, Conf), + assert_started(supervisor:start_child(?SUPERVISOR, Spec)). + +stop_otel() -> + ok = cleanup(), + case erlang:whereis(?SUPERVISOR) of + undefined -> + ok; + Pid -> + case supervisor:terminate_child(Pid, ?MODULE) of + ok -> supervisor:delete_child(Pid, ?MODULE); + {error, not_found} -> ok; + Error -> Error + end + end. + start_link(Conf) -> gen_server:start_link({local, ?MODULE}, ?MODULE, Conf, []). init(Conf) -> - erlang:process_flag(trap_exit, true), {ok, #{}, {continue, {setup, Conf}}}. 
handle_continue({setup, Conf}, State) -> @@ -42,19 +61,19 @@ handle_info(_Msg, State) -> {noreply, State}. terminate(_Reason, _State) -> - cleanup(), ok. setup(Conf = #{enable := true}) -> ensure_apps(Conf), create_metric_views(); setup(_Conf) -> - cleanup(), + ok = cleanup(), ok. ensure_apps(Conf) -> #{exporter := #{interval := ExporterInterval}} = Conf, {ok, _} = application:ensure_all_started(opentelemetry_exporter), + {ok, _} = application:ensure_all_started(opentelemetry), _ = application:stop(opentelemetry_experimental), ok = application:set_env( opentelemetry_experimental, @@ -74,6 +93,7 @@ ensure_apps(Conf) -> ok. cleanup() -> + _ = application:stop(opentelemetry), _ = application:stop(opentelemetry_experimental), _ = application:stop(opentelemetry_experimental_api), _ = application:stop(opentelemetry_exporter), @@ -87,10 +107,32 @@ create_metric_views() -> create_gauge(Meter, VmGauge, fun ?MODULE:get_vm_gauge/1), ClusterGauge = [{'node.running', 0}, {'node.stopped', 0}], create_gauge(Meter, ClusterGauge, fun ?MODULE:get_cluster_gauge/1), - Metrics = lists:map(fun({K, V}) -> {K, V, unit(K)} end, emqx_metrics:all()), + Metrics0 = filter_olp_metrics(emqx_metrics:all()), + Metrics = lists:map(fun({K, V}) -> {to_metric_name(K), V, unit(K)} end, Metrics0), create_counter(Meter, Metrics, fun ?MODULE:get_metric_counter/1), ok. +filter_olp_metrics(Metrics) -> + case emqx_config_zones:is_olp_enabled() of + true -> + Metrics; + false -> + OlpMetrics = emqx_metrics:olp_metrics(), + lists:filter( + fun({K, _}) -> + not lists:member(K, OlpMetrics) + end, + Metrics + ) + end. + +to_metric_name('messages.dropped.await_pubrel_timeout') -> + 'messages.dropped.expired'; +to_metric_name('packets.connect.received') -> + 'packets.connect'; +to_metric_name(Name) -> + Name. 
+ unit(K) -> case lists:member(K, bytes_metrics()) of true -> kb; @@ -205,3 +247,8 @@ create_counter(Meter, Counters, CallBack) -> normalize_name(Name) -> list_to_existing_atom(lists:flatten(string:replace(atom_to_list(Name), "_", ".", all))). + +assert_started({ok, _Pid}) -> ok; +assert_started({ok, _Pid, _Info}) -> ok; +assert_started({error, {already_started, _Pid}}) -> ok; +assert_started({error, Reason}) -> {error, Reason}. diff --git a/apps/emqx_opentelemetry/src/emqx_otel_config.erl b/apps/emqx_opentelemetry/src/emqx_otel_config.erl index 3df535890..45a38e134 100644 --- a/apps/emqx_opentelemetry/src/emqx_otel_config.erl +++ b/apps/emqx_opentelemetry/src/emqx_otel_config.erl @@ -52,7 +52,7 @@ post_config_update(_ConfPath, _Req, _NewConf, _OldConf, _AppEnvs) -> ok. ensure_otel(#{enable := true} = Conf) -> - _ = emqx_otel_sup:stop_otel(), - emqx_otel_sup:start_otel(Conf); + _ = emqx_otel:stop_otel(), + emqx_otel:start_otel(Conf); ensure_otel(#{enable := false}) -> - emqx_otel_sup:stop_otel(). + emqx_otel:stop_otel(). diff --git a/apps/emqx_opentelemetry/src/emqx_otel_sup.erl b/apps/emqx_opentelemetry/src/emqx_otel_sup.erl index 2240cca03..19f22cb84 100644 --- a/apps/emqx_opentelemetry/src/emqx_otel_sup.erl +++ b/apps/emqx_opentelemetry/src/emqx_otel_sup.erl @@ -19,35 +19,21 @@ -export([start_link/0]). -export([init/1]). --export([start_otel/1]). --export([stop_otel/0]). +-export([worker_spec/2]). --define(CHILD(Mod, Opts), #{ - id => Mod, - start => {Mod, start_link, [Opts]}, - restart => permanent, - shutdown => 5000, - type => worker, - modules => [Mod] -}). - --define(WORKER, emqx_otel). +worker_spec(Mod, Opts) -> + #{ + id => Mod, + start => {Mod, start_link, [Opts]}, + restart => permanent, + shutdown => 5000, + type => worker, + modules => [Mod] + }. start_link() -> supervisor:start_link({local, ?MODULE}, ?MODULE, []). --spec start_otel(map()) -> ok. -start_otel(Conf) -> - assert_started(supervisor:start_child(?MODULE, ?CHILD(?WORKER, Conf))). 
- --spec stop_otel() -> ok | {error, term()}. -stop_otel() -> - case supervisor:terminate_child(?MODULE, ?WORKER) of - ok -> supervisor:delete_child(?MODULE, ?WORKER); - {error, not_found} -> ok; - Error -> Error - end. - init([]) -> SupFlags = #{ strategy => one_for_one, @@ -57,11 +43,6 @@ init([]) -> Children = case emqx_conf:get([opentelemetry]) of #{enable := false} -> []; - #{enable := true} = Conf -> [?CHILD(?WORKER, Conf)] + #{enable := true} = Conf -> [worker_spec(emqx_otel, Conf)] end, {ok, {SupFlags, Children}}. - -assert_started({ok, _Pid}) -> ok; -assert_started({ok, _Pid, _Info}) -> ok; -assert_started({error, {already_started, _Pid}}) -> ok; -assert_started({error, Reason}) -> {error, Reason}. diff --git a/apps/emqx_plugins/src/emqx_plugins.app.src b/apps/emqx_plugins/src/emqx_plugins.app.src index 368a1ad46..d9c2d50df 100644 --- a/apps/emqx_plugins/src/emqx_plugins.app.src +++ b/apps/emqx_plugins/src/emqx_plugins.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_plugins, [ {description, "EMQX Plugin Management"}, - {vsn, "0.1.5"}, + {vsn, "0.1.6"}, {modules, []}, {mod, {emqx_plugins_app, []}}, {applications, [kernel, stdlib, emqx]}, diff --git a/apps/emqx_plugins/src/emqx_plugins.erl b/apps/emqx_plugins/src/emqx_plugins.erl index 5181000de..0e11062fc 100644 --- a/apps/emqx_plugins/src/emqx_plugins.erl +++ b/apps/emqx_plugins/src/emqx_plugins.erl @@ -29,6 +29,7 @@ ensure_uninstalled/1, ensure_enabled/1, ensure_enabled/2, + ensure_enabled/3, ensure_disabled/1, purge/1, delete_package/1 @@ -240,28 +241,34 @@ ensure_enabled(NameVsn) -> %% @doc Ensure a plugin is enabled at the given position of the plugin list. -spec ensure_enabled(name_vsn(), position()) -> ok | {error, any()}. ensure_enabled(NameVsn, Position) -> - ensure_state(NameVsn, Position, true). + ensure_state(NameVsn, Position, _Enabled = true, _ConfLocation = local). + +-spec ensure_enabled(name_vsn(), position(), local | global) -> ok | {error, any()}. 
+ensure_enabled(NameVsn, Position, ConfLocation) when + ConfLocation =:= local; ConfLocation =:= global +-> + ensure_state(NameVsn, Position, _Enabled = true, ConfLocation). %% @doc Ensure a plugin is disabled. -spec ensure_disabled(name_vsn()) -> ok | {error, any()}. ensure_disabled(NameVsn) -> - ensure_state(NameVsn, no_move, false). + ensure_state(NameVsn, no_move, false, _ConfLocation = local). -ensure_state(NameVsn, Position, State) when is_binary(NameVsn) -> - ensure_state(binary_to_list(NameVsn), Position, State); -ensure_state(NameVsn, Position, State) -> +ensure_state(NameVsn, Position, State, ConfLocation) when is_binary(NameVsn) -> + ensure_state(binary_to_list(NameVsn), Position, State, ConfLocation); +ensure_state(NameVsn, Position, State, ConfLocation) -> case read_plugin(NameVsn, #{}) of {ok, _} -> Item = #{ name_vsn => NameVsn, enable => State }, - tryit("ensure_state", fun() -> ensure_configured(Item, Position) end); + tryit("ensure_state", fun() -> ensure_configured(Item, Position, ConfLocation) end); {error, Reason} -> {error, Reason} end. -ensure_configured(#{name_vsn := NameVsn} = Item, Position) -> +ensure_configured(#{name_vsn := NameVsn} = Item, Position, ConfLocation) -> Configured = configured(), SplitFun = fun(#{name_vsn := Nv}) -> bin(Nv) =/= bin(NameVsn) end, {Front, Rear} = lists:splitwith(SplitFun, Configured), @@ -274,7 +281,7 @@ ensure_configured(#{name_vsn := NameVsn} = Item, Position) -> [] -> add_new_configured(Configured, Position, Item) end, - ok = put_configured(NewConfigured). + ok = put_configured(NewConfigured, ConfLocation). add_new_configured(Configured, no_move, Item) -> %% default to rear @@ -787,14 +794,23 @@ is_needed_by(AppToStop, RunningApp) -> undefined -> false end. -put_config(Key, Value) when is_atom(Key) -> - put_config([Key], Value); -put_config(Path, Values) when is_list(Path) -> +put_config(Key, Value) -> + put_config(Key, Value, _ConfLocation = local). 
+ +put_config(Key, Value, ConfLocation) when is_atom(Key) -> + put_config([Key], Value, ConfLocation); +put_config(Path, Values, _ConfLocation = local) when is_list(Path) -> Opts = #{rawconf_with_defaults => true, override_to => cluster}, %% Already in cluster_rpc, don't use emqx_conf:update, dead calls case emqx:update_config([?CONF_ROOT | Path], bin_key(Values), Opts) of {ok, _} -> ok; Error -> Error + end; +put_config(Path, Values, _ConfLocation = global) when is_list(Path) -> + Opts = #{rawconf_with_defaults => true, override_to => cluster}, + case emqx_conf:update([?CONF_ROOT | Path], bin_key(Values), Opts) of + {ok, _} -> ok; + Error -> Error end. bin_key(Map) when is_map(Map) -> @@ -812,7 +828,10 @@ get_config(Path, Default) -> install_dir() -> get_config(install_dir, ""). put_configured(Configured) -> - ok = put_config(states, bin_key(Configured)). + put_configured(Configured, _ConfLocation = local). + +put_configured(Configured, ConfLocation) -> + ok = put_config(states, bin_key(Configured), ConfLocation). configured() -> get_config(states, []). diff --git a/apps/emqx_plugins/src/emqx_plugins_cli.erl b/apps/emqx_plugins/src/emqx_plugins_cli.erl index 2cc5f023c..210dedd1c 100644 --- a/apps/emqx_plugins/src/emqx_plugins_cli.erl +++ b/apps/emqx_plugins/src/emqx_plugins_cli.erl @@ -70,7 +70,7 @@ restart(NameVsn, LogFun) -> ?PRINT(emqx_plugins:restart(NameVsn), LogFun). ensure_enabled(NameVsn, Position, LogFun) -> - ?PRINT(emqx_plugins:ensure_enabled(NameVsn, Position), LogFun). + ?PRINT(emqx_plugins:ensure_enabled(NameVsn, Position, _ConfLocation = global), LogFun). ensure_disabled(NameVsn, LogFun) -> ?PRINT(emqx_plugins:ensure_disabled(NameVsn), LogFun). 
diff --git a/apps/emqx_plugins/test/emqx_plugins_SUITE.erl b/apps/emqx_plugins/test/emqx_plugins_SUITE.erl index 9bb3f5e72..bf359374e 100644 --- a/apps/emqx_plugins/test/emqx_plugins_SUITE.erl +++ b/apps/emqx_plugins/test/emqx_plugins_SUITE.erl @@ -216,7 +216,7 @@ t_position(Config) -> PosApp2 = <<"position-2">>, ok = write_info_file(Config, PosApp2, FakeInfo), %% fake a disabled plugin in config - ok = emqx_plugins:ensure_state(PosApp2, {before, NameVsn}, false), + ok = ensure_state(PosApp2, {before, NameVsn}, false), ListFun = fun() -> lists:map( fun( @@ -255,14 +255,14 @@ t_start_restart_and_stop(Config) -> Bar2 = <<"bar-2">>, ok = write_info_file(Config, Bar2, FakeInfo), %% fake a disabled plugin in config - ok = emqx_plugins:ensure_state(Bar2, front, false), + ok = ensure_state(Bar2, front, false), assert_app_running(emqx_plugin_template, false), ok = emqx_plugins:ensure_started(), assert_app_running(emqx_plugin_template, true), %% fake enable bar-2 - ok = emqx_plugins:ensure_state(Bar2, rear, true), + ok = ensure_state(Bar2, rear, true), %% should cause an error ?assertError( #{function := _, errors := [_ | _]}, @@ -274,7 +274,7 @@ t_start_restart_and_stop(Config) -> %% stop all ok = emqx_plugins:ensure_stopped(), assert_app_running(emqx_plugin_template, false), - ok = emqx_plugins:ensure_state(Bar2, rear, false), + ok = ensure_state(Bar2, rear, false), ok = emqx_plugins:restart(NameVsn), assert_app_running(emqx_plugin_template, true), @@ -826,3 +826,7 @@ make_tar(Cwd, NameWithVsn, TarfileVsn) -> after file:set_cwd(OriginalCwd) end. + +ensure_state(NameVsn, Position, Enabled) -> + %% NOTE: this is an internal function that is (legacy) exported in test builds only... + emqx_plugins:ensure_state(NameVsn, Position, Enabled, _ConfLocation = local). 
diff --git a/apps/emqx_prometheus/src/emqx_prometheus.app.src b/apps/emqx_prometheus/src/emqx_prometheus.app.src index 10fd75e98..c4abbec27 100644 --- a/apps/emqx_prometheus/src/emqx_prometheus.app.src +++ b/apps/emqx_prometheus/src/emqx_prometheus.app.src @@ -2,7 +2,7 @@ {application, emqx_prometheus, [ {description, "Prometheus for EMQX"}, % strict semver, bump manually! - {vsn, "5.0.15"}, + {vsn, "5.0.16"}, {modules, []}, {registered, [emqx_prometheus_sup]}, {applications, [kernel, stdlib, prometheus, emqx, emqx_management]}, diff --git a/apps/emqx_prometheus/src/emqx_prometheus.erl b/apps/emqx_prometheus/src/emqx_prometheus.erl index ac902ca55..e9030d3ed 100644 --- a/apps/emqx_prometheus/src/emqx_prometheus.erl +++ b/apps/emqx_prometheus/src/emqx_prometheus.erl @@ -168,6 +168,9 @@ collect_mf(_Registry, Callback) -> _ = [add_collect_family(Name, Metrics, Callback, counter) || Name <- emqx_metrics_delivery()], _ = [add_collect_family(Name, Metrics, Callback, counter) || Name <- emqx_metrics_client()], _ = [add_collect_family(Name, Metrics, Callback, counter) || Name <- emqx_metrics_session()], + _ = [add_collect_family(Name, Metrics, Callback, counter) || Name <- emqx_metrics_olp()], + _ = [add_collect_family(Name, Metrics, Callback, counter) || Name <- emqx_metrics_acl()], + _ = [add_collect_family(Name, Metrics, Callback, counter) || Name <- emqx_metrics_authn()], ok. 
%% @private @@ -228,6 +231,10 @@ emqx_collect(emqx_sessions_count, Stats) -> gauge_metric(?C('sessions.count', Stats)); emqx_collect(emqx_sessions_max, Stats) -> gauge_metric(?C('sessions.max', Stats)); +emqx_collect(emqx_channels_count, Stats) -> + gauge_metric(?C('channels.count', Stats)); +emqx_collect(emqx_channels_max, Stats) -> + gauge_metric(?C('channels.max', Stats)); %% pub/sub stats emqx_collect(emqx_topics_count, Stats) -> gauge_metric(?C('topics.count', Stats)); @@ -254,6 +261,11 @@ emqx_collect(emqx_retained_count, Stats) -> gauge_metric(?C('retained.count', Stats)); emqx_collect(emqx_retained_max, Stats) -> gauge_metric(?C('retained.max', Stats)); +%% delayed +emqx_collect(emqx_delayed_count, Stats) -> + gauge_metric(?C('delayed.count', Stats)); +emqx_collect(emqx_delayed_max, Stats) -> + gauge_metric(?C('delayed.max', Stats)); %%-------------------------------------------------------------------- %% Metrics - packets & bytes @@ -408,7 +420,10 @@ emqx_collect(emqx_delivery_dropped_expired, Stats) -> counter_metric(?C('delivery.dropped.expired', Stats)); %%-------------------------------------------------------------------- %% Metrics - client - +emqx_collect(emqx_client_connect, Stats) -> + counter_metric(?C('client.connect', Stats)); +emqx_collect(emqx_client_connack, Stats) -> + counter_metric(?C('client.connack', Stats)); emqx_collect(emqx_client_connected, Stats) -> counter_metric(?C('client.connected', Stats)); emqx_collect(emqx_client_authenticate, Stats) -> @@ -437,6 +452,43 @@ emqx_collect(emqx_session_discarded, Stats) -> emqx_collect(emqx_session_terminated, Stats) -> counter_metric(?C('session.terminated', Stats)); %%-------------------------------------------------------------------- + +%% Metrics - overload protection +emqx_collect(emqx_overload_protection_delay_ok, Stats) -> + counter_metric(?C('overload_protection.delay.ok', Stats)); +emqx_collect(emqx_overload_protection_delay_timeout, Stats) -> + 
counter_metric(?C('overload_protection.delay.timeout', Stats)); +emqx_collect(emqx_overload_protection_hibernation, Stats) -> + counter_metric(?C('overload_protection.hibernation', Stats)); +emqx_collect(emqx_overload_protection_gc, Stats) -> + counter_metric(?C('overload_protection.gc', Stats)); +emqx_collect(emqx_overload_protection_new_conn, Stats) -> + counter_metric(?C('overload_protection.new_conn', Stats)); +%%-------------------------------------------------------------------- +%% Metrics - acl +emqx_collect(emqx_authorization_allow, Stats) -> + counter_metric(?C('authorization.allow', Stats)); +emqx_collect(emqx_authorization_deny, Stats) -> + counter_metric(?C('authorization.deny', Stats)); +emqx_collect(emqx_authorization_cache_hit, Stats) -> + counter_metric(?C('authorization.cache_hit', Stats)); +emqx_collect(emqx_authorization_superuser, Stats) -> + counter_metric(?C('authorization.superuser', Stats)); +emqx_collect(emqx_authorization_nomatch, Stats) -> + counter_metric(?C('authorization.nomatch', Stats)); +emqx_collect(emqx_authorization_matched_allow, Stats) -> + counter_metric(?C('authorization.matched_allow', Stats)); +emqx_collect(emqx_authorization_matched_deny, Stats) -> + counter_metric(?C('authorization.matched_deny', Stats)); +%%-------------------------------------------------------------------- +%% Metrics - authn +emqx_collect(emqx_authentication_success, Stats) -> + counter_metric(?C('authentication.success', Stats)); +emqx_collect(emqx_authentication_success_anonymous, Stats) -> + counter_metric(?C('authentication.success.anonymous', Stats)); +emqx_collect(emqx_authentication_failure, Stats) -> + counter_metric(?C('authentication.failure', Stats)); +%%-------------------------------------------------------------------- %% VM emqx_collect(emqx_vm_cpu_use, VMData) -> @@ -506,6 +558,38 @@ emqx_metrics_packets() -> emqx_packets_auth_sent ]. 
+emqx_metrics_olp() -> + case emqx_config_zones:is_olp_enabled() of + true -> + [ + emqx_overload_protection_delay_ok, + emqx_overload_protection_delay_timeout, + emqx_overload_protection_hibernation, + emqx_overload_protection_gc, + emqx_overload_protection_new_conn + ]; + false -> + [] + end. + +emqx_metrics_acl() -> + [ + emqx_authorization_allow, + emqx_authorization_deny, + emqx_authorization_cache_hit, + emqx_authorization_superuser, + emqx_authorization_nomatch, + emqx_authorization_matched_allow, + emqx_authorization_matched_deny + ]. + +emqx_metrics_authn() -> + [ + emqx_authentication_success, + emqx_authentication_success_anonymous, + emqx_authentication_failure + ]. + emqx_metrics_messages() -> [ emqx_messages_received, @@ -539,6 +623,8 @@ emqx_metrics_delivery() -> emqx_metrics_client() -> [ + emqx_client_connect, + emqx_client_connack, emqx_client_connected, emqx_client_authenticate, emqx_client_auth_anonymous, diff --git a/apps/emqx_prometheus/src/emqx_prometheus_config.erl b/apps/emqx_prometheus/src/emqx_prometheus_config.erl index 39d2d4f6b..00dad47f9 100644 --- a/apps/emqx_prometheus/src/emqx_prometheus_config.erl +++ b/apps/emqx_prometheus/src/emqx_prometheus_config.erl @@ -46,12 +46,25 @@ remove_handler() -> ok. post_config_update(?PROMETHEUS, _Req, New, _Old, AppEnvs) -> - application:set_env(AppEnvs), - update_prometheus(New); + update_prometheus(AppEnvs), + update_push_gateway(New); post_config_update(_ConfPath, _Req, _NewConf, _OldConf, _AppEnvs) -> ok. -update_prometheus(#{enable := true}) -> +update_prometheus(AppEnvs) -> + PrevCollectors = prometheus_registry:collectors(default), + CurCollectors = proplists:get_value(collectors, proplists:get_value(prometheus, AppEnvs)), + lists:foreach( + fun prometheus_registry:deregister_collector/1, + PrevCollectors -- CurCollectors + ), + lists:foreach( + fun prometheus_registry:register_collector/1, + CurCollectors -- PrevCollectors + ), + application:set_env(AppEnvs). 
+ +update_push_gateway(#{enable := true}) -> emqx_prometheus_sup:start_child(?APP); -update_prometheus(#{enable := false}) -> +update_push_gateway(#{enable := false}) -> emqx_prometheus_sup:stop_child(?APP). diff --git a/apps/emqx_prometheus/src/emqx_prometheus_schema.erl b/apps/emqx_prometheus/src/emqx_prometheus_schema.erl index 3884f7065..f34675c0b 100644 --- a/apps/emqx_prometheus/src/emqx_prometheus_schema.erl +++ b/apps/emqx_prometheus/src/emqx_prometheus_schema.erl @@ -26,7 +26,7 @@ fields/1, desc/1, translation/1, - convert_headers/1, + convert_headers/2, validate_push_gateway_server/1 ]). @@ -61,7 +61,7 @@ fields("prometheus") -> #{ default => #{}, required => false, - converter => fun ?MODULE:convert_headers/1, + converter => fun ?MODULE:convert_headers/2, desc => ?DESC(headers) } )}, @@ -99,7 +99,7 @@ fields("prometheus") -> ?HOCON( hoconsc:enum([enabled, disabled]), #{ - default => enabled, + default => disabled, required => true, importance => ?IMPORTANCE_LOW, desc => ?DESC(mnesia_collector) @@ -110,7 +110,7 @@ fields("prometheus") -> ?HOCON( hoconsc:enum([enabled, disabled]), #{ - default => enabled, + default => disabled, required => true, importance => ?IMPORTANCE_LOW, desc => ?DESC(vm_statistics_collector) @@ -121,7 +121,7 @@ fields("prometheus") -> ?HOCON( hoconsc:enum([enabled, disabled]), #{ - default => enabled, + default => disabled, required => true, importance => ?IMPORTANCE_LOW, desc => ?DESC(vm_system_info_collector) @@ -133,7 +133,7 @@ fields("prometheus") -> ?HOCON( hoconsc:enum([enabled, disabled]), #{ - default => enabled, + default => disabled, required => true, importance => ?IMPORTANCE_LOW, desc => ?DESC(vm_memory_collector) @@ -144,7 +144,7 @@ fields("prometheus") -> ?HOCON( hoconsc:enum([enabled, disabled]), #{ - default => enabled, + default => disabled, required => true, importance => ?IMPORTANCE_LOW, desc => ?DESC(vm_msacc_collector) @@ -155,9 +155,11 @@ fields("prometheus") -> desc("prometheus") -> ?DESC(prometheus); desc(_) 
-> undefined. -convert_headers(<<>>) -> +convert_headers(Headers, #{make_serializable := true}) -> + Headers; +convert_headers(<<>>, _Opts) -> []; -convert_headers(Headers) when is_map(Headers) -> +convert_headers(Headers, _Opts) when is_map(Headers) -> maps:fold( fun(K, V, Acc) -> [{binary_to_list(K), binary_to_list(V)} | Acc] @@ -165,7 +167,7 @@ convert_headers(Headers) when is_map(Headers) -> [], Headers ); -convert_headers(Headers) when is_list(Headers) -> +convert_headers(Headers, _Opts) when is_list(Headers) -> Headers. validate_push_gateway_server(Url) -> @@ -178,5 +180,5 @@ validate_push_gateway_server(Url) -> translation(Name) -> %% translate 'vm_dist_collector', 'mnesia_collector', 'vm_statistics_collector', %% 'vm_system_info_collector', 'vm_memory_collector', 'vm_msacc_collector' - %% to prometheus envrionments + %% to prometheus environments emqx_conf_schema:translation(Name). diff --git a/apps/emqx_prometheus/test/emqx_prometheus_SUITE.erl b/apps/emqx_prometheus/test/emqx_prometheus_SUITE.erl index 77d9902a2..3f9e743f3 100644 --- a/apps/emqx_prometheus/test/emqx_prometheus_SUITE.erl +++ b/apps/emqx_prometheus/test/emqx_prometheus_SUITE.erl @@ -30,12 +30,12 @@ " headers = { Authorization = \"some-authz-tokens\"}\n" " job_name = \"${name}~${host}\"\n" " enable = true\n" - " vm_dist_collector = enabled\n" - " mnesia_collector = enabled\n" + " vm_dist_collector = disabled\n" + " mnesia_collector = disabled\n" " vm_statistics_collector = disabled\n" " vm_system_info_collector = disabled\n" - " vm_memory_collector = enabled\n" - " vm_msacc_collector = enabled\n" + " vm_memory_collector = disabled\n" + " vm_msacc_collector = disabled\n" "}\n" >>). 
diff --git a/apps/emqx_prometheus/test/emqx_prometheus_api_SUITE.erl b/apps/emqx_prometheus/test/emqx_prometheus_api_SUITE.erl index e29d46720..eb909baf5 100644 --- a/apps/emqx_prometheus/test/emqx_prometheus_api_SUITE.erl +++ b/apps/emqx_prometheus/test/emqx_prometheus_api_SUITE.erl @@ -75,19 +75,64 @@ t_prometheus_api(_) -> <<"vm_statistics_collector">> := _, <<"vm_system_info_collector">> := _, <<"vm_memory_collector">> := _, - <<"vm_msacc_collector">> := _ + <<"vm_msacc_collector">> := _, + <<"headers">> := _ }, Conf ), #{<<"enable">> := Enable} = Conf, ?assertEqual(Enable, undefined =/= erlang:whereis(emqx_prometheus)), - NewConf = Conf#{<<"interval">> => <<"2s">>, <<"vm_statistics_collector">> => <<"disabled">>}, + + NewConf = Conf#{ + <<"interval">> => <<"2s">>, + <<"vm_statistics_collector">> => <<"enabled">>, + <<"headers">> => #{ + <<"test-str1">> => <<"test-value">>, + <<"test-str2">> => <<"42">> + } + }, {ok, Response2} = emqx_mgmt_api_test_util:request_api(put, Path, "", Auth, NewConf), Conf2 = emqx_utils_json:decode(Response2, [return_maps]), ?assertMatch(NewConf, Conf2), - ?assertEqual({ok, []}, application:get_env(prometheus, vm_statistics_collector_metrics)), - ?assertEqual({ok, all}, application:get_env(prometheus, vm_memory_collector_metrics)), + + EnvCollectors = application:get_env(prometheus, collectors, []), + PromCollectors = prometheus_registry:collectors(default), + ?assertEqual(lists:sort(EnvCollectors), lists:sort(PromCollectors)), + ?assert(lists:member(prometheus_vm_statistics_collector, EnvCollectors), EnvCollectors), + + lists:foreach( + fun({C, Enabled}) -> + ?assertEqual(Enabled, lists:member(C, EnvCollectors), EnvCollectors) + end, + [ + {prometheus_vm_dist_collector, false}, + {prometheus_vm_system_info_collector, false}, + {prometheus_vm_memory_collector, false}, + {prometheus_mnesia_collector, false}, + {prometheus_vm_msacc_collector, false}, + {prometheus_vm_statistics_collector, true} + ] + ), + + ?assertMatch( + #{ + 
<<"headers">> := #{ + <<"test-str1">> := <<"test-value">>, + <<"test-str2">> := <<"42">> + } + }, + emqx_config:get_raw([prometheus]) + ), + ?assertMatch( + #{ + headers := [ + {"test-str2", "42"}, + {"test-str1", "test-value"} + ] + }, + emqx_config:get([prometheus]) + ), NewConf1 = Conf#{<<"enable">> => (not Enable)}, {ok, _Response3} = emqx_mgmt_api_test_util:request_api(put, Path, "", Auth, NewConf1), diff --git a/apps/emqx_redis/rebar.config b/apps/emqx_redis/rebar.config index c14536384..4e67e0986 100644 --- a/apps/emqx_redis/rebar.config +++ b/apps/emqx_redis/rebar.config @@ -3,7 +3,7 @@ {erl_opts, [debug_info]}. {deps, [ %% NOTE: mind ecpool version when updating eredis_cluster version - {eredis_cluster, {git, "https://github.com/emqx/eredis_cluster", {tag, "0.8.1"}}}, + {eredis_cluster, {git, "https://github.com/emqx/eredis_cluster", {tag, "0.8.2"}}}, {emqx_connector, {path, "../../apps/emqx_connector"}}, {emqx_resource, {path, "../../apps/emqx_resource"}} ]}. diff --git a/apps/emqx_redis/src/emqx_redis.app.src b/apps/emqx_redis/src/emqx_redis.app.src index 294a642f5..23e13bb72 100644 --- a/apps/emqx_redis/src/emqx_redis.app.src +++ b/apps/emqx_redis/src/emqx_redis.app.src @@ -1,6 +1,6 @@ {application, emqx_redis, [ {description, "EMQX Redis Database Connector"}, - {vsn, "0.1.0"}, + {vsn, "0.1.1"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_redis/src/emqx_redis.erl b/apps/emqx_redis/src/emqx_redis.erl index ef89c3931..2779620bf 100644 --- a/apps/emqx_redis/src/emqx_redis.erl +++ b/apps/emqx_redis/src/emqx_redis.erl @@ -146,7 +146,8 @@ on_start( Opts = [ {pool_size, PoolSize}, - {password, maps:get(password, Config, "")}, + {username, maps:get(username, Config, undefined)}, + {password, eredis_secret:wrap(maps:get(password, Config, ""))}, {auto_reconnect, ?AUTO_RECONNECT_INTERVAL} ] ++ Database ++ Servers, Options = @@ -292,6 +293,7 @@ connect(Opts) -> redis_fields() -> [ {pool_size, fun emqx_connector_schema_lib:pool_size/1}, + 
{username, fun emqx_connector_schema_lib:username/1}, {password, fun emqx_connector_schema_lib:password/1}, {database, #{ type => non_neg_integer(), diff --git a/apps/emqx_redis/test/emqx_redis_SUITE.erl b/apps/emqx_redis/test/emqx_redis_SUITE.erl index c425f19d9..e03b05921 100644 --- a/apps/emqx_redis/test/emqx_redis_SUITE.erl +++ b/apps/emqx_redis/test/emqx_redis_SUITE.erl @@ -137,6 +137,31 @@ perform_lifecycle_check(ResourceId, InitialConfig, RedisCommand) -> #{timeout => 500} ) ), + % check authentication methods + ?assertEqual( + {ok, <<"OK">>}, + emqx_resource:query(ResourceId, {cmd, ["AUTH", "public"]}) + ), + ?assertEqual( + {error, <<"WRONGPASS invalid username-password pair or user is disabled.">>}, + emqx_resource:query(ResourceId, {cmd, ["AUTH", "test_passwd"]}) + ), + ?assertEqual( + {ok, <<"OK">>}, + emqx_resource:query(ResourceId, {cmd, ["AUTH", "test_user", "test_passwd"]}) + ), + ?assertEqual( + {error, <<"WRONGPASS invalid username-password pair or user is disabled.">>}, + emqx_resource:query(ResourceId, {cmd, ["AUTH", "test_user", "public"]}) + ), + ?assertEqual( + {error, <<"WRONGPASS invalid username-password pair or user is disabled.">>}, + emqx_resource:query(ResourceId, {cmd, ["AUTH", "wrong_user", "test_passwd"]}) + ), + ?assertEqual( + {error, <<"WRONGPASS invalid username-password pair or user is disabled.">>}, + emqx_resource:query(ResourceId, {cmd, ["AUTH", "wrong_user", "public"]}) + ), ?assertEqual(ok, emqx_resource:stop(ResourceId)), % Resource will be listed still, but state will be changed and healthcheck will fail % as the worker no longer exists. 
@@ -186,7 +211,8 @@ redis_config_sentinel() -> " redis_type = ~s\n" ++ MaybeSentinel ++ MaybeDatabase ++ - " password = public\n" ++ + " username = test_user\n" ++ + " password = test_passwd\n" ++ " ~s = \"~s:~b\"\n" ++ " " ++ "" diff --git a/apps/emqx_resource/src/emqx_resource.app.src b/apps/emqx_resource/src/emqx_resource.app.src index 283262e99..fdd760de0 100644 --- a/apps/emqx_resource/src/emqx_resource.app.src +++ b/apps/emqx_resource/src/emqx_resource.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_resource, [ {description, "Manager for all external resources"}, - {vsn, "0.1.21"}, + {vsn, "0.1.22"}, {registered, []}, {mod, {emqx_resource_app, []}}, {applications, [ diff --git a/apps/emqx_resource/src/emqx_resource.erl b/apps/emqx_resource/src/emqx_resource.erl index d0d93a701..d41364315 100644 --- a/apps/emqx_resource/src/emqx_resource.erl +++ b/apps/emqx_resource/src/emqx_resource.erl @@ -281,8 +281,12 @@ query(ResId, Request, Opts) -> {ok, _Group, #{query_mode := QM, error := Error}} -> case {QM, Error} of {_, unhealthy_target} -> + emqx_resource_metrics:matched_inc(ResId), + emqx_resource_metrics:dropped_resource_stopped_inc(ResId), ?RESOURCE_ERROR(unhealthy_target, "unhealthy target"); {_, {unhealthy_target, _Message}} -> + emqx_resource_metrics:matched_inc(ResId), + emqx_resource_metrics:dropped_resource_stopped_inc(ResId), ?RESOURCE_ERROR(unhealthy_target, "unhealthy target"); {simple_async, _} -> %% TODO(5.1.1): pass Resource instead of ResId to simple APIs diff --git a/apps/emqx_resource/src/emqx_resource_manager.erl b/apps/emqx_resource/src/emqx_resource_manager.erl index 2e4822a2f..27936851e 100644 --- a/apps/emqx_resource/src/emqx_resource_manager.erl +++ b/apps/emqx_resource/src/emqx_resource_manager.erl @@ -179,7 +179,9 @@ create_dry_run(ResourceType, Config) -> false -> #{} end, ok = emqx_resource_manager_sup:ensure_child(ResId, <<"dry_run">>, ResourceType, Config, Opts), - case wait_for_ready(ResId, 5000) of + 
HealthCheckInterval = maps:get(health_check_interval, Opts, ?HEALTHCHECK_INTERVAL), + Timeout = emqx_utils:clamp(HealthCheckInterval, 5_000, 60_000), + case wait_for_ready(ResId, Timeout) of ok -> remove(ResId); {error, Reason} -> diff --git a/apps/emqx_resource/src/emqx_resource_validator.erl b/apps/emqx_resource/src/emqx_resource_validator.erl index bc733ef80..ea9547265 100644 --- a/apps/emqx_resource/src/emqx_resource_validator.erl +++ b/apps/emqx_resource/src/emqx_resource_validator.erl @@ -28,10 +28,18 @@ max(Type, Max) -> min(Type, Min) -> limit(Type, '>=', Min). -not_empty(ErrMsg) -> +not_empty(ErrMsg0) -> + ErrMsg = + try + lists:flatten(ErrMsg0) + catch + _:_ -> + ErrMsg0 + end, fun (undefined) -> {error, ErrMsg}; (<<>>) -> {error, ErrMsg}; + ("") -> {error, ErrMsg}; (_) -> ok end. @@ -50,7 +58,8 @@ len(string) -> fun string:length/1; len(_Type) -> fun(Val) -> Val end. err_limit({Type, {Op, Expected}, {got, Got}}) -> - io_lib:format("Expect the ~ts value ~ts ~p but got: ~p", [Type, Op, Expected, Got]). + Msg = io_lib:format("Expect the ~ts value ~ts ~p but got: ~p", [Type, Op, Expected, Got]), + lists:flatten(Msg). return(true, _) -> ok; return(false, Error) -> {error, Error}. diff --git a/apps/emqx_resource/src/schema/emqx_resource_schema.erl b/apps/emqx_resource/src/schema/emqx_resource_schema.erl index b98f50a98..dcf3414e6 100644 --- a/apps/emqx_resource/src/schema/emqx_resource_schema.erl +++ b/apps/emqx_resource/src/schema/emqx_resource_schema.erl @@ -47,6 +47,7 @@ fields("resource_opts") -> fields("creation_opts") -> create_opts([]). +-spec create_opts([{atom(), hocon_schema:field_schema_map()}]) -> [{atom(), hocon_schema:field()}]. 
create_opts(Overrides) -> override( [ diff --git a/apps/emqx_resource/test/emqx_resource_SUITE.erl b/apps/emqx_resource/test/emqx_resource_SUITE.erl index 934a97829..ef9ee29c7 100644 --- a/apps/emqx_resource/test/emqx_resource_SUITE.erl +++ b/apps/emqx_resource/test/emqx_resource_SUITE.erl @@ -899,6 +899,38 @@ t_healthy(_) -> end ). +t_unhealthy_target(_) -> + HealthCheckError = {unhealthy_target, "some message"}, + ?assertMatch( + {ok, _}, + emqx_resource:create_local( + ?ID, + ?DEFAULT_RESOURCE_GROUP, + ?TEST_RESOURCE, + #{name => test_resource, health_check_error => {msg, HealthCheckError}} + ) + ), + ?assertEqual( + {ok, disconnected}, + emqx_resource:health_check(?ID) + ), + ?assertMatch( + {ok, _Group, #{error := HealthCheckError}}, + emqx_resource_manager:lookup(?ID) + ), + %% messages are dropped when bridge is unhealthy + lists:foreach( + fun(_) -> + ?assertMatch( + {error, {resource_error, #{reason := unhealthy_target}}}, + emqx_resource:query(?ID, message) + ) + end, + lists:seq(1, 3) + ), + ?assertEqual(3, emqx_resource_metrics:matched_get(?ID)), + ?assertEqual(3, emqx_resource_metrics:dropped_resource_stopped_get(?ID)). + t_stop_start(_) -> ?check_trace( begin @@ -1121,10 +1153,58 @@ t_create_dry_run_local_failed(_) -> ). t_test_func(_) -> + IsErrorMsgPlainString = fun({error, Msg}) -> io_lib:printable_list(Msg) end, ?assertEqual(ok, erlang:apply(emqx_resource_validator:not_empty("not_empty"), [<<"someval">>])), ?assertEqual(ok, erlang:apply(emqx_resource_validator:min(int, 3), [4])), ?assertEqual(ok, erlang:apply(emqx_resource_validator:max(array, 10), [[a, b, c, d]])), - ?assertEqual(ok, erlang:apply(emqx_resource_validator:max(string, 10), ["less10"])). 
+ ?assertEqual(ok, erlang:apply(emqx_resource_validator:max(string, 10), ["less10"])), + ?assertEqual( + true, IsErrorMsgPlainString(erlang:apply(emqx_resource_validator:min(int, 66), [42])) + ), + ?assertEqual( + true, IsErrorMsgPlainString(erlang:apply(emqx_resource_validator:max(int, 42), [66])) + ), + ?assertEqual( + true, IsErrorMsgPlainString(erlang:apply(emqx_resource_validator:min(array, 3), [[1, 2]])) + ), + ?assertEqual( + true, + IsErrorMsgPlainString(erlang:apply(emqx_resource_validator:max(array, 3), [[1, 2, 3, 4]])) + ), + ?assertEqual( + true, IsErrorMsgPlainString(erlang:apply(emqx_resource_validator:min(string, 3), ["1"])) + ), + ?assertEqual( + true, IsErrorMsgPlainString(erlang:apply(emqx_resource_validator:max(string, 3), ["1234"])) + ), + NestedMsg = io_lib:format("The answer: ~p", [42]), + ExpectedMsg = "The answer: 42", + BinMsg = <<"The answer: 42">>, + MapMsg = #{question => "The question", answer => 42}, + ?assertEqual( + {error, ExpectedMsg}, + erlang:apply(emqx_resource_validator:not_empty(NestedMsg), [""]) + ), + ?assertEqual( + {error, ExpectedMsg}, + erlang:apply(emqx_resource_validator:not_empty(NestedMsg), [<<>>]) + ), + ?assertEqual( + {error, ExpectedMsg}, + erlang:apply(emqx_resource_validator:not_empty(NestedMsg), [undefined]) + ), + ?assertEqual( + {error, ExpectedMsg}, + erlang:apply(emqx_resource_validator:not_empty(NestedMsg), [undefined]) + ), + ?assertEqual( + {error, BinMsg}, + erlang:apply(emqx_resource_validator:not_empty(BinMsg), [undefined]) + ), + ?assertEqual( + {error, MapMsg}, + erlang:apply(emqx_resource_validator:not_empty(MapMsg), [""]) + ). 
t_reset_metrics(_) -> {ok, _} = emqx_resource:create( diff --git a/apps/emqx_rule_engine/src/emqx_rule_engine.erl b/apps/emqx_rule_engine/src/emqx_rule_engine.erl index 41d1ed433..01b9b76d0 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_engine.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_engine.erl @@ -225,8 +225,9 @@ get_rules_ordered_by_ts() -> -spec get_rules_for_topic(Topic :: binary()) -> [rule()]. get_rules_for_topic(Topic) -> [ - emqx_topic_index:get_record(M, ?RULE_TOPIC_INDEX) - || M <- emqx_topic_index:matches(Topic, ?RULE_TOPIC_INDEX, [unique]) + Rule + || M <- emqx_topic_index:matches(Topic, ?RULE_TOPIC_INDEX, [unique]), + Rule <- lookup_rule(emqx_topic_index:get_id(M)) ]. -spec get_rules_with_same_event(Topic :: binary()) -> [rule()]. @@ -284,11 +285,14 @@ is_of_event_name(EventName, Topic) -> -spec get_rule(Id :: rule_id()) -> {ok, rule()} | not_found. get_rule(Id) -> - case ets:lookup(?RULE_TAB, Id) of - [{Id, Rule}] -> {ok, Rule#{id => Id}}; + case lookup_rule(Id) of + [Rule] -> {ok, Rule}; [] -> not_found end. +lookup_rule(Id) -> + [Rule || {_Id, Rule} <- ets:lookup(?RULE_TAB, Id)]. + load_hooks_for_rule(#{from := Topics}) -> lists:foreach(fun emqx_rule_events:load/1, Topics). @@ -483,7 +487,7 @@ with_parsed_rule(Params = #{id := RuleId, sql := Sql, actions := Actions}, Creat do_insert_rule(#{id := Id} = Rule) -> ok = load_hooks_for_rule(Rule), ok = maybe_add_metrics_for_rule(Id), - true = ets:insert(?RULE_TAB, {Id, maps:remove(id, Rule)}), + true = ets:insert(?RULE_TAB, {Id, Rule}), ok. do_delete_rule(#{id := Id} = Rule) -> @@ -492,10 +496,10 @@ do_delete_rule(#{id := Id} = Rule) -> true = ets:delete(?RULE_TAB, Id), ok. -do_update_rule_index(#{id := Id, from := From} = Rule) -> +do_update_rule_index(#{id := Id, from := From}) -> ok = lists:foreach( fun(Topic) -> - true = emqx_topic_index:insert(Topic, Id, Rule, ?RULE_TOPIC_INDEX) + true = emqx_topic_index:insert(Topic, Id, [], ?RULE_TOPIC_INDEX) end, From ). 
diff --git a/apps/emqx_rule_engine/src/emqx_rule_runtime.erl b/apps/emqx_rule_engine/src/emqx_rule_runtime.erl index f047e2047..74396dbc8 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_runtime.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_runtime.erl @@ -493,11 +493,20 @@ apply_func(Other, _, _) -> }). do_apply_func(Module, Name, Args, Columns) -> - case erlang:apply(Module, Name, Args) of - Func when is_function(Func) -> - erlang:apply(Func, [Columns]); - Result -> - Result + try + case erlang:apply(Module, Name, Args) of + Func when is_function(Func) -> + erlang:apply(Func, [Columns]); + Result -> + Result + end + catch + error:function_clause -> + ?RAISE_BAD_SQL(#{ + reason => bad_sql_function_argument, + arguments => Args, + function_name => Name + }) end. add_metadata(Columns, Metadata) when is_map(Columns), is_map(Metadata) -> diff --git a/apps/emqx_rule_engine/test/emqx_rule_engine_SUITE.erl b/apps/emqx_rule_engine/test/emqx_rule_engine_SUITE.erl index 8c3bd0ebb..ae4bf43f6 100644 --- a/apps/emqx_rule_engine/test/emqx_rule_engine_SUITE.erl +++ b/apps/emqx_rule_engine/test/emqx_rule_engine_SUITE.erl @@ -293,6 +293,66 @@ t_kv_store(_) -> emqx_rule_funcs:kv_store_del(<<"abc">>), undefined = emqx_rule_funcs:kv_store_get(<<"abc">>). 
+t_function_clause_errors(_Config) -> + SQL0 = <<"select upper(xxxx) from \"t/a\"">>, + Payload = <<"{}">>, + ?assertMatch( + {error, + {select_and_transform_error, + {throw, + #{ + arguments := [undefined], + reason := bad_sql_function_argument, + function_name := upper + }, + _Stack}}}, + emqx_rule_sqltester:test( + #{ + sql => SQL0, + context => #{payload => Payload, topic => <<"t/a">>} + } + ) + ), + SQL1 = <<"foreach xs as x do upper(xxxx) from \"t/a\"">>, + ?assertMatch( + {error, { + {doeach_error, + {throw, + #{ + arguments := [undefined], + reason := bad_sql_function_argument, + function_name := upper + }, + _Stack0}}, + _Stack1 + }}, + emqx_rule_sqltester:test( + #{ + sql => SQL1, + context => #{payload => Payload, xs => [1, 2, 3], topic => <<"t/a">>} + } + ) + ), + SQL2 = <<"foreach upper(xxxx) as x from \"t/a\"">>, + ?assertMatch( + {error, + {select_and_collect_error, + {throw, + #{ + arguments := [undefined], + reason := bad_sql_function_argument, + function_name := upper + }, + _Stack}}}, + emqx_rule_sqltester:test( + #{ + sql => SQL2, + context => #{payload => Payload, topic => <<"t/a">>} + } + ) + ), + ok. + %%------------------------------------------------------------------------------ %% Test cases for rule registry %%------------------------------------------------------------------------------ diff --git a/bin/nodetool b/bin/nodetool index 8170e68e2..a96f5f9fd 100755 --- a/bin/nodetool +++ b/bin/nodetool @@ -357,6 +357,17 @@ add_libs_dir() -> {error, Reason} -> %% rel file was been deleted by release handler error({failed_to_read_RELEASES_file, RelFile, Reason}) + end, + ok = add_patches_dir(filename:join([RootDir, "data", "patches"])), + ok = add_patches_dir("/var/lib/emqx/patches"). + +add_patches_dir(PatchesDir) -> + case filelib:is_dir(PatchesDir) of + true -> + true = code:add_patha(PatchesDir), + ok; + false -> + ok end. 
add_lib_dir(RootDir, Name, Vsn) -> diff --git a/changes/ce/feat-11469.en.md b/changes/ce/feat-11469.en.md new file mode 100644 index 000000000..827fe3a87 --- /dev/null +++ b/changes/ce/feat-11469.en.md @@ -0,0 +1 @@ +Added support for specifying username in Redis authentication. diff --git a/changes/ce/feat-11496.en.md b/changes/ce/feat-11496.en.md new file mode 100644 index 000000000..5303ce8d8 --- /dev/null +++ b/changes/ce/feat-11496.en.md @@ -0,0 +1 @@ +Disabled the Erlang VM Prometheus exporter by default to improve performance and security. diff --git a/changes/ce/feat-11497.en.md b/changes/ce/feat-11497.en.md new file mode 100644 index 000000000..30ef73fb1 --- /dev/null +++ b/changes/ce/feat-11497.en.md @@ -0,0 +1,2 @@ +Enhanced broker metrics collection and export by adding new metrics for messages, overload protection, authorization, authentication, +and improving naming consistency for OpenTelemetry. diff --git a/changes/ce/fix-11466.en.md b/changes/ce/fix-11466.en.md new file mode 100644 index 000000000..80679d444 --- /dev/null +++ b/changes/ce/fix-11466.en.md @@ -0,0 +1 @@ +Fixed a crash that occurred when setting the `ssl_options.ciphers` configuration option to an empty string (""). diff --git a/changes/ce/fix-11480.en.md b/changes/ce/fix-11480.en.md new file mode 100644 index 000000000..f3440f59e --- /dev/null +++ b/changes/ce/fix-11480.en.md @@ -0,0 +1 @@ +Return more user-friendly messages when rule functions are fed bad arguments. 
diff --git a/changes/ce/fix-11520.en.md b/changes/ce/fix-11520.en.md new file mode 100644 index 000000000..42675c5f5 --- /dev/null +++ b/changes/ce/fix-11520.en.md @@ -0,0 +1 @@ +Fixed issue where packets_connack_sent metric was not incremented on CONNACK packets sent with non-zero ack_flag diff --git a/changes/ce/fix-11523.en.md b/changes/ce/fix-11523.en.md new file mode 100644 index 000000000..d8b6d8568 --- /dev/null +++ b/changes/ce/fix-11523.en.md @@ -0,0 +1 @@ +Fixes misunderstood prompt when invalid certificates/keys were specified for the `/configs` API. diff --git a/changes/ce/fix-11534.en.md b/changes/ce/fix-11534.en.md new file mode 100644 index 000000000..15c89f392 --- /dev/null +++ b/changes/ce/fix-11534.en.md @@ -0,0 +1 @@ +Fixed increment on data bridge statistics when bridge is unhealthy. Now, messages sent to unhealthy bridges are being counted as dropped messages. diff --git a/changes/ce/fix-11540.en.md b/changes/ce/fix-11540.en.md new file mode 100644 index 000000000..7486e7133 --- /dev/null +++ b/changes/ce/fix-11540.en.md @@ -0,0 +1 @@ +Improved HTTP response when attempting to create a bridge with an invalid name. diff --git a/changes/ce/fix-11548.en.md b/changes/ce/fix-11548.en.md new file mode 100644 index 000000000..3af38310e --- /dev/null +++ b/changes/ce/fix-11548.en.md @@ -0,0 +1 @@ +Fixed an issue that prevented the plugin order to be updated on the whole cluster. diff --git a/changes/ce/perf-11490.en.md b/changes/ce/perf-11490.en.md new file mode 100644 index 000000000..98f6f8f5c --- /dev/null +++ b/changes/ce/perf-11490.en.md @@ -0,0 +1 @@ +Quickly return the result when the password is absent in password-based authentication. diff --git a/changes/ee/feat-11478.en.md b/changes/ee/feat-11478.en.md new file mode 100644 index 000000000..b2d1a49a2 --- /dev/null +++ b/changes/ee/feat-11478.en.md @@ -0,0 +1,3 @@ +Add HStreamDB bridge support (both TCP and TLS connection allowed), adapted to the HStreamDB `v0.16.1`. 
+ +Updated driver to `0.4.5+v0.16.1` in [PR#11530](https://github.com/emqx/emqx/pull/11530). diff --git a/changes/ee/fix-11452.en.md b/changes/ee/fix-11452.en.md new file mode 100644 index 000000000..bd6080fce --- /dev/null +++ b/changes/ee/fix-11452.en.md @@ -0,0 +1 @@ +The default payload template for Kinesis was updated to store the entire message when no template is provided. diff --git a/changes/ee/fix-11461.en.md b/changes/ee/fix-11461.en.md new file mode 100644 index 000000000..011e3e31e --- /dev/null +++ b/changes/ee/fix-11461.en.md @@ -0,0 +1 @@ +Made the timeout for testing bridges connectivity follow more closely the configured health check timeout. diff --git a/changes/ee/fix-11492.en.md b/changes/ee/fix-11492.en.md new file mode 100644 index 000000000..9f61abee2 --- /dev/null +++ b/changes/ee/fix-11492.en.md @@ -0,0 +1 @@ +Fixed an issue which would yield false negatives when testing the connectivity of GreptimeDB bridges. diff --git a/changes/ee/fix-11494.en.md b/changes/ee/fix-11494.en.md new file mode 100644 index 000000000..5ff887562 --- /dev/null +++ b/changes/ee/fix-11494.en.md @@ -0,0 +1 @@ +Added schema validator to reflect Amazon Kinesis' static constraint: batch request can support up to 500 records (max batch size); diff --git a/changes/ee/fix-11508.en.md b/changes/ee/fix-11508.en.md new file mode 100644 index 000000000..54ea90db3 --- /dev/null +++ b/changes/ee/fix-11508.en.md @@ -0,0 +1 @@ +Fix message error handling on Kafka bridge when headers translate to an invalid value. diff --git a/changes/ee/fix-11513.en.md b/changes/ee/fix-11513.en.md new file mode 100644 index 000000000..51d953933 --- /dev/null +++ b/changes/ee/fix-11513.en.md @@ -0,0 +1 @@ +Fixed a bug which prevented the Kafka Producer bridge from using the correct template for the `timestamp` field. 
diff --git a/changes/ee/fix-11527.en.md b/changes/ee/fix-11527.en.md new file mode 100644 index 000000000..33d077e94 --- /dev/null +++ b/changes/ee/fix-11527.en.md @@ -0,0 +1 @@ +Fixed an issue with Kafka header handling when placeholders resolve to an array of key-value pairs (e.g.: `[{"key": "foo", "value": "bar"}]`). diff --git a/deploy/charts/emqx-enterprise/Chart.yaml b/deploy/charts/emqx-enterprise/Chart.yaml index 575c6b354..b72b67e81 100644 --- a/deploy/charts/emqx-enterprise/Chart.yaml +++ b/deploy/charts/emqx-enterprise/Chart.yaml @@ -14,8 +14,8 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. -version: 5.2.0-alpha.3 +version: 5.2.0-alpha.4 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. -appVersion: 5.2.0-alpha.3 +appVersion: 5.2.0-alpha.4 diff --git a/mix.exs b/mix.exs index e3916245c..29596c872 100644 --- a/mix.exs +++ b/mix.exs @@ -102,26 +102,31 @@ defmodule EMQXUmbrella.MixProject do {:opentelemetry_api, github: "emqx/opentelemetry-erlang", sparse: "apps/opentelemetry_api", + tag: "v1.3.0-emqx", override: true, runtime: false}, {:opentelemetry, github: "emqx/opentelemetry-erlang", sparse: "apps/opentelemetry", + tag: "v1.3.0-emqx", override: true, runtime: false}, {:opentelemetry_api_experimental, github: "emqx/opentelemetry-erlang", sparse: "apps/opentelemetry_api_experimental", + tag: "v1.3.0-emqx", override: true, runtime: false}, {:opentelemetry_experimental, github: "emqx/opentelemetry-erlang", sparse: "apps/opentelemetry_experimental", + tag: "v1.3.0-emqx", override: true, runtime: false}, {:opentelemetry_exporter, github: "emqx/opentelemetry-erlang", sparse: "apps/opentelemetry_exporter", + tag: "v1.3.0-emqx", override: true, runtime: false} ] ++ @@ -227,7 +232,7 @@ defmodule EMQXUmbrella.MixProject do defp 
enterprise_deps(_profile_info = %{edition_type: :enterprise}) do [ - {:hstreamdb_erl, github: "hstreamdb/hstreamdb_erl", tag: "0.3.1+v0.12.0"}, + {:hstreamdb_erl, github: "hstreamdb/hstreamdb_erl", tag: "0.4.5+v0.16.1"}, {:influxdb, github: "emqx/influxdb-client-erl", tag: "1.1.11", override: true}, {:wolff, github: "kafka4beam/wolff", tag: "1.7.7"}, {:kafka_protocol, github: "kafka4beam/kafka_protocol", tag: "4.1.3", override: true}, diff --git a/rebar.config b/rebar.config index 450be64b3..a8cc269e6 100644 --- a/rebar.config +++ b/rebar.config @@ -85,13 +85,13 @@ , {jsone, {git, "https://github.com/emqx/jsone.git", {tag, "1.7.1"}}} , {uuid, {git, "https://github.com/okeuday/uuid.git", {tag, "v2.0.6"}}} %% trace - , {opentelemetry_api, {git_subdir, "http://github.com/emqx/opentelemetry-erlang", {branch, "main"}, "apps/opentelemetry_api"}} - , {opentelemetry, {git_subdir, "http://github.com/emqx/opentelemetry-erlang", {branch, "main"}, "apps/opentelemetry"}} + , {opentelemetry_api, {git_subdir, "http://github.com/emqx/opentelemetry-erlang", {tag, "v1.3.0-emqx"}, "apps/opentelemetry_api"}} + , {opentelemetry, {git_subdir, "http://github.com/emqx/opentelemetry-erlang", {tag, "v1.3.0-emqx"}, "apps/opentelemetry"}} %% log metrics - , {opentelemetry_experimental, {git_subdir, "http://github.com/emqx/opentelemetry-erlang", {branch, "main"}, "apps/opentelemetry_experimental"}} - , {opentelemetry_api_experimental, {git_subdir, "http://github.com/emqx/opentelemetry-erlang", {branch, "main"}, "apps/opentelemetry_api_experimental"}} + , {opentelemetry_experimental, {git_subdir, "http://github.com/emqx/opentelemetry-erlang", {tag, "v1.3.0-emqx"}, "apps/opentelemetry_experimental"}} + , {opentelemetry_api_experimental, {git_subdir, "http://github.com/emqx/opentelemetry-erlang", {tag, "v1.3.0-emqx"}, "apps/opentelemetry_api_experimental"}} %% export - , {opentelemetry_exporter, {git_subdir, "http://github.com/emqx/opentelemetry-erlang", {branch, "main"}, 
"apps/opentelemetry_exporter"}} + , {opentelemetry_exporter, {git_subdir, "http://github.com/emqx/opentelemetry-erlang", {tag, "v1.3.0-emqx"}, "apps/opentelemetry_exporter"}} ]}. {xref_ignores, diff --git a/rebar.config.erl b/rebar.config.erl index 6e1c64a40..3efdfe079 100644 --- a/rebar.config.erl +++ b/rebar.config.erl @@ -84,6 +84,7 @@ is_community_umbrella_app("apps/emqx_bridge_cassandra") -> false; is_community_umbrella_app("apps/emqx_bridge_opents") -> false; is_community_umbrella_app("apps/emqx_bridge_clickhouse") -> false; is_community_umbrella_app("apps/emqx_bridge_dynamo") -> false; +is_community_umbrella_app("apps/emqx_bridge_greptimedb") -> false; is_community_umbrella_app("apps/emqx_bridge_hstreamdb") -> false; is_community_umbrella_app("apps/emqx_bridge_influxdb") -> false; is_community_umbrella_app("apps/emqx_bridge_iotdb") -> false; @@ -189,7 +190,8 @@ test_deps() -> {meck, "0.9.2"}, {proper, "1.4.0"}, {er_coap_client, {git, "https://github.com/emqx/er_coap_client", {tag, "v1.0.5"}}}, - {erl_csv, "0.2.0"} + {erl_csv, "0.2.0"}, + {eministat, "0.10.1"} ]. common_compile_opts() -> diff --git a/rel/i18n/emqx_authn_schema.hocon b/rel/i18n/emqx_authn_schema.hocon index 98263ca49..a1910f95b 100644 --- a/rel/i18n/emqx_authn_schema.hocon +++ b/rel/i18n/emqx_authn_schema.hocon @@ -1,5 +1,32 @@ emqx_authn_schema { +global_authentication.desc: +"""Default authentication configs for all MQTT listeners. + +For per-listener overrides see authentication in listener configs + +This option can be configured with: +
    +
  • []: The default value, it allows *ALL* logins
  • +
  • one: For example {enable:true,backend:"built_in_database",mechanism="password_based"}
  • +
  • chain: An array of structs.
  • +
+ +When a chain is configured, the login credentials are checked against the backends per the configured order, until an 'allow' or 'deny' decision can be made. + +If there is no decision after a full chain exhaustion, the login is rejected.""" + +global_authentication.label: +"""Global authentication""" + +listener_authentication.desc: +"""Per-listener authentication override. +Authentication can be one single authenticator instance or a chain of authenticators as an array. +When authenticating a login (username, client ID, etc.) the authenticators are checked in the configured order.""" + +listener_authentication.label: +"""Per-listener authentication override""" + backend.desc: """Backend type.""" diff --git a/rel/i18n/emqx_bridge_azure_event_hub.hocon b/rel/i18n/emqx_bridge_azure_event_hub.hocon index e6b3172b9..a0ccb0f2f 100644 --- a/rel/i18n/emqx_bridge_azure_event_hub.hocon +++ b/rel/i18n/emqx_bridge_azure_event_hub.hocon @@ -291,10 +291,10 @@ auth_username_password.label: """Username/password Auth""" auth_sasl_password.desc: -"""The password for connecting to Azure Event Hub. Should be the "connection string-primary key" of a Namespace shared access policy.""" +"""The Connection String for connecting to Azure Event Hub. 
Should be the "connection string-primary key" of a Namespace shared access policy.""" auth_sasl_password.label: -"""Password""" +"""Connection String""" producer_kafka_opts.desc: """Azure Event Hub producer configs.""" diff --git a/rel/i18n/emqx_ldap.hocon b/rel/i18n/emqx_ldap.hocon index 99e00e63a..204431907 100644 --- a/rel/i18n/emqx_ldap.hocon +++ b/rel/i18n/emqx_ldap.hocon @@ -23,4 +23,10 @@ The syntax of the filter follows RFC 4515 and also supports placeholders.""" filter.label: """Filter""" +request_timeout.desc: +"""Sets the maximum time in milliseconds that is used for each individual request.""" + +request_timeout.label: +"""Request Timeout""" + } diff --git a/rel/i18n/emqx_schema.hocon b/rel/i18n/emqx_schema.hocon index 251dcdcb9..9e33ffb57 100644 --- a/rel/i18n/emqx_schema.hocon +++ b/rel/i18n/emqx_schema.hocon @@ -532,22 +532,6 @@ mqtt_server_keepalive.desc: mqtt_server_keepalive.label: """Server Keep Alive""" -global_authentication.desc: -"""Default authentication configs for all MQTT listeners. - -For per-listener overrides see authentication in listener configs - -This option can be configured with: -
    -
  • []: The default value, it allows *ALL* logins
  • -
  • one: For example {enable:true,backend:"built_in_database",mechanism="password_based"}
  • -
  • chain: An array of structs.
  • -
- -When a chain is configured, the login credentials are checked against the backends per the configured order, until an 'allow' or 'deny' decision can be made. - -If there is no decision after a full chain exhaustion, the login is rejected.""" - fields_mqtt_quic_listener_load_balancing_mode.desc: """0: Disabled, 1: SERVER_ID_IP, 2: SERVER_ID_FIXED. default: 0""" @@ -1103,14 +1087,6 @@ See: https://erlang.org/doc/man/inet.html#setopts-2""" fields_tcp_opts_active_n.label: """active_n""" -listener_authentication.desc: -"""Per-listener authentication override. -Authentication can be one single authenticator instance or a chain of authenticators as an array. -When authenticating a login (username, client ID, etc.) the authenticators are checked in the configured order.""" - -listener_authentication.label: -"""Per-listener authentication override""" - fields_trace_payload_encode.desc: """Determine the format of the payload format in the trace file.
`text`: Text-based protocol or plain text protocol. @@ -1573,6 +1549,13 @@ fields_ws_opts_max_frame_size.label: sys_event_messages.desc: """Client events messages.""" +broker_routing_storage_schema.desc: +"""Routing storage schema. +Set v1 to leave the default. +v2 is introduced in 5.2. It enables routing through 2 separate tables, one for topic filter and one for regular topic subscriptions. This schema should increase both subscription and routing performance at the cost of slight increase in memory consumption per subscription. +NOTE: Schema v2 is still experimental. +NOTE: Full non-rolling cluster restart is needed after altering this option for it to take any effect.""" + broker_perf_trie_compaction.desc: """Enable trie path compaction. Enabling it significantly improves wildcard topic subscribe rate, if wildcard topics have unique prefixes like: 'sensor/{{id}}/+/', where ID is unique per subscriber.