diff --git a/.github/workflows/build_packages.yaml b/.github/workflows/build_packages.yaml index 7ce71b1f3..8af709552 100644 --- a/.github/workflows/build_packages.yaml +++ b/.github/workflows/build_packages.yaml @@ -88,7 +88,7 @@ jobs: fetch-depth: 0 - uses: ilammy/msvc-dev-cmd@v1.12.0 - - uses: erlef/setup-beam@v1.15.2 + - uses: erlef/setup-beam@v1.15.4 with: otp-version: 25.3.2 - name: build diff --git a/.github/workflows/build_slim_packages.yaml b/.github/workflows/build_slim_packages.yaml index 981224d11..29e7bb13a 100644 --- a/.github/workflows/build_slim_packages.yaml +++ b/.github/workflows/build_slim_packages.yaml @@ -96,7 +96,7 @@ jobs: steps: - uses: actions/checkout@v3 - uses: ilammy/msvc-dev-cmd@v1.12.0 - - uses: erlef/setup-beam@v1.15.2 + - uses: erlef/setup-beam@v1.15.4 with: otp-version: ${{ matrix.otp }} - name: build diff --git a/.github/workflows/run_fvt_tests.yaml b/.github/workflows/run_fvt_tests.yaml index b9312d461..50d999851 100644 --- a/.github/workflows/run_fvt_tests.yaml +++ b/.github/workflows/run_fvt_tests.yaml @@ -58,7 +58,7 @@ jobs: arch: - amd64 steps: - - uses: erlef/setup-beam@v1.15.2 + - uses: erlef/setup-beam@v1.15.4 with: otp-version: 25.3.2 - uses: actions/download-artifact@v3 @@ -133,7 +133,7 @@ jobs: # - emqx-enterprise # TODO test enterprise steps: - - uses: erlef/setup-beam@v1.15.2 + - uses: erlef/setup-beam@v1.15.4 with: otp-version: 25.3.2 - uses: actions/download-artifact@v3 diff --git a/.github/workflows/run_jmeter_tests.yaml b/.github/workflows/run_jmeter_tests.yaml index 42ca9b2d3..3f45faaea 100644 --- a/.github/workflows/run_jmeter_tests.yaml +++ b/.github/workflows/run_jmeter_tests.yaml @@ -14,7 +14,7 @@ jobs: outputs: version: ${{ steps.build_docker.outputs.version}} steps: - - uses: erlef/setup-beam@v1.15.2 + - uses: erlef/setup-beam@v1.15.4 with: otp-version: 25.3.2 - name: download jmeter @@ -57,7 +57,7 @@ jobs: needs: build_emqx_for_jmeter_tests steps: - - uses: erlef/setup-beam@v1.15.2 + - uses: erlef/setup-beam@v1.15.4 with: otp-version: 25.3.2 - uses: actions/checkout@v3 @@ -153,7 +153,7 @@ jobs: needs: build_emqx_for_jmeter_tests steps: - - uses: erlef/setup-beam@v1.15.2 + - uses: erlef/setup-beam@v1.15.4 with: otp-version: 25.3.2 - uses: actions/checkout@v3 @@ -259,7 +259,7 @@ jobs: needs: build_emqx_for_jmeter_tests steps: - - uses: erlef/setup-beam@v1.15.2 + - uses: erlef/setup-beam@v1.15.4 with: otp-version: 25.3.2 - uses: actions/checkout@v3 @@ -361,7 +361,7 @@ jobs: needs: build_emqx_for_jmeter_tests steps: - - uses: erlef/setup-beam@v1.15.2 + - uses: erlef/setup-beam@v1.15.4 with: otp-version: 25.3.2 - uses: actions/checkout@v3 @@ -460,7 +460,7 @@ jobs: needs: build_emqx_for_jmeter_tests steps: - - uses: erlef/setup-beam@v1.15.2 + - uses: erlef/setup-beam@v1.15.4 with: otp-version: 25.3.2 - uses: actions/checkout@v3 diff --git a/.github/workflows/run_relup_tests.yaml b/.github/workflows/run_relup_tests.yaml index 4974e9aa9..4b33255c6 100644 --- a/.github/workflows/run_relup_tests.yaml +++ b/.github/workflows/run_relup_tests.yaml @@ -71,7 +71,7 @@ jobs: shell: bash steps: # setup Erlang to run lux - - uses: erlef/setup-beam@v1.15.2 + - uses: erlef/setup-beam@v1.15.4 with: otp-version: 25.3.2 - uses: actions/checkout@v3 diff --git a/Makefile b/Makefile index f322a0a80..1b4e843e6 100644 --- a/Makefile +++ b/Makefile @@ -16,7 +16,7 @@ endif # Dashbord version # from https://github.com/emqx/emqx-dashboard5 export EMQX_DASHBOARD_VERSION ?= v1.2.6-beta.1 -export EMQX_EE_DASHBOARD_VERSION ?= e1.1.0-beta.2 +export 
EMQX_EE_DASHBOARD_VERSION ?= e1.1.0-beta.3 # `:=` should be used here, otherwise the `$(shell ...)` will be executed every time when the variable is used # In make 4.4+, for backward-compatibility the value from the original environment is used. diff --git a/apps/emqx/include/asserts.hrl b/apps/emqx/include/asserts.hrl index 4936da1f9..8baa8fee8 100644 --- a/apps/emqx/include/asserts.hrl +++ b/apps/emqx/include/asserts.hrl @@ -59,3 +59,27 @@ end end)() ). + +-define(retrying(CONFIG, NUM_RETRIES, TEST_BODY_FN), begin + __TEST_CASE = ?FUNCTION_NAME, + (fun + __GO(__CONFIG, __N) when __N >= NUM_RETRIES -> + TEST_BODY_FN(__CONFIG); + __GO(__CONFIG, __N) -> + try + TEST_BODY_FN(__CONFIG) + catch + __KIND:__REASON:__STACKTRACE -> + ct:pal("test errored; will retry\n ~p", [ + #{kind => __KIND, reason => __REASON, stacktrace => __STACKTRACE} + ]), + end_per_testcase(__TEST_CASE, __CONFIG), + garbage_collect(), + timer:sleep(1000), + __CONFIG1 = init_per_testcase(__TEST_CASE, __CONFIG), + __GO(__CONFIG1, __N + 1) + end + end)( + CONFIG, 0 + ) +end). diff --git a/apps/emqx/src/bhvrs/emqx_config_backup.erl b/apps/emqx/src/bhvrs/emqx_config_backup.erl new file mode 100644 index 000000000..604fef106 --- /dev/null +++ b/apps/emqx/src/bhvrs/emqx_config_backup.erl @@ -0,0 +1,24 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_config_backup). + +-callback import_config(RawConf :: map()) -> + {ok, #{ + root_key => emqx_utils_maps:config_key(), + changed => [emqx_utils_maps:config_path()] + }} + | {error, #{root_key => emqx_utils_maps:config_key(), reason => term()}}. diff --git a/apps/emqx/src/bhvrs/emqx_db_backup.erl b/apps/emqx/src/bhvrs/emqx_db_backup.erl new file mode 100644 index 000000000..fddbdb1d0 --- /dev/null +++ b/apps/emqx/src/bhvrs/emqx_db_backup.erl @@ -0,0 +1,19 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_db_backup). + +-callback backup_tables() -> [mria:table()]. 
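The new ?retrying/3 macro added to asserts.hrl above re-runs a flaky common_test body, calling end_per_testcase/2 and init_per_testcase/2 between attempts, so both callbacks must exist in the calling suite. A minimal usage sketch, mirroring how t_cluster/1 in the Pulsar producer suite later in this patch calls ?retrying(Config0, 3, fun do_t_cluster/1); the module and test names below are hypothetical:

-module(my_retry_example_SUITE).

-include_lib("emqx/include/asserts.hrl").

-export([all/0, init_per_testcase/2, end_per_testcase/2]).
-export([t_flaky/1, do_t_flaky/1]).

all() -> [t_flaky].

init_per_testcase(_TestCase, Config) -> Config.

end_per_testcase(_TestCase, _Config) -> ok.

t_flaky(Config0) ->
    %% With NUM_RETRIES = 3, up to three failing attempts are caught, the
    %% testcase is torn down and re-initialised, and the body is retried;
    %% a failure on the final attempt propagates and fails the case as usual.
    ?retrying(Config0, 3, fun do_t_flaky/1).

do_t_flaky(Config) ->
    %% real setup and assertions go here
    true = is_list(Config),
    ok.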
diff --git a/apps/emqx/src/emqx_banned.erl b/apps/emqx/src/emqx_banned.erl
index a0ccd93d7..a5c46da19 100644
--- a/apps/emqx/src/emqx_banned.erl
+++ b/apps/emqx/src/emqx_banned.erl
@@ -17,6 +17,7 @@
 -module(emqx_banned).
 
 -behaviour(gen_server).
+-behaviour(emqx_db_backup).
 
 -include("emqx.hrl").
 -include("logger.hrl").
@@ -50,6 +51,8 @@
     code_change/3
 ]).
 
+-export([backup_tables/0]).
+
 %% Internal exports (RPC)
 -export([
     expire_banned_items/1
@@ -82,6 +85,11 @@ mnesia(boot) ->
         {storage_properties, [{ets, [{read_concurrency, true}]}]}
     ]).
 
+%%--------------------------------------------------------------------
+%% Data backup
+%%--------------------------------------------------------------------
+backup_tables() -> [?BANNED_TAB].
+
 %% @doc Start the banned server.
 -spec start_link() -> startlink_ret().
 start_link() ->
diff --git a/apps/emqx/src/emqx_config.erl b/apps/emqx/src/emqx_config.erl
index ff241ed0e..835379fe7 100644
--- a/apps/emqx/src/emqx_config.erl
+++ b/apps/emqx/src/emqx_config.erl
@@ -930,7 +930,9 @@ maybe_update_zone([RootName | T], RootValue, Value) when is_atom(RootName) ->
                end,
                ExistingZones
            ),
-    persistent_term:put(?PERSIS_KEY(?CONF, zones), NewZones)
+    ZonesKey = ?PERSIS_KEY(?CONF, zones),
+    persistent_term:put(ZonesKey, NewZones),
+    put_config_post_change_actions(ZonesKey, NewZones)
    end,
    NewRootValue.
diff --git a/apps/emqx/src/emqx_connection.erl b/apps/emqx/src/emqx_connection.erl
index 385c20393..1172460ac 100644
--- a/apps/emqx/src/emqx_connection.erl
+++ b/apps/emqx/src/emqx_connection.erl
@@ -275,28 +275,17 @@ stats(#state{
 async_set_keepalive(Idle, Interval, Probes) ->
     async_set_keepalive(os:type(), self(), Idle, Interval, Probes).
 
-async_set_keepalive({unix, linux}, Pid, Idle, Interval, Probes) ->
-    Options = [
-        {keepalive, true},
-        {raw, 6, 4, <<Idle:32/native>>},
-        {raw, 6, 5, <<Interval:32/native>>},
-        {raw, 6, 6, <<Probes:32/native>>}
-    ],
-    async_set_socket_options(Pid, Options);
-async_set_keepalive({unix, darwin}, Pid, Idle, Interval, Probes) ->
-    Options = [
-        {keepalive, true},
-        {raw, 6, 16#10, <<Idle:32/native>>},
-        {raw, 6, 16#101, <<Interval:32/native>>},
-        {raw, 6, 16#102, <<Probes:32/native>>}
-    ],
-    async_set_socket_options(Pid, Options);
-async_set_keepalive(OS, _Pid, _Idle, _Interval, _Probes) ->
-    ?SLOG(warning, #{
-        msg => "Unsupported operation: set TCP keepalive",
-        os => OS
-    }),
-    ok.
+async_set_keepalive(OS, Pid, Idle, Interval, Probes) ->
+    case emqx_utils:tcp_keepalive_opts(OS, Idle, Interval, Probes) of
+        {ok, Options} ->
+            async_set_socket_options(Pid, Options);
+        {error, {unsupported_os, OS}} ->
+            ?SLOG(warning, #{
+                msg => "Unsupported operation: set TCP keepalive",
+                os => OS
+            }),
+            ok
+    end.
 
 %% @doc Set custom socket options.
 %% This API is made async because the call might be originated from
diff --git a/apps/emqx/src/emqx_listeners.erl b/apps/emqx/src/emqx_listeners.erl
index f0dc47c7b..410519a6c 100644
--- a/apps/emqx/src/emqx_listeners.erl
+++ b/apps/emqx/src/emqx_listeners.erl
@@ -845,7 +845,9 @@ convert_certs(ListenerConf) ->
             Listeners1 =
                 maps:fold(
                     fun(Name, Conf, Acc1) ->
-                        Acc1#{Name => convert_certs(Type, Name, Conf)}
+                        Conf1 = convert_certs(Type, Name, Conf),
+                        Conf2 = convert_authn_certs(Type, Name, Conf1),
+                        Acc1#{Name => Conf2}
                     end,
                     #{},
                     Listeners0
@@ -868,6 +870,19 @@ convert_certs(Type, Name, Conf) ->
             throw({bad_ssl_config, Reason})
     end.
+convert_authn_certs(Type, Name, #{<<"authentication">> := AuthNList} = Conf) -> + ChainName = listener_id(Type, Name), + AuthNList1 = lists:map( + fun(AuthN) -> + CertsDir = emqx_authentication_config:certs_dir(ChainName, AuthN), + emqx_authentication_config:convert_certs(CertsDir, AuthN) + end, + AuthNList + ), + Conf#{<<"authentication">> => AuthNList1}; +convert_authn_certs(_Type, _Name, Conf) -> + Conf. + filter_stacktrace({Reason, _Stacktrace}) -> Reason; filter_stacktrace(Reason) -> Reason. diff --git a/apps/emqx/src/emqx_schema.erl b/apps/emqx/src/emqx_schema.erl index bb9dd7b3a..e9fb47eec 100644 --- a/apps/emqx/src/emqx_schema.erl +++ b/apps/emqx/src/emqx_schema.erl @@ -307,6 +307,8 @@ fields("persistent_session_store") -> boolean(), #{ default => false, + %% TODO(5.2): change field name to 'enable' and keep 'enabled' as an alias + aliases => [enable], desc => ?DESC(persistent_session_store_enabled) } )}, @@ -333,11 +335,11 @@ fields("persistent_session_store") -> default => #{ <<"type">> => <<"builtin">>, <<"session">> => - #{<<"ram_cache">> => <<"true">>}, + #{<<"ram_cache">> => true}, <<"session_messages">> => - #{<<"ram_cache">> => <<"true">>}, + #{<<"ram_cache">> => true}, <<"messages">> => - #{<<"ram_cache">> => <<"false">>} + #{<<"ram_cache">> => false} }, desc => ?DESC(persistent_session_store_backend) } @@ -2005,6 +2007,8 @@ base_listener(Bind) -> boolean(), #{ default => true, + %% TODO(5.2): change field name to 'enable' and keep 'enabled' as an alias + aliases => [enable], desc => ?DESC(fields_listener_enabled) } )}, diff --git a/apps/emqx/src/emqx_ssl_crl_cache.erl b/apps/emqx/src/emqx_ssl_crl_cache.erl index 13eccbd83..94d5e0697 100644 --- a/apps/emqx/src/emqx_ssl_crl_cache.erl +++ b/apps/emqx/src/emqx_ssl_crl_cache.erl @@ -33,11 +33,11 @@ %% limitations under the License. 
%%-------------------------------------------------------------------- -%---------------------------------------------------------------------- -% Based on `otp/lib/ssl/src/ssl_crl_cache.erl' -%---------------------------------------------------------------------- +%%---------------------------------------------------------------------- +%% Based on `otp/lib/ssl/src/ssl_crl_cache.erl' +%%---------------------------------------------------------------------- -%---------------------------------------------------------------------- +%%---------------------------------------------------------------------- %% Purpose: Simple default CRL cache %%---------------------------------------------------------------------- diff --git a/apps/emqx/test/emqx_common_test_helpers.erl b/apps/emqx/test/emqx_common_test_helpers.erl index e545bb624..1cfc10f74 100644 --- a/apps/emqx/test/emqx_common_test_helpers.erl +++ b/apps/emqx/test/emqx_common_test_helpers.erl @@ -725,10 +725,17 @@ start_slave(Name, Opts) when is_map(Opts) -> Node = node_name(Name), put_peer_mod(Node, SlaveMod), Cookie = atom_to_list(erlang:get_cookie()), + PrivDataDir = maps:get(priv_data_dir, Opts, "/tmp"), + NodeDataDir = filename:join([ + PrivDataDir, + Node, + integer_to_list(erlang:unique_integer()) + ]), DoStart = fun() -> case SlaveMod of ct_slave -> + ct:pal("~p: node data dir: ~s", [Node, NodeDataDir]), ct_slave:start( Node, [ @@ -739,7 +746,8 @@ start_slave(Name, Opts) when is_map(Opts) -> {erl_flags, erl_flags()}, {env, [ {"HOCON_ENV_OVERRIDE_PREFIX", "EMQX_"}, - {"EMQX_NODE__COOKIE", Cookie} + {"EMQX_NODE__COOKIE", Cookie}, + {"EMQX_NODE__DATA_DIR", NodeDataDir} ]} ] ); @@ -844,7 +852,14 @@ setup_node(Node, Opts) when is_map(Opts) -> integer_to_list(erlang:unique_integer()), "mnesia" ]), - erpc:call(Node, application, set_env, [mnesia, dir, MnesiaDataDir]), + case erpc:call(Node, application, get_env, [mnesia, dir, undefined]) of + undefined -> + ct:pal("~p: setting mnesia dir: ~p", [Node, MnesiaDataDir]), + erpc:call(Node, application, set_env, [mnesia, dir, MnesiaDataDir]); + PreviousMnesiaDir -> + ct:pal("~p: mnesia dir already set: ~p", [Node, PreviousMnesiaDir]), + ok + end, %% Needs to be set explicitly because ekka:start() (which calls `gen`) is called without Handler %% in emqx_common_test_helpers:start_apps(...) @@ -859,6 +874,12 @@ setup_node(Node, Opts) when is_map(Opts) -> %% Setting env before starting any applications set_envs(Node, Env), + NodeDataDir = filename:join([ + PrivDataDir, + node(), + integer_to_list(erlang:unique_integer()) + ]), + %% Here we start the apps EnvHandlerForRpc = fun(App) -> @@ -870,17 +891,10 @@ setup_node(Node, Opts) when is_map(Opts) -> %% to avoid sharing data between executions and/or %% nodes. these variables might not be in the %% config file (e.g.: emqx_enterprise_schema). - NodeDataDir = filename:join([ - PrivDataDir, - node(), - integer_to_list(erlang:unique_integer()) - ]), Cookie = atom_to_list(erlang:get_cookie()), - os:putenv("EMQX_NODE__DATA_DIR", NodeDataDir), - os:putenv("EMQX_NODE__COOKIE", Cookie), + set_env_once("EMQX_NODE__DATA_DIR", NodeDataDir), + set_env_once("EMQX_NODE__COOKIE", Cookie), emqx_config:init_load(SchemaMod), - os:unsetenv("EMQX_NODE__DATA_DIR"), - os:unsetenv("EMQX_NODE__COOKIE"), application:set_env(emqx, init_config_load_done, true) end, @@ -931,6 +945,15 @@ setup_node(Node, Opts) when is_map(Opts) -> %% Helpers +set_env_once(Var, Value) -> + case os:getenv(Var) of + false -> + os:putenv(Var, Value); + _OldValue -> + ok + end, + ok. 
+ put_peer_mod(Node, SlaveMod) -> put({?MODULE, Node}, SlaveMod), ok. @@ -1290,6 +1313,7 @@ call_janitor() -> call_janitor(Timeout) -> Janitor = get_or_spawn_janitor(), ok = emqx_test_janitor:stop(Janitor, Timeout), + erase({?MODULE, janitor_proc}), ok. get_or_spawn_janitor() -> diff --git a/apps/emqx/test/emqx_flapping_SUITE.erl b/apps/emqx/test/emqx_flapping_SUITE.erl index b03334dc6..62c967078 100644 --- a/apps/emqx/test/emqx_flapping_SUITE.erl +++ b/apps/emqx/test/emqx_flapping_SUITE.erl @@ -138,22 +138,32 @@ t_conf_update(_) -> t_conf_update_timer(_Config) -> _ = emqx_flapping:start_link(), - validate_timer([default]), + validate_timer([{default, true}]), + %% change zones {ok, _} = emqx:update_config([zones], #{ <<"timer_1">> => #{<<"flapping_detect">> => #{<<"enable">> => true}}, <<"timer_2">> => #{<<"flapping_detect">> => #{<<"enable">> => true}}, <<"timer_3">> => #{<<"flapping_detect">> => #{<<"enable">> => false}} }), - validate_timer([timer_1, timer_2, timer_3, default]), + validate_timer([{timer_1, true}, {timer_2, true}, {timer_3, false}, {default, true}]), + %% change global flapping_detect + Global = emqx:get_raw_config([flapping_detect]), + {ok, _} = emqx:update_config([flapping_detect], Global#{<<"enable">> => false}), + validate_timer([{timer_1, true}, {timer_2, true}, {timer_3, false}, {default, false}]), + %% reset + {ok, _} = emqx:update_config([flapping_detect], Global#{<<"enable">> => true}), + validate_timer([{timer_1, true}, {timer_2, true}, {timer_3, false}, {default, true}]), ok. -validate_timer(Names) -> +validate_timer(Lists) -> + {Names, _} = lists:unzip(Lists), Zones = emqx:get_config([zones]), ?assertEqual(lists:sort(Names), lists:sort(maps:keys(Zones))), Timers = sys:get_state(emqx_flapping), maps:foreach( fun(Name, #{flapping_detect := #{enable := Enable}}) -> + ?assertEqual(lists:keyfind(Name, 1, Lists), {Name, Enable}), ?assertEqual(Enable, is_reference(maps:get(Name, Timers)), Timers) end, Zones diff --git a/apps/emqx/test/emqx_test_janitor.erl b/apps/emqx/test/emqx_test_janitor.erl index 041b03fa7..2ee01e807 100644 --- a/apps/emqx/test/emqx_test_janitor.erl +++ b/apps/emqx/test/emqx_test_janitor.erl @@ -60,13 +60,14 @@ init(Parent) -> {ok, #{callbacks => [], owner => Parent}}. terminate(_Reason, #{callbacks := Callbacks}) -> - do_terminate(Callbacks). + _ = do_terminate(Callbacks), + ok. handle_call({push, Callback}, _From, State = #{callbacks := Callbacks}) -> {reply, ok, State#{callbacks := [Callback | Callbacks]}}; handle_call(terminate, _From, State = #{callbacks := Callbacks}) -> - do_terminate(Callbacks), - {stop, normal, ok, State}; + FailedCallbacks = do_terminate(Callbacks), + {stop, normal, ok, State#{callbacks := FailedCallbacks}}; handle_call(_Req, _From, State) -> {reply, error, State}. @@ -83,17 +84,18 @@ handle_info(_Msg, State) -> %%---------------------------------------------------------------------------------- do_terminate(Callbacks) -> - lists:foreach( - fun(Fun) -> + lists:foldl( + fun(Fun, Failed) -> try - Fun() + Fun(), + Failed catch K:E:S -> ct:pal("error executing callback ~p: ~p", [Fun, {K, E}]), ct:pal("stacktrace: ~p", [S]), - ok + [Fun | Failed] end end, + [], Callbacks - ), - ok. + ). diff --git a/apps/emqx_authn/src/emqx_authn.erl b/apps/emqx_authn/src/emqx_authn.erl index 515c3bfd6..2a8d82439 100644 --- a/apps/emqx_authn/src/emqx_authn.erl +++ b/apps/emqx_authn/src/emqx_authn.erl @@ -16,6 +16,8 @@ -module(emqx_authn). +-behaviour(emqx_config_backup). 
+ -export([ providers/0, check_config/1, @@ -24,6 +26,11 @@ get_enabled_authns/0 ]). +%% Data backup +-export([ + import_config/1 +]). + -include("emqx_authn.hrl"). providers() -> @@ -126,3 +133,32 @@ get_enabled_authns() -> tally_authenticators(#{id := AuthenticatorName}, Acc) -> maps:update_with(AuthenticatorName, fun(N) -> N + 1 end, 1, Acc). + +%%------------------------------------------------------------------------------ +%% Data backup +%%------------------------------------------------------------------------------ + +-define(IMPORT_OPTS, #{override_to => cluster}). + +import_config(RawConf) -> + AuthnList = authn_list(maps:get(?CONF_NS_BINARY, RawConf, [])), + OldAuthnList = emqx:get_raw_config([?CONF_NS_BINARY], []), + MergedAuthnList = emqx_utils:merge_lists( + OldAuthnList, AuthnList, fun emqx_authentication:authenticator_id/1 + ), + case emqx_conf:update([?CONF_NS_ATOM], MergedAuthnList, ?IMPORT_OPTS) of + {ok, #{raw_config := NewRawConf}} -> + {ok, #{root_key => ?CONF_NS_ATOM, changed => changed_paths(OldAuthnList, NewRawConf)}}; + Error -> + {error, #{root_key => ?CONF_NS_ATOM, reason => Error}} + end. + +changed_paths(OldAuthnList, NewAuthnList) -> + KeyFun = fun emqx_authentication:authenticator_id/1, + Changed = maps:get(changed, emqx_utils:diff_lists(NewAuthnList, OldAuthnList, KeyFun)), + [[?CONF_NS_BINARY, emqx_authentication:authenticator_id(OldAuthn)] || {OldAuthn, _} <- Changed]. + +authn_list(Authn) when is_list(Authn) -> + Authn; +authn_list(Authn) when is_map(Authn) -> + [Authn]. diff --git a/apps/emqx_authn/src/enhanced_authn/emqx_enhanced_authn_scram_mnesia.erl b/apps/emqx_authn/src/enhanced_authn/emqx_enhanced_authn_scram_mnesia.erl index b11b89081..158112747 100644 --- a/apps/emqx_authn/src/enhanced_authn/emqx_enhanced_authn_scram_mnesia.erl +++ b/apps/emqx_authn/src/enhanced_authn/emqx_enhanced_authn_scram_mnesia.erl @@ -22,6 +22,7 @@ -behaviour(hocon_schema). -behaviour(emqx_authentication). +-behaviour(emqx_db_backup). -export([ namespace/0, @@ -54,6 +55,8 @@ group_match_spec/1 ]). +-export([backup_tables/0]). + %% Internal exports (RPC) -export([ do_destroy/1, @@ -101,6 +104,12 @@ mnesia(boot) -> {storage_properties, [{ets, [{read_concurrency, true}]}]} ]). +%%------------------------------------------------------------------------------ +%% Data backup +%%------------------------------------------------------------------------------ + +backup_tables() -> [?TAB]. + %%------------------------------------------------------------------------------ %% Hocon Schema %%------------------------------------------------------------------------------ @@ -357,6 +366,9 @@ check_client_final_message(Bin, #{is_superuser := IsSuperuser} = Cache, #{algori add_user(UserGroup, UserID, Password, IsSuperuser, State) -> {StoredKey, ServerKey, Salt} = esasl_scram:generate_authentication_info(Password, State), + write_user(UserGroup, UserID, StoredKey, ServerKey, Salt, IsSuperuser). + +write_user(UserGroup, UserID, StoredKey, ServerKey, Salt, IsSuperuser) -> UserInfo = #user_info{ user_id = {UserGroup, UserID}, stored_key = StoredKey, diff --git a/apps/emqx_authn/src/simple_authn/emqx_authn_http.erl b/apps/emqx_authn/src/simple_authn/emqx_authn_http.erl index 421af074e..f20632414 100644 --- a/apps/emqx_authn/src/simple_authn/emqx_authn_http.erl +++ b/apps/emqx_authn/src/simple_authn/emqx_authn_http.erl @@ -435,19 +435,19 @@ parse_body(ContentType, _) -> uri_encode(T) -> emqx_http_lib:uri_encode(to_list(T)). 
-request_for_log(Credential, #{url := Url} = State) -> +request_for_log(Credential, #{url := Url, method := Method} = State) -> SafeCredential = emqx_authn_utils:without_password(Credential), case generate_request(SafeCredential, State) of {PathQuery, Headers} -> #{ - method => post, + method => Method, base_url => Url, path_query => PathQuery, headers => Headers }; {PathQuery, Headers, Body} -> #{ - method => post, + method => Method, base_url => Url, path_query => PathQuery, headers => Headers, diff --git a/apps/emqx_authn/src/simple_authn/emqx_authn_mnesia.erl b/apps/emqx_authn/src/simple_authn/emqx_authn_mnesia.erl index d57e9e00e..bf0b04d04 100644 --- a/apps/emqx_authn/src/simple_authn/emqx_authn_mnesia.erl +++ b/apps/emqx_authn/src/simple_authn/emqx_authn_mnesia.erl @@ -23,6 +23,7 @@ -behaviour(hocon_schema). -behaviour(emqx_authentication). +-behaviour(emqx_db_backup). -export([ namespace/0, @@ -66,6 +67,10 @@ import_csv/3 ]). +-export([mnesia/1]). + +-export([backup_tables/0]). + -type user_group() :: binary(). -type user_id() :: binary(). @@ -76,8 +81,6 @@ is_superuser :: boolean() }). --export([mnesia/1]). - -boot_mnesia({mnesia, [boot]}). -define(TAB, ?MODULE). @@ -103,6 +106,11 @@ mnesia(boot) -> {storage_properties, [{ets, [{read_concurrency, true}]}]} ]). +%%------------------------------------------------------------------------------ +%% Data backup +%%------------------------------------------------------------------------------ +backup_tables() -> [?TAB]. + %%------------------------------------------------------------------------------ %% Hocon Schema %%------------------------------------------------------------------------------ diff --git a/apps/emqx_authz/src/emqx_authz.erl b/apps/emqx_authz/src/emqx_authz.erl index 278b70d6d..3c9698de0 100644 --- a/apps/emqx_authz/src/emqx_authz.erl +++ b/apps/emqx_authz/src/emqx_authz.erl @@ -15,7 +15,9 @@ %%-------------------------------------------------------------------- -module(emqx_authz). + -behaviour(emqx_config_handler). +-behaviour(emqx_config_backup). -include("emqx_authz.hrl"). -include_lib("emqx/include/logger.hrl"). @@ -44,6 +46,13 @@ -export([acl_conf_file/0]). +%% Data backup +-export([ + import_config/1, + maybe_read_acl_file/1, + maybe_write_acl_file/1 +]). + -type source() :: map(). -type match_result() :: {matched, allow} | {matched, deny} | nomatch. @@ -326,9 +335,9 @@ init_metrics(Source) -> ) end. -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ %% AuthZ callbacks -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ %% @doc Check AuthZ -spec authorize( @@ -451,9 +460,58 @@ do_authorize( get_enabled_authzs() -> lists:usort([Type || #{type := Type, enable := true} <- lookup()]). 
-%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ +%% Data backup +%%------------------------------------------------------------------------------ + +import_config(#{?CONF_NS_BINARY := AuthzConf}) -> + Sources = maps:get(<<"sources">>, AuthzConf, []), + OldSources = emqx:get_raw_config(?CONF_KEY_PATH, []), + MergedSources = emqx_utils:merge_lists(OldSources, Sources, fun type/1), + MergedAuthzConf = AuthzConf#{<<"sources">> => MergedSources}, + case emqx_conf:update([?CONF_NS_ATOM], MergedAuthzConf, #{override_to => cluster}) of + {ok, #{raw_config := #{<<"sources">> := NewSources}}} -> + {ok, #{ + root_key => ?CONF_NS_ATOM, + changed => changed_paths(OldSources, NewSources) + }}; + Error -> + {error, #{root_key => ?CONF_NS_ATOM, reason => Error}} + end; +import_config(_RawConf) -> + {ok, #{root_key => ?CONF_NS_ATOM, changed => []}}. + +changed_paths(OldSources, NewSources) -> + Changed = maps:get(changed, emqx_utils:diff_lists(NewSources, OldSources, fun type/1)), + [?CONF_KEY_PATH ++ [type(OldSource)] || {OldSource, _} <- Changed]. + +maybe_read_acl_file(RawConf) -> + maybe_convert_acl_file(RawConf, fun read_acl_file/1). + +maybe_write_acl_file(RawConf) -> + maybe_convert_acl_file(RawConf, fun write_acl_file/1). + +maybe_convert_acl_file( + #{?CONF_NS_BINARY := #{<<"sources">> := Sources} = AuthRawConf} = RawConf, Fun +) -> + Sources1 = lists:map( + fun + (#{<<"type">> := <<"file">>} = FileSource) -> Fun(FileSource); + (Source) -> Source + end, + Sources + ), + RawConf#{?CONF_NS_BINARY => AuthRawConf#{<<"sources">> => Sources1}}; +maybe_convert_acl_file(RawConf, _Fun) -> + RawConf. + +read_acl_file(#{<<"path">> := Path} = Source) -> + {ok, Rules} = emqx_authz_file:read_file(Path), + maps:remove(<<"path">>, Source#{<<"rules">> => Rules}). + +%%------------------------------------------------------------------------------ %% Internal function -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ client_info_source() -> emqx_authz_client_info:create( diff --git a/apps/emqx_authz/src/emqx_authz_mnesia.erl b/apps/emqx_authz/src/emqx_authz_mnesia.erl index 58df08653..bdb4877c0 100644 --- a/apps/emqx_authz/src/emqx_authz_mnesia.erl +++ b/apps/emqx_authz/src/emqx_authz_mnesia.erl @@ -42,6 +42,7 @@ }). -behaviour(emqx_authz). +-behaviour(emqx_db_backup). %% AuthZ Callbacks -export([ @@ -65,6 +66,8 @@ record_count/0 ]). +-export([backup_tables/0]). + -ifdef(TEST). -compile(export_all). -compile(nowarn_export_all). @@ -119,6 +122,12 @@ authorize( end, do_authorize(Client, PubSub, Topic, Rules). +%%-------------------------------------------------------------------- +%% Data backup +%%-------------------------------------------------------------------- + +backup_tables() -> [?ACL_TABLE]. + %%-------------------------------------------------------------------- %% Management API %%-------------------------------------------------------------------- diff --git a/apps/emqx_auto_subscribe/src/emqx_auto_subscribe.erl b/apps/emqx_auto_subscribe/src/emqx_auto_subscribe.erl index 32892992a..f8ecde84b 100644 --- a/apps/emqx_auto_subscribe/src/emqx_auto_subscribe.erl +++ b/apps/emqx_auto_subscribe/src/emqx_auto_subscribe.erl @@ -16,6 +16,8 @@ -module(emqx_auto_subscribe). +-behaviour(emqx_config_backup). + -include_lib("emqx/include/emqx_hooks.hrl"). -behaviour(emqx_config_handler). 
@@ -24,7 +26,6 @@ -define(MAX_AUTO_SUBSCRIBE, 20). -% -export([load/0, unload/0]). -export([ @@ -40,6 +41,11 @@ %% exported for `emqx_telemetry' -export([get_basic_usage_info/0]). +%% Data backup +-export([ + import_config/1 +]). + load() -> ok = emqx_conf:add_handler([auto_subscribe, topics], ?MODULE), update_hook(). @@ -73,8 +79,9 @@ post_config_update(_KeyPath, _Req, NewTopics, _OldConf, _AppEnvs) -> Config = emqx_conf:get([auto_subscribe], #{}), update_hook(Config#{topics => NewTopics}). -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ %% hook +%%------------------------------------------------------------------------------ on_client_connected(ClientInfo, ConnInfo, {TopicHandler, Options}) -> case erlang:apply(TopicHandler, handle, [ClientInfo, ConnInfo, Options]) of @@ -87,17 +94,38 @@ on_client_connected(ClientInfo, ConnInfo, {TopicHandler, Options}) -> on_client_connected(_, _, _) -> ok. -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ %% Telemetry -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ -spec get_basic_usage_info() -> #{auto_subscribe_count => non_neg_integer()}. get_basic_usage_info() -> AutoSubscribe = emqx_conf:get([auto_subscribe, topics], []), #{auto_subscribe_count => length(AutoSubscribe)}. -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ +%% Data backup +%%------------------------------------------------------------------------------ + +import_config(#{<<"auto_subscribe">> := #{<<"topics">> := Topics}}) -> + ConfPath = [auto_subscribe, topics], + OldTopics = emqx:get_raw_config(ConfPath, []), + KeyFun = fun(#{<<"topic">> := T}) -> T end, + MergedTopics = emqx_utils:merge_lists(OldTopics, Topics, KeyFun), + case emqx_conf:update(ConfPath, MergedTopics, #{override_to => cluster}) of + {ok, #{raw_config := NewTopics}} -> + Changed = maps:get(changed, emqx_utils:diff_lists(NewTopics, OldTopics, KeyFun)), + Changed1 = [ConfPath ++ [T] || {#{<<"topic">> := T}, _} <- Changed], + {ok, #{root_key => auto_subscribe, changed => Changed1}}; + Error -> + {error, #{root_key => auto_subscribe, reason => Error}} + end; +import_config(_RawConf) -> + {ok, #{root_key => auto_subscribe, changed => []}}. + +%%------------------------------------------------------------------------------ %% internal +%%------------------------------------------------------------------------------ format(Rules) when is_list(Rules) -> [format(Rule) || Rule <- Rules]; diff --git a/apps/emqx_bridge/src/emqx_bridge.erl b/apps/emqx_bridge/src/emqx_bridge.erl index e282c3dd8..9bdc1b3c2 100644 --- a/apps/emqx_bridge/src/emqx_bridge.erl +++ b/apps/emqx_bridge/src/emqx_bridge.erl @@ -14,13 +14,19 @@ %% limitations under the License. %%-------------------------------------------------------------------- -module(emqx_bridge). + -behaviour(emqx_config_handler). +-behaviour(emqx_config_backup). + -include_lib("emqx/include/emqx.hrl"). -include_lib("emqx/include/logger.hrl"). -include_lib("emqx/include/emqx_hooks.hrl"). -include_lib("snabbkaffe/include/snabbkaffe.hrl"). --export([post_config_update/5]). +-export([ + pre_config_update/3, + post_config_update/5 +]). 
-export([ load_hook/0, @@ -53,6 +59,11 @@ %% exported for `emqx_telemetry' -export([get_basic_usage_info/0]). +%% Data backup +-export([ + import_config/1 +]). + -define(EGRESS_DIR_BRIDGES(T), T == webhook; T == mysql; @@ -80,8 +91,10 @@ T == iotdb ). +-define(ROOT_KEY, bridges). + load() -> - Bridges = emqx:get_config([bridges], #{}), + Bridges = emqx:get_config([?ROOT_KEY], #{}), lists:foreach( fun({Type, NamedConf}) -> lists:foreach( @@ -98,7 +111,7 @@ load() -> unload() -> unload_hook(), - Bridges = emqx:get_config([bridges], #{}), + Bridges = emqx:get_config([?ROOT_KEY], #{}), lists:foreach( fun({Type, NamedConf}) -> lists:foreach( @@ -139,7 +152,7 @@ reload_hook(Bridges) -> ok = load_hook(Bridges). load_hook() -> - Bridges = emqx:get_config([bridges], #{}), + Bridges = emqx:get_config([?ROOT_KEY], #{}), load_hook(Bridges). load_hook(Bridges) -> @@ -210,7 +223,7 @@ send_message(BridgeId, Message) -> send_message(BridgeType, BridgeName, ResId, Message). send_message(BridgeType, BridgeName, ResId, Message) -> - case emqx:get_config([bridges, BridgeType, BridgeName], not_found) of + case emqx:get_config([?ROOT_KEY, BridgeType, BridgeName], not_found) of not_found -> {error, bridge_not_found}; #{enable := true} = Config -> @@ -231,9 +244,14 @@ query_opts(Config) -> end. config_key_path() -> - [bridges]. + [?ROOT_KEY]. -post_config_update(_, _Req, NewConf, OldConf, _AppEnv) -> +pre_config_update([?ROOT_KEY], RawConf, RawConf) -> + {ok, RawConf}; +pre_config_update([?ROOT_KEY], NewConf, _RawConf) -> + {ok, convert_certs(NewConf)}. + +post_config_update([?ROOT_KEY], _Req, NewConf, OldConf, _AppEnv) -> #{added := Added, removed := Removed, changed := Updated} = diff_confs(NewConf, OldConf), %% The config update will be failed if any task in `perform_bridge_changes` failed. @@ -351,10 +369,74 @@ check_deps_and_remove(BridgeType, BridgeName, RemoveDeps) -> remove(BridgeType, BridgeName) end. +%%---------------------------------------------------------------------------------------- +%% Data backup +%%---------------------------------------------------------------------------------------- + +import_config(RawConf) -> + RootKeyPath = config_key_path(), + BridgesConf = maps:get(<<"bridges">>, RawConf, #{}), + OldBridgesConf = emqx:get_raw_config(RootKeyPath, #{}), + MergedConf = merge_confs(OldBridgesConf, BridgesConf), + case emqx_conf:update(RootKeyPath, MergedConf, #{override_to => cluster}) of + {ok, #{raw_config := NewRawConf}} -> + {ok, #{root_key => ?ROOT_KEY, changed => changed_paths(OldBridgesConf, NewRawConf)}}; + Error -> + {error, #{root_key => ?ROOT_KEY, reason => Error}} + end. + +merge_confs(OldConf, NewConf) -> + AllTypes = maps:keys(maps:merge(OldConf, NewConf)), + lists:foldr( + fun(Type, Acc) -> + NewBridges = maps:get(Type, NewConf, #{}), + OldBridges = maps:get(Type, OldConf, #{}), + Acc#{Type => maps:merge(OldBridges, NewBridges)} + end, + #{}, + AllTypes + ). + +changed_paths(OldRawConf, NewRawConf) -> + maps:fold( + fun(Type, Bridges, ChangedAcc) -> + OldBridges = maps:get(Type, OldRawConf, #{}), + Changed = maps:get(changed, emqx_utils_maps:diff_maps(Bridges, OldBridges)), + [[?ROOT_KEY, Type, K] || K <- maps:keys(Changed)] ++ ChangedAcc + end, + [], + NewRawConf + ). 
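The merge direction used by merge_confs/2 above is worth seeing concretely: bridges are merged per type with maps:merge/2, whose second argument wins on key clashes, so an imported bridge replaces an existing bridge of the same type and name while bridges absent from the import are kept. A small shell illustration with hypothetical bridge names:

Old = #{<<"a">> => #{<<"server">> => <<"old:1883">>}, <<"b">> => #{<<"enable">> => true}}.
New = #{<<"a">> => #{<<"server">> => <<"new:1883">>}}.
%% The imported <<"a">> wins, the untouched <<"b">> is kept:
#{
    <<"a">> := #{<<"server">> := <<"new:1883">>},
    <<"b">> := #{<<"enable">> := true}
} = maps:merge(Old, New).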
+ %%======================================================================================== %% Helper functions %%======================================================================================== +convert_certs(BridgesConf) -> + maps:map( + fun(Type, Bridges) -> + maps:map( + fun(Name, BridgeConf) -> + Path = filename:join([?ROOT_KEY, Type, Name]), + case emqx_connector_ssl:convert_certs(Path, BridgeConf) of + {error, Reason} -> + ?SLOG(error, #{ + msg => "bad_ssl_config", + type => Type, + name => Name, + reason => Reason + }), + throw({bad_ssl_config, Reason}); + {ok, BridgeConf1} -> + BridgeConf1 + end + end, + Bridges + ) + end, + BridgesConf + ). + perform_bridge_changes(Tasks) -> perform_bridge_changes(Tasks, ok). diff --git a/apps/emqx_bridge_iotdb/src/emqx_bridge_iotdb.erl b/apps/emqx_bridge_iotdb/src/emqx_bridge_iotdb.erl index 9a9e95b65..724c3f43a 100644 --- a/apps/emqx_bridge_iotdb/src/emqx_bridge_iotdb.erl +++ b/apps/emqx_bridge_iotdb/src/emqx_bridge_iotdb.erl @@ -141,14 +141,6 @@ request_config() -> default => 2, desc => ?DESC("config_max_retries") } - )}, - {request_timeout, - mk( - emqx_schema:timeout_duration_ms(), - #{ - default => <<"15s">>, - desc => ?DESC("config_request_timeout") - } )} ]. diff --git a/apps/emqx_bridge_iotdb/test/emqx_bridge_iotdb_impl_SUITE.erl b/apps/emqx_bridge_iotdb/test/emqx_bridge_iotdb_impl_SUITE.erl index f33795079..884f160f9 100644 --- a/apps/emqx_bridge_iotdb/test/emqx_bridge_iotdb_impl_SUITE.erl +++ b/apps/emqx_bridge_iotdb/test/emqx_bridge_iotdb_impl_SUITE.erl @@ -132,7 +132,7 @@ bridge_config(TestCase, _TestGroup, Config) -> " username = \"root\"\n" " password = \"root\"\n" " }\n" - "iotdb_version = \"~s\"\n" + " iotdb_version = \"~s\"\n" " pool_size = 1\n" " resource_opts = {\n" " health_check_interval = 5000\n" diff --git a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.erl b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.erl index 31307fd16..4db685657 100644 --- a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.erl +++ b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka.erl @@ -74,7 +74,8 @@ values(common_config) -> socket_opts => #{ sndbuf => <<"1024KB">>, recbuf => <<"1024KB">>, - nodelay => true + nodelay => true, + tcp_keepalive => <<"none">> } }; values(producer) -> @@ -236,7 +237,13 @@ fields(socket_opts) -> importance => ?IMPORTANCE_HIDDEN, desc => ?DESC(socket_nodelay) } - )} + )}, + {tcp_keepalive, + mk(string(), #{ + default => <<"none">>, + desc => ?DESC(socket_tcp_keepalive), + validator => fun emqx_schema:validate_tcp_keepalive/1 + })} ]; fields(producer_opts) -> [ @@ -276,7 +283,7 @@ fields(producer_kafka_opts) -> )}, {partition_count_refresh_interval, mk( - emqx_schema:duration_s(), + emqx_schema:timeout_duration_s(), #{ default => <<"60s">>, desc => ?DESC(partition_count_refresh_interval) @@ -396,8 +403,8 @@ fields(consumer_kafka_opts) -> )}, {offset_commit_interval_seconds, mk( - pos_integer(), - #{default => 5, desc => ?DESC(consumer_offset_commit_interval_seconds)} + emqx_schema:timeout_duration_s(), + #{default => <<"5s">>, desc => ?DESC(consumer_offset_commit_interval_seconds)} )} ]. diff --git a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl.erl b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl.erl index 22a67c551..747515d9b 100644 --- a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl.erl +++ b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl.erl @@ -8,9 +8,12 @@ -export([ hosts/1, make_client_id/2, - sasl/1 + sasl/1, + socket_opts/1 ]). +-include_lib("emqx/include/logger.hrl"). 
+ %% Parse comma separated host:port list into a [{Host,Port}] list hosts(Hosts) when is_binary(Hosts) -> hosts(binary_to_list(Hosts)); @@ -33,6 +36,51 @@ sasl(#{ }) -> {callback, brod_gssapi, {gssapi, KeyTabFile, Principal}}. +%% Extra socket options, such as sndbuf size etc. +socket_opts(Opts) when is_map(Opts) -> + socket_opts(maps:to_list(Opts)); +socket_opts(Opts) when is_list(Opts) -> + socket_opts_loop(Opts, []). + +socket_opts_loop([], Acc) -> + lists:reverse(Acc); +socket_opts_loop([{tcp_keepalive, KeepAlive} | Rest], Acc) -> + Acc1 = tcp_keepalive(KeepAlive) ++ Acc, + socket_opts_loop(Rest, Acc1); +socket_opts_loop([{T, Bytes} | Rest], Acc) when + T =:= sndbuf orelse T =:= recbuf orelse T =:= buffer +-> + Acc1 = [{T, Bytes} | adjust_socket_buffer(Bytes, Acc)], + socket_opts_loop(Rest, Acc1); +socket_opts_loop([Other | Rest], Acc) -> + socket_opts_loop(Rest, [Other | Acc]). + +%% https://www.erlang.org/doc/man/inet.html +%% For TCP it is recommended to have val(buffer) >= val(recbuf) +%% to avoid performance issues because of unnecessary copying. +adjust_socket_buffer(Bytes, Opts) -> + case lists:keytake(buffer, 1, Opts) of + false -> + [{buffer, Bytes} | Opts]; + {value, {buffer, Bytes1}, Acc1} -> + [{buffer, max(Bytes1, Bytes)} | Acc1] + end. + +tcp_keepalive(None) when None =:= "none"; None =:= <<"none">> -> + []; +tcp_keepalive(KeepAlive) -> + {Idle, Interval, Probes} = emqx_schema:parse_tcp_keepalive(KeepAlive), + case emqx_utils:tcp_keepalive_opts(os:type(), Idle, Interval, Probes) of + {ok, Opts} -> + Opts; + {error, {unsupported_os, OS}} -> + ?SLOG(warning, #{ + msg => "Unsupported operation: set TCP keepalive", + os => OS + }), + [] + end. + to_bin(A) when is_atom(A) -> atom_to_binary(A); to_bin(L) when is_list(L) -> diff --git a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl_consumer.erl b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl_consumer.erl index 9f4216297..e18bf7e29 100644 --- a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl_consumer.erl +++ b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl_consumer.erl @@ -129,6 +129,7 @@ on_start(ResourceId, Config) -> offset_commit_interval_seconds := _, offset_reset_policy := _ }, + socket_opts := SocketOpts0, ssl := SSL, topic_mapping := _ } = Config, @@ -142,8 +143,10 @@ on_start(ResourceId, Config) -> Auth -> [{sasl, emqx_bridge_kafka_impl:sasl(Auth)}] end, ClientOpts = add_ssl_opts(ClientOpts0, SSL), + SocketOpts = emqx_bridge_kafka_impl:socket_opts(SocketOpts0), + ClientOpts1 = [{extra_sock_opts, SocketOpts} | ClientOpts], ok = emqx_resource:allocate_resource(ResourceId, ?kafka_client_id, ClientID), - case brod:start_client(BootstrapHosts, ClientID, ClientOpts) of + case brod:start_client(BootstrapHosts, ClientID, ClientOpts1) of ok -> ?tp( kafka_consumer_client_started, diff --git a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl_producer.erl b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl_producer.erl index d72728072..6eccaaf09 100644 --- a/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl_producer.erl +++ b/apps/emqx_bridge_kafka/src/emqx_bridge_kafka_impl_producer.erl @@ -69,7 +69,7 @@ on_start(InstId, Config) -> connect_timeout => ConnTimeout, client_id => ClientId, request_timeout => MetaReqTimeout, - extra_sock_opts => socket_opts(SocketOpts), + extra_sock_opts => emqx_bridge_kafka_impl:socket_opts(SocketOpts), sasl => emqx_bridge_kafka_impl:sasl(Auth), ssl => ssl(SSL) }, @@ -321,33 +321,6 @@ do_get_status(Client, KafkaTopic) -> disconnected end. -%% Extra socket options, such as sndbuf size etc. 
-socket_opts(Opts) when is_map(Opts) -> - socket_opts(maps:to_list(Opts)); -socket_opts(Opts) when is_list(Opts) -> - socket_opts_loop(Opts, []). - -socket_opts_loop([], Acc) -> - lists:reverse(Acc); -socket_opts_loop([{T, Bytes} | Rest], Acc) when - T =:= sndbuf orelse T =:= recbuf orelse T =:= buffer --> - Acc1 = [{T, Bytes} | adjust_socket_buffer(Bytes, Acc)], - socket_opts_loop(Rest, Acc1); -socket_opts_loop([Other | Rest], Acc) -> - socket_opts_loop(Rest, [Other | Acc]). - -%% https://www.erlang.org/doc/man/inet.html -%% For TCP it is recommended to have val(buffer) >= val(recbuf) -%% to avoid performance issues because of unnecessary copying. -adjust_socket_buffer(Bytes, Opts) -> - case lists:keytake(buffer, 1, Opts) of - false -> - [{buffer, Bytes} | Opts]; - {value, {buffer, Bytes1}, Acc1} -> - [{buffer, max(Bytes1, Bytes)} | Acc1] - end. - ssl(#{enable := true} = SSL) -> emqx_tls_lib:to_client_opts(SSL); ssl(_) -> diff --git a/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_tests.erl b/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_tests.erl index 23bcf8e9d..0ccc19778 100644 --- a/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_tests.erl +++ b/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_tests.erl @@ -168,6 +168,24 @@ message_key_dispatch_validations_test() -> ), ok. +tcp_keepalive_validation_test_() -> + ProducerConf = parse(kafka_producer_new_hocon()), + ConsumerConf = parse(kafka_consumer_hocon()), + test_keepalive_validation([<<"kafka">>, <<"myproducer">>], ProducerConf) ++ + test_keepalive_validation([<<"kafka_consumer">>, <<"my_consumer">>], ConsumerConf). + +test_keepalive_validation(Name, Conf) -> + Path = [<<"bridges">>] ++ Name ++ [<<"socket_opts">>, <<"tcp_keepalive">>], + Conf1 = emqx_utils_maps:deep_force_put(Path, Conf, <<"5,6,7">>), + Conf2 = emqx_utils_maps:deep_force_put(Path, Conf, <<"none">>), + ValidConfs = [Conf, Conf1, Conf2], + InvalidConf = emqx_utils_maps:deep_force_put(Path, Conf, <<"invalid">>), + InvalidConf1 = emqx_utils_maps:deep_force_put(Path, Conf, <<"5,6">>), + InvalidConf2 = emqx_utils_maps:deep_force_put(Path, Conf, <<"5,6,1000">>), + InvalidConfs = [InvalidConf, InvalidConf1, InvalidConf2], + [?_assertMatch(#{<<"bridges">> := _}, check(C)) || C <- ValidConfs] ++ + [?_assertThrow(_, check(C)) || C <- InvalidConfs]. + %%=========================================================================== %% Helper functions %%=========================================================================== diff --git a/apps/emqx_bridge_pulsar/test/emqx_bridge_pulsar_impl_producer_SUITE.erl b/apps/emqx_bridge_pulsar/test/emqx_bridge_pulsar_impl_producer_SUITE.erl index ce14eb83d..9f9381c95 100644 --- a/apps/emqx_bridge_pulsar/test/emqx_bridge_pulsar_impl_producer_SUITE.erl +++ b/apps/emqx_bridge_pulsar/test/emqx_bridge_pulsar_impl_producer_SUITE.erl @@ -9,6 +9,7 @@ -include_lib("eunit/include/eunit.hrl"). -include_lib("common_test/include/ct.hrl"). -include_lib("snabbkaffe/include/snabbkaffe.hrl"). +-include_lib("emqx/include/asserts.hrl"). -import(emqx_common_test_helpers, [on_exit/1]). @@ -148,6 +149,7 @@ end_per_testcase(_Testcase, Config) -> true -> ok; false -> + ok = emqx_config:delete_override_conf_files(), ProxyHost = ?config(proxy_host, Config), ProxyPort = ?config(proxy_port, Config), emqx_common_test_helpers:reset_proxy(ProxyHost, ProxyPort), @@ -157,6 +159,7 @@ end_per_testcase(_Testcase, Config) -> %% machines struggle with all the containers running... emqx_common_test_helpers:call_janitor(60_000), ok = snabbkaffe:stop(), + flush_consumed(), ok end. 
@@ -373,7 +376,9 @@ start_consumer(TestCase, Config) -> (integer_to_binary(PulsarPort))/binary>> ), ConnOpts = #{}, - ConsumerClientId = TestCase, + ConsumerClientId = list_to_atom( + atom_to_list(TestCase) ++ integer_to_list(erlang:unique_integer()) + ), CertsPath = emqx_common_test_helpers:deps_path(emqx, "etc/certs"), SSLOpts = #{ enable => UseTLS, @@ -393,12 +398,12 @@ start_consumer(TestCase, Config) -> cb_init_args => #{send_to => self()}, cb_module => pulsar_echo_consumer, sub_type => 'Shared', - subscription => atom_to_list(TestCase), + subscription => atom_to_list(TestCase) ++ integer_to_list(erlang:unique_integer()), max_consumer_num => 1, %% Note! This must not coincide with the client %% id, or else weird bugs will happen, like the %% consumer never starts... - name => test_consumer, + name => list_to_atom("test_consumer" ++ integer_to_list(erlang:unique_integer())), consumer_id => 1, conn_opts => ConnOpts }, @@ -440,7 +445,10 @@ wait_until_connected(SupMod, Mod) -> ?retry( _Sleep = 300, _Attempts0 = 20, - lists:foreach(fun(P) -> {connected, _} = sys:get_state(P) end, Pids) + begin + true = length(Pids) > 0, + lists:foreach(fun(P) -> {connected, _} = sys:get_state(P) end, Pids) + end ), ok. @@ -483,6 +491,12 @@ receive_consumed(Timeout) -> ct:fail("no message consumed") end. +flush_consumed() -> + receive + {pulsar_message, _} -> flush_consumed() + after 0 -> ok + end. + try_decode_json(Payload) -> case emqx_utils_json:safe_decode(Payload, [return_maps]) of {error, _} -> @@ -1054,31 +1068,44 @@ t_resource_manager_crash_before_producers_started(Config) -> ), ok. -t_cluster(Config) -> - MQTTTopic = ?config(mqtt_topic, Config), - ResourceId = resource_id(Config), - Cluster = cluster(Config), - ClientId = emqx_guid:to_hexstr(emqx_guid:gen()), - QoS = 0, - Payload = emqx_guid:to_hexstr(emqx_guid:gen()), +t_cluster(Config0) -> + ct:timetrap({seconds, 120}), + ?retrying(Config0, 3, fun do_t_cluster/1). + +do_t_cluster(Config) -> ?check_trace( begin + MQTTTopic = ?config(mqtt_topic, Config), + ResourceId = resource_id(Config), + Cluster = cluster(Config), + ClientId = emqx_guid:to_hexstr(emqx_guid:gen()), + QoS = 0, + Payload = emqx_guid:to_hexstr(emqx_guid:gen()), + NumNodes = length(Cluster), + {ok, SRef0} = snabbkaffe:subscribe( + ?match_event(#{?snk_kind := emqx_bridge_app_started}), + NumNodes, + 25_000 + ), Nodes = [N1, N2 | _] = start_cluster(Cluster), %% wait until bridge app supervisor is up; by that point, %% `emqx_config_handler:add_handler' has been called and the node should be %% ready to create bridges. 
- NumNodes = length(Nodes), - {ok, _} = snabbkaffe:block_until( - ?match_n_events(NumNodes, #{?snk_kind := emqx_bridge_app_started}), - 15_000 - ), - {ok, SRef0} = snabbkaffe:subscribe( + {ok, _} = snabbkaffe:receive_events(SRef0), + {ok, SRef1} = snabbkaffe:subscribe( ?match_event(#{?snk_kind := pulsar_producer_bridge_started}), NumNodes, - 15_000 + 25_000 ), {ok, _} = erpc:call(N1, fun() -> create_bridge(Config) end), - {ok, _} = snabbkaffe:receive_events(SRef0), + {ok, _} = snabbkaffe:receive_events(SRef1), + {ok, _} = snabbkaffe:block_until( + ?match_n_events( + NumNodes, + #{?snk_kind := bridge_post_config_update_done} + ), + 25_000 + ), lists:foreach( fun(N) -> ?retry( @@ -1095,6 +1122,7 @@ t_cluster(Config) -> ), erpc:multicall(Nodes, fun wait_until_producer_connected/0), Message0 = emqx_message:make(ClientId, QoS, MQTTTopic, Payload), + ?tp(publishing_message, #{}), erpc:call(N2, emqx, publish, [Message0]), lists:foreach( @@ -1108,10 +1136,7 @@ t_cluster(Config) -> Nodes ), - ok - end, - fun(_Trace) -> - Data0 = receive_consumed(10_000), + Data0 = receive_consumed(30_000), ?assertMatch( [ #{ @@ -1123,7 +1148,9 @@ t_cluster(Config) -> ], Data0 ), + ok - end + end, + [] ), ok. diff --git a/apps/emqx_bridge_rabbitmq/test/emqx_bridge_rabbitmq_SUITE.erl b/apps/emqx_bridge_rabbitmq/test/emqx_bridge_rabbitmq_SUITE.erl index 45a8693e6..e6a6c03fb 100644 --- a/apps/emqx_bridge_rabbitmq/test/emqx_bridge_rabbitmq_SUITE.erl +++ b/apps/emqx_bridge_rabbitmq/test/emqx_bridge_rabbitmq_SUITE.erl @@ -1,4 +1,4 @@ -%-------------------------------------------------------------------- +%%-------------------------------------------------------------------- %% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved. %%-------------------------------------------------------------------- diff --git a/apps/emqx_bridge_rocketmq/src/emqx_bridge_rocketmq_connector.erl b/apps/emqx_bridge_rocketmq/src/emqx_bridge_rocketmq_connector.erl index ce80e0888..dbac88249 100644 --- a/apps/emqx_bridge_rocketmq/src/emqx_bridge_rocketmq_connector.erl +++ b/apps/emqx_bridge_rocketmq/src/emqx_bridge_rocketmq_connector.erl @@ -1,4 +1,4 @@ -%-------------------------------------------------------------------- +%%-------------------------------------------------------------------- %% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved. %%-------------------------------------------------------------------- diff --git a/apps/emqx_conf/src/emqx_conf_app.erl b/apps/emqx_conf/src/emqx_conf_app.erl index c09faec89..6a7638cc2 100644 --- a/apps/emqx_conf/src/emqx_conf_app.erl +++ b/apps/emqx_conf/src/emqx_conf_app.erl @@ -261,9 +261,13 @@ sync_data_from_node(Node) -> {ok, DataBin} -> case zip:unzip(DataBin, [{cwd, emqx:data_dir()}]) of {ok, []} -> - ?SLOG(debug, #{node => Node, msg => "sync_data_from_node_ignore"}); + ?SLOG(debug, #{node => Node, msg => "sync_data_from_node_empty_response"}); {ok, Files} -> - ?SLOG(debug, #{node => Node, msg => "sync_data_from_node_ok", files => Files}) + ?SLOG(debug, #{ + node => Node, + msg => "sync_data_from_node_non_empty_response", + files => Files + }) end, ok; Error -> diff --git a/apps/emqx_conf/test/emqx_conf_app_SUITE.erl b/apps/emqx_conf/test/emqx_conf_app_SUITE.erl index 95b4ce697..2a9888451 100644 --- a/apps/emqx_conf/test/emqx_conf_app_SUITE.erl +++ b/apps/emqx_conf/test/emqx_conf_app_SUITE.erl @@ -20,14 +20,17 @@ -compile(nowarn_export_all). -include_lib("eunit/include/eunit.hrl"). +-include_lib("common_test/include/ct.hrl"). 
all() -> emqx_common_test_helpers:all(?MODULE). -t_copy_conf_override_on_restarts(_Config) -> +t_copy_conf_override_on_restarts(Config) -> ct:timetrap({seconds, 120}), snabbkaffe:fix_ct_logging(), - Cluster = cluster([cluster_spec({core, 1}), cluster_spec({core, 2}), cluster_spec({core, 3})]), + Cluster = cluster( + [cluster_spec({core, 1}), cluster_spec({core, 2}), cluster_spec({core, 3})], Config + ), %% 1. Start all nodes Nodes = start_cluster(Cluster), @@ -50,16 +53,19 @@ t_copy_conf_override_on_restarts(_Config) -> stop_cluster(Nodes) end. -t_copy_new_data_dir(_Config) -> +t_copy_new_data_dir(Config) -> net_kernel:start(['master1@127.0.0.1', longnames]), ct:timetrap({seconds, 120}), snabbkaffe:fix_ct_logging(), - Cluster = cluster([cluster_spec({core, 4}), cluster_spec({core, 5}), cluster_spec({core, 6})]), + Cluster = cluster( + [cluster_spec({core, 4}), cluster_spec({core, 5}), cluster_spec({core, 6})], Config + ), %% 1. Start all nodes [First | Rest] = Nodes = start_cluster(Cluster), try - File = "/configs/cluster.hocon", + NodeDataDir = erpc:call(First, emqx, data_dir, []), + File = NodeDataDir ++ "/configs/cluster.hocon", assert_config_load_done(Nodes), rpc:call(First, ?MODULE, create_data_dir, [File]), {[ok, ok, ok], []} = rpc:multicall(Nodes, application, stop, [emqx_conf]), @@ -74,16 +80,19 @@ t_copy_new_data_dir(_Config) -> stop_cluster(Nodes) end. -t_copy_deprecated_data_dir(_Config) -> +t_copy_deprecated_data_dir(Config) -> net_kernel:start(['master2@127.0.0.1', longnames]), ct:timetrap({seconds, 120}), snabbkaffe:fix_ct_logging(), - Cluster = cluster([cluster_spec({core, 7}), cluster_spec({core, 8}), cluster_spec({core, 9})]), + Cluster = cluster( + [cluster_spec({core, 7}), cluster_spec({core, 8}), cluster_spec({core, 9})], Config + ), %% 1. Start all nodes [First | Rest] = Nodes = start_cluster(Cluster), try - File = "/configs/cluster-override.conf", + NodeDataDir = erpc:call(First, emqx, data_dir, []), + File = NodeDataDir ++ "/configs/cluster-override.conf", assert_config_load_done(Nodes), rpc:call(First, ?MODULE, create_data_dir, [File]), {[ok, ok, ok], []} = rpc:multicall(Nodes, application, stop, [emqx_conf]), @@ -98,11 +107,13 @@ t_copy_deprecated_data_dir(_Config) -> stop_cluster(Nodes) end. 
-t_no_copy_from_newer_version_node(_Config) -> +t_no_copy_from_newer_version_node(Config) -> net_kernel:start(['master2@127.0.0.1', longnames]), ct:timetrap({seconds, 120}), snabbkaffe:fix_ct_logging(), - Cluster = cluster([cluster_spec({core, 10}), cluster_spec({core, 11}), cluster_spec({core, 12})]), + Cluster = cluster( + [cluster_spec({core, 10}), cluster_spec({core, 11}), cluster_spec({core, 12})], Config + ), OKs = [ok, ok, ok], [First | Rest] = Nodes = start_cluster(Cluster), try @@ -131,56 +142,60 @@ t_no_copy_from_newer_version_node(_Config) -> %%------------------------------------------------------------------------------ create_data_dir(File) -> - Node = atom_to_list(node()), - ok = filelib:ensure_dir(Node ++ "/certs/"), - ok = filelib:ensure_dir(Node ++ "/authz/"), - ok = filelib:ensure_dir(Node ++ "/configs/"), - ok = file:write_file(Node ++ "/certs/fake-cert", list_to_binary(Node)), - ok = file:write_file(Node ++ "/authz/fake-authz", list_to_binary(Node)), + NodeDataDir = emqx:data_dir(), + ok = filelib:ensure_dir(NodeDataDir ++ "/certs/"), + ok = filelib:ensure_dir(NodeDataDir ++ "/authz/"), + ok = filelib:ensure_dir(NodeDataDir ++ "/configs/"), + ok = file:write_file(NodeDataDir ++ "/certs/fake-cert", list_to_binary(NodeDataDir)), + ok = file:write_file(NodeDataDir ++ "/authz/fake-authz", list_to_binary(NodeDataDir)), Telemetry = <<"telemetry.enable = false">>, - ok = file:write_file(Node ++ File, Telemetry). + ok = file:write_file(File, Telemetry). set_data_dir_env() -> - Node = atom_to_list(node()), + NodeDataDir = emqx:data_dir(), + NodeStr = atom_to_list(node()), %% will create certs and authz dir - ok = filelib:ensure_dir(Node ++ "/configs/"), + ok = filelib:ensure_dir(NodeDataDir ++ "/configs/"), {ok, [ConfigFile]} = application:get_env(emqx, config_files), - NewConfigFile = ConfigFile ++ "." ++ Node, + NewConfigFile = ConfigFile ++ "." ++ NodeStr, + ok = filelib:ensure_dir(NewConfigFile), {ok, _} = file:copy(ConfigFile, NewConfigFile), Bin = iolist_to_binary(io_lib:format("node.config_files = [~p]~n", [NewConfigFile])), ok = file:write_file(NewConfigFile, Bin, [append]), - DataDir = iolist_to_binary(io_lib:format("node.data_dir = ~p~n", [Node])), + DataDir = iolist_to_binary(io_lib:format("node.data_dir = ~p~n", [NodeDataDir])), ok = file:write_file(NewConfigFile, DataDir, [append]), application:set_env(emqx, config_files, [NewConfigFile]), - application:set_env(emqx, data_dir, Node), + %% application:set_env(emqx, data_dir, Node), %% We set env both cluster.hocon and cluster-override.conf, but only one will be used - application:set_env(emqx, cluster_hocon_file, Node ++ "/configs/cluster.hocon"), - application:set_env(emqx, cluster_override_conf_file, Node ++ "/configs/cluster-override.conf"), + application:set_env(emqx, cluster_hocon_file, NodeDataDir ++ "/configs/cluster.hocon"), + application:set_env( + emqx, cluster_override_conf_file, NodeDataDir ++ "/configs/cluster-override.conf" + ), ok. 
-assert_data_copy_done([First0 | Rest], File) -> - First = atom_to_list(First0), - {ok, FakeCertFile} = file:read_file(First ++ "/certs/fake-cert"), - {ok, FakeAuthzFile} = file:read_file(First ++ "/authz/fake-authz"), - {ok, FakeOverrideFile} = file:read_file(First ++ File), +assert_data_copy_done([_First | Rest], File) -> + FirstDataDir = filename:dirname(filename:dirname(File)), + {ok, FakeCertFile} = file:read_file(FirstDataDir ++ "/certs/fake-cert"), + {ok, FakeAuthzFile} = file:read_file(FirstDataDir ++ "/authz/fake-authz"), + {ok, FakeOverrideFile} = file:read_file(File), {ok, ExpectFake} = hocon:binary(FakeOverrideFile), lists:foreach( fun(Node0) -> - Node = atom_to_list(Node0), + NodeDataDir = erpc:call(Node0, emqx, data_dir, []), ?assertEqual( {ok, FakeCertFile}, - file:read_file(Node ++ "/certs/fake-cert"), - #{node => Node} + file:read_file(NodeDataDir ++ "/certs/fake-cert"), + #{node => Node0} ), ?assertEqual( {ok, ExpectFake}, - hocon:files([Node ++ File]), - #{node => Node} + hocon:files([File]), + #{node => Node0} ), ?assertEqual( {ok, FakeAuthzFile}, - file:read_file(Node ++ "/authz/fake-authz"), - #{node => Node} + file:read_file(NodeDataDir ++ "/authz/fake-authz"), + #{node => Node0} ) end, Rest @@ -207,7 +222,7 @@ assert_config_load_done(Nodes) -> ). stop_cluster(Nodes) -> - [emqx_common_test_helpers:stop_slave(Node) || Node <- Nodes]. + emqx_utils:pmap(fun emqx_common_test_helpers:stop_slave/1, Nodes). start_cluster(Specs) -> [emqx_common_test_helpers:start_slave(Name, Opts) || {Name, Opts} <- Specs]. @@ -222,7 +237,8 @@ start_cluster_async(Specs) -> || {Name, Opts} <- Specs ]. -cluster(Specs) -> +cluster(Specs, Config) -> + PrivDataDir = ?config(priv_dir, Config), Env = [ {emqx, init_config_load_done, false}, {emqx, boot_modules, []} @@ -232,6 +248,7 @@ cluster(Specs) -> {apps, [emqx_conf]}, {load_schema, false}, {join_to, true}, + {priv_data_dir, PrivDataDir}, {env_handler, fun (emqx) -> application:set_env(emqx, boot_modules, []), diff --git a/apps/emqx_dashboard/src/emqx_dashboard.erl b/apps/emqx_dashboard/src/emqx_dashboard.erl index ca995990d..a69a1fea2 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard.erl +++ b/apps/emqx_dashboard/src/emqx_dashboard.erl @@ -72,8 +72,7 @@ start_listeners(Listeners) -> {"/", cowboy_static, {priv_file, emqx_dashboard, "www/index.html"}}, {"/static/[...]", cowboy_static, {priv_dir, emqx_dashboard, "www/static"}}, {emqx_mgmt_api_status:path(), emqx_mgmt_api_status, []}, - {emqx_dashboard_swagger:relative_uri("/[...]"), emqx_dashboard_bad_api, []}, - {'_', cowboy_static, {priv_file, emqx_dashboard, "www/index.html"}} + {'_', emqx_dashboard_not_found, []} ], BaseMinirest = #{ base_path => emqx_dashboard_swagger:base_path(), diff --git a/apps/emqx_dashboard/src/emqx_dashboard_admin.erl b/apps/emqx_dashboard/src/emqx_dashboard_admin.erl index aaa43d621..e8f95d609 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard_admin.erl +++ b/apps/emqx_dashboard/src/emqx_dashboard_admin.erl @@ -24,6 +24,8 @@ -boot_mnesia({mnesia, [boot]}). +-behaviour(emqx_db_backup). + %% Mnesia bootstrap -export([mnesia/1]). @@ -54,6 +56,8 @@ default_username/0 ]). +-export([backup_tables/0]). + -type emqx_admin() :: #?ADMIN{}. -define(BOOTSTRAP_USER_TAG, <<"bootstrap user">>). @@ -76,6 +80,12 @@ mnesia(boot) -> ]} ]). +%%-------------------------------------------------------------------- +%% Data backup +%%-------------------------------------------------------------------- + +backup_tables() -> [?ADMIN]. 
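For context, the emqx_db_backup behaviour adopted above only asks a module to name the Mnesia tables it wants included in data export. A hedged sketch with an invented module and table name, mirroring the dashboard-admin adoption:

%% Hypothetical module, not part of this change set.
-module(my_backup_aware_store).

-behaviour(emqx_db_backup).

-export([backup_tables/0]).

%% Tables returned here are picked up by data export/import.
backup_tables() -> [my_store_table].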
+ %%-------------------------------------------------------------------- %% bootstrap API %%-------------------------------------------------------------------- diff --git a/apps/emqx_dashboard/src/emqx_dashboard_error_code_api.erl b/apps/emqx_dashboard/src/emqx_dashboard_error_code_api.erl index 47ba02ebf..b0e24527f 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard_error_code_api.erl +++ b/apps/emqx_dashboard/src/emqx_dashboard_error_code_api.erl @@ -63,7 +63,7 @@ schema("/error_codes/:code") -> 'operationId' => error_code, get => #{ security => [], - description => ?DESC(error_codes_u), + description => ?DESC(error_codes), tags => [<<"Error Codes">>], parameters => [ {code, diff --git a/apps/emqx_dashboard/src/emqx_dashboard_monitor_api.erl b/apps/emqx_dashboard/src/emqx_dashboard_monitor_api.erl index e80b2da31..a152531f1 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard_monitor_api.erl +++ b/apps/emqx_dashboard/src/emqx_dashboard_monitor_api.erl @@ -65,7 +65,7 @@ schema("/monitor_current") -> 'operationId' => monitor_current, get => #{ tags => [<<"Metrics">>], - description => ?DESC(current_status), + description => ?DESC(current_stats), responses => #{ 200 => hoconsc:mk(hoconsc:ref(sampler_current), #{}) } @@ -76,7 +76,7 @@ schema("/monitor_current/nodes/:node") -> 'operationId' => monitor_current, get => #{ tags => [<<"Metrics">>], - description => ?DESC(current_status_node), + description => ?DESC(current_stats_node), parameters => [parameter_node()], responses => #{ 200 => hoconsc:mk(hoconsc:ref(sampler_current), #{}), diff --git a/apps/emqx_dashboard/src/emqx_dashboard_bad_api.erl b/apps/emqx_dashboard/src/emqx_dashboard_not_found.erl similarity index 63% rename from apps/emqx_dashboard/src/emqx_dashboard_bad_api.erl rename to apps/emqx_dashboard/src/emqx_dashboard_not_found.erl index 6d65ac081..20fe81483 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard_bad_api.erl +++ b/apps/emqx_dashboard/src/emqx_dashboard_not_found.erl @@ -14,7 +14,7 @@ %% limitations under the License. %%-------------------------------------------------------------------- --module(emqx_dashboard_bad_api). +-module(emqx_dashboard_not_found). -include_lib("emqx/include/logger.hrl"). @@ -23,10 +23,22 @@ init(Req0, State) -> RedactedReq = emqx_utils:redact(Req0), ?SLOG(warning, #{msg => "unexpected_api_access", request => RedactedReq}), + CT = ct(cowboy_req:header(<<"accept">>, Req0, <<"text/html">>)), Req = cowboy_req:reply( 404, - #{<<"content-type">> => <<"application/json">>}, - <<"{\"code\": \"API_NOT_EXIST\", \"message\": \"Request Path Not Found\"}">>, + #{<<"content-type">> => CT}, + ct_body(CT), RedactedReq ), {ok, Req, State}. + +ct(<<"text/plain", _/binary>>) -> <<"text/plain">>; +ct(<<"application/json", _/binary>>) -> <<"application/json">>; +ct(_AnyOther) -> <<"text/html">>. + +ct_body(<<"text/html">>) -> + <<"404 - NOT FOUND
">>; +ct_body(<<"text/plain">>) -> + <<"404 - NOT FOUND">>; +ct_body(<<"application/json">>) -> + <<"{\"code\": \"NOT_FOUND\", \"message\": \"Request Path Not Found\"}">>. diff --git a/apps/emqx_dashboard/test/emqx_dashboard_monitor_SUITE.erl b/apps/emqx_dashboard/test/emqx_dashboard_monitor_SUITE.erl index ae0d4171b..71e559647 100644 --- a/apps/emqx_dashboard/test/emqx_dashboard_monitor_SUITE.erl +++ b/apps/emqx_dashboard/test/emqx_dashboard_monitor_SUITE.erl @@ -99,9 +99,7 @@ t_monitor_current_api_live_connections(_) -> ok = emqtt:disconnect(C), {ok, C1} = emqtt:start_link([{clean_start, true}, {clientid, ClientId1}]), {ok, _} = emqtt:connect(C1), - %% waiting for emqx_stats ticker - timer:sleep(1500), - _ = emqx_dashboard_monitor:current_rate(), + ok = waiting_emqx_stats_and_monitor_update('live_connections.max'), {ok, Rate} = request(["monitor_current"]), ?assertEqual(1, maps:get(<<"live_connections">>, Rate)), ?assertEqual(2, maps:get(<<"connections">>, Rate)), @@ -181,3 +179,24 @@ wait_new_monitor(OldMonitor, Count) -> timer:sleep(100), wait_new_monitor(OldMonitor, Count - 1) end. + +waiting_emqx_stats_and_monitor_update(WaitKey) -> + Self = self(), + meck:new(emqx_stats, [passthrough]), + meck:expect( + emqx_stats, + setstat, + fun(Stat, MaxStat, Val) -> + (Stat =:= WaitKey orelse MaxStat =:= WaitKey) andalso (Self ! updated), + meck:passthrough([Stat, MaxStat, Val]) + end + ), + receive + updated -> ok + after 5000 -> + error(waiting_emqx_stats_update_timeout) + end, + meck:unload([emqx_stats]), + %% manually call monitor update + _ = emqx_dashboard_monitor:current_rate(), + ok. diff --git a/apps/emqx_dashboard/test/emqx_dashboard_bad_api_SUITE.erl b/apps/emqx_dashboard/test/emqx_dashboard_not_found_SUITE.erl similarity index 93% rename from apps/emqx_dashboard/test/emqx_dashboard_bad_api_SUITE.erl rename to apps/emqx_dashboard/test/emqx_dashboard_not_found_SUITE.erl index 92327a7db..3a8e23c21 100644 --- a/apps/emqx_dashboard/test/emqx_dashboard_bad_api_SUITE.erl +++ b/apps/emqx_dashboard/test/emqx_dashboard_not_found_SUITE.erl @@ -14,7 +14,7 @@ %% limitations under the License. %%-------------------------------------------------------------------- --module(emqx_dashboard_bad_api_SUITE). +-module(emqx_dashboard_not_found_SUITE). -compile(nowarn_export_all). -compile(export_all). @@ -23,7 +23,7 @@ -include_lib("eunit/include/eunit.hrl"). --define(SERVER, "http://127.0.0.1:18083/api/v5"). +-define(SERVER, "http://127.0.0.1:18083/"). -import(emqx_mgmt_api_test_util, [request/2]). diff --git a/apps/emqx_exhook/src/emqx_exhook_mgr.erl b/apps/emqx_exhook/src/emqx_exhook_mgr.erl index 80a508f62..6ff5350e2 100644 --- a/apps/emqx_exhook/src/emqx_exhook_mgr.erl +++ b/apps/emqx_exhook/src/emqx_exhook_mgr.erl @@ -18,6 +18,7 @@ -module(emqx_exhook_mgr). -behaviour(gen_server). +-behaviour(emqx_config_backup). -include("emqx_exhook.hrl"). -include_lib("emqx/include/logger.hrl"). @@ -66,6 +67,11 @@ -export([roots/0]). +%% Data backup +-export([ + import_config/1 +]). + %% Running servers -type state() :: #{servers := servers()}. @@ -98,9 +104,9 @@ -export_type([servers/0, server/0]). 
-%%-------------------------------------------------------------------- +%%---------------------------------------------------------------------------------------- %% APIs -%%-------------------------------------------------------------------- +%%---------------------------------------------------------------------------------------- -spec start_link() -> ignore @@ -137,7 +143,7 @@ call(Req) -> init_ref_counter_table() -> _ = ets:new(?HOOKS_REF_COUNTER, [named_table, public]). -%%===================================================================== +%%======================================================================================== %% Hocon schema roots() -> emqx_exhook_schema:server_config(). @@ -179,9 +185,30 @@ post_config_update(_KeyPath, UpdateReq, NewConf, OldConf, _AppEnvs) -> Result = call({update_config, UpdateReq, NewConf, OldConf}), {ok, Result}. -%%-------------------------------------------------------------------- +%%======================================================================================== + +%%---------------------------------------------------------------------------------------- +%% Data backup +%%---------------------------------------------------------------------------------------- + +import_config(#{<<"exhook">> := #{<<"servers">> := Servers} = ExHook}) -> + OldServers = emqx:get_raw_config(?SERVERS, []), + KeyFun = fun(#{<<"name">> := Name}) -> Name end, + ExHook1 = ExHook#{<<"servers">> => emqx_utils:merge_lists(OldServers, Servers, KeyFun)}, + case emqx_conf:update(?EXHOOK, ExHook1, #{override_to => cluster}) of + {ok, #{raw_config := #{<<"servers">> := NewRawServers}}} -> + Changed = maps:get(changed, emqx_utils:diff_lists(NewRawServers, OldServers, KeyFun)), + ChangedPaths = [?SERVERS ++ [Name] || {#{<<"name">> := Name}, _} <- Changed], + {ok, #{root_key => ?EXHOOK, changed => ChangedPaths}}; + Error -> + {error, #{root_key => ?EXHOOK, reason => Error}} + end; +import_config(_RawConf) -> + {ok, #{root_key => ?EXHOOK, changed => []}}. + +%%---------------------------------------------------------------------------------------- %% gen_server callbacks -%%-------------------------------------------------------------------- +%%---------------------------------------------------------------------------------------- init([]) -> process_flag(trap_exit, true), @@ -333,9 +360,9 @@ terminate(Reason, State = #{servers := Servers}) -> code_change(_OldVsn, State, _Extra) -> {ok, State}. -%%-------------------------------------------------------------------- +%%---------------------------------------------------------------------------------------- %% Internal funcs -%%-------------------------------------------------------------------- +%%---------------------------------------------------------------------------------------- unload_exhooks() -> [ @@ -572,7 +599,7 @@ update_servers(Servers, State) -> set_disable(Server) -> Server#{status := disabled, timer := undefined}. 
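To make the import semantics above easier to follow, a small sketch of merging imported exhook servers into the existing list by name, using emqx_utils:merge_lists/3 exactly as import_config/1 does; the server maps and the expected outcome are assumptions for illustration only.

%% Sketch only: entries are keyed by <<"name">>; an imported server with an
%% existing name is assumed to replace it, new names are appended.
OldServers = [#{<<"name">> => <<"s1">>, <<"url">> => <<"http://127.0.0.1:9000">>}],
Imported = [
    #{<<"name">> => <<"s1">>, <<"url">> => <<"http://127.0.0.1:9001">>},
    #{<<"name">> => <<"s2">>, <<"url">> => <<"http://127.0.0.1:9002">>}
],
KeyFun = fun(#{<<"name">> := Name}) -> Name end,
Merged = emqx_utils:merge_lists(OldServers, Imported, KeyFun).
%% Assumed result: s1 carries the imported URL, s2 is added.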
-%%-------------------------------------------------------------------- +%%---------------------------------------------------------------------------------------- %% Server state persistent save(Name, ServerState) -> Saved = persistent_term:get(?APP, []), diff --git a/apps/emqx_ft/src/emqx_ft_schema.erl b/apps/emqx_ft/src/emqx_ft_schema.erl index 90b80ea3d..dd21e9524 100644 --- a/apps/emqx_ft/src/emqx_ft_schema.erl +++ b/apps/emqx_ft/src/emqx_ft_schema.erl @@ -76,6 +76,7 @@ fields(file_transfer) -> #{ desc => ?DESC("init_timeout"), required => false, + importance => ?IMPORTANCE_LOW, default => "10s" } )}, @@ -85,6 +86,7 @@ fields(file_transfer) -> #{ desc => ?DESC("store_segment_timeout"), required => false, + importance => ?IMPORTANCE_LOW, default => "5m" } )}, @@ -94,6 +96,7 @@ fields(file_transfer) -> #{ desc => ?DESC("assemble_timeout"), required => false, + importance => ?IMPORTANCE_LOW, default => "5m" } )}, diff --git a/apps/emqx_ft/test/emqx_ft_fs_util_SUITE.erl b/apps/emqx_ft/test/emqx_ft_fs_util_SUITE.erl index e4aa70f81..0f6b8a5ae 100644 --- a/apps/emqx_ft/test/emqx_ft_fs_util_SUITE.erl +++ b/apps/emqx_ft/test/emqx_ft_fs_util_SUITE.erl @@ -26,6 +26,51 @@ all() -> emqx_common_test_helpers:all(?MODULE). +%%├── a +%%│ ├── b +%%│ │ └── foo +%%│ │ ├── 42 +%%│ │ └── Я +%%│ └── link -> ../c +%%├── c +%%│ ├── bar +%%│ │ └── 中文 +%%│ └── link -> ../a +%%└── d +%% ├── e +%% │ └── baz +%% │ └── needle +%% └── haystack + +init_per_suite(Config) -> + Root = ?config(data_dir, Config), + A = filename:join([Root, "a", "b", "foo"]), + C = filename:join([Root, "c", "bar"]), + D = filename:join([Root, "d", "e", "baz"]), + + F42 = filename:join([A, "42"]), + F42_1 = filename:join([A, "Я"]), + FBar = filename:join([C, "中文"]), + FNeedle = filename:join([D, "needle"]), + FHayStack = filename:join([Root, "d", "haystack"]), + Files = [F42, F42_1, FBar, FNeedle, FHayStack], + lists:foreach(fun filelib:ensure_dir/1, Files), + %% create files + lists:foreach(fun(File) -> file:write_file(File, <<"">>, [write]) end, Files), + %% create links + ALink = filename:join([Root, "a", "link"]), + CLink = filename:join([Root, "c", "link"]), + make_symlink("../c", ALink), + make_symlink("../a", CLink), + Config. + +end_per_suite(Config) -> + Root = ?config(data_dir, Config), + ok = file:del_dir_r(filename:join([Root, "a"])), + ok = file:del_dir_r(filename:join([Root, "c"])), + ok = file:del_dir_r(filename:join([Root, "d"])), + ok. + t_fold_single_level(Config) -> Root = ?config(data_dir, Config), ?assertMatch( @@ -248,3 +293,7 @@ cons(Entry, Acc) -> sort(L) when is_list(L) -> lists:sort(L). + +make_symlink(FileOrDir, NewLink) -> + _ = file:delete(NewLink), + ok = file:make_symlink(FileOrDir, NewLink). 
diff --git a/apps/emqx_ft/test/emqx_ft_fs_util_SUITE_data/a/b/foo/42 b/apps/emqx_ft/test/emqx_ft_fs_util_SUITE_data/a/b/foo/42 deleted file mode 100644 index e69de29bb..000000000 diff --git a/apps/emqx_ft/test/emqx_ft_fs_util_SUITE_data/a/b/foo/Я b/apps/emqx_ft/test/emqx_ft_fs_util_SUITE_data/a/b/foo/Я deleted file mode 100644 index ac31ffd53..000000000 --- a/apps/emqx_ft/test/emqx_ft_fs_util_SUITE_data/a/b/foo/Я +++ /dev/null @@ -1 +0,0 @@ -Ты diff --git a/apps/emqx_ft/test/emqx_ft_fs_util_SUITE_data/a/link b/apps/emqx_ft/test/emqx_ft_fs_util_SUITE_data/a/link deleted file mode 120000 index 1b271d838..000000000 --- a/apps/emqx_ft/test/emqx_ft_fs_util_SUITE_data/a/link +++ /dev/null @@ -1 +0,0 @@ -../c \ No newline at end of file diff --git a/apps/emqx_ft/test/emqx_ft_fs_util_SUITE_data/c/bar/中文 b/apps/emqx_ft/test/emqx_ft_fs_util_SUITE_data/c/bar/中文 deleted file mode 100644 index 2e11eb72f..000000000 --- a/apps/emqx_ft/test/emqx_ft_fs_util_SUITE_data/c/bar/中文 +++ /dev/null @@ -1 +0,0 @@ -Zhōngwén diff --git a/apps/emqx_ft/test/emqx_ft_fs_util_SUITE_data/c/link b/apps/emqx_ft/test/emqx_ft_fs_util_SUITE_data/c/link deleted file mode 120000 index 82f488f26..000000000 --- a/apps/emqx_ft/test/emqx_ft_fs_util_SUITE_data/c/link +++ /dev/null @@ -1 +0,0 @@ -../a \ No newline at end of file diff --git a/apps/emqx_ft/test/emqx_ft_fs_util_SUITE_data/d/e/baz/needle b/apps/emqx_ft/test/emqx_ft_fs_util_SUITE_data/d/e/baz/needle deleted file mode 100644 index d755762d1..000000000 --- a/apps/emqx_ft/test/emqx_ft_fs_util_SUITE_data/d/e/baz/needle +++ /dev/null @@ -1 +0,0 @@ -haystack diff --git a/apps/emqx_ft/test/emqx_ft_fs_util_SUITE_data/d/haystack b/apps/emqx_ft/test/emqx_ft_fs_util_SUITE_data/d/haystack deleted file mode 100644 index a6b681bf4..000000000 --- a/apps/emqx_ft/test/emqx_ft_fs_util_SUITE_data/d/haystack +++ /dev/null @@ -1 +0,0 @@ -needle diff --git a/apps/emqx_gateway/src/emqx_gateway_api_listeners.erl b/apps/emqx_gateway/src/emqx_gateway_api_listeners.erl index 7c0b8c4bf..e62923bc2 100644 --- a/apps/emqx_gateway/src/emqx_gateway_api_listeners.erl +++ b/apps/emqx_gateway/src/emqx_gateway_api_listeners.erl @@ -304,8 +304,7 @@ do_listeners_cluster_status(Listeners) -> status => #{ running => Running, current_connections => Curr, - %% XXX: Since it is taken from raw-conf, it is possible a string - max_connections => int(Max) + max_connections => ensure_integer_or_infinity(Max) } } } @@ -314,10 +313,15 @@ do_listeners_cluster_status(Listeners) -> Listeners ). -int(B) when is_binary(B) -> +ensure_integer_or_infinity(infinity) -> + infinity; +ensure_integer_or_infinity(<<"infinity">>) -> + infinity; +ensure_integer_or_infinity(B) when is_binary(B) -> binary_to_integer(B); -int(I) when is_integer(I) -> +ensure_integer_or_infinity(I) when is_integer(I) -> I. + aggregate_listener_status(NodeStatus) -> aggregate_listener_status(NodeStatus, 0, 0, undefined). @@ -330,8 +334,9 @@ aggregate_listener_status( CurrAcc, RunningAcc ) -> + NMaxAcc = emqx_gateway_utils:plus_max_connections(MaxAcc, Max), NRunning = aggregate_running(Running, RunningAcc), - aggregate_listener_status(T, MaxAcc + Max, Current + CurrAcc, NRunning); + aggregate_listener_status(T, NMaxAcc, Current + CurrAcc, NRunning); aggregate_listener_status([], MaxAcc, CurrAcc, RunningAcc) -> {MaxAcc, CurrAcc, RunningAcc}. 
diff --git a/apps/emqx_gateway/src/emqx_gateway_cm.erl b/apps/emqx_gateway/src/emqx_gateway_cm.erl index 814e37163..4c07d3938 100644 --- a/apps/emqx_gateway/src/emqx_gateway_cm.erl +++ b/apps/emqx_gateway/src/emqx_gateway_cm.erl @@ -389,7 +389,7 @@ open_session( end, case takeover_session(GwName, ClientId) of {ok, ConnMod, ChanPid, Session} -> - ok = emqx_session:resume(ClientInfo, Session), + ok = SessionMod:resume(ClientInfo, Session), case request_stepdown({takeover, 'end'}, ConnMod, ChanPid) of {ok, Pendings} -> register_channel( diff --git a/apps/emqx_gateway/src/emqx_gateway_conf.erl b/apps/emqx_gateway/src/emqx_gateway_conf.erl index ed7f6cf9a..2a64a6914 100644 --- a/apps/emqx_gateway/src/emqx_gateway_conf.erl +++ b/apps/emqx_gateway/src/emqx_gateway_conf.erl @@ -18,6 +18,7 @@ -module(emqx_gateway_conf). -behaviour(emqx_config_handler). +-behaviour(emqx_config_backup). %% Load/Unload -export([ @@ -64,6 +65,11 @@ post_config_update/5 ]). +%% Data backup +-export([ + import_config/1 +]). + -include_lib("emqx/include/logger.hrl"). -include_lib("emqx/include/emqx_authentication.hrl"). -define(AUTHN_BIN, ?EMQX_AUTHENTICATION_CONFIG_ROOT_NAME_BINARY). @@ -76,9 +82,9 @@ -define(IS_SSL(T), (T == <<"ssl_options">> orelse T == <<"dtls_options">>)). -define(IGNORE_KEYS, [<<"listeners">>, ?AUTHN_BIN]). -%%-------------------------------------------------------------------- +%%---------------------------------------------------------------------------------------- %% Load/Unload -%%-------------------------------------------------------------------- +%%---------------------------------------------------------------------------------------- -define(GATEWAY, [gateway]). -spec load() -> ok. @@ -89,7 +95,7 @@ load() -> unload() -> emqx_conf:remove_handler(?GATEWAY). -%%-------------------------------------------------------------------- +%%---------------------------------------------------------------------------------------- %% APIs -spec load_gateway(atom_or_bin(), map()) -> map_or_err(). @@ -365,9 +371,26 @@ ret_listener_or_err(GwName, {LType, LName}, {ok, #{raw_config := GwConf}}) -> ret_listener_or_err(_, _, Err) -> Err. -%%-------------------------------------------------------------------- +%%---------------------------------------------------------------------------------------- +%% Data backup +%%---------------------------------------------------------------------------------------- + +import_config(RawConf) -> + GatewayConf = maps:get(<<"gateway">>, RawConf, #{}), + OldGatewayConf = emqx:get_raw_config([<<"gateway">>], #{}), + MergedConf = maps:merge(OldGatewayConf, GatewayConf), + case emqx_conf:update([gateway], MergedConf, #{override_to => cluster}) of + {ok, #{raw_config := NewRawConf}} -> + Changed = maps:get(changed, emqx_utils_maps:diff_maps(NewRawConf, OldGatewayConf)), + ChangedPaths = [[gateway, GwName] || GwName <- maps:keys(Changed)], + {ok, #{root_key => gateway, changed => ChangedPaths}}; + Error -> + {error, #{root_key => gateway, reason => Error}} + end. + +%%---------------------------------------------------------------------------------------- %% Config Handler -%%-------------------------------------------------------------------- +%%---------------------------------------------------------------------------------------- -spec pre_config_update( list(atom()), @@ -793,9 +816,9 @@ post_config_update(?GATEWAY, _Req = #{}, NewConfig, OldConfig, _AppEnvs) -> ), ok. 
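Similarly, for the gateway import above, a brief sketch (with invented config values) of how the changed root keys are derived from emqx_utils_maps:diff_maps/2, mirroring import_config/1; the exact shape of the diff result is an assumption beyond what the code above relies on.

%% Sketch only: keys whose values differ end up under `changed`, which
%% import_config/1 turns into [gateway, GwName] paths.
OldRaw = #{<<"stomp">> => #{<<"enable">> => true}},
NewRaw = #{<<"stomp">> => #{<<"enable">> => false}},
#{changed := Changed} = emqx_utils_maps:diff_maps(NewRaw, OldRaw),
ChangedPaths = [[gateway, GwName] || GwName <- maps:keys(Changed)].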
-%%-------------------------------------------------------------------- -%% Internal functions -%%-------------------------------------------------------------------- +%%---------------------------------------------------------------------------------------- +%% Internal funcs +%%---------------------------------------------------------------------------------------- tune_gw_certs(Fun, GwName, Conf) -> apply_to_gateway_basic_confs( diff --git a/apps/emqx_gateway/src/emqx_gateway_http.erl b/apps/emqx_gateway/src/emqx_gateway_http.erl index 7aaaee9cb..58c201c75 100644 --- a/apps/emqx_gateway/src/emqx_gateway_http.erl +++ b/apps/emqx_gateway/src/emqx_gateway_http.erl @@ -161,7 +161,10 @@ max_connections_count(Config) -> Listeners = emqx_gateway_utils:normalize_config(Config), lists:foldl( fun({_, _, _, SocketOpts, _}, Acc) -> - Acc + proplists:get_value(max_connections, SocketOpts, 0) + emqx_gateway_utils:plus_max_connections( + Acc, + proplists:get_value(max_connections, SocketOpts, 0) + ) end, 0, Listeners @@ -588,10 +591,12 @@ sum_cluster_connections(List) -> %%-------------------------------------------------------------------- %% Internal funcs + sum_cluster_connections( [#{max_connections := Max, current_connections := Current} | T], MaxAcc, CurrAcc ) -> - sum_cluster_connections(T, MaxAcc + Max, Current + CurrAcc); + NMaxAcc = emqx_gateway_utils:plus_max_connections(MaxAcc, Max), + sum_cluster_connections(T, NMaxAcc, Current + CurrAcc); sum_cluster_connections([_ | T], MaxAcc, CurrAcc) -> sum_cluster_connections(T, MaxAcc, CurrAcc); sum_cluster_connections([], MaxAcc, CurrAcc) -> diff --git a/apps/emqx_gateway/src/emqx_gateway_schema.erl b/apps/emqx_gateway/src/emqx_gateway_schema.erl index 3c5706e82..845229425 100644 --- a/apps/emqx_gateway/src/emqx_gateway_schema.erl +++ b/apps/emqx_gateway/src/emqx_gateway_schema.erl @@ -266,7 +266,7 @@ common_listener_opts() -> )}, {max_connections, sc( - integer(), + hoconsc:union([pos_integer(), infinity]), #{ default => 1024, desc => ?DESC(gateway_common_listener_max_connections) diff --git a/apps/emqx_gateway/src/emqx_gateway_utils.erl b/apps/emqx_gateway/src/emqx_gateway_utils.erl index d41b3c93b..b91e48354 100644 --- a/apps/emqx_gateway/src/emqx_gateway_utils.erl +++ b/apps/emqx_gateway/src/emqx_gateway_utils.erl @@ -45,7 +45,8 @@ is_running/2, global_chain/1, listener_chain/3, - find_gateway_definitions/0 + find_gateway_definitions/0, + plus_max_connections/2 ]). -export([stringfy/1]). @@ -607,3 +608,12 @@ ignore_lib_apps(Apps) -> wx ], [AppName || {AppName, _, _} <- Apps, not lists:member(AppName, LibApps)]. + +-spec plus_max_connections(non_neg_integer() | infinity, non_neg_integer() | infinity) -> + pos_integer() | infinity. +plus_max_connections(_, infinity) -> + infinity; +plus_max_connections(infinity, _) -> + infinity; +plus_max_connections(A, B) when is_integer(A) andalso is_integer(B) -> + A + B. diff --git a/apps/emqx_gateway/test/emqx_gateway_api_SUITE.erl b/apps/emqx_gateway/test/emqx_gateway_api_SUITE.erl index b2e5861af..f1cfd26d0 100644 --- a/apps/emqx_gateway/test/emqx_gateway_api_SUITE.erl +++ b/apps/emqx_gateway/test/emqx_gateway_api_SUITE.erl @@ -411,6 +411,45 @@ t_listeners_tcp(_) -> {404, _} = request(get, "/gateways/stomp/listeners/stomp:tcp:def"), ok. 
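A one-line illustration of the aggregation rule introduced here: summing listener max_connections now treats infinity as absorbing. The listener values are invented; plus_max_connections/2 is the helper added above.

%% Sketch: any 'infinity' listener makes the aggregate infinity.
Maxima = [1024, infinity, 2048],
infinity = lists:foldl(fun emqx_gateway_utils:plus_max_connections/2, 0, Maxima).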
+t_listeners_max_conns(_) -> + {204, _} = request(put, "/gateways/stomp", #{}), + {404, _} = request(get, "/gateways/stomp/listeners"), + LisConf = #{ + name => <<"def">>, + type => <<"tcp">>, + bind => <<"127.0.0.1:61613">>, + max_connections => 1024 + }, + {201, _} = request(post, "/gateways/stomp/listeners", LisConf), + {200, ConfResp} = request(get, "/gateways/stomp/listeners"), + assert_confs([LisConf], ConfResp), + {200, ConfResp1} = request(get, "/gateways/stomp/listeners/stomp:tcp:def"), + assert_confs(LisConf, ConfResp1), + + LisConf2 = maps:merge(LisConf, #{max_connections => <<"infinity">>}), + {200, _} = request( + put, + "/gateways/stomp/listeners/stomp:tcp:def", + LisConf2 + ), + + {200, ConfResp2} = request(get, "/gateways/stomp/listeners/stomp:tcp:def"), + assert_confs(LisConf2, ConfResp2), + + {200, [Listeners]} = request(get, "/gateways/stomp/listeners"), + ?assertMatch(#{max_connections := <<"infinity">>}, Listeners), + + {200, Gateways} = request(get, "/gateways"), + [StompGwOverview] = lists:filter( + fun(Gw) -> maps:get(name, Gw) =:= <<"stomp">> end, + Gateways + ), + ?assertMatch(#{max_connections := <<"infinity">>}, StompGwOverview), + + {204, _} = request(delete, "/gateways/stomp/listeners/stomp:tcp:def"), + {404, _} = request(get, "/gateways/stomp/listeners/stomp:tcp:def"), + ok. + t_listeners_authn(_) -> GwConf = #{ name => <<"stomp">>, diff --git a/apps/emqx_gateway_coap/src/emqx_coap_channel.erl b/apps/emqx_gateway_coap/src/emqx_coap_channel.erl index 7ef008954..21066655e 100644 --- a/apps/emqx_gateway_coap/src/emqx_coap_channel.erl +++ b/apps/emqx_gateway_coap/src/emqx_coap_channel.erl @@ -381,6 +381,8 @@ ensure_keepalive_timer(Fun, #channel{keepalive = KeepAlive} = Channel) -> Heartbeat = emqx_keepalive:info(interval, KeepAlive), Fun(keepalive, Heartbeat, keepalive, Channel). +check_auth_state(Msg, #channel{connection_required = false} = Channel) -> + call_session(handle_request, Msg, Channel); check_auth_state(Msg, #channel{connection_required = true} = Channel) -> case is_create_connection_request(Msg) of true -> diff --git a/apps/emqx_gateway_coap/test/emqx_coap_SUITE.erl b/apps/emqx_gateway_coap/test/emqx_coap_SUITE.erl index 9b6f7ce1f..999493a79 100644 --- a/apps/emqx_gateway_coap/test/emqx_coap_SUITE.erl +++ b/apps/emqx_gateway_coap/test/emqx_coap_SUITE.erl @@ -58,14 +58,14 @@ all() -> emqx_common_test_helpers:all(?MODULE). init_per_suite(Config) -> application:load(emqx_gateway_coap), ok = emqx_common_test_helpers:load_config(emqx_gateway_schema, ?CONF_DEFAULT), - emqx_mgmt_api_test_util:init_suite([emqx_authn, emqx_gateway]), + emqx_mgmt_api_test_util:init_suite([emqx_conf, emqx_authn, emqx_gateway]), ok = meck:new(emqx_access_control, [passthrough, no_history, no_link]), Config. end_per_suite(_) -> meck:unload(emqx_access_control), {ok, _} = emqx:remove_config([<<"gateway">>, <<"coap">>]), - emqx_mgmt_api_test_util:end_suite([emqx_gateway, emqx_authn]). + emqx_mgmt_api_test_util:end_suite([emqx_gateway, emqx_authn, emqx_conf]). init_per_testcase(t_connection_with_authn_failed, Config) -> ok = meck:expect( @@ -92,6 +92,13 @@ mqtt_prefix() -> ps_prefix() -> ?PS_PREFIX. +restart_coap_with_connection_mode(Bool) -> + Conf = emqx:get_raw_config([gateway, coap]), + emqx_gateway_conf:update_gateway( + coap, + Conf#{<<"connection_required">> => atom_to_binary(Bool)} + ). 
+ %%-------------------------------------------------------------------- %% Test Cases %%-------------------------------------------------------------------- @@ -373,6 +380,35 @@ t_on_offline_event(_) -> end, do(Fun). +t_connectionless_pubsub(_) -> + restart_coap_with_connection_mode(false), + Fun = fun(Channel) -> + Topic = <<"t/a">>, + Payload = <<"123">>, + URI = pubsub_uri(binary_to_list(Topic)), + Req = make_req(get, Payload, [{observe, 0}]), + {ok, content, _} = do_request(Channel, URI, Req), + ?LOGT("observer topic:~ts~n", [Topic]), + + %% ensure subscribe succeed + timer:sleep(100), + [SubPid] = emqx:subscribers(Topic), + ?assert(is_pid(SubPid)), + + %% publish a message + Req2 = make_req(post, Payload), + {ok, changed, _} = do_request(Channel, URI, Req2), + + {ok, content, Notify} = with_response(Channel), + ?LOGT("observer get Notif=~p", [Notify]), + + #coap_content{payload = PayloadRecv} = Notify, + + ?assertEqual(Payload, PayloadRecv) + end, + do(Fun), + restart_coap_with_connection_mode(true). + %%-------------------------------------------------------------------- %% helpers @@ -402,6 +438,9 @@ observe(Channel, Token, false) -> {ok, nocontent, _Data} = do_request(Channel, URI, Req), ok. +pubsub_uri(Topic) when is_list(Topic) -> + ?PS_PREFIX ++ "/" ++ Topic. + pubsub_uri(Topic, Token) when is_list(Topic), is_list(Token) -> ?PS_PREFIX ++ "/" ++ Topic ++ "?clientid=client1&token=" ++ Token. diff --git a/apps/emqx_gateway_mqttsn/include/emqx_mqttsn.hrl b/apps/emqx_gateway_mqttsn/include/emqx_mqttsn.hrl index 5ab2d4a05..2b63e8c12 100644 --- a/apps/emqx_gateway_mqttsn/include/emqx_mqttsn.hrl +++ b/apps/emqx_gateway_mqttsn/include/emqx_mqttsn.hrl @@ -14,6 +14,8 @@ %% limitations under the License. %%-------------------------------------------------------------------- +-define(SN_MAX_PREDEF_TOPIC_ID, 1024). + %%-------------------------------------------------------------------- %% MQTT-SN Types %%-------------------------------------------------------------------- diff --git a/apps/emqx_gateway_mqttsn/src/emqx_gateway_mqttsn.erl b/apps/emqx_gateway_mqttsn/src/emqx_gateway_mqttsn.erl index 167ee465c..23f32497b 100644 --- a/apps/emqx_gateway_mqttsn/src/emqx_gateway_mqttsn.erl +++ b/apps/emqx_gateway_mqttsn/src/emqx_gateway_mqttsn.erl @@ -56,8 +56,7 @@ on_gateway_load( }, Ctx ) -> - %% We Also need to start `emqx_mqttsn_broadcast` & - %% `emqx_mqttsn_registry` process + %% We Also need to start `emqx_mqttsn_broadcast` case maps:get(broadcast, Config, false) of false -> ok; @@ -70,12 +69,9 @@ on_gateway_load( end, PredefTopics = maps:get(predefined, Config, []), - {ok, RegistrySvr} = emqx_mqttsn_registry:start_link(GwName, PredefTopics), + ok = emqx_mqttsn_registry:persist_predefined_topics(PredefTopics), - NConfig = maps:without( - [broadcast, predefined], - Config#{registry => emqx_mqttsn_registry:lookup_name(RegistrySvr)} - ), + NConfig = maps:without([broadcast, predefined], Config), Listeners = emqx_gateway_utils:normalize_config(NConfig), @@ -125,5 +121,7 @@ on_gateway_unload( }, _GwState ) -> + PredefTopics = maps:get(predefined, Config, []), + ok = emqx_mqttsn_registry:clear_predefined_topics(PredefTopics), Listeners = normalize_config(Config), stop_listeners(GwName, Listeners). 
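To clarify the new predefined-topic handling, a round-trip sketch against the reworked registry API (defined further down in emqx_mqttsn_registry); the topic id and name are examples.

%% Sketch: predefined topics are held in persistent_term for the lifetime of
%% the gateway; per-session registries (see below) allocate ids above
%% ?SN_MAX_PREDEF_TOPIC_ID and never shadow them.
PredefTopics = [#{id => 1, topic => <<"predef/1">>}],
ok = emqx_mqttsn_registry:persist_predefined_topics(PredefTopics),
Registry = emqx_mqttsn_registry:init(),
<<"predef/1">> = emqx_mqttsn_registry:lookup_topic(1, Registry),
{predef, 1} = emqx_mqttsn_registry:lookup_topic_id(<<"predef/1">>, Registry),
ok = emqx_mqttsn_registry:clear_predefined_topics(PredefTopics).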
diff --git a/apps/emqx_gateway_mqttsn/src/emqx_mqttsn_channel.erl b/apps/emqx_gateway_mqttsn/src/emqx_mqttsn_channel.erl index 71a891850..720c288d3 100644 --- a/apps/emqx_gateway_mqttsn/src/emqx_mqttsn_channel.erl +++ b/apps/emqx_gateway_mqttsn/src/emqx_mqttsn_channel.erl @@ -23,6 +23,7 @@ -include_lib("emqx/include/types.hrl"). -include_lib("emqx/include/emqx_mqtt.hrl"). -include_lib("emqx/include/logger.hrl"). +-include_lib("snabbkaffe/include/snabbkaffe.hrl"). %% API -export([ @@ -50,20 +51,16 @@ -record(channel, { %% Context ctx :: emqx_gateway_ctx:context(), - %% Registry - registry :: emqx_mqttsn_registry:registry(), %% Gateway Id gateway_id :: integer(), - %% Enable QoS3 - - %% XXX: Get confs from ctx ? - enable_qos3 :: boolean(), + %% Enable negative_qos + enable_negative_qos :: boolean(), %% MQTT-SN Connection Info conninfo :: emqx_types:conninfo(), %% MQTT-SN Client Info clientinfo :: emqx_types:clientinfo(), %% Session - session :: emqx_session:session() | undefined, + session :: emqx_mqttsn_session:session() | undefined, %% Keepalive keepalive :: emqx_keepalive:keepalive() | undefined, %% Will Msg @@ -148,9 +145,8 @@ init( ) -> Peercert = maps:get(peercert, ConnInfo, undefined), Mountpoint = maps:get(mountpoint, Option, undefined), - Registry = maps:get(registry, Option), GwId = maps:get(gateway_id, Option), - EnableQoS3 = maps:get(enable_qos3, Option, true), + EnableNegQoS = maps:get(enable_qos3, Option, true), ListenerId = case maps:get(listener, Option, undefined) of undefined -> undefined; @@ -181,9 +177,8 @@ init( ), #channel{ ctx = Ctx, - registry = Registry, gateway_id = GwId, - enable_qos3 = EnableQoS3, + enable_negative_qos = EnableNegQoS, conninfo = ConnInfo, clientinfo = ClientInfo, clientinfo_override = Override, @@ -218,7 +213,7 @@ info(conn_state, #channel{conn_state = ConnState}) -> info(clientinfo, #channel{clientinfo = ClientInfo}) -> ClientInfo; info(session, #channel{session = Session}) -> - emqx_utils:maybe_apply(fun emqx_session:info/1, Session); + emqx_utils:maybe_apply(fun emqx_mqttsn_session:info/1, Session); info(will_msg, #channel{will_msg = WillMsg}) -> WillMsg; info(clientid, #channel{clientinfo = #{clientid := ClientId}}) -> @@ -230,7 +225,7 @@ info(ctx, #channel{ctx = Ctx}) -> stats(#channel{session = undefined}) -> []; stats(#channel{session = Session}) -> - emqx_session:stats(Session). + emqx_mqttsn_session:stats(Session). set_conn_state(ConnState, Channel) -> Channel#channel{conn_state = ConnState}. @@ -389,19 +384,15 @@ process_connect( clientinfo = ClientInfo } ) -> - SessFun = fun(ClientInfoT, _) -> - Conf = emqx_cm:get_session_confs( - ClientInfoT, #{receive_maximum => 1, expiry_interval => 0} - ), - emqx_session:init(Conf) - end, + SessFun = fun(ClientInfoT, _) -> emqx_mqttsn_session:init(ClientInfoT) end, case emqx_gateway_ctx:open_session( Ctx, CleanStart, ClientInfo, ConnInfo, - SessFun + SessFun, + _SessMod = emqx_mqttsn_session ) of {ok, #{ @@ -461,48 +452,58 @@ handle_in(?SN_ADVERTISE_MSG(_GwId, _Radius), Channel) -> % ignore shutdown(normal, Channel); handle_in( - ?SN_PUBLISH_MSG( - #mqtt_sn_flags{ - qos = ?QOS_NEG1, - topic_id_type = TopicIdType - }, - TopicId, - _MsgId, - Data - ), - Channel = #channel{conn_state = idle, registry = Registry} + Publish = + ?SN_PUBLISH_MSG( + #mqtt_sn_flags{ + qos = ?QOS_NEG1, + topic_id_type = TopicIdType + }, + TopicId, + MsgId, + Data + ), + Channel = #channel{conn_state = idle} ) -> - %% FIXME: check enable_qos3 ?? 
- TopicName = - case (TopicIdType =:= ?SN_SHORT_TOPIC) of - true -> - <>; - false -> - emqx_mqttsn_registry:lookup_topic( - Registry, - ?NEG_QOS_CLIENT_ID, - TopicId - ) - end, - _ = - case TopicName =/= undefined of - true -> - Msg = emqx_message:make( - ?NEG_QOS_CLIENT_ID, - ?QOS_0, - TopicName, - Data - ), - emqx_broker:publish(Msg); - false -> - ok - end, - ?SLOG(debug, #{ - msg => "receive_qo3_message_in_idle_mode", - topic => TopicName, - data => Data - }), - {ok, Channel}; + case check_negative_qos_enable(Publish, Channel) of + ok -> + TopicName = + case TopicIdType of + ?SN_SHORT_TOPIC -> + TopicId; + ?SN_PREDEFINED_TOPIC -> + Registry = emqx_mqttsn_registry:init(), + emqx_mqttsn_registry:lookup_topic(TopicId, Registry); + _ -> + undefined + end, + case TopicName =/= undefined of + true -> + Msg = emqx_message:make( + ?NEG_QOS_CLIENT_ID, + ?QOS_0, + TopicName, + Data + ), + ?SLOG(debug, #{ + msg => "receive_qo3_message_in_idle_mode", + topic => TopicName, + data => Data + }), + _ = emqx_broker:publish(Msg), + ok; + false -> + ok + end, + shutdown(normal, Channel); + {error, Rc} -> + ?tp(info, ignore_negative_qos, #{ + topic_id => TopicId, + msg_id => MsgId, + return_code => Rc + }), + PubAck = ?SN_PUBACK_MSG(TopicId, MsgId, Rc), + shutdown(normal, PubAck, Channel) + end; handle_in( Pkt = #mqtt_sn_message{type = Type}, Channel = #channel{conn_state = idle} @@ -619,20 +620,19 @@ handle_in( end; handle_in( ?SN_REGISTER_MSG(_TopicId, MsgId, TopicName), - Channel = #channel{ - registry = Registry, - clientinfo = #{clientid := ClientId} - } + Channel = #channel{session = Session} ) -> - case emqx_mqttsn_registry:register_topic(Registry, ClientId, TopicName) of - TopicId when is_integer(TopicId) -> + Registry = emqx_mqttsn_session:registry(Session), + case emqx_mqttsn_registry:reg(TopicName, Registry) of + {ok, TopicId, NRegistry} -> ?SLOG(debug, #{ msg => "registered_topic_name", topic_name => TopicName, topic_id => TopicId }), AckPacket = ?SN_REGACK_MSG(TopicId, MsgId, ?SN_RC_ACCEPTED), - {ok, {outgoing, AckPacket}, Channel}; + NSession = emqx_mqttsn_session:set_registry(NRegistry, Session), + {ok, {outgoing, AckPacket}, Channel#channel{session = NSession}}; {error, too_large} -> ?SLOG(error, #{ msg => "register_topic_failed", @@ -720,7 +720,7 @@ handle_in(PubPkt = ?SN_PUBLISH_MSG(_Flags, TopicId0, MsgId, _Data), Channel) -> case emqx_utils:pipeline( [ - fun check_qos3_enable/2, + fun check_negative_qos_enable/2, fun preproc_pub_pkt/2, fun convert_topic_id_to_name/2, fun check_pub_authz/2, @@ -733,20 +733,25 @@ handle_in(PubPkt = ?SN_PUBLISH_MSG(_Flags, TopicId0, MsgId, _Data), Channel) -> {ok, Msg, NChannel} -> do_publish(TopicId, MsgId, Msg, NChannel); {error, ReturnCode, NChannel} -> + ?tp(info, publish_msg_rejected, #{ + topic_id => TopicId, + msg_id => MsgId, + return_code => ReturnCode + }), handle_out(puback, {TopicId, MsgId, ReturnCode}, NChannel) end; handle_in( ?SN_PUBACK_MSG(TopicId, MsgId, ReturnCode), Channel = #channel{ ctx = Ctx, - registry = Registry, session = Session, - clientinfo = ClientInfo = #{clientid := ClientId} + clientinfo = ClientInfo } ) -> + Registry = emqx_mqttsn_session:registry(Session), case ReturnCode of ?SN_RC_ACCEPTED -> - case emqx_session:puback(ClientInfo, MsgId, Session) of + case emqx_mqttsn_session:puback(ClientInfo, MsgId, Session) of {ok, Msg, NSession} -> ok = after_message_acked(ClientInfo, Msg, Channel), {Replies, NChannel} = goto_asleep_if_buffered_msgs_sent( @@ -778,7 +783,7 @@ handle_in( {ok, Channel} end; ?SN_RC_INVALID_TOPIC_ID -> - 
case emqx_mqttsn_registry:lookup_topic(Registry, ClientId, TopicId) of + case emqx_mqttsn_registry:lookup_topic(TopicId, Registry) of undefined -> {ok, Channel}; TopicName -> @@ -803,7 +808,7 @@ handle_in( clientinfo = ClientInfo } ) -> - case emqx_session:pubrec(ClientInfo, MsgId, Session) of + case emqx_mqttsn_session:pubrec(ClientInfo, MsgId, Session) of {ok, Msg, NSession} -> ok = after_message_acked(ClientInfo, Msg, Channel), NChannel = Channel#channel{session = NSession}, @@ -829,7 +834,7 @@ handle_in( ?SN_PUBREC_MSG(?SN_PUBREL, MsgId), Channel = #channel{ctx = Ctx, session = Session, clientinfo = ClientInfo} ) -> - case emqx_session:pubrel(ClientInfo, MsgId, Session) of + case emqx_mqttsn_session:pubrel(ClientInfo, MsgId, Session) of {ok, NSession} -> NChannel = Channel#channel{session = NSession}, handle_out(pubcomp, MsgId, NChannel); @@ -846,7 +851,7 @@ handle_in( ?SN_PUBREC_MSG(?SN_PUBCOMP, MsgId), Channel = #channel{ctx = Ctx, session = Session, clientinfo = ClientInfo} ) -> - case emqx_session:pubcomp(ClientInfo, MsgId, Session) of + case emqx_mqttsn_session:pubcomp(ClientInfo, MsgId, Session) of {ok, NSession} -> {Replies, NChannel} = goto_asleep_if_buffered_msgs_sent( Channel#channel{session = NSession} @@ -1044,18 +1049,13 @@ send_next_register_or_replay_publish( %%-------------------------------------------------------------------- %% Handle Publish -check_qos3_enable( - ?SN_PUBLISH_MSG(Flags, TopicId, _MsgId, Data), - #channel{enable_qos3 = EnableQoS3} +check_negative_qos_enable( + ?SN_PUBLISH_MSG(Flags, _TopicId, _MsgId, _Data), + #channel{enable_negative_qos = EnableNegQoS} ) -> #mqtt_sn_flags{qos = QoS} = Flags, - case EnableQoS3 =:= false andalso QoS =:= ?QOS_NEG1 of + case EnableNegQoS =:= false andalso QoS =:= ?QOS_NEG1 of true -> - ?SLOG(debug, #{ - msg => "ignore_msg_due_to_qos3_disabled", - topic_id => TopicId, - data => Data - }), {error, ?SN_RC_NOT_SUPPORTED}; false -> ok @@ -1088,12 +1088,10 @@ convert_topic_id_to_name({{name, TopicName}, Flags, Data}, Channel) -> {ok, {TopicName, Flags, Data}, Channel}; convert_topic_id_to_name( {{id, TopicId}, Flags, Data}, - Channel = #channel{ - registry = Registry, - clientinfo = #{clientid := ClientId} - } + Channel = #channel{session = Session} ) -> - case emqx_mqttsn_registry:lookup_topic(Registry, ClientId, TopicId) of + Registry = emqx_mqttsn_session:registry(Session), + case emqx_mqttsn_registry:lookup_topic(TopicId, Registry) of undefined -> {error, ?SN_RC_INVALID_TOPIC_ID}; TopicName -> @@ -1162,7 +1160,7 @@ do_publish( Msg = #message{qos = ?QOS_2}, Channel = #channel{ctx = Ctx, session = Session, clientinfo = ClientInfo} ) -> - case emqx_session:publish(ClientInfo, MsgId, Msg, Session) of + case emqx_mqttsn_session:publish(ClientInfo, MsgId, Msg, Session) of {ok, _PubRes, NSession} -> NChannel1 = ensure_timer( await_timer, @@ -1195,15 +1193,13 @@ preproc_subs_type( TopicName, QoS ), - Channel = #channel{ - registry = Registry, - clientinfo = #{clientid := ClientId} - } + Channel = #channel{session = Session} ) -> + Registry = emqx_mqttsn_session:registry(Session), %% If the gateway is able accept the subscription, %% it assigns a topic id to the received topic name %% and returns it within a SUBACK message - case emqx_mqttsn_registry:register_topic(Registry, ClientId, TopicName) of + case emqx_mqttsn_registry:reg(TopicName, Registry) of {error, too_large} -> {error, ?SN_RC2_EXCEED_LIMITATION}; {error, wildcard_topic} -> @@ -1214,8 +1210,9 @@ preproc_subs_type( %% value when it has the first PUBLISH message with a 
matching %% topic name to be sent to the client, see also Section 6.10. {ok, {?SN_INVALID_TOPIC_ID, TopicName, QoS}, Channel}; - TopicId when is_integer(TopicId) -> - {ok, {TopicId, TopicName, QoS}, Channel} + {ok, TopicId, NRegistry} -> + NSession = emqx_mqttsn_session:set_registry(NRegistry, Session), + {ok, {TopicId, TopicName, QoS}, Channel#channel{session = NSession}} end; preproc_subs_type( ?SN_SUBSCRIBE_MSG_TYPE( @@ -1223,18 +1220,10 @@ preproc_subs_type( TopicId, QoS ), - Channel = #channel{ - registry = Registry, - clientinfo = #{clientid := ClientId} - } + Channel = #channel{session = Session} ) -> - case - emqx_mqttsn_registry:lookup_topic( - Registry, - ClientId, - TopicId - ) - of + Registry = emqx_mqttsn_session:registry(Session), + case emqx_mqttsn_registry:lookup_topic(TopicId, Registry) of undefined -> {error, ?SN_RC_INVALID_TOPIC_ID}; TopicName -> @@ -1311,7 +1300,7 @@ do_subscribe( ) -> NTopicName = emqx_mountpoint:mount(Mountpoint, TopicName), NSubOpts = maps:merge(emqx_gateway_utils:default_subopts(), SubOpts), - case emqx_session:subscribe(ClientInfo, NTopicName, NSubOpts, Session) of + case emqx_mqttsn_session:subscribe(ClientInfo, NTopicName, NSubOpts, Session) of {ok, NSession} -> {ok, {TopicId, NTopicName, NSubOpts}, Channel#channel{session = NSession}}; {error, ?RC_QUOTA_EXCEEDED} -> @@ -1339,18 +1328,10 @@ preproc_unsub_type( ?SN_PREDEFINED_TOPIC, TopicId ), - Channel = #channel{ - registry = Registry, - clientinfo = #{clientid := ClientId} - } + Channel = #channel{session = Session} ) -> - case - emqx_mqttsn_registry:lookup_topic( - Registry, - ClientId, - TopicId - ) - of + Registry = emqx_mqttsn_session:registry(Session), + case emqx_mqttsn_registry:lookup_topic(TopicId, Registry) of undefined -> {error, not_found}; TopicName -> @@ -1410,7 +1391,7 @@ do_unsubscribe( SubOpts ), case - emqx_session:unsubscribe( + emqx_mqttsn_session:unsubscribe( ClientInfo, NTopicName, NSubOpts, @@ -1455,9 +1436,9 @@ awake( clientid => ClientId, previous_state => ConnState }), - {ok, Publishes, Session1} = emqx_session:replay(ClientInfo, Session), + {ok, Publishes, Session1} = emqx_mqttsn_session:replay(ClientInfo, Session), {NPublishes, NSession} = - case emqx_session:deliver(ClientInfo, [], Session1) of + case emqx_mqttsn_session:deliver(ClientInfo, [], Session1) of {ok, Session2} -> {Publishes, Session2}; {ok, More, Session2} -> @@ -1485,8 +1466,8 @@ goto_asleep_if_buffered_msgs_sent( } ) -> case - emqx_mqueue:is_empty(emqx_session:info(mqueue, Session)) andalso - emqx_inflight:is_empty(emqx_session:info(inflight, Session)) + emqx_mqueue:is_empty(emqx_mqttsn_session:info(mqueue, Session)) andalso + emqx_inflight:is_empty(emqx_mqttsn_session:info(inflight, Session)) of true -> ?SLOG(info, #{ @@ -1579,7 +1560,7 @@ handle_out( register_inflight = undefined } ) -> - {MsgId, NSession} = emqx_session:obtain_next_pkt_id(Session), + {MsgId, NSession} = emqx_mqttsn_session:obtain_next_pkt_id(Session), Outgoing = {outgoing, ?SN_REGISTER_MSG(TopicId, MsgId, TopicName)}, NChannel = Channel#channel{ session = NSession, @@ -1655,7 +1636,7 @@ maybe_resume_session( resuming = true } ) -> - Subs = emqx_session:info(subscriptions, Session), + Subs = emqx_mqttsn_session:info(subscriptions, Session), case subs_resume() andalso map_size(Subs) =/= 0 of true -> TopicNames = lists:filter(fun(T) -> not emqx_topic:wildcard(T) end, maps:keys(Subs)), @@ -1680,9 +1661,9 @@ resume_or_replay_messages( false -> {[], Channel} end, - {ok, Publishes, Session1} = emqx_session:replay(ClientInfo, Session), + 
{ok, Publishes, Session1} = emqx_mqttsn_session:replay(ClientInfo, Session), {NPublishes, NSession} = - case emqx_session:deliver(ClientInfo, NPendings, Session1) of + case emqx_mqttsn_session:deliver(ClientInfo, NPendings, Session1) of {ok, Session2} -> {Publishes, Session2}; {ok, More, Session2} -> @@ -1753,10 +1734,7 @@ outgoing_deliver_and_register({Packets, Channel}) -> message_to_packet( MsgId, Message, - #channel{ - registry = Registry, - clientinfo = #{clientid := ClientId} - } + #channel{session = Session} ) -> QoS = emqx_message:qos(Message), Topic = emqx_message:topic(Message), @@ -1766,7 +1744,8 @@ message_to_packet( ?QOS_0 -> 0; _ -> MsgId end, - case emqx_mqttsn_registry:lookup_topic_id(Registry, ClientId, Topic) of + Registry = emqx_mqttsn_session:registry(Session), + case emqx_mqttsn_registry:lookup_topic_id(Topic, Registry) of {predef, PredefTopicId} -> Flags = #mqtt_sn_flags{qos = QoS, topic_id_type = ?SN_PREDEFINED_TOPIC}, ?SN_PUBLISH_MSG(Flags, PredefTopicId, NMsgId, Payload); @@ -1801,7 +1780,7 @@ handle_call({unsubscribe, Topic}, _From, Channel) -> {ok, _, NChannel} = do_unsubscribe(TopicFilters, Channel), reply_and_update(ok, NChannel); handle_call(subscriptions, _From, Channel = #channel{session = Session}) -> - reply({ok, maps:to_list(emqx_session:info(subscriptions, Session))}, Channel); + reply({ok, maps:to_list(emqx_mqttsn_session:info(subscriptions, Session))}, Channel); handle_call(kick, _From, Channel) -> NChannel = ensure_disconnected(kicked, Channel), shutdown_and_reply(kicked, ok, NChannel); @@ -1822,7 +1801,7 @@ handle_call( pendings = Pendings } ) -> - ok = emqx_session:takeover(Session), + ok = emqx_mqttsn_session:takeover(Session), %% TODO: Should not drain deliver here (side effect) Delivers = emqx_utils:drain_deliver(), AllPendings = lists:append(Delivers, Pendings), @@ -1899,8 +1878,9 @@ handle_info(clean_authz_cache, Channel) -> {ok, Channel}; handle_info({subscribe, _}, Channel) -> {ok, Channel}; -handle_info({register, TopicName}, Channel) -> - case ensure_registered_topic_name(TopicName, Channel) of +handle_info({register, TopicName}, Channel = #channel{session = Session}) -> + Registry = emqx_mqttsn_session:registry(Session), + case emqx_mqttsn_registry:reg(TopicName, Registry) of {error, Reason} -> ?SLOG(error, #{ msg => "register_topic_failed", @@ -1908,8 +1888,9 @@ handle_info({register, TopicName}, Channel) -> reason => Reason }), {ok, Channel}; - {ok, TopicId} -> - handle_out(register, {TopicId, TopicName}, Channel) + {ok, TopicId, NRegistry} -> + NSession = emqx_mqttsn_session:set_registry(NRegistry, Session), + handle_out(register, {TopicId, TopicName}, Channel#channel{session = NSession}) end; handle_info(Info, Channel) -> ?SLOG(error, #{ @@ -1928,21 +1909,6 @@ maybe_shutdown(Reason, Channel = #channel{conninfo = ConnInfo}) -> shutdown(Reason, Channel) end. -ensure_registered_topic_name( - TopicName, - Channel = #channel{registry = Registry} -) -> - ClientId = clientid(Channel), - case emqx_mqttsn_registry:lookup_topic_id(Registry, ClientId, TopicName) of - undefined -> - case emqx_mqttsn_registry:register_topic(Registry, ClientId, TopicName) of - {error, Reason} -> {error, Reason}; - TopicId -> {ok, TopicId} - end; - TopicId -> - {ok, TopicId} - end. 
- %%-------------------------------------------------------------------- %% Ensure disconnected @@ -1991,7 +1957,7 @@ handle_deliver( ConnState =:= disconnected; ConnState =:= asleep -> - NSession = emqx_session:enqueue( + NSession = emqx_mqttsn_session:enqueue( ClientInfo, ignore_local(maybe_nack(Delivers), ClientId, Session, Ctx), Session @@ -2027,7 +1993,7 @@ handle_deliver( } ) -> case - emqx_session:deliver( + emqx_mqttsn_session:deliver( ClientInfo, ignore_local(Delivers, ClientId, Session, Ctx), Session @@ -2045,7 +2011,7 @@ handle_deliver( end. ignore_local(Delivers, Subscriber, Session, Ctx) -> - Subs = emqx_session:info(subscriptions, Session), + Subs = emqx_mqttsn_session:info(subscriptions, Session), lists:filter( fun({deliver, Topic, #message{from = Publisher}}) -> case maps:find(Topic, Subs) of @@ -2120,7 +2086,7 @@ handle_timeout( retry_delivery, Channel = #channel{session = Session, clientinfo = ClientInfo} ) -> - case emqx_session:retry(ClientInfo, Session) of + case emqx_mqttsn_session:retry(ClientInfo, Session) of {ok, NSession} -> {ok, clean_timer(retry_timer, Channel#channel{session = NSession})}; {ok, Publishes, Timeout, NSession} -> @@ -2145,7 +2111,7 @@ handle_timeout( expire_awaiting_rel, Channel = #channel{session = Session, clientinfo = ClientInfo} ) -> - case emqx_session:expire(ClientInfo, awaiting_rel, Session) of + case emqx_mqttsn_session:expire(ClientInfo, awaiting_rel, Session) of {ok, NSession} -> {ok, clean_timer(await_timer, Channel#channel{session = NSession})}; {ok, Timeout, NSession} -> @@ -2289,17 +2255,14 @@ clean_timer(Name, Channel = #channel{timers = Timers}) -> interval(alive_timer, #channel{keepalive = KeepAlive}) -> emqx_keepalive:info(interval, KeepAlive); interval(retry_timer, #channel{session = Session}) -> - emqx_session:info(retry_interval, Session); + emqx_mqttsn_session:info(retry_interval, Session); interval(await_timer, #channel{session = Session}) -> - emqx_session:info(await_rel_timeout, Session). + emqx_mqttsn_session:info(await_rel_timeout, Session). %%-------------------------------------------------------------------- %% Helper functions %%-------------------------------------------------------------------- -clientid(#channel{clientinfo = #{clientid := ClientId}}) -> - ClientId. - run_hooks(Ctx, Name, Args) -> emqx_gateway_ctx:metrics_inc(Ctx, Name), emqx_hooks:run(Name, Args). diff --git a/apps/emqx_gateway_mqttsn/src/emqx_mqttsn_registry.erl b/apps/emqx_gateway_mqttsn/src/emqx_mqttsn_registry.erl index 9db355a9b..3113fc43d 100644 --- a/apps/emqx_gateway_mqttsn/src/emqx_mqttsn_registry.erl +++ b/apps/emqx_gateway_mqttsn/src/emqx_mqttsn_registry.erl @@ -17,64 +17,92 @@ %% @doc The MQTT-SN Topic Registry -module(emqx_mqttsn_registry). --behaviour(gen_server). - -include("emqx_mqttsn.hrl"). --include_lib("emqx/include/logger.hrl"). - --export([start_link/2]). -export([ - register_topic/3, - unregister_topic/2 + persist_predefined_topics/1, + clear_predefined_topics/1 ]). -export([ - lookup_topic/3, - lookup_topic_id/3 + init/0, + reg/2, + unreg/2, + lookup_topic/2, + lookup_topic_id/2 ]). -%% gen_server callbacks --export([ - init/1, - handle_call/3, - handle_cast/2, - handle_info/2, - terminate/2, - code_change/3 -]). +-define(PKEY(Id), {mqttsn, predef_topics, Id}). -%% Internal exports (RPC) --export([ - do_register/4 -]). 
+-type registry() :: #{ + %% The last topic id aallocated + last_topic_id := pos_integer(), + %% The mapping from topic id to topic name + id_to_name := map(), + %% The mapping from topic name to topic id + name_to_id := map() +}. --export([lookup_name/1]). - --define(SN_SHARD, emqx_mqttsn_shard). - --record(state, {tabname, max_predef_topic_id = 0}). - --record(emqx_mqttsn_registry, {key, value}). - --type registry() :: {Tab :: atom(), RegistryPid :: pid()}. +-type predef_topic() :: #{ + id := 1..1024, + topic := iolist() +}. %%----------------------------------------------------------------------------- +%% APIs --spec start_link(atom(), list()) -> - ignore - | {ok, pid()} - | {error, Reason :: term()}. -start_link(InstaId, PredefTopics) -> - gen_server:start_link(?MODULE, [InstaId, PredefTopics], []). +-spec persist_predefined_topics([predef_topic()]) -> ok. +persist_predefined_topics(PredefTopics) when is_list(PredefTopics) -> + try + F = fun(#{id := TopicId, topic := TopicName0}) when TopicId =< 1024 -> + TopicName = iolist_to_binary(TopicName0), + persistent_term:put(?PKEY(TopicId), TopicName), + persistent_term:put(?PKEY(TopicName), TopicId) + end, + lists:foreach(F, PredefTopics) + catch + _:_ -> + clear_predefined_topics(PredefTopics), + error(badarg) + end. --spec register_topic(registry(), emqx_types:clientid(), emqx_types:topic()) -> - integer() +-spec clear_predefined_topics([predef_topic()]) -> ok. +clear_predefined_topics(PredefTopics) -> + lists:foreach( + fun(#{id := TopicId, topic := TopicName0}) -> + TopicName = iolist_to_binary(TopicName0), + persistent_term:erase(?PKEY(TopicId)), + persistent_term:erase(?PKEY(TopicName)) + end, + PredefTopics + ), + ok. + +-spec init() -> registry(). +init() -> + #{ + last_topic_id => ?SN_MAX_PREDEF_TOPIC_ID, + id_to_name => #{}, + name_to_id => #{} + }. + +-spec reg(emqx_types:topic(), registry()) -> + {ok, integer(), registry()} | {error, term()}. -register_topic({_, Pid}, ClientId, TopicName) when is_binary(TopicName) -> +reg( + TopicName, + Registry +) when is_binary(TopicName) -> case emqx_topic:wildcard(TopicName) of false -> - gen_server:call(Pid, {register, ClientId, TopicName}); + case lookup_topic_id(TopicName, Registry) of + {predef, TopicId} when is_integer(TopicId) -> + {ok, TopicId, Registry}; + TopicId when is_integer(TopicId) -> + {ok, TopicId, Registry}; + undefined -> + do_reg(TopicName, Registry) + end; %% TopicId: in case of “accepted” the value that will be used as topic %% id by the gateway when sending PUBLISH messages to the client (not %% relevant in case of subscriptions to a short topic name or to a topic @@ -83,182 +111,64 @@ register_topic({_, Pid}, ClientId, TopicName) when is_binary(TopicName) -> {error, wildcard_topic} end. --spec lookup_topic(registry(), emqx_types:clientid(), pos_integer()) -> +do_reg( + TopicName, + Registry = #{ + last_topic_id := TopicId0, + id_to_name := IdMap, + name_to_id := NameMap + } +) -> + case next_topic_id(TopicId0) of + {error, too_large} -> + {error, too_large}; + NextTopicId -> + NRegistry = Registry#{ + last_topic_id := NextTopicId, + id_to_name := maps:put(NextTopicId, TopicName, IdMap), + name_to_id := maps:put(TopicName, NextTopicId, NameMap) + }, + {ok, NextTopicId, NRegistry} + end. + +next_topic_id(Id) when is_integer(Id) andalso (Id < 16#FFFF) -> + Id + 1; +next_topic_id(Id) when is_integer(Id) -> + {error, too_large}. + +-spec lookup_topic(pos_integer(), registry()) -> undefined | binary(). 
-lookup_topic({Tab, _}, ClientId, TopicId) when is_integer(TopicId) -> - case lookup_element(Tab, {predef, TopicId}, 3) of +lookup_topic(TopicId, _Registry = #{id_to_name := IdMap}) when is_integer(TopicId) -> + case persistent_term:get(?PKEY(TopicId), undefined) of undefined -> - lookup_element(Tab, {ClientId, TopicId}, 3); + maps:get(TopicId, IdMap, undefined); Topic -> Topic end. --spec lookup_topic_id(registry(), emqx_types:clientid(), emqx_types:topic()) -> +-spec lookup_topic_id(emqx_types:topic(), registry()) -> undefined | pos_integer() | {predef, integer()}. -lookup_topic_id({Tab, _}, ClientId, TopicName) when is_binary(TopicName) -> - case lookup_element(Tab, {predef, TopicName}, 3) of +lookup_topic_id(TopicName, _Registry = #{name_to_id := NameMap}) when is_binary(TopicName) -> + case persistent_term:get(?PKEY(TopicName), undefined) of undefined -> - lookup_element(Tab, {ClientId, TopicName}, 3); + maps:get(TopicName, NameMap, undefined); TopicId -> {predef, TopicId} end. -%% @private -lookup_element(Tab, Key, Pos) -> - try - ets:lookup_element(Tab, Key, Pos) - catch - error:badarg -> undefined - end. - --spec unregister_topic(registry(), emqx_types:clientid()) -> ok. -unregister_topic({_, Pid}, ClientId) -> - gen_server:call(Pid, {unregister, ClientId}). - -lookup_name(Pid) -> - gen_server:call(Pid, name). - -%%----------------------------------------------------------------------------- - -name(InstaId) -> - list_to_atom(lists:concat([emqx_mqttsn_, InstaId, '_registry'])). - -init([InstaId, PredefTopics]) -> - %% {predef, TopicId} -> TopicName - %% {predef, TopicName} -> TopicId - %% {ClientId, TopicId} -> TopicName - %% {ClientId, TopicName} -> TopicId - Tab = name(InstaId), - ok = mria:create_table(Tab, [ - {storage, ram_copies}, - {record_name, emqx_mqttsn_registry}, - {attributes, record_info(fields, emqx_mqttsn_registry)}, - {storage_properties, [{ets, [{read_concurrency, true}]}]}, - {rlog_shard, ?SN_SHARD} - ]), - ok = mria:wait_for_tables([Tab]), - MaxPredefId = lists:foldl( - fun(#{id := TopicId, topic := TopicName0}, AccId) -> - TopicName = iolist_to_binary(TopicName0), - mria:dirty_write(Tab, #emqx_mqttsn_registry{ - key = {predef, TopicId}, - value = TopicName - }), - mria:dirty_write(Tab, #emqx_mqttsn_registry{ - key = {predef, TopicName}, - value = TopicId - }), - case TopicId > AccId of - true -> TopicId; - false -> AccId - end - end, - 0, - PredefTopics - ), - {ok, #state{tabname = Tab, max_predef_topic_id = MaxPredefId}}. 
- -handle_call( - {register, ClientId, TopicName}, - _From, - State = #state{tabname = Tab, max_predef_topic_id = PredefId} -) -> - case lookup_topic_id({Tab, self()}, ClientId, TopicName) of - {predef, PredefTopicId} when is_integer(PredefTopicId) -> - {reply, PredefTopicId, State}; - TopicId when is_integer(TopicId) -> - {reply, TopicId, State}; - undefined -> - case next_topic_id(Tab, PredefId, ClientId) of - TopicId when TopicId >= 16#FFFF -> - {reply, {error, too_large}, State}; - TopicId -> - case - mria:transaction(?SN_SHARD, fun ?MODULE:do_register/4, [ - Tab, ClientId, TopicId, TopicName - ]) - of - {atomic, ok} -> - {reply, TopicId, State}; - {aborted, Error} -> - {reply, {error, Error}, State} - end - end - end; -handle_call({unregister, ClientId}, _From, State = #state{tabname = Tab}) -> - Registry = mnesia:dirty_match_object( - Tab, - {emqx_mqttsn_registry, {ClientId, '_'}, '_'} - ), - lists:foreach( - fun(R) -> - mria:dirty_delete_object(Tab, R) - end, - Registry - ), - {reply, ok, State}; -handle_call(name, _From, State = #state{tabname = Tab}) -> - {reply, {Tab, self()}, State}; -handle_call(Req, _From, State) -> - ?SLOG(error, #{ - msg => "unexpected_call", - call => Req - }), - {reply, ignored, State}. - -handle_cast(Msg, State) -> - ?SLOG(error, #{ - msg => "unexpected_cast", - cast => Msg - }), - {noreply, State}. - -handle_info(Info, State) -> - ?SLOG(error, #{ - msg => "unexpected_info", - info => Info - }), - {noreply, State}. - -terminate(_Reason, _State) -> - ok. - -code_change(_OldVsn, State, _Extra) -> - {ok, State}. - -do_register(Tab, ClientId, TopicId, TopicName) -> - mnesia:write( - Tab, - #emqx_mqttsn_registry{ - key = {ClientId, next_topic_id}, - value = TopicId + 1 - }, - write - ), - mnesia:write( - Tab, - #emqx_mqttsn_registry{ - key = {ClientId, TopicName}, - value = TopicId - }, - write - ), - mnesia:write( - Tab, - #emqx_mqttsn_registry{ - key = {ClientId, TopicId}, - value = TopicName - }, - write - ). - -%%----------------------------------------------------------------------------- - -next_topic_id(Tab, PredefId, ClientId) -> - case mnesia:dirty_read(Tab, {ClientId, next_topic_id}) of - [#emqx_mqttsn_registry{value = Id}] -> Id; - [] -> PredefId + 1 +-spec unreg(emqx_types:topic(), registry()) -> registry(). +unreg(TopicName, Registry = #{name_to_id := NameMap, id_to_name := IdMap}) when + is_binary(TopicName) +-> + case maps:find(TopicName, NameMap) of + {ok, TopicId} -> + Registry#{ + name_to_id := maps:remove(TopicName, NameMap), + id_to_name := maps:remove(TopicId, IdMap) + }; + error -> + Registry end. diff --git a/apps/emqx_gateway_mqttsn/src/emqx_mqttsn_schema.erl b/apps/emqx_gateway_mqttsn/src/emqx_mqttsn_schema.erl index cb33cbe95..08fb854b4 100644 --- a/apps/emqx_gateway_mqttsn/src/emqx_mqttsn_schema.erl +++ b/apps/emqx_gateway_mqttsn/src/emqx_mqttsn_schema.erl @@ -16,6 +16,7 @@ -module(emqx_mqttsn_schema). +-include("emqx_mqttsn.hrl"). -include_lib("hocon/include/hoconsc.hrl"). -include_lib("typerefl/include/types.hrl"). 
@@ -41,7 +42,6 @@ fields(mqttsn) -> desc => ?DESC(mqttsn_broadcast) } )}, - %% TODO: rename {enable_qos3, sc( boolean(), @@ -73,7 +73,7 @@ fields(mqttsn) -> fields(mqttsn_predefined) -> [ {id, - sc(integer(), #{ + sc(range(1, ?SN_MAX_PREDEF_TOPIC_ID), #{ required => true, desc => ?DESC(mqttsn_predefined_id) })}, diff --git a/apps/emqx_gateway_mqttsn/src/emqx_mqttsn_session.erl b/apps/emqx_gateway_mqttsn/src/emqx_mqttsn_session.erl new file mode 100644 index 000000000..7c62800cc --- /dev/null +++ b/apps/emqx_gateway_mqttsn/src/emqx_mqttsn_session.erl @@ -0,0 +1,144 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_mqttsn_session). + +-export([registry/1, set_registry/2]). + +-export([ + init/1, + info/1, + info/2, + stats/1, + resume/2 +]). + +-export([ + publish/4, + subscribe/4, + unsubscribe/4, + puback/3, + pubrec/3, + pubrel/3, + pubcomp/3 +]). + +-export([ + replay/2, + deliver/3, + obtain_next_pkt_id/1, + takeover/1, + enqueue/3, + retry/2, + expire/3 +]). + +-type session() :: #{ + registry := emqx_mqttsn_registry:registry(), + session := emqx_session:session() +}. + +-export_type([session/0]). + +init(ClientInfo) -> + Conf = emqx_cm:get_session_confs( + ClientInfo, #{receive_maximum => 1, expiry_interval => 0} + ), + #{ + registry => emqx_mqttsn_registry:init(), + session => emqx_session:init(Conf) + }. + +registry(#{registry := Registry}) -> + Registry. + +set_registry(Registry, Session) -> + Session#{registry := Registry}. + +info(#{session := Session}) -> + emqx_session:info(Session). + +info(Key, #{session := Session}) -> + emqx_session:info(Key, Session). + +stats(#{session := Session}) -> + emqx_session:stats(Session). + +puback(ClientInfo, MsgId, Session) -> + with_sess(?FUNCTION_NAME, [ClientInfo, MsgId], Session). + +pubrec(ClientInfo, MsgId, Session) -> + with_sess(?FUNCTION_NAME, [ClientInfo, MsgId], Session). + +pubrel(ClientInfo, MsgId, Session) -> + with_sess(?FUNCTION_NAME, [ClientInfo, MsgId], Session). + +pubcomp(ClientInfo, MsgId, Session) -> + with_sess(?FUNCTION_NAME, [ClientInfo, MsgId], Session). + +publish(ClientInfo, MsgId, Msg, Session) -> + with_sess(?FUNCTION_NAME, [ClientInfo, MsgId, Msg], Session). + +subscribe(ClientInfo, Topic, SubOpts, Session) -> + with_sess(?FUNCTION_NAME, [ClientInfo, Topic, SubOpts], Session). + +unsubscribe(ClientInfo, Topic, SubOpts, Session) -> + with_sess(?FUNCTION_NAME, [ClientInfo, Topic, SubOpts], Session). + +replay(ClientInfo, Session) -> + with_sess(?FUNCTION_NAME, [ClientInfo], Session). + +deliver(ClientInfo, Delivers, Session1) -> + with_sess(?FUNCTION_NAME, [ClientInfo, Delivers], Session1). + +obtain_next_pkt_id(Session = #{session := Sess}) -> + {Id, Sess1} = emqx_session:obtain_next_pkt_id(Sess), + {Id, Session#{session := Sess1}}. 
+ +takeover(_Session = #{session := Sess}) -> + emqx_session:takeover(Sess). + +enqueue(ClientInfo, Delivers, Session = #{session := Sess}) -> + Sess1 = emqx_session:enqueue(ClientInfo, Delivers, Sess), + Session#{session := Sess1}. + +retry(ClientInfo, Session) -> + with_sess(?FUNCTION_NAME, [ClientInfo], Session). + +expire(ClientInfo, awaiting_rel, Session) -> + with_sess(?FUNCTION_NAME, [ClientInfo, awaiting_rel], Session). + +resume(ClientInfo, #{session := Sess}) -> + emqx_session:resume(ClientInfo, Sess). + +%%-------------------------------------------------------------------- +%% internal funcs + +with_sess(Fun, Args, Session = #{session := Sess}) -> + case apply(emqx_session, Fun, Args ++ [Sess]) of + %% for subscribe + {error, Reason} -> + {error, Reason}; + %% for pubrel + {ok, Sess1} -> + {ok, Session#{session := Sess1}}; + %% for publish and puback + {ok, Result, Sess1} -> + {ok, Result, Session#{session := Sess1}}; + %% for puback + {ok, Msgs, Replies, Sess1} -> + {ok, Msgs, Replies, Session#{session := Sess1}} + end. diff --git a/apps/emqx_gateway_mqttsn/test/emqx_sn_protocol_SUITE.erl b/apps/emqx_gateway_mqttsn/test/emqx_sn_protocol_SUITE.erl index b22d7b4b0..c3fa89c70 100644 --- a/apps/emqx_gateway_mqttsn/test/emqx_sn_protocol_SUITE.erl +++ b/apps/emqx_gateway_mqttsn/test/emqx_sn_protocol_SUITE.erl @@ -35,6 +35,8 @@ -include_lib("emqx/include/emqx.hrl"). -include_lib("emqx/include/emqx_mqtt.hrl"). +-include_lib("snabbkaffe/include/snabbkaffe.hrl"). + -define(HOST, {127, 0, 0, 1}). -define(PORT, 1884). @@ -45,12 +47,15 @@ -define(LOG(Format, Args), ct:log("TEST: " ++ Format, Args)). --define(MAX_PRED_TOPIC_ID, 2). +-define(MAX_PRED_TOPIC_ID, ?SN_MAX_PREDEF_TOPIC_ID). -define(PREDEF_TOPIC_ID1, 1). -define(PREDEF_TOPIC_ID2, 2). -define(PREDEF_TOPIC_NAME1, <<"/predefined/topic/name/hello">>). -define(PREDEF_TOPIC_NAME2, <<"/predefined/topic/name/nice">>). --define(ENABLE_QOS3, true). +-define(DEFAULT_PREDEFINED_TOPICS, [ + #{<<"id">> => ?PREDEF_TOPIC_ID1, <<"topic">> => ?PREDEF_TOPIC_NAME1}, + #{<<"id">> => ?PREDEF_TOPIC_ID2, <<"topic">> => ?PREDEF_TOPIC_NAME2} +]). % FLAG NOT USED -define(FNU, 0). @@ -120,6 +125,20 @@ restart_mqttsn_with_subs_resume_off() -> Conf#{<<"subs_resume">> => <<"false">>} ). +restart_mqttsn_with_neg_qos_on() -> + Conf = emqx:get_raw_config([gateway, mqttsn]), + emqx_gateway_conf:update_gateway( + mqttsn, + Conf#{<<"enable_qos3">> => <<"true">>} + ). + +restart_mqttsn_with_neg_qos_off() -> + Conf = emqx:get_raw_config([gateway, mqttsn]), + emqx_gateway_conf:update_gateway( + mqttsn, + Conf#{<<"enable_qos3">> => <<"false">>} + ). + restart_mqttsn_with_mountpoint(Mp) -> Conf = emqx:get_raw_config([gateway, mqttsn]), emqx_gateway_conf:update_gateway( @@ -127,6 +146,13 @@ restart_mqttsn_with_mountpoint(Mp) -> Conf#{<<"mountpoint">> => Mp} ). +restart_mqttsn_with_predefined_topics(Topics) -> + Conf = emqx:get_raw_config([gateway, mqttsn]), + emqx_gateway_conf:update_gateway( + mqttsn, + Conf#{<<"predefined">> => Topics} + ). + default_config() -> ?CONF_DEFAULT. @@ -471,7 +497,36 @@ t_subscribe_case08(_) -> ?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)), gen_udp:close(Socket). 
-t_publish_negqos_case09(_) -> +t_subscribe_predefined_topic(_) -> + Dup = 0, + QoS = 0, + Retain = 0, + Will = 0, + CleanSession = 0, + MsgId = 1, + Socket = ensure_connected_client(?CLIENTID), + send_subscribe_msg_predefined_topic(Socket, 0, ?PREDEF_TOPIC_ID1, 1), + ?assertEqual( + <<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, + ?PREDEF_TOPIC_ID1:16, MsgId:16, ?SN_RC_ACCEPTED>>, + receive_response(Socket) + ), + send_disconnect_msg(Socket, undefined), + gen_udp:close(Socket), + + restart_mqttsn_with_predefined_topics([]), + Socket1 = ensure_connected_client(?CLIENTID), + send_subscribe_msg_predefined_topic(Socket1, 0, ?PREDEF_TOPIC_ID1, 1), + ?assertEqual( + <<8, ?SN_SUBACK, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, 0:16, + MsgId:16, ?SN_RC_INVALID_TOPIC_ID>>, + receive_response(Socket1) + ), + send_disconnect_msg(Socket1, undefined), + restart_mqttsn_with_predefined_topics(?DEFAULT_PREDEFINED_TOPICS), + gen_udp:close(Socket1). + +t_publish_negqos_enabled(_) -> Dup = 0, QoS = 0, NegQoS = 3, @@ -497,19 +552,40 @@ t_publish_negqos_case09(_) -> Payload1 = <<20, 21, 22, 23>>, send_publish_msg_normal_topic(Socket, NegQoS, MsgId1, TopicId1, Payload1), timer:sleep(100), - case ?ENABLE_QOS3 of - true -> - Eexp = - <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, - ?SN_NORMAL_TOPIC:2, TopicId1:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>, - What = receive_response(Socket), - ?assertEqual(Eexp, What) - end, + Eexp = + <<11, ?SN_PUBLISH, Dup:1, QoS:2, Retain:1, Will:1, CleanSession:1, ?SN_NORMAL_TOPIC:2, + TopicId1:16, (mid(0)):16, <<20, 21, 22, 23>>/binary>>, + What = receive_response(Socket), + ?assertEqual(Eexp, What), send_disconnect_msg(Socket, undefined), ?assertEqual(<<2, ?SN_DISCONNECT>>, receive_response(Socket)), gen_udp:close(Socket). +t_publish_negqos_disabled(_) -> + restart_mqttsn_with_neg_qos_off(), + NegQoS = 3, + MsgId = 1, + Payload = <<"abc">>, + TopicId = ?MAX_PRED_TOPIC_ID, + {ok, Socket} = gen_udp:open(0, [binary]), + ?check_trace( + begin + send_publish_msg_predefined_topic(Socket, NegQoS, MsgId, TopicId, Payload), + ?assertEqual( + <<7, ?SN_PUBACK, TopicId:16, MsgId:16, ?SN_RC_NOT_SUPPORTED>>, + receive_response(Socket) + ), + receive_response(Socket) + end, + fun(Trace0) -> + Trace = ?of_kind(ignore_negative_qos, Trace0), + ?assertMatch([#{return_code := ?SN_RC_NOT_SUPPORTED}], Trace) + end + ), + restart_mqttsn_with_neg_qos_on(), + gen_udp:close(Socket). + t_publish_qos0_case01(_) -> Dup = 0, QoS = 0, @@ -2737,3 +2813,9 @@ flush(Msgs) -> M -> flush([M | Msgs]) after 0 -> lists:reverse(Msgs) end. + +ensure_connected_client(ClientId) -> + {ok, Socket} = gen_udp:open(0, [binary]), + send_connect_msg(Socket, ClientId), + ?assertEqual(<<3, ?SN_CONNACK, 0>>, receive_response(Socket)), + Socket. diff --git a/apps/emqx_gateway_mqttsn/test/emqx_sn_registry_SUITE.erl b/apps/emqx_gateway_mqttsn/test/emqx_sn_registry_SUITE.erl index 4d89a802d..8d60570a0 100644 --- a/apps/emqx_gateway_mqttsn/test/emqx_sn_registry_SUITE.erl +++ b/apps/emqx_gateway_mqttsn/test/emqx_sn_registry_SUITE.erl @@ -19,10 +19,11 @@ -compile(export_all). -compile(nowarn_export_all). +-include("emqx_mqttsn.hrl"). -include_lib("eunit/include/eunit.hrl"). -define(REGISTRY, emqx_mqttsn_registry). --define(MAX_PREDEF_ID, 2). +-define(MAX_PREDEF_ID, ?SN_MAX_PREDEF_TOPIC_ID). 
-define(PREDEF_TOPICS, [ #{id => 1, topic => <<"/predefined/topic/name/hello">>}, #{id => 2, topic => <<"/predefined/topic/name/nice">>} @@ -36,96 +37,64 @@ all() -> emqx_common_test_helpers:all(?MODULE). init_per_suite(Config) -> - application:ensure_all_started(ekka), - mria:start(), Config. end_per_suite(_Config) -> - application:stop(ekka), ok. init_per_testcase(_TestCase, Config) -> - {ok, Pid} = ?REGISTRY:start_link('mqttsn', ?PREDEF_TOPICS), - {Tab, Pid} = ?REGISTRY:lookup_name(Pid), - [{reg, {Tab, Pid}} | Config]. + emqx_mqttsn_registry:persist_predefined_topics(?PREDEF_TOPICS), + Config. end_per_testcase(_TestCase, Config) -> - {Tab, _Pid} = proplists:get_value(reg, Config), - mria:clear_table(Tab), + emqx_mqttsn_registry:clear_predefined_topics(?PREDEF_TOPICS), Config. %%-------------------------------------------------------------------- %% Test cases %%-------------------------------------------------------------------- -t_register(Config) -> - Reg = proplists:get_value(reg, Config), - ?assertEqual(?MAX_PREDEF_ID + 1, ?REGISTRY:register_topic(Reg, <<"ClientId">>, <<"Topic1">>)), - ?assertEqual(?MAX_PREDEF_ID + 2, ?REGISTRY:register_topic(Reg, <<"ClientId">>, <<"Topic2">>)), - ?assertEqual(<<"Topic1">>, ?REGISTRY:lookup_topic(Reg, <<"ClientId">>, ?MAX_PREDEF_ID + 1)), - ?assertEqual(<<"Topic2">>, ?REGISTRY:lookup_topic(Reg, <<"ClientId">>, ?MAX_PREDEF_ID + 2)), - ?assertEqual(?MAX_PREDEF_ID + 1, ?REGISTRY:lookup_topic_id(Reg, <<"ClientId">>, <<"Topic1">>)), - ?assertEqual(?MAX_PREDEF_ID + 2, ?REGISTRY:lookup_topic_id(Reg, <<"ClientId">>, <<"Topic2">>)), - emqx_mqttsn_registry:unregister_topic(Reg, <<"ClientId">>), - ?assertEqual(undefined, ?REGISTRY:lookup_topic(Reg, <<"ClientId">>, ?MAX_PREDEF_ID + 1)), - ?assertEqual(undefined, ?REGISTRY:lookup_topic(Reg, <<"ClientId">>, ?MAX_PREDEF_ID + 2)), - ?assertEqual(undefined, ?REGISTRY:lookup_topic_id(Reg, <<"ClientId">>, <<"Topic1">>)), - ?assertEqual(undefined, ?REGISTRY:lookup_topic_id(Reg, <<"ClientId">>, <<"Topic2">>)). 
+t_register(_) -> + Reg = ?REGISTRY:init(), + {ok, ?MAX_PREDEF_ID + 1, Reg1} = ?REGISTRY:reg(<<"Topic1">>, Reg), + {ok, ?MAX_PREDEF_ID + 2, Reg2} = ?REGISTRY:reg(<<"Topic2">>, Reg1), + ?assertMatch({ok, ?MAX_PREDEF_ID + 1, Reg2}, ?REGISTRY:reg(<<"Topic1">>, Reg2)), + ?assertEqual(<<"Topic1">>, ?REGISTRY:lookup_topic(?MAX_PREDEF_ID + 1, Reg2)), + ?assertEqual(<<"Topic2">>, ?REGISTRY:lookup_topic(?MAX_PREDEF_ID + 2, Reg2)), + ?assertEqual(?MAX_PREDEF_ID + 1, ?REGISTRY:lookup_topic_id(<<"Topic1">>, Reg2)), + ?assertEqual(?MAX_PREDEF_ID + 2, ?REGISTRY:lookup_topic_id(<<"Topic2">>, Reg2)), -t_register_case2(Config) -> - Reg = proplists:get_value(reg, Config), - ?assertEqual(?MAX_PREDEF_ID + 1, ?REGISTRY:register_topic(Reg, <<"ClientId">>, <<"Topic1">>)), - ?assertEqual(?MAX_PREDEF_ID + 2, ?REGISTRY:register_topic(Reg, <<"ClientId">>, <<"Topic2">>)), - ?assertEqual(?MAX_PREDEF_ID + 1, ?REGISTRY:register_topic(Reg, <<"ClientId">>, <<"Topic1">>)), - ?assertEqual(<<"Topic1">>, ?REGISTRY:lookup_topic(Reg, <<"ClientId">>, ?MAX_PREDEF_ID + 1)), - ?assertEqual(<<"Topic2">>, ?REGISTRY:lookup_topic(Reg, <<"ClientId">>, ?MAX_PREDEF_ID + 2)), - ?assertEqual(?MAX_PREDEF_ID + 1, ?REGISTRY:lookup_topic_id(Reg, <<"ClientId">>, <<"Topic1">>)), - ?assertEqual(?MAX_PREDEF_ID + 2, ?REGISTRY:lookup_topic_id(Reg, <<"ClientId">>, <<"Topic2">>)), - ?assertEqual(undefined, ?REGISTRY:lookup_topic_id(Reg, <<"ClientId">>, <<"Topic3">>)), - ?REGISTRY:unregister_topic(Reg, <<"ClientId">>), - ?assertEqual(undefined, ?REGISTRY:lookup_topic(Reg, <<"ClientId">>, ?MAX_PREDEF_ID + 1)), - ?assertEqual(undefined, ?REGISTRY:lookup_topic(Reg, <<"ClientId">>, ?MAX_PREDEF_ID + 2)), - ?assertEqual(undefined, ?REGISTRY:lookup_topic_id(Reg, <<"ClientId">>, <<"Topic1">>)), - ?assertEqual(undefined, ?REGISTRY:lookup_topic_id(Reg, <<"ClientId">>, <<"Topic2">>)). + Reg3 = emqx_mqttsn_registry:unreg(<<"Topic1">>, Reg2), + ?assertEqual(undefined, ?REGISTRY:lookup_topic(?MAX_PREDEF_ID + 1, Reg3)), + ?assertEqual(undefined, ?REGISTRY:lookup_topic_id(<<"Topic1">>, Reg3)), + ?assertEqual(<<"Topic2">>, ?REGISTRY:lookup_topic(?MAX_PREDEF_ID + 2, Reg3)), + ?assertEqual(?MAX_PREDEF_ID + 2, ?REGISTRY:lookup_topic_id(<<"Topic2">>, Reg3)), -t_reach_maximum(Config) -> - Reg = proplists:get_value(reg, Config), - register_a_lot(?MAX_PREDEF_ID + 1, 16#ffff, Reg), - ?assertEqual({error, too_large}, ?REGISTRY:register_topic(Reg, <<"ClientId">>, <<"TopicABC">>)), - Topic1 = iolist_to_binary(io_lib:format("Topic~p", [?MAX_PREDEF_ID + 1])), - Topic2 = iolist_to_binary(io_lib:format("Topic~p", [?MAX_PREDEF_ID + 2])), - ?assertEqual(?MAX_PREDEF_ID + 1, ?REGISTRY:lookup_topic_id(Reg, <<"ClientId">>, Topic1)), - ?assertEqual(?MAX_PREDEF_ID + 2, ?REGISTRY:lookup_topic_id(Reg, <<"ClientId">>, Topic2)), - ?REGISTRY:unregister_topic(Reg, <<"ClientId">>), - ?assertEqual(undefined, ?REGISTRY:lookup_topic(Reg, <<"ClientId">>, ?MAX_PREDEF_ID + 1)), - ?assertEqual(undefined, ?REGISTRY:lookup_topic(Reg, <<"ClientId">>, ?MAX_PREDEF_ID + 2)), - ?assertEqual(undefined, ?REGISTRY:lookup_topic_id(Reg, <<"ClientId">>, Topic1)), - ?assertEqual(undefined, ?REGISTRY:lookup_topic_id(Reg, <<"ClientId">>, Topic2)). + ?assertMatch({ok, ?MAX_PREDEF_ID + 3, _Reg4}, ?REGISTRY:reg(<<"Topic3">>, Reg3)). 
-t_register_case4(Config) -> - Reg = proplists:get_value(reg, Config), - ?assertEqual(?MAX_PREDEF_ID + 1, ?REGISTRY:register_topic(Reg, <<"ClientId">>, <<"TopicA">>)), - ?assertEqual(?MAX_PREDEF_ID + 2, ?REGISTRY:register_topic(Reg, <<"ClientId">>, <<"TopicB">>)), - ?assertEqual(?MAX_PREDEF_ID + 3, ?REGISTRY:register_topic(Reg, <<"ClientId">>, <<"TopicC">>)), - ?REGISTRY:unregister_topic(Reg, <<"ClientId">>), - ?assertEqual(?MAX_PREDEF_ID + 1, ?REGISTRY:register_topic(Reg, <<"ClientId">>, <<"TopicD">>)). +t_reach_maximum(_) -> + Reg0 = ?REGISTRY:init(), + Reg = register_a_lot(?MAX_PREDEF_ID + 1, 16#ffff, Reg0), + ?assertEqual({error, too_large}, ?REGISTRY:reg(<<"TopicABC">>, Reg)), + ?assertEqual(?MAX_PREDEF_ID + 1, ?REGISTRY:lookup_topic_id(<<"Topic1025">>, Reg)), + ?assertEqual(?MAX_PREDEF_ID + 2, ?REGISTRY:lookup_topic_id(<<"Topic1026">>, Reg)). -t_deny_wildcard_topic(Config) -> - Reg = proplists:get_value(reg, Config), - ?assertEqual( - {error, wildcard_topic}, ?REGISTRY:register_topic(Reg, <<"ClientId">>, <<"/TopicA/#">>) - ), - ?assertEqual( - {error, wildcard_topic}, ?REGISTRY:register_topic(Reg, <<"ClientId">>, <<"/+/TopicB">>) - ). +t_deny_wildcard_topic(_) -> + Reg = ?REGISTRY:init(), + ?assertEqual({error, wildcard_topic}, ?REGISTRY:reg(<<"/TopicA/#">>, Reg)), + ?assertEqual({error, wildcard_topic}, ?REGISTRY:reg(<<"/+/TopicB">>, Reg)). %%-------------------------------------------------------------------- %% Helper funcs %%-------------------------------------------------------------------- -register_a_lot(Max, Max, _Reg) -> - ok; -register_a_lot(N, Max, Reg) when N < Max -> +register_a_lot(N, Max, Reg) when N =< Max -> Topic = iolist_to_binary(["Topic", integer_to_list(N)]), - ?assertEqual(N, ?REGISTRY:register_topic(Reg, <<"ClientId">>, Topic)), - register_a_lot(N + 1, Max, Reg). + {ok, ReturnedId, Reg1} = ?REGISTRY:reg(Topic, Reg), + ?assertEqual(N, ReturnedId), + case N == Max of + true -> + Reg1; + _ -> + register_a_lot(N + 1, Max, Reg1) + end. diff --git a/apps/emqx_management/src/emqx_mgmt_api_api_keys.erl b/apps/emqx_management/src/emqx_mgmt_api_api_keys.erl index ba21adaa5..432734688 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_api_keys.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_api_keys.erl @@ -183,7 +183,7 @@ delete(Keys, Fields) -> lists:foldl(fun(Key, Acc) -> lists:keydelete(Key, 1, Acc) end, Fields, Keys). 
api_key(get, _) -> - {200, [format(App) || App <- emqx_mgmt_auth:list()]}; + {200, [emqx_mgmt_auth:format(App) || App <- emqx_mgmt_auth:list()]}; api_key(post, #{body := App}) -> #{ <<"name">> := Name, @@ -194,7 +194,7 @@ api_key(post, #{body := App}) -> Desc = unicode:characters_to_binary(Desc0, unicode), case emqx_mgmt_auth:create(Name, Enable, ExpiredAt, Desc) of {ok, NewApp} -> - {200, format(NewApp)}; + {200, emqx_mgmt_auth:format(NewApp)}; {error, Reason} -> {400, #{ code => 'BAD_REQUEST', @@ -206,7 +206,7 @@ api_key(post, #{body := App}) -> api_key_by_name(get, #{bindings := #{name := Name}}) -> case emqx_mgmt_auth:read(Name) of - {ok, App} -> {200, format(App)}; + {ok, App} -> {200, emqx_mgmt_auth:format(App)}; {error, not_found} -> {404, ?NOT_FOUND_RESPONSE} end; api_key_by_name(delete, #{bindings := #{name := Name}}) -> @@ -219,20 +219,9 @@ api_key_by_name(put, #{bindings := #{name := Name}, body := Body}) -> ExpiredAt = ensure_expired_at(Body), Desc = maps:get(<<"desc">>, Body, undefined), case emqx_mgmt_auth:update(Name, Enable, ExpiredAt, Desc) of - {ok, App} -> {200, format(App)}; + {ok, App} -> {200, emqx_mgmt_auth:format(App)}; {error, not_found} -> {404, ?NOT_FOUND_RESPONSE} end. -format(App = #{expired_at := ExpiredAt0, created_at := CreateAt}) -> - ExpiredAt = - case ExpiredAt0 of - infinity -> <<"infinity">>; - _ -> list_to_binary(calendar:system_time_to_rfc3339(ExpiredAt0)) - end, - App#{ - expired_at => ExpiredAt, - created_at => list_to_binary(calendar:system_time_to_rfc3339(CreateAt)) - }. - ensure_expired_at(#{<<"expired_at">> := ExpiredAt}) when is_integer(ExpiredAt) -> ExpiredAt; ensure_expired_at(_) -> infinity. diff --git a/apps/emqx_management/src/emqx_mgmt_api_listeners.erl b/apps/emqx_management/src/emqx_mgmt_api_listeners.erl index 1f1dda5f2..5c2419ccf 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_listeners.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_listeners.erl @@ -19,7 +19,6 @@ -behaviour(minirest_api). -export([namespace/0, api_spec/0, paths/0, schema/1, fields/1]). --import(emqx_dashboard_swagger, [error_codes/2, error_codes/1]). -export([ listener_type_status/2, @@ -36,6 +35,16 @@ do_list_listeners/0 ]). +-import(emqx_dashboard_swagger, [error_codes/2, error_codes/1]). + +-import(emqx_mgmt_listeners_conf, [ + action/4, + create/3, + ensure_remove/2, + get_raw/2, + update/3 +]). + -include_lib("emqx/include/emqx.hrl"). -include_lib("hocon/include/hoconsc.hrl"). @@ -44,7 +53,6 @@ -define(LISTENER_NOT_FOUND, <<"Listener id not found">>). -define(LISTENER_ID_INCONSISTENT, <<"Path and body's listener id not match">>). -define(ADDR_PORT_INUSE, <<"Addr port in use">>). --define(OPTS(_OverrideTo_), #{rawconf_with_defaults => true, override_to => _OverrideTo_}). namespace() -> "listeners". 
@@ -387,14 +395,13 @@ crud_listeners_by_id(get, #{bindings := #{id := Id0}}) -> crud_listeners_by_id(put, #{bindings := #{id := Id}, body := Body0}) -> case parse_listener_conf(Body0) of {Id, Type, Name, Conf} -> - Path = [listeners, Type, Name], - case emqx_conf:get_raw(Path, undefined) of + case get_raw(Type, Name) of undefined -> {404, #{code => 'BAD_LISTENER_ID', message => ?LISTENER_NOT_FOUND}}; PrevConf -> MergeConfT = emqx_utils_maps:deep_merge(PrevConf, Conf), MergeConf = emqx_listeners:ensure_override_limiter_conf(MergeConfT, Conf), - case update(Path, MergeConf) of + case update(Type, Name, MergeConf) of {ok, #{raw_config := _RawConf}} -> crud_listeners_by_id(get, #{bindings => #{id => Id}}); {error, not_found} -> @@ -412,7 +419,7 @@ crud_listeners_by_id(post, #{body := Body}) -> create_listener(Body); crud_listeners_by_id(delete, #{bindings := #{id := Id}}) -> {ok, #{type := Type, name := Name}} = emqx_listeners:parse_listener_id(Id), - case ensure_remove([listeners, Type, Name]) of + case ensure_remove(Type, Name) of {ok, _} -> {204}; {error, Reason} -> {400, #{code => 'BAD_REQUEST', message => err_msg(Reason)}} end. @@ -457,12 +464,11 @@ restart_listeners_by_id(Method, Body = #{bindings := Bindings}) -> action_listeners_by_id(post, #{bindings := #{id := Id, action := Action}}) -> {ok, #{type := Type, name := Name}} = emqx_listeners:parse_listener_id(Id), - Path = [listeners, Type, Name], - case emqx_conf:get_raw(Path, undefined) of + case get_raw(Type, Name) of undefined -> {404, #{code => 'BAD_LISTENER_ID', message => ?LISTENER_NOT_FOUND}}; _PrevConf -> - case action(Path, Action, enabled(Action)) of + case action(Type, Name, Action, enabled(Action)) of {ok, #{raw_config := _RawConf}} -> {200}; {error, not_found} -> @@ -634,23 +640,6 @@ max_conn(_Int1, <<"infinity">>) -> <<"infinity">>; max_conn(<<"infinity">>, _Int) -> <<"infinity">>; max_conn(Int1, Int2) -> Int1 + Int2. -update(Path, Conf) -> - wrap(emqx_conf:update(Path, {update, Conf}, ?OPTS(cluster))). - -action(Path, Action, Conf) -> - wrap(emqx_conf:update(Path, {action, Action, Conf}, ?OPTS(cluster))). - -create(Path, Conf) -> - wrap(emqx_conf:update(Path, {create, Conf}, ?OPTS(cluster))). - -ensure_remove(Path) -> - wrap(emqx_conf:tombstone(Path, ?OPTS(cluster))). - -wrap({error, {post_config_update, emqx_listeners, Reason}}) -> {error, Reason}; -wrap({error, {pre_config_update, emqx_listeners, Reason}}) -> {error, Reason}; -wrap({error, Reason}) -> {error, Reason}; -wrap(Ok) -> Ok. 
- listener_type_status_example() -> [ #{ @@ -813,8 +802,7 @@ tcp_schema_example() -> create_listener(Body) -> case parse_listener_conf(Body) of {Id, Type, Name, Conf} -> - Path = [listeners, Type, Name], - case create(Path, Conf) of + case create(Type, Name, Conf) of {ok, #{raw_config := _RawConf}} -> crud_listeners_by_id(get, #{bindings => #{id => Id}}); {error, already_exist} -> diff --git a/apps/emqx_management/src/emqx_mgmt_api_trace.erl b/apps/emqx_management/src/emqx_mgmt_api_trace.erl index 579f977d8..27789fff9 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_trace.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_trace.erl @@ -206,7 +206,7 @@ fields(log_file_detail) -> fields(node) ++ [ {size, hoconsc:mk(integer(), #{description => ?DESC(file_size)})}, - {mtime, hoconsc:mk(integer(), #{description => ?DESC(modification_date)})} + {mtime, hoconsc:mk(integer(), #{description => ?DESC(file_mtime)})} ]; fields(trace) -> [ @@ -214,7 +214,7 @@ fields(trace) -> hoconsc:mk( binary(), #{ - description => ?DESC(format), + description => ?DESC(trace_name), validator => fun ?MODULE:validate_name/1, required => true, example => <<"EMQX-TRACE-1">> diff --git a/apps/emqx_management/src/emqx_mgmt_auth.erl b/apps/emqx_management/src/emqx_mgmt_auth.erl index ffb41179f..4fe47cf93 100644 --- a/apps/emqx_management/src/emqx_mgmt_auth.erl +++ b/apps/emqx_management/src/emqx_mgmt_auth.erl @@ -17,6 +17,8 @@ -include_lib("emqx/include/emqx.hrl"). -include_lib("emqx/include/logger.hrl"). +-behaviour(emqx_db_backup). + %% API -export([mnesia/1]). -boot_mnesia({mnesia, [boot]}). @@ -28,12 +30,15 @@ update/4, delete/1, list/0, - init_bootstrap_file/0 + init_bootstrap_file/0, + format/1 ]). -export([authorize/3]). -export([post_config_update/5]). +-export([backup_tables/0]). + %% Internal exports (RPC) -export([ do_update/4, @@ -67,6 +72,12 @@ mnesia(boot) -> {attributes, record_info(fields, ?APP)} ]). +%%-------------------------------------------------------------------- +%% Data backup +%%-------------------------------------------------------------------- + +backup_tables() -> [?APP]. + post_config_update([api_key], _Req, NewConf, _OldConf, _AppEnvs) -> #{bootstrap_file := File} = NewConf, case init_bootstrap_file(File) of @@ -127,6 +138,17 @@ do_delete(Name) -> [_App] -> mnesia:delete({?APP, Name}) end. +format(App = #{expired_at := ExpiredAt0, created_at := CreateAt}) -> + ExpiredAt = + case ExpiredAt0 of + infinity -> <<"infinity">>; + _ -> list_to_binary(calendar:system_time_to_rfc3339(ExpiredAt0)) + end, + App#{ + expired_at => ExpiredAt, + created_at => list_to_binary(calendar:system_time_to_rfc3339(CreateAt)) + }. + list() -> to_map(ets:match_object(?APP, #?APP{_ = '_'})). diff --git a/apps/emqx_management/src/emqx_mgmt_cli.erl b/apps/emqx_management/src/emqx_mgmt_cli.erl index 3ebf8e314..a3a643681 100644 --- a/apps/emqx_management/src/emqx_mgmt_cli.erl +++ b/apps/emqx_management/src/emqx_mgmt_cli.erl @@ -25,6 +25,7 @@ -include("emqx_mgmt.hrl"). -define(PRINT_CMD(Cmd, Descr), io:format("~-48s# ~ts~n", [Cmd, Descr])). +-define(DATA_BACKUP_OPTS, #{print_fun => fun emqx_ctl:print/2}). -export([load/0]). @@ -44,7 +45,8 @@ log/1, authz/1, pem_cache/1, - olp/1 + olp/1, + data/1 ]). -define(PROC_INFOKEYS, [ @@ -739,6 +741,37 @@ olp(_) -> {"olp disable", "Disable overload protection"} ]). 
+%%--------------------------------------------------------------------
+%% @doc data Command
+
+data(["export"]) ->
+    case emqx_mgmt_data_backup:export(?DATA_BACKUP_OPTS) of
+        {ok, #{filename := Filename}} ->
+            emqx_ctl:print("Data has been successfully exported to ~s.~n", [Filename]);
+        {error, Reason} ->
+            Reason1 = emqx_mgmt_data_backup:format_error(Reason),
+            emqx_ctl:print("[error] Data export failed, reason: ~p.~n", [Reason1])
+    end;
+data(["import", Filename]) ->
+    case emqx_mgmt_data_backup:import(Filename, ?DATA_BACKUP_OPTS) of
+        {ok, #{db_errors := DbErrs, config_errors := ConfErrs}} when
+            map_size(DbErrs) =:= 0, map_size(ConfErrs) =:= 0
+        ->
+            emqx_ctl:print("Data has been imported successfully.~n");
+        {ok, _} ->
+            emqx_ctl:print(
+                "Data has been imported, but some errors occurred, see the log above.~n"
+            );
+        {error, Reason} ->
+            Reason1 = emqx_mgmt_data_backup:format_error(Reason),
+            emqx_ctl:print("[error] Data import failed, reason: ~p.~n", [Reason1])
    end;
+data(_) ->
+    emqx_ctl:usage([
+        {"data import <File>", "Import data from the specified tar archive file"},
+        {"data export", "Export data"}
+    ]).
+
 %%--------------------------------------------------------------------
 %% Dump ETS
 %%--------------------------------------------------------------------
diff --git a/apps/emqx_management/src/emqx_mgmt_data_backup.erl b/apps/emqx_management/src/emqx_mgmt_data_backup.erl
new file mode 100644
index 000000000..5e59bd057
--- /dev/null
+++ b/apps/emqx_management/src/emqx_mgmt_data_backup.erl
@@ -0,0 +1,690 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%--------------------------------------------------------------------
+
+-module(emqx_mgmt_data_backup).
+
+-export([
+    export/0,
+    export/1,
+    import/1,
+    import/2,
+    format_error/1
+]).
+
+-ifdef(TEST).
+-compile(export_all).
+-compile(nowarn_export_all).
+-endif.
+
+-elvis([{elvis_style, invalid_dynamic_call, disable}]).
+
+-include_lib("kernel/include/file.hrl").
+-include_lib("emqx/include/logger.hrl").
+
+-define(ROOT_BACKUP_DIR, "backup").
+-define(BACKUP_MNESIA_DIR, "mnesia").
+-define(TAR_SUFFIX, ".tar.gz").
+-define(META_FILENAME, "META.hocon").
+-define(CLUSTER_HOCON_FILENAME, "cluster.hocon").
+-define(CONF_KEYS, [
+    <<"delayed">>,
+    <<"rewrite">>,
+    <<"retainer">>,
+    <<"mqtt">>,
+    <<"alarm">>,
+    <<"sysmon">>,
+    <<"sys_topics">>,
+    <<"limiter">>,
+    <<"log">>,
+    <<"persistent_session_store">>,
+    <<"prometheus">>,
+    <<"crl_cache">>,
+    <<"conn_congestion">>,
+    <<"force_shutdown">>,
+    <<"flapping_detect">>,
+    <<"broker">>,
+    <<"force_gc">>,
+    <<"zones">>
+]).
+
+-define(DEFAULT_OPTS, #{}).
+-define(tar(_FileName_), _FileName_ ++ ?TAR_SUFFIX).
+-define(fmt_tar_err(_Expr_),
+    fun() ->
+        case _Expr_ of
+            {error, _Reason_} -> {error, erl_tar:format_error(_Reason_)};
+            _Other_ -> _Other_
+        end
+    end()
+).
+
+-type backup_file_info() :: #{
+    filename => binary(),
+    size => non_neg_integer(),
+    created_at => binary(),
+    node => node(),
+    atom() => _
+}.
+
+-type db_error_details() :: #{mria:table() => {error, _}}.
+-type config_error_details() :: #{emqx_utils_maps:config_path() => {error, _}}.
+
+%%------------------------------------------------------------------------------
+%% APIs
+%%------------------------------------------------------------------------------
+
+-spec export() -> {ok, backup_file_info()} | {error, _}.
+export() ->
+    export(?DEFAULT_OPTS).
+
+-spec export(map()) -> {ok, backup_file_info()} | {error, _}.
+export(Opts) ->
+    {BackupName, TarDescriptor} = prepare_new_backup(Opts),
+    try
+        do_export(BackupName, TarDescriptor, Opts)
+    catch
+        Class:Reason:Stack ->
+            ?SLOG(error, #{
+                msg => "emqx_data_export_failed",
+                exception => Class,
+                reason => Reason,
+                stacktrace => Stack
+            }),
+            {error, Reason}
+    after
+        %% erl_tar:close/1 raises error if called on an already closed tar
+        catch erl_tar:close(TarDescriptor),
+        file:del_dir_r(BackupName)
+    end.
+
+-spec import(file:filename_all()) ->
+    {ok, #{db_errors => db_error_details(), config_errors => config_error_details()}}
+    | {error, _}.
+import(BackupFileName) ->
+    import(BackupFileName, ?DEFAULT_OPTS).
+
+-spec import(file:filename_all(), map()) ->
+    {ok, #{db_errors => db_error_details(), config_errors => config_error_details()}}
+    | {error, _}.
+import(BackupFileName, Opts) ->
+    case is_import_allowed() of
+        true ->
+            case lookup_file(str(BackupFileName)) of
+                {ok, FilePath} ->
+                    do_import(FilePath, Opts);
+                Err ->
+                    Err
+            end;
+        false ->
+            {error, not_core_node}
+    end.
+
+format_error(not_core_node) ->
+    str(
+        io_lib:format(
+            "backup data import is only allowed on core EMQX nodes, but requested node ~p is not core",
+            [node()]
+        )
+    );
+format_error(ee_to_ce_backup) ->
+    "importing EMQX Enterprise data backup to EMQX is not allowed";
+format_error(missing_backup_meta) ->
+    "invalid backup archive file: missing " ?META_FILENAME;
+format_error(invalid_edition) ->
+    "invalid backup archive content: wrong EMQX edition value in " ?META_FILENAME;
+format_error(invalid_version) ->
+    "invalid backup archive content: wrong EMQX version value in " ?META_FILENAME;
+format_error(bad_archive_dir) ->
+    "invalid backup archive content: all files in the archive must be under <backup name> directory";
+format_error(not_found) ->
+    "backup file not found";
+format_error(bad_backup_name) ->
+    "invalid backup name: file name must have " ?TAR_SUFFIX " extension";
+format_error({unsupported_version, ImportVersion}) ->
+    str(
+        io_lib:format(
+            "[warning] Backup version ~p is newer than EMQX version ~p, import is not allowed.~n",
+            [str(ImportVersion), str(emqx_release:version())]
+        )
+    );
+format_error(Reason) ->
+    Reason.
+ +%%------------------------------------------------------------------------------ +%% Internal functions +%%------------------------------------------------------------------------------ + +prepare_new_backup(Opts) -> + Ts = erlang:system_time(millisecond), + {{Y, M, D}, {HH, MM, SS}} = local_datetime(Ts), + BackupBaseName = str( + io_lib:format( + "emqx-export-~0p-~2..0b-~2..0b-~2..0b-~2..0b-~2..0b.~3..0b", + [Y, M, D, HH, MM, SS, Ts rem 1000] + ) + ), + BackupName = filename:join(root_backup_dir(), BackupBaseName), + BackupTarName = ?tar(BackupName), + maybe_print("Exporting data to ~p...~n", [BackupTarName], Opts), + {ok, TarDescriptor} = ?fmt_tar_err(erl_tar:open(BackupTarName, [write, compressed])), + {BackupName, TarDescriptor}. + +do_export(BackupName, TarDescriptor, Opts) -> + BackupBaseName = filename:basename(BackupName), + BackupTarName = ?tar(BackupName), + Meta = #{ + version => emqx_release:version(), + edition => emqx_release:edition() + }, + MetaBin = bin(hocon_pp:do(Meta, #{})), + MetaFileName = filename:join(BackupBaseName, ?META_FILENAME), + + ok = ?fmt_tar_err(erl_tar:add(TarDescriptor, MetaBin, MetaFileName, [])), + ok = export_cluster_hocon(TarDescriptor, BackupBaseName, Opts), + ok = export_mnesia_tabs(TarDescriptor, BackupName, BackupBaseName, Opts), + ok = ?fmt_tar_err(erl_tar:close(TarDescriptor)), + {ok, #file_info{ + size = Size, + ctime = {{Y1, M1, D1}, {H1, MM1, S1}} + }} = file:read_file_info(BackupTarName), + CreatedAt = io_lib:format("~p-~p-~p ~p:~p:~p", [Y1, M1, D1, H1, MM1, S1]), + {ok, #{ + filename => bin(BackupTarName), + size => Size, + created_at => bin(CreatedAt), + node => node() + }}. + +export_cluster_hocon(TarDescriptor, BackupBaseName, Opts) -> + maybe_print("Exporting cluster configuration...~n", [], Opts), + RawConf = emqx_config:read_override_conf(#{override_to => cluster}), + maybe_print( + "Exporting additional files from EMQX data_dir: ~p...~n", [str(emqx:data_dir())], Opts + ), + RawConf1 = read_data_files(RawConf), + RawConfBin = bin(hocon_pp:do(RawConf1, #{})), + NameInArchive = filename:join(BackupBaseName, ?CLUSTER_HOCON_FILENAME), + ok = ?fmt_tar_err(erl_tar:add(TarDescriptor, RawConfBin, NameInArchive, [])). + +export_mnesia_tabs(TarDescriptor, BackupName, BackupBaseName, Opts) -> + maybe_print("Exporting built-in database...~n", [], Opts), + lists:foreach( + fun(Tab) -> export_mnesia_tab(TarDescriptor, Tab, BackupName, BackupBaseName, Opts) end, + tabs_to_backup() + ). + +export_mnesia_tab(TarDescriptor, TabName, BackupName, BackupBaseName, Opts) -> + maybe_print("Exporting ~p database table...~n", [TabName], Opts), + {ok, MnesiaBackupName} = do_export_mnesia_tab(TabName, BackupName), + NameInArchive = mnesia_backup_name(BackupBaseName, TabName), + ok = ?fmt_tar_err(erl_tar:add(TarDescriptor, MnesiaBackupName, NameInArchive, [])), + _ = file:delete(MnesiaBackupName), + ok. + +do_export_mnesia_tab(TabName, BackupName) -> + Node = node(), + try + {ok, TabName, [Node]} = mnesia:activate_checkpoint( + [{name, TabName}, {min, [TabName]}, {allow_remote, false}] + ), + MnesiaBackupName = mnesia_backup_name(BackupName, TabName), + ok = filelib:ensure_dir(MnesiaBackupName), + ok = mnesia:backup_checkpoint(TabName, MnesiaBackupName), + {ok, MnesiaBackupName} + after + mnesia:deactivate_checkpoint(TabName) + end. + +-ifdef(TEST). +tabs_to_backup() -> + %% Allow mocking in tests + ?MODULE:mnesia_tabs_to_backup(). +-else. +tabs_to_backup() -> + mnesia_tabs_to_backup(). +-endif. 
+ +mnesia_tabs_to_backup() -> + lists:flatten([M:backup_tables() || M <- find_behaviours(emqx_db_backup)]). + +mnesia_backup_name(Path, TabName) -> + filename:join([Path, ?BACKUP_MNESIA_DIR, atom_to_list(TabName)]). + +is_import_allowed() -> + mria_rlog:role() =:= core. + +validate_backup(BackupDir) -> + case hocon:files([filename:join(BackupDir, ?META_FILENAME)]) of + {ok, #{ + <<"edition">> := Edition, + <<"version">> := Version + }} = Meta -> + validate( + [ + fun() -> check_edition(Edition) end, + fun() -> check_version(Version) end + ], + Meta + ); + _ -> + ?SLOG(error, #{msg => "missing_backup_meta", backup => BackupDir}), + {error, missing_backup_meta} + end. + +validate([ValidatorFun | T], OkRes) -> + case ValidatorFun() of + ok -> validate(T, OkRes); + Err -> Err + end; +validate([], OkRes) -> + OkRes. + +check_edition(BackupEdition) when BackupEdition =:= <<"ce">>; BackupEdition =:= <<"ee">> -> + Edition = bin(emqx_release:edition()), + case {BackupEdition, Edition} of + {<<"ee">>, <<"ce">>} -> + {error, ee_to_ce_backup}; + _ -> + ok + end; +check_edition(BackupEdition) -> + ?SLOG(error, #{msg => "invalid_backup_edition", edition => BackupEdition}), + {error, invalid_edition}. + +check_version(ImportVersion) -> + case parse_version_no_patch(ImportVersion) of + {ok, {ImportMajorInt, ImportMinorInt}} -> + Version = emqx_release:version(), + {ok, {MajorInt, MinorInt}} = parse_version_no_patch(bin(Version)), + case ImportMajorInt > MajorInt orelse ImportMinorInt > MinorInt of + true -> + %% 4.x backup files are anyway not compatible and will be treated as invalid, + %% before this step, + {error, {unsupported_version, str(ImportVersion)}}; + false -> + ok + end; + Err -> + Err + end. + +parse_version_no_patch(VersionBin) -> + case string:split(VersionBin, ".", all) of + [Major, Minor | _] -> + {MajorInt, _} = emqx_utils_binary:bin_to_int(Major), + {MinorInt, _} = emqx_utils_binary:bin_to_int(Minor), + {ok, {MajorInt, MinorInt}}; + _ -> + ?SLOG(error, #{msg => "failed_to_parse_backup_version", version => VersionBin}), + {error, invalid_version} + end. + +do_import(BackupFileName, Opts) -> + BackupDir = filename:join(root_backup_dir(), filename:basename(BackupFileName, ?TAR_SUFFIX)), + maybe_print("Importing data from ~p...~n", [BackupFileName], Opts), + try + ok = validate_backup_name(BackupFileName), + ok = extract_backup(BackupFileName), + {ok, _} = validate_backup(BackupDir), + ConfErrors = import_cluster_hocon(BackupDir, Opts), + MnesiaErrors = import_mnesia_tabs(BackupDir, Opts), + ?SLOG(info, #{msg => "emqx_data_import_success"}), + {ok, #{db_errors => MnesiaErrors, config_errors => ConfErrors}} + catch + error:{badmatch, {error, Reason}}:Stack -> + ?SLOG(error, #{msg => "emqx_data_import_failed", reason => Reason, stacktrace => Stack}), + {error, Reason}; + Class:Reason:Stack -> + ?SLOG(error, #{ + msg => "emqx_data_import_failed", + exception => Class, + reason => Reason, + stacktrace => Stack + }), + {error, Reason} + after + file:del_dir_r(BackupDir) + end. + +import_mnesia_tabs(BackupDir, Opts) -> + maybe_print("Importing built-in database...~n", [], Opts), + filter_errors( + lists:foldr( + fun(Tab, Acc) -> Acc#{Tab => import_mnesia_tab(BackupDir, Tab, Opts)} end, + #{}, + tabs_to_backup() + ) + ). 
+ +import_mnesia_tab(BackupDir, TabName, Opts) -> + MnesiaBackupFileName = mnesia_backup_name(BackupDir, TabName), + case filelib:is_regular(MnesiaBackupFileName) of + true -> + maybe_print("Importing ~p database table...~n", [TabName], Opts), + restore_mnesia_tab(BackupDir, MnesiaBackupFileName, TabName, Opts); + false -> + maybe_print("No backup file for ~p database table...~n", [TabName], Opts), + ?SLOG(info, #{msg => "missing_mnesia_backup", table => TabName, backup => BackupDir}), + ok + end. + +restore_mnesia_tab(BackupDir, MnesiaBackupFileName, TabName, Opts) -> + BackupNameToImport = MnesiaBackupFileName ++ "_for_import", + Prepared = + catch mnesia:traverse_backup( + MnesiaBackupFileName, BackupNameToImport, fun backup_converter/2, 0 + ), + try + case Prepared of + {ok, _} -> + Restored = mnesia:restore(BackupNameToImport, [{default_op, keep_tables}]), + case Restored of + {atomic, [TabName]} -> + ok; + RestoreErr -> + ?SLOG(error, #{ + msg => "failed_to_restore_mnesia_backup", + table => TabName, + backup => BackupDir, + reason => RestoreErr + }), + maybe_print_mnesia_import_err(TabName, RestoreErr, Opts), + {error, RestoreErr} + end; + PrepareErr -> + ?SLOG(error, #{ + msg => "failed_to_prepare_mnesia_backup_for_restoring", + table => TabName, + backup => BackupDir, + reason => PrepareErr + }), + maybe_print_mnesia_import_err(TabName, PrepareErr, Opts), + PrepareErr + end + after + %% Cleanup files as soon as they are not needed any more for more efficient disk usage + _ = file:delete(BackupNameToImport), + _ = file:delete(MnesiaBackupFileName) + end. + +backup_converter({schema, Tab, CreateList}, Acc) -> + check_rec_attributes(Tab, CreateList), + {[{schema, Tab, lists:map(fun convert_copies/1, CreateList)}], Acc}; +backup_converter(Other, Acc) -> + {[Other], Acc}. + +check_rec_attributes(Tab, CreateList) -> + ImportAttributes = proplists:get_value(attributes, CreateList), + Attributes = mnesia:table_info(Tab, attributes), + case ImportAttributes =/= Attributes of + true -> + throw({error, different_table_schema}); + false -> + ok + end. + +convert_copies({K, [_ | _]}) when K == ram_copies; K == disc_copies; K == disc_only_copies -> + {K, [node()]}; +convert_copies(Other) -> + Other. + +extract_backup(BackupFileName) -> + BackupDir = root_backup_dir(), + ok = validate_filenames(BackupFileName), + ?fmt_tar_err(erl_tar:extract(BackupFileName, [{cwd, BackupDir}, compressed])). + +validate_filenames(BackupFileName) -> + {ok, FileNames} = ?fmt_tar_err(erl_tar:table(BackupFileName, [compressed])), + BackupName = filename:basename(BackupFileName, ?TAR_SUFFIX), + IsValid = lists:all( + fun(FileName) -> + [Root | _] = filename:split(FileName), + Root =:= BackupName + end, + FileNames + ), + case IsValid of + true -> ok; + false -> {error, bad_archive_dir} + end. + +import_cluster_hocon(BackupDir, Opts) -> + HoconFileName = filename:join(BackupDir, ?CLUSTER_HOCON_FILENAME), + case filelib:is_regular(HoconFileName) of + true -> + {ok, RawConf} = hocon:files([HoconFileName]), + {ok, _} = validate_cluster_hocon(RawConf), + maybe_print("Importing cluster configuration...~n", [], Opts), + %% At this point, when all validations have been passed, we want to log errors (if any) + %% but proceed with the next items, instead of aborting the whole import operation + do_import_conf(RawConf, Opts); + false -> + maybe_print("No cluster configuration to be imported.~n", [], Opts), + ?SLOG(info, #{ + msg => "no_backup_hocon_config_to_import", + backup => BackupDir + }), + #{} + end. 
+
+read_data_files(RawConf) ->
+    DataDir = bin(emqx:data_dir()),
+    {ok, Cwd} = file:get_cwd(),
+    AbsDataDir = bin(filename:join(Cwd, DataDir)),
+    RawConf1 = emqx_authz:maybe_read_acl_file(RawConf),
+    emqx_utils_maps:deep_convert(RawConf1, fun read_data_file/4, [DataDir, AbsDataDir]).
+
+-define(dir_pattern(_Dir_), <<_Dir_:(byte_size(_Dir_))/binary, _/binary>>).
+
+read_data_file(Key, Val, DataDir, AbsDataDir) ->
+    Val1 =
+        case Val of
+            ?dir_pattern(DataDir) = FileName ->
+                do_read_file(FileName);
+            ?dir_pattern(AbsDataDir) = FileName ->
+                do_read_file(FileName);
+            V ->
+                V
+        end,
+    {Key, Val1}.
+
+do_read_file(FileName) ->
+    case file:read_file(FileName) of
+        {ok, Content} ->
+            Content;
+        {error, Reason} ->
+            ?SLOG(warning, #{
+                msg => "failed_to_read_data_file",
+                filename => FileName,
+                reason => Reason
+            }),
+            FileName
+    end.
+
+validate_cluster_hocon(RawConf) ->
+    %% write ACL file to comply with the schema...
+    RawConf1 = emqx_authz:maybe_write_acl_file(RawConf),
+    emqx_hocon:check(
+        emqx_conf:schema_module(),
+        maps:merge(emqx:get_raw_config([]), RawConf1),
+        #{atom_key => false, required => false}
+    ).
+
+do_import_conf(RawConf, Opts) ->
+    GenConfErrs = filter_errors(maps:from_list(import_generic_conf(RawConf))),
+    maybe_print_errors(GenConfErrs, Opts),
+    Errors =
+        lists:foldr(
+            fun(Module, ErrorsAcc) ->
+                case Module:import_config(RawConf) of
+                    {ok, #{changed := Changed}} ->
+                        maybe_print_changed(Changed, Opts),
+                        ErrorsAcc;
+                    {error, #{root_key := RootKey, reason := Reason}} ->
+                        ErrorsAcc#{[RootKey] => Reason}
+                end
+            end,
+            GenConfErrs,
+            find_behaviours(emqx_config_backup)
+        ),
+    maybe_print_errors(Errors, Opts),
+    Errors.
+
+import_generic_conf(Data) ->
+    lists:map(
+        fun(Key) ->
+            case maps:get(Key, Data, undefined) of
+                undefined -> {[Key], ok};
+                Conf -> {[Key], emqx_conf:update([Key], Conf, #{override_to => cluster})}
+            end
+        end,
+        ?CONF_KEYS
+    ).
+
+maybe_print_changed(Changed, Opts) ->
+    lists:foreach(
+        fun(ChangedPath) ->
+            maybe_print(
+                "Config key path ~p was present before import and "
+                "has been overwritten.~n",
+                [pretty_path(ChangedPath)],
+                Opts
+            )
+        end,
+        Changed
+    ).
+
+maybe_print_errors(Errors, Opts) ->
+    maps:foreach(
+        fun(Path, Err) ->
+            maybe_print(
+                "Failed to import the following config path: ~p, reason: ~p~n",
+                [pretty_path(Path), Err],
+                Opts
+            )
+        end,
+        Errors
+    ).
+
+filter_errors(Results) ->
+    maps:filter(
+        fun
+            (_Path, {error, _}) -> true;
+            (_, _) -> false
+        end,
+        Results
+    ).
+
+pretty_path(Path) ->
+    str(lists:join(".", [str(Part) || Part <- Path])).
+
+str(Data) when is_atom(Data) ->
+    atom_to_list(Data);
+str(Data) ->
+    unicode:characters_to_list(Data).
+
+bin(Data) when is_atom(Data) ->
+    atom_to_binary(Data, utf8);
+bin(Data) ->
+    unicode:characters_to_binary(Data).
+
+validate_backup_name(FileName) ->
+    BaseName = filename:basename(FileName, ?TAR_SUFFIX),
+    ValidName = BaseName ++ ?TAR_SUFFIX,
+    case filename:basename(FileName) of
+        ValidName -> ok;
+        _ -> {error, bad_backup_name}
+    end.
+
+lookup_file(FileName) ->
+    case filelib:is_regular(FileName) of
+        true ->
+            {ok, FileName};
+        false ->
+            %% Only lookup by basename, don't allow to lookup by file path
+            case FileName =:= filename:basename(FileName) of
+                true ->
+                    FilePath = filename:join(root_backup_dir(), FileName),
+                    case filelib:is_file(FilePath) of
+                        true -> {ok, FilePath};
+                        false -> {error, not_found}
+                    end;
+                false ->
+                    {error, not_found}
+            end
+    end.
+ +root_backup_dir() -> + Dir = filename:join(emqx:data_dir(), ?ROOT_BACKUP_DIR), + ok = ensure_path(Dir), + Dir. + +-if(?OTP_RELEASE < 25). +ensure_path(Path) -> filelib:ensure_dir(filename:join([Path, "dummy"])). +-else. +ensure_path(Path) -> filelib:ensure_path(Path). +-endif. + +local_datetime(MillisecondTs) -> + calendar:system_time_to_local_time(MillisecondTs, millisecond). + +maybe_print(Format, Args, #{print_fun := PrintFun}) -> + PrintFun(Format, Args); +maybe_print(_Format, _Args, _Opts) -> + ok. + +maybe_print_mnesia_import_err(TabName, Error, Opts) -> + maybe_print( + "[error] Failed to import built-in database table: ~p, reason: ~p~n", + [TabName, Error], + Opts + ). + +find_behaviours(Behaviour) -> + find_behaviours(Behaviour, apps(), []). + +%% Based on minirest_api:find_api_modules/1 +find_behaviours(_Behaviour, [] = _Apps, Acc) -> + Acc; +find_behaviours(Behaviour, [App | Apps], Acc) -> + case application:get_key(App, modules) of + undefined -> + Acc; + {ok, Modules} -> + NewAcc = lists:filter( + fun(Module) -> + Info = Module:module_info(attributes), + Bhvrs = lists:flatten( + proplists:get_all_values(behavior, Info) ++ + proplists:get_all_values(behaviour, Info) + ), + lists:member(Behaviour, Bhvrs) + end, + Modules + ), + find_behaviours(Behaviour, Apps, NewAcc ++ Acc) + end. + +apps() -> + [ + App + || {App, _, _} <- application:loaded_applications(), + case re:run(atom_to_list(App), "^emqx") of + {match, [{0, 4}]} -> true; + _ -> false + end + ]. diff --git a/apps/emqx_management/src/emqx_mgmt_listeners_conf.erl b/apps/emqx_management/src/emqx_mgmt_listeners_conf.erl new file mode 100644 index 000000000..f54aca845 --- /dev/null +++ b/apps/emqx_management/src/emqx_mgmt_listeners_conf.erl @@ -0,0 +1,96 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_mgmt_listeners_conf). + +-behaviour(emqx_config_backup). + +-export([ + action/4, + create/3, + ensure_remove/2, + get_raw/2, + update/3 +]). + +%% Data backup +-export([ + import_config/1 +]). + +-include_lib("emqx/include/logger.hrl"). + +-define(CONF_ROOT_KEY, listeners). +-define(path(_Type_, _Name_), [?CONF_ROOT_KEY, _Type_, _Name_]). +-define(OPTS, #{rawconf_with_defaults => true, override_to => cluster}). +-define(IMPORT_OPTS, #{override_to => cluster}). + +action(Type, Name, Action, Conf) -> + wrap(emqx_conf:update(?path(Type, Name), {action, Action, Conf}, ?OPTS)). + +create(Type, Name, Conf) -> + wrap(emqx_conf:update(?path(Type, Name), {create, Conf}, ?OPTS)). + +ensure_remove(Type, Name) -> + wrap(emqx_conf:tombstone(?path(Type, Name), ?OPTS)). + +get_raw(Type, Name) -> emqx_conf:get_raw(?path(Type, Name), undefined). + +update(Type, Name, Conf) -> + wrap(emqx_conf:update(?path(Type, Name), {update, Conf}, ?OPTS)). 
+ +wrap({error, {post_config_update, emqx_listeners, Reason}}) -> {error, Reason}; +wrap({error, {pre_config_update, emqx_listeners, Reason}}) -> {error, Reason}; +wrap({error, Reason}) -> {error, Reason}; +wrap(Ok) -> Ok. + +%%------------------------------------------------------------------------------ +%% Data backup +%%------------------------------------------------------------------------------ + +import_config(RawConf) -> + NewConf = maps:get(<<"listeners">>, RawConf, #{}), + OldConf = emqx:get_raw_config([?CONF_ROOT_KEY], #{}), + MergedConf = merge_confs(OldConf, NewConf), + case emqx_conf:update([?CONF_ROOT_KEY], MergedConf, ?IMPORT_OPTS) of + {ok, #{raw_config := NewRawConf}} -> + {ok, #{root_key => ?CONF_ROOT_KEY, changed => changed_paths(OldConf, NewRawConf)}}; + Error -> + {error, #{root_key => ?CONF_ROOT_KEY, reason => Error}} + end. + +merge_confs(OldConf, NewConf) -> + AllTypes = maps:keys(maps:merge(OldConf, NewConf)), + lists:foldr( + fun(Type, Acc) -> + NewListeners = maps:get(Type, NewConf, #{}), + OldListeners = maps:get(Type, OldConf, #{}), + Acc#{Type => maps:merge(OldListeners, NewListeners)} + end, + #{}, + AllTypes + ). + +changed_paths(OldRawConf, NewRawConf) -> + maps:fold( + fun(Type, Listeners, ChangedAcc) -> + OldListeners = maps:get(Type, OldRawConf, #{}), + Changed = maps:get(changed, emqx_utils_maps:diff_maps(Listeners, OldListeners)), + [?path(Type, K) || K <- maps:keys(Changed)] ++ ChangedAcc + end, + [], + NewRawConf + ). diff --git a/apps/emqx_management/test/emqx_mgmt_data_backup_SUITE.erl b/apps/emqx_management/test/emqx_mgmt_data_backup_SUITE.erl new file mode 100644 index 000000000..9df6d2138 --- /dev/null +++ b/apps/emqx_management/test/emqx_mgmt_data_backup_SUITE.erl @@ -0,0 +1,519 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_mgmt_data_backup_SUITE). + +-compile(export_all). +-compile(nowarn_export_all). + +-include_lib("eunit/include/eunit.hrl"). +-include_lib("common_test/include/ct.hrl"). +-include_lib("snabbkaffe/include/snabbkaffe.hrl"). + +-define(BOOTSTRAP_BACKUP, "emqx-export-test-bootstrap-ce.tar.gz"). + +all() -> + emqx_common_test_helpers:all(?MODULE). + +init_per_suite(Config) -> + [application:load(App) || App <- apps_to_start() ++ apps_to_load()], + Config. + +end_per_suite(_Config) -> + ok. 
+ +init_per_testcase(t_import_on_cluster, Config) -> + %% Don't import listeners to avoid port conflicts + %% when the same conf will be imported to another cluster + meck:new(emqx_mgmt_listeners_conf, [passthrough]), + meck:new(emqx_gateway_conf, [passthrough]), + meck:expect( + emqx_mgmt_listeners_conf, + import_config, + 1, + {ok, #{changed => [], root_key => listeners}} + ), + meck:expect( + emqx_gateway_conf, + import_config, + 1, + {ok, #{changed => [], root_key => gateway}} + ), + [{cluster, cluster(Config)} | setup(Config)]; +init_per_testcase(t_verify_imported_mnesia_tab_on_cluster, Config) -> + [{cluster, cluster(Config)} | setup(Config)]; +init_per_testcase(t_mnesia_bad_tab_schema, Config) -> + meck:new(emqx_mgmt_data_backup, [passthrough]), + meck:expect(emqx_mgmt_data_backup, mnesia_tabs_to_backup, 0, [data_backup_test]), + setup(Config); +init_per_testcase(_TestCase, Config) -> + setup(Config). + +end_per_testcase(t_import_on_cluster, Config) -> + cleanup_cluster(?config(cluster, Config)), + cleanup(Config), + meck:unload(emqx_mgmt_listeners_conf), + meck:unload(emqx_gateway_conf); +end_per_testcase(t_verify_imported_mnesia_tab_on_cluster, Config) -> + cleanup_cluster(?config(cluster, Config)), + cleanup(Config); +end_per_testcase(t_mnesia_bad_tab_schema, Config) -> + cleanup(Config), + meck:unload(emqx_mgmt_data_backup); +end_per_testcase(_TestCase, Config) -> + cleanup(Config). + +t_empty_export_import(_Config) -> + ExpRawConf = emqx:get_raw_config([]), + {ok, #{filename := FileName}} = emqx_mgmt_data_backup:export(), + Exp = {ok, #{db_errors => #{}, config_errors => #{}}}, + ?assertEqual(Exp, emqx_mgmt_data_backup:import(FileName)), + ?assertEqual(ExpRawConf, emqx:get_raw_config([])), + %% idempotent update assert + ?assertEqual(Exp, emqx_mgmt_data_backup:import(FileName)), + ?assertEqual(ExpRawConf, emqx:get_raw_config([])). + +t_cluster_hocon_export_import(Config) -> + RawConfBeforeImport = emqx:get_raw_config([]), + BootstrapFile = filename:join(?config(data_dir, Config), ?BOOTSTRAP_BACKUP), + Exp = {ok, #{db_errors => #{}, config_errors => #{}}}, + ?assertEqual(Exp, emqx_mgmt_data_backup:import(BootstrapFile)), + RawConfAfterImport = emqx:get_raw_config([]), + ?assertNotEqual(RawConfBeforeImport, RawConfAfterImport), + {ok, #{filename := FileName}} = emqx_mgmt_data_backup:export(), + ?assertEqual(Exp, emqx_mgmt_data_backup:import(FileName)), + ?assertEqual(RawConfAfterImport, emqx:get_raw_config([])), + %% idempotent update assert + ?assertEqual(Exp, emqx_mgmt_data_backup:import(FileName)), + ?assertEqual(RawConfAfterImport, emqx:get_raw_config([])), + %% lookup file inside /backup + ?assertEqual(Exp, emqx_mgmt_data_backup:import(filename:basename(FileName))). + +t_ee_to_ce_backup(Config) -> + case emqx_release:edition() of + ce -> + EEBackupFileName = filename:join(?config(priv_dir, Config), "export-backup-ee.tar.gz"), + Meta = unicode:characters_to_binary( + hocon_pp:do(#{edition => ee, version => emqx_release:version()}, #{}) + ), + ok = erl_tar:create( + EEBackupFileName, + [ + {"export-backup-ee/cluster.hocon", <<>>}, + {"export-backup-ee/META.hocon", Meta} + ], + [compressed] + ), + ExpReason = ee_to_ce_backup, + ?assertEqual( + {error, ExpReason}, emqx_mgmt_data_backup:import(EEBackupFileName) + ), + %% Must be translated to a readable string + ?assertMatch([_ | _], emqx_mgmt_data_backup:format_error(ExpReason)); + ee -> + %% Don't fail if the test is run with emqx-enterprise profile + ok + end. 
+ +t_no_backup_file(_Config) -> + ExpReason = not_found, + ?assertEqual( + {error, not_found}, emqx_mgmt_data_backup:import("no_such_backup.tar.gz") + ), + ?assertMatch([_ | _], emqx_mgmt_data_backup:format_error(ExpReason)). + +t_bad_backup_file(Config) -> + BadFileName = filename:join(?config(priv_dir, Config), "export-bad-backup-tar-gz"), + ok = file:write_file(BadFileName, <<>>), + NoMetaFileName = filename:join(?config(priv_dir, Config), "export-no-meta.tar.gz"), + ok = erl_tar:create(NoMetaFileName, [{"export-no-meta/cluster.hocon", <<>>}], [compressed]), + BadArchiveDirFileName = filename:join(?config(priv_dir, Config), "export-bad-dir.tar.gz"), + ok = erl_tar:create( + BadArchiveDirFileName, + [ + {"tmp/cluster.hocon", <<>>}, + {"export-bad-dir-inside/META.hocon", <<>>}, + {"/export-bad-dir-inside/mnesia/test_tab", <<>>} + ], + [compressed] + ), + InvalidEditionFileName = filename:join( + ?config(priv_dir, Config), "export-invalid-edition.tar.gz" + ), + Meta = unicode:characters_to_binary( + hocon_pp:do(#{edition => "test", version => emqx_release:version()}, #{}) + ), + ok = erl_tar:create( + InvalidEditionFileName, + [ + {"export-invalid-edition/cluster.hocon", <<>>}, + {"export-invalid-edition/META.hocon", Meta} + ], + [compressed] + ), + InvalidVersionFileName = filename:join( + ?config(priv_dir, Config), "export-invalid-version.tar.gz" + ), + Meta1 = unicode:characters_to_binary( + hocon_pp:do(#{edition => emqx_release:edition(), version => "test"}, #{}) + ), + ok = erl_tar:create( + InvalidVersionFileName, + [ + {"export-invalid-version/cluster.hocon", <<>>}, + {"export-invalid-version/META.hocon", Meta1} + ], + [compressed] + ), + BadFileNameReason = bad_backup_name, + NoMetaReason = missing_backup_meta, + BadArchiveDirReason = bad_archive_dir, + InvalidEditionReason = invalid_edition, + InvalidVersionReason = invalid_version, + ?assertEqual({error, BadFileNameReason}, emqx_mgmt_data_backup:import(BadFileName)), + ?assertMatch([_ | _], emqx_mgmt_data_backup:format_error(BadFileNameReason)), + ?assertEqual({error, NoMetaReason}, emqx_mgmt_data_backup:import(NoMetaFileName)), + ?assertMatch([_ | _], emqx_mgmt_data_backup:format_error(NoMetaReason)), + ?assertEqual( + {error, BadArchiveDirReason}, + emqx_mgmt_data_backup:import(BadArchiveDirFileName) + ), + ?assertMatch([_ | _], emqx_mgmt_data_backup:format_error(BadArchiveDirReason)), + ?assertEqual( + {error, InvalidEditionReason}, + emqx_mgmt_data_backup:import(InvalidEditionFileName) + ), + ?assertMatch([_ | _], emqx_mgmt_data_backup:format_error(InvalidEditionReason)), + ?assertEqual( + {error, InvalidVersionReason}, + emqx_mgmt_data_backup:import(InvalidVersionFileName) + ), + ?assertMatch([_ | _], emqx_mgmt_data_backup:format_error(InvalidVersionReason)). 
+ +t_future_version(Config) -> + CurrentVersion = list_to_binary(emqx_release:version()), + [_, _ | Patch] = string:split(CurrentVersion, ".", all), + {ok, {MajorInt, MinorInt}} = emqx_mgmt_data_backup:parse_version_no_patch(CurrentVersion), + FutureMajorVersion = recompose_version(MajorInt + 1, MinorInt, Patch), + FutureMinorVersion = recompose_version(MajorInt, MinorInt + 1, Patch), + [MajorMeta, MinorMeta] = + [ + unicode:characters_to_binary( + hocon_pp:do(#{edition => emqx_release:edition(), version => V}, #{}) + ) + || V <- [FutureMajorVersion, FutureMinorVersion] + ], + MajorFileName = filename:join(?config(priv_dir, Config), "export-future-major-ver.tar.gz"), + MinorFileName = filename:join(?config(priv_dir, Config), "export-future-minor-ver.tar.gz"), + ok = erl_tar:create( + MajorFileName, + [ + {"export-future-major-ver/cluster.hocon", <<>>}, + {"export-future-major-ver/META.hocon", MajorMeta} + ], + [compressed] + ), + ok = erl_tar:create( + MinorFileName, + [ + {"export-future-minor-ver/cluster.hocon", <<>>}, + {"export-future-minor-ver/META.hocon", MinorMeta} + ], + [compressed] + ), + ExpMajorReason = {unsupported_version, FutureMajorVersion}, + ExpMinorReason = {unsupported_version, FutureMinorVersion}, + ?assertEqual({error, ExpMajorReason}, emqx_mgmt_data_backup:import(MajorFileName)), + ?assertEqual({error, ExpMinorReason}, emqx_mgmt_data_backup:import(MinorFileName)), + ?assertMatch([_ | _], emqx_mgmt_data_backup:format_error(ExpMajorReason)), + ?assertMatch([_ | _], emqx_mgmt_data_backup:format_error(ExpMinorReason)). + +t_bad_config(Config) -> + BadConfigFileName = filename:join(?config(priv_dir, Config), "export-bad-config-backup.tar.gz"), + Meta = unicode:characters_to_binary( + hocon_pp:do(#{edition => emqx_release:edition(), version => emqx_release:version()}, #{}) + ), + BadConfigMap = #{ + <<"listeners">> => + #{ + <<"bad-type">> => + #{<<"bad-name">> => #{<<"bad-field">> => <<"bad-val">>}} + } + }, + BadConfig = unicode:characters_to_binary(hocon_pp:do(BadConfigMap, #{})), + ok = erl_tar:create( + BadConfigFileName, + [ + {"export-bad-config-backup/cluster.hocon", BadConfig}, + {"export-bad-config-backup/META.hocon", Meta} + ], + [compressed] + ), + Res = emqx_mgmt_data_backup:import(BadConfigFileName), + ?assertMatch({error, #{kind := validation_error}}, Res). 
+ +t_import_on_cluster(Config) -> + %% Randomly chosen config key to verify import result additionally + ?assertEqual([], emqx:get_config([authentication])), + BootstrapFile = filename:join(?config(data_dir, Config), ?BOOTSTRAP_BACKUP), + ExpImportRes = {ok, #{db_errors => #{}, config_errors => #{}}}, + ?assertEqual(ExpImportRes, emqx_mgmt_data_backup:import(BootstrapFile)), + ImportedAuthnConf = emqx:get_config([authentication]), + ?assertMatch([_ | _], ImportedAuthnConf), + {ok, #{filename := FileName}} = emqx_mgmt_data_backup:export(), + {ok, Cwd} = file:get_cwd(), + AbsFilePath = filename:join(Cwd, FileName), + [CoreNode1, _CoreNode2, ReplicantNode] = NodesList = ?config(cluster, Config), + ReplImportReason = not_core_node, + ?assertEqual( + {error, ReplImportReason}, + rpc:call(ReplicantNode, emqx_mgmt_data_backup, import, [AbsFilePath]) + ), + ?assertMatch([_ | _], emqx_mgmt_data_backup:format_error(ReplImportReason)), + [?assertEqual([], rpc:call(N, emqx, get_config, [[authentication]])) || N <- NodesList], + ?assertEqual( + ExpImportRes, + rpc:call(CoreNode1, emqx_mgmt_data_backup, import, [AbsFilePath]) + ), + [ + ?assertEqual( + authn_ids(ImportedAuthnConf), + authn_ids(rpc:call(N, emqx, get_config, [[authentication]])) + ) + || N <- NodesList + ]. + +t_verify_imported_mnesia_tab_on_cluster(Config) -> + UsersToExport = users(<<"user_to_export_">>), + UsersBeforeImport = users(<<"user_before_import_">>), + [{ok, _} = emqx_dashboard_admin:add_user(U, U, U) || U <- UsersToExport], + {ok, #{filename := FileName}} = emqx_mgmt_data_backup:export(), + {ok, Cwd} = file:get_cwd(), + AbsFilePath = filename:join(Cwd, FileName), + + [CoreNode1, CoreNode2, ReplicantNode] = NodesList = ?config(cluster, Config), + + [ + {ok, _} = rpc:call(CoreNode1, emqx_dashboard_admin, add_user, [U, U, U]) + || U <- UsersBeforeImport + ], + + ?assertEqual( + {ok, #{db_errors => #{}, config_errors => #{}}}, + rpc:call(CoreNode1, emqx_mgmt_data_backup, import, [AbsFilePath]) + ), + + [Tab] = emqx_dashboard_admin:backup_tables(), + AllUsers = lists:sort(mnesia:dirty_all_keys(Tab) ++ UsersBeforeImport), + [ + ?assertEqual( + AllUsers, + lists:sort(rpc:call(N, mnesia, dirty_all_keys, [Tab])) + ) + || N <- [CoreNode1, CoreNode2] + ], + + %% Give some extra time to replicant to import data... + timer:sleep(3000), + ?assertEqual(AllUsers, lists:sort(rpc:call(ReplicantNode, mnesia, dirty_all_keys, [Tab]))), + + [rpc:call(N, ekka, leave, []) || N <- lists:reverse(NodesList)], + [emqx_common_test_helpers:stop_slave(N) || N <- NodesList]. + +t_mnesia_bad_tab_schema(_Config) -> + OldAttributes = [id, name, description], + ok = create_test_tab(OldAttributes), + ok = mria:dirty_write({data_backup_test, <<"id">>, <<"old_name">>, <<"old_description">>}), + {ok, #{filename := FileName}} = emqx_mgmt_data_backup:export(), + {atomic, ok} = mnesia:delete_table(data_backup_test), + NewAttributes = [id, name, description, new_field], + ok = create_test_tab(NewAttributes), + NewRec = + {data_backup_test, <<"id">>, <<"new_name">>, <<"new_description">>, <<"new_field_value">>}, + ok = mria:dirty_write(NewRec), + ?assertEqual( + {ok, #{ + db_errors => + #{data_backup_test => {error, {"Backup traversal failed", different_table_schema}}}, + config_errors => #{} + }}, + emqx_mgmt_data_backup:import(FileName) + ), + ?assertEqual([NewRec], mnesia:dirty_read(data_backup_test, <<"id">>)), + ?assertEqual([<<"id">>], mnesia:dirty_all_keys(data_backup_test)). 
+ +t_read_files(_Config) -> + DataDir = emqx:data_dir(), + %% Relative "data" path is set in init_per_testcase/2, asserting it must be safe + ?assertEqual("data", DataDir), + {ok, Cwd} = file:get_cwd(), + AbsDataDir = filename:join(Cwd, DataDir), + FileBaseName = "t_read_files_tmp_file", + TestFileAbsPath = iolist_to_binary(filename:join(AbsDataDir, FileBaseName)), + TestFilePath = iolist_to_binary(filename:join(DataDir, FileBaseName)), + TestFileContent = <<"test_file_content">>, + ok = file:write_file(TestFileAbsPath, TestFileContent), + + RawConf = #{ + <<"test_rootkey">> => #{ + <<"test_field">> => <<"test_field_path">>, + <<"abs_data_dir_path_file">> => TestFileAbsPath, + <<"rel_data_dir_path_file">> => TestFilePath, + <<"path_outside_data_dir">> => <<"/tmp/some-file">> + } + }, + + RawConf1 = emqx_utils_maps:deep_put( + [<<"test_rootkey">>, <<"abs_data_dir_path_file">>], RawConf, TestFileContent + ), + ExpectedConf = emqx_utils_maps:deep_put( + [<<"test_rootkey">>, <<"rel_data_dir_path_file">>], RawConf1, TestFileContent + ), + ?assertEqual(ExpectedConf, emqx_mgmt_data_backup:read_data_files(RawConf)). + +%%------------------------------------------------------------------------------ +%% Internal test helpers +%%------------------------------------------------------------------------------ + +setup(Config) -> + %% avoid port conflicts if the cluster is started + AppHandler = fun + (emqx_dashboard) -> + ok = emqx_config:put([dashboard, listeners, http, bind], 0); + (_) -> + ok + end, + ok = emqx_common_test_helpers:start_apps(apps_to_start(), AppHandler), + PrevDataDir = application:get_env(emqx, data_dir), + application:set_env(emqx, data_dir, "data"), + [{previous_emqx_data_dir, PrevDataDir} | Config]. + +cleanup(Config) -> + emqx_common_test_helpers:stop_apps(apps_to_start()), + case ?config(previous_emqx_data_dir, Config) of + undefined -> + application:unset_env(emqx, data_dir); + {ok, Val} -> + application:set_env(emqx, data_dir, Val) + end. + +cleanup_cluster(ClusterNodes) -> + [rpc:call(N, ekka, leave, []) || N <- lists:reverse(ClusterNodes)], + [emqx_common_test_helpers:stop_slave(N) || N <- ClusterNodes]. + +users(Prefix) -> + [ + <> + || _ <- lists:seq(1, 10) + ]. + +authn_ids(AuthnConf) -> + lists:sort([emqx_authentication:authenticator_id(Conf) || Conf <- AuthnConf]). + +recompose_version(MajorInt, MinorInt, Patch) -> + unicode:characters_to_list( + [integer_to_list(MajorInt + 1), $., integer_to_list(MinorInt), $. | Patch] + ). 
+ +cluster(Config) -> + PrivDataDir = ?config(priv_dir, Config), + [{Core1, Core1Opts}, {Core2, Core2Opts}, {Replicant, ReplOpts}] = + emqx_common_test_helpers:emqx_cluster( + [ + {core, data_backup_core1}, + {core, data_backup_core2}, + {replicant, data_backup_replicant} + ], + #{ + priv_data_dir => PrivDataDir, + schema_mod => emqx_conf_schema, + apps => apps_to_start(), + load_apps => apps_to_start() ++ apps_to_load(), + env => [{mria, db_backend, rlog}], + load_schema => true, + start_autocluster => true, + join_to => true, + listener_ports => [], + conf => [{[dashboard, listeners, http, bind], 0}], + env_handler => + fun(_) -> + application:set_env(emqx, boot_modules, [broker, router]) + end + } + ), + Node1 = emqx_common_test_helpers:start_slave(Core1, Core1Opts), + Node2 = emqx_common_test_helpers:start_slave(Core2, Core2Opts), + #{conf := _ReplConf, env := ReplEnv} = ReplOpts, + ClusterDiscovery = {static, [{seeds, [Node1, Node2]}]}, + ReplOpts1 = maps:remove( + join_to, + ReplOpts#{ + env => [{ekka, cluster_discovery, ClusterDiscovery} | ReplEnv], + env_handler => fun(_) -> + application:set_env(emqx, boot_modules, [broker, router]), + application:set_env( + ekka, + cluster_discovery, + ClusterDiscovery + ) + end + } + ), + ReplNode = emqx_common_test_helpers:start_slave(Replicant, ReplOpts1), + [Node1, Node2, ReplNode]. + +create_test_tab(Attributes) -> + ok = mria:create_table(data_backup_test, [ + {type, set}, + {rlog_shard, data_backup_test_shard}, + {storage, disc_copies}, + {record_name, data_backup_test}, + {attributes, Attributes}, + {storage_properties, [ + {ets, [ + {read_concurrency, true}, + {write_concurrency, true} + ]} + ]} + ]), + ok = mria:wait_for_tables([data_backup_test]). + +apps_to_start() -> + [ + emqx, + emqx_conf, + emqx_psk, + emqx_management, + emqx_dashboard, + emqx_authz, + emqx_authn, + emqx_rule_engine, + emqx_retainer, + emqx_prometheus, + emqx_modules, + emqx_gateway, + emqx_exhook, + emqx_bridge, + emqx_auto_subscribe + ]. + +apps_to_load() -> + [ + emqx_gateway_lwm2m, + emqx_gateway_coap, + emqx_gateway_exproto, + emqx_gateway_stomp, + emqx_gateway_mqttsn + ]. diff --git a/apps/emqx_management/test/emqx_mgmt_data_backup_SUITE_data/emqx-export-test-bootstrap-ce.tar.gz b/apps/emqx_management/test/emqx_mgmt_data_backup_SUITE_data/emqx-export-test-bootstrap-ce.tar.gz new file mode 100644 index 000000000..b7da76bbb Binary files /dev/null and b/apps/emqx_management/test/emqx_mgmt_data_backup_SUITE_data/emqx-export-test-bootstrap-ce.tar.gz differ diff --git a/apps/emqx_modules/src/emqx_delayed.erl b/apps/emqx_modules/src/emqx_delayed.erl index 5a4f1fc3e..32219a139 100644 --- a/apps/emqx_modules/src/emqx_delayed.erl +++ b/apps/emqx_modules/src/emqx_delayed.erl @@ -98,9 +98,9 @@ -define(FORMAT_FUN, {?MODULE, format_delayed}). -define(NOW, erlang:system_time(milli_seconds)). -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ %% Mnesia bootstrap -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ mnesia(boot) -> ok = mria:create_table(?TAB, [ {type, ordered_set}, @@ -110,9 +110,9 @@ mnesia(boot) -> {attributes, record_info(fields, delayed_message)} ]). 
-%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ %% Hooks -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ on_message_publish( Msg = #message{ id = Id, @@ -143,9 +143,9 @@ on_message_publish( on_message_publish(Msg) -> {ok, Msg}. -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ %% Start delayed publish server -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ -spec start_link() -> emqx_types:startlink_ret(). start_link() -> @@ -270,9 +270,9 @@ post_config_update(_KeyPath, _ConfigReq, NewConf, _OldConf, _AppEnvs) -> Enable = maps:get(enable, NewConf, undefined), load_or_unload(Enable). -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ %% gen_server callback -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ init([]) -> ok = mria:wait_for_tables([?TAB]), @@ -335,9 +335,9 @@ terminate(_Reason, #{stats_timer := StatsTimer} = State) -> code_change(_Vsn, State, _Extra) -> {ok, State}. -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ %% Telemetry -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ -spec get_basic_usage_info() -> #{delayed_message_count => non_neg_integer()}. get_basic_usage_info() -> @@ -348,9 +348,9 @@ get_basic_usage_info() -> end, #{delayed_message_count => DelayedCount}. -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ %% Internal functions -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ %% Ensure the stats -spec ensure_stats_event(state()) -> state(). diff --git a/apps/emqx_modules/src/emqx_rewrite.erl b/apps/emqx_modules/src/emqx_rewrite.erl index 6bf8abb89..7f7955cf8 100644 --- a/apps/emqx_modules/src/emqx_rewrite.erl +++ b/apps/emqx_modules/src/emqx_rewrite.erl @@ -49,9 +49,12 @@ %% exported for `emqx_telemetry' -export([get_basic_usage_info/0]). -%%-------------------------------------------------------------------- +-define(update(_Rules_), + emqx_conf:update([rewrite], _Rules_, #{override_to => cluster}) +). +%%------------------------------------------------------------------------------ %% Load/Unload -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ enable() -> emqx_conf:add_handler([rewrite], ?MODULE), @@ -67,7 +70,7 @@ list() -> emqx_conf:get_raw([<<"rewrite">>], []). 
update(Rules0) -> - case emqx_conf:update([rewrite], Rules0, #{override_to => cluster}) of + case ?update(Rules0) of {ok, _} -> ok; {error, Reason} -> @@ -109,18 +112,19 @@ rewrite_publish(Message = #message{topic = Topic}, Rules) -> Binds = fill_client_binds(Message), {ok, Message#message{topic = match_and_rewrite(Topic, Rules, Binds)}}. -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ %% Telemetry -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ -spec get_basic_usage_info() -> #{topic_rewrite_rule_count => non_neg_integer()}. get_basic_usage_info() -> RewriteRules = list(), #{topic_rewrite_rule_count => length(RewriteRules)}. -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ %% Internal functions -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ + compile(Rules) -> lists:foldl( fun(Rule, {Publish, Subscribe, Error}) -> diff --git a/apps/emqx_psk/src/emqx_psk.app.src b/apps/emqx_psk/src/emqx_psk.app.src index c3786bcc0..fc5fb707c 100644 --- a/apps/emqx_psk/src/emqx_psk.app.src +++ b/apps/emqx_psk/src/emqx_psk.app.src @@ -2,7 +2,7 @@ {application, emqx_psk, [ {description, "EMQX PSK"}, % strict semver, bump manually! - {vsn, "5.0.1"}, + {vsn, "5.0.2"}, {modules, []}, {registered, [emqx_psk_sup]}, {applications, [kernel, stdlib]}, diff --git a/apps/emqx_psk/src/emqx_psk.erl b/apps/emqx_psk/src/emqx_psk.erl index 65bdeab48..6bdf48c9b 100644 --- a/apps/emqx_psk/src/emqx_psk.erl +++ b/apps/emqx_psk/src/emqx_psk.erl @@ -17,6 +17,8 @@ -module(emqx_psk). -behaviour(gen_server). +-behaviour(emqx_db_backup). +-behaviour(emqx_config_backup). -include_lib("emqx/include/logger.hrl"). -include_lib("emqx/include/emqx_hooks.hrl"). @@ -48,6 +50,12 @@ insert_psks/1 ]). +%% Data backup +-export([ + import_config/1, + backup_tables/0 +]). + -record(psk_entry, { psk_id :: binary(), shared_secret :: binary(), @@ -86,6 +94,12 @@ mnesia(boot) -> {storage_properties, [{ets, [{read_concurrency, true}]}]} ]). +%%------------------------------------------------------------------------------ +%% Data backup +%%------------------------------------------------------------------------------ + +backup_tables() -> [?TAB]. + %%------------------------------------------------------------------------------ %% APIs %%------------------------------------------------------------------------------ @@ -115,9 +129,27 @@ start_link() -> stop() -> gen_server:stop(?MODULE). -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ +%% Data backup +%%------------------------------------------------------------------------------ + +import_config(#{<<"psk_authentication">> := PskConf}) -> + case emqx_conf:update([psk_authentication], PskConf, #{override_to => cluster}) of + {ok, _} -> + case get_config(enable) of + true -> load(); + false -> ok + end, + {ok, #{root_key => psk_authentication, changed => []}}; + Error -> + {error, #{root_key => psk_authentication, reason => Error}} + end; +import_config(_RawConf) -> + {ok, #{root_key => psk_authentication, changed => []}}. 
+ +%%------------------------------------------------------------------------------ %% gen_server callbacks -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ init(_Opts) -> _ = diff --git a/apps/emqx_resource/src/schema/emqx_resource_schema.erl b/apps/emqx_resource/src/schema/emqx_resource_schema.erl index 4b36f5b89..eb0a48b06 100644 --- a/apps/emqx_resource/src/schema/emqx_resource_schema.erl +++ b/apps/emqx_resource/src/schema/emqx_resource_schema.erl @@ -23,6 +23,8 @@ -export([namespace/0, roots/0, fields/1, desc/1]). +-export([create_opts/1]). + %% range interval in ms -define(HEALTH_CHECK_INTERVAL_RANGE_MIN, 1). -define(HEALTH_CHECK_INTERVAL_RANGE_MAX, 3_600_000). @@ -43,25 +45,41 @@ fields("resource_opts") -> )} ]; fields("creation_opts") -> - [ - {buffer_mode, fun buffer_mode/1}, - {worker_pool_size, fun worker_pool_size/1}, - {health_check_interval, fun health_check_interval/1}, - {resume_interval, fun resume_interval/1}, - {metrics_flush_interval, fun metrics_flush_interval/1}, - {start_after_created, fun start_after_created/1}, - {start_timeout, fun start_timeout/1}, - {auto_restart_interval, fun auto_restart_interval/1}, - {query_mode, fun query_mode/1}, - {request_ttl, fun request_ttl/1}, - {inflight_window, fun inflight_window/1}, - {enable_batch, fun enable_batch/1}, - {batch_size, fun batch_size/1}, - {batch_time, fun batch_time/1}, - {enable_queue, fun enable_queue/1}, - {max_buffer_bytes, fun max_buffer_bytes/1}, - {buffer_seg_bytes, fun buffer_seg_bytes/1} - ]. + create_opts([]). + +create_opts(Overrides) -> + override( + [ + {buffer_mode, fun buffer_mode/1}, + {worker_pool_size, fun worker_pool_size/1}, + {health_check_interval, fun health_check_interval/1}, + {resume_interval, fun resume_interval/1}, + {metrics_flush_interval, fun metrics_flush_interval/1}, + {start_after_created, fun start_after_created/1}, + {start_timeout, fun start_timeout/1}, + {auto_restart_interval, fun auto_restart_interval/1}, + {query_mode, fun query_mode/1}, + {request_ttl, fun request_ttl/1}, + {inflight_window, fun inflight_window/1}, + {enable_batch, fun enable_batch/1}, + {batch_size, fun batch_size/1}, + {batch_time, fun batch_time/1}, + {enable_queue, fun enable_queue/1}, + {max_buffer_bytes, fun max_buffer_bytes/1}, + {buffer_seg_bytes, fun buffer_seg_bytes/1} + ], + Overrides + ). + +override([], _) -> + []; +override([{Name, Sc} | Rest], Overrides) -> + case lists:keyfind(Name, 1, Overrides) of + {Name, Override} -> + [{Name, hocon_schema:override(Sc, Override)} | override(Rest, Overrides)]; + false -> + [{Name, Sc} | override(Rest, Overrides)] + end. resource_opts_meta() -> #{ @@ -142,6 +160,7 @@ request_ttl(_) -> undefined. enable_batch(type) -> boolean(); enable_batch(required) -> false; enable_batch(default) -> true; +enable_batch(importance) -> ?IMPORTANCE_HIDDEN; enable_batch(deprecated) -> {since, "v5.0.14"}; enable_batch(desc) -> ?DESC("enable_batch"); enable_batch(_) -> undefined. @@ -169,6 +188,7 @@ batch_size(_) -> undefined. batch_time(type) -> emqx_schema:timeout_duration_ms(); batch_time(desc) -> ?DESC("batch_time"); batch_time(default) -> ?DEFAULT_BATCH_TIME_RAW; +batch_time(importance) -> ?IMPORTANCE_LOW; batch_time(required) -> false; batch_time(_) -> undefined. 
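For orientation, a minimal sketch (hypothetical bridge schema module; the chosen field and default are only an example) of how a caller can use the new emqx_resource_schema:create_opts/1 to inherit all resource creation options while overriding a single field:

    %% Hypothetical consumer of create_opts/1; the override map is merged into the
    %% field schema via hocon_schema:override/2, as done by override/2 above.
    fields("creation_opts") ->
        emqx_resource_schema:create_opts([
            {worker_pool_size, #{default => 1}}
        ]).

The MongoDB bridge change further below uses the same mechanism to hide its `batch_size` field.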
diff --git a/apps/emqx_retainer/src/emqx_retainer.erl b/apps/emqx_retainer/src/emqx_retainer.erl index b81ea2446..b9a608f62 100644 --- a/apps/emqx_retainer/src/emqx_retainer.erl +++ b/apps/emqx_retainer/src/emqx_retainer.erl @@ -82,9 +82,9 @@ -callback clean(context()) -> ok. -callback size(context()) -> non_neg_integer(). -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ %% Hook API -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ -spec on_session_subscribed(_, _, emqx_types:subopts(), _) -> any(). on_session_subscribed(_, _, #{share := ShareName}, _) when ShareName =/= undefined -> ok; @@ -118,9 +118,9 @@ on_message_publish(Msg = #message{flags = #{retain := true}}, Context) -> on_message_publish(Msg, _) -> {ok, Msg}. -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ %% APIs -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ %% @doc Start the retainer -spec start_link() -> emqx_types:startlink_ret(). @@ -169,9 +169,9 @@ call(Req) -> stats_fun() -> gen_server:cast(?MODULE, ?FUNCTION_NAME). -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ %% APIs -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ -spec get_basic_usage_info() -> #{retained_messages => non_neg_integer()}. get_basic_usage_info() -> @@ -183,9 +183,9 @@ #{retained_messages => 0} end. -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ %% gen_server callbacks -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ init([]) -> erlang:process_flag(trap_exit, true), @@ -248,9 +248,9 @@ terminate(_Reason, #{clear_timer := ClearTimer}) -> code_change(_OldVsn, State, _Extra) -> {ok, State}. -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ %% Internal functions -%%-------------------------------------------------------------------- +%%------------------------------------------------------------------------------ -spec new_state() -> state(). new_state() -> #{ diff --git a/apps/emqx_rule_engine/src/emqx_rule_engine.erl b/apps/emqx_rule_engine/src/emqx_rule_engine.erl index ff6636b9a..4a2cb2ea1 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_engine.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_engine.erl @@ -18,6 +18,7 @@ -behaviour(gen_server). -behaviour(emqx_config_handler). +-behaviour(emqx_config_backup). -include("rule_engine.hrl"). -include_lib("emqx/include/logger.hrl"). @@ -78,6 +79,11 @@ code_change/3 ]). +%% Data backup +-export([ + import_config/1 +]). + -define(RULE_ENGINE, ?MODULE). -define(T_CALL, infinity). @@ -105,7 +111,7 @@ start_link() -> gen_server:start_link({local, ?RULE_ENGINE}, ?MODULE, [], []).
-%%------------------------------------------------------------------------------ +%%---------------------------------------------------------------------------------------- %% The config handler for emqx_rule_engine %%------------------------------------------------------------------------------ post_config_update(?RULE_PATH(RuleId), _Req, NewRule, undefined, _AppEnvs) -> @@ -142,9 +148,9 @@ post_config_update([rule_engine], _Req, #{rules := NewRules}, #{rules := OldRule {error, Error} end. -%%------------------------------------------------------------------------------ +%%---------------------------------------------------------------------------------------- %% APIs for rules -%%------------------------------------------------------------------------------ +%%---------------------------------------------------------------------------------------- -spec load_rules() -> ok. load_rules() -> @@ -185,9 +191,9 @@ delete_rule(RuleId) when is_binary(RuleId) -> insert_rule(Rule) -> gen_server:call(?RULE_ENGINE, {insert_rule, Rule}, ?T_CALL). -%%------------------------------------------------------------------------------ +%%---------------------------------------------------------------------------------------- %% Rule Management -%%------------------------------------------------------------------------------ +%%---------------------------------------------------------------------------------------- -spec get_rules() -> [rule()]. get_rules() -> @@ -301,9 +307,9 @@ unload_hooks_for_rule(#{id := Id, from := Topics}) -> Topics ). -%%------------------------------------------------------------------------------ +%%---------------------------------------------------------------------------------------- %% Telemetry helper functions -%%------------------------------------------------------------------------------ +%%---------------------------------------------------------------------------------------- -spec get_basic_usage_info() -> #{ @@ -362,9 +368,27 @@ tally_referenced_bridges(BridgeIDs, Acc0) -> BridgeIDs ). -%%------------------------------------------------------------------------------ +%%---------------------------------------------------------------------------------------- +%% Data backup +%%---------------------------------------------------------------------------------------- + +import_config(#{<<"rule_engine">> := #{<<"rules">> := NewRules} = RuleEngineConf}) -> + OldRules = emqx:get_raw_config(?KEY_PATH, #{}), + RuleEngineConf1 = RuleEngineConf#{<<"rules">> => maps:merge(OldRules, NewRules)}, + case emqx_conf:update([rule_engine], RuleEngineConf1, #{override_to => cluster}) of + {ok, #{raw_config := #{<<"rules">> := NewRawRules}}} -> + Changed = maps:get(changed, emqx_utils_maps:diff_maps(NewRawRules, OldRules)), + ChangedPaths = [?RULE_PATH(Id) || Id <- maps:keys(Changed)], + {ok, #{root_key => rule_engine, changed => ChangedPaths}}; + Error -> + {error, #{root_key => rule_engine, reason => Error}} + end; +import_config(_RawConf) -> + {ok, #{root_key => rule_engine, changed => []}}. + +%%---------------------------------------------------------------------------------------- %% gen_server callbacks -%%------------------------------------------------------------------------------ +%%---------------------------------------------------------------------------------------- init([]) -> _TableId = ets:new(?KV_TAB, [ @@ -404,9 +428,9 @@ terminate(_Reason, _State) -> code_change(_OldVsn, State, _Extra) -> {ok, State}. 
-%%------------------------------------------------------------------------------ +%%---------------------------------------------------------------------------------------- %% Internal Functions -%%------------------------------------------------------------------------------ +%%---------------------------------------------------------------------------------------- parse_and_insert(Params = #{id := RuleId, sql := Sql, actions := Actions}, CreatedAt) -> case emqx_rule_sqlparser:parse(Sql) of diff --git a/apps/emqx_utils/src/emqx_utils.erl b/apps/emqx_utils/src/emqx_utils.erl index 1badabc38..bb69a9e56 100644 --- a/apps/emqx_utils/src/emqx_utils.erl +++ b/apps/emqx_utils/src/emqx_utils.erl @@ -56,7 +56,9 @@ safe_to_existing_atom/2, pub_props_to_packet/1, safe_filename/1, - diff_lists/3 + diff_lists/3, + merge_lists/3, + tcp_keepalive_opts/4 ]). -export([ @@ -487,6 +489,26 @@ safe_to_existing_atom(Atom, _Encoding) when is_atom(Atom) -> safe_to_existing_atom(_Any, _Encoding) -> {error, invalid_type}. +-spec tcp_keepalive_opts(term(), non_neg_integer(), non_neg_integer(), non_neg_integer()) -> + {ok, [{keepalive, true} | {raw, non_neg_integer(), non_neg_integer(), binary()}]} + | {error, {unsupported_os, term()}}. +tcp_keepalive_opts({unix, linux}, Idle, Interval, Probes) -> + {ok, [ + {keepalive, true}, + {raw, 6, 4, <<Idle:32/native>>}, + {raw, 6, 5, <<Interval:32/native>>}, + {raw, 6, 6, <<Probes:32/native>>} + ]}; +tcp_keepalive_opts({unix, darwin}, Idle, Interval, Probes) -> + {ok, [ + {keepalive, true}, + {raw, 6, 16#10, <<Idle:32/native>>}, + {raw, 6, 16#101, <<Interval:32/native>>}, + {raw, 6, 16#102, <<Probes:32/native>>} + ]}; +tcp_keepalive_opts(OS, _Idle, _Interval, _Probes) -> + {error, {unsupported_os, OS}}. + %%------------------------------------------------------------------------------ %% Internal Functions %%------------------------------------------------------------------------------ @@ -578,15 +600,18 @@ try_to_existing_atom(Convert, Data, Encoding) -> _:Reason -> {error, Reason} end. -is_sensitive_key(token) -> true; -is_sensitive_key("token") -> true; -is_sensitive_key(<<"token">>) -> true; is_sensitive_key(authorization) -> true; is_sensitive_key("authorization") -> true; is_sensitive_key(<<"authorization">>) -> true; +is_sensitive_key(aws_secret_access_key) -> true; +is_sensitive_key("aws_secret_access_key") -> true; +is_sensitive_key(<<"aws_secret_access_key">>) -> true; is_sensitive_key(password) -> true; is_sensitive_key("password") -> true; is_sensitive_key(<<"password">>) -> true; +is_sensitive_key('proxy-authorization') -> true; +is_sensitive_key("proxy-authorization") -> true; +is_sensitive_key(<<"proxy-authorization">>) -> true; is_sensitive_key(secret) -> true; is_sensitive_key("secret") -> true; is_sensitive_key(<<"secret">>) -> true; @@ -596,9 +621,9 @@ is_sensitive_key(<<"secret_key">>) -> true; is_sensitive_key(security_token) -> true; is_sensitive_key("security_token") -> true; is_sensitive_key(<<"security_token">>) -> true; -is_sensitive_key(aws_secret_access_key) -> true; -is_sensitive_key("aws_secret_access_key") -> true; -is_sensitive_key(<<"aws_secret_access_key">>) -> true; +is_sensitive_key(token) -> true; +is_sensitive_key("token") -> true; +is_sensitive_key(<<"token">>) -> true; is_sensitive_key(_) -> false. redact(Term) -> @@ -709,9 +734,14 @@ redact_test_() -> Types = [atom, string, binary], Keys = [ - token, + authorization, + aws_secret_access_key, password, - secret + 'proxy-authorization', + secret, + secret_key, + security_token, + token ], [{case_name(Type, Key), fun() -> Case(Type, Key) end} || Key <- Keys, Type <- Types].
@@ -819,6 +849,42 @@ diff_lists(New, Old, KeyFunc) when is_list(New) andalso is_list(Old) -> changed => lists:reverse(Changed) }. +%% @doc Merges two lists preserving the original order of elements in both lists. +%% KeyFunc must extract a unique key from each element. +%% If two keys exist in both lists, the value in List1 is superseded by the value in List2, but +%% the element position in the result list will equal its position in List1. +%% Example: +%% emqx_utils:merge_append_lists( +%% [#{id => a, val => old}, #{id => b, val => old}], +%% [#{id => a, val => new}, #{id => c}, #{id => b, val => new}, #{id => d}], +%% fun(#{id := Id}) -> Id end). +%% [#{id => a,val => new}, +%% #{id => b,val => new}, +%% #{id => c}, +%% #{id => d}] +-spec merge_lists(list(T), list(T), KeyFunc) -> list(T) when + KeyFunc :: fun((T) -> any()), + T :: any(). +merge_lists(List1, List2, KeyFunc) -> + WithKeysList2 = lists:map(fun(E) -> {KeyFunc(E), E} end, List2), + WithKeysList1 = lists:map( + fun(E) -> + K = KeyFunc(E), + case lists:keyfind(K, 1, WithKeysList2) of + false -> {K, E}; + WithKey1 -> WithKey1 + end + end, + List1 + ), + NewWithKeysList2 = lists:filter( + fun({K, _}) -> + not lists:keymember(K, 1, WithKeysList1) + end, + WithKeysList2 + ), + [E || {_, E} <- WithKeysList1 ++ NewWithKeysList2]. + search(_ExpectValue, _KeyFunc, []) -> false; search(ExpectValue, KeyFunc, [Item | List]) -> diff --git a/apps/emqx_utils/test/emqx_utils_fs_SUITE.erl b/apps/emqx_utils/test/emqx_utils_fs_SUITE.erl index 243db98cd..d74a258d4 100644 --- a/apps/emqx_utils/test/emqx_utils_fs_SUITE.erl +++ b/apps/emqx_utils/test/emqx_utils_fs_SUITE.erl @@ -26,6 +26,33 @@ all() -> emqx_common_test_helpers:all(?MODULE). +init_per_suite(Config) -> + Root = ?config(data_dir, Config), + D1 = filename:join([Root, "nonempty", "d1/"]), + D2 = filename:join([Root, "nonempty", "d2/"]), + F1 = filename:join([D1, "1"]), + F2 = filename:join([D1, "2"]), + DeepDir = filename:join([Root, "nonempty", "d2", "deep", "down/"]), + DeepFile = filename:join([DeepDir, "here"]), + Files = [F1, F2, DeepFile], + lists:foreach(fun filelib:ensure_dir/1, Files), + D1LinkMutrec = filename:join([D1, "mutrec"]), + D2LinkMutrec = filename:join([D2, "deep", "mutrec"]), + lists:foreach(fun(File) -> file:write_file(File, <<"">>, [write]) end, Files), + chmod_file(D1, 8#00777), + chmod_file(DeepFile, 8#00600), + make_symlink(DeepDir, D1LinkMutrec), + %% can't file:make_link("../../d1", D2Mutrec) on mac, return {error, eperm} + make_symlink("../../d1", D2LinkMutrec), + {ok, D2MutrecInfo} = file:read_link_info(D2LinkMutrec), + ct:pal("~ts 's file_info is ~p~n", [D2LinkMutrec, D2MutrecInfo]), + Config. + +end_per_suite(Config) -> + Root = ?config(data_dir, Config), + ok = file:del_dir_r(filename:join([Root, "nonempty"])), + ok. + %% t_traverse_dir(Config) -> @@ -115,3 +142,11 @@ t_canonicalize_non_utf8(_) -> badarg, emqx_utils_fs:canonicalize(<<128, 128, 128>>) ). + +chmod_file(File, Mode) -> + {ok, FileInfo} = file:read_file_info(File), + ok = file:write_file_info(File, FileInfo#file_info{mode = Mode}). + +make_symlink(FileOrDir, NewLink) -> + _ = file:delete(NewLink), + ok = file:make_symlink(FileOrDir, NewLink). 
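As a usage sketch of the emqx_utils:tcp_keepalive_opts/4 helper added above (the wrapper function below is illustrative and not part of this change; the Kafka bridge keepalive option described later is the intended consumer):

    %% Illustrative only: turn user-supplied keepalive settings into socket options,
    %% falling back to no extra options on operating systems the helper does not support.
    keepalive_socket_opts(Idle, Interval, Probes) ->
        case emqx_utils:tcp_keepalive_opts(os:type(), Idle, Interval, Probes) of
            {ok, Opts} -> Opts;
            {error, {unsupported_os, _OS}} -> []
        end.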
diff --git a/apps/emqx_utils/test/emqx_utils_fs_SUITE_data/nonempty/d1/1 b/apps/emqx_utils/test/emqx_utils_fs_SUITE_data/nonempty/d1/1 deleted file mode 100644 index e69de29bb..000000000 diff --git a/apps/emqx_utils/test/emqx_utils_fs_SUITE_data/nonempty/d1/2 b/apps/emqx_utils/test/emqx_utils_fs_SUITE_data/nonempty/d1/2 deleted file mode 100644 index e69de29bb..000000000 diff --git a/apps/emqx_utils/test/emqx_utils_fs_SUITE_data/nonempty/d1/mutrec b/apps/emqx_utils/test/emqx_utils_fs_SUITE_data/nonempty/d1/mutrec deleted file mode 120000 index d378eb1b6..000000000 --- a/apps/emqx_utils/test/emqx_utils_fs_SUITE_data/nonempty/d1/mutrec +++ /dev/null @@ -1 +0,0 @@ -../d2/deep/down \ No newline at end of file diff --git a/apps/emqx_utils/test/emqx_utils_fs_SUITE_data/nonempty/d2/deep/down/here b/apps/emqx_utils/test/emqx_utils_fs_SUITE_data/nonempty/d2/deep/down/here deleted file mode 100644 index e69de29bb..000000000 diff --git a/apps/emqx_utils/test/emqx_utils_fs_SUITE_data/nonempty/d2/deep/mutrec b/apps/emqx_utils/test/emqx_utils_fs_SUITE_data/nonempty/d2/deep/mutrec deleted file mode 120000 index 354c7c5c3..000000000 --- a/apps/emqx_utils/test/emqx_utils_fs_SUITE_data/nonempty/d2/deep/mutrec +++ /dev/null @@ -1 +0,0 @@ -../../d1 \ No newline at end of file diff --git a/changes/ce/feat-10676.en.md b/changes/ce/feat-10676.en.md new file mode 100644 index 000000000..9628c0b74 --- /dev/null +++ b/changes/ce/feat-10676.en.md @@ -0,0 +1,4 @@ +Implement configuration and user data import/export CLI. + +The `emqx ctl export` and `emqx ctl import` commands allow exporting configuration and built-in database +data from a running EMQX cluster and importing it later into the same or another running EMQX cluster. diff --git a/changes/ce/feat-10926.en.md b/changes/ce/feat-10926.en.md new file mode 100644 index 000000000..607dd06de --- /dev/null +++ b/changes/ce/feat-10926.en.md @@ -0,0 +1,5 @@ +Allow 'enable' as well as 'enabled' as the state flag for listeners. + +Prior to this change, a listener could be enabled or disabled by setting 'true' or 'false' on its 'enabled' config. +This naming is slightly different from the other state flags in the system. +Now the 'enable' flag is added as an alias on listeners. diff --git a/changes/ce/feat-10961.en.md b/changes/ce/feat-10961.en.md new file mode 100644 index 000000000..375a11af7 --- /dev/null +++ b/changes/ce/feat-10961.en.md @@ -0,0 +1,3 @@ +Adds support for unlimited max connections for gateway listeners by allowing +infinity as a valid value for the `max_connections` field in the configuration +and HTTP API. diff --git a/changes/ce/fix-10950.en.md b/changes/ce/fix-10950.en.md new file mode 100644 index 000000000..e87c0c552 --- /dev/null +++ b/changes/ce/fix-10950.en.md @@ -0,0 +1 @@ +Fix the issue where the `enable_qos` option does not take effect in the MQTT-SN gateway. diff --git a/changes/ce/fix-10955.en.md b/changes/ce/fix-10955.en.md new file mode 100644 index 000000000..a08b80560 --- /dev/null +++ b/changes/ce/fix-10955.en.md @@ -0,0 +1 @@ +Fix the issue in the MQTT-SN gateway where deleting the Predefined Topics configuration does not work. diff --git a/changes/ce/fix-10994.en.md b/changes/ce/fix-10994.en.md new file mode 100644 index 000000000..803646be6 --- /dev/null +++ b/changes/ce/fix-10994.en.md @@ -0,0 +1 @@ +Redact `proxy-authorization` headers used by the HTTP connector to avoid leaking secrets into log files.
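A usage sketch of the import/export flow from the feat-10676 entry above, written against the emqx_mgmt_data_backup API exercised by the test suite earlier in this change set (the CLI commands presumably delegate to the same module; variable names are illustrative):

    %% Export a backup archive on one core node, then import it on the same or
    %% another running cluster; both error maps are empty on full success.
    {ok, #{filename := BackupFile}} = emqx_mgmt_data_backup:export(),
    {ok, #{db_errors := _, config_errors := _}} = emqx_mgmt_data_backup:import(BackupFile).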
diff --git a/changes/ce/fix-10996.en.md b/changes/ce/fix-10996.en.md new file mode 100644 index 000000000..846faecac --- /dev/null +++ b/changes/ce/fix-10996.en.md @@ -0,0 +1 @@ +Default to a 404 error response for any unknown HTTP API request, rather than returning the dashboard's index.html. diff --git a/changes/ce/fix-11005.en.md b/changes/ce/fix-11005.en.md new file mode 100644 index 000000000..e4cf135d7 --- /dev/null +++ b/changes/ce/fix-11005.en.md @@ -0,0 +1 @@ +Fix the issue where the `method` field cannot be correctly printed in the trace logs of AuthN HTTP. diff --git a/changes/ee/feat-11003.en.md b/changes/ee/feat-11003.en.md new file mode 100644 index 000000000..57f3dd3b5 --- /dev/null +++ b/changes/ee/feat-11003.en.md @@ -0,0 +1 @@ +Add an option to configure TCP keepalive in Kafka bridge. diff --git a/changes/ee/fix-10998.en.md b/changes/ee/fix-10998.en.md new file mode 100644 index 000000000..682727551 --- /dev/null +++ b/changes/ee/fix-10998.en.md @@ -0,0 +1,2 @@ +Do not allow `batch_size` option for MongoDB bridge resource. +The MongoDB connector currently does not support batching, so the `batch_size` config value is forced to 1 if provided. diff --git a/changes/ee/fix-10999.en.md b/changes/ee/fix-10999.en.md new file mode 100644 index 000000000..d9a38b326 --- /dev/null +++ b/changes/ee/fix-10999.en.md @@ -0,0 +1 @@ +Changed schema validation for Kafka fields 'Partition Count Refresh Interval' and 'Offset Commit Interval' to avoid accepting values larger than the maximum allowed. diff --git a/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_mongodb.erl b/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_mongodb.erl index fec5a4a7f..e115bbf37 100644 --- a/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_mongodb.erl +++ b/lib-ee/emqx_ee_bridge/src/emqx_ee_bridge_mongodb.erl @@ -37,8 +37,24 @@ fields("config") -> [ {enable, mk(boolean(), #{desc => ?DESC("enable"), default => true})}, {collection, mk(binary(), #{desc => ?DESC("collection"), default => <<"mqtt">>})}, - {payload_template, mk(binary(), #{required => false, desc => ?DESC("payload_template")})} - ] ++ emqx_resource_schema:fields("resource_opts"); + {payload_template, mk(binary(), #{required => false, desc => ?DESC("payload_template")})}, + {resource_opts, + mk( + ref(?MODULE, "creation_opts"), + #{required => true, desc => ?DESC(emqx_resource_schema, "creation_opts")} + )} + ]; +fields("creation_opts") -> + %% so far, mongodb connector does not support batching + %% but we cannot delete this field due to compatibility reasons + %% so we'll keep this field, but hide it in the docs. + emqx_resource_schema:create_opts([ + {batch_size, #{ + importance => ?IMPORTANCE_HIDDEN, + converter => fun(_, _) -> 1 end, + desc => ?DESC("batch_size") + }} + ]); fields(mongodb_rs) -> emqx_connector_mongo:fields(rs) ++ fields("config"); fields(mongodb_sharded) -> diff --git a/lib-ee/emqx_ee_connector/src/emqx_ee_connector_redis.erl b/lib-ee/emqx_ee_connector/src/emqx_ee_connector_redis.erl index 9caba2beb..5413ee471 100644 --- a/lib-ee/emqx_ee_connector/src/emqx_ee_connector_redis.erl +++ b/lib-ee/emqx_ee_connector/src/emqx_ee_connector_redis.erl @@ -44,7 +44,9 @@ on_start(InstId, #{command_template := CommandTemplate} = Config) -> end. on_stop(InstId, #{conn_st := RedisConnSt}) -> - emqx_connector_redis:on_stop(InstId, RedisConnSt). + emqx_connector_redis:on_stop(InstId, RedisConnSt); +on_stop(InstId, undefined = _State) -> + emqx_connector_redis:on_stop(InstId, undefined). on_get_status(InstId, #{conn_st := RedisConnSt}) -> emqx_connector_redis:on_get_status(InstId, RedisConnSt).
diff --git a/lib-ee/emqx_ee_schema_registry/src/emqx_ee_schema_registry.erl b/lib-ee/emqx_ee_schema_registry/src/emqx_ee_schema_registry.erl index 1390f9bfe..90127e629 100644 --- a/lib-ee/emqx_ee_schema_registry/src/emqx_ee_schema_registry.erl +++ b/lib-ee/emqx_ee_schema_registry/src/emqx_ee_schema_registry.erl @@ -5,6 +5,7 @@ -behaviour(gen_server). -behaviour(emqx_config_handler). +-behaviour(emqx_config_backup). -include("emqx_ee_schema_registry.hrl"). -include_lib("emqx/include/logger.hrl"). @@ -13,9 +14,7 @@ %% API -export([ start_link/0, - get_serde/1, - add_schema/2, get_schema/1, delete_schema/1, @@ -34,6 +33,11 @@ %% `emqx_config_handler' API -export([post_config_update/5]). +%% Data backup +-export([ + import_config/1 +]). + -type schema() :: #{ type := serde_type(), source := binary(), @@ -129,7 +133,50 @@ post_config_update( {error, Reason, SerdesToRollback} -> lists:foreach(fun ensure_serde_absent/1, SerdesToRollback), {error, Reason} - end. + end; +post_config_update(?CONF_KEY_PATH, _Cmd, NewConf = #{schemas := NewSchemas}, OldConf, _AppEnvs) -> + OldSchemas = maps:get(schemas, OldConf, #{}), + #{ + added := Added, + changed := Changed0, + removed := Removed + } = emqx_utils_maps:diff_maps(NewSchemas, OldSchemas), + Changed = maps:map(fun(_N, {_Old, New}) -> New end, Changed0), + RemovedNames = maps:keys(Removed), + case RemovedNames of + [] -> + ok; + _ -> + async_delete_serdes(RemovedNames) + end, + SchemasToBuild = maps:to_list(maps:merge(Changed, Added)), + case build_serdes(SchemasToBuild) of + ok -> + {ok, NewConf}; + {error, Reason, SerdesToRollback} -> + lists:foreach(fun ensure_serde_absent/1, SerdesToRollback), + {error, Reason} + end; +post_config_update(_Path, _Cmd, NewConf, _OldConf, _AppEnvs) -> + {ok, NewConf}. + +%%------------------------------------------------------------------------------------------------- +%% Data backup +%%------------------------------------------------------------------------------------------------- + +import_config(#{<<"schema_registry">> := #{<<"schemas">> := Schemas} = SchemaRegConf}) -> + OldSchemas = emqx:get_raw_config([?CONF_KEY_ROOT, schemas], #{}), + SchemaRegConf1 = SchemaRegConf#{<<"schemas">> => maps:merge(OldSchemas, Schemas)}, + case emqx_conf:update(?CONF_KEY_PATH, SchemaRegConf1, #{override_to => cluster}) of + {ok, #{raw_config := #{<<"schemas">> := NewRawSchemas}}} -> + Changed = maps:get(changed, emqx_utils_maps:diff_maps(NewRawSchemas, OldSchemas)), + ChangedPaths = [[?CONF_KEY_ROOT, schemas, Name] || Name <- maps:keys(Changed)], + {ok, #{root_key => ?CONF_KEY_ROOT, changed => ChangedPaths}}; + Error -> + {error, #{root_key => ?CONF_KEY_ROOT, reason => Error}} + end; +import_config(_RawConf) -> + {ok, #{root_key => ?CONF_KEY_ROOT, changed => []}}. 
%%------------------------------------------------------------------------------------------------- %% `gen_server' API diff --git a/lib-ee/emqx_ee_schema_registry/src/emqx_ee_schema_registry_app.erl b/lib-ee/emqx_ee_schema_registry/src/emqx_ee_schema_registry_app.erl index 195a54c15..85d35be1f 100644 --- a/lib-ee/emqx_ee_schema_registry/src/emqx_ee_schema_registry_app.erl +++ b/lib-ee/emqx_ee_schema_registry/src/emqx_ee_schema_registry_app.erl @@ -11,9 +11,13 @@ start(_StartType, _StartArgs) -> ok = mria_rlog:wait_for_shards([?SCHEMA_REGISTRY_SHARD], infinity), + %% HTTP API handler emqx_conf:add_handler([?CONF_KEY_ROOT, schemas, '?'], emqx_ee_schema_registry), + %% Conf load / data import handler + emqx_conf:add_handler(?CONF_KEY_PATH, emqx_ee_schema_registry), emqx_ee_schema_registry_sup:start_link(). stop(_State) -> emqx_conf:remove_handler([?CONF_KEY_ROOT, schemas, '?']), + emqx_conf:remove_handler(?CONF_KEY_PATH), ok. diff --git a/lib-ee/emqx_ee_schema_registry/test/emqx_ee_schema_registry_SUITE.erl b/lib-ee/emqx_ee_schema_registry/test/emqx_ee_schema_registry_SUITE.erl index 71f7c7d8b..9167fed9e 100644 --- a/lib-ee/emqx_ee_schema_registry/test/emqx_ee_schema_registry_SUITE.erl +++ b/lib-ee/emqx_ee_schema_registry/test/emqx_ee_schema_registry_SUITE.erl @@ -700,3 +700,34 @@ t_cluster_serde_build(Config) -> ] ), ok. + +t_import_config(_Config) -> + RawConf = #{ + <<"schema_registry">> => + #{ + <<"schemas">> => + #{ + <<"my_avro_schema">> => + #{ + <<"description">> => <<"My Avro Schema">>, + <<"source">> => + <<"{\"type\":\"record\",\"fields\":[{\"type\":\"int\",\"name\":\"i\"},{\"type\":\"string\",\"name\":\"s\"}]}">>, + <<"type">> => <<"avro">> + } + } + } + }, + RawConf1 = emqx_utils_maps:deep_put( + [<<"schema_registry">>, <<"schemas">>, <<"my_avro_schema">>, <<"description">>], + RawConf, + <<"Updated description">> + ), + Path = [schema_registry, schemas, <<"my_avro_schema">>], + ?assertEqual( + {ok, #{root_key => schema_registry, changed => []}}, + emqx_ee_schema_registry:import_config(RawConf) + ), + ?assertEqual( + {ok, #{root_key => schema_registry, changed => [Path]}}, + emqx_ee_schema_registry:import_config(RawConf1) + ). 
diff --git a/mix.exs b/mix.exs index 67fdee443..455f2e6a9 100644 --- a/mix.exs +++ b/mix.exs @@ -58,7 +58,7 @@ defmodule EMQXUmbrella.MixProject do {:ekka, github: "emqx/ekka", tag: "0.15.2", override: true}, {:gen_rpc, github: "emqx/gen_rpc", tag: "2.8.1", override: true}, {:grpc, github: "emqx/grpc-erl", tag: "0.6.8", override: true}, - {:minirest, github: "emqx/minirest", tag: "1.3.10", override: true}, + {:minirest, github: "emqx/minirest", tag: "1.3.11", override: true}, {:ecpool, github: "emqx/ecpool", tag: "0.5.4", override: true}, {:replayq, github: "emqx/replayq", tag: "0.3.7", override: true}, {:pbkdf2, github: "emqx/erlang-pbkdf2", tag: "2.0.4", override: true}, diff --git a/rebar.config b/rebar.config index 8bbba5b96..d0e9570f8 100644 --- a/rebar.config +++ b/rebar.config @@ -65,7 +65,7 @@ , {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.15.2"}}} , {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "2.8.1"}}} , {grpc, {git, "https://github.com/emqx/grpc-erl", {tag, "0.6.8"}}} - , {minirest, {git, "https://github.com/emqx/minirest", {tag, "1.3.10"}}} + , {minirest, {git, "https://github.com/emqx/minirest", {tag, "1.3.11"}}} , {ecpool, {git, "https://github.com/emqx/ecpool", {tag, "0.5.4"}}} , {replayq, {git, "https://github.com/emqx/replayq.git", {tag, "0.3.7"}}} , {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}} diff --git a/rel/i18n/emqx_bridge_iotdb.hocon b/rel/i18n/emqx_bridge_iotdb.hocon index d1ceecbe0..e38c828f5 100644 --- a/rel/i18n/emqx_bridge_iotdb.hocon +++ b/rel/i18n/emqx_bridge_iotdb.hocon @@ -42,7 +42,9 @@ config_is_aligned.label: """Align Timeseries""" config_device_id.desc: -"""A fixed device name this data should be inserted for. If empty it must either be set in the rule action, the message itself, or it will be extracted from the topic.""" +"""The IoTDB device ID this data should be inserted for. +If left empty, the MQTT message payload must contain a `device_id` field, +or EMQX's rule-engine SQL must produce a `device_id` field.""" config_device_id.label: """Device ID""" @@ -57,12 +59,6 @@ config_max_retries.desc: config_max_retries.label: """HTTP Request Max Retries""" -config_request_timeout.desc: -"""HTTP request timeout.""" - -config_request_timeout.label: -"""HTTP Request Timeout""" - desc_config.desc: """Configuration for Apache IoTDB bridge.""" diff --git a/rel/i18n/emqx_bridge_kafka.hocon b/rel/i18n/emqx_bridge_kafka.hocon index fcb9b0074..d35e31a12 100644 --- a/rel/i18n/emqx_bridge_kafka.hocon +++ b/rel/i18n/emqx_bridge_kafka.hocon @@ -38,6 +38,24 @@ socket_send_buffer.desc: socket_send_buffer.label: """Socket Send Buffer Size""" +socket_receive_buffer.desc: +"""Fine tune the socket receive buffer. The default value is tuned for high throughput.""" + +socket_receive_buffer.label: +"""Socket Receive Buffer Size""" + +socket_tcp_keepalive.desc: +"""Enable TCP keepalive for Kafka bridge connections. +The value is three comma separated numbers in the format of 'Idle,Interval,Probes' + - Idle: The number of seconds a connection needs to be idle before the server begins to send out keep-alive probes (Linux default 7200). + - Interval: The number of seconds between TCP keep-alive probes (Linux default 75). + - Probes: The maximum number of TCP keep-alive probes to send before giving up and killing the connection if no response is obtained from the other end (Linux default 9). 
+For example "240,30,5" means: TCP keepalive probes are sent after the connection is idle for 240 seconds, and the probes are sent every 30 seconds until a response is received, if it misses 5 consecutive responses, the connection should be closed. +Default: 'none'""" + +socket_tcp_keepalive.label: +"""TCP keepalive options""" + desc_name.desc: """Bridge name, used as a human-readable description of the bridge.""" @@ -56,12 +74,6 @@ consumer_max_batch_bytes.desc: consumer_max_batch_bytes.label: """Fetch Bytes""" -socket_receive_buffer.desc: -"""Fine tune the socket receive buffer. The default value is tuned for high throughput.""" - -socket_receive_buffer.label: -"""Socket Receive Buffer Size""" - consumer_topic_mapping.desc: """Defines the mapping between Kafka topics and MQTT topics. Must contain at least one item.""" diff --git a/rel/i18n/emqx_dashboard_error_code_api.hocon b/rel/i18n/emqx_dashboard_error_code_api.hocon index 835f200ab..856501098 100644 --- a/rel/i18n/emqx_dashboard_error_code_api.hocon +++ b/rel/i18n/emqx_dashboard_error_code_api.hocon @@ -5,9 +5,4 @@ error_codes.desc: error_codes.label: """API Error Codes""" -error_codes_u.desc: -"""API Error Codes""" -error_codes_u.label: -"""API Error Codes""" - } diff --git a/rel/i18n/emqx_dashboard_monitor_api.hocon b/rel/i18n/emqx_dashboard_monitor_api.hocon index 1d45c45ae..37da0cf5f 100644 --- a/rel/i18n/emqx_dashboard_monitor_api.hocon +++ b/rel/i18n/emqx_dashboard_monitor_api.hocon @@ -1,23 +1,23 @@ emqx_dashboard_monitor_api { list_monitor.desc: -"""List monitor data.""" +"""List monitor (statistics) data for the whole cluster.""" list_monitor.label: -"""List monitor data.""" +"""List cluster stats data""" list_monitor_node.desc: -"""List the monitor data on the node.""" +"""List the monitor (statistics) data on the specified node.""" list_monitor_node.label: -"""List the monitor data on the node.""" +"""List node's stats data""" -current_status.desc: -"""Current status. Gauge and rate.""" -current_status.label: -"""Current status. Gauge and rate.""" +current_stats.desc: +"""Current monitor (statistics) data, e.g. number of connections and connection rate in the whole cluster.""" +current_stats.label: +"""Cluster runtime stats""" -current_status_node.desc: -"""Node current status. Gauge and rate.""" -current_status_node.label: -"""Node current status. Gauge and rate.""" +current_stats_node.desc: +"""Node monitor (statistics) data, e.g. number of connections and connection rate on the specified node.""" +current_stats_node.label: +"""Node runtime stats""" } diff --git a/rel/i18n/emqx_ee_bridge_mongodb.hocon b/rel/i18n/emqx_ee_bridge_mongodb.hocon index fab371824..b703bf5a5 100644 --- a/rel/i18n/emqx_ee_bridge_mongodb.hocon +++ b/rel/i18n/emqx_ee_bridge_mongodb.hocon @@ -54,4 +54,9 @@ payload_template.desc: payload_template.label: """Payload template""" +batch_size.desc: +"""There is no batching support for MongoDB at the moment, so this config field has no effect. Internally the value is overridden to 1.""" +batch_size.label: +"""Batch Size""" + } diff --git a/rel/i18n/emqx_ft_schema.hocon b/rel/i18n/emqx_ft_schema.hocon index aabdd2032..f4b48e9f4 100644 --- a/rel/i18n/emqx_ft_schema.hocon +++ b/rel/i18n/emqx_ft_schema.hocon @@ -7,16 +7,16 @@ This toggle also affects the availability of the File Transfer REST API and storage-dependent background activities (e.g. garbage collection).""" init_timeout.desc: -"""Timeout for initializing the file transfer.
diff --git a/rel/i18n/emqx_dashboard_error_code_api.hocon b/rel/i18n/emqx_dashboard_error_code_api.hocon
index 835f200ab..856501098 100644
--- a/rel/i18n/emqx_dashboard_error_code_api.hocon
+++ b/rel/i18n/emqx_dashboard_error_code_api.hocon
@@ -5,9 +5,4 @@ error_codes.desc:
 error_codes.label:
 """API Error Codes"""

-error_codes_u.desc:
-"""API Error Codes"""
-error_codes_u.label:
-"""API Error Codes"""
-
 }
diff --git a/rel/i18n/emqx_dashboard_monitor_api.hocon b/rel/i18n/emqx_dashboard_monitor_api.hocon
index 1d45c45ae..37da0cf5f 100644
--- a/rel/i18n/emqx_dashboard_monitor_api.hocon
+++ b/rel/i18n/emqx_dashboard_monitor_api.hocon
@@ -1,23 +1,23 @@
 emqx_dashboard_monitor_api {

 list_monitor.desc:
-"""List monitor data."""
+"""List monitor (statistics) data for the whole cluster."""

 list_monitor.label:
-"""List monitor data."""
+"""List cluster stats data"""

 list_monitor_node.desc:
-"""List the monitor data on the node."""
+"""List the monitor (statistics) data on the specified node."""

 list_monitor_node.label:
-"""List the monitor data on the node."""
+"""List node's stats data"""

-current_status.desc:
-"""Current status. Gauge and rate."""
-current_status.label:
-"""Current status. Gauge and rate."""
+current_stats.desc:
+"""Current monitor (statistics) data, e.g. number of connections and connection rate in the whole cluster."""
+current_stats.label:
+"""Cluster runtime stats"""

-current_status_node.desc:
-"""Node current status. Gauge and rate."""
-current_status_node.label:
-"""Node current status. Gauge and rate."""
+current_stats_node.desc:
+"""Node monitor (statistics) data, e.g. number of connections and connection rate on the specified node."""
+current_stats_node.label:
+"""Node runtime stats"""

 }
diff --git a/rel/i18n/emqx_ee_bridge_mongodb.hocon b/rel/i18n/emqx_ee_bridge_mongodb.hocon
index fab371824..b703bf5a5 100644
--- a/rel/i18n/emqx_ee_bridge_mongodb.hocon
+++ b/rel/i18n/emqx_ee_bridge_mongodb.hocon
@@ -54,4 +54,9 @@ payload_template.desc:
 payload_template.label:
 """Payload template"""

+batch_size.desc:
+"""There is no batching support for MongoDB at the moment, so this config field has no effect. Internally, the value is overridden to 1."""
+batch_size.label:
+"""Batch Size"""
+
 }
diff --git a/rel/i18n/emqx_ft_schema.hocon b/rel/i18n/emqx_ft_schema.hocon
index aabdd2032..f4b48e9f4 100644
--- a/rel/i18n/emqx_ft_schema.hocon
+++ b/rel/i18n/emqx_ft_schema.hocon
@@ -7,16 +7,16 @@
 This toggle also affects the availability of the File Transfer REST API and
 storage-dependent background activities (e.g. garbage collection)."""

 init_timeout.desc:
-"""Timeout for initializing the file transfer.
-After reaching the timeout, `init` message will be acked with an error"""
+"""Timeout for EMQX to initialize the file transfer.
+After reaching the timeout (e.g. because the system is overloaded), the PUBACK message for `init` will contain error code (0x80)."""

 assemble_timeout.desc:
 """Timeout for assembling and exporting file segments into a final file.
-After reaching the timeout, `fin` message will be acked with an error"""
+After reaching the timeout (e.g. because the system is overloaded), the PUBACK message for `fin` will contain error code (0x80)."""

 store_segment_timeout.desc:
 """Timeout for storing a file segment.
-After reaching the timeout, message with the segment will be acked with an error"""
+After reaching the timeout (e.g. because the system is overloaded), the PUBACK message will contain error code (0x80)."""

 backend_enable.desc:
 """Whether to enable this backend."""
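Note: a hedged sketch of how the three file-transfer timeouts described above fit together in a node's configuration. The `file_transfer` root key and the values shown are assumptions for illustration, not taken from this changeset:

    # Hypothetical file-transfer settings; on timeout the corresponding PUBACK carries reason code 0x80.
    file_transfer {
      enable = true
      init_timeout = "10s"            # applies to the `init` command
      store_segment_timeout = "5m"    # applies to each uploaded segment
      assemble_timeout = "5m"         # applies to the `fin` command
    }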
diff --git a/rel/i18n/emqx_mgmt_api_configs.hocon b/rel/i18n/emqx_mgmt_api_configs.hocon
index 3255806d0..292658900 100644
--- a/rel/i18n/emqx_mgmt_api_configs.hocon
+++ b/rel/i18n/emqx_mgmt_api_configs.hocon
@@ -6,9 +6,9 @@ get_conf_node.label:
 """Get all the configurations for node."""

 node_name.desc:
-"""Node's name: If you do not fill in the fields, this node will be used by default."""
+"""Node's name. If not specified, the configs on the node that receives the HTTP request will be returned."""

 node_name.label:
-"""Node's name."""
+"""Node's name"""

 rest_conf_query.desc:
 """Reset the config entry specified by the query string parameter `conf_path`.
diff --git a/rel/i18n/emqx_mgmt_api_listeners.hocon b/rel/i18n/emqx_mgmt_api_listeners.hocon
index b45f55977..c2330ecd7 100644
--- a/rel/i18n/emqx_mgmt_api_listeners.hocon
+++ b/rel/i18n/emqx_mgmt_api_listeners.hocon
@@ -3,12 +3,12 @@ emqx_mgmt_api_listeners {
 list_node_live_statuses.desc:
 """List all running node's listeners live status. group by listener type"""
 list_node_live_statuses.label:
-"""List all running node's listeners live status. group by listener type"""
+"""List listeners live status"""

 list_listeners.desc:
 """List all running node's listeners for the specified type."""
 list_listeners.label:
-"""List all running node's listeners for the specified type."""
+"""List listeners per type"""

 listener_type.desc:
 """Listener type"""
@@ -18,42 +18,37 @@ listener_type.label:
 create_on_all_nodes.desc:
 """Create the specified listener on all nodes."""
 create_on_all_nodes.label:
-"""Create the specified listener on all nodes."""
+"""Create listener"""

 list_by_id.desc:
 """List all running node's listeners for the specified id."""
 list_by_id.label:
-"""List all running node's listeners for the specified id."""
+"""List listeners per ID"""

 update_lisener.desc:
 """Update the specified listener on all nodes."""
 update_lisener.label:
-"""Update the specified listener on all nodes."""
-
-create_on_all_nodes_2.desc:
-"""Create the specified listener on all nodes."""
-create_on_all_nodes_2.label:
-"""Create the specified listener on all nodes."""
+"""Update listener"""

 delete_on_all_nodes.desc:
 """Delete the specified listener on all nodes."""
 delete_on_all_nodes.label:
-"""Delete the specified listener on all nodes."""
+"""Delete listener"""

 start_on_all_nodes.desc:
 """Start the listener on all nodes."""
 start_on_all_nodes.label:
-"""Start the listener on all nodes."""
+"""Start listener"""

 stop_on_all_nodes.desc:
 """Stop the listener on all nodes."""
 stop_on_all_nodes.label:
-"""Stop the listener on all nodes."""
+"""Stop listener"""

 restart_on_all_nodes.desc:
 """Restart listeners on all nodes."""
 restart_on_all_nodes.label:
-"""Restart listeners on all nodes."""
+"""Restart listener"""

 }
diff --git a/rel/i18n/emqx_mgmt_api_nodes.hocon b/rel/i18n/emqx_mgmt_api_nodes.hocon
index e875b0dcc..801cc778e 100644
--- a/rel/i18n/emqx_mgmt_api_nodes.hocon
+++ b/rel/i18n/emqx_mgmt_api_nodes.hocon
@@ -11,12 +11,12 @@ get_node_info.label:
 """Get node info"""

 get_node_metrics.desc:
-"""Get node metrics"""
+"""Get node run-time counter metrics, such as received or sent bytes and messages, and the number of successful or failed authentications or authorizations."""

 get_node_metrics.label:
 """Get node metrics"""

 get_node_stats.desc:
-"""Get node stats"""
+"""Get node run-time stats, such as the number of topics, connections, etc."""

 get_node_stats.label:
 """Get node stats"""
diff --git a/rel/i18n/emqx_mgmt_api_trace.hocon b/rel/i18n/emqx_mgmt_api_trace.hocon
index 4af68a460..a53dabae3 100644
--- a/rel/i18n/emqx_mgmt_api_trace.hocon
+++ b/rel/i18n/emqx_mgmt_api_trace.hocon
@@ -50,15 +50,15 @@ file_size.desc:
 file_size.label:
 """file size"""

-modification_date.desc:
-"""the modification and last access times of a file"""
-modification_date.label:
-"""last access time"""
+file_mtime.desc:
+"""The last time this file was modified."""
+file_mtime.label:
+"""file mtime"""

-format.desc:
-"""Unique and format by [a-zA-Z0-9-_]"""
-format.label:
-"""Unique and format by [a-zA-Z0-9-_]"""
+trace_name.desc:
+"""Unique name of the trace.
+Only ASCII characters a-z, A-Z, 0-9, and the underscore '_' are allowed."""
+trace_name.label:
+"""Unique name of the trace"""

 filter_type.desc:
 """Filter type"""
@@ -66,19 +66,19 @@ filter_type.label:
 """Filter type"""

 support_wildcard.desc:
-"""support mqtt wildcard topic."""
+"""Specify the topic or topic filter if the trace 'type' is 'topic'."""
 support_wildcard.label:
-"""support mqtt wildcard topic"""
+"""MQTT Topic"""

 mqtt_clientid.desc:
-"""mqtt clientid."""
+"""Specify the MQTT clientid if the trace 'type' is 'clientid'."""
 mqtt_clientid.label:
-"""mqtt clientid"""
+"""MQTT clientid"""

 client_ip_addess.desc:
-"""client ip address"""
+"""Specify the client's IP address if the trace type is 'ip_address'."""
 client_ip_addess.label:
-"""client ip address"""
+"""Client IP Address"""

 trace_status.desc:
 """trace status"""
@@ -90,11 +90,6 @@ time_format.desc:
 time_format.label:
 """rfc3339 timestamp or epoch second"""

-time_format_g.desc:
-"""rfc3339 timestamp or epoch second"""
-time_format_g.label:
-"""rfc3339 timestamp or epoch second"""
-
 trace_log_size.desc:
 """trace log size"""
 trace_log_size.label:
diff --git a/scripts/ct/run.sh b/scripts/ct/run.sh
index b24c760f0..135eec919 100755
--- a/scripts/ct/run.sh
+++ b/scripts/ct/run.sh
@@ -264,7 +264,7 @@ fi
 # rebar, mix and hex cache directory need to be writable by $DOCKER_USER
 docker exec -i $TTY -u root:root "$ERLANG_CONTAINER" bash -c "mkdir -p /.cache /.hex /.mix && chown $DOCKER_USER /.cache /.hex /.mix"
 # need to initialize .erlang.cookie manually here because / is not writable by $DOCKER_USER
-docker exec -i $TTY -u root:root "$ERLANG_CONTAINER" bash -c "openssl rand -base64 16 > /.erlang.cookie && chown $DOCKER_USER /.erlang.cookie && chmod 0400 /.erlang.cookie"
+docker exec -i $TTY -u root:root "$ERLANG_CONTAINER" bash -c "openssl rand -base64 -hex 16 > /.erlang.cookie && chown $DOCKER_USER /.erlang.cookie && chmod 0400 /.erlang.cookie"
 # the user must exist inside the container for `whoami` to work
 docker exec -i $TTY -u root:root "$ERLANG_CONTAINER" bash -c "useradd --uid $DOCKER_USER -M -d / emqx" || true
 docker exec -i $TTY -u root:root "$ERLANG_CONTAINER" bash -c "chown -R $DOCKER_USER /var/lib/secret" || true
diff --git a/scripts/spellcheck/dicts/emqx.txt b/scripts/spellcheck/dicts/emqx.txt
index 533269c87..03587aa54 100644
--- a/scripts/spellcheck/dicts/emqx.txt
+++ b/scripts/spellcheck/dicts/emqx.txt
@@ -52,7 +52,9 @@ PINGREQ
 PSK
 PSK
 PSKs
+PUBACK
 PUBREL
+PUBLISH
 QoS
 RESTful
 ROADMAP