From 808237364cad717d1c42d2028a25fc10db750b1d Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Mon, 9 Oct 2023 09:58:20 +0200 Subject: [PATCH 001/155] fix(api-docs): add file-transfer config to hot-config scope --- apps/emqx_management/src/emqx_management.app.src | 2 +- apps/emqx_management/src/emqx_mgmt_api_configs.erl | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/apps/emqx_management/src/emqx_management.app.src b/apps/emqx_management/src/emqx_management.app.src index 1a37ce2ea..3c13a1935 100644 --- a/apps/emqx_management/src/emqx_management.app.src +++ b/apps/emqx_management/src/emqx_management.app.src @@ -2,7 +2,7 @@ {application, emqx_management, [ {description, "EMQX Management API and CLI"}, % strict semver, bump manually! - {vsn, "5.0.31"}, + {vsn, "5.0.32"}, {modules, []}, {registered, [emqx_management_sup]}, {applications, [kernel, stdlib, emqx_plugins, minirest, emqx, emqx_ctl, emqx_bridge_http]}, diff --git a/apps/emqx_management/src/emqx_mgmt_api_configs.erl b/apps/emqx_management/src/emqx_mgmt_api_configs.erl index 29afa9d09..d5879be36 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_configs.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_configs.erl @@ -38,12 +38,21 @@ -define(OPTS, #{rawconf_with_defaults => true, override_to => cluster}). -define(TAGS, ["Configs"]). +-if(?EMQX_RELEASE_EDITION == ee). +-define(ROOT_KEYS_EE, [ + <<"file_transfer">> +]). +-else. +-define(ROOT_KEYS_EE, []). +-endif. + -define(ROOT_KEYS, [ <<"dashboard">>, <<"alarm">>, <<"sys_topics">>, <<"sysmon">>, <<"log">> + | ?ROOT_KEYS_EE ]). 
%% erlfmt-ignore From 2b44349b21323f810d21ef7b44e7afa5fa0862e6 Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Mon, 9 Oct 2023 10:00:24 +0200 Subject: [PATCH 002/155] docs: add changelog for pull request 11731 --- changes/ee/fix-11731.en.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 changes/ee/fix-11731.en.md diff --git a/changes/ee/fix-11731.en.md b/changes/ee/fix-11731.en.md new file mode 100644 index 000000000..5584da337 --- /dev/null +++ b/changes/ee/fix-11731.en.md @@ -0,0 +1 @@ +Add file_transfer feature configs to hot-config schema. From 2626d793a71ecb6b27ec722c7446cb674b1bb56f Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Mon, 9 Oct 2023 12:40:30 +0200 Subject: [PATCH 003/155] test: try to resolve schema module from PROFILE in tests --- apps/emqx_conf/src/emqx_conf.erl | 22 ++++++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/apps/emqx_conf/src/emqx_conf.erl b/apps/emqx_conf/src/emqx_conf.erl index 1efeb4d69..f9e64ffa6 100644 --- a/apps/emqx_conf/src/emqx_conf.erl +++ b/apps/emqx_conf/src/emqx_conf.erl @@ -203,14 +203,32 @@ gen_config_md(Dir, SchemaModule, Lang) -> -spec schema_module() -> module(). schema_module() -> case os:getenv("SCHEMA_MOD") of - false -> emqx_conf_schema; - Value -> list_to_existing_atom(Value) + false -> + resolve_schema_module(); + Value -> + list_to_existing_atom(Value) end. %%-------------------------------------------------------------------- %% Internal functions %%-------------------------------------------------------------------- +-ifdef(TEST). +resolve_schema_module() -> + case os:getenv("PROFILE") of + "emqx" -> + emqx_conf_schema; + "emqx-enterprise" -> + emqx_enterprise_schema; + false -> + error("PROFILE environment variable is not set") + end. +-else. +-spec resolve_schema_module() -> no_return(). +resolve_schema_module() -> + error("SCHEMA_MOD environment variable is not set"). +-endif. + %% @doc Make a resolver function that can be used to lookup the description by hocon_schema_json dump.
make_desc_resolver(Lang) -> fun From 5fff2ffe459cbfe6435a0789942347c193724dd3 Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Tue, 10 Oct 2023 17:18:24 +0700 Subject: [PATCH 004/155] test(kafka): try to stabilize `t_dynamic_mqtt_topic/1` testcase --- .../test/emqx_bridge_kafka_impl_consumer_SUITE.erl | 1 + 1 file changed, 1 insertion(+) diff --git a/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_impl_consumer_SUITE.erl b/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_impl_consumer_SUITE.erl index 60a571b2d..693b59048 100644 --- a/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_impl_consumer_SUITE.erl +++ b/apps/emqx_bridge_kafka/test/emqx_bridge_kafka_impl_consumer_SUITE.erl @@ -1707,6 +1707,7 @@ t_dynamic_mqtt_topic(Config) -> create_bridge(Config) ), wait_until_subscribers_are_ready(NPartitions, 40_000), + ping_until_healthy(Config, _Period = 1_500, _Timeout = 24_000), {ok, C} = emqtt:start_link(), on_exit(fun() -> emqtt:stop(C) end), {ok, _} = emqtt:connect(C), From ec588f94e2e25d99d2ccf3a598454451e112500b Mon Sep 17 00:00:00 2001 From: Thales Macedo Garitezi Date: Tue, 10 Oct 2023 09:50:09 -0300 Subject: [PATCH 005/155] test: attempt to fix flaky http bridge suite ``` =CRASH REPORT==== 10-Oct-2023::05:35:30.911371 === crasher: initial call: ehttpc:init/1 pid: <0.23358.2> registered_name: [] exception error: bad argument in function persistent_term:get/1 called as persistent_term:get({emqx_bridge_http_SUITE, do_t_async_retries,attempts}) *** argument 1: no persistent term stored with this key in call from emqx_bridge_http_SUITE:'-do_t_async_retries/3-fun-2-'/0 (/__w/emqx/emqx/apps/emqx_bridge_http/test/emqx_bridge_http_SUITE.erl, line 697) in call from emqx_bridge_http_SUITE:'-do_t_async_retries/3-fun-4-'/6 (/__w/emqx/emqx/apps/emqx_bridge_http/test/emqx_bridge_http_SUITE.erl, line 705) ``` --- .../test/emqx_bridge_http_SUITE.erl | 26 +++++++++++-------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git 
a/apps/emqx_bridge_http/test/emqx_bridge_http_SUITE.erl b/apps/emqx_bridge_http/test/emqx_bridge_http_SUITE.erl index 5395460b8..6fdd0e0d5 100644 --- a/apps/emqx_bridge_http/test/emqx_bridge_http_SUITE.erl +++ b/apps/emqx_bridge_http/test/emqx_bridge_http_SUITE.erl @@ -429,8 +429,8 @@ t_async_free_retries(Config) -> ), ?assertEqual(ExpectedAttempts, Get(), #{error => Error}) end, - do_t_async_retries(Context, {error, normal}, Fn), - do_t_async_retries(Context, {error, {shutdown, normal}}, Fn), + do_t_async_retries(?FUNCTION_NAME, Context, {error, normal}, Fn), + do_t_async_retries(?FUNCTION_NAME, Context, {error, {shutdown, normal}}, Fn), ok. t_async_common_retries(Config) -> @@ -465,10 +465,12 @@ t_async_common_retries(Config) -> end, %% These two succeed because they're further retried by the buffer %% worker synchronously, and we're not mock that call. - do_t_async_retries(Context, {error, {closed, "The connection was lost."}}, FnSucceed), - do_t_async_retries(Context, {error, {shutdown, closed}}, FnSucceed), + do_t_async_retries( + ?FUNCTION_NAME, Context, {error, {closed, "The connection was lost."}}, FnSucceed + ), + do_t_async_retries(?FUNCTION_NAME, Context, {error, {shutdown, closed}}, FnSucceed), %% This fails because this error is treated as unrecoverable. - do_t_async_retries(Context, {error, something_else}, FnFail), + do_t_async_retries(?FUNCTION_NAME, Context, {error, something_else}, FnFail), ok. t_bad_bridge_config(_Config) -> @@ -688,14 +690,15 @@ t_bridge_probes_header_atoms(Config) -> ok. 
%% helpers -do_t_async_retries(TestContext, Error, Fn) -> +do_t_async_retries(TestCase, TestContext, Error, Fn) -> #{error_attempts := ErrorAttempts} = TestContext, - persistent_term:put({?MODULE, ?FUNCTION_NAME, attempts}, 0), - on_exit(fun() -> persistent_term:erase({?MODULE, ?FUNCTION_NAME, attempts}) end), - Get = fun() -> persistent_term:get({?MODULE, ?FUNCTION_NAME, attempts}) end, + PTKey = {?MODULE, TestCase, attempts}, + persistent_term:put(PTKey, 0), + on_exit(fun() -> persistent_term:erase(PTKey) end), + Get = fun() -> persistent_term:get(PTKey) end, GetAndBump = fun() -> - Attempts = persistent_term:get({?MODULE, ?FUNCTION_NAME, attempts}), - persistent_term:put({?MODULE, ?FUNCTION_NAME, attempts}, Attempts + 1), + Attempts = persistent_term:get(PTKey), + persistent_term:put(PTKey, Attempts + 1), Attempts + 1 end, emqx_common_test_helpers:with_mock( @@ -714,6 +717,7 @@ do_t_async_retries(TestContext, Error, Fn) -> end, fun() -> Fn(Get, Error) end ), + persistent_term:erase(PTKey), ok. 
receive_request_notifications(MessageIDs, _ResponseDelay, _Acc) when map_size(MessageIDs) =:= 0 -> From 574dc2f24382261984d152a99b24149e323dc74c Mon Sep 17 00:00:00 2001 From: zhongwencool Date: Wed, 11 Oct 2023 10:36:33 +0800 Subject: [PATCH 006/155] fix: observer load command crash when loading noexist module --- apps/emqx_modules/src/emqx_modules.app.src | 2 +- apps/emqx_modules/src/emqx_observer_cli.erl | 25 +++++++++++---------- 2 files changed, 14 insertions(+), 13 deletions(-) diff --git a/apps/emqx_modules/src/emqx_modules.app.src b/apps/emqx_modules/src/emqx_modules.app.src index 09a404a44..e986a3fe1 100644 --- a/apps/emqx_modules/src/emqx_modules.app.src +++ b/apps/emqx_modules/src/emqx_modules.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_modules, [ {description, "EMQX Modules"}, - {vsn, "5.0.22"}, + {vsn, "5.0.23"}, {modules, []}, {applications, [kernel, stdlib, emqx, emqx_ctl]}, {mod, {emqx_modules_app, []}}, diff --git a/apps/emqx_modules/src/emqx_observer_cli.erl b/apps/emqx_modules/src/emqx_observer_cli.erl index abed31edc..0be17decb 100644 --- a/apps/emqx_modules/src/emqx_observer_cli.erl +++ b/apps/emqx_modules/src/emqx_observer_cli.erl @@ -40,10 +40,18 @@ cmd(["bin_leak"]) -> recon:bin_leak(100) ); cmd(["load", Mod]) -> - Module = list_to_existing_atom(Mod), - Nodes = nodes(), - Res = remote_load(Nodes, Module), - emqx_ctl:print("Loaded ~p module on ~p: ~p~n", [Module, Nodes, Res]); + case nodes() of + [] -> + emqx_ctl:print("No other nodes in the cluster~n"); + Nodes -> + case emqx_utils:safe_to_existing_atom(Mod) of + {ok, Module} -> + Res = recon:remote_load(Nodes, Module), + emqx_ctl:print("Loaded ~p module on ~p: ~p~n", [Module, Nodes, Res]); + {error, Reason} -> + emqx_ctl:print("Module(~s) not found: ~p~n", [Mod, Reason]) + end + end; cmd(_) -> emqx_ctl:usage([ {"observer status", "Start observer in the current console"}, @@ -51,12 +59,5 @@ cmd(_) -> "Force all processes to perform garbage collection " "and prints the 
"top-100 processes that freed the " "biggest amount of binaries, potentially highlighting leaks."}, - {"observer load Mod", "Ensure a module is loaded in all EMQX nodes in the cluster"} + {"observer load Mod", "Ensure a module is loaded in all EMQX nodes in the cluster"} ]). - -%% recon:remote_load/1 has a bug, when nodes() returns [], it is -%% taken by recon as a node name. -%% before OTP 23, the call returns a 'badrpc' tuple -%% after OTP 23, it crashes with 'badarg' error -remote_load([], _Module) -> ok; -remote_load(Nodes, Module) -> recon:remote_load(Nodes, Module). From 8e7ba16c3ad8a185c942108a5682880e5d23c347 Mon Sep 17 00:00:00 2001 From: zhongwencool Date: Wed, 11 Oct 2023 11:21:49 +0800 Subject: [PATCH 007/155] fix: nothing show when run clients list command --- apps/emqx_management/src/emqx_mgmt_cli.erl | 58 +++++++++++++++------- 1 file changed, 39 insertions(+), 19 deletions(-) diff --git a/apps/emqx_management/src/emqx_mgmt_cli.erl b/apps/emqx_management/src/emqx_mgmt_cli.erl index 15f05da04..866efb267 100644 --- a/apps/emqx_management/src/emqx_mgmt_cli.erl +++ b/apps/emqx_management/src/emqx_mgmt_cli.erl @@ -157,7 +157,10 @@ sort_map_list_field(Field, Map) -> %% @doc Query clients clients(["list"]) -> - dump(?CHAN_TAB, client); + case ets:info(?CHAN_TAB, size) of + 0 -> emqx_ctl:print("No clients.~n"); + _ -> dump(?CHAN_TAB, client) + end; clients(["show", ClientId]) -> if_client(ClientId, fun print/1); clients(["kick", ClientId]) -> @@ -180,10 +183,15 @@ if_client(ClientId, Fun) -> %% @doc Topics Command topics(["list"]) -> - emqx_router:foldr_routes( - fun(Route, Acc) -> [print({emqx_topic, Route}) | Acc] end, - [] - ); + Res = + emqx_router:foldr_routes( + fun(Route, Acc) -> [print({emqx_topic, Route}) | Acc] end, + [] + ), + case Res of + [] -> emqx_ctl:print("No topics.~n"); + _ -> ok + end; topics(["show", Topic]) -> Routes = emqx_router:lookup_routes(Topic), [print({emqx_topic, Route}) || Route <- Routes]; topics(_) -> emqx_ctl:usage([ ]).
subscriptions(["list"]) -> - lists:foreach( - fun(Suboption) -> - print({?SUBOPTION, Suboption}) - end, - ets:tab2list(?SUBOPTION) - ); + case ets:info(?SUBOPTION, size) of + 0 -> + emqx_ctl:print("No subscriptions.~n"); + _ -> + lists:foreach( + fun(SubOption) -> + print({?SUBOPTION, SubOption}) + end, + ets:tab2list(?SUBOPTION) + ) + end; subscriptions(["show", ClientId]) -> case ets:lookup(emqx_subid, bin(ClientId)) of [] -> @@ -207,7 +220,7 @@ subscriptions(["show", ClientId]) -> [{_, Pid}] -> case ets:match_object(?SUBOPTION, {{'_', Pid}, '_'}) of [] -> emqx_ctl:print("Not Found.~n"); - Suboption -> [print({?SUBOPTION, Sub}) || Sub <- Suboption] + SubOption -> [print({?SUBOPTION, Sub}) || Sub <- SubOption] end end; subscriptions(["add", ClientId, Topic, QoS]) -> @@ -446,13 +459,20 @@ log(_) -> %% @doc Trace Command trace(["list"]) -> - lists:foreach( - fun(Trace) -> - #{type := Type, filter := Filter, level := Level, dst := Dst} = Trace, - emqx_ctl:print("Trace(~s=~s, level=~s, destination=~0p)~n", [Type, Filter, Level, Dst]) - end, - emqx_trace_handler:running() - ); + case emqx_trace_handler:running() of + [] -> + emqx_ctl:print("Trace is empty~n", []); + Traces -> + lists:foreach( + fun(Trace) -> + #{type := Type, filter := Filter, level := Level, dst := Dst} = Trace, + emqx_ctl:print("Trace(~s=~s, level=~s, destination=~0p)~n", [ + Type, Filter, Level, Dst + ]) + end, + Traces + ) + end; trace(["stop", Operation, Filter0]) -> case trace_type(Operation, Filter0) of {ok, Type, Filter} -> trace_off(Type, Filter); From f4a88f717bac8b9c331f3cf5b9da5347a603a526 Mon Sep 17 00:00:00 2001 From: zhongwencool Date: Wed, 11 Oct 2023 11:47:37 +0800 Subject: [PATCH 008/155] feat: support retainer Start Length command --- apps/emqx_retainer/src/emqx_retainer.app.src | 2 +- .../src/emqx_retainer_mnesia_cli.erl | 20 +++++++++++++++---- .../test/emqx_retainer_cli_SUITE.erl | 3 +++ 3 files changed, 20 insertions(+), 5 deletions(-) diff --git 
a/apps/emqx_retainer/src/emqx_retainer.app.src b/apps/emqx_retainer/src/emqx_retainer.app.src index 8f7c9aa17..cab070826 100644 --- a/apps/emqx_retainer/src/emqx_retainer.app.src +++ b/apps/emqx_retainer/src/emqx_retainer.app.src @@ -2,7 +2,7 @@ {application, emqx_retainer, [ {description, "EMQX Retainer"}, % strict semver, bump manually! - {vsn, "5.0.17"}, + {vsn, "5.0.18"}, {modules, []}, {registered, [emqx_retainer_sup]}, {applications, [kernel, stdlib, emqx, emqx_ctl]}, diff --git a/apps/emqx_retainer/src/emqx_retainer_mnesia_cli.erl b/apps/emqx_retainer/src/emqx_retainer_mnesia_cli.erl index 5710e4df3..9c1f507cb 100644 --- a/apps/emqx_retainer/src/emqx_retainer_mnesia_cli.erl +++ b/apps/emqx_retainer/src/emqx_retainer_mnesia_cli.erl @@ -32,10 +32,11 @@ load() -> ok = emqx_ctl:register_command(retainer, {?MODULE, retainer}, []). retainer(["info"]) -> - ?PRINT("Number of retained messages: ~p~n", [emqx_retainer:retained_count()]); + count(); retainer(["topics"]) -> - [?PRINT("~ts~n", [I]) || I <- emqx_retainer_mnesia:topics()], - ok; + topic(1, 1000); +retainer(["topics", Start, Len]) -> + topic(list_to_integer(Start), list_to_integer(Len)); retainer(["clean", Topic]) -> emqx_retainer:delete(list_to_binary(Topic)); retainer(["clean"]) -> @@ -65,7 +66,9 @@ retainer(_) -> emqx_ctl:usage( [ {"retainer info", "Show the count of retained messages"}, - {"retainer topics", "Show all topics of retained messages"}, + {"retainer topics", "Same as retainer topic 1 1000"}, + {"retainer topics ", + "Show topics of retained messages by the specified range"}, {"retainer clean", "Clean all retained messages"}, {"retainer clean ", "Clean retained messages by the specified topic filter"}, {"retainer reindex status", "Show reindex status"}, @@ -98,3 +101,12 @@ do_reindex(Force) -> end ), ?PRINT_MSG("Reindexing finished~n"). + +count() -> + ?PRINT("Number of retained messages: ~p~n", [emqx_retainer:retained_count()]). 
+ +topic(Start, Len) -> + count(), + Topics = lists:sublist(emqx_retainer_mnesia:topics(), Start, Len), + [?PRINT("~ts~n", [I]) || I <- Topics], + ok. diff --git a/apps/emqx_retainer/test/emqx_retainer_cli_SUITE.erl b/apps/emqx_retainer/test/emqx_retainer_cli_SUITE.erl index bddad5fb3..c04f7a6de 100644 --- a/apps/emqx_retainer/test/emqx_retainer_cli_SUITE.erl +++ b/apps/emqx_retainer/test/emqx_retainer_cli_SUITE.erl @@ -44,6 +44,9 @@ t_info(_Config) -> t_topics(_Config) -> ok = emqx_retainer_mnesia_cli:retainer(["topics"]). +t_topics_with_len(_Config) -> + ok = emqx_retainer_mnesia_cli:retainer(["topics", "100", "200"]). + t_clean(_Config) -> ok = emqx_retainer_mnesia_cli:retainer(["clean"]). From 7212e8aceeaeb5dd0d11fe51672f5d1c2c56c732 Mon Sep 17 00:00:00 2001 From: Ivan Dyachkov Date: Wed, 11 Oct 2023 09:59:27 +0200 Subject: [PATCH 009/155] chore(ci): add ossf scorecard analysis workflow --- .github/workflows/scorecard.yaml | 51 ++++++++++++++++++++++++++++++++ 1 file changed, 51 insertions(+) create mode 100644 .github/workflows/scorecard.yaml diff --git a/.github/workflows/scorecard.yaml b/.github/workflows/scorecard.yaml new file mode 100644 index 000000000..29f6610ce --- /dev/null +++ b/.github/workflows/scorecard.yaml @@ -0,0 +1,51 @@ +name: Scorecard supply-chain security +on: + # For Branch-Protection check. Only the default branch is supported. See + # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection + branch_protection_rule: + # To guarantee Maintained check is occasionally updated. 
See + # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained + schedule: + - cron: '25 21 * * 6' + push: + branches: [ "master" ] + workflow_dispatch: + +permissions: read-all + +jobs: + analysis: + name: Scorecard analysis + runs-on: ubuntu-latest + permissions: + security-events: write + id-token: write + + steps: + - name: "Checkout code" + uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 + with: + persist-credentials: false + + - name: "Run analysis" + uses: ossf/scorecard-action@483ef80eb98fb506c348f7d62e28055e49fe2398 # v2.3.0 + with: + results_file: results.sarif + results_format: sarif + # - Publish results to OpenSSF REST API for easy access by consumers + # - Allows the repository to include the Scorecard badge. + # - See https://github.com/ossf/scorecard-action#publishing-results. + publish_results: true + + - name: "Upload artifact" + uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # v3.1.3 + with: + name: SARIF file + path: results.sarif + retention-days: 5 + + # Upload the results to GitHub's code scanning dashboard. 
+ - name: "Upload to code-scanning" + uses: github/codeql-action/upload-sarif@8e0b1c74b1d5a0077b04d064c76ee714d3da7637 # v2.22.1 + with: + sarif_file: results.sarif From cc392cadb80f6348bd82caf6bd6bdd6b19f51248 Mon Sep 17 00:00:00 2001 From: zhongwencool Date: Wed, 11 Oct 2023 18:40:09 +0800 Subject: [PATCH 010/155] chore: update apps/emqx_retainer/src/emqx_retainer_mnesia_cli.erl Co-authored-by: Zaiming (Stone) Shi --- apps/emqx_retainer/src/emqx_retainer_mnesia_cli.erl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/emqx_retainer/src/emqx_retainer_mnesia_cli.erl b/apps/emqx_retainer/src/emqx_retainer_mnesia_cli.erl index 9c1f507cb..3fef4c8b0 100644 --- a/apps/emqx_retainer/src/emqx_retainer_mnesia_cli.erl +++ b/apps/emqx_retainer/src/emqx_retainer_mnesia_cli.erl @@ -67,7 +67,7 @@ retainer(_) -> [ {"retainer info", "Show the count of retained messages"}, {"retainer topics", "Same as retainer topic 1 1000"}, - {"retainer topics ", + {"retainer topics ", "Show topics of retained messages by the specified range"}, {"retainer clean", "Clean all retained messages"}, {"retainer clean ", "Clean retained messages by the specified topic filter"}, From e069680bca27e2c9b005d638825f5aa4cb09cb84 Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Wed, 11 Oct 2023 15:41:35 +0200 Subject: [PATCH 011/155] refactor: move emqx_license app from lib-ee to apps --- {lib-ee => apps}/emqx_license/.gitignore | 0 {lib-ee => apps/emqx_license}/BSL.txt | 0 {lib-ee => apps}/emqx_license/README.md | 0 {lib-ee => apps}/emqx_license/etc/emqx_license.conf | 0 {lib-ee => apps}/emqx_license/include/emqx_license.hrl | 0 {lib-ee => apps}/emqx_license/rebar.config | 0 {lib-ee => apps}/emqx_license/src/emqx_license.app.src | 0 {lib-ee => apps}/emqx_license/src/emqx_license.erl | 0 {lib-ee => apps}/emqx_license/src/emqx_license_app.erl | 0 {lib-ee => apps}/emqx_license/src/emqx_license_checker.erl | 0 {lib-ee => apps}/emqx_license/src/emqx_license_cli.erl | 0 {lib-ee => 
apps}/emqx_license/src/emqx_license_http_api.erl | 0 {lib-ee => apps}/emqx_license/src/emqx_license_parser.erl | 0 .../emqx_license/src/emqx_license_parser_v20220101.erl | 0 {lib-ee => apps}/emqx_license/src/emqx_license_resources.erl | 0 {lib-ee => apps}/emqx_license/src/emqx_license_schema.erl | 0 {lib-ee => apps}/emqx_license/src/emqx_license_sup.erl | 0 {lib-ee => apps}/emqx_license/src/proto/emqx_license_proto_v1.erl | 0 {lib-ee => apps}/emqx_license/src/proto/emqx_license_proto_v2.erl | 0 {lib-ee => apps}/emqx_license/test/data/emqx.lic | 0 {lib-ee => apps}/emqx_license/test/data/pub.pem | 0 {lib-ee => apps}/emqx_license/test/data/pvt.key | 0 {lib-ee => apps}/emqx_license/test/emqx_license_SUITE.erl | 0 {lib-ee => apps}/emqx_license/test/emqx_license_checker_SUITE.erl | 0 {lib-ee => apps}/emqx_license/test/emqx_license_cli_SUITE.erl | 0 .../emqx_license/test/emqx_license_http_api_SUITE.erl | 0 {lib-ee => apps}/emqx_license/test/emqx_license_parser_SUITE.erl | 0 .../emqx_license/test/emqx_license_resources_SUITE.erl | 0 {lib-ee => apps}/emqx_license/test/emqx_license_test_lib.erl | 0 29 files changed, 0 insertions(+), 0 deletions(-) rename {lib-ee => apps}/emqx_license/.gitignore (100%) rename {lib-ee => apps/emqx_license}/BSL.txt (100%) rename {lib-ee => apps}/emqx_license/README.md (100%) rename {lib-ee => apps}/emqx_license/etc/emqx_license.conf (100%) rename {lib-ee => apps}/emqx_license/include/emqx_license.hrl (100%) rename {lib-ee => apps}/emqx_license/rebar.config (100%) rename {lib-ee => apps}/emqx_license/src/emqx_license.app.src (100%) rename {lib-ee => apps}/emqx_license/src/emqx_license.erl (100%) rename {lib-ee => apps}/emqx_license/src/emqx_license_app.erl (100%) rename {lib-ee => apps}/emqx_license/src/emqx_license_checker.erl (100%) rename {lib-ee => apps}/emqx_license/src/emqx_license_cli.erl (100%) rename {lib-ee => apps}/emqx_license/src/emqx_license_http_api.erl (100%) rename {lib-ee => apps}/emqx_license/src/emqx_license_parser.erl (100%) 
rename {lib-ee => apps}/emqx_license/src/emqx_license_parser_v20220101.erl (100%) rename {lib-ee => apps}/emqx_license/src/emqx_license_resources.erl (100%) rename {lib-ee => apps}/emqx_license/src/emqx_license_schema.erl (100%) rename {lib-ee => apps}/emqx_license/src/emqx_license_sup.erl (100%) rename {lib-ee => apps}/emqx_license/src/proto/emqx_license_proto_v1.erl (100%) rename {lib-ee => apps}/emqx_license/src/proto/emqx_license_proto_v2.erl (100%) rename {lib-ee => apps}/emqx_license/test/data/emqx.lic (100%) rename {lib-ee => apps}/emqx_license/test/data/pub.pem (100%) rename {lib-ee => apps}/emqx_license/test/data/pvt.key (100%) rename {lib-ee => apps}/emqx_license/test/emqx_license_SUITE.erl (100%) rename {lib-ee => apps}/emqx_license/test/emqx_license_checker_SUITE.erl (100%) rename {lib-ee => apps}/emqx_license/test/emqx_license_cli_SUITE.erl (100%) rename {lib-ee => apps}/emqx_license/test/emqx_license_http_api_SUITE.erl (100%) rename {lib-ee => apps}/emqx_license/test/emqx_license_parser_SUITE.erl (100%) rename {lib-ee => apps}/emqx_license/test/emqx_license_resources_SUITE.erl (100%) rename {lib-ee => apps}/emqx_license/test/emqx_license_test_lib.erl (100%) diff --git a/lib-ee/emqx_license/.gitignore b/apps/emqx_license/.gitignore similarity index 100% rename from lib-ee/emqx_license/.gitignore rename to apps/emqx_license/.gitignore diff --git a/lib-ee/BSL.txt b/apps/emqx_license/BSL.txt similarity index 100% rename from lib-ee/BSL.txt rename to apps/emqx_license/BSL.txt diff --git a/lib-ee/emqx_license/README.md b/apps/emqx_license/README.md similarity index 100% rename from lib-ee/emqx_license/README.md rename to apps/emqx_license/README.md diff --git a/lib-ee/emqx_license/etc/emqx_license.conf b/apps/emqx_license/etc/emqx_license.conf similarity index 100% rename from lib-ee/emqx_license/etc/emqx_license.conf rename to apps/emqx_license/etc/emqx_license.conf diff --git a/lib-ee/emqx_license/include/emqx_license.hrl 
b/apps/emqx_license/include/emqx_license.hrl similarity index 100% rename from lib-ee/emqx_license/include/emqx_license.hrl rename to apps/emqx_license/include/emqx_license.hrl diff --git a/lib-ee/emqx_license/rebar.config b/apps/emqx_license/rebar.config similarity index 100% rename from lib-ee/emqx_license/rebar.config rename to apps/emqx_license/rebar.config diff --git a/lib-ee/emqx_license/src/emqx_license.app.src b/apps/emqx_license/src/emqx_license.app.src similarity index 100% rename from lib-ee/emqx_license/src/emqx_license.app.src rename to apps/emqx_license/src/emqx_license.app.src diff --git a/lib-ee/emqx_license/src/emqx_license.erl b/apps/emqx_license/src/emqx_license.erl similarity index 100% rename from lib-ee/emqx_license/src/emqx_license.erl rename to apps/emqx_license/src/emqx_license.erl diff --git a/lib-ee/emqx_license/src/emqx_license_app.erl b/apps/emqx_license/src/emqx_license_app.erl similarity index 100% rename from lib-ee/emqx_license/src/emqx_license_app.erl rename to apps/emqx_license/src/emqx_license_app.erl diff --git a/lib-ee/emqx_license/src/emqx_license_checker.erl b/apps/emqx_license/src/emqx_license_checker.erl similarity index 100% rename from lib-ee/emqx_license/src/emqx_license_checker.erl rename to apps/emqx_license/src/emqx_license_checker.erl diff --git a/lib-ee/emqx_license/src/emqx_license_cli.erl b/apps/emqx_license/src/emqx_license_cli.erl similarity index 100% rename from lib-ee/emqx_license/src/emqx_license_cli.erl rename to apps/emqx_license/src/emqx_license_cli.erl diff --git a/lib-ee/emqx_license/src/emqx_license_http_api.erl b/apps/emqx_license/src/emqx_license_http_api.erl similarity index 100% rename from lib-ee/emqx_license/src/emqx_license_http_api.erl rename to apps/emqx_license/src/emqx_license_http_api.erl diff --git a/lib-ee/emqx_license/src/emqx_license_parser.erl b/apps/emqx_license/src/emqx_license_parser.erl similarity index 100% rename from lib-ee/emqx_license/src/emqx_license_parser.erl rename to 
apps/emqx_license/src/emqx_license_parser.erl diff --git a/lib-ee/emqx_license/src/emqx_license_parser_v20220101.erl b/apps/emqx_license/src/emqx_license_parser_v20220101.erl similarity index 100% rename from lib-ee/emqx_license/src/emqx_license_parser_v20220101.erl rename to apps/emqx_license/src/emqx_license_parser_v20220101.erl diff --git a/lib-ee/emqx_license/src/emqx_license_resources.erl b/apps/emqx_license/src/emqx_license_resources.erl similarity index 100% rename from lib-ee/emqx_license/src/emqx_license_resources.erl rename to apps/emqx_license/src/emqx_license_resources.erl diff --git a/lib-ee/emqx_license/src/emqx_license_schema.erl b/apps/emqx_license/src/emqx_license_schema.erl similarity index 100% rename from lib-ee/emqx_license/src/emqx_license_schema.erl rename to apps/emqx_license/src/emqx_license_schema.erl diff --git a/lib-ee/emqx_license/src/emqx_license_sup.erl b/apps/emqx_license/src/emqx_license_sup.erl similarity index 100% rename from lib-ee/emqx_license/src/emqx_license_sup.erl rename to apps/emqx_license/src/emqx_license_sup.erl diff --git a/lib-ee/emqx_license/src/proto/emqx_license_proto_v1.erl b/apps/emqx_license/src/proto/emqx_license_proto_v1.erl similarity index 100% rename from lib-ee/emqx_license/src/proto/emqx_license_proto_v1.erl rename to apps/emqx_license/src/proto/emqx_license_proto_v1.erl diff --git a/lib-ee/emqx_license/src/proto/emqx_license_proto_v2.erl b/apps/emqx_license/src/proto/emqx_license_proto_v2.erl similarity index 100% rename from lib-ee/emqx_license/src/proto/emqx_license_proto_v2.erl rename to apps/emqx_license/src/proto/emqx_license_proto_v2.erl diff --git a/lib-ee/emqx_license/test/data/emqx.lic b/apps/emqx_license/test/data/emqx.lic similarity index 100% rename from lib-ee/emqx_license/test/data/emqx.lic rename to apps/emqx_license/test/data/emqx.lic diff --git a/lib-ee/emqx_license/test/data/pub.pem b/apps/emqx_license/test/data/pub.pem similarity index 100% rename from 
lib-ee/emqx_license/test/data/pub.pem rename to apps/emqx_license/test/data/pub.pem diff --git a/lib-ee/emqx_license/test/data/pvt.key b/apps/emqx_license/test/data/pvt.key similarity index 100% rename from lib-ee/emqx_license/test/data/pvt.key rename to apps/emqx_license/test/data/pvt.key diff --git a/lib-ee/emqx_license/test/emqx_license_SUITE.erl b/apps/emqx_license/test/emqx_license_SUITE.erl similarity index 100% rename from lib-ee/emqx_license/test/emqx_license_SUITE.erl rename to apps/emqx_license/test/emqx_license_SUITE.erl diff --git a/lib-ee/emqx_license/test/emqx_license_checker_SUITE.erl b/apps/emqx_license/test/emqx_license_checker_SUITE.erl similarity index 100% rename from lib-ee/emqx_license/test/emqx_license_checker_SUITE.erl rename to apps/emqx_license/test/emqx_license_checker_SUITE.erl diff --git a/lib-ee/emqx_license/test/emqx_license_cli_SUITE.erl b/apps/emqx_license/test/emqx_license_cli_SUITE.erl similarity index 100% rename from lib-ee/emqx_license/test/emqx_license_cli_SUITE.erl rename to apps/emqx_license/test/emqx_license_cli_SUITE.erl diff --git a/lib-ee/emqx_license/test/emqx_license_http_api_SUITE.erl b/apps/emqx_license/test/emqx_license_http_api_SUITE.erl similarity index 100% rename from lib-ee/emqx_license/test/emqx_license_http_api_SUITE.erl rename to apps/emqx_license/test/emqx_license_http_api_SUITE.erl diff --git a/lib-ee/emqx_license/test/emqx_license_parser_SUITE.erl b/apps/emqx_license/test/emqx_license_parser_SUITE.erl similarity index 100% rename from lib-ee/emqx_license/test/emqx_license_parser_SUITE.erl rename to apps/emqx_license/test/emqx_license_parser_SUITE.erl diff --git a/lib-ee/emqx_license/test/emqx_license_resources_SUITE.erl b/apps/emqx_license/test/emqx_license_resources_SUITE.erl similarity index 100% rename from lib-ee/emqx_license/test/emqx_license_resources_SUITE.erl rename to apps/emqx_license/test/emqx_license_resources_SUITE.erl diff --git a/lib-ee/emqx_license/test/emqx_license_test_lib.erl 
b/apps/emqx_license/test/emqx_license_test_lib.erl similarity index 100% rename from lib-ee/emqx_license/test/emqx_license_test_lib.erl rename to apps/emqx_license/test/emqx_license_test_lib.erl From 534c9bdc133f7274c4ca94ff8098b8fb1db44352 Mon Sep 17 00:00:00 2001 From: Ilya Averyanov Date: Tue, 29 Aug 2023 22:54:14 +0300 Subject: [PATCH 012/155] feat(ft): add additional operation status report channel --- apps/emqx_ft/src/emqx_ft.erl | 243 +++++++++----- apps/emqx_ft/src/emqx_ft_assembler.erl | 16 +- apps/emqx_ft/src/emqx_ft_async_reply.erl | 28 +- apps/emqx_ft/src/emqx_ft_error.erl | 39 +++ apps/emqx_ft/src/emqx_ft_schema.erl | 37 ++- apps/emqx_ft/src/emqx_ft_storage_exporter.erl | 2 +- apps/emqx_ft/src/emqx_ft_storage_fs.erl | 2 +- apps/emqx_ft/test/emqx_ft_SUITE.erl | 303 ++++++++++++------ apps/emqx_ft/test/emqx_ft_api_SUITE.erl | 6 +- .../test/emqx_ft_async_reply_SUITE.erl | 16 +- apps/emqx_ft/test/emqx_ft_conf_SUITE.erl | 4 +- apps/emqx_ft/test/emqx_ft_request_SUITE.erl | 113 +++++++ .../emqx_ft_storage_exporter_s3_SUITE.erl | 32 +- .../emqx_ft/test/emqx_ft_storage_fs_SUITE.erl | 4 +- .../test/emqx_ft_storage_fs_reader_SUITE.erl | 15 +- apps/emqx_ft/test/emqx_ft_test_helpers.erl | 88 +++-- changes/ee/feat-11541.en.md | 3 + 17 files changed, 678 insertions(+), 273 deletions(-) create mode 100644 apps/emqx_ft/src/emqx_ft_error.erl create mode 100644 apps/emqx_ft/test/emqx_ft_request_SUITE.erl create mode 100644 changes/ee/feat-11541.en.md diff --git a/apps/emqx_ft/src/emqx_ft.erl b/apps/emqx_ft/src/emqx_ft.erl index 41020e76f..c886b86bd 100644 --- a/apps/emqx_ft/src/emqx_ft.erl +++ b/apps/emqx_ft/src/emqx_ft.erl @@ -90,6 +90,10 @@ -define(FT_EVENT(EVENT), {?MODULE, EVENT}). +-define(ACK_AND_PUBLISH(Result), {true, Result}). +-define(ACK(Result), {false, Result}). +-define(DELAY_ACK, delay). 
+ %%-------------------------------------------------------------------- %% API for app %%-------------------------------------------------------------------- @@ -116,46 +120,34 @@ unhook() -> %% API %%-------------------------------------------------------------------- -decode_filemeta(Payload) when is_binary(Payload) -> - case emqx_utils_json:safe_decode(Payload, [return_maps]) of - {ok, Map} -> - decode_filemeta(Map); - {error, Error} -> - {error, {invalid_filemeta_json, Error}} - end; -decode_filemeta(Map) when is_map(Map) -> - Schema = emqx_ft_schema:schema(filemeta), - try - Meta = hocon_tconf:check_plain(Schema, Map, #{atom_key => true, required => false}), - {ok, Meta} - catch - throw:{_Schema, Errors} -> - {error, {invalid_filemeta, Errors}} - end. +decode_filemeta(Payload) -> + emqx_ft_schema:decode(filemeta, Payload). encode_filemeta(Meta = #{}) -> - Schema = emqx_ft_schema:schema(filemeta), - hocon_tconf:make_serializable(Schema, emqx_utils_maps:binary_key_map(Meta), #{}). + emqx_ft_schema:encode(filemeta, Meta). + +encode_response(Response) -> + emqx_ft_schema:encode(command_response, Response). %%-------------------------------------------------------------------- %% Hooks %%-------------------------------------------------------------------- -on_message_publish( - Msg = #message{ - id = _Id, - topic = <<"$file/", _/binary>> - } -) -> +on_message_publish(Msg = #message{topic = <<"$file-async/", _/binary>>}) -> + Headers = Msg#message.headers, + {stop, Msg#message{headers = Headers#{allow_publish => false}}}; +on_message_publish(Msg = #message{topic = <<"$file/", _/binary>>}) -> Headers = Msg#message.headers, {stop, Msg#message{headers = Headers#{allow_publish => false}}}; on_message_publish(Msg) -> {ok, Msg}. 
-on_message_puback(PacketId, #message{topic = Topic} = Msg, _PubRes, _RC) -> +on_message_puback(PacketId, #message{from = From, topic = Topic} = Msg, _PubRes, _RC) -> case Topic of - <<"$file/", FileCommand/binary>> -> - {stop, on_file_command(PacketId, Msg, FileCommand)}; + <<"$file/", _/binary>> -> + {stop, on_file_command(sync, From, PacketId, Msg, Topic)}; + <<"$file-async/", _/binary>> -> + {stop, on_file_command(async, From, PacketId, Msg, Topic)}; _ -> ignore end. @@ -163,18 +155,33 @@ on_message_puback(PacketId, #message{topic = Topic} = Msg, _PubRes, _RC) -> on_channel_unregistered(ChannelPid) -> ok = emqx_ft_async_reply:deregister_all(ChannelPid). -on_client_timeout(_TRef, ?FT_EVENT({MRef, PacketId}), Acc) -> +on_client_timeout(_TRef0, ?FT_EVENT({MRef, TopicReplyData}), Acc) -> _ = erlang:demonitor(MRef, [flush]), - _ = emqx_ft_async_reply:take_by_mref(MRef), - {stop, [?REPLY_OUTGOING(?PUBACK_PACKET(PacketId, ?RC_UNSPECIFIED_ERROR)) | Acc]}; + Result = {error, timeout}, + _ = publish_response(Result, TopicReplyData), + case emqx_ft_async_reply:take_by_mref(MRef) of + {ok, undefined, _TRef1, _TopicReplyData} -> + {stop, Acc}; + {ok, PacketId, _TRef1, _TopicReplyData} -> + {stop, [?REPLY_OUTGOING(?PUBACK_PACKET(PacketId, result_to_rc(Result))) | Acc]}; + not_found -> + {ok, Acc} + end; on_client_timeout(_TRef, _Event, Acc) -> {ok, Acc}. -on_process_down(MRef, _Pid, Reason, Acc) -> +on_process_down(MRef, _Pid, DownReason, Acc) -> case emqx_ft_async_reply:take_by_mref(MRef) of - {ok, PacketId, TRef} -> + {ok, PacketId, TRef, TopicReplyData} -> _ = emqx_utils:cancel_timer(TRef), - {stop, [?REPLY_OUTGOING(?PUBACK_PACKET(PacketId, reason_to_rc(Reason))) | Acc]}; + Result = down_reason_to_result(DownReason), + _ = publish_response(Result, TopicReplyData), + case PacketId of + undefined -> + {stop, Acc}; + _ -> + {stop, [?REPLY_OUTGOING(?PUBACK_PACKET(PacketId, result_to_rc(Result))) | Acc]} + end; not_found -> {ok, Acc} end. 
@@ -185,24 +192,27 @@ on_process_down(MRef, _Pid, Reason, Acc) -> %% TODO Move to emqx_ft_mqtt? -on_file_command(PacketId, Msg, FileCommand) -> - case emqx_topic:tokens(FileCommand) of - [FileIdIn | Rest] -> - validate([{fileid, FileIdIn}], fun([FileId]) -> - on_file_command(PacketId, FileId, Msg, Rest) - end); - [] -> - ?RC_UNSPECIFIED_ERROR - end. +on_file_command(Mode, From, PacketId, Msg, Topic) -> + TopicReplyData = topic_reply_data(Mode, From, PacketId, Msg), + Result = + case emqx_topic:tokens(Topic) of + [_FTPrefix, FileIdIn | Rest] -> + validate([{fileid, FileIdIn}], fun([FileId]) -> + do_on_file_command(TopicReplyData, FileId, Msg, Rest) + end); + [] -> + ?ACK_AND_PUBLISH({error, {invalid_topic, Topic}}) + end, + maybe_publish_response(Result, TopicReplyData). -on_file_command(PacketId, FileId, Msg, FileCommand) -> +do_on_file_command(TopicReplyData, FileId, Msg, FileCommand) -> Transfer = transfer(Msg, FileId), case FileCommand of [<<"init">>] -> validate( [{filemeta, Msg#message.payload}], fun([Meta]) -> - on_init(PacketId, Msg, Transfer, Meta) + on_init(TopicReplyData, Msg, Transfer, Meta) end ); [<<"fin">>, FinalSizeBin | MaybeChecksum] when length(MaybeChecksum) =< 1 -> @@ -210,14 +220,14 @@ on_file_command(PacketId, FileId, Msg, FileCommand) -> validate( [{size, FinalSizeBin}, {{maybe, checksum}, ChecksumBin}], fun([FinalSize, FinalChecksum]) -> - on_fin(PacketId, Msg, Transfer, FinalSize, FinalChecksum) + on_fin(TopicReplyData, Msg, Transfer, FinalSize, FinalChecksum) end ); [<<"abort">>] -> - on_abort(Msg, Transfer); + on_abort(TopicReplyData, Msg, Transfer); [OffsetBin] -> validate([{offset, OffsetBin}], fun([Offset]) -> - on_segment(PacketId, Msg, Transfer, Offset, undefined) + on_segment(TopicReplyData, Msg, Transfer, Offset, undefined) end); [OffsetBin, ChecksumBin] -> validate( @@ -226,16 +236,16 @@ on_file_command(PacketId, FileId, Msg, FileCommand) -> validate( [{integrity, Msg#message.payload, Checksum}], fun(_) -> - on_segment(PacketId, 
Msg, Transfer, Offset, Checksum) + on_segment(TopicReplyData, Msg, Transfer, Offset, Checksum) end ) end ); _ -> - ?RC_UNSPECIFIED_ERROR + ?ACK_AND_PUBLISH({error, {invalid_file_command, FileCommand}}) end. -on_init(PacketId, Msg, Transfer, Meta) -> +on_init(#{packet_id := PacketId}, Msg, Transfer, Meta) -> ?tp(info, "file_transfer_init", #{ mqtt_msg => Msg, packet_id => PacketId, @@ -245,16 +255,13 @@ on_init(PacketId, Msg, Transfer, Meta) -> %% Currently synchronous. %% If we want to make it async, we need to use `emqx_ft_async_reply`, %% like in `on_fin`. - case store_filemeta(Transfer, Meta) of - ok -> ?RC_SUCCESS; - {error, _} -> ?RC_UNSPECIFIED_ERROR - end. + ?ACK_AND_PUBLISH(store_filemeta(Transfer, Meta)). -on_abort(_Msg, _FileId) -> +on_abort(_TopicReplyData, _Msg, _FileId) -> %% TODO - ?RC_SUCCESS. + ?ACK_AND_PUBLISH(ok). -on_segment(PacketId, Msg, Transfer, Offset, Checksum) -> +on_segment(#{packet_id := PacketId}, Msg, Transfer, Offset, Checksum) -> ?tp(info, "file_transfer_segment", #{ mqtt_msg => Msg, packet_id => PacketId, @@ -266,12 +273,9 @@ on_segment(PacketId, Msg, Transfer, Offset, Checksum) -> %% Currently synchronous. %% If we want to make it async, we need to use `emqx_ft_async_reply`, %% like in `on_fin`. - case store_segment(Transfer, Segment) of - ok -> ?RC_SUCCESS; - {error, _} -> ?RC_UNSPECIFIED_ERROR - end. + ?ACK_AND_PUBLISH(store_segment(Transfer, Segment)). -on_fin(PacketId, Msg, Transfer, FinalSize, FinalChecksum) -> +on_fin(#{packet_id := PacketId} = TopicReplyData, Msg, Transfer, FinalSize, FinalChecksum) -> ?tp(info, "file_transfer_fin", #{ mqtt_msg => Msg, packet_id => PacketId, @@ -280,30 +284,94 @@ on_fin(PacketId, Msg, Transfer, FinalSize, FinalChecksum) -> checksum => FinalChecksum }), %% TODO: handle checksum? Do we need it? 
- emqx_ft_async_reply:with_new_packet( + with_new_packet( + TopicReplyData, PacketId, fun() -> case assemble(Transfer, FinalSize, FinalChecksum) of ok -> - ?RC_SUCCESS; - %% Assembling started, packet will be acked by monitor or timeout + ?ACK_AND_PUBLISH(ok); + %% Assembling started, packet will be acked/replied by monitor or timeout {async, Pid} -> - ok = register_async_reply(Pid, PacketId), - ok = emqx_ft_storage:kickoff(Pid), - undefined; - {error, _} -> - ?RC_UNSPECIFIED_ERROR + register_async_worker(Pid, TopicReplyData); + {error, _} = Error -> + ?ACK_AND_PUBLISH(Error) end - end, - undefined + end ). -register_async_reply(Pid, PacketId) -> +register_async_worker(Pid, #{mode := Mode, packet_id := PacketId} = TopicReplyData) -> MRef = erlang:monitor(process, Pid), TRef = erlang:start_timer( - emqx_ft_conf:assemble_timeout(), self(), ?FT_EVENT({MRef, PacketId}) + emqx_ft_conf:assemble_timeout(), self(), ?FT_EVENT({MRef, TopicReplyData}) ), - ok = emqx_ft_async_reply:register(PacketId, MRef, TRef). + case Mode of + async -> + ok = emqx_ft_async_reply:register(MRef, TRef, TopicReplyData), + ok = emqx_ft_storage:kickoff(Pid), + ?ACK(ok); + sync -> + ok = emqx_ft_async_reply:register(PacketId, MRef, TRef, TopicReplyData), + ok = emqx_ft_storage:kickoff(Pid), + ?DELAY_ACK + end. + +topic_reply_data(Mode, From, PacketId, #message{topic = Topic, headers = Headers}) -> + Props = maps:get(properties, Headers, #{}), + #{ + mode => Mode, + clientid => From, + command_topic => Topic, + correlation_data => maps:get('Correlation-Data', Props, undefined), + response_topic => maps:get('Response-Topic', Props, undefined), + packet_id => PacketId + }. + +maybe_publish_response(?DELAY_ACK, _TopicReplyData) -> + undefined; +maybe_publish_response(?ACK(Result), _TopicReplyData) -> + result_to_rc(Result); +maybe_publish_response(?ACK_AND_PUBLISH(Result), TopicReplyData) -> + publish_response(Result, TopicReplyData). 
+ +publish_response(Result, #{ + clientid := ClientId, + command_topic := CommandTopic, + correlation_data := CorrelationData, + response_topic := ResponseTopic, + packet_id := PacketId +}) -> + ResultCode = result_to_rc(Result), + Response = encode_response(#{ + topic => CommandTopic, + packet_id => PacketId, + reason_code => ResultCode, + reason_description => emqx_ft_error:format(Result) + }), + Payload = emqx_utils_json:encode(Response), + Topic = emqx_maybe:define(ResponseTopic, response_topic(ClientId)), + Msg = emqx_message:make( + emqx_guid:gen(), + undefined, + ?QOS_1, + Topic, + Payload, + #{}, + #{properties => response_properties(CorrelationData)} + ), + _ = emqx_broker:publish(Msg), + ResultCode. + +response_properties(undefined) -> #{}; +response_properties(CorrelationData) -> #{'Correlation-Data' => CorrelationData}. + +response_topic(ClientId) -> + <<"$file-response/", (clientid_to_binary(ClientId))/binary>>. + +result_to_rc(ok) -> + ?RC_SUCCESS; +result_to_rc({error, _}) -> + ?RC_UNSPECIFIED_ERROR. store_filemeta(Transfer, Segment) -> try @@ -347,9 +415,9 @@ validate(Validations, Fun) -> case do_validate(Validations, []) of {ok, Parsed} -> Fun(Parsed); - {error, Reason} -> + {error, Reason} = Error -> ?tp(info, "client_violated_protocol", #{reason => Reason}), - ?RC_UNSPECIFIED_ERROR + ?ACK_AND_PUBLISH(Error) end. do_validate([], Parsed) -> @@ -416,19 +484,18 @@ clientid_to_binary(A) when is_atom(A) -> clientid_to_binary(B) when is_binary(B) -> B. -reason_to_rc(Reason) -> - case map_down_reason(Reason) of - ok -> ?RC_SUCCESS; - {error, _} -> ?RC_UNSPECIFIED_ERROR - end. 
- -map_down_reason(normal) -> +down_reason_to_result(normal) -> ok; -map_down_reason(shutdown) -> +down_reason_to_result(shutdown) -> ok; -map_down_reason({shutdown, Result}) -> +down_reason_to_result({shutdown, Result}) -> Result; -map_down_reason(noproc) -> +down_reason_to_result(noproc) -> {error, noproc}; -map_down_reason(Error) -> +down_reason_to_result(Error) -> {error, {internal_error, Error}}. + +with_new_packet(#{mode := async}, _PacketId, Fun) -> + Fun(); +with_new_packet(#{mode := sync}, PacketId, Fun) -> + emqx_ft_async_reply:with_new_packet(PacketId, Fun, undefined). diff --git a/apps/emqx_ft/src/emqx_ft_assembler.erl b/apps/emqx_ft/src/emqx_ft_assembler.erl index 0d9e86a49..b625d0ffe 100644 --- a/apps/emqx_ft/src/emqx_ft_assembler.erl +++ b/apps/emqx_ft/src/emqx_ft_assembler.erl @@ -156,12 +156,16 @@ handle_event(internal, _, {assemble, [{Node, Segment} | Rest]}, St = #{export := % Currently, race is possible between getting segment info from the remote node and % this node garbage collecting the segment itself. 
% TODO: pipelining - % TODO: better error handling - {ok, Content} = pread(Node, Segment, St), - case emqx_ft_storage_exporter:write(Export, Content) of - {ok, NExport} -> - {next_state, {assemble, Rest}, St#{export := NExport}, ?internal([])}; - {error, _} = Error -> + case pread(Node, Segment, St) of + {ok, Content} -> + case emqx_ft_storage_exporter:write(Export, Content) of + {ok, NExport} -> + {next_state, {assemble, Rest}, St#{export := NExport}, ?internal([])}; + {error, _} = Error -> + {stop, {shutdown, Error}, maps:remove(export, St)} + end; + {error, ReadError} -> + Error = {error, {read_segment, ReadError}}, {stop, {shutdown, Error}, maps:remove(export, St)} end; handle_event(internal, _, {assemble, []}, St = #{}) -> diff --git a/apps/emqx_ft/src/emqx_ft_async_reply.erl b/apps/emqx_ft/src/emqx_ft_async_reply.erl index 501f91629..e099196f9 100644 --- a/apps/emqx_ft/src/emqx_ft_async_reply.erl +++ b/apps/emqx_ft/src/emqx_ft_async_reply.erl @@ -27,6 +27,7 @@ -export([ register/3, + register/4, take_by_mref/1, with_new_packet/3, deregister_all/1 @@ -42,12 +43,14 @@ -define(MON_TAB, emqx_ft_async_mons). -define(MON_KEY(MRef), ?MON_KEY(self(), MRef)). -define(MON_KEY(ChannelPid, MRef), {ChannelPid, MRef}). +-define(MON_RECORD(KEY, PACKET_ID, TREF, DATA), {KEY, PACKET_ID, TREF, DATA}). %% async worker monitors by packet ids -define(PACKET_TAB, emqx_ft_async_packets). -define(PACKET_KEY(PacketId), ?PACKET_KEY(self(), PacketId)). -define(PACKET_KEY(ChannelPid, PacketId), {ChannelPid, PacketId}). +-define(PACKET_RECORD(KEY, MREF, DATA), {KEY, MREF, DATA}). %%-------------------------------------------------------------------- %% API @@ -66,10 +69,15 @@ create_tables() -> ok = emqx_utils_ets:new(?PACKET_TAB, EtsOptions), ok. --spec register(packet_id(), mon_ref(), timer_ref()) -> ok. 
-register(PacketId, MRef, TRef) -> - _ = ets:insert(?PACKET_TAB, {?PACKET_KEY(PacketId), MRef}), - _ = ets:insert(?MON_TAB, {?MON_KEY(MRef), PacketId, TRef}), +-spec register(packet_id(), mon_ref(), timer_ref(), term()) -> ok. +register(PacketId, MRef, TRef, Data) -> + _ = ets:insert(?PACKET_TAB, ?PACKET_RECORD(?PACKET_KEY(PacketId), MRef, Data)), + _ = ets:insert(?MON_TAB, ?MON_RECORD(?MON_KEY(MRef), PacketId, TRef, Data)), + ok. + +-spec register(mon_ref(), timer_ref(), term()) -> ok. +register(MRef, TRef, Data) -> + _ = ets:insert(?MON_TAB, ?MON_RECORD(?MON_KEY(MRef), undefined, TRef, Data)), ok. -spec with_new_packet(packet_id(), fun(() -> any()), any()) -> any(). @@ -79,12 +87,12 @@ with_new_packet(PacketId, Fun, Default) -> false -> Fun() end. --spec take_by_mref(mon_ref()) -> {ok, packet_id(), timer_ref()} | not_found. +-spec take_by_mref(mon_ref()) -> {ok, packet_id() | undefined, timer_ref(), term()} | not_found. take_by_mref(MRef) -> case ets:take(?MON_TAB, ?MON_KEY(MRef)) of - [{_, PacketId, TRef}] -> - _ = ets:delete(?PACKET_TAB, ?PACKET_KEY(PacketId)), - {ok, PacketId, TRef}; + [?MON_RECORD(_, PacketId, TRef, Data)] -> + PacketId =/= undefined andalso ets:delete(?PACKET_TAB, ?PACKET_KEY(PacketId)), + {ok, PacketId, TRef, Data}; [] -> not_found end. @@ -104,11 +112,11 @@ info() -> %%------------------------------------------------------------------- deregister_packets(ChannelPid) when is_pid(ChannelPid) -> - MS = [{{?PACKET_KEY(ChannelPid, '_'), '_'}, [], [true]}], + MS = [{?PACKET_RECORD(?PACKET_KEY(ChannelPid, '_'), '_', '_'), [], [true]}], _ = ets:select_delete(?PACKET_TAB, MS), ok. deregister_mons(ChannelPid) -> - MS = [{{?MON_KEY(ChannelPid, '_'), '_', '_'}, [], [true]}], + MS = [{?MON_RECORD(?MON_KEY(ChannelPid, '_'), '_', '_', '_'), [], [true]}], _ = ets:select_delete(?MON_TAB, MS), ok. 
diff --git a/apps/emqx_ft/src/emqx_ft_error.erl b/apps/emqx_ft/src/emqx_ft_error.erl new file mode 100644 index 000000000..06d575ede --- /dev/null +++ b/apps/emqx_ft/src/emqx_ft_error.erl @@ -0,0 +1,39 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +%% @doc File Transfer error description module + +-module(emqx_ft_error). + +-export([format/1]). + +%%-------------------------------------------------------------------- +%% API +%%-------------------------------------------------------------------- + +format(ok) -> <<"success">>; +format({error, Reason}) -> format_error_reason(Reason). + +%%-------------------------------------------------------------------- +%% Internal functions +%%-------------------------------------------------------------------- + +format_error_reason(Reason) when is_atom(Reason) -> + atom_to_binary(Reason, utf8); +format_error_reason({ErrorKind, _}) when is_atom(ErrorKind) -> + atom_to_binary(ErrorKind, utf8); +format_error_reason(_Reason) -> + <<"internal_error">>. diff --git a/apps/emqx_ft/src/emqx_ft_schema.erl b/apps/emqx_ft/src/emqx_ft_schema.erl index dd21e9524..c89ba7d6a 100644 --- a/apps/emqx_ft/src/emqx_ft_schema.erl +++ b/apps/emqx_ft/src/emqx_ft_schema.erl @@ -26,7 +26,7 @@ -export([schema/1]). 
%% Utilities --export([backend/1]). +-export([backend/1, encode/2, decode/2]). %% Test-only helpers -export([translate/1]). @@ -76,7 +76,7 @@ fields(file_transfer) -> #{ desc => ?DESC("init_timeout"), required => false, - importance => ?IMPORTANCE_LOW, + importance => ?IMPORTANCE_HIDDEN, default => "10s" } )}, @@ -86,7 +86,7 @@ fields(file_transfer) -> #{ desc => ?DESC("store_segment_timeout"), required => false, - importance => ?IMPORTANCE_LOW, + importance => ?IMPORTANCE_HIDDEN, default => "5m" } )}, @@ -282,6 +282,16 @@ schema(filemeta) -> {segments_ttl, hoconsc:mk(pos_integer())}, {user_data, hoconsc:mk(json_value())} ] + }; +schema(command_response) -> + #{ + roots => [ + {vsn, hoconsc:mk(string(), #{default => <<"0.1">>})}, + {topic, hoconsc:mk(string())}, + {packet_id, hoconsc:mk(pos_integer())}, + {reason_code, hoconsc:mk(non_neg_integer())}, + {reason_description, hoconsc:mk(binary())} + ] }. validator(filename) -> @@ -345,6 +355,27 @@ backend(Config) -> emit_enabled(Type, BConf = #{enable := Enabled}) -> Enabled andalso throw({Type, BConf}). +decode(SchemaName, Payload) when is_binary(Payload) -> + case emqx_utils_json:safe_decode(Payload, [return_maps]) of + {ok, Map} -> + decode(SchemaName, Map); + {error, Error} -> + {error, {invalid_filemeta_json, Error}} + end; +decode(SchemaName, Map) when is_map(Map) -> + Schema = schema(SchemaName), + try + Meta = hocon_tconf:check_plain(Schema, Map, #{atom_key => true, required => false}), + {ok, Meta} + catch + throw:{_Schema, Errors} -> + {error, {invalid_filemeta, Errors}} + end. + +encode(SchemaName, Map = #{}) -> + Schema = schema(SchemaName), + hocon_tconf:make_serializable(Schema, emqx_utils_maps:binary_key_map(Map), #{}). 
+ %% Test-only helpers -spec translate(emqx_config:raw_config()) -> diff --git a/apps/emqx_ft/src/emqx_ft_storage_exporter.erl b/apps/emqx_ft/src/emqx_ft_storage_exporter.erl index bc1b5fb4d..886dc27f6 100644 --- a/apps/emqx_ft/src/emqx_ft_storage_exporter.erl +++ b/apps/emqx_ft/src/emqx_ft_storage_exporter.erl @@ -195,7 +195,7 @@ verify_checksum(Ctx, {Algo, Digest} = Checksum) -> Digest -> {ok, Checksum}; Mismatch -> - {error, {checksum, Algo, binary:encode_hex(Mismatch)}} + {error, {checksum_mismatch, Algo, binary:encode_hex(Mismatch)}} end; verify_checksum(Ctx, undefined) -> Digest = crypto:hash_final(Ctx), diff --git a/apps/emqx_ft/src/emqx_ft_storage_fs.erl b/apps/emqx_ft/src/emqx_ft_storage_fs.erl index 5d0395989..0102756ca 100644 --- a/apps/emqx_ft/src/emqx_ft_storage_fs.erl +++ b/apps/emqx_ft/src/emqx_ft_storage_fs.erl @@ -145,7 +145,7 @@ store_filemeta(Storage, Transfer, Meta) -> % We won't see conflicts in case of concurrent `store_filemeta` % requests. It's rather odd scenario so it's fine not to worry % about it too much now. - {error, conflict}; + {error, filemeta_conflict}; {error, Reason} when Reason =:= notfound; Reason =:= corrupted; Reason =:= enoent -> write_file_atomic(Storage, Transfer, Filepath, encode_filemeta(Meta)); {error, _} = Error -> diff --git a/apps/emqx_ft/test/emqx_ft_SUITE.erl b/apps/emqx_ft/test/emqx_ft_SUITE.erl index 7da9ccf69..6b6437971 100644 --- a/apps/emqx_ft/test/emqx_ft_SUITE.erl +++ b/apps/emqx_ft/test/emqx_ft_SUITE.erl @@ -31,7 +31,8 @@ all() -> [ - {group, single_node}, + {group, async_mode}, + {group, sync_mode}, {group, cluster} ]. 
@@ -50,7 +51,14 @@ groups() -> t_nasty_filenames, t_no_meta, t_no_segment, - t_simple_transfer + t_simple_transfer, + t_assemble_timeout ]}, + {async_mode, [], [ + {group, single_node} + ]}, + {sync_mode, [], [ + {group, single_node} ]}, {cluster, [], [ t_switch_node, @@ -72,9 +80,10 @@ init_per_suite(Config) -> emqx_ft_test_helpers:local_storage(Config), #{<<"local">> => #{<<"segments">> => #{<<"gc">> => #{<<"interval">> => 0}}}} ), + FTConfig = emqx_ft_test_helpers:config(Storage, #{<<"assemble_timeout">> => <<"2s">>}), Apps = emqx_cth_suite:start( [ - {emqx_ft, #{config => emqx_ft_test_helpers:config(Storage)}} + {emqx_ft, #{config => FTConfig}} ], #{work_dir => emqx_cth_suite:work_dir(Config)} ), @@ -85,7 +94,10 @@ end_per_suite(Config) -> ok. init_per_testcase(Case, Config) -> - ClientId = atom_to_binary(Case), + ClientId = iolist_to_binary([ + atom_to_binary(Case), <<"-">>, emqx_ft_test_helpers:unique_binary_string() + ]), + ok = set_client_specific_ft_dirs(ClientId, Config), case ?config(group, Config) of cluster -> [{clientid, ClientId} | Config]; @@ -103,6 +115,10 @@ init_per_group(Group = cluster, Config) -> Cluster = mk_cluster_specs(Config), Nodes = emqx_cth_cluster:start(Cluster, #{work_dir => WorkDir}), [{group, Group}, {cluster_nodes, Nodes} | Config]; +init_per_group(_Group = async_mode, Config) -> + [{mode, async} | Config]; +init_per_group(_Group = sync_mode, Config) -> + [{mode, sync} | Config]; init_per_group(Group, Config) -> [{group, Group} | Config]. @@ -127,7 +143,7 @@ mk_cluster_specs(_Config) -> ]. %%-------------------------------------------------------------------- -%% Tests +%% Single node tests %%-------------------------------------------------------------------- t_invalid_topic_format(Config) -> @@ -171,32 +187,32 @@ t_invalid_fileid(Config) -> C = ?config(client, Config), ?assertRCName( unspecified_error, - emqtt:publish(C, <<"$file//init">>, <<>>, 1) + emqtt:publish(C, mk_init_topic(Config, <<>>), <<>>, 1) ).
t_invalid_filename(Config) -> C = ?config(client, Config), ?assertRCName( unspecified_error, - emqtt:publish(C, mk_init_topic(<<"f1">>), encode_meta(meta(".", <<>>)), 1) + emqtt:publish(C, mk_init_topic(Config, <<"f1">>), encode_meta(meta(".", <<>>)), 1) ), ?assertRCName( unspecified_error, - emqtt:publish(C, mk_init_topic(<<"f2">>), encode_meta(meta("..", <<>>)), 1) + emqtt:publish(C, mk_init_topic(Config, <<"f2">>), encode_meta(meta("..", <<>>)), 1) ), ?assertRCName( unspecified_error, - emqtt:publish(C, mk_init_topic(<<"f2">>), encode_meta(meta("../nice", <<>>)), 1) + emqtt:publish(C, mk_init_topic(Config, <<"f2">>), encode_meta(meta("../nice", <<>>)), 1) ), ?assertRCName( unspecified_error, - emqtt:publish(C, mk_init_topic(<<"f3">>), encode_meta(meta("/etc/passwd", <<>>)), 1) + emqtt:publish(C, mk_init_topic(Config, <<"f3">>), encode_meta(meta("/etc/passwd", <<>>)), 1) ), ?assertRCName( unspecified_error, emqtt:publish( C, - mk_init_topic(<<"f4">>), + mk_init_topic(Config, <<"f4">>), encode_meta(meta(lists:duplicate(1000, $A), <<>>)), 1 ) @@ -204,6 +220,7 @@ t_invalid_filename(Config) -> t_simple_transfer(Config) -> C = ?config(client, Config), + ClientId = ?config(clientid, Config), Filename = "topsecret.pdf", FileId = <<"f1">>, @@ -214,22 +231,24 @@ t_simple_transfer(Config) -> ?assertRCName( success, - emqtt:publish(C, mk_init_topic(FileId), encode_meta(Meta), 1) + emqtt:publish(C, mk_init_topic(Config, FileId), encode_meta(Meta), 1) ), lists:foreach( fun({Chunk, Offset}) -> ?assertRCName( success, - emqtt:publish(C, mk_segment_topic(FileId, Offset), Chunk, 1) + emqtt:publish(C, mk_segment_topic(Config, FileId, Offset), Chunk, 1) ) end, with_offsets(Data) ), - ?assertRCName( - success, - emqtt:publish(C, mk_fin_topic(FileId, Filesize), <<>>, 1) + ?assertEqual( + ok, + emqx_ft_test_helpers:fin_result( + mode(Config), ClientId, C, mk_fin_topic(Config, FileId, Filesize) + ) ), [Export] = list_files(?config(clientid, Config)), @@ -238,7 +257,7 @@ 
t_simple_transfer(Config) -> read_export(Export) ). -t_nasty_clientids_fileids(_Config) -> +t_nasty_clientids_fileids(Config) -> Transfers = [ {<<".">>, <<".">>}, {<<"🌚"/utf8>>, <<"🌝"/utf8>>}, @@ -249,15 +268,16 @@ t_nasty_clientids_fileids(_Config) -> ok = lists:foreach( fun({ClientId, FileId}) -> - ok = emqx_ft_test_helpers:upload_file(ClientId, FileId, "justfile", ClientId), + Data = ClientId, + ok = emqx_ft_test_helpers:upload_file(mode(Config), ClientId, FileId, "justfile", Data), [Export] = list_files(ClientId), ?assertMatch(#{meta := #{name := "justfile"}}, Export), - ?assertEqual({ok, ClientId}, read_export(Export)) + ?assertEqual({ok, Data}, read_export(Export)) end, Transfers ). -t_nasty_filenames(_Config) -> +t_nasty_filenames(Config) -> Filenames = [ {<<"nasty1">>, "146%"}, {<<"nasty2">>, "🌚"}, @@ -267,7 +287,7 @@ t_nasty_filenames(_Config) -> ok = lists:foreach( fun({ClientId, Filename}) -> FileId = unicode:characters_to_binary(Filename), - ok = emqx_ft_test_helpers:upload_file(ClientId, FileId, Filename, FileId), + ok = emqx_ft_test_helpers:upload_file(mode(Config), ClientId, FileId, Filename, FileId), [Export] = list_files(ClientId), ?assertMatch(#{meta := #{name := Filename}}, Export), ?assertEqual({ok, FileId}, read_export(Export)) @@ -285,34 +305,36 @@ t_meta_conflict(Config) -> ?assertRCName( success, - emqtt:publish(C, mk_init_topic(FileId), encode_meta(Meta), 1) + emqtt:publish(C, mk_init_topic(Config, FileId), encode_meta(Meta), 1) ), ConflictMeta = Meta#{name => "conflict.pdf"}, ?assertRCName( unspecified_error, - emqtt:publish(C, mk_init_topic(FileId), encode_meta(ConflictMeta), 1) + emqtt:publish(C, mk_init_topic(Config, FileId), encode_meta(ConflictMeta), 1) ). 
t_no_meta(Config) -> C = ?config(client, Config), + ClientId = ?config(clientid, Config), FileId = <<"f1">>, Data = <<"first">>, ?assertRCName( success, - emqtt:publish(C, mk_segment_topic(FileId, 0), Data, 1) + emqtt:publish(C, mk_segment_topic(Config, FileId, 0), Data, 1) ), - ?assertRCName( - unspecified_error, - emqtt:publish(C, mk_fin_topic(FileId, 42), <<>>, 1) + ?assertEqual( + {error, unspecified_error}, + emqx_ft_test_helpers:fin_result(mode(Config), ClientId, C, mk_fin_topic(Config, FileId, 42)) ). t_no_segment(Config) -> C = ?config(client, Config), + ClientId = ?config(clientid, Config), Filename = "topsecret.pdf", FileId = <<"f1">>, @@ -323,23 +345,25 @@ t_no_segment(Config) -> ?assertRCName( success, - emqtt:publish(C, mk_init_topic(FileId), encode_meta(Meta), 1) + emqtt:publish(C, mk_init_topic(Config, FileId), encode_meta(Meta), 1) ), lists:foreach( fun({Chunk, Offset}) -> ?assertRCName( success, - emqtt:publish(C, mk_segment_topic(FileId, Offset), Chunk, 1) + emqtt:publish(C, mk_segment_topic(Config, FileId, Offset), Chunk, 1) ) end, %% Skip the first segment tl(with_offsets(Data)) ), - ?assertRCName( - unspecified_error, - emqtt:publish(C, mk_fin_topic(FileId, Filesize), <<>>, 1) + ?assertEqual( + {error, unspecified_error}, + emqx_ft_test_helpers:fin_result( + mode(Config), ClientId, C, mk_fin_topic(Config, FileId, Filesize) + ) ). t_invalid_meta(Config) -> @@ -352,17 +376,18 @@ t_invalid_meta(Config) -> MetaPayload = emqx_utils_json:encode(Meta), ?assertRCName( unspecified_error, - emqtt:publish(C, mk_init_topic(FileId), MetaPayload, 1) + emqtt:publish(C, mk_init_topic(Config, FileId), MetaPayload, 1) ), %% Invalid JSON ?assertRCName( unspecified_error, - emqtt:publish(C, mk_init_topic(FileId), <<"{oops;">>, 1) + emqtt:publish(C, mk_init_topic(Config, FileId), <<"{oops;">>, 1) ). 
t_invalid_checksum(Config) -> C = ?config(client, Config), + ClientId = ?config(clientid, Config), Filename = "topsecret.pdf", FileId = <<"f1">>, @@ -374,35 +399,39 @@ t_invalid_checksum(Config) -> ?assertRCName( success, - emqtt:publish(C, mk_init_topic(FileId), MetaPayload, 1) + emqtt:publish(C, mk_init_topic(Config, FileId), MetaPayload, 1) ), lists:foreach( fun({Chunk, Offset}) -> ?assertRCName( success, - emqtt:publish(C, mk_segment_topic(FileId, Offset), Chunk, 1) + emqtt:publish(C, mk_segment_topic(Config, FileId, Offset), Chunk, 1) ) end, with_offsets(Data) ), % Send `fin` w/o checksum, should fail since filemeta checksum is invalid - FinTopic = mk_fin_topic(FileId, Filesize), - ?assertRCName( - unspecified_error, - emqtt:publish(C, FinTopic, <<>>, 1) + FinTopic = mk_fin_topic(Config, FileId, Filesize), + + ?assertEqual( + {error, unspecified_error}, + emqx_ft_test_helpers:fin_result(mode(Config), ClientId, C, FinTopic) ), % Send `fin` with the correct checksum Checksum = binary:encode_hex(sha256(Data)), - ?assertRCName( - success, - emqtt:publish(C, <>, <<>>, 1) + ?assertEqual( + ok, + emqx_ft_test_helpers:fin_result( + mode(Config), ClientId, C, <> + ) ). 
t_corrupted_segment_retry(Config) -> C = ?config(client, Config), + ClientId = ?config(clientid, Config), Filename = "corruption.pdf", FileId = <<"4242-4242">>, @@ -421,35 +450,89 @@ t_corrupted_segment_retry(Config) -> Meta = #{size := Filesize} = meta(Filename, Data), - ?assertRCName(success, emqtt:publish(C, mk_init_topic(FileId), encode_meta(Meta), 1)), + ?assertRCName(success, emqtt:publish(C, mk_init_topic(Config, FileId), encode_meta(Meta), 1)), ?assertRCName( success, - emqtt:publish(C, mk_segment_topic(FileId, Offset1, Checksum1), Seg1, 1) + emqtt:publish(C, mk_segment_topic(Config, FileId, Offset1, Checksum1), Seg1, 1) ), % segment is corrupted ?assertRCName( unspecified_error, - emqtt:publish(C, mk_segment_topic(FileId, Offset2, Checksum2), <>, 1) + emqtt:publish( + C, mk_segment_topic(Config, FileId, Offset2, Checksum2), <>, 1 + ) ), % retry ?assertRCName( success, - emqtt:publish(C, mk_segment_topic(FileId, Offset2, Checksum2), Seg2, 1) + emqtt:publish(C, mk_segment_topic(Config, FileId, Offset2, Checksum2), Seg2, 1) ), ?assertRCName( success, - emqtt:publish(C, mk_segment_topic(FileId, Offset3, Checksum3), Seg3, 1) + emqtt:publish(C, mk_segment_topic(Config, FileId, Offset3, Checksum3), Seg3, 1) ), - ?assertRCName( - success, - emqtt:publish(C, mk_fin_topic(FileId, Filesize), <<>>, 1) + ?assertEqual( + ok, + emqx_ft_test_helpers:fin_result( + mode(Config), ClientId, C, mk_fin_topic(Config, FileId, Filesize) + ) ). +t_assemble_crash(Config) -> + C = ?config(client, Config), + + meck:new(emqx_ft_storage_fs), + meck:expect(emqx_ft_storage_fs, assemble, fun(_, _, _, _) -> meck:exception(error, oops) end), + + ?assertRCName( + unspecified_error, + emqtt:publish(C, <<"$file/someid/fin">>, <<>>, 1) + ), + + meck:unload(emqx_ft_storage_fs). 
+ +t_assemble_timeout(Config) -> + C = ?config(client, Config), + ClientId = ?config(clientid, Config), + + SleepForever = fun() -> + Ref = make_ref(), + receive + Ref -> ok + end + end, + + ok = meck:new(emqx_ft_storage, [passthrough]), + ok = meck:expect(emqx_ft_storage, assemble, fun(_, _, _) -> + {async, spawn_link(SleepForever)} + end), + + {Time, Res} = timer:tc( + fun() -> + emqx_ft_test_helpers:fin_result( + mode(Config), ClientId, C, <<"$file/someid/fin/9999999">> + ) + end + ), + + ok = meck:unload(emqx_ft_storage), + + ?assertEqual( + {error, unspecified_error}, + Res + ), + + ?assert(2_000_000 < Time). + +%%-------------------------------------------------------------------- +%% Cluster tests +%%-------------------------------------------------------------------- + t_switch_node(Config) -> [Node | _] = ?config(cluster_nodes, Config), AdditionalNodePort = emqx_ft_test_helpers:tcp_port(Node), @@ -471,11 +554,11 @@ t_switch_node(Config) -> ?assertRCName( success, - emqtt:publish(C1, mk_init_topic(FileId), encode_meta(Meta), 1) + emqtt:publish(C1, mk_init_topic(Config, FileId), encode_meta(Meta), 1) ), ?assertRCName( success, - emqtt:publish(C1, mk_segment_topic(FileId, Offset0), Data0, 1) + emqtt:publish(C1, mk_segment_topic(Config, FileId, Offset0), Data0, 1) ), %% Then, switch the client to the main node @@ -487,16 +570,16 @@ t_switch_node(Config) -> ?assertRCName( success, - emqtt:publish(C2, mk_segment_topic(FileId, Offset1), Data1, 1) + emqtt:publish(C2, mk_segment_topic(Config, FileId, Offset1), Data1, 1) ), ?assertRCName( success, - emqtt:publish(C2, mk_segment_topic(FileId, Offset2), Data2, 1) + emqtt:publish(C2, mk_segment_topic(Config, FileId, Offset2), Data2, 1) ), ?assertRCName( success, - emqtt:publish(C2, mk_fin_topic(FileId, Filesize), <<>>, 1) + emqtt:publish(C2, mk_fin_topic(Config, FileId, Filesize), <<>>, 1) ), ok = emqtt:stop(C2), @@ -509,17 +592,6 @@ t_switch_node(Config) -> read_export(Export) ). 
-t_assemble_crash(Config) -> - C = ?config(client, Config), - - meck:new(emqx_ft_storage_fs), - meck:expect(emqx_ft_storage_fs, assemble, fun(_, _, _, _) -> meck:exception(error, oops) end), - - ?assertRCName( - unspecified_error, - emqtt:publish(C, <<"$file/someid/fin">>, <<>>, 1) - ). - t_unreliable_migrating_client(Config) -> NodeSelf = node(), [Node1, Node2] = ?config(cluster_nodes, Config), @@ -543,10 +615,10 @@ t_unreliable_migrating_client(Config) -> {fun connect_mqtt_client/2, [NodeSelf]}, % Send filemeta and 3 initial segments % (assuming client chose 100 bytes as a desired segment size) - {fun send_filemeta/2, [Meta]}, - {fun send_segment/3, [0, 100]}, - {fun send_segment/3, [100, 100]}, - {fun send_segment/3, [200, 100]}, + {fun send_filemeta/3, [Config, Meta]}, + {fun send_segment/4, [Config, 0, 100]}, + {fun send_segment/4, [Config, 100, 100]}, + {fun send_segment/4, [Config, 200, 100]}, % Disconnect the client cleanly {fun stop_mqtt_client/1, []}, % Connect to the broker on `Node1` @@ -555,27 +627,27 @@ t_unreliable_migrating_client(Config) -> % Client forgot the state for some reason and started the transfer again. 
% (assuming this is usual for a client on a device that was rebooted) {fun connect_mqtt_client/2, [Node2]}, - {fun send_filemeta/2, [Meta]}, + {fun send_filemeta/3, [Config, Meta]}, % This time it chose 200 bytes as a segment size - {fun send_segment/3, [0, 200]}, - {fun send_segment/3, [200, 200]}, + {fun send_segment/4, [Config, 0, 200]}, + {fun send_segment/4, [Config, 200, 200]}, % But now it downscaled back to 100 bytes segments - {fun send_segment/3, [400, 100]}, + {fun send_segment/4, [Config, 400, 100]}, % Client lost connectivity and reconnected % (also had last few segments unacked and decided to resend them) {fun connect_mqtt_client/2, [Node2]}, - {fun send_segment/3, [200, 200]}, - {fun send_segment/3, [400, 200]}, + {fun send_segment/4, [Config, 200, 200]}, + {fun send_segment/4, [Config, 400, 200]}, % Client lost connectivity and reconnected, this time to another node % (also had last segment unacked and decided to resend it) {fun connect_mqtt_client/2, [Node1]}, - {fun send_segment/3, [400, 200]}, - {fun send_segment/3, [600, eof]}, - {fun send_finish/1, []}, + {fun send_segment/4, [Config, 400, 200]}, + {fun send_segment/4, [Config, 600, eof]}, + {fun send_finish/2, [Config]}, % Client lost connectivity and reconnected, this time to the current node % (client had `fin` unacked and decided to resend it) {fun connect_mqtt_client/2, [NodeSelf]}, - {fun send_finish/1, []} + {fun send_finish/2, [Config]} ], _Context = run_commands(Commands, Context), @@ -621,8 +693,8 @@ t_concurrent_fins(Config) -> Context1 = run_commands( [ {fun connect_mqtt_client/2, [Node1]}, - {fun send_filemeta/2, [Meta]}, - {fun send_segment/3, [0, 100]}, + {fun send_filemeta/3, [Config, Meta]}, + {fun send_segment/4, [Config, 0, 100]}, {fun stop_mqtt_client/1, []} ], Context0 @@ -634,7 +706,7 @@ t_concurrent_fins(Config) -> run_commands( [ {fun connect_mqtt_client/2, [Node]}, - {fun send_finish/1, []} + {fun send_finish/2, [Config]} ], Context1 ) @@ -708,14 +780,16 @@ 
disown_mqtt_client(Context = #{client := Client}) -> disown_mqtt_client(Context = #{}) -> Context. -send_filemeta(Meta, Context = #{client := Client, fileid := FileId}) -> +send_filemeta(Config, Meta, Context = #{client := Client, fileid := FileId}) -> ?assertRCName( success, - emqtt:publish(Client, mk_init_topic(FileId), encode_meta(Meta), 1) + emqtt:publish(Client, mk_init_topic(Config, FileId), encode_meta(Meta), 1) ), Context. -send_segment(Offset, Size, Context = #{client := Client, fileid := FileId, payload := Payload}) -> +send_segment( + Config, Offset, Size, Context = #{client := Client, fileid := FileId, payload := Payload} +) -> Data = case Size of eof -> @@ -725,14 +799,14 @@ send_segment(Offset, Size, Context = #{client := Client, fileid := FileId, paylo end, ?assertRCName( success, - emqtt:publish(Client, mk_segment_topic(FileId, Offset), Data, 1) + emqtt:publish(Client, mk_segment_topic(Config, FileId, Offset), Data, 1) ), Context. -send_finish(Context = #{client := Client, fileid := FileId, filesize := Filesize}) -> +send_finish(Config, Context = #{client := Client, fileid := FileId, filesize := Filesize}) -> ?assertRCName( success, - emqtt:publish(Client, mk_fin_topic(FileId, Filesize), <<>>, 1) + emqtt:publish(Client, mk_fin_topic(Config, FileId, Filesize), <<>>, 1) ), Context. @@ -749,23 +823,30 @@ fs_exported_file_attributes(FSExports) -> lists:sort(FSExports) ). -mk_init_topic(FileId) -> - <<"$file/", FileId/binary, "/init">>. +mk_init_topic(Config, FileId) -> + RequestTopicPrefix = request_topic_prefix(Config, FileId), + <>. -mk_segment_topic(FileId, Offset) when is_integer(Offset) -> - mk_segment_topic(FileId, integer_to_binary(Offset)); -mk_segment_topic(FileId, Offset) when is_binary(Offset) -> - <<"$file/", FileId/binary, "/", Offset/binary>>. 
+mk_segment_topic(Config, FileId, Offset) when is_integer(Offset) -> + mk_segment_topic(Config, FileId, integer_to_binary(Offset)); +mk_segment_topic(Config, FileId, Offset) when is_binary(Offset) -> + RequestTopicPrefix = request_topic_prefix(Config, FileId), + <>. -mk_segment_topic(FileId, Offset, Checksum) when is_integer(Offset) -> - mk_segment_topic(FileId, integer_to_binary(Offset), Checksum); -mk_segment_topic(FileId, Offset, Checksum) when is_binary(Offset) -> - <<"$file/", FileId/binary, "/", Offset/binary, "/", Checksum/binary>>. +mk_segment_topic(Config, FileId, Offset, Checksum) when is_integer(Offset) -> + mk_segment_topic(Config, FileId, integer_to_binary(Offset), Checksum); +mk_segment_topic(Config, FileId, Offset, Checksum) when is_binary(Offset) -> + RequestTopicPrefix = request_topic_prefix(Config, FileId), + <>. -mk_fin_topic(FileId, Size) when is_integer(Size) -> - mk_fin_topic(FileId, integer_to_binary(Size)); -mk_fin_topic(FileId, Size) when is_binary(Size) -> - <<"$file/", FileId/binary, "/fin/", Size/binary>>. +mk_fin_topic(Config, FileId, Size) when is_integer(Size) -> + mk_fin_topic(Config, FileId, integer_to_binary(Size)); +mk_fin_topic(Config, FileId, Size) when is_binary(Size) -> + RequestTopicPrefix = request_topic_prefix(Config, FileId), + <>. + +request_topic_prefix(Config, FileId) -> + emqx_ft_test_helpers:request_topic_prefix(mode(Config), FileId). with_offsets(Items) -> {List, _} = lists:mapfoldl( @@ -799,3 +880,17 @@ list_files(ClientId) -> read_export(#{path := AbsFilepath}) -> % TODO: only works for the local filesystem exporter right now file:read_file(AbsFilepath). + +set_client_specific_ft_dirs(ClientId, Config) -> + FTRoot = emqx_ft_test_helpers:ft_root(Config), + ok = emqx_config:put( + [file_transfer, storage, local, segments, root], + filename:join([FTRoot, ClientId, segments]) + ), + ok = emqx_config:put( + [file_transfer, storage, local, exporter, local, root], + filename:join([FTRoot, ClientId, exports]) + ). 
+ +mode(Config) -> + proplists:get_value(mode, Config, sync). diff --git a/apps/emqx_ft/test/emqx_ft_api_SUITE.erl b/apps/emqx_ft/test/emqx_ft_api_SUITE.erl index ae8a5c01c..092927d70 100644 --- a/apps/emqx_ft/test/emqx_ft_api_SUITE.erl +++ b/apps/emqx_ft/test/emqx_ft_api_SUITE.erl @@ -85,7 +85,7 @@ t_list_files(Config) -> FileId = <<"f1">>, Node = lists:last(test_nodes(Config)), - ok = emqx_ft_test_helpers:upload_file(ClientId, FileId, "f1", <<"data">>, Node), + ok = emqx_ft_test_helpers:upload_file(sync, ClientId, FileId, "f1", <<"data">>, Node), {ok, 200, #{<<"files">> := Files}} = request_json(get, uri(["file_transfer", "files"]), Config), @@ -114,7 +114,7 @@ t_download_transfer(Config) -> Nodes = [Node | _] = test_nodes(Config), NodeUpload = lists:last(Nodes), - ok = emqx_ft_test_helpers:upload_file(ClientId, FileId, "f1", <<"data">>, NodeUpload), + ok = emqx_ft_test_helpers:upload_file(sync, ClientId, FileId, "f1", <<"data">>, NodeUpload), ?assertMatch( {ok, 400, #{<<"code">> := <<"BAD_REQUEST">>}}, @@ -185,7 +185,7 @@ t_list_files_paging(Config) -> ], ok = lists:foreach( fun({FileId, Name, Node}) -> - ok = emqx_ft_test_helpers:upload_file(ClientId, FileId, Name, <<"data">>, Node) + ok = emqx_ft_test_helpers:upload_file(sync, ClientId, FileId, Name, <<"data">>, Node) end, Uploads ), diff --git a/apps/emqx_ft/test/emqx_ft_async_reply_SUITE.erl b/apps/emqx_ft/test/emqx_ft_async_reply_SUITE.erl index 78a9b371c..daa83de74 100644 --- a/apps/emqx_ft/test/emqx_ft_async_reply_SUITE.erl +++ b/apps/emqx_ft/test/emqx_ft_async_reply_SUITE.erl @@ -55,7 +55,7 @@ t_register(_Config) -> PacketId = 1, MRef = make_ref(), TRef = make_ref(), - ok = emqx_ft_async_reply:register(PacketId, MRef, TRef), + ok = emqx_ft_async_reply:register(PacketId, MRef, TRef, somedata), ?assertEqual( undefined, @@ -68,7 +68,7 @@ t_register(_Config) -> ), ?assertEqual( - {ok, PacketId, TRef}, + {ok, PacketId, TRef, somedata}, emqx_ft_async_reply:take_by_mref(MRef) ). 
@@ -76,7 +76,7 @@ t_process_independence(_Config) -> PacketId = 1, MRef = make_ref(), TRef = make_ref(), - ok = emqx_ft_async_reply:register(PacketId, MRef, TRef), + ok = emqx_ft_async_reply:register(PacketId, MRef, TRef, somedata), Self = self(), @@ -112,10 +112,10 @@ t_take(_Config) -> PacketId = 1, MRef = make_ref(), TRef = make_ref(), - ok = emqx_ft_async_reply:register(PacketId, MRef, TRef), + ok = emqx_ft_async_reply:register(PacketId, MRef, TRef, somedata), ?assertEqual( - {ok, PacketId, TRef}, + {ok, PacketId, TRef, somedata}, emqx_ft_async_reply:take_by_mref(MRef) ), @@ -135,12 +135,12 @@ t_cleanup(_Config) -> TRef0 = make_ref(), MRef1 = make_ref(), TRef1 = make_ref(), - ok = emqx_ft_async_reply:register(PacketId, MRef0, TRef0), + ok = emqx_ft_async_reply:register(PacketId, MRef0, TRef0, somedata0), Self = self(), Pid = spawn_link(fun() -> - ok = emqx_ft_async_reply:register(PacketId, MRef1, TRef1), + ok = emqx_ft_async_reply:register(PacketId, MRef1, TRef1, somedata1), receive kickoff -> ?assertEqual( @@ -149,7 +149,7 @@ t_cleanup(_Config) -> ), ?assertEqual( - {ok, PacketId, TRef1}, + {ok, PacketId, TRef1, somedata1}, emqx_ft_async_reply:take_by_mref(MRef1) ), diff --git a/apps/emqx_ft/test/emqx_ft_conf_SUITE.erl b/apps/emqx_ft/test/emqx_ft_conf_SUITE.erl index 0acdea213..8ce282f6d 100644 --- a/apps/emqx_ft/test/emqx_ft_conf_SUITE.erl +++ b/apps/emqx_ft/test/emqx_ft_conf_SUITE.erl @@ -39,10 +39,10 @@ init_per_testcase(Case, Config) -> ], #{work_dir => emqx_cth_suite:work_dir(Case, Config)} ), - [{suite_apps, Apps} | Config]. + [{apps, Apps} | Config]. end_per_testcase(_Case, Config) -> - ok = emqx_cth_suite:stop(?config(suite_apps, Config)), + ok = emqx_cth_suite:stop(?config(apps, Config)), ok. 
%%-------------------------------------------------------------------- diff --git a/apps/emqx_ft/test/emqx_ft_request_SUITE.erl b/apps/emqx_ft/test/emqx_ft_request_SUITE.erl new file mode 100644 index 000000000..b21917093 --- /dev/null +++ b/apps/emqx_ft/test/emqx_ft_request_SUITE.erl @@ -0,0 +1,113 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2020-2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_ft_request_SUITE). + +-compile(export_all). +-compile(nowarn_export_all). + +-include_lib("common_test/include/ct.hrl"). +-include_lib("stdlib/include/assert.hrl"). + +all() -> emqx_common_test_helpers:all(?MODULE). + +init_per_suite(Config) -> + Apps = emqx_cth_suite:start( + [ + {emqx_ft, "file_transfer { enable = true, assemble_timeout = 1s}"} + ], + #{work_dir => ?config(priv_dir, Config)} + ), + [{suite_apps, Apps} | Config]. + +end_per_suite(Config) -> + ok = emqx_cth_suite:stop(?config(suite_apps, Config)), + ok. + +init_per_testcase(_Case, Config) -> + Config. + +end_per_testcase(_Case, _Config) -> + ok. 
+ +%%------------------------------------------------------------------- +%% Tests +%%------------------------------------------------------------------- + +t_upload_via_requests(_Config) -> + C = emqx_ft_test_helpers:start_client(<<"client">>), + + FileId = <<"f1">>, + Data = <<"hello world">>, + Size = byte_size(Data), + Meta = #{ + name => "test.txt", + expire_at => erlang:system_time(_Unit = second) + 3600, + size => Size + }, + MetaPayload = emqx_utils_json:encode(emqx_ft:encode_filemeta(Meta)), + MetaTopic = <<"$file/", FileId/binary, "/init">>, + + ?assertMatch( + {ok, #{<<"reason_code">> := 0, <<"topic">> := MetaTopic}}, + request(C, MetaTopic, MetaPayload) + ), + + SegmentTopic = <<"$file/", FileId/binary, "/0">>, + + ?assertMatch( + {ok, #{<<"reason_code">> := 0, <<"topic">> := SegmentTopic}}, + request(C, SegmentTopic, Data) + ), + + FinTopic = <<"$file/", FileId/binary, "/fin/", (integer_to_binary(Size))/binary>>, + + ?assertMatch( + {ok, #{<<"reason_code">> := 0, <<"topic">> := FinTopic}}, + request(C, FinTopic, <<>>) + ). + +%%-------------------------------------------------------------------- +%% Helper functions +%%-------------------------------------------------------------------- + +request(C, Topic, Request) -> + CorrelaionData = emqx_ft_test_helpers:unique_binary_string(), + ResponseTopic = emqx_ft_test_helpers:unique_binary_string(), + + Properties = #{ + 'Correlation-Data' => CorrelaionData, + 'Response-Topic' => ResponseTopic + }, + Opts = [{qos, 1}], + + {ok, _, _} = emqtt:subscribe(C, ResponseTopic, 1), + {ok, _} = emqtt:publish(C, Topic, Properties, Request, Opts), + + try + receive + {publish, #{ + topic := ResponseTopic, + payload := Payload, + properties := #{'Correlation-Data' := CorrelaionData} + }} -> + {ok, emqx_utils_json:decode(Payload)} + after 1000 -> + {error, timeout} + end + after + emqtt:unsubscribe(C, ResponseTopic) + end. 
diff --git a/apps/emqx_ft/test/emqx_ft_storage_exporter_s3_SUITE.erl b/apps/emqx_ft/test/emqx_ft_storage_exporter_s3_SUITE.erl index 9e6050f36..90da824ef 100644 --- a/apps/emqx_ft/test/emqx_ft_storage_exporter_s3_SUITE.erl +++ b/apps/emqx_ft/test/emqx_ft_storage_exporter_s3_SUITE.erl @@ -38,25 +38,23 @@ init_per_suite(Config) -> end_per_suite(_Config) -> ok. -set_special_configs(Config) -> - fun - (emqx_ft) -> - Storage = emqx_ft_test_helpers:local_storage(Config, #{ - exporter => s3, bucket_name => ?config(bucket_name, Config) - }), - emqx_ft_test_helpers:load_config(#{<<"enable">> => true, <<"storage">> => Storage}); - (_) -> - ok - end. - -init_per_testcase(Case, Config0) -> +init_per_testcase(Case, Config) -> ClientId = atom_to_binary(Case), BucketName = create_bucket(), - Config1 = [{bucket_name, BucketName}, {clientid, ClientId} | Config0], - ok = emqx_common_test_helpers:start_apps([emqx_conf, emqx_ft], set_special_configs(Config1)), - Config1. -end_per_testcase(_Case, _Config) -> - ok = emqx_common_test_helpers:stop_apps([emqx_ft, emqx_conf]), + Storage = emqx_ft_test_helpers:local_storage(Config, #{ + exporter => s3, bucket_name => BucketName + }), + WorkDir = filename:join(?config(priv_dir, Config), atom_to_list(Case)), + Apps = emqx_cth_suite:start( + [ + emqx_conf, + {emqx_ft, #{config => emqx_ft_test_helpers:config(Storage)}} + ], + #{work_dir => WorkDir} + ), + [{apps, Apps}, {bucket_name, BucketName}, {clientid, ClientId} | Config]. +end_per_testcase(_Case, Config) -> + ok = emqx_cth_suite:stop(?config(apps, Config)), ok. 
%%-------------------------------------------------------------------- diff --git a/apps/emqx_ft/test/emqx_ft_storage_fs_SUITE.erl b/apps/emqx_ft/test/emqx_ft_storage_fs_SUITE.erl index 52d372e63..6ca158833 100644 --- a/apps/emqx_ft/test/emqx_ft_storage_fs_SUITE.erl +++ b/apps/emqx_ft/test/emqx_ft_storage_fs_SUITE.erl @@ -81,8 +81,8 @@ end_per_group(_Group, _Config) -> t_multinode_exports(Config) -> [Node1, Node2 | _] = ?config(cluster, Config), - ok = emqx_ft_test_helpers:upload_file(<<"c/1">>, <<"f:1">>, "fn1", <<"data">>, Node1), - ok = emqx_ft_test_helpers:upload_file(<<"c/2">>, <<"f:2">>, "fn2", <<"data">>, Node2), + ok = emqx_ft_test_helpers:upload_file(sync, <<"c/1">>, <<"f:1">>, "fn1", <<"data">>, Node1), + ok = emqx_ft_test_helpers:upload_file(sync, <<"c/2">>, <<"f:2">>, "fn2", <<"data">>, Node2), ?assertMatch( [ #{transfer := {<<"c/1">>, <<"f:1">>}, name := "fn1"}, diff --git a/apps/emqx_ft/test/emqx_ft_storage_fs_reader_SUITE.erl b/apps/emqx_ft/test/emqx_ft_storage_fs_reader_SUITE.erl index 217205f6f..f0b658e0d 100644 --- a/apps/emqx_ft/test/emqx_ft_storage_fs_reader_SUITE.erl +++ b/apps/emqx_ft/test/emqx_ft_storage_fs_reader_SUITE.erl @@ -25,11 +25,18 @@ all() -> emqx_common_test_helpers:all(?MODULE). init_per_suite(Config) -> - ok = emqx_common_test_helpers:start_apps([emqx_ft], emqx_ft_test_helpers:env_handler(Config)), - Config. + WorkDir = ?config(priv_dir, Config), + Storage = emqx_ft_test_helpers:local_storage(Config), + Apps = emqx_cth_suite:start( + [ + {emqx_ft, #{config => emqx_ft_test_helpers:config(Storage)}} + ], + #{work_dir => WorkDir} + ), + [{suite_apps, Apps} | Config]. -end_per_suite(_Config) -> - ok = emqx_common_test_helpers:stop_apps([emqx_ft]), +end_per_suite(Config) -> + ok = emqx_cth_suite:stop(?config(suite_apps, Config)), ok. 
init_per_testcase(_Case, Config) -> diff --git a/apps/emqx_ft/test/emqx_ft_test_helpers.erl b/apps/emqx_ft/test/emqx_ft_test_helpers.erl index 9e69118c8..efcaa3048 100644 --- a/apps/emqx_ft/test/emqx_ft_test_helpers.erl +++ b/apps/emqx_ft/test/emqx_ft_test_helpers.erl @@ -24,16 +24,15 @@ -define(S3_HOST, <<"minio">>). -define(S3_PORT, 9000). -env_handler(Config) -> - fun - (emqx_ft) -> - load_config(#{<<"enable">> => true, <<"storage">> => local_storage(Config)}); - (_) -> - ok - end. - config(Storage) -> - #{<<"file_transfer">> => #{<<"enable">> => true, <<"storage">> => Storage}}. + config(Storage, #{}). + +config(Storage, FTOptions0) -> + FTOptions1 = maps:merge( + #{<<"enable">> => true, <<"storage">> => Storage}, + FTOptions0 + ), + #{<<"file_transfer">> => FTOptions1}. local_storage(Config) -> local_storage(Config, #{exporter => local}). @@ -73,7 +72,13 @@ tcp_port(Node) -> Port. root(Config, Node, Tail) -> - iolist_to_binary(filename:join([?config(priv_dir, Config), "file_transfer", Node | Tail])). + iolist_to_binary(filename:join([ft_root(Config), Node | Tail])). + +ft_root(Config) -> + filename:join([?config(priv_dir, Config), "file_transfer"]). + +cleanup_ft_root(Config) -> + file:del_dir_r(emqx_ft_test_helpers:ft_root(Config)). start_client(ClientId) -> start_client(ClientId, node()). @@ -85,11 +90,15 @@ start_client(ClientId, Node) -> Client. upload_file(ClientId, FileId, Name, Data) -> - upload_file(ClientId, FileId, Name, Data, node()). + upload_file(sync, ClientId, FileId, Name, Data). -upload_file(ClientId, FileId, Name, Data, Node) -> +upload_file(Mode, ClientId, FileId, Name, Data) -> + upload_file(Mode, ClientId, FileId, Name, Data, node()). 
+ +upload_file(Mode, ClientId, FileId, Name, Data, Node) -> C1 = start_client(ClientId, Node), + ReqTopicPrefix = request_topic_prefix(Mode, FileId), Size = byte_size(Data), Meta = #{ name => Name, @@ -98,25 +107,53 @@ upload_file(ClientId, FileId, Name, Data, Node) -> }, MetaPayload = emqx_utils_json:encode(emqx_ft:encode_filemeta(Meta)), - ct:pal("MetaPayload = ~ts", [MetaPayload]), - - MetaTopic = <<"$file/", FileId/binary, "/init">>, + MetaTopic = <>, {ok, #{reason_code_name := success}} = emqtt:publish(C1, MetaTopic, MetaPayload, 1), {ok, #{reason_code_name := success}} = emqtt:publish( - C1, <<"$file/", FileId/binary, "/0">>, Data, 1 + C1, <>, Data, 1 ), - FinTopic = <<"$file/", FileId/binary, "/fin/", (integer_to_binary(Size))/binary>>, - FinResult = - case emqtt:publish(C1, FinTopic, <<>>, 1) of - {ok, #{reason_code_name := success}} -> - ok; - {ok, #{reason_code_name := Error}} -> - {error, Error} - end, + FinTopic = <>, + FinResult = fin_result(Mode, ClientId, C1, FinTopic), ok = emqtt:stop(C1), FinResult. +fin_result(Mode, ClientId, C, FinTopic) -> + {ok, _, _} = emqtt:subscribe(C, response_topic(ClientId), 1), + case emqtt:publish(C, FinTopic, <<>>, 1) of + {ok, #{reason_code_name := success}} -> + maybe_wait_for_assemble(Mode, ClientId, FinTopic); + {ok, #{reason_code_name := Error}} -> + {error, Error} + end. + +maybe_wait_for_assemble(sync, _ClientId, _FinTopic) -> + ok; +maybe_wait_for_assemble(async, ClientId, FinTopic) -> + ResponseTopic = response_topic(ClientId), + receive + {publish, #{payload := Payload, topic := ResponseTopic}} -> + case emqx_utils_json:decode(Payload) of + #{<<"topic">> := FinTopic, <<"reason_code">> := 0} -> + ok; + #{<<"topic">> := FinTopic, <<"reason_code">> := Code} -> + {error, emqx_reason_codes:name(Code)}; + _ -> + maybe_wait_for_assemble(async, ClientId, FinTopic) + end + end. + +response_topic(ClientId) -> + <<"$file-response/", (to_bin(ClientId))/binary>>. 
+ +request_topic_prefix(sync, FileId) -> + <<"$file/", (to_bin(FileId))/binary>>; +request_topic_prefix(async, FileId) -> + <<"$file-async/", (to_bin(FileId))/binary>>. + +to_bin(Val) -> + iolist_to_binary(Val). + aws_config() -> emqx_s3_test_helpers:aws_config(tcp, binary_to_list(?S3_HOST), ?S3_PORT). @@ -129,3 +166,6 @@ pem_privkey() -> "ju0VBj6tOX1y6C0U+85VOM0UU5xqvw==\n" "-----END EC PRIVATE KEY-----\n" >>. + +unique_binary_string() -> + emqx_guid:to_hexstr(emqx_guid:gen()). diff --git a/changes/ee/feat-11541.en.md b/changes/ee/feat-11541.en.md new file mode 100644 index 000000000..dee06609d --- /dev/null +++ b/changes/ee/feat-11541.en.md @@ -0,0 +1,3 @@ +Introduced additional way of file transfer interactions. Now client may send file transfer commands to `$file-async/...` topic instead of `$file/...` and receive command execution results as messages to `$file-response/{clientId}` topic. +This simplifies file transfer feature usage in certain cases, for example, when a client uses MQTTv3 or when the broker is behind an MQTT bridge. +See the [EIP-0021](https://github.com/emqx/eip) for more details. 
From d753edc0fd2692c6ef4077a504a009ce1ead8953 Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Wed, 11 Oct 2023 16:00:01 +0200 Subject: [PATCH 013/155] chore: remove lib-ee in scripts --- .github/workflows/static_checks.yaml | 2 +- Makefile | 2 +- dev | 7 ------ elvis.config | 2 +- mix.exs | 21 ++++-------------- rebar.config.erl | 11 +++------- scripts/check-deps-integrity.escript | 4 +--- scripts/ct/run.sh | 33 ++++++++-------------------- scripts/find-apps.sh | 7 +----- scripts/update-appup.sh | 8 +------ 10 files changed, 22 insertions(+), 75 deletions(-) diff --git a/.github/workflows/static_checks.yaml b/.github/workflows/static_checks.yaml index 758c3712b..cb8740b1a 100644 --- a/.github/workflows/static_checks.yaml +++ b/.github/workflows/static_checks.yaml @@ -40,7 +40,7 @@ jobs: - uses: actions/cache@v3 with: path: "emqx_dialyzer_${{ matrix.otp }}_plt" - key: rebar3-dialyzer-plt-${{ matrix.profile }}-${{ matrix.otp }}-${{ hashFiles('rebar.*', 'apps/*/rebar.*', 'lib-ee/*/rebar.*') }} + key: rebar3-dialyzer-plt-${{ matrix.profile }}-${{ matrix.otp }}-${{ hashFiles('rebar.*', 'apps/*/rebar.*') }} restore-keys: | rebar3-dialyzer-plt-${{ matrix.profile }}-${{ matrix.otp }}- - name: run static checks diff --git a/Makefile b/Makefile index 3d11491f9..3a7227362 100644 --- a/Makefile +++ b/Makefile @@ -296,7 +296,7 @@ $(foreach tt,$(ALL_ELIXIR_TGZS),$(eval $(call gen-elixir-tgz-target,$(tt)))) .PHONY: fmt fmt: $(REBAR) - @$(SCRIPTS)/erlfmt -w '{apps,lib-ee}/*/{src,include,priv,test,integration_test}/**/*.{erl,hrl,app.src,eterm}' + @$(SCRIPTS)/erlfmt -w 'apps/*/{src,include,priv,test,integration_test}/**/*.{erl,hrl,app.src,eterm}' @$(SCRIPTS)/erlfmt -w 'rebar.config.erl' @mix format diff --git a/dev b/dev index 7622c72fc..67cb6e969 100755 --- a/dev +++ b/dev @@ -336,13 +336,6 @@ copy_other_conf_files() { is_current_profile_app() { local app="$1" case "$app" in - lib-ee*) - if [ "$PROFILE" = 'emqx-enterprise' ]; then - return 0 - else - return 1 - fi - ;; 
*emqx_telemetry*) if [ "$PROFILE" = 'emqx-enterprise' ]; then return 1 diff --git a/elvis.config b/elvis.config index ee7eaeaee..87d739865 100644 --- a/elvis.config +++ b/elvis.config @@ -5,7 +5,7 @@ [ {config, [ - #{dirs => ["src", "apps/**/src", "lib-ee/**/src"], + #{dirs => ["src", "apps/**/src"], filter => "*.erl", ruleset => erl_files, rules => [ diff --git a/mix.exs b/mix.exs index 07f6b0209..336bde4bf 100644 --- a/mix.exs +++ b/mix.exs @@ -169,24 +169,10 @@ defmodule EMQXUmbrella.MixProject do end defp enterprise_apps(_profile_info = %{edition_type: :enterprise}) do - umbrella_apps = - Enum.map(enterprise_umbrella_apps(), fn app_name -> - path = "apps/#{app_name}" - {app_name, path: path, manager: :rebar3, override: true} - end) - - "lib-ee/*" - |> Path.wildcard() - |> Enum.filter(&File.dir?/1) - |> Enum.map(fn path -> - app = - path - |> Path.basename() - |> String.to_atom() - - {app, path: path, manager: :rebar3, override: true} + Enum.map(enterprise_umbrella_apps(), fn app_name -> + path = "apps/#{app_name}" + {app_name, path: path, manager: :rebar3, override: true} end) - |> Enum.concat(umbrella_apps) end defp enterprise_apps(_profile_info) do @@ -220,6 +206,7 @@ defmodule EMQXUmbrella.MixProject do :emqx_bridge_rabbitmq, :emqx_bridge_clickhouse, :emqx_ft, + :emqx_license, :emqx_s3, :emqx_schema_registry, :emqx_enterprise, diff --git a/rebar.config.erl b/rebar.config.erl index 5dad62af6..8bf502566 100644 --- a/rebar.config.erl +++ b/rebar.config.erl @@ -164,11 +164,7 @@ project_app_dirs(Edition) -> || Path <- filelib:wildcard("apps/*"), is_community_umbrella_app(Path) orelse IsEnterprise ], - UmbrellaApps ++ - case IsEnterprise of - true -> ["lib-ee/*"]; - false -> [] - end. + UmbrellaApps. 
plugins() -> [ @@ -539,8 +535,7 @@ provide_bcrypt_release(ReleaseType) -> erl_opts_i() -> [{i, "apps"}] ++ - [{i, Dir} || Dir <- filelib:wildcard(filename:join(["apps", "*", "include"]))] ++ - [{i, Dir} || Dir <- filelib:wildcard(filename:join(["lib-ee", "*", "include"]))]. + [{i, Dir} || Dir <- filelib:wildcard(filename:join(["apps", "*", "include"]))]. dialyzer(Config) -> {dialyzer, OldDialyzerConfig} = lists:keyfind(dialyzer, 1, Config), @@ -597,7 +592,7 @@ coveralls() -> [] end. -app_names() -> list_dir("apps") ++ list_dir("lib-ee"). +app_names() -> list_dir("apps"). list_dir(Dir) -> case filelib:is_dir(Dir) of diff --git a/scripts/check-deps-integrity.escript b/scripts/check-deps-integrity.escript index 738aeec31..03cd509de 100755 --- a/scripts/check-deps-integrity.escript +++ b/scripts/check-deps-integrity.escript @@ -5,9 +5,7 @@ -mode(compile). main([]) -> - Files = ["rebar.config"] ++ - apps_rebar_config("apps") ++ - apps_rebar_config("lib-ee"), + Files = ["rebar.config"] ++ apps_rebar_config("apps"), Deps = collect_deps(Files, #{}), case count_bad_deps(Deps) of 0 -> diff --git a/scripts/ct/run.sh b/scripts/ct/run.sh index 5ad289303..f3c01358b 100755 --- a/scripts/ct/run.sh +++ b/scripts/ct/run.sh @@ -98,33 +98,18 @@ if [ ! -d "${WHICH_APP}" ]; then exit 1 fi -if [[ "${WHICH_APP}" == lib-ee* && (-z "${PROFILE+x}" || "${PROFILE}" != emqx-enterprise) ]]; then - echo 'You are trying to run an enterprise test case without the emqx-enterprise profile.' - echo 'This will most likely not work.' 
- echo '' - echo 'Run "export PROFILE=emqx-enterprise" and "make" to fix this' - exit 1 -fi - ERLANG_CONTAINER='erlang' DOCKER_CT_ENVS_FILE="${WHICH_APP}/docker-ct" -case "${WHICH_APP}" in - lib-ee*) - ## ensure enterprise profile when testing lib-ee applications - export PROFILE='emqx-enterprise' - ;; - apps/*) - if [[ -f "${WHICH_APP}/BSL.txt" ]]; then - export PROFILE='emqx-enterprise' - else - export PROFILE='emqx' - fi - ;; - *) - export PROFILE="${PROFILE:-emqx}" - ;; -esac +if [ -f "${WHICH_APP}/BSL.txt" ]; then + if [ -n "${PROFILE:-}" ] && [ "${PROFILE}" != 'emqx-enterprise' ]; then + echo "bad_profile: PROFILE=${PROFILE} will not work for app ${WHICH_APP}" + exit 1 + fi + export PROFILE='emqx-enterprise' +else + export PROFILE='emqx' +fi if [ -f "$DOCKER_CT_ENVS_FILE" ]; then # shellcheck disable=SC2002 diff --git a/scripts/find-apps.sh b/scripts/find-apps.sh index 9120181c9..5ebb363a9 100755 --- a/scripts/find-apps.sh +++ b/scripts/find-apps.sh @@ -41,9 +41,7 @@ find_app() { "$FIND" "${appdir}" -mindepth 1 -maxdepth 1 -type d } -CE="$(find_app 'apps')" -EE="$(find_app 'lib-ee')" -APPS_ALL="$(echo -e "${CE}\n${EE}")" +APPS_ALL="$(find_app 'apps')" if [ "$MODE" = 'list' ]; then echo "${APPS_ALL}" @@ -87,9 +85,6 @@ describe_app() { profile='emqx' fi ;; - lib-ee/*) - profile='emqx-enterprise' - ;; *) echo "unknown app: $app" exit 1 diff --git a/scripts/update-appup.sh b/scripts/update-appup.sh index 2c99c641d..e7dbff89a 100755 --- a/scripts/update-appup.sh +++ b/scripts/update-appup.sh @@ -72,12 +72,6 @@ while [ "$#" -gt 0 ]; do esac done -if [ "$TAG_PREFIX" = 'v' ]; then - SRC_DIRS="{apps}" -else - SRC_DIRS="{apps,lib-ee}" -fi - ## make sure we build here in bash and always pass --skip-build to escript if [ "${SKIP_BUILD:-}" != 'yes' ]; then make "${PROFILE}" @@ -114,7 +108,7 @@ PREV_REL_DIR="${PREV_DIR_BASE}/${PREV_TAG}/_build/${PROFILE}/lib" # this in turn makes quoting "${ESCRIPT_ARGS[@]}" problematic, hence disable SC2068 check here # shellcheck 
disable=SC2068 ./scripts/update_appup.escript \ - --src-dirs "${SRC_DIRS}/**" \ + --src-dirs "apps/**" \ --release-dir "_build/${PROFILE}/lib" \ --prev-release-dir "${PREV_REL_DIR}" \ --skip-build \ From b07dddd49e8470047581f63d108a073f86275612 Mon Sep 17 00:00:00 2001 From: Thales Macedo Garitezi Date: Wed, 11 Oct 2023 17:31:26 -0300 Subject: [PATCH 014/155] fix(postgres): format unicode error messages from driver Fixes https://emqx.atlassian.net/browse/EMQX-11024 Sample error: ``` {error, error, <<"42501">>, insufficient_privilege, <<229,175,185,232,161,168,32,109,113,116,116,95,117,115,101,114,32,230,157,131,233,153,144,228,184,141,229,164,159>>, []} ``` --- .../emqx_connector/src/emqx_connector.app.src | 2 +- .../src/emqx_connector_pgsql.erl | 61 +++++++++++++++---- changes/ee/fix-11754.en.md | 1 + 3 files changed, 50 insertions(+), 14 deletions(-) create mode 100644 changes/ee/fix-11754.en.md diff --git a/apps/emqx_connector/src/emqx_connector.app.src b/apps/emqx_connector/src/emqx_connector.app.src index a3ca61d82..7ecabb0ff 100644 --- a/apps/emqx_connector/src/emqx_connector.app.src +++ b/apps/emqx_connector/src/emqx_connector.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_connector, [ {description, "EMQX Data Integration Connectors"}, - {vsn, "0.1.32"}, + {vsn, "0.1.33"}, {registered, []}, {mod, {emqx_connector_app, []}}, {applications, [ diff --git a/apps/emqx_connector/src/emqx_connector_pgsql.erl b/apps/emqx_connector/src/emqx_connector_pgsql.erl index 41d587a02..b1e4a9fdb 100644 --- a/apps/emqx_connector/src/emqx_connector_pgsql.erl +++ b/apps/emqx_connector/src/emqx_connector_pgsql.erl @@ -246,13 +246,18 @@ on_sql_query(InstId, PoolName, Type, NameOrSQL, Data) -> pgsql_connector_query_return, #{error => Reason} ), - ?SLOG(error, #{ - msg => "postgresql_connector_do_sql_query_failed", - connector => InstId, - type => Type, - sql => NameOrSQL, - reason => Reason - }), + ?SLOG( + error, + maps:merge( + #{ + msg => 
"postgresql_connector_do_sql_query_failed", + connector => InstId, + type => Type, + sql => NameOrSQL + }, + translate_to_log_context(Reason) + ) + ), case Reason of sync_required -> {error, {recoverable_error, Reason}}; @@ -452,10 +457,12 @@ init_prepare(State = #{prepare_sql := Prepares, pool_name := PoolName}) -> {ok, Sts} -> State#{prepare_statement := Sts}; Error -> - LogMeta = #{ - msg => <<"postgresql_init_prepare_statement_failed">>, error => Error - }, - ?SLOG(error, LogMeta), + LogMsg = + maps:merge( + #{msg => <<"postgresql_init_prepare_statement_failed">>}, + translate_to_log_context(Error) + ), + ?SLOG(error, LogMsg), %% mark the prepare_sql as failed State#{prepare_sql => {error, Prepares}} end @@ -500,10 +507,20 @@ prepare_sql_to_conn(Conn, [{Key, SQL} | PrepareList], Statements) when is_pid(Co {error, {error, error, _, undefined_table, _, _} = Error} -> %% Target table is not created ?tp(pgsql_undefined_table, #{}), - ?SLOG(error, LogMeta#{msg => "postgresql_parse_failed", error => Error}), + LogMsg = + maps:merge( + LogMeta#{msg => "postgresql_parse_failed"}, + translate_to_log_context(Error) + ), + ?SLOG(error, LogMsg), {error, undefined_table}; {error, Error} = Other -> - ?SLOG(error, LogMeta#{msg => "postgresql_parse_failed", error => Error}), + LogMsg = + maps:merge( + LogMeta#{msg => "postgresql_parse_failed"}, + translate_to_log_context(Error) + ), + ?SLOG(error, LogMsg), Other end. @@ -529,3 +546,21 @@ handle_batch_result([{error, Error} | _Rest], _Acc) -> {error, {unrecoverable_error, Error}}; handle_batch_result([], Acc) -> {ok, Acc}. 
+ +translate_to_log_context(#error{} = Reason) -> + #error{ + severity = Severity, + code = Code, + codename = Codename, + message = Message, + extra = Extra + } = Reason, + #{ + driver_severity => Severity, + driver_error_codename => Codename, + driver_error_code => Code, + driver_error_message => emqx_logger_textfmt:try_format_unicode(Message), + driver_error_extra => Extra + }; +translate_to_log_context(Reason) -> + #{reason => Reason}. diff --git a/changes/ee/fix-11754.en.md b/changes/ee/fix-11754.en.md new file mode 100644 index 000000000..5aa2bed15 --- /dev/null +++ b/changes/ee/fix-11754.en.md @@ -0,0 +1 @@ +Improved log formatting for Postgres bridge when there are unicode characters in the error messages returned by the driver. From fd15939ae843b91592854b7aacefa516d6c0f72c Mon Sep 17 00:00:00 2001 From: Ivan Dyachkov Date: Thu, 12 Oct 2023 08:41:50 +0200 Subject: [PATCH 015/155] chore(ci): limit token scope in workflows --- .github/workflows/build_and_push_docker_images.yaml | 3 +++ .github/workflows/build_docker_for_test.yaml | 3 +++ .github/workflows/check_deps_integrity.yaml | 3 +++ .github/workflows/codeql.yaml | 4 +++- .github/workflows/green_master.yaml | 3 +++ .github/workflows/performance_test.yaml | 3 +++ .github/workflows/release.yaml | 5 +++++ .github/workflows/run_conf_tests.yaml | 3 +++ .github/workflows/run_docker_tests.yaml | 3 +++ .github/workflows/run_emqx_app_tests.yaml | 3 +++ .github/workflows/run_helm_tests.yaml | 3 +++ .github/workflows/run_relup_tests.yaml | 3 +++ .github/workflows/spellcheck.yaml | 3 +++ .github/workflows/stale.yaml | 3 +++ .github/workflows/static_checks.yaml | 3 +++ 15 files changed, 47 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build_and_push_docker_images.yaml b/.github/workflows/build_and_push_docker_images.yaml index 62399575c..ea67ffbf5 100644 --- a/.github/workflows/build_and_push_docker_images.yaml +++ b/.github/workflows/build_and_push_docker_images.yaml @@ -75,6 +75,9 @@ on: type: string 
default: '["self-hosted","ephemeral", "linux"]' +permissions: + contents: read + jobs: docker: runs-on: ${{ fromJSON(inputs.runner_labels) }} diff --git a/.github/workflows/build_docker_for_test.yaml b/.github/workflows/build_docker_for_test.yaml index a71bb96d9..3983f3fa9 100644 --- a/.github/workflows/build_docker_for_test.yaml +++ b/.github/workflows/build_docker_for_test.yaml @@ -23,6 +23,9 @@ on: required: true type: string +permissions: + contents: read + jobs: docker: runs-on: ${{ fromJSON(inputs.runner_labels) }} diff --git a/.github/workflows/check_deps_integrity.yaml b/.github/workflows/check_deps_integrity.yaml index 1b260fae9..df7170523 100644 --- a/.github/workflows/check_deps_integrity.yaml +++ b/.github/workflows/check_deps_integrity.yaml @@ -10,6 +10,9 @@ on: required: true type: string +permissions: + contents: read + jobs: check_deps_integrity: runs-on: ${{ fromJSON(inputs.runner_labels) }} diff --git a/.github/workflows/codeql.yaml b/.github/workflows/codeql.yaml index a0b701d17..7e2057e30 100644 --- a/.github/workflows/codeql.yaml +++ b/.github/workflows/codeql.yaml @@ -8,6 +8,9 @@ on: ref: required: false +permissions: + contents: read + jobs: analyze: name: Analyze @@ -15,7 +18,6 @@ jobs: timeout-minutes: 360 permissions: actions: read - contents: read security-events: write container: image: ghcr.io/emqx/emqx-builder/5.1-4:1.14.5-25.3.2-2-ubuntu22.04 diff --git a/.github/workflows/green_master.yaml b/.github/workflows/green_master.yaml index d7d4517d3..de343a8f9 100644 --- a/.github/workflows/green_master.yaml +++ b/.github/workflows/green_master.yaml @@ -8,6 +8,9 @@ on: - cron: "0 * * * *" workflow_dispatch: +permissions: + contents: read + jobs: rerun-failed-jobs: if: github.repository_owner == 'emqx' diff --git a/.github/workflows/performance_test.yaml b/.github/workflows/performance_test.yaml index 224cfb0b3..5c938481e 100644 --- a/.github/workflows/performance_test.yaml +++ b/.github/workflows/performance_test.yaml @@ -19,6 +19,9 @@ env: 
TF_VAR_prometheus_remote_write_url: ${{ secrets.TF_EMQX_PERF_TEST_PROMETHEUS_REMOTE_WRITE_URL }} SLACK_WEBHOOK_URL: ${{ secrets.TF_EMQX_PERF_TEST_SLACK_URL }} +permissions: + contents: read + jobs: prepare: runs-on: ubuntu-latest diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index ab145a764..b23f91128 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -13,9 +13,14 @@ on: required: true default: false +permissions: + contents: read + jobs: upload: runs-on: ubuntu-22.04 + permissions: + packages: write strategy: fail-fast: false steps: diff --git a/.github/workflows/run_conf_tests.yaml b/.github/workflows/run_conf_tests.yaml index a69746d76..e218ae838 100644 --- a/.github/workflows/run_conf_tests.yaml +++ b/.github/workflows/run_conf_tests.yaml @@ -14,6 +14,9 @@ on: required: true type: string +permissions: + contents: read + jobs: run_conf_tests: runs-on: ${{ fromJSON(inputs.runner_labels) }} diff --git a/.github/workflows/run_docker_tests.yaml b/.github/workflows/run_docker_tests.yaml index 18dd9ac38..08391611f 100644 --- a/.github/workflows/run_docker_tests.yaml +++ b/.github/workflows/run_docker_tests.yaml @@ -17,6 +17,9 @@ on: required: true type: string +permissions: + contents: read + jobs: basic-tests: runs-on: ${{ fromJSON(inputs.runner_labels) }} diff --git a/.github/workflows/run_emqx_app_tests.yaml b/.github/workflows/run_emqx_app_tests.yaml index c5554805a..695ad4750 100644 --- a/.github/workflows/run_emqx_app_tests.yaml +++ b/.github/workflows/run_emqx_app_tests.yaml @@ -26,6 +26,9 @@ on: env: IS_CI: "yes" +permissions: + contents: read + jobs: run_emqx_app_tests: runs-on: ${{ fromJSON(inputs.runner_labels) }} diff --git a/.github/workflows/run_helm_tests.yaml b/.github/workflows/run_helm_tests.yaml index 9c314afde..c8f46948c 100644 --- a/.github/workflows/run_helm_tests.yaml +++ b/.github/workflows/run_helm_tests.yaml @@ -17,6 +17,9 @@ on: required: true type: string +permissions: + 
contents: read + jobs: helm_test: runs-on: ${{ fromJSON(inputs.runner_labels) }} diff --git a/.github/workflows/run_relup_tests.yaml b/.github/workflows/run_relup_tests.yaml index a38ccae4f..b110e8512 100644 --- a/.github/workflows/run_relup_tests.yaml +++ b/.github/workflows/run_relup_tests.yaml @@ -14,6 +14,9 @@ on: required: true type: string +permissions: + contents: read + jobs: relup_test_plan: runs-on: ["${{ inputs.runner }}", 'linux', 'x64', 'ephemeral'] diff --git a/.github/workflows/spellcheck.yaml b/.github/workflows/spellcheck.yaml index 08f6c2c38..57e6ac214 100644 --- a/.github/workflows/spellcheck.yaml +++ b/.github/workflows/spellcheck.yaml @@ -11,6 +11,9 @@ on: required: true type: string +permissions: + contents: read + jobs: spellcheck: strategy: diff --git a/.github/workflows/stale.yaml b/.github/workflows/stale.yaml index eb8038dcc..d26ae79c2 100644 --- a/.github/workflows/stale.yaml +++ b/.github/workflows/stale.yaml @@ -8,6 +8,9 @@ on: - cron: "0 * * * *" workflow_dispatch: +permissions: + contents: read + jobs: stale: if: github.repository_owner == 'emqx' diff --git a/.github/workflows/static_checks.yaml b/.github/workflows/static_checks.yaml index 758c3712b..7014c3365 100644 --- a/.github/workflows/static_checks.yaml +++ b/.github/workflows/static_checks.yaml @@ -20,6 +20,9 @@ on: env: IS_CI: "yes" +permissions: + contents: read + jobs: static_checks: runs-on: ${{ fromJSON(inputs.runner_labels) }} From 1021088f5f96296ef84636355e51340a9dec1232 Mon Sep 17 00:00:00 2001 From: zhongwencool Date: Thu, 12 Oct 2023 15:23:23 +0800 Subject: [PATCH 016/155] fix: 500 error response when downloading non-existent trace files --- apps/emqx_management/src/emqx_mgmt_api_trace.erl | 2 ++ .../test/emqx_mgmt_api_trace_SUITE.erl | 11 ++++++++++- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/apps/emqx_management/src/emqx_mgmt_api_trace.erl b/apps/emqx_management/src/emqx_mgmt_api_trace.erl index bcc21a97b..4aff66efc 100644 --- 
a/apps/emqx_management/src/emqx_mgmt_api_trace.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_trace.erl @@ -552,6 +552,8 @@ group_trace_files(TraceLog, TraceFiles) -> empty; ({ok, _Node, _Bin}) -> nonempty; + ({error, _Node, enoent}) -> + empty; ({error, Node, Reason}) -> ?SLOG(error, #{ msg => "download_trace_log_error", diff --git a/apps/emqx_management/test/emqx_mgmt_api_trace_SUITE.erl b/apps/emqx_management/test/emqx_mgmt_api_trace_SUITE.erl index f4725b453..9b8222d20 100644 --- a/apps/emqx_management/test/emqx_mgmt_api_trace_SUITE.erl +++ b/apps/emqx_management/test/emqx_mgmt_api_trace_SUITE.erl @@ -389,7 +389,16 @@ t_download_empty_trace(_Config) -> ), {error, {{_, 404, _}, _Headers, Body}} = request_api(get, api_path(<<"trace/", Name/binary, "/download">>), [], #{return_all => true}), - ?assertMatch(#{<<"message">> := <<"Trace is empty">>}, emqx_utils_json:decode(Body)). + ?assertMatch(#{<<"message">> := <<"Trace is empty">>}, emqx_utils_json:decode(Body)), + File = emqx_trace:log_file(Name, Now), + ct:pal("FileName: ~p", [File]), + ?assertEqual({ok, <<>>}, file:read_file(File)), + ?assertEqual(ok, file:delete(File)), + %% return 404 if trace file is not found + {error, {{_, 404, _}, _Headers, Body}} = + request_api(get, api_path(<<"trace/", Name/binary, "/download">>), [], #{return_all => true}), + ?assertMatch(#{<<"message">> := <<"Trace is empty">>}, emqx_utils_json:decode(Body)), + ok. to_rfc3339(Second) -> list_to_binary(calendar:system_time_to_rfc3339(Second)). 
From 9b2a5e6c3dee0d1b145059748d102ef68b2e3ad5 Mon Sep 17 00:00:00 2001 From: zhongwencool Date: Thu, 12 Oct 2023 15:25:36 +0800 Subject: [PATCH 017/155] chore: add changelog for 11757 --- changes/ce/fix-11757.en.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 changes/ce/fix-11757.en.md diff --git a/changes/ce/fix-11757.en.md b/changes/ce/fix-11757.en.md new file mode 100644 index 000000000..cc57e70a8 --- /dev/null +++ b/changes/ce/fix-11757.en.md @@ -0,0 +1 @@ +Fixed 500 error response when downloading non-existent trace files, now returns 404. From 9b4def885a85e933d27a7ea7edfa07ee7ca15223 Mon Sep 17 00:00:00 2001 From: firest Date: Thu, 12 Oct 2023 16:37:32 +0800 Subject: [PATCH 018/155] chore: change the LDAP integration to opensource --- apps/emqx_auth_ldap/src/emqx_authn_ldap.erl | 12 +++ .../src/emqx_authn_ldap_bind.erl | 12 +++ apps/emqx_auth_ldap/src/emqx_authz_ldap.erl | 14 ++- .../test/emqx_authn_ldap_SUITE.erl | 12 +++ .../test/emqx_authn_ldap_bind_SUITE.erl | 12 +++ .../test/emqx_authz_ldap_SUITE.erl | 12 +++ apps/emqx_authn/src/emqx_authn_enterprise.erl | 26 +++++ apps/emqx_authz/src/emqx_authz.app.src | 23 +++++ apps/emqx_authz/src/emqx_authz_enterprise.erl | 60 ++++++++++++ apps/emqx_ldap/BSL.txt | 94 ------------------- apps/emqx_ldap/src/emqx_ldap.erl | 12 +++ apps/emqx_ldap/src/emqx_ldap_bind_worker.erl | 12 +++ apps/emqx_ldap/src/emqx_ldap_filter_lexer.xrl | 12 +++ .../emqx_ldap/src/emqx_ldap_filter_parser.yrl | 12 +++ apps/emqx_ldap/test/emqx_ldap_SUITE.erl | 12 +++ .../emqx_ldap/test/emqx_ldap_filter_SUITE.erl | 12 +++ apps/emqx_machine/priv/reboot_lists.eterm | 2 - mix.exs | 2 - rebar.config.erl | 2 - 19 files changed, 254 insertions(+), 101 deletions(-) create mode 100644 apps/emqx_authn/src/emqx_authn_enterprise.erl create mode 100644 apps/emqx_authz/src/emqx_authz.app.src create mode 100644 apps/emqx_authz/src/emqx_authz_enterprise.erl delete mode 100644 apps/emqx_ldap/BSL.txt diff --git 
a/apps/emqx_auth_ldap/src/emqx_authn_ldap.erl b/apps/emqx_auth_ldap/src/emqx_authn_ldap.erl index 8685faecd..975a7f828 100644 --- a/apps/emqx_auth_ldap/src/emqx_authn_ldap.erl +++ b/apps/emqx_auth_ldap/src/emqx_authn_ldap.erl @@ -1,5 +1,17 @@ %%-------------------------------------------------------------------- %% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. %%-------------------------------------------------------------------- -module(emqx_authn_ldap). diff --git a/apps/emqx_auth_ldap/src/emqx_authn_ldap_bind.erl b/apps/emqx_auth_ldap/src/emqx_authn_ldap_bind.erl index 82f8b9443..000d545b9 100644 --- a/apps/emqx_auth_ldap/src/emqx_authn_ldap_bind.erl +++ b/apps/emqx_auth_ldap/src/emqx_authn_ldap_bind.erl @@ -1,5 +1,17 @@ %%-------------------------------------------------------------------- %% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+%% See the License for the specific language governing permissions and +%% limitations under the License. %%-------------------------------------------------------------------- -module(emqx_authn_ldap_bind). diff --git a/apps/emqx_auth_ldap/src/emqx_authz_ldap.erl b/apps/emqx_auth_ldap/src/emqx_authz_ldap.erl index eb12fdd37..84f9d7ed6 100644 --- a/apps/emqx_auth_ldap/src/emqx_authz_ldap.erl +++ b/apps/emqx_auth_ldap/src/emqx_authz_ldap.erl @@ -1,5 +1,5 @@ %%-------------------------------------------------------------------- -%% Copyright (c) 2020-2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. %% %% Licensed under the Apache License, Version 2.0 (the "License"); %% you may not use this file except in compliance with the License. @@ -13,6 +13,18 @@ %% See the License for the specific language governing permissions and %% limitations under the License. %%-------------------------------------------------------------------- +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- -module(emqx_authz_ldap). 
diff --git a/apps/emqx_auth_ldap/test/emqx_authn_ldap_SUITE.erl b/apps/emqx_auth_ldap/test/emqx_authn_ldap_SUITE.erl index e75a9a617..63bceee85 100644 --- a/apps/emqx_auth_ldap/test/emqx_authn_ldap_SUITE.erl +++ b/apps/emqx_auth_ldap/test/emqx_authn_ldap_SUITE.erl @@ -1,5 +1,17 @@ %%-------------------------------------------------------------------- %% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. %%-------------------------------------------------------------------- -module(emqx_authn_ldap_SUITE). diff --git a/apps/emqx_auth_ldap/test/emqx_authn_ldap_bind_SUITE.erl b/apps/emqx_auth_ldap/test/emqx_authn_ldap_bind_SUITE.erl index a796b8e01..1f390264b 100644 --- a/apps/emqx_auth_ldap/test/emqx_authn_ldap_bind_SUITE.erl +++ b/apps/emqx_auth_ldap/test/emqx_authn_ldap_bind_SUITE.erl @@ -1,5 +1,17 @@ %%-------------------------------------------------------------------- %% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+%% See the License for the specific language governing permissions and +%% limitations under the License. %%-------------------------------------------------------------------- -module(emqx_authn_ldap_bind_SUITE). diff --git a/apps/emqx_auth_ldap/test/emqx_authz_ldap_SUITE.erl b/apps/emqx_auth_ldap/test/emqx_authz_ldap_SUITE.erl index 569c0e887..210bb1bc9 100644 --- a/apps/emqx_auth_ldap/test/emqx_authz_ldap_SUITE.erl +++ b/apps/emqx_auth_ldap/test/emqx_authz_ldap_SUITE.erl @@ -1,5 +1,17 @@ %%-------------------------------------------------------------------- %% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. %%-------------------------------------------------------------------- -module(emqx_authz_ldap_SUITE). diff --git a/apps/emqx_authn/src/emqx_authn_enterprise.erl b/apps/emqx_authn/src/emqx_authn_enterprise.erl new file mode 100644 index 000000000..733c7ca00 --- /dev/null +++ b/apps/emqx_authn/src/emqx_authn_enterprise.erl @@ -0,0 +1,26 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- + +-module(emqx_authn_enterprise). + +-export([providers/0, resource_provider/0]). + +-if(?EMQX_RELEASE_EDITION == ee). + +providers() -> + [ + {gcp_device, emqx_gcp_device_authn} + ]. + +resource_provider() -> + []. + +-else. 
+ +providers() -> + []. + +resource_provider() -> + []. +-endif. diff --git a/apps/emqx_authz/src/emqx_authz.app.src b/apps/emqx_authz/src/emqx_authz.app.src new file mode 100644 index 000000000..67c0e54f1 --- /dev/null +++ b/apps/emqx_authz/src/emqx_authz.app.src @@ -0,0 +1,23 @@ +%% -*- mode: erlang -*- +{application, emqx_authz, [ + {description, "emqx authorization application"}, + {vsn, "0.1.3"}, + {registered, []}, + {mod, {emqx_authz_app, []}}, + {applications, [ + kernel, + stdlib, + crypto, + emqx_resource, + emqx_connector, + emqx_mongodb, + emqx_redis, + emqx_mysql, + emqx_bridge_http + ]}, + {env, []}, + {modules, []}, + + {licenses, ["Apache 2.0"]}, + {links, []} +]}. diff --git a/apps/emqx_authz/src/emqx_authz_enterprise.erl b/apps/emqx_authz/src/emqx_authz_enterprise.erl new file mode 100644 index 000000000..6f1451108 --- /dev/null +++ b/apps/emqx_authz/src/emqx_authz_enterprise.erl @@ -0,0 +1,60 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- +-module(emqx_authz_enterprise). + +-export([ + type_names/0, + fields/1, + is_enterprise_module/1, + authz_sources_types/0, + type/1, + desc/1 +]). + +-dialyzer({nowarn_function, [fields/1, type/1, desc/1]}). + +-if(?EMQX_RELEASE_EDITION == ee). + +%% type name set +type_names() -> + []. + +%% type -> type schema +fields(Any) -> + error({invalid_field, Any}). + +%% type -> type module +is_enterprise_module(_) -> + false. + +%% api sources set +authz_sources_types() -> + []. + +%% atom-able name -> type +type(Unknown) -> throw({unknown_authz_source_type, Unknown}). + +desc(_) -> + undefined. + +-else. + +type_names() -> + []. + +fields(Any) -> + error({invalid_field, Any}). + +is_enterprise_module(_) -> + false. + +authz_sources_types() -> + []. 
+ +%% should never happen if the input is type-checked by hocon schema +type(Unknown) -> throw({unknown_authz_source_type, Unknown}). + +desc(_) -> + undefined. +-endif. diff --git a/apps/emqx_ldap/BSL.txt b/apps/emqx_ldap/BSL.txt deleted file mode 100644 index 0acc0e696..000000000 --- a/apps/emqx_ldap/BSL.txt +++ /dev/null @@ -1,94 +0,0 @@ -Business Source License 1.1 - -Licensor: Hangzhou EMQ Technologies Co., Ltd. -Licensed Work: EMQX Enterprise Edition - The Licensed Work is (c) 2023 - Hangzhou EMQ Technologies Co., Ltd. -Additional Use Grant: Students and educators are granted right to copy, - modify, and create derivative work for research - or education. -Change Date: 2027-02-01 -Change License: Apache License, Version 2.0 - -For information about alternative licensing arrangements for the Software, -please contact Licensor: https://www.emqx.com/en/contact - -Notice - -The Business Source License (this document, or the “License”) is not an Open -Source license. However, the Licensed Work will eventually be made available -under an Open Source License, as stated in this License. - -License text copyright (c) 2017 MariaDB Corporation Ab, All Rights Reserved. -“Business Source License” is a trademark of MariaDB Corporation Ab. - ------------------------------------------------------------------------------ - -Business Source License 1.1 - -Terms - -The Licensor hereby grants you the right to copy, modify, create derivative -works, redistribute, and make non-production use of the Licensed Work. The -Licensor may make an Additional Use Grant, above, permitting limited -production use. - -Effective on the Change Date, or the fourth anniversary of the first publicly -available distribution of a specific version of the Licensed Work under this -License, whichever comes first, the Licensor hereby grants you rights under -the terms of the Change License, and the rights granted in the paragraph -above terminate. 
- -If your use of the Licensed Work does not comply with the requirements -currently in effect as described in this License, you must purchase a -commercial license from the Licensor, its affiliated entities, or authorized -resellers, or you must refrain from using the Licensed Work. - -All copies of the original and modified Licensed Work, and derivative works -of the Licensed Work, are subject to this License. This License applies -separately for each version of the Licensed Work and the Change Date may vary -for each version of the Licensed Work released by Licensor. - -You must conspicuously display this License on each original or modified copy -of the Licensed Work. If you receive the Licensed Work in original or -modified form from a third party, the terms and conditions set forth in this -License apply to your use of that work. - -Any use of the Licensed Work in violation of this License will automatically -terminate your rights under this License for the current and all other -versions of the Licensed Work. - -This License does not grant you any right in any trademark or logo of -Licensor or its affiliates (provided that you may use a trademark or logo of -Licensor as expressly required by this License). - -TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE LICENSED WORK IS PROVIDED ON -AN “AS IS” BASIS. LICENSOR HEREBY DISCLAIMS ALL WARRANTIES AND CONDITIONS, -EXPRESS OR IMPLIED, INCLUDING (WITHOUT LIMITATION) WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, AND -TITLE. - -MariaDB hereby grants you permission to use this License’s text to license -your works, and to refer to it using the trademark “Business Source License”, -as long as you comply with the Covenants of Licensor below. 
- -Covenants of Licensor - -In consideration of the right to use this License’s text and the “Business -Source License” name and trademark, Licensor covenants to MariaDB, and to all -other recipients of the licensed work to be provided by Licensor: - -1. To specify as the Change License the GPL Version 2.0 or any later version, - or a license that is compatible with GPL Version 2.0 or a later version, - where “compatible” means that software provided under the Change License can - be included in a program with software provided under GPL Version 2.0 or a - later version. Licensor may specify additional Change Licenses without - limitation. - -2. To either: (a) specify an additional grant of rights to use that does not - impose any additional restriction on the right granted in this License, as - the Additional Use Grant; or (b) insert the text “None”. - -3. To specify a Change Date. - -4. Not to modify this License in any other way. diff --git a/apps/emqx_ldap/src/emqx_ldap.erl b/apps/emqx_ldap/src/emqx_ldap.erl index 94b8992e0..a77a8ecf0 100644 --- a/apps/emqx_ldap/src/emqx_ldap.erl +++ b/apps/emqx_ldap/src/emqx_ldap.erl @@ -1,5 +1,17 @@ %%-------------------------------------------------------------------- %% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. %%-------------------------------------------------------------------- -module(emqx_ldap). 
diff --git a/apps/emqx_ldap/src/emqx_ldap_bind_worker.erl b/apps/emqx_ldap/src/emqx_ldap_bind_worker.erl index 1b1bd3ce9..722e79006 100644 --- a/apps/emqx_ldap/src/emqx_ldap_bind_worker.erl +++ b/apps/emqx_ldap/src/emqx_ldap_bind_worker.erl @@ -1,5 +1,17 @@ %%-------------------------------------------------------------------- %% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. %%-------------------------------------------------------------------- -module(emqx_ldap_bind_worker). diff --git a/apps/emqx_ldap/src/emqx_ldap_filter_lexer.xrl b/apps/emqx_ldap/src/emqx_ldap_filter_lexer.xrl index 3b4851fc4..9e5c772ab 100644 --- a/apps/emqx_ldap/src/emqx_ldap_filter_lexer.xrl +++ b/apps/emqx_ldap/src/emqx_ldap_filter_lexer.xrl @@ -29,6 +29,18 @@ Erlang code. %%-------------------------------------------------------------------- %% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+%% See the License for the specific language governing permissions and +%% limitations under the License. %%-------------------------------------------------------------------- %% eldap does not support neither the '\28value\29' nor '\(value\)' %% so after the tokenization we should remove all escape character diff --git a/apps/emqx_ldap/src/emqx_ldap_filter_parser.yrl b/apps/emqx_ldap/src/emqx_ldap_filter_parser.yrl index a400132f8..b12ba846e 100644 --- a/apps/emqx_ldap/src/emqx_ldap_filter_parser.yrl +++ b/apps/emqx_ldap/src/emqx_ldap_filter_parser.yrl @@ -1,5 +1,17 @@ Header "%%-------------------------------------------------------------------- %% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the \"License\"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an \"AS IS\" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. %%--------------------------------------------------------------------". Nonterminals diff --git a/apps/emqx_ldap/test/emqx_ldap_SUITE.erl b/apps/emqx_ldap/test/emqx_ldap_SUITE.erl index 79c549c22..e14e0feab 100644 --- a/apps/emqx_ldap/test/emqx_ldap_SUITE.erl +++ b/apps/emqx_ldap/test/emqx_ldap_SUITE.erl @@ -1,5 +1,17 @@ %%-------------------------------------------------------------------- %% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. 
+%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. %%-------------------------------------------------------------------- -module(emqx_ldap_SUITE). diff --git a/apps/emqx_ldap/test/emqx_ldap_filter_SUITE.erl b/apps/emqx_ldap/test/emqx_ldap_filter_SUITE.erl index e1aacef88..8c08b518c 100644 --- a/apps/emqx_ldap/test/emqx_ldap_filter_SUITE.erl +++ b/apps/emqx_ldap/test/emqx_ldap_filter_SUITE.erl @@ -1,5 +1,17 @@ %%-------------------------------------------------------------------- %% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. %%-------------------------------------------------------------------- -module(emqx_ldap_filter_SUITE). 
diff --git a/apps/emqx_machine/priv/reboot_lists.eterm b/apps/emqx_machine/priv/reboot_lists.eterm index 9be3e2f0c..43420d3cf 100644 --- a/apps/emqx_machine/priv/reboot_lists.eterm +++ b/apps/emqx_machine/priv/reboot_lists.eterm @@ -119,8 +119,6 @@ emqx_eviction_agent, emqx_node_rebalance, emqx_ft, - emqx_ldap, - emqx_auth_ldap, emqx_gcp_device, emqx_dashboard_rbac, emqx_dashboard_sso diff --git a/mix.exs b/mix.exs index 07f6b0209..9dfe0e160 100644 --- a/mix.exs +++ b/mix.exs @@ -225,8 +225,6 @@ defmodule EMQXUmbrella.MixProject do :emqx_enterprise, :emqx_bridge_kinesis, :emqx_bridge_azure_event_hub, - :emqx_ldap, - :emqx_auth_ldap, :emqx_gcp_device, :emqx_dashboard_rbac, :emqx_dashboard_sso diff --git a/rebar.config.erl b/rebar.config.erl index 5dad62af6..1c2ba4465 100644 --- a/rebar.config.erl +++ b/rebar.config.erl @@ -107,8 +107,6 @@ is_community_umbrella_app("apps/emqx_schema_registry") -> false; is_community_umbrella_app("apps/emqx_enterprise") -> false; is_community_umbrella_app("apps/emqx_bridge_kinesis") -> false; is_community_umbrella_app("apps/emqx_bridge_azure_event_hub") -> false; -is_community_umbrella_app("apps/emqx_ldap") -> false; -is_community_umbrella_app("apps/emqx_auth_ldap") -> false; is_community_umbrella_app("apps/emqx_gcp_device") -> false; is_community_umbrella_app("apps/emqx_dashboard_rbac") -> false; is_community_umbrella_app("apps/emqx_dashboard_sso") -> false; From 3e658b3da91f4e0bc2f6db4d02805deae0cde27a Mon Sep 17 00:00:00 2001 From: firest Date: Sat, 7 Oct 2023 15:15:46 +0800 Subject: [PATCH 019/155] chore: update changes --- apps/emqx_authn/src/emqx_authn_enterprise.erl | 26 -------- apps/emqx_authz/src/emqx_authz.app.src | 23 ------- apps/emqx_authz/src/emqx_authz_enterprise.erl | 60 ------------------- apps/emqx_machine/priv/reboot_lists.eterm | 2 + changes/ce/feat-11725.en.md | 1 + 5 files changed, 3 insertions(+), 109 deletions(-) delete mode 100644 apps/emqx_authn/src/emqx_authn_enterprise.erl delete mode 100644 
apps/emqx_authz/src/emqx_authz.app.src delete mode 100644 apps/emqx_authz/src/emqx_authz_enterprise.erl create mode 100644 changes/ce/feat-11725.en.md diff --git a/apps/emqx_authn/src/emqx_authn_enterprise.erl b/apps/emqx_authn/src/emqx_authn_enterprise.erl deleted file mode 100644 index 733c7ca00..000000000 --- a/apps/emqx_authn/src/emqx_authn_enterprise.erl +++ /dev/null @@ -1,26 +0,0 @@ -%%-------------------------------------------------------------------- -%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. -%%-------------------------------------------------------------------- - --module(emqx_authn_enterprise). - --export([providers/0, resource_provider/0]). - --if(?EMQX_RELEASE_EDITION == ee). - -providers() -> - [ - {gcp_device, emqx_gcp_device_authn} - ]. - -resource_provider() -> - []. - --else. - -providers() -> - []. - -resource_provider() -> - []. --endif. diff --git a/apps/emqx_authz/src/emqx_authz.app.src b/apps/emqx_authz/src/emqx_authz.app.src deleted file mode 100644 index 67c0e54f1..000000000 --- a/apps/emqx_authz/src/emqx_authz.app.src +++ /dev/null @@ -1,23 +0,0 @@ -%% -*- mode: erlang -*- -{application, emqx_authz, [ - {description, "emqx authorization application"}, - {vsn, "0.1.3"}, - {registered, []}, - {mod, {emqx_authz_app, []}}, - {applications, [ - kernel, - stdlib, - crypto, - emqx_resource, - emqx_connector, - emqx_mongodb, - emqx_redis, - emqx_mysql, - emqx_bridge_http - ]}, - {env, []}, - {modules, []}, - - {licenses, ["Apache 2.0"]}, - {links, []} -]}. diff --git a/apps/emqx_authz/src/emqx_authz_enterprise.erl b/apps/emqx_authz/src/emqx_authz_enterprise.erl deleted file mode 100644 index 6f1451108..000000000 --- a/apps/emqx_authz/src/emqx_authz_enterprise.erl +++ /dev/null @@ -1,60 +0,0 @@ -%%-------------------------------------------------------------------- -%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. 
-%%-------------------------------------------------------------------- --module(emqx_authz_enterprise). - --export([ - type_names/0, - fields/1, - is_enterprise_module/1, - authz_sources_types/0, - type/1, - desc/1 -]). - --dialyzer({nowarn_function, [fields/1, type/1, desc/1]}). - --if(?EMQX_RELEASE_EDITION == ee). - -%% type name set -type_names() -> - []. - -%% type -> type schema -fields(Any) -> - error({invalid_field, Any}). - -%% type -> type module -is_enterprise_module(_) -> - false. - -%% api sources set -authz_sources_types() -> - []. - -%% atom-able name -> type -type(Unknown) -> throw({unknown_authz_source_type, Unknown}). - -desc(_) -> - undefined. - --else. - -type_names() -> - []. - -fields(Any) -> - error({invalid_field, Any}). - -is_enterprise_module(_) -> - false. - -authz_sources_types() -> - []. - -%% should never happen if the input is type-checked by hocon schema -type(Unknown) -> throw({unknown_authz_source_type, Unknown}). - -desc(_) -> - undefined. --endif. diff --git a/apps/emqx_machine/priv/reboot_lists.eterm b/apps/emqx_machine/priv/reboot_lists.eterm index 43420d3cf..768424db6 100644 --- a/apps/emqx_machine/priv/reboot_lists.eterm +++ b/apps/emqx_machine/priv/reboot_lists.eterm @@ -56,6 +56,8 @@ emqx_auth_mysql, emqx_auth_postgresql, emqx_auth_redis, + emqx_ldap, + emqx_auth_ldap, emqx_auto_subscribe, emqx_gateway, emqx_gateway_stomp, diff --git a/changes/ce/feat-11725.en.md b/changes/ce/feat-11725.en.md new file mode 100644 index 000000000..ce5b08169 --- /dev/null +++ b/changes/ce/feat-11725.en.md @@ -0,0 +1 @@ +Introduced the LDAP as a new authentication and authorization backend. 
From 4ecd5e17a2e715a742d22b26152fadef4935f325 Mon Sep 17 00:00:00 2001 From: Ilya Averyanov Date: Mon, 24 Jul 2023 18:58:34 +0300 Subject: [PATCH 020/155] chore(authz): trace non-resultative authz calls to backend modules --- apps/emqx_auth/src/emqx_authz/emqx_authz.erl | 26 +++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/apps/emqx_auth/src/emqx_authz/emqx_authz.erl b/apps/emqx_auth/src/emqx_authz/emqx_authz.erl index 30210ff72..0ec300406 100644 --- a/apps/emqx_auth/src/emqx_authz/emqx_authz.erl +++ b/apps/emqx_auth/src/emqx_authz/emqx_authz.erl @@ -498,7 +498,10 @@ do_authorize(_Client, _PubSub, _Topic, []) -> do_authorize(Client, PubSub, Topic, [#{enable := false} | Rest]) -> do_authorize(Client, PubSub, Topic, Rest); do_authorize( - Client, + #{ + username := Username, + peerhost := IpAddress + } = Client, PubSub, Topic, [Connector = #{type := Type} | Tail] @@ -508,11 +511,32 @@ do_authorize( try Module:authorize(Client, PubSub, Topic, Connector) of nomatch -> emqx_metrics_worker:inc(authz_metrics, Type, nomatch), + ?TRACE("AUTHZ", "authorization_module_nomatch", #{ + module => Module, + username => Username, + ipaddr => IpAddress, + topic => Topic, + pub_sub => PubSub + }), do_authorize(Client, PubSub, Topic, Tail); %% {matched, allow | deny | ignore} {matched, ignore} -> + ?TRACE("AUTHZ", "authorization_module_match_ignore", #{ + module => Module, + username => Username, + ipaddr => IpAddress, + topic => Topic, + pub_sub => PubSub + }), do_authorize(Client, PubSub, Topic, Tail); ignore -> + ?TRACE("AUTHZ", "authorization_module_ignore", #{ + module => Module, + username => Username, + ipaddr => IpAddress, + topic => Topic, + pub_sub => PubSub + }), do_authorize(Client, PubSub, Topic, Tail); %% {matched, allow | deny} Matched -> From 419b80d11e7f449a9168fd1490bac7670d8efc2f Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Thu, 12 Oct 2023 01:26:35 +0700 Subject: [PATCH 021/155] chore(ci): try to reduce CI artifacts footprint 
To speed up uploads and downloads in dependent jobs. --- .github/workflows/.zipignore | 3 +++ .github/workflows/_pr_entrypoint.yaml | 5 ++--- .github/workflows/_push-entrypoint.yaml | 2 +- .github/workflows/run_conf_tests.yaml | 1 + 4 files changed, 7 insertions(+), 4 deletions(-) create mode 100644 .github/workflows/.zipignore diff --git a/.github/workflows/.zipignore b/.github/workflows/.zipignore new file mode 100644 index 000000000..d72c7ba23 --- /dev/null +++ b/.github/workflows/.zipignore @@ -0,0 +1,3 @@ +.git/* +*/.git/* +*/.github/* diff --git a/.github/workflows/_pr_entrypoint.yaml b/.github/workflows/_pr_entrypoint.yaml index f2688d8d1..8fc19c1aa 100644 --- a/.github/workflows/_pr_entrypoint.yaml +++ b/.github/workflows/_pr_entrypoint.yaml @@ -138,9 +138,8 @@ jobs: ENABLE_COVER_COMPILE: 1 run: | make ensure-rebar3 - make ${PROFILE} - make test-compile - zip -ryq $PROFILE.zip . + make ${PROFILE}-compile test-compile + zip -ryq -x@.github/workflows/.zipignore $PROFILE.zip . - uses: actions/upload-artifact@v3 with: name: ${{ matrix.profile }} diff --git a/.github/workflows/_push-entrypoint.yaml b/.github/workflows/_push-entrypoint.yaml index a8a01fb45..19c25fa2d 100644 --- a/.github/workflows/_push-entrypoint.yaml +++ b/.github/workflows/_push-entrypoint.yaml @@ -149,7 +149,7 @@ jobs: ENABLE_COVER_COMPILE: 1 run: | make $PROFILE - zip -ryq $PROFILE.zip . + zip -ryq -x@.github/workflows/.zipignore $PROFILE.zip . 
- uses: actions/upload-artifact@v3 with: name: ${{ matrix.profile }} diff --git a/.github/workflows/run_conf_tests.yaml b/.github/workflows/run_conf_tests.yaml index a69746d76..813aa663b 100644 --- a/.github/workflows/run_conf_tests.yaml +++ b/.github/workflows/run_conf_tests.yaml @@ -34,6 +34,7 @@ jobs: run: | unzip -o -q ${{ matrix.profile }}.zip git config --global --add safe.directory "$GITHUB_WORKSPACE" + - run: make ${{ matrix.profile }} - run: ./scripts/test/check-example-configs.sh - run: ./scripts/conf-test/run.sh - name: print erlang log From fdd9d77d41866a55b510a7f60c8b875be4e7f36b Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Thu, 12 Oct 2023 17:37:57 +0700 Subject: [PATCH 022/155] chore(ci): simplify `check-i18n-style` + stop relying on git --- scripts/check-i18n-style.escript | 3 +-- scripts/check-i18n-style.sh | 4 +--- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/scripts/check-i18n-style.escript b/scripts/check-i18n-style.escript index f48e5a513..e7e0ea42e 100755 --- a/scripts/check-i18n-style.escript +++ b/scripts/check-i18n-style.escript @@ -8,10 +8,9 @@ -define(RED, "\e[31m"). -define(RESET, "\e[39m"). -main([Files0]) -> +main(Files) -> io:format(user, "checking i18n file styles~n", []), _ = put(errors, 0), - Files = string:tokens(Files0, "\n"), ok = load_hocon(), ok = lists:foreach(fun check/1, Files), case get(errors) of diff --git a/scripts/check-i18n-style.sh b/scripts/check-i18n-style.sh index d21f43a72..b7d4d2113 100755 --- a/scripts/check-i18n-style.sh +++ b/scripts/check-i18n-style.sh @@ -3,6 +3,4 @@ set -euo pipefail cd -P -- "$(dirname -- "$0")/.." 
-all_files="$(git ls-files 'rel/i18n/*.hocon')" - -./scripts/check-i18n-style.escript "$all_files" +./scripts/check-i18n-style.escript rel/i18n/*.hocon From 397686fd183dc892a4163c7f51e1efcb4f426532 Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Thu, 12 Oct 2023 17:54:56 +0700 Subject: [PATCH 023/155] fix(ci-conf): stop requiring git index in conf tests --- .github/workflows/_pr_entrypoint.yaml | 2 ++ .github/workflows/run_conf_tests.yaml | 7 +++++++ 2 files changed, 9 insertions(+) diff --git a/.github/workflows/_pr_entrypoint.yaml b/.github/workflows/_pr_entrypoint.yaml index 8fc19c1aa..dd2edfaa9 100644 --- a/.github/workflows/_pr_entrypoint.yaml +++ b/.github/workflows/_pr_entrypoint.yaml @@ -217,6 +217,8 @@ jobs: with: runner_labels: ${{ needs.sanity-checks.outputs.runner_labels }} builder: ${{ needs.sanity-checks.outputs.builder }} + version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }} + version-emqx-enterprise: ${{ needs.sanity-checks.outputs.version-emqx-enterprise }} check_deps_integrity: needs: diff --git a/.github/workflows/run_conf_tests.yaml b/.github/workflows/run_conf_tests.yaml index 813aa663b..c91291836 100644 --- a/.github/workflows/run_conf_tests.yaml +++ b/.github/workflows/run_conf_tests.yaml @@ -13,6 +13,12 @@ on: builder: required: true type: string + version-emqx: + required: false + type: string + version-emqx-enterprise: + required: false + type: string jobs: run_conf_tests: @@ -20,6 +26,7 @@ jobs: container: ${{ inputs.builder }} env: PROFILE: ${{ matrix.profile }} + PKG_VSN: ${{ startsWith(matrix.profile, 'emqx-enterprise') && inputs.version-emqx-enterprise || inputs.version-emqx }} strategy: fail-fast: false matrix: From d0dac25644b4f52353b592254292566807a2e50c Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Fri, 13 Oct 2023 00:10:46 +0700 Subject: [PATCH 024/155] chore(ci): stop depending on git in `check-example-configs.sh` --- scripts/test/check-example-configs.sh | 7 ++++--- 1 file changed, 4 insertions(+), 3 
deletions(-) diff --git a/scripts/test/check-example-configs.sh b/scripts/test/check-example-configs.sh index f71fb15eb..cffea24ce 100755 --- a/scripts/test/check-example-configs.sh +++ b/scripts/test/check-example-configs.sh @@ -1,7 +1,8 @@ #!/usr/bin/env bash set -euo pipefail -PROJ_DIR="$(git rev-parse --show-toplevel)" + +cd -P -- "$(dirname -- "$0")/../.." PROFILE="${PROFILE:-emqx}" DIR_NAME='examples' @@ -11,7 +12,7 @@ if [ "${PROFILE}" = 'emqx-enterprise' ]; then SCHEMA_MOD='emqx_enterprise_schema' fi -IFS=$'\n' read -r -d '' -a FILES < <(find "${PROJ_DIR}/rel/config/${DIR_NAME}" -name "*.example" 2>/dev/null | sort && printf '\0') +IFS=$'\n' read -r -d '' -a FILES < <(find "rel/config/${DIR_NAME}" -name "*.example" 2>/dev/null | sort && printf '\0') prepare_erl_libs() { local libs_dir="$1" @@ -30,7 +31,7 @@ prepare_erl_libs() { } # This is needed when checking schema -export EMQX_ETC_DIR="${PROJ_DIR}/apps/emqx/etc" +export EMQX_ETC_DIR="apps/emqx/etc" prepare_erl_libs "_build/$PROFILE/lib" From 08b2b36b87f2bcf8bb570080d6bbb712d906254b Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Fri, 13 Oct 2023 00:40:25 +0700 Subject: [PATCH 025/155] feat(ci): push some envs through artifact files in workflows So that we could optimize away the need for the full git history. --- .github/workflows/_pr_entrypoint.yaml | 4 ++-- .github/workflows/_push-entrypoint.yaml | 2 ++ .github/workflows/run_conf_tests.yaml | 10 +--------- .github/workflows/static_checks.yaml | 3 +-- 4 files changed, 6 insertions(+), 13 deletions(-) diff --git a/.github/workflows/_pr_entrypoint.yaml b/.github/workflows/_pr_entrypoint.yaml index dd2edfaa9..930182c7e 100644 --- a/.github/workflows/_pr_entrypoint.yaml +++ b/.github/workflows/_pr_entrypoint.yaml @@ -139,6 +139,8 @@ jobs: run: | make ensure-rebar3 make ${PROFILE}-compile test-compile + echo "PROFILE=${PROFILE}" | tee -a .env + echo "PKG_VSN=$(./pkg-vsn.sh ${PROFILE})" | tee -a .env zip -ryq -x@.github/workflows/.zipignore $PROFILE.zip . 
- uses: actions/upload-artifact@v3 with: @@ -217,8 +219,6 @@ jobs: with: runner_labels: ${{ needs.sanity-checks.outputs.runner_labels }} builder: ${{ needs.sanity-checks.outputs.builder }} - version-emqx: ${{ needs.sanity-checks.outputs.version-emqx }} - version-emqx-enterprise: ${{ needs.sanity-checks.outputs.version-emqx-enterprise }} check_deps_integrity: needs: diff --git a/.github/workflows/_push-entrypoint.yaml b/.github/workflows/_push-entrypoint.yaml index 19c25fa2d..b2d7e2c4a 100644 --- a/.github/workflows/_push-entrypoint.yaml +++ b/.github/workflows/_push-entrypoint.yaml @@ -149,6 +149,8 @@ jobs: ENABLE_COVER_COMPILE: 1 run: | make $PROFILE + echo "PROFILE=${PROFILE}" | tee -a .env + echo "PKG_VSN=$(./pkg-vsn.sh ${PROFILE})" | tee -a .env zip -ryq -x@.github/workflows/.zipignore $PROFILE.zip . - uses: actions/upload-artifact@v3 with: diff --git a/.github/workflows/run_conf_tests.yaml b/.github/workflows/run_conf_tests.yaml index c91291836..788c9c9fa 100644 --- a/.github/workflows/run_conf_tests.yaml +++ b/.github/workflows/run_conf_tests.yaml @@ -13,20 +13,11 @@ on: builder: required: true type: string - version-emqx: - required: false - type: string - version-emqx-enterprise: - required: false - type: string jobs: run_conf_tests: runs-on: ${{ fromJSON(inputs.runner_labels) }} container: ${{ inputs.builder }} - env: - PROFILE: ${{ matrix.profile }} - PKG_VSN: ${{ startsWith(matrix.profile, 'emqx-enterprise') && inputs.version-emqx-enterprise || inputs.version-emqx }} strategy: fail-fast: false matrix: @@ -41,6 +32,7 @@ jobs: run: | unzip -o -q ${{ matrix.profile }}.zip git config --global --add safe.directory "$GITHUB_WORKSPACE" + - run: cat .env | tee -a $GITHUB_ENV - run: make ${{ matrix.profile }} - run: ./scripts/test/check-example-configs.sh - run: ./scripts/conf-test/run.sh diff --git a/.github/workflows/static_checks.yaml b/.github/workflows/static_checks.yaml index 758c3712b..6cf89275c 100644 --- a/.github/workflows/static_checks.yaml +++ 
b/.github/workflows/static_checks.yaml @@ -43,7 +43,6 @@ jobs: key: rebar3-dialyzer-plt-${{ matrix.profile }}-${{ matrix.otp }}-${{ hashFiles('rebar.*', 'apps/*/rebar.*', 'lib-ee/*/rebar.*') }} restore-keys: | rebar3-dialyzer-plt-${{ matrix.profile }}-${{ matrix.otp }}- + - run: cat .env | tee -a $GITHUB_ENV - name: run static checks - env: - PROFILE: ${{ matrix.profile }} run: make static_checks From 33ff5d5588bfe535428f1567bd488d4a95e7e76f Mon Sep 17 00:00:00 2001 From: firest Date: Fri, 13 Oct 2023 09:33:56 +0800 Subject: [PATCH 026/155] chore: update auth header file --- apps/emqx_conf/include/emqx_conf.hrl | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/apps/emqx_conf/include/emqx_conf.hrl b/apps/emqx_conf/include/emqx_conf.hrl index 4ae2b1df9..a758681ff 100644 --- a/apps/emqx_conf/include/emqx_conf.hrl +++ b/apps/emqx_conf/include/emqx_conf.hrl @@ -43,13 +43,12 @@ emqx_authz_redis_schema, emqx_authz_mysql_schema, emqx_authz_postgresql_schema, - emqx_authz_mongodb_schema -]). - --define(EE_AUTHZ_SOURCE_SCHEMA_MODS, [ + emqx_authz_mongodb_schema, emqx_authz_ldap_schema ]). +-define(EE_AUTHZ_SOURCE_SCHEMA_MODS, []). + -define(CE_AUTHN_PROVIDER_SCHEMA_MODS, [ emqx_authn_mnesia_schema, emqx_authn_mysql_schema, @@ -58,12 +57,12 @@ emqx_authn_redis_schema, emqx_authn_http_schema, emqx_authn_jwt_schema, - emqx_authn_scram_mnesia_schema + emqx_authn_scram_mnesia_schema, + emqx_authn_ldap_schema, + emqx_authn_ldap_bind_schema ]). -define(EE_AUTHN_PROVIDER_SCHEMA_MODS, [ - emqx_authn_ldap_schema, - emqx_authn_ldap_bind_schema, emqx_gcp_device_authn_schema ]). 
From 0b9ac24c1e69ebe74ab6483ea431a4d60f151a2b Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Fri, 13 Oct 2023 15:40:49 +0700 Subject: [PATCH 027/155] fix(bpapicheck): stop depending on git index --- apps/emqx/test/emqx_bpapi_static_checks.erl | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/apps/emqx/test/emqx_bpapi_static_checks.erl b/apps/emqx/test/emqx_bpapi_static_checks.erl index 6766912c0..0be5895f6 100644 --- a/apps/emqx/test/emqx_bpapi_static_checks.erl +++ b/apps/emqx/test/emqx_bpapi_static_checks.erl @@ -411,10 +411,19 @@ setnok() -> put(bpapi_ok, false). dumps_dir() -> - filename:join(project_root_dir(), "apps/emqx/test/emqx_static_checks_data"). - -project_root_dir() -> - string:trim(os:cmd("git rev-parse --show-toplevel")). + filename:join(emqx_app_dir(), "test/emqx_static_checks_data"). versions_file() -> - filename:join(project_root_dir(), "apps/emqx/priv/bpapi.versions"). + filename:join(emqx_app_dir(), "priv/bpapi.versions"). + +emqx_app_dir() -> + Info = ?MODULE:module_info(compile), + case proplists:get_value(source, Info) of + Source when is_list(Source) -> + filename:dirname(filename:dirname(Source)); + undefined -> + "apps/emqx" + end. + +project_root_dir() -> + filename:dirname(filename:dirname(emqx_app_dir())). 
From 083e2da3478f1dd4549a851a33ed7f4d5088ab7f Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Fri, 13 Oct 2023 15:42:08 +0700 Subject: [PATCH 028/155] chore(bpapicheck): make some failures more user-friendly --- apps/emqx/test/emqx_bpapi_static_checks.erl | 25 ++++++++++++++------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/apps/emqx/test/emqx_bpapi_static_checks.erl b/apps/emqx/test/emqx_bpapi_static_checks.erl index 0be5895f6..657776317 100644 --- a/apps/emqx/test/emqx_bpapi_static_checks.erl +++ b/apps/emqx/test/emqx_bpapi_static_checks.erl @@ -244,19 +244,28 @@ get_param_types(Signatures, {M, F, A}) -> %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% dump() -> - case - { - filelib:wildcard(project_root_dir() ++ "/*_plt"), - filelib:wildcard(project_root_dir() ++ "/_build/check/lib") - } - of + RootDir = project_root_dir(), + TryRelDir = RootDir ++ "/_build/check/lib", + case {filelib:wildcard(RootDir ++ "/*_plt"), filelib:wildcard(TryRelDir)} of {[PLT | _], [RelDir | _]} -> dump(#{ plt => PLT, reldir => RelDir }); - _ -> - error("failed to guess run options") + {[], _} -> + logger:error( + "No usable PLT files found in \"~s\", abort ~n" + "Try running `rebar3 as check dialyzer` at least once first", + [RootDir] + ), + error(run_failed); + {_, []} -> + logger:error( + "No built applications found in \"~s\", abort ~n" + "Try running `rebar3 as check compile` at least once first", + [TryRelDir] + ), + error(run_failed) end. 
%% Collect the local BPAPI modules to a dump file From 4f14e8df22c6398ea7f222e2987255438f85bae3 Mon Sep 17 00:00:00 2001 From: Ilya Averyanov Date: Fri, 13 Oct 2023 13:31:18 +0300 Subject: [PATCH 029/155] chore(source dir): split out postgresql connector --- .../src/emqx_auth_postgresql.app.src | 2 +- .../src/emqx_authn_postgresql.erl | 4 ++-- .../src/emqx_authn_postgresql_schema.erl | 2 +- .../src/emqx_authz_postgresql.erl | 6 +++--- .../src/emqx_authz_postgresql_schema.erl | 2 +- .../test/emqx_authn_postgresql_SUITE.erl | 4 ++-- .../test/emqx_authn_postgresql_tls_SUITE.erl | 2 +- .../test/emqx_authz_postgresql_SUITE.erl | 4 ++-- apps/emqx_bridge/src/emqx_bridge.app.src | 2 +- .../src/schema/emqx_bridge_enterprise.erl | 6 +++--- apps/emqx_bridge_pgsql/rebar.config | 3 ++- .../src/emqx_bridge_pgsql.app.src | 5 +++-- .../emqx_bridge_pgsql/src/emqx_bridge_pgsql.erl | 2 +- apps/emqx_connector/include/emqx_connector.hrl | 1 - apps/emqx_connector/rebar.config | 3 +-- apps/emqx_connector/src/emqx_connector.app.src | 2 -- apps/emqx_machine/priv/reboot_lists.eterm | 1 + apps/emqx_postgresql/README.md | 14 ++++++++++++++ .../docker-ct | 0 .../emqx_postgresql/include/emqx_postgresql.hrl | 17 +++++++++++++++++ apps/emqx_postgresql/rebar.config | 8 ++++++++ .../emqx_postgresql/src/emqx_postgresql.app.src | 16 ++++++++++++++++ .../src/emqx_postgresql.erl} | 5 +++-- .../test/emqx_postgresql_SUITE.erl} | 9 +++++---- ...nector_pgsql.hocon => emqx_postgresql.hocon} | 2 +- 25 files changed, 89 insertions(+), 33 deletions(-) create mode 100644 apps/emqx_postgresql/README.md rename apps/{emqx_connector => emqx_postgresql}/docker-ct (100%) create mode 100644 apps/emqx_postgresql/include/emqx_postgresql.hrl create mode 100644 apps/emqx_postgresql/rebar.config create mode 100644 apps/emqx_postgresql/src/emqx_postgresql.app.src rename apps/{emqx_connector/src/emqx_connector_pgsql.erl => emqx_postgresql/src/emqx_postgresql.erl} (99%) rename 
apps/{emqx_connector/test/emqx_connector_pgsql_SUITE.erl => emqx_postgresql/test/emqx_postgresql_SUITE.erl} (96%) rename rel/i18n/{emqx_connector_pgsql.hocon => emqx_postgresql.hocon} (91%) diff --git a/apps/emqx_auth_postgresql/src/emqx_auth_postgresql.app.src b/apps/emqx_auth_postgresql/src/emqx_auth_postgresql.app.src index 3157b7bd7..1d23ccac4 100644 --- a/apps/emqx_auth_postgresql/src/emqx_auth_postgresql.app.src +++ b/apps/emqx_auth_postgresql/src/emqx_auth_postgresql.app.src @@ -9,7 +9,7 @@ stdlib, emqx, emqx_auth, - emqx_connector + emqx_postgresql ]}, {env, []}, {modules, []}, diff --git a/apps/emqx_auth_postgresql/src/emqx_authn_postgresql.erl b/apps/emqx_auth_postgresql/src/emqx_authn_postgresql.erl index 1ce2e405c..ba92d2525 100644 --- a/apps/emqx_auth_postgresql/src/emqx_authn_postgresql.erl +++ b/apps/emqx_auth_postgresql/src/emqx_authn_postgresql.erl @@ -46,14 +46,14 @@ create(Config0) -> {Config, State} = parse_config(Config0, ResourceId), {ok, _Data} = emqx_authn_utils:create_resource( ResourceId, - emqx_connector_pgsql, + emqx_postgresql, Config ), {ok, State#{resource_id => ResourceId}}. update(Config0, #{resource_id := ResourceId} = _State) -> {Config, NState} = parse_config(Config0, ResourceId), - case emqx_authn_utils:update_resource(emqx_connector_pgsql, Config, ResourceId) of + case emqx_authn_utils:update_resource(emqx_postgresql, Config, ResourceId) of {error, Reason} -> error({load_config_error, Reason}); {ok, _} -> diff --git a/apps/emqx_auth_postgresql/src/emqx_authn_postgresql_schema.erl b/apps/emqx_auth_postgresql/src/emqx_authn_postgresql_schema.erl index 93819d7bf..6b3b600ee 100644 --- a/apps/emqx_auth_postgresql/src/emqx_authn_postgresql_schema.erl +++ b/apps/emqx_auth_postgresql/src/emqx_authn_postgresql_schema.erl @@ -50,7 +50,7 @@ fields(postgresql) -> {query, fun query/1} ] ++ emqx_authn_schema:common_fields() ++ - proplists:delete(prepare_statement, emqx_connector_pgsql:fields(config)). 
+ proplists:delete(prepare_statement, emqx_postgresql:fields(config)). desc(postgresql) -> ?DESC(postgresql); diff --git a/apps/emqx_auth_postgresql/src/emqx_authz_postgresql.erl b/apps/emqx_auth_postgresql/src/emqx_authz_postgresql.erl index 27f2d31ee..b930f77e4 100644 --- a/apps/emqx_auth_postgresql/src/emqx_authz_postgresql.erl +++ b/apps/emqx_auth_postgresql/src/emqx_authz_postgresql.erl @@ -50,10 +50,10 @@ description() -> create(#{query := SQL0} = Source) -> {SQL, PlaceHolders} = emqx_authz_utils:parse_sql(SQL0, '$n', ?PLACEHOLDERS), - ResourceID = emqx_authz_utils:make_resource_id(emqx_connector_pgsql), + ResourceID = emqx_authz_utils:make_resource_id(emqx_postgresql), {ok, _Data} = emqx_authz_utils:create_resource( ResourceID, - emqx_connector_pgsql, + emqx_postgresql, Source#{prepare_statement => #{ResourceID => SQL}} ), Source#{annotations => #{id => ResourceID, placeholders => PlaceHolders}}. @@ -62,7 +62,7 @@ update(#{query := SQL0, annotations := #{id := ResourceID}} = Source) -> {SQL, PlaceHolders} = emqx_authz_utils:parse_sql(SQL0, '$n', ?PLACEHOLDERS), case emqx_authz_utils:update_resource( - emqx_connector_pgsql, + emqx_postgresql, Source#{prepare_statement => #{ResourceID => SQL}} ) of diff --git a/apps/emqx_auth_postgresql/src/emqx_authz_postgresql_schema.erl b/apps/emqx_auth_postgresql/src/emqx_authz_postgresql_schema.erl index a52cc4fdd..2be7e9387 100644 --- a/apps/emqx_auth_postgresql/src/emqx_authz_postgresql_schema.erl +++ b/apps/emqx_auth_postgresql/src/emqx_authz_postgresql_schema.erl @@ -33,7 +33,7 @@ type() -> ?AUTHZ_TYPE. fields(postgresql) -> emqx_authz_schema:authz_common_fields(?AUTHZ_TYPE) ++ - emqx_connector_pgsql:fields(config) ++ + emqx_postgresql:fields(config) ++ [{query, query()}]. 
desc(postgresql) -> diff --git a/apps/emqx_auth_postgresql/test/emqx_authn_postgresql_SUITE.erl b/apps/emqx_auth_postgresql/test/emqx_authn_postgresql_SUITE.erl index 752202610..ea44c0a45 100644 --- a/apps/emqx_auth_postgresql/test/emqx_authn_postgresql_SUITE.erl +++ b/apps/emqx_auth_postgresql/test/emqx_authn_postgresql_SUITE.erl @@ -19,7 +19,7 @@ -compile(nowarn_export_all). -compile(export_all). --include_lib("emqx_connector/include/emqx_connector.hrl"). +-include_lib("emqx_postgresql/include/emqx_postgresql.hrl"). -include_lib("emqx_auth/include/emqx_authn.hrl"). -include_lib("eunit/include/eunit.hrl"). -include_lib("common_test/include/ct.hrl"). @@ -64,7 +64,7 @@ init_per_suite(Config) -> {ok, _} = emqx_resource:create_local( ?PGSQL_RESOURCE, ?AUTHN_RESOURCE_GROUP, - emqx_connector_pgsql, + emqx_postgresql, pgsql_config(), #{} ), diff --git a/apps/emqx_auth_postgresql/test/emqx_authn_postgresql_tls_SUITE.erl b/apps/emqx_auth_postgresql/test/emqx_authn_postgresql_tls_SUITE.erl index 25a65f660..ba6cf0604 100644 --- a/apps/emqx_auth_postgresql/test/emqx_authn_postgresql_tls_SUITE.erl +++ b/apps/emqx_auth_postgresql/test/emqx_authn_postgresql_tls_SUITE.erl @@ -19,7 +19,7 @@ -compile(nowarn_export_all). -compile(export_all). --include_lib("emqx_connector/include/emqx_connector.hrl"). +-include_lib("emqx_postgresql/include/emqx_postgresql.hrl"). -include_lib("emqx_auth/include/emqx_authn.hrl"). -include_lib("eunit/include/eunit.hrl"). -include_lib("common_test/include/ct.hrl"). diff --git a/apps/emqx_auth_postgresql/test/emqx_authz_postgresql_SUITE.erl b/apps/emqx_auth_postgresql/test/emqx_authz_postgresql_SUITE.erl index 4d38e9c96..f486cbd3d 100644 --- a/apps/emqx_auth_postgresql/test/emqx_authz_postgresql_SUITE.erl +++ b/apps/emqx_auth_postgresql/test/emqx_authz_postgresql_SUITE.erl @@ -18,7 +18,7 @@ -compile(nowarn_export_all). -compile(export_all). --include("emqx_connector.hrl"). +-include_lib("emqx_postgresql/include/emqx_postgresql.hrl"). 
-include_lib("emqx_auth/include/emqx_authz.hrl"). -include_lib("eunit/include/eunit.hrl"). -include_lib("common_test/include/ct.hrl"). @@ -438,7 +438,7 @@ create_pgsql_resource() -> emqx_resource:create_local( ?PGSQL_RESOURCE, ?AUTHZ_RESOURCE_GROUP, - emqx_connector_pgsql, + emqx_postgresql, pgsql_config(), #{} ). diff --git a/apps/emqx_bridge/src/emqx_bridge.app.src b/apps/emqx_bridge/src/emqx_bridge.app.src index ecf0042ca..c2387fe99 100644 --- a/apps/emqx_bridge/src/emqx_bridge.app.src +++ b/apps/emqx_bridge/src/emqx_bridge.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_bridge, [ {description, "EMQX bridges"}, - {vsn, "0.1.28"}, + {vsn, "0.1.29"}, {registered, [emqx_bridge_sup]}, {mod, {emqx_bridge_app, []}}, {applications, [ diff --git a/apps/emqx_bridge/src/schema/emqx_bridge_enterprise.erl b/apps/emqx_bridge/src/schema/emqx_bridge_enterprise.erl index a6bd4a754..06a23a45f 100644 --- a/apps/emqx_bridge/src/schema/emqx_bridge_enterprise.erl +++ b/apps/emqx_bridge/src/schema/emqx_bridge_enterprise.erl @@ -113,9 +113,9 @@ resource_type(influxdb_api_v2) -> emqx_bridge_influxdb_connector; resource_type(redis_single) -> emqx_bridge_redis_connector; resource_type(redis_sentinel) -> emqx_bridge_redis_connector; resource_type(redis_cluster) -> emqx_bridge_redis_connector; -resource_type(pgsql) -> emqx_connector_pgsql; -resource_type(timescale) -> emqx_connector_pgsql; -resource_type(matrix) -> emqx_connector_pgsql; +resource_type(pgsql) -> emqx_postgresql; +resource_type(timescale) -> emqx_postgresql; +resource_type(matrix) -> emqx_postgresql; resource_type(tdengine) -> emqx_bridge_tdengine_connector; resource_type(clickhouse) -> emqx_bridge_clickhouse_connector; resource_type(dynamo) -> emqx_bridge_dynamo_connector; diff --git a/apps/emqx_bridge_pgsql/rebar.config b/apps/emqx_bridge_pgsql/rebar.config index 87c145f26..da2729b70 100644 --- a/apps/emqx_bridge_pgsql/rebar.config +++ b/apps/emqx_bridge_pgsql/rebar.config @@ -3,5 +3,6 @@ {deps, [ 
{emqx_connector, {path, "../../apps/emqx_connector"}}, {emqx_resource, {path, "../../apps/emqx_resource"}}, - {emqx_bridge, {path, "../../apps/emqx_bridge"}} + {emqx_bridge, {path, "../../apps/emqx_bridge"}}, + {emqx_postgresql, {path, "../../apps/emqx_postgresql"}} ]}. diff --git a/apps/emqx_bridge_pgsql/src/emqx_bridge_pgsql.app.src b/apps/emqx_bridge_pgsql/src/emqx_bridge_pgsql.app.src index 85131baf0..7a17652e0 100644 --- a/apps/emqx_bridge_pgsql/src/emqx_bridge_pgsql.app.src +++ b/apps/emqx_bridge_pgsql/src/emqx_bridge_pgsql.app.src @@ -1,11 +1,12 @@ {application, emqx_bridge_pgsql, [ {description, "EMQX Enterprise PostgreSQL Bridge"}, - {vsn, "0.1.3"}, + {vsn, "0.1.4"}, {registered, []}, {applications, [ kernel, stdlib, - emqx_resource + emqx_resource, + emqx_postgresql ]}, {env, []}, {modules, []}, diff --git a/apps/emqx_bridge_pgsql/src/emqx_bridge_pgsql.erl b/apps/emqx_bridge_pgsql/src/emqx_bridge_pgsql.erl index 12161b9b9..bb15dfad9 100644 --- a/apps/emqx_bridge_pgsql/src/emqx_bridge_pgsql.erl +++ b/apps/emqx_bridge_pgsql/src/emqx_bridge_pgsql.erl @@ -82,7 +82,7 @@ fields("config") -> #{desc => ?DESC("local_topic"), default => undefined} )} ] ++ emqx_resource_schema:fields("resource_opts") ++ - (emqx_connector_pgsql:fields(config) -- + (emqx_postgresql:fields(config) -- emqx_connector_schema_lib:prepare_statement_fields()); fields("post") -> fields("post", pgsql); diff --git a/apps/emqx_connector/include/emqx_connector.hrl b/apps/emqx_connector/include/emqx_connector.hrl index cdb6ddd92..216dca031 100644 --- a/apps/emqx_connector/include/emqx_connector.hrl +++ b/apps/emqx_connector/include/emqx_connector.hrl @@ -22,7 +22,6 @@ -define(MYSQL_DEFAULT_PORT, 3306). -define(MONGO_DEFAULT_PORT, 27017). -define(REDIS_DEFAULT_PORT, 6379). --define(PGSQL_DEFAULT_PORT, 5432). -define(CLICKHOUSE_DEFAULT_PORT, 8123). -define(AUTO_RECONNECT_INTERVAL, 2). 
diff --git a/apps/emqx_connector/rebar.config b/apps/emqx_connector/rebar.config index 78515abe6..94da3c580 100644 --- a/apps/emqx_connector/rebar.config +++ b/apps/emqx_connector/rebar.config @@ -8,8 +8,7 @@ {deps, [ {emqx, {path, "../emqx"}}, {emqx_utils, {path, "../emqx_utils"}}, - {emqx_resource, {path, "../emqx_resource"}}, - {epgsql, {git, "https://github.com/emqx/epgsql", {tag, "4.7.0.1"}}} + {emqx_resource, {path, "../emqx_resource"}} ]}. {shell, [ diff --git a/apps/emqx_connector/src/emqx_connector.app.src b/apps/emqx_connector/src/emqx_connector.app.src index 7ecabb0ff..6b462986b 100644 --- a/apps/emqx_connector/src/emqx_connector.app.src +++ b/apps/emqx_connector/src/emqx_connector.app.src @@ -10,8 +10,6 @@ ecpool, emqx_resource, eredis_cluster, - eredis, - epgsql, ehttpc, jose, emqx, diff --git a/apps/emqx_machine/priv/reboot_lists.eterm b/apps/emqx_machine/priv/reboot_lists.eterm index 768424db6..16f901d27 100644 --- a/apps/emqx_machine/priv/reboot_lists.eterm +++ b/apps/emqx_machine/priv/reboot_lists.eterm @@ -80,6 +80,7 @@ emqx_mongodb, emqx_redis, emqx_mysql, + emqx_postgresql, emqx_plugins, emqx_opentelemetry, quicer, diff --git a/apps/emqx_postgresql/README.md b/apps/emqx_postgresql/README.md new file mode 100644 index 000000000..bdc21db72 --- /dev/null +++ b/apps/emqx_postgresql/README.md @@ -0,0 +1,14 @@ +# PostgreSQL Connector + +This application houses the PostgreSQL Database connector. +It provides the APIs to connect to PostgreSQL Databases. + +It is used by the PostgreSQL bridge to insert messages and by the `emqx_auth_postgresql` application to check user permissions. + +## Contributing + +Please see our [contributing.md](../../CONTRIBUTING.md). + +## License + +See [APL](../../APL.txt). 
diff --git a/apps/emqx_connector/docker-ct b/apps/emqx_postgresql/docker-ct similarity index 100% rename from apps/emqx_connector/docker-ct rename to apps/emqx_postgresql/docker-ct diff --git a/apps/emqx_postgresql/include/emqx_postgresql.hrl b/apps/emqx_postgresql/include/emqx_postgresql.hrl new file mode 100644 index 000000000..3810bd6c2 --- /dev/null +++ b/apps/emqx_postgresql/include/emqx_postgresql.hrl @@ -0,0 +1,17 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2021-2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-define(PGSQL_DEFAULT_PORT, 5432). diff --git a/apps/emqx_postgresql/rebar.config b/apps/emqx_postgresql/rebar.config new file mode 100644 index 000000000..1ae1309c9 --- /dev/null +++ b/apps/emqx_postgresql/rebar.config @@ -0,0 +1,8 @@ +%% -*- mode: erlang; -*- + +{erl_opts, [debug_info]}. +{deps, [ + {epgsql, {git, "https://github.com/emqx/epgsql", {tag, "4.7.0.1"}}}, + {emqx_connector, {path, "../../apps/emqx_connector"}}, + {emqx_resource, {path, "../../apps/emqx_resource"}} +]}. 
diff --git a/apps/emqx_postgresql/src/emqx_postgresql.app.src b/apps/emqx_postgresql/src/emqx_postgresql.app.src new file mode 100644 index 000000000..efe422cd0 --- /dev/null +++ b/apps/emqx_postgresql/src/emqx_postgresql.app.src @@ -0,0 +1,16 @@ +{application, emqx_postgresql, [ + {description, "EMQX PostgreSQL Database Connector"}, + {vsn, "0.1.0"}, + {registered, []}, + {applications, [ + kernel, + stdlib, + epgsql, + emqx_connector, + emqx_resource + ]}, + {env, []}, + {modules, []}, + + {links, []} +]}. diff --git a/apps/emqx_connector/src/emqx_connector_pgsql.erl b/apps/emqx_postgresql/src/emqx_postgresql.erl similarity index 99% rename from apps/emqx_connector/src/emqx_connector_pgsql.erl rename to apps/emqx_postgresql/src/emqx_postgresql.erl index b1e4a9fdb..dc6447536 100644 --- a/apps/emqx_connector/src/emqx_connector_pgsql.erl +++ b/apps/emqx_postgresql/src/emqx_postgresql.erl @@ -13,9 +13,10 @@ %% See the License for the specific language governing permissions and %% limitations under the License. %%-------------------------------------------------------------------- --module(emqx_connector_pgsql). +-module(emqx_postgresql). --include("emqx_connector.hrl"). +-include("emqx_postgresql.hrl"). +-include_lib("emqx_connector/include/emqx_connector.hrl"). -include_lib("typerefl/include/types.hrl"). -include_lib("emqx/include/logger.hrl"). -include_lib("hocon/include/hoconsc.hrl"). diff --git a/apps/emqx_connector/test/emqx_connector_pgsql_SUITE.erl b/apps/emqx_postgresql/test/emqx_postgresql_SUITE.erl similarity index 96% rename from apps/emqx_connector/test/emqx_connector_pgsql_SUITE.erl rename to apps/emqx_postgresql/test/emqx_postgresql_SUITE.erl index a4ac4f932..5a93a0578 100644 --- a/apps/emqx_connector/test/emqx_connector_pgsql_SUITE.erl +++ b/apps/emqx_postgresql/test/emqx_postgresql_SUITE.erl @@ -13,18 +13,19 @@ % %% limitations under the License. 
% %%-------------------------------------------------------------------- --module(emqx_connector_pgsql_SUITE). +-module(emqx_postgresql_SUITE). -compile(nowarn_export_all). -compile(export_all). --include("emqx_connector.hrl"). +-include("emqx_connector/include/emqx_connector.hrl"). +-include_lib("emqx_postgresql/include/emqx_postgresql.hrl"). -include_lib("eunit/include/eunit.hrl"). -include_lib("emqx/include/emqx.hrl"). -include_lib("stdlib/include/assert.hrl"). -define(PGSQL_HOST, "pgsql"). --define(PGSQL_RESOURCE_MOD, emqx_connector_pgsql). +-define(PGSQL_RESOURCE_MOD, emqx_postgresql). all() -> emqx_common_test_helpers:all(?MODULE). @@ -60,7 +61,7 @@ end_per_testcase(_, _Config) -> t_lifecycle(_Config) -> perform_lifecycle_check( - <<"emqx_connector_pgsql_SUITE">>, + <<"emqx_postgresql_SUITE">>, pgsql_config() ). diff --git a/rel/i18n/emqx_connector_pgsql.hocon b/rel/i18n/emqx_postgresql.hocon similarity index 91% rename from rel/i18n/emqx_connector_pgsql.hocon rename to rel/i18n/emqx_postgresql.hocon index 485e666a0..c6d2581c1 100644 --- a/rel/i18n/emqx_connector_pgsql.hocon +++ b/rel/i18n/emqx_postgresql.hocon @@ -1,4 +1,4 @@ -emqx_connector_pgsql { +emqx_postgresql { server.desc: """The IPv4 or IPv6 address or the hostname to connect to.
From bde8800f2e6cc2d9f6c38683638443677f13596f Mon Sep 17 00:00:00 2001 From: Ilya Averyanov Date: Fri, 13 Oct 2023 18:12:46 +0300 Subject: [PATCH 030/155] fix(mnesia authz): destroy authz records on mnesia authz destroy --- .../src/emqx_authz_mnesia.erl | 4 ++- .../test/emqx_authz_mnesia_SUITE.erl | 29 +++++++++++++++++++ changes/ce/fix-11762.en.md | 1 + 3 files changed, 33 insertions(+), 1 deletion(-) create mode 100644 changes/ce/fix-11762.en.md diff --git a/apps/emqx_auth_mnesia/src/emqx_authz_mnesia.erl b/apps/emqx_auth_mnesia/src/emqx_authz_mnesia.erl index 401d5a494..7e8e463b3 100644 --- a/apps/emqx_auth_mnesia/src/emqx_authz_mnesia.erl +++ b/apps/emqx_auth_mnesia/src/emqx_authz_mnesia.erl @@ -95,7 +95,9 @@ create(Source) -> Source. update(Source) -> Source. -destroy(_Source) -> ok. +destroy(_Source) -> + {atomic, ok} = mria:clear_table(?ACL_TABLE), + ok. authorize( #{ diff --git a/apps/emqx_auth_mnesia/test/emqx_authz_mnesia_SUITE.erl b/apps/emqx_auth_mnesia/test/emqx_authz_mnesia_SUITE.erl index 8f4f92ea2..7d77116e0 100644 --- a/apps/emqx_auth_mnesia/test/emqx_authz_mnesia_SUITE.erl +++ b/apps/emqx_auth_mnesia/test/emqx_authz_mnesia_SUITE.erl @@ -221,6 +221,35 @@ t_normalize_rules(_Config) -> ) ). +t_destroy(_Config) -> + ClientInfo = emqx_authz_test_lib:base_client_info(), + + ok = emqx_authz_mnesia:store_rules( + {username, <<"username">>}, + [#{<<"permission">> => <<"allow">>, <<"action">> => <<"publish">>, <<"topic">> => <<"t">>}] + ), + + ?assertEqual( + allow, + emqx_access_control:authorize(ClientInfo, ?AUTHZ_PUBLISH, <<"t">>) + ), + + ok = emqx_authz_test_lib:reset_authorizers(), + + ?assertEqual( + deny, + emqx_access_control:authorize(ClientInfo, ?AUTHZ_PUBLISH, <<"t">>) + ), + + ok = setup_config(), + + %% After destroy, the rules should be empty + + ?assertEqual( + deny, + emqx_access_control:authorize(ClientInfo, ?AUTHZ_PUBLISH, <<"t">>) + ). 
+ %%------------------------------------------------------------------------------ %% Helpers %%------------------------------------------------------------------------------ diff --git a/changes/ce/fix-11762.en.md b/changes/ce/fix-11762.en.md new file mode 100644 index 000000000..b2276d08c --- /dev/null +++ b/changes/ce/fix-11762.en.md @@ -0,0 +1 @@ +Fixed destruction of built_in_database authorization source. Now all the ACL records are removed when the authorization source is destroyed. Previously, old records were left in the database, which could cause problems when creating authorization source back. From 220893177d08990993823e4d4d49e126f2639228 Mon Sep 17 00:00:00 2001 From: Ivan Dyachkov Date: Sat, 14 Oct 2023 09:48:07 +0200 Subject: [PATCH 031/155] ci: rerun failed checks for up to 3 times --- .github/workflows/green_master.yaml | 11 ++- scripts/rerun-failed-checks.py | 143 ---------------------------- 2 files changed, 10 insertions(+), 144 deletions(-) delete mode 100644 scripts/rerun-failed-checks.py diff --git a/.github/workflows/green_master.yaml b/.github/workflows/green_master.yaml index de343a8f9..1dc0f841f 100644 --- a/.github/workflows/green_master.yaml +++ b/.github/workflows/green_master.yaml @@ -7,6 +7,9 @@ on: # run hourly - cron: "0 * * * *" workflow_dispatch: + inputs: + ref: + required: false permissions: contents: read @@ -20,10 +23,16 @@ jobs: actions: write steps: - uses: actions/checkout@v3 + with: + ref: ${{ github.event.inputs.ref || 'master' }} - name: run script shell: bash env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | - python3 scripts/rerun-failed-checks.py + gh api --method GET -f head_sha=$(git rev-parse HEAD) -f status=completed -f exclude_pull_requests=true /repos/emqx/emqx/actions/runs > runs.json + for id in $(jq -r '.workflow_runs[] | select((."conclusion" != "success") and .run_attempt < 3) | .id' runs.json); do + echo "rerun https://github.com/emqx/emqx/actions/runs/$id" + gh api --method POST 
/repos/emqx/emqx/actions/runs/$id/rerun-failed-jobs + done diff --git a/scripts/rerun-failed-checks.py b/scripts/rerun-failed-checks.py deleted file mode 100644 index ff9b9f33e..000000000 --- a/scripts/rerun-failed-checks.py +++ /dev/null @@ -1,143 +0,0 @@ -#!/usr/bin/env python3 -# Usage: python3 rerun-failed-checks.py -t -r -b -# -# Description: This script will fetch the latest commit from a branch, and check the status of all check runs of the commit. -# If any check run is not successful, it will trigger a rerun of the failed jobs. -# -# Default branch is master, default repo is emqx/emqx -# -# Limitation: only works for upstream repo, not for forked. -import requests -import http.client -import json -import os -import sys -import time -import math -from optparse import OptionParser - -job_black_list = [ - 'windows', - 'publish_artifacts', - 'stale' -] - -def fetch_latest_commit(token: str, repo: str, branch: str): - url = f'https://api.github.com/repos/{repo}/commits/{branch}' - headers = {'Accept': 'application/vnd.github+json', - 'Authorization': f'Bearer {token}', - 'X-GitHub-Api-Version': '2022-11-28', - 'User-Agent': 'python3' - } - r = requests.get(url, headers=headers) - if r.status_code == 200: - res = r.json() - return res - else: - print( - f'Failed to fetch latest commit from {branch} branch, code: {r.status_code}') - sys.exit(1) - - -''' -fetch check runs of a commit. 
-@note, only works for public repos -''' -def fetch_check_runs(token: str, repo: str, ref: str): - all_checks = [] - page = 1 - total_pages = 1 - per_page = 100 - failed_checks = [] - while page <= total_pages: - print(f'Fetching check runs for page {page} of {total_pages} pages') - url = f'https://api.github.com/repos/{repo}/commits/{ref}/check-runs?per_page={per_page}&page={page}' - headers = {'Accept': 'application/vnd.github.v3+json', - 'Authorization': f'Bearer {token}' - } - r = requests.get(url, headers=headers) - if r.status_code == 200: - resp = r.json() - all_checks.extend(resp['check_runs']) - - page += 1 - if 'total_count' in resp and resp['total_count'] > per_page: - total_pages = math.ceil(resp['total_count'] / per_page) - else: - print(f'Failed to fetch check runs {r.status_code}') - sys.exit(1) - - - for crun in all_checks: - if crun['status'] == 'completed' and crun['conclusion'] != 'success': - print('Failed check: ', crun['name']) - failed_checks.append( - {'id': crun['id'], 'name': crun['name'], 'url': crun['url']}) - else: - # pretty print crun - # print(json.dumps(crun, indent=4)) - print('successed:', crun['id'], crun['name'], - crun['status'], crun['conclusion']) - - return failed_checks - -''' -rerquest a check-run -''' -def trigger_build(failed_checks: list, repo: str, token: str): - reruns = [] - for crun in failed_checks: - if crun['name'].strip() in job_black_list: - print(f'Skip black listed job {crun["name"]}') - continue - - r = requests.get(crun['url'], headers={'Accept': 'application/vnd.github.v3+json', - 'User-Agent': 'python3', - 'Authorization': f'Bearer {token}'} - ) - if r.status_code == 200: - # url example: https://github.com/qzhuyan/emqx/actions/runs/4469557961/jobs/7852858687 - run_id = r.json()['details_url'].split('/')[-3] - reruns.append(run_id) - else: - print(f'failed to fetch check run {crun["name"]}') - - # remove duplicates - for run_id in set(reruns): - url = 
f'https://api.github.com/repos/{repo}/actions/runs/{run_id}/rerun-failed-jobs' - - r = requests.post(url, headers={'Accept': 'application/vnd.github.v3+json', - 'User-Agent': 'python3', - 'Authorization': f'Bearer {token}'} - ) - if r.status_code == 201: - print(f'Successfully triggered build for {crun["name"]}') - - else: - # Only complain but not exit. - print( - f'Failed to trigger rerun for {run_id}, {crun["name"]}: {r.status_code} : {r.text}') - - -def main(): - parser = OptionParser() - parser.add_option("-r", "--repo", dest="repo", - help="github repo", default="emqx/emqx") - parser.add_option("-t", "--token", dest="gh_token", - help="github API token") - parser.add_option("-b", "--branch", dest="branch", default='master', - help="Branch that workflow runs on") - (options, args) = parser.parse_args() - - # Get gh token from env var GITHUB_TOKEN if provided, else use the one from command line - token = os.environ['GITHUB_TOKEN'] if 'GITHUB_TOKEN' in os.environ else options.gh_token - - target_commit = fetch_latest_commit(token, options.repo, options.branch) - - failed_checks = fetch_check_runs(token, options.repo, target_commit['sha']) - - trigger_build(failed_checks, options.repo, token) - - -if __name__ == '__main__': - main() From 7c022c2c6a06da552b47ef18378bb8fd885ad544 Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Wed, 11 Oct 2023 16:22:33 +0200 Subject: [PATCH 032/155] refactor: change mria default rpc module from 'gen_rpc' to 'rpc' Erlang distribution seems to outperform gen_rpc (unless gen_rpc clients are scaled up, but this is not easy to achive for shard transport as it may reorder events). 
--- apps/emqx_conf/src/emqx_conf_schema.erl | 2 +- changes/ce/feat-11752.en.md | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 changes/ce/feat-11752.en.md diff --git a/apps/emqx_conf/src/emqx_conf_schema.erl b/apps/emqx_conf/src/emqx_conf_schema.erl index 7ec5348fc..4b571f937 100644 --- a/apps/emqx_conf/src/emqx_conf_schema.erl +++ b/apps/emqx_conf/src/emqx_conf_schema.erl @@ -652,7 +652,7 @@ fields("node") -> hoconsc:enum([gen_rpc, rpc]), #{ mapping => "mria.rlog_rpc_module", - default => gen_rpc, + default => rpc, 'readOnly' => true, importance => ?IMPORTANCE_HIDDEN, desc => ?DESC(db_rpc_module) diff --git a/changes/ce/feat-11752.en.md b/changes/ce/feat-11752.en.md new file mode 100644 index 000000000..0cf0d5f6f --- /dev/null +++ b/changes/ce/feat-11752.en.md @@ -0,0 +1,3 @@ +Change default RPC driver from 'gen_rpc' to 'rpc' for core-replica database sync. + +This improves core-replica data replication latency. From 6354f3b04ff2d0dd1dee7512997782d08203357d Mon Sep 17 00:00:00 2001 From: Ilya Averyanov Date: Mon, 16 Oct 2023 23:57:23 +0300 Subject: [PATCH 033/155] feat(authn): allow authn providers to define a separate schema for API --- .../src/emqx_authn/emqx_authn_api.erl | 23 ++-- .../emqx_authn_password_hashing.erl | 100 +++++++++++------- .../src/emqx_authn/emqx_authn_schema.erl | 68 +++++++++--- .../test/emqx_authn/emqx_authn_api_SUITE.erl | 36 ++++++- .../test/emqx_authz/emqx_authz_SUITE.erl | 1 + .../src/emqx_authn_mnesia_schema.erl | 30 ++++-- apps/emqx_utils/src/emqx_utils.erl | 19 +++- changes/ce/fix-11771.en.md | 1 + 8 files changed, 202 insertions(+), 76 deletions(-) create mode 100644 changes/ce/fix-11771.en.md diff --git a/apps/emqx_auth/src/emqx_authn/emqx_authn_api.erl b/apps/emqx_auth/src/emqx_authn/emqx_authn_api.erl index 9938a3018..f30f7f473 100644 --- a/apps/emqx_auth/src/emqx_authn/emqx_authn_api.erl +++ b/apps/emqx_auth/src/emqx_authn/emqx_authn_api.erl @@ -147,7 +147,7 @@ schema("/authentication") -> description 
=> ?DESC(authentication_get), responses => #{ 200 => emqx_dashboard_swagger:schema_with_example( - hoconsc:array(emqx_authn_schema:authenticator_type()), + hoconsc:array(authenticator_type(config)), authenticator_array_example() ) } @@ -156,12 +156,12 @@ schema("/authentication") -> tags => ?API_TAGS_GLOBAL, description => ?DESC(authentication_post), 'requestBody' => emqx_dashboard_swagger:schema_with_examples( - emqx_authn_schema:authenticator_type(), + authenticator_type(api_write), authenticator_examples() ), responses => #{ 200 => emqx_dashboard_swagger:schema_with_examples( - emqx_authn_schema:authenticator_type(), + authenticator_type(config), authenticator_examples() ), 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>), @@ -178,7 +178,7 @@ schema("/authentication/:id") -> parameters => [param_auth_id()], responses => #{ 200 => emqx_dashboard_swagger:schema_with_examples( - emqx_authn_schema:authenticator_type(), + authenticator_type(config), authenticator_examples() ), 404 => error_codes([?NOT_FOUND], <<"Not Found">>) @@ -189,7 +189,7 @@ schema("/authentication/:id") -> description => ?DESC(authentication_id_put), parameters => [param_auth_id()], 'requestBody' => emqx_dashboard_swagger:schema_with_examples( - emqx_authn_schema:authenticator_type(), + authenticator_type(api_write), authenticator_examples() ), responses => #{ @@ -236,7 +236,7 @@ schema("/listeners/:listener_id/authentication") -> parameters => [param_listener_id()], responses => #{ 200 => emqx_dashboard_swagger:schema_with_example( - hoconsc:array(emqx_authn_schema:authenticator_type()), + hoconsc:array(authenticator_type(config)), authenticator_array_example() ) } @@ -247,12 +247,12 @@ schema("/listeners/:listener_id/authentication") -> description => ?DESC(listeners_listener_id_authentication_post), parameters => [param_listener_id()], 'requestBody' => emqx_dashboard_swagger:schema_with_examples( - emqx_authn_schema:authenticator_type(), + authenticator_type(api_write), 
authenticator_examples() ), responses => #{ 200 => emqx_dashboard_swagger:schema_with_examples( - emqx_authn_schema:authenticator_type(), + authenticator_type(config), authenticator_examples() ), 400 => error_codes([?BAD_REQUEST], <<"Bad Request">>), @@ -270,7 +270,7 @@ schema("/listeners/:listener_id/authentication/:id") -> parameters => [param_listener_id(), param_auth_id()], responses => #{ 200 => emqx_dashboard_swagger:schema_with_examples( - emqx_authn_schema:authenticator_type(), + authenticator_type(config), authenticator_examples() ), 404 => error_codes([?NOT_FOUND], <<"Not Found">>) @@ -282,7 +282,7 @@ schema("/listeners/:listener_id/authentication/:id") -> description => ?DESC(listeners_listener_id_authentication_id_put), parameters => [param_listener_id(), param_auth_id()], 'requestBody' => emqx_dashboard_swagger:schema_with_examples( - emqx_authn_schema:authenticator_type(), + authenticator_type(api_write), authenticator_examples() ), responses => #{ @@ -1278,6 +1278,9 @@ paginated_list_type(Type) -> {meta, ref(emqx_dashboard_swagger, meta)} ]. +authenticator_type(Kind) -> + emqx_authn_schema:authenticator_type(Kind). + authenticator_array_example() -> [Config || #{value := Config} <- maps:values(authenticator_examples())]. diff --git a/apps/emqx_auth/src/emqx_authn/emqx_authn_password_hashing.erl b/apps/emqx_auth/src/emqx_authn/emqx_authn_password_hashing.erl index 66bc6bfc6..40e96ce6f 100644 --- a/apps/emqx_auth/src/emqx_authn/emqx_authn_password_hashing.erl +++ b/apps/emqx_auth/src/emqx_authn/emqx_authn_password_hashing.erl @@ -53,7 +53,8 @@ -export([ type_ro/1, - type_rw/1 + type_rw/1, + type_rw_api/1 ]). -export([ @@ -67,21 +68,17 @@ -define(SALT_ROUNDS_MAX, 10). namespace() -> "authn-hash". -roots() -> [pbkdf2, bcrypt, bcrypt_rw, simple]. +roots() -> [pbkdf2, bcrypt, bcrypt_rw, bcrypt_rw_api, simple]. 
fields(bcrypt_rw) -> fields(bcrypt) ++ [ - {salt_rounds, - sc( - range(?SALT_ROUNDS_MIN, ?SALT_ROUNDS_MAX), - #{ - default => ?SALT_ROUNDS_MAX, - example => ?SALT_ROUNDS_MAX, - desc => "Work factor for BCRYPT password generation.", - converter => fun salt_rounds_converter/2 - } - )} + {salt_rounds, fun bcrypt_salt_rounds/1} + ]; +fields(bcrypt_rw_api) -> + fields(bcrypt) ++ + [ + {salt_rounds, fun bcrypt_salt_rounds_api/1} ]; fields(bcrypt) -> [{name, sc(bcrypt, #{required => true, desc => "BCRYPT password hashing."})}]; @@ -110,6 +107,15 @@ fields(simple) -> {salt_position, fun salt_position/1} ]. +bcrypt_salt_rounds(converter) -> fun salt_rounds_converter/2; +bcrypt_salt_rounds(Option) -> bcrypt_salt_rounds_api(Option). + +bcrypt_salt_rounds_api(type) -> range(?SALT_ROUNDS_MIN, ?SALT_ROUNDS_MAX); +bcrypt_salt_rounds_api(default) -> ?SALT_ROUNDS_MAX; +bcrypt_salt_rounds_api(example) -> ?SALT_ROUNDS_MAX; +bcrypt_salt_rounds_api(desc) -> "Work factor for BCRYPT password generation."; +bcrypt_salt_rounds_api(_) -> undefined. + salt_rounds_converter(undefined, _) -> undefined; salt_rounds_converter(I, _) when is_integer(I) -> @@ -119,6 +125,8 @@ salt_rounds_converter(X, _) -> desc(bcrypt_rw) -> "Settings for bcrypt password hashing algorithm (for DB backends with write capability)."; +desc(bcrypt_rw_api) -> + desc(bcrypt_rw); desc(bcrypt) -> "Settings for bcrypt password hashing algorithm."; desc(pbkdf2) -> @@ -143,14 +151,20 @@ dk_length(desc) -> dk_length(_) -> undefined. -%% for simple_authn/emqx_authn_mnesia +%% for emqx_authn_mnesia type_rw(type) -> hoconsc:union(rw_refs()); -type_rw(default) -> - #{<<"name">> => sha256, <<"salt_position">> => prefix}; type_rw(desc) -> "Options for password hash creation and verification."; -type_rw(_) -> +type_rw(Option) -> + type_ro(Option). 
+ +%% for emqx_authn_mnesia API +type_rw_api(type) -> + hoconsc:union(api_refs()); +type_rw_api(desc) -> + "Options for password hash creation and verification through API."; +type_rw_api(_) -> undefined. %% for other authn resources @@ -242,31 +256,41 @@ check_password(#{name := Other, salt_position := SaltPosition}, Salt, PasswordHa %%------------------------------------------------------------------------------ rw_refs() -> - All = [ - hoconsc:ref(?MODULE, bcrypt_rw), - hoconsc:ref(?MODULE, pbkdf2), - hoconsc:ref(?MODULE, simple) - ], - fun - (all_union_members) -> All; - ({value, #{<<"name">> := <<"bcrypt">>}}) -> [hoconsc:ref(?MODULE, bcrypt_rw)]; - ({value, #{<<"name">> := <<"pbkdf2">>}}) -> [hoconsc:ref(?MODULE, pbkdf2)]; - ({value, #{<<"name">> := _}}) -> [hoconsc:ref(?MODULE, simple)]; - ({value, _}) -> throw(#{reason => "algorithm_name_missing"}) - end. + union_selector(rw). ro_refs() -> - All = [ - hoconsc:ref(?MODULE, bcrypt), - hoconsc:ref(?MODULE, pbkdf2), - hoconsc:ref(?MODULE, simple) - ], + union_selector(ro). + +api_refs() -> + union_selector(api). + +sc(Type, Meta) -> hoconsc:mk(Type, Meta). + +union_selector(Kind) -> fun - (all_union_members) -> All; - ({value, #{<<"name">> := <<"bcrypt">>}}) -> [hoconsc:ref(?MODULE, bcrypt)]; - ({value, #{<<"name">> := <<"pbkdf2">>}}) -> [hoconsc:ref(?MODULE, pbkdf2)]; - ({value, #{<<"name">> := _}}) -> [hoconsc:ref(?MODULE, simple)]; + (all_union_members) -> refs(Kind); + ({value, #{<<"name">> := <<"bcrypt">>}}) -> [bcrypt_ref(Kind)]; + ({value, #{<<"name">> := <<"pbkdf2">>}}) -> [pbkdf2_ref(Kind)]; + ({value, #{<<"name">> := _}}) -> [simple_ref(Kind)]; ({value, _}) -> throw(#{reason => "algorithm_name_missing"}) end. -sc(Type, Meta) -> hoconsc:mk(Type, Meta). +refs(Kind) -> + [ + bcrypt_ref(Kind), + pbkdf2_ref(Kind), + simple_ref(Kind) + ]. + +pbkdf2_ref(_) -> + hoconsc:ref(?MODULE, pbkdf2). 
+ +bcrypt_ref(rw) -> + hoconsc:ref(?MODULE, bcrypt_rw); +bcrypt_ref(api) -> + hoconsc:ref(?MODULE, bcrypt_rw_api); +bcrypt_ref(_) -> + hoconsc:ref(?MODULE, bcrypt). + +simple_ref(_) -> + hoconsc:ref(?MODULE, simple). diff --git a/apps/emqx_auth/src/emqx_authn/emqx_authn_schema.erl b/apps/emqx_auth/src/emqx_authn/emqx_authn_schema.erl index a06d4b692..9b9935a1f 100644 --- a/apps/emqx_auth/src/emqx_authn/emqx_authn_schema.erl +++ b/apps/emqx_auth/src/emqx_authn/emqx_authn_schema.erl @@ -34,7 +34,9 @@ tags/0, fields/1, authenticator_type/0, + authenticator_type/1, authenticator_type_without/1, + authenticator_type_without/2, mechanism/1, backend/1 ]). @@ -43,17 +45,35 @@ global_auth_fields/0 ]). +-export_type([shema_kind/0]). + -define(AUTHN_MODS_PT_KEY, {?MODULE, authn_schema_mods}). +-define(DEFAULT_SCHEMA_KIND, config). %%-------------------------------------------------------------------- %% Authn Source Schema Behaviour %%-------------------------------------------------------------------- -type schema_ref() :: ?R_REF(module(), hocon_schema:name()). +-type shema_kind() :: + %% api_write: schema for mutating API request validation + api_write + %% config: schema for config validation + | config. -callback refs() -> [schema_ref()]. --callback select_union_member(emqx_config:raw_config()) -> schema_ref() | undefined | no_return(). +-callback refs(shema_kind()) -> [schema_ref()]. +-callback select_union_member(emqx_config:raw_config()) -> [schema_ref()] | undefined | no_return(). +-callback select_union_member(shema_kind(), emqx_config:raw_config()) -> + [schema_ref()] | undefined | no_return(). -callback fields(hocon_schema:name()) -> [hocon_schema:field()]. +-optional_callbacks([ + select_union_member/1, + select_union_member/2, + refs/0, + refs/1 +]). + roots() -> []. injected_fields(AuthnSchemaMods) -> @@ -67,45 +87,63 @@ tags() -> [<<"Authentication">>]. authenticator_type() -> - hoconsc:union(union_member_selector(provider_schema_mods())). 
+ authenticator_type(?DEFAULT_SCHEMA_KIND). + +authenticator_type(Kind) -> + hoconsc:union(union_member_selector(Kind, provider_schema_mods())). authenticator_type_without(ProviderSchemaMods) -> + authenticator_type_without(?DEFAULT_SCHEMA_KIND, ProviderSchemaMods). + +authenticator_type_without(Kind, ProviderSchemaMods) -> hoconsc:union( - union_member_selector(provider_schema_mods() -- ProviderSchemaMods) + union_member_selector(Kind, provider_schema_mods() -- ProviderSchemaMods) ). -union_member_selector(Mods) -> - AllTypes = config_refs(Mods), +union_member_selector(Kind, Mods) -> + AllTypes = config_refs(Kind, Mods), fun (all_union_members) -> AllTypes; - ({value, Value}) -> select_union_member(Value, Mods) + ({value, Value}) -> select_union_member(Kind, Value, Mods) end. -select_union_member(#{<<"mechanism">> := Mechanism, <<"backend">> := Backend}, []) -> +select_union_member(_Kind, #{<<"mechanism">> := Mechanism, <<"backend">> := Backend}, []) -> throw(#{ reason => "unsupported_mechanism", mechanism => Mechanism, backend => Backend }); -select_union_member(#{<<"mechanism">> := Mechanism}, []) -> +select_union_member(_Kind, #{<<"mechanism">> := Mechanism}, []) -> throw(#{ reason => "unsupported_mechanism", mechanism => Mechanism }); -select_union_member(#{<<"mechanism">> := _} = Value, [Mod | Mods]) -> - case Mod:select_union_member(Value) of +select_union_member(Kind, #{<<"mechanism">> := _} = Value, [Mod | Mods]) -> + case mod_select_union_member(Kind, Value, Mod) of undefined -> - select_union_member(Value, Mods); + select_union_member(Kind, Value, Mods); Member -> Member end; -select_union_member(#{} = _Value, _Mods) -> +select_union_member(_Kind, #{} = _Value, _Mods) -> throw(#{reason => "missing_mechanism_field"}); -select_union_member(Value, _Mods) -> +select_union_member(_Kind, Value, _Mods) -> throw(#{reason => "not_a_struct", value => Value}). -config_refs(Mods) -> - lists:append([Mod:refs() || Mod <- Mods]). 
+mod_select_union_member(Kind, Value, Mod) -> + emqx_utils:call_first_defined([ + {Mod, select_union_member, [Kind, Value]}, + {Mod, select_union_member, [Value]} + ]). + +config_refs(Kind, Mods) -> + lists:append([mod_refs(Kind, Mod) || Mod <- Mods]). + +mod_refs(Kind, Mod) -> + emqx_utils:call_first_defined([ + {Mod, refs, [Kind]}, + {Mod, refs, []} + ]). root_type() -> hoconsc:array(authenticator_type()). diff --git a/apps/emqx_auth/test/emqx_authn/emqx_authn_api_SUITE.erl b/apps/emqx_auth/test/emqx_authn/emqx_authn_api_SUITE.erl index 635b157d9..45a605e6e 100644 --- a/apps/emqx_auth/test/emqx_authn/emqx_authn_api_SUITE.erl +++ b/apps/emqx_auth/test/emqx_authn/emqx_authn_api_SUITE.erl @@ -63,14 +63,16 @@ end_per_testcase(_, Config) -> init_per_suite(Config) -> Apps = emqx_cth_suite:start( [ - emqx, emqx_conf, + emqx, emqx_auth, + %% to load schema + {emqx_auth_mnesia, #{start => false}}, emqx_management, {emqx_dashboard, "dashboard.listeners.http { enable = true, bind = 18083 }"} ], #{ - work_dir => ?config(priv_dir, Config) + work_dir => filename:join(?config(priv_dir, Config), ?MODULE) } ), _ = emqx_common_test_http:create_default_app(), @@ -535,6 +537,36 @@ ignore_switch_to_global_chain(_) -> ), ok = emqtt:disconnect(Client4). +t_bcrypt_validation(_Config) -> + BaseConf = #{ + mechanism => <<"password_based">>, + backend => <<"built_in_database">>, + user_id_type => <<"username">> + }, + BcryptValid = #{ + name => <<"bcrypt">>, + salt_rounds => 10 + }, + BcryptInvalid = #{ + name => <<"bcrypt">>, + salt_rounds => 15 + }, + + ConfValid = BaseConf#{password_hash_algorithm => BcryptValid}, + ConfInvalid = BaseConf#{password_hash_algorithm => BcryptInvalid}, + + {ok, 400, _} = request( + post, + uri([?CONF_NS]), + ConfInvalid + ), + + {ok, 200, _} = request( + post, + uri([?CONF_NS]), + ConfValid + ). 
+ %%------------------------------------------------------------------------------ %% Helpers %%------------------------------------------------------------------------------ diff --git a/apps/emqx_auth/test/emqx_authz/emqx_authz_SUITE.erl b/apps/emqx_auth/test/emqx_authz/emqx_authz_SUITE.erl index 1af7d4d1d..37c9ebfc1 100644 --- a/apps/emqx_auth/test/emqx_authz/emqx_authz_SUITE.erl +++ b/apps/emqx_auth/test/emqx_authz/emqx_authz_SUITE.erl @@ -70,6 +70,7 @@ init_per_testcase(TestCase, Config) when {ok, _} = emqx:update_config([authorization, deny_action], disconnect), Config; init_per_testcase(_TestCase, Config) -> + _ = file:delete(emqx_authz_file:acl_conf_file()), {ok, _} = emqx_authz:update(?CMD_REPLACE, []), Config. diff --git a/apps/emqx_auth_mnesia/src/emqx_authn_mnesia_schema.erl b/apps/emqx_auth_mnesia/src/emqx_authn_mnesia_schema.erl index 2d57abc90..bb5ccfe1a 100644 --- a/apps/emqx_auth_mnesia/src/emqx_authn_mnesia_schema.erl +++ b/apps/emqx_auth_mnesia/src/emqx_authn_mnesia_schema.erl @@ -24,27 +24,30 @@ -export([ fields/1, desc/1, - refs/0, - select_union_member/1 + refs/1, + select_union_member/2 ]). -refs() -> +refs(api_write) -> + [?R_REF(builtin_db_api)]; +refs(_) -> [?R_REF(builtin_db)]. -select_union_member(#{ +select_union_member(Kind, #{ <<"mechanism">> := ?AUTHN_MECHANISM_SIMPLE_BIN, <<"backend">> := ?AUTHN_BACKEND_BIN }) -> - refs(); -select_union_member(_) -> + refs(Kind); +select_union_member(_Kind, _Value) -> undefined. fields(builtin_db) -> [ - {mechanism, emqx_authn_schema:mechanism(?AUTHN_MECHANISM_SIMPLE)}, - {backend, emqx_authn_schema:backend(?AUTHN_BACKEND)}, - {user_id_type, fun user_id_type/1}, {password_hash_algorithm, fun emqx_authn_password_hashing:type_rw/1} - ] ++ emqx_authn_schema:common_fields(). + ] ++ common_fields(); +fields(builtin_db_api) -> + [ + {password_hash_algorithm, fun emqx_authn_password_hashing:type_rw_api/1} + ] ++ common_fields(). 
desc(builtin_db) ->
     ?DESC(builtin_db);
@@ -56,3 +59,10 @@ user_id_type(desc) -> ?DESC(?FUNCTION_NAME);
 user_id_type(default) -> <<"username">>;
 user_id_type(required) -> true;
 user_id_type(_) -> undefined.
+
+common_fields() ->
+    [
+        {mechanism, emqx_authn_schema:mechanism(?AUTHN_MECHANISM_SIMPLE)},
+        {backend, emqx_authn_schema:backend(?AUTHN_BACKEND)},
+        {user_id_type, fun user_id_type/1}
+    ] ++ emqx_authn_schema:common_fields().
diff --git a/apps/emqx_utils/src/emqx_utils.erl b/apps/emqx_utils/src/emqx_utils.erl
index bf4e07ff9..f827f65de 100644
--- a/apps/emqx_utils/src/emqx_utils.erl
+++ b/apps/emqx_utils/src/emqx_utils.erl
@@ -62,7 +62,8 @@
     merge_lists/3,
     tcp_keepalive_opts/4,
     format/1,
-    format_mfal/1
+    format_mfal/1,
+    call_first_defined/1
 ]).
 
 -export([
@@ -554,6 +555,22 @@ format_mfal(Data) ->
             undefined
     end.
 
+-spec call_first_defined(list({module(), atom(), list()})) -> term() | no_return().
+call_first_defined([{Module, Function, Args} | Rest]) ->
+    try
+        apply(Module, Function, Args)
+    catch
+        error:undef:Stacktrace ->
+            case Stacktrace of
+                [{Module, Function, _, _} | _] ->
+                    call_first_defined(Rest);
+                _ ->
+                    erlang:raise(error, undef, Stacktrace)
+            end
+    end;
+call_first_defined([]) ->
+    error(none_fun_is_defined).
+
 %%------------------------------------------------------------------------------
 %% Internal Functions
 %%------------------------------------------------------------------------------
diff --git a/changes/ce/fix-11771.en.md b/changes/ce/fix-11771.en.md
new file mode 100644
index 000000000..1df7503de
--- /dev/null
+++ b/changes/ce/fix-11771.en.md
@@ -0,0 +1 @@
+Fixed validation of Bcrypt salt rounds in authentication management through the API/Dashboard.
From e1c8317779a55c8a77f54a3f38ea6dca95bc2031 Mon Sep 17 00:00:00 2001 From: Ilya Averyanov Date: Tue, 17 Oct 2023 17:38:02 +0300 Subject: [PATCH 034/155] chore(authn): remove dead code --- apps/emqx_auth_redis/src/emqx_authn_redis.erl | 84 ------------------- 1 file changed, 84 deletions(-) diff --git a/apps/emqx_auth_redis/src/emqx_authn_redis.erl b/apps/emqx_auth_redis/src/emqx_authn_redis.erl index 2f0948faf..960308ac9 100644 --- a/apps/emqx_auth_redis/src/emqx_authn_redis.erl +++ b/apps/emqx_auth_redis/src/emqx_authn_redis.erl @@ -18,100 +18,16 @@ -include_lib("emqx_auth/include/emqx_authn.hrl"). -include_lib("emqx/include/logger.hrl"). --include_lib("hocon/include/hoconsc.hrl"). --behaviour(hocon_schema). -behaviour(emqx_authn_provider). -export([ - namespace/0, - tags/0, - roots/0, - fields/1, - desc/1 -]). - --export([ - refs/0, - union_member_selector/1, create/2, update/2, authenticate/2, destroy/1 ]). -%%------------------------------------------------------------------------------ -%% Hocon Schema -%%------------------------------------------------------------------------------ - -namespace() -> "authn". - -tags() -> - [<<"Authentication">>]. - -%% used for config check when the schema module is resolved -roots() -> - [ - {?CONF_NS, - hoconsc:mk( - hoconsc:union(fun ?MODULE:union_member_selector/1), - #{} - )} - ]. - -fields(redis_single) -> - common_fields() ++ emqx_redis:fields(single); -fields(redis_cluster) -> - common_fields() ++ emqx_redis:fields(cluster); -fields(redis_sentinel) -> - common_fields() ++ emqx_redis:fields(sentinel). - -desc(redis_single) -> - ?DESC(single); -desc(redis_cluster) -> - ?DESC(cluster); -desc(redis_sentinel) -> - ?DESC(sentinel); -desc(_) -> - "". - -common_fields() -> - [ - {mechanism, emqx_authn_schema:mechanism(password_based)}, - {backend, emqx_authn_schema:backend(redis)}, - {cmd, fun cmd/1}, - {password_hash_algorithm, fun emqx_authn_password_hashing:type_ro/1} - ] ++ emqx_authn_schema:common_fields(). 
- -cmd(type) -> string(); -cmd(desc) -> ?DESC(?FUNCTION_NAME); -cmd(required) -> true; -cmd(_) -> undefined. - -refs() -> - [ - hoconsc:ref(?MODULE, redis_single), - hoconsc:ref(?MODULE, redis_cluster), - hoconsc:ref(?MODULE, redis_sentinel) - ]. - -union_member_selector(all_union_members) -> - refs(); -union_member_selector({value, Value}) -> - refs(Value). - -refs(#{<<"redis_type">> := <<"single">>}) -> - [hoconsc:ref(?MODULE, redis_single)]; -refs(#{<<"redis_type">> := <<"cluster">>}) -> - [hoconsc:ref(?MODULE, redis_cluster)]; -refs(#{<<"redis_type">> := <<"sentinel">>}) -> - [hoconsc:ref(?MODULE, redis_sentinel)]; -refs(_) -> - throw(#{ - field_name => redis_type, - expected => "single | cluster | sentinel" - }). - %%------------------------------------------------------------------------------ %% APIs %%------------------------------------------------------------------------------ From 2388d36b09dbec0c855abc3984100f28a4cc5397 Mon Sep 17 00:00:00 2001 From: zhongwencool Date: Wed, 18 Oct 2023 18:17:03 +0800 Subject: [PATCH 035/155] fix: allow viewers to change their own passwords --- .../src/emqx_dashboard_token.erl | 4 +-- .../src/emqx_dashboard_rbac.app.src | 2 +- .../src/emqx_dashboard_rbac.erl | 23 +++++++++------ .../test/emqx_dashboard_rbac_SUITE.erl | 28 +++++++++++++++++++ 4 files changed, 46 insertions(+), 11 deletions(-) diff --git a/apps/emqx_dashboard/src/emqx_dashboard_token.erl b/apps/emqx_dashboard/src/emqx_dashboard_token.erl index 1c840e90c..9a9875935 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard_token.erl +++ b/apps/emqx_dashboard/src/emqx_dashboard_token.erl @@ -248,8 +248,8 @@ clean_expired_jwt(Now) -> -if(?EMQX_RELEASE_EDITION == ee). 
check_rbac(Req, JWT) -> - #?ADMIN_JWT{exptime = _ExpTime, extra = Extra, username = _Username} = JWT, - case emqx_dashboard_rbac:check_rbac(Req, Extra) of + #?ADMIN_JWT{exptime = _ExpTime, extra = Extra, username = Username} = JWT, + case emqx_dashboard_rbac:check_rbac(Req, Username, Extra) of true -> save_new_jwt(JWT); _ -> diff --git a/apps/emqx_dashboard_rbac/src/emqx_dashboard_rbac.app.src b/apps/emqx_dashboard_rbac/src/emqx_dashboard_rbac.app.src index 190764e2f..ec8e6cd3f 100644 --- a/apps/emqx_dashboard_rbac/src/emqx_dashboard_rbac.app.src +++ b/apps/emqx_dashboard_rbac/src/emqx_dashboard_rbac.app.src @@ -1,6 +1,6 @@ {application, emqx_dashboard_rbac, [ {description, "EMQX Dashboard RBAC"}, - {vsn, "0.1.0"}, + {vsn, "0.1.1"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_dashboard_rbac/src/emqx_dashboard_rbac.erl b/apps/emqx_dashboard_rbac/src/emqx_dashboard_rbac.erl index 28bd8960e..57132b65b 100644 --- a/apps/emqx_dashboard_rbac/src/emqx_dashboard_rbac.erl +++ b/apps/emqx_dashboard_rbac/src/emqx_dashboard_rbac.erl @@ -6,18 +6,18 @@ -include_lib("emqx_dashboard/include/emqx_dashboard.hrl"). --export([check_rbac/2, role/1, valid_role/1]). +-export([check_rbac/3, role/1, valid_role/1]). -dialyzer({nowarn_function, role/1}). %%===================================================================== %% API -check_rbac(Req, Extra) -> +check_rbac(Req, Username, Extra) -> Role = role(Extra), Method = cowboy_req:method(Req), AbsPath = cowboy_req:path(Req), case emqx_dashboard_swagger:get_relative_uri(AbsPath) of {ok, Path} -> - check_rbac(Role, Method, Path); + check_rbac(Role, Method, Path, Username); _ -> false end. @@ -41,14 +41,21 @@ valid_role(Role) -> {error, <<"Role does not exist">>} end. 
%% ===================================================================
-check_rbac(?ROLE_SUPERUSER, _, _) ->
+check_rbac(?ROLE_SUPERUSER, _, _, _) ->
     true;
-check_rbac(?ROLE_VIEWER, <<"GET">>, _) ->
+check_rbac(?ROLE_VIEWER, <<"GET">>, _, _) ->
     true;
-%% this API is a special case
-check_rbac(?ROLE_VIEWER, <<"POST">>, <<"/logout">>) ->
+%% everyone should be allowed to log out
+check_rbac(?ROLE_VIEWER, <<"POST">>, <<"/logout">>, _) ->
     true;
-check_rbac(_, _, _) ->
+%% a viewer is allowed to change only their own password,
+%% a superuser is allowed to change any user's password
+check_rbac(?ROLE_VIEWER, <<"POST">>, <<"/users/", SubPath/binary>>, Username) ->
+    case binary:split(SubPath, <<"/">>, [global]) of
+        [Username, <<"change_pwd">>] -> true;
+        _ -> false
+    end;
+check_rbac(_, _, _, _) ->
     false.
 
 role_list() ->
diff --git a/apps/emqx_dashboard_rbac/test/emqx_dashboard_rbac_SUITE.erl b/apps/emqx_dashboard_rbac/test/emqx_dashboard_rbac_SUITE.erl
index b1a51a3c9..eeac8dadf 100644
--- a/apps/emqx_dashboard_rbac/test/emqx_dashboard_rbac_SUITE.erl
+++ b/apps/emqx_dashboard_rbac/test/emqx_dashboard_rbac_SUITE.erl
@@ -160,6 +160,34 @@ t_login_out(_) ->
     {ok, Username} = emqx_dashboard_admin:verify_token(FakeReq, Token),
     ok.
+t_change_pwd(_) -> + Viewer1 = <<"viewer1">>, + Viewer2 = <<"viewer2">>, + SuperUser = <<"super_user">>, + Password = <<"public_www1">>, + Desc = <<"desc">>, + {ok, _} = emqx_dashboard_admin:add_user(Viewer1, Password, ?ROLE_VIEWER, Desc), + {ok, _} = emqx_dashboard_admin:add_user(Viewer2, Password, ?ROLE_VIEWER, Desc), + {ok, _} = emqx_dashboard_admin:add_user(SuperUser, Password, ?ROLE_SUPERUSER, Desc), + {ok, ?ROLE_VIEWER, Viewer1Token} = emqx_dashboard_admin:sign_token(Viewer1, Password), + {ok, ?ROLE_SUPERUSER, SuperToken} = emqx_dashboard_admin:sign_token(SuperUser, Password), + %% viewer can change own password + ?assertEqual({ok, Viewer1}, change_pwd(Viewer1Token, Viewer1)), + %% viewer can't change other's password + ?assertEqual({error, unauthorized_role}, change_pwd(Viewer1Token, Viewer2)), + ?assertEqual({error, unauthorized_role}, change_pwd(Viewer1Token, SuperUser)), + %% superuser can change other's password + ?assertEqual({ok, SuperUser}, change_pwd(SuperToken, Viewer1)), + ?assertEqual({ok, SuperUser}, change_pwd(SuperToken, Viewer2)), + ?assertEqual({ok, SuperUser}, change_pwd(SuperToken, SuperUser)), + ok. + +change_pwd(Token, Username) -> + Path = "/users/" ++ binary_to_list(Username) ++ "/change_pwd", + Path1 = erlang:list_to_binary(emqx_dashboard_swagger:relative_uri(Path)), + Req = #{method => <<"POST">>, path => Path1}, + emqx_dashboard_admin:verify_token(Req, Token). 
+
 add_default_superuser() ->
     {ok, _NewUser} = emqx_dashboard_admin:add_user(
         ?DEFAULT_SUPERUSER,

From 81e10c6748a6efbbe42d01dff1c974bce2edbe94 Mon Sep 17 00:00:00 2001
From: zhongwencool
Date: Wed, 18 Oct 2023 18:43:11 +0800
Subject: [PATCH 036/155] chore: add changelog for 11785

---
 changes/ce/feat-11785.en.md | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 changes/ce/feat-11785.en.md

diff --git a/changes/ce/feat-11785.en.md b/changes/ce/feat-11785.en.md
new file mode 100644
index 000000000..765ce6ea0
--- /dev/null
+++ b/changes/ce/feat-11785.en.md
@@ -0,0 +1 @@
+Allow viewers to change their own passwords; viewers cannot change other users' passwords.

From ad09ca9d6d1e47c0b17c7ac651a4cc78c073788b Mon Sep 17 00:00:00 2001
From: "Zaiming (Stone) Shi"
Date: Wed, 18 Oct 2023 13:26:53 +0200
Subject: [PATCH 037/155] refactor(nodetool): only add libs when necessary

---
 bin/nodetool                | 3 ++-
 changes/ce/feat-11787.en.md | 3 +++
 2 files changed, 5 insertions(+), 1 deletion(-)
 create mode 100644 changes/ce/feat-11787.en.md

diff --git a/bin/nodetool b/bin/nodetool
index ab2210aa5..c0d5b0025 100755
--- a/bin/nodetool
+++ b/bin/nodetool
@@ -21,12 +21,13 @@ main(Args) ->
                 ok
         end
     end,
-    ok = add_libs_dir(),
     case Args of
         ["hocon" | Rest] ->
+            ok = add_libs_dir(),
             %% forward the call to hocon_cli
             hocon_cli:main(Rest);
         ["check_license_key", Key0] ->
+            ok = add_libs_dir(),
             Key = cleanup_key(Key0),
             check_license(#{key => Key});
         _ ->
diff --git a/changes/ce/feat-11787.en.md b/changes/ce/feat-11787.en.md
new file mode 100644
index 000000000..2dc3efc73
--- /dev/null
+++ b/changes/ce/feat-11787.en.md
@@ -0,0 +1,3 @@
+Improve `emqx` command performance.
+
+Avoid loading EMQX application code in `nodetool` script unless necessary.
From 5b9866f63096ce7292c271cdafbdf2e0732a8ddf Mon Sep 17 00:00:00 2001 From: Thales Macedo Garitezi Date: Wed, 18 Oct 2023 16:08:29 -0300 Subject: [PATCH 038/155] fix(coap): increase received packet counter for keepalive Fixes https://emqx.atlassian.net/browse/EMQX-11193 Fixes https://github.com/emqx/emqx/issues/11779 --- .../src/emqx_coap_channel.erl | 18 +++-- .../test/emqx_coap_SUITE.erl | 73 +++++++++++++++++-- changes/ce/fix-11791.en.md | 1 + 3 files changed, 81 insertions(+), 11 deletions(-) create mode 100644 changes/ce/fix-11791.en.md diff --git a/apps/emqx_gateway_coap/src/emqx_coap_channel.erl b/apps/emqx_gateway_coap/src/emqx_coap_channel.erl index 467ac20a2..5e3461c52 100644 --- a/apps/emqx_gateway_coap/src/emqx_coap_channel.erl +++ b/apps/emqx_gateway_coap/src/emqx_coap_channel.erl @@ -86,7 +86,6 @@ -define(INFO_KEYS, [conninfo, conn_state, clientinfo, session]). -define(DEF_IDLE_TIME, timer:seconds(30)). --define(GET_IDLE_TIME(Cfg), maps:get(idle_timeout, Cfg, ?DEF_IDLE_TIME)). -import(emqx_coap_medium, [reply/2, reply/3, reply/4, iter/3, iter/4]). @@ -150,8 +149,7 @@ init( mountpoint => Mountpoint } ), - %% FIXME: it should coap.hearbeat instead of idle_timeout? - Heartbeat = ?GET_IDLE_TIME(Config), + Heartbeat = maps:get(heartbeat, Config, ?DEF_IDLE_TIME), #channel{ ctx = Ctx, conninfo = ConnInfo, @@ -179,8 +177,8 @@ send_request(Channel, Request) -> | {ok, replies(), channel()} | {shutdown, Reason :: term(), channel()} | {shutdown, Reason :: term(), replies(), channel()}. 
-handle_in(Msg, ChannleT) -> - Channel = ensure_keepalive_timer(ChannleT), +handle_in(Msg, Channel0) -> + Channel = ensure_keepalive_timer(Channel0), case emqx_coap_message:is_request(Msg) of true -> check_auth_state(Msg, Channel); @@ -321,6 +319,9 @@ handle_call(Req, _From, Channel) -> handle_cast(close, Channel) -> ?SLOG(info, #{msg => "close_connection"}), shutdown(normal, Channel); +handle_cast(inc_recv_pkt, Channel) -> + _ = emqx_pd:inc_counter(recv_pkt, 1), + {ok, Channel}; handle_cast(Req, Channel) -> ?SLOG(error, #{msg => "unexpected_cast", cast => Req}), {ok, Channel}. @@ -455,6 +456,13 @@ check_token( Reply = emqx_coap_message:piggyback({error, unauthorized}, Msg), {shutdown, normal, Reply, Channel}; true -> + %% hack: since each message request can spawn a new connection + %% process, we can't rely on the `inc_incoming_stats' call in + %% `emqx_gateway_conn:handle_incoming' to properly keep track of + %% bumping incoming requests for an existing channel. Since this + %% number is used by keepalive, we have to bump it inside the + %% requested channel/connection pid so heartbeats actually work. 
+ emqx_gateway_cm:cast(coap, ReqClientId, inc_recv_pkt), call_session(handle_request, Msg, Channel) end; _ -> diff --git a/apps/emqx_gateway_coap/test/emqx_coap_SUITE.erl b/apps/emqx_gateway_coap/test/emqx_coap_SUITE.erl index 4459d84f1..c066b84ff 100644 --- a/apps/emqx_gateway_coap/test/emqx_coap_SUITE.erl +++ b/apps/emqx_gateway_coap/test/emqx_coap_SUITE.erl @@ -83,10 +83,26 @@ init_per_testcase(t_connection_with_authn_failed, Config) -> fun(_) -> {error, bad_username_or_password} end ), Config; +init_per_testcase(t_heartbeat, Config) -> + NewHeartbeat = 800, + OldConf = emqx:get_raw_config([gateway, coap]), + {ok, _} = emqx_gateway_conf:update_gateway( + coap, + OldConf#{<<"heartbeat">> => <<"800ms">>} + ), + [ + {old_conf, OldConf}, + {new_heartbeat, NewHeartbeat} + | Config + ]; init_per_testcase(_, Config) -> ok = meck:new(emqx_access_control, [passthrough]), Config. +end_per_testcase(t_heartbeat, Config) -> + OldConf = ?config(old_conf, Config), + {ok, _} = emqx_gateway_conf:update_gateway(coap, OldConf), + ok; end_per_testcase(_, Config) -> ok = meck:unload(emqx_access_control), Config. @@ -123,13 +139,49 @@ t_connection(_) -> ), %% heartbeat - HeartURI = - ?MQTT_PREFIX ++ - "/connection?clientid=client1&token=" ++ - Token, + {ok, changed, _} = send_heartbeat(Token), - ?LOGT("send heartbeat request:~ts~n", [HeartURI]), - {ok, changed, _} = er_coap_client:request(put, HeartURI), + disconnection(Channel, Token), + + timer:sleep(100), + ?assertEqual( + [], + emqx_gateway_cm_registry:lookup_channels(coap, <<"client1">>) + ) + end, + do(Action). 
+ +t_heartbeat(Config) -> + Heartbeat = ?config(new_heartbeat, Config), + Action = fun(Channel) -> + Token = connection(Channel), + + timer:sleep(100), + ?assertNotEqual( + [], + emqx_gateway_cm_registry:lookup_channels(coap, <<"client1">>) + ), + + %% must keep client connection alive + Delay = Heartbeat div 2, + lists:foreach( + fun(_) -> + ?assertMatch({ok, changed, _}, send_heartbeat(Token)), + timer:sleep(Delay) + end, + lists:seq(1, 5) + ), + + ?assertNotEqual( + [], + emqx_gateway_cm_registry:lookup_channels(coap, <<"client1">>) + ), + + timer:sleep(Heartbeat * 2), + ?assertEqual( + [], + emqx_gateway_cm_registry:lookup_channels(coap, <<"client1">>) + ), disconnection(Channel, Token), @@ -491,6 +543,15 @@ t_connectionless_pubsub(_) -> %%-------------------------------------------------------------------- %% helpers +send_heartbeat(Token) -> + HeartURI = + ?MQTT_PREFIX ++ + "/connection?clientid=client1&token=" ++ + Token, + + ?LOGT("send heartbeat request:~ts~n", [HeartURI]), + er_coap_client:request(put, HeartURI). + connection(Channel) -> URI = ?MQTT_PREFIX ++ diff --git a/changes/ce/fix-11791.en.md b/changes/ce/fix-11791.en.md new file mode 100644 index 000000000..983347605 --- /dev/null +++ b/changes/ce/fix-11791.en.md @@ -0,0 +1 @@ +Fixed an issue that prevented heartbeats from correctly keeping the CoAP Gateway connections alive. 
From 0b4600c293eed4b4db65a4373fcd0b48f596caa8 Mon Sep 17 00:00:00 2001 From: Ilya Averyanov Date: Wed, 18 Oct 2023 15:46:43 +0300 Subject: [PATCH 039/155] feat(auth): improve redis command parsing and validation --- .../src/emqx_auth_redis_validations.erl | 71 ++++++++++ apps/emqx_auth_redis/src/emqx_authn_redis.erl | 67 +++++---- .../src/emqx_authn_redis_schema.erl | 2 +- apps/emqx_auth_redis/src/emqx_authz_redis.erl | 31 ++++- .../test/emqx_authn_redis_SUITE.erl | 17 ++- .../test/emqx_authz_redis_SUITE.erl | 22 ++- apps/emqx_redis/src/emqx_redis_command.erl | 129 ++++++++++++++++++ apps/emqx_redis/test/emqx_redis_SUITE.erl | 34 ++--- .../test/emqx_redis_command_SUITE.erl | 76 +++++++++++ .../test/props/prop_emqx_redis_command.erl | 31 +++++ .../test/emqx_telemetry_SUITE.erl | 2 +- changes/ce/feat-11790.en.md | 3 + 12 files changed, 422 insertions(+), 63 deletions(-) create mode 100644 apps/emqx_auth_redis/src/emqx_auth_redis_validations.erl create mode 100644 apps/emqx_redis/src/emqx_redis_command.erl create mode 100644 apps/emqx_redis/test/emqx_redis_command_SUITE.erl create mode 100644 apps/emqx_redis/test/props/prop_emqx_redis_command.erl create mode 100644 changes/ce/feat-11790.en.md diff --git a/apps/emqx_auth_redis/src/emqx_auth_redis_validations.erl b/apps/emqx_auth_redis/src/emqx_auth_redis_validations.erl new file mode 100644 index 000000000..e94b67c40 --- /dev/null +++ b/apps/emqx_auth_redis/src/emqx_auth_redis_validations.erl @@ -0,0 +1,71 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2020-2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. 
+%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_auth_redis_validations). + +-export([ + validate_command/2 +]). + +validate_command([], _Command) -> + ok; +validate_command([Validation | Rest], Command) -> + case validate(Validation, Command) of + ok -> + validate_command(Rest, Command); + {error, _} = Error -> + Error + end. + +validate(not_empty, []) -> + {error, empty_command}; +validate(not_empty, _) -> + ok; +validate({command_name, AllowedNames}, [Name | _]) -> + IsAllowed = lists:any( + fun(AllowedName) -> + string:equal(AllowedName, Name, true, none) + end, + AllowedNames + ), + case IsAllowed of + true -> + ok; + false -> + {error, {invalid_command_name, Name}} + end; +validate({command_name, _}, _) -> + {error, invalid_command_name}; +validate({allowed_fields, AllowedFields}, [_CmdName, _CmdKey | Args]) -> + Unknown = lists:filter(fun(Arg) -> not lists:member(Arg, AllowedFields) end, Args), + case Unknown of + [] -> + ok; + _ -> + {error, {unknown_fields, Unknown}} + end; +validate({allowed_fields, _}, _) -> + ok; +validate({required_field_one_of, Required}, [_CmdName, _CmdKey | Args]) -> + HasRequired = lists:any(fun(Field) -> lists:member(Field, Args) end, Required), + case HasRequired of + true -> + ok; + false -> + {error, {missing_required_field, Required}} + end; +validate({required_field_one_of, Required}, _) -> + {error, {missing_required_field, Required}}. 
diff --git a/apps/emqx_auth_redis/src/emqx_authn_redis.erl b/apps/emqx_auth_redis/src/emqx_authn_redis.erl index 960308ac9..b7324e251 100644 --- a/apps/emqx_auth_redis/src/emqx_authn_redis.erl +++ b/apps/emqx_auth_redis/src/emqx_authn_redis.erl @@ -118,54 +118,51 @@ authenticate( parse_config( #{ - cmd := Cmd, + cmd := CmdStr, password_hash_algorithm := Algorithm } = Config ) -> - try - NCmd = parse_cmd(Cmd), - ok = emqx_authn_password_hashing:init(Algorithm), - ok = emqx_authn_utils:ensure_apps_started(Algorithm), - State = maps:with([password_hash_algorithm, salt_position], Config), - {Config, State#{cmd => NCmd}} - catch - error:{unsupported_cmd, _Cmd} -> - {error, {unsupported_cmd, Cmd}}; - error:missing_password_hash -> - {error, missing_password_hash}; - error:{unsupported_fields, Fields} -> - {error, {unsupported_fields, Fields}} + case parse_cmd(CmdStr) of + {ok, Cmd} -> + ok = emqx_authn_password_hashing:init(Algorithm), + ok = emqx_authn_utils:ensure_apps_started(Algorithm), + State = maps:with([password_hash_algorithm, salt_position], Config), + {Config, State#{cmd => Cmd}}; + {error, _} = Error -> + Error end. -%% Only support HGET and HMGET -parse_cmd(Cmd) -> - case string:tokens(Cmd, " ") of - [Command, Key, Field | Fields] when Command =:= "HGET" orelse Command =:= "HMGET" -> - NFields = [Field | Fields], - check_fields(NFields), - KeyTemplate = emqx_authn_utils:parse_str(list_to_binary(Key)), - {Command, KeyTemplate, NFields}; - _ -> - error({unsupported_cmd, Cmd}) +parse_cmd(CmdStr) -> + case emqx_redis_command:split(CmdStr) of + {ok, Cmd} -> + case validate_cmd(Cmd) of + ok -> + [CommandName, Key | Fields] = Cmd, + {ok, {CommandName, emqx_authn_utils:parse_str(Key), Fields}}; + {error, _} = Error -> + Error + end; + {error, _} = Error -> + Error end. 
-check_fields(Fields) -> - HasPassHash = lists:member("password_hash", Fields) orelse lists:member("password", Fields), - KnownFields = ["password_hash", "password", "salt", "is_superuser"], - UnknownFields = [F || F <- Fields, not lists:member(F, KnownFields)], - - case {HasPassHash, UnknownFields} of - {true, []} -> ok; - {true, _} -> error({unsupported_fields, UnknownFields}); - {false, _} -> error(missing_password_hash) - end. +validate_cmd(Cmd) -> + emqx_auth_redis_validations:validate_command( + [ + not_empty, + {command_name, [<<"hget">>, <<"hmget">>]}, + {allowed_fields, [<<"password_hash">>, <<"password">>, <<"salt">>, <<"is_superuser">>]}, + {required_field_one_of, [<<"password_hash">>, <<"password">>]} + ], + Cmd + ). merge(Fields, Value) when not is_list(Value) -> merge(Fields, [Value]); merge(Fields, Values) -> maps:from_list( [ - {list_to_binary(K), V} + {K, V} || {K, V} <- lists:zip(Fields, Values), V =/= undefined ] ). diff --git a/apps/emqx_auth_redis/src/emqx_authn_redis_schema.erl b/apps/emqx_auth_redis/src/emqx_authn_redis_schema.erl index 4f1b63633..7b5794c48 100644 --- a/apps/emqx_auth_redis/src/emqx_authn_redis_schema.erl +++ b/apps/emqx_auth_redis/src/emqx_authn_redis_schema.erl @@ -85,7 +85,7 @@ common_fields() -> {password_hash_algorithm, fun emqx_authn_password_hashing:type_ro/1} ] ++ emqx_authn_schema:common_fields(). -cmd(type) -> string(); +cmd(type) -> binary(); cmd(desc) -> ?DESC(?FUNCTION_NAME); cmd(required) -> true; cmd(_) -> undefined. diff --git a/apps/emqx_auth_redis/src/emqx_authz_redis.erl b/apps/emqx_auth_redis/src/emqx_authz_redis.erl index be83223e4..9b69f508a 100644 --- a/apps/emqx_auth_redis/src/emqx_authz_redis.erl +++ b/apps/emqx_auth_redis/src/emqx_authz_redis.erl @@ -47,15 +47,13 @@ description() -> "AuthZ with Redis". 
create(#{cmd := CmdStr} = Source) -> - Cmd = tokens(CmdStr), + CmdTemplate = parse_cmd(CmdStr), ResourceId = emqx_authz_utils:make_resource_id(?MODULE), - CmdTemplate = emqx_authz_utils:parse_deep(Cmd, ?PLACEHOLDERS), {ok, _Data} = emqx_authz_utils:create_resource(ResourceId, emqx_redis, Source), Source#{annotations => #{id => ResourceId}, cmd_template => CmdTemplate}. update(#{cmd := CmdStr} = Source) -> - Cmd = tokens(CmdStr), - CmdTemplate = emqx_authz_utils:parse_deep(Cmd, ?PLACEHOLDERS), + CmdTemplate = parse_cmd(CmdStr), case emqx_authz_utils:update_resource(emqx_redis, Source) of {error, Reason} -> error({load_config_error, Reason}); @@ -131,9 +129,28 @@ compile_rule(RuleBin, TopicFilterRaw) -> error(Reason) end. -tokens(Query) -> - Tokens = binary:split(Query, <<" ">>, [global]), - [Token || Token <- Tokens, size(Token) > 0]. +parse_cmd(Query) -> + case emqx_redis_command:split(Query) of + {ok, Cmd} -> + ok = validate_cmd(Cmd), + emqx_authz_utils:parse_deep(Cmd, ?PLACEHOLDERS); + {error, Reason} -> + error({invalid_redis_cmd, Reason, Query}) + end. + +validate_cmd(Cmd) -> + case + emqx_auth_redis_validations:validate_command( + [ + not_empty, + {command_name, [<<"hmget">>, <<"hgetall">>]} + ], + Cmd + ) + of + ok -> ok; + {error, Reason} -> error({invalid_redis_cmd, Reason, Cmd}) + end. 
parse_rule(<<"publish">>) -> #{<<"action">> => <<"publish">>}; diff --git a/apps/emqx_auth_redis/test/emqx_authn_redis_SUITE.erl b/apps/emqx_auth_redis/test/emqx_authn_redis_SUITE.erl index b3f4a15a3..081c4e641 100644 --- a/apps/emqx_auth_redis/test/emqx_authn_redis_SUITE.erl +++ b/apps/emqx_auth_redis/test/emqx_authn_redis_SUITE.erl @@ -336,7 +336,22 @@ user_seeds() -> config_params => #{}, result => {ok, #{is_superuser => true}} }, - + #{ + data => #{ + password_hash => <<"plainsalt">>, + salt => <<"salt">>, + is_superuser => <<"1">> + }, + credentials => #{ + username => <<"plain">>, + password => <<"plain">> + }, + key => <<"mqtt_user:plain">>, + config_params => #{ + <<"cmd">> => <<"HmGeT mqtt_user:${username} password_hash salt is_superuser">> + }, + result => {ok, #{is_superuser => true}} + }, #{ data => #{ password_hash => <<"9b4d0c43d206d48279e69b9ad7132e22">>, diff --git a/apps/emqx_auth_redis/test/emqx_authz_redis_SUITE.erl b/apps/emqx_auth_redis/test/emqx_authz_redis_SUITE.erl index 962333cd2..1c52cee17 100644 --- a/apps/emqx_auth_redis/test/emqx_authz_redis_SUITE.erl +++ b/apps/emqx_auth_redis/test/emqx_authz_redis_SUITE.erl @@ -112,7 +112,9 @@ t_create_invalid_config(_Config) -> ). t_redis_error(_Config) -> - ok = setup_config(#{<<"cmd">> => <<"INVALID COMMAND">>}), + q([<<"SET">>, <<"notahash">>, <<"stringvalue">>]), + + ok = setup_config(#{<<"cmd">> => <<"HGETALL notahash">>}), ClientInfo = emqx_authz_test_lib:base_client_info(), @@ -121,6 +123,24 @@ t_redis_error(_Config) -> emqx_access_control:authorize(ClientInfo, ?AUTHZ_SUBSCRIBE, <<"a">>) ). +t_invalid_command(_Config) -> + Config = raw_redis_authz_config(), + + ?assertMatch( + {error, _}, + emqx_authz:update(?CMD_REPLACE, [Config#{<<"cmd">> => <<"HGET key">>}]) + ), + + ?assertMatch( + {ok, _}, + emqx_authz:update(?CMD_REPLACE, [Config#{<<"cmd">> => <<"HGETALL key">>}]) + ), + + ?assertMatch( + {error, _}, + emqx_authz:update({?CMD_REPLACE, redis}, Config#{<<"cmd">> => <<"HGET key">>}) + ). 
+ %%------------------------------------------------------------------------------ %% Cases %%------------------------------------------------------------------------------ diff --git a/apps/emqx_redis/src/emqx_redis_command.erl b/apps/emqx_redis/src/emqx_redis_command.erl new file mode 100644 index 000000000..7de80e1fa --- /dev/null +++ b/apps/emqx_redis/src/emqx_redis_command.erl @@ -0,0 +1,129 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +%% @doc `split/1` function reimplements the one used by Redis itself for `redis-cli`. +%% See `sdssplitargs` function, https://github.com/redis/redis/blob/unstable/src/sds.c. + +-module(emqx_redis_command). + +-export([split/1]). + +-define(CH_SPACE, 32). +-define(CH_N, 10). +-define(CH_R, 13). +-define(CH_T, 9). +-define(CH_B, 8). +-define(CH_A, 11). + +-define(IS_CH_HEX_DIGIT(C), + (((C >= $a) andalso (C =< $f)) orelse + ((C >= $A) andalso (C =< $F)) orelse + ((C >= $0) andalso (C =< $9))) +). +-define(IS_CH_SPACE(C), + (C =:= ?CH_SPACE orelse + C =:= ?CH_N orelse + C =:= ?CH_R orelse + C =:= ?CH_T orelse + C =:= ?CH_B orelse + C =:= ?CH_A) +). 
+ +split(Line) when is_binary(Line) -> + case split(binary_to_list(Line)) of + {ok, Args} -> + {ok, [list_to_binary(Arg) || Arg <- Args]}; + {error, _} = Error -> + Error + end; +split(Line) -> + split(Line, []). + +split([], Acc) -> + {ok, lists:reverse(Acc)}; +split([C | Rest] = Line, Acc) -> + case ?IS_CH_SPACE(C) of + true -> split(Rest, Acc); + false -> split_noq([], Line, Acc) + end. + +hex_digit_to_int(C) when (C >= $a) andalso (C =< $f) -> 10 + C - $a; +hex_digit_to_int(C) when (C >= $A) andalso (C =< $F) -> 10 + C - $A; +hex_digit_to_int(C) when (C >= $0) andalso (C =< $9) -> C - $0. + +maybe_special_char($n) -> ?CH_N; +maybe_special_char($r) -> ?CH_R; +maybe_special_char($t) -> ?CH_T; +maybe_special_char($b) -> ?CH_B; +maybe_special_char($a) -> ?CH_A; +maybe_special_char(C) -> C. + +%% Inside double quotes +split_inq(CurAcc, Line, Acc) -> + case Line of + [$\\, $x, HD1, HD2 | LineRest] when ?IS_CH_HEX_DIGIT(HD1) andalso ?IS_CH_HEX_DIGIT(HD2) -> + C = hex_digit_to_int(HD1) * 16 + hex_digit_to_int(HD2), + NewCurAcc = [C | CurAcc], + split_inq(NewCurAcc, LineRest, Acc); + [$\\, SC | LineRest] -> + C = maybe_special_char(SC), + NewCurAcc = [C | CurAcc], + split_inq(NewCurAcc, LineRest, Acc); + [$", C | _] when not ?IS_CH_SPACE(C) -> + {error, trailing_after_quote}; + [$" | LineRest] -> + split(LineRest, [lists:reverse(CurAcc) | Acc]); + [] -> + {error, unterminated_quote}; + [C | LineRest] -> + NewCurAcc = [C | CurAcc], + split_inq(NewCurAcc, LineRest, Acc) + end. + +%% Inside single quotes +split_insq(CurAcc, Line, Acc) -> + case Line of + [$\\, $' | LineRest] -> + NewCurAcc = [$' | CurAcc], + split_insq(NewCurAcc, LineRest, Acc); + [$', C | _] when not ?IS_CH_SPACE(C) -> + {error, trailing_after_single_quote}; + [$' | LineRest] -> + split(LineRest, [lists:reverse(CurAcc) | Acc]); + [] -> + {error, unterminated_single_quote}; + [C | LineRest] -> + NewCurAcc = [C | CurAcc], + split_insq(NewCurAcc, LineRest, Acc) + end. 
+ +%% Outside quotes +split_noq(CurAcc, Line, Acc) -> + case Line of + [C | LineRest] when + ?IS_CH_SPACE(C); C =:= ?CH_N; C =:= ?CH_R; C =:= ?CH_T + -> + split(LineRest, [lists:reverse(CurAcc) | Acc]); + [] -> + split([], [lists:reverse(CurAcc) | Acc]); + [$' | LineRest] -> + split_insq(CurAcc, LineRest, Acc); + [$" | LineRest] -> + split_inq(CurAcc, LineRest, Acc); + [C | LineRest] -> + NewCurAcc = [C | CurAcc], + split_noq(NewCurAcc, LineRest, Acc) + end. diff --git a/apps/emqx_redis/test/emqx_redis_SUITE.erl b/apps/emqx_redis/test/emqx_redis_SUITE.erl index e03b05921..8fcbf2b63 100644 --- a/apps/emqx_redis/test/emqx_redis_SUITE.erl +++ b/apps/emqx_redis/test/emqx_redis_SUITE.erl @@ -1,17 +1,17 @@ -% %%-------------------------------------------------------------------- -% %% Copyright (c) 2020-2023 EMQ Technologies Co., Ltd. All Rights Reserved. -% %% -% %% Licensed under the Apache License, Version 2.0 (the "License"); -% %% you may not use this file except in compliance with the License. -% %% You may obtain a copy of the License at -% %% http://www.apache.org/licenses/LICENSE-2.0 -% %% -% %% Unless required by applicable law or agreed to in writing, software -% %% distributed under the License is distributed on an "AS IS" BASIS, -% %% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -% %% See the License for the specific language governing permissions and -% %% limitations under the License. -% %%-------------------------------------------------------------------- +%%-------------------------------------------------------------------- +%% Copyright (c) 2020-2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. 
+%% You may obtain a copy of the License at +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- -module(emqx_redis_SUITE). @@ -190,9 +190,9 @@ perform_lifecycle_check(ResourceId, InitialConfig, RedisCommand) -> % Should not even be able to get the resource data out of ets now unlike just stopping. ?assertEqual({error, not_found}, emqx_resource:get_instance(ResourceId)). -% %%------------------------------------------------------------------------------ -% %% Helpers -% %%------------------------------------------------------------------------------ +%%------------------------------------------------------------------------------ +%% Helpers +%%------------------------------------------------------------------------------ redis_config_single() -> redis_config_base("single", "server"). diff --git a/apps/emqx_redis/test/emqx_redis_command_SUITE.erl b/apps/emqx_redis/test/emqx_redis_command_SUITE.erl new file mode 100644 index 000000000..1c6f87eff --- /dev/null +++ b/apps/emqx_redis/test/emqx_redis_command_SUITE.erl @@ -0,0 +1,76 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2020-2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. 
+%% You may obtain a copy of the License at +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_redis_command_SUITE). + +-compile(nowarn_export_all). +-compile(export_all). + +-include_lib("eunit/include/eunit.hrl"). + +all() -> + emqx_common_test_helpers:all(?MODULE). + +t_split_ok(_Config) -> + ?assertEqual( + {ok, [<<"ab">>, <<"cd">>, <<"ef">>]}, + emqx_redis_command:split(<<" \"ab\" 'cd' ef ">>) + ), + ?assertEqual( + {ok, [<<"ab">>, <<"cd">>, <<"ef">>]}, + emqx_redis_command:split(<<" ab\tcd ef">>) + ), + ?assertEqual( + {ok, [<<"abc'd">>, <<"ef">>]}, + emqx_redis_command:split(<<"ab\"c'd\" ef">>) + ), + ?assertEqual( + {ok, [<<"abc\"d">>, <<"ef">>]}, + emqx_redis_command:split(<<"ab'c\"d' ef">>) + ), + ?assertEqual( + {ok, [<<"IJK">>, <<"\\x49\\x4a\\x4B">>]}, + emqx_redis_command:split(<<"\"\\x49\\x4a\\x4B\" \\x49\\x4a\\x4B">>) + ), + ?assertEqual( + {ok, [<<"x\t\n\r\b\v">>]}, + emqx_redis_command:split(<<"\"\\x\\t\\n\\r\\b\\a\"">>) + ), + ?assertEqual( + {ok, [<<"abc\'d">>, <<"ef">>]}, + emqx_redis_command:split(<<"'abc\\'d' ef">>) + ), + ?assertEqual( + {ok, [<<>>, <<>>]}, + emqx_redis_command:split(<<" '' \"\" ">>) + ). + +t_split_error(_Config) -> + ?assertEqual( + {error, trailing_after_quote}, + emqx_redis_command:split(<<"\"a\"b">>) + ), + ?assertEqual( + {error, unterminated_quote}, + emqx_redis_command:split(<<"\"ab">>) + ), + ?assertEqual( + {error, trailing_after_single_quote}, + emqx_redis_command:split(<<"'a'b'c">>) + ), + ?assertEqual( + {error, unterminated_single_quote}, + emqx_redis_command:split(<<"'ab">>) + ). 
diff --git a/apps/emqx_redis/test/props/prop_emqx_redis_command.erl b/apps/emqx_redis/test/props/prop_emqx_redis_command.erl new file mode 100644 index 000000000..dc7ce2ada --- /dev/null +++ b/apps/emqx_redis/test/props/prop_emqx_redis_command.erl @@ -0,0 +1,31 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(prop_emqx_redis_command). + +-include_lib("proper/include/proper.hrl"). + +%%-------------------------------------------------------------------- +%% Properties +%%-------------------------------------------------------------------- + +prop_split() -> + ?FORALL( + Cmd, + binary(), + %% Should terminate and not crash + is_tuple(emqx_redis_command:split(Cmd)) + ). 
diff --git a/apps/emqx_telemetry/test/emqx_telemetry_SUITE.erl b/apps/emqx_telemetry/test/emqx_telemetry_SUITE.erl index 92839d06a..47d31c4de 100644 --- a/apps/emqx_telemetry/test/emqx_telemetry_SUITE.erl +++ b/apps/emqx_telemetry/test/emqx_telemetry_SUITE.erl @@ -738,7 +738,7 @@ create_authn(ChainName, redis) -> backend => redis, enable => true, user_id_type => username, - cmd => "HMGET mqtt_user:${username} password_hash salt is_superuser", + cmd => <<"HMGET mqtt_user:${username} password_hash salt is_superuser">>, password_hash_algorithm => #{ name => plain, salt_position => suffix diff --git a/changes/ce/feat-11790.en.md b/changes/ce/feat-11790.en.md new file mode 100644 index 000000000..c2ceee216 --- /dev/null +++ b/changes/ce/feat-11790.en.md @@ -0,0 +1,3 @@ +Added validation of Redis commands configured in Redis authorization source. +Also, improved Redis command parsing in authentication and authorization +so that it is `redis-cli` compatible and supports quoted arguments. From 8c4beec6f031aa54cbbafcbbe3c7f259c0cb4b25 Mon Sep 17 00:00:00 2001 From: Thales Macedo Garitezi Date: Thu, 19 Oct 2023 18:19:42 -0300 Subject: [PATCH 040/155] test(rule_engine): add test to exemplify `foreach` with json array payload Relates to https://emqx.atlassian.net/browse/EMQX-11174 --- .../test/emqx_rule_engine_SUITE.erl | 48 +++++++++++++++++++ 1 file changed, 48 insertions(+) diff --git a/apps/emqx_rule_engine/test/emqx_rule_engine_SUITE.erl b/apps/emqx_rule_engine/test/emqx_rule_engine_SUITE.erl index 31b0e83fa..00ca68264 100644 --- a/apps/emqx_rule_engine/test/emqx_rule_engine_SUITE.erl +++ b/apps/emqx_rule_engine/test/emqx_rule_engine_SUITE.erl @@ -92,6 +92,7 @@ groups() -> t_sqlparse_foreach_6, t_sqlparse_foreach_7, t_sqlparse_foreach_8, + t_sqlparse_foreach_9, t_sqlparse_case_when_1, t_sqlparse_case_when_2, t_sqlparse_case_when_3, @@ -2451,6 +2452,53 @@ t_sqlparse_foreach_8(_Config) -> || SqlN <- [Sql3] ]. 
+t_sqlparse_foreach_9(_Config) ->
+    Sql1 =
+        "foreach json_decode(payload) as p "
+        "do p.ts as ts "
+        "from \"t/#\" ",
+    Context = #{
+        payload =>
+            emqx_utils_json:encode(
+                [
+                    #{
+                        <<"ts">> => 1451649600512,
+                        <<"values">> =>
+                            #{
+                                <<"respiratoryrate">> => 20,
+                                <<"heartrate">> => 130,
+                                <<"systolic">> => 50
+                            }
+                    }
+                ]
+            ),
+        topic => <<"t/a">>
+    },
+    ?assertMatch(
+        {ok, [#{<<"ts">> := 1451649600512}]},
+        emqx_rule_sqltester:test(
+            #{
+                sql => Sql1,
+                context => Context
+            }
+        )
+    ),
+    %% doesn't work if we don't decode it first
+    Sql2 =
+        "foreach payload as p "
+        "do p.ts as ts "
+        "from \"t/#\" ",
+    ?assertMatch(
+        {ok, []},
+        emqx_rule_sqltester:test(
+            #{
+                sql => Sql2,
+                context => Context
+            }
+        )
+    ),
+    ok.
+
 t_sqlparse_case_when_1(_Config) ->
     %% case-when-else clause
     Sql =

From 3609b20fb2ca3e9ae7cf636822a75524f99466b5 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Igor=20Urmin=C4=8Dek?=
Date: Mon, 16 Oct 2023 21:08:18 +0200
Subject: [PATCH 041/155] fix(bin/emqx): do not rely on existing user name

Container should be able to run as other user too.
Use uid as fallback.
Prerequisite to be able to run as user specified during startup.
--- bin/emqx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bin/emqx b/bin/emqx index f24210cdf..5b61042d7 100755 --- a/bin/emqx +++ b/bin/emqx @@ -48,7 +48,7 @@ RUNNER_SCRIPT="$RUNNER_BIN_DIR/$REL_NAME" CODE_LOADING_MODE="${CODE_LOADING_MODE:-embedded}" REL_DIR="$RUNNER_ROOT_DIR/releases/$REL_VSN" -WHOAMI=$(whoami) +WHOAMI=$(whoami 2>/dev/null || id -u) # hocon try to read environment variables starting with "EMQX_" export HOCON_ENV_OVERRIDE_PREFIX='EMQX_' From edde661da3f31c1868e8ccd5f52865eb9b15b3ed Mon Sep 17 00:00:00 2001 From: Ilya Averyanov Date: Tue, 17 Oct 2023 19:45:56 +0300 Subject: [PATCH 042/155] fix(authn): fix pbkdf2 option validation --- apps/emqx/src/emqx_passwd.erl | 2 +- .../emqx_authn_password_hashing.erl | 2 +- .../emqx_authn_password_hashing_SUITE.erl | 26 ++++ .../src/emqx_authn_mnesia.erl | 118 ++++++++++++------ .../src/emqx_authn_scram_mnesia.erl | 105 ++++++++++------ .../test/emqx_authn_scram_mnesia_SUITE.erl | 68 ++++++++++ changes/ce/fix-11780.en.md | 1 + 7 files changed, 239 insertions(+), 83 deletions(-) create mode 100644 changes/ce/fix-11780.en.md diff --git a/apps/emqx/src/emqx_passwd.erl b/apps/emqx/src/emqx_passwd.erl index c68a146ed..1232dfcb4 100644 --- a/apps/emqx/src/emqx_passwd.erl +++ b/apps/emqx/src/emqx_passwd.erl @@ -83,7 +83,7 @@ do_check_pass({_SimpleHash, _Salt, _SaltPosition} = HashParams, PasswordHash, Pa compare_secure(Hash, PasswordHash). -spec hash(hash_params(), password()) -> password_hash(). 
-hash({pbkdf2, MacFun, Salt, Iterations, DKLength}, Password) -> +hash({pbkdf2, MacFun, Salt, Iterations, DKLength}, Password) when Iterations > 0 -> case pbkdf2(MacFun, Password, Salt, Iterations, DKLength) of {ok, HashPasswd} -> hex(HashPasswd); diff --git a/apps/emqx_auth/src/emqx_authn/emqx_authn_password_hashing.erl b/apps/emqx_auth/src/emqx_authn/emqx_authn_password_hashing.erl index 40e96ce6f..756f39d06 100644 --- a/apps/emqx_auth/src/emqx_authn/emqx_authn_password_hashing.erl +++ b/apps/emqx_auth/src/emqx_authn/emqx_authn_password_hashing.erl @@ -92,7 +92,7 @@ fields(pbkdf2) -> )}, {iterations, sc( - integer(), + pos_integer(), #{required => true, desc => "Iteration count for PBKDF2 hashing algorithm."} )}, {dk_length, fun dk_length/1} diff --git a/apps/emqx_auth/test/emqx_authn/emqx_authn_password_hashing_SUITE.erl b/apps/emqx_auth/test/emqx_authn/emqx_authn_password_hashing_SUITE.erl index 83b923d0e..ac3186bea 100644 --- a/apps/emqx_auth/test/emqx_authn/emqx_authn_password_hashing_SUITE.erl +++ b/apps/emqx_auth/test/emqx_authn/emqx_authn_password_hashing_SUITE.erl @@ -185,3 +185,29 @@ hash_examples() -> } } ]. + +t_pbkdf2_schema(_Config) -> + Config = fun(Iterations) -> + #{ + <<"pbkdf2">> => #{ + <<"name">> => <<"pbkdf2">>, + <<"mac_fun">> => <<"sha">>, + <<"iterations">> => Iterations + } + } + end, + + ?assertException( + throw, + {emqx_authn_password_hashing, _}, + hocon_tconf:check_plain(emqx_authn_password_hashing, Config(0), #{}, [pbkdf2]) + ), + ?assertException( + throw, + {emqx_authn_password_hashing, _}, + hocon_tconf:check_plain(emqx_authn_password_hashing, Config(-1), #{}, [pbkdf2]) + ), + ?assertMatch( + #{<<"pbkdf2">> := _}, + hocon_tconf:check_plain(emqx_authn_password_hashing, Config(1), #{}, [pbkdf2]) + ). 
diff --git a/apps/emqx_auth_mnesia/src/emqx_authn_mnesia.erl b/apps/emqx_auth_mnesia/src/emqx_authn_mnesia.erl index 8e59d94e7..bbbaeddb1 100644 --- a/apps/emqx_auth_mnesia/src/emqx_authn_mnesia.erl +++ b/apps/emqx_auth_mnesia/src/emqx_authn_mnesia.erl @@ -50,7 +50,7 @@ %% Internal exports (RPC) -export([ do_destroy/1, - do_add_user/2, + do_add_user/1, do_delete_user/2, do_update_user/3, import/2, @@ -187,24 +187,22 @@ import_users({Filename0, FileData}, State) -> {error, {unsupported_file_format, Extension}} end. -add_user(UserInfo, State) -> - trans(fun ?MODULE:do_add_user/2, [UserInfo, State]). +add_user( + UserInfo, + State +) -> + UserInfoRecord = user_info_record(UserInfo, State), + trans(fun ?MODULE:do_add_user/1, [UserInfoRecord]). do_add_user( - #{ - user_id := UserID, - password := Password - } = UserInfo, - #{ - user_group := UserGroup, - password_hash_algorithm := Algorithm - } + #user_info{ + user_id = {_UserGroup, UserID} = DBUserID, + is_superuser = IsSuperuser + } = UserInfoRecord ) -> - case mnesia:read(?TAB, {UserGroup, UserID}, write) of + case mnesia:read(?TAB, DBUserID, write) of [] -> - {PasswordHash, Salt} = emqx_authn_password_hashing:hash(Algorithm, Password), - IsSuperuser = maps:get(is_superuser, UserInfo, false), - insert_user(UserGroup, UserID, PasswordHash, Salt, IsSuperuser), + insert_user(UserInfoRecord), {ok, #{user_id => UserID, is_superuser => IsSuperuser}}; [_] -> {error, already_exist} @@ -222,38 +220,30 @@ do_delete_user(UserID, #{user_group := UserGroup}) -> end. update_user(UserID, UserInfo, State) -> - trans(fun ?MODULE:do_update_user/3, [UserID, UserInfo, State]). + FieldsToUpdate = fields_to_update( + UserInfo, + [ + hash_and_salt, + is_superuser + ], + State + ), + trans(fun ?MODULE:do_update_user/3, [UserID, FieldsToUpdate, State]). 
do_update_user( UserID, - UserInfo, + FieldsToUpdate, #{ - user_group := UserGroup, - password_hash_algorithm := Algorithm + user_group := UserGroup } ) -> case mnesia:read(?TAB, {UserGroup, UserID}, write) of [] -> {error, not_found}; - [ - #user_info{ - password_hash = PasswordHash, - salt = Salt, - is_superuser = IsSuperuser - } - ] -> - NSuperuser = maps:get(is_superuser, UserInfo, IsSuperuser), - {NPasswordHash, NSalt} = - case UserInfo of - #{password := Password} -> - emqx_authn_password_hashing:hash( - Algorithm, Password - ); - #{} -> - {PasswordHash, Salt} - end, - insert_user(UserGroup, UserID, NPasswordHash, NSalt, NSuperuser), - {ok, #{user_id => UserID, is_superuser => NSuperuser}} + [#user_info{} = UserInfoRecord] -> + NUserInfoRecord = update_user_record(UserInfoRecord, FieldsToUpdate), + insert_user(NUserInfoRecord), + {ok, #{user_id => UserID, is_superuser => NUserInfoRecord#user_info.is_superuser}} end. lookup_user(UserID, #{user_group := UserGroup}) -> @@ -391,13 +381,59 @@ get_user_info_by_seq(_, _, _) -> {error, bad_format}. insert_user(UserGroup, UserID, PasswordHash, Salt, IsSuperuser) -> - UserInfo = #user_info{ + UserInfoRecord = user_info_record(UserGroup, UserID, PasswordHash, Salt, IsSuperuser), + insert_user(UserInfoRecord). + +insert_user(#user_info{} = UserInfoRecord) -> + mnesia:write(?TAB, UserInfoRecord, write). + +user_info_record(UserGroup, UserID, PasswordHash, Salt, IsSuperuser) -> + #user_info{ user_id = {UserGroup, UserID}, password_hash = PasswordHash, salt = Salt, is_superuser = IsSuperuser - }, - mnesia:write(?TAB, UserInfo, write). + }. + +user_info_record( + #{ + user_id := UserID, + password := Password + } = UserInfo, + #{ + password_hash_algorithm := Algorithm, + user_group := UserGroup + } = _State +) -> + IsSuperuser = maps:get(is_superuser, UserInfo, false), + {PasswordHash, Salt} = emqx_authn_password_hashing:hash(Algorithm, Password), + user_info_record(UserGroup, UserID, PasswordHash, Salt, IsSuperuser). 
+ +fields_to_update( + #{password := Password} = UserInfo, + [hash_and_salt | Rest], + #{password_hash_algorithm := Algorithm} = State +) -> + [ + {hash_and_salt, + emqx_authn_password_hashing:hash( + Algorithm, Password + )} + | fields_to_update(UserInfo, Rest, State) + ]; +fields_to_update(#{is_superuser := IsSuperuser} = UserInfo, [is_superuser | Rest], State) -> + [{is_superuser, IsSuperuser} | fields_to_update(UserInfo, Rest, State)]; +fields_to_update(UserInfo, [_ | Rest], State) -> + fields_to_update(UserInfo, Rest, State); +fields_to_update(_UserInfo, [], _State) -> + []. + +update_user_record(UserInfoRecord, []) -> + UserInfoRecord; +update_user_record(UserInfoRecord, [{hash_and_salt, {PasswordHash, Salt}} | Rest]) -> + update_user_record(UserInfoRecord#user_info{password_hash = PasswordHash, salt = Salt}, Rest); +update_user_record(UserInfoRecord, [{is_superuser, IsSuperuser} | Rest]) -> + update_user_record(UserInfoRecord#user_info{is_superuser = IsSuperuser}, Rest). %% TODO: Support other type get_user_identity(#{username := Username}, username) -> diff --git a/apps/emqx_auth_mnesia/src/emqx_authn_scram_mnesia.erl b/apps/emqx_auth_mnesia/src/emqx_authn_scram_mnesia.erl index 641efcf74..a66ae5786 100644 --- a/apps/emqx_auth_mnesia/src/emqx_authn_scram_mnesia.erl +++ b/apps/emqx_auth_mnesia/src/emqx_authn_scram_mnesia.erl @@ -51,7 +51,7 @@ %% Internal exports (RPC) -export([ do_destroy/1, - do_add_user/2, + do_add_user/1, do_delete_user/2, do_update_user/3 ]). @@ -157,19 +157,15 @@ do_destroy(UserGroup) -> ). add_user(UserInfo, State) -> - trans(fun ?MODULE:do_add_user/2, [UserInfo, State]). + UserInfoRecord = user_info_record(UserInfo, State), + trans(fun ?MODULE:do_add_user/1, [UserInfoRecord]). 
do_add_user( - #{ - user_id := UserID, - password := Password - } = UserInfo, - #{user_group := UserGroup} = State + #user_info{user_id = {UserID, _} = DBUserID, is_superuser = IsSuperuser} = UserInfoRecord ) -> - case mnesia:read(?TAB, {UserGroup, UserID}, write) of + case mnesia:read(?TAB, DBUserID, write) of [] -> - IsSuperuser = maps:get(is_superuser, UserInfo, false), - add_user(UserGroup, UserID, Password, IsSuperuser, State), + mnesia:write(?TAB, UserInfoRecord, write), {ok, #{user_id => UserID, is_superuser => IsSuperuser}}; [_] -> {error, already_exist} @@ -187,36 +183,28 @@ do_delete_user(UserID, #{user_group := UserGroup}) -> end. update_user(UserID, User, State) -> - trans(fun ?MODULE:do_update_user/3, [UserID, User, State]). + FieldsToUpdate = fields_to_update( + User, + [ + keys_and_salt, + is_superuser + ], + State + ), + trans(fun ?MODULE:do_update_user/3, [UserID, FieldsToUpdate, State]). do_update_user( UserID, - User, - #{user_group := UserGroup} = State + FieldsToUpdate, + #{user_group := UserGroup} = _State ) -> case mnesia:read(?TAB, {UserGroup, UserID}, write) of [] -> {error, not_found}; - [#user_info{is_superuser = IsSuperuser} = UserInfo] -> - UserInfo1 = UserInfo#user_info{ - is_superuser = maps:get(is_superuser, User, IsSuperuser) - }, - UserInfo2 = - case maps:get(password, User, undefined) of - undefined -> - UserInfo1; - Password -> - {StoredKey, ServerKey, Salt} = esasl_scram:generate_authentication_info( - Password, State - ), - UserInfo1#user_info{ - stored_key = StoredKey, - server_key = ServerKey, - salt = Salt - } - end, - mnesia:write(?TAB, UserInfo2, write), - {ok, format_user_info(UserInfo2)} + [#user_info{} = UserInfo0] -> + UserInfo1 = update_user_record(UserInfo0, FieldsToUpdate), + mnesia:write(?TAB, UserInfo1, write), + {ok, format_user_info(UserInfo1)} end. 
lookup_user(UserID, #{user_group := UserGroup}) -> @@ -315,19 +303,56 @@ check_client_final_message(Bin, #{is_superuser := IsSuperuser} = Cache, #{algori {error, not_authorized} end. -add_user(UserGroup, UserID, Password, IsSuperuser, State) -> - {StoredKey, ServerKey, Salt} = esasl_scram:generate_authentication_info(Password, State), - write_user(UserGroup, UserID, StoredKey, ServerKey, Salt, IsSuperuser). +user_info_record( + #{ + user_id := UserID, + password := Password + } = UserInfo, + #{user_group := UserGroup} = State +) -> + IsSuperuser = maps:get(is_superuser, UserInfo, false), + user_info_record(UserGroup, UserID, Password, IsSuperuser, State). -write_user(UserGroup, UserID, StoredKey, ServerKey, Salt, IsSuperuser) -> - UserInfo = #user_info{ +user_info_record(UserGroup, UserID, Password, IsSuperuser, State) -> + {StoredKey, ServerKey, Salt} = esasl_scram:generate_authentication_info(Password, State), + #user_info{ user_id = {UserGroup, UserID}, stored_key = StoredKey, server_key = ServerKey, salt = Salt, is_superuser = IsSuperuser - }, - mnesia:write(?TAB, UserInfo, write). + }. + +fields_to_update( + #{password := Password} = UserInfo, + [keys_and_salt | Rest], + State +) -> + {StoredKey, ServerKey, Salt} = esasl_scram:generate_authentication_info(Password, State), + [ + {keys_and_salt, {StoredKey, ServerKey, Salt}} + | fields_to_update(UserInfo, Rest, State) + ]; +fields_to_update(#{is_superuser := IsSuperuser} = UserInfo, [is_superuser | Rest], State) -> + [{is_superuser, IsSuperuser} | fields_to_update(UserInfo, Rest, State)]; +fields_to_update(UserInfo, [_ | Rest], State) -> + fields_to_update(UserInfo, Rest, State); +fields_to_update(_UserInfo, [], _State) -> + []. 
+ +update_user_record(UserInfoRecord, []) -> + UserInfoRecord; +update_user_record(UserInfoRecord, [{keys_and_salt, {StoredKey, ServerKey, Salt}} | Rest]) -> + update_user_record( + UserInfoRecord#user_info{ + stored_key = StoredKey, + server_key = ServerKey, + salt = Salt + }, + Rest + ); +update_user_record(UserInfoRecord, [{is_superuser, IsSuperuser} | Rest]) -> + update_user_record(UserInfoRecord#user_info{is_superuser = IsSuperuser}, Rest). retrieve(UserID, #{user_group := UserGroup}) -> case mnesia:dirty_read(?TAB, {UserGroup, UserID}) of diff --git a/apps/emqx_auth_mnesia/test/emqx_authn_scram_mnesia_SUITE.erl b/apps/emqx_auth_mnesia/test/emqx_authn_scram_mnesia_SUITE.erl index abd5518a6..39350e4b9 100644 --- a/apps/emqx_auth_mnesia/test/emqx_authn_scram_mnesia_SUITE.erl +++ b/apps/emqx_auth_mnesia/test/emqx_authn_scram_mnesia_SUITE.erl @@ -314,6 +314,74 @@ t_update_user(_) -> {ok, #{is_superuser := true}} = emqx_authn_scram_mnesia:lookup_user(<<"u">>, State). +t_update_user_keys(_Config) -> + Algorithm = sha512, + Username = <<"u">>, + Password = <<"p">>, + + init_auth(Username, <<"badpass">>, Algorithm), + + {ok, [#{state := State}]} = emqx_authn_chains:list_authenticators(?GLOBAL), + + emqx_authn_scram_mnesia:update_user( + Username, + #{password => Password}, + State + ), + + ok = emqx_config:put([mqtt, idle_timeout], 500), + + {ok, Pid} = emqx_authn_mqtt_test_client:start_link("127.0.0.1", 1883), + + ClientFirstMessage = esasl_scram:client_first_message(Username), + + ConnectPacket = ?CONNECT_PACKET( + #mqtt_packet_connect{ + proto_ver = ?MQTT_PROTO_V5, + properties = #{ + 'Authentication-Method' => <<"SCRAM-SHA-512">>, + 'Authentication-Data' => ClientFirstMessage + } + } + ), + + ok = emqx_authn_mqtt_test_client:send(Pid, ConnectPacket), + + ?AUTH_PACKET( + ?RC_CONTINUE_AUTHENTICATION, + #{'Authentication-Data' := ServerFirstMessage} + ) = receive_packet(), + + {continue, ClientFinalMessage, ClientCache} = + esasl_scram:check_server_first_message( + 
ServerFirstMessage, + #{ + client_first_message => ClientFirstMessage, + password => Password, + algorithm => Algorithm + } + ), + + AuthContinuePacket = ?AUTH_PACKET( + ?RC_CONTINUE_AUTHENTICATION, + #{ + 'Authentication-Method' => <<"SCRAM-SHA-512">>, + 'Authentication-Data' => ClientFinalMessage + } + ), + + ok = emqx_authn_mqtt_test_client:send(Pid, AuthContinuePacket), + + ?CONNACK_PACKET( + ?RC_SUCCESS, + _, + #{'Authentication-Data' := ServerFinalMessage} + ) = receive_packet(), + + ok = esasl_scram:check_server_final_message( + ServerFinalMessage, ClientCache#{algorithm => Algorithm} + ). + t_list_users(_) -> Config = config(), {ok, State} = emqx_authn_scram_mnesia:create(<<"id">>, Config), diff --git a/changes/ce/fix-11780.en.md b/changes/ce/fix-11780.en.md new file mode 100644 index 000000000..549707ffb --- /dev/null +++ b/changes/ce/fix-11780.en.md @@ -0,0 +1 @@ +Fixed validation of the `iterations` field of the `pbkdf2` password hashing algorithm. Now, `iterations` must be strictly positive. Previously, it could be set to 0, which led to a nonfunctional authenticator. From 8c6f6b56b83201275a1b2cb014e551829506614f Mon Sep 17 00:00:00 2001 From: Ilya Averyanov Date: Mon, 23 Oct 2023 13:48:42 +0300 Subject: [PATCH 043/155] chore(tests): report more friendly about unset PROFILE variable --- apps/emqx_conf/src/emqx_conf.erl | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/apps/emqx_conf/src/emqx_conf.erl b/apps/emqx_conf/src/emqx_conf.erl index f9e64ffa6..3a65efc53 100644 --- a/apps/emqx_conf/src/emqx_conf.erl +++ b/apps/emqx_conf/src/emqx_conf.erl @@ -219,7 +219,9 @@ resolve_schema_module() -> "emqx" -> emqx_conf_schema; "emqx-enterprise" -> - emqx_enterprise_schema + emqx_enterprise_schema; + false -> + error("PROFILE environment variable is not set") end. -else. -spec resolve_schema_module() -> no_return(). 
From 4e0e755b2876361dd7d36c7759a19cc28b320048 Mon Sep 17 00:00:00 2001 From: Stefan Strigler Date: Fri, 20 Oct 2023 12:24:38 +0200 Subject: [PATCH 044/155] fix: return 404 if built_in_database not configured as auth source --- .../src/emqx_authz/emqx_authz_api_sources.erl | 2 + .../src/emqx_authz_api_mnesia.erl | 309 ++++++++++-------- .../test/emqx_authz_api_mnesia_SUITE.erl | 155 +++++++++ changes/ce/fix-11797.en.md | 1 + 4 files changed, 337 insertions(+), 130 deletions(-) create mode 100644 changes/ce/fix-11797.en.md diff --git a/apps/emqx_auth/src/emqx_authz/emqx_authz_api_sources.erl b/apps/emqx_auth/src/emqx_authz/emqx_authz_api_sources.erl index 247f3a9ac..00345a108 100644 --- a/apps/emqx_auth/src/emqx_authz/emqx_authz_api_sources.erl +++ b/apps/emqx_auth/src/emqx_authz/emqx_authz_api_sources.erl @@ -49,6 +49,8 @@ aggregate_metrics/1 ]). +-export([with_source/2]). + -define(TAGS, [<<"Authorization">>]). api_spec() -> diff --git a/apps/emqx_auth_mnesia/src/emqx_authz_api_mnesia.erl b/apps/emqx_auth_mnesia/src/emqx_authz_api_mnesia.erl index e71b44add..c0c2322c9 100644 --- a/apps/emqx_auth_mnesia/src/emqx_authz_api_mnesia.erl +++ b/apps/emqx_auth_mnesia/src/emqx_authz_api_mnesia.erl @@ -426,161 +426,210 @@ fields(rules) -> %% HTTP API %%-------------------------------------------------------------------- +-define(IF_CONFIGURED_AUTHZ_SOURCE(EXPR), + emqx_authz_api_sources:with_source( + <<"built_in_database">>, + fun(_Source) -> + EXPR + end + ) +). 
+ users(get, #{query_string := QueryString}) -> - case - emqx_mgmt_api:node_query( - node(), - ?ACL_TABLE, - QueryString, - ?ACL_USERNAME_QSCHEMA, - ?QUERY_USERNAME_FUN, - fun ?MODULE:format_result/1 - ) - of - {error, page_limit_invalid} -> - {400, #{code => <<"INVALID_PARAMETER">>, message => <<"page_limit_invalid">>}}; - {error, Node, Error} -> - Message = list_to_binary(io_lib:format("bad rpc call ~p, Reason ~p", [Node, Error])), - {500, #{code => <<"NODE_DOWN">>, message => Message}}; - Result -> - {200, Result} - end; + ?IF_CONFIGURED_AUTHZ_SOURCE( + case + emqx_mgmt_api:node_query( + node(), + ?ACL_TABLE, + QueryString, + ?ACL_USERNAME_QSCHEMA, + ?QUERY_USERNAME_FUN, + fun ?MODULE:format_result/1 + ) + of + {error, page_limit_invalid} -> + {400, #{code => <<"INVALID_PARAMETER">>, message => <<"page_limit_invalid">>}}; + {error, Node, Error} -> + Message = list_to_binary( + io_lib:format("bad rpc call ~p, Reason ~p", [Node, Error]) + ), + {500, #{code => <<"NODE_DOWN">>, message => Message}}; + Result -> + {200, Result} + end + ); users(post, #{body := Body}) when is_list(Body) -> - case ensure_all_not_exists(<<"username">>, username, Body) of - [] -> - lists:foreach( - fun(#{<<"username">> := Username, <<"rules">> := Rules}) -> - emqx_authz_mnesia:store_rules({username, Username}, Rules) - end, - Body - ), - {204}; - Exists -> - {409, #{ - code => <<"ALREADY_EXISTS">>, - message => binfmt("Users '~ts' already exist", [binjoin(Exists)]) - }} - end. + ?IF_CONFIGURED_AUTHZ_SOURCE( + case ensure_all_not_exists(<<"username">>, username, Body) of + [] -> + lists:foreach( + fun(#{<<"username">> := Username, <<"rules">> := Rules}) -> + emqx_authz_mnesia:store_rules({username, Username}, Rules) + end, + Body + ), + {204}; + Exists -> + {409, #{ + code => <<"ALREADY_EXISTS">>, + message => binfmt("Users '~ts' already exist", [binjoin(Exists)]) + }} + end + ). 
clients(get, #{query_string := QueryString}) -> - case - emqx_mgmt_api:node_query( - node(), - ?ACL_TABLE, - QueryString, - ?ACL_CLIENTID_QSCHEMA, - ?QUERY_CLIENTID_FUN, - fun ?MODULE:format_result/1 - ) - of - {error, page_limit_invalid} -> - {400, #{code => <<"INVALID_PARAMETER">>, message => <<"page_limit_invalid">>}}; - {error, Node, Error} -> - Message = list_to_binary(io_lib:format("bad rpc call ~p, Reason ~p", [Node, Error])), - {500, #{code => <<"NODE_DOWN">>, message => Message}}; - Result -> - {200, Result} - end; + ?IF_CONFIGURED_AUTHZ_SOURCE( + case + emqx_mgmt_api:node_query( + node(), + ?ACL_TABLE, + QueryString, + ?ACL_CLIENTID_QSCHEMA, + ?QUERY_CLIENTID_FUN, + fun ?MODULE:format_result/1 + ) + of + {error, page_limit_invalid} -> + {400, #{code => <<"INVALID_PARAMETER">>, message => <<"page_limit_invalid">>}}; + {error, Node, Error} -> + Message = list_to_binary( + io_lib:format("bad rpc call ~p, Reason ~p", [Node, Error]) + ), + {500, #{code => <<"NODE_DOWN">>, message => Message}}; + Result -> + {200, Result} + end + ); clients(post, #{body := Body}) when is_list(Body) -> - case ensure_all_not_exists(<<"clientid">>, clientid, Body) of - [] -> - lists:foreach( - fun(#{<<"clientid">> := ClientID, <<"rules">> := Rules}) -> - emqx_authz_mnesia:store_rules({clientid, ClientID}, Rules) - end, - Body - ), - {204}; - Exists -> - {409, #{ - code => <<"ALREADY_EXISTS">>, - message => binfmt("Clients '~ts' already exist", [binjoin(Exists)]) - }} - end. + ?IF_CONFIGURED_AUTHZ_SOURCE( + case ensure_all_not_exists(<<"clientid">>, clientid, Body) of + [] -> + lists:foreach( + fun(#{<<"clientid">> := ClientID, <<"rules">> := Rules}) -> + emqx_authz_mnesia:store_rules({clientid, ClientID}, Rules) + end, + Body + ), + {204}; + Exists -> + {409, #{ + code => <<"ALREADY_EXISTS">>, + message => binfmt("Clients '~ts' already exist", [binjoin(Exists)]) + }} + end + ). 
user(get, #{bindings := #{username := Username}}) -> - case emqx_authz_mnesia:get_rules({username, Username}) of - not_found -> - {404, #{code => <<"NOT_FOUND">>, message => <<"Not Found">>}}; - {ok, Rules} -> - {200, #{ - username => Username, - rules => format_rules(Rules) - }} - end; + ?IF_CONFIGURED_AUTHZ_SOURCE( + case emqx_authz_mnesia:get_rules({username, Username}) of + not_found -> + {404, #{code => <<"NOT_FOUND">>, message => <<"Not Found">>}}; + {ok, Rules} -> + {200, #{ + username => Username, + rules => format_rules(Rules) + }} + end + ); user(put, #{ bindings := #{username := Username}, body := #{<<"username">> := Username, <<"rules">> := Rules} }) -> - emqx_authz_mnesia:store_rules({username, Username}, Rules), - {204}; -user(delete, #{bindings := #{username := Username}}) -> - case emqx_authz_mnesia:get_rules({username, Username}) of - not_found -> - {404, #{code => <<"NOT_FOUND">>, message => <<"Username Not Found">>}}; - {ok, _Rules} -> - emqx_authz_mnesia:delete_rules({username, Username}), + ?IF_CONFIGURED_AUTHZ_SOURCE( + begin + emqx_authz_mnesia:store_rules({username, Username}, Rules), {204} - end. + end + ); +user(delete, #{bindings := #{username := Username}}) -> + ?IF_CONFIGURED_AUTHZ_SOURCE( + case emqx_authz_mnesia:get_rules({username, Username}) of + not_found -> + {404, #{code => <<"NOT_FOUND">>, message => <<"Username Not Found">>}}; + {ok, _Rules} -> + emqx_authz_mnesia:delete_rules({username, Username}), + {204} + end + ). 
client(get, #{bindings := #{clientid := ClientID}}) -> - case emqx_authz_mnesia:get_rules({clientid, ClientID}) of - not_found -> - {404, #{code => <<"NOT_FOUND">>, message => <<"Not Found">>}}; - {ok, Rules} -> - {200, #{ - clientid => ClientID, - rules => format_rules(Rules) - }} - end; + ?IF_CONFIGURED_AUTHZ_SOURCE( + case emqx_authz_mnesia:get_rules({clientid, ClientID}) of + not_found -> + {404, #{code => <<"NOT_FOUND">>, message => <<"Not Found">>}}; + {ok, Rules} -> + {200, #{ + clientid => ClientID, + rules => format_rules(Rules) + }} + end + ); client(put, #{ bindings := #{clientid := ClientID}, body := #{<<"clientid">> := ClientID, <<"rules">> := Rules} }) -> - emqx_authz_mnesia:store_rules({clientid, ClientID}, Rules), - {204}; -client(delete, #{bindings := #{clientid := ClientID}}) -> - case emqx_authz_mnesia:get_rules({clientid, ClientID}) of - not_found -> - {404, #{code => <<"NOT_FOUND">>, message => <<"ClientID Not Found">>}}; - {ok, _Rules} -> - emqx_authz_mnesia:delete_rules({clientid, ClientID}), + ?IF_CONFIGURED_AUTHZ_SOURCE( + begin + emqx_authz_mnesia:store_rules({clientid, ClientID}, Rules), {204} - end. + end + ); +client(delete, #{bindings := #{clientid := ClientID}}) -> + ?IF_CONFIGURED_AUTHZ_SOURCE( + case emqx_authz_mnesia:get_rules({clientid, ClientID}) of + not_found -> + {404, #{code => <<"NOT_FOUND">>, message => <<"ClientID Not Found">>}}; + {ok, _Rules} -> + emqx_authz_mnesia:delete_rules({clientid, ClientID}), + {204} + end + ). 
all(get, _) -> - case emqx_authz_mnesia:get_rules(all) of - not_found -> - {200, #{rules => []}}; - {ok, Rules} -> - {200, #{ - rules => format_rules(Rules) - }} - end; + ?IF_CONFIGURED_AUTHZ_SOURCE( + case emqx_authz_mnesia:get_rules(all) of + not_found -> + {200, #{rules => []}}; + {ok, Rules} -> + {200, #{ + rules => format_rules(Rules) + }} + end + ); all(post, #{body := #{<<"rules">> := Rules}}) -> - emqx_authz_mnesia:store_rules(all, Rules), - {204}; + ?IF_CONFIGURED_AUTHZ_SOURCE( + begin + emqx_authz_mnesia:store_rules(all, Rules), + {204} + end + ); all(delete, _) -> - emqx_authz_mnesia:store_rules(all, []), - {204}. + ?IF_CONFIGURED_AUTHZ_SOURCE( + begin + emqx_authz_mnesia:store_rules(all, []), + {204} + end + ). rules(delete, _) -> - case emqx_authz_api_sources:get_raw_source(<<"built_in_database">>) of - [#{<<"enable">> := false}] -> - ok = emqx_authz_mnesia:purge_rules(), - {204}; - [#{<<"enable">> := true}] -> - {400, #{ - code => <<"BAD_REQUEST">>, - message => - <<"'built_in_database' type source must be disabled before purge.">> - }}; - [] -> - {404, #{ - code => <<"BAD_REQUEST">>, - message => <<"'built_in_database' type source is not found.">> - }} - end. + ?IF_CONFIGURED_AUTHZ_SOURCE( + case emqx_authz_api_sources:get_raw_source(<<"built_in_database">>) of + [#{<<"enable">> := false}] -> + ok = emqx_authz_mnesia:purge_rules(), + {204}; + [#{<<"enable">> := true}] -> + {400, #{ + code => <<"BAD_REQUEST">>, + message => + <<"'built_in_database' type source must be disabled before purge.">> + }}; + [] -> + {404, #{ + code => <<"BAD_REQUEST">>, + message => <<"'built_in_database' type source is not found.">> + }} + end + ). 
%%-------------------------------------------------------------------- %% QueryString to MatchSpec diff --git a/apps/emqx_auth_mnesia/test/emqx_authz_api_mnesia_SUITE.erl b/apps/emqx_auth_mnesia/test/emqx_authz_api_mnesia_SUITE.erl index e4b96b08b..50da6e676 100644 --- a/apps/emqx_auth_mnesia/test/emqx_authz_api_mnesia_SUITE.erl +++ b/apps/emqx_auth_mnesia/test/emqx_authz_api_mnesia_SUITE.erl @@ -331,4 +331,159 @@ t_api(_) -> [] ), ?assertEqual(0, emqx_authz_mnesia:record_count()), + + Examples = make_examples(emqx_authz_api_mnesia), + ?assertEqual( + 14, + length(Examples) + ), + + Fixtures1 = fun() -> + {ok, _, _} = + request( + delete, + uri(["authorization", "sources", "built_in_database", "rules", "all"]), + [] + ), + {ok, _, _} = + request( + delete, + uri(["authorization", "sources", "built_in_database", "rules", "users"]), + [] + ), + {ok, _, _} = + request( + delete, + uri(["authorization", "sources", "built_in_database", "rules", "clients"]), + [] + ) + end, + run_examples(Examples, Fixtures1), + + Fixtures2 = fun() -> + %% disable/remove built_in_database + {ok, 204, _} = + request( + delete, + uri(["authorization", "sources", "built_in_database"]), + [] + ) + end, + + run_examples(404, Examples, Fixtures2), + ok. + +%% test helpers +-define(REPLACEMENTS, #{ + ":clientid" => <<"client1">>, + ":username" => <<"user1">> +}). + +run_examples(Examples) -> + %% assume all ok + run_examples( + fun + ({ok, Code, _}) when + Code >= 200, + Code =< 299 + -> + true; + (_Res) -> + ct:pal("check failed: ~p", [_Res]), + false + end, + Examples + ). 
+ +run_examples(Examples, Fixtures) when is_function(Fixtures) -> + Fixtures(), + run_examples(Examples); +run_examples(Check, Examples) when is_function(Check) -> + lists:foreach( + fun({Path, Op, Body} = _Req) -> + ct:pal("req: ~p", [_Req]), + ?assert( + Check( + request(Op, uri(Path), Body) + ) + ) + end, + Examples + ); +run_examples(Code, Examples) when is_number(Code) -> + run_examples( + fun + ({ok, ResCode, _}) when Code =:= ResCode -> true; + (_) -> false + end, + Examples + ). + +run_examples(CodeOrCheck, Examples, Fixtures) when is_function(Fixtures) -> + Fixtures(), + run_examples(CodeOrCheck, Examples). + +make_examples(ApiMod) -> + make_examples(ApiMod, ?REPLACEMENTS). + +-spec make_examples(Mod :: atom()) -> [{Path :: list(), [{Op :: atom(), Body :: term()}]}]. +make_examples(ApiMod, Replacements) -> + Paths = ApiMod:paths(), + lists:flatten( + lists:map( + fun(Path) -> + Schema = ApiMod:schema(Path), + lists:map( + fun({Op, OpSchema}) -> + Body = + case maps:get('requestBody', OpSchema, undefined) of + undefined -> + []; + HoconWithExamples -> + maps:get( + value, + hd( + maps:values( + maps:get( + <<"examples">>, + maps:get(examples, HoconWithExamples) + ) + ) + ) + ) + end, + {replace_parts(to_parts(Path), Replacements), Op, Body} + end, + lists:sort(fun op_sort/2, maps:to_list(maps:remove('operationId', Schema))) + ) + end, + Paths + ) + ). + +op_sort({post, _}, {_, _}) -> + true; +op_sort({put, _}, {_, _}) -> + true; +op_sort({get, _}, {delete, _}) -> + true; +op_sort(_, _) -> + false. + +to_parts(Path) -> + string:tokens(Path, "/"). + +replace_parts(Parts, Replacements) -> + lists:map( + fun(Part) -> + %% that's the fun part + case maps:is_key(Part, Replacements) of + true -> + maps:get(Part, Replacements); + false -> + Part + end + end, + Parts + ). 
diff --git a/changes/ce/fix-11797.en.md b/changes/ce/fix-11797.en.md new file mode 100644 index 000000000..6227e079c --- /dev/null +++ b/changes/ce/fix-11797.en.md @@ -0,0 +1 @@ +Modified HTTP API behavior for APIs managing the `built_in_database` authorization source: They will now return a `404` status code if `built_in_database` is not set as the authorization source, replacing the former `20X` response. From ec7b669b79491126acfa9cf94dd7d404d0d426dc Mon Sep 17 00:00:00 2001 From: JianBo He Date: Tue, 24 Oct 2023 08:51:52 +0800 Subject: [PATCH 045/155] chore: typo fixes --- changes/ce/fix-11771.en.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changes/ce/fix-11771.en.md b/changes/ce/fix-11771.en.md index 1df7503de..bc91dfaa5 100644 --- a/changes/ce/fix-11771.en.md +++ b/changes/ce/fix-11771.en.md @@ -1 +1 @@ -Fixed validation of Bcrypt salt rounds in authentification management through the API/Dashboard. +Fixed validation of Bcrypt salt rounds in authentication management through the API/Dashboard. From ea9228108b62057e7e7c804385bec06f2a43a731 Mon Sep 17 00:00:00 2001 From: Stefan Strigler Date: Tue, 24 Oct 2023 09:57:08 +0200 Subject: [PATCH 046/155] fix: use minirest filter --- .../src/emqx_authz_api_mnesia.erl | 323 ++++++++---------- .../test/emqx_authz_api_mnesia_SUITE.erl | 8 +- 2 files changed, 154 insertions(+), 177 deletions(-) diff --git a/apps/emqx_auth_mnesia/src/emqx_authz_api_mnesia.erl b/apps/emqx_auth_mnesia/src/emqx_authz_api_mnesia.erl index c0c2322c9..5fc1ec280 100644 --- a/apps/emqx_auth_mnesia/src/emqx_authz_api_mnesia.erl +++ b/apps/emqx_auth_mnesia/src/emqx_authz_api_mnesia.erl @@ -18,6 +18,7 @@ -behaviour(minirest_api). +-include("emqx_auth_mnesia.hrl"). -include_lib("emqx_auth/include/emqx_authz.hrl"). -include_lib("emqx/include/logger.hrl"). -include_lib("hocon/include/hoconsc.hrl"). @@ -55,6 +56,9 @@ format_result/1 ]). +%% minirest filter callback +-export([is_configured_authz_source/2]). 
+ -define(BAD_REQUEST, 'BAD_REQUEST'). -define(NOT_FOUND, 'NOT_FOUND'). -define(ALREADY_EXISTS, 'ALREADY_EXISTS'). @@ -85,6 +89,7 @@ paths() -> schema("/authorization/sources/built_in_database/rules/users") -> #{ 'operationId' => users, + filter => fun ?MODULE:is_configured_authz_source/2, get => #{ tags => [<<"authorization">>], @@ -131,6 +136,7 @@ schema("/authorization/sources/built_in_database/rules/users") -> schema("/authorization/sources/built_in_database/rules/clients") -> #{ 'operationId' => clients, + filter => fun ?MODULE:is_configured_authz_source/2, get => #{ tags => [<<"authorization">>], @@ -177,6 +183,7 @@ schema("/authorization/sources/built_in_database/rules/clients") -> schema("/authorization/sources/built_in_database/rules/users/:username") -> #{ 'operationId' => user, + filter => fun ?MODULE:is_configured_authz_source/2, get => #{ tags => [<<"authorization">>], @@ -230,6 +237,7 @@ schema("/authorization/sources/built_in_database/rules/users/:username") -> schema("/authorization/sources/built_in_database/rules/clients/:clientid") -> #{ 'operationId' => client, + filter => fun ?MODULE:is_configured_authz_source/2, get => #{ tags => [<<"authorization">>], @@ -283,6 +291,7 @@ schema("/authorization/sources/built_in_database/rules/clients/:clientid") -> schema("/authorization/sources/built_in_database/rules/all") -> #{ 'operationId' => all, + filter => fun ?MODULE:is_configured_authz_source/2, get => #{ tags => [<<"authorization">>], @@ -317,6 +326,7 @@ schema("/authorization/sources/built_in_database/rules/all") -> schema("/authorization/sources/built_in_database/rules") -> #{ 'operationId' => rules, + filter => fun ?MODULE:is_configured_authz_source/2, delete => #{ tags => [<<"authorization">>], @@ -426,210 +436,173 @@ fields(rules) -> %% HTTP API %%-------------------------------------------------------------------- --define(IF_CONFIGURED_AUTHZ_SOURCE(EXPR), +is_configured_authz_source(Params, _Meta) -> emqx_authz_api_sources:with_source( - 
<<"built_in_database">>, + ?AUTHZ_TYPE_BIN, fun(_Source) -> - EXPR + {ok, Params} end - ) -). + ). users(get, #{query_string := QueryString}) -> - ?IF_CONFIGURED_AUTHZ_SOURCE( - case - emqx_mgmt_api:node_query( - node(), - ?ACL_TABLE, - QueryString, - ?ACL_USERNAME_QSCHEMA, - ?QUERY_USERNAME_FUN, - fun ?MODULE:format_result/1 - ) - of - {error, page_limit_invalid} -> - {400, #{code => <<"INVALID_PARAMETER">>, message => <<"page_limit_invalid">>}}; - {error, Node, Error} -> - Message = list_to_binary( - io_lib:format("bad rpc call ~p, Reason ~p", [Node, Error]) - ), - {500, #{code => <<"NODE_DOWN">>, message => Message}}; - Result -> - {200, Result} - end - ); + case + emqx_mgmt_api:node_query( + node(), + ?ACL_TABLE, + QueryString, + ?ACL_USERNAME_QSCHEMA, + ?QUERY_USERNAME_FUN, + fun ?MODULE:format_result/1 + ) + of + {error, page_limit_invalid} -> + {400, #{code => <<"INVALID_PARAMETER">>, message => <<"page_limit_invalid">>}}; + {error, Node, Error} -> + Message = list_to_binary( + io_lib:format("bad rpc call ~p, Reason ~p", [Node, Error]) + ), + {500, #{code => <<"NODE_DOWN">>, message => Message}}; + Result -> + {200, Result} + end; users(post, #{body := Body}) when is_list(Body) -> - ?IF_CONFIGURED_AUTHZ_SOURCE( - case ensure_all_not_exists(<<"username">>, username, Body) of - [] -> - lists:foreach( - fun(#{<<"username">> := Username, <<"rules">> := Rules}) -> - emqx_authz_mnesia:store_rules({username, Username}, Rules) - end, - Body - ), - {204}; - Exists -> - {409, #{ - code => <<"ALREADY_EXISTS">>, - message => binfmt("Users '~ts' already exist", [binjoin(Exists)]) - }} - end - ). + case ensure_all_not_exists(<<"username">>, username, Body) of + [] -> + lists:foreach( + fun(#{<<"username">> := Username, <<"rules">> := Rules}) -> + emqx_authz_mnesia:store_rules({username, Username}, Rules) + end, + Body + ), + {204}; + Exists -> + {409, #{ + code => <<"ALREADY_EXISTS">>, + message => binfmt("Users '~ts' already exist", [binjoin(Exists)]) + }} + end. 
clients(get, #{query_string := QueryString}) -> - ?IF_CONFIGURED_AUTHZ_SOURCE( - case - emqx_mgmt_api:node_query( - node(), - ?ACL_TABLE, - QueryString, - ?ACL_CLIENTID_QSCHEMA, - ?QUERY_CLIENTID_FUN, - fun ?MODULE:format_result/1 - ) - of - {error, page_limit_invalid} -> - {400, #{code => <<"INVALID_PARAMETER">>, message => <<"page_limit_invalid">>}}; - {error, Node, Error} -> - Message = list_to_binary( - io_lib:format("bad rpc call ~p, Reason ~p", [Node, Error]) - ), - {500, #{code => <<"NODE_DOWN">>, message => Message}}; - Result -> - {200, Result} - end - ); + case + emqx_mgmt_api:node_query( + node(), + ?ACL_TABLE, + QueryString, + ?ACL_CLIENTID_QSCHEMA, + ?QUERY_CLIENTID_FUN, + fun ?MODULE:format_result/1 + ) + of + {error, page_limit_invalid} -> + {400, #{code => <<"INVALID_PARAMETER">>, message => <<"page_limit_invalid">>}}; + {error, Node, Error} -> + Message = list_to_binary( + io_lib:format("bad rpc call ~p, Reason ~p", [Node, Error]) + ), + {500, #{code => <<"NODE_DOWN">>, message => Message}}; + Result -> + {200, Result} + end; clients(post, #{body := Body}) when is_list(Body) -> - ?IF_CONFIGURED_AUTHZ_SOURCE( - case ensure_all_not_exists(<<"clientid">>, clientid, Body) of - [] -> - lists:foreach( - fun(#{<<"clientid">> := ClientID, <<"rules">> := Rules}) -> - emqx_authz_mnesia:store_rules({clientid, ClientID}, Rules) - end, - Body - ), - {204}; - Exists -> - {409, #{ - code => <<"ALREADY_EXISTS">>, - message => binfmt("Clients '~ts' already exist", [binjoin(Exists)]) - }} - end - ). + case ensure_all_not_exists(<<"clientid">>, clientid, Body) of + [] -> + lists:foreach( + fun(#{<<"clientid">> := ClientID, <<"rules">> := Rules}) -> + emqx_authz_mnesia:store_rules({clientid, ClientID}, Rules) + end, + Body + ), + {204}; + Exists -> + {409, #{ + code => <<"ALREADY_EXISTS">>, + message => binfmt("Clients '~ts' already exist", [binjoin(Exists)]) + }} + end. 
user(get, #{bindings := #{username := Username}}) -> - ?IF_CONFIGURED_AUTHZ_SOURCE( - case emqx_authz_mnesia:get_rules({username, Username}) of - not_found -> - {404, #{code => <<"NOT_FOUND">>, message => <<"Not Found">>}}; - {ok, Rules} -> - {200, #{ - username => Username, - rules => format_rules(Rules) - }} - end - ); + case emqx_authz_mnesia:get_rules({username, Username}) of + not_found -> + {404, #{code => <<"NOT_FOUND">>, message => <<"Not Found">>}}; + {ok, Rules} -> + {200, #{ + username => Username, + rules => format_rules(Rules) + }} + end; user(put, #{ bindings := #{username := Username}, body := #{<<"username">> := Username, <<"rules">> := Rules} }) -> - ?IF_CONFIGURED_AUTHZ_SOURCE( - begin - emqx_authz_mnesia:store_rules({username, Username}, Rules), - {204} - end - ); + emqx_authz_mnesia:store_rules({username, Username}, Rules), + {204}; user(delete, #{bindings := #{username := Username}}) -> - ?IF_CONFIGURED_AUTHZ_SOURCE( - case emqx_authz_mnesia:get_rules({username, Username}) of - not_found -> - {404, #{code => <<"NOT_FOUND">>, message => <<"Username Not Found">>}}; - {ok, _Rules} -> - emqx_authz_mnesia:delete_rules({username, Username}), - {204} - end - ). + case emqx_authz_mnesia:get_rules({username, Username}) of + not_found -> + {404, #{code => <<"NOT_FOUND">>, message => <<"Username Not Found">>}}; + {ok, _Rules} -> + emqx_authz_mnesia:delete_rules({username, Username}), + {204} + end. 
client(get, #{bindings := #{clientid := ClientID}}) -> - ?IF_CONFIGURED_AUTHZ_SOURCE( - case emqx_authz_mnesia:get_rules({clientid, ClientID}) of - not_found -> - {404, #{code => <<"NOT_FOUND">>, message => <<"Not Found">>}}; - {ok, Rules} -> - {200, #{ - clientid => ClientID, - rules => format_rules(Rules) - }} - end - ); + case emqx_authz_mnesia:get_rules({clientid, ClientID}) of + not_found -> + {404, #{code => <<"NOT_FOUND">>, message => <<"Not Found">>}}; + {ok, Rules} -> + {200, #{ + clientid => ClientID, + rules => format_rules(Rules) + }} + end; client(put, #{ bindings := #{clientid := ClientID}, body := #{<<"clientid">> := ClientID, <<"rules">> := Rules} }) -> - ?IF_CONFIGURED_AUTHZ_SOURCE( - begin - emqx_authz_mnesia:store_rules({clientid, ClientID}, Rules), - {204} - end - ); + emqx_authz_mnesia:store_rules({clientid, ClientID}, Rules), + {204}; client(delete, #{bindings := #{clientid := ClientID}}) -> - ?IF_CONFIGURED_AUTHZ_SOURCE( - case emqx_authz_mnesia:get_rules({clientid, ClientID}) of - not_found -> - {404, #{code => <<"NOT_FOUND">>, message => <<"ClientID Not Found">>}}; - {ok, _Rules} -> - emqx_authz_mnesia:delete_rules({clientid, ClientID}), - {204} - end - ). + case emqx_authz_mnesia:get_rules({clientid, ClientID}) of + not_found -> + {404, #{code => <<"NOT_FOUND">>, message => <<"ClientID Not Found">>}}; + {ok, _Rules} -> + emqx_authz_mnesia:delete_rules({clientid, ClientID}), + {204} + end. 
all(get, _) -> - ?IF_CONFIGURED_AUTHZ_SOURCE( - case emqx_authz_mnesia:get_rules(all) of - not_found -> - {200, #{rules => []}}; - {ok, Rules} -> - {200, #{ - rules => format_rules(Rules) - }} - end - ); + case emqx_authz_mnesia:get_rules(all) of + not_found -> + {200, #{rules => []}}; + {ok, Rules} -> + {200, #{ + rules => format_rules(Rules) + }} + end; all(post, #{body := #{<<"rules">> := Rules}}) -> - ?IF_CONFIGURED_AUTHZ_SOURCE( - begin - emqx_authz_mnesia:store_rules(all, Rules), - {204} - end - ); + emqx_authz_mnesia:store_rules(all, Rules), + {204}; all(delete, _) -> - ?IF_CONFIGURED_AUTHZ_SOURCE( - begin - emqx_authz_mnesia:store_rules(all, []), - {204} - end - ). + emqx_authz_mnesia:store_rules(all, []), + {204}. rules(delete, _) -> - ?IF_CONFIGURED_AUTHZ_SOURCE( - case emqx_authz_api_sources:get_raw_source(<<"built_in_database">>) of - [#{<<"enable">> := false}] -> - ok = emqx_authz_mnesia:purge_rules(), - {204}; - [#{<<"enable">> := true}] -> - {400, #{ - code => <<"BAD_REQUEST">>, - message => - <<"'built_in_database' type source must be disabled before purge.">> - }}; - [] -> - {404, #{ - code => <<"BAD_REQUEST">>, - message => <<"'built_in_database' type source is not found.">> - }} - end - ). + case emqx_authz_api_sources:get_raw_source(<<"built_in_database">>) of + [#{<<"enable">> := false}] -> + ok = emqx_authz_mnesia:purge_rules(), + {204}; + [#{<<"enable">> := true}] -> + {400, #{ + code => <<"BAD_REQUEST">>, + message => + <<"'built_in_database' type source must be disabled before purge.">> + }}; + [] -> + {404, #{ + code => <<"BAD_REQUEST">>, + message => <<"'built_in_database' type source is not found.">> + }} + end. 
%%-------------------------------------------------------------------- %% QueryString to MatchSpec diff --git a/apps/emqx_auth_mnesia/test/emqx_authz_api_mnesia_SUITE.erl b/apps/emqx_auth_mnesia/test/emqx_authz_api_mnesia_SUITE.erl index 50da6e676..efe4899f0 100644 --- a/apps/emqx_auth_mnesia/test/emqx_authz_api_mnesia_SUITE.erl +++ b/apps/emqx_auth_mnesia/test/emqx_authz_api_mnesia_SUITE.erl @@ -415,7 +415,9 @@ run_examples(Code, Examples) when is_number(Code) -> run_examples( fun ({ok, ResCode, _}) when Code =:= ResCode -> true; - (_) -> false + (_Res) -> + ct:pal("check failed: ~p", [_Res]), + false end, Examples ). @@ -455,7 +457,9 @@ make_examples(ApiMod, Replacements) -> end, {replace_parts(to_parts(Path), Replacements), Op, Body} end, - lists:sort(fun op_sort/2, maps:to_list(maps:remove('operationId', Schema))) + lists:sort( + fun op_sort/2, maps:to_list(maps:with([get, put, post, delete], Schema)) + ) ) end, Paths From 9436582e141c9f3cfae36f884004ada229c95e43 Mon Sep 17 00:00:00 2001 From: Stefan Strigler Date: Tue, 24 Oct 2023 09:57:37 +0200 Subject: [PATCH 047/155] fix: remove debug statement --- dev | 2 -- 1 file changed, 2 deletions(-) diff --git a/dev b/dev index 67cb6e969..38ab7bee3 100755 --- a/dev +++ b/dev @@ -227,8 +227,6 @@ prepare_erl_libs() { for app in "_build/${profile}/checkouts"/*; do erl_libs="${erl_libs}${sep}${app}" done - else - echo "no checkouts" fi export ERL_LIBS="$erl_libs" } From 28d55d72ca0738b923328d3deedb06ffee0b6aad Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Tue, 18 Apr 2023 13:03:02 +0300 Subject: [PATCH 048/155] feat(tpl): split `emqx_placeholder` into a couple of modules Located under `emqx_connector` for now. The APIs of the new modules are slightly different from the old ones. The new ones are more explicit in terms of error handling. Also copy the according testsuite from `emqx_plugin_libs` and enrich it for better coverage. 
--- .../emqx_connector/src/emqx_connector_sql.erl | 159 ++++++++ .../src/emqx_connector_template.erl | 351 ++++++++++++++++++ .../src/emqx_connector_template_sql.erl | 135 +++++++ .../src/emqx_connector_utils.erl | 35 -- .../test/emqx_connector_template_SUITE.erl | 323 ++++++++++++++++ 5 files changed, 968 insertions(+), 35 deletions(-) create mode 100644 apps/emqx_connector/src/emqx_connector_sql.erl create mode 100644 apps/emqx_connector/src/emqx_connector_template.erl create mode 100644 apps/emqx_connector/src/emqx_connector_template_sql.erl delete mode 100644 apps/emqx_connector/src/emqx_connector_utils.erl create mode 100644 apps/emqx_connector/test/emqx_connector_template_SUITE.erl diff --git a/apps/emqx_connector/src/emqx_connector_sql.erl b/apps/emqx_connector/src/emqx_connector_sql.erl new file mode 100644 index 000000000..be0b220e6 --- /dev/null +++ b/apps/emqx_connector/src/emqx_connector_sql.erl @@ -0,0 +1,159 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_connector_sql). + +-export([get_statement_type/1]). +-export([parse_insert/1]). + +-export([to_sql_value/1]). +-export([to_sql_string/2]). + +-export([escape_sql/1]). +-export([escape_cql/1]). +-export([escape_mysql/1]). + +-export_type([value/0]). 
+ +-type statement_type() :: select | insert | delete. +-type value() :: null | binary() | number() | boolean() | [value()]. + +-dialyzer({no_improper_lists, [escape_mysql/4, escape_prepend/4]}). + +-spec get_statement_type(iodata()) -> statement_type() | {error, unknown}. +get_statement_type(Query) -> + KnownTypes = #{ + <<"select">> => select, + <<"insert">> => insert, + <<"delete">> => delete + }, + case re:run(Query, <<"^\\s*([a-zA-Z]+)">>, [{capture, all_but_first, binary}]) of + {match, [Token]} -> + maps:get(string:lowercase(Token), KnownTypes, {error, unknown}); + _ -> + {error, unknown} + end. + +%% @doc Parse an INSERT SQL statement into its INSERT part and the VALUES part. +%% SQL = <<"INSERT INTO \"abc\" (c1, c2, c3) VALUES (${a}, ${b}, ${c.prop})">> +%% {ok, {<<"INSERT INTO \"abc\" (c1, c2, c3)">>, <<"(${a}, ${b}, ${c.prop})">>}} +-spec parse_insert(iodata()) -> + {ok, {_Statement :: binary(), _Rows :: binary()}} | {error, not_insert_sql}. +parse_insert(SQL) -> + case re:split(SQL, "((?i)values)", [{return, binary}]) of + [Part1, _, Part3] -> + case string:trim(Part1, leading) of + <<"insert", _/binary>> = InsertSQL -> + {ok, {InsertSQL, Part3}}; + <<"INSERT", _/binary>> = InsertSQL -> + {ok, {InsertSQL, Part3}}; + _ -> + {error, not_insert_sql} + end; + _ -> + {error, not_insert_sql} + end. + +%% @doc Convert an Erlang term to a value that can be used primarily in +%% prepared SQL statements. +-spec to_sql_value(term()) -> value(). +to_sql_value(undefined) -> null; +to_sql_value(List) when is_list(List) -> List; +to_sql_value(Bin) when is_binary(Bin) -> Bin; +to_sql_value(Num) when is_number(Num) -> Num; +to_sql_value(Bool) when is_boolean(Bool) -> Bool; +to_sql_value(Atom) when is_atom(Atom) -> atom_to_binary(Atom, utf8); +to_sql_value(Map) when is_map(Map) -> emqx_utils_json:encode(Map). + +%% @doc Convert an Erlang term to a string that can be interpolated in literal +%% SQL statements. The value is escaped if necessary. 
+-spec to_sql_string(term(), Options) -> iodata() when + Options :: #{ + escaping => cql | mysql | sql + }. +to_sql_string(String, #{escaping := mysql}) when is_binary(String) -> + try + escape_mysql(String) + catch + throw:invalid_utf8 -> + [<<"0x">>, binary:encode_hex(String)] + end; +to_sql_string(Term, #{escaping := mysql}) -> + maybe_escape(Term, fun escape_mysql/1); +to_sql_string(Term, #{escaping := cql}) -> + maybe_escape(Term, fun escape_cql/1); +to_sql_string(Term, #{}) -> + maybe_escape(Term, fun escape_sql/1). + +-spec maybe_escape(_Value, fun((binary()) -> iodata())) -> iodata(). +maybe_escape(undefined, _EscapeFun) -> + <<"NULL">>; +maybe_escape(Str, EscapeFun) when is_binary(Str) -> + EscapeFun(Str); +maybe_escape(Str, EscapeFun) when is_list(Str) -> + case unicode:characters_to_binary(Str) of + Bin when is_binary(Bin) -> + EscapeFun(Bin); + Otherwise -> + error(Otherwise) + end; +maybe_escape(Val, EscapeFun) when is_atom(Val) orelse is_map(Val) -> + EscapeFun(emqx_connector_template:to_string(Val)); +maybe_escape(Val, _EscapeFun) -> + emqx_connector_template:to_string(Val). + +-spec escape_sql(binary()) -> iodata(). +escape_sql(S) -> + % NOTE + % This is a bit misleading: currently, escaping logic in `escape_sql/1` likely + % won't work with pgsql since it does not support C-style escapes by default. + % https://www.postgresql.org/docs/14/sql-syntax-lexical.html#SQL-SYNTAX-CONSTANTS + ES = binary:replace(S, [<<"\\">>, <<"'">>], <<"\\">>, [global, {insert_replaced, 1}]), + [$', ES, $']. + +-spec escape_cql(binary()) -> iodata(). +escape_cql(S) -> + ES = binary:replace(S, <<"'">>, <<"'">>, [global, {insert_replaced, 1}]), + [$', ES, $']. + +-spec escape_mysql(binary()) -> iodata(). +escape_mysql(S0) -> + % https://dev.mysql.com/doc/refman/8.0/en/string-literals.html + [$', escape_mysql(S0, 0, 0, S0), $']. + +%% NOTE +%% This thing looks more complicated than needed because it's optimized for as few +%% intermediate memory (re)allocations as possible. 
+escape_mysql(<<$', Rest/binary>>, I, Run, Src) -> + escape_prepend(I, Run, Src, [<<"\\'">> | escape_mysql(Rest, I + Run + 1, 0, Src)]); +escape_mysql(<<$\\, Rest/binary>>, I, Run, Src) -> + escape_prepend(I, Run, Src, [<<"\\\\">> | escape_mysql(Rest, I + Run + 1, 0, Src)]); +escape_mysql(<<0, Rest/binary>>, I, Run, Src) -> + escape_prepend(I, Run, Src, [<<"\\0">> | escape_mysql(Rest, I + Run + 1, 0, Src)]); +escape_mysql(<<_/utf8, Rest/binary>> = S, I, Run, Src) -> + CWidth = byte_size(S) - byte_size(Rest), + escape_mysql(Rest, I, Run + CWidth, Src); +escape_mysql(<<>>, 0, _, Src) -> + Src; +escape_mysql(<<>>, I, Run, Src) -> + binary:part(Src, I, Run); +escape_mysql(_, _I, _Run, _Src) -> + throw(invalid_utf8). + +escape_prepend(_RunI, 0, _Src, Tail) -> + Tail; +escape_prepend(I, Run, Src, Tail) -> + [binary:part(Src, I, Run) | Tail]. diff --git a/apps/emqx_connector/src/emqx_connector_template.erl b/apps/emqx_connector/src/emqx_connector_template.erl new file mode 100644 index 000000000..c346d4289 --- /dev/null +++ b/apps/emqx_connector/src/emqx_connector_template.erl @@ -0,0 +1,351 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2020-2022 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_connector_template). + +-include_lib("emqx/include/emqx_placeholder.hrl"). 
+ +-export([parse/1]). +-export([parse/2]). +-export([parse_deep/1]). +-export([parse_deep/2]). +-export([validate/2]). +-export([trivial/1]). +-export([unparse/1]). +-export([render/2]). +-export([render/3]). +-export([render_strict/2]). +-export([render_strict/3]). + +-export([to_string/1]). + +-export_type([t/0]). +-export_type([str/0]). +-export_type([deep/0]). +-export_type([placeholder/0]). +-export_type([bindings/0]). + +-type t() :: str() | {'$tpl', deeptpl()}. + +-type str() :: [unicode:chardata() | placeholder()]. +-type deep() :: {'$tpl', deeptpl()}. + +-type deeptpl() :: + t() + | #{deeptpl() => deeptpl()} + | {list, [deeptpl()]} + | {tuple, [deeptpl()]} + | scalar() + | function() + | pid() + | port() + | reference(). + +-type placeholder() :: {var, var()}. +-type var() :: _Name :: [binary()]. + +-type scalar() :: atom() | unicode:chardata() | number(). +-type binding() :: scalar() | list(scalar()) | bindings(). +-type bindings() :: #{atom() | binary() => binding()}. + +-type var_trans() :: + fun((Value :: term()) -> unicode:chardata()) + | fun((var(), Value :: term()) -> unicode:chardata()). + +-type parse_opts() :: #{ + strip_double_quote => boolean() +}. + +-type render_opts() :: #{ + var_trans => var_trans() +}. + +-define(RE_PLACEHOLDER, "\\$(\\$?)\\{[.]?([a-zA-Z0-9._]*)\\}"). + +%% @doc Parse a unicode string into a template. +%% String might contain zero or more of placeholders in the form of `${var}`, +%% where `var` is a _location_ (possibly deeply nested) of some value in the +%% bindings map. +%% String might contain special escaped form `$${...}` which interpreted as a +%% literal `${...}`. +-spec parse(String :: unicode:chardata()) -> + t(). +parse(String) -> + parse(String, #{}). + +-spec parse(String :: unicode:chardata(), parse_opts()) -> + t(). 
+parse(String, Opts) ->
+    RE =
+        case Opts of
+            #{strip_double_quote := true} ->
+                <<"((?|" ?RE_PLACEHOLDER "|\"" ?RE_PLACEHOLDER "\"))">>;
+            #{} ->
+                <<"(" ?RE_PLACEHOLDER ")">>
+        end,
+    Splits = re:split(String, RE, [{return, binary}, group, trim, unicode]),
+    Components = lists:flatmap(fun parse_split/1, Splits),
+    Components.
+
+parse_split([Part, _PH, <<>>, Var]) ->
+    % Regular placeholder
+    prepend(Part, [{var, parse_var(Var)}]);
+parse_split([Part, _PH = <<B1, _B2, Rest/binary>>, <<"$">>, _]) ->
+    % Escaped literal, take all but the second byte, which is always `$`.
+    % Important to make a whole token starting with `$` so the `unparse/1`
+    % function can distinguish escaped literals.
+    prepend(Part, [<<B1, Rest/binary>>]);
+parse_split([Tail]) ->
+    [Tail].
+
+prepend(<<>>, To) ->
+    To;
+prepend(Head, To) ->
+    [Head | To].
+
+parse_var(Var) ->
+    case string:split(Var, <<".">>, all) of
+        [<<>>] ->
+            ?PH_VAR_THIS;
+        Name ->
+            % TODO: lowercase?
+            Name
+    end.
+
+-spec validate([var() | binary()], t()) ->
+    ok | {error, [_Error :: {var(), disallowed}]}.
+validate(AllowedIn, Template) ->
+    Allowed = [try_parse_var(V) || V <- AllowedIn],
+    {_, Errors} = render(Template, #{}),
+    {Used, _} = lists:unzip(Errors),
+    case lists:usort(Used) -- Allowed of
+        [] ->
+            ok;
+        Disallowed ->
+            {error, [{Var, disallowed} || Var <- Disallowed]}
+    end.
+
+try_parse_var(Var) when is_binary(Var) ->
+    parse_var(Var);
+try_parse_var(Name) when is_list(Name) ->
+    Name.
+
+-spec trivial(t()) ->
+    boolean().
+trivial(Template) ->
+    validate([], Template) == ok.
+
+-spec unparse(t()) ->
+    unicode:chardata().
+unparse({'$tpl', Template}) ->
+    unparse_deep(Template);
+unparse(Template) ->
+    lists:map(fun unparse_part/1, Template).
+
+unparse_part({var, Name}) ->
+    render_placeholder(Name);
+unparse_part(Part = <<"${", _/binary>>) ->
+    <<"$", Part/binary>>;
+unparse_part(Part) ->
+    Part.
+
+render_placeholder(Name) ->
+    "${" ++ lists:join($., Name) ++ "}".
+
+%% @doc Render a template with given bindings.
+%% Returns a term with all placeholders replaced with values from bindings. +%% If one or more placeholders are not found in bindings, an error is returned. +%% By default, all binding values are converted to strings using `to_string/1` +%% function. Option `var_trans` can be used to override this behaviour. +-spec render(t(), bindings()) -> + {term(), [_Error :: {var(), undefined}]}. +render(Template, Bindings) -> + render(Template, Bindings, #{}). + +-spec render(t(), bindings(), render_opts()) -> + {term(), [_Error :: {var(), undefined}]}. +render(Template, Bindings, Opts) when is_list(Template) -> + lists:mapfoldl( + fun + ({var, Name}, EAcc) -> + {String, Errors} = render_binding(Name, Bindings, Opts), + {String, Errors ++ EAcc}; + (String, EAcc) -> + {String, EAcc} + end, + [], + Template + ); +render({'$tpl', Template}, Bindings, Opts) -> + render_deep(Template, Bindings, Opts). + +render_binding(Name, Bindings, Opts) -> + case lookup_var(Name, Bindings) of + {ok, Value} -> + {render_value(Name, Value, Opts), []}; + {error, Reason} -> + % TODO + % Currently, it's not possible to distinguish between a missing value + % and an atom `undefined` in `TransFun`. + {render_value(Name, undefined, Opts), [{Name, Reason}]} + end. + +render_value(_Name, Value, #{var_trans := TransFun}) when is_function(TransFun, 1) -> + TransFun(Value); +render_value(Name, Value, #{var_trans := TransFun}) when is_function(TransFun, 2) -> + TransFun(Name, Value); +render_value(_Name, Value, #{}) -> + to_string(Value). + +-spec render_strict(t(), bindings()) -> + unicode:chardata(). +render_strict(Template, Bindings) -> + render_strict(Template, Bindings, #{}). + +-spec render_strict(t(), bindings(), render_opts()) -> + unicode:chardata(). +render_strict(Template, Bindings, Opts) -> + case render(Template, Bindings, Opts) of + {String, []} -> + String; + {_, Errors = [_ | _]} -> + error(Errors, [unicode:characters_to_list(unparse(Template)), Bindings]) + end. 
+ +%% @doc Parse an arbitrary Erlang term into a "deep" template. +%% Any binaries nested in the term are treated as string templates, while +%% lists are not analyzed for "printability" and are treated as nested terms. +%% The result is a usual template, and can be fed to other functions in this +%% module. +-spec parse_deep(unicode:chardata()) -> + t(). +parse_deep(Term) -> + parse_deep(Term, #{}). + +-spec parse_deep(unicode:chardata(), parse_opts()) -> + t(). +parse_deep(Term, Opts) -> + {'$tpl', parse_deep_term(Term, Opts)}. + +parse_deep_term(Term, Opts) when is_map(Term) -> + maps:fold( + fun(K, V, Acc) -> + Acc#{parse_deep_term(K, Opts) => parse_deep_term(V, Opts)} + end, + #{}, + Term + ); +parse_deep_term(Term, Opts) when is_list(Term) -> + {list, [parse_deep_term(E, Opts) || E <- Term]}; +parse_deep_term(Term, Opts) when is_tuple(Term) -> + {tuple, [parse_deep_term(E, Opts) || E <- tuple_to_list(Term)]}; +parse_deep_term(Term, Opts) when is_binary(Term) -> + parse(Term, Opts); +parse_deep_term(Term, _Opts) -> + Term. + +render_deep(Template, Bindings, Opts) when is_map(Template) -> + maps:fold( + fun(KT, VT, {Acc, Errors}) -> + {K, KErrors} = render_deep(KT, Bindings, Opts), + {V, VErrors} = render_deep(VT, Bindings, Opts), + {Acc#{K => V}, KErrors ++ VErrors ++ Errors} + end, + {#{}, []}, + Template + ); +render_deep({list, Template}, Bindings, Opts) when is_list(Template) -> + lists:mapfoldr( + fun(T, Errors) -> + {E, VErrors} = render_deep(T, Bindings, Opts), + {E, VErrors ++ Errors} + end, + [], + Template + ); +render_deep({tuple, Template}, Bindings, Opts) when is_list(Template) -> + {Term, Errors} = render_deep({list, Template}, Bindings, Opts), + {list_to_tuple(Term), Errors}; +render_deep(Template, Bindings, Opts) when is_list(Template) -> + {String, Errors} = render(Template, Bindings, Opts), + {unicode:characters_to_binary(String), Errors}; +render_deep(Term, _Bindings, _Opts) -> + {Term, []}. 
+ +unparse_deep(Template) when is_map(Template) -> + maps:fold( + fun(K, V, Acc) -> + Acc#{unparse_deep(K) => unparse_deep(V)} + end, + #{}, + Template + ); +unparse_deep({list, Template}) when is_list(Template) -> + [unparse_deep(E) || E <- Template]; +unparse_deep({tuple, Template}) when is_list(Template) -> + list_to_tuple(unparse_deep({list, Template})); +unparse_deep(Template) when is_list(Template) -> + unicode:characters_to_binary(unparse(Template)); +unparse_deep(Term) -> + Term. + +%% + +-spec lookup_var(var(), bindings()) -> + {ok, binding()} | {error, undefined}. +lookup_var(?PH_VAR_THIS, Value) -> + {ok, Value}; +lookup_var([], Value) -> + {ok, Value}; +lookup_var([Prop | Rest], Bindings) -> + case lookup(Prop, Bindings) of + {ok, Value} -> + lookup_var(Rest, Value); + {error, Reason} -> + {error, Reason} + end. + +-spec lookup(Prop :: binary(), bindings()) -> + {ok, binding()} | {error, undefined}. +lookup(Prop, Bindings) when is_binary(Prop) -> + case maps:get(Prop, Bindings, undefined) of + undefined -> + try + {ok, maps:get(binary_to_existing_atom(Prop, utf8), Bindings)} + catch + error:{badkey, _} -> + {error, undefined}; + error:badarg -> + {error, undefined} + end; + Value -> + {ok, Value} + end. + +-spec to_string(binding()) -> + unicode:chardata(). +to_string(undefined) -> + []; +to_string(Bin) when is_binary(Bin) -> Bin; +to_string(Num) when is_integer(Num) -> integer_to_binary(Num); +to_string(Num) when is_float(Num) -> float_to_binary(Num, [{decimals, 10}, compact]); +to_string(Atom) when is_atom(Atom) -> atom_to_binary(Atom, utf8); +to_string(Map) when is_map(Map) -> emqx_utils_json:encode(Map); +to_string(List) when is_list(List) -> + case io_lib:printable_unicode_list(List) of + true -> List; + false -> emqx_utils_json:encode(List) + end. 
diff --git a/apps/emqx_connector/src/emqx_connector_template_sql.erl b/apps/emqx_connector/src/emqx_connector_template_sql.erl new file mode 100644 index 000000000..0febfe575 --- /dev/null +++ b/apps/emqx_connector/src/emqx_connector_template_sql.erl @@ -0,0 +1,135 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2020-2022 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_connector_template_sql). + +-export([parse/1]). +-export([parse/2]). +-export([render/3]). +-export([render_strict/3]). + +-export([parse_prepstmt/2]). +-export([render_prepstmt/2]). +-export([render_prepstmt_strict/2]). + +-export_type([row_template/0]). + +-type template() :: emqx_connector_template:t(). +-type row_template() :: [emqx_connector_template:placeholder()]. +-type bindings() :: emqx_connector_template:bindings(). + +-type values() :: [emqx_connector_sql:value()]. + +-type parse_opts() :: #{ + parameters => '$n' | '?', + % Inherited from `emqx_connector_template:parse_opts()` + strip_double_quote => boolean() +}. + +-type render_opts() :: #{ + escaping => mysql | cql | sql +}. + +-define(TEMPLATE_PARSE_OPTS, [strip_double_quote]). + +%% + +%% @doc Parse an SQL statement string with zero or more placeholders into a template. +-spec parse(unicode:chardata()) -> + template(). 
+parse(String) -> + parse(String, #{}). + +%% @doc Parse an SQL statement string with zero or more placeholders into a template. +-spec parse(unicode:chardata(), parse_opts()) -> + template(). +parse(String, Opts) -> + emqx_connector_template:parse(String, Opts). + +%% @doc Render an SQL statement template given a set of bindings. +%% Interpolation generally follows the SQL syntax, strings are escaped according to the +%% `escaping` option. +-spec render(template(), bindings(), render_opts()) -> + {unicode:chardata(), [_Error]}. +render(Template, Bindings, Opts) -> + emqx_connector_template:render(Template, Bindings, #{ + var_trans => fun(Value) -> emqx_connector_sql:to_sql_string(Value, Opts) end + }). + +%% @doc Render an SQL statement template given a set of bindings. +%% Errors are raised if any placeholders are not bound. +-spec render_strict(template(), bindings(), render_opts()) -> + unicode:chardata(). +render_strict(Template, Bindings, Opts) -> + emqx_connector_template:render_strict(Template, Bindings, #{ + var_trans => fun(Value) -> emqx_connector_sql:to_sql_string(Value, Opts) end + }). + +%% @doc Parse an SQL statement string into a prepared statement and a row template. +%% The row template is a template for a row of SQL values to be inserted to a database +%% during the execution of the prepared statement. +%% Example: +%% ``` +%% {Statement, RowTemplate} = emqx_connector_template_sql:parse_prepstmt( +%% "INSERT INTO table (id, name, age) VALUES (${id}, ${name}, 42)", +%% #{parameters => '$n'} +%% ), +%% Statement = <<"INSERT INTO table (id, name, age) VALUES ($1, $2, 42)">>, +%% RowTemplate = [{var, [...]}, ...] +%% ``` +-spec parse_prepstmt(unicode:chardata(), parse_opts()) -> + {unicode:chardata(), row_template()}. 
+parse_prepstmt(String, Opts) -> + Template = emqx_connector_template:parse(String, maps:with(?TEMPLATE_PARSE_OPTS, Opts)), + Statement = mk_prepared_statement(Template, Opts), + Placeholders = [Placeholder || Placeholder = {var, _} <- Template], + {Statement, Placeholders}. + +mk_prepared_statement(Template, Opts) -> + ParameterFormat = maps:get(parameters, Opts, '?'), + {Statement, _} = + lists:mapfoldl( + fun + ({var, _}, Acc) -> + mk_replace(ParameterFormat, Acc); + (String, Acc) -> + {String, Acc} + end, + 1, + Template + ), + Statement. + +mk_replace('?', Acc) -> + {"?", Acc}; +mk_replace('$n', N) -> + {"$" ++ integer_to_list(N), N + 1}. + +%% @doc Render a row template into a list of SQL values. +%% An _SQL value_ is a vaguely defined concept here, it is something that's considered +%% compatible with the protocol of the database being used. See the definition of +%% `emqx_connector_sql:value()` for more details. +-spec render_prepstmt(template(), bindings()) -> + {values(), [_Error]}. +render_prepstmt(Template, Bindings) -> + Opts = #{var_trans => fun emqx_connector_sql:to_sql_value/1}, + emqx_connector_template:render(Template, Bindings, Opts). + +-spec render_prepstmt_strict(template(), bindings()) -> + values(). +render_prepstmt_strict(Template, Bindings) -> + Opts = #{var_trans => fun emqx_connector_sql:to_sql_value/1}, + emqx_connector_template:render_strict(Template, Bindings, Opts). diff --git a/apps/emqx_connector/src/emqx_connector_utils.erl b/apps/emqx_connector/src/emqx_connector_utils.erl deleted file mode 100644 index 6000f6be5..000000000 --- a/apps/emqx_connector/src/emqx_connector_utils.erl +++ /dev/null @@ -1,35 +0,0 @@ -%%-------------------------------------------------------------------- -%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved. -%% -%% Licensed under the Apache License, Version 2.0 (the "License"); -%% you may not use this file except in compliance with the License. 
-%% You may obtain a copy of the License at -%% -%% http://www.apache.org/licenses/LICENSE-2.0 -%% -%% Unless required by applicable law or agreed to in writing, software -%% distributed under the License is distributed on an "AS IS" BASIS, -%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -%% See the License for the specific language governing permissions and -%% limitations under the License. -%%-------------------------------------------------------------------- - --module(emqx_connector_utils). - --export([split_insert_sql/1]). - -%% SQL = <<"INSERT INTO \"abc\" (c1,c2,c3) VALUES (${1}, ${1}, ${1})">> -split_insert_sql(SQL) -> - case re:split(SQL, "((?i)values)", [{return, binary}]) of - [Part1, _, Part3] -> - case string:trim(Part1, leading) of - <<"insert", _/binary>> = InsertSQL -> - {ok, {InsertSQL, Part3}}; - <<"INSERT", _/binary>> = InsertSQL -> - {ok, {InsertSQL, Part3}}; - _ -> - {error, not_insert_sql} - end; - _ -> - {error, not_insert_sql} - end. diff --git a/apps/emqx_connector/test/emqx_connector_template_SUITE.erl b/apps/emqx_connector/test/emqx_connector_template_SUITE.erl new file mode 100644 index 000000000..666fbfa58 --- /dev/null +++ b/apps/emqx_connector/test/emqx_connector_template_SUITE.erl @@ -0,0 +1,323 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2020-2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. 
+%%--------------------------------------------------------------------
+
+-module(emqx_connector_template_SUITE).
+
+-compile(export_all).
+-compile(nowarn_export_all).
+
+-include_lib("emqx/include/emqx_placeholder.hrl").
+-include_lib("eunit/include/eunit.hrl").
+
+all() -> emqx_common_test_helpers:all(?MODULE).
+
+t_render(_) ->
+    Bindings = #{
+        a => <<"1">>,
+        b => 1,
+        c => 1.0,
+        d => #{<<"d1">> => <<"hi">>},
+        l => [0, 1, 1000],
+        u => "utf-8 is ǝɹǝɥ"
+    },
+    Template = emqx_connector_template:parse(
+        <<"a:${a},b:${b},c:${c},d:${d},d1:${d.d1},l:${l},u:${u}">>
+    ),
+    ?assertEqual(
+        {<<"a:1,b:1,c:1.0,d:{\"d1\":\"hi\"},d1:hi,l:[0,1,1000],u:utf-8 is ǝɹǝɥ"/utf8>>, []},
+        render_string(Template, Bindings)
+    ).
+
+t_render_var_trans(_) ->
+    Bindings = #{a => <<"1">>, b => 1, c => #{prop => 1.0}},
+    Template = emqx_connector_template:parse(<<"a:${a},b:${b},c:${c.prop}">>),
+    {String, Errors} = emqx_connector_template:render(
+        Template,
+        Bindings,
+        #{var_trans => fun(Name, _) -> "<" ++ lists:join($., Name) ++ ">" end}
+    ),
+    ?assertEqual(
+        {<<"a:<a>,b:<b>,c:<c.prop>">>, []},
+        {bin(String), Errors}
+    ).
+
+t_render_path(_) ->
+    Bindings = #{d => #{d1 => <<"hi">>}},
+    Template = emqx_connector_template:parse(<<"d.d1:${d.d1}">>),
+    ?assertEqual(
+        ok,
+        emqx_connector_template:validate([<<"d.d1">>], Template)
+    ),
+    ?assertEqual(
+        {<<"d.d1:hi">>, []},
+        render_string(Template, Bindings)
+    ).
+
+t_render_custom_ph(_) ->
+    Bindings = #{a => <<"a">>, b => <<"b">>},
+    Template = emqx_connector_template:parse(<<"a:${a},b:${b}">>),
+    ?assertEqual(
+        {error, [{[<<"b">>], disallowed}]},
+        emqx_connector_template:validate([<<"a">>], Template)
+    ),
+    ?assertEqual(
+        <<"a:a,b:b">>,
+        render_strict_string(Template, Bindings)
+    ).
+ +t_render_this(_) -> + Bindings = #{a => <<"a">>, b => [1, 2, 3]}, + Template = emqx_connector_template:parse(<<"this:${} / also:${.}">>), + ?assertEqual(ok, emqx_connector_template:validate([?PH_VAR_THIS], Template)), + ?assertEqual( + % NOTE: order of the keys in the JSON object depends on the JSON encoder + <<"this:{\"b\":[1,2,3],\"a\":\"a\"} / also:{\"b\":[1,2,3],\"a\":\"a\"}">>, + render_strict_string(Template, Bindings) + ). + +t_render_missing_bindings(_) -> + Bindings = #{no => #{}}, + Template = emqx_connector_template:parse( + <<"a:${a},b:${b},c:${c},d:${d.d1},e:${no.such_atom_i_swear}">> + ), + ?assertEqual( + {<<"a:,b:,c:,d:,e:">>, [ + {[<<"no">>, <<"such_atom_i_swear">>], undefined}, + {[<<"d">>, <<"d1">>], undefined}, + {[<<"c">>], undefined}, + {[<<"b">>], undefined}, + {[<<"a">>], undefined} + ]}, + render_string(Template, Bindings) + ), + ?assertError( + [ + {[<<"no">>, <<"such_atom_i_swear">>], undefined}, + {[<<"d">>, <<"d1">>], undefined}, + {[<<"c">>], undefined}, + {[<<"b">>], undefined}, + {[<<"a">>], undefined} + ], + render_strict_string(Template, Bindings) + ). + +t_unparse(_) -> + TString = <<"a:${a},b:${b},c:$${c},d:{${d.d1}}">>, + Template = emqx_connector_template:parse(TString), + ?assertEqual( + TString, + unicode:characters_to_binary(emqx_connector_template:unparse(Template)) + ). + +t_trivial(_) -> + ?assertEqual( + true, + emqx_connector_template:trivial(emqx_connector_template:parse(<<"">>)) + ), + ?assertEqual( + false, + emqx_connector_template:trivial(emqx_connector_template:parse(<<"a:${a},b:${b},c:$${c}">>)) + ), + ?assertEqual( + true, + emqx_connector_template:trivial( + emqx_connector_template:parse(<<"a:$${a},b:$${b},c:$${c}">>) + ) + ). + +t_render_partial_ph(_) -> + Bindings = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, + Template = emqx_connector_template:parse(<<"a:$a,b:b},c:{c},d:${d">>), + ?assertEqual( + <<"a:$a,b:b},c:{c},d:${d">>, + render_strict_string(Template, Bindings) + ). 
+ +t_parse_escaped(_) -> + Bindings = #{a => <<"1">>, b => 1}, + Template = emqx_connector_template:parse(<<"a:${a},b:$${b}">>), + ?assertEqual( + <<"a:1,b:${b}">>, + render_strict_string(Template, Bindings) + ). + +t_parse_escaped_dquote(_) -> + Bindings = #{a => <<"1">>, b => 1}, + Template = emqx_connector_template:parse(<<"a:\"${a}\",b:\"$${b}\"">>, #{ + strip_double_quote => true + }), + ?assertEqual( + <<"a:1,b:\"${b}\"">>, + render_strict_string(Template, Bindings) + ). + +t_parse_sql_prepstmt(_) -> + Bindings = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, + {PrepareStatement, RowTemplate} = + emqx_connector_template_sql:parse_prepstmt(<<"a:${a},b:${b},c:${c},d:${d}">>, #{ + parameters => '?' + }), + ?assertEqual(<<"a:?,b:?,c:?,d:?">>, bin(PrepareStatement)), + ?assertEqual( + {[<<"1">>, 1, 1.0, <<"{\"d1\":\"hi\"}">>], _Errors = []}, + emqx_connector_template_sql:render_prepstmt(RowTemplate, Bindings) + ). + +t_parse_sql_prepstmt_n(_) -> + Bindings = #{a => undefined, b => true, c => atom, d => #{d1 => 42.1337}}, + {PrepareStatement, RowTemplate} = + emqx_connector_template_sql:parse_prepstmt(<<"a:${a},b:${b},c:${c},d:${d}">>, #{ + parameters => '$n' + }), + ?assertEqual(<<"a:$1,b:$2,c:$3,d:$4">>, bin(PrepareStatement)), + ?assertEqual( + [null, true, <<"atom">>, <<"{\"d1\":42.1337}">>], + emqx_connector_template_sql:render_prepstmt_strict(RowTemplate, Bindings) + ). + +t_parse_sql_prepstmt_partial_ph(_) -> + Bindings = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, + {PrepareStatement, RowTemplate} = + emqx_connector_template_sql:parse_prepstmt(<<"a:$a,b:b},c:{c},d:${d">>, #{parameters => '?'}), + ?assertEqual(<<"a:$a,b:b},c:{c},d:${d">>, bin(PrepareStatement)), + ?assertEqual([], emqx_connector_template_sql:render_prepstmt_strict(RowTemplate, Bindings)). 
+ +t_render_sql(_) -> + Bindings = #{ + a => <<"1">>, + b => 1, + c => 1.0, + d => #{d1 => <<"hi">>}, + n => undefined, + u => "utf8's cool 🐸" + }, + Template = emqx_connector_template:parse(<<"a:${a},b:${b},c:${c},d:${d},n:${n},u:${u}">>), + ?assertMatch( + {_String, _Errors = []}, + emqx_connector_template_sql:render(Template, Bindings, #{}) + ), + ?assertEqual( + <<"a:'1',b:1,c:1.0,d:'{\"d1\":\"hi\"}',n:NULL,u:'utf8\\'s cool 🐸'"/utf8>>, + bin(emqx_connector_template_sql:render_strict(Template, Bindings, #{})) + ). + +t_render_mysql(_) -> + %% with apostrophes + %% https://github.com/emqx/emqx/issues/4135 + Bindings = #{ + a => <<"1''2">>, + b => 1, + c => 1.0, + d => #{d1 => <<"someone's phone">>}, + e => <<$\\, 0, "💩"/utf8>>, + f => <<"non-utf8", 16#DCC900:24>>, + g => "utf8's cool 🐸", + h => imgood + }, + Template = emqx_connector_template_sql:parse( + <<"a:${a},b:${b},c:${c},d:${d},e:${e},f:${f},g:${g},h:${h}">> + ), + ?assertEqual( + << + "a:'1\\'\\'2',b:1,c:1.0,d:'{\"d1\":\"someone\\'s phone\"}'," + "e:'\\\\\\0💩',f:0x6E6F6E2D75746638DCC900,g:'utf8\\'s cool 🐸',"/utf8, + "h:'imgood'" + >>, + bin(emqx_connector_template_sql:render_strict(Template, Bindings, #{escaping => mysql})) + ). + +t_render_cql(_) -> + %% with apostrophes for cassandra + %% https://github.com/emqx/emqx/issues/4148 + Bindings = #{ + a => <<"1''2">>, + b => 1, + c => 1.0, + d => #{d1 => <<"someone's phone">>} + }, + Template = emqx_connector_template:parse(<<"a:${a},b:${b},c:${c},d:${d}">>), + ?assertEqual( + <<"a:'1''''2',b:1,c:1.0,d:'{\"d1\":\"someone''s phone\"}'">>, + bin(emqx_connector_template_sql:render_strict(Template, Bindings, #{escaping => cql})) + ). + +t_render_sql_custom_ph(_) -> + {PrepareStatement, RowTemplate} = + emqx_connector_template_sql:parse_prepstmt(<<"a:${a},b:${b}">>, #{parameters => '$n'}), + ?assertEqual( + {error, [{[<<"b">>], disallowed}]}, + emqx_connector_template:validate([<<"a">>], RowTemplate) + ), + ?assertEqual(<<"a:$1,b:$2">>, bin(PrepareStatement)). 
+ +t_render_sql_strip_double_quote(_) -> + Bindings = #{a => <<"a">>, b => <<"b">>}, + + %% no strip_double_quote option: "${key}" -> "value" + {PrepareStatement1, RowTemplate1} = emqx_connector_template_sql:parse_prepstmt( + <<"a:\"${a}\",b:\"${b}\"">>, + #{parameters => '$n'} + ), + ?assertEqual(<<"a:\"$1\",b:\"$2\"">>, bin(PrepareStatement1)), + ?assertEqual( + [<<"a">>, <<"b">>], + emqx_connector_template_sql:render_prepstmt_strict(RowTemplate1, Bindings) + ), + + %% strip_double_quote = true: "${key}" -> value + {PrepareStatement2, RowTemplate2} = emqx_connector_template_sql:parse_prepstmt( + <<"a:\"${a}\",b:\"${b}\"">>, + #{parameters => '$n', strip_double_quote => true} + ), + ?assertEqual(<<"a:$1,b:$2">>, bin(PrepareStatement2)), + ?assertEqual( + [<<"a">>, <<"b">>], + emqx_connector_template_sql:render_prepstmt_strict(RowTemplate2, Bindings) + ). + +t_render_tmpl_deep(_) -> + Bindings = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, + + Template = emqx_connector_template:parse_deep( + #{<<"${a}">> => [<<"${b}">>, "c", 2, 3.0, '${d}', {[<<"${c}">>, <<"$${d}">>], 0}]} + ), + + ?assertEqual( + {error, [{V, disallowed} || V <- [[<<"b">>], [<<"c">>]]]}, + emqx_connector_template:validate([<<"a">>], Template) + ), + + ?assertEqual( + #{<<"1">> => [<<"1">>, "c", 2, 3.0, '${d}', {[<<"1.0">>, <<"${d}">>], 0}]}, + emqx_connector_template:render_strict(Template, Bindings) + ). + +t_unparse_tmpl_deep(_) -> + Term = #{<<"${a}">> => [<<"$${b}">>, "c", 2, 3.0, '${d}', {[<<"${c}">>], 0}]}, + Template = emqx_connector_template:parse_deep(Term), + ?assertEqual(Term, emqx_connector_template:unparse(Template)). + +%% + +render_string(Template, Bindings) -> + {String, Errors} = emqx_connector_template:render(Template, Bindings), + {bin(String), Errors}. + +render_strict_string(Template, Bindings) -> + bin(emqx_connector_template:render_strict(Template, Bindings)). + +bin(String) -> + unicode:characters_to_binary(String). 
From 35902dc72db829d2ff0c4de4206aae04841533cb Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Tue, 18 Apr 2023 15:34:38 +0300 Subject: [PATCH 049/155] feat(tpl): switch basic connectors to `emqx_connector_template` Also avoid `filename:join/2` in HTTP connector since it's both OS specific and an overkill. --- .../src/emqx_bridge_http_connector.erl | 120 ++++----- .../test/emqx_bridge_http_connector_tests.erl | 3 +- .../test/emqx_bridge_mysql_SUITE.erl | 30 +-- .../test/emqx_bridge_pgsql_SUITE.erl | 3 +- apps/emqx_mysql/src/emqx_mysql.erl | 248 +++++++++--------- apps/emqx_postgresql/src/emqx_postgresql.erl | 200 +++++++------- 6 files changed, 280 insertions(+), 324 deletions(-) diff --git a/apps/emqx_bridge_http/src/emqx_bridge_http_connector.erl b/apps/emqx_bridge_http/src/emqx_bridge_http_connector.erl index 5d1b1947c..869f081fb 100644 --- a/apps/emqx_bridge_http/src/emqx_bridge_http_connector.erl +++ b/apps/emqx_bridge_http/src/emqx_bridge_http_connector.erl @@ -479,61 +479,47 @@ preprocess_request( } = Req ) -> #{ - method => emqx_placeholder:preproc_tmpl(to_bin(Method)), - path => emqx_placeholder:preproc_tmpl(Path), - body => maybe_preproc_tmpl(body, Req), - headers => wrap_auth_header(preproc_headers(Headers)), + method => parse_template(to_bin(Method)), + path => parse_template(Path), + body => maybe_parse_template(body, Req), + headers => parse_headers(Headers), request_timeout => maps:get(request_timeout, Req, ?DEFAULT_REQUEST_TIMEOUT_MS), max_retries => maps:get(max_retries, Req, 2) }. 
-preproc_headers(Headers) when is_map(Headers) -> +parse_headers(Headers) when is_map(Headers) -> maps:fold( - fun(K, V, Acc) -> - [ - { - emqx_placeholder:preproc_tmpl(to_bin(K)), - emqx_placeholder:preproc_tmpl(to_bin(V)) - } - | Acc - ] - end, + fun(K, V, Acc) -> [parse_header(K, V) | Acc] end, [], Headers ); -preproc_headers(Headers) when is_list(Headers) -> +parse_headers(Headers) when is_list(Headers) -> lists:map( - fun({K, V}) -> - { - emqx_placeholder:preproc_tmpl(to_bin(K)), - emqx_placeholder:preproc_tmpl(to_bin(V)) - } - end, + fun({K, V}) -> parse_header(K, V) end, Headers ). -wrap_auth_header(Headers) -> - lists:map(fun maybe_wrap_auth_header/1, Headers). +parse_header(K, V) -> + KStr = to_bin(K), + VTpl = parse_template(to_bin(V)), + {parse_template(KStr), maybe_wrap_auth_header(KStr, VTpl)}. -maybe_wrap_auth_header({[{str, Key}] = StrKey, Val}) -> - {_, MaybeWrapped} = maybe_wrap_auth_header({Key, Val}), - {StrKey, MaybeWrapped}; -maybe_wrap_auth_header({Key, Val} = Header) when - is_binary(Key), (size(Key) =:= 19 orelse size(Key) =:= 13) +maybe_wrap_auth_header(Key, VTpl) when + (byte_size(Key) =:= 19 orelse byte_size(Key) =:= 13) -> %% We check the size of potential keys in the guard above and consider only %% those that match the number of characters of either "Authorization" or %% "Proxy-Authorization". case try_bin_to_lower(Key) of <<"authorization">> -> - {Key, emqx_secret:wrap(Val)}; + emqx_secret:wrap(VTpl); <<"proxy-authorization">> -> - {Key, emqx_secret:wrap(Val)}; + emqx_secret:wrap(VTpl); _Other -> - Header + VTpl end; -maybe_wrap_auth_header(Header) -> - Header. +maybe_wrap_auth_header(_Key, VTpl) -> + VTpl. try_bin_to_lower(Bin) -> try iolist_to_binary(string:lowercase(Bin)) of @@ -542,46 +528,57 @@ try_bin_to_lower(Bin) -> _:_ -> Bin end. 
-maybe_preproc_tmpl(Key, Conf) -> +maybe_parse_template(Key, Conf) -> case maps:get(Key, Conf, undefined) of undefined -> undefined; - Val -> emqx_placeholder:preproc_tmpl(Val) + Val -> parse_template(Val) end. +parse_template(String) -> + emqx_connector_template:parse(String). + process_request( #{ - method := MethodTks, - path := PathTks, - body := BodyTks, - headers := HeadersTks, + method := MethodTemplate, + path := PathTemplate, + body := BodyTemplate, + headers := HeadersTemplate, request_timeout := ReqTimeout } = Conf, Msg ) -> Conf#{ - method => make_method(emqx_placeholder:proc_tmpl(MethodTks, Msg)), - path => emqx_placeholder:proc_tmpl(PathTks, Msg), - body => process_request_body(BodyTks, Msg), - headers => proc_headers(HeadersTks, Msg), + method => make_method(render_template_string(MethodTemplate, Msg)), + path => unicode:characters_to_list(render_template(PathTemplate, Msg)), + body => render_request_body(BodyTemplate, Msg), + headers => render_headers(HeadersTemplate, Msg), request_timeout => ReqTimeout }. -process_request_body(undefined, Msg) -> +render_request_body(undefined, Msg) -> emqx_utils_json:encode(Msg); -process_request_body(BodyTks, Msg) -> - emqx_placeholder:proc_tmpl(BodyTks, Msg). +render_request_body(BodyTks, Msg) -> + render_template(BodyTks, Msg). -proc_headers(HeaderTks, Msg) -> +render_headers(HeaderTks, Msg) -> lists:map( fun({K, V}) -> { - emqx_placeholder:proc_tmpl(K, Msg), - emqx_placeholder:proc_tmpl(emqx_secret:unwrap(V), Msg) + render_template_string(K, Msg), + render_template_string(emqx_secret:unwrap(V), Msg) } end, HeaderTks ). +render_template(Template, Msg) -> + % NOTE: ignoring errors here, missing variables will be rendered as `"undefined"`. + {String, _Errors} = emqx_connector_template:render(Template, Msg), + String. + +render_template_string(Template, Msg) -> + unicode:characters_to_binary(render_template(Template, Msg)). 
+ make_method(M) when M == <<"POST">>; M == <<"post">> -> post; make_method(M) when M == <<"PUT">>; M == <<"put">> -> put; make_method(M) when M == <<"GET">>; M == <<"get">> -> get; @@ -716,8 +713,6 @@ maybe_retry(Result, _Context, ReplyFunAndArgs) -> emqx_resource:apply_reply_fun(ReplyFunAndArgs, Result). %% The HOCON schema system may generate sensitive keys with this format -is_sensitive_key([{str, StringKey}]) -> - is_sensitive_key(StringKey); is_sensitive_key(Atom) when is_atom(Atom) -> is_sensitive_key(erlang:atom_to_binary(Atom)); is_sensitive_key(Bin) when is_binary(Bin), (size(Bin) =:= 19 orelse size(Bin) =:= 13) -> @@ -742,25 +737,19 @@ redact(Data) -> %% and we also can't know the body format and where the sensitive data will be %% so the easy way to keep data security is redacted the whole body redact_request({Path, Headers}) -> - {Path, redact(Headers)}; + {Path, Headers}; redact_request({Path, Headers, _Body}) -> - {Path, redact(Headers), <<"******">>}. + {Path, Headers, <<"******">>}. -ifdef(TEST). -include_lib("eunit/include/eunit.hrl"). redact_test_() -> - TestData1 = [ - {<<"content-type">>, <<"application/json">>}, - {<<"Authorization">>, <<"Basic YWxhZGRpbjpvcGVuc2VzYW1l">>} - ], - - TestData2 = #{ - headers => - [ - {[{str, <<"content-type">>}], [{str, <<"application/json">>}]}, - {[{str, <<"Authorization">>}], [{str, <<"Basic YWxhZGRpbjpvcGVuc2VzYW1l">>}]} - ] + TestData = #{ + headers => [ + {<<"content-type">>, <<"application/json">>}, + {<<"Authorization">>, <<"Basic YWxhZGRpbjpvcGVuc2VzYW1l">>} + ] }, [ ?_assert(is_sensitive_key(<<"Authorization">>)), @@ -770,8 +759,7 @@ redact_test_() -> ?_assert(is_sensitive_key('PrOxy-authoRizaTion')), ?_assertNot(is_sensitive_key(<<"Something">>)), ?_assertNot(is_sensitive_key(89)), - ?_assertNotEqual(TestData1, redact(TestData1)), - ?_assertNotEqual(TestData2, redact(TestData2)) + ?_assertNotEqual(TestData, redact(TestData)) ]. 
join_paths_test_() -> diff --git a/apps/emqx_bridge_http/test/emqx_bridge_http_connector_tests.erl b/apps/emqx_bridge_http/test/emqx_bridge_http_connector_tests.erl index 6b5c2b0cd..1de210260 100644 --- a/apps/emqx_bridge_http/test/emqx_bridge_http_connector_tests.erl +++ b/apps/emqx_bridge_http/test/emqx_bridge_http_connector_tests.erl @@ -83,7 +83,8 @@ is_wrapped(Secret) when is_function(Secret) -> is_wrapped(_Other) -> false. -untmpl([{_, V} | _]) -> V. +untmpl(Tpl) -> + iolist_to_binary(emqx_connector_template:render_strict(Tpl, #{})). is_unwrapped_headers(Headers) -> lists:all(fun is_unwrapped_header/1, Headers). diff --git a/apps/emqx_bridge_mysql/test/emqx_bridge_mysql_SUITE.erl b/apps/emqx_bridge_mysql/test/emqx_bridge_mysql_SUITE.erl index 3ed40e903..2eeccfd77 100644 --- a/apps/emqx_bridge_mysql/test/emqx_bridge_mysql_SUITE.erl +++ b/apps/emqx_bridge_mysql/test/emqx_bridge_mysql_SUITE.erl @@ -565,8 +565,6 @@ t_simple_sql_query(Config) -> ok. t_missing_data(Config) -> - BatchSize = ?config(batch_size, Config), - IsBatch = BatchSize > 1, ?assertMatch( {ok, _}, create_bridge(Config) @@ -577,27 +575,13 @@ t_missing_data(Config) -> ), send_message(Config, #{}), {ok, [Event]} = snabbkaffe:receive_events(SRef), - case IsBatch of - true -> - ?assertMatch( - #{ - result := - {error, - {unrecoverable_error, - {1292, _, <<"Truncated incorrect DOUBLE value: 'undefined'">>}}} - }, - Event - ); - false -> - ?assertMatch( - #{ - result := - {error, - {unrecoverable_error, {1048, _, <<"Column 'arrived' cannot be null">>}}} - }, - Event - ) - end, + ?assertMatch( + #{ + result := + {error, {unrecoverable_error, {1048, _, <<"Column 'arrived' cannot be null">>}}} + }, + Event + ), ok. 
t_bad_sql_parameter(Config) -> diff --git a/apps/emqx_bridge_pgsql/test/emqx_bridge_pgsql_SUITE.erl b/apps/emqx_bridge_pgsql/test/emqx_bridge_pgsql_SUITE.erl index cd79db43d..156d4bd16 100644 --- a/apps/emqx_bridge_pgsql/test/emqx_bridge_pgsql_SUITE.erl +++ b/apps/emqx_bridge_pgsql/test/emqx_bridge_pgsql_SUITE.erl @@ -324,6 +324,7 @@ connect_and_drop_table(Config) -> connect_and_clear_table(Config) -> Con = connect_direct_pgsql(Config), + _ = epgsql:squery(Con, ?SQL_CREATE_TABLE), {ok, _} = epgsql:squery(Con, ?SQL_DELETE), ok = epgsql:close(Con). @@ -668,7 +669,7 @@ t_missing_table(Config) -> ok end, fun(Trace) -> - ?assertMatch([_, _, _], ?of_kind(pgsql_undefined_table, Trace)), + ?assertMatch([_], ?of_kind(pgsql_undefined_table, Trace)), ok end ), diff --git a/apps/emqx_mysql/src/emqx_mysql.erl b/apps/emqx_mysql/src/emqx_mysql.erl index 4440bcfbb..927c9d067 100644 --- a/apps/emqx_mysql/src/emqx_mysql.erl +++ b/apps/emqx_mysql/src/emqx_mysql.erl @@ -46,16 +46,12 @@ default_port => ?MYSQL_DEFAULT_PORT }). --type prepares() :: #{atom() => binary()}. --type params_tokens() :: #{atom() => list()}. --type sqls() :: #{atom() => binary()}. +-type template() :: {unicode:chardata(), emqx_connector_template:str()}. -type state() :: #{ pool_name := binary(), - prepare_statement := prepares(), - params_tokens := params_tokens(), - batch_inserts := sqls(), - batch_params_tokens := params_tokens() + prepares := ok | {error, _}, + templates := #{{atom(), batch | prepstmt} => template()} }. 
%%===================================================================== @@ -154,13 +150,13 @@ on_query(InstId, {TypeOrKey, SQLOrKey, Params}, State) -> on_query( InstId, {TypeOrKey, SQLOrKey, Params, Timeout}, - #{pool_name := PoolName, prepare_statement := Prepares} = State + State ) -> MySqlFunction = mysql_function(TypeOrKey), {SQLOrKey2, Data} = proc_sql_params(TypeOrKey, SQLOrKey, Params, State), case on_sql_query(InstId, MySqlFunction, SQLOrKey2, Data, Timeout, State) of {error, not_prepared} -> - case maybe_prepare_sql(SQLOrKey2, Prepares, PoolName) of + case maybe_prepare_sql(SQLOrKey2, State) of ok -> ?tp( mysql_connector_on_query_prepared_sql, @@ -187,23 +183,27 @@ on_query( on_batch_query( InstId, - BatchReq, - #{batch_inserts := Inserts, batch_params_tokens := ParamsTokens} = State + BatchReq = [{Key, _} | _], + #{query_templates := Templates} = State ) -> - case hd(BatchReq) of - {Key, _} -> - case maps:get(Key, Inserts, undefined) of - undefined -> - {error, {unrecoverable_error, batch_select_not_implemented}}; - InsertSQL -> - Tokens = maps:get(Key, ParamsTokens), - on_batch_insert(InstId, BatchReq, InsertSQL, Tokens, State) - end; - Request -> - LogMeta = #{connector => InstId, first_request => Request, state => State}, - ?SLOG(error, LogMeta#{msg => "invalid request"}), - {error, {unrecoverable_error, invalid_request}} - end. + case maps:get({Key, batch}, Templates, undefined) of + undefined -> + {error, {unrecoverable_error, batch_select_not_implemented}}; + Template -> + on_batch_insert(InstId, BatchReq, Template, State) + end; +on_batch_query( + InstId, + BatchReq, + State +) -> + ?SLOG(error, #{ + msg => "invalid request", + connector => InstId, + request => BatchReq, + state => State + }), + {error, {unrecoverable_error, invalid_request}}. 
mysql_function(sql) -> query; @@ -222,8 +222,8 @@ on_get_status(_InstId, #{pool_name := PoolName} = State) -> {ok, NState} -> %% return new state with prepared statements {connected, NState}; - {error, {undefined_table, NState}} -> - {disconnected, NState, unhealthy_target}; + {error, undefined_table} -> + {disconnected, State, unhealthy_target}; {error, _Reason} -> %% do not log error, it is logged in prepare_sql_to_conn connecting @@ -238,8 +238,8 @@ do_get_status(Conn) -> do_check_prepares( #{ pool_name := PoolName, - prepare_statement := #{send_message := SQL} - } = State + templates := #{{send_message, prepstmt} := SQL} + } ) -> % it's already connected. Verify if target table still exists Workers = [Worker || {_WorkerName, Worker} <- ecpool:workers(PoolName)], @@ -250,7 +250,7 @@ do_check_prepares( {ok, Conn} -> case mysql:prepare(Conn, get_status, SQL) of {error, {1146, _, _}} -> - {error, {undefined_table, State}}; + {error, undefined_table}; {ok, Statement} -> mysql:unprepare(Conn, Statement); _ -> @@ -265,17 +265,14 @@ do_check_prepares( ok, Workers ); -do_check_prepares(#{prepare_statement := Statement}) when is_map(Statement) -> +do_check_prepares(#{prepares := ok}) -> ok; -do_check_prepares(State = #{pool_name := PoolName, prepare_statement := {error, Prepares}}) -> +do_check_prepares(#{prepares := {error, _}} = State) -> %% retry to prepare - case prepare_sql(Prepares, PoolName) of + case prepare_sql(State) of ok -> %% remove the error - {ok, State#{prepare_statement => Prepares}}; - {error, undefined_table} -> - %% indicate the error - {error, {undefined_table, State#{prepare_statement => {error, Prepares}}}}; + {ok, State#{prepares => ok}}; {error, Reason} -> {error, Reason} end. @@ -285,41 +282,44 @@ do_check_prepares(State = #{pool_name := PoolName, prepare_statement := {error, connect(Options) -> mysql:start_link(Options). 
-init_prepare(State = #{prepare_statement := Prepares, pool_name := PoolName}) -> - case maps:size(Prepares) of +init_prepare(State = #{query_templates := Templates}) -> + case maps:size(Templates) of 0 -> - State; + State#{prepares => ok}; _ -> - case prepare_sql(Prepares, PoolName) of + case prepare_sql(State) of ok -> - State; + State#{prepares => ok}; {error, Reason} -> - LogMeta = #{msg => <<"mysql_init_prepare_statement_failed">>, reason => Reason}, - ?SLOG(error, LogMeta), + ?SLOG(error, #{ + msg => <<"MySQL init prepare statement failed">>, + reason => Reason + }), %% mark the prepare_statement as failed - State#{prepare_statement => {error, Prepares}} + State#{prepares => {error, Reason}} end end. -maybe_prepare_sql(SQLOrKey, Prepares, PoolName) -> - case maps:is_key(SQLOrKey, Prepares) of - true -> prepare_sql(Prepares, PoolName); +maybe_prepare_sql(SQLOrKey, State = #{query_templates := Templates}) -> + case maps:is_key({SQLOrKey, prepstmt}, Templates) of + true -> prepare_sql(State); false -> {error, {unrecoverable_error, prepared_statement_invalid}} end. -prepare_sql(Prepares, PoolName) when is_map(Prepares) -> - prepare_sql(maps:to_list(Prepares), PoolName); -prepare_sql(Prepares, PoolName) -> - case do_prepare_sql(Prepares, PoolName) of +prepare_sql(#{query_templates := Templates, pool_name := PoolName}) -> + prepare_sql(maps:to_list(Templates), PoolName). + +prepare_sql(Templates, PoolName) -> + case do_prepare_sql(Templates, PoolName) of ok -> %% prepare for reconnect - ecpool:add_reconnect_callback(PoolName, {?MODULE, prepare_sql_to_conn, [Prepares]}), + ecpool:add_reconnect_callback(PoolName, {?MODULE, prepare_sql_to_conn, [Templates]}), ok; {error, R} -> {error, R} end. -do_prepare_sql(Prepares, PoolName) -> +do_prepare_sql(Templates, PoolName) -> Conns = [ begin @@ -328,33 +328,30 @@ do_prepare_sql(Prepares, PoolName) -> end || {_Name, Worker} <- ecpool:workers(PoolName) ], - prepare_sql_to_conn_list(Conns, Prepares). 
+ prepare_sql_to_conn_list(Conns, Templates). -prepare_sql_to_conn_list([], _PrepareList) -> +prepare_sql_to_conn_list([], _Templates) -> ok; -prepare_sql_to_conn_list([Conn | ConnList], PrepareList) -> - case prepare_sql_to_conn(Conn, PrepareList) of +prepare_sql_to_conn_list([Conn | ConnList], Templates) -> + case prepare_sql_to_conn(Conn, Templates) of ok -> - prepare_sql_to_conn_list(ConnList, PrepareList); + prepare_sql_to_conn_list(ConnList, Templates); {error, R} -> %% rollback - Fun = fun({Key, _}) -> - _ = unprepare_sql_to_conn(Conn, Key), - ok - end, - lists:foreach(Fun, PrepareList), + _ = [unprepare_sql_to_conn(Conn, Template) || Template <- Templates], {error, R} end. -prepare_sql_to_conn(Conn, []) when is_pid(Conn) -> ok; -prepare_sql_to_conn(Conn, [{Key, SQL} | PrepareList]) when is_pid(Conn) -> - LogMeta = #{msg => "mysql_prepare_statement", name => Key, prepare_sql => SQL}, +prepare_sql_to_conn(_Conn, []) -> + ok; +prepare_sql_to_conn(Conn, [{{Key, prepstmt}, {SQL, _RowTemplate}} | Rest]) -> + LogMeta = #{msg => "MySQL Prepare Statement", name => Key, prepare_sql => SQL}, ?SLOG(info, LogMeta), _ = unprepare_sql_to_conn(Conn, Key), case mysql:prepare(Conn, Key, SQL) of {ok, _Key} -> ?SLOG(info, LogMeta#{result => success}), - prepare_sql_to_conn(Conn, PrepareList); + prepare_sql_to_conn(Conn, Rest); {error, {1146, _, _} = Reason} -> %% Target table is not created ?tp(mysql_undefined_table, #{}), @@ -365,84 +362,85 @@ prepare_sql_to_conn(Conn, [{Key, SQL} | PrepareList]) when is_pid(Conn) -> % syntax failures. Retrying syntax failures is not very productive. ?SLOG(error, LogMeta#{result => failed, reason => Reason}), {error, Reason} - end. + end; +prepare_sql_to_conn(Conn, [{_Key, _Template} | Rest]) -> + prepare_sql_to_conn(Conn, Rest). -unprepare_sql_to_conn(Conn, PrepareSqlKey) -> - mysql:unprepare(Conn, PrepareSqlKey). 
+unprepare_sql_to_conn(Conn, {{Key, prepstmt}, _}) -> + mysql:unprepare(Conn, Key); +unprepare_sql_to_conn(Conn, Key) when is_atom(Key) -> + mysql:unprepare(Conn, Key); +unprepare_sql_to_conn(_Conn, _) -> + ok. parse_prepare_sql(Config) -> - SQL = - case maps:get(prepare_statement, Config, undefined) of - undefined -> - case maps:get(sql, Config, undefined) of - undefined -> #{}; - Template -> #{send_message => Template} - end; - Any -> - Any + Queries = + case Config of + #{prepare_statement := Qs} -> + Qs; + #{sql := Query} -> + #{send_message => Query}; + _ -> + #{} end, - parse_prepare_sql(maps:to_list(SQL), #{}, #{}, #{}, #{}). + Templates = maps:fold(fun parse_prepare_sql/3, #{}, Queries), + #{query_templates => Templates}. -parse_prepare_sql([{Key, H} | _] = L, Prepares, Tokens, BatchInserts, BatchTks) -> - {PrepareSQL, ParamsTokens} = emqx_placeholder:preproc_sql(H), - parse_batch_prepare_sql( - L, Prepares#{Key => PrepareSQL}, Tokens#{Key => ParamsTokens}, BatchInserts, BatchTks - ); -parse_prepare_sql([], Prepares, Tokens, BatchInserts, BatchTks) -> - #{ - prepare_statement => Prepares, - params_tokens => Tokens, - batch_inserts => BatchInserts, - batch_params_tokens => BatchTks - }. +parse_prepare_sql(Key, Query, Acc) -> + Template = emqx_connector_template_sql:parse_prepstmt(Query, #{parameters => '?'}), + AccNext = Acc#{{Key, prepstmt} => Template}, + parse_batch_sql(Key, Query, AccNext). 
-parse_batch_prepare_sql([{Key, H} | T], Prepares, Tokens, BatchInserts, BatchTks) -> - case emqx_utils_sql:get_statement_type(H) of - select -> - parse_prepare_sql(T, Prepares, Tokens, BatchInserts, BatchTks); +parse_batch_sql(Key, Query, Acc) -> + case emqx_connector_sql:get_statement_type(Query) of insert -> - case emqx_utils_sql:parse_insert(H) of - {ok, {InsertSQL, Params}} -> - ParamsTks = emqx_placeholder:preproc_tmpl(Params), - parse_prepare_sql( - T, - Prepares, - Tokens, - BatchInserts#{Key => InsertSQL}, - BatchTks#{Key => ParamsTks} - ); + case emqx_connector_sql:parse_insert(Query) of + {ok, {Insert, Params}} -> + RowTemplate = emqx_connector_template_sql:parse(Params), + Acc#{{Key, batch} => {Insert, RowTemplate}}; {error, Reason} -> - ?SLOG(error, #{msg => "split_sql_failed", sql => H, reason => Reason}), - parse_prepare_sql(T, Prepares, Tokens, BatchInserts, BatchTks) + ?SLOG(error, #{ + msg => "parse insert sql statement failed", + sql => Query, + reason => Reason + }), + Acc end; - Type when is_atom(Type) -> - ?SLOG(error, #{msg => "detect_sql_type_unsupported", sql => H, type => Type}), - parse_prepare_sql(T, Prepares, Tokens, BatchInserts, BatchTks); - {error, Reason} -> - ?SLOG(error, #{msg => "detect_sql_type_failed", sql => H, reason => Reason}), - parse_prepare_sql(T, Prepares, Tokens, BatchInserts, BatchTks) + select -> + Acc; + Otherwise -> + ?SLOG(error, #{ + msg => "invalid sql statement type", + sql => Query, + type => Otherwise + }), + Acc end. 
proc_sql_params(query, SQLOrKey, Params, _State) -> {SQLOrKey, Params}; proc_sql_params(prepared_query, SQLOrKey, Params, _State) -> {SQLOrKey, Params}; -proc_sql_params(TypeOrKey, SQLOrData, Params, #{params_tokens := ParamsTokens}) -> - case maps:get(TypeOrKey, ParamsTokens, undefined) of +proc_sql_params(TypeOrKey, SQLOrData, Params, #{query_templates := Templates}) -> + case maps:get({TypeOrKey, prepstmt}, Templates, undefined) of undefined -> {SQLOrData, Params}; - Tokens -> - {TypeOrKey, emqx_placeholder:proc_sql(Tokens, SQLOrData)} + {_InsertPart, RowTemplate} -> + % NOTE: ignoring errors here, missing variables are set to `null`. + {Row, _Errors} = emqx_connector_template_sql:render_prepstmt(RowTemplate, SQLOrData), + {TypeOrKey, Row} end. -on_batch_insert(InstId, BatchReqs, InsertPart, Tokens, State) -> - ValuesPart = lists:join($,, [ - emqx_placeholder:proc_param_str(Tokens, Msg, fun emqx_placeholder:quote_mysql/1) - || {_, Msg} <- BatchReqs - ]), - Query = [InsertPart, <<" values ">> | ValuesPart], +on_batch_insert(InstId, BatchReqs, {InsertPart, RowTemplate}, State) -> + Rows = [render_row(RowTemplate, Msg) || {_, Msg} <- BatchReqs], + Query = [InsertPart, <<" values ">> | lists:join($,, Rows)], on_sql_query(InstId, query, Query, no_params, default_timeout, State). +render_row(RowTemplate, Data) -> + % NOTE: ignoring errors here, missing variables are set to "NULL". + {Row, _Errors} = emqx_connector_template_sql:render(RowTemplate, Data, #{escaping => mysql}), + Row. + on_sql_query( InstId, SQLFunc, diff --git a/apps/emqx_postgresql/src/emqx_postgresql.erl b/apps/emqx_postgresql/src/emqx_postgresql.erl index dc6447536..71ba93b9b 100644 --- a/apps/emqx_postgresql/src/emqx_postgresql.erl +++ b/apps/emqx_postgresql/src/emqx_postgresql.erl @@ -52,15 +52,12 @@ default_port => ?PGSQL_DEFAULT_PORT }). --type prepares() :: #{atom() => binary()}. --type params_tokens() :: #{atom() => list()}. 
- +-type template() :: {unicode:chardata(), emqx_connector_template_sql:row_template()}. -type state() :: #{ pool_name := binary(), - prepare_sql := prepares(), - params_tokens := params_tokens(), - prepare_statement := epgsql:statement() + query_templates := #{binary() => template()}, + prepares := #{binary() => epgsql:statement()} | {error, _} }. %% FIXME: add `{error, sync_required}' to `epgsql:execute_batch' @@ -142,7 +139,7 @@ on_start( State = parse_prepare_sql(Config), case emqx_resource_pool:start(InstId, ?MODULE, Options ++ SslOpts) of ok -> - {ok, init_prepare(State#{pool_name => InstId, prepare_statement => #{}})}; + {ok, init_prepare(State#{pool_name => InstId, prepares => #{}})}; {error, Reason} -> ?tp( pgsql_connector_start_failed, @@ -189,55 +186,50 @@ pgsql_query_type(_) -> on_batch_query( InstId, - BatchReq, - #{pool_name := PoolName, params_tokens := Tokens, prepare_statement := Sts} = State + [{Key, _} = Request | _] = BatchReq, + #{pool_name := PoolName, query_templates := Templates, prepares := PrepStatements} = State ) -> - case BatchReq of - [{Key, _} = Request | _] -> - BinKey = to_bin(Key), - case maps:get(BinKey, Tokens, undefined) of - undefined -> - Log = #{ - connector => InstId, - first_request => Request, - state => State, - msg => "batch_prepare_not_implemented" - }, - ?SLOG(error, Log), - {error, {unrecoverable_error, batch_prepare_not_implemented}}; - TokenList -> - {_, Datas} = lists:unzip(BatchReq), - Datas2 = [emqx_placeholder:proc_sql(TokenList, Data) || Data <- Datas], - St = maps:get(BinKey, Sts), - case on_sql_query(InstId, PoolName, execute_batch, St, Datas2) of - {error, _Error} = Result -> - handle_result(Result); - {_Column, Results} -> - handle_batch_result(Results, 0) - end - end; - _ -> + BinKey = to_bin(Key), + case maps:get(BinKey, Templates, undefined) of + undefined -> Log = #{ connector => InstId, - request => BatchReq, + first_request => Request, state => State, - msg => "invalid_request" + msg => "batch prepare 
not implemented" }, ?SLOG(error, Log), - {error, {unrecoverable_error, invalid_request}} - end. + {error, {unrecoverable_error, batch_prepare_not_implemented}}; + {_Statement, RowTemplate} -> + PrepStatement = maps:get(BinKey, PrepStatements), + Rows = [render_prepare_sql_row(RowTemplate, Data) || {_Key, Data} <- BatchReq], + case on_sql_query(InstId, PoolName, execute_batch, PrepStatement, Rows) of + {error, _Error} = Result -> + handle_result(Result); + {_Column, Results} -> + handle_batch_result(Results, 0) + end + end; +on_batch_query(InstId, BatchReq, State) -> + ?SLOG(error, #{ + connector => InstId, + request => BatchReq, + state => State, + msg => "invalid request" + }), + {error, {unrecoverable_error, invalid_request}}. proc_sql_params(query, SQLOrKey, Params, _State) -> {SQLOrKey, Params}; proc_sql_params(prepared_query, SQLOrKey, Params, _State) -> {SQLOrKey, Params}; -proc_sql_params(TypeOrKey, SQLOrData, Params, #{params_tokens := ParamsTokens}) -> +proc_sql_params(TypeOrKey, SQLOrData, Params, #{query_templates := Templates}) -> Key = to_bin(TypeOrKey), - case maps:get(Key, ParamsTokens, undefined) of + case maps:get(Key, Templates, undefined) of undefined -> {SQLOrData, Params}; - Tokens -> - {Key, emqx_placeholder:proc_sql(Tokens, SQLOrData)} + {_Statement, RowTemplate} -> + {Key, render_prepare_sql_row(RowTemplate, SQLOrData)} end. 
on_sql_query(InstId, PoolName, Type, NameOrSQL, Data) -> @@ -297,9 +289,9 @@ on_get_status(_InstId, #{pool_name := PoolName} = State) -> {ok, NState} -> %% return new state with prepared statements {connected, NState}; - {error, {undefined_table, NState}} -> + {error, undefined_table} -> %% return new state indicating that we are connected but the target table is not created - {disconnected, NState, unhealthy_target}; + {disconnected, State, unhealthy_target}; {error, _Reason} -> %% do not log error, it is logged in prepare_sql_to_conn connecting @@ -314,8 +306,8 @@ do_get_status(Conn) -> do_check_prepares( #{ pool_name := PoolName, - prepare_sql := #{<<"send_message">> := SQL} - } = State + query_templates := #{<<"send_message">> := {SQL, _RowTemplate}} + } ) -> WorkerPids = [Worker || {_WorkerName, Worker} <- ecpool:workers(PoolName)], case validate_table_existence(WorkerPids, SQL) of @@ -324,19 +316,16 @@ do_check_prepares( {error, undefined_table} -> {error, {undefined_table, State}} end; -do_check_prepares(#{prepare_sql := Prepares}) when is_map(Prepares) -> +do_check_prepares(#{prepares := Prepares}) when is_map(Prepares) -> ok; -do_check_prepares(State = #{pool_name := PoolName, prepare_sql := {error, Prepares}}) -> +do_check_prepares(#{prepares := {error, _}} = State) -> %% retry to prepare - case prepare_sql(Prepares, PoolName) of - {ok, Sts} -> + case prepare_sql(State) of + {ok, PrepStatements} -> %% remove the error - {ok, State#{prepare_sql => Prepares, prepare_statement := Sts}}; - {error, undefined_table} -> - %% indicate the error - {error, {undefined_table, State#{prepare_sql => {error, Prepares}}}}; - Error -> - {error, Error} + {ok, State#{prepares := PrepStatements}}; + {error, Reason} -> + {error, Reason} end. -spec validate_table_existence([pid()], binary()) -> ok | {error, undefined_table}. @@ -426,69 +415,63 @@ conn_opts([_Opt | Opts], Acc) -> conn_opts(Opts, Acc). 
parse_prepare_sql(Config) -> - SQL = - case maps:get(prepare_statement, Config, undefined) of - undefined -> - case maps:get(sql, Config, undefined) of - undefined -> #{}; - Template -> #{<<"send_message">> => Template} - end; - Any -> - Any + Queries = + case Config of + #{prepare_statement := Qs} -> + Qs; + #{sql := Query} -> + #{<<"send_message">> => Query}; + #{} -> + #{} end, - parse_prepare_sql(maps:to_list(SQL), #{}, #{}). + Templates = maps:fold(fun parse_prepare_sql/3, #{}, Queries), + #{query_templates => Templates}. -parse_prepare_sql([{Key, H} | T], Prepares, Tokens) -> - {PrepareSQL, ParamsTokens} = emqx_placeholder:preproc_sql(H, '$n'), - parse_prepare_sql( - T, Prepares#{Key => PrepareSQL}, Tokens#{Key => ParamsTokens} - ); -parse_prepare_sql([], Prepares, Tokens) -> - #{ - prepare_sql => Prepares, - params_tokens => Tokens - }. +parse_prepare_sql(Key, Query, Acc) -> + Template = emqx_connector_template_sql:parse_prepstmt(Query, #{parameters => '$n'}), + Acc#{Key => Template}. -init_prepare(State = #{prepare_sql := Prepares, pool_name := PoolName}) -> - case maps:size(Prepares) of - 0 -> - State; - _ -> - case prepare_sql(Prepares, PoolName) of - {ok, Sts} -> - State#{prepare_statement := Sts}; - Error -> - LogMsg = - maps:merge( - #{msg => <<"postgresql_init_prepare_statement_failed">>}, - translate_to_log_context(Error) - ), - ?SLOG(error, LogMsg), - %% mark the prepare_sql as failed - State#{prepare_sql => {error, Prepares}} - end +render_prepare_sql_row(RowTemplate, Data) -> + % NOTE: ignoring errors here, missing variables will be replaced with `null`. + {Row, _Errors} = emqx_connector_template_sql:render_prepstmt(RowTemplate, Data), + Row. 
+ +init_prepare(State = #{query_templates := Templates}) when map_size(Templates) == 0 -> + State; +init_prepare(State = #{}) -> + case prepare_sql(State) of + {ok, PrepStatements} -> + State#{prepares => PrepStatements}; + Error -> + ?SLOG(error, maps:merge( + #{msg => <<"postgresql_init_prepare_statement_failed">>}, + translate_to_log_context(Error) + )), + %% mark the prepares failed + State#{prepares => Error} end. -prepare_sql(Prepares, PoolName) when is_map(Prepares) -> - prepare_sql(maps:to_list(Prepares), PoolName); -prepare_sql(Prepares, PoolName) -> - case do_prepare_sql(Prepares, PoolName) of +prepare_sql(#{query_templates := Templates, pool_name := PoolName}) -> + prepare_sql(maps:to_list(Templates), PoolName). + +prepare_sql(Templates, PoolName) -> + case do_prepare_sql(Templates, PoolName) of {ok, _Sts} = Ok -> %% prepare for reconnect - ecpool:add_reconnect_callback(PoolName, {?MODULE, prepare_sql_to_conn, [Prepares]}), + ecpool:add_reconnect_callback(PoolName, {?MODULE, prepare_sql_to_conn, [Templates]}), Ok; Error -> Error end. -do_prepare_sql(Prepares, PoolName) -> - do_prepare_sql(ecpool:workers(PoolName), Prepares, #{}). +do_prepare_sql(Templates, PoolName) -> + do_prepare_sql(ecpool:workers(PoolName), Templates, #{}). -do_prepare_sql([{_Name, Worker} | T], Prepares, _LastSts) -> +do_prepare_sql([{_Name, Worker} | Rest], Templates, _LastSts) -> {ok, Conn} = ecpool_worker:client(Worker), - case prepare_sql_to_conn(Conn, Prepares) of + case prepare_sql_to_conn(Conn, Templates) of {ok, Sts} -> - do_prepare_sql(T, Prepares, Sts); + do_prepare_sql(Rest, Templates, Sts); Error -> Error end; @@ -498,13 +481,14 @@ do_prepare_sql([], _Prepares, LastSts) -> prepare_sql_to_conn(Conn, Prepares) -> prepare_sql_to_conn(Conn, Prepares, #{}). 
-prepare_sql_to_conn(Conn, [], Statements) when is_pid(Conn) -> {ok, Statements}; -prepare_sql_to_conn(Conn, [{Key, SQL} | PrepareList], Statements) when is_pid(Conn) -> - LogMeta = #{msg => "postgresql_prepare_statement", name => Key, prepare_sql => SQL}, +prepare_sql_to_conn(Conn, [], Statements) when is_pid(Conn) -> + {ok, Statements}; +prepare_sql_to_conn(Conn, [{Key, {SQL, _RowTemplate}} | Rest], Statements) when is_pid(Conn) -> + LogMeta = #{msg => "PostgreSQL Prepare Statement", name => Key, sql => SQL}, ?SLOG(info, LogMeta), case epgsql:parse2(Conn, Key, SQL, []) of {ok, Statement} -> - prepare_sql_to_conn(Conn, PrepareList, Statements#{Key => Statement}); + prepare_sql_to_conn(Conn, Rest, Statements#{Key => Statement}); {error, {error, error, _, undefined_table, _, _} = Error} -> %% Target table is not created ?tp(pgsql_undefined_table, #{}), From 0538a77700d56e1da90e0127ce7de2f867402e23 Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Tue, 18 Apr 2023 15:21:35 +0300 Subject: [PATCH 050/155] feat(tpl): use `emqx_connector_template` in `emqx_authn`, `emqx_authz` This slightly changes semantics: now the attempt to create authenticator with illegal bindings in templates will fail, instead of treating them as literals. The runtime behaviour on the other hand should be the same. 
--- apps/emqx/include/emqx_placeholder.hrl | 80 +++++++++++-------- .../src/emqx_authn/emqx_authn_utils.erl | 80 +++++++++++-------- .../src/emqx_authz/emqx_authz_rule.erl | 18 ++--- .../src/emqx_authz/emqx_authz_utils.erl | 64 +++++++++------ .../test/emqx_authz/emqx_authz_rule_SUITE.erl | 6 +- apps/emqx_auth_http/src/emqx_authz_http.erl | 22 ++--- .../src/emqx_authz_mongodb.erl | 10 +-- apps/emqx_auth_mysql/src/emqx_authz_mysql.erl | 10 +-- .../src/emqx_authz_postgresql.erl | 10 +-- apps/emqx_auth_redis/src/emqx_authz_redis.erl | 10 +-- .../src/emqx_connector_template.erl | 8 +- 11 files changed, 178 insertions(+), 140 deletions(-) diff --git a/apps/emqx/include/emqx_placeholder.hrl b/apps/emqx/include/emqx_placeholder.hrl index 7b2ce6c6b..1db80c72d 100644 --- a/apps/emqx/include/emqx_placeholder.hrl +++ b/apps/emqx/include/emqx_placeholder.hrl @@ -19,67 +19,79 @@ -define(PH_VAR_THIS, <<"$_THIS_">>). --define(PH(Type), <<"${", Type/binary, "}">>). +-define(PH(Var), <<"${" Var "}">>). %% action: publish/subscribe --define(PH_ACTION, <<"${action}">>). +-define(VAR_ACTION, "action"). +-define(PH_ACTION, ?PH(?VAR_ACTION)). %% cert --define(PH_CERT_SUBJECT, <<"${cert_subject}">>). --define(PH_CERT_CN_NAME, <<"${cert_common_name}">>). +-define(VAR_CERT_SUBJECT, "cert_subject"). +-define(VAR_CERT_CN_NAME, "cert_common_name"). +-define(PH_CERT_SUBJECT, ?PH(?VAR_CERT_SUBJECT)). +-define(PH_CERT_CN_NAME, ?PH(?VAR_CERT_CN_NAME)). %% MQTT --define(PH_PASSWORD, <<"${password}">>). --define(PH_CLIENTID, <<"${clientid}">>). --define(PH_FROM_CLIENTID, <<"${from_clientid}">>). --define(PH_USERNAME, <<"${username}">>). --define(PH_FROM_USERNAME, <<"${from_username}">>). --define(PH_TOPIC, <<"${topic}">>). +-define(VAR_PASSWORD, "password"). +-define(VAR_CLIENTID, "clientid"). +-define(VAR_USERNAME, "username"). +-define(VAR_TOPIC, "topic"). +-define(PH_PASSWORD, ?PH(?VAR_PASSWORD)). +-define(PH_CLIENTID, ?PH(?VAR_CLIENTID)). +-define(PH_FROM_CLIENTID, ?PH("from_clientid")). 
+-define(PH_USERNAME, ?PH(?VAR_USERNAME)). +-define(PH_FROM_USERNAME, ?PH("from_username")). +-define(PH_TOPIC, ?PH(?VAR_TOPIC)). %% MQTT payload --define(PH_PAYLOAD, <<"${payload}">>). +-define(PH_PAYLOAD, ?PH("payload")). %% client IPAddress --define(PH_PEERHOST, <<"${peerhost}">>). +-define(VAR_PEERHOST, "peerhost"). +-define(PH_PEERHOST, ?PH(?VAR_PEERHOST)). %% ip & port --define(PH_HOST, <<"${host}">>). --define(PH_PORT, <<"${port}">>). +-define(PH_HOST, ?PH("host")). +-define(PH_PORT, ?PH("port")). %% Enumeration of message QoS 0,1,2 --define(PH_QOS, <<"${qos}">>). --define(PH_FLAGS, <<"${flags}">>). +-define(VAR_QOS, "qos"). +-define(PH_QOS, ?PH(?VAR_QOS)). +-define(PH_FLAGS, ?PH("flags")). %% Additional data related to process within the MQTT message --define(PH_HEADERS, <<"${headers}">>). +-define(PH_HEADERS, ?PH("headers")). %% protocol name --define(PH_PROTONAME, <<"${proto_name}">>). +-define(VAR_PROTONAME, "proto_name"). +-define(PH_PROTONAME, ?PH(?VAR_PROTONAME)). %% protocol version --define(PH_PROTOVER, <<"${proto_ver}">>). +-define(PH_PROTOVER, ?PH("proto_ver")). %% MQTT keepalive interval --define(PH_KEEPALIVE, <<"${keepalive}">>). +-define(PH_KEEPALIVE, ?PH("keepalive")). %% MQTT clean_start --define(PH_CLEAR_START, <<"${clean_start}">>). +-define(PH_CLEAR_START, ?PH("clean_start")). %% MQTT Session Expiration time --define(PH_EXPIRY_INTERVAL, <<"${expiry_interval}">>). +-define(PH_EXPIRY_INTERVAL, ?PH("expiry_interval")). %% Time when PUBLISH message reaches Broker (ms) --define(PH_PUBLISH_RECEIVED_AT, <<"${publish_received_at}">>). +-define(PH_PUBLISH_RECEIVED_AT, ?PH("publish_received_at")). %% Mountpoint for bridging messages --define(PH_MOUNTPOINT, <<"${mountpoint}">>). +-define(VAR_MOUNTPOINT, "mountpoint"). +-define(PH_MOUNTPOINT, ?PH(?VAR_MOUNTPOINT)). %% IPAddress and Port of terminal --define(PH_PEERNAME, <<"${peername}">>). +-define(PH_PEERNAME, ?PH("peername")). 
%% IPAddress and Port listened by emqx --define(PH_SOCKNAME, <<"${sockname}">>). +-define(PH_SOCKNAME, ?PH("sockname")). %% whether it is MQTT bridge connection --define(PH_IS_BRIDGE, <<"${is_bridge}">>). +-define(PH_IS_BRIDGE, ?PH("is_bridge")). %% Terminal connection completion time (s) --define(PH_CONNECTED_AT, <<"${connected_at}">>). +-define(PH_CONNECTED_AT, ?PH("connected_at")). %% Event trigger time(millisecond) --define(PH_TIMESTAMP, <<"${timestamp}">>). +-define(PH_TIMESTAMP, ?PH("timestamp")). %% Terminal disconnection completion time (s) --define(PH_DISCONNECTED_AT, <<"${disconnected_at}">>). +-define(PH_DISCONNECTED_AT, ?PH("disconnected_at")). --define(PH_NODE, <<"${node}">>). --define(PH_REASON, <<"${reason}">>). +-define(PH_NODE, ?PH("node")). +-define(PH_REASON, ?PH("reason")). --define(PH_ENDPOINT_NAME, <<"${endpoint_name}">>). --define(PH_RETAIN, <<"${retain}">>). +-define(PH_ENDPOINT_NAME, ?PH("endpoint_name")). +-define(VAR_RETAIN, "retain"). +-define(PH_RETAIN, ?PH(?VAR_RETAIN)). %% sync change these place holder with binary def. -define(PH_S_ACTION, "${action}"). diff --git a/apps/emqx_auth/src/emqx_authn/emqx_authn_utils.erl b/apps/emqx_auth/src/emqx_authn/emqx_authn_utils.erl index a9d672922..9be3e24d6 100644 --- a/apps/emqx_auth/src/emqx_authn/emqx_authn_utils.erl +++ b/apps/emqx_auth/src/emqx_authn/emqx_authn_utils.erl @@ -45,12 +45,12 @@ ]). -define(AUTHN_PLACEHOLDERS, [ - ?PH_USERNAME, - ?PH_CLIENTID, - ?PH_PASSWORD, - ?PH_PEERHOST, - ?PH_CERT_SUBJECT, - ?PH_CERT_CN_NAME + <>, + <>, + <>, + <>, + <>, + <> ]). -define(DEFAULT_RESOURCE_OPTS, #{ @@ -107,48 +107,62 @@ check_password_from_selected_map(Algorithm, Selected, Password) -> end. parse_deep(Template) -> - emqx_placeholder:preproc_tmpl_deep(Template, #{placeholders => ?AUTHN_PLACEHOLDERS}). + Result = emqx_connector_template:parse_deep(Template), + ok = emqx_connector_template:validate(?AUTHN_PLACEHOLDERS, Result), + Result. 
parse_str(Template) -> - emqx_placeholder:preproc_tmpl(Template, #{placeholders => ?AUTHN_PLACEHOLDERS}). + Result = emqx_connector_template:parse(Template), + ok = emqx_connector_template:validate(?AUTHN_PLACEHOLDERS, Result), + Result. parse_sql(Template, ReplaceWith) -> - emqx_placeholder:preproc_sql( + {Statement, Result} = emqx_connector_template_sql:parse_prepstmt( Template, - #{ - replace_with => ReplaceWith, - placeholders => ?AUTHN_PLACEHOLDERS, - strip_double_quote => true - } - ). + #{parameters => ReplaceWith, strip_double_quote => true} + ), + ok = emqx_connector_template:validate(?AUTHN_PLACEHOLDERS, Result), + {Statement, Result}. render_deep(Template, Credential) -> - emqx_placeholder:proc_tmpl_deep( + % NOTE + % Ignoring errors here, undefined bindings will be replaced with empty string. + {Term, _Errors} = emqx_connector_template:render( Template, mapping_credential(Credential), - #{return => full_binary, var_trans => fun handle_var/2} - ). + #{var_trans => fun handle_var/2} + ), + Term. render_str(Template, Credential) -> - emqx_placeholder:proc_tmpl( + % NOTE + % Ignoring errors here, undefined bindings will be replaced with empty string. + {String, _Errors} = emqx_connector_template:render( Template, mapping_credential(Credential), - #{return => full_binary, var_trans => fun handle_var/2} - ). + #{var_trans => fun handle_var/2} + ), + unicode:characters_to_binary(String). render_urlencoded_str(Template, Credential) -> - emqx_placeholder:proc_tmpl( + % NOTE + % Ignoring errors here, undefined bindings will be replaced with empty string. + {String, _Errors} = emqx_connector_template:render( Template, mapping_credential(Credential), - #{return => full_binary, var_trans => fun urlencode_var/2} - ). + #{var_trans => fun urlencode_var/2} + ), + unicode:characters_to_binary(String). render_sql_params(ParamList, Credential) -> - emqx_placeholder:proc_tmpl( + % NOTE + % Ignoring errors here, undefined bindings will be replaced with empty string. 
+ {Row, _Errors} = emqx_connector_template:render( ParamList, mapping_credential(Credential), - #{return => rawlist, var_trans => fun handle_sql_var/2} - ). + #{var_trans => fun handle_sql_var/2} + ), + Row. is_superuser(#{<<"is_superuser">> := Value}) -> #{is_superuser => to_bool(Value)}; @@ -272,19 +286,19 @@ without_password(Credential, [Name | Rest]) -> urlencode_var(Var, Value) -> emqx_http_lib:uri_encode(handle_var(Var, Value)). -handle_var(_Name, undefined) -> +handle_var(_, undefined) -> <<>>; handle_var([<<"peerhost">>], PeerHost) -> - emqx_placeholder:bin(inet:ntoa(PeerHost)); + emqx_connector_template:to_string(inet:ntoa(PeerHost)); handle_var(_, Value) -> - emqx_placeholder:bin(Value). + emqx_connector_template:to_string(Value). -handle_sql_var(_Name, undefined) -> +handle_sql_var(_, undefined) -> <<>>; handle_sql_var([<<"peerhost">>], PeerHost) -> - emqx_placeholder:bin(inet:ntoa(PeerHost)); + emqx_connector_sql:to_sql_value(inet:ntoa(PeerHost)); handle_sql_var(_, Value) -> - emqx_placeholder:sql_data(Value). + emqx_connector_sql:to_sql_value(Value). 
mapping_credential(C = #{cn := CN, dn := DN}) -> C#{cert_common_name => CN, cert_subject => DN}; diff --git a/apps/emqx_auth/src/emqx_authz/emqx_authz_rule.erl b/apps/emqx_auth/src/emqx_authz/emqx_authz_rule.erl index 6e13cac91..9cf79ba88 100644 --- a/apps/emqx_auth/src/emqx_authz/emqx_authz_rule.erl +++ b/apps/emqx_auth/src/emqx_authz/emqx_authz_rule.erl @@ -183,19 +183,15 @@ compile_topic(<<"eq ", Topic/binary>>) -> compile_topic({eq, Topic}) -> {eq, emqx_topic:words(bin(Topic))}; compile_topic(Topic) -> - TopicBin = bin(Topic), - case - emqx_placeholder:preproc_tmpl( - TopicBin, - #{placeholders => [?PH_USERNAME, ?PH_CLIENTID]} - ) - of - [{str, _}] -> emqx_topic:words(TopicBin); - Tokens -> {pattern, Tokens} + Template = emqx_connector_template:parse(Topic), + ok = emqx_connector_template:validate([<>, <>], Template), + case emqx_connector_template:trivial(Template) of + true -> emqx_topic:words(bin(Topic)); + false -> {pattern, Template} end. bin(L) when is_list(L) -> - list_to_binary(L); + unicode:characters_to_binary(L); bin(B) when is_binary(B) -> B. @@ -307,7 +303,7 @@ match_who(_, _) -> match_topics(_ClientInfo, _Topic, []) -> false; match_topics(ClientInfo, Topic, [{pattern, PatternFilter} | Filters]) -> - TopicFilter = emqx_placeholder:proc_tmpl(PatternFilter, ClientInfo), + TopicFilter = bin(emqx_connector_template:render_strict(PatternFilter, ClientInfo)), match_topic(emqx_topic:words(Topic), emqx_topic:words(TopicFilter)) orelse match_topics(ClientInfo, Topic, Filters); match_topics(ClientInfo, Topic, [TopicFilter | Filters]) -> diff --git a/apps/emqx_auth/src/emqx_authz/emqx_authz_utils.erl b/apps/emqx_auth/src/emqx_authz/emqx_authz_utils.erl index 3a0d4f1a1..e5aeab21b 100644 --- a/apps/emqx_auth/src/emqx_authz/emqx_authz_utils.erl +++ b/apps/emqx_auth/src/emqx_authz/emqx_authz_utils.erl @@ -108,48 +108,62 @@ update_config(Path, ConfigRequest) -> }). 
parse_deep(Template, PlaceHolders) -> - emqx_placeholder:preproc_tmpl_deep(Template, #{placeholders => PlaceHolders}). + Result = emqx_connector_template:parse_deep(Template), + ok = emqx_connector_template:validate(PlaceHolders, Result), + Result. parse_str(Template, PlaceHolders) -> - emqx_placeholder:preproc_tmpl(Template, #{placeholders => PlaceHolders}). + Result = emqx_connector_template:parse(Template), + ok = emqx_connector_template:validate(PlaceHolders, Result), + Result. parse_sql(Template, ReplaceWith, PlaceHolders) -> - emqx_placeholder:preproc_sql( + {Statement, Result} = emqx_connector_template_sql:parse_prepstmt( Template, - #{ - replace_with => ReplaceWith, - placeholders => PlaceHolders, - strip_double_quote => true - } - ). + #{parameters => ReplaceWith, strip_double_quote => true} + ), + ok = emqx_connector_template:validate(PlaceHolders, Result), + {Statement, Result}. render_deep(Template, Values) -> - emqx_placeholder:proc_tmpl_deep( + % NOTE + % Ignoring errors here, undefined bindings will be replaced with empty string. + {Term, _Errors} = emqx_connector_template:render( Template, client_vars(Values), - #{return => full_binary, var_trans => fun handle_var/2} - ). + #{var_trans => fun handle_var/2} + ), + Term. render_str(Template, Values) -> - emqx_placeholder:proc_tmpl( + % NOTE + % Ignoring errors here, undefined bindings will be replaced with empty string. + {String, _Errors} = emqx_connector_template:render( Template, client_vars(Values), - #{return => full_binary, var_trans => fun handle_var/2} - ). + #{var_trans => fun handle_var/2} + ), + unicode:characters_to_binary(String). render_urlencoded_str(Template, Values) -> - emqx_placeholder:proc_tmpl( + % NOTE + % Ignoring errors here, undefined bindings will be replaced with empty string. + {String, _Errors} = emqx_connector_template:render( Template, client_vars(Values), - #{return => full_binary, var_trans => fun urlencode_var/2} - ). 
+ #{var_trans => fun urlencode_var/2} + ), + unicode:characters_to_binary(String). render_sql_params(ParamList, Values) -> - emqx_placeholder:proc_tmpl( + % NOTE + % Ignoring errors here, undefined bindings will be replaced with empty string. + {Row, _Errors} = emqx_connector_template:render( ParamList, client_vars(Values), - #{return => rawlist, var_trans => fun handle_sql_var/2} - ). + #{var_trans => fun handle_sql_var/2} + ), + Row. -spec parse_http_resp_body(binary(), binary()) -> allow | deny | ignore | error. parse_http_resp_body(<<"application/x-www-form-urlencoded", _/binary>>, Body) -> @@ -218,19 +232,19 @@ convert_client_var(Other) -> Other. urlencode_var(Var, Value) -> emqx_http_lib:uri_encode(handle_var(Var, Value)). -handle_var(_Name, undefined) -> +handle_var(_, undefined) -> <<>>; handle_var([<<"peerhost">>], IpAddr) -> inet_parse:ntoa(IpAddr); handle_var(_Name, Value) -> - emqx_placeholder:bin(Value). + emqx_connector_template:to_string(Value). -handle_sql_var(_Name, undefined) -> +handle_sql_var(_, undefined) -> <<>>; handle_sql_var([<<"peerhost">>], IpAddr) -> inet_parse:ntoa(IpAddr); handle_sql_var(_Name, Value) -> - emqx_placeholder:sql_data(Value). + emqx_connector_sql:to_sql_value(Value). bin(A) when is_atom(A) -> atom_to_binary(A, utf8); bin(L) when is_list(L) -> list_to_binary(L); diff --git a/apps/emqx_auth/test/emqx_authz/emqx_authz_rule_SUITE.erl b/apps/emqx_auth/test/emqx_authz/emqx_authz_rule_SUITE.erl index b34e4fb00..bca21cd8d 100644 --- a/apps/emqx_auth/test/emqx_authz/emqx_authz_rule_SUITE.erl +++ b/apps/emqx_auth/test/emqx_authz/emqx_authz_rule_SUITE.erl @@ -67,6 +67,10 @@ set_special_configs(_App) -> ok. t_compile(_) -> + % NOTE + % Some of the following testcase are relying on the internal representation of + % `emqx_connector_template:t()`. If the internal representation is changed, these + % testcases may fail. 
?assertEqual({deny, all, all, [['#']]}, emqx_authz_rule:compile({deny, all})), ?assertEqual( @@ -116,7 +120,7 @@ t_compile(_) -> ?assertEqual( {allow, {username, {eq, <<"test">>}}, publish, [ - {pattern, [{str, <<"t/foo">>}, {var, [<<"username">>]}, {str, <<"boo">>}]} + {pattern, [<<"t/foo">>, {var, [<<"username">>]}, <<"boo">>]} ]}, emqx_authz_rule:compile({allow, {username, "test"}, publish, ["t/foo${username}boo"]}) ), diff --git a/apps/emqx_auth_http/src/emqx_authz_http.erl b/apps/emqx_auth_http/src/emqx_authz_http.erl index ed7051bb6..2ab76f305 100644 --- a/apps/emqx_auth_http/src/emqx_authz_http.erl +++ b/apps/emqx_auth_http/src/emqx_authz_http.erl @@ -39,20 +39,20 @@ -endif. -define(PLACEHOLDERS, [ - ?PH_USERNAME, - ?PH_CLIENTID, - ?PH_PEERHOST, - ?PH_PROTONAME, - ?PH_MOUNTPOINT, - ?PH_TOPIC, - ?PH_ACTION, - ?PH_CERT_SUBJECT, - ?PH_CERT_CN_NAME + <>, + <>, + <>, + <>, + <>, + <>, + <>, + <>, + <> ]). -define(PLACEHOLDERS_FOR_RICH_ACTIONS, [ - ?PH_QOS, - ?PH_RETAIN + <>, + <> ]). description() -> diff --git a/apps/emqx_auth_mongodb/src/emqx_authz_mongodb.erl b/apps/emqx_auth_mongodb/src/emqx_authz_mongodb.erl index 3b235ad2c..97a5fa3a6 100644 --- a/apps/emqx_auth_mongodb/src/emqx_authz_mongodb.erl +++ b/apps/emqx_auth_mongodb/src/emqx_authz_mongodb.erl @@ -36,11 +36,11 @@ -endif. -define(PLACEHOLDERS, [ - ?PH_USERNAME, - ?PH_CLIENTID, - ?PH_PEERHOST, - ?PH_CERT_CN_NAME, - ?PH_CERT_SUBJECT + <>, + <>, + <>, + <>, + <> ]). description() -> diff --git a/apps/emqx_auth_mysql/src/emqx_authz_mysql.erl b/apps/emqx_auth_mysql/src/emqx_authz_mysql.erl index 4ca71e332..e87d2afa2 100644 --- a/apps/emqx_auth_mysql/src/emqx_authz_mysql.erl +++ b/apps/emqx_auth_mysql/src/emqx_authz_mysql.erl @@ -38,11 +38,11 @@ -endif. -define(PLACEHOLDERS, [ - ?PH_USERNAME, - ?PH_CLIENTID, - ?PH_PEERHOST, - ?PH_CERT_CN_NAME, - ?PH_CERT_SUBJECT + <>, + <>, + <>, + <>, + <> ]). 
description() -> diff --git a/apps/emqx_auth_postgresql/src/emqx_authz_postgresql.erl b/apps/emqx_auth_postgresql/src/emqx_authz_postgresql.erl index b930f77e4..645fff293 100644 --- a/apps/emqx_auth_postgresql/src/emqx_authz_postgresql.erl +++ b/apps/emqx_auth_postgresql/src/emqx_authz_postgresql.erl @@ -38,11 +38,11 @@ -endif. -define(PLACEHOLDERS, [ - ?PH_USERNAME, - ?PH_CLIENTID, - ?PH_PEERHOST, - ?PH_CERT_CN_NAME, - ?PH_CERT_SUBJECT + <>, + <>, + <>, + <>, + <> ]). description() -> diff --git a/apps/emqx_auth_redis/src/emqx_authz_redis.erl b/apps/emqx_auth_redis/src/emqx_authz_redis.erl index 9b69f508a..7ac893da1 100644 --- a/apps/emqx_auth_redis/src/emqx_authz_redis.erl +++ b/apps/emqx_auth_redis/src/emqx_authz_redis.erl @@ -36,11 +36,11 @@ -endif. -define(PLACEHOLDERS, [ - ?PH_CERT_CN_NAME, - ?PH_CERT_SUBJECT, - ?PH_PEERHOST, - ?PH_CLIENTID, - ?PH_USERNAME + <>, + <>, + <>, + <>, + <> ]). description() -> diff --git a/apps/emqx_connector/src/emqx_connector_template.erl b/apps/emqx_connector/src/emqx_connector_template.erl index c346d4289..4f583573c 100644 --- a/apps/emqx_connector/src/emqx_connector_template.erl +++ b/apps/emqx_connector/src/emqx_connector_template.erl @@ -153,7 +153,7 @@ trivial(Template) -> unparse({'$tpl', Template}) -> unparse_deep(Template); unparse(Template) -> - lists:map(fun unparse_part/1, Template). + unicode:characters_to_list(lists:map(fun unparse_part/1, Template)). unparse_part({var, Name}) -> render_placeholder(Name); @@ -222,7 +222,7 @@ render_strict(Template, Bindings, Opts) -> {String, []} -> String; {_, Errors = [_ | _]} -> - error(Errors, [unicode:characters_to_list(unparse(Template)), Bindings]) + error(Errors, [unparse(Template), Bindings]) end. %% @doc Parse an arbitrary Erlang term into a "deep" template. @@ -306,9 +306,7 @@ unparse_deep(Term) -> -spec lookup_var(var(), bindings()) -> {ok, binding()} | {error, undefined}. 
-lookup_var(?PH_VAR_THIS, Value) -> - {ok, Value}; -lookup_var([], Value) -> +lookup_var(Var, Value) when Var == ?PH_VAR_THIS orelse Var == [] -> {ok, Value}; lookup_var([Prop | Rest], Bindings) -> case lookup(Prop, Bindings) of From e1bca5844f40cc6adf4ecd6a4ec56f964e607185 Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Tue, 18 Apr 2023 17:07:23 +0300 Subject: [PATCH 051/155] feat(tpl): use `emqx_connector_template` in `emqx_prometheus` app --- apps/emqx_prometheus/src/emqx_prometheus.erl | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/apps/emqx_prometheus/src/emqx_prometheus.erl b/apps/emqx_prometheus/src/emqx_prometheus.erl index e9030d3ed..fa9a39cc6 100644 --- a/apps/emqx_prometheus/src/emqx_prometheus.erl +++ b/apps/emqx_prometheus/src/emqx_prometheus.erl @@ -24,7 +24,6 @@ -include("emqx_prometheus.hrl"). --include_lib("prometheus/include/prometheus.hrl"). -include_lib("prometheus/include/prometheus_model.hrl"). -include_lib("emqx/include/logger.hrl"). 
@@ -114,16 +113,12 @@ handle_info(_Msg, State) -> push_to_push_gateway(Uri, Headers, JobName) when is_list(Headers) -> [Name, Ip] = string:tokens(atom_to_list(node()), "@"), - JobName1 = emqx_placeholder:preproc_tmpl(JobName), - JobName2 = binary_to_list( - emqx_placeholder:proc_tmpl( - JobName1, - #{<<"name">> => Name, <<"host">> => Ip} - ) + JobName1 = emqx_connector_template:render_strict( + emqx_connector_template:parse(JobName), + #{<<"name">> => Name, <<"host">> => Ip} ), - - Url = lists:concat([Uri, "/metrics/job/", JobName2]), Data = prometheus_text_format:format(), + Url = lists:concat([Uri, "/metrics/job/", unicode:characters_to_list(JobName1)]), case httpc:request(post, {Url, Headers, "text/plain", Data}, ?HTTP_OPTIONS, []) of {ok, {{"HTTP/1.1", 200, _}, _RespHeaders, _RespBody}} -> ok; From b812f9af5a481f8927fe6f25404eec143a73f06b Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Tue, 18 Apr 2023 17:42:39 +0300 Subject: [PATCH 052/155] feat(tpl): use `emqx_connector_template` in `emqx_rule_engine` app --- .../src/emqx_connector_template.erl | 1 + .../src/emqx_rule_actions.erl | 103 +++++++++--------- .../test/emqx_rule_engine_SUITE.erl | 5 +- 3 files changed, 57 insertions(+), 52 deletions(-) diff --git a/apps/emqx_connector/src/emqx_connector_template.erl b/apps/emqx_connector/src/emqx_connector_template.erl index 4f583573c..bb26edec1 100644 --- a/apps/emqx_connector/src/emqx_connector_template.erl +++ b/apps/emqx_connector/src/emqx_connector_template.erl @@ -30,6 +30,7 @@ -export([render_strict/2]). -export([render_strict/3]). +-export([lookup_var/2]). -export([to_string/1]). -export_type([t/0]). 
diff --git a/apps/emqx_rule_engine/src/emqx_rule_actions.erl b/apps/emqx_rule_engine/src/emqx_rule_actions.erl index 276f8d0e0..bb9966b4a 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_actions.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_actions.erl @@ -65,23 +65,18 @@ pre_process_action_args( qos := QoS, retain := Retain, payload := Payload, - mqtt_properties := MQTTPropertiesTemplate0, - user_properties := UserPropertiesTemplate + mqtt_properties := MQTTProperties, + user_properties := UserProperties } = Args ) -> - MQTTPropertiesTemplate = - maps:map( - fun(_Key, V) -> emqx_placeholder:preproc_tmpl(V) end, - MQTTPropertiesTemplate0 - ), Args#{ preprocessed_tmpl => #{ - topic => emqx_placeholder:preproc_tmpl(Topic), - qos => preproc_vars(QoS), - retain => preproc_vars(Retain), - payload => emqx_placeholder:preproc_tmpl(Payload), - mqtt_properties => MQTTPropertiesTemplate, - user_properties => preproc_user_properties(UserPropertiesTemplate) + topic => emqx_connector_template:parse(Topic), + qos => parse_vars(QoS), + retain => parse_vars(Retain), + payload => parse_payload(Payload), + mqtt_properties => parse_mqtt_properties(MQTTProperties), + user_properties => parse_user_properties(UserProperties) } }; pre_process_action_args(_, Args) -> @@ -114,25 +109,27 @@ republish( #{metadata := #{rule_id := RuleId}} = Env, #{ preprocessed_tmpl := #{ - qos := QoSTks, - retain := RetainTks, - topic := TopicTks, - payload := PayloadTks, + qos := QoSTemplate, + retain := RetainTemplate, + topic := TopicTemplate, + payload := PayloadTemplate, mqtt_properties := MQTTPropertiesTemplate, - user_properties := UserPropertiesTks + user_properties := UserPropertiesTemplate } } ) -> - Topic = emqx_placeholder:proc_tmpl(TopicTks, Selected), - Payload = format_msg(PayloadTks, Selected), - QoS = replace_simple_var(QoSTks, Selected, 0), - Retain = replace_simple_var(RetainTks, Selected, false), + Topic = unicode:characters_to_binary( + 
emqx_connector_template:render_strict(TopicTemplate, Selected) + ), + Payload = emqx_connector_template:render_strict(PayloadTemplate, Selected), + QoS = render_simple_var(QoSTemplate, Selected, 0), + Retain = render_simple_var(RetainTemplate, Selected, false), %% 'flags' is set for message re-publishes or message related %% events such as message.acked and message.dropped Flags0 = maps:get(flags, Env, #{}), Flags = Flags0#{retain => Retain}, - PubProps0 = format_pub_props(UserPropertiesTks, Selected, Env), - MQTTProps = format_mqtt_properties(MQTTPropertiesTemplate, Selected, Env), + PubProps0 = render_pub_props(UserPropertiesTemplate, Selected, Env), + MQTTProps = render_mqtt_properties(MQTTPropertiesTemplate, Selected, Env), PubProps = maps:merge(PubProps0, MQTTProps), ?TRACE( "RULE", @@ -203,58 +200,66 @@ safe_publish(RuleId, Topic, QoS, Flags, Payload, PubProps) -> _ = emqx_broker:safe_publish(Msg), emqx_metrics:inc_msg(Msg). -preproc_vars(Data) when is_binary(Data) -> - emqx_placeholder:preproc_tmpl(Data); -preproc_vars(Data) -> - Data. +parse_vars(Data) when is_binary(Data) -> + emqx_connector_template:parse(Data); +parse_vars(Data) -> + {const, Data}. -preproc_user_properties(<<"${pub_props.'User-Property'}">>) -> +parse_mqtt_properties(MQTTPropertiesTemplate) -> + maps:map( + fun(_Key, V) -> emqx_connector_template:parse(V) end, + MQTTPropertiesTemplate + ). + +parse_user_properties(<<"${pub_props.'User-Property'}">>) -> %% keep the original %% avoid processing this special variable because %% we do not want to force users to select the value %% the value will be taken from Env.pub_props directly ?ORIGINAL_USER_PROPERTIES; -preproc_user_properties(<<"${", _/binary>> = V) -> +parse_user_properties(<<"${", _/binary>> = V) -> %% use a variable - emqx_placeholder:preproc_tmpl(V); -preproc_user_properties(_) -> + emqx_connector_template:parse(V); +parse_user_properties(_) -> %% invalid, discard undefined. 
-replace_simple_var(Tokens, Data, Default) when is_list(Tokens) -> - [Var] = emqx_placeholder:proc_tmpl(Tokens, Data, #{return => rawlist}), - case Var of +render_simple_var([{var, Name}], Data, Default) -> + case emqx_connector_template:lookup_var(Name, Data) of + {ok, Var} -> Var; %% cannot find the variable from Data - undefined -> Default; - _ -> Var + {error, _} -> Default end; -replace_simple_var(Val, _Data, _Default) -> +render_simple_var({const, Val}, _Data, _Default) -> Val. -format_msg([], Selected) -> - emqx_utils_json:encode(Selected); -format_msg(Tokens, Selected) -> - emqx_placeholder:proc_tmpl(Tokens, Selected). +parse_payload(Payload) -> + case string:is_empty(Payload) of + false -> emqx_connector_template:parse(Payload); + true -> emqx_connector_template:parse("${.}") + end. -format_pub_props(UserPropertiesTks, Selected, Env) -> +render_pub_props(UserPropertiesTemplate, Selected, Env) -> UserProperties = - case UserPropertiesTks of + case UserPropertiesTemplate of ?ORIGINAL_USER_PROPERTIES -> maps:get('User-Property', maps:get(pub_props, Env, #{}), #{}); undefined -> #{}; _ -> - replace_simple_var(UserPropertiesTks, Selected, #{}) + render_simple_var(UserPropertiesTemplate, Selected, #{}) end, #{'User-Property' => UserProperties}. -format_mqtt_properties(MQTTPropertiesTemplate, Selected, Env) -> +render_mqtt_properties(MQTTPropertiesTemplate, Selected, Env) -> #{metadata := #{rule_id := RuleId}} = Env, - MQTTProperties0 = + MQTTProperties = maps:fold( fun(K, Template, Acc) -> try - V = emqx_placeholder:proc_tmpl(Template, Selected), + V = unicode:characters_to_binary( + emqx_connector_template:render_strict(Template, Selected) + ), Acc#{K => V} catch Kind:Error -> @@ -275,7 +280,7 @@ format_mqtt_properties(MQTTPropertiesTemplate, Selected, Env) -> #{}, MQTTPropertiesTemplate ), - coerce_properties_values(MQTTProperties0, Env). + coerce_properties_values(MQTTProperties, Env). 
ensure_int(B) when is_binary(B) -> try diff --git a/apps/emqx_rule_engine/test/emqx_rule_engine_SUITE.erl b/apps/emqx_rule_engine/test/emqx_rule_engine_SUITE.erl index 00ca68264..fcb04f9b3 100644 --- a/apps/emqx_rule_engine/test/emqx_rule_engine_SUITE.erl +++ b/apps/emqx_rule_engine/test/emqx_rule_engine_SUITE.erl @@ -1364,14 +1364,13 @@ t_sqlselect_inject_props(_Config) -> actions => [Repub] } ), - Props = user_properties(#{<<"inject_key">> => <<"inject_val">>}), {ok, Client} = emqtt:start_link([{username, <<"emqx">>}, {proto_ver, v5}]), {ok, _} = emqtt:connect(Client), {ok, _, _} = emqtt:subscribe(Client, <<"t2">>, 0), emqtt:publish(Client, <<"t1">>, #{}, <<"{\"x\":1}">>, [{qos, 0}]), receive - {publish, #{topic := T, payload := Payload, properties := Props2}} -> - ?assertEqual(Props, Props2), + {publish, #{topic := T, payload := Payload, properties := Props}} -> + ?assertEqual(user_properties(#{<<"inject_key">> => <<"inject_val">>}), Props), ?assertEqual(<<"t2">>, T), ?assertEqual(<<"{\"x\":1}">>, Payload) after 2000 -> From 49f5325c6768eae83d0e6e398a05e74328b234a8 Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Wed, 26 Apr 2023 22:45:24 +0300 Subject: [PATCH 053/155] feat(tpl): unify validations / errors var representations --- .../src/emqx_authz/emqx_authz_rule.erl | 2 +- .../test/emqx_authz/emqx_authz_rule_SUITE.erl | 10 ++-- apps/emqx_auth_http/src/emqx_authz_http.erl | 18 ++++---- .../src/emqx_authz_mongodb.erl | 10 ++-- apps/emqx_auth_mysql/src/emqx_authz_mysql.erl | 10 ++-- .../src/emqx_authz_postgresql.erl | 10 ++-- apps/emqx_auth_redis/src/emqx_authz_redis.erl | 10 ++-- .../src/emqx_connector_template.erl | 46 +++++++++---------- .../src/emqx_connector_template_sql.erl | 6 +-- .../test/emqx_connector_template_SUITE.erl | 40 ++++++++-------- .../src/emqx_rule_actions.erl | 4 +- 11 files changed, 81 insertions(+), 85 deletions(-) diff --git a/apps/emqx_auth/src/emqx_authz/emqx_authz_rule.erl b/apps/emqx_auth/src/emqx_authz/emqx_authz_rule.erl index 
9cf79ba88..6f5369aec 100644 --- a/apps/emqx_auth/src/emqx_authz/emqx_authz_rule.erl +++ b/apps/emqx_auth/src/emqx_authz/emqx_authz_rule.erl @@ -184,7 +184,7 @@ compile_topic({eq, Topic}) -> {eq, emqx_topic:words(bin(Topic))}; compile_topic(Topic) -> Template = emqx_connector_template:parse(Topic), - ok = emqx_connector_template:validate([<>, <>], Template), + ok = emqx_connector_template:validate([?VAR_USERNAME, ?VAR_CLIENTID], Template), case emqx_connector_template:trivial(Template) of true -> emqx_topic:words(bin(Topic)); false -> {pattern, Template} diff --git a/apps/emqx_auth/test/emqx_authz/emqx_authz_rule_SUITE.erl b/apps/emqx_auth/test/emqx_authz/emqx_authz_rule_SUITE.erl index bca21cd8d..5031daff6 100644 --- a/apps/emqx_auth/test/emqx_authz/emqx_authz_rule_SUITE.erl +++ b/apps/emqx_auth/test/emqx_authz/emqx_authz_rule_SUITE.erl @@ -78,13 +78,13 @@ t_compile(_) -> emqx_authz_rule:compile({allow, {ipaddr, "127.0.0.1"}, all, [{eq, "#"}, {eq, "+"}]}) ), - ?assertEqual( + ?assertMatch( {allow, {ipaddrs, [ {{127, 0, 0, 1}, {127, 0, 0, 1}, 32}, {{192, 168, 1, 0}, {192, 168, 1, 255}, 24} ]}, - subscribe, [{pattern, [{var, [<<"clientid">>]}]}]}, + subscribe, [{pattern, [{var, "clientid", [_]}]}]}, emqx_authz_rule:compile( {allow, {ipaddrs, ["127.0.0.1", "192.168.1.0/24"]}, subscribe, [?PH_S_CLIENTID]} ) @@ -106,7 +106,7 @@ t_compile(_) -> {clientid, {re_pattern, _, _, _, _}} ]}, publish, [ - {pattern, [{var, [<<"username">>]}]}, {pattern, [{var, [<<"clientid">>]}]} + {pattern, [{var, "username", [_]}]}, {pattern, [{var, "clientid", [_]}]} ]}, emqx_authz_rule:compile( {allow, @@ -118,9 +118,9 @@ t_compile(_) -> ) ), - ?assertEqual( + ?assertMatch( {allow, {username, {eq, <<"test">>}}, publish, [ - {pattern, [<<"t/foo">>, {var, [<<"username">>]}, <<"boo">>]} + {pattern, [<<"t/foo">>, {var, "username", [_]}, <<"boo">>]} ]}, emqx_authz_rule:compile({allow, {username, "test"}, publish, ["t/foo${username}boo"]}) ), diff --git a/apps/emqx_auth_http/src/emqx_authz_http.erl 
b/apps/emqx_auth_http/src/emqx_authz_http.erl index 2ab76f305..bbb2bf9b5 100644 --- a/apps/emqx_auth_http/src/emqx_authz_http.erl +++ b/apps/emqx_auth_http/src/emqx_authz_http.erl @@ -39,15 +39,15 @@ -endif. -define(PLACEHOLDERS, [ - <>, - <>, - <>, - <>, - <>, - <>, - <>, - <>, - <> + ?VAR_USERNAME, + ?VAR_CLIENTID, + ?VAR_PEERHOST, + ?VAR_PROTONAME, + ?VAR_MOUNTPOINT, + ?VAR_TOPIC, + ?VAR_ACTION, + ?VAR_CERT_SUBJECT, + ?VAR_CERT_CN_NAME ]). -define(PLACEHOLDERS_FOR_RICH_ACTIONS, [ diff --git a/apps/emqx_auth_mongodb/src/emqx_authz_mongodb.erl b/apps/emqx_auth_mongodb/src/emqx_authz_mongodb.erl index 97a5fa3a6..35ac3a41b 100644 --- a/apps/emqx_auth_mongodb/src/emqx_authz_mongodb.erl +++ b/apps/emqx_auth_mongodb/src/emqx_authz_mongodb.erl @@ -36,11 +36,11 @@ -endif. -define(PLACEHOLDERS, [ - <>, - <>, - <>, - <>, - <> + ?VAR_USERNAME, + ?VAR_CLIENTID, + ?VAR_PEERHOST, + ?VAR_CERT_CN_NAME, + ?VAR_CERT_SUBJECT ]). description() -> diff --git a/apps/emqx_auth_mysql/src/emqx_authz_mysql.erl b/apps/emqx_auth_mysql/src/emqx_authz_mysql.erl index e87d2afa2..a6d71d1ca 100644 --- a/apps/emqx_auth_mysql/src/emqx_authz_mysql.erl +++ b/apps/emqx_auth_mysql/src/emqx_authz_mysql.erl @@ -38,11 +38,11 @@ -endif. -define(PLACEHOLDERS, [ - <>, - <>, - <>, - <>, - <> + ?VAR_USERNAME, + ?VAR_CLIENTID, + ?VAR_PEERHOST, + ?VAR_CERT_CN_NAME, + ?VAR_CERT_SUBJECT ]). description() -> diff --git a/apps/emqx_auth_postgresql/src/emqx_authz_postgresql.erl b/apps/emqx_auth_postgresql/src/emqx_authz_postgresql.erl index 645fff293..b538bd95e 100644 --- a/apps/emqx_auth_postgresql/src/emqx_authz_postgresql.erl +++ b/apps/emqx_auth_postgresql/src/emqx_authz_postgresql.erl @@ -38,11 +38,11 @@ -endif. -define(PLACEHOLDERS, [ - <>, - <>, - <>, - <>, - <> + ?VAR_USERNAME, + ?VAR_CLIENTID, + ?VAR_PEERHOST, + ?VAR_CERT_CN_NAME, + ?VAR_CERT_SUBJECT ]). 
description() -> diff --git a/apps/emqx_auth_redis/src/emqx_authz_redis.erl b/apps/emqx_auth_redis/src/emqx_authz_redis.erl index 7ac893da1..eb63804b9 100644 --- a/apps/emqx_auth_redis/src/emqx_authz_redis.erl +++ b/apps/emqx_auth_redis/src/emqx_authz_redis.erl @@ -36,11 +36,11 @@ -endif. -define(PLACEHOLDERS, [ - <>, - <>, - <>, - <>, - <> + ?VAR_CERT_CN_NAME, + ?VAR_CERT_SUBJECT, + ?VAR_PEERHOST, + ?VAR_CLIENTID, + ?VAR_USERNAME ]). description() -> diff --git a/apps/emqx_connector/src/emqx_connector_template.erl b/apps/emqx_connector/src/emqx_connector_template.erl index bb26edec1..221cc5e86 100644 --- a/apps/emqx_connector/src/emqx_connector_template.erl +++ b/apps/emqx_connector/src/emqx_connector_template.erl @@ -37,6 +37,7 @@ -export_type([str/0]). -export_type([deep/0]). -export_type([placeholder/0]). +-export_type([varname/0]). -export_type([bindings/0]). -type t() :: str() | {'$tpl', deeptpl()}. @@ -55,8 +56,9 @@ | port() | reference(). --type placeholder() :: {var, var()}. --type var() :: _Name :: [binary()]. +-type placeholder() :: {var, varname(), accessor()}. +-type accessor() :: [binary()]. +-type varname() :: string(). -type scalar() :: atom() | unicode:chardata() | number(). -type binding() :: scalar() | list(scalar()) | bindings(). @@ -64,7 +66,7 @@ -type var_trans() :: fun((Value :: term()) -> unicode:chardata()) - | fun((var(), Value :: term()) -> unicode:chardata()). + | fun((varname(), Value :: term()) -> unicode:chardata()). -type parse_opts() :: #{ strip_double_quote => boolean() @@ -103,7 +105,7 @@ parse(String, Opts) -> parse_split([Part, _PH, <<>>, Var]) -> % Regular placeholder - prepend(Part, [{var, parse_var(Var)}]); + prepend(Part, [{var, unicode:characters_to_list(Var), parse_accessor(Var)}]); parse_split([Part, _PH = <>, <<"$">>, _]) -> % Escaped literal, take all but the second byte, which is always `$`. 
% Important to make a whole token starting with `$` so the `unparse/11` @@ -117,7 +119,7 @@ prepend(<<>>, To) -> prepend(Head, To) -> [Head | To]. -parse_var(Var) -> +parse_accessor(Var) -> case string:split(Var, <<".">>, all) of [<<>>] -> ?PH_VAR_THIS; @@ -126,10 +128,9 @@ parse_var(Var) -> Name end. --spec validate([var() | binary()], t()) -> - ok | {error, [_Error :: {var(), disallowed}]}. -validate(AllowedIn, Template) -> - Allowed = [try_parse_var(V) || V <- AllowedIn], +-spec validate([varname()], t()) -> + ok | {error, [_Error :: {varname(), disallowed}]}. +validate(Allowed, Template) -> {_, Errors} = render(Template, #{}), {Used, _} = lists:unzip(Errors), case lists:usort(Used) -- Allowed of @@ -139,11 +140,6 @@ validate(AllowedIn, Template) -> {error, [{Var, disallowed} || Var <- Disallowed]} end. -try_parse_var(Var) when is_binary(Var) -> - parse_var(Var); -try_parse_var(Name) when is_list(Name) -> - Name. - -spec trivial(t()) -> boolean(). trivial(Template) -> @@ -156,7 +152,7 @@ unparse({'$tpl', Template}) -> unparse(Template) -> unicode:characters_to_list(lists:map(fun unparse_part/1, Template)). -unparse_part({var, Name}) -> +unparse_part({var, Name, _Accessor}) -> render_placeholder(Name); unparse_part(Part = <<"${", _/binary>>) -> <<"$", Part/binary>>; @@ -164,7 +160,7 @@ unparse_part(Part) -> Part. render_placeholder(Name) -> - "${" ++ lists:join($., Name) ++ "}". + "${" ++ Name ++ "}". %% @doc Render a template with given bindings. %% Returns a term with all placeholders replaced with values from bindings. @@ -172,17 +168,17 @@ render_placeholder(Name) -> %% By default, all binding values are converted to strings using `to_string/1` %% function. Option `var_trans` can be used to override this behaviour. -spec render(t(), bindings()) -> - {term(), [_Error :: {var(), undefined}]}. + {term(), [_Error :: {varname(), undefined}]}. render(Template, Bindings) -> render(Template, Bindings, #{}). 
-spec render(t(), bindings(), render_opts()) -> - {term(), [_Error :: {var(), undefined}]}. + {term(), [_Error :: {varname(), undefined}]}. render(Template, Bindings, Opts) when is_list(Template) -> lists:mapfoldl( fun - ({var, Name}, EAcc) -> - {String, Errors} = render_binding(Name, Bindings, Opts), + ({var, Name, Accessor}, EAcc) -> + {String, Errors} = render_binding(Name, Accessor, Bindings, Opts), {String, Errors ++ EAcc}; (String, EAcc) -> {String, EAcc} @@ -193,8 +189,8 @@ render(Template, Bindings, Opts) when is_list(Template) -> render({'$tpl', Template}, Bindings, Opts) -> render_deep(Template, Bindings, Opts). -render_binding(Name, Bindings, Opts) -> - case lookup_var(Name, Bindings) of +render_binding(Name, Accessor, Bindings, Opts) -> + case lookup_var(Accessor, Bindings) of {ok, Value} -> {render_value(Name, Value, Opts), []}; {error, Reason} -> @@ -231,12 +227,12 @@ render_strict(Template, Bindings, Opts) -> %% lists are not analyzed for "printability" and are treated as nested terms. %% The result is a usual template, and can be fed to other functions in this %% module. --spec parse_deep(unicode:chardata()) -> +-spec parse_deep(term()) -> t(). parse_deep(Term) -> parse_deep(Term, #{}). --spec parse_deep(unicode:chardata(), parse_opts()) -> +-spec parse_deep(term(), parse_opts()) -> t(). parse_deep(Term, Opts) -> {'$tpl', parse_deep_term(Term, Opts)}. @@ -305,7 +301,7 @@ unparse_deep(Term) -> %% --spec lookup_var(var(), bindings()) -> +-spec lookup_var(accessor(), bindings()) -> {ok, binding()} | {error, undefined}. 
lookup_var(Var, Value) when Var == ?PH_VAR_THIS orelse Var == [] -> {ok, Value}; diff --git a/apps/emqx_connector/src/emqx_connector_template_sql.erl b/apps/emqx_connector/src/emqx_connector_template_sql.erl index 0febfe575..e95ecde42 100644 --- a/apps/emqx_connector/src/emqx_connector_template_sql.erl +++ b/apps/emqx_connector/src/emqx_connector_template_sql.erl @@ -88,14 +88,14 @@ render_strict(Template, Bindings, Opts) -> %% #{parameters => '$n'} %% ), %% Statement = <<"INSERT INTO table (id, name, age) VALUES ($1, $2, 42)">>, -%% RowTemplate = [{var, [...]}, ...] +%% RowTemplate = [{var, "...", [...]}, ...] %% ``` -spec parse_prepstmt(unicode:chardata(), parse_opts()) -> {unicode:chardata(), row_template()}. parse_prepstmt(String, Opts) -> Template = emqx_connector_template:parse(String, maps:with(?TEMPLATE_PARSE_OPTS, Opts)), Statement = mk_prepared_statement(Template, Opts), - Placeholders = [Placeholder || Placeholder = {var, _} <- Template], + Placeholders = [Placeholder || Placeholder <- Template, element(1, Placeholder) == var], {Statement, Placeholders}. 
mk_prepared_statement(Template, Opts) -> @@ -103,7 +103,7 @@ mk_prepared_statement(Template, Opts) -> {Statement, _} = lists:mapfoldl( fun - ({var, _}, Acc) -> + (Var, Acc) when element(1, Var) == var -> mk_replace(ParameterFormat, Acc); (String, Acc) -> {String, Acc} diff --git a/apps/emqx_connector/test/emqx_connector_template_SUITE.erl b/apps/emqx_connector/test/emqx_connector_template_SUITE.erl index 666fbfa58..998baae37 100644 --- a/apps/emqx_connector/test/emqx_connector_template_SUITE.erl +++ b/apps/emqx_connector/test/emqx_connector_template_SUITE.erl @@ -47,7 +47,7 @@ t_render_var_trans(_) -> {String, Errors} = emqx_connector_template:render( Template, Bindings, - #{var_trans => fun(Name, _) -> "<" ++ lists:join($., Name) ++ ">" end} + #{var_trans => fun(Name, _) -> "<" ++ Name ++ ">" end} ), ?assertEqual( {<<"a:,b:,c:">>, []}, @@ -59,7 +59,7 @@ t_render_path(_) -> Template = emqx_connector_template:parse(<<"d.d1:${d.d1}">>), ?assertEqual( ok, - emqx_connector_template:validate([<<"d.d1">>], Template) + emqx_connector_template:validate(["d.d1"], Template) ), ?assertEqual( {<<"d.d1:hi">>, []}, @@ -70,8 +70,8 @@ t_render_custom_ph(_) -> Bindings = #{a => <<"a">>, b => <<"b">>}, Template = emqx_connector_template:parse(<<"a:${a},b:${b}">>), ?assertEqual( - {error, [{[<<"b">>], disallowed}]}, - emqx_connector_template:validate([<<"a">>], Template) + {error, [{"b", disallowed}]}, + emqx_connector_template:validate(["a"], Template) ), ?assertEqual( <<"a:a,b:b">>, @@ -81,7 +81,7 @@ t_render_custom_ph(_) -> t_render_this(_) -> Bindings = #{a => <<"a">>, b => [1, 2, 3]}, Template = emqx_connector_template:parse(<<"this:${} / also:${.}">>), - ?assertEqual(ok, emqx_connector_template:validate([?PH_VAR_THIS], Template)), + ?assertEqual(ok, emqx_connector_template:validate(["."], Template)), ?assertEqual( % NOTE: order of the keys in the JSON object depends on the JSON encoder <<"this:{\"b\":[1,2,3],\"a\":\"a\"} / also:{\"b\":[1,2,3],\"a\":\"a\"}">>, @@ -95,21 +95,21 
@@ t_render_missing_bindings(_) -> ), ?assertEqual( {<<"a:,b:,c:,d:,e:">>, [ - {[<<"no">>, <<"such_atom_i_swear">>], undefined}, - {[<<"d">>, <<"d1">>], undefined}, - {[<<"c">>], undefined}, - {[<<"b">>], undefined}, - {[<<"a">>], undefined} + {"no.such_atom_i_swear", undefined}, + {"d.d1", undefined}, + {"c", undefined}, + {"b", undefined}, + {"a", undefined} ]}, render_string(Template, Bindings) ), ?assertError( [ - {[<<"no">>, <<"such_atom_i_swear">>], undefined}, - {[<<"d">>, <<"d1">>], undefined}, - {[<<"c">>], undefined}, - {[<<"b">>], undefined}, - {[<<"a">>], undefined} + {"no.such_atom_i_swear", undefined}, + {"d.d1", undefined}, + {"c", undefined}, + {"b", undefined}, + {"a", undefined} ], render_strict_string(Template, Bindings) ). @@ -256,10 +256,10 @@ t_render_cql(_) -> t_render_sql_custom_ph(_) -> {PrepareStatement, RowTemplate} = - emqx_connector_template_sql:parse_prepstmt(<<"a:${a},b:${b}">>, #{parameters => '$n'}), + emqx_connector_template_sql:parse_prepstmt(<<"a:${a},b:${b.c}">>, #{parameters => '$n'}), ?assertEqual( - {error, [{[<<"b">>], disallowed}]}, - emqx_connector_template:validate([<<"a">>], RowTemplate) + {error, [{"b.c", disallowed}]}, + emqx_connector_template:validate(["a"], RowTemplate) ), ?assertEqual(<<"a:$1,b:$2">>, bin(PrepareStatement)). @@ -296,8 +296,8 @@ t_render_tmpl_deep(_) -> ), ?assertEqual( - {error, [{V, disallowed} || V <- [[<<"b">>], [<<"c">>]]]}, - emqx_connector_template:validate([<<"a">>], Template) + {error, [{V, disallowed} || V <- ["b", "c"]]}, + emqx_connector_template:validate(["a"], Template) ), ?assertEqual( diff --git a/apps/emqx_rule_engine/src/emqx_rule_actions.erl b/apps/emqx_rule_engine/src/emqx_rule_actions.erl index bb9966b4a..fa677ce78 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_actions.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_actions.erl @@ -224,8 +224,8 @@ parse_user_properties(_) -> %% invalid, discard undefined. 
-render_simple_var([{var, Name}], Data, Default) -> - case emqx_connector_template:lookup_var(Name, Data) of +render_simple_var([{var, _Name, Accessor}], Data, Default) -> + case emqx_connector_template:lookup_var(Accessor, Data) of {ok, Var} -> Var; %% cannot find the variable from Data {error, _} -> Default From 49fba40ee7a402ce07fc6620ea035b21eea0573e Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Thu, 27 Apr 2023 13:53:17 +0300 Subject: [PATCH 054/155] fix(tpl): ensure backward compat with authz / authn templates This commit leans heavy into discouraging the former approach where only part of placeholders were interpolated, depending on `placeholders` option. --- .../src/emqx_authn/emqx_authn_utils.erl | 97 +++++++++++++------ .../src/emqx_authz/emqx_authz_rule.erl | 3 +- .../src/emqx_authz/emqx_authz_utils.erl | 85 +++++++++++----- apps/emqx_auth_http/src/emqx_authz_http.erl | 26 ++--- .../test/emqx_authn_http_SUITE.erl | 53 ++++++++-- .../test/emqx_authz_http_SUITE.erl | 61 ++++++++++++ .../src/emqx_authz_mongodb.erl | 6 +- apps/emqx_auth_mysql/src/emqx_authz_mysql.erl | 6 +- .../src/emqx_authz_postgresql.erl | 6 +- apps/emqx_auth_redis/src/emqx_authz_redis.erl | 4 +- 10 files changed, 260 insertions(+), 87 deletions(-) diff --git a/apps/emqx_auth/src/emqx_authn/emqx_authn_utils.erl b/apps/emqx_auth/src/emqx_authn/emqx_authn_utils.erl index 9be3e24d6..d9b20a47c 100644 --- a/apps/emqx_auth/src/emqx_authn/emqx_authn_utils.erl +++ b/apps/emqx_auth/src/emqx_authn/emqx_authn_utils.erl @@ -18,6 +18,7 @@ -include_lib("emqx/include/emqx_placeholder.hrl"). -include_lib("emqx_authn.hrl"). +-include_lib("snabbkaffe/include/trace.hrl"). -export([ create_resource/3, @@ -44,13 +45,13 @@ default_headers_no_content_type/0 ]). --define(AUTHN_PLACEHOLDERS, [ - <>, - <>, - <>, - <>, - <>, - <> +-define(ALLOWED_VARS, [ + ?VAR_USERNAME, + ?VAR_CLIENTID, + ?VAR_PASSWORD, + ?VAR_PEERHOST, + ?VAR_CERT_SUBJECT, + ?VAR_CERT_CN_NAME ]). 
-define(DEFAULT_RESOURCE_OPTS, #{ @@ -108,21 +109,55 @@ check_password_from_selected_map(Algorithm, Selected, Password) -> parse_deep(Template) -> Result = emqx_connector_template:parse_deep(Template), - ok = emqx_connector_template:validate(?AUTHN_PLACEHOLDERS, Result), - Result. + handle_disallowed_placeholders(Result, {deep, Template}). parse_str(Template) -> Result = emqx_connector_template:parse(Template), - ok = emqx_connector_template:validate(?AUTHN_PLACEHOLDERS, Result), - Result. + handle_disallowed_placeholders(Result, {string, Template}). parse_sql(Template, ReplaceWith) -> {Statement, Result} = emqx_connector_template_sql:parse_prepstmt( Template, #{parameters => ReplaceWith, strip_double_quote => true} ), - ok = emqx_connector_template:validate(?AUTHN_PLACEHOLDERS, Result), - {Statement, Result}. + {Statement, handle_disallowed_placeholders(Result, {string, Template})}. + +handle_disallowed_placeholders(Template, Source) -> + case emqx_connector_template:validate(?ALLOWED_VARS, Template) of + ok -> + Template; + {error, Disallowed} -> + ?tp(warning, "authn_template_invalid", #{ + template => Source, + reason => Disallowed, + allowed => #{placeholders => ?ALLOWED_VARS}, + notice => + "Disallowed placeholders will be rendered as is." + " However, consider using `$${...}` escaping for literal `${...}` where" + " needed to avoid unexpected results." + }), + Result = prerender_disallowed_placeholders(Template), + case Source of + {string, _} -> + emqx_connector_template:parse(Result); + {deep, _} -> + emqx_connector_template:parse_deep(Result) + end + end. + +prerender_disallowed_placeholders(Template) -> + {Result, _} = emqx_connector_template:render(Template, #{}, #{ + var_trans => fun(Name, _) -> + % NOTE + % Rendering disallowed placeholders in escaped form, which will then + % parse as a literal string. + case lists:member(Name, ?ALLOWED_VARS) of + true -> "${" ++ Name ++ "}"; + false -> "$${" ++ Name ++ "}" + end + end + }), + Result. 
render_deep(Template, Credential) -> % NOTE @@ -130,7 +165,7 @@ render_deep(Template, Credential) -> {Term, _Errors} = emqx_connector_template:render( Template, mapping_credential(Credential), - #{var_trans => fun handle_var/2} + #{var_trans => fun to_string/2} ), Term. @@ -140,7 +175,7 @@ render_str(Template, Credential) -> {String, _Errors} = emqx_connector_template:render( Template, mapping_credential(Credential), - #{var_trans => fun handle_var/2} + #{var_trans => fun to_string/2} ), unicode:characters_to_binary(String). @@ -150,7 +185,7 @@ render_urlencoded_str(Template, Credential) -> {String, _Errors} = emqx_connector_template:render( Template, mapping_credential(Credential), - #{var_trans => fun urlencode_var/2} + #{var_trans => fun to_urlencoded_string/2} ), unicode:characters_to_binary(String). @@ -160,7 +195,7 @@ render_sql_params(ParamList, Credential) -> {Row, _Errors} = emqx_connector_template:render( ParamList, mapping_credential(Credential), - #{var_trans => fun handle_sql_var/2} + #{var_trans => fun to_sql_valaue/2} ), Row. @@ -283,22 +318,24 @@ without_password(Credential, [Name | Rest]) -> without_password(Credential, Rest) end. -urlencode_var(Var, Value) -> - emqx_http_lib:uri_encode(handle_var(Var, Value)). +to_urlencoded_string(Name, Value) -> + emqx_http_lib:uri_encode(to_string(Name, Value)). -handle_var(_, undefined) -> - <<>>; -handle_var([<<"peerhost">>], PeerHost) -> - emqx_connector_template:to_string(inet:ntoa(PeerHost)); -handle_var(_, Value) -> - emqx_connector_template:to_string(Value). +to_string(Name, Value) -> + emqx_connector_template:to_string(render_var(Name, Value)). -handle_sql_var(_, undefined) -> +to_sql_valaue(Name, Value) -> + emqx_connector_sql:to_sql_value(render_var(Name, Value)). + +render_var(_, undefined) -> + % NOTE + % Any allowed but undefined binding will be replaced with empty string, even when + % rendering SQL values. 
<<>>; -handle_sql_var([<<"peerhost">>], PeerHost) -> - emqx_connector_sql:to_sql_value(inet:ntoa(PeerHost)); -handle_sql_var(_, Value) -> - emqx_connector_sql:to_sql_value(Value). +render_var(?VAR_PEERHOST, Value) -> + inet:ntoa(Value); +render_var(_Name, Value) -> + Value. mapping_credential(C = #{cn := CN, dn := DN}) -> C#{cert_common_name => CN, cert_subject => DN}; diff --git a/apps/emqx_auth/src/emqx_authz/emqx_authz_rule.erl b/apps/emqx_auth/src/emqx_authz/emqx_authz_rule.erl index 6f5369aec..1e7a49855 100644 --- a/apps/emqx_auth/src/emqx_authz/emqx_authz_rule.erl +++ b/apps/emqx_auth/src/emqx_authz/emqx_authz_rule.erl @@ -183,8 +183,7 @@ compile_topic(<<"eq ", Topic/binary>>) -> compile_topic({eq, Topic}) -> {eq, emqx_topic:words(bin(Topic))}; compile_topic(Topic) -> - Template = emqx_connector_template:parse(Topic), - ok = emqx_connector_template:validate([?VAR_USERNAME, ?VAR_CLIENTID], Template), + Template = emqx_authz_utils:parse_str(Topic, [?VAR_USERNAME, ?VAR_CLIENTID]), case emqx_connector_template:trivial(Template) of true -> emqx_topic:words(bin(Topic)); false -> {pattern, Template} diff --git a/apps/emqx_auth/src/emqx_authz/emqx_authz_utils.erl b/apps/emqx_auth/src/emqx_authz/emqx_authz_utils.erl index e5aeab21b..bd7b353a5 100644 --- a/apps/emqx_auth/src/emqx_authz/emqx_authz_utils.erl +++ b/apps/emqx_auth/src/emqx_authz/emqx_authz_utils.erl @@ -16,7 +16,9 @@ -module(emqx_authz_utils). +-include_lib("emqx/include/emqx_placeholder.hrl"). -include_lib("emqx_authz.hrl"). +-include_lib("snabbkaffe/include/trace.hrl"). -export([ cleanup_resources/0, @@ -109,21 +111,56 @@ update_config(Path, ConfigRequest) -> parse_deep(Template, PlaceHolders) -> Result = emqx_connector_template:parse_deep(Template), - ok = emqx_connector_template:validate(PlaceHolders, Result), - Result. + handle_disallowed_placeholders(Result, {deep, Template}, PlaceHolders). 
parse_str(Template, PlaceHolders) -> Result = emqx_connector_template:parse(Template), - ok = emqx_connector_template:validate(PlaceHolders, Result), - Result. + handle_disallowed_placeholders(Result, {string, Template}, PlaceHolders). parse_sql(Template, ReplaceWith, PlaceHolders) -> {Statement, Result} = emqx_connector_template_sql:parse_prepstmt( Template, #{parameters => ReplaceWith, strip_double_quote => true} ), - ok = emqx_connector_template:validate(PlaceHolders, Result), - {Statement, Result}. + FResult = handle_disallowed_placeholders(Result, {string, Template}, PlaceHolders), + {Statement, FResult}. + +handle_disallowed_placeholders(Template, Source, Allowed) -> + case emqx_connector_template:validate(Allowed, Template) of + ok -> + Template; + {error, Disallowed} -> + ?tp(warning, "authz_template_invalid", #{ + template => Source, + reason => Disallowed, + allowed => #{placeholders => Allowed}, + notice => + "Disallowed placeholders will be rendered as is." + " However, consider using `$${...}` escaping for literal `${...}` where" + " needed to avoid unexpected results." + }), + Result = prerender_disallowed_placeholders(Template, Allowed), + case Source of + {string, _} -> + emqx_connector_template:parse(Result); + {deep, _} -> + emqx_connector_template:parse_deep(Result) + end + end. + +prerender_disallowed_placeholders(Template, Allowed) -> + {Result, _} = emqx_connector_template:render(Template, #{}, #{ + var_trans => fun(Name, _) -> + % NOTE + % Rendering disallowed placeholders in escaped form, which will then + % parse as a literal string. + case lists:member(Name, Allowed) of + true -> "${" ++ Name ++ "}"; + false -> "$${" ++ Name ++ "}" + end + end + }), + Result. render_deep(Template, Values) -> % NOTE @@ -131,7 +168,7 @@ render_deep(Template, Values) -> {Term, _Errors} = emqx_connector_template:render( Template, client_vars(Values), - #{var_trans => fun handle_var/2} + #{var_trans => fun to_string/2} ), Term. 
@@ -141,7 +178,7 @@ render_str(Template, Values) -> {String, _Errors} = emqx_connector_template:render( Template, client_vars(Values), - #{var_trans => fun handle_var/2} + #{var_trans => fun to_string/2} ), unicode:characters_to_binary(String). @@ -151,7 +188,7 @@ render_urlencoded_str(Template, Values) -> {String, _Errors} = emqx_connector_template:render( Template, client_vars(Values), - #{var_trans => fun urlencode_var/2} + #{var_trans => fun to_urlencoded_string/2} ), unicode:characters_to_binary(String). @@ -161,7 +198,7 @@ render_sql_params(ParamList, Values) -> {Row, _Errors} = emqx_connector_template:render( ParamList, client_vars(Values), - #{var_trans => fun handle_sql_var/2} + #{var_trans => fun to_sql_value/2} ), Row. @@ -229,22 +266,24 @@ convert_client_var({dn, DN}) -> {cert_subject, DN}; convert_client_var({protocol, Proto}) -> {proto_name, Proto}; convert_client_var(Other) -> Other. -urlencode_var(Var, Value) -> - emqx_http_lib:uri_encode(handle_var(Var, Value)). +to_urlencoded_string(Name, Value) -> + emqx_http_lib:uri_encode(to_string(Name, Value)). -handle_var(_, undefined) -> - <<>>; -handle_var([<<"peerhost">>], IpAddr) -> - inet_parse:ntoa(IpAddr); -handle_var(_Name, Value) -> - emqx_connector_template:to_string(Value). +to_string(Name, Value) -> + emqx_connector_template:to_string(render_var(Name, Value)). -handle_sql_var(_, undefined) -> +to_sql_value(Name, Value) -> + emqx_connector_sql:to_sql_value(render_var(Name, Value)). + +render_var(_, undefined) -> + % NOTE + % Any allowed but undefined binding will be replaced with empty string, even when + % rendering SQL values. <<>>; -handle_sql_var([<<"peerhost">>], IpAddr) -> - inet_parse:ntoa(IpAddr); -handle_sql_var(_Name, Value) -> - emqx_connector_sql:to_sql_value(Value). +render_var(?VAR_PEERHOST, Value) -> + inet:ntoa(Value); +render_var(_Name, Value) -> + Value. 
bin(A) when is_atom(A) -> atom_to_binary(A, utf8); bin(L) when is_list(L) -> list_to_binary(L); diff --git a/apps/emqx_auth_http/src/emqx_authz_http.erl b/apps/emqx_auth_http/src/emqx_authz_http.erl index bbb2bf9b5..04f76b4c9 100644 --- a/apps/emqx_auth_http/src/emqx_authz_http.erl +++ b/apps/emqx_auth_http/src/emqx_authz_http.erl @@ -38,7 +38,7 @@ -compile(nowarn_export_all). -endif. --define(PLACEHOLDERS, [ +-define(ALLOWED_VARS, [ ?VAR_USERNAME, ?VAR_CLIENTID, ?VAR_PEERHOST, @@ -50,9 +50,9 @@ ?VAR_CERT_CN_NAME ]). --define(PLACEHOLDERS_FOR_RICH_ACTIONS, [ - <>, - <> +-define(ALLOWED_VARS_RICH_ACTIONS, [ + ?VAR_QOS, + ?VAR_RETAIN ]). description() -> @@ -157,14 +157,14 @@ parse_config( method => Method, base_url => BaseUrl, headers => Headers, - base_path_templete => emqx_authz_utils:parse_str(Path, placeholders()), + base_path_templete => emqx_authz_utils:parse_str(Path, allowed_vars()), base_query_template => emqx_authz_utils:parse_deep( cow_qs:parse_qs(to_bin(Query)), - placeholders() + allowed_vars() ), body_template => emqx_authz_utils:parse_deep( maps:to_list(maps:get(body, Conf, #{})), - placeholders() + allowed_vars() ), request_timeout => ReqTimeout, %% pool_type default value `random` @@ -260,10 +260,10 @@ to_bin(B) when is_binary(B) -> B; to_bin(L) when is_list(L) -> list_to_binary(L); to_bin(X) -> X. -placeholders() -> - placeholders(emqx_authz:feature_available(rich_actions)). +allowed_vars() -> + allowed_vars(emqx_authz:feature_available(rich_actions)). -placeholders(true) -> - ?PLACEHOLDERS ++ ?PLACEHOLDERS_FOR_RICH_ACTIONS; -placeholders(false) -> - ?PLACEHOLDERS. +allowed_vars(true) -> + ?ALLOWED_VARS ++ ?ALLOWED_VARS_RICH_ACTIONS; +allowed_vars(false) -> + ?ALLOWED_VARS. 
diff --git a/apps/emqx_auth_http/test/emqx_authn_http_SUITE.erl b/apps/emqx_auth_http/test/emqx_authn_http_SUITE.erl index 577b3b638..e307b5bbf 100644 --- a/apps/emqx_auth_http/test/emqx_authn_http_SUITE.erl +++ b/apps/emqx_auth_http/test/emqx_authn_http_SUITE.erl @@ -27,7 +27,7 @@ -define(PATH, [?CONF_NS_ATOM]). -define(HTTP_PORT, 32333). --define(HTTP_PATH, "/auth"). +-define(HTTP_PATH, "/auth/[...]"). -define(CREDENTIALS, #{ clientid => <<"clienta">>, username => <<"plain">>, @@ -146,8 +146,12 @@ t_authenticate(_Config) -> test_user_auth(#{ handler := Handler, config_params := SpecificConfgParams, - result := Result + result := Expect }) -> + Result = perform_user_auth(SpecificConfgParams, Handler, ?CREDENTIALS), + ?assertEqual(Expect, Result). + +perform_user_auth(SpecificConfgParams, Handler, Credentials) -> AuthConfig = maps:merge(raw_http_auth_config(), SpecificConfgParams), {ok, _} = emqx:update_config( @@ -157,21 +161,21 @@ test_user_auth(#{ ok = emqx_authn_http_test_server:set_handler(Handler), - ?assertEqual(Result, emqx_access_control:authenticate(?CREDENTIALS)), + Result = emqx_access_control:authenticate(Credentials), emqx_authn_test_lib:delete_authenticators( [authentication], ?GLOBAL - ). + ), + + Result. 
t_authenticate_path_placeholders(_Config) -> - ok = emqx_authn_http_test_server:stop(), - {ok, _} = emqx_authn_http_test_server:start_link(?HTTP_PORT, <<"/[...]">>), ok = emqx_authn_http_test_server:set_handler( fun(Req0, State) -> Req = case cowboy_req:path(Req0) of - <<"/my/p%20ath//us%20er/auth//">> -> + <<"/auth/p%20ath//us%20er/auth//">> -> cowboy_req:reply( 200, #{<<"content-type">> => <<"application/json">>}, @@ -193,7 +197,7 @@ t_authenticate_path_placeholders(_Config) -> AuthConfig = maps:merge( raw_http_auth_config(), #{ - <<"url">> => <<"http://127.0.0.1:32333/my/p%20ath//${username}/auth//">>, + <<"url">> => <<"http://127.0.0.1:32333/auth/p%20ath//${username}/auth//">>, <<"body">> => #{} } ), @@ -255,6 +259,39 @@ t_no_value_for_placeholder(_Config) -> ?GLOBAL ). +t_disallowed_placeholders_preserved(_Config) -> + Config = #{ + <<"method">> => <<"post">>, + <<"headers">> => #{<<"content-type">> => <<"application/json">>}, + <<"body">> => #{ + <<"username">> => ?PH_USERNAME, + <<"password">> => ?PH_PASSWORD, + <<"this">> => <<"${whatisthis}">> + } + }, + Handler = fun(Req0, State) -> + {ok, Body, Req1} = cowboy_req:read_body(Req0), + #{ + <<"username">> := <<"plain">>, + <<"password">> := <<"plain">>, + <<"this">> := <<"${whatisthis}">> + } = emqx_utils_json:decode(Body), + Req = cowboy_req:reply( + 200, + #{<<"content-type">> => <<"application/json">>}, + emqx_utils_json:encode(#{result => allow, is_superuser => false}), + Req1 + ), + {ok, Req, State} + end, + ?assertMatch({ok, _}, perform_user_auth(Config, Handler, ?CREDENTIALS)), + + % NOTE: disallowed placeholder left intact, which makes the URL invalid + ConfigUrl = Config#{ + <<"url">> => <<"http://127.0.0.1:32333/auth/${whatisthis}">> + }, + ?assertMatch({error, _}, perform_user_auth(ConfigUrl, Handler, ?CREDENTIALS)). 
+ t_destroy(_Config) -> AuthConfig = raw_http_auth_config(), diff --git a/apps/emqx_auth_http/test/emqx_authz_http_SUITE.erl b/apps/emqx_auth_http/test/emqx_authz_http_SUITE.erl index e56e25f5f..845259e78 100644 --- a/apps/emqx_auth_http/test/emqx_authz_http_SUITE.erl +++ b/apps/emqx_auth_http/test/emqx_authz_http_SUITE.erl @@ -494,6 +494,67 @@ t_no_value_for_placeholder(_Config) -> emqx_access_control:authorize(ClientInfo, ?AUTHZ_PUBLISH, <<"t">>) ). +t_disallowed_placeholders_preserved(_Config) -> + ok = setup_handler_and_config( + fun(Req0, State) -> + {ok, Body, Req1} = cowboy_req:read_body(Req0), + ?assertMatch( + #{ + <<"cname">> := <<>>, + <<"usertypo">> := <<"${usertypo}">> + }, + emqx_utils_json:decode(Body) + ), + {ok, ?AUTHZ_HTTP_RESP(allow, Req1), State} + end, + #{ + <<"method">> => <<"post">>, + <<"body">> => #{ + <<"cname">> => ?PH_CERT_CN_NAME, + <<"usertypo">> => <<"${usertypo}">> + } + } + ), + + ClientInfo = #{ + clientid => <<"client id">>, + username => <<"user name">>, + peerhost => {127, 0, 0, 1}, + protocol => <<"MQTT">>, + zone => default, + listener => {tcp, default} + }, + + ?assertEqual( + allow, + emqx_access_control:authorize(ClientInfo, ?AUTHZ_PUBLISH, <<"t">>) + ). + +t_disallowed_placeholders_path(_Config) -> + ok = setup_handler_and_config( + fun(Req, State) -> + {ok, ?AUTHZ_HTTP_RESP(allow, Req), State} + end, + #{ + <<"url">> => <<"http://127.0.0.1:33333/authz/use%20rs/${typo}">> + } + ), + + ClientInfo = #{ + clientid => <<"client id">>, + username => <<"user name">>, + peerhost => {127, 0, 0, 1}, + protocol => <<"MQTT">>, + zone => default, + listener => {tcp, default} + }, + + % % NOTE: disallowed placeholder left intact, which makes the URL invalid + ?assertEqual( + deny, + emqx_access_control:authorize(ClientInfo, ?AUTHZ_PUBLISH, <<"t">>) + ). 
+ t_create_replace(_Config) -> ClientInfo = #{ clientid => <<"clientid">>, diff --git a/apps/emqx_auth_mongodb/src/emqx_authz_mongodb.erl b/apps/emqx_auth_mongodb/src/emqx_authz_mongodb.erl index 35ac3a41b..fdeb9d542 100644 --- a/apps/emqx_auth_mongodb/src/emqx_authz_mongodb.erl +++ b/apps/emqx_auth_mongodb/src/emqx_authz_mongodb.erl @@ -35,7 +35,7 @@ -compile(nowarn_export_all). -endif. --define(PLACEHOLDERS, [ +-define(ALLOWED_VARS, [ ?VAR_USERNAME, ?VAR_CLIENTID, ?VAR_PEERHOST, @@ -49,11 +49,11 @@ description() -> create(#{filter := Filter} = Source) -> ResourceId = emqx_authz_utils:make_resource_id(?MODULE), {ok, _Data} = emqx_authz_utils:create_resource(ResourceId, emqx_mongodb, Source), - FilterTemp = emqx_authz_utils:parse_deep(Filter, ?PLACEHOLDERS), + FilterTemp = emqx_authz_utils:parse_deep(Filter, ?ALLOWED_VARS), Source#{annotations => #{id => ResourceId}, filter_template => FilterTemp}. update(#{filter := Filter} = Source) -> - FilterTemp = emqx_authz_utils:parse_deep(Filter, ?PLACEHOLDERS), + FilterTemp = emqx_authz_utils:parse_deep(Filter, ?ALLOWED_VARS), case emqx_authz_utils:update_resource(emqx_mongodb, Source) of {error, Reason} -> error({load_config_error, Reason}); diff --git a/apps/emqx_auth_mysql/src/emqx_authz_mysql.erl b/apps/emqx_auth_mysql/src/emqx_authz_mysql.erl index a6d71d1ca..8c9e54ee1 100644 --- a/apps/emqx_auth_mysql/src/emqx_authz_mysql.erl +++ b/apps/emqx_auth_mysql/src/emqx_authz_mysql.erl @@ -37,7 +37,7 @@ -compile(nowarn_export_all). -endif. --define(PLACEHOLDERS, [ +-define(ALLOWED_VARS, [ ?VAR_USERNAME, ?VAR_CLIENTID, ?VAR_PEERHOST, @@ -49,14 +49,14 @@ description() -> "AuthZ with Mysql". 
create(#{query := SQL} = Source0) -> - {PrepareSQL, TmplToken} = emqx_authz_utils:parse_sql(SQL, '?', ?PLACEHOLDERS), + {PrepareSQL, TmplToken} = emqx_authz_utils:parse_sql(SQL, '?', ?ALLOWED_VARS), ResourceId = emqx_authz_utils:make_resource_id(?MODULE), Source = Source0#{prepare_statement => #{?PREPARE_KEY => PrepareSQL}}, {ok, _Data} = emqx_authz_utils:create_resource(ResourceId, emqx_mysql, Source), Source#{annotations => #{id => ResourceId, tmpl_token => TmplToken}}. update(#{query := SQL} = Source0) -> - {PrepareSQL, TmplToken} = emqx_authz_utils:parse_sql(SQL, '?', ?PLACEHOLDERS), + {PrepareSQL, TmplToken} = emqx_authz_utils:parse_sql(SQL, '?', ?ALLOWED_VARS), Source = Source0#{prepare_statement => #{?PREPARE_KEY => PrepareSQL}}, case emqx_authz_utils:update_resource(emqx_mysql, Source) of {error, Reason} -> diff --git a/apps/emqx_auth_postgresql/src/emqx_authz_postgresql.erl b/apps/emqx_auth_postgresql/src/emqx_authz_postgresql.erl index b538bd95e..14b7598a6 100644 --- a/apps/emqx_auth_postgresql/src/emqx_authz_postgresql.erl +++ b/apps/emqx_auth_postgresql/src/emqx_authz_postgresql.erl @@ -37,7 +37,7 @@ -compile(nowarn_export_all). -endif. --define(PLACEHOLDERS, [ +-define(ALLOWED_VARS, [ ?VAR_USERNAME, ?VAR_CLIENTID, ?VAR_PEERHOST, @@ -49,7 +49,7 @@ description() -> "AuthZ with PostgreSQL". create(#{query := SQL0} = Source) -> - {SQL, PlaceHolders} = emqx_authz_utils:parse_sql(SQL0, '$n', ?PLACEHOLDERS), + {SQL, PlaceHolders} = emqx_authz_utils:parse_sql(SQL0, '$n', ?ALLOWED_VARS), ResourceID = emqx_authz_utils:make_resource_id(emqx_postgresql), {ok, _Data} = emqx_authz_utils:create_resource( ResourceID, @@ -59,7 +59,7 @@ create(#{query := SQL0} = Source) -> Source#{annotations => #{id => ResourceID, placeholders => PlaceHolders}}. 
update(#{query := SQL0, annotations := #{id := ResourceID}} = Source) -> - {SQL, PlaceHolders} = emqx_authz_utils:parse_sql(SQL0, '$n', ?PLACEHOLDERS), + {SQL, PlaceHolders} = emqx_authz_utils:parse_sql(SQL0, '$n', ?ALLOWED_VARS), case emqx_authz_utils:update_resource( emqx_postgresql, diff --git a/apps/emqx_auth_redis/src/emqx_authz_redis.erl b/apps/emqx_auth_redis/src/emqx_authz_redis.erl index eb63804b9..ca4a11742 100644 --- a/apps/emqx_auth_redis/src/emqx_authz_redis.erl +++ b/apps/emqx_auth_redis/src/emqx_authz_redis.erl @@ -35,7 +35,7 @@ -compile(nowarn_export_all). -endif. --define(PLACEHOLDERS, [ +-define(ALLOWED_VARS, [ ?VAR_CERT_CN_NAME, ?VAR_CERT_SUBJECT, ?VAR_PEERHOST, @@ -133,7 +133,7 @@ parse_cmd(Query) -> case emqx_redis_command:split(Query) of {ok, Cmd} -> ok = validate_cmd(Cmd), - emqx_authz_utils:parse_deep(Cmd, ?PLACEHOLDERS); + emqx_authz_utils:parse_deep(Cmd, ?ALLOWED_VARS); {error, Reason} -> error({invalid_redis_cmd, Reason, Query}) end. From dfb7faf6a8a8a011c07f429b595c55011b7c3bb2 Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Wed, 3 May 2023 18:53:18 +0300 Subject: [PATCH 055/155] fix(tpl): ensure rough backward compat in `emqx_prometheus` But also emit warning when job name template renders with errors. --- apps/emqx_prometheus/src/emqx_prometheus.app.src | 2 +- apps/emqx_prometheus/src/emqx_prometheus.erl | 10 +++++++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/apps/emqx_prometheus/src/emqx_prometheus.app.src b/apps/emqx_prometheus/src/emqx_prometheus.app.src index c4abbec27..4631fec8b 100644 --- a/apps/emqx_prometheus/src/emqx_prometheus.app.src +++ b/apps/emqx_prometheus/src/emqx_prometheus.app.src @@ -2,7 +2,7 @@ {application, emqx_prometheus, [ {description, "Prometheus for EMQX"}, % strict semver, bump manually! 
- {vsn, "5.0.16"}, + {vsn, "5.0.17"}, {modules, []}, {registered, [emqx_prometheus_sup]}, {applications, [kernel, stdlib, prometheus, emqx, emqx_management]}, diff --git a/apps/emqx_prometheus/src/emqx_prometheus.erl b/apps/emqx_prometheus/src/emqx_prometheus.erl index fa9a39cc6..41dec9ee9 100644 --- a/apps/emqx_prometheus/src/emqx_prometheus.erl +++ b/apps/emqx_prometheus/src/emqx_prometheus.erl @@ -113,10 +113,18 @@ handle_info(_Msg, State) -> push_to_push_gateway(Uri, Headers, JobName) when is_list(Headers) -> [Name, Ip] = string:tokens(atom_to_list(node()), "@"), - JobName1 = emqx_connector_template:render_strict( + % NOTE: allowing errors here to keep rough backward compatibility + {JobName1, Errors} = emqx_connector_template:render( emqx_connector_template:parse(JobName), #{<<"name">> => Name, <<"host">> => Ip} ), + _ = + Errors == [] orelse + ?SLOG(warning, #{ + msg => "prometheus_job_name_template_invalid", + errors => Errors, + template => JobName + }), Data = prometheus_text_format:format(), Url = lists:concat([Uri, "/metrics/job/", unicode:characters_to_list(JobName1)]), case httpc:request(post, {Url, Headers, "text/plain", Data}, ?HTTP_OPTIONS, []) of From 7bb995f0c6a329b4c3375f821e5de272d34e1507 Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Fri, 28 Apr 2023 12:03:55 +0300 Subject: [PATCH 056/155] feat(tpl): support `:n` SQL parameters --- apps/emqx_connector/src/emqx_connector_template_sql.erl | 6 ++++-- apps/emqx_connector/test/emqx_connector_template_SUITE.erl | 7 +++++++ 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/apps/emqx_connector/src/emqx_connector_template_sql.erl b/apps/emqx_connector/src/emqx_connector_template_sql.erl index e95ecde42..90d79415e 100644 --- a/apps/emqx_connector/src/emqx_connector_template_sql.erl +++ b/apps/emqx_connector/src/emqx_connector_template_sql.erl @@ -34,7 +34,7 @@ -type values() :: [emqx_connector_sql:value()]. 
-type parse_opts() :: #{ - parameters => '$n' | '?', + parameters => '$n' | ':n' | '?', % Inherited from `emqx_connector_template:parse_opts()` strip_double_quote => boolean() }. @@ -116,7 +116,9 @@ mk_prepared_statement(Template, Opts) -> mk_replace('?', Acc) -> {"?", Acc}; mk_replace('$n', N) -> - {"$" ++ integer_to_list(N), N + 1}. + {"$" ++ integer_to_list(N), N + 1}; +mk_replace(':n', N) -> + {":" ++ integer_to_list(N), N + 1}. %% @doc Render a row template into a list of SQL values. %% An _SQL value_ is a vaguely defined concept here, it is something that's considered diff --git a/apps/emqx_connector/test/emqx_connector_template_SUITE.erl b/apps/emqx_connector/test/emqx_connector_template_SUITE.erl index 998baae37..562e0e550 100644 --- a/apps/emqx_connector/test/emqx_connector_template_SUITE.erl +++ b/apps/emqx_connector/test/emqx_connector_template_SUITE.erl @@ -188,6 +188,13 @@ t_parse_sql_prepstmt_n(_) -> emqx_connector_template_sql:render_prepstmt_strict(RowTemplate, Bindings) ). +t_parse_sql_prepstmt_colon(_) -> + {PrepareStatement, _RowTemplate} = + emqx_connector_template_sql:parse_prepstmt(<<"a=${a},b=${b},c=${c},d=${d}">>, #{ + parameters => ':n' + }), + ?assertEqual(<<"a=:1,b=:2,c=:3,d=:4">>, bin(PrepareStatement)). + t_parse_sql_prepstmt_partial_ph(_) -> Bindings = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, {PrepareStatement, RowTemplate} = From a9693eada7e3832480e1b32f9e37c93d2b1539bc Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Thu, 4 May 2023 16:47:36 +0300 Subject: [PATCH 057/155] fix(tpl): rename `trivial` -> `is_const` This is clearer. Former naming was a bit misleading. 
--- apps/emqx_auth/src/emqx_authz/emqx_authz_rule.erl | 2 +- apps/emqx_connector/src/emqx_connector_template.erl | 9 +++++---- .../test/emqx_connector_template_SUITE.erl | 8 ++++---- 3 files changed, 10 insertions(+), 9 deletions(-) diff --git a/apps/emqx_auth/src/emqx_authz/emqx_authz_rule.erl b/apps/emqx_auth/src/emqx_authz/emqx_authz_rule.erl index 1e7a49855..160f9cac4 100644 --- a/apps/emqx_auth/src/emqx_authz/emqx_authz_rule.erl +++ b/apps/emqx_auth/src/emqx_authz/emqx_authz_rule.erl @@ -184,7 +184,7 @@ compile_topic({eq, Topic}) -> {eq, emqx_topic:words(bin(Topic))}; compile_topic(Topic) -> Template = emqx_authz_utils:parse_str(Topic, [?VAR_USERNAME, ?VAR_CLIENTID]), - case emqx_connector_template:trivial(Template) of + case emqx_connector_template:is_const(Template) of true -> emqx_topic:words(bin(Topic)); false -> {pattern, Template} end. diff --git a/apps/emqx_connector/src/emqx_connector_template.erl b/apps/emqx_connector/src/emqx_connector_template.erl index 221cc5e86..e966f4244 100644 --- a/apps/emqx_connector/src/emqx_connector_template.erl +++ b/apps/emqx_connector/src/emqx_connector_template.erl @@ -23,7 +23,7 @@ -export([parse_deep/1]). -export([parse_deep/2]). -export([validate/2]). --export([trivial/1]). +-export([is_const/1]). -export([unparse/1]). -export([render/2]). -export([render/3]). @@ -124,7 +124,6 @@ parse_accessor(Var) -> [<<>>] -> ?PH_VAR_THIS; Name -> - % TODO: lowercase? Name end. @@ -140,9 +139,11 @@ validate(Allowed, Template) -> {error, [{Var, disallowed} || Var <- Disallowed]} end. --spec trivial(t()) -> +%% @doc Check if a template is constant with respect to rendering, i.e. does not +%% contain any placeholders. +-spec is_const(t()) -> boolean(). -trivial(Template) -> +is_const(Template) -> validate([], Template) == ok. 
-spec unparse(t()) -> diff --git a/apps/emqx_connector/test/emqx_connector_template_SUITE.erl b/apps/emqx_connector/test/emqx_connector_template_SUITE.erl index 562e0e550..41f637226 100644 --- a/apps/emqx_connector/test/emqx_connector_template_SUITE.erl +++ b/apps/emqx_connector/test/emqx_connector_template_SUITE.erl @@ -122,18 +122,18 @@ t_unparse(_) -> unicode:characters_to_binary(emqx_connector_template:unparse(Template)) ). -t_trivial(_) -> +t_const(_) -> ?assertEqual( true, - emqx_connector_template:trivial(emqx_connector_template:parse(<<"">>)) + emqx_connector_template:is_const(emqx_connector_template:parse(<<"">>)) ), ?assertEqual( false, - emqx_connector_template:trivial(emqx_connector_template:parse(<<"a:${a},b:${b},c:$${c}">>)) + emqx_connector_template:is_const(emqx_connector_template:parse(<<"a:${a},b:${b},c:$${c}">>)) ), ?assertEqual( true, - emqx_connector_template:trivial( + emqx_connector_template:is_const( emqx_connector_template:parse(<<"a:$${a},b:$${b},c:$${c}">>) ) ). From 1fcdfe991ca0e0403f94c740347ac00bb4b8f877 Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Thu, 4 May 2023 17:00:27 +0300 Subject: [PATCH 058/155] fix(tpl): add few missing `@doc`s + correct typespecs --- .../src/emqx_connector_template.erl | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/apps/emqx_connector/src/emqx_connector_template.erl b/apps/emqx_connector/src/emqx_connector_template.erl index e966f4244..6f9688347 100644 --- a/apps/emqx_connector/src/emqx_connector_template.erl +++ b/apps/emqx_connector/src/emqx_connector_template.erl @@ -127,6 +127,9 @@ parse_accessor(Var) -> Name end. +%% @doc Validate a template against a set of allowed variables. +%% If the given template contains any variable not in the allowed set, an error +%% is returned. -spec validate([varname()], t()) -> ok | {error, [_Error :: {varname(), disallowed}]}. 
validate(Allowed, Template) -> @@ -146,8 +149,9 @@ validate(Allowed, Template) -> is_const(Template) -> validate([], Template) == ok. +%% @doc Restore original term from a parsed template. -spec unparse(t()) -> - unicode:chardata(). + term(). unparse({'$tpl', Template}) -> unparse_deep(Template); unparse(Template) -> @@ -208,17 +212,20 @@ render_value(Name, Value, #{var_trans := TransFun}) when is_function(TransFun, 2 render_value(_Name, Value, #{}) -> to_string(Value). +%% @doc Render a template with given bindings. +%% Behaves like `render/2`, but raises an error exception if one or more placeholders +%% are not found in the bindings. -spec render_strict(t(), bindings()) -> - unicode:chardata(). + term(). render_strict(Template, Bindings) -> render_strict(Template, Bindings, #{}). -spec render_strict(t(), bindings(), render_opts()) -> - unicode:chardata(). + term(). render_strict(Template, Bindings, Opts) -> case render(Template, Bindings, Opts) of - {String, []} -> - String; + {Render, []} -> + Render; {_, Errors = [_ | _]} -> error(Errors, [unparse(Template), Bindings]) end. From f689d6c233575a1755efbd80a7d6abc1cff5d296 Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Thu, 4 May 2023 19:08:03 +0300 Subject: [PATCH 059/155] fix(tpl): ensure backward compat in `emqx_rule_engine` Missing bindings in string templates will be rendered as "undefined", as before. Rendering still assumes that missing binding with implicit default (`undefined`) is an error. This will also restore complete backward compat in `emqx_prometheus`. 
--- .../src/emqx_connector_template.erl | 2 -- .../test/emqx_connector_template_SUITE.erl | 2 +- .../src/emqx_rule_actions.erl | 9 ++++--- .../test/emqx_rule_engine_SUITE.erl | 27 +++++++++++++++++++ 4 files changed, 33 insertions(+), 7 deletions(-) diff --git a/apps/emqx_connector/src/emqx_connector_template.erl b/apps/emqx_connector/src/emqx_connector_template.erl index 6f9688347..72062fc2c 100644 --- a/apps/emqx_connector/src/emqx_connector_template.erl +++ b/apps/emqx_connector/src/emqx_connector_template.erl @@ -340,8 +340,6 @@ lookup(Prop, Bindings) when is_binary(Prop) -> -spec to_string(binding()) -> unicode:chardata(). -to_string(undefined) -> - []; to_string(Bin) when is_binary(Bin) -> Bin; to_string(Num) when is_integer(Num) -> integer_to_binary(Num); to_string(Num) when is_float(Num) -> float_to_binary(Num, [{decimals, 10}, compact]); diff --git a/apps/emqx_connector/test/emqx_connector_template_SUITE.erl b/apps/emqx_connector/test/emqx_connector_template_SUITE.erl index 41f637226..b6784ea54 100644 --- a/apps/emqx_connector/test/emqx_connector_template_SUITE.erl +++ b/apps/emqx_connector/test/emqx_connector_template_SUITE.erl @@ -94,7 +94,7 @@ t_render_missing_bindings(_) -> <<"a:${a},b:${b},c:${c},d:${d.d1},e:${no.such_atom_i_swear}">> ), ?assertEqual( - {<<"a:,b:,c:,d:,e:">>, [ + {<<"a:undefined,b:undefined,c:undefined,d:undefined,e:undefined">>, [ {"no.such_atom_i_swear", undefined}, {"d.d1", undefined}, {"c", undefined}, diff --git a/apps/emqx_rule_engine/src/emqx_rule_actions.erl b/apps/emqx_rule_engine/src/emqx_rule_actions.erl index fa677ce78..7473572c8 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_actions.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_actions.erl @@ -118,10 +118,11 @@ republish( } } ) -> - Topic = unicode:characters_to_binary( - emqx_connector_template:render_strict(TopicTemplate, Selected) - ), - Payload = emqx_connector_template:render_strict(PayloadTemplate, Selected), + % NOTE: rendering missing bindings as string 
"undefined" + {TopicString, _Errors1} = emqx_connector_template:render(TopicTemplate, Selected), + {PayloadString, _Errors2} = emqx_connector_template:render(PayloadTemplate, Selected), + Topic = iolist_to_binary(TopicString), + Payload = iolist_to_binary(PayloadString), QoS = render_simple_var(QoSTemplate, Selected, 0), Retain = render_simple_var(RetainTemplate, Selected, false), %% 'flags' is set for message re-publishes or message related diff --git a/apps/emqx_rule_engine/test/emqx_rule_engine_SUITE.erl b/apps/emqx_rule_engine/test/emqx_rule_engine_SUITE.erl index fcb04f9b3..41fec48ee 100644 --- a/apps/emqx_rule_engine/test/emqx_rule_engine_SUITE.erl +++ b/apps/emqx_rule_engine/test/emqx_rule_engine_SUITE.erl @@ -81,6 +81,7 @@ groups() -> t_sqlselect_3, t_sqlselect_message_publish_event_keep_original_props_1, t_sqlselect_message_publish_event_keep_original_props_2, + t_sqlselect_missing_template_vars_render_as_undefined, t_sqlparse_event_1, t_sqlparse_event_2, t_sqlparse_event_3, @@ -1946,6 +1947,32 @@ t_sqlselect_as_put(_Config) -> PayloadMap2 ). +t_sqlselect_missing_template_vars_render_as_undefined(_Config) -> + SQL = <<"SELECT * FROM \"$events/client_connected\"">>, + Repub = republish_action(<<"t2">>, <<"${clientid}:${missing.var}">>), + {ok, TopicRule} = emqx_rule_engine:create_rule( + #{ + sql => SQL, + id => ?TMP_RULEID, + actions => [Repub] + } + ), + {ok, Client1} = emqtt:start_link([{clientid, <<"sub-01">>}]), + {ok, _} = emqtt:connect(Client1), + {ok, _, _} = emqtt:subscribe(Client1, <<"t2">>), + {ok, Client2} = emqtt:start_link([{clientid, <<"pub-02">>}]), + {ok, _} = emqtt:connect(Client2), + emqtt:publish(Client2, <<"foo/bar/1">>, <<>>), + receive + {publish, Msg} -> + ?assertMatch(#{topic := <<"t2">>, payload := <<"pub-02:undefined">>}, Msg) + after 2000 -> + ct:fail(wait_for_t2) + end, + emqtt:stop(Client2), + emqtt:stop(Client1), + delete_rule(TopicRule). 
+ t_sqlparse_event_1(_Config) -> Sql = "select topic as tp " From 343b679741949db12f462145e983d1c089eb13c5 Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Fri, 5 May 2023 10:24:47 +0300 Subject: [PATCH 060/155] feat(tpl): make escaping mechanism more foolproof Treat "${$}" as literal "$". This allows to template express strings, for example, of the form "${some_var_value}" where `some_var_value` is interpolated from bindings. --- .../src/emqx_authn/emqx_authn_utils.erl | 4 ++-- .../src/emqx_authz/emqx_authz_utils.erl | 4 ++-- .../src/emqx_connector_template.erl | 24 +++++++++---------- .../test/emqx_connector_template_SUITE.erl | 22 +++++++++-------- 4 files changed, 28 insertions(+), 26 deletions(-) diff --git a/apps/emqx_auth/src/emqx_authn/emqx_authn_utils.erl b/apps/emqx_auth/src/emqx_authn/emqx_authn_utils.erl index d9b20a47c..0a938eafb 100644 --- a/apps/emqx_auth/src/emqx_authn/emqx_authn_utils.erl +++ b/apps/emqx_auth/src/emqx_authn/emqx_authn_utils.erl @@ -133,7 +133,7 @@ handle_disallowed_placeholders(Template, Source) -> allowed => #{placeholders => ?ALLOWED_VARS}, notice => "Disallowed placeholders will be rendered as is." - " However, consider using `$${...}` escaping for literal `${...}` where" + " However, consider using `${$}` escaping for literal `$` where" " needed to avoid unexpected results." }), Result = prerender_disallowed_placeholders(Template), @@ -153,7 +153,7 @@ prerender_disallowed_placeholders(Template) -> % parse as a literal string. 
case lists:member(Name, ?ALLOWED_VARS) of true -> "${" ++ Name ++ "}"; - false -> "$${" ++ Name ++ "}" + false -> "${$}{" ++ Name ++ "}" end end }), diff --git a/apps/emqx_auth/src/emqx_authz/emqx_authz_utils.erl b/apps/emqx_auth/src/emqx_authz/emqx_authz_utils.erl index bd7b353a5..444955504 100644 --- a/apps/emqx_auth/src/emqx_authz/emqx_authz_utils.erl +++ b/apps/emqx_auth/src/emqx_authz/emqx_authz_utils.erl @@ -136,7 +136,7 @@ handle_disallowed_placeholders(Template, Source, Allowed) -> allowed => #{placeholders => Allowed}, notice => "Disallowed placeholders will be rendered as is." - " However, consider using `$${...}` escaping for literal `${...}` where" + " However, consider using `${$}` escaping for literal `$` where" " needed to avoid unexpected results." }), Result = prerender_disallowed_placeholders(Template, Allowed), @@ -156,7 +156,7 @@ prerender_disallowed_placeholders(Template, Allowed) -> % parse as a literal string. case lists:member(Name, Allowed) of true -> "${" ++ Name ++ "}"; - false -> "$${" ++ Name ++ "}" + false -> "${$}{" ++ Name ++ "}" end end }), diff --git a/apps/emqx_connector/src/emqx_connector_template.erl b/apps/emqx_connector/src/emqx_connector_template.erl index 72062fc2c..619dbd6ec 100644 --- a/apps/emqx_connector/src/emqx_connector_template.erl +++ b/apps/emqx_connector/src/emqx_connector_template.erl @@ -42,7 +42,7 @@ -type t() :: str() | {'$tpl', deeptpl()}. --type str() :: [unicode:chardata() | placeholder()]. +-type str() :: [iodata() | byte() | placeholder()]. -type deep() :: {'$tpl', deeptpl()}. -type deeptpl() :: @@ -76,7 +76,8 @@ var_trans => var_trans() }. --define(RE_PLACEHOLDER, "\\$(\\$?)\\{[.]?([a-zA-Z0-9._]*)\\}"). +-define(RE_PLACEHOLDER, "\\$\\{[.]?([a-zA-Z0-9._]*)\\}"). +-define(RE_ESCAPE, "\\$\\{(\\$)\\}"). %% @doc Parse a unicode string into a template. 
%% String might contain zero or more of placeholders in the form of `${var}`, @@ -95,22 +96,21 @@ parse(String, Opts) -> RE = case Opts of #{strip_double_quote := true} -> - <<"((?|" ?RE_PLACEHOLDER "|\"" ?RE_PLACEHOLDER "\"))">>; + <<"((?|" ?RE_PLACEHOLDER "|\"" ?RE_PLACEHOLDER "\")|" ?RE_ESCAPE ")">>; #{} -> - <<"(" ?RE_PLACEHOLDER ")">> + <<"(" ?RE_PLACEHOLDER "|" ?RE_ESCAPE ")">> end, Splits = re:split(String, RE, [{return, binary}, group, trim, unicode]), Components = lists:flatmap(fun parse_split/1, Splits), Components. -parse_split([Part, _PH, <<>>, Var]) -> +parse_split([Part, _PH, Var, <<>>]) -> % Regular placeholder prepend(Part, [{var, unicode:characters_to_list(Var), parse_accessor(Var)}]); -parse_split([Part, _PH = <>, <<"$">>, _]) -> - % Escaped literal, take all but the second byte, which is always `$`. - % Important to make a whole token starting with `$` so the `unparse/11` - % function can distinguish escaped literals. - prepend(Part, [<>]); +parse_split([Part, _Escape, <<>>, <<"$">>]) -> + % Escaped literal `$`. + % Use single char as token so the `unparse/1` function can distinguish escaped `$`. + prepend(Part, [$$]); parse_split([Tail]) -> [Tail]. @@ -159,8 +159,8 @@ unparse(Template) -> unparse_part({var, Name, _Accessor}) -> render_placeholder(Name); -unparse_part(Part = <<"${", _/binary>>) -> - <<"$", Part/binary>>; +unparse_part($$) -> + <<"${$}">>; unparse_part(Part) -> Part. diff --git a/apps/emqx_connector/test/emqx_connector_template_SUITE.erl b/apps/emqx_connector/test/emqx_connector_template_SUITE.erl index b6784ea54..3700caa96 100644 --- a/apps/emqx_connector/test/emqx_connector_template_SUITE.erl +++ b/apps/emqx_connector/test/emqx_connector_template_SUITE.erl @@ -115,7 +115,7 @@ t_render_missing_bindings(_) -> ). 
t_unparse(_) -> - TString = <<"a:${a},b:${b},c:$${c},d:{${d.d1}}">>, + TString = <<"a:${a},b:${b},c:$${c},d:{${d.d1}},e:${$}{e},lit:${$}{$}">>, Template = emqx_connector_template:parse(TString), ?assertEqual( TString, @@ -129,12 +129,14 @@ t_const(_) -> ), ?assertEqual( false, - emqx_connector_template:is_const(emqx_connector_template:parse(<<"a:${a},b:${b},c:$${c}">>)) + emqx_connector_template:is_const( + emqx_connector_template:parse(<<"a:${a},b:${b},c:${$}{c}">>) + ) ), ?assertEqual( true, emqx_connector_template:is_const( - emqx_connector_template:parse(<<"a:$${a},b:$${b},c:$${c}">>) + emqx_connector_template:parse(<<"a:${$}{a},b:${$}{b}">>) ) ). @@ -147,16 +149,16 @@ t_render_partial_ph(_) -> ). t_parse_escaped(_) -> - Bindings = #{a => <<"1">>, b => 1}, - Template = emqx_connector_template:parse(<<"a:${a},b:$${b}">>), + Bindings = #{a => <<"1">>, b => 1, c => "VAR"}, + Template = emqx_connector_template:parse(<<"a:${a},b:${$}{b},c:${$}{${c}},lit:${$}{$}">>), ?assertEqual( - <<"a:1,b:${b}">>, + <<"a:1,b:${b},c:${VAR},lit:${$}">>, render_strict_string(Template, Bindings) ). t_parse_escaped_dquote(_) -> Bindings = #{a => <<"1">>, b => 1}, - Template = emqx_connector_template:parse(<<"a:\"${a}\",b:\"$${b}\"">>, #{ + Template = emqx_connector_template:parse(<<"a:\"${a}\",b:\"${$}{b}\"">>, #{ strip_double_quote => true }), ?assertEqual( @@ -299,7 +301,7 @@ t_render_tmpl_deep(_) -> Bindings = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, Template = emqx_connector_template:parse_deep( - #{<<"${a}">> => [<<"${b}">>, "c", 2, 3.0, '${d}', {[<<"${c}">>, <<"$${d}">>], 0}]} + #{<<"${a}">> => [<<"$${b}">>, "c", 2, 3.0, '${d}', {[<<"${c}">>, <<"${$}{d}">>], 0}]} ), ?assertEqual( @@ -308,12 +310,12 @@ t_render_tmpl_deep(_) -> ), ?assertEqual( - #{<<"1">> => [<<"1">>, "c", 2, 3.0, '${d}', {[<<"1.0">>, <<"${d}">>], 0}]}, + #{<<"1">> => [<<"$1">>, "c", 2, 3.0, '${d}', {[<<"1.0">>, <<"${d}">>], 0}]}, emqx_connector_template:render_strict(Template, Bindings) ). 
t_unparse_tmpl_deep(_) -> - Term = #{<<"${a}">> => [<<"$${b}">>, "c", 2, 3.0, '${d}', {[<<"${c}">>], 0}]}, + Term = #{<<"${a}">> => [<<"$${b}">>, "c", 2, 3.0, '${d}', {[<<"${c}">>], <<"${$}{d}">>, 0}]}, Template = emqx_connector_template:parse_deep(Term), ?assertEqual(Term, emqx_connector_template:unparse(Template)). From 8e4585d64fab2c72b62e77351f1a0d074f580d30 Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Fri, 14 Jul 2023 18:40:11 +0200 Subject: [PATCH 061/155] chore: move template modules to `emqx_utils` Even though most of the time these modules will be used by connectors, there are exceptions (namely, `emqx_rule_engine`). Besides, they are general enough to land there, more so given that `emqx_placeholder` is already there. --- .../src/emqx_authn/emqx_authn_utils.erl | 26 +-- .../src/emqx_authz/emqx_authz_rule.erl | 4 +- .../src/emqx_authz/emqx_authz_utils.erl | 26 +-- .../test/emqx_authz/emqx_authz_rule_SUITE.erl | 4 +- .../src/emqx_bridge_http_connector.erl | 4 +- .../test/emqx_bridge_http_connector_tests.erl | 2 +- .../emqx_connector/src/emqx_connector_sql.erl | 159 ------------------ apps/emqx_mysql/src/emqx_mysql.erl | 14 +- apps/emqx_postgresql/src/emqx_postgresql.erl | 6 +- apps/emqx_prometheus/src/emqx_prometheus.erl | 4 +- .../src/emqx_rule_actions.erl | 20 +-- .../src/emqx_template.erl} | 6 +- .../src/emqx_template_sql.erl} | 36 ++-- apps/emqx_utils/src/emqx_utils_sql.erl | 10 +- .../test/emqx_template_SUITE.erl} | 96 +++++------ 15 files changed, 130 insertions(+), 287 deletions(-) delete mode 100644 apps/emqx_connector/src/emqx_connector_sql.erl rename apps/{emqx_connector/src/emqx_connector_template.erl => emqx_utils/src/emqx_template.erl} (99%) rename apps/{emqx_connector/src/emqx_connector_template_sql.erl => emqx_utils/src/emqx_template_sql.erl} (77%) rename apps/{emqx_connector/test/emqx_connector_template_SUITE.erl => emqx_utils/test/emqx_template_SUITE.erl} (69%) diff --git a/apps/emqx_auth/src/emqx_authn/emqx_authn_utils.erl 
b/apps/emqx_auth/src/emqx_authn/emqx_authn_utils.erl index 0a938eafb..f782e0e6c 100644 --- a/apps/emqx_auth/src/emqx_authn/emqx_authn_utils.erl +++ b/apps/emqx_auth/src/emqx_authn/emqx_authn_utils.erl @@ -108,22 +108,22 @@ check_password_from_selected_map(Algorithm, Selected, Password) -> end. parse_deep(Template) -> - Result = emqx_connector_template:parse_deep(Template), + Result = emqx_template:parse_deep(Template), handle_disallowed_placeholders(Result, {deep, Template}). parse_str(Template) -> - Result = emqx_connector_template:parse(Template), + Result = emqx_template:parse(Template), handle_disallowed_placeholders(Result, {string, Template}). parse_sql(Template, ReplaceWith) -> - {Statement, Result} = emqx_connector_template_sql:parse_prepstmt( + {Statement, Result} = emqx_template_sql:parse_prepstmt( Template, #{parameters => ReplaceWith, strip_double_quote => true} ), {Statement, handle_disallowed_placeholders(Result, {string, Template})}. handle_disallowed_placeholders(Template, Source) -> - case emqx_connector_template:validate(?ALLOWED_VARS, Template) of + case emqx_template:validate(?ALLOWED_VARS, Template) of ok -> Template; {error, Disallowed} -> @@ -139,14 +139,14 @@ handle_disallowed_placeholders(Template, Source) -> Result = prerender_disallowed_placeholders(Template), case Source of {string, _} -> - emqx_connector_template:parse(Result); + emqx_template:parse(Result); {deep, _} -> - emqx_connector_template:parse_deep(Result) + emqx_template:parse_deep(Result) end end. prerender_disallowed_placeholders(Template) -> - {Result, _} = emqx_connector_template:render(Template, #{}, #{ + {Result, _} = emqx_template:render(Template, #{}, #{ var_trans => fun(Name, _) -> % NOTE % Rendering disallowed placeholders in escaped form, which will then @@ -162,7 +162,7 @@ prerender_disallowed_placeholders(Template) -> render_deep(Template, Credential) -> % NOTE % Ignoring errors here, undefined bindings will be replaced with empty string. 
- {Term, _Errors} = emqx_connector_template:render( + {Term, _Errors} = emqx_template:render( Template, mapping_credential(Credential), #{var_trans => fun to_string/2} @@ -172,7 +172,7 @@ render_deep(Template, Credential) -> render_str(Template, Credential) -> % NOTE % Ignoring errors here, undefined bindings will be replaced with empty string. - {String, _Errors} = emqx_connector_template:render( + {String, _Errors} = emqx_template:render( Template, mapping_credential(Credential), #{var_trans => fun to_string/2} @@ -182,7 +182,7 @@ render_str(Template, Credential) -> render_urlencoded_str(Template, Credential) -> % NOTE % Ignoring errors here, undefined bindings will be replaced with empty string. - {String, _Errors} = emqx_connector_template:render( + {String, _Errors} = emqx_template:render( Template, mapping_credential(Credential), #{var_trans => fun to_urlencoded_string/2} @@ -192,7 +192,7 @@ render_urlencoded_str(Template, Credential) -> render_sql_params(ParamList, Credential) -> % NOTE % Ignoring errors here, undefined bindings will be replaced with empty string. - {Row, _Errors} = emqx_connector_template:render( + {Row, _Errors} = emqx_template:render( ParamList, mapping_credential(Credential), #{var_trans => fun to_sql_valaue/2} @@ -322,10 +322,10 @@ to_urlencoded_string(Name, Value) -> emqx_http_lib:uri_encode(to_string(Name, Value)). to_string(Name, Value) -> - emqx_connector_template:to_string(render_var(Name, Value)). + emqx_template:to_string(render_var(Name, Value)). to_sql_valaue(Name, Value) -> - emqx_connector_sql:to_sql_value(render_var(Name, Value)). + emqx_utils_sql:to_sql_value(render_var(Name, Value)). 
render_var(_, undefined) -> % NOTE diff --git a/apps/emqx_auth/src/emqx_authz/emqx_authz_rule.erl b/apps/emqx_auth/src/emqx_authz/emqx_authz_rule.erl index 160f9cac4..ad6dec56b 100644 --- a/apps/emqx_auth/src/emqx_authz/emqx_authz_rule.erl +++ b/apps/emqx_auth/src/emqx_authz/emqx_authz_rule.erl @@ -184,7 +184,7 @@ compile_topic({eq, Topic}) -> {eq, emqx_topic:words(bin(Topic))}; compile_topic(Topic) -> Template = emqx_authz_utils:parse_str(Topic, [?VAR_USERNAME, ?VAR_CLIENTID]), - case emqx_connector_template:is_const(Template) of + case emqx_template:is_const(Template) of true -> emqx_topic:words(bin(Topic)); false -> {pattern, Template} end. @@ -302,7 +302,7 @@ match_who(_, _) -> match_topics(_ClientInfo, _Topic, []) -> false; match_topics(ClientInfo, Topic, [{pattern, PatternFilter} | Filters]) -> - TopicFilter = bin(emqx_connector_template:render_strict(PatternFilter, ClientInfo)), + TopicFilter = bin(emqx_template:render_strict(PatternFilter, ClientInfo)), match_topic(emqx_topic:words(Topic), emqx_topic:words(TopicFilter)) orelse match_topics(ClientInfo, Topic, Filters); match_topics(ClientInfo, Topic, [TopicFilter | Filters]) -> diff --git a/apps/emqx_auth/src/emqx_authz/emqx_authz_utils.erl b/apps/emqx_auth/src/emqx_authz/emqx_authz_utils.erl index 444955504..a17a563ae 100644 --- a/apps/emqx_auth/src/emqx_authz/emqx_authz_utils.erl +++ b/apps/emqx_auth/src/emqx_authz/emqx_authz_utils.erl @@ -110,15 +110,15 @@ update_config(Path, ConfigRequest) -> }). parse_deep(Template, PlaceHolders) -> - Result = emqx_connector_template:parse_deep(Template), + Result = emqx_template:parse_deep(Template), handle_disallowed_placeholders(Result, {deep, Template}, PlaceHolders). parse_str(Template, PlaceHolders) -> - Result = emqx_connector_template:parse(Template), + Result = emqx_template:parse(Template), handle_disallowed_placeholders(Result, {string, Template}, PlaceHolders). 
parse_sql(Template, ReplaceWith, PlaceHolders) -> - {Statement, Result} = emqx_connector_template_sql:parse_prepstmt( + {Statement, Result} = emqx_template_sql:parse_prepstmt( Template, #{parameters => ReplaceWith, strip_double_quote => true} ), @@ -126,7 +126,7 @@ parse_sql(Template, ReplaceWith, PlaceHolders) -> {Statement, FResult}. handle_disallowed_placeholders(Template, Source, Allowed) -> - case emqx_connector_template:validate(Allowed, Template) of + case emqx_template:validate(Allowed, Template) of ok -> Template; {error, Disallowed} -> @@ -142,14 +142,14 @@ handle_disallowed_placeholders(Template, Source, Allowed) -> Result = prerender_disallowed_placeholders(Template, Allowed), case Source of {string, _} -> - emqx_connector_template:parse(Result); + emqx_template:parse(Result); {deep, _} -> - emqx_connector_template:parse_deep(Result) + emqx_template:parse_deep(Result) end end. prerender_disallowed_placeholders(Template, Allowed) -> - {Result, _} = emqx_connector_template:render(Template, #{}, #{ + {Result, _} = emqx_template:render(Template, #{}, #{ var_trans => fun(Name, _) -> % NOTE % Rendering disallowed placeholders in escaped form, which will then @@ -165,7 +165,7 @@ prerender_disallowed_placeholders(Template, Allowed) -> render_deep(Template, Values) -> % NOTE % Ignoring errors here, undefined bindings will be replaced with empty string. - {Term, _Errors} = emqx_connector_template:render( + {Term, _Errors} = emqx_template:render( Template, client_vars(Values), #{var_trans => fun to_string/2} @@ -175,7 +175,7 @@ render_deep(Template, Values) -> render_str(Template, Values) -> % NOTE % Ignoring errors here, undefined bindings will be replaced with empty string. 
- {String, _Errors} = emqx_connector_template:render( + {String, _Errors} = emqx_template:render( Template, client_vars(Values), #{var_trans => fun to_string/2} @@ -185,7 +185,7 @@ render_str(Template, Values) -> render_urlencoded_str(Template, Values) -> % NOTE % Ignoring errors here, undefined bindings will be replaced with empty string. - {String, _Errors} = emqx_connector_template:render( + {String, _Errors} = emqx_template:render( Template, client_vars(Values), #{var_trans => fun to_urlencoded_string/2} @@ -195,7 +195,7 @@ render_urlencoded_str(Template, Values) -> render_sql_params(ParamList, Values) -> % NOTE % Ignoring errors here, undefined bindings will be replaced with empty string. - {Row, _Errors} = emqx_connector_template:render( + {Row, _Errors} = emqx_template:render( ParamList, client_vars(Values), #{var_trans => fun to_sql_value/2} @@ -270,10 +270,10 @@ to_urlencoded_string(Name, Value) -> emqx_http_lib:uri_encode(to_string(Name, Value)). to_string(Name, Value) -> - emqx_connector_template:to_string(render_var(Name, Value)). + emqx_template:to_string(render_var(Name, Value)). to_sql_value(Name, Value) -> - emqx_connector_sql:to_sql_value(render_var(Name, Value)). + emqx_utils_sql:to_sql_value(render_var(Name, Value)). render_var(_, undefined) -> % NOTE diff --git a/apps/emqx_auth/test/emqx_authz/emqx_authz_rule_SUITE.erl b/apps/emqx_auth/test/emqx_authz/emqx_authz_rule_SUITE.erl index 5031daff6..d81a93038 100644 --- a/apps/emqx_auth/test/emqx_authz/emqx_authz_rule_SUITE.erl +++ b/apps/emqx_auth/test/emqx_authz/emqx_authz_rule_SUITE.erl @@ -69,8 +69,8 @@ set_special_configs(_App) -> t_compile(_) -> % NOTE % Some of the following testcase are relying on the internal representation of - % `emqx_connector_template:t()`. If the internal representation is changed, these - % testcases may fail. + % `emqx_template:t()`. If the internal representation is changed, these testcases + % may fail. 
?assertEqual({deny, all, all, [['#']]}, emqx_authz_rule:compile({deny, all})), ?assertEqual( diff --git a/apps/emqx_bridge_http/src/emqx_bridge_http_connector.erl b/apps/emqx_bridge_http/src/emqx_bridge_http_connector.erl index 869f081fb..88f55af52 100644 --- a/apps/emqx_bridge_http/src/emqx_bridge_http_connector.erl +++ b/apps/emqx_bridge_http/src/emqx_bridge_http_connector.erl @@ -535,7 +535,7 @@ maybe_parse_template(Key, Conf) -> end. parse_template(String) -> - emqx_connector_template:parse(String). + emqx_template:parse(String). process_request( #{ @@ -573,7 +573,7 @@ render_headers(HeaderTks, Msg) -> render_template(Template, Msg) -> % NOTE: ignoring errors here, missing variables will be rendered as `"undefined"`. - {String, _Errors} = emqx_connector_template:render(Template, Msg), + {String, _Errors} = emqx_template:render(Template, Msg), String. render_template_string(Template, Msg) -> diff --git a/apps/emqx_bridge_http/test/emqx_bridge_http_connector_tests.erl b/apps/emqx_bridge_http/test/emqx_bridge_http_connector_tests.erl index 1de210260..4f5e2929c 100644 --- a/apps/emqx_bridge_http/test/emqx_bridge_http_connector_tests.erl +++ b/apps/emqx_bridge_http/test/emqx_bridge_http_connector_tests.erl @@ -84,7 +84,7 @@ is_wrapped(_Other) -> false. untmpl(Tpl) -> - iolist_to_binary(emqx_connector_template:render_strict(Tpl, #{})). + iolist_to_binary(emqx_template:render_strict(Tpl, #{})). is_unwrapped_headers(Headers) -> lists:all(fun is_unwrapped_header/1, Headers). diff --git a/apps/emqx_connector/src/emqx_connector_sql.erl b/apps/emqx_connector/src/emqx_connector_sql.erl deleted file mode 100644 index be0b220e6..000000000 --- a/apps/emqx_connector/src/emqx_connector_sql.erl +++ /dev/null @@ -1,159 +0,0 @@ -%%-------------------------------------------------------------------- -%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved. 
-%% -%% Licensed under the Apache License, Version 2.0 (the "License"); -%% you may not use this file except in compliance with the License. -%% You may obtain a copy of the License at -%% -%% http://www.apache.org/licenses/LICENSE-2.0 -%% -%% Unless required by applicable law or agreed to in writing, software -%% distributed under the License is distributed on an "AS IS" BASIS, -%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -%% See the License for the specific language governing permissions and -%% limitations under the License. -%%-------------------------------------------------------------------- - --module(emqx_connector_sql). - --export([get_statement_type/1]). --export([parse_insert/1]). - --export([to_sql_value/1]). --export([to_sql_string/2]). - --export([escape_sql/1]). --export([escape_cql/1]). --export([escape_mysql/1]). - --export_type([value/0]). - --type statement_type() :: select | insert | delete. --type value() :: null | binary() | number() | boolean() | [value()]. - --dialyzer({no_improper_lists, [escape_mysql/4, escape_prepend/4]}). - --spec get_statement_type(iodata()) -> statement_type() | {error, unknown}. -get_statement_type(Query) -> - KnownTypes = #{ - <<"select">> => select, - <<"insert">> => insert, - <<"delete">> => delete - }, - case re:run(Query, <<"^\\s*([a-zA-Z]+)">>, [{capture, all_but_first, binary}]) of - {match, [Token]} -> - maps:get(string:lowercase(Token), KnownTypes, {error, unknown}); - _ -> - {error, unknown} - end. - -%% @doc Parse an INSERT SQL statement into its INSERT part and the VALUES part. -%% SQL = <<"INSERT INTO \"abc\" (c1, c2, c3) VALUES (${a}, ${b}, ${c.prop})">> -%% {ok, {<<"INSERT INTO \"abc\" (c1, c2, c3)">>, <<"(${a}, ${b}, ${c.prop})">>}} --spec parse_insert(iodata()) -> - {ok, {_Statement :: binary(), _Rows :: binary()}} | {error, not_insert_sql}. 
-parse_insert(SQL) -> - case re:split(SQL, "((?i)values)", [{return, binary}]) of - [Part1, _, Part3] -> - case string:trim(Part1, leading) of - <<"insert", _/binary>> = InsertSQL -> - {ok, {InsertSQL, Part3}}; - <<"INSERT", _/binary>> = InsertSQL -> - {ok, {InsertSQL, Part3}}; - _ -> - {error, not_insert_sql} - end; - _ -> - {error, not_insert_sql} - end. - -%% @doc Convert an Erlang term to a value that can be used primarily in -%% prepared SQL statements. --spec to_sql_value(term()) -> value(). -to_sql_value(undefined) -> null; -to_sql_value(List) when is_list(List) -> List; -to_sql_value(Bin) when is_binary(Bin) -> Bin; -to_sql_value(Num) when is_number(Num) -> Num; -to_sql_value(Bool) when is_boolean(Bool) -> Bool; -to_sql_value(Atom) when is_atom(Atom) -> atom_to_binary(Atom, utf8); -to_sql_value(Map) when is_map(Map) -> emqx_utils_json:encode(Map). - -%% @doc Convert an Erlang term to a string that can be interpolated in literal -%% SQL statements. The value is escaped if necessary. --spec to_sql_string(term(), Options) -> iodata() when - Options :: #{ - escaping => cql | mysql | sql - }. -to_sql_string(String, #{escaping := mysql}) when is_binary(String) -> - try - escape_mysql(String) - catch - throw:invalid_utf8 -> - [<<"0x">>, binary:encode_hex(String)] - end; -to_sql_string(Term, #{escaping := mysql}) -> - maybe_escape(Term, fun escape_mysql/1); -to_sql_string(Term, #{escaping := cql}) -> - maybe_escape(Term, fun escape_cql/1); -to_sql_string(Term, #{}) -> - maybe_escape(Term, fun escape_sql/1). - --spec maybe_escape(_Value, fun((binary()) -> iodata())) -> iodata(). 
-maybe_escape(undefined, _EscapeFun) -> - <<"NULL">>; -maybe_escape(Str, EscapeFun) when is_binary(Str) -> - EscapeFun(Str); -maybe_escape(Str, EscapeFun) when is_list(Str) -> - case unicode:characters_to_binary(Str) of - Bin when is_binary(Bin) -> - EscapeFun(Bin); - Otherwise -> - error(Otherwise) - end; -maybe_escape(Val, EscapeFun) when is_atom(Val) orelse is_map(Val) -> - EscapeFun(emqx_connector_template:to_string(Val)); -maybe_escape(Val, _EscapeFun) -> - emqx_connector_template:to_string(Val). - --spec escape_sql(binary()) -> iodata(). -escape_sql(S) -> - % NOTE - % This is a bit misleading: currently, escaping logic in `escape_sql/1` likely - % won't work with pgsql since it does not support C-style escapes by default. - % https://www.postgresql.org/docs/14/sql-syntax-lexical.html#SQL-SYNTAX-CONSTANTS - ES = binary:replace(S, [<<"\\">>, <<"'">>], <<"\\">>, [global, {insert_replaced, 1}]), - [$', ES, $']. - --spec escape_cql(binary()) -> iodata(). -escape_cql(S) -> - ES = binary:replace(S, <<"'">>, <<"'">>, [global, {insert_replaced, 1}]), - [$', ES, $']. - --spec escape_mysql(binary()) -> iodata(). -escape_mysql(S0) -> - % https://dev.mysql.com/doc/refman/8.0/en/string-literals.html - [$', escape_mysql(S0, 0, 0, S0), $']. - -%% NOTE -%% This thing looks more complicated than needed because it's optimized for as few -%% intermediate memory (re)allocations as possible. 
-escape_mysql(<<$', Rest/binary>>, I, Run, Src) -> - escape_prepend(I, Run, Src, [<<"\\'">> | escape_mysql(Rest, I + Run + 1, 0, Src)]); -escape_mysql(<<$\\, Rest/binary>>, I, Run, Src) -> - escape_prepend(I, Run, Src, [<<"\\\\">> | escape_mysql(Rest, I + Run + 1, 0, Src)]); -escape_mysql(<<0, Rest/binary>>, I, Run, Src) -> - escape_prepend(I, Run, Src, [<<"\\0">> | escape_mysql(Rest, I + Run + 1, 0, Src)]); -escape_mysql(<<_/utf8, Rest/binary>> = S, I, Run, Src) -> - CWidth = byte_size(S) - byte_size(Rest), - escape_mysql(Rest, I, Run + CWidth, Src); -escape_mysql(<<>>, 0, _, Src) -> - Src; -escape_mysql(<<>>, I, Run, Src) -> - binary:part(Src, I, Run); -escape_mysql(_, _I, _Run, _Src) -> - throw(invalid_utf8). - -escape_prepend(_RunI, 0, _Src, Tail) -> - Tail; -escape_prepend(I, Run, Src, Tail) -> - [binary:part(Src, I, Run) | Tail]. diff --git a/apps/emqx_mysql/src/emqx_mysql.erl b/apps/emqx_mysql/src/emqx_mysql.erl index 927c9d067..e052b9b89 100644 --- a/apps/emqx_mysql/src/emqx_mysql.erl +++ b/apps/emqx_mysql/src/emqx_mysql.erl @@ -46,7 +46,7 @@ default_port => ?MYSQL_DEFAULT_PORT }). --type template() :: {unicode:chardata(), emqx_connector_template:str()}. +-type template() :: {unicode:chardata(), emqx_template:str()}. -type state() :: #{ pool_name := binary(), @@ -387,16 +387,16 @@ parse_prepare_sql(Config) -> #{query_templates => Templates}. parse_prepare_sql(Key, Query, Acc) -> - Template = emqx_connector_template_sql:parse_prepstmt(Query, #{parameters => '?'}), + Template = emqx_template_sql:parse_prepstmt(Query, #{parameters => '?'}), AccNext = Acc#{{Key, prepstmt} => Template}, parse_batch_sql(Key, Query, AccNext). 
parse_batch_sql(Key, Query, Acc) -> - case emqx_connector_sql:get_statement_type(Query) of + case emqx_utils_sql:get_statement_type(Query) of insert -> - case emqx_connector_sql:parse_insert(Query) of + case emqx_utils_sql:parse_insert(Query) of {ok, {Insert, Params}} -> - RowTemplate = emqx_connector_template_sql:parse(Params), + RowTemplate = emqx_template_sql:parse(Params), Acc#{{Key, batch} => {Insert, RowTemplate}}; {error, Reason} -> ?SLOG(error, #{ @@ -427,7 +427,7 @@ proc_sql_params(TypeOrKey, SQLOrData, Params, #{query_templates := Templates}) - {SQLOrData, Params}; {_InsertPart, RowTemplate} -> % NOTE: ignoring errors here, missing variables are set to `null`. - {Row, _Errors} = emqx_connector_template_sql:render_prepstmt(RowTemplate, SQLOrData), + {Row, _Errors} = emqx_template_sql:render_prepstmt(RowTemplate, SQLOrData), {TypeOrKey, Row} end. @@ -438,7 +438,7 @@ on_batch_insert(InstId, BatchReqs, {InsertPart, RowTemplate}, State) -> render_row(RowTemplate, Data) -> % NOTE: ignoring errors here, missing variables are set to "NULL". - {Row, _Errors} = emqx_connector_template_sql:render(RowTemplate, Data, #{escaping => mysql}), + {Row, _Errors} = emqx_template_sql:render(RowTemplate, Data, #{escaping => mysql}), Row. on_sql_query( diff --git a/apps/emqx_postgresql/src/emqx_postgresql.erl b/apps/emqx_postgresql/src/emqx_postgresql.erl index 71ba93b9b..3f7b43c79 100644 --- a/apps/emqx_postgresql/src/emqx_postgresql.erl +++ b/apps/emqx_postgresql/src/emqx_postgresql.erl @@ -52,7 +52,7 @@ default_port => ?PGSQL_DEFAULT_PORT }). --type template() :: {unicode:chardata(), emqx_connector_template_sql:row_template()}. +-type template() :: {unicode:chardata(), emqx_template_sql:row_template()}. -type state() :: #{ pool_name := binary(), @@ -428,12 +428,12 @@ parse_prepare_sql(Config) -> #{query_templates => Templates}. 
parse_prepare_sql(Key, Query, Acc) -> - Template = emqx_connector_template_sql:parse_prepstmt(Query, #{parameters => '$n'}), + Template = emqx_template_sql:parse_prepstmt(Query, #{parameters => '$n'}), Acc#{Key => Template}. render_prepare_sql_row(RowTemplate, Data) -> % NOTE: ignoring errors here, missing variables will be replaced with `null`. - {Row, _Errors} = emqx_connector_template_sql:render_prepstmt(RowTemplate, Data), + {Row, _Errors} = emqx_template_sql:render_prepstmt(RowTemplate, Data), Row. init_prepare(State = #{query_templates := Templates}) when map_size(Templates) == 0 -> diff --git a/apps/emqx_prometheus/src/emqx_prometheus.erl b/apps/emqx_prometheus/src/emqx_prometheus.erl index 41dec9ee9..a242931c4 100644 --- a/apps/emqx_prometheus/src/emqx_prometheus.erl +++ b/apps/emqx_prometheus/src/emqx_prometheus.erl @@ -114,8 +114,8 @@ handle_info(_Msg, State) -> push_to_push_gateway(Uri, Headers, JobName) when is_list(Headers) -> [Name, Ip] = string:tokens(atom_to_list(node()), "@"), % NOTE: allowing errors here to keep rough backward compatibility - {JobName1, Errors} = emqx_connector_template:render( - emqx_connector_template:parse(JobName), + {JobName1, Errors} = emqx_template:render( + emqx_template:parse(JobName), #{<<"name">> => Name, <<"host">> => Ip} ), _ = diff --git a/apps/emqx_rule_engine/src/emqx_rule_actions.erl b/apps/emqx_rule_engine/src/emqx_rule_actions.erl index 7473572c8..7a8b2520c 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_actions.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_actions.erl @@ -71,7 +71,7 @@ pre_process_action_args( ) -> Args#{ preprocessed_tmpl => #{ - topic => emqx_connector_template:parse(Topic), + topic => emqx_template:parse(Topic), qos => parse_vars(QoS), retain => parse_vars(Retain), payload => parse_payload(Payload), @@ -119,8 +119,8 @@ republish( } ) -> % NOTE: rendering missing bindings as string "undefined" - {TopicString, _Errors1} = emqx_connector_template:render(TopicTemplate, Selected), - 
{PayloadString, _Errors2} = emqx_connector_template:render(PayloadTemplate, Selected), + {TopicString, _Errors1} = emqx_template:render(TopicTemplate, Selected), + {PayloadString, _Errors2} = emqx_template:render(PayloadTemplate, Selected), Topic = iolist_to_binary(TopicString), Payload = iolist_to_binary(PayloadString), QoS = render_simple_var(QoSTemplate, Selected, 0), @@ -202,13 +202,13 @@ safe_publish(RuleId, Topic, QoS, Flags, Payload, PubProps) -> emqx_metrics:inc_msg(Msg). parse_vars(Data) when is_binary(Data) -> - emqx_connector_template:parse(Data); + emqx_template:parse(Data); parse_vars(Data) -> {const, Data}. parse_mqtt_properties(MQTTPropertiesTemplate) -> maps:map( - fun(_Key, V) -> emqx_connector_template:parse(V) end, + fun(_Key, V) -> emqx_template:parse(V) end, MQTTPropertiesTemplate ). @@ -220,13 +220,13 @@ parse_user_properties(<<"${pub_props.'User-Property'}">>) -> ?ORIGINAL_USER_PROPERTIES; parse_user_properties(<<"${", _/binary>> = V) -> %% use a variable - emqx_connector_template:parse(V); + emqx_template:parse(V); parse_user_properties(_) -> %% invalid, discard undefined. render_simple_var([{var, _Name, Accessor}], Data, Default) -> - case emqx_connector_template:lookup_var(Accessor, Data) of + case emqx_template:lookup_var(Accessor, Data) of {ok, Var} -> Var; %% cannot find the variable from Data {error, _} -> Default @@ -236,8 +236,8 @@ render_simple_var({const, Val}, _Data, _Default) -> parse_payload(Payload) -> case string:is_empty(Payload) of - false -> emqx_connector_template:parse(Payload); - true -> emqx_connector_template:parse("${.}") + false -> emqx_template:parse(Payload); + true -> emqx_template:parse("${.}") end. 
render_pub_props(UserPropertiesTemplate, Selected, Env) -> @@ -259,7 +259,7 @@ render_mqtt_properties(MQTTPropertiesTemplate, Selected, Env) -> fun(K, Template, Acc) -> try V = unicode:characters_to_binary( - emqx_connector_template:render_strict(Template, Selected) + emqx_template:render_strict(Template, Selected) ), Acc#{K => V} catch diff --git a/apps/emqx_connector/src/emqx_connector_template.erl b/apps/emqx_utils/src/emqx_template.erl similarity index 99% rename from apps/emqx_connector/src/emqx_connector_template.erl rename to apps/emqx_utils/src/emqx_template.erl index 619dbd6ec..deb25d807 100644 --- a/apps/emqx_connector/src/emqx_connector_template.erl +++ b/apps/emqx_utils/src/emqx_template.erl @@ -14,9 +14,7 @@ %% limitations under the License. %%-------------------------------------------------------------------- --module(emqx_connector_template). - --include_lib("emqx/include/emqx_placeholder.hrl"). +-module(emqx_template). -export([parse/1]). -export([parse/2]). @@ -76,6 +74,8 @@ var_trans => var_trans() }. +-define(PH_VAR_THIS, '$this'). + -define(RE_PLACEHOLDER, "\\$\\{[.]?([a-zA-Z0-9._]*)\\}"). -define(RE_ESCAPE, "\\$\\{(\\$)\\}"). diff --git a/apps/emqx_connector/src/emqx_connector_template_sql.erl b/apps/emqx_utils/src/emqx_template_sql.erl similarity index 77% rename from apps/emqx_connector/src/emqx_connector_template_sql.erl rename to apps/emqx_utils/src/emqx_template_sql.erl index 90d79415e..f215cd868 100644 --- a/apps/emqx_connector/src/emqx_connector_template_sql.erl +++ b/apps/emqx_utils/src/emqx_template_sql.erl @@ -14,7 +14,7 @@ %% limitations under the License. %%-------------------------------------------------------------------- --module(emqx_connector_template_sql). +-module(emqx_template_sql). -export([parse/1]). -export([parse/2]). @@ -27,15 +27,15 @@ -export_type([row_template/0]). --type template() :: emqx_connector_template:t(). --type row_template() :: [emqx_connector_template:placeholder()]. 
--type bindings() :: emqx_connector_template:bindings(). +-type template() :: emqx_template:t(). +-type row_template() :: [emqx_template:placeholder()]. +-type bindings() :: emqx_template:bindings(). --type values() :: [emqx_connector_sql:value()]. +-type values() :: [emqx_utils_sql:value()]. -type parse_opts() :: #{ parameters => '$n' | ':n' | '?', - % Inherited from `emqx_connector_template:parse_opts()` + % Inherited from `emqx_template:parse_opts()` strip_double_quote => boolean() }. @@ -57,7 +57,7 @@ parse(String) -> -spec parse(unicode:chardata(), parse_opts()) -> template(). parse(String, Opts) -> - emqx_connector_template:parse(String, Opts). + emqx_template:parse(String, Opts). %% @doc Render an SQL statement template given a set of bindings. %% Interpolation generally follows the SQL syntax, strings are escaped according to the @@ -65,8 +65,8 @@ parse(String, Opts) -> -spec render(template(), bindings(), render_opts()) -> {unicode:chardata(), [_Error]}. render(Template, Bindings, Opts) -> - emqx_connector_template:render(Template, Bindings, #{ - var_trans => fun(Value) -> emqx_connector_sql:to_sql_string(Value, Opts) end + emqx_template:render(Template, Bindings, #{ + var_trans => fun(Value) -> emqx_utils_sql:to_sql_string(Value, Opts) end }). %% @doc Render an SQL statement template given a set of bindings. @@ -74,8 +74,8 @@ render(Template, Bindings, Opts) -> -spec render_strict(template(), bindings(), render_opts()) -> unicode:chardata(). render_strict(Template, Bindings, Opts) -> - emqx_connector_template:render_strict(Template, Bindings, #{ - var_trans => fun(Value) -> emqx_connector_sql:to_sql_string(Value, Opts) end + emqx_template:render_strict(Template, Bindings, #{ + var_trans => fun(Value) -> emqx_utils_sql:to_sql_string(Value, Opts) end }). %% @doc Parse an SQL statement string into a prepared statement and a row template. @@ -83,7 +83,7 @@ render_strict(Template, Bindings, Opts) -> %% during the execution of the prepared statement. 
%% Example: %% ``` -%% {Statement, RowTemplate} = emqx_connector_template_sql:parse_prepstmt( +%% {Statement, RowTemplate} = emqx_template_sql:parse_prepstmt( %% "INSERT INTO table (id, name, age) VALUES (${id}, ${name}, 42)", %% #{parameters => '$n'} %% ), @@ -93,7 +93,7 @@ render_strict(Template, Bindings, Opts) -> -spec parse_prepstmt(unicode:chardata(), parse_opts()) -> {unicode:chardata(), row_template()}. parse_prepstmt(String, Opts) -> - Template = emqx_connector_template:parse(String, maps:with(?TEMPLATE_PARSE_OPTS, Opts)), + Template = emqx_template:parse(String, maps:with(?TEMPLATE_PARSE_OPTS, Opts)), Statement = mk_prepared_statement(Template, Opts), Placeholders = [Placeholder || Placeholder <- Template, element(1, Placeholder) == var], {Statement, Placeholders}. @@ -123,15 +123,15 @@ mk_replace(':n', N) -> %% @doc Render a row template into a list of SQL values. %% An _SQL value_ is a vaguely defined concept here, it is something that's considered %% compatible with the protocol of the database being used. See the definition of -%% `emqx_connector_sql:value()` for more details. +%% `emqx_utils_sql:value()` for more details. -spec render_prepstmt(template(), bindings()) -> {values(), [_Error]}. render_prepstmt(Template, Bindings) -> - Opts = #{var_trans => fun emqx_connector_sql:to_sql_value/1}, - emqx_connector_template:render(Template, Bindings, Opts). + Opts = #{var_trans => fun emqx_utils_sql:to_sql_value/1}, + emqx_template:render(Template, Bindings, Opts). -spec render_prepstmt_strict(template(), bindings()) -> values(). render_prepstmt_strict(Template, Bindings) -> - Opts = #{var_trans => fun emqx_connector_sql:to_sql_value/1}, - emqx_connector_template:render_strict(Template, Bindings, Opts). + Opts = #{var_trans => fun emqx_utils_sql:to_sql_value/1}, + emqx_template:render_strict(Template, Bindings, Opts). 
diff --git a/apps/emqx_utils/src/emqx_utils_sql.erl b/apps/emqx_utils/src/emqx_utils_sql.erl index 3caed6b62..12aac6464 100644 --- a/apps/emqx_utils/src/emqx_utils_sql.erl +++ b/apps/emqx_utils/src/emqx_utils_sql.erl @@ -80,7 +80,7 @@ to_sql_value(Map) when is_map(Map) -> emqx_utils_json:encode(Map). %% @doc Convert an Erlang term to a string that can be interpolated in literal %% SQL statements. The value is escaped if necessary. --spec to_sql_string(term(), Options) -> iodata() when +-spec to_sql_string(term(), Options) -> unicode:chardata() when Options :: #{ escaping => cql | mysql | sql }. @@ -98,7 +98,9 @@ to_sql_string(Term, #{escaping := cql}) -> to_sql_string(Term, #{}) -> maybe_escape(Term, fun escape_sql/1). --spec maybe_escape(_Value, fun((binary()) -> iodata())) -> iodata(). +-spec maybe_escape(_Value, fun((binary()) -> iodata())) -> unicode:chardata(). +maybe_escape(undefined, _EscapeFun) -> + <<"NULL">>; maybe_escape(Str, EscapeFun) when is_binary(Str) -> EscapeFun(Str); maybe_escape(Str, EscapeFun) when is_list(Str) -> @@ -109,9 +111,9 @@ maybe_escape(Str, EscapeFun) when is_list(Str) -> error(Otherwise) end; maybe_escape(Val, EscapeFun) when is_atom(Val) orelse is_map(Val) -> - EscapeFun(emqx_utils_conv:bin(Val)); + EscapeFun(emqx_template:to_string(Val)); maybe_escape(Val, _EscapeFun) -> - emqx_utils_conv:bin(Val). + emqx_template:to_string(Val). -spec escape_sql(binary()) -> iodata(). escape_sql(S) -> diff --git a/apps/emqx_connector/test/emqx_connector_template_SUITE.erl b/apps/emqx_utils/test/emqx_template_SUITE.erl similarity index 69% rename from apps/emqx_connector/test/emqx_connector_template_SUITE.erl rename to apps/emqx_utils/test/emqx_template_SUITE.erl index 3700caa96..657c3c94f 100644 --- a/apps/emqx_connector/test/emqx_connector_template_SUITE.erl +++ b/apps/emqx_utils/test/emqx_template_SUITE.erl @@ -14,7 +14,7 @@ %% limitations under the License. 
%%-------------------------------------------------------------------- --module(emqx_connector_template_SUITE). +-module(emqx_template_SUITE). -compile(export_all). -compile(nowarn_export_all). @@ -33,7 +33,7 @@ t_render(_) -> l => [0, 1, 1000], u => "utf-8 is ǝɹǝɥ" }, - Template = emqx_connector_template:parse( + Template = emqx_template:parse( <<"a:${a},b:${b},c:${c},d:${d},d1:${d.d1},l:${l},u:${u}">> ), ?assertEqual( @@ -43,8 +43,8 @@ t_render(_) -> t_render_var_trans(_) -> Bindings = #{a => <<"1">>, b => 1, c => #{prop => 1.0}}, - Template = emqx_connector_template:parse(<<"a:${a},b:${b},c:${c.prop}">>), - {String, Errors} = emqx_connector_template:render( + Template = emqx_template:parse(<<"a:${a},b:${b},c:${c.prop}">>), + {String, Errors} = emqx_template:render( Template, Bindings, #{var_trans => fun(Name, _) -> "<" ++ Name ++ ">" end} @@ -56,10 +56,10 @@ t_render_var_trans(_) -> t_render_path(_) -> Bindings = #{d => #{d1 => <<"hi">>}}, - Template = emqx_connector_template:parse(<<"d.d1:${d.d1}">>), + Template = emqx_template:parse(<<"d.d1:${d.d1}">>), ?assertEqual( ok, - emqx_connector_template:validate(["d.d1"], Template) + emqx_template:validate(["d.d1"], Template) ), ?assertEqual( {<<"d.d1:hi">>, []}, @@ -68,10 +68,10 @@ t_render_path(_) -> t_render_custom_ph(_) -> Bindings = #{a => <<"a">>, b => <<"b">>}, - Template = emqx_connector_template:parse(<<"a:${a},b:${b}">>), + Template = emqx_template:parse(<<"a:${a},b:${b}">>), ?assertEqual( {error, [{"b", disallowed}]}, - emqx_connector_template:validate(["a"], Template) + emqx_template:validate(["a"], Template) ), ?assertEqual( <<"a:a,b:b">>, @@ -80,8 +80,8 @@ t_render_custom_ph(_) -> t_render_this(_) -> Bindings = #{a => <<"a">>, b => [1, 2, 3]}, - Template = emqx_connector_template:parse(<<"this:${} / also:${.}">>), - ?assertEqual(ok, emqx_connector_template:validate(["."], Template)), + Template = emqx_template:parse(<<"this:${} / also:${.}">>), + ?assertEqual(ok, emqx_template:validate(["."], 
Template)), ?assertEqual( % NOTE: order of the keys in the JSON object depends on the JSON encoder <<"this:{\"b\":[1,2,3],\"a\":\"a\"} / also:{\"b\":[1,2,3],\"a\":\"a\"}">>, @@ -90,7 +90,7 @@ t_render_this(_) -> t_render_missing_bindings(_) -> Bindings = #{no => #{}}, - Template = emqx_connector_template:parse( + Template = emqx_template:parse( <<"a:${a},b:${b},c:${c},d:${d.d1},e:${no.such_atom_i_swear}">> ), ?assertEqual( @@ -116,33 +116,33 @@ t_render_missing_bindings(_) -> t_unparse(_) -> TString = <<"a:${a},b:${b},c:$${c},d:{${d.d1}},e:${$}{e},lit:${$}{$}">>, - Template = emqx_connector_template:parse(TString), + Template = emqx_template:parse(TString), ?assertEqual( TString, - unicode:characters_to_binary(emqx_connector_template:unparse(Template)) + unicode:characters_to_binary(emqx_template:unparse(Template)) ). t_const(_) -> ?assertEqual( true, - emqx_connector_template:is_const(emqx_connector_template:parse(<<"">>)) + emqx_template:is_const(emqx_template:parse(<<"">>)) ), ?assertEqual( false, - emqx_connector_template:is_const( - emqx_connector_template:parse(<<"a:${a},b:${b},c:${$}{c}">>) + emqx_template:is_const( + emqx_template:parse(<<"a:${a},b:${b},c:${$}{c}">>) ) ), ?assertEqual( true, - emqx_connector_template:is_const( - emqx_connector_template:parse(<<"a:${$}{a},b:${$}{b}">>) + emqx_template:is_const( + emqx_template:parse(<<"a:${$}{a},b:${$}{b}">>) ) ). 
t_render_partial_ph(_) -> Bindings = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, - Template = emqx_connector_template:parse(<<"a:$a,b:b},c:{c},d:${d">>), + Template = emqx_template:parse(<<"a:$a,b:b},c:{c},d:${d">>), ?assertEqual( <<"a:$a,b:b},c:{c},d:${d">>, render_strict_string(Template, Bindings) @@ -150,7 +150,7 @@ t_render_partial_ph(_) -> t_parse_escaped(_) -> Bindings = #{a => <<"1">>, b => 1, c => "VAR"}, - Template = emqx_connector_template:parse(<<"a:${a},b:${$}{b},c:${$}{${c}},lit:${$}{$}">>), + Template = emqx_template:parse(<<"a:${a},b:${$}{b},c:${$}{${c}},lit:${$}{$}">>), ?assertEqual( <<"a:1,b:${b},c:${VAR},lit:${$}">>, render_strict_string(Template, Bindings) @@ -158,7 +158,7 @@ t_parse_escaped(_) -> t_parse_escaped_dquote(_) -> Bindings = #{a => <<"1">>, b => 1}, - Template = emqx_connector_template:parse(<<"a:\"${a}\",b:\"${$}{b}\"">>, #{ + Template = emqx_template:parse(<<"a:\"${a}\",b:\"${$}{b}\"">>, #{ strip_double_quote => true }), ?assertEqual( @@ -169,30 +169,30 @@ t_parse_escaped_dquote(_) -> t_parse_sql_prepstmt(_) -> Bindings = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, {PrepareStatement, RowTemplate} = - emqx_connector_template_sql:parse_prepstmt(<<"a:${a},b:${b},c:${c},d:${d}">>, #{ + emqx_template_sql:parse_prepstmt(<<"a:${a},b:${b},c:${c},d:${d}">>, #{ parameters => '?' }), ?assertEqual(<<"a:?,b:?,c:?,d:?">>, bin(PrepareStatement)), ?assertEqual( {[<<"1">>, 1, 1.0, <<"{\"d1\":\"hi\"}">>], _Errors = []}, - emqx_connector_template_sql:render_prepstmt(RowTemplate, Bindings) + emqx_template_sql:render_prepstmt(RowTemplate, Bindings) ). 
t_parse_sql_prepstmt_n(_) -> Bindings = #{a => undefined, b => true, c => atom, d => #{d1 => 42.1337}}, {PrepareStatement, RowTemplate} = - emqx_connector_template_sql:parse_prepstmt(<<"a:${a},b:${b},c:${c},d:${d}">>, #{ + emqx_template_sql:parse_prepstmt(<<"a:${a},b:${b},c:${c},d:${d}">>, #{ parameters => '$n' }), ?assertEqual(<<"a:$1,b:$2,c:$3,d:$4">>, bin(PrepareStatement)), ?assertEqual( [null, true, <<"atom">>, <<"{\"d1\":42.1337}">>], - emqx_connector_template_sql:render_prepstmt_strict(RowTemplate, Bindings) + emqx_template_sql:render_prepstmt_strict(RowTemplate, Bindings) ). t_parse_sql_prepstmt_colon(_) -> {PrepareStatement, _RowTemplate} = - emqx_connector_template_sql:parse_prepstmt(<<"a=${a},b=${b},c=${c},d=${d}">>, #{ + emqx_template_sql:parse_prepstmt(<<"a=${a},b=${b},c=${c},d=${d}">>, #{ parameters => ':n' }), ?assertEqual(<<"a=:1,b=:2,c=:3,d=:4">>, bin(PrepareStatement)). @@ -200,9 +200,9 @@ t_parse_sql_prepstmt_colon(_) -> t_parse_sql_prepstmt_partial_ph(_) -> Bindings = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, {PrepareStatement, RowTemplate} = - emqx_connector_template_sql:parse_prepstmt(<<"a:$a,b:b},c:{c},d:${d">>, #{parameters => '?'}), + emqx_template_sql:parse_prepstmt(<<"a:$a,b:b},c:{c},d:${d">>, #{parameters => '?'}), ?assertEqual(<<"a:$a,b:b},c:{c},d:${d">>, bin(PrepareStatement)), - ?assertEqual([], emqx_connector_template_sql:render_prepstmt_strict(RowTemplate, Bindings)). + ?assertEqual([], emqx_template_sql:render_prepstmt_strict(RowTemplate, Bindings)). 
t_render_sql(_) -> Bindings = #{ @@ -213,14 +213,14 @@ t_render_sql(_) -> n => undefined, u => "utf8's cool 🐸" }, - Template = emqx_connector_template:parse(<<"a:${a},b:${b},c:${c},d:${d},n:${n},u:${u}">>), + Template = emqx_template:parse(<<"a:${a},b:${b},c:${c},d:${d},n:${n},u:${u}">>), ?assertMatch( {_String, _Errors = []}, - emqx_connector_template_sql:render(Template, Bindings, #{}) + emqx_template_sql:render(Template, Bindings, #{}) ), ?assertEqual( <<"a:'1',b:1,c:1.0,d:'{\"d1\":\"hi\"}',n:NULL,u:'utf8\\'s cool 🐸'"/utf8>>, - bin(emqx_connector_template_sql:render_strict(Template, Bindings, #{})) + bin(emqx_template_sql:render_strict(Template, Bindings, #{})) ). t_render_mysql(_) -> @@ -236,7 +236,7 @@ t_render_mysql(_) -> g => "utf8's cool 🐸", h => imgood }, - Template = emqx_connector_template_sql:parse( + Template = emqx_template_sql:parse( <<"a:${a},b:${b},c:${c},d:${d},e:${e},f:${f},g:${g},h:${h}">> ), ?assertEqual( @@ -245,7 +245,7 @@ t_render_mysql(_) -> "e:'\\\\\\0💩',f:0x6E6F6E2D75746638DCC900,g:'utf8\\'s cool 🐸',"/utf8, "h:'imgood'" >>, - bin(emqx_connector_template_sql:render_strict(Template, Bindings, #{escaping => mysql})) + bin(emqx_template_sql:render_strict(Template, Bindings, #{escaping => mysql})) ). t_render_cql(_) -> @@ -257,18 +257,18 @@ t_render_cql(_) -> c => 1.0, d => #{d1 => <<"someone's phone">>} }, - Template = emqx_connector_template:parse(<<"a:${a},b:${b},c:${c},d:${d}">>), + Template = emqx_template:parse(<<"a:${a},b:${b},c:${c},d:${d}">>), ?assertEqual( <<"a:'1''''2',b:1,c:1.0,d:'{\"d1\":\"someone''s phone\"}'">>, - bin(emqx_connector_template_sql:render_strict(Template, Bindings, #{escaping => cql})) + bin(emqx_template_sql:render_strict(Template, Bindings, #{escaping => cql})) ). 
t_render_sql_custom_ph(_) -> {PrepareStatement, RowTemplate} = - emqx_connector_template_sql:parse_prepstmt(<<"a:${a},b:${b.c}">>, #{parameters => '$n'}), + emqx_template_sql:parse_prepstmt(<<"a:${a},b:${b.c}">>, #{parameters => '$n'}), ?assertEqual( {error, [{"b.c", disallowed}]}, - emqx_connector_template:validate(["a"], RowTemplate) + emqx_template:validate(["a"], RowTemplate) ), ?assertEqual(<<"a:$1,b:$2">>, bin(PrepareStatement)). @@ -276,57 +276,57 @@ t_render_sql_strip_double_quote(_) -> Bindings = #{a => <<"a">>, b => <<"b">>}, %% no strip_double_quote option: "${key}" -> "value" - {PrepareStatement1, RowTemplate1} = emqx_connector_template_sql:parse_prepstmt( + {PrepareStatement1, RowTemplate1} = emqx_template_sql:parse_prepstmt( <<"a:\"${a}\",b:\"${b}\"">>, #{parameters => '$n'} ), ?assertEqual(<<"a:\"$1\",b:\"$2\"">>, bin(PrepareStatement1)), ?assertEqual( [<<"a">>, <<"b">>], - emqx_connector_template_sql:render_prepstmt_strict(RowTemplate1, Bindings) + emqx_template_sql:render_prepstmt_strict(RowTemplate1, Bindings) ), %% strip_double_quote = true: "${key}" -> value - {PrepareStatement2, RowTemplate2} = emqx_connector_template_sql:parse_prepstmt( + {PrepareStatement2, RowTemplate2} = emqx_template_sql:parse_prepstmt( <<"a:\"${a}\",b:\"${b}\"">>, #{parameters => '$n', strip_double_quote => true} ), ?assertEqual(<<"a:$1,b:$2">>, bin(PrepareStatement2)), ?assertEqual( [<<"a">>, <<"b">>], - emqx_connector_template_sql:render_prepstmt_strict(RowTemplate2, Bindings) + emqx_template_sql:render_prepstmt_strict(RowTemplate2, Bindings) ). 
t_render_tmpl_deep(_) -> Bindings = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, - Template = emqx_connector_template:parse_deep( + Template = emqx_template:parse_deep( #{<<"${a}">> => [<<"$${b}">>, "c", 2, 3.0, '${d}', {[<<"${c}">>, <<"${$}{d}">>], 0}]} ), ?assertEqual( {error, [{V, disallowed} || V <- ["b", "c"]]}, - emqx_connector_template:validate(["a"], Template) + emqx_template:validate(["a"], Template) ), ?assertEqual( #{<<"1">> => [<<"$1">>, "c", 2, 3.0, '${d}', {[<<"1.0">>, <<"${d}">>], 0}]}, - emqx_connector_template:render_strict(Template, Bindings) + emqx_template:render_strict(Template, Bindings) ). t_unparse_tmpl_deep(_) -> Term = #{<<"${a}">> => [<<"$${b}">>, "c", 2, 3.0, '${d}', {[<<"${c}">>], <<"${$}{d}">>, 0}]}, - Template = emqx_connector_template:parse_deep(Term), - ?assertEqual(Term, emqx_connector_template:unparse(Template)). + Template = emqx_template:parse_deep(Term), + ?assertEqual(Term, emqx_template:unparse(Template)). %% render_string(Template, Bindings) -> - {String, Errors} = emqx_connector_template:render(Template, Bindings), + {String, Errors} = emqx_template:render(Template, Bindings), {bin(String), Errors}. render_strict_string(Template, Bindings) -> - bin(emqx_connector_template:render_strict(Template, Bindings)). + bin(emqx_template:render_strict(Template, Bindings)). bin(String) -> unicode:characters_to_binary(String). 
From 75cc66378667372da7c7ef0ebfcc6f284050d5a4 Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Fri, 14 Jul 2023 18:43:12 +0200 Subject: [PATCH 062/155] chore(ruleeng): streamline application dependencies --- apps/emqx_rule_engine/src/emqx_rule_engine.app.src | 11 ++++++++++- mix.exs | 1 + 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/apps/emqx_rule_engine/src/emqx_rule_engine.app.src b/apps/emqx_rule_engine/src/emqx_rule_engine.app.src index c353742ae..cad752886 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_engine.app.src +++ b/apps/emqx_rule_engine/src/emqx_rule_engine.app.src @@ -5,7 +5,16 @@ {vsn, "5.0.28"}, {modules, []}, {registered, [emqx_rule_engine_sup, emqx_rule_engine]}, - {applications, [kernel, stdlib, rulesql, getopt, emqx_ctl, uuid]}, + {applications, [ + kernel, + stdlib, + rulesql, + getopt, + uuid, + emqx, + emqx_utils, + emqx_ctl + ]}, {mod, {emqx_rule_engine_app, []}}, {env, []}, {licenses, ["Apache-2.0"]}, diff --git a/mix.exs b/mix.exs index 3817b5121..409c29924 100644 --- a/mix.exs +++ b/mix.exs @@ -338,6 +338,7 @@ defmodule EMQXUmbrella.MixProject do :emqx_management, :emqx_retainer, :emqx_prometheus, + :emqx_rule_engine, :emqx_auto_subscribe, :emqx_slow_subs, :emqx_plugins, From 69cfa740ea9775dfa2302a2d589c593c06cff7e9 Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Fri, 20 Oct 2023 20:36:44 +0700 Subject: [PATCH 063/155] fix(ruleeng): ensure full backward compatibility --- .../src/emqx_rule_actions.erl | 52 +++++++------- apps/emqx_utils/src/emqx_template.erl | 72 ++++++++++++++++--- apps/emqx_utils/test/emqx_template_SUITE.erl | 8 +-- 3 files changed, 95 insertions(+), 37 deletions(-) diff --git a/apps/emqx_rule_engine/src/emqx_rule_actions.erl b/apps/emqx_rule_engine/src/emqx_rule_actions.erl index 7a8b2520c..96eb4a789 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_actions.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_actions.erl @@ -72,8 +72,8 @@ pre_process_action_args( Args#{ preprocessed_tmpl => #{ topic => 
emqx_template:parse(Topic), - qos => parse_vars(QoS), - retain => parse_vars(Retain), + qos => parse_simple_var(QoS), + retain => parse_simple_var(Retain), payload => parse_payload(Payload), mqtt_properties => parse_mqtt_properties(MQTTProperties), user_properties => parse_user_properties(UserProperties) @@ -119,8 +119,8 @@ republish( } ) -> % NOTE: rendering missing bindings as string "undefined" - {TopicString, _Errors1} = emqx_template:render(TopicTemplate, Selected), - {PayloadString, _Errors2} = emqx_template:render(PayloadTemplate, Selected), + {TopicString, _Errors1} = render_template(TopicTemplate, Selected), + {PayloadString, _Errors2} = render_template(PayloadTemplate, Selected), Topic = iolist_to_binary(TopicString), Payload = iolist_to_binary(PayloadString), QoS = render_simple_var(QoSTemplate, Selected, 0), @@ -201,11 +201,17 @@ safe_publish(RuleId, Topic, QoS, Flags, Payload, PubProps) -> _ = emqx_broker:safe_publish(Msg), emqx_metrics:inc_msg(Msg). -parse_vars(Data) when is_binary(Data) -> +parse_simple_var(Data) when is_binary(Data) -> emqx_template:parse(Data); -parse_vars(Data) -> +parse_simple_var(Data) -> {const, Data}. +parse_payload(Payload) -> + case string:is_empty(Payload) of + false -> emqx_template:parse(Payload); + true -> emqx_template:parse("${.}") + end. + parse_mqtt_properties(MQTTPropertiesTemplate) -> maps:map( fun(_Key, V) -> emqx_template:parse(V) end, @@ -225,8 +231,12 @@ parse_user_properties(_) -> %% invalid, discard undefined. +render_template(Template, Bindings) -> + Opts = #{var_lookup => fun emqx_template:lookup_loose_json/2}, + emqx_template:render(Template, Bindings, Opts). 
+ render_simple_var([{var, _Name, Accessor}], Data, Default) -> - case emqx_template:lookup_var(Accessor, Data) of + case emqx_template:lookup_loose_json(Accessor, Data) of {ok, Var} -> Var; %% cannot find the variable from Data {error, _} -> Default @@ -234,12 +244,6 @@ render_simple_var([{var, _Name, Accessor}], Data, Default) -> render_simple_var({const, Val}, _Data, _Default) -> Val. -parse_payload(Payload) -> - case string:is_empty(Payload) of - false -> emqx_template:parse(Payload); - true -> emqx_template:parse("${.}") - end. - render_pub_props(UserPropertiesTemplate, Selected, Env) -> UserProperties = case UserPropertiesTemplate of @@ -257,26 +261,24 @@ render_mqtt_properties(MQTTPropertiesTemplate, Selected, Env) -> MQTTProperties = maps:fold( fun(K, Template, Acc) -> - try - V = unicode:characters_to_binary( - emqx_template:render_strict(Template, Selected) - ), - Acc#{K => V} - catch - Kind:Error -> + {V, Errors} = render_template(Template, Selected), + NAcc = Acc#{K => iolist_to_binary(V)}, + case Errors of + [] -> + ok; + Errors -> ?SLOG( debug, #{ msg => "bad_mqtt_property_value_ignored", rule_id => RuleId, - exception => Kind, - reason => Error, + reason => Errors, property => K, selected => Selected } - ), - Acc - end + ) + end, + NAcc end, #{}, MQTTPropertiesTemplate diff --git a/apps/emqx_utils/src/emqx_template.erl b/apps/emqx_utils/src/emqx_template.erl index deb25d807..43d9158de 100644 --- a/apps/emqx_utils/src/emqx_template.erl +++ b/apps/emqx_utils/src/emqx_template.erl @@ -29,6 +29,7 @@ -export([render_strict/3]). -export([lookup_var/2]). +-export([lookup_loose_json/2]). -export([to_string/1]). -export_type([t/0]). @@ -62,16 +63,23 @@ -type binding() :: scalar() | list(scalar()) | bindings(). -type bindings() :: #{atom() | binary() => binding()}. +-type reason() :: undefined | {location(), _InvalidType :: atom()}. +-type location() :: non_neg_integer(). 
+ -type var_trans() :: fun((Value :: term()) -> unicode:chardata()) | fun((varname(), Value :: term()) -> unicode:chardata()). +-type var_lookup() :: + fun((accessor(), bindings()) -> {ok, binding()} | {error, reason()}). + -type parse_opts() :: #{ strip_double_quote => boolean() }. -type render_opts() :: #{ - var_trans => var_trans() + var_trans => var_trans(), + var_lookup => var_lookup() }. -define(PH_VAR_THIS, '$this'). @@ -173,7 +181,7 @@ render_placeholder(Name) -> %% By default, all binding values are converted to strings using `to_string/1` %% function. Option `var_trans` can be used to override this behaviour. -spec render(t(), bindings()) -> - {term(), [_Error :: {varname(), undefined}]}. + {term(), [_Error :: {varname(), reason()}]}. render(Template, Bindings) -> render(Template, Bindings, #{}). @@ -195,7 +203,7 @@ render({'$tpl', Template}, Bindings, Opts) -> render_deep(Template, Bindings, Opts). render_binding(Name, Accessor, Bindings, Opts) -> - case lookup_var(Accessor, Bindings) of + case lookup_value(Accessor, Bindings, Opts) of {ok, Value} -> {render_value(Name, Value, Opts), []}; {error, Reason} -> @@ -205,6 +213,11 @@ render_binding(Name, Accessor, Bindings, Opts) -> {render_value(Name, undefined, Opts), [{Name, Reason}]} end. +lookup_value(Accessor, Bindings, #{var_lookup := LookupFun}) -> + LookupFun(Accessor, Bindings); +lookup_value(Accessor, Bindings, #{}) -> + lookup_var(Accessor, Bindings). + render_value(_Name, Value, #{var_trans := TransFun}) when is_function(TransFun, 1) -> TransFun(Value); render_value(Name, Value, #{var_trans := TransFun}) when is_function(TransFun, 2) -> @@ -309,17 +322,60 @@ unparse_deep(Term) -> %% +%% @doc Lookup a variable in the bindings accessible through the accessor. +%% Lookup is "loose" in the sense that atom and binary keys in the bindings are +%% treated equally. This is useful for both hand-crafted and JSON-like bindings. +%% This is the default lookup function used by rendering functions. 
-spec lookup_var(accessor(), bindings()) -> - {ok, binding()} | {error, undefined}. -lookup_var(Var, Value) when Var == ?PH_VAR_THIS orelse Var == [] -> + {ok, binding()} | {error, reason()}. +lookup_var(Var, Bindings) -> + lookup_var(0, Var, Bindings). + +lookup_var(_, Var, Value) when Var == ?PH_VAR_THIS orelse Var == [] -> {ok, Value}; -lookup_var([Prop | Rest], Bindings) -> +lookup_var(Loc, [Prop | Rest], Bindings) when is_map(Bindings) -> case lookup(Prop, Bindings) of {ok, Value} -> - lookup_var(Rest, Value); + lookup_var(Loc + 1, Rest, Value); {error, Reason} -> {error, Reason} - end. + end; +lookup_var(Loc, _, Invalid) -> + {error, {Loc, type_name(Invalid)}}. + +%% @doc Lookup a variable in the bindings accessible through the accessor. +%% Additionally to `lookup_var/2` behavior, this function also tries to parse any +%% binary as JSON to a map if accessor needs to go deeper into it. +-spec lookup_loose_json(accessor(), bindings() | binary()) -> + {ok, binding()} | {error, reason()}. +lookup_loose_json(Var, Bindings) -> + lookup_loose_json(0, Var, Bindings). + +lookup_loose_json(_, Var, Value) when Var == ?PH_VAR_THIS orelse Var == [] -> + {ok, Value}; +lookup_loose_json(Loc, [Prop | Rest], Bindings) when is_map(Bindings) -> + case lookup(Prop, Bindings) of + {ok, Value} -> + lookup_loose_json(Loc + 1, Rest, Value); + {error, Reason} -> + {error, Reason} + end; +lookup_loose_json(Loc, Rest, Json) when is_binary(Json) -> + try emqx_utils_json:decode(Json) of + Bindings -> + % NOTE: This is intentional, we don't want to parse nested JSON. + lookup_var(Loc, Rest, Bindings) + catch + error:_ -> + {error, {Loc, binary}} + end; +lookup_loose_json(Loc, _, Invalid) -> + {error, {Loc, type_name(Invalid)}}. + +type_name(Term) when is_atom(Term) -> atom; +type_name(Term) when is_number(Term) -> number; +type_name(Term) when is_binary(Term) -> binary; +type_name(Term) when is_list(Term) -> list. 
-spec lookup(Prop :: binary(), bindings()) -> {ok, binding()} | {error, undefined}. diff --git a/apps/emqx_utils/test/emqx_template_SUITE.erl b/apps/emqx_utils/test/emqx_template_SUITE.erl index 657c3c94f..22ffff47c 100644 --- a/apps/emqx_utils/test/emqx_template_SUITE.erl +++ b/apps/emqx_utils/test/emqx_template_SUITE.erl @@ -89,15 +89,15 @@ t_render_this(_) -> ). t_render_missing_bindings(_) -> - Bindings = #{no => #{}}, + Bindings = #{no => #{}, c => #{<<"c1">> => 42}}, Template = emqx_template:parse( - <<"a:${a},b:${b},c:${c},d:${d.d1},e:${no.such_atom_i_swear}">> + <<"a:${a},b:${b},c:${c.c1.c2},d:${d.d1},e:${no.such_atom_i_swear}">> ), ?assertEqual( {<<"a:undefined,b:undefined,c:undefined,d:undefined,e:undefined">>, [ {"no.such_atom_i_swear", undefined}, {"d.d1", undefined}, - {"c", undefined}, + {"c.c1.c2", {2, number}}, {"b", undefined}, {"a", undefined} ]}, @@ -107,7 +107,7 @@ t_render_missing_bindings(_) -> [ {"no.such_atom_i_swear", undefined}, {"d.d1", undefined}, - {"c", undefined}, + {"c.c1.c2", {2, number}}, {"b", undefined}, {"a", undefined} ], From fbd27eda6a51d0c3e7e60e8a87dd9e026ff7af39 Mon Sep 17 00:00:00 2001 From: chengshq Date: Wed, 1 Nov 2023 20:08:26 +0800 Subject: [PATCH 064/155] fix(Stomp): content type returned --- .../src/emqx_stomp_channel.erl | 16 +++++++++++----- .../emqx_gateway_stomp/test/emqx_stomp_SUITE.erl | 10 +++++++++- 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/apps/emqx_gateway_stomp/src/emqx_stomp_channel.erl b/apps/emqx_gateway_stomp/src/emqx_stomp_channel.erl index 8e9be8359..453fa9fd2 100644 --- a/apps/emqx_gateway_stomp/src/emqx_stomp_channel.erl +++ b/apps/emqx_gateway_stomp/src/emqx_stomp_channel.erl @@ -1052,9 +1052,16 @@ handle_deliver( _ -> Headers0 end, + Headers2 = lists:foldl( + fun({Key, _Val} = KV, Acc1) -> + lists:keystore(Key, 1, Acc1, KV) + end, + Headers1, + maps:get(stomp_headers, Headers, []) + ), Frame = #stomp_frame{ command = <<"MESSAGE">>, - headers = Headers1 ++ 
maps:get(stomp_headers, Headers, []), + headers = Headers2, body = Payload }, [Frame | Acc]; @@ -1160,12 +1167,12 @@ do_negotiate_version(Accepts) -> lists:reverse(lists:sort(binary:split(Accepts, <<",">>, [global]))) ). -do_negotiate_version(Ver, []) -> - {error, <<"Supported protocol versions < ", Ver/binary>>}; do_negotiate_version(Ver, [AcceptVer | _]) when Ver >= AcceptVer -> {ok, AcceptVer}; do_negotiate_version(Ver, [_ | T]) -> - do_negotiate_version(Ver, T). + do_negotiate_version(Ver, T); +do_negotiate_version(Ver, _) -> + {error, <<"Supported protocol versions < ", Ver/binary>>}. header(Name, Headers) -> get_value(Name, Headers). @@ -1227,7 +1234,6 @@ frame2message( [ <<"destination">>, <<"content-length">>, - <<"content-type">>, <<"transaction">>, <<"receipt">> ] diff --git a/apps/emqx_gateway_stomp/test/emqx_stomp_SUITE.erl b/apps/emqx_gateway_stomp/test/emqx_stomp_SUITE.erl index 2ba753ca4..58913cf2f 100644 --- a/apps/emqx_gateway_stomp/test/emqx_stomp_SUITE.erl +++ b/apps/emqx_gateway_stomp/test/emqx_stomp_SUITE.erl @@ -181,11 +181,15 @@ t_subscribe(_) -> %% 'user-defined' header will be retain ok = send_message_frame(Sock, <<"/queue/foo">>, <<"hello">>, [ - {<<"user-defined">>, <<"emq">>} + {<<"user-defined">>, <<"emq">>}, + {<<"content-type">>, <<"text/html">>} ]), ?assertMatch({ok, #stomp_frame{command = <<"RECEIPT">>}}, recv_a_frame(Sock)), {ok, Frame} = recv_a_frame(Sock), + ?assertEqual( + <<"text/html">>, proplists:get_value(<<"content-type">>, Frame#stomp_frame.headers) + ), ?assertMatch( #stomp_frame{ @@ -977,6 +981,10 @@ t_mountpoint(_) -> }} = recv_a_frame(Sock), ?assertEqual(<<"t/a">>, proplists:get_value(<<"destination">>, Headers)), + ?assertEqual( + <<"text/plain">>, proplists:get_value(<<"content-type">>, Headers) + ), + ok = send_disconnect_frame(Sock) end, From 02c1bd70b68b8e443aafc10a2185809c71de6e4a Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Mon, 23 Oct 2023 15:42:58 +0700 Subject: [PATCH 065/155] feat(tpl): factor out 
loose json concept into a separate module Which is called `emqx_jsonish`. Also introduce an _access module_ abstraction to extract information from such data during rendering. --- .../src/emqx_rule_actions.erl | 5 +- apps/emqx_utils/src/emqx_jsonish.erl | 71 +++++++++++ apps/emqx_utils/src/emqx_template.erl | 113 +++++++----------- apps/emqx_utils/src/emqx_template_sql.erl | 28 ++--- apps/emqx_utils/test/emqx_jsonish_tests.erl | 97 +++++++++++++++ apps/emqx_utils/test/emqx_template_SUITE.erl | 106 +++++++++------- 6 files changed, 295 insertions(+), 125 deletions(-) create mode 100644 apps/emqx_utils/src/emqx_jsonish.erl create mode 100644 apps/emqx_utils/test/emqx_jsonish_tests.erl diff --git a/apps/emqx_rule_engine/src/emqx_rule_actions.erl b/apps/emqx_rule_engine/src/emqx_rule_actions.erl index 96eb4a789..d0810eb84 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_actions.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_actions.erl @@ -232,11 +232,10 @@ parse_user_properties(_) -> undefined. render_template(Template, Bindings) -> - Opts = #{var_lookup => fun emqx_template:lookup_loose_json/2}, - emqx_template:render(Template, Bindings, Opts). + emqx_template:render(Template, {emqx_jsonish, Bindings}). render_simple_var([{var, _Name, Accessor}], Data, Default) -> - case emqx_template:lookup_loose_json(Accessor, Data) of + case emqx_jsonish:lookup(Accessor, Data) of {ok, Var} -> Var; %% cannot find the variable from Data {error, _} -> Default diff --git a/apps/emqx_utils/src/emqx_jsonish.erl b/apps/emqx_utils/src/emqx_jsonish.erl new file mode 100644 index 000000000..b2d92c7fc --- /dev/null +++ b/apps/emqx_utils/src/emqx_jsonish.erl @@ -0,0 +1,71 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2020-2022 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. 
+%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_jsonish). + +-export([lookup/2]). + +-export_type([t/0]). + +%% @doc Either a map or a JSON serial. +%% Think of it as a kind of lazily parsed and/or constructed JSON. +-type t() :: propmap() | serial(). + +%% @doc JSON in serialized form. +-type serial() :: binary(). + +-type propmap() :: #{prop() => value()}. +-type prop() :: atom() | binary(). +-type value() :: scalar() | [scalar() | propmap()] | t(). +-type scalar() :: atom() | unicode:chardata() | number(). + +%% + +%% @doc Lookup a value in the JSON-ish map accessible through the given accessor. +%% If accessor implies drilling down into a binary, it will be treated as JSON serial. +%% Failure to parse the binary as JSON will result in an _invalid type_ error. +%% Nested JSON is NOT parsed recursively. +-spec lookup(emqx_template:accessor(), t()) -> + {ok, value()} + | {error, undefined | {_Location :: non_neg_integer(), _InvalidType :: atom()}}. +lookup(Var, Jsonish) -> + lookup(0, _Decoded = false, Var, Jsonish). + +lookup(_, _, [], Value) -> + {ok, Value}; +lookup(Loc, Decoded, [Prop | Rest], Jsonish) when is_map(Jsonish) -> + case emqx_template:lookup(Prop, Jsonish) of + {ok, Value} -> + lookup(Loc + 1, Decoded, Rest, Value); + {error, Reason} -> + {error, Reason} + end; +lookup(Loc, _Decoded = false, Rest, Json) when is_binary(Json) -> + try emqx_utils_json:decode(Json) of + Value -> + % NOTE: This is intentional, we don't want to parse nested JSON. 
+ lookup(Loc, true, Rest, Value) + catch + error:_ -> + {error, {Loc, binary}} + end; +lookup(Loc, _, _, Invalid) -> + {error, {Loc, type_name(Invalid)}}. + +type_name(Term) when is_atom(Term) -> atom; +type_name(Term) when is_number(Term) -> number; +type_name(Term) when is_binary(Term) -> binary; +type_name(Term) when is_list(Term) -> list. diff --git a/apps/emqx_utils/src/emqx_template.erl b/apps/emqx_utils/src/emqx_template.erl index 43d9158de..1ccc74c50 100644 --- a/apps/emqx_utils/src/emqx_template.erl +++ b/apps/emqx_utils/src/emqx_template.erl @@ -29,7 +29,8 @@ -export([render_strict/3]). -export([lookup_var/2]). --export([lookup_loose_json/2]). +-export([lookup/2]). + -export([to_string/1]). -export_type([t/0]). @@ -38,6 +39,10 @@ -export_type([placeholder/0]). -export_type([varname/0]). -export_type([bindings/0]). +-export_type([accessor/0]). + +-export_type([context/0]). +-export_type([render_opts/0]). -type t() :: str() | {'$tpl', deeptpl()}. @@ -70,19 +75,22 @@ fun((Value :: term()) -> unicode:chardata()) | fun((varname(), Value :: term()) -> unicode:chardata()). --type var_lookup() :: - fun((accessor(), bindings()) -> {ok, binding()} | {error, reason()}). - -type parse_opts() :: #{ strip_double_quote => boolean() }. -type render_opts() :: #{ - var_trans => var_trans(), - var_lookup => var_lookup() + var_trans => var_trans() }. --define(PH_VAR_THIS, '$this'). +-type context() :: + %% Map with (potentially nested) bindings. + bindings() + %% Arbitrary term accessible via an access module with `lookup/2` function. + | {_AccessModule :: module(), _Bindings}. + +%% Access module API +-callback lookup(accessor(), _Bindings) -> {ok, _Value} | {error, reason()}. -define(RE_PLACEHOLDER, "\\$\\{[.]?([a-zA-Z0-9._]*)\\}"). -define(RE_ESCAPE, "\\$\\{(\\$)\\}"). @@ -130,7 +138,7 @@ prepend(Head, To) -> parse_accessor(Var) -> case string:split(Var, <<".">>, all) of [<<>>] -> - ?PH_VAR_THIS; + []; Name -> Name end. 
@@ -180,18 +188,18 @@ render_placeholder(Name) -> %% If one or more placeholders are not found in bindings, an error is returned. %% By default, all binding values are converted to strings using `to_string/1` %% function. Option `var_trans` can be used to override this behaviour. --spec render(t(), bindings()) -> +-spec render(t(), context()) -> {term(), [_Error :: {varname(), reason()}]}. -render(Template, Bindings) -> - render(Template, Bindings, #{}). +render(Template, Context) -> + render(Template, Context, #{}). --spec render(t(), bindings(), render_opts()) -> +-spec render(t(), context(), render_opts()) -> {term(), [_Error :: {varname(), undefined}]}. -render(Template, Bindings, Opts) when is_list(Template) -> +render(Template, Context, Opts) when is_list(Template) -> lists:mapfoldl( fun ({var, Name, Accessor}, EAcc) -> - {String, Errors} = render_binding(Name, Accessor, Bindings, Opts), + {String, Errors} = render_binding(Name, Accessor, Context, Opts), {String, Errors ++ EAcc}; (String, EAcc) -> {String, EAcc} @@ -199,11 +207,11 @@ render(Template, Bindings, Opts) when is_list(Template) -> [], Template ); -render({'$tpl', Template}, Bindings, Opts) -> - render_deep(Template, Bindings, Opts). +render({'$tpl', Template}, Context, Opts) -> + render_deep(Template, Context, Opts). -render_binding(Name, Accessor, Bindings, Opts) -> - case lookup_value(Accessor, Bindings, Opts) of +render_binding(Name, Accessor, Context, Opts) -> + case lookup_value(Accessor, Context) of {ok, Value} -> {render_value(Name, Value, Opts), []}; {error, Reason} -> @@ -213,9 +221,9 @@ render_binding(Name, Accessor, Bindings, Opts) -> {render_value(Name, undefined, Opts), [{Name, Reason}]} end. -lookup_value(Accessor, Bindings, #{var_lookup := LookupFun}) -> - LookupFun(Accessor, Bindings); -lookup_value(Accessor, Bindings, #{}) -> +lookup_value(Accessor, {AccessMod, Bindings}) -> + AccessMod:lookup(Accessor, Bindings); +lookup_value(Accessor, Bindings) -> lookup_var(Accessor, Bindings). 
render_value(_Name, Value, #{var_trans := TransFun}) when is_function(TransFun, 1) -> @@ -228,19 +236,19 @@ render_value(_Name, Value, #{}) -> %% @doc Render a template with given bindings. %% Behaves like `render/2`, but raises an error exception if one or more placeholders %% are not found in the bindings. --spec render_strict(t(), bindings()) -> +-spec render_strict(t(), context()) -> term(). -render_strict(Template, Bindings) -> - render_strict(Template, Bindings, #{}). +render_strict(Template, Context) -> + render_strict(Template, Context, #{}). --spec render_strict(t(), bindings(), render_opts()) -> +-spec render_strict(t(), context(), render_opts()) -> term(). -render_strict(Template, Bindings, Opts) -> - case render(Template, Bindings, Opts) of +render_strict(Template, Context, Opts) -> + case render(Template, Context, Opts) of {Render, []} -> Render; {_, Errors = [_ | _]} -> - error(Errors, [unparse(Template), Bindings]) + error(Errors, [unparse(Template), Context]) end. %% @doc Parse an arbitrary Erlang term into a "deep" template. @@ -275,30 +283,30 @@ parse_deep_term(Term, Opts) when is_binary(Term) -> parse_deep_term(Term, _Opts) -> Term. 
-render_deep(Template, Bindings, Opts) when is_map(Template) -> +render_deep(Template, Context, Opts) when is_map(Template) -> maps:fold( fun(KT, VT, {Acc, Errors}) -> - {K, KErrors} = render_deep(KT, Bindings, Opts), - {V, VErrors} = render_deep(VT, Bindings, Opts), + {K, KErrors} = render_deep(KT, Context, Opts), + {V, VErrors} = render_deep(VT, Context, Opts), {Acc#{K => V}, KErrors ++ VErrors ++ Errors} end, {#{}, []}, Template ); -render_deep({list, Template}, Bindings, Opts) when is_list(Template) -> +render_deep({list, Template}, Context, Opts) when is_list(Template) -> lists:mapfoldr( fun(T, Errors) -> - {E, VErrors} = render_deep(T, Bindings, Opts), + {E, VErrors} = render_deep(T, Context, Opts), {E, VErrors ++ Errors} end, [], Template ); -render_deep({tuple, Template}, Bindings, Opts) when is_list(Template) -> - {Term, Errors} = render_deep({list, Template}, Bindings, Opts), +render_deep({tuple, Template}, Context, Opts) when is_list(Template) -> + {Term, Errors} = render_deep({list, Template}, Context, Opts), {list_to_tuple(Term), Errors}; -render_deep(Template, Bindings, Opts) when is_list(Template) -> - {String, Errors} = render(Template, Bindings, Opts), +render_deep(Template, Context, Opts) when is_list(Template) -> + {String, Errors} = render(Template, Context, Opts), {unicode:characters_to_binary(String), Errors}; render_deep(Term, _Bindings, _Opts) -> {Term, []}. @@ -331,7 +339,7 @@ unparse_deep(Term) -> lookup_var(Var, Bindings) -> lookup_var(0, Var, Bindings). -lookup_var(_, Var, Value) when Var == ?PH_VAR_THIS orelse Var == [] -> +lookup_var(_, [], Value) -> {ok, Value}; lookup_var(Loc, [Prop | Rest], Bindings) when is_map(Bindings) -> case lookup(Prop, Bindings) of @@ -343,35 +351,6 @@ lookup_var(Loc, [Prop | Rest], Bindings) when is_map(Bindings) -> lookup_var(Loc, _, Invalid) -> {error, {Loc, type_name(Invalid)}}. -%% @doc Lookup a variable in the bindings accessible through the accessor. 
-%% Additionally to `lookup_var/2` behavior, this function also tries to parse any -%% binary as JSON to a map if accessor needs to go deeper into it. --spec lookup_loose_json(accessor(), bindings() | binary()) -> - {ok, binding()} | {error, reason()}. -lookup_loose_json(Var, Bindings) -> - lookup_loose_json(0, Var, Bindings). - -lookup_loose_json(_, Var, Value) when Var == ?PH_VAR_THIS orelse Var == [] -> - {ok, Value}; -lookup_loose_json(Loc, [Prop | Rest], Bindings) when is_map(Bindings) -> - case lookup(Prop, Bindings) of - {ok, Value} -> - lookup_loose_json(Loc + 1, Rest, Value); - {error, Reason} -> - {error, Reason} - end; -lookup_loose_json(Loc, Rest, Json) when is_binary(Json) -> - try emqx_utils_json:decode(Json) of - Bindings -> - % NOTE: This is intentional, we don't want to parse nested JSON. - lookup_var(Loc, Rest, Bindings) - catch - error:_ -> - {error, {Loc, binary}} - end; -lookup_loose_json(Loc, _, Invalid) -> - {error, {Loc, type_name(Invalid)}}. - type_name(Term) when is_atom(Term) -> atom; type_name(Term) when is_number(Term) -> number; type_name(Term) when is_binary(Term) -> binary; diff --git a/apps/emqx_utils/src/emqx_template_sql.erl b/apps/emqx_utils/src/emqx_template_sql.erl index f215cd868..4e9d8f622 100644 --- a/apps/emqx_utils/src/emqx_template_sql.erl +++ b/apps/emqx_utils/src/emqx_template_sql.erl @@ -27,9 +27,9 @@ -export_type([row_template/0]). --type template() :: emqx_template:t(). +-type template() :: emqx_template:str(). -type row_template() :: [emqx_template:placeholder()]. --type bindings() :: emqx_template:bindings(). +-type context() :: emqx_template:context(). -type values() :: [emqx_utils_sql:value()]. @@ -62,19 +62,19 @@ parse(String, Opts) -> %% @doc Render an SQL statement template given a set of bindings. %% Interpolation generally follows the SQL syntax, strings are escaped according to the %% `escaping` option. 
--spec render(template(), bindings(), render_opts()) -> +-spec render(template(), context(), render_opts()) -> {unicode:chardata(), [_Error]}. -render(Template, Bindings, Opts) -> - emqx_template:render(Template, Bindings, #{ +render(Template, Context, Opts) -> + emqx_template:render(Template, Context, #{ var_trans => fun(Value) -> emqx_utils_sql:to_sql_string(Value, Opts) end }). %% @doc Render an SQL statement template given a set of bindings. %% Errors are raised if any placeholders are not bound. --spec render_strict(template(), bindings(), render_opts()) -> +-spec render_strict(template(), context(), render_opts()) -> unicode:chardata(). -render_strict(Template, Bindings, Opts) -> - emqx_template:render_strict(Template, Bindings, #{ +render_strict(Template, Context, Opts) -> + emqx_template:render_strict(Template, Context, #{ var_trans => fun(Value) -> emqx_utils_sql:to_sql_string(Value, Opts) end }). @@ -124,14 +124,14 @@ mk_replace(':n', N) -> %% An _SQL value_ is a vaguely defined concept here, it is something that's considered %% compatible with the protocol of the database being used. See the definition of %% `emqx_utils_sql:value()` for more details. --spec render_prepstmt(template(), bindings()) -> +-spec render_prepstmt(template(), context()) -> {values(), [_Error]}. -render_prepstmt(Template, Bindings) -> +render_prepstmt(Template, Context) -> Opts = #{var_trans => fun emqx_utils_sql:to_sql_value/1}, - emqx_template:render(Template, Bindings, Opts). + emqx_template:render(Template, Context, Opts). --spec render_prepstmt_strict(template(), bindings()) -> +-spec render_prepstmt_strict(template(), context()) -> values(). -render_prepstmt_strict(Template, Bindings) -> +render_prepstmt_strict(Template, Context) -> Opts = #{var_trans => fun emqx_utils_sql:to_sql_value/1}, - emqx_template:render_strict(Template, Bindings, Opts). + emqx_template:render_strict(Template, Context, Opts). 
diff --git a/apps/emqx_utils/test/emqx_jsonish_tests.erl b/apps/emqx_utils/test/emqx_jsonish_tests.erl new file mode 100644 index 000000000..c776615a1 --- /dev/null +++ b/apps/emqx_utils/test/emqx_jsonish_tests.erl @@ -0,0 +1,97 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2020-2022 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_jsonish_tests). + +-include_lib("eunit/include/eunit.hrl"). + +prop_prio_test_() -> + [ + ?_assertEqual( + {ok, 42}, + emqx_jsonish:lookup([<<"foo">>], #{<<"foo">> => 42, foo => 1337}) + ), + ?_assertEqual( + {ok, 1337}, + emqx_jsonish:lookup([<<"foo">>], #{foo => 1337}) + ) + ]. + +undefined_test() -> + ?assertEqual( + {error, undefined}, + emqx_jsonish:lookup([<<"foo">>], #{}) + ). + +undefined_deep_test() -> + ?assertEqual( + {error, undefined}, + emqx_jsonish:lookup([<<"foo">>, <<"bar">>], #{}) + ). + +undefined_deep_json_test() -> + ?assertEqual( + {error, undefined}, + emqx_jsonish:lookup( + [<<"foo">>, <<"bar">>, <<"baz">>], + <<"{\"foo\":{\"bar\":{\"no\":{}}}}">> + ) + ). + +invalid_type_test() -> + ?assertEqual( + {error, {0, number}}, + emqx_jsonish:lookup([<<"foo">>], <<"42">>) + ). + +invalid_type_deep_test() -> + ?assertEqual( + {error, {2, atom}}, + emqx_jsonish:lookup([<<"foo">>, <<"bar">>, <<"tuple">>], #{foo => #{bar => baz}}) + ). 
+ +decode_json_test() -> + ?assertEqual( + {ok, 42}, + emqx_jsonish:lookup([<<"foo">>, <<"bar">>], <<"{\"foo\":{\"bar\":42}}">>) + ). + +decode_json_deep_test() -> + ?assertEqual( + {ok, 42}, + emqx_jsonish:lookup([<<"foo">>, <<"bar">>], #{<<"foo">> => <<"{\"bar\": 42}">>}) + ). + +decode_json_invalid_type_test() -> + ?assertEqual( + {error, {1, list}}, + emqx_jsonish:lookup([<<"foo">>, <<"bar">>], #{<<"foo">> => <<"[1,2,3]">>}) + ). + +decode_no_json_test() -> + ?assertEqual( + {error, {1, binary}}, + emqx_jsonish:lookup([<<"foo">>, <<"bar">>], #{<<"foo">> => <<0, 1, 2, 3>>}) + ). + +decode_json_no_nested_test() -> + ?assertEqual( + {error, {2, binary}}, + emqx_jsonish:lookup( + [<<"foo">>, <<"bar">>, <<"baz">>], + #{<<"foo">> => <<"{\"bar\":\"{\\\"baz\\\":42}\"}">>} + ) + ). diff --git a/apps/emqx_utils/test/emqx_template_SUITE.erl b/apps/emqx_utils/test/emqx_template_SUITE.erl index 22ffff47c..f8355f769 100644 --- a/apps/emqx_utils/test/emqx_template_SUITE.erl +++ b/apps/emqx_utils/test/emqx_template_SUITE.erl @@ -25,7 +25,7 @@ all() -> emqx_common_test_helpers:all(?MODULE). t_render(_) -> - Bindings = #{ + Context = #{ a => <<"1">>, b => 1, c => 1.0, @@ -38,15 +38,15 @@ t_render(_) -> ), ?assertEqual( {<<"a:1,b:1,c:1.0,d:{\"d1\":\"hi\"},d1:hi,l:[0,1,1000],u:utf-8 is ǝɹǝɥ"/utf8>>, []}, - render_string(Template, Bindings) + render_string(Template, Context) ). t_render_var_trans(_) -> - Bindings = #{a => <<"1">>, b => 1, c => #{prop => 1.0}}, + Context = #{a => <<"1">>, b => 1, c => #{prop => 1.0}}, Template = emqx_template:parse(<<"a:${a},b:${b},c:${c.prop}">>), {String, Errors} = emqx_template:render( Template, - Bindings, + Context, #{var_trans => fun(Name, _) -> "<" ++ Name ++ ">" end} ), ?assertEqual( @@ -55,7 +55,7 @@ t_render_var_trans(_) -> ). 
t_render_path(_) -> - Bindings = #{d => #{d1 => <<"hi">>}}, + Context = #{d => #{d1 => <<"hi">>}}, Template = emqx_template:parse(<<"d.d1:${d.d1}">>), ?assertEqual( ok, @@ -63,11 +63,11 @@ t_render_path(_) -> ), ?assertEqual( {<<"d.d1:hi">>, []}, - render_string(Template, Bindings) + render_string(Template, Context) ). t_render_custom_ph(_) -> - Bindings = #{a => <<"a">>, b => <<"b">>}, + Context = #{a => <<"a">>, b => <<"b">>}, Template = emqx_template:parse(<<"a:${a},b:${b}">>), ?assertEqual( {error, [{"b", disallowed}]}, @@ -75,21 +75,21 @@ t_render_custom_ph(_) -> ), ?assertEqual( <<"a:a,b:b">>, - render_strict_string(Template, Bindings) + render_strict_string(Template, Context) ). t_render_this(_) -> - Bindings = #{a => <<"a">>, b => [1, 2, 3]}, + Context = #{a => <<"a">>, b => [1, 2, 3]}, Template = emqx_template:parse(<<"this:${} / also:${.}">>), ?assertEqual(ok, emqx_template:validate(["."], Template)), ?assertEqual( % NOTE: order of the keys in the JSON object depends on the JSON encoder <<"this:{\"b\":[1,2,3],\"a\":\"a\"} / also:{\"b\":[1,2,3],\"a\":\"a\"}">>, - render_strict_string(Template, Bindings) + render_strict_string(Template, Context) ). t_render_missing_bindings(_) -> - Bindings = #{no => #{}, c => #{<<"c1">> => 42}}, + Context = #{no => #{}, c => #{<<"c1">> => 42}}, Template = emqx_template:parse( <<"a:${a},b:${b},c:${c.c1.c2},d:${d.d1},e:${no.such_atom_i_swear}">> ), @@ -101,7 +101,7 @@ t_render_missing_bindings(_) -> {"b", undefined}, {"a", undefined} ]}, - render_string(Template, Bindings) + render_string(Template, Context) ), ?assertError( [ @@ -111,7 +111,21 @@ t_render_missing_bindings(_) -> {"b", undefined}, {"a", undefined} ], - render_strict_string(Template, Bindings) + render_strict_string(Template, Context) + ). 
+ +t_render_custom_bindings(_) -> + _ = erlang:put(a, <<"foo">>), + _ = erlang:put(b, #{<<"bar">> => #{atom => 42}}), + Template = emqx_template:parse( + <<"a:${a},b:${b.bar.atom},c:${c},oops:${b.bar.atom.oops}">> + ), + ?assertEqual( + {<<"a:foo,b:42,c:undefined,oops:undefined">>, [ + {"b.bar.atom.oops", {2, number}}, + {"c", undefined} + ]}, + render_string(Template, {?MODULE, []}) ). t_unparse(_) -> @@ -141,33 +155,33 @@ t_const(_) -> ). t_render_partial_ph(_) -> - Bindings = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, + Context = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, Template = emqx_template:parse(<<"a:$a,b:b},c:{c},d:${d">>), ?assertEqual( <<"a:$a,b:b},c:{c},d:${d">>, - render_strict_string(Template, Bindings) + render_strict_string(Template, Context) ). t_parse_escaped(_) -> - Bindings = #{a => <<"1">>, b => 1, c => "VAR"}, + Context = #{a => <<"1">>, b => 1, c => "VAR"}, Template = emqx_template:parse(<<"a:${a},b:${$}{b},c:${$}{${c}},lit:${$}{$}">>), ?assertEqual( <<"a:1,b:${b},c:${VAR},lit:${$}">>, - render_strict_string(Template, Bindings) + render_strict_string(Template, Context) ). t_parse_escaped_dquote(_) -> - Bindings = #{a => <<"1">>, b => 1}, + Context = #{a => <<"1">>, b => 1}, Template = emqx_template:parse(<<"a:\"${a}\",b:\"${$}{b}\"">>, #{ strip_double_quote => true }), ?assertEqual( <<"a:1,b:\"${b}\"">>, - render_strict_string(Template, Bindings) + render_strict_string(Template, Context) ). t_parse_sql_prepstmt(_) -> - Bindings = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, + Context = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, {PrepareStatement, RowTemplate} = emqx_template_sql:parse_prepstmt(<<"a:${a},b:${b},c:${c},d:${d}">>, #{ parameters => '?' 
@@ -175,11 +189,11 @@ t_parse_sql_prepstmt(_) -> ?assertEqual(<<"a:?,b:?,c:?,d:?">>, bin(PrepareStatement)), ?assertEqual( {[<<"1">>, 1, 1.0, <<"{\"d1\":\"hi\"}">>], _Errors = []}, - emqx_template_sql:render_prepstmt(RowTemplate, Bindings) + emqx_template_sql:render_prepstmt(RowTemplate, Context) ). t_parse_sql_prepstmt_n(_) -> - Bindings = #{a => undefined, b => true, c => atom, d => #{d1 => 42.1337}}, + Context = #{a => undefined, b => true, c => atom, d => #{d1 => 42.1337}}, {PrepareStatement, RowTemplate} = emqx_template_sql:parse_prepstmt(<<"a:${a},b:${b},c:${c},d:${d}">>, #{ parameters => '$n' @@ -187,7 +201,7 @@ t_parse_sql_prepstmt_n(_) -> ?assertEqual(<<"a:$1,b:$2,c:$3,d:$4">>, bin(PrepareStatement)), ?assertEqual( [null, true, <<"atom">>, <<"{\"d1\":42.1337}">>], - emqx_template_sql:render_prepstmt_strict(RowTemplate, Bindings) + emqx_template_sql:render_prepstmt_strict(RowTemplate, Context) ). t_parse_sql_prepstmt_colon(_) -> @@ -198,14 +212,14 @@ t_parse_sql_prepstmt_colon(_) -> ?assertEqual(<<"a=:1,b=:2,c=:3,d=:4">>, bin(PrepareStatement)). t_parse_sql_prepstmt_partial_ph(_) -> - Bindings = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, + Context = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, {PrepareStatement, RowTemplate} = emqx_template_sql:parse_prepstmt(<<"a:$a,b:b},c:{c},d:${d">>, #{parameters => '?'}), ?assertEqual(<<"a:$a,b:b},c:{c},d:${d">>, bin(PrepareStatement)), - ?assertEqual([], emqx_template_sql:render_prepstmt_strict(RowTemplate, Bindings)). + ?assertEqual([], emqx_template_sql:render_prepstmt_strict(RowTemplate, Context)). 
t_render_sql(_) -> - Bindings = #{ + Context = #{ a => <<"1">>, b => 1, c => 1.0, @@ -216,17 +230,17 @@ t_render_sql(_) -> Template = emqx_template:parse(<<"a:${a},b:${b},c:${c},d:${d},n:${n},u:${u}">>), ?assertMatch( {_String, _Errors = []}, - emqx_template_sql:render(Template, Bindings, #{}) + emqx_template_sql:render(Template, Context, #{}) ), ?assertEqual( <<"a:'1',b:1,c:1.0,d:'{\"d1\":\"hi\"}',n:NULL,u:'utf8\\'s cool 🐸'"/utf8>>, - bin(emqx_template_sql:render_strict(Template, Bindings, #{})) + bin(emqx_template_sql:render_strict(Template, Context, #{})) ). t_render_mysql(_) -> %% with apostrophes %% https://github.com/emqx/emqx/issues/4135 - Bindings = #{ + Context = #{ a => <<"1''2">>, b => 1, c => 1.0, @@ -245,13 +259,13 @@ t_render_mysql(_) -> "e:'\\\\\\0💩',f:0x6E6F6E2D75746638DCC900,g:'utf8\\'s cool 🐸',"/utf8, "h:'imgood'" >>, - bin(emqx_template_sql:render_strict(Template, Bindings, #{escaping => mysql})) + bin(emqx_template_sql:render_strict(Template, Context, #{escaping => mysql})) ). t_render_cql(_) -> %% with apostrophes for cassandra %% https://github.com/emqx/emqx/issues/4148 - Bindings = #{ + Context = #{ a => <<"1''2">>, b => 1, c => 1.0, @@ -260,7 +274,7 @@ t_render_cql(_) -> Template = emqx_template:parse(<<"a:${a},b:${b},c:${c},d:${d}">>), ?assertEqual( <<"a:'1''''2',b:1,c:1.0,d:'{\"d1\":\"someone''s phone\"}'">>, - bin(emqx_template_sql:render_strict(Template, Bindings, #{escaping => cql})) + bin(emqx_template_sql:render_strict(Template, Context, #{escaping => cql})) ). t_render_sql_custom_ph(_) -> @@ -273,7 +287,7 @@ t_render_sql_custom_ph(_) -> ?assertEqual(<<"a:$1,b:$2">>, bin(PrepareStatement)). 
t_render_sql_strip_double_quote(_) -> - Bindings = #{a => <<"a">>, b => <<"b">>}, + Context = #{a => <<"a">>, b => <<"b">>}, %% no strip_double_quote option: "${key}" -> "value" {PrepareStatement1, RowTemplate1} = emqx_template_sql:parse_prepstmt( @@ -283,7 +297,7 @@ t_render_sql_strip_double_quote(_) -> ?assertEqual(<<"a:\"$1\",b:\"$2\"">>, bin(PrepareStatement1)), ?assertEqual( [<<"a">>, <<"b">>], - emqx_template_sql:render_prepstmt_strict(RowTemplate1, Bindings) + emqx_template_sql:render_prepstmt_strict(RowTemplate1, Context) ), %% strip_double_quote = true: "${key}" -> value @@ -294,11 +308,11 @@ t_render_sql_strip_double_quote(_) -> ?assertEqual(<<"a:$1,b:$2">>, bin(PrepareStatement2)), ?assertEqual( [<<"a">>, <<"b">>], - emqx_template_sql:render_prepstmt_strict(RowTemplate2, Bindings) + emqx_template_sql:render_prepstmt_strict(RowTemplate2, Context) ). t_render_tmpl_deep(_) -> - Bindings = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, + Context = #{a => <<"1">>, b => 1, c => 1.0, d => #{d1 => <<"hi">>}}, Template = emqx_template:parse_deep( #{<<"${a}">> => [<<"$${b}">>, "c", 2, 3.0, '${d}', {[<<"${c}">>, <<"${$}{d}">>], 0}]} @@ -311,7 +325,7 @@ t_render_tmpl_deep(_) -> ?assertEqual( #{<<"1">> => [<<"$1">>, "c", 2, 3.0, '${d}', {[<<"1.0">>, <<"${d}">>], 0}]}, - emqx_template:render_strict(Template, Bindings) + emqx_template:render_strict(Template, Context) ). t_unparse_tmpl_deep(_) -> @@ -321,12 +335,22 @@ t_unparse_tmpl_deep(_) -> %% -render_string(Template, Bindings) -> - {String, Errors} = emqx_template:render(Template, Bindings), +render_string(Template, Context) -> + {String, Errors} = emqx_template:render(Template, Context), {bin(String), Errors}. -render_strict_string(Template, Bindings) -> - bin(emqx_template:render_strict(Template, Bindings)). +render_strict_string(Template, Context) -> + bin(emqx_template:render_strict(Template, Context)). bin(String) -> unicode:characters_to_binary(String). 
+ +%% Access module API + +lookup([], _) -> + {error, undefined}; +lookup([Prop | Rest], _) -> + case erlang:get(binary_to_atom(Prop)) of + undefined -> {error, undefined}; + Value -> emqx_template:lookup_var(Rest, Value) + end. From b5b6c3f8cc6ee7a446a7b92b917df30b4fdc0fa1 Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Fri, 20 Oct 2023 18:25:47 +0700 Subject: [PATCH 066/155] fix(tpl): ensure full backward compat in basic connectors --- .../src/emqx_bridge_http_connector.erl | 2 +- .../test/emqx_bridge_mysql_SUITE.erl | 29 ++++++++++++++----- apps/emqx_mysql/src/emqx_mysql.erl | 15 +++++++--- apps/emqx_postgresql/src/emqx_postgresql.erl | 19 +++++++----- apps/emqx_utils/src/emqx_placeholder.erl | 6 ++-- apps/emqx_utils/src/emqx_template_sql.erl | 7 ++++- apps/emqx_utils/src/emqx_utils_sql.erl | 9 ++++-- apps/emqx_utils/test/emqx_template_SUITE.erl | 4 +++ 8 files changed, 64 insertions(+), 27 deletions(-) diff --git a/apps/emqx_bridge_http/src/emqx_bridge_http_connector.erl b/apps/emqx_bridge_http/src/emqx_bridge_http_connector.erl index 88f55af52..b2f876d21 100644 --- a/apps/emqx_bridge_http/src/emqx_bridge_http_connector.erl +++ b/apps/emqx_bridge_http/src/emqx_bridge_http_connector.erl @@ -573,7 +573,7 @@ render_headers(HeaderTks, Msg) -> render_template(Template, Msg) -> % NOTE: ignoring errors here, missing variables will be rendered as `"undefined"`. - {String, _Errors} = emqx_template:render(Template, Msg), + {String, _Errors} = emqx_template:render(Template, {emqx_jsonish, Msg}), String. render_template_string(Template, Msg) -> diff --git a/apps/emqx_bridge_mysql/test/emqx_bridge_mysql_SUITE.erl b/apps/emqx_bridge_mysql/test/emqx_bridge_mysql_SUITE.erl index 2eeccfd77..a34b65ede 100644 --- a/apps/emqx_bridge_mysql/test/emqx_bridge_mysql_SUITE.erl +++ b/apps/emqx_bridge_mysql/test/emqx_bridge_mysql_SUITE.erl @@ -565,6 +565,7 @@ t_simple_sql_query(Config) -> ok. 
t_missing_data(Config) -> + BatchSize = ?config(batch_size, Config), ?assertMatch( {ok, _}, create_bridge(Config) @@ -575,13 +576,27 @@ t_missing_data(Config) -> ), send_message(Config, #{}), {ok, [Event]} = snabbkaffe:receive_events(SRef), - ?assertMatch( - #{ - result := - {error, {unrecoverable_error, {1048, _, <<"Column 'arrived' cannot be null">>}}} - }, - Event - ), + case BatchSize of + N when N > 1 -> + ?assertMatch( + #{ + result := + {error, + {unrecoverable_error, + {1292, _, <<"Truncated incorrect DOUBLE value: 'undefined'">>}}} + }, + Event + ); + 1 -> + ?assertMatch( + #{ + result := + {error, + {unrecoverable_error, {1048, _, <<"Column 'arrived' cannot be null">>}}} + }, + Event + ) + end, ok. t_bad_sql_parameter(Config) -> diff --git a/apps/emqx_mysql/src/emqx_mysql.erl b/apps/emqx_mysql/src/emqx_mysql.erl index e052b9b89..d8b7994ab 100644 --- a/apps/emqx_mysql/src/emqx_mysql.erl +++ b/apps/emqx_mysql/src/emqx_mysql.erl @@ -426,8 +426,12 @@ proc_sql_params(TypeOrKey, SQLOrData, Params, #{query_templates := Templates}) - undefined -> {SQLOrData, Params}; {_InsertPart, RowTemplate} -> - % NOTE: ignoring errors here, missing variables are set to `null`. - {Row, _Errors} = emqx_template_sql:render_prepstmt(RowTemplate, SQLOrData), + % NOTE + % Ignoring errors here, missing variables are set to `null`. + {Row, _Errors} = emqx_template_sql:render_prepstmt( + RowTemplate, + {emqx_jsonish, SQLOrData} + ), {TypeOrKey, Row} end. @@ -437,8 +441,11 @@ on_batch_insert(InstId, BatchReqs, {InsertPart, RowTemplate}, State) -> on_sql_query(InstId, query, Query, no_params, default_timeout, State). render_row(RowTemplate, Data) -> - % NOTE: ignoring errors here, missing variables are set to "NULL". - {Row, _Errors} = emqx_template_sql:render(RowTemplate, Data, #{escaping => mysql}), + % NOTE + % Ignoring errors here, missing variables are set to "'undefined'" due to backward + % compatibility requirements. 
+ RenderOpts = #{escaping => mysql, undefined => <<"undefined">>}, + {Row, _Errors} = emqx_template_sql:render(RowTemplate, {emqx_jsonish, Data}, RenderOpts), Row. on_sql_query( diff --git a/apps/emqx_postgresql/src/emqx_postgresql.erl b/apps/emqx_postgresql/src/emqx_postgresql.erl index 3f7b43c79..814d8a074 100644 --- a/apps/emqx_postgresql/src/emqx_postgresql.erl +++ b/apps/emqx_postgresql/src/emqx_postgresql.erl @@ -313,8 +313,8 @@ do_check_prepares( case validate_table_existence(WorkerPids, SQL) of ok -> ok; - {error, undefined_table} -> - {error, {undefined_table, State}} + {error, Reason} -> + {error, Reason} end; do_check_prepares(#{prepares := Prepares}) when is_map(Prepares) -> ok; @@ -433,7 +433,7 @@ parse_prepare_sql(Key, Query, Acc) -> render_prepare_sql_row(RowTemplate, Data) -> % NOTE: ignoring errors here, missing variables will be replaced with `null`. - {Row, _Errors} = emqx_template_sql:render_prepstmt(RowTemplate, Data), + {Row, _Errors} = emqx_template_sql:render_prepstmt(RowTemplate, {emqx_jsonish, Data}), Row. init_prepare(State = #{query_templates := Templates}) when map_size(Templates) == 0 -> @@ -443,10 +443,13 @@ init_prepare(State = #{}) -> {ok, PrepStatements} -> State#{prepares => PrepStatements}; Error -> - ?SLOG(error, maps:merge( - #{msg => <<"postgresql_init_prepare_statement_failed">>}, - translate_to_log_context(Error) - )), + ?SLOG( + error, + maps:merge( + #{msg => <<"postgresql_init_prepare_statement_failed">>}, + translate_to_log_context(Error) + ) + ), %% mark the prepares failed State#{prepares => Error} end. 
@@ -484,7 +487,7 @@ prepare_sql_to_conn(Conn, Prepares) -> prepare_sql_to_conn(Conn, [], Statements) when is_pid(Conn) -> {ok, Statements}; prepare_sql_to_conn(Conn, [{Key, {SQL, _RowTemplate}} | Rest], Statements) when is_pid(Conn) -> - LogMeta = #{msg => "PostgreSQL Prepare Statement", name => Key, sql => SQL}, + LogMeta = #{msg => "postgresql_prepare_statement", name => Key, sql => SQL}, ?SLOG(info, LogMeta), case epgsql:parse2(Conn, Key, SQL, []) of {ok, Statement} -> diff --git a/apps/emqx_utils/src/emqx_placeholder.erl b/apps/emqx_utils/src/emqx_placeholder.erl index 4d386840f..90df6003b 100644 --- a/apps/emqx_utils/src/emqx_placeholder.erl +++ b/apps/emqx_utils/src/emqx_placeholder.erl @@ -249,15 +249,15 @@ bin(Val) -> emqx_utils_conv:bin(Val). -spec quote_sql(_Value) -> iolist(). quote_sql(Str) -> - emqx_utils_sql:to_sql_string(Str, #{escaping => sql}). + emqx_utils_sql:to_sql_string(Str, #{escaping => sql, undefined => <<"undefined">>}). -spec quote_cql(_Value) -> iolist(). quote_cql(Str) -> - emqx_utils_sql:to_sql_string(Str, #{escaping => cql}). + emqx_utils_sql:to_sql_string(Str, #{escaping => cql, undefined => <<"undefined">>}). -spec quote_mysql(_Value) -> iolist(). quote_mysql(Str) -> - emqx_utils_sql:to_sql_string(Str, #{escaping => mysql}). + emqx_utils_sql:to_sql_string(Str, #{escaping => mysql, undefined => <<"undefined">>}). lookup_var(Var, Value) when Var == ?PH_VAR_THIS orelse Var == [] -> Value; diff --git a/apps/emqx_utils/src/emqx_template_sql.erl b/apps/emqx_utils/src/emqx_template_sql.erl index 4e9d8f622..9b2c1d55c 100644 --- a/apps/emqx_utils/src/emqx_template_sql.erl +++ b/apps/emqx_utils/src/emqx_template_sql.erl @@ -40,7 +40,12 @@ }. -type render_opts() :: #{ - escaping => mysql | cql | sql + %% String escaping rules to use. + %% Default: `sql` (generic) + escaping => sql | mysql | cql, + %% Value to map `undefined` to, either to NULLs or to arbitrary strings. + %% Default: `null` + undefined => null | unicode:chardata() }. 
-define(TEMPLATE_PARSE_OPTS, [strip_double_quote]). diff --git a/apps/emqx_utils/src/emqx_utils_sql.erl b/apps/emqx_utils/src/emqx_utils_sql.erl index 12aac6464..9ce9e576d 100644 --- a/apps/emqx_utils/src/emqx_utils_sql.erl +++ b/apps/emqx_utils/src/emqx_utils_sql.erl @@ -82,8 +82,13 @@ to_sql_value(Map) when is_map(Map) -> emqx_utils_json:encode(Map). %% SQL statements. The value is escaped if necessary. -spec to_sql_string(term(), Options) -> unicode:chardata() when Options :: #{ - escaping => cql | mysql | sql + escaping => mysql | sql | cql, + undefined => null | unicode:chardata() }. +to_sql_string(undefined, #{undefined := Str} = Opts) when Str =/= null -> + to_sql_string(Str, Opts); +to_sql_string(undefined, #{}) -> + <<"NULL">>; to_sql_string(String, #{escaping := mysql}) when is_binary(String) -> try escape_mysql(String) @@ -99,8 +104,6 @@ to_sql_string(Term, #{}) -> maybe_escape(Term, fun escape_sql/1). -spec maybe_escape(_Value, fun((binary()) -> iodata())) -> unicode:chardata(). -maybe_escape(undefined, _EscapeFun) -> - <<"NULL">>; maybe_escape(Str, EscapeFun) when is_binary(Str) -> EscapeFun(Str); maybe_escape(Str, EscapeFun) when is_list(Str) -> diff --git a/apps/emqx_utils/test/emqx_template_SUITE.erl b/apps/emqx_utils/test/emqx_template_SUITE.erl index f8355f769..4dfe5de2e 100644 --- a/apps/emqx_utils/test/emqx_template_SUITE.erl +++ b/apps/emqx_utils/test/emqx_template_SUITE.erl @@ -235,6 +235,10 @@ t_render_sql(_) -> ?assertEqual( <<"a:'1',b:1,c:1.0,d:'{\"d1\":\"hi\"}',n:NULL,u:'utf8\\'s cool 🐸'"/utf8>>, bin(emqx_template_sql:render_strict(Template, Context, #{})) + ), + ?assertEqual( + <<"a:'1',b:1,c:1.0,d:'{\"d1\":\"hi\"}',n:'undefined',u:'utf8\\'s cool 🐸'"/utf8>>, + bin(emqx_template_sql:render_strict(Template, Context, #{undefined => "undefined"})) ). 
t_render_mysql(_) -> From e521a9f5fc9adaf3a3bbc35bf44ff26358404603 Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Thu, 2 Nov 2023 15:40:20 +0700 Subject: [PATCH 067/155] fix(utils): denote `emqx_jsonish` follows access module behaviour Defined in `emqx_template`. --- apps/emqx_utils/src/emqx_jsonish.erl | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/apps/emqx_utils/src/emqx_jsonish.erl b/apps/emqx_utils/src/emqx_jsonish.erl index b2d92c7fc..ef26da1d8 100644 --- a/apps/emqx_utils/src/emqx_jsonish.erl +++ b/apps/emqx_utils/src/emqx_jsonish.erl @@ -16,6 +16,7 @@ -module(emqx_jsonish). +-behaviour(emqx_template). -export([lookup/2]). -export_type([t/0]). @@ -53,11 +54,11 @@ lookup(Loc, Decoded, [Prop | Rest], Jsonish) when is_map(Jsonish) -> {error, Reason} -> {error, Reason} end; -lookup(Loc, _Decoded = false, Rest, Json) when is_binary(Json) -> +lookup(Loc, _Decoded = false, Props, Json) when is_binary(Json) -> try emqx_utils_json:decode(Json) of Value -> % NOTE: This is intentional, we don't want to parse nested JSON. - lookup(Loc, true, Rest, Value) + lookup(Loc, true, Props, Value) catch error:_ -> {error, {Loc, binary}} From f1847fe494c18b4731890955dce974d1eb1027ae Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Thu, 2 Nov 2023 15:41:42 +0700 Subject: [PATCH 068/155] chore(tpl): drop unnecessary binding --- apps/emqx_utils/src/emqx_template.erl | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/apps/emqx_utils/src/emqx_template.erl b/apps/emqx_utils/src/emqx_template.erl index 1ccc74c50..ac330becf 100644 --- a/apps/emqx_utils/src/emqx_template.erl +++ b/apps/emqx_utils/src/emqx_template.erl @@ -117,8 +117,7 @@ parse(String, Opts) -> <<"(" ?RE_PLACEHOLDER "|" ?RE_ESCAPE ")">> end, Splits = re:split(String, RE, [{return, binary}, group, trim, unicode]), - Components = lists:flatmap(fun parse_split/1, Splits), - Components. + lists:flatmap(fun parse_split/1, Splits). 
parse_split([Part, _PH, Var, <<>>]) -> % Regular placeholder From 729c6edff632534cde79d74e412df0d84d0893c5 Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Thu, 2 Nov 2023 17:08:28 +0700 Subject: [PATCH 069/155] chore(repub): simplify error handling in MQTT props templating --- .../src/emqx_rule_actions.erl | 78 ++++++++----------- 1 file changed, 31 insertions(+), 47 deletions(-) diff --git a/apps/emqx_rule_engine/src/emqx_rule_actions.erl b/apps/emqx_rule_engine/src/emqx_rule_actions.erl index d0810eb84..29dbc2315 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_actions.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_actions.erl @@ -255,31 +255,33 @@ render_pub_props(UserPropertiesTemplate, Selected, Env) -> end, #{'User-Property' => UserProperties}. +%% + +-define(BADPROP(K, REASON, ENV, DATA), + ?SLOG( + debug, + DATA#{ + msg => "bad_mqtt_property_value_ignored", + rule_id => emqx_utils_maps:deep_get([metadata, rule_id], ENV, undefined), + reason => REASON, + property => K + } + ) +). + render_mqtt_properties(MQTTPropertiesTemplate, Selected, Env) -> - #{metadata := #{rule_id := RuleId}} = Env, MQTTProperties = - maps:fold( - fun(K, Template, Acc) -> + maps:map( + fun(K, Template) -> {V, Errors} = render_template(Template, Selected), - NAcc = Acc#{K => iolist_to_binary(V)}, case Errors of [] -> ok; Errors -> - ?SLOG( - debug, - #{ - msg => "bad_mqtt_property_value_ignored", - rule_id => RuleId, - reason => Errors, - property => K, - selected => Selected - } - ) + ?BADPROP(K, Errors, Env, #{selected => Selected}) end, - NAcc + iolist_to_binary(V) end, - #{}, MQTTPropertiesTemplate ), coerce_properties_values(MQTTProperties, Env). @@ -294,42 +296,24 @@ ensure_int(B) when is_binary(B) -> ensure_int(I) when is_integer(I) -> I. 
-coerce_properties_values(MQTTProperties, #{metadata := #{rule_id := RuleId}}) -> - maps:fold( - fun(K, V0, Acc) -> +coerce_properties_values(MQTTProperties, Env) -> + maps:filtermap( + fun(K, V) -> try - V = encode_mqtt_property(K, V0), - Acc#{K => V} + {true, encode_mqtt_property(K, V)} catch - throw:bad_integer -> - ?SLOG( - debug, - #{ - msg => "bad_mqtt_property_value_ignored", - rule_id => RuleId, - reason => bad_integer, - property => K, - value => V0 - } - ), - Acc; + throw:Reason -> + ?BADPROP(K, Reason, Env, #{value => V}), + false; Kind:Reason:Stacktrace -> - ?SLOG( - debug, - #{ - msg => "bad_mqtt_property_value_ignored", - rule_id => RuleId, - exception => Kind, - reason => Reason, - property => K, - value => V0, - stacktrace => Stacktrace - } - ), - Acc + ?BADPROP(K, Reason, Env, #{ + value => V, + exception => Kind, + stacktrace => Stacktrace + }), + false end end, - #{}, MQTTProperties ). From 75704513d8ea28d07b85152b719f5e8be04862c2 Mon Sep 17 00:00:00 2001 From: Ilya Averyanov Date: Mon, 6 Nov 2023 10:56:48 +0300 Subject: [PATCH 070/155] fix(plugins): fix backward compatibility --- apps/emqx/src/emqx_hookpoints.erl | 47 ++++++++++++++----- apps/emqx_plugins/test/emqx_plugins_SUITE.erl | 40 ++++++++++++++++ changes/ce/fix-11886.en.md | 3 ++ 3 files changed, 77 insertions(+), 13 deletions(-) create mode 100644 changes/ce/fix-11886.en.md diff --git a/apps/emqx/src/emqx_hookpoints.erl b/apps/emqx/src/emqx_hookpoints.erl index 1a1452a57..ba125101e 100644 --- a/apps/emqx/src/emqx_hookpoints.erl +++ b/apps/emqx/src/emqx_hookpoints.erl @@ -16,6 +16,8 @@ -module(emqx_hookpoints). +-include("logger.hrl"). + -type callback_result() :: stop | any(). -type fold_callback_result(Acc) :: {stop, Acc} | {ok, Acc} | stop | any(). @@ -62,12 +64,16 @@ 'delivery.dropped', 'delivery.completed', 'cm.channel.unregistered', - 'tls_handshake.psk_lookup', + 'tls_handshake.psk_lookup' +]). 
+%% Our template plugin used this hookpoints before its 5.1.0 version, +%% so we keep them here +-define(DEPRECATED_HOOKPOINTS, [ %% This is a deprecated hookpoint renamed to 'client.authorize' - %% However, our template plugin used this hookpoint before its 5.1.0 version, - %% so we keep it here - 'client.check_acl' + 'client.check_acl', + %% Misspelled hookpoint + 'session.takeovered' ]). %%----------------------------------------------------------------------------- @@ -206,27 +212,42 @@ when %% API %%----------------------------------------------------------------------------- -default_hookpoints() -> - ?HOOKPOINTS. +%% Binary hookpoint names are dynamic and used for bridges +-type registered_hookpoint() :: atom(). +-type registered_hookpoint_status() :: valid | deprecated. +-spec default_hookpoints() -> #{registered_hookpoint() => registered_hookpoint_status()}. +default_hookpoints() -> + maps:merge( + maps:from_keys(?HOOKPOINTS, valid), + maps:from_keys(?DEPRECATED_HOOKPOINTS, deprecated) + ). + +-spec register_hookpoints() -> ok. register_hookpoints() -> register_hookpoints(default_hookpoints()). -register_hookpoints(HookPoints) -> - persistent_term:put(?MODULE, maps:from_keys(HookPoints, true)). +-spec register_hookpoints( + [registered_hookpoint()] | #{registered_hookpoint() => registered_hookpoint_status()} +) -> ok. +register_hookpoints(HookPoints) when is_list(HookPoints) -> + register_hookpoints(maps:from_keys(HookPoints, valid)); +register_hookpoints(HookPoints) when is_map(HookPoints) -> + persistent_term:put(?MODULE, HookPoints). +-spec verify_hookpoint(registered_hookpoint() | binary()) -> ok | no_return(). 
verify_hookpoint(HookPoint) when is_binary(HookPoint) -> ok; verify_hookpoint(HookPoint) -> - case maps:is_key(HookPoint, registered_hookpoints()) of - true -> - ok; - false -> - error({invalid_hookpoint, HookPoint}) + case maps:find(HookPoint, registered_hookpoints()) of + {ok, valid} -> ok; + {ok, deprecated} -> ?SLOG(warning, #{msg => deprecated_hookpoint, hookpoint => HookPoint}); + error -> error({invalid_hookpoint, HookPoint}) end. %%----------------------------------------------------------------------------- %% Internal API %%----------------------------------------------------------------------------- +-spec registered_hookpoints() -> #{registered_hookpoint() => registered_hookpoint_status()}. registered_hookpoints() -> persistent_term:get(?MODULE, #{}). diff --git a/apps/emqx_plugins/test/emqx_plugins_SUITE.erl b/apps/emqx_plugins/test/emqx_plugins_SUITE.erl index 5680aa047..3e9850129 100644 --- a/apps/emqx_plugins/test/emqx_plugins_SUITE.erl +++ b/apps/emqx_plugins/test/emqx_plugins_SUITE.erl @@ -29,6 +29,16 @@ ). -define(EMQX_PLUGIN_TEMPLATE_VSN, "5.1.0"). -define(EMQX_PLUGIN_TEMPLATE_TAG, "5.1.0"). + +-define(EMQX_PLUGIN_TEMPLATES_LEGACY, [ + #{ + vsn => "5.0.0", + tag => "5.0.0", + release_name => "emqx_plugin_template", + app_name => emqx_plugin_template + } +]). + -define(EMQX_ELIXIR_PLUGIN_TEMPLATE_RELEASE_NAME, "elixir_plugin_template"). -define(EMQX_ELIXIR_PLUGIN_TEMPLATE_URL, "https://github.com/emqx/emqx-elixir-plugin/releases/download/" @@ -290,6 +300,36 @@ t_start_restart_and_stop(Config) -> ?assertEqual([], emqx_plugins:list()), ok. +t_legacy_plugins({init, Config}) -> + Config; +t_legacy_plugins({'end', _Config}) -> + ok; +t_legacy_plugins(Config) -> + lists:foreach( + fun(LegacyPlugin) -> + test_legacy_plugin(LegacyPlugin, Config) + end, + ?EMQX_PLUGIN_TEMPLATES_LEGACY + ). 
+ +test_legacy_plugin(#{app_name := AppName} = LegacyPlugin, _Config) -> + #{package := Package} = get_demo_plugin_package(LegacyPlugin#{ + shdir => emqx_plugins:install_dir(), git_url => ?EMQX_PLUGIN_TEMPLATE_URL + }), + NameVsn = filename:basename(Package, ?PACKAGE_SUFFIX), + ok = emqx_plugins:ensure_installed(NameVsn), + %% start + ok = emqx_plugins:ensure_started(NameVsn), + ok = assert_app_running(AppName, true), + ok = assert_app_running(map_sets, true), + %% stop + ok = emqx_plugins:ensure_stopped(NameVsn), + ok = assert_app_running(AppName, false), + ok = assert_app_running(map_sets, false), + ok = emqx_plugins:ensure_uninstalled(NameVsn), + ?assertEqual([], emqx_plugins:list()), + ok. + t_enable_disable({init, Config}) -> #{package := Package} = get_demo_plugin_package(), NameVsn = filename:basename(Package, ?PACKAGE_SUFFIX), diff --git a/changes/ce/fix-11886.en.md b/changes/ce/fix-11886.en.md new file mode 100644 index 000000000..a4f7617a1 --- /dev/null +++ b/changes/ce/fix-11886.en.md @@ -0,0 +1,3 @@ +Fixed backward plugin compatibility. + +Currently, EMQX validates hookpoint names, so invalid hookspoints cannot be used for registering hooks. However, older versions of plugin templates used some misspelled hookpoints, and so could the real plugins. We allow the old hookpoints to be used for registering hooks, but issue a warning that they are deprecated. As before, these hooks are never called. From 7ab57824dcfaf07ce9f7de32b26d1941e64701cc Mon Sep 17 00:00:00 2001 From: Thales Macedo Garitezi Date: Mon, 2 Oct 2023 16:20:16 -0300 Subject: [PATCH 071/155] chore(ds): change return type of `storage_layer:next/{1,2}` Part of https://emqx.atlassian.net/browse/EMQX-10942 The goal is to help make it clear to the caller of `next` what to do next: if the iterator should still be used or if no new messages will ever come out of it. From: ```erlang -spec next(iterator()) -> {value, binary(), iterator()} | none | {error, closed}. 
``` To: ```erlang -spec next(iterator()) -> {ok, iterator(), [binary()]} | end_of_stream. -spec next(iterator(), pos_integer()) -> {ok, iterator(), [binary()]} | end_of_stream. ``` --- .../test/emqx_persistent_messages_SUITE.erl | 4 +- .../src/emqx_ds_message_storage_bitmask.erl | 27 ++++++--- .../src/emqx_ds_storage_layer.erl | 57 ++++++++++++++----- .../test/emqx_ds_storage_layer_SUITE.erl | 21 ++++--- .../props/prop_replay_message_storage.erl | 7 +-- 5 files changed, 76 insertions(+), 40 deletions(-) diff --git a/apps/emqx/test/emqx_persistent_messages_SUITE.erl b/apps/emqx/test/emqx_persistent_messages_SUITE.erl index 751b7e4b8..2d8768e65 100644 --- a/apps/emqx/test/emqx_persistent_messages_SUITE.erl +++ b/apps/emqx/test/emqx_persistent_messages_SUITE.erl @@ -272,9 +272,9 @@ consume(Shard, IteratorId) when is_binary(IteratorId) -> consume(It) -> case emqx_ds_storage_layer:next(It) of - {value, Msg, NIt} -> + {ok, NIt, [Msg]} -> [emqx_persistent_message:deserialize(Msg) | consume(NIt)]; - none -> + end_of_stream -> [] end. diff --git a/apps/emqx_durable_storage/src/emqx_ds_message_storage_bitmask.erl b/apps/emqx_durable_storage/src/emqx_ds_message_storage_bitmask.erl index 7b141b202..be8a207bb 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_message_storage_bitmask.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_message_storage_bitmask.erl @@ -90,7 +90,7 @@ -export([next/1]). -export([preserve_iterator/1]). --export([restore_iterator/3]). +-export([restore_iterator/2]). -export([refresh_iterator/1]). %% Debug/troubleshooting: @@ -217,6 +217,7 @@ -opaque db() :: #db{}. -opaque iterator() :: #it{}. +-type serialized_iterator() :: binary(). -type keymapper() :: #keymapper{}. -type keyspace_filter() :: #filter{}. @@ -340,22 +341,30 @@ next(It0 = #it{filter = #filter{keymapper = Keymapper}}) -> {error, closed} end. --spec preserve_iterator(iterator()) -> binary(). 
-preserve_iterator(#it{cursor = Cursor}) -> +-spec preserve_iterator(iterator()) -> serialized_iterator(). +preserve_iterator(#it{ + cursor = Cursor, + filter = #filter{ + topic_filter = TopicFilter, + start_time = StartTime + } +}) -> State = #{ v => 1, - cursor => Cursor + cursor => Cursor, + replay => {TopicFilter, StartTime} }, term_to_binary(State). --spec restore_iterator(db(), emqx_ds:replay(), binary()) -> +-spec restore_iterator(db(), serialized_iterator()) -> {ok, iterator()} | {error, _TODO}. -restore_iterator(DB, Replay, Serial) when is_binary(Serial) -> +restore_iterator(DB, Serial) when is_binary(Serial) -> State = binary_to_term(Serial), - restore_iterator(DB, Replay, State); -restore_iterator(DB, Replay, #{ + restore_iterator(DB, State); +restore_iterator(DB, #{ v := 1, - cursor := Cursor + cursor := Cursor, + replay := Replay = {_TopicFilter, _StartTime} }) -> case make_iterator(DB, Replay) of {ok, It} when Cursor == undefined -> diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl index 6137a1ed7..25a58950d 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl @@ -12,7 +12,7 @@ -export([store/5]). -export([delete/4]). --export([make_iterator/2, next/1]). +-export([make_iterator/2, next/1, next/2]). -export([ preserve_iterator/2, @@ -131,7 +131,7 @@ -callback make_iterator(_Schema, emqx_ds:replay()) -> {ok, _It} | {error, _}. --callback restore_iterator(_Schema, emqx_ds:replay(), binary()) -> {ok, _It} | {error, _}. +-callback restore_iterator(_Schema, _Serialized :: binary()) -> {ok, _It} | {error, _}. -callback preserve_iterator(_It) -> term(). @@ -175,21 +175,52 @@ make_iterator(Shard, Replay = {_, StartTime}) -> replay = Replay }). --spec next(iterator()) -> {value, binary(), iterator()} | none | {error, closed}. 
-next(It = #it{module = Mod, data = ItData}) -> +-spec next(iterator()) -> {ok, iterator(), [binary()]} | end_of_stream. +next(It = #it{}) -> + next(It, _BatchSize = 1). + +-spec next(iterator(), pos_integer()) -> {ok, iterator(), [binary()]} | end_of_stream. +next(#it{data = {?MODULE, end_of_stream}}, _BatchSize) -> + end_of_stream; +next( + It = #it{shard = Shard, module = Mod, gen = Gen, data = {?MODULE, retry, Serialized}}, BatchSize +) -> + #{data := DBData} = meta_get_gen(Shard, Gen), + {ok, ItData} = Mod:restore_iterator(DBData, Serialized), + next(It#it{data = ItData}, BatchSize); +next(It = #it{}, BatchSize) -> + do_next(It, BatchSize, _Acc = []). + +-spec do_next(iterator(), non_neg_integer(), [binary()]) -> + {ok, iterator(), [binary()]} | end_of_stream. +do_next(It, N, Acc) when N =< 0 -> + {ok, It, lists:reverse(Acc)}; +do_next(It = #it{module = Mod, data = ItData}, N, Acc) -> case Mod:next(ItData) of {value, Val, ItDataNext} -> - {value, Val, It#it{data = ItDataNext}}; - {error, _} = Error -> - Error; + do_next(It#it{data = ItDataNext}, N - 1, [Val | Acc]); + {error, _} = _Error -> + %% todo: log? + %% iterator might be invalid now; will need to re-open it. + Serialized = Mod:preserve_iterator(ItData), + {ok, It#it{data = {?MODULE, retry, Serialized}}, lists:reverse(Acc)}; none -> case open_next_iterator(It) of {ok, ItNext} -> - next(ItNext); - {error, _} = Error -> - Error; + do_next(ItNext, N, Acc); + {error, _} = _Error -> + %% todo: log? + %% fixme: only bad options may lead to this? + %% return an "empty" iterator to be re-opened when retrying? + Serialized = Mod:preserve_iterator(ItData), + {ok, It#it{data = {?MODULE, retry, Serialized}}, lists:reverse(Acc)}; none -> - none + case Acc of + [] -> + end_of_stream; + _ -> + {ok, It#it{data = {?MODULE, end_of_stream}}, lists:reverse(Acc)} + end end end. 
@@ -407,8 +438,8 @@ open_iterator(#{module := Mod, data := Data}, It = #it{}) -> -spec open_restore_iterator(generation(), iterator(), binary()) -> {ok, iterator()} | {error, _Reason}. -open_restore_iterator(#{module := Mod, data := Data}, It = #it{replay = Replay}, Serial) -> - case Mod:restore_iterator(Data, Replay, Serial) of +open_restore_iterator(#{module := Mod, data := Data}, It = #it{}, Serial) -> + case Mod:restore_iterator(Data, Serial) of {ok, ItData} -> {ok, It#it{module = Mod, data = ItData}}; Err -> diff --git a/apps/emqx_durable_storage/test/emqx_ds_storage_layer_SUITE.erl b/apps/emqx_durable_storage/test/emqx_ds_storage_layer_SUITE.erl index 3a872934f..10596e216 100644 --- a/apps/emqx_durable_storage/test/emqx_ds_storage_layer_SUITE.erl +++ b/apps/emqx_durable_storage/test/emqx_ds_storage_layer_SUITE.erl @@ -201,7 +201,7 @@ t_iterate_multigen_preserve_restore(_Config) -> ok = emqx_ds_storage_layer:preserve_iterator(It3, ReplayID), {ok, It4} = emqx_ds_storage_layer:restore_iterator(?SHARD, ReplayID), {It5, Res200} = iterate(It4, 1000), - ?assertEqual(none, It5), + ?assertEqual({end_of_stream, []}, iterate(It5, 1)), ?assertEqual( lists:sort([{Topic, TS} || Topic <- TopicsMatching, TS <- Timestamps]), lists:sort([binary_to_term(Payload) || Payload <- Res10 ++ Res100 ++ Res200]) @@ -224,21 +224,20 @@ iterate(DB, TopicFilter, StartTime) -> iterate(It) -> case emqx_ds_storage_layer:next(It) of - {value, Payload, ItNext} -> + {ok, ItNext, [Payload]} -> [Payload | iterate(ItNext)]; - none -> + end_of_stream -> [] end. -iterate(It, 0) -> - {It, []}; +iterate(end_of_stream, _N) -> + {end_of_stream, []}; iterate(It, N) -> - case emqx_ds_storage_layer:next(It) of - {value, Payload, ItNext} -> - {ItFinal, Ps} = iterate(ItNext, N - 1), - {ItFinal, [Payload | Ps]}; - none -> - {none, []} + case emqx_ds_storage_layer:next(It, N) of + {ok, ItFinal, Payloads} -> + {ItFinal, Payloads}; + end_of_stream -> + {end_of_stream, []} end. 
iterator(DB, TopicFilter, StartTime) -> diff --git a/apps/emqx_durable_storage/test/props/prop_replay_message_storage.erl b/apps/emqx_durable_storage/test/props/prop_replay_message_storage.erl index f9964bebe..d96996534 100644 --- a/apps/emqx_durable_storage/test/props/prop_replay_message_storage.erl +++ b/apps/emqx_durable_storage/test/props/prop_replay_message_storage.erl @@ -225,12 +225,9 @@ run_iterator_commands([iterate | Rest], It, Ctx) -> [] end; run_iterator_commands([{preserve, restore} | Rest], It, Ctx) -> - #{ - db := DB, - replay := Replay - } = Ctx, + #{db := DB} = Ctx, Serial = emqx_ds_message_storage_bitmask:preserve_iterator(It), - {ok, ItNext} = emqx_ds_message_storage_bitmask:restore_iterator(DB, Replay, Serial), + {ok, ItNext} = emqx_ds_message_storage_bitmask:restore_iterator(DB, Serial), run_iterator_commands(Rest, ItNext, Ctx); run_iterator_commands([], It, _Ctx) -> iterate_db(It). From f1454bb57eb5452c8d7773be6379b5ca2d9322f3 Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Fri, 29 Sep 2023 17:11:03 +0200 Subject: [PATCH 072/155] feat(ds): learned topic structure --- apps/emqx_durable_storage/src/emqx_ds_lts.erl | 509 ++++++++++++++++++ rebar.config | 4 + 2 files changed, 513 insertions(+) create mode 100644 apps/emqx_durable_storage/src/emqx_ds_lts.erl diff --git a/apps/emqx_durable_storage/src/emqx_ds_lts.erl b/apps/emqx_durable_storage/src/emqx_ds_lts.erl new file mode 100644 index 000000000..384677d21 --- /dev/null +++ b/apps/emqx_durable_storage/src/emqx_ds_lts.erl @@ -0,0 +1,509 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. 
+%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-module(emqx_ds_lts). + +%% API: +-export([trie_create/0, topic_key/3, match_topics/2, lookup_topic_key/2, dump_to_dot/2]). + +%% Debug: +-export([trie_next/3, trie_insert/3]). + +-export_type([static_key/0, trie/0]). + +-include_lib("stdlib/include/ms_transform.hrl"). + +-ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). +-endif. + +%%================================================================================ +%% Type declarations +%%================================================================================ + +-define(EOT, []). %% End Of Topic +-define(PLUS, '+'). + +-type edge() :: binary() | ?EOT | ?PLUS. + +%% Fixed size binary +-type static_key() :: binary(). + +-define(PREFIX, prefix). +-type state() :: static_key() | ?PREFIX. + +-type msg_storage_key() :: {static_key(), _Varying :: [binary()]}. + +-type threshold_fun() :: fun((non_neg_integer()) -> non_neg_integer()). + +-record(trie, + { trie :: ets:tid() + , stats :: ets:tid() + }). + +-opaque trie() :: #trie{}. + +-record(trans, + { key :: {state(), edge()} + , next :: state() + }). + +%%================================================================================ +%% API funcions +%%================================================================================ + +%% @doc Create an empty trie +-spec trie_create() -> trie(). +trie_create() -> + Trie = ets:new(trie, [{keypos, #trans.key}, set]), + Stats = ets:new(stats, [{keypos, 1}, set]), + #trie{ trie = Trie + , stats = Stats + }. 
+
+%% @doc Create a topic key,
+-spec topic_key(trie(), threshold_fun(), [binary()]) -> msg_storage_key().
+topic_key(Trie, ThresholdFun, Tokens) ->
+    do_topic_key(Trie, ThresholdFun, 0, ?PREFIX, Tokens, []).
+
+%% @doc Return an existing topic key if it exists.
+-spec lookup_topic_key(trie(), [binary()]) -> {ok, msg_storage_key()} | undefined.
+lookup_topic_key(Trie, Tokens) ->
+    do_lookup_topic_key(Trie, ?PREFIX, Tokens, []).
+
+%% @doc Return list of keys of topics that match a given topic filter
+-spec match_topics(trie(), [binary() | '+' | '#']) ->
+          [{static_key(), _Varying :: binary() | ?PLUS}].
+match_topics(Trie, TopicFilter) ->
+    do_match_topics(Trie, ?PREFIX, [], TopicFilter).
+
+%% @doc Dump trie to graphviz format for debugging
+-spec dump_to_dot(trie(), file:filename()) -> ok.
+dump_to_dot(#trie{trie = Trie, stats = Stats}, Filename) ->
+    L = ets:tab2list(Trie),
+    {Nodes0, Edges} =
+        lists:foldl(
+          fun(#trans{key = {From, Label}, next = To}, {AccN, AccEdge}) ->
+              Edge = {From, To, Label},
+              {[From, To] ++ AccN, [Edge|AccEdge]}
+          end,
+          {[], []},
+          L),
+    Nodes =
+        lists:map(
+          fun(Node) ->
+              case ets:lookup(Stats, Node) of
+                  [{_, NChildren}] -> ok;
+                  [] -> NChildren = 0
+              end,
+              {Node, NChildren}
+          end,
+          lists:usort(Nodes0)),
+    {ok, FD} = file:open(Filename, [write]),
+    Print = fun (?PREFIX) -> "prefix";
+                (NodeId) -> binary:encode_hex(NodeId)
+            end,
+    io:format(FD, "digraph {~n", []),
+    lists:foreach(
+      fun({Node, NChildren}) ->
+          Id = Print(Node),
+          io:format(FD, "  \"~s\" [label=\"~s : ~p\"];~n", [Id, Id, NChildren])
+      end,
+      Nodes),
+    lists:foreach(
+      fun({From, To, Label}) ->
+          io:format(FD, "  \"~s\" -> \"~s\" [label=\"~s\"];~n", [Print(From), Print(To), Label])
+      end,
+      Edges),
+    io:format(FD, "}~n", []),
+    file:close(FD).
+ +%%================================================================================ +%% Internal exports +%%================================================================================ + +-spec trie_next(trie(), state(), binary() | ?EOT) -> {Wildcard, state()} | undefined + when Wildcard :: boolean(). +trie_next(#trie{trie = Trie}, State, ?EOT) -> + case ets:lookup(Trie, {State, ?EOT}) of + [#trans{next = Next}] -> {false, Next}; + [] -> undefined + end; +trie_next(#trie{trie = Trie}, State, Token) -> + case ets:lookup(Trie, {State, ?PLUS}) of + [#trans{next = Next}] -> + {true, Next}; + [] -> + case ets:lookup(Trie, {State, Token}) of + [#trans{next = Next}] -> {false, Next}; + [] -> undefined + end + end. + +-spec trie_insert(trie(), state(), edge()) -> {Updated, state()} + when Updated :: false | non_neg_integer(). +trie_insert(#trie{trie = Trie, stats = Stats}, State, Token) -> + Key = {State, Token}, + NewState = get_id_for_key(State, Token), + Rec = #trans{ key = Key + , next = NewState + }, + case ets:insert_new(Trie, Rec) of + true -> + Inc = case Token of + ?EOT -> 0; + ?PLUS -> 0; + _ -> 1 + end, + NChildren = ets:update_counter(Stats, State, {2, Inc}, {State, 0}), + {NChildren, NewState}; + false -> + [#trans{next = NextState}] = ets:lookup(Trie, Key), + {false, NextState} + end. + +%%================================================================================ +%% Internal functions +%%================================================================================ + +-spec get_id_for_key(state(), edge()) -> static_key(). +get_id_for_key(_State, _Token) -> + %% Requirements for the return value: + %% + %% It should be globally unique for the `{State, Token}` pair. Other + %% than that, there's no requirements. The return value doesn't even + %% have to be deterministic, since the states are saved in the trie. + %% + %% The generated value becomes the ID of the topic in the durable + %% storage. 
Its size should be relatively small to reduce the + %% overhead of storing messages. + %% + %% If we want to impress computer science crowd, sorry, I mean to + %% minimize storage requirements, we can even employ Huffman coding + %% based on the frequency of messages. + crypto:strong_rand_bytes(8). + +%% erlfmt-ignore +-spec do_match_topics(trie(), state(), non_neg_integer(), [binary() | '+' | '#']) -> + list(). +do_match_topics(Trie, State, Varying, []) -> + case trie_next(Trie, State, ?EOT) of + {false, Static} -> [{Static, lists:reverse(Varying)}]; + undefined -> [] + end; +do_match_topics(Trie, State, Varying, ['#']) -> + Emanating = emanating(Trie, State, ?PLUS), + lists:flatmap( + fun({?EOT, Static}) -> + [{Static, lists:reverse(Varying)}]; + ({?PLUS, NextState}) -> + do_match_topics(Trie, NextState, [?PLUS|Varying], ['#']); + ({_, NextState}) -> + do_match_topics(Trie, NextState, Varying, ['#']) + end, + Emanating); +do_match_topics(Trie, State, Varying, [Level|Rest]) -> + Emanating = emanating(Trie, State, Level), + lists:flatmap( + fun({?EOT, _NextState}) -> + []; + ({?PLUS, NextState}) -> + do_match_topics(Trie, NextState, [Level|Varying], Rest); + ({_, NextState}) -> + do_match_topics(Trie, NextState, Varying, Rest) + end, + Emanating). + +-spec do_lookup_topic_key(trie(), state(), [binary()], [binary()]) -> + {ok, msg_storage_key()} | undefined. +do_lookup_topic_key(Trie, State, [], Varying) -> + case trie_next(Trie, State, ?EOT) of + {false, Static} -> + {ok, {Static, lists:reverse(Varying)}}; + undefined -> + undefined + end; +do_lookup_topic_key(Trie, State, [Tok|Rest], Varying) -> + case trie_next(Trie, State, Tok) of + {true, NextState} -> + do_lookup_topic_key(Trie, NextState, Rest, [Tok|Varying]); + {false, NextState} -> + do_lookup_topic_key(Trie, NextState, Rest, Varying); + undefined -> + undefined + end. 
+ +do_topic_key(Trie, _, _, State, [], Varying) -> + {_, false, Static} = trie_next_(Trie, State, ?EOT), + {Static, lists:reverse(Varying)}; +do_topic_key(Trie, ThresholdFun, Depth, State, [Tok|Rest], Varying0) -> + Threshold = ThresholdFun(Depth), % TODO: it's not necessary to call it every time. + Varying = case trie_next_(Trie, State, Tok) of + {NChildren, _, _DiscardState} when is_integer(NChildren), NChildren > Threshold -> + {_, NextState} = trie_insert(Trie, State, ?PLUS), + [Tok|Varying0]; + {_, false, NextState} -> + Varying0; + {_, true, NextState} -> + [Tok|Varying0] + end, + do_topic_key(Trie, ThresholdFun, Depth + 1, NextState, Rest, Varying). + +-spec trie_next_(trie(), state(), binary() | ?EOT) -> {New, Wildcard, state()} + when New :: false | non_neg_integer(), + Wildcard :: boolean(). +trie_next_(Trie, State, Token) -> + case trie_next(Trie, State, Token) of + {Wildcard, NextState} -> + {false, Wildcard, NextState}; + undefined -> + {Updated, NextState} = trie_insert(Trie, State, Token), + {Updated, false, NextState} + end. + +%% @doc Return all edges emanating from a node: +%% erlfmt-ignore +-spec emanating(trie(), state(), edge()) -> [{edge(), state()}]. +emanating(#trie{trie = Tab}, State, ?PLUS) -> + ets:select(Tab, ets:fun2ms( + fun(#trans{key = {S, Edge}, next = Next}) when S == State -> + {Edge, Next} + end)); +emanating(#trie{trie = Tab}, State, ?EOT) -> + case ets:lookup(Tab, {State, ?EOT}) of + [#trans{next = Next}] -> [{?EOT, Next}]; + [] -> [] + end; +emanating(#trie{trie = Tab}, State, Bin) when is_binary(Bin) -> + [{Edge, Next} || #trans{key = {_, Edge}, next = Next} <- + ets:lookup(Tab, {State, ?PLUS}) ++ + ets:lookup(Tab, {State, Bin})]. + +%%================================================================================ +%% Tests +%%================================================================================ + +-ifdef(TEST). 
+ +trie_basic_test() -> + T = trie_create(), + ?assertMatch(undefined, trie_next(T, ?PREFIX, <<"foo">>)), + {1, S1} = trie_insert(T, ?PREFIX, <<"foo">>), + ?assertMatch({false, S1}, trie_insert(T, ?PREFIX, <<"foo">>)), + ?assertMatch({false, S1}, trie_next(T, ?PREFIX, <<"foo">>)), + + ?assertMatch(undefined, trie_next(T, ?PREFIX, <<"bar">>)), + {2, S2} = trie_insert(T, ?PREFIX, <<"bar">>), + ?assertMatch({false, S2}, trie_insert(T, ?PREFIX, <<"bar">>)), + + ?assertMatch(undefined, trie_next(T, S1, <<"foo">>)), + ?assertMatch(undefined, trie_next(T, S1, <<"bar">>)), + {1, S11} = trie_insert(T, S1, <<"foo">>), + {2, S12} = trie_insert(T, S1, <<"bar">>), + ?assertMatch({false, S11}, trie_next(T, S1, <<"foo">>)), + ?assertMatch({false, S12}, trie_next(T, S1, <<"bar">>)), + + ?assertMatch(undefined, trie_next(T, S11, <<"bar">>)), + {1, S111} = trie_insert(T, S11, <<"bar">>), + ?assertMatch({false, S111}, trie_next(T, S11, <<"bar">>)). + +lookup_key_test() -> + T = trie_create(), + {_, S1} = trie_insert(T, ?PREFIX, <<"foo">>), + {_, S11} = trie_insert(T, S1, <<"foo">>), + %% Topics don't match until we insert ?EOT: + ?assertMatch( undefined + , lookup_topic_key(T, [<<"foo">>]) + ), + ?assertMatch( undefined + , lookup_topic_key(T, [<<"foo">>, <<"foo">>]) + ), + {_, S10} = trie_insert(T, S1, ?EOT), + {_, S110} = trie_insert(T, S11, ?EOT), + ?assertMatch( {ok, {S10, []}} + , lookup_topic_key(T, [<<"foo">>]) + ), + ?assertMatch( {ok, {S110, []}} + , lookup_topic_key(T, [<<"foo">>, <<"foo">>]) + ), + %% The rest of keys still don't match: + ?assertMatch( undefined + , lookup_topic_key(T, [<<"bar">>]) + ), + ?assertMatch( undefined + , lookup_topic_key(T, [<<"bar">>, <<"foo">>]) + ). 
+ +wildcard_lookup_test() -> + T = trie_create(), + {1, S1} = trie_insert(T, ?PREFIX, <<"foo">>), + {0, S11} = trie_insert(T, S1, ?PLUS), %% Plus doesn't increase the number of children + {1, S111} = trie_insert(T, S11, <<"foo">>), + {0, S1110} = trie_insert(T, S111, ?EOT), %% ?EOT doesn't increase the number of children + ?assertMatch( {ok, {S1110, [<<"bar">>]}} + , lookup_topic_key(T, [<<"foo">>, <<"bar">>, <<"foo">>]) + ), + ?assertMatch( {ok, {S1110, [<<"quux">>]}} + , lookup_topic_key(T, [<<"foo">>, <<"quux">>, <<"foo">>]) + ), + ?assertMatch( undefined + , lookup_topic_key(T, [<<"foo">>]) + ), + ?assertMatch( undefined + , lookup_topic_key(T, [<<"foo">>, <<"bar">>]) + ), + ?assertMatch( undefined + , lookup_topic_key(T, [<<"foo">>, <<"bar">>, <<"bar">>]) + ), + ?assertMatch( undefined + , lookup_topic_key(T, [<<"bar">>, <<"foo">>, <<"foo">>]) + ), + {_, S10} = trie_insert(T, S1, ?EOT), + ?assertMatch( {ok, {S10, []}} + , lookup_topic_key(T, [<<"foo">>]) + ). + +%% erlfmt-ignore +topic_key_test() -> + T = trie_create(), + try + Threshold = 3, + ThresholdFun = fun(0) -> 1000; + (_) -> Threshold + end, + %% Test that bottom layer threshold is high: + lists:foreach( + fun(I) -> + {_, []} = test_key(T, ThresholdFun, [I, 99, 99, 99]) + end, + lists:seq(1, 10)), + %% Test adding children on the 2nd level: + lists:foreach( + fun(I) -> + case test_key(T, ThresholdFun, [1, I, 1]) of + {_, []} when I < Threshold -> + ok; + {_, [Var]} -> + ?assertEqual(Var, integer_to_binary(I)) + end + end, + lists:seq(1, 100)), + %% This doesn't affect 2nd level with a different prefix: + {_, []} = test_key(T, ThresholdFun, [2, 1, 1]), + %% Now create another level of +: + lists:foreach( + fun(I) -> + case test_key(T, ThresholdFun, [1, 42, 1, I, 42]) of + {_, [<<"42">>]} when I =< Threshold -> %% TODO: off by 1 error + ok; + {_, [<<"42">>, Var]} -> + ?assertEqual(Var, integer_to_binary(I)); + Ret -> + error({Ret, I}) + end + end, + lists:seq(1, 100)) + after + dump_to_dot(T, 
atom_to_list(?FUNCTION_NAME) ++ ".dot") + end. + +%% erlfmt-ignore +topic_match_test() -> + T = trie_create(), + try + Threshold = 2, + ThresholdFun = fun(0) -> 1000; + (_) -> Threshold + end, + {S1, []} = test_key(T, ThresholdFun, [1]), + {S11, []} = test_key(T, ThresholdFun, [1, 1]), + {S12, []} = test_key(T, ThresholdFun, [1, 2]), + {S111, []} = test_key(T, ThresholdFun, [1, 1, 1]), + %% Match concrete topics: + assert_match_topics(T, [1], [{S1, []}]), + assert_match_topics(T, [1, 1], [{S11, []}]), + assert_match_topics(T, [1, 1, 1], [{S111, []}]), + %% Match topics with +: + assert_match_topics(T, [1, '+'], [{S11, []}, {S12, []}]), + assert_match_topics(T, [1, '+', 1], [{S111, []}]), + %% Match topics with #: + assert_match_topics(T, [1, '#'], [{S1, []}, {S11, []}, {S12, []}, {S111, []}]), + assert_match_topics(T, [1, 1, '#'], [{S11, []}, {S111, []}]), + %% Now add learned wildcards: + {S21, []} = test_key(T, ThresholdFun, [2, 1]), + {S22, []} = test_key(T, ThresholdFun, [2, 2]), + {S2_, [<<"3">>]} = test_key(T, ThresholdFun, [2, 3]), + {S2_11, [_]} = test_key(T, ThresholdFun, [2, 1, 1, 1]), + {S2_12, [_]} = test_key(T, ThresholdFun, [2, 1, 1, 2]), + {S2_1_, [_, _]} = test_key(T, ThresholdFun, [2, 1, 1, 3]), + %% Check matching: + assert_match_topics(T, [2, 2], + [{S22, []}, {S2_, [<<"2">>]}]), + assert_match_topics(T, [2, '+'], + [{S22, []}, {S21, []}, {S2_, ['+']}]), + assert_match_topics(T, [2, 1, 1, 2], + [{S2_12, [<<"1">>]}, + {S2_1_, [<<"1">>, <<"2">>]}]), + assert_match_topics(T, [2, '#'], + [{S21, []}, {S22, []}, {S2_, ['+']}, + {S2_11, ['+']}, {S2_12, ['+']}, + {S2_1_, ['+', '+']}]), + ok + after + dump_to_dot(T, atom_to_list(?FUNCTION_NAME) ++ ".dot") + end. + +-define(keys_history, topic_key_history). 
+ +%% erlfmt-ignore +assert_match_topics(Trie, Filter0, Expected) -> + Filter = lists:map(fun(I) when is_integer(I) -> integer_to_binary(I); + (I) -> I + end, + Filter0), + Matched = match_topics(Trie, Filter), + ?assertMatch( #{missing := [], unexpected := []} + , #{ missing => Expected -- Matched + , unexpected => Matched -- Expected + } + , Filter + ). + +%% erlfmt-ignore +test_key(Trie, Threshold, Topic0) -> + Topic = [integer_to_binary(I) || I <- Topic0], + Ret = topic_key(Trie, Threshold, Topic), + Ret = topic_key(Trie, Threshold, Topic), %% Test idempotency + case get(?keys_history) of + undefined -> OldHistory = #{}; + OldHistory -> ok + end, + %% Test that the generated keys are always unique for the topic: + History = maps:update_with( + Ret, + fun(Old) -> + case Old =:= Topic of + true -> Old; + false -> error(#{'$msg' => "Duplicate key!", key => Ret, old_topic => Old, new_topic => Topic}) + end + end, + Topic, + OldHistory), + put(?keys_history, History), + {ok, Ret} = lookup_topic_key(Trie, Topic), + Ret. + +-endif. diff --git a/rebar.config b/rebar.config index 3ba8edc4b..81fa04231 100644 --- a/rebar.config +++ b/rebar.config @@ -106,6 +106,10 @@ emqx_exproto_pb % generated code for protobuf ]}. +{eunit_opts, + [ verbose + ]}. 
+ {project_plugins, [ erlfmt, {rebar3_hex, "7.0.2"}, From c91df2f5cd917335f973224ed6e32ef177ad28f5 Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Mon, 2 Oct 2023 23:04:43 +0200 Subject: [PATCH 073/155] refactor(ds): Create a prototype of replication layer --- apps/emqx/src/emqx_persistent_session_ds.erl | 2 +- .../emqx_persistent_session_ds_proto_v1.erl | 17 +- apps/emqx_durable_storage/IMPLEMENTATION.md | 42 ---- apps/emqx_durable_storage/README.md | 9 +- apps/emqx_durable_storage/src/emqx_ds.erl | 199 ++++++++++-------- apps/emqx_durable_storage/src/emqx_ds.erl_ | 189 +++++++++++++++++ apps/emqx_durable_storage/src/emqx_ds_lts.erl | 3 +- .../src/emqx_ds_message_storage_bitmask.erl | 15 +- .../src/emqx_ds_replication_layer.erl | 128 +++++++++++ .../src/emqx_ds_storage_layer.erl | 27 ++- .../src/proto/emqx_ds_proto_v1.erl | 56 +++++ 11 files changed, 539 insertions(+), 148 deletions(-) create mode 100644 apps/emqx_durable_storage/src/emqx_ds.erl_ create mode 100644 apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl create mode 100644 apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl diff --git a/apps/emqx/src/emqx_persistent_session_ds.erl b/apps/emqx/src/emqx_persistent_session_ds.erl index e456211fc..174a02156 100644 --- a/apps/emqx/src/emqx_persistent_session_ds.erl +++ b/apps/emqx/src/emqx_persistent_session_ds.erl @@ -74,7 +74,7 @@ ]). %% FIXME --define(DS_SHARD_ID, <<"local">>). +-define(DS_SHARD_ID, atom_to_binary(node())). -define(DEFAULT_KEYSPACE, default). -define(DS_SHARD, {?DEFAULT_KEYSPACE, ?DS_SHARD_ID}). 
diff --git a/apps/emqx/src/proto/emqx_persistent_session_ds_proto_v1.erl b/apps/emqx/src/proto/emqx_persistent_session_ds_proto_v1.erl index d35ccd963..edaaea775 100644 --- a/apps/emqx/src/proto/emqx_persistent_session_ds_proto_v1.erl +++ b/apps/emqx/src/proto/emqx_persistent_session_ds_proto_v1.erl @@ -23,7 +23,8 @@ open_iterator/4, close_iterator/2, - close_all_iterators/2 + close_all_iterators/2, + get_streams/5 ]). -include_lib("emqx/include/bpapi.hrl"). @@ -50,6 +51,20 @@ open_iterator(Nodes, TopicFilter, StartMS, IteratorID) -> ?TIMEOUT ). +-spec get_streams( + node(), + emqx_ds:keyspace(), + emqx_ds:shard_id(), + emqx_ds:topic_filter(), + emqx_ds:time()) -> + [emqx_ds_storage_layer:stream()]. +get_streams(Node, Keyspace, ShardId, TopicFilter, StartTime) -> + erpc:call( + Node, + emqx_ds_storage_layer, + get_streams, + [Keyspace, ShardId, TopicFilter, StartTime]). + -spec close_iterator( [node()], emqx_ds:iterator_id() diff --git a/apps/emqx_durable_storage/IMPLEMENTATION.md b/apps/emqx_durable_storage/IMPLEMENTATION.md index 9c0c5928c..33f02dfc4 100644 --- a/apps/emqx_durable_storage/IMPLEMENTATION.md +++ b/apps/emqx_durable_storage/IMPLEMENTATION.md @@ -31,48 +31,6 @@ Read pattern: pseudoserial Number of records: O(total write throughput * retention time) -## Session storage - -Data there is updated when: - -- A new client connects with clean session = false -- Client subscribes to a topic -- Client unsubscribes to a topic -- Garbage collection is performed - -Write throughput: low - -Data is read when a client connects and replay agents are started - -Read throughput: low - -Data format: - -`#session{clientId = "foobar", iterators = [ItKey1, ItKey2, ItKey3, ...]}` - -Number of records: O(N clients) - -Size of record: O(N subscriptions per clients) - -## Iterator storage - -Data is written every time a client acks a message. - -Data is read when a client reconnects and we restart replay agents. 
- -`#iterator{key = IterKey, data = Blob}` - -Number of records: O(N clients * N subscriptions per client) - -Size of record: O(1) - -Write throughput: high, lots of small updates - -Write pattern: mostly key overwrite - -Read throughput: low - -Read pattern: random # Push vs. Pull model diff --git a/apps/emqx_durable_storage/README.md b/apps/emqx_durable_storage/README.md index 7de43bee0..f01af0c37 100644 --- a/apps/emqx_durable_storage/README.md +++ b/apps/emqx_durable_storage/README.md @@ -1,9 +1,10 @@ # EMQX Replay -`emqx_ds` is a durable storage for MQTT messages within EMQX. -It implements the following scenarios: -- Persisting messages published by clients -- +`emqx_ds` is a generic durable storage for MQTT messages within EMQX. + +Concepts: + + > 0. App overview introduction > 1. let people know what your project can do specifically. Is it a base diff --git a/apps/emqx_durable_storage/src/emqx_ds.erl b/apps/emqx_durable_storage/src/emqx_ds.erl index feaa37bc0..ad6a07330 100644 --- a/apps/emqx_durable_storage/src/emqx_ds.erl +++ b/apps/emqx_durable_storage/src/emqx_ds.erl @@ -15,48 +15,29 @@ %%-------------------------------------------------------------------- -module(emqx_ds). --include_lib("stdlib/include/ms_transform.hrl"). --include_lib("snabbkaffe/include/snabbkaffe.hrl"). +%% Management API: +-export([create_db/2]). -%% API: --export([ensure_shard/2]). -%% Messages: --export([message_store/2, message_store/1, message_stats/0]). -%% Iterator: --export([iterator_update/2, iterator_next/1, iterator_stats/0]). +%% Message storage API: +-export([message_store/3, message_store/2]). + +%% Message replay API: +-export([get_streams/3, open_iterator/2, next/2]). %% internal exports: -export([]). --export_type([ - keyspace/0, - message_id/0, - message_stats/0, - message_store_opts/0, - replay/0, - replay_id/0, - iterator_id/0, - iterator/0, - shard/0, - shard_id/0, - topic/0, - topic_filter/0, - time/0 -]). +-export_type([db/0, time/0, topic_filter/0, topic/0]). 
%%================================================================================ %% Type declarations %%================================================================================ --type iterator() :: term(). - --type iterator_id() :: binary(). - --type message_store_opts() :: #{}. - --type message_stats() :: #{}. - --type message_id() :: binary(). +%% Different DBs are completely independent from each other. They +%% could represent something like different tenants. +%% +%% Topics stored in different DBs aren't necesserily disjoint. +-type db() :: binary(). %% Parsed topic. -type topic() :: list(binary()). @@ -64,9 +45,30 @@ %% Parsed topic filter. -type topic_filter() :: list(binary() | '+' | '#' | ''). --type keyspace() :: atom(). --type shard_id() :: binary(). --type shard() :: {keyspace(), shard_id()}. +%% This record enapsulates the stream entity from the replication +%% level. +%% +%% TODO: currently the stream is hardwired to only support the +%% internal rocksdb storage. In t he future we want to add another +%% implementations for emqx_ds, so this type has to take this into +%% account. +-record(stream, + { shard :: emqx_ds:shard() + , enc :: emqx_ds_replication_layer:stream() + }). + +-type stream_rank() :: {integer(), integer()}. + +-opaque stream() :: #stream{}. + +%% This record encapsulates the iterator entity from the replication +%% level. +-record(iterator, + { shard :: emqx_ds:shard() + , enc :: enqx_ds_replication_layer:iterator() + }). + +-opaque iterator() :: #iterator{}. %% Timestamp %% Earliest possible timestamp is 0. @@ -74,70 +76,89 @@ %% use in emqx_guid. Otherwise, the iterators won't match the message timestamps. -type time() :: non_neg_integer(). --type replay_id() :: binary(). +-type message_store_opts() :: #{}. --type replay() :: { - _TopicFilter :: topic_filter(), - _StartTime :: time() -}. +-type create_db_opts() :: #{}. + +-type message_id() :: binary(). 
%%================================================================================ %% API funcions %%================================================================================ --spec ensure_shard(shard(), emqx_ds_storage_layer:options()) -> - ok | {error, _Reason}. -ensure_shard(Shard, Options) -> - case emqx_ds_storage_layer_sup:start_shard(Shard, Options) of - {ok, _Pid} -> - ok; - {error, {already_started, _Pid}} -> - ok; - {error, Reason} -> - {error, Reason} +-spec create_db(db(), create_db_opts()) -> ok. +create_db(DB, Opts) -> + emqx_ds_replication_layer:create_db(DB, Opts). + +-spec message_store(db(), [emqx_types:message()], message_store_opts()) -> + {ok, [message_id()]} | {error, _}. +message_store(DB, Msgs, Opts) -> + emqx_ds_replication_layer:message_store(DB, Msgs, Opts). + +-spec message_store(db(), [emqx_types:message()]) -> {ok, [message_id()]} | {error, _}. +message_store(DB, Msgs) -> + message_store(DB, Msgs, #{}). + +%% @doc Get a list of streams needed for replaying a topic filter. +%% +%% Motivation: under the hood, EMQX may store different topics at +%% different locations or even in different databases. A wildcard +%% topic filter may require pulling data from any number of locations. +%% +%% Stream is an abstraction exposed by `emqx_ds' that reflects the +%% notion that different topics can be stored differently, but hides +%% the implementation details. +%% +%% Rules: +%% +%% 1. New streams matching the topic filter can appear without notice, +%% so the replayer must periodically call this function to get the +%% updated list of streams. +%% +%% 2. Streams may depend on one another. Therefore, care should be +%% taken while replaying them in parallel to avoid out-of-order +%% replay. This function returns stream together with its +%% "coordinates": `{X, T, Stream}'. If X coordinate of two streams is +%% different, then they can be replayed in parallel. 
If it's the +%% same, then the stream with smaller T coordinate should be replayed +%% first. +-spec get_streams(db(), topic_filter(), time()) -> [{stream_rank(), stream()}]. +get_streams(DB, TopicFilter, StartTime) -> + Shards = emqx_ds_replication_layer:list_shards(DB), + lists:flatmap( + fun(Shard) -> + Streams = emqx_ds_replication_layer:get_streams(Shard, TopicFilter, StartTime), + [{Rank, #stream{ shard = Shard + , enc = I + }} || {Rank, I} <- Streams] + end, + Shards). + +-spec open_iterator(stream(), time()) -> {ok, iterator()} | {error, _}. +open_iterator(#stream{shard = Shard, enc = Stream}, StartTime) -> + case emqx_ds_replication_layer:open_iterator(Shard, Stream, StartTime) of + {ok, Iter} -> + {ok, #iterator{shard = Shard, enc = Iter}}; + Err = {error, _} -> + Err end. -%%-------------------------------------------------------------------------------- -%% Message -%%-------------------------------------------------------------------------------- --spec message_store([emqx_types:message()], message_store_opts()) -> - {ok, [message_id()]} | {error, _}. -message_store(_Msg, _Opts) -> - %% TODO - {error, not_implemented}. +-spec next(iterator(), non_neg_integer()) -> {ok, iterator(), [emqx_types:message()]} | end_of_stream. +next(#iterator{shard = Shard, enc = Iter0}, BatchSize) -> + case emqx_ds_replication_layer:next(Shard, Iter0, BatchSize) of + {ok, Iter, Batch} -> + {ok, #iterator{shard = Shard, enc = Iter}, Batch}; + end_of_stream -> + end_of_stream + end. --spec message_store([emqx_types:message()]) -> {ok, [message_id()]} | {error, _}. -message_store(Msg) -> - %% TODO - message_store(Msg, #{}). +%%================================================================================ +%% behavior callbacks +%%================================================================================ --spec message_stats() -> message_stats(). -message_stats() -> - #{}. 
- -%%-------------------------------------------------------------------------------- -%% Session -%%-------------------------------------------------------------------------------- - -%%-------------------------------------------------------------------------------- -%% Iterator (pull API) -%%-------------------------------------------------------------------------------- - -%% @doc Called when a client acks a message --spec iterator_update(iterator_id(), iterator()) -> ok. -iterator_update(_IterId, _Iter) -> - %% TODO - ok. - -%% @doc Called when a client acks a message --spec iterator_next(iterator()) -> {value, emqx_types:message(), iterator()} | none | {error, _}. -iterator_next(_Iter) -> - %% TODO - none. - --spec iterator_stats() -> #{}. -iterator_stats() -> - #{}. +%%================================================================================ +%% Internal exports +%%================================================================================ %%================================================================================ %% Internal functions diff --git a/apps/emqx_durable_storage/src/emqx_ds.erl_ b/apps/emqx_durable_storage/src/emqx_ds.erl_ new file mode 100644 index 000000000..61b4c4bb3 --- /dev/null +++ b/apps/emqx_durable_storage/src/emqx_ds.erl_ @@ -0,0 +1,189 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%--------------------------------------------------------------------
+-module(emqx_ds).
+
+-include_lib("stdlib/include/ms_transform.hrl").
+-include_lib("snabbkaffe/include/snabbkaffe.hrl").
+
+%% API:
+-export([ensure_shard/2]).
+%% Messages:
+-export([message_store/2, message_store/1, message_stats/0]).
+%% Iterator:
+-export([get_streams/3, open_iterator/1, next/2]).
+
+%% internal exports:
+-export([]).
+
+-export_type([
+    stream/0,
+    keyspace/0,
+    message_id/0,
+    message_stats/0,
+    message_store_opts/0,
+    replay/0,
+    replay_id/0,
+    %iterator_id/0,
+    iterator/0,
+    topic/0,
+    topic_filter/0,
+    time/0
+]).
+
+%%================================================================================
+%% Type declarations
+%%================================================================================
+
+%% This record encapsulates the stream entity from the storage level.
+%%
+%% TODO: currently the stream is hardwired to only support the
+%% internal rocksdb storage. In the future we want to add other
+%% implementations for emqx_ds, so this type has to take this into
+%% account.
+-record(stream,
+    { shard :: emqx_ds:shard()
+    , :: emqx_ds_storage_layer:stream()
+    }).
+
+-opaque stream() :: #stream{}.
+
+-type iterator() :: term().
+
+%-type iterator_id() :: binary().
+
+-type message_store_opts() :: #{}.
+
+-type message_stats() :: #{}.
+
+-type message_id() :: binary().
+
+%% Parsed topic.
+-type topic() :: list(binary()).
+
+%% Parsed topic filter.
+-type topic_filter() :: list(binary() | '+' | '#' | '').
+
+-type keyspace() :: atom().
+-type shard_id() :: binary().
+-type shard() :: {keyspace(), shard_id()}.
+
+%% Timestamp
+%% Earliest possible timestamp is 0.
+%% TODO granularity? Currently, we should always use micro second, as that's the unit we
+%% use in emqx_guid. Otherwise, the iterators won't match the message timestamps.
+-type time() :: non_neg_integer().
+
+-type replay_id() :: binary().
+
+-type replay() :: {
+    _TopicFilter :: topic_filter(),
+    _StartTime :: time()
+}.
+
+%%================================================================================
+%% API functions
+%%================================================================================
+
+%% @doc Get a list of streams needed for replaying a topic filter.
+%%
+%% Motivation: under the hood, EMQX may store different topics at
+%% different locations or even in different databases. A wildcard
+%% topic filter may require pulling data from any number of locations.
+%%
+%% Stream is an abstraction exposed by `emqx_ds' that reflects the
+%% notion that different topics can be stored differently, but hides
+%% the implementation details.
+%%
+%% Rules:
+%%
+%% 1. New streams matching the topic filter can appear without notice,
+%% so the replayer must periodically call this function to get the
+%% updated list of streams.
+%%
+%% 2. Streams may depend on one another. Therefore, care should be
+%% taken while replaying them in parallel to avoid out-of-order
+%% replay. This function returns stream together with its
+%% "coordinates": `{X, T, Stream}'. If X coordinate of two streams is
+%% different, then they can be replayed in parallel. If it's the
+%% same, then the stream with smaller T coordinate should be replayed
+%% first.
+-spec get_streams(keyspace(), topic_filter(), time()) -> [{integer(), integer(), stream()}].
+get_streams(Keyspace, TopicFilter, StartTime) -> + ShardIds = emqx_ds_replication_layer:get_all_shards(Keyspace), + lists:flatmap( + fun(Shard) -> + Node = emqx_ds_replication_layer:shard_to_node(Shard), + try + Streams = emqx_persistent_session_ds_proto_v1:get_streams(Node, Keyspace, Shard, TopicFilter, StartTime), + [#stream{ shard = {Keyspace, ShardId} + , stream = Stream + } || Stream <- Streams] + catch + error:{erpc, _} -> + %% The caller has to periodically refresh the + %% list of streams anyway, so it's ok to ignore + %% transient errors. + [] + end + end, + ShardIds). + +-spec ensure_shard(shard(), emqx_ds_storage_layer:options()) -> + ok | {error, _Reason}. +ensure_shard(Shard, Options) -> + case emqx_ds_storage_layer_sup:start_shard(Shard, Options) of + {ok, _Pid} -> + ok; + {error, {already_started, _Pid}} -> + ok; + {error, Reason} -> + {error, Reason} + end. + +%%-------------------------------------------------------------------------------- +%% Message +%%-------------------------------------------------------------------------------- + +-spec message_store([emqx_types:message()], message_store_opts()) -> + {ok, [message_id()]} | {error, _}. +message_store(Msg, Opts) -> + message_store(Msg, Opts). + +-spec message_store([emqx_types:message()]) -> {ok, [message_id()]} | {error, _}. +message_store(Msg) -> + message_store(Msg, #{}). + +-spec message_stats() -> message_stats(). +message_stats() -> + #{}. + +%%-------------------------------------------------------------------------------- +%% Iterator (pull API) +%%-------------------------------------------------------------------------------- + +-spec open_iterator(stream()) -> {ok, iterator()}. +open_iterator(#stream{shard = {_Keyspace, _ShardId}, stream = _StorageSpecificStream}) -> + error(todo). + +-spec next(iterator(), non_neg_integer()) -> + {ok, iterator(), [emqx_types:message()]} + | end_of_stream. +next(_Iterator, _BatchSize) -> + error(todo). 
+ +%%================================================================================ +%% Internal functions +%%================================================================================ diff --git a/apps/emqx_durable_storage/src/emqx_ds_lts.erl b/apps/emqx_durable_storage/src/emqx_ds_lts.erl index 384677d21..9d206ee81 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_lts.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_lts.erl @@ -458,7 +458,8 @@ topic_match_test() -> [{S2_12, [<<"1">>]}, {S2_1_, [<<"1">>, <<"2">>]}]), assert_match_topics(T, [2, '#'], - [{S21, []}, {S22, []}, {S2_, ['+']}, + [{S21, []}, {S22, []}, + {S2_, ['+']}, {S2_11, ['+']}, {S2_12, ['+']}, {S2_1_, ['+', '+']}]), ok diff --git a/apps/emqx_durable_storage/src/emqx_ds_message_storage_bitmask.erl b/apps/emqx_durable_storage/src/emqx_ds_message_storage_bitmask.erl index be8a207bb..f51d556f1 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_message_storage_bitmask.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_message_storage_bitmask.erl @@ -85,9 +85,9 @@ -export([store/5]). -export([delete/4]). --export([make_iterator/2]). --export([make_iterator/3]). --export([next/1]). + +-export([get_streams/2]). +-export([make_iterator/2, make_iterator/3, next/1]). -export([preserve_iterator/1]). -export([restore_iterator/2]). @@ -112,7 +112,7 @@ compute_topic_seek/4 ]). --export_type([db/0, iterator/0, schema/0]). +-export_type([db/0, stream/0, iterator/0, schema/0]). -export_type([options/0]). -export_type([iteration_options/0]). @@ -131,6 +131,8 @@ %% Type declarations %%================================================================================ +-opaque stream() :: singleton_stream. + -type topic() :: emqx_ds:topic(). -type topic_filter() :: emqx_ds:topic_filter(). -type time() :: emqx_ds:time(). 
@@ -288,6 +290,11 @@ delete(DB = #db{handle = DBHandle, cf = CFHandle}, MessageID, PublishedAt, Topic Key = make_message_key(Topic, PublishedAt, MessageID, DB#db.keymapper), rocksdb:delete(DBHandle, CFHandle, Key, DB#db.write_options). +-spec get_streams(db(), emqx_ds:reply()) -> + [stream()]. +get_streams(_, _) -> + [singleton_stream]. + -spec make_iterator(db(), emqx_ds:replay()) -> {ok, iterator()} | {error, _TODO}. make_iterator(DB, Replay) -> diff --git a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl new file mode 100644 index 000000000..9fe08e0a2 --- /dev/null +++ b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl @@ -0,0 +1,128 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- +-module(emqx_ds_replication_layer). + +-export([ + list_shards/1, + create_db/2, + message_store/3, + get_streams/3, + open_iterator/3, + next/3 + ]). + + +%% internal exports: +-export([ do_create_shard_v1/2, + do_get_streams_v1/3, + do_open_iterator/3, + do_next_v1/3 + ]). + +-export_type([shard/0, stream/0, iterator/0]). 
+ +%%================================================================================ +%% Type declarations +%%================================================================================ + +-opaque stream() :: emqx_ds_storage_layer:stream(). + +-type shard() :: binary(). + +-opaque iterator() :: emqx_ds_storage_layer:iterator(). + +%%================================================================================ +%% API functions +%%================================================================================ + +-spec list_shards(emqx_ds:db()) -> [shard()]. +list_shards(DB) -> + %% TODO: milestone 5 + lists:map( + fun(Node) -> + term_to_binary({DB, Node}) + end, + list_nodes()). + +-spec create_db(emqx_ds:db(), emqx_ds:create_db_opts()) -> ok. +create_db(DB, Opts) -> + lists:foreach( + fun(Node) -> + Shard = term_to_binary({DB, Node}), + emqx_ds_proto_v1:create_shard(Node, Shard, Opts) + end, + list_nodes()). + +-spec message_store(emqx_ds:db(), [emqx_types:message()], emqx_ds:message_store_opts()) -> + {ok, [message_id()]} | {error, _}. +message_store(DB, Msg, Opts) -> + %% TODO: milestone 5. Currently we store messages locally. + Shard = term_to_binary({DB, node()}), + emqx_ds_storage_layer:message_store(Shard, Msg, Opts). + +-spec get_streams(shard(), emqx_ds:topic_filter(), emqx_ds:time()) -> [{emqx_ds:stream_rank(), stream()}]. +get_streams(Shard, TopicFilter, StartTime) -> + Node = node_of_shard(Shard), + emqx_ds_proto_v1:get_streams(Node, Shard, TopicFilter, StartTime). + +-spec open_iterator(shard(), stream(), emqx_ds:time()) -> {ok, iterator()} | {error, _}. +open_iterator(Shard, Stream, StartTime) -> + Node = node_of_shard(Shard), + emqx_ds_proto_v1:open_iterator(Node, Shard, Stream, StartTime). + +-spec next(shard(), iterator(), non_neg_integer()) -> + {ok, iterator(), [emqx_types:message()]} | end_of_stream. +next(Shard, Iter, BatchSize) -> + Node = node_of_shard(Shard), + emqx_ds_proto_v1:next(Node, Shard, Iter, BatchSize). 
+ +%%================================================================================ +%% behavior callbacks +%%================================================================================ + +%%================================================================================ +%% Internal exports (RPC targets) +%%================================================================================ + +-spec do_create_shard_v1(shard(), emqx_ds:create_db_opts()) -> ok. +do_create_shard_v1(Shard, Opts) -> + error({todo, Shard, Opts}). + +-spec do_get_streams_v1(shard(), emqx_ds:topic_filter(), emqx_ds:time()) -> + [{emqx_ds:stream_rank(), stream()}]. +do_get_streams_v1(Shard, TopicFilter, StartTime) -> + error({todo, Shard, TopicFilter, StartTime}). + +-spec do_open_iterator_v1(shard(), stream(), emqx_ds:time()) -> iterator(). +do_open_iterator_v1(Shard, Stream, Time) -> + error({todo, Shard, Stream, StartTime}). + +-spec do_next_v1(shard(), iterator(), non_neg_integer()) -> + {ok, iterator(), [emqx_types:message()]} | end_of_stream. +do_next_v1(Shard, Iter, BatchSize) -> + error({todo, Shard, Iter, BatchSize}). + +%%================================================================================ +%% Internal functions +%%================================================================================ + +-spec node_of_shard(shard()) -> node(). +node_of_shard(ShardId) -> + {_DB, Node} = binary_to_term(ShardId), + Node. + +list_nodes() -> + mria:running_nodes(). diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl index 25a58950d..7a96cab51 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl @@ -9,6 +9,7 @@ -export([start_link/2]). -export([create_generation/3]). +-export([get_streams/3]). -export([store/5]). -export([delete/4]). 
@@ -27,7 +28,7 @@ %% behaviour callbacks: -export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2]). --export_type([cf_refs/0, gen_id/0, options/0, state/0, iterator/0]). +-export_type([stream/0, cf_refs/0, gen_id/0, options/0, state/0, iterator/0]). -export_type([db_options/0, db_write_options/0, db_read_options/0]). -compile({inline, [meta_lookup/2]}). @@ -36,6 +37,8 @@ %% Type declarations %%================================================================================ +-opaque stream() :: {term()}. + -type options() :: #{ dir => file:filename() }. @@ -114,10 +117,10 @@ cf_refs(), _Schema ) -> - term(). + _DB. -callback store( - _Schema, + _DB, _MessageID :: binary(), emqx_ds:time(), emqx_ds:topic(), @@ -125,13 +128,16 @@ ) -> ok | {error, _}. --callback delete(_Schema, _MessageID :: binary(), emqx_ds:time(), emqx_ds:topic()) -> +-callback delete(_DB, _MessageID :: binary(), emqx_ds:time(), emqx_ds:topic()) -> ok | {error, _}. --callback make_iterator(_Schema, emqx_ds:replay()) -> +-callback get_streams(_DB, emqx_ds:topic_filter(), emqx_ds:time()) -> + [_Stream]. + +-callback make_iterator(_DB, emqx_ds:replay()) -> {ok, _It} | {error, _}. --callback restore_iterator(_Schema, _Serialized :: binary()) -> {ok, _It} | {error, _}. +-callback restore_iterator(_DB, _Serialized :: binary()) -> {ok, _It} | {error, _}. -callback preserve_iterator(_It) -> term(). @@ -146,6 +152,15 @@ start_link(Shard = {Keyspace, ShardId}, Options) -> gen_server:start_link(?REF(Keyspace, ShardId), ?MODULE, {Shard, Options}, []). +-spec get_streams(emqx_ds:keyspace(), emqx_ds:shard_id(), emqx_ds:topic_filter(), emqx_ds:time()) -> [stream()]. +get_streams(KeySpace, TopicFilter, StartTime) -> + %% FIXME: messages can be potentially stored in multiple + %% generations. This function should return the results from all + %% of them! + %% Otherwise we could LOSE messages when generations are switched. 
+ {GenId, #{module := Mod, }} = meta_lookup_gen(Shard, StartTime), + + -spec create_generation( emqx_ds:shard(), emqx_ds:time(), emqx_ds_conf:backend_config() ) -> diff --git a/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl b/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl new file mode 100644 index 000000000..f5d802003 --- /dev/null +++ b/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl @@ -0,0 +1,56 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- +-module(emqx_ds_proto_v1). + +-behavior(emqx_bpapi). + +%% API: +-export([]). + +%% behavior callbacks: +-export([introduced_in/0]). + +%%================================================================================ +%% API funcions +%%================================================================================ + +-spec create_shard(node(), emqx_ds_replication_layer:shard(), emqx_ds:create_db_opts()) -> + ok. +create_shard(Node, Shard, Opts) -> + erpc:call(Node, emqx_ds_replication_layer, do_create_shard_v1, [Shard, Opts]). + +-spec get_streams(node(), emqx_ds_replication_layer:shard(), emqx_ds:topic_filter(), emqx_ds:time()) -> + [emqx_ds_replication_layer:stream()]. 
+get_streams(Shard, TopicFilter, Time) -> + erpc:call(Node, emqx_ds_replication_layer, do_get_streams_v1, [Shard, TopicFilter, Time]). + +-spec open_iterator(node(), emqx_ds_replication_layer:shard(), emqx_ds_replication_layer:stream(), emqx_ds:time()) -> + {ok, emqx_ds_replication_layer:iterator()} | {error, _}. +open_iterator(Node, Shard, Stream, StartTime) -> + erpc:call(Node, emqx_ds_replication_layer, do_open_iterator_v1, [Shard, Stream, Time]). + +-spec next(node(), emqx_ds_replication_layer:shard(), emqx_ds_replication_layer:iterator(), non_neg_integer()) -> + {ok, emqx_ds_replication_layer:iterator(), [emqx_types:messages()]} | end_of_stream. +next(Node, Shard, Iter, BatchSize) -> + erpc:call(Node, emqx_ds_replication_layer, do_next_v1, [Shard, Iter, BatchSize]). + +%%================================================================================ +%% behavior callbacks +%%================================================================================ + +introduced_in() -> + %% FIXME + "5.3.0". 
From 7095cb8583752a18e166822131536756f4d8e788 Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Tue, 3 Oct 2023 01:01:39 +0200 Subject: [PATCH 074/155] refactor(ds): Refactor storage layer --- apps/emqx/src/emqx_persistent_message.erl | 15 +---- apps/emqx_durable_storage/src/emqx_ds.erl | 17 ++++-- .../src/emqx_ds_replication_layer.erl | 18 ++++-- .../src/emqx_ds_storage_layer.erl | 55 ++++++++++++------- apps/emqx_durable_storage/src/emqx_ds_sup.erl | 4 +- .../src/proto/emqx_ds_proto_v1.erl | 6 +- .../test/emqx_ds_storage_layer_SUITE.erl | 36 ++++++++---- 7 files changed, 93 insertions(+), 58 deletions(-) diff --git a/apps/emqx/src/emqx_persistent_message.erl b/apps/emqx/src/emqx_persistent_message.erl index 609b0139d..3f38b4030 100644 --- a/apps/emqx/src/emqx_persistent_message.erl +++ b/apps/emqx/src/emqx_persistent_message.erl @@ -23,9 +23,7 @@ %% Message persistence -export([ - persist/1, - serialize/1, - deserialize/1 + persist/1 ]). %% FIXME @@ -83,18 +81,9 @@ needs_persistence(Msg) -> not (emqx_message:get_flag(dup, Msg) orelse emqx_message:is_sys(Msg)). store_message(Msg) -> - ID = emqx_message:id(Msg), - Timestamp = emqx_guid:timestamp(ID), - Topic = emqx_topic:words(emqx_message:topic(Msg)), - emqx_ds_storage_layer:store(?DS_SHARD, ID, Timestamp, Topic, serialize(Msg)). + emqx_ds:message_store([Msg]). has_subscribers(#message{topic = Topic}) -> emqx_persistent_session_ds_router:has_any_route(Topic). %% - -serialize(Msg) -> - term_to_binary(emqx_message:to_map(Msg)). - -deserialize(Bin) -> - emqx_message:from_map(binary_to_term(Bin)). diff --git a/apps/emqx_durable_storage/src/emqx_ds.erl b/apps/emqx_durable_storage/src/emqx_ds.erl index ad6a07330..762478932 100644 --- a/apps/emqx_durable_storage/src/emqx_ds.erl +++ b/apps/emqx_durable_storage/src/emqx_ds.erl @@ -19,7 +19,7 @@ -export([create_db/2]). %% Message storage API: --export([message_store/3, message_store/2]). 
+-export([message_store/1, message_store/2, message_store/3]). %% Message replay API: -export([get_streams/3, open_iterator/2, next/2]). @@ -53,7 +53,7 @@ %% implementations for emqx_ds, so this type has to take this into %% account. -record(stream, - { shard :: emqx_ds:shard() + { shard :: emqx_ds_replication_layer:shard() , enc :: emqx_ds_replication_layer:stream() }). @@ -64,7 +64,7 @@ %% This record encapsulates the iterator entity from the replication %% level. -record(iterator, - { shard :: emqx_ds:shard() + { shard :: emqx_ds_replication_layer:shard() , enc :: enqx_ds_replication_layer:iterator() }). @@ -80,7 +80,9 @@ -type create_db_opts() :: #{}. --type message_id() :: binary(). +-type message_id() :: emqx_ds_replication_layer:message_id(). + +-define(DEFAULT_DB, <<"default">>). %%================================================================================ %% API funcions @@ -90,6 +92,11 @@ create_db(DB, Opts) -> emqx_ds_replication_layer:create_db(DB, Opts). +-spec message_store([emqx_types:message()]) -> + {ok, [message_id()]} | {error, _}. +message_store(Msgs) -> + message_store(?DEFAULT_DB, Msgs, #{}). + -spec message_store(db(), [emqx_types:message()], message_store_opts()) -> {ok, [message_id()]} | {error, _}. message_store(DB, Msgs, Opts) -> @@ -143,7 +150,7 @@ open_iterator(#stream{shard = Shard, enc = Stream}, StartTime) -> Err end. --spec next(iterator(), non_neg_integer()) -> {ok, iterator(), [emqx_types:message()]} | end_of_stream. +-spec next(iterator(), pos_integer()) -> {ok, iterator(), [emqx_types:message()]} | end_of_stream. 
 next(#iterator{shard = Shard, enc = Iter0}, BatchSize) ->
     case emqx_ds_replication_layer:next(Shard, Iter0, BatchSize) of
         {ok, Iter, Batch} ->
             {ok, #iterator{shard = Shard, enc = Iter}, Batch};
diff --git a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl
index 9fe08e0a2..af6087188 100644
--- a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl
+++ b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl
@@ -28,11 +28,11 @@
 %% internal exports:
 -export([ do_create_shard_v1/2,
           do_get_streams_v1/3,
-          do_open_iterator/3,
+          do_open_iterator_v1/3,
           do_next_v1/3
         ]).
 
--export_type([shard/0, stream/0, iterator/0]).
+-export_type([shard/0, stream/0, iterator/0, message_id/0]).
 
 %%================================================================================
 %% Type declarations
@@ -44,6 +44,8 @@
 
 -opaque iterator() :: emqx_ds_storage_layer:iterator().
 
+-type message_id() :: emqx_ds_storage_layer:message_id().
+
 %%================================================================================
 %% API functions
 %%================================================================================
@@ -83,10 +85,18 @@ open_iterator(Shard, Stream, StartTime) ->
     Node = node_of_shard(Shard),
     emqx_ds_proto_v1:open_iterator(Node, Shard, Stream, StartTime).
 
--spec next(shard(), iterator(), non_neg_integer()) ->
+-spec next(shard(), iterator(), pos_integer()) ->
     {ok, iterator(), [emqx_types:message()]} | end_of_stream.
 next(Shard, Iter, BatchSize) ->
     Node = node_of_shard(Shard),
+    %% TODO: iterator can contain information that is useful for
+    %% reconstructing messages sent over the network. For example,
+    %% when we send messages with the learned topic index, we could
+    %% send the static part of topic once, and append it to the
+    %% messages on the receiving node, hence saving some network.
+    %%
+    %% This kind of trickery should be probably done here in the
+    %% replication layer. Or, perhaps, in the logic layer.
emqx_ds_proto_v1:next(Node, Shard, Iter, BatchSize). %%================================================================================ @@ -107,7 +117,7 @@ do_get_streams_v1(Shard, TopicFilter, StartTime) -> error({todo, Shard, TopicFilter, StartTime}). -spec do_open_iterator_v1(shard(), stream(), emqx_ds:time()) -> iterator(). -do_open_iterator_v1(Shard, Stream, Time) -> +do_open_iterator_v1(Shard, Stream, StartTime) -> error({todo, Shard, Stream, StartTime}). -spec do_next_v1(shard(), iterator(), non_neg_integer()) -> diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl index 7a96cab51..f4dbbe6f4 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl @@ -10,7 +10,7 @@ -export([create_generation/3]). -export([get_streams/3]). --export([store/5]). +-export([message_store/3]). -export([delete/4]). -export([make_iterator/2, next/1, next/2]). @@ -33,11 +33,13 @@ -compile({inline, [meta_lookup/2]}). +-include_lib("emqx/include/emqx.hrl"). + %%================================================================================ %% Type declarations %%================================================================================ --opaque stream() :: {term()}. +-type stream() :: term(). %% Opaque term returned by the generation callback module -type options() :: #{ dir => file:filename() @@ -101,7 +103,7 @@ %% 3. `inplace_update_support`? -define(ITERATOR_CF_OPTS, []). --define(REF(Keyspace, ShardId), {via, gproc, {n, l, {?MODULE, Keyspace, ShardId}}}). +-define(REF(ShardId), {via, gproc, {n, l, {?MODULE, ShardId}}}). %%================================================================================ %% Callbacks @@ -149,30 +151,34 @@ -spec start_link(emqx_ds:shard(), emqx_ds_storage_layer:options()) -> {ok, pid()}. 
-start_link(Shard = {Keyspace, ShardId}, Options) -> - gen_server:start_link(?REF(Keyspace, ShardId), ?MODULE, {Shard, Options}, []). +start_link(Shard, Options) -> + gen_server:start_link(?REF(Shard), ?MODULE, {Shard, Options}, []). --spec get_streams(emqx_ds:keyspace(), emqx_ds:shard_id(), emqx_ds:topic_filter(), emqx_ds:time()) -> [stream()]. -get_streams(KeySpace, TopicFilter, StartTime) -> - %% FIXME: messages can be potentially stored in multiple - %% generations. This function should return the results from all - %% of them! - %% Otherwise we could LOSE messages when generations are switched. - {GenId, #{module := Mod, }} = meta_lookup_gen(Shard, StartTime), +-spec get_streams(emqx_ds:shard_id(), emqx_ds:topic_filter(), emqx_ds:time()) -> [_Stream]. +get_streams(_ShardId, _TopicFilter, _StartTime) -> + []. -spec create_generation( emqx_ds:shard(), emqx_ds:time(), emqx_ds_conf:backend_config() ) -> {ok, gen_id()} | {error, nonmonotonic}. -create_generation({Keyspace, ShardId}, Since, Config = {_Module, _Options}) -> - gen_server:call(?REF(Keyspace, ShardId), {create_generation, Since, Config}). +create_generation(ShardId, Since, Config = {_Module, _Options}) -> + gen_server:call(?REF(ShardId), {create_generation, Since, Config}). --spec store(emqx_ds:shard(), emqx_guid:guid(), emqx_ds:time(), emqx_ds:topic(), binary()) -> - ok | {error, _}. -store(Shard, GUID, Time, Topic, Msg) -> - {_GenId, #{module := Mod, data := Data}} = meta_lookup_gen(Shard, Time), - Mod:store(Data, GUID, Time, Topic, Msg). +-spec message_store(emqx_ds:shard(), [emqx_types:message()], emqx_ds:message_store_opts()) -> + {ok, _MessageId} | {error, _}. 
+message_store(Shard, Msgs, _Opts) -> + {ok, lists:map( + fun(Msg) -> + GUID = emqx_message:id(Msg), + Timestamp = Msg#message.timestamp, + {_GenId, #{module := Mod, data := ModState}} = meta_lookup_gen(Shard, Timestamp), + Topic = emqx_topic:words(emqx_message:topic(Msg)), + Payload = serialize(Msg), + Mod:store(ModState, GUID, Timestamp, Topic, Payload) + end, + Msgs)}. -spec delete(emqx_ds:shard(), emqx_guid:guid(), emqx_ds:time(), emqx_ds:topic()) -> ok | {error, _}. @@ -212,7 +218,8 @@ do_next(It, N, Acc) when N =< 0 -> {ok, It, lists:reverse(Acc)}; do_next(It = #it{module = Mod, data = ItData}, N, Acc) -> case Mod:next(ItData) of - {value, Val, ItDataNext} -> + {value, Bin, ItDataNext} -> + Val = deserialize(Bin), do_next(It#it{data = ItDataNext}, N - 1, [Val | Acc]); {error, _} = _Error -> %% todo: log? @@ -663,6 +670,14 @@ is_gen_valid(Shard, GenId, Since) when GenId > 0 -> is_gen_valid(_Shard, 0, 0) -> ok. +serialize(Msg) -> + %% TODO: remove topic, GUID, etc. from the stored message. + term_to_binary(emqx_message:to_map(Msg)). + +deserialize(Bin) -> + emqx_message:from_map(binary_to_term(Bin)). + + %% -spec store_cfs(rocksdb:db_handle(), [{string(), rocksdb:cf_handle()}]) -> ok. 
%% store_cfs(DBHandle, CFRefs) -> %% lists:foreach( diff --git a/apps/emqx_durable_storage/src/emqx_ds_sup.erl b/apps/emqx_durable_storage/src/emqx_ds_sup.erl index ca939e892..d371a2346 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_sup.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_sup.erl @@ -30,7 +30,7 @@ start_link() -> %%================================================================================ init([]) -> - Children = [shard_sup()], + Children = [storage_layer_sup()], SupFlags = #{ strategy => one_for_all, intensity => 0, @@ -42,7 +42,7 @@ init([]) -> %% Internal functions %%================================================================================ -shard_sup() -> +storage_layer_sup() -> #{ id => local_store_shard_sup, start => {emqx_ds_storage_layer_sup, start_link, []}, diff --git a/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl b/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl index f5d802003..79285fe16 100644 --- a/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl +++ b/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl @@ -34,15 +34,15 @@ create_shard(Node, Shard, Opts) -> -spec get_streams(node(), emqx_ds_replication_layer:shard(), emqx_ds:topic_filter(), emqx_ds:time()) -> [emqx_ds_replication_layer:stream()]. -get_streams(Shard, TopicFilter, Time) -> +get_streams(Node, Shard, TopicFilter, Time) -> erpc:call(Node, emqx_ds_replication_layer, do_get_streams_v1, [Shard, TopicFilter, Time]). -spec open_iterator(node(), emqx_ds_replication_layer:shard(), emqx_ds_replication_layer:stream(), emqx_ds:time()) -> {ok, emqx_ds_replication_layer:iterator()} | {error, _}. open_iterator(Node, Shard, Stream, StartTime) -> - erpc:call(Node, emqx_ds_replication_layer, do_open_iterator_v1, [Shard, Stream, Time]). + erpc:call(Node, emqx_ds_replication_layer, do_open_iterator_v1, [Shard, Stream, StartTime]). 
--spec next(node(), emqx_ds_replication_layer:shard(), emqx_ds_replication_layer:iterator(), non_neg_integer()) -> +-spec next(node(), emqx_ds_replication_layer:shard(), emqx_ds_replication_layer:iterator(), pos_integer()) -> {ok, emqx_ds_replication_layer:iterator(), [emqx_types:messages()]} | end_of_stream. next(Node, Shard, Iter, BatchSize) -> erpc:call(Node, emqx_ds_replication_layer, do_next_v1, [Shard, Iter, BatchSize]). diff --git a/apps/emqx_durable_storage/test/emqx_ds_storage_layer_SUITE.erl b/apps/emqx_durable_storage/test/emqx_ds_storage_layer_SUITE.erl index 10596e216..981f1062a 100644 --- a/apps/emqx_durable_storage/test/emqx_ds_storage_layer_SUITE.erl +++ b/apps/emqx_durable_storage/test/emqx_ds_storage_layer_SUITE.erl @@ -6,6 +6,7 @@ -compile(export_all). -compile(nowarn_export_all). +-include_lib("emqx/include/emqx.hrl"). -include_lib("common_test/include/ct.hrl"). -include_lib("stdlib/include/assert.hrl"). @@ -39,19 +40,24 @@ t_open(_Config) -> t_store(_Config) -> MessageID = emqx_guid:gen(), PublishedAt = 1000, - Topic = [<<"foo">>, <<"bar">>], + Topic = <<"foo/bar">>, Payload = <<"message">>, - ?assertMatch(ok, emqx_ds_storage_layer:store(?SHARD, MessageID, PublishedAt, Topic, Payload)). + Msg = #message{ + id = MessageID, + topic = Topic, + payload = Payload, + timestamp = PublishedAt + }, + ?assertMatch({ok, [_]}, emqx_ds_storage_layer:message_store(?SHARD, [Msg], #{})). 
%% Smoke test for iteration through a concrete topic t_iterate(_Config) -> %% Prepare data: - Topics = [[<<"foo">>, <<"bar">>], [<<"foo">>, <<"bar">>, <<"baz">>], [<<"a">>]], + Topics = [<<"foo/bar">>, <<"foo/bar/baz">>, <<"a">>], Timestamps = lists:seq(1, 10), [ - emqx_ds_storage_layer:store( + store( ?SHARD, - emqx_guid:gen(), PublishedAt, Topic, integer_to_binary(PublishedAt) @@ -61,7 +67,7 @@ t_iterate(_Config) -> %% Iterate through individual topics: [ begin - {ok, It} = emqx_ds_storage_layer:make_iterator(?SHARD, {Topic, 0}), + {ok, It} = emqx_ds_storage_layer:make_iterator(?SHARD, {parse_topic(Topic), 0}), Values = iterate(It), ?assertEqual(lists:map(fun integer_to_binary/1, Timestamps), Values) end @@ -149,7 +155,7 @@ t_create_gen(_Config) -> Topics = ["foo/bar", "foo/bar/baz"], Timestamps = lists:seq(1, 100), [ - ?assertEqual(ok, store(?SHARD, PublishedAt, Topic, <<>>)) + ?assertMatch({ok, [_]}, store(?SHARD, PublishedAt, Topic, <<>>)) || Topic <- Topics, PublishedAt <- Timestamps ]. @@ -215,16 +221,24 @@ t_iterate_multigen_preserve_restore(_Config) -> emqx_ds_storage_layer:restore_iterator(?SHARD, ReplayID) ). +store(Shard, PublishedAt, TopicL, Payload) when is_list(TopicL) -> + store(Shard, PublishedAt, list_to_binary(TopicL), Payload); store(Shard, PublishedAt, Topic, Payload) -> ID = emqx_guid:gen(), - emqx_ds_storage_layer:store(Shard, ID, PublishedAt, parse_topic(Topic), Payload). + Msg = #message{ + id = ID, + topic = Topic, + timestamp = PublishedAt, + payload = Payload + }, + emqx_ds_storage_layer:message_store(Shard, [Msg], #{}). iterate(DB, TopicFilter, StartTime) -> iterate(iterator(DB, TopicFilter, StartTime)). 
iterate(It) -> case emqx_ds_storage_layer:next(It) of - {ok, ItNext, [Payload]} -> + {ok, ItNext, [#message{payload = Payload}]} -> [Payload | iterate(ItNext)]; end_of_stream -> [] @@ -234,8 +248,8 @@ iterate(end_of_stream, _N) -> {end_of_stream, []}; iterate(It, N) -> case emqx_ds_storage_layer:next(It, N) of - {ok, ItFinal, Payloads} -> - {ItFinal, Payloads}; + {ok, ItFinal, Messages} -> + {ItFinal, [Payload || #message{payload = Payload} <- Messages]}; end_of_stream -> {end_of_stream, []} end. From 59d01dc82334ec634ca1894b5b85d2abd228944f Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Tue, 3 Oct 2023 02:48:06 +0200 Subject: [PATCH 075/155] refactor(ds): Implement emqx_ds:open_db --- apps/emqx/src/emqx_persistent_message.erl | 13 +------- apps/emqx_durable_storage/src/emqx_ds.erl | 9 +++--- apps/emqx_durable_storage/src/emqx_ds.erl_ | 2 +- .../src/emqx_ds_message_storage_bitmask.erl | 12 ++------ .../src/emqx_ds_replication_layer.erl | 30 +++++++++++-------- .../src/emqx_ds_storage_layer.erl | 25 ++++++++++------ .../src/emqx_ds_storage_layer_sup.erl | 11 +++++++ .../test/emqx_ds_storage_layer_SUITE.erl | 11 +++---- 8 files changed, 59 insertions(+), 54 deletions(-) diff --git a/apps/emqx/src/emqx_persistent_message.erl b/apps/emqx/src/emqx_persistent_message.erl index 3f38b4030..96c767d7e 100644 --- a/apps/emqx/src/emqx_persistent_message.erl +++ b/apps/emqx/src/emqx_persistent_message.erl @@ -42,18 +42,7 @@ init() -> ?WHEN_ENABLED(begin - ok = emqx_ds:ensure_shard( - ?DS_SHARD, - #{ - dir => filename:join([ - emqx:data_dir(), - ds, - messages, - ?DEFAULT_KEYSPACE, - ?DS_SHARD_ID - ]) - } - ), + ok = emqx_ds:create_db(<<"default">>, #{}), ok = emqx_persistent_session_ds_router:init_tables(), ok = emqx_persistent_session_ds:create_tables(), ok diff --git a/apps/emqx_durable_storage/src/emqx_ds.erl b/apps/emqx_durable_storage/src/emqx_ds.erl index 762478932..70cdd8d17 100644 --- a/apps/emqx_durable_storage/src/emqx_ds.erl +++ 
b/apps/emqx_durable_storage/src/emqx_ds.erl @@ -16,7 +16,7 @@ -module(emqx_ds). %% Management API: --export([create_db/2]). +-export([open_db/2]). %% Message storage API: -export([message_store/1, message_store/2, message_store/3]). @@ -88,9 +88,9 @@ %% API funcions %%================================================================================ --spec create_db(db(), create_db_opts()) -> ok. -create_db(DB, Opts) -> - emqx_ds_replication_layer:create_db(DB, Opts). +-spec open_db(db(), create_db_opts()) -> ok. +open_db(DB, Opts) -> + emqx_ds_replication_layer:open_db(DB, Opts). -spec message_store([emqx_types:message()]) -> {ok, [message_id()]} | {error, _}. @@ -102,6 +102,7 @@ message_store(Msgs) -> message_store(DB, Msgs, Opts) -> emqx_ds_replication_layer:message_store(DB, Msgs, Opts). +%% TODO: Do we really need to return message IDs? It's extra work... -spec message_store(db(), [emqx_types:message()]) -> {ok, [message_id()]} | {error, _}. message_store(DB, Msgs) -> message_store(DB, Msgs, #{}). diff --git a/apps/emqx_durable_storage/src/emqx_ds.erl_ b/apps/emqx_durable_storage/src/emqx_ds.erl_ index 61b4c4bb3..1acbcc7c7 100644 --- a/apps/emqx_durable_storage/src/emqx_ds.erl_ +++ b/apps/emqx_durable_storage/src/emqx_ds.erl_ @@ -143,7 +143,7 @@ get_streams(Keyspace, TopicFilter, StartTime) -> -spec ensure_shard(shard(), emqx_ds_storage_layer:options()) -> ok | {error, _Reason}. -ensure_shard(Shard, Options) -> +ensure_shard(Shard, Options) -> case emqx_ds_storage_layer_sup:start_shard(Shard, Options) of {ok, _Pid} -> ok; diff --git a/apps/emqx_durable_storage/src/emqx_ds_message_storage_bitmask.erl b/apps/emqx_durable_storage/src/emqx_ds_message_storage_bitmask.erl index f51d556f1..3290b03e6 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_message_storage_bitmask.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_message_storage_bitmask.erl @@ -87,7 +87,7 @@ -export([delete/4]). -export([get_streams/2]). --export([make_iterator/2, make_iterator/3, next/1]). 
+-export([make_iterator/3, next/1]). -export([preserve_iterator/1]). -export([restore_iterator/2]). @@ -295,13 +295,6 @@ delete(DB = #db{handle = DBHandle, cf = CFHandle}, MessageID, PublishedAt, Topic get_streams(_, _) -> [singleton_stream]. --spec make_iterator(db(), emqx_ds:replay()) -> - {ok, iterator()} | {error, _TODO}. -make_iterator(DB, Replay) -> - {Keyspace, _ShardId} = DB#db.shard, - Options = emqx_ds_conf:iteration_options(Keyspace), - make_iterator(DB, Replay, Options). - -spec make_iterator(db(), emqx_ds:replay(), iteration_options()) -> % {error, invalid_start_time}? might just start from the beginning of time % and call it a day: client violated the contract anyway. @@ -373,7 +366,8 @@ restore_iterator(DB, #{ cursor := Cursor, replay := Replay = {_TopicFilter, _StartTime} }) -> - case make_iterator(DB, Replay) of + Options = #{}, % TODO: passthrough options + case make_iterator(DB, Replay, Options) of {ok, It} when Cursor == undefined -> % Iterator was preserved right after it has been made. {ok, It}; diff --git a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl index af6087188..846d2ca0c 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl @@ -17,7 +17,7 @@ -export([ list_shards/1, - create_db/2, + open_db/2, message_store/3, get_streams/3, open_iterator/3, @@ -26,7 +26,7 @@ %% internal exports: --export([ do_create_shard_v1/2, +-export([ do_open_shard_v1/2, do_get_streams_v1/3, do_open_iterator_v1/3, do_next_v1/3 @@ -55,16 +55,16 @@ list_shards(DB) -> %% TODO: milestone 5 lists:map( fun(Node) -> - term_to_binary({DB, Node}) + shard_id(DB, Node) end, list_nodes()). --spec create_db(emqx_ds:db(), emqx_ds:create_db_opts()) -> ok. -create_db(DB, Opts) -> +-spec open_db(emqx_ds:db(), emqx_ds:create_db_opts()) -> ok. 
+open_db(DB, Opts) -> lists:foreach( fun(Node) -> - Shard = term_to_binary({DB, Node}), - emqx_ds_proto_v1:create_shard(Node, Shard, Opts) + Shard = shard_id(DB, Node), + emqx_ds_proto_v1:open_shard(Node, Shard, Opts) end, list_nodes()). @@ -107,9 +107,9 @@ next(Shard, Iter, BatchSize) -> %% Internal exports (RPC targets) %%================================================================================ --spec do_create_shard_v1(shard(), emqx_ds:create_db_opts()) -> ok. -do_create_shard_v1(Shard, Opts) -> - error({todo, Shard, Opts}). +-spec do_open_shard_v1(shard(), emqx_ds:create_db_opts()) -> ok. +do_open_shard_v1(Shard, Opts) -> + emqx_ds_storage_layer_sup:ensure_shard(Shard, Opts). -spec do_get_streams_v1(shard(), emqx_ds:topic_filter(), emqx_ds:time()) -> [{emqx_ds:stream_rank(), stream()}]. @@ -129,10 +129,16 @@ do_next_v1(Shard, Iter, BatchSize) -> %% Internal functions %%================================================================================ +shard_id(DB, Node) -> + %% TODO: don't bake node name into the schema, don't repeat the + %% Mnesia's 1M$ mistake. + NodeBin = atom_to_binary(Node), + <<DB/binary, ":", NodeBin/binary>>. + -spec node_of_shard(shard()) -> node(). node_of_shard(ShardId) -> - {_DB, Node} = binary_to_term(ShardId), - Node. + [_DB, NodeBin] = binary:split(ShardId, <<":">>), + binary_to_atom(NodeBin). list_nodes() -> mria:running_nodes(). 
diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl index f4dbbe6f4..93c1aaa1f 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl @@ -69,6 +69,7 @@ -record(s, { shard :: emqx_ds:shard(), + keyspace :: emqx_ds_conf:keyspace(), db :: rocksdb:db_handle(), cf_iterator :: rocksdb:cf_handle(), cf_generations :: cf_refs() @@ -176,7 +177,8 @@ message_store(Shard, Msgs, _Opts) -> {_GenId, #{module := Mod, data := ModState}} = meta_lookup_gen(Shard, Timestamp), Topic = emqx_topic:words(emqx_message:topic(Msg)), Payload = serialize(Msg), - Mod:store(ModState, GUID, Timestamp, Topic, Payload) + Mod:store(ModState, GUID, Timestamp, Topic, Payload), + GUID end, Msgs)}. @@ -356,7 +358,7 @@ populate_metadata(GenId, S = #s{shard = Shard, db = DBHandle}) -> meta_register_gen(Shard, GenId, Gen). -spec ensure_current_generation(state()) -> state(). -ensure_current_generation(S = #s{shard = {Keyspace, _ShardId}, db = DBHandle}) -> +ensure_current_generation(S = #s{shard = _Shard, keyspace = Keyspace, db = DBHandle}) -> case schema_get_current(DBHandle) of undefined -> Config = emqx_ds_conf:keyspace_config(Keyspace), @@ -396,9 +398,11 @@ create_gen(GenId, Since, {Module, Options}, S = #s{db = DBHandle, cf_generations {ok, Gen, S#s{cf_generations = NewCFs ++ CFs}}. -spec open_db(emqx_ds:shard(), options()) -> {ok, state()} | {error, _TODO}. 
-open_db(Shard = {Keyspace, ShardId}, Options) -> - DefaultDir = filename:join([atom_to_binary(Keyspace), ShardId]), +open_db(Shard, Options) -> + DefaultDir = binary_to_list(Shard), DBDir = unicode:characters_to_list(maps:get(dir, Options, DefaultDir)), + %% TODO: properly forward keyspace + Keyspace = maps:get(keyspace, Options, default_keyspace), DBOptions = [ {create_if_missing, true}, {create_missing_column_families, true} @@ -423,6 +427,7 @@ open_db(Shard = {Keyspace, ShardId}, Options) -> {CFNames, _} = lists:unzip(ExistingCFs), {ok, #s{ shard = Shard, + keyspace = Keyspace, db = DBHandle, cf_iterator = CFIterator, cf_generations = lists:zip(CFNames, CFRefs) @@ -451,7 +456,8 @@ open_next_iterator(Gen = #{}, It) -> -spec open_iterator(generation(), iterator()) -> {ok, iterator()} | {error, _Reason}. open_iterator(#{module := Mod, data := Data}, It = #it{}) -> - case Mod:make_iterator(Data, It#it.replay) of + Options = #{}, % TODO: passthrough options + case Mod:make_iterator(Data, It#it.replay, Options) of {ok, ItData} -> {ok, It#it{module = Mod, data = ItData}}; Err -> @@ -611,9 +617,9 @@ meta_register_gen(Shard, GenId, Gen) -> -spec meta_lookup_gen(emqx_ds:shard(), emqx_ds:time()) -> {gen_id(), generation()}. meta_lookup_gen(Shard, Time) -> - % TODO - % Is cheaper persistent term GC on update here worth extra lookup? I'm leaning - % towards a "no". + %% TODO + %% Is cheaper persistent term GC on update here worth extra lookup? I'm leaning + %% towards a "no". Current = meta_lookup(Shard, current), Gens = meta_lookup(Shard, Current), find_gen(Time, Current, Gens). @@ -671,7 +677,8 @@ is_gen_valid(_Shard, 0, 0) -> ok. serialize(Msg) -> - %% TODO: remove topic, GUID, etc. from the stored message. + %% TODO: remove topic, GUID, etc. from the stored + %% message. Reconstruct it from the metadata. term_to_binary(emqx_message:to_map(Msg)). 
deserialize(Bin) -> diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_layer_sup.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_layer_sup.erl index 56c8c760a..2e4f56f10 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_layer_sup.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_layer_sup.erl @@ -35,6 +35,17 @@ stop_shard(Shard) -> ok = supervisor:terminate_child(?SUP, Shard), ok = supervisor:delete_child(?SUP, Shard). +-spec ensure_shard(emqx_ds:shard(), emqx_ds_storage_layer:options()) -> ok | {error, _Reason}. +ensure_shard(Shard, Options) -> + case start_shard(Shard, Options) of + {ok, _Pid} -> + ok; + {error, {already_started, _Pid}} -> + ok; + {error, Reason} -> + {error, Reason} + end. + %%================================================================================ %% behaviour callbacks %%================================================================================ diff --git a/apps/emqx_durable_storage/test/emqx_ds_storage_layer_SUITE.erl b/apps/emqx_durable_storage/test/emqx_ds_storage_layer_SUITE.erl index 981f1062a..25198cfd7 100644 --- a/apps/emqx_durable_storage/test/emqx_ds_storage_layer_SUITE.erl +++ b/apps/emqx_durable_storage/test/emqx_ds_storage_layer_SUITE.erl @@ -282,14 +282,11 @@ init_per_testcase(TC, Config) -> end_per_testcase(TC, _Config) -> ok = emqx_ds_storage_layer_sup:stop_shard(shard(TC)). -keyspace(TC) -> - list_to_atom(lists:concat([?MODULE, "_", TC])). - -shard_id(_TC) -> - <<"shard">>. - shard(TC) -> - {keyspace(TC), shard_id(TC)}. + iolist_to_binary([?MODULE_STRING, "_", atom_to_list(TC)]). + +keyspace(TC) -> + TC. set_keyspace_config(Keyspace, Config) -> ok = application:set_env(emqx_ds, keyspace_config, #{Keyspace => Config}). 
From c6a721a7eb431d5bd910bf84f23939f0caef744e Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Tue, 3 Oct 2023 17:13:16 +0200 Subject: [PATCH 076/155] refactor(ds): Passthrough open_db and get_channels to storage layer --- ...l => emqx_persistent_session_ds_SUITE.erl} | 2 +- apps/emqx/src/emqx_persistent_message.erl | 10 +- ...ds.erl => emqx_persistent_session_ds.erl_} | 64 +- .../emqx_persistent_session_ds_proto_v1.erl | 22 +- apps/emqx_durable_storage/src/emqx_ds.erl | 155 ++- apps/emqx_durable_storage/src/emqx_ds_lts.erl | 337 ++++--- .../src/emqx_ds_replication_layer.erl | 173 ++-- .../src/emqx_ds_storage_layer.erl | 893 ++++++------------ .../src/emqx_ds_storage_layer.erl_ | 714 ++++++++++++++ ...erl => emqx_ds_storage_layer_bitmask.erl_} | 18 +- .../src/emqx_ds_storage_layer_sup.erl | 2 +- .../src/emqx_ds_storage_reference.erl | 136 +++ .../src/proto/emqx_ds_proto_v1.erl | 33 +- .../test/emqx_ds_SUITE.erl | 107 +++ ...mqx_ds_message_storage_bitmask_SUITE.erl_} | 0 ...E.erl => emqx_ds_storage_layer_SUITE.erl_} | 0 scripts/check-elixir-applications.exs | 2 +- scripts/check-elixir-deps-discrepancies.exs | 2 +- ...elixir-emqx-machine-boot-discrepancies.exs | 2 +- scripts/check_missing_reboot_apps.exs | 2 +- 20 files changed, 1683 insertions(+), 991 deletions(-) rename apps/emqx/integration_test/{emqx_ds_SUITE.erl => emqx_persistent_session_ds_SUITE.erl} (99%) rename apps/emqx/src/{emqx_persistent_session_ds.erl => emqx_persistent_session_ds.erl_} (90%) create mode 100644 apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl_ rename apps/emqx_durable_storage/src/{emqx_ds_message_storage_bitmask.erl => emqx_ds_storage_layer_bitmask.erl_} (98%) create mode 100644 apps/emqx_durable_storage/src/emqx_ds_storage_reference.erl create mode 100644 apps/emqx_durable_storage/test/emqx_ds_SUITE.erl rename apps/emqx_durable_storage/test/{emqx_ds_message_storage_bitmask_SUITE.erl => emqx_ds_message_storage_bitmask_SUITE.erl_} (100%) rename 
apps/emqx_durable_storage/test/{emqx_ds_storage_layer_SUITE.erl => emqx_ds_storage_layer_SUITE.erl_} (100%) diff --git a/apps/emqx/integration_test/emqx_ds_SUITE.erl b/apps/emqx/integration_test/emqx_persistent_session_ds_SUITE.erl similarity index 99% rename from apps/emqx/integration_test/emqx_ds_SUITE.erl rename to apps/emqx/integration_test/emqx_persistent_session_ds_SUITE.erl index 34c15b505..d2d23e8cd 100644 --- a/apps/emqx/integration_test/emqx_ds_SUITE.erl +++ b/apps/emqx/integration_test/emqx_persistent_session_ds_SUITE.erl @@ -1,7 +1,7 @@ %%-------------------------------------------------------------------- %% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. %%-------------------------------------------------------------------- --module(emqx_ds_SUITE). +-module(emqx_persistent_session_ds_SUITE). -compile(export_all). -compile(nowarn_export_all). diff --git a/apps/emqx/src/emqx_persistent_message.erl b/apps/emqx/src/emqx_persistent_message.erl index 96c767d7e..8801acce5 100644 --- a/apps/emqx/src/emqx_persistent_message.erl +++ b/apps/emqx/src/emqx_persistent_message.erl @@ -27,10 +27,6 @@ ]). %% FIXME --define(DS_SHARD_ID, <<"local">>). --define(DEFAULT_KEYSPACE, default). --define(DS_SHARD, {?DEFAULT_KEYSPACE, ?DS_SHARD_ID}). - -define(WHEN_ENABLED(DO), case is_store_enabled() of true -> DO; @@ -42,9 +38,9 @@ init() -> ?WHEN_ENABLED(begin - ok = emqx_ds:create_db(<<"default">>, #{}), + ok = emqx_ds:open_db(<<"default">>, #{}), ok = emqx_persistent_session_ds_router:init_tables(), - ok = emqx_persistent_session_ds:create_tables(), + %ok = emqx_persistent_session_ds:create_tables(), ok end). @@ -70,7 +66,7 @@ needs_persistence(Msg) -> not (emqx_message:get_flag(dup, Msg) orelse emqx_message:is_sys(Msg)). store_message(Msg) -> - emqx_ds:message_store([Msg]). + emqx_ds:store_batch([Msg]). has_subscribers(#message{topic = Topic}) -> emqx_persistent_session_ds_router:has_any_route(Topic). 
diff --git a/apps/emqx/src/emqx_persistent_session_ds.erl b/apps/emqx/src/emqx_persistent_session_ds.erl_ similarity index 90% rename from apps/emqx/src/emqx_persistent_session_ds.erl rename to apps/emqx/src/emqx_persistent_session_ds.erl_ index 174a02156..3fff5f7ba 100644 --- a/apps/emqx/src/emqx_persistent_session_ds.erl +++ b/apps/emqx/src/emqx_persistent_session_ds.erl_ @@ -62,22 +62,6 @@ -export([session_open/1]). -endif. -%% RPC --export([ - ensure_iterator_closed_on_all_shards/1, - ensure_all_iterators_closed/1 -]). --export([ - do_open_iterator/3, - do_ensure_iterator_closed/1, - do_ensure_all_iterators_closed/1 -]). - -%% FIXME --define(DS_SHARD_ID, atom_to_binary(node())). --define(DEFAULT_KEYSPACE, default). --define(DS_SHARD, {?DEFAULT_KEYSPACE, ?DS_SHARD_ID}). - %% Currently, this is the clientid. We avoid `emqx_types:clientid()' because that can be %% an atom, in theory (?). -type id() :: binary(). @@ -157,7 +141,6 @@ destroy(#{clientid := ClientID}) -> destroy_session(ClientID). destroy_session(ClientID) -> - _ = ensure_all_iterators_closed(ClientID), session_drop(ClientID). %%-------------------------------------------------------------------- @@ -410,9 +393,9 @@ open_iterator_on_all_shards(TopicFilter, Iterator) -> %% RPC target. -spec do_open_iterator(emqx_types:words(), emqx_ds:time(), emqx_ds:iterator_id()) -> {ok, emqx_ds_storage_layer:iterator()} | {error, _Reason}. -do_open_iterator(TopicFilter, StartMS, IteratorID) -> - Replay = {TopicFilter, StartMS}, - emqx_ds_storage_layer:ensure_iterator(?DS_SHARD, IteratorID, Replay). +do_open_iterator(TopicFilter, StartMS, _IteratorID) -> + %% TODO: wrong + {ok, emqx_ds:make_iterator(TopicFilter, StartMS)}. -spec del_subscription(topic(), iterator(), id()) -> ok. @@ -420,49 +403,8 @@ del_subscription(TopicFilterBin, #{id := IteratorID}, DSSessionID) -> % N.B.: see comments in `?MODULE:add_subscription' for a discussion about the % order of operations here. 
TopicFilter = emqx_topic:words(TopicFilterBin), - Ctx = #{iterator_id => IteratorID}, - ?tp_span( - persistent_session_ds_close_iterators, - Ctx, - ok = ensure_iterator_closed_on_all_shards(IteratorID) - ), - ?tp_span( - persistent_session_ds_iterator_delete, - Ctx, - session_del_iterator(DSSessionID, TopicFilter) - ), ok = emqx_persistent_session_ds_router:do_delete_route(TopicFilterBin, DSSessionID). --spec ensure_iterator_closed_on_all_shards(emqx_ds:iterator_id()) -> ok. -ensure_iterator_closed_on_all_shards(IteratorID) -> - %% Note: currently, shards map 1:1 to nodes, but this will change in the future. - Nodes = emqx:running_nodes(), - Results = emqx_persistent_session_ds_proto_v1:close_iterator(Nodes, IteratorID), - %% TODO: handle errors - true = lists:all(fun(Res) -> Res =:= {ok, ok} end, Results), - ok. - -%% RPC target. --spec do_ensure_iterator_closed(emqx_ds:iterator_id()) -> ok. -do_ensure_iterator_closed(IteratorID) -> - ok = emqx_ds_storage_layer:discard_iterator(?DS_SHARD, IteratorID), - ok. - --spec ensure_all_iterators_closed(id()) -> ok. -ensure_all_iterators_closed(DSSessionID) -> - %% Note: currently, shards map 1:1 to nodes, but this will change in the future. - Nodes = emqx:running_nodes(), - Results = emqx_persistent_session_ds_proto_v1:close_all_iterators(Nodes, DSSessionID), - %% TODO: handle errors - true = lists:all(fun(Res) -> Res =:= {ok, ok} end, Results), - ok. - -%% RPC target. --spec do_ensure_all_iterators_closed(id()) -> ok. -do_ensure_all_iterators_closed(DSSessionID) -> - ok = emqx_ds_storage_layer:discard_iterator_prefix(?DS_SHARD, DSSessionID), - ok. 
- %%-------------------------------------------------------------------- %% Session tables operations %%-------------------------------------------------------------------- diff --git a/apps/emqx/src/proto/emqx_persistent_session_ds_proto_v1.erl b/apps/emqx/src/proto/emqx_persistent_session_ds_proto_v1.erl index edaaea775..d9b882f3d 100644 --- a/apps/emqx/src/proto/emqx_persistent_session_ds_proto_v1.erl +++ b/apps/emqx/src/proto/emqx_persistent_session_ds_proto_v1.erl @@ -52,18 +52,20 @@ open_iterator(Nodes, TopicFilter, StartMS, IteratorID) -> ). -spec get_streams( - node(), - emqx_ds:keyspace(), - emqx_ds:shard_id(), - emqx_ds:topic_filter(), - emqx_ds:time()) -> - [emqx_ds_storage_layer:stream()]. + node(), + emqx_ds:keyspace(), + emqx_ds:shard_id(), + emqx_ds:topic_filter(), + emqx_ds:time() +) -> + [emqx_ds_storage_layer:stream()]. get_streams(Node, Keyspace, ShardId, TopicFilter, StartTime) -> erpc:call( - Node, - emqx_ds_storage_layer, - get_streams, - [Keyspace, ShardId, TopicFilter, StartTime]). + Node, + emqx_ds_storage_layer, + get_streams, + [Keyspace, ShardId, TopicFilter, StartTime] + ). -spec close_iterator( [node()], diff --git a/apps/emqx_durable_storage/src/emqx_ds.erl b/apps/emqx_durable_storage/src/emqx_ds.erl index 70cdd8d17..6a20afbf1 100644 --- a/apps/emqx_durable_storage/src/emqx_ds.erl +++ b/apps/emqx_durable_storage/src/emqx_ds.erl @@ -13,31 +13,44 @@ %% See the License for the specific language governing permissions and %% limitations under the License. %%-------------------------------------------------------------------- + +%% @doc Main interface module for `emqx_durable_storage' application. +%% +%% It takes care of forwarding calls to the underlying DBMS. Currently +%% only the embedded `emqx_ds_replication_layer' storage is supported, +%% so all the calls are simply passed through. -module(emqx_ds). %% Management API: -export([open_db/2]). %% Message storage API: --export([message_store/1, message_store/2, message_store/3]). 
+-export([store_batch/1, store_batch/2, store_batch/3]). %% Message replay API: --export([get_streams/3, open_iterator/2, next/2]). +-export([get_streams/3, make_iterator/2, next/2]). -%% internal exports: +%% Misc. API: -export([]). --export_type([db/0, time/0, topic_filter/0, topic/0]). +-export_type([ + db/0, + time/0, + topic_filter/0, + topic/0, + stream/0, + stream_rank/0, + iterator/0, + next_result/1, next_result/0, + store_batch_result/0, + make_iterator_result/1, make_iterator_result/0 +]). %%================================================================================ %% Type declarations %%================================================================================ -%% Different DBs are completely independent from each other. They -%% could represent something like different tenants. -%% -%% Topics stored in different DBs aren't necesserily disjoint. --type db() :: binary(). +-type db() :: emqx_ds_replication_layer:db(). %% Parsed topic. -type topic() :: list(binary()). @@ -45,30 +58,22 @@ %% Parsed topic filter. -type topic_filter() :: list(binary() | '+' | '#' | ''). -%% This record enapsulates the stream entity from the replication -%% level. -%% -%% TODO: currently the stream is hardwired to only support the -%% internal rocksdb storage. In t he future we want to add another -%% implementations for emqx_ds, so this type has to take this into -%% account. --record(stream, - { shard :: emqx_ds_replication_layer:shard() - , enc :: emqx_ds_replication_layer:stream() - }). - -type stream_rank() :: {integer(), integer()}. --opaque stream() :: #stream{}. +-opaque stream() :: emqx_ds_replication_layer:stream(). -%% This record encapsulates the iterator entity from the replication -%% level. --record(iterator, - { shard :: emqx_ds_replication_layer:shard() - , enc :: enqx_ds_replication_layer:iterator() - }). +-opaque iterator() :: emqx_ds_replication_layer:iterator(). --opaque iterator() :: #iterator{}. +-type store_batch_result() :: ok | {error, _}. 
+ +-type make_iterator_result(Iterator) :: {ok, Iterator} | {error, _}. + +-type make_iterator_result() :: make_iterator_result(iterator()). + +-type next_result(Iterator) :: + {ok, Iterator, [emqx_types:message()]} | {ok, end_of_stream} | {error, _}. + +-type next_result() :: next_result(iterator()). %% Timestamp %% Earliest possible timestamp is 0. @@ -78,7 +83,9 @@ -type message_store_opts() :: #{}. --type create_db_opts() :: #{}. +-type create_db_opts() :: + %% TODO: keyspace + #{}. -type message_id() :: emqx_ds_replication_layer:message_id(). @@ -88,24 +95,24 @@ %% API funcions %%================================================================================ +%% @doc Different DBs are completely independent from each other. They +%% could represent something like different tenants. -spec open_db(db(), create_db_opts()) -> ok. open_db(DB, Opts) -> emqx_ds_replication_layer:open_db(DB, Opts). --spec message_store([emqx_types:message()]) -> - {ok, [message_id()]} | {error, _}. -message_store(Msgs) -> - message_store(?DEFAULT_DB, Msgs, #{}). +-spec store_batch([emqx_types:message()]) -> store_batch_result(). +store_batch(Msgs) -> + store_batch(?DEFAULT_DB, Msgs, #{}). --spec message_store(db(), [emqx_types:message()], message_store_opts()) -> - {ok, [message_id()]} | {error, _}. -message_store(DB, Msgs, Opts) -> - emqx_ds_replication_layer:message_store(DB, Msgs, Opts). +-spec store_batch(db(), [emqx_types:message()], message_store_opts()) -> store_batch_result(). +store_batch(DB, Msgs, Opts) -> + emqx_ds_replication_layer:store_batch(DB, Msgs, Opts). %% TODO: Do we really need to return message IDs? It's extra work... --spec message_store(db(), [emqx_types:message()]) -> {ok, [message_id()]} | {error, _}. -message_store(DB, Msgs) -> - message_store(DB, Msgs, #{}). +-spec store_batch(db(), [emqx_types:message()]) -> {ok, [message_id()]} | {error, _}. +store_batch(DB, Msgs) -> + store_batch(DB, Msgs, #{}). 
%% @doc Get a list of streams needed for replaying a topic filter. %% @@ -113,56 +120,44 @@ message_store(DB, Msgs) -> %% different locations or even in different databases. A wildcard %% topic filter may require pulling data from any number of locations. %% -%% Stream is an abstraction exposed by `emqx_ds' that reflects the -%% notion that different topics can be stored differently, but hides -%% the implementation details. +%% Stream is an abstraction exposed by `emqx_ds' that, on one hand, +%% reflects the notion that different topics can be stored +%% differently, but hides the implementation details. %% %% Rules: %% -%% 1. New streams matching the topic filter can appear without notice, -%% so the replayer must periodically call this function to get the -%% updated list of streams. +%% 0. There is no 1-to-1 mapping between MQTT topics and streams. One +%% stream can contain any number of MQTT topics. +%% +%% 1. New streams matching the topic filter and start time can appear +%% without notice, so the replayer must periodically call this +%% function to get the updated list of streams. %% %% 2. Streams may depend on one another. Therefore, care should be %% taken while replaying them in parallel to avoid out-of-order %% replay. This function returns stream together with its -%% "coordinates": `{X, T, Stream}'. If X coordinate of two streams is -%% different, then they can be replayed in parallel. If it's the -%% same, then the stream with smaller T coordinate should be replayed -%% first. +%% "coordinate": `stream_rank()'. +%% +%% Stream rank is a tuple of two integers, let's call them X and Y. If +%% X coordinate of two streams is different, they are independent and +%% can be replayed in parallel. If it's the same, then the stream with +%% smaller Y coordinate should be replayed first. If Y coordinates are +%% equal, then the streams are independent. +%% +%% Stream is fully consumed when `next/3' function returns +%% `end_of_stream'. 
Then the client can proceed to replaying streams +%% that depend on the given one. -spec get_streams(db(), topic_filter(), time()) -> [{stream_rank(), stream()}]. get_streams(DB, TopicFilter, StartTime) -> - Shards = emqx_ds_replication_layer:list_shards(DB), - lists:flatmap( - fun(Shard) -> - Streams = emqx_ds_replication_layer:get_streams(Shard, TopicFilter, StartTime), - [{Rank, #stream{ shard = Shard - , enc = I - }} || {Rank, I} <- Streams] - end, - Shards). + emqx_ds_replication_layer:get_streams(DB, TopicFilter, StartTime). --spec open_iterator(stream(), time()) -> {ok, iterator()} | {error, _}. -open_iterator(#stream{shard = Shard, enc = Stream}, StartTime) -> - case emqx_ds_replication_layer:open_iterator(Shard, Stream, StartTime) of - {ok, Iter} -> - {ok, #iterator{shard = Shard, enc = Iter}}; - Err = {error, _} -> - Err - end. +-spec make_iterator(stream(), time()) -> make_iterator_result(). +make_iterator(Stream, StartTime) -> + emqx_ds_replication_layer:make_iterator(Stream, StartTime). --spec next(iterator(), pos_integer()) -> {ok, iterator(), [emqx_types:message()]} | end_of_stream. -next(#iterator{shard = Shard, enc = Iter0}, BatchSize) -> - case emqx_ds_replication_layer:next(Shard, Iter0, BatchSize) of - {ok, Iter, Batch} -> - {ok, #iterator{shard = Shard, enc = Iter}, Batch}; - end_of_stream -> - end_of_stream - end. - -%%================================================================================ -%% behavior callbacks -%%================================================================================ +-spec next(iterator(), pos_integer()) -> next_result(). +next(Iter, BatchSize) -> + emqx_ds_replication_layer:next(Iter, BatchSize). 
%%================================================================================ %% Internal exports diff --git a/apps/emqx_durable_storage/src/emqx_ds_lts.erl b/apps/emqx_durable_storage/src/emqx_ds_lts.erl index 9d206ee81..fcc9f2b36 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_lts.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_lts.erl @@ -34,7 +34,8 @@ %% Type declarations %%================================================================================ --define(EOT, []). %% End Of Topic +%% End Of Topic +-define(EOT, []). -define(PLUS, '+'). -type edge() :: binary() | ?EOT | ?PLUS. @@ -49,17 +50,17 @@ -type threshold_fun() :: fun((non_neg_integer()) -> non_neg_integer()). --record(trie, - { trie :: ets:tid() - , stats :: ets:tid() - }). +-record(trie, { + trie :: ets:tid(), + stats :: ets:tid() +}). -opaque trie() :: #trie{}. --record(trans, - { key :: {state(), edge()} - , next :: state() - }). +-record(trans, { + key :: {state(), edge()}, + next :: state() +}). %%================================================================================ %% API funcions @@ -70,9 +71,10 @@ trie_create() -> Trie = ets:new(trie, [{keypos, #trans.key}, set]), Stats = ets:new(stats, [{keypos, 1}, set]), - #trie{ trie = Trie - , stats = Stats - }. + #trie{ + trie = Trie, + stats = Stats + }. %% @doc Create a topic key, -spec topic_key(trie(), threshold_fun(), [binary()]) -> msg_storage_key(). @@ -86,7 +88,7 @@ lookup_topic_key(Trie, Tokens) -> %% @doc Return list of keys of topics that match a given topic filter -spec match_topics(trie(), [binary() | '+' | '#']) -> - [{static_key(), _Varying :: binary() | ?PLUS}]. + [{static_key(), _Varying :: binary() | ?PLUS}]. match_topics(Trie, TopicFilter) -> do_match_topics(Trie, ?PREFIX, [], TopicFilter). 
@@ -96,38 +98,43 @@ dump_to_dot(#trie{trie = Trie, stats = Stats}, Filename) -> L = ets:tab2list(Trie), {Nodes0, Edges} = lists:foldl( - fun(#trans{key = {From, Label}, next = To}, {AccN, AccEdge}) -> - Edge = {From, To, Label}, - {[From, To] ++ AccN, [Edge|AccEdge]} - end, - {[], []}, - L), + fun(#trans{key = {From, Label}, next = To}, {AccN, AccEdge}) -> + Edge = {From, To, Label}, + {[From, To] ++ AccN, [Edge | AccEdge]} + end, + {[], []}, + L + ), Nodes = lists:map( - fun(Node) -> - case ets:lookup(Stats, Node) of - [{_, NChildren}] -> ok; - [] -> NChildren = 0 - end, - {Node, NChildren} - end, - lists:usort(Nodes0)), - {ok, FD} = file:open(Filename, [write]), - Print = fun (?PREFIX) -> "prefix"; - (NodeId) -> binary:encode_hex(NodeId) + fun(Node) -> + case ets:lookup(Stats, Node) of + [{_, NChildren}] -> ok; + [] -> NChildren = 0 + end, + {Node, NChildren} end, + lists:usort(Nodes0) + ), + {ok, FD} = file:open(Filename, [write]), + Print = fun + (?PREFIX) -> "prefix"; + (NodeId) -> binary:encode_hex(NodeId) + end, io:format(FD, "digraph {~n", []), lists:foreach( - fun({Node, NChildren}) -> - Id = Print(Node), - io:format(FD, " \"~s\" [label=\"~s : ~p\"];~n", [Id, Id, NChildren]) - end, - Nodes), + fun({Node, NChildren}) -> + Id = Print(Node), + io:format(FD, " \"~s\" [label=\"~s : ~p\"];~n", [Id, Id, NChildren]) + end, + Nodes + ), lists:foreach( - fun({From, To, Label}) -> - io:format(FD, " \"~s\" -> \"~s\" [label=\"~s\"];~n", [Print(From), Print(To), Label]) - end, - Edges), + fun({From, To, Label}) -> + io:format(FD, " \"~s\" -> \"~s\" [label=\"~s\"];~n", [Print(From), Print(To), Label]) + end, + Edges + ), io:format(FD, "}~n", []), file:close(FD). @@ -135,12 +142,12 @@ dump_to_dot(#trie{trie = Trie, stats = Stats}, Filename) -> %% Internal exports %%================================================================================ --spec trie_next(trie(), state(), binary() | ?EOT) -> {Wildcard, state()} | undefined - when Wildcard :: boolean(). 
+-spec trie_next(trie(), state(), binary() | ?EOT) -> {Wildcard, state()} | undefined when + Wildcard :: boolean(). trie_next(#trie{trie = Trie}, State, ?EOT) -> case ets:lookup(Trie, {State, ?EOT}) of [#trans{next = Next}] -> {false, Next}; - [] -> undefined + [] -> undefined end; trie_next(#trie{trie = Trie}, State, Token) -> case ets:lookup(Trie, {State, ?PLUS}) of @@ -149,25 +156,27 @@ trie_next(#trie{trie = Trie}, State, Token) -> [] -> case ets:lookup(Trie, {State, Token}) of [#trans{next = Next}] -> {false, Next}; - [] -> undefined + [] -> undefined end end. --spec trie_insert(trie(), state(), edge()) -> {Updated, state()} - when Updated :: false | non_neg_integer(). +-spec trie_insert(trie(), state(), edge()) -> {Updated, state()} when + Updated :: false | non_neg_integer(). trie_insert(#trie{trie = Trie, stats = Stats}, State, Token) -> Key = {State, Token}, NewState = get_id_for_key(State, Token), - Rec = #trans{ key = Key - , next = NewState - }, + Rec = #trans{ + key = Key, + next = NewState + }, case ets:insert_new(Trie, Rec) of true -> - Inc = case Token of - ?EOT -> 0; - ?PLUS -> 0; - _ -> 1 - end, + Inc = + case Token of + ?EOT -> 0; + ?PLUS -> 0; + _ -> 1 + end, NChildren = ets:update_counter(Stats, State, {2, Inc}, {State, 0}), {NChildren, NewState}; false -> @@ -202,69 +211,75 @@ get_id_for_key(_State, _Token) -> do_match_topics(Trie, State, Varying, []) -> case trie_next(Trie, State, ?EOT) of {false, Static} -> [{Static, lists:reverse(Varying)}]; - undefined -> [] + undefined -> [] end; do_match_topics(Trie, State, Varying, ['#']) -> Emanating = emanating(Trie, State, ?PLUS), lists:flatmap( - fun({?EOT, Static}) -> - [{Static, lists:reverse(Varying)}]; - ({?PLUS, NextState}) -> - do_match_topics(Trie, NextState, [?PLUS|Varying], ['#']); - ({_, NextState}) -> - do_match_topics(Trie, NextState, Varying, ['#']) - end, - Emanating); -do_match_topics(Trie, State, Varying, [Level|Rest]) -> + fun + ({?EOT, Static}) -> + [{Static, 
lists:reverse(Varying)}]; + ({?PLUS, NextState}) -> + do_match_topics(Trie, NextState, [?PLUS | Varying], ['#']); + ({_, NextState}) -> + do_match_topics(Trie, NextState, Varying, ['#']) + end, + Emanating + ); +do_match_topics(Trie, State, Varying, [Level | Rest]) -> Emanating = emanating(Trie, State, Level), lists:flatmap( - fun({?EOT, _NextState}) -> - []; - ({?PLUS, NextState}) -> - do_match_topics(Trie, NextState, [Level|Varying], Rest); - ({_, NextState}) -> - do_match_topics(Trie, NextState, Varying, Rest) - end, - Emanating). + fun + ({?EOT, _NextState}) -> + []; + ({?PLUS, NextState}) -> + do_match_topics(Trie, NextState, [Level | Varying], Rest); + ({_, NextState}) -> + do_match_topics(Trie, NextState, Varying, Rest) + end, + Emanating + ). -spec do_lookup_topic_key(trie(), state(), [binary()], [binary()]) -> - {ok, msg_storage_key()} | undefined. + {ok, msg_storage_key()} | undefined. do_lookup_topic_key(Trie, State, [], Varying) -> - case trie_next(Trie, State, ?EOT) of - {false, Static} -> - {ok, {Static, lists:reverse(Varying)}}; - undefined -> - undefined - end; -do_lookup_topic_key(Trie, State, [Tok|Rest], Varying) -> - case trie_next(Trie, State, Tok) of - {true, NextState} -> - do_lookup_topic_key(Trie, NextState, Rest, [Tok|Varying]); - {false, NextState} -> - do_lookup_topic_key(Trie, NextState, Rest, Varying); - undefined -> - undefined - end. + case trie_next(Trie, State, ?EOT) of + {false, Static} -> + {ok, {Static, lists:reverse(Varying)}}; + undefined -> + undefined + end; +do_lookup_topic_key(Trie, State, [Tok | Rest], Varying) -> + case trie_next(Trie, State, Tok) of + {true, NextState} -> + do_lookup_topic_key(Trie, NextState, Rest, [Tok | Varying]); + {false, NextState} -> + do_lookup_topic_key(Trie, NextState, Rest, Varying); + undefined -> + undefined + end. 
do_topic_key(Trie, _, _, State, [], Varying) -> {_, false, Static} = trie_next_(Trie, State, ?EOT), {Static, lists:reverse(Varying)}; -do_topic_key(Trie, ThresholdFun, Depth, State, [Tok|Rest], Varying0) -> - Threshold = ThresholdFun(Depth), % TODO: it's not necessary to call it every time. - Varying = case trie_next_(Trie, State, Tok) of - {NChildren, _, _DiscardState} when is_integer(NChildren), NChildren > Threshold -> - {_, NextState} = trie_insert(Trie, State, ?PLUS), - [Tok|Varying0]; - {_, false, NextState} -> - Varying0; - {_, true, NextState} -> - [Tok|Varying0] - end, +do_topic_key(Trie, ThresholdFun, Depth, State, [Tok | Rest], Varying0) -> + % TODO: it's not necessary to call it every time. + Threshold = ThresholdFun(Depth), + Varying = + case trie_next_(Trie, State, Tok) of + {NChildren, _, _DiscardState} when is_integer(NChildren), NChildren > Threshold -> + {_, NextState} = trie_insert(Trie, State, ?PLUS), + [Tok | Varying0]; + {_, false, NextState} -> + Varying0; + {_, true, NextState} -> + [Tok | Varying0] + end, do_topic_key(Trie, ThresholdFun, Depth + 1, NextState, Rest, Varying). --spec trie_next_(trie(), state(), binary() | ?EOT) -> {New, Wildcard, state()} - when New :: false | non_neg_integer(), - Wildcard :: boolean(). +-spec trie_next_(trie(), state(), binary() | ?EOT) -> {New, Wildcard, state()} when + New :: false | non_neg_integer(), + Wildcard :: boolean(). trie_next_(Trie, State, Token) -> case trie_next(Trie, State, Token) of {Wildcard, NextState} -> @@ -278,19 +293,26 @@ trie_next_(Trie, State, Token) -> %% erlfmt-ignore -spec emanating(trie(), state(), edge()) -> [{edge(), state()}]. 
emanating(#trie{trie = Tab}, State, ?PLUS) -> - ets:select(Tab, ets:fun2ms( - fun(#trans{key = {S, Edge}, next = Next}) when S == State -> - {Edge, Next} - end)); + ets:select( + Tab, + ets:fun2ms( + fun(#trans{key = {S, Edge}, next = Next}) when S == State -> + {Edge, Next} + end + ) + ); emanating(#trie{trie = Tab}, State, ?EOT) -> case ets:lookup(Tab, {State, ?EOT}) of [#trans{next = Next}] -> [{?EOT, Next}]; - [] -> [] + [] -> [] end; emanating(#trie{trie = Tab}, State, Bin) when is_binary(Bin) -> - [{Edge, Next} || #trans{key = {_, Edge}, next = Next} <- - ets:lookup(Tab, {State, ?PLUS}) ++ - ets:lookup(Tab, {State, Bin})]. + [ + {Edge, Next} + || #trans{key = {_, Edge}, next = Next} <- + ets:lookup(Tab, {State, ?PLUS}) ++ + ets:lookup(Tab, {State, Bin}) + ]. %%================================================================================ %% Tests @@ -325,56 +347,71 @@ lookup_key_test() -> {_, S1} = trie_insert(T, ?PREFIX, <<"foo">>), {_, S11} = trie_insert(T, S1, <<"foo">>), %% Topics don't match until we insert ?EOT: - ?assertMatch( undefined - , lookup_topic_key(T, [<<"foo">>]) - ), - ?assertMatch( undefined - , lookup_topic_key(T, [<<"foo">>, <<"foo">>]) - ), + ?assertMatch( + undefined, + lookup_topic_key(T, [<<"foo">>]) + ), + ?assertMatch( + undefined, + lookup_topic_key(T, [<<"foo">>, <<"foo">>]) + ), {_, S10} = trie_insert(T, S1, ?EOT), {_, S110} = trie_insert(T, S11, ?EOT), - ?assertMatch( {ok, {S10, []}} - , lookup_topic_key(T, [<<"foo">>]) - ), - ?assertMatch( {ok, {S110, []}} - , lookup_topic_key(T, [<<"foo">>, <<"foo">>]) - ), + ?assertMatch( + {ok, {S10, []}}, + lookup_topic_key(T, [<<"foo">>]) + ), + ?assertMatch( + {ok, {S110, []}}, + lookup_topic_key(T, [<<"foo">>, <<"foo">>]) + ), %% The rest of keys still don't match: - ?assertMatch( undefined - , lookup_topic_key(T, [<<"bar">>]) - ), - ?assertMatch( undefined - , lookup_topic_key(T, [<<"bar">>, <<"foo">>]) - ). 
+ ?assertMatch( + undefined, + lookup_topic_key(T, [<<"bar">>]) + ), + ?assertMatch( + undefined, + lookup_topic_key(T, [<<"bar">>, <<"foo">>]) + ). wildcard_lookup_test() -> T = trie_create(), {1, S1} = trie_insert(T, ?PREFIX, <<"foo">>), - {0, S11} = trie_insert(T, S1, ?PLUS), %% Plus doesn't increase the number of children + %% Plus doesn't increase the number of children + {0, S11} = trie_insert(T, S1, ?PLUS), {1, S111} = trie_insert(T, S11, <<"foo">>), - {0, S1110} = trie_insert(T, S111, ?EOT), %% ?EOT doesn't increase the number of children - ?assertMatch( {ok, {S1110, [<<"bar">>]}} - , lookup_topic_key(T, [<<"foo">>, <<"bar">>, <<"foo">>]) - ), - ?assertMatch( {ok, {S1110, [<<"quux">>]}} - , lookup_topic_key(T, [<<"foo">>, <<"quux">>, <<"foo">>]) - ), - ?assertMatch( undefined - , lookup_topic_key(T, [<<"foo">>]) - ), - ?assertMatch( undefined - , lookup_topic_key(T, [<<"foo">>, <<"bar">>]) - ), - ?assertMatch( undefined - , lookup_topic_key(T, [<<"foo">>, <<"bar">>, <<"bar">>]) - ), - ?assertMatch( undefined - , lookup_topic_key(T, [<<"bar">>, <<"foo">>, <<"foo">>]) - ), + %% ?EOT doesn't increase the number of children + {0, S1110} = trie_insert(T, S111, ?EOT), + ?assertMatch( + {ok, {S1110, [<<"bar">>]}}, + lookup_topic_key(T, [<<"foo">>, <<"bar">>, <<"foo">>]) + ), + ?assertMatch( + {ok, {S1110, [<<"quux">>]}}, + lookup_topic_key(T, [<<"foo">>, <<"quux">>, <<"foo">>]) + ), + ?assertMatch( + undefined, + lookup_topic_key(T, [<<"foo">>]) + ), + ?assertMatch( + undefined, + lookup_topic_key(T, [<<"foo">>, <<"bar">>]) + ), + ?assertMatch( + undefined, + lookup_topic_key(T, [<<"foo">>, <<"bar">>, <<"bar">>]) + ), + ?assertMatch( + undefined, + lookup_topic_key(T, [<<"bar">>, <<"foo">>, <<"foo">>]) + ), {_, S10} = trie_insert(T, S1, ?EOT), - ?assertMatch( {ok, {S10, []}} - , lookup_topic_key(T, [<<"foo">>]) - ). + ?assertMatch( + {ok, {S10, []}}, + lookup_topic_key(T, [<<"foo">>]) + ). 
%% erlfmt-ignore topic_key_test() -> diff --git a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl index 846d2ca0c..5d4749c30 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl @@ -16,33 +16,52 @@ -module(emqx_ds_replication_layer). -export([ - list_shards/1, - open_db/2, - message_store/3, - get_streams/3, - open_iterator/3, - next/3 - ]). - + list_shards/1, + open_db/2, + store_batch/3, + get_streams/3, + make_iterator/2, + next/2 +]). %% internal exports: --export([ do_open_shard_v1/2, - do_get_streams_v1/3, - do_open_iterator_v1/3, - do_next_v1/3 - ]). +-export([ + do_open_shard_v1/2, + do_get_streams_v1/3, + do_make_iterator_v1/3, + do_next_v1/3 +]). --export_type([shard/0, stream/0, iterator/0, message_id/0]). +-export_type([shard_id/0, stream/0, iterator/0, message_id/0]). %%================================================================================ %% Type declarations %%================================================================================ --opaque stream() :: emqx_ds_storage_layer:stream(). +-type db() :: binary(). --type shard() :: binary(). +-type shard_id() :: binary(). --opaque iterator() :: emqx_ds_storage_layer:iterator(). +%% This record enapsulates the stream entity from the replication +%% level. +%% +%% TODO: currently the stream is hardwired to only support the +%% internal rocksdb storage. In t he future we want to add another +%% implementations for emqx_ds, so this type has to take this into +%% account. +-record(stream, { + shard :: emqx_ds_replication_layer:shard_id(), + enc :: emqx_ds_replication_layer:stream() +}). + +-opaque stream() :: stream(). + +-record(iterator, { + shard :: emqx_ds_replication_layer:shard_id(), + enc :: enqx_ds_replication_layer:iterator() +}). + +-opaque iterator() :: #iterator{}. -type message_id() :: emqx_ds_storage_layer:message_id(). 
@@ -50,44 +69,71 @@ %% API functions %%================================================================================ --spec list_shards(emqx_ds:db()) -> [shard()]. +-spec list_shards(emqx_ds:db()) -> [shard_id()]. list_shards(DB) -> %% TODO: milestone 5 lists:map( - fun(Node) -> - shard_id(DB, Node) - end, - list_nodes()). + fun(Node) -> + shard_id(DB, Node) + end, + list_nodes() + ). --spec open_db(emqx_ds:db(), emqx_ds:create_db_opts()) -> ok. +-spec open_db(emqx_ds:db(), emqx_ds:create_db_opts()) -> ok | {error, _}. open_db(DB, Opts) -> + %% TODO: improve error reporting, don't just crash lists:foreach( - fun(Node) -> - Shard = shard_id(DB, Node), - emqx_ds_proto_v1:open_shard(Node, Shard, Opts) - end, - list_nodes()). + fun(Node) -> + Shard = shard_id(DB, Node), + ok = emqx_ds_proto_v1:open_shard(Node, Shard, Opts) + end, + list_nodes() + ). --spec message_store(emqx_ds:db(), [emqx_types:message()], emqx_ds:message_store_opts()) -> - {ok, [message_id()]} | {error, _}. -message_store(DB, Msg, Opts) -> - %% TODO: milestone 5. Currently we store messages locally. - Shard = term_to_binary({DB, node()}), - emqx_ds_storage_layer:message_store(Shard, Msg, Opts). +-spec store_batch(emqx_ds:db(), [emqx_types:message()], emqx_ds:message_store_opts()) -> + emqx_ds:store_batch_result(). +store_batch(DB, Msg, Opts) -> + %% TODO: Currently we store messages locally. + Shard = shard_id(DB, node()), + emqx_ds_storage_layer:store_batch(Shard, Msg, Opts). --spec get_streams(shard(), emqx_ds:topic_filter(), emqx_ds:time()) -> [{emqx_ds:stream_rank(), stream()}]. -get_streams(Shard, TopicFilter, StartTime) -> +-spec get_streams(db(), emqx_ds:topic_filter(), emqx_ds:time()) -> + [{emqx_ds:stream_rank(), stream()}]. 
+get_streams(DB, TopicFilter, StartTime) -> + Shards = emqx_ds_replication_layer:list_shards(DB), + lists:flatmap( + fun(Shard) -> + Node = node_of_shard(Shard), + Streams = emqx_ds_proto_v1:get_streams(Node, Shard, TopicFilter, StartTime), + lists:map( + fun({RankY, Stream}) -> + RankX = erlang:phash2(Shard, 255), + Rank = {RankX, RankY}, + {Rank, #stream{ + shard = Shard, + enc = Stream + }} + end, + Streams + ) + end, + Shards + ). + +-spec make_iterator(stream(), emqx_ds:time()) -> emqx_ds:make_iterator_result(iterator()). +make_iterator(Stream, StartTime) -> + #stream{shard = Shard, enc = StorageStream} = Stream, Node = node_of_shard(Shard), - emqx_ds_proto_v1:get_streams(Node, Shard, TopicFilter, StartTime). + case emqx_ds_proto_v1:make_iterator(Node, Shard, StorageStream, StartTime) of + {ok, Iter} -> + {ok, #iterator{shard = Shard, enc = Iter}}; + Err = {error, _} -> + Err + end. --spec open_iterator(shard(), stream(), emqx_ds:time()) -> {ok, iterator()} | {error, _}. -open_iterator(Shard, Stream, StartTime) -> - Node = node_of_shard(Shard), - emqx_ds_proto_v1:open_iterator(Node, Shard, Stream, StartTime). - --spec next(shard(), iterator(), pos_integer()) -> - {ok, iterator(), [emqx_types:message()]} | end_of_stream. -next(Shard, Iter, BatchSize) -> +-spec next(iterator(), pos_integer()) -> emqx_ds:next_result(iterator()). +next(Iter0, BatchSize) -> + #iterator{shard = Shard, enc = StorageIter0} = Iter0, Node = node_of_shard(Shard), %% TODO: iterator can contain information that is useful for %% reconstructing messages sent over the network. For example, @@ -97,7 +143,13 @@ next(Shard, Iter, BatchSize) -> %% %% This kind of trickery should be probably done here in the %% replication layer. Or, perhaps, in the logic lary. - emqx_ds_proto_v1:next(Node, Shard, Iter, BatchSize). 
+ case emqx_ds_proto_v1:next(Node, Shard, StorageIter0, BatchSize) of + {ok, StorageIter, Batch} -> + Iter = #iterator{shard = Shard, enc = StorageIter}, + {ok, Iter, Batch}; + Other -> + Other + end. %%================================================================================ %% behavior callbacks @@ -107,35 +159,38 @@ next(Shard, Iter, BatchSize) -> %% Internal exports (RPC targets) %%================================================================================ --spec do_open_shard_v1(shard(), emqx_ds:create_db_opts()) -> ok. +-spec do_open_shard_v1(shard_id(), emqx_ds:create_db_opts()) -> ok. do_open_shard_v1(Shard, Opts) -> - emqx_ds_storage_layer_sup:ensure_shard(Shard, Opts). + emqx_ds_storage_layer:open_shard(Shard, Opts). --spec do_get_streams_v1(shard(), emqx_ds:topic_filter(), emqx_ds:time()) -> - [{emqx_ds:stream_rank(), stream()}]. +-spec do_get_streams_v1(shard_id(), emqx_ds:topic_filter(), emqx_ds:time()) -> + [{integer(), _Stream}]. do_get_streams_v1(Shard, TopicFilter, StartTime) -> - error({todo, Shard, TopicFilter, StartTime}). + emqx_ds_storage_layer:get_streams(Shard, TopicFilter, StartTime). --spec do_open_iterator_v1(shard(), stream(), emqx_ds:time()) -> iterator(). -do_open_iterator_v1(Shard, Stream, StartTime) -> - error({todo, Shard, Stream, StartTime}). +-spec do_make_iterator_v1(shard_id(), _Stream, emqx_ds:time()) -> {ok, iterator()} | {error, _}. +do_make_iterator_v1(Shard, Stream, StartTime) -> + emqx_ds_storage_layer:make_iterator(Shard, Stream, StartTime). --spec do_next_v1(shard(), iterator(), non_neg_integer()) -> - {ok, iterator(), [emqx_types:message()]} | end_of_stream. +-spec do_next_v1(shard_id(), Iter, pos_integer()) -> emqx_ds:next_result(Iter). do_next_v1(Shard, Iter, BatchSize) -> - error({todo, Shard, Iter, BatchSize}). + emqx_ds_storage_layer:next(Shard, Iter, BatchSize). 
%%================================================================================ %% Internal functions %%================================================================================ +add_shard_to_rank(Shard, RankY) -> + RankX = erlang:phash2(Shard, 255), + {RankX, RankY}. + shard_id(DB, Node) -> %% TODO: don't bake node name into the schema, don't repeat the %% Mnesia's 1M$ mistake. NodeBin = atom_to_binary(Node), - <>. + <>. --spec node_of_shard(shard()) -> node(). +-spec node_of_shard(shard_id()) -> node(). node_of_shard(ShardId) -> [_DB, NodeBin] = binary:split(ShardId, <<":">>), binary_to_atom(NodeBin). diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl index 93c1aaa1f..fdd81a095 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl @@ -1,332 +1,240 @@ %%-------------------------------------------------------------------- -%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. %%-------------------------------------------------------------------- -module(emqx_ds_storage_layer). -behaviour(gen_server). -%% API: --export([start_link/2]). --export([create_generation/3]). 
+%% Replication layer API: +-export([open_shard/2, store_batch/3, get_streams/3, make_iterator/3, next/3]). --export([get_streams/3]). --export([message_store/3]). --export([delete/4]). +%% gen_server +-export([start_link/2, init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2]). --export([make_iterator/2, next/1, next/2]). +%% internal exports: +-export([]). --export([ - preserve_iterator/2, - restore_iterator/2, - discard_iterator/2, - ensure_iterator/3, - discard_iterator_prefix/2, - list_iterator_prefix/2, - foldl_iterator_prefix/4 -]). - -%% behaviour callbacks: --export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2]). - --export_type([stream/0, cf_refs/0, gen_id/0, options/0, state/0, iterator/0]). --export_type([db_options/0, db_write_options/0, db_read_options/0]). - --compile({inline, [meta_lookup/2]}). - --include_lib("emqx/include/emqx.hrl"). +-export_type([gen_id/0, generation/0, cf_refs/0, stream/0, iterator/0]). %%================================================================================ %% Type declarations %%================================================================================ --type stream() :: term(). %% Opaque term returned by the generation callback module - --type options() :: #{ - dir => file:filename() -}. - -%% see rocksdb:db_options() --type db_options() :: proplists:proplist(). -%% see rocksdb:write_options() --type db_write_options() :: proplists:proplist(). -%% see rocksdb:read_options() --type db_read_options() :: proplists:proplist(). +-type shard_id() :: emqx_ds_replication_layer:shard_id(). -type cf_refs() :: [{string(), rocksdb:cf_handle()}]. -%% Message storage generation -%% Keep in mind that instances of this type are persisted in long-term storage. --type generation() :: #{ - %% Module that handles data for the generation +-type gen_id() :: 0..16#ffff. + +%% Note: this record might be stored permanently on a remote node. 
+-record(stream, { + generation :: gen_id(), + enc :: _EncapsultatedData, + misc = #{} :: map() +}). + +-opaque stream() :: #stream{}. + +%% Note: this record might be stored permanently on a remote node. +-record(it, { + generation :: gen_id(), + enc :: _EncapsultatedData, + misc = #{} :: map() +}). + +-opaque iterator() :: #it{}. + +%%%% Generation: + +-type generation(Data) :: #{ + %% Module that handles data for the generation: module := module(), - %% Module-specific data defined at generation creation time - data := term(), + %% Module-specific data defined at generation creation time: + data := Data, %% When should this generation become active? %% This generation should only contain messages timestamped no earlier than that. %% The very first generation will have `since` equal 0. - since := emqx_ds:time() + since := emqx_ds:time(), + until := emqx_ds:time() | undefined }. --record(s, { - shard :: emqx_ds:shard(), - keyspace :: emqx_ds_conf:keyspace(), - db :: rocksdb:db_handle(), - cf_iterator :: rocksdb:cf_handle(), - cf_generations :: cf_refs() -}). +%% Schema for a generation. Persistent term. +-type generation_schema() :: generation(term()). --record(it, { - shard :: emqx_ds:shard(), - gen :: gen_id(), - replay :: emqx_ds:replay(), - module :: module(), - data :: term() -}). +%% Runtime view of generation: +-type generation() :: generation(term()). --type gen_id() :: 0..16#ffff. +%%%% Shard: --opaque state() :: #s{}. --opaque iterator() :: #it{}. +-type shard(GenData) :: #{ + current_generation := gen_id(), + default_generation_module := module(), + default_generation_config := term(), + {generation, gen_id()} => GenData +}. -%% Contents of the default column family: -%% -%% [{<<"genNN">>, #generation{}}, ..., -%% {<<"current">>, GenID}] +%% Shard schema (persistent): +-type shard_schema() :: shard(generation_schema()). --define(DEFAULT_CF, "default"). --define(DEFAULT_CF_OPTS, []). +%% Shard (runtime): +-type shard() :: shard(generation()). 
--define(ITERATOR_CF, "$iterators"). +%%================================================================================ +%% Generation callbacks +%%================================================================================ -%% TODO -%% 1. CuckooTable might be of use here / `OptimizeForPointLookup(...)`. -%% 2. Supposedly might be compressed _very_ effectively. -%% 3. `inplace_update_support`? --define(ITERATOR_CF_OPTS, []). +%% Create the new schema given generation id and the options. +%% Create rocksdb column families. +-callback create(shard_id(), rocksdb:db_handle(), gen_id(), _Options) -> + {_Schema, cf_refs()}. + +%% Open the existing schema +-callback open(shard_id(), rocsdb:db_handle(), gen_id(), cf_refs(), _Schema) -> + _Data. + +-callback store_batch(shard_id(), _Data, [emqx_types:message()], emqx_ds:message_store_opts()) -> + ok. + +-callback get_streams(shard_id(), _Data, emqx_ds:topic_filter(), emqx_ds:time()) -> + [_Stream]. + +-callback make_iterator(shard_id(), _Data, _Stream, emqx_ds:time()) -> + emqx_ds:make_iterator_result(_Iterator). + +-callback next(shard_id(), _Data, Iter, pos_integer()) -> + {ok, Iter, [emqx_types:message()]} | {error, _}. + +%%================================================================================ +%% API for the replication layer +%%================================================================================ + +-spec open_shard(shard_id(), emqx_ds:create_db_opts()) -> ok. +open_shard(Shard, Options) -> + emqx_ds_storage_layer_sup:ensure_shard(Shard, Options). + +-spec store_batch(shard_id(), [emqx_types:message()], emqx_ds:message_store_opts()) -> + emqx_ds:store_batch_result(). +store_batch(Shard, Messages, Options) -> + %% We always store messages in the current generation: + GenId = generation_current(Shard), + #{module := Mod, data := GenData} = generation_get(Shard, GenId), + Mod:store_batch(Shard, GenData, Messages, Options). 
+ +-spec get_streams(shard_id(), emqx_ds:topic_filter(), emqx_ds:time()) -> + [{integer(), stream()}]. +get_streams(Shard, TopicFilter, StartTime) -> + Gens = generations_since(Shard, StartTime), + lists:flatmap( + fun(GenId) -> + #{module := Mod, data := GenData} = generation_get(Shard, GenId), + Streams = Mod:get_streams(Shard, GenData, TopicFilter, StartTime), + [ + {GenId, #stream{ + generation = GenId, + enc = Stream + }} + || Stream <- Streams + ] + end, + Gens + ). + +-spec make_iterator(shard_id(), stream(), emqx_ds:time()) -> + emqx_ds:make_iterator_result(iterator()). +make_iterator(Shard, #stream{generation = GenId, enc = Stream}, StartTime) -> + #{module := Mod, data := GenData} = generation_get(Shard, GenId), + case Mod:make_iterator(Shard, GenData, Stream, StartTime) of + {ok, Iter} -> + {ok, #it{ + generation = GenId, + enc = Iter + }}; + {error, _} = Err -> + Err + end. + +-spec next(shard_id(), iterator(), pos_integer()) -> + emqx_ds:next_result(iterator()). +next(Shard, Iter = #it{generation = GenId, enc = GenIter0}, BatchSize) -> + #{module := Mod, data := GenData} = generation_get(Shard, GenId), + Current = generation_current(Shard), + case Mod:next(Shard, GenData, GenIter0, BatchSize) of + {ok, _GenIter, []} when GenId < Current -> + %% This is a past generation. Storage layer won't write + %% any more messages here. The iterator reached the end: + %% the stream has been fully replayed. + {ok, end_of_stream}; + {ok, GenIter, Batch} -> + {ok, Iter#it{enc = GenIter}, Batch}; + Error = {error, _} -> + Error + end. + +%%================================================================================ +%% gen_server for the shard +%%================================================================================ -define(REF(ShardId), {via, gproc, {n, l, {?MODULE, ShardId}}}). 
-%%================================================================================ -%% Callbacks -%%================================================================================ - --callback create_new(rocksdb:db_handle(), gen_id(), _Options :: term()) -> - {_Schema, cf_refs()}. - --callback open( - emqx_ds:shard(), - rocksdb:db_handle(), - gen_id(), - cf_refs(), - _Schema -) -> - _DB. - --callback store( - _DB, - _MessageID :: binary(), - emqx_ds:time(), - emqx_ds:topic(), - _Payload :: binary() -) -> - ok | {error, _}. - --callback delete(_DB, _MessageID :: binary(), emqx_ds:time(), emqx_ds:topic()) -> - ok | {error, _}. - --callback get_streams(_DB, emqx_ds:topic_filter(), emqx_ds:time()) -> - [_Stream]. - --callback make_iterator(_DB, emqx_ds:replay()) -> - {ok, _It} | {error, _}. - --callback restore_iterator(_DB, _Serialized :: binary()) -> {ok, _It} | {error, _}. - --callback preserve_iterator(_It) -> term(). - --callback next(It) -> {value, binary(), It} | none | {error, closed}. - -%%================================================================================ -%% API funcions -%%================================================================================ - --spec start_link(emqx_ds:shard(), emqx_ds_storage_layer:options()) -> +-spec start_link(emqx_ds:shard_id(), emqx_ds:create_db_opts()) -> {ok, pid()}. start_link(Shard, Options) -> gen_server:start_link(?REF(Shard), ?MODULE, {Shard, Options}, []). --spec get_streams(emqx_ds:shard_id(), emqx_ds:topic_filter(), emqx_ds:time()) -> [_Stream]. -get_streams(_ShardId, _TopicFilter, _StartTime) -> - []. +-record(s, { + shard_id :: emqx_ds:shard_id(), + db :: rocksdb:db_handle(), + cf_refs :: cf_refs(), + schema :: shard_schema(), + shard :: shard() +}). +-type server_state() :: #s{}. --spec create_generation( - emqx_ds:shard(), emqx_ds:time(), emqx_ds_conf:backend_config() -) -> - {ok, gen_id()} | {error, nonmonotonic}. 
-create_generation(ShardId, Since, Config = {_Module, _Options}) -> - gen_server:call(?REF(ShardId), {create_generation, Since, Config}). +-define(DEFAULT_CF, "default"). +-define(DEFAULT_CF_OPTS, []). --spec message_store(emqx_ds:shard(), [emqx_types:message()], emqx_ds:message_store_opts()) -> - {ok, _MessageId} | {error, _}. -message_store(Shard, Msgs, _Opts) -> - {ok, lists:map( - fun(Msg) -> - GUID = emqx_message:id(Msg), - Timestamp = Msg#message.timestamp, - {_GenId, #{module := Mod, data := ModState}} = meta_lookup_gen(Shard, Timestamp), - Topic = emqx_topic:words(emqx_message:topic(Msg)), - Payload = serialize(Msg), - Mod:store(ModState, GUID, Timestamp, Topic, Payload), - GUID - end, - Msgs)}. - --spec delete(emqx_ds:shard(), emqx_guid:guid(), emqx_ds:time(), emqx_ds:topic()) -> - ok | {error, _}. -delete(Shard, GUID, Time, Topic) -> - {_GenId, #{module := Mod, data := Data}} = meta_lookup_gen(Shard, Time), - Mod:delete(Data, GUID, Time, Topic). - --spec make_iterator(emqx_ds:shard(), emqx_ds:replay()) -> - {ok, iterator()} | {error, _TODO}. -make_iterator(Shard, Replay = {_, StartTime}) -> - {GenId, Gen} = meta_lookup_gen(Shard, StartTime), - open_iterator(Gen, #it{ - shard = Shard, - gen = GenId, - replay = Replay - }). - --spec next(iterator()) -> {ok, iterator(), [binary()]} | end_of_stream. -next(It = #it{}) -> - next(It, _BatchSize = 1). - --spec next(iterator(), pos_integer()) -> {ok, iterator(), [binary()]} | end_of_stream. -next(#it{data = {?MODULE, end_of_stream}}, _BatchSize) -> - end_of_stream; -next( - It = #it{shard = Shard, module = Mod, gen = Gen, data = {?MODULE, retry, Serialized}}, BatchSize -) -> - #{data := DBData} = meta_get_gen(Shard, Gen), - {ok, ItData} = Mod:restore_iterator(DBData, Serialized), - next(It#it{data = ItData}, BatchSize); -next(It = #it{}, BatchSize) -> - do_next(It, BatchSize, _Acc = []). - --spec do_next(iterator(), non_neg_integer(), [binary()]) -> - {ok, iterator(), [binary()]} | end_of_stream. 
-do_next(It, N, Acc) when N =< 0 -> - {ok, It, lists:reverse(Acc)}; -do_next(It = #it{module = Mod, data = ItData}, N, Acc) -> - case Mod:next(ItData) of - {value, Bin, ItDataNext} -> - Val = deserialize(Bin), - do_next(It#it{data = ItDataNext}, N - 1, [Val | Acc]); - {error, _} = _Error -> - %% todo: log? - %% iterator might be invalid now; will need to re-open it. - Serialized = Mod:preserve_iterator(ItData), - {ok, It#it{data = {?MODULE, retry, Serialized}}, lists:reverse(Acc)}; - none -> - case open_next_iterator(It) of - {ok, ItNext} -> - do_next(ItNext, N, Acc); - {error, _} = _Error -> - %% todo: log? - %% fixme: only bad options may lead to this? - %% return an "empty" iterator to be re-opened when retrying? - Serialized = Mod:preserve_iterator(ItData), - {ok, It#it{data = {?MODULE, retry, Serialized}}, lists:reverse(Acc)}; - none -> - case Acc of - [] -> - end_of_stream; - _ -> - {ok, It#it{data = {?MODULE, end_of_stream}}, lists:reverse(Acc)} - end - end - end. - --spec preserve_iterator(iterator(), emqx_ds:iterator_id()) -> - ok | {error, _TODO}. -preserve_iterator(It = #it{}, IteratorID) -> - iterator_put_state(IteratorID, It). - --spec restore_iterator(emqx_ds:shard(), emqx_ds:replay_id()) -> - {ok, iterator()} | {error, _TODO}. -restore_iterator(Shard, ReplayID) -> - case iterator_get_state(Shard, ReplayID) of - {ok, Serial} -> - restore_iterator_state(Shard, Serial); - not_found -> - {error, not_found}; - {error, _Reason} = Error -> - Error - end. - --spec ensure_iterator(emqx_ds:shard(), emqx_ds:iterator_id(), emqx_ds:replay()) -> - {ok, iterator()} | {error, _TODO}. -ensure_iterator(Shard, IteratorID, Replay = {_TopicFilter, _StartMS}) -> - case restore_iterator(Shard, IteratorID) of - {ok, It} -> - {ok, It}; - {error, not_found} -> - {ok, It} = make_iterator(Shard, Replay), - ok = emqx_ds_storage_layer:preserve_iterator(It, IteratorID), - {ok, It}; - Error -> - Error - end. 
- --spec discard_iterator(emqx_ds:shard(), emqx_ds:replay_id()) -> - ok | {error, _TODO}. -discard_iterator(Shard, ReplayID) -> - iterator_delete(Shard, ReplayID). - --spec discard_iterator_prefix(emqx_ds:shard(), binary()) -> - ok | {error, _TODO}. -discard_iterator_prefix(Shard, KeyPrefix) -> - case do_discard_iterator_prefix(Shard, KeyPrefix) of - {ok, _} -> ok; - Error -> Error - end. - --spec list_iterator_prefix( - emqx_ds:shard(), - binary() -) -> {ok, [emqx_ds:iterator_id()]} | {error, _TODO}. -list_iterator_prefix(Shard, KeyPrefix) -> - do_list_iterator_prefix(Shard, KeyPrefix). - --spec foldl_iterator_prefix( - emqx_ds:shard(), - binary(), - fun((_Key :: binary(), _Value :: binary(), Acc) -> Acc), - Acc -) -> {ok, Acc} | {error, _TODO} when - Acc :: term(). -foldl_iterator_prefix(Shard, KeyPrefix, Fn, Acc) -> - do_foldl_iterator_prefix(Shard, KeyPrefix, Fn, Acc). - -%%================================================================================ -%% behaviour callbacks -%%================================================================================ - -init({Shard, Options}) -> +init({ShardId, Options}) -> process_flag(trap_exit, true), - {ok, S0} = open_db(Shard, Options), - S = ensure_current_generation(S0), - ok = populate_metadata(S), + erase_schema_runtime(ShardId), + {ok, DB, CFRefs0} = rocksdb_open(ShardId, Options), + {Schema, CFRefs} = + case get_schema_persistent(DB) of + not_found -> + create_new_shard_schema(ShardId, DB, CFRefs0, Options); + Scm -> + {Scm, CFRefs0} + end, + Shard = open_shard(ShardId, DB, CFRefs, Schema), + S = #s{ + shard_id = ShardId, + db = DB, + cf_refs = CFRefs, + schema = Schema, + shard = Shard + }, + commit_metadata(S), {ok, S}. 
-handle_call({create_generation, Since, Config}, _From, S) -> - case create_new_gen(Since, Config, S) of - {ok, GenId, NS} -> - {reply, {ok, GenId}, NS}; - {error, _} = Error -> - {reply, Error, S} - end; +%% handle_call({create_generation, Since, Config}, _From, S) -> +%% case create_new_gen(Since, Config, S) of +%% {ok, GenId, NS} -> +%% {reply, {ok, GenId}, NS}; +%% {error, _} = Error -> +%% {reply, Error, S} +%% end; handle_call(_Call, _From, S) -> {reply, {error, unknown_call}, S}. @@ -336,359 +244,156 @@ handle_cast(_Cast, S) -> handle_info(_Info, S) -> {noreply, S}. -terminate(_Reason, #s{db = DB, shard = Shard}) -> - meta_erase(Shard), +terminate(_Reason, #s{db = DB, shard_id = ShardId}) -> + erase_schema_runtime(ShardId), ok = rocksdb:close(DB). +%%================================================================================ +%% Internal exports +%%================================================================================ + %%================================================================================ %% Internal functions %%================================================================================ --record(db, {handle :: rocksdb:db_handle(), cf_iterator :: rocksdb:cf_handle()}). +-spec open_shard(shard_id(), rocksdb:db_handle(), cf_refs(), shard_schema()) -> + shard(). +open_shard(ShardId, DB, CFRefs, ShardSchema) -> + %% Transform generation schemas to generation runtime data: + maps:map( + fun + ({generation, GenId}, GenSchema) -> + open_generation(ShardId, DB, CFRefs, GenId, GenSchema); + (_K, Val) -> + Val + end, + ShardSchema + ). --spec populate_metadata(state()) -> ok. -populate_metadata(S = #s{shard = Shard, db = DBHandle, cf_iterator = CFIterator}) -> - ok = meta_put(Shard, db, #db{handle = DBHandle, cf_iterator = CFIterator}), - Current = schema_get_current(DBHandle), - lists:foreach(fun(GenId) -> populate_metadata(GenId, S) end, lists:seq(0, Current)). 
+-spec open_generation(shard_id(), rocksdb:db_handle(), cf_refs(), gen_id(), generation_schema()) -> + generation(). +open_generation(ShardId, DB, CFRefs, GenId, GenSchema) -> + #{module := Mod, data := Schema} = GenSchema, + RuntimeData = Mod:open(ShardId, DB, GenId, CFRefs, Schema), + GenSchema#{data => RuntimeData}. --spec populate_metadata(gen_id(), state()) -> ok. -populate_metadata(GenId, S = #s{shard = Shard, db = DBHandle}) -> - Gen = open_gen(GenId, schema_get_gen(DBHandle, GenId), S), - meta_register_gen(Shard, GenId, Gen). - --spec ensure_current_generation(state()) -> state(). -ensure_current_generation(S = #s{shard = _Shard, keyspace = Keyspace, db = DBHandle}) -> - case schema_get_current(DBHandle) of - undefined -> - Config = emqx_ds_conf:keyspace_config(Keyspace), - {ok, _, NS} = create_new_gen(0, Config, S), - NS; - _GenId -> - S - end. - --spec create_new_gen(emqx_ds:time(), emqx_ds_conf:backend_config(), state()) -> - {ok, gen_id(), state()} | {error, nonmonotonic}. -create_new_gen(Since, Config, S = #s{shard = Shard, db = DBHandle}) -> - GenId = get_next_id(meta_get_current(Shard)), - GenId = get_next_id(schema_get_current(DBHandle)), - case is_gen_valid(Shard, GenId, Since) of - ok -> - {ok, Gen, NS} = create_gen(GenId, Since, Config, S), - %% TODO: Transaction? Column family creation can't be transactional, anyway. - ok = schema_put_gen(DBHandle, GenId, Gen), - ok = schema_put_current(DBHandle, GenId), - ok = meta_register_gen(Shard, GenId, open_gen(GenId, Gen, NS)), - {ok, GenId, NS}; - {error, _} = Error -> - Error - end. - --spec create_gen(gen_id(), emqx_ds:time(), emqx_ds_conf:backend_config(), state()) -> - {ok, generation(), state()}. -create_gen(GenId, Since, {Module, Options}, S = #s{db = DBHandle, cf_generations = CFs}) -> - % TODO: Backend implementation should ensure idempotency. 
- {Schema, NewCFs} = Module:create_new(DBHandle, GenId, Options), - Gen = #{ - module => Module, - data => Schema, - since => Since +-spec create_new_shard_schema(shard_id(), rocksdb:db_handle(), cf_refs(), _Options) -> + {shard_schema(), cf_refs()}. +create_new_shard_schema(ShardId, DB, CFRefs, _Options) -> + GenId = 1, + %% TODO: read from options/config + Mod = emqx_ds_storage_reference, + ModConfig = #{}, + {GenData, NewCFRefs} = Mod:create(ShardId, DB, GenId, ModConfig), + GenSchema = #{module => Mod, data => GenData, since => 0, until => undefined}, + ShardSchema = #{ + current_generation => GenId, + default_generation_module => Mod, + default_generation_confg => ModConfig, + {generation, GenId} => GenSchema }, - {ok, Gen, S#s{cf_generations = NewCFs ++ CFs}}. + {ShardSchema, NewCFRefs ++ CFRefs}. --spec open_db(emqx_ds:shard(), options()) -> {ok, state()} | {error, _TODO}. -open_db(Shard, Options) -> +%% @doc Commit current state of the server to both rocksdb and the persistent term +-spec commit_metadata(server_state()) -> ok. +commit_metadata(#s{shard_id = ShardId, schema = Schema, shard = Runtime, db = DB}) -> + ok = put_schema_persistent(DB, Schema), + put_schema_runtime(ShardId, Runtime). + +-spec rocksdb_open(shard_id(), emqx_ds:create_db_opts()) -> + {ok, rocksdb:db_handle(), cf_refs()} | {error, _TODO}. +rocksdb_open(Shard, Options) -> DefaultDir = binary_to_list(Shard), DBDir = unicode:characters_to_list(maps:get(dir, Options, DefaultDir)), - %% TODO: properly forward keyspace - Keyspace = maps:get(keyspace, Options, default_keyspace), DBOptions = [ {create_if_missing, true}, {create_missing_column_families, true} - | emqx_ds_conf:db_options(Keyspace) + | maps:get(db_options, Options, []) ], _ = filelib:ensure_dir(DBDir), ExistingCFs = case rocksdb:list_column_families(DBDir, DBOptions) of {ok, CFs} -> - [{Name, []} || Name <- CFs, Name /= ?DEFAULT_CF, Name /= ?ITERATOR_CF]; + [{Name, []} || Name <- CFs, Name /= ?DEFAULT_CF]; % DB is not present. 
First start {error, {db_open, _}} -> [] end, ColumnFamilies = [ - {?DEFAULT_CF, ?DEFAULT_CF_OPTS}, - {?ITERATOR_CF, ?ITERATOR_CF_OPTS} + {?DEFAULT_CF, ?DEFAULT_CF_OPTS} | ExistingCFs ], case rocksdb:open(DBDir, DBOptions, ColumnFamilies) of - {ok, DBHandle, [_CFDefault, CFIterator | CFRefs]} -> + {ok, DBHandle, [_CFDefault | CFRefs]} -> {CFNames, _} = lists:unzip(ExistingCFs), - {ok, #s{ - shard = Shard, - keyspace = Keyspace, - db = DBHandle, - cf_iterator = CFIterator, - cf_generations = lists:zip(CFNames, CFRefs) - }}; + {ok, DBHandle, lists:zip(CFNames, CFRefs)}; Error -> Error end. --spec open_gen(gen_id(), generation(), state()) -> generation(). -open_gen( - GenId, - Gen = #{module := Mod, data := Data}, - #s{shard = Shard, db = DBHandle, cf_generations = CFs} -) -> - DB = Mod:open(Shard, DBHandle, GenId, CFs, Data), - Gen#{data := DB}. +%%-------------------------------------------------------------------------------- +%% Schema access +%%-------------------------------------------------------------------------------- --spec open_next_iterator(iterator()) -> {ok, iterator()} | {error, _Reason} | none. -open_next_iterator(It = #it{shard = Shard, gen = GenId}) -> - open_next_iterator(meta_get_gen(Shard, GenId + 1), It#it{gen = GenId + 1}). +-spec generation_current(shard_id()) -> gen_id(). +generation_current(Shard) -> + #{current_generation := Current} = get_schema_runtime(Shard), + Current. -open_next_iterator(undefined, _It) -> - none; -open_next_iterator(Gen = #{}, It) -> - open_iterator(Gen, It). +-spec generation_get(shard_id(), gen_id()) -> generation(). +generation_get(Shard, GenId) -> + #{{generation, GenId} := GenData} = get_schema_runtime(Shard), + GenData. --spec open_iterator(generation(), iterator()) -> {ok, iterator()} | {error, _Reason}. 
-open_iterator(#{module := Mod, data := Data}, It = #it{}) -> - Options = #{}, % TODO: passthrough options - case Mod:make_iterator(Data, It#it.replay, Options) of - {ok, ItData} -> - {ok, It#it{module = Mod, data = ItData}}; - Err -> - Err - end. - --spec open_restore_iterator(generation(), iterator(), binary()) -> - {ok, iterator()} | {error, _Reason}. -open_restore_iterator(#{module := Mod, data := Data}, It = #it{}, Serial) -> - case Mod:restore_iterator(Data, Serial) of - {ok, ItData} -> - {ok, It#it{module = Mod, data = ItData}}; - Err -> - Err - end. - -%% - --define(KEY_REPLAY_STATE(IteratorId), <<(IteratorId)/binary, "rs">>). --define(KEY_REPLAY_STATE_PAT(KeyReplayState), begin - <> = (KeyReplayState), - IteratorId -end). - --define(ITERATION_WRITE_OPTS, []). --define(ITERATION_READ_OPTS, []). - -iterator_get_state(Shard, ReplayID) -> - #db{handle = Handle, cf_iterator = CF} = meta_lookup(Shard, db), - rocksdb:get(Handle, CF, ?KEY_REPLAY_STATE(ReplayID), ?ITERATION_READ_OPTS). - -iterator_put_state(ID, It = #it{shard = Shard}) -> - #db{handle = Handle, cf_iterator = CF} = meta_lookup(Shard, db), - Serial = preserve_iterator_state(It), - rocksdb:put(Handle, CF, ?KEY_REPLAY_STATE(ID), Serial, ?ITERATION_WRITE_OPTS). - -iterator_delete(Shard, ID) -> - #db{handle = Handle, cf_iterator = CF} = meta_lookup(Shard, db), - rocksdb:delete(Handle, CF, ?KEY_REPLAY_STATE(ID), ?ITERATION_WRITE_OPTS). - -preserve_iterator_state(#it{ - gen = Gen, - replay = {TopicFilter, StartTime}, - module = Mod, - data = ItData -}) -> - term_to_binary(#{ - v => 1, - gen => Gen, - filter => TopicFilter, - start => StartTime, - st => Mod:preserve_iterator(ItData) - }). 
- -restore_iterator_state(Shard, Serial) when is_binary(Serial) -> - restore_iterator_state(Shard, binary_to_term(Serial)); -restore_iterator_state( - Shard, - #{ - v := 1, - gen := Gen, - filter := TopicFilter, - start := StartTime, - st := State - } -) -> - It = #it{shard = Shard, gen = Gen, replay = {TopicFilter, StartTime}}, - open_restore_iterator(meta_get_gen(Shard, Gen), It, State). - -do_list_iterator_prefix(Shard, KeyPrefix) -> - Fn = fun(K0, _V, Acc) -> - K = ?KEY_REPLAY_STATE_PAT(K0), - [K | Acc] - end, - do_foldl_iterator_prefix(Shard, KeyPrefix, Fn, []). - -do_discard_iterator_prefix(Shard, KeyPrefix) -> - #db{handle = DBHandle, cf_iterator = CF} = meta_lookup(Shard, db), - Fn = fun(K, _V, _Acc) -> ok = rocksdb:delete(DBHandle, CF, K, ?ITERATION_WRITE_OPTS) end, - do_foldl_iterator_prefix(Shard, KeyPrefix, Fn, ok). - -do_foldl_iterator_prefix(Shard, KeyPrefix, Fn, Acc) -> - #db{handle = Handle, cf_iterator = CF} = meta_lookup(Shard, db), - case rocksdb:iterator(Handle, CF, ?ITERATION_READ_OPTS) of - {ok, It} -> - NextAction = {seek, KeyPrefix}, - do_foldl_iterator_prefix(Handle, CF, It, KeyPrefix, NextAction, Fn, Acc); - Error -> - Error - end. - -do_foldl_iterator_prefix(DBHandle, CF, It, KeyPrefix, NextAction, Fn, Acc) -> - case rocksdb:iterator_move(It, NextAction) of - {ok, K = <>, V} -> - NewAcc = Fn(K, V, Acc), - do_foldl_iterator_prefix(DBHandle, CF, It, KeyPrefix, next, Fn, NewAcc); - {ok, _K, _V} -> - ok = rocksdb:iterator_close(It), - {ok, Acc}; - {error, invalid_iterator} -> - ok = rocksdb:iterator_close(It), - {ok, Acc}; - Error -> - ok = rocksdb:iterator_close(It), - Error - end. - -%% Functions for dealing with the metadata stored persistently in rocksdb - --define(CURRENT_GEN, <<"current">>). --define(SCHEMA_WRITE_OPTS, []). --define(SCHEMA_READ_OPTS, []). - --spec schema_get_gen(rocksdb:db_handle(), gen_id()) -> generation(). 
-schema_get_gen(DBHandle, GenId) -> - {ok, Bin} = rocksdb:get(DBHandle, schema_gen_key(GenId), ?SCHEMA_READ_OPTS), - binary_to_term(Bin). - --spec schema_put_gen(rocksdb:db_handle(), gen_id(), generation()) -> ok | {error, _}. -schema_put_gen(DBHandle, GenId, Gen) -> - rocksdb:put(DBHandle, schema_gen_key(GenId), term_to_binary(Gen), ?SCHEMA_WRITE_OPTS). - --spec schema_get_current(rocksdb:db_handle()) -> gen_id() | undefined. -schema_get_current(DBHandle) -> - case rocksdb:get(DBHandle, ?CURRENT_GEN, ?SCHEMA_READ_OPTS) of - {ok, Bin} -> - binary_to_integer(Bin); - not_found -> - undefined - end. - --spec schema_put_current(rocksdb:db_handle(), gen_id()) -> ok | {error, _}. -schema_put_current(DBHandle, GenId) -> - rocksdb:put(DBHandle, ?CURRENT_GEN, integer_to_binary(GenId), ?SCHEMA_WRITE_OPTS). - --spec schema_gen_key(integer()) -> binary(). -schema_gen_key(N) -> - <<"gen", N:32>>. - --undef(CURRENT_GEN). --undef(SCHEMA_WRITE_OPTS). --undef(SCHEMA_READ_OPTS). - -%% Functions for dealing with the runtime shard metadata: - --define(PERSISTENT_TERM(SHARD, GEN), {?MODULE, SHARD, GEN}). - --spec meta_register_gen(emqx_ds:shard(), gen_id(), generation()) -> ok. -meta_register_gen(Shard, GenId, Gen) -> - Gs = - case GenId > 0 of - true -> meta_lookup(Shard, GenId - 1); - false -> [] +-spec generations_since(shard_id(), emqx_ds:time()) -> [gen_id()]. +generations_since(Shard, Since) -> + Schema = get_schema_runtime(Shard), + maps:fold( + fun + ({generation, GenId}, #{until := Until}, Acc) when Until >= Since -> + [GenId | Acc]; + (_K, _V, Acc) -> + Acc end, - ok = meta_put(Shard, GenId, [Gen | Gs]), - ok = meta_put(Shard, current, GenId). + [], + Schema + ). --spec meta_lookup_gen(emqx_ds:shard(), emqx_ds:time()) -> {gen_id(), generation()}. -meta_lookup_gen(Shard, Time) -> - %% TODO - %% Is cheaper persistent term GC on update here worth extra lookup? I'm leaning - %% towards a "no". 
- Current = meta_lookup(Shard, current), - Gens = meta_lookup(Shard, Current), - find_gen(Time, Current, Gens). +-define(PERSISTENT_TERM(SHARD), {emqx_ds_storage_layer, SHARD}). -find_gen(Time, GenId, [Gen = #{since := Since} | _]) when Time >= Since -> - {GenId, Gen}; -find_gen(Time, GenId, [_Gen | Rest]) -> - find_gen(Time, GenId - 1, Rest). +-spec get_schema_runtime(shard_id()) -> shard(). +get_schema_runtime(Shard) -> + persistent_term:get(?PERSISTENT_TERM(Shard)). --spec meta_get_gen(emqx_ds:shard(), gen_id()) -> generation() | undefined. -meta_get_gen(Shard, GenId) -> - case meta_lookup(Shard, GenId, []) of - [Gen | _Older] -> Gen; - [] -> undefined - end. +-spec put_schema_runtime(shard_id(), shard()) -> ok. +put_schema_runtime(Shard, RuntimeSchema) -> + persistent_term:put(?PERSISTENT_TERM(Shard), RuntimeSchema), + ok. --spec meta_get_current(emqx_ds:shard()) -> gen_id() | undefined. -meta_get_current(Shard) -> - meta_lookup(Shard, current, undefined). - --spec meta_lookup(emqx_ds:shard(), _K) -> _V. -meta_lookup(Shard, K) -> - persistent_term:get(?PERSISTENT_TERM(Shard, K)). - --spec meta_lookup(emqx_ds:shard(), _K, Default) -> _V | Default. -meta_lookup(Shard, K, Default) -> - persistent_term:get(?PERSISTENT_TERM(Shard, K), Default). - --spec meta_put(emqx_ds:shard(), _K, _V) -> ok. -meta_put(Shard, K, V) -> - persistent_term:put(?PERSISTENT_TERM(Shard, K), V). - --spec meta_erase(emqx_ds:shard()) -> ok. -meta_erase(Shard) -> - [ - persistent_term:erase(K) - || {K = ?PERSISTENT_TERM(Z, _), _} <- persistent_term:get(), Z =:= Shard - ], +-spec erase_schema_runtime(shard_id()) -> ok. +erase_schema_runtime(Shard) -> + persistent_term:erase(?PERSISTENT_TERM(Shard)), ok. -undef(PERSISTENT_TERM). -get_next_id(undefined) -> 0; -get_next_id(GenId) -> GenId + 1. +-define(ROCKSDB_SCHEMA_KEY, <<"schema_v1">>). 
-is_gen_valid(Shard, GenId, Since) when GenId > 0 -> - [GenPrev | _] = meta_lookup(Shard, GenId - 1), - case GenPrev of - #{since := SincePrev} when Since > SincePrev -> - ok; - #{} -> - {error, nonmonotonic} - end; -is_gen_valid(_Shard, 0, 0) -> - ok. +-spec get_schema_persistent(rocksdb:db_handle()) -> shard_schema() | not_found. +get_schema_persistent(DB) -> + case rocksdb:get(DB, ?ROCKSDB_SCHEMA_KEY, []) of + {ok, Blob} -> + Schema = binary_to_term(Blob), + %% Sanity check: + #{current_generation := _, default_generation_module := _} = Schema, + Schema; + not_found -> + not_found + end. -serialize(Msg) -> - %% TODO: remove topic, GUID, etc. from the stored - %% message. Reconstruct it from the metadata. - term_to_binary(emqx_message:to_map(Msg)). +-spec put_schema_persistent(rocksdb:db_handle(), shard_schema()) -> ok. +put_schema_persistent(DB, Schema) -> + Blob = term_to_binary(Schema), + rocksdb:put(DB, ?ROCKSDB_SCHEMA_KEY, Blob, []). -deserialize(Bin) -> - emqx_message:from_map(binary_to_term(Bin)). - - -%% -spec store_cfs(rocksdb:db_handle(), [{string(), rocksdb:cf_handle()}]) -> ok. -%% store_cfs(DBHandle, CFRefs) -> -%% lists:foreach( -%% fun({CFName, CFRef}) -> -%% persistent_term:put({self(), CFName}, {DBHandle, CFRef}) -%% end, -%% CFRefs). +-undef(ROCKSDB_SCHEMA_KEY). diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl_ b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl_ new file mode 100644 index 000000000..32f18d18b --- /dev/null +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl_ @@ -0,0 +1,714 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- +-module(emqx_ds_storage_layer). + +-behaviour(gen_server). + +%% API: +-export([start_link/2]). +-export([create_generation/3]). + +-export([open_shard/2, get_streams/3]). +-export([message_store/3]). 
+-export([delete/4]). + +-export([make_iterator/3, next/1, next/2]). + +-export([ + preserve_iterator/2, + restore_iterator/2, + discard_iterator/2, + ensure_iterator/3, + discard_iterator_prefix/2, + list_iterator_prefix/2, + foldl_iterator_prefix/4 +]). + +%% gen_server callbacks: +-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2]). + +-export_type([stream/0, cf_refs/0, gen_id/0, options/0, state/0, iterator/0]). +-export_type([db_options/0, db_write_options/0, db_read_options/0]). + +-compile({inline, [meta_lookup/2]}). + +-include_lib("emqx/include/emqx.hrl"). + +%%================================================================================ +%% Type declarations +%%================================================================================ + +-type options() :: #{ + dir => file:filename() +}. + +%% see rocksdb:db_options() +-type db_options() :: proplists:proplist(). +%% see rocksdb:write_options() +-type db_write_options() :: proplists:proplist(). +%% see rocksdb:read_options() +-type db_read_options() :: proplists:proplist(). + +-type cf_refs() :: [{string(), rocksdb:cf_handle()}]. + +%% Message storage generation +%% Keep in mind that instances of this type are persisted in long-term storage. +-type generation() :: #{ + %% Module that handles data for the generation + module := module(), + %% Module-specific data defined at generation creation time + data := term(), + %% When should this generation become active? + %% This generation should only contain messages timestamped no earlier than that. + %% The very first generation will have `since` equal 0. + since := emqx_ds:time() +}. + +-record(s, { + shard :: emqx_ds:shard(), + keyspace :: emqx_ds_conf:keyspace(), + db :: rocksdb:db_handle(), + cf_iterator :: rocksdb:cf_handle(), + cf_generations :: cf_refs() +}). + +-record(stream, + { generation :: gen_id() + , topic_filter :: emqx_ds:topic_filter() + , since :: emqx_ds:time() + , enc :: _EncapsultatedData + }). 
+ +-opaque stream() :: #stream{}. + +-record(it, { + shard :: emqx_ds:shard(), + gen :: gen_id(), + replay :: emqx_ds:replay(), + module :: module(), + data :: term() +}). + +-type gen_id() :: 0..16#ffff. + +-opaque state() :: #s{}. +-opaque iterator() :: #it{}. + +%% Contents of the default column family: +%% +%% [{<<"genNN">>, #generation{}}, ..., +%% {<<"current">>, GenID}] + +-define(DEFAULT_CF, "default"). +-define(DEFAULT_CF_OPTS, []). + +-define(ITERATOR_CF, "$iterators"). + +%% TODO +%% 1. CuckooTable might be of use here / `OptimizeForPointLookup(...)`. +%% 2. Supposedly might be compressed _very_ effectively. +%% 3. `inplace_update_support`? +-define(ITERATOR_CF_OPTS, []). + +-define(REF(ShardId), {via, gproc, {n, l, {?MODULE, ShardId}}}). + +%%================================================================================ +%% Callbacks +%%================================================================================ + +-callback create_new(rocksdb:db_handle(), gen_id(), _Options :: term()) -> + {_Schema, cf_refs()}. + +-callback open( + emqx_ds:shard(), + rocksdb:db_handle(), + gen_id(), + cf_refs(), + _Schema +) -> + _DB. + +-callback store( + _DB, + _MessageID :: binary(), + emqx_ds:time(), + emqx_ds:topic(), + _Payload :: binary() +) -> + ok | {error, _}. + +-callback delete(_DB, _MessageID :: binary(), emqx_ds:time(), emqx_ds:topic()) -> + ok | {error, _}. + +-callback get_streams(_DB, emqx_ds:topic_filter(), emqx_ds:time()) -> + [{_TopicRankX, _Stream}]. + +-callback make_iterator(_DB, emqx_ds:replay()) -> + {ok, _It} | {error, _}. + +-callback restore_iterator(_DB, _Serialized :: binary()) -> {ok, _It} | {error, _}. + +-callback preserve_iterator(_It) -> term(). + +-callback next(It) -> {value, binary(), It} | none | {error, closed}. 
+ +%%================================================================================ +%% Replication layer API +%%================================================================================ + +-spec open_shard(emqx_ds_replication_layer:shard(), emqx_ds_storage_layer:options()) -> ok. +open_shard(Shard, Options) -> + emqx_ds_storage_layer_sup:ensure_shard(Shard, Options). + +-spec get_streams(emqx_ds:shard_id(), emqx_ds:topic_filter(), emqx_ds:time()) -> [{emqx_ds:stream_rank(), _Stream}]. +get_streams(Shard, TopicFilter, StartTime) -> + %% TODO: lookup ALL generations + {GenId, #{module := Mod, data := ModState}} = meta_lookup_gen(Shard, StartTime), + lists:map( + fun({RankX, ModStream}) -> + Stream = #stream{ generation = GenId + , topic_filter = TopicFilter + , since = StartTime + , enc = ModStream + }, + Rank = {RankX, GenId}, + {Rank, Stream} + end, + Mod:get_streams(ModState, TopicFilter, StartTime)). + +-spec message_store(emqx_ds:shard(), [emqx_types:message()], emqx_ds:message_store_opts()) -> + {ok, _MessageId} | {error, _}. +message_store(Shard, Msgs, _Opts) -> + {ok, lists:map( + fun(Msg) -> + GUID = emqx_message:id(Msg), + Timestamp = Msg#message.timestamp, + {_GenId, #{module := Mod, data := ModState}} = meta_lookup_gen(Shard, Timestamp), + Topic = emqx_topic:words(emqx_message:topic(Msg)), + Payload = serialize(Msg), + Mod:store(ModState, GUID, Timestamp, Topic, Payload), + GUID + end, + Msgs)}. + +-spec next(iterator()) -> {ok, iterator(), [binary()]} | end_of_stream. +next(It = #it{}) -> + next(It, _BatchSize = 1). + +-spec next(iterator(), pos_integer()) -> {ok, iterator(), [binary()]} | end_of_stream. 
+next(#it{data = {?MODULE, end_of_stream}}, _BatchSize) -> + end_of_stream; +next( + It = #it{shard = Shard, module = Mod, gen = Gen, data = {?MODULE, retry, Serialized}}, BatchSize +) -> + #{data := DBData} = meta_get_gen(Shard, Gen), + {ok, ItData} = Mod:restore_iterator(DBData, Serialized), + next(It#it{data = ItData}, BatchSize); +next(It = #it{}, BatchSize) -> + do_next(It, BatchSize, _Acc = []). + +%%================================================================================ +%% API functions +%%================================================================================ + +-spec create_generation( + emqx_ds:shard(), emqx_ds:time(), emqx_ds_conf:backend_config() +) -> + {ok, gen_id()} | {error, nonmonotonic}. +create_generation(ShardId, Since, Config = {_Module, _Options}) -> + gen_server:call(?REF(ShardId), {create_generation, Since, Config}). + +-spec delete(emqx_ds:shard(), emqx_guid:guid(), emqx_ds:time(), emqx_ds:topic()) -> + ok | {error, _}. +delete(Shard, GUID, Time, Topic) -> + {_GenId, #{module := Mod, data := Data}} = meta_lookup_gen(Shard, Time), + Mod:delete(Data, GUID, Time, Topic). + +-spec make_iterator(emqx_ds:shard(), stream(), emqx_ds:time()) -> + {ok, iterator()} | {error, _TODO}. +make_iterator(Shard, Stream, StartTime) -> + #stream{ topic_filter = TopicFilter + , since = Since + , enc = Enc + } = Stream, + {GenId, Gen} = meta_lookup_gen(Shard, StartTime), + Replay = {TopicFilter, Since}, + case Mod:make_iterator(Data, Replay, Options) of + #it{ gen = GenId, + replay = {TopicFilter, Since} + }. + +-spec do_next(iterator(), non_neg_integer(), [binary()]) -> + {ok, iterator(), [binary()]} | end_of_stream. +do_next(It, N, Acc) when N =< 0 -> + {ok, It, lists:reverse(Acc)}; +do_next(It = #it{module = Mod, data = ItData}, N, Acc) -> + case Mod:next(ItData) of + {value, Bin, ItDataNext} -> + Val = deserialize(Bin), + do_next(It#it{data = ItDataNext}, N - 1, [Val | Acc]); + {error, _} = _Error -> + %% todo: log? 
+ %% iterator might be invalid now; will need to re-open it. + Serialized = Mod:preserve_iterator(ItData), + {ok, It#it{data = {?MODULE, retry, Serialized}}, lists:reverse(Acc)}; + none -> + case open_next_iterator(It) of + {ok, ItNext} -> + do_next(ItNext, N, Acc); + {error, _} = _Error -> + %% todo: log? + %% fixme: only bad options may lead to this? + %% return an "empty" iterator to be re-opened when retrying? + Serialized = Mod:preserve_iterator(ItData), + {ok, It#it{data = {?MODULE, retry, Serialized}}, lists:reverse(Acc)}; + none -> + case Acc of + [] -> + end_of_stream; + _ -> + {ok, It#it{data = {?MODULE, end_of_stream}}, lists:reverse(Acc)} + end + end + end. + +-spec preserve_iterator(iterator(), emqx_ds:iterator_id()) -> + ok | {error, _TODO}. +preserve_iterator(It = #it{}, IteratorID) -> + iterator_put_state(IteratorID, It). + +-spec restore_iterator(emqx_ds:shard(), emqx_ds:replay_id()) -> + {ok, iterator()} | {error, _TODO}. +restore_iterator(Shard, ReplayID) -> + case iterator_get_state(Shard, ReplayID) of + {ok, Serial} -> + restore_iterator_state(Shard, Serial); + not_found -> + {error, not_found}; + {error, _Reason} = Error -> + Error + end. + +-spec ensure_iterator(emqx_ds:shard(), emqx_ds:iterator_id(), emqx_ds:replay()) -> + {ok, iterator()} | {error, _TODO}. +ensure_iterator(Shard, IteratorID, Replay = {_TopicFilter, _StartMS}) -> + case restore_iterator(Shard, IteratorID) of + {ok, It} -> + {ok, It}; + {error, not_found} -> + {ok, It} = make_iterator(Shard, Replay), + ok = emqx_ds_storage_layer:preserve_iterator(It, IteratorID), + {ok, It}; + Error -> + Error + end. + +-spec discard_iterator(emqx_ds:shard(), emqx_ds:replay_id()) -> + ok | {error, _TODO}. +discard_iterator(Shard, ReplayID) -> + iterator_delete(Shard, ReplayID). + +-spec discard_iterator_prefix(emqx_ds:shard(), binary()) -> + ok | {error, _TODO}. 
+discard_iterator_prefix(Shard, KeyPrefix) -> + case do_discard_iterator_prefix(Shard, KeyPrefix) of + {ok, _} -> ok; + Error -> Error + end. + +-spec list_iterator_prefix( + emqx_ds:shard(), + binary() +) -> {ok, [emqx_ds:iterator_id()]} | {error, _TODO}. +list_iterator_prefix(Shard, KeyPrefix) -> + do_list_iterator_prefix(Shard, KeyPrefix). + +-spec foldl_iterator_prefix( + emqx_ds:shard(), + binary(), + fun((_Key :: binary(), _Value :: binary(), Acc) -> Acc), + Acc +) -> {ok, Acc} | {error, _TODO} when + Acc :: term(). +foldl_iterator_prefix(Shard, KeyPrefix, Fn, Acc) -> + do_foldl_iterator_prefix(Shard, KeyPrefix, Fn, Acc). + +%%================================================================================ +%% gen_server +%%================================================================================ + +-spec start_link(emqx_ds:shard(), emqx_ds_storage_layer:options()) -> + {ok, pid()}. +start_link(Shard, Options) -> + gen_server:start_link(?REF(Shard), ?MODULE, {Shard, Options}, []). + +init({Shard, Options}) -> + process_flag(trap_exit, true), + {ok, S0} = do_open_db(Shard, Options), + S = ensure_current_generation(S0), + ok = populate_metadata(S), + {ok, S}. + +handle_call({create_generation, Since, Config}, _From, S) -> + case create_new_gen(Since, Config, S) of + {ok, GenId, NS} -> + {reply, {ok, GenId}, NS}; + {error, _} = Error -> + {reply, Error, S} + end; +handle_call(_Call, _From, S) -> + {reply, {error, unknown_call}, S}. + +handle_cast(_Cast, S) -> + {noreply, S}. + +handle_info(_Info, S) -> + {noreply, S}. + +terminate(_Reason, #s{db = DB, shard = Shard}) -> + meta_erase(Shard), + ok = rocksdb:close(DB). + +%%================================================================================ +%% Internal functions +%%================================================================================ + +-record(db, {handle :: rocksdb:db_handle(), cf_iterator :: rocksdb:cf_handle()}). + +-spec populate_metadata(state()) -> ok. 
+populate_metadata(S = #s{shard = Shard, db = DBHandle, cf_iterator = CFIterator}) -> + ok = meta_put(Shard, db, #db{handle = DBHandle, cf_iterator = CFIterator}), + Current = schema_get_current(DBHandle), + lists:foreach(fun(GenId) -> populate_metadata(GenId, S) end, lists:seq(0, Current)). + +-spec populate_metadata(gen_id(), state()) -> ok. +populate_metadata(GenId, S = #s{shard = Shard, db = DBHandle}) -> + Gen = open_gen(GenId, schema_get_gen(DBHandle, GenId), S), + meta_register_gen(Shard, GenId, Gen). + +-spec ensure_current_generation(state()) -> state(). +ensure_current_generation(S = #s{shard = _Shard, keyspace = Keyspace, db = DBHandle}) -> + case schema_get_current(DBHandle) of + undefined -> + Config = emqx_ds_conf:keyspace_config(Keyspace), + {ok, _, NS} = create_new_gen(0, Config, S), + NS; + _GenId -> + S + end. + +-spec create_new_gen(emqx_ds:time(), emqx_ds_conf:backend_config(), state()) -> + {ok, gen_id(), state()} | {error, nonmonotonic}. +create_new_gen(Since, Config, S = #s{shard = Shard, db = DBHandle}) -> + GenId = get_next_id(meta_get_current(Shard)), + GenId = get_next_id(schema_get_current(DBHandle)), + case is_gen_valid(Shard, GenId, Since) of + ok -> + {ok, Gen, NS} = create_gen(GenId, Since, Config, S), + %% TODO: Transaction? Column family creation can't be transactional, anyway. + ok = schema_put_gen(DBHandle, GenId, Gen), + ok = schema_put_current(DBHandle, GenId), + ok = meta_register_gen(Shard, GenId, open_gen(GenId, Gen, NS)), + {ok, GenId, NS}; + {error, _} = Error -> + Error + end. + +-spec create_gen(gen_id(), emqx_ds:time(), emqx_ds_conf:backend_config(), state()) -> + {ok, generation(), state()}. +create_gen(GenId, Since, {Module, Options}, S = #s{db = DBHandle, cf_generations = CFs}) -> + % TODO: Backend implementation should ensure idempotency. 
+ {Schema, NewCFs} = Module:create_new(DBHandle, GenId, Options), + Gen = #{ + module => Module, + data => Schema, + since => Since + }, + {ok, Gen, S#s{cf_generations = NewCFs ++ CFs}}. + +-spec do_open_db(emqx_ds:shard(), options()) -> {ok, state()} | {error, _TODO}. +do_open_db(Shard, Options) -> + DefaultDir = binary_to_list(Shard), + DBDir = unicode:characters_to_list(maps:get(dir, Options, DefaultDir)), + %% TODO: properly forward keyspace + Keyspace = maps:get(keyspace, Options, default_keyspace), + DBOptions = [ + {create_if_missing, true}, + {create_missing_column_families, true} + | emqx_ds_conf:db_options(Keyspace) + ], + _ = filelib:ensure_dir(DBDir), + ExistingCFs = + case rocksdb:list_column_families(DBDir, DBOptions) of + {ok, CFs} -> + [{Name, []} || Name <- CFs, Name /= ?DEFAULT_CF, Name /= ?ITERATOR_CF]; + % DB is not present. First start + {error, {db_open, _}} -> + [] + end, + ColumnFamilies = [ + {?DEFAULT_CF, ?DEFAULT_CF_OPTS}, + {?ITERATOR_CF, ?ITERATOR_CF_OPTS} + | ExistingCFs + ], + case rocksdb:open(DBDir, DBOptions, ColumnFamilies) of + {ok, DBHandle, [_CFDefault, CFIterator | CFRefs]} -> + {CFNames, _} = lists:unzip(ExistingCFs), + {ok, #s{ + shard = Shard, + keyspace = Keyspace, + db = DBHandle, + cf_iterator = CFIterator, + cf_generations = lists:zip(CFNames, CFRefs) + }}; + Error -> + Error + end. + +-spec open_gen(gen_id(), generation(), state()) -> generation(). +open_gen( + GenId, + Gen = #{module := Mod, data := Data}, + #s{shard = Shard, db = DBHandle, cf_generations = CFs} +) -> + DB = Mod:open(Shard, DBHandle, GenId, CFs, Data), + Gen#{data := DB}. + +-spec open_next_iterator(iterator()) -> {ok, iterator()} | {error, _Reason} | none. +open_next_iterator(It = #it{shard = Shard, gen = GenId}) -> + open_next_iterator(meta_get_gen(Shard, GenId + 1), It#it{gen = GenId + 1}). + +open_next_iterator(undefined, _It) -> + none; +open_next_iterator(Gen = #{}, It) -> + open_iterator(Gen, It). 
+
+-spec open_restore_iterator(generation(), iterator(), binary()) ->
+    {ok, iterator()} | {error, _Reason}.
+open_restore_iterator(#{module := Mod, data := Data}, It = #it{}, Serial) ->
+    case Mod:restore_iterator(Data, Serial) of
+        {ok, ItData} ->
+            {ok, It#it{module = Mod, data = ItData}};
+        Err ->
+            Err
+    end.
+
+%%
+
+-define(KEY_REPLAY_STATE(IteratorId), <<(IteratorId)/binary, "rs">>).
+-define(KEY_REPLAY_STATE_PAT(KeyReplayState), begin
+    <<IteratorId:(size(KeyReplayState) - 2)/binary, "rs">> = (KeyReplayState),
+    IteratorId
+end).
+
+-define(ITERATION_WRITE_OPTS, []).
+-define(ITERATION_READ_OPTS, []).
+
+iterator_get_state(Shard, ReplayID) ->
+    #db{handle = Handle, cf_iterator = CF} = meta_lookup(Shard, db),
+    rocksdb:get(Handle, CF, ?KEY_REPLAY_STATE(ReplayID), ?ITERATION_READ_OPTS).
+
+iterator_put_state(ID, It = #it{shard = Shard}) ->
+    #db{handle = Handle, cf_iterator = CF} = meta_lookup(Shard, db),
+    Serial = preserve_iterator_state(It),
+    rocksdb:put(Handle, CF, ?KEY_REPLAY_STATE(ID), Serial, ?ITERATION_WRITE_OPTS).
+
+iterator_delete(Shard, ID) ->
+    #db{handle = Handle, cf_iterator = CF} = meta_lookup(Shard, db),
+    rocksdb:delete(Handle, CF, ?KEY_REPLAY_STATE(ID), ?ITERATION_WRITE_OPTS).
+
+preserve_iterator_state(#it{
+    gen = Gen,
+    replay = {TopicFilter, StartTime},
+    module = Mod,
+    data = ItData
+}) ->
+    term_to_binary(#{
+        v => 1,
+        gen => Gen,
+        filter => TopicFilter,
+        start => StartTime,
+        st => Mod:preserve_iterator(ItData)
+    }).
+
+restore_iterator_state(Shard, Serial) when is_binary(Serial) ->
+    restore_iterator_state(Shard, binary_to_term(Serial));
+restore_iterator_state(
+    Shard,
+    #{
+        v := 1,
+        gen := Gen,
+        filter := TopicFilter,
+        start := StartTime,
+        st := State
+    }
+) ->
+    It = #it{shard = Shard, gen = Gen, replay = {TopicFilter, StartTime}},
+    open_restore_iterator(meta_get_gen(Shard, Gen), It, State).
+
+do_list_iterator_prefix(Shard, KeyPrefix) ->
+    Fn = fun(K0, _V, Acc) ->
+        K = ?KEY_REPLAY_STATE_PAT(K0),
+        [K | Acc]
+    end,
+    do_foldl_iterator_prefix(Shard, KeyPrefix, Fn, []).
+
+do_discard_iterator_prefix(Shard, KeyPrefix) ->
+    #db{handle = DBHandle, cf_iterator = CF} = meta_lookup(Shard, db),
+    Fn = fun(K, _V, _Acc) -> ok = rocksdb:delete(DBHandle, CF, K, ?ITERATION_WRITE_OPTS) end,
+    do_foldl_iterator_prefix(Shard, KeyPrefix, Fn, ok).
+
+do_foldl_iterator_prefix(Shard, KeyPrefix, Fn, Acc) ->
+    #db{handle = Handle, cf_iterator = CF} = meta_lookup(Shard, db),
+    case rocksdb:iterator(Handle, CF, ?ITERATION_READ_OPTS) of
+        {ok, It} ->
+            NextAction = {seek, KeyPrefix},
+            do_foldl_iterator_prefix(Handle, CF, It, KeyPrefix, NextAction, Fn, Acc);
+        Error ->
+            Error
+    end.
+
+do_foldl_iterator_prefix(DBHandle, CF, It, KeyPrefix, NextAction, Fn, Acc) ->
+    case rocksdb:iterator_move(It, NextAction) of
+        {ok, K = <<KeyPrefix:(size(KeyPrefix))/binary, _/binary>>, V} ->
+            NewAcc = Fn(K, V, Acc),
+            do_foldl_iterator_prefix(DBHandle, CF, It, KeyPrefix, next, Fn, NewAcc);
+        {ok, _K, _V} ->
+            ok = rocksdb:iterator_close(It),
+            {ok, Acc};
+        {error, invalid_iterator} ->
+            ok = rocksdb:iterator_close(It),
+            {ok, Acc};
+        Error ->
+            ok = rocksdb:iterator_close(It),
+            Error
+    end.
+
+%% Functions for dealing with the metadata stored persistently in rocksdb
+
+-define(CURRENT_GEN, <<"current">>).
+-define(SCHEMA_WRITE_OPTS, []).
+-define(SCHEMA_READ_OPTS, []).
+
+-spec schema_get_gen(rocksdb:db_handle(), gen_id()) -> generation().
+schema_get_gen(DBHandle, GenId) ->
+    {ok, Bin} = rocksdb:get(DBHandle, schema_gen_key(GenId), ?SCHEMA_READ_OPTS),
+    binary_to_term(Bin).
+
+-spec schema_put_gen(rocksdb:db_handle(), gen_id(), generation()) -> ok | {error, _}.
+schema_put_gen(DBHandle, GenId, Gen) ->
+    rocksdb:put(DBHandle, schema_gen_key(GenId), term_to_binary(Gen), ?SCHEMA_WRITE_OPTS).
+
+-spec schema_get_current(rocksdb:db_handle()) -> gen_id() | undefined.
+schema_get_current(DBHandle) -> + case rocksdb:get(DBHandle, ?CURRENT_GEN, ?SCHEMA_READ_OPTS) of + {ok, Bin} -> + binary_to_integer(Bin); + not_found -> + undefined + end. + +-spec schema_put_current(rocksdb:db_handle(), gen_id()) -> ok | {error, _}. +schema_put_current(DBHandle, GenId) -> + rocksdb:put(DBHandle, ?CURRENT_GEN, integer_to_binary(GenId), ?SCHEMA_WRITE_OPTS). + +-spec schema_gen_key(integer()) -> binary(). +schema_gen_key(N) -> + <<"gen", N:32>>. + +-undef(CURRENT_GEN). +-undef(SCHEMA_WRITE_OPTS). +-undef(SCHEMA_READ_OPTS). + +%% Functions for dealing with the runtime shard metadata: + +-define(PERSISTENT_TERM(SHARD, GEN), {emqx_ds_storage_layer, SHARD, GEN}). + +-spec meta_register_gen(emqx_ds:shard(), gen_id(), generation()) -> ok. +meta_register_gen(Shard, GenId, Gen) -> + Gs = + case GenId > 0 of + true -> meta_lookup(Shard, GenId - 1); + false -> [] + end, + ok = meta_put(Shard, GenId, [Gen | Gs]), + ok = meta_put(Shard, current, GenId). + +-spec meta_lookup_gen(emqx_ds:shard(), emqx_ds:time()) -> {gen_id(), generation()}. +meta_lookup_gen(Shard, Time) -> + %% TODO + %% Is cheaper persistent term GC on update here worth extra lookup? I'm leaning + %% towards a "no". + Current = meta_lookup(Shard, current), + Gens = meta_lookup(Shard, Current), + find_gen(Time, Current, Gens). + +find_gen(Time, GenId, [Gen = #{since := Since} | _]) when Time >= Since -> + {GenId, Gen}; +find_gen(Time, GenId, [_Gen | Rest]) -> + find_gen(Time, GenId - 1, Rest). + +-spec meta_get_gen(emqx_ds:shard(), gen_id()) -> generation() | undefined. +meta_get_gen(Shard, GenId) -> + case meta_lookup(Shard, GenId, []) of + [Gen | _Older] -> Gen; + [] -> undefined + end. + +-spec meta_get_current(emqx_ds:shard()) -> gen_id() | undefined. +meta_get_current(Shard) -> + meta_lookup(Shard, current, undefined). + +-spec meta_lookup(emqx_ds:shard(), _K) -> _V. +meta_lookup(Shard, Key) -> + persistent_term:get(?PERSISTENT_TERM(Shard, Key)). 
+ +-spec meta_lookup(emqx_ds:shard(), _K, Default) -> _V | Default. +meta_lookup(Shard, K, Default) -> + persistent_term:get(?PERSISTENT_TERM(Shard, K), Default). + +-spec meta_put(emqx_ds:shard(), _K, _V) -> ok. +meta_put(Shard, K, V) -> + persistent_term:put(?PERSISTENT_TERM(Shard, K), V). + +-spec meta_erase(emqx_ds:shard()) -> ok. +meta_erase(Shard) -> + [ + persistent_term:erase(K) + || {K = ?PERSISTENT_TERM(Z, _), _} <- persistent_term:get(), Z =:= Shard + ], + ok. + +-undef(PERSISTENT_TERM). + +get_next_id(undefined) -> 0; +get_next_id(GenId) -> GenId + 1. + +is_gen_valid(Shard, GenId, Since) when GenId > 0 -> + [GenPrev | _] = meta_lookup(Shard, GenId - 1), + case GenPrev of + #{since := SincePrev} when Since > SincePrev -> + ok; + #{} -> + {error, nonmonotonic} + end; +is_gen_valid(_Shard, 0, 0) -> + ok. + +serialize(Msg) -> + %% TODO: remove topic, GUID, etc. from the stored + %% message. Reconstruct it from the metadata. + term_to_binary(emqx_message:to_map(Msg)). + +deserialize(Bin) -> + emqx_message:from_map(binary_to_term(Bin)). + + +%% -spec store_cfs(rocksdb:db_handle(), [{string(), rocksdb:cf_handle()}]) -> ok. +%% store_cfs(DBHandle, CFRefs) -> +%% lists:foreach( +%% fun({CFName, CFRef}) -> +%% persistent_term:put({self(), CFName}, {DBHandle, CFRef}) +%% end, +%% CFRefs). diff --git a/apps/emqx_durable_storage/src/emqx_ds_message_storage_bitmask.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_layer_bitmask.erl_ similarity index 98% rename from apps/emqx_durable_storage/src/emqx_ds_message_storage_bitmask.erl rename to apps/emqx_durable_storage/src/emqx_ds_storage_layer_bitmask.erl_ index 3290b03e6..bdf5a1453 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_message_storage_bitmask.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_layer_bitmask.erl_ @@ -83,15 +83,11 @@ -export([create_new/3, open/5]). -export([make_keymapper/1]). --export([store/5]). --export([delete/4]). +-export([store/5, delete/4]). --export([get_streams/2]). 
--export([make_iterator/3, next/1]). +-export([get_streams/3, make_iterator/3, next/1]). --export([preserve_iterator/1]). --export([restore_iterator/2]). --export([refresh_iterator/1]). +-export([preserve_iterator/1, restore_iterator/2, refresh_iterator/1]). %% Debug/troubleshooting: %% Keymappers @@ -131,7 +127,7 @@ %% Type declarations %%================================================================================ --opaque stream() :: singleton_stream. +-opaque stream() :: emqx_ds:topic_filter(). -type topic() :: emqx_ds:topic(). -type topic_filter() :: emqx_ds:topic_filter(). @@ -290,10 +286,10 @@ delete(DB = #db{handle = DBHandle, cf = CFHandle}, MessageID, PublishedAt, Topic Key = make_message_key(Topic, PublishedAt, MessageID, DB#db.keymapper), rocksdb:delete(DBHandle, CFHandle, Key, DB#db.write_options). --spec get_streams(db(), emqx_ds:reply()) -> +-spec get_streams(db(), emqx_ds:topic_filter(), emqx_ds:time()) -> [stream()]. -get_streams(_, _) -> - [singleton_stream]. +get_streams(_, TopicFilter, _) -> + [{0, TopicFilter}]. -spec make_iterator(db(), emqx_ds:replay(), iteration_options()) -> % {error, invalid_start_time}? might just start from the beginning of time diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_layer_sup.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_layer_sup.erl index 2e4f56f10..bf73e3ac8 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_layer_sup.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_layer_sup.erl @@ -6,7 +6,7 @@ -behaviour(supervisor). %% API: --export([start_link/0, start_shard/2, stop_shard/1]). +-export([start_link/0, start_shard/2, stop_shard/1, ensure_shard/2]). %% behaviour callbacks: -export([init/1]). 
diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_reference.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_reference.erl new file mode 100644 index 000000000..1fbad5f1b --- /dev/null +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_reference.erl @@ -0,0 +1,136 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +%% @doc Reference implementation of the storage. +%% +%% Trivial, extremely slow and inefficient. It also doesn't handle +%% restart of the Erlang node properly, so obviously it's only to be +%% used for testing. +-module(emqx_ds_storage_reference). + +-behavior(emqx_ds_storage_layer). + +%% API: +-export([]). + +%% behavior callbacks: +-export([create/4, open/5, store_batch/4, get_streams/4, make_iterator/4, next/4]). + +%% internal exports: +-export([]). + +-export_type([]). + +-include_lib("emqx/include/emqx.hrl"). + +%%================================================================================ +%% Type declarations +%%================================================================================ + +%% Permanent state: +-record(schema, {}). + +%% Runtime state: +-record(s, { + db :: rocksdb:db_handle(), + cf :: rocksdb:cf_handle() +}). + +-record(stream, {topic_filter :: emqx_ds:topic_filter()}). 
+
+-record(it, {
+    topic_filter :: emqx_ds:topic_filter(),
+    start_time :: emqx_ds:time(),
+    last_seen_message_key = first :: binary() | first
+}).
+
+%%================================================================================
+%% API functions
+%%================================================================================
+
+%%================================================================================
+%% behavior callbacks
+%%================================================================================
+
+create(_ShardId, DBHandle, GenId, _Options) ->
+    CFName = data_cf(GenId),
+    {ok, CFHandle} = rocksdb:create_column_family(DBHandle, CFName, []),
+    Schema = #schema{},
+    {Schema, [{CFName, CFHandle}]}.
+
+open(_Shard, DBHandle, GenId, CFRefs, #schema{}) ->
+    {_, CF} = lists:keyfind(data_cf(GenId), 1, CFRefs),
+    #s{db = DBHandle, cf = CF}.
+
+store_batch(_ShardId, #s{db = DB, cf = CF}, Messages, _Options) ->
+    lists:foreach(
+        fun(Msg) ->
+            Id = erlang:unique_integer([monotonic]),
+            Key = <<Id:64>>,
+            Val = term_to_binary(Msg),
+            rocksdb:put(DB, CF, Key, Val, [])
+        end,
+        Messages
+    ).
+
+get_streams(_Shard, _Data, TopicFilter, _StartTime) ->
+    [#stream{topic_filter = TopicFilter}].
+
+make_iterator(_Shard, _Data, #stream{topic_filter = TopicFilter}, StartTime) ->
+    {ok, #it{
+        topic_filter = TopicFilter,
+        start_time = StartTime
+    }}.
+
+next(_Shard, #s{db = DB, cf = CF}, It0, BatchSize) ->
+    #it{topic_filter = TopicFilter, start_time = StartTime, last_seen_message_key = Key0} = It0,
+    {ok, ITHandle} = rocksdb:iterator(DB, CF, []),
+    Action = case Key0 of
+        first ->
+            first;
+        _ ->
+            rocksdb:iterator_move(ITHandle, Key0),
+            next
+    end,
+    {Key, Messages} = do_next(TopicFilter, StartTime, ITHandle, Action, BatchSize, Key0, []),
+    rocksdb:iterator_close(ITHandle),
+    It = It0#it{last_seen_message_key = Key},
+    {ok, It, lists:reverse(Messages)}.
+ +%%================================================================================ +%% Internal functions +%%================================================================================ + +do_next(_, _, _, _, 0, Key, Acc) -> + {Key, Acc}; +do_next(TopicFilter, StartTime, IT, Action, NLeft, Key0, Acc) -> + case rocksdb:iterator_move(IT, Action) of + {ok, Key, Blob} -> + Msg = #message{topic = Topic, timestamp = TS} = binary_to_term(Blob), + case emqx_topic:match(Topic, TopicFilter) andalso TS >= StartTime of + true -> + do_next(TopicFilter, StartTime, IT, next, NLeft - 1, Key, [Msg | Acc]); + false -> + do_next(TopicFilter, StartTime, IT, next, NLeft, Key, Acc) + end; + {error, invalid_iterator} -> + {Key0, Acc} + end. + +%% @doc Generate a column family ID for the MQTT messages +-spec data_cf(emqx_ds_storage_layer:gen_id()) -> [char()]. +data_cf(GenId) -> + ?MODULE_STRING ++ integer_to_list(GenId). diff --git a/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl b/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl index 79285fe16..df3d64bc3 100644 --- a/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl +++ b/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl @@ -17,8 +17,9 @@ -behavior(emqx_bpapi). +-include_lib("emqx/include/bpapi.hrl"). %% API: --export([]). +-export([open_shard/3, get_streams/4, make_iterator/4, next/4]). %% behavior callbacks: -export([introduced_in/0]). @@ -27,23 +28,29 @@ %% API funcions %%================================================================================ --spec create_shard(node(), emqx_ds_replication_layer:shard(), emqx_ds:create_db_opts()) -> - ok. -create_shard(Node, Shard, Opts) -> - erpc:call(Node, emqx_ds_replication_layer, do_create_shard_v1, [Shard, Opts]). +-spec open_shard(node(), emqx_ds_replication_layer:shard(), emqx_ds:create_db_opts()) -> + ok. +open_shard(Node, Shard, Opts) -> + erpc:call(Node, emqx_ds_replication_layer, do_open_shard_v1, [Shard, Opts]). 
--spec get_streams(node(), emqx_ds_replication_layer:shard(), emqx_ds:topic_filter(), emqx_ds:time()) -> - [emqx_ds_replication_layer:stream()]. +-spec get_streams( + node(), emqx_ds_replication_layer:shard(), emqx_ds:topic_filter(), emqx_ds:time() +) -> + [{integer(), emqx_ds_replication_layer:stream()}]. get_streams(Node, Shard, TopicFilter, Time) -> erpc:call(Node, emqx_ds_replication_layer, do_get_streams_v1, [Shard, TopicFilter, Time]). --spec open_iterator(node(), emqx_ds_replication_layer:shard(), emqx_ds_replication_layer:stream(), emqx_ds:time()) -> - {ok, emqx_ds_replication_layer:iterator()} | {error, _}. -open_iterator(Node, Shard, Stream, StartTime) -> - erpc:call(Node, emqx_ds_replication_layer, do_open_iterator_v1, [Shard, Stream, StartTime]). +-spec make_iterator(node(), emqx_ds_replication_layer:shard(), _Stream, emqx_ds:time()) -> + {ok, emqx_ds_replication_layer:iterator()} | {error, _}. +make_iterator(Node, Shard, Stream, StartTime) -> + erpc:call(Node, emqx_ds_replication_layer, do_make_iterator_v1, [Shard, Stream, StartTime]). --spec next(node(), emqx_ds_replication_layer:shard(), emqx_ds_replication_layer:iterator(), pos_integer()) -> - {ok, emqx_ds_replication_layer:iterator(), [emqx_types:messages()]} | end_of_stream. +-spec next( + node(), emqx_ds_replication_layer:shard(), emqx_ds_replication_layer:iterator(), pos_integer() +) -> + {ok, emqx_ds_replication_layer:iterator(), [emqx_types:messages()]} + | {ok, end_of_stream} + | {error, _}. next(Node, Shard, Iter, BatchSize) -> erpc:call(Node, emqx_ds_replication_layer, do_next_v1, [Shard, Iter, BatchSize]). diff --git a/apps/emqx_durable_storage/test/emqx_ds_SUITE.erl b/apps/emqx_durable_storage/test/emqx_ds_SUITE.erl new file mode 100644 index 000000000..effe3b695 --- /dev/null +++ b/apps/emqx_durable_storage/test/emqx_ds_SUITE.erl @@ -0,0 +1,107 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. 
+%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- +-module(emqx_ds_SUITE). + +-compile(export_all). +-compile(nowarn_export_all). + +-include_lib("emqx/include/emqx.hrl"). +-include_lib("common_test/include/ct.hrl"). +-include_lib("stdlib/include/assert.hrl"). + +%% A simple smoke test that verifies that opening the DB doesn't crash +t_00_smoke_open(_Config) -> + ?assertMatch(ok, emqx_ds:open_db(<<"DB1">>, #{})), + ?assertMatch(ok, emqx_ds:open_db(<<"DB1">>, #{})). + +%% A simple smoke test that verifies that storing the messages doesn't +%% crash +t_01_smoke_store(_Config) -> + DB = <<"default">>, + ?assertMatch(ok, emqx_ds:open_db(DB, #{})), + Msg = message(<<"foo/bar">>, <<"foo">>, 0), + ?assertMatch(ok, emqx_ds:store_batch(DB, [Msg])). + +%% A simple smoke test that verifies that getting the list of streams +%% doesn't crash and that iterators can be opened. +t_02_smoke_get_streams_start_iter(_Config) -> + DB = <<"default">>, + ?assertMatch(ok, emqx_ds:open_db(DB, #{})), + StartTime = 0, + [{Rank, Stream}] = emqx_ds:get_streams(DB, ['#'], StartTime), + ?assertMatch({_, _}, Rank), + ?assertMatch({ok, _Iter}, emqx_ds:make_iterator(Stream, StartTime)). + +%% A simple smoke test that verifies that it's possible to iterate +%% over messages. 
+t_03_smoke_iterate(_Config) -> + DB = atom_to_binary(?FUNCTION_NAME), + ?assertMatch(ok, emqx_ds:open_db(DB, #{})), + StartTime = 0, + Msgs = [ + message(<<"foo/bar">>, <<"1">>, 0), + message(<<"foo">>, <<"2">>, 1), + message(<<"bar/bar">>, <<"3">>, 2) + ], + ?assertMatch(ok, emqx_ds:store_batch(DB, Msgs)), + [{_, Stream}] = emqx_ds:get_streams(DB, ['#'], StartTime), + {ok, Iter0} = emqx_ds:make_iterator(Stream, StartTime), + {ok, Iter, Batch} = iterate(Iter0, 1), + ?assertEqual(Msgs, Batch, {Iter0, Iter}). + +message(Topic, Payload, PublishedAt) -> + #message{ + topic = Topic, + payload = Payload, + timestamp = PublishedAt, + id = emqx_guid:gen() + }. + +iterate(It, BatchSize) -> + iterate(It, BatchSize, []). + +iterate(It0, BatchSize, Acc) -> + case emqx_ds:next(It0, BatchSize) of + {ok, It, []} -> + {ok, It, Acc}; + {ok, It, Msgs} -> + iterate(It, BatchSize, Acc ++ Msgs); + Ret -> + Ret + end. + +%% CT callbacks + +all() -> emqx_common_test_helpers:all(?MODULE). + +init_per_suite(Config) -> + Apps = emqx_cth_suite:start( + [mria, emqx_durable_storage], + #{work_dir => ?config(priv_dir, Config)} + ), + [{apps, Apps} | Config]. + +end_per_suite(Config) -> + ok = emqx_cth_suite:stop(?config(apps, Config)), + ok. + +init_per_testcase(_TC, Config) -> + snabbkaffe:fix_ct_logging(), + application:ensure_all_started(emqx_durable_storage), + Config. + +end_per_testcase(_TC, _Config) -> + ok = application:stop(emqx_durable_storage). 
diff --git a/apps/emqx_durable_storage/test/emqx_ds_message_storage_bitmask_SUITE.erl b/apps/emqx_durable_storage/test/emqx_ds_message_storage_bitmask_SUITE.erl_ similarity index 100% rename from apps/emqx_durable_storage/test/emqx_ds_message_storage_bitmask_SUITE.erl rename to apps/emqx_durable_storage/test/emqx_ds_message_storage_bitmask_SUITE.erl_ diff --git a/apps/emqx_durable_storage/test/emqx_ds_storage_layer_SUITE.erl b/apps/emqx_durable_storage/test/emqx_ds_storage_layer_SUITE.erl_ similarity index 100% rename from apps/emqx_durable_storage/test/emqx_ds_storage_layer_SUITE.erl rename to apps/emqx_durable_storage/test/emqx_ds_storage_layer_SUITE.erl_ diff --git a/scripts/check-elixir-applications.exs b/scripts/check-elixir-applications.exs index 42c838199..1e604c69f 100755 --- a/scripts/check-elixir-applications.exs +++ b/scripts/check-elixir-applications.exs @@ -1,4 +1,4 @@ -#!/usr/bin/env elixir +#! /usr/bin/env elixir defmodule CheckElixirApplications do alias EMQXUmbrella.MixProject diff --git a/scripts/check-elixir-deps-discrepancies.exs b/scripts/check-elixir-deps-discrepancies.exs index 408079d7d..1363219ed 100755 --- a/scripts/check-elixir-deps-discrepancies.exs +++ b/scripts/check-elixir-deps-discrepancies.exs @@ -1,4 +1,4 @@ -#!/usr/bin/env elixir +#! /usr/bin/env elixir # ensure we have a fresh rebar.lock diff --git a/scripts/check-elixir-emqx-machine-boot-discrepancies.exs b/scripts/check-elixir-emqx-machine-boot-discrepancies.exs index d07e6978f..9ffdc47bf 100755 --- a/scripts/check-elixir-emqx-machine-boot-discrepancies.exs +++ b/scripts/check-elixir-emqx-machine-boot-discrepancies.exs @@ -1,4 +1,4 @@ -#!/usr/bin/env elixir +#! 
/usr/bin/env elixir defmodule CheckElixirEMQXMachineBootDiscrepancies do alias EMQXUmbrella.MixProject diff --git a/scripts/check_missing_reboot_apps.exs b/scripts/check_missing_reboot_apps.exs index 91d4b39ea..7f2178ec1 100755 --- a/scripts/check_missing_reboot_apps.exs +++ b/scripts/check_missing_reboot_apps.exs @@ -1,4 +1,4 @@ -#!/usr/bin/env elixir +#! /usr/bin/env elixir alias EMQXUmbrella.MixProject From 6d65707d41df06219ba63b1c4e735c52444c7981 Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Thu, 5 Oct 2023 01:43:35 +0200 Subject: [PATCH 077/155] refactor(ds): Implement drop_db function --- apps/emqx_durable_storage/src/emqx_ds.erl | 24 ++++++++++++---- .../src/emqx_ds_replication_layer.erl | 28 ++++++++++++++----- .../src/emqx_ds_storage_layer.erl | 18 ++++++++---- .../src/emqx_ds_storage_reference.erl | 15 +++++----- .../src/proto/emqx_ds_proto_v1.erl | 7 ++++- .../test/emqx_ds_SUITE.erl | 17 ++++++----- 6 files changed, 77 insertions(+), 32 deletions(-) diff --git a/apps/emqx_durable_storage/src/emqx_ds.erl b/apps/emqx_durable_storage/src/emqx_ds.erl index 6a20afbf1..293f2e531 100644 --- a/apps/emqx_durable_storage/src/emqx_ds.erl +++ b/apps/emqx_durable_storage/src/emqx_ds.erl @@ -22,7 +22,7 @@ -module(emqx_ds). %% Management API: --export([open_db/2]). +-export([open_db/2, drop_db/1]). %% Message storage API: -export([store_batch/1, store_batch/2, store_batch/3]). @@ -50,7 +50,7 @@ %% Type declarations %%================================================================================ --type db() :: emqx_ds_replication_layer:db(). +-type db() :: atom(). %% Parsed topic. -type topic() :: list(binary()). @@ -101,6 +101,12 @@ open_db(DB, Opts) -> emqx_ds_replication_layer:open_db(DB, Opts). +%% @doc TODO: currently if one or a few shards are down, they won't be +%% deleted. +-spec drop_db(db()) -> ok. +drop_db(DB) -> + emqx_ds_replication_layer:drop_db(DB). 
+ -spec store_batch([emqx_types:message()]) -> store_batch_result(). store_batch(Msgs) -> store_batch(?DEFAULT_DB, Msgs, #{}). @@ -124,7 +130,15 @@ store_batch(DB, Msgs) -> %% reflects the notion that different topics can be stored %% differently, but hides the implementation details. %% -%% Rules: +%% While having to work with multiple iterators to replay a topic +%% filter may be cumbersome, it opens up some possibilities: +%% +%% 1. It's possible to parallelize replays +%% +%% 2. Streams can be shared between different clients to implement +%% shared subscriptions +%% +%% IMPORTANT RULES: %% %% 0. There is no 1-to-1 mapping between MQTT topics and streams. One %% stream can contain any number of MQTT topics. @@ -145,8 +159,8 @@ store_batch(DB, Msgs) -> %% equal, then the streams are independent. %% %% Stream is fully consumed when `next/3' function returns -%% `end_of_stream'. Then the client can proceed to replaying streams -%% that depend on the given one. +%% `end_of_stream'. Then and only then the client can proceed to +%% replaying streams that depend on the given one. -spec get_streams(db(), topic_filter(), time()) -> [{stream_rank(), stream()}]. get_streams(DB, TopicFilter, StartTime) -> emqx_ds_replication_layer:get_streams(DB, TopicFilter, StartTime). diff --git a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl index 5d4749c30..e1c775d5a 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl @@ -18,6 +18,7 @@ -export([ list_shards/1, open_db/2, + drop_db/1, store_batch/3, get_streams/3, make_iterator/2, @@ -27,6 +28,7 @@ %% internal exports: -export([ do_open_shard_v1/2, + do_drop_shard_v1/1, do_get_streams_v1/3, do_make_iterator_v1/3, do_next_v1/3 @@ -38,9 +40,9 @@ %% Type declarations %%================================================================================ --type db() :: binary(). 
+-type db() :: emqx_ds:db(). --type shard_id() :: binary(). +-type shard_id() :: {emqx_ds:db(), atom()}. %% This record enapsulates the stream entity from the replication %% level. @@ -90,6 +92,16 @@ open_db(DB, Opts) -> list_nodes() ). +-spec drop_db(emqx_ds:db()) -> ok | {error, _}. +drop_db(DB) -> + lists:foreach( + fun(Node) -> + Shard = shard_id(DB, Node), + ok = emqx_ds_proto_v1:drop_shard(Node, Shard) + end, + list_nodes() + ). + -spec store_batch(emqx_ds:db(), [emqx_types:message()], emqx_ds:message_store_opts()) -> emqx_ds:store_batch_result(). store_batch(DB, Msg, Opts) -> @@ -163,6 +175,10 @@ next(Iter0, BatchSize) -> do_open_shard_v1(Shard, Opts) -> emqx_ds_storage_layer:open_shard(Shard, Opts). +-spec do_drop_shard_v1(shard_id()) -> ok. +do_drop_shard_v1(Shard) -> + emqx_ds_storage_layer:drop_shard(Shard). + -spec do_get_streams_v1(shard_id(), emqx_ds:topic_filter(), emqx_ds:time()) -> [{integer(), _Stream}]. do_get_streams_v1(Shard, TopicFilter, StartTime) -> @@ -187,13 +203,11 @@ add_shard_to_rank(Shard, RankY) -> shard_id(DB, Node) -> %% TODO: don't bake node name into the schema, don't repeat the %% Mnesia's 1M$ mistake. - NodeBin = atom_to_binary(Node), - <>. + {DB, Node}. -spec node_of_shard(shard_id()) -> node(). -node_of_shard(ShardId) -> - [_DB, NodeBin] = binary:split(ShardId, <<":">>), - binary_to_atom(NodeBin). +node_of_shard({_DB, Node}) -> + Node. list_nodes() -> mria:running_nodes(). diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl index fdd81a095..d531c5985 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl @@ -18,13 +18,13 @@ -behaviour(gen_server). %% Replication layer API: --export([open_shard/2, store_batch/3, get_streams/3, make_iterator/3, next/3]). +-export([open_shard/2, drop_shard/1, store_batch/3, get_streams/3, make_iterator/3, next/3]). 
%% gen_server -export([start_link/2, init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2]). %% internal exports: --export([]). +-export([drop_shard/1]). -export_type([gen_id/0, generation/0, cf_refs/0, stream/0, iterator/0]). @@ -124,6 +124,11 @@ open_shard(Shard, Options) -> emqx_ds_storage_layer_sup:ensure_shard(Shard, Options). +-spec drop_shard(shard_id()) -> ok. +drop_shard(Shard) -> + emqx_ds_storage_layer_sup:stop_shard(Shard), + ok = rocksdb:destroy(db_dir(Shard), []). + -spec store_batch(shard_id(), [emqx_types:message()], emqx_ds:message_store_opts()) -> emqx_ds:store_batch_result(). store_batch(Shard, Messages, Options) -> @@ -188,7 +193,7 @@ next(Shard, Iter = #it{generation = GenId, enc = GenIter0}, BatchSize) -> -define(REF(ShardId), {via, gproc, {n, l, {?MODULE, ShardId}}}). --spec start_link(emqx_ds:shard_id(), emqx_ds:create_db_opts()) -> +-spec start_link(shard_id(), emqx_ds:create_db_opts()) -> {ok, pid()}. start_link(Shard, Options) -> gen_server:start_link(?REF(Shard), ?MODULE, {Shard, Options}, []). @@ -303,13 +308,12 @@ commit_metadata(#s{shard_id = ShardId, schema = Schema, shard = Runtime, db = DB -spec rocksdb_open(shard_id(), emqx_ds:create_db_opts()) -> {ok, rocksdb:db_handle(), cf_refs()} | {error, _TODO}. rocksdb_open(Shard, Options) -> - DefaultDir = binary_to_list(Shard), - DBDir = unicode:characters_to_list(maps:get(dir, Options, DefaultDir)), DBOptions = [ {create_if_missing, true}, {create_missing_column_families, true} | maps:get(db_options, Options, []) ], + DBDir = db_dir(Shard), _ = filelib:ensure_dir(DBDir), ExistingCFs = case rocksdb:list_column_families(DBDir, DBOptions) of @@ -331,6 +335,10 @@ rocksdb_open(Shard, Options) -> Error end. +-spec db_dir(shard_id()) -> file:filename(). +db_dir({DB, ShardId}) -> + lists:flatten([atom_to_list(DB), $:, atom_to_list(ShardId)]). 
+ %%-------------------------------------------------------------------------------- %% Schema access %%-------------------------------------------------------------------------------- diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_reference.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_reference.erl index 1fbad5f1b..c0fb29ceb 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_reference.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_reference.erl @@ -98,13 +98,14 @@ make_iterator(_Shard, _Data, #stream{topic_filter = TopicFilter}, StartTime) -> next(_Shard, #s{db = DB, cf = CF}, It0, BatchSize) -> #it{topic_filter = TopicFilter, start_time = StartTime, last_seen_message_key = Key0} = It0, {ok, ITHandle} = rocksdb:iterator(DB, CF, []), - Action = case Key0 of - first -> - first; - _ -> - rocksdb:iterator_move(ITHandle, Key0), - next - end, + Action = + case Key0 of + first -> + first; + _ -> + rocksdb:iterator_move(ITHandle, Key0), + next + end, {Key, Messages} = do_next(TopicFilter, StartTime, ITHandle, Action, BatchSize, Key0, []), rocksdb:iterator_close(ITHandle), It = It0#it{last_seen_message_key = Key}, diff --git a/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl b/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl index df3d64bc3..60671cef7 100644 --- a/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl +++ b/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl @@ -19,7 +19,7 @@ -include_lib("emqx/include/bpapi.hrl"). %% API: --export([open_shard/3, get_streams/4, make_iterator/4, next/4]). +-export([open_shard/3, drop_shard/2, get_streams/4, make_iterator/4, next/4]). %% behavior callbacks: -export([introduced_in/0]). @@ -33,6 +33,11 @@ open_shard(Node, Shard, Opts) -> erpc:call(Node, emqx_ds_replication_layer, do_open_shard_v1, [Shard, Opts]). +-spec drop_shard(node(), emqx_ds_replication_layer:shard()) -> + ok. 
+drop_shard(Node, Shard) -> + erpc:call(Node, emqx_ds_replication_layer, do_drop_shard_v1, [Shard]). + -spec get_streams( node(), emqx_ds_replication_layer:shard(), emqx_ds:topic_filter(), emqx_ds:time() ) -> diff --git a/apps/emqx_durable_storage/test/emqx_ds_SUITE.erl b/apps/emqx_durable_storage/test/emqx_ds_SUITE.erl index effe3b695..eabd03277 100644 --- a/apps/emqx_durable_storage/test/emqx_ds_SUITE.erl +++ b/apps/emqx_durable_storage/test/emqx_ds_SUITE.erl @@ -22,15 +22,18 @@ -include_lib("common_test/include/ct.hrl"). -include_lib("stdlib/include/assert.hrl"). -%% A simple smoke test that verifies that opening the DB doesn't crash -t_00_smoke_open(_Config) -> - ?assertMatch(ok, emqx_ds:open_db(<<"DB1">>, #{})), - ?assertMatch(ok, emqx_ds:open_db(<<"DB1">>, #{})). +%% A simple smoke test that verifies that opening/closing the DB +%% doesn't crash +t_00_smoke_open_drop(_Config) -> + DB = 'DB', + ?assertMatch(ok, emqx_ds:open_db(DB, #{})), + ?assertMatch(ok, emqx_ds:open_db(DB, #{})), + ?assertMatch(ok, emqx_ds:drop_db(DB)). %% A simple smoke test that verifies that storing the messages doesn't %% crash t_01_smoke_store(_Config) -> - DB = <<"default">>, + DB = default, ?assertMatch(ok, emqx_ds:open_db(DB, #{})), Msg = message(<<"foo/bar">>, <<"foo">>, 0), ?assertMatch(ok, emqx_ds:store_batch(DB, [Msg])). @@ -38,7 +41,7 @@ t_01_smoke_store(_Config) -> %% A simple smoke test that verifies that getting the list of streams %% doesn't crash and that iterators can be opened. t_02_smoke_get_streams_start_iter(_Config) -> - DB = <<"default">>, + DB = ?FUNCTION_NAME, ?assertMatch(ok, emqx_ds:open_db(DB, #{})), StartTime = 0, [{Rank, Stream}] = emqx_ds:get_streams(DB, ['#'], StartTime), @@ -48,7 +51,7 @@ t_02_smoke_get_streams_start_iter(_Config) -> %% A simple smoke test that verifies that it's possible to iterate %% over messages. 
t_03_smoke_iterate(_Config) -> - DB = atom_to_binary(?FUNCTION_NAME), + DB = ?FUNCTION_NAME, ?assertMatch(ok, emqx_ds:open_db(DB, #{})), StartTime = 0, Msgs = [ From 2972bf14ee4c67c6794e14f95742efc2d2322bd2 Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Thu, 5 Oct 2023 02:32:23 +0200 Subject: [PATCH 078/155] refactor(ds): Implement create_generation gen_rpc storage layer call --- apps/emqx/src/emqx_persistent_message.erl | 7 +- apps/emqx_durable_storage/src/emqx_ds.erl | 8 +-- .../src/emqx_ds_replication_layer.erl | 4 -- .../src/emqx_ds_storage_layer.erl | 72 +++++++++++++------ .../src/emqx_ds_storage_reference.erl | 2 +- .../test/emqx_ds_SUITE.erl | 31 +++++++- 6 files changed, 85 insertions(+), 39 deletions(-) diff --git a/apps/emqx/src/emqx_persistent_message.erl b/apps/emqx/src/emqx_persistent_message.erl index 8801acce5..82717cd01 100644 --- a/apps/emqx/src/emqx_persistent_message.erl +++ b/apps/emqx/src/emqx_persistent_message.erl @@ -26,6 +26,8 @@ persist/1 ]). +-define(PERSISTENT_MESSAGE_DB, emqx_persistent_message). + %% FIXME -define(WHEN_ENABLED(DO), case is_store_enabled() of @@ -38,7 +40,7 @@ init() -> ?WHEN_ENABLED(begin - ok = emqx_ds:open_db(<<"default">>, #{}), + ok = emqx_ds:open_db(?PERSISTENT_MESSAGE_DB, #{}), ok = emqx_persistent_session_ds_router:init_tables(), %ok = emqx_persistent_session_ds:create_tables(), ok @@ -65,8 +67,9 @@ persist(Msg) -> needs_persistence(Msg) -> not (emqx_message:get_flag(dup, Msg) orelse emqx_message:is_sys(Msg)). +-spec store_message(emqx_types:message()) -> emqx_ds:store_batch_result(). store_message(Msg) -> - emqx_ds:store_batch([Msg]). + emqx_ds:store_batch(?PERSISTENT_MESSAGE_DB, [Msg]). has_subscribers(#message{topic = Topic}) -> emqx_persistent_session_ds_router:has_any_route(Topic). 
diff --git a/apps/emqx_durable_storage/src/emqx_ds.erl b/apps/emqx_durable_storage/src/emqx_ds.erl index 293f2e531..cf4b5a031 100644 --- a/apps/emqx_durable_storage/src/emqx_ds.erl +++ b/apps/emqx_durable_storage/src/emqx_ds.erl @@ -25,7 +25,7 @@ -export([open_db/2, drop_db/1]). %% Message storage API: --export([store_batch/1, store_batch/2, store_batch/3]). +-export([store_batch/2, store_batch/3]). %% Message replay API: -export([get_streams/3, make_iterator/2, next/2]). @@ -89,8 +89,6 @@ -type message_id() :: emqx_ds_replication_layer:message_id(). --define(DEFAULT_DB, <<"default">>). - %%================================================================================ %% API funcions %%================================================================================ @@ -107,10 +105,6 @@ open_db(DB, Opts) -> drop_db(DB) -> emqx_ds_replication_layer:drop_db(DB). --spec store_batch([emqx_types:message()]) -> store_batch_result(). -store_batch(Msgs) -> - store_batch(?DEFAULT_DB, Msgs, #{}). - -spec store_batch(db(), [emqx_types:message()], message_store_opts()) -> store_batch_result(). store_batch(DB, Msgs, Opts) -> emqx_ds_replication_layer:store_batch(DB, Msgs, Opts). diff --git a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl index e1c775d5a..b43604469 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl @@ -196,10 +196,6 @@ do_next_v1(Shard, Iter, BatchSize) -> %% Internal functions %%================================================================================ -add_shard_to_rank(Shard, RankY) -> - RankX = erlang:phash2(Shard, 255), - {RankX, RankY}. - shard_id(DB, Node) -> %% TODO: don't bake node name into the schema, don't repeat the %% Mnesia's 1M$ mistake. 
diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl index d531c5985..e9d4edc06 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl @@ -24,10 +24,10 @@ -export([start_link/2, init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2]). %% internal exports: --export([drop_shard/1]). - -export_type([gen_id/0, generation/0, cf_refs/0, stream/0, iterator/0]). +-include_lib("snabbkaffe/include/snabbkaffe.hrl"). + %%================================================================================ %% Type declarations %%================================================================================ @@ -79,9 +79,11 @@ %%%% Shard: -type shard(GenData) :: #{ + %% ID of the current generation (where the new data is written:) current_generation := gen_id(), - default_generation_module := module(), - default_generation_config := term(), + %% This data is used to create new generation: + prototype := {module(), term()}, + %% Generations: {generation, gen_id()} => GenData }. @@ -206,6 +208,9 @@ start_link(Shard, Options) -> shard :: shard() }). +%% Note: we specify gen_server requests as records to make use of Dialyzer: +-record(call_create_generation, {since :: emqx_ds:time()}). + -type server_state() :: #s{}. -define(DEFAULT_CF, "default"). @@ -213,6 +218,7 @@ start_link(Shard, Options) -> init({ShardId, Options}) -> process_flag(trap_exit, true), + logger:set_process_metadata(#{shard_id => ShardId, domain => [ds, storage_layer, shard]}), erase_schema_runtime(ShardId), {ok, DB, CFRefs0} = rocksdb_open(ShardId, Options), {Schema, CFRefs} = @@ -233,13 +239,10 @@ init({ShardId, Options}) -> commit_metadata(S), {ok, S}. 
-%% handle_call({create_generation, Since, Config}, _From, S) -> -%% case create_new_gen(Since, Config, S) of -%% {ok, GenId, NS} -> -%% {reply, {ok, GenId}, NS}; -%% {error, _} = Error -> -%% {reply, Error, S} -%% end; +handle_call(#call_create_generation{since = Since}, _From, S0) -> + S = add_generation(S0, Since), + commit_metadata(S), + {reply, ok, S}; handle_call(_Call, _From, S) -> {reply, {error, unknown_call}, S}. @@ -275,29 +278,52 @@ open_shard(ShardId, DB, CFRefs, ShardSchema) -> ShardSchema ). +-spec add_generation(server_state(), emqx_ds:time()) -> server_state(). +add_generation(S0, Since) -> + #s{shard_id = ShardId, db = DB, schema = Schema0, shard = Shard0, cf_refs = CFRefs0} = S0, + {GenId, Schema, NewCFRefs} = new_generation(ShardId, DB, Schema0, Since), + CFRefs = NewCFRefs ++ CFRefs0, + Key = {generation, GenId}, + Generation = open_generation(ShardId, DB, CFRefs, GenId, maps:get(Key, Schema)), + Shard = Shard0#{Key => Generation}, + S0#s{ + cf_refs = CFRefs, + schema = Schema, + shard = Shard + }. + -spec open_generation(shard_id(), rocksdb:db_handle(), cf_refs(), gen_id(), generation_schema()) -> generation(). open_generation(ShardId, DB, CFRefs, GenId, GenSchema) -> + ?tp(debug, ds_open_generation, #{gen_id => GenId, schema => GenSchema}), #{module := Mod, data := Schema} = GenSchema, RuntimeData = Mod:open(ShardId, DB, GenId, CFRefs, Schema), GenSchema#{data => RuntimeData}. -spec create_new_shard_schema(shard_id(), rocksdb:db_handle(), cf_refs(), _Options) -> {shard_schema(), cf_refs()}. 
-create_new_shard_schema(ShardId, DB, CFRefs, _Options) -> - GenId = 1, - %% TODO: read from options/config - Mod = emqx_ds_storage_reference, - ModConfig = #{}, - {GenData, NewCFRefs} = Mod:create(ShardId, DB, GenId, ModConfig), - GenSchema = #{module => Mod, data => GenData, since => 0, until => undefined}, - ShardSchema = #{ +create_new_shard_schema(ShardId, DB, CFRefs, Options) -> + ?tp(notice, ds_create_new_shard_schema, #{shard => ShardId, options => Options}), + %% TODO: read prototype from options/config + Schema0 = #{ + current_generation => 0, + prototype => {emqx_ds_storage_reference, #{}} + }, + {_NewGenId, Schema, NewCFRefs} = new_generation(ShardId, DB, Schema0, _Since = 0), + {Schema, NewCFRefs ++ CFRefs}. + +-spec new_generation(shard_id(), rocksdb:db_handle(), shard_schema(), emqx_ds:time()) -> + {gen_id(), shard_schema(), cf_refs()}. +new_generation(ShardId, DB, Schema0, Since) -> + #{current_generation := PrevGenId, prototype := {Mod, ModConf}} = Schema0, + GenId = PrevGenId + 1, + {GenData, NewCFRefs} = Mod:create(ShardId, DB, GenId, ModConf), + GenSchema = #{module => Mod, data => GenData, since => Since, until => undefined}, + Schema = Schema0#{ current_generation => GenId, - default_generation_module => Mod, - default_generation_confg => ModConfig, {generation, GenId} => GenSchema }, - {ShardSchema, NewCFRefs ++ CFRefs}. + {GenId, Schema, NewCFRefs}. %% @doc Commit current state of the server to both rocksdb and the persistent term -spec commit_metadata(server_state()) -> ok. 
@@ -393,7 +419,7 @@ get_schema_persistent(DB) -> {ok, Blob} -> Schema = binary_to_term(Blob), %% Sanity check: - #{current_generation := _, default_generation_module := _} = Schema, + #{current_generation := _, prototype := _} = Schema, Schema; not_found -> not_found diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_reference.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_reference.erl index c0fb29ceb..fd480eeab 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_reference.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_reference.erl @@ -103,7 +103,7 @@ next(_Shard, #s{db = DB, cf = CF}, It0, BatchSize) -> first -> first; _ -> - rocksdb:iterator_move(ITHandle, Key0), + _ = rocksdb:iterator_move(ITHandle, Key0), next end, {Key, Messages} = do_next(TopicFilter, StartTime, ITHandle, Action, BatchSize, Key0, []), diff --git a/apps/emqx_durable_storage/test/emqx_ds_SUITE.erl b/apps/emqx_durable_storage/test/emqx_ds_SUITE.erl index eabd03277..1935e41cf 100644 --- a/apps/emqx_durable_storage/test/emqx_ds_SUITE.erl +++ b/apps/emqx_durable_storage/test/emqx_ds_SUITE.erl @@ -21,9 +21,10 @@ -include_lib("emqx/include/emqx.hrl"). -include_lib("common_test/include/ct.hrl"). -include_lib("stdlib/include/assert.hrl"). +-include_lib("snabbkaffe/include/snabbkaffe.hrl"). %% A simple smoke test that verifies that opening/closing the DB -%% doesn't crash +%% doesn't crash, and not much else t_00_smoke_open_drop(_Config) -> DB = 'DB', ?assertMatch(ok, emqx_ds:open_db(DB, #{})), @@ -65,6 +66,32 @@ t_03_smoke_iterate(_Config) -> {ok, Iter, Batch} = iterate(Iter0, 1), ?assertEqual(Msgs, Batch, {Iter0, Iter}). +%% Verify that iterators survive restart of the application. This is +%% an important property, since the lifetime of the iterators is tied +%% to the external resources, such as clients' sessions, and they +%% should always be able to continue replaying the topics from where +%% they are left off. 
+t_04_restart(_Config) -> + DB = ?FUNCTION_NAME, + ?assertMatch(ok, emqx_ds:open_db(DB, #{})), + StartTime = 0, + Msgs = [ + message(<<"foo/bar">>, <<"1">>, 0), + message(<<"foo">>, <<"2">>, 1), + message(<<"bar/bar">>, <<"3">>, 2) + ], + ?assertMatch(ok, emqx_ds:store_batch(DB, Msgs)), + [{_, Stream}] = emqx_ds:get_streams(DB, ['#'], StartTime), + {ok, Iter0} = emqx_ds:make_iterator(Stream, StartTime), + %% Restart the application: + ?tp(warning, emqx_ds_SUITE_restart_app, #{}), + ok = application:stop(emqx_durable_storage), + {ok, _} = application:ensure_all_started(emqx_durable_storage), + ok = emqx_ds:open_db(DB, #{}), + %% The old iterator should be still operational: + {ok, Iter, Batch} = iterate(Iter0, 1), + ?assertEqual(Msgs, Batch, {Iter0, Iter}). + message(Topic, Payload, PublishedAt) -> #message{ topic = Topic, @@ -102,7 +129,7 @@ end_per_suite(Config) -> ok. init_per_testcase(_TC, Config) -> - snabbkaffe:fix_ct_logging(), + %% snabbkaffe:fix_ct_logging(), application:ensure_all_started(emqx_durable_storage), Config. 
From 903b3863d1fe0e627f547b3db06db3bed9f8388b Mon Sep 17 00:00:00 2001 From: Thales Macedo Garitezi Date: Thu, 5 Oct 2023 17:17:08 -0300 Subject: [PATCH 079/155] chore(ps_ds): make persistent session module use new `emqx_ds` APIs --- .../emqx_persistent_session_ds_SUITE.erl | 125 +++----- apps/emqx/src/emqx_persistent_message.erl | 2 +- ...ds.erl_ => emqx_persistent_session_ds.erl} | 286 +++++++++--------- .../test/emqx_persistent_messages_SUITE.erl | 66 ++-- .../src/emqx_ds_replication_layer.erl | 10 +- 5 files changed, 203 insertions(+), 286 deletions(-) rename apps/emqx/src/{emqx_persistent_session_ds.erl_ => emqx_persistent_session_ds.erl} (66%) diff --git a/apps/emqx/integration_test/emqx_persistent_session_ds_SUITE.erl b/apps/emqx/integration_test/emqx_persistent_session_ds_SUITE.erl index d2d23e8cd..ee5d203e4 100644 --- a/apps/emqx/integration_test/emqx_persistent_session_ds_SUITE.erl +++ b/apps/emqx/integration_test/emqx_persistent_session_ds_SUITE.erl @@ -14,7 +14,6 @@ -define(DEFAULT_KEYSPACE, default). -define(DS_SHARD_ID, <<"local">>). -define(DS_SHARD, {?DEFAULT_KEYSPACE, ?DS_SHARD_ID}). --define(ITERATOR_REF_TAB, emqx_ds_iterator_ref). -import(emqx_common_test_helpers, [on_exit/1]). @@ -91,9 +90,6 @@ get_mqtt_port(Node, Type) -> {_IP, Port} = erpc:call(Node, emqx_config, get, [[listeners, Type, default, bind]]), Port. -get_all_iterator_refs(Node) -> - erpc:call(Node, mnesia, dirty_all_keys, [?ITERATOR_REF_TAB]). - get_all_iterator_ids(Node) -> Fn = fun(K, _V, Acc) -> [K | Acc] end, erpc:call(Node, fun() -> @@ -126,6 +122,32 @@ start_client(Opts0 = #{}) -> on_exit(fun() -> catch emqtt:stop(Client) end), Client. 
+restart_node(Node, NodeSpec) -> + ?tp(will_restart_node, #{}), + ?tp(notice, "restarting node", #{node => Node}), + true = monitor_node(Node, true), + ok = erpc:call(Node, init, restart, []), + receive + {nodedown, Node} -> + ok + after 10_000 -> + ct:fail("node ~p didn't stop", [Node]) + end, + ?tp(notice, "waiting for nodeup", #{node => Node}), + wait_nodeup(Node), + wait_gen_rpc_down(NodeSpec), + ?tp(notice, "restarting apps", #{node => Node}), + Apps = maps:get(apps, NodeSpec), + ok = erpc:call(Node, emqx_cth_suite, load_apps, [Apps]), + _ = erpc:call(Node, emqx_cth_suite, start_apps, [Apps, NodeSpec]), + %% have to re-inject this so that we may stop the node succesfully at the + %% end.... + ok = emqx_cth_cluster:set_node_opts(Node, NodeSpec), + ok = snabbkaffe:forward_trace(Node), + ?tp(notice, "node restarted", #{node => Node}), + ?tp(restarted_node, #{}), + ok. + %%------------------------------------------------------------------------------ %% Testcases %%------------------------------------------------------------------------------ @@ -143,24 +165,14 @@ t_non_persistent_session_subscription(_Config) -> {ok, _} = emqtt:connect(Client), ?tp(notice, "subscribing", #{}), {ok, _, [?RC_GRANTED_QOS_2]} = emqtt:subscribe(Client, SubTopicFilter, qos2), - IteratorRefs = get_all_iterator_refs(node()), - IteratorIds = get_all_iterator_ids(node()), ok = emqtt:stop(Client), - #{ - iterator_refs => IteratorRefs, - iterator_ids => IteratorIds - } + ok end, - fun(Res, Trace) -> + fun(Trace) -> ct:pal("trace:\n ~p", [Trace]), - #{ - iterator_refs := IteratorRefs, - iterator_ids := IteratorIds - } = Res, - ?assertEqual([], IteratorRefs), - ?assertEqual({ok, []}, IteratorIds), + ?assertEqual([], ?of_kind(ds_session_subscription_added, Trace)), ok end ), @@ -175,7 +187,7 @@ t_session_subscription_idempotency(Config) -> ?check_trace( begin ?force_ordering( - #{?snk_kind := persistent_session_ds_iterator_added}, + #{?snk_kind := persistent_session_ds_subscription_added}, 
_NEvents0 = 1, #{?snk_kind := will_restart_node}, _Guard0 = true @@ -187,32 +199,7 @@ t_session_subscription_idempotency(Config) -> _Guard1 = true ), - spawn_link(fun() -> - ?tp(will_restart_node, #{}), - ?tp(notice, "restarting node", #{node => Node1}), - true = monitor_node(Node1, true), - ok = erpc:call(Node1, init, restart, []), - receive - {nodedown, Node1} -> - ok - after 10_000 -> - ct:fail("node ~p didn't stop", [Node1]) - end, - ?tp(notice, "waiting for nodeup", #{node => Node1}), - wait_nodeup(Node1), - wait_gen_rpc_down(Node1Spec), - ?tp(notice, "restarting apps", #{node => Node1}), - Apps = maps:get(apps, Node1Spec), - ok = erpc:call(Node1, emqx_cth_suite, load_apps, [Apps]), - _ = erpc:call(Node1, emqx_cth_suite, start_apps, [Apps, Node1Spec]), - %% have to re-inject this so that we may stop the node succesfully at the - %% end.... - ok = emqx_cth_cluster:set_node_opts(Node1, Node1Spec), - ok = snabbkaffe:forward_trace(Node1), - ?tp(notice, "node restarted", #{node => Node1}), - ?tp(restarted_node, #{}), - ok - end), + spawn_link(fun() -> restart_node(Node1, Node1Spec) end), ?tp(notice, "starting 1", #{}), Client0 = start_client(#{port => Port, clientid => ClientId}), @@ -223,7 +210,7 @@ t_session_subscription_idempotency(Config) -> receive {'EXIT', {shutdown, _}} -> ok - after 0 -> ok + after 100 -> ok end, process_flag(trap_exit, false), @@ -240,10 +227,7 @@ t_session_subscription_idempotency(Config) -> end, fun(Trace) -> ct:pal("trace:\n ~p", [Trace]), - %% Exactly one iterator should have been opened. 
SubTopicFilterWords = emqx_topic:words(SubTopicFilter), - ?assertEqual([{ClientId, SubTopicFilterWords}], get_all_iterator_refs(Node1)), - ?assertMatch({ok, [_]}, get_all_iterator_ids(Node1)), ?assertMatch( {ok, #{}, #{SubTopicFilterWords := #{}}}, erpc:call(Node1, emqx_persistent_session_ds, session_open, [ClientId]) @@ -262,7 +246,10 @@ t_session_unsubscription_idempotency(Config) -> ?check_trace( begin ?force_ordering( - #{?snk_kind := persistent_session_ds_close_iterators, ?snk_span := {complete, _}}, + #{ + ?snk_kind := persistent_session_ds_subscription_delete, + ?snk_span := {complete, _} + }, _NEvents0 = 1, #{?snk_kind := will_restart_node}, _Guard0 = true @@ -270,36 +257,11 @@ t_session_unsubscription_idempotency(Config) -> ?force_ordering( #{?snk_kind := restarted_node}, _NEvents1 = 1, - #{?snk_kind := persistent_session_ds_iterator_delete, ?snk_span := start}, + #{?snk_kind := persistent_session_ds_subscription_route_delete, ?snk_span := start}, _Guard1 = true ), - spawn_link(fun() -> - ?tp(will_restart_node, #{}), - ?tp(notice, "restarting node", #{node => Node1}), - true = monitor_node(Node1, true), - ok = erpc:call(Node1, init, restart, []), - receive - {nodedown, Node1} -> - ok - after 10_000 -> - ct:fail("node ~p didn't stop", [Node1]) - end, - ?tp(notice, "waiting for nodeup", #{node => Node1}), - wait_nodeup(Node1), - wait_gen_rpc_down(Node1Spec), - ?tp(notice, "restarting apps", #{node => Node1}), - Apps = maps:get(apps, Node1Spec), - ok = erpc:call(Node1, emqx_cth_suite, load_apps, [Apps]), - _ = erpc:call(Node1, emqx_cth_suite, start_apps, [Apps, Node1Spec]), - %% have to re-inject this so that we may stop the node succesfully at the - %% end.... 
- ok = emqx_cth_cluster:set_node_opts(Node1, Node1Spec), - ok = snabbkaffe:forward_trace(Node1), - ?tp(notice, "node restarted", #{node => Node1}), - ?tp(restarted_node, #{}), - ok - end), + spawn_link(fun() -> restart_node(Node1, Node1Spec) end), ?tp(notice, "starting 1", #{}), Client0 = start_client(#{port => Port, clientid => ClientId}), @@ -312,7 +274,7 @@ t_session_unsubscription_idempotency(Config) -> receive {'EXIT', {shutdown, _}} -> ok - after 0 -> ok + after 100 -> ok end, process_flag(trap_exit, false), @@ -327,7 +289,7 @@ t_session_unsubscription_idempotency(Config) -> ?wait_async_action( emqtt:unsubscribe(Client1, SubTopicFilter), #{ - ?snk_kind := persistent_session_ds_iterator_delete, + ?snk_kind := persistent_session_ds_subscription_route_delete, ?snk_span := {complete, _} }, 15_000 @@ -339,9 +301,10 @@ t_session_unsubscription_idempotency(Config) -> end, fun(Trace) -> ct:pal("trace:\n ~p", [Trace]), - %% No iterators remaining - ?assertEqual([], get_all_iterator_refs(Node1)), - ?assertEqual({ok, []}, get_all_iterator_ids(Node1)), + ?assertMatch( + {ok, #{}, Subs = #{}} when map_size(Subs) =:= 0, + erpc:call(Node1, emqx_persistent_session_ds, session_open, [ClientId]) + ), ok end ), diff --git a/apps/emqx/src/emqx_persistent_message.erl b/apps/emqx/src/emqx_persistent_message.erl index 82717cd01..f3ec9def5 100644 --- a/apps/emqx/src/emqx_persistent_message.erl +++ b/apps/emqx/src/emqx_persistent_message.erl @@ -42,7 +42,7 @@ init() -> ?WHEN_ENABLED(begin ok = emqx_ds:open_db(?PERSISTENT_MESSAGE_DB, #{}), ok = emqx_persistent_session_ds_router:init_tables(), - %ok = emqx_persistent_session_ds:create_tables(), + ok = emqx_persistent_session_ds:create_tables(), ok end). 
diff --git a/apps/emqx/src/emqx_persistent_session_ds.erl_ b/apps/emqx/src/emqx_persistent_session_ds.erl similarity index 66% rename from apps/emqx/src/emqx_persistent_session_ds.erl_ rename to apps/emqx/src/emqx_persistent_session_ds.erl index 3fff5f7ba..9bc9e0b91 100644 --- a/apps/emqx/src/emqx_persistent_session_ds.erl_ +++ b/apps/emqx/src/emqx_persistent_session_ds.erl @@ -65,10 +65,13 @@ %% Currently, this is the clientid. We avoid `emqx_types:clientid()' because that can be %% an atom, in theory (?). -type id() :: binary(). --type iterator() :: emqx_ds:iterator(). --type iterator_id() :: emqx_ds:iterator_id(). -type topic_filter() :: emqx_ds:topic_filter(). --type iterators() :: #{topic_filter() => iterator()}. +-type subscription_id() :: {id(), topic_filter()}. +-type subscription() :: #{ + start_time := emqx_ds:time(), + propts := map(), + extra := map() +}. -type session() :: #{ %% Client ID id := id(), @@ -77,7 +80,7 @@ %% When the session should expire expires_at := timestamp() | never, %% Client’s Subscriptions. - iterators := #{topic() => iterator()}, + iterators := #{topic() => subscription()}, %% props := map() }. @@ -90,6 +93,8 @@ -export_type([id/0]). +-define(PERSISTENT_MESSAGE_DB, emqx_persistent_message). + %% -spec create(clientinfo(), conninfo(), emqx_session:conf()) -> @@ -121,17 +126,17 @@ ensure_session(ClientID, Conf) -> open_session(ClientID) -> case session_open(ClientID) of - {ok, Session, Iterators} -> - Session#{iterators => prep_iterators(Iterators)}; + {ok, Session, Subscriptions} -> + Session#{iterators => prep_subscriptions(Subscriptions)}; false -> false end. -prep_iterators(Iterators) -> +prep_subscriptions(Subscriptions) -> maps:fold( - fun(Topic, Iterator, Acc) -> Acc#{emqx_topic:join(Topic) => Iterator} end, + fun(Topic, Subscription, Acc) -> Acc#{emqx_topic:join(Topic) => Subscription} end, #{}, - Iterators + Subscriptions ). -spec destroy(session() | clientinfo()) -> ok. 
@@ -228,7 +233,7 @@ unsubscribe( ) when is_map_key(TopicFilter, Iters) -> Iterator = maps:get(TopicFilter, Iters), SubOpts = maps:get(props, Iterator), - ok = del_subscription(TopicFilter, Iterator, ID), + ok = del_subscription(TopicFilter, ID), {ok, Session#{iterators := maps:remove(TopicFilter, Iters)}, SubOpts}; unsubscribe( _TopicFilter, @@ -327,91 +332,67 @@ terminate(_Reason, _Session = #{}) -> %%-------------------------------------------------------------------- -spec add_subscription(topic(), emqx_types:subopts(), id()) -> - emqx_ds:iterator(). + subscription(). add_subscription(TopicFilterBin, SubOpts, DSSessionID) -> - % N.B.: we chose to update the router before adding the subscription to the - % session/iterator table. The reasoning for this is as follows: - % - % Messages matching this topic filter should start to be persisted as soon as - % possible to avoid missing messages. If this is the first such persistent - % session subscription, it's important to do so early on. - % - % This could, in turn, lead to some inconsistency: if such a route gets - % created but the session/iterator data fails to be updated accordingly, we - % have a dangling route. To remove such dangling routes, we may have a - % periodic GC process that removes routes that do not have a matching - % persistent subscription. Also, route operations use dirty mnesia - % operations, which inherently have room for inconsistencies. - % - % In practice, we use the iterator reference table as a source of truth, - % since it is guarded by a transaction context: we consider a subscription - % operation to be successful if it ended up changing this table. Both router - % and iterator information can be reconstructed from this table, if needed. + %% N.B.: we chose to update the router before adding the subscription to the + %% session/iterator table. 
The reasoning for this is as follows: + %% + %% Messages matching this topic filter should start to be persisted as soon as + %% possible to avoid missing messages. If this is the first such persistent + %% session subscription, it's important to do so early on. + %% + %% This could, in turn, lead to some inconsistency: if such a route gets + %% created but the session/iterator data fails to be updated accordingly, we + %% have a dangling route. To remove such dangling routes, we may have a + %% periodic GC process that removes routes that do not have a matching + %% persistent subscription. Also, route operations use dirty mnesia + %% operations, which inherently have room for inconsistencies. + %% + %% In practice, we use the iterator reference table as a source of truth, + %% since it is guarded by a transaction context: we consider a subscription + %% operation to be successful if it ended up changing this table. Both router + %% and iterator information can be reconstructed from this table, if needed. ok = emqx_persistent_session_ds_router:do_add_route(TopicFilterBin, DSSessionID), TopicFilter = emqx_topic:words(TopicFilterBin), - {ok, Iterator, IsNew} = session_add_iterator( + {ok, DSSubExt, IsNew} = session_add_subscription( DSSessionID, TopicFilter, SubOpts ), - Ctx = #{iterator => Iterator, is_new => IsNew}, - ?tp(persistent_session_ds_iterator_added, Ctx), + ?tp(persistent_session_ds_subscription_added, #{sub => DSSubExt, is_new => IsNew}), + %% we'll list streams and open iterators when implementing message replay. + DSSubExt. + +-spec update_subscription(topic(), subscription(), emqx_types:subopts(), id()) -> + subscription(). +update_subscription(TopicFilterBin, DSSubExt, SubOpts, DSSessionID) -> + TopicFilter = emqx_topic:words(TopicFilterBin), + {ok, NDSSubExt, false} = session_add_subscription( + DSSessionID, TopicFilter, SubOpts + ), + ok = ?tp(persistent_session_ds_iterator_updated, #{sub => DSSubExt}), + NDSSubExt. 
+ +-spec del_subscription(topic(), id()) -> + ok. +del_subscription(TopicFilterBin, DSSessionId) -> + TopicFilter = emqx_topic:words(TopicFilterBin), ?tp_span( - persistent_session_ds_open_iterators, - Ctx, - ok = open_iterator_on_all_shards(TopicFilter, Iterator) + persistent_session_ds_subscription_delete, + #{session_id => DSSessionId}, + ok = session_del_subscription(DSSessionId, TopicFilter) ), - Iterator. - --spec update_subscription(topic(), iterator(), emqx_types:subopts(), id()) -> - iterator(). -update_subscription(TopicFilterBin, Iterator, SubOpts, DSSessionID) -> - TopicFilter = emqx_topic:words(TopicFilterBin), - {ok, NIterator, false} = session_add_iterator( - DSSessionID, TopicFilter, SubOpts - ), - ok = ?tp(persistent_session_ds_iterator_updated, #{iterator => Iterator}), - NIterator. - --spec open_iterator_on_all_shards(emqx_types:words(), emqx_ds:iterator()) -> ok. -open_iterator_on_all_shards(TopicFilter, Iterator) -> - ?tp(persistent_session_ds_will_open_iterators, #{iterator => Iterator}), - %% Note: currently, shards map 1:1 to nodes, but this will change in the future. - Nodes = emqx:running_nodes(), - Results = emqx_persistent_session_ds_proto_v1:open_iterator( - Nodes, - TopicFilter, - maps:get(start_time, Iterator), - maps:get(id, Iterator) - ), - %% TODO - %% 1. Handle errors. - %% 2. Iterator handles are rocksdb resources, it's doubtful they survive RPC. - %% Even if they do, we throw them away here anyway. All in all, we probably should - %% hold each of them in a process on the respective node. - true = lists:all(fun(Res) -> element(1, Res) =:= ok end, Results), - ok. - -%% RPC target. --spec do_open_iterator(emqx_types:words(), emqx_ds:time(), emqx_ds:iterator_id()) -> - {ok, emqx_ds_storage_layer:iterator()} | {error, _Reason}. -do_open_iterator(TopicFilter, StartMS, _IteratorID) -> - %% TODO: wrong - {ok, emqx_ds:make_iterator(TopicFilter, StartMS)}. - --spec del_subscription(topic(), iterator(), id()) -> - ok. 
-del_subscription(TopicFilterBin, #{id := IteratorID}, DSSessionID) -> - % N.B.: see comments in `?MODULE:add_subscription' for a discussion about the - % order of operations here. - TopicFilter = emqx_topic:words(TopicFilterBin), - ok = emqx_persistent_session_ds_router:do_delete_route(TopicFilterBin, DSSessionID). + ?tp_span( + persistent_session_ds_subscription_route_delete, + #{session_id => DSSessionId}, + ok = emqx_persistent_session_ds_router:do_delete_route(TopicFilterBin, DSSessionId) + ). %%-------------------------------------------------------------------- %% Session tables operations %%-------------------------------------------------------------------- -define(SESSION_TAB, emqx_ds_session). --define(ITERATOR_REF_TAB, emqx_ds_iterator_ref). --define(DS_MRIA_SHARD, emqx_ds_shard). +-define(SESSION_SUBSCRIPTIONS_TAB, emqx_ds_session_subscriptions). +-define(DS_MRIA_SHARD, emqx_ds_session_shard). -record(session, { %% same as clientid @@ -423,12 +404,13 @@ del_subscription(TopicFilterBin, #{id := IteratorID}, DSSessionID) -> props = #{} :: map() }). --record(iterator_ref, { - ref_id :: {id(), emqx_ds:topic_filter()}, - it_id :: emqx_ds:iterator_id(), +-record(ds_sub, { + id :: subscription_id(), start_time :: emqx_ds:time(), - props = #{} :: map() + props = #{} :: map(), + extra = #{} :: map() }). +-type ds_sub() :: #ds_sub{}. create_tables() -> ok = mria:create_table( @@ -442,15 +424,16 @@ create_tables() -> ] ), ok = mria:create_table( - ?ITERATOR_REF_TAB, + ?SESSION_SUBSCRIPTIONS_TAB, [ {rlog_shard, ?DS_MRIA_SHARD}, {type, ordered_set}, {storage, storage()}, - {record_name, iterator_ref}, - {attributes, record_info(fields, iterator_ref)} + {record_name, ds_sub}, + {attributes, record_info(fields, ds_sub)} ] ), + ok = mria:wait_for_tables([?SESSION_TAB, ?SESSION_SUBSCRIPTIONS_TAB]), ok. -dialyzer({nowarn_function, storage/0}). @@ -471,26 +454,26 @@ storage() -> %% Note: session API doesn't handle session takeovers, it's the job of %% the broker. 
-spec session_open(id()) -> - {ok, session(), iterators()} | false. + {ok, session(), #{topic() => subscription()}} | false. session_open(SessionId) -> transaction(fun() -> case mnesia:read(?SESSION_TAB, SessionId, write) of [Record = #session{}] -> - Session = export_record(Record), - IteratorRefs = session_read_iterators(SessionId), - Iterators = export_iterators(IteratorRefs), - {ok, Session, Iterators}; + Session = export_session(Record), + DSSubs = session_read_subscriptions(SessionId), + Subscriptions = export_subscriptions(DSSubs), + {ok, Session, Subscriptions}; [] -> false end end). -spec session_ensure_new(id(), _Props :: map()) -> - {ok, session(), iterators()}. + {ok, session(), #{topic() => subscription()}}. session_ensure_new(SessionId, Props) -> transaction(fun() -> - ok = session_drop_iterators(SessionId), - Session = export_record(session_create(SessionId, Props)), + ok = session_drop_subscriptions(SessionId), + Session = export_session(session_create(SessionId, Props)), {ok, Session, #{}} end). @@ -510,80 +493,80 @@ session_create(SessionId, Props) -> session_drop(DSSessionId) -> transaction(fun() -> %% TODO: ensure all iterators from this clientid are closed? - ok = session_drop_iterators(DSSessionId), + ok = session_drop_subscriptions(DSSessionId), ok = mnesia:delete(?SESSION_TAB, DSSessionId, write) end). -session_drop_iterators(DSSessionId) -> - IteratorRefs = session_read_iterators(DSSessionId), - ok = lists:foreach(fun session_del_iterator/1, IteratorRefs). +session_drop_subscriptions(DSSessionId) -> + IteratorRefs = session_read_subscriptions(DSSessionId), + ok = lists:foreach(fun session_del_subscription/1, IteratorRefs). %% @doc Called when a client subscribes to a topic. Idempotent. --spec session_add_iterator(id(), topic_filter(), _Props :: map()) -> - {ok, iterator(), _IsNew :: boolean()}. 
-session_add_iterator(DSSessionId, TopicFilter, Props) -> - IteratorRefId = {DSSessionId, TopicFilter}, +-spec session_add_subscription(id(), topic_filter(), _Props :: map()) -> + {ok, subscription(), _IsNew :: boolean()}. +session_add_subscription(DSSessionId, TopicFilter, Props) -> + DSSubId = {DSSessionId, TopicFilter}, transaction(fun() -> - case mnesia:read(?ITERATOR_REF_TAB, IteratorRefId, write) of + case mnesia:read(?SESSION_SUBSCRIPTIONS_TAB, DSSubId, write) of [] -> - IteratorRef = session_insert_iterator(DSSessionId, TopicFilter, Props), - Iterator = export_record(IteratorRef), + DSSub = session_insert_subscription(DSSessionId, TopicFilter, Props), + DSSubExt = export_subscription(DSSub), ?tp( ds_session_subscription_added, - #{iterator => Iterator, session_id => DSSessionId} + #{sub => DSSubExt, session_id => DSSessionId} ), - {ok, Iterator, _IsNew = true}; - [#iterator_ref{} = IteratorRef] -> - NIteratorRef = session_update_iterator(IteratorRef, Props), - NIterator = export_record(NIteratorRef), + {ok, DSSubExt, _IsNew = true}; + [#ds_sub{} = DSSub] -> + NDSSub = session_update_subscription(DSSub, Props), + NDSSubExt = export_subscription(NDSSub), ?tp( ds_session_subscription_present, - #{iterator => NIterator, session_id => DSSessionId} + #{sub => NDSSubExt, session_id => DSSessionId} ), - {ok, NIterator, _IsNew = false} + {ok, NDSSubExt, _IsNew = false} end end). -session_insert_iterator(DSSessionId, TopicFilter, Props) -> - {IteratorId, StartMS} = new_iterator_id(DSSessionId), - IteratorRef = #iterator_ref{ - ref_id = {DSSessionId, TopicFilter}, - it_id = IteratorId, +-spec session_insert_subscription(id(), topic_filter(), map()) -> ds_sub(). 
+session_insert_subscription(DSSessionId, TopicFilter, Props) -> + {DSSubId, StartMS} = new_subscription_id(DSSessionId, TopicFilter), + DSSub = #ds_sub{ + id = DSSubId, start_time = StartMS, - props = Props + props = Props, + extra = #{} }, - ok = mnesia:write(?ITERATOR_REF_TAB, IteratorRef, write), - IteratorRef. + ok = mnesia:write(?SESSION_SUBSCRIPTIONS_TAB, DSSub, write), + DSSub. -session_update_iterator(IteratorRef, Props) -> - NIteratorRef = IteratorRef#iterator_ref{props = Props}, - ok = mnesia:write(?ITERATOR_REF_TAB, NIteratorRef, write), - NIteratorRef. +-spec session_update_subscription(ds_sub(), map()) -> ds_sub(). +session_update_subscription(DSSub, Props) -> + NDSSub = DSSub#ds_sub{props = Props}, + ok = mnesia:write(?SESSION_SUBSCRIPTIONS_TAB, NDSSub, write), + NDSSub. -%% @doc Called when a client unsubscribes from a topic. --spec session_del_iterator(id(), topic_filter()) -> ok. -session_del_iterator(DSSessionId, TopicFilter) -> - IteratorRefId = {DSSessionId, TopicFilter}, +session_del_subscription(DSSessionId, TopicFilter) -> + DSSubId = {DSSessionId, TopicFilter}, transaction(fun() -> - mnesia:delete(?ITERATOR_REF_TAB, IteratorRefId, write) + mnesia:delete(?SESSION_SUBSCRIPTIONS_TAB, DSSubId, write) end). -session_del_iterator(#iterator_ref{ref_id = IteratorRefId}) -> - mnesia:delete(?ITERATOR_REF_TAB, IteratorRefId, write). +session_del_subscription(#ds_sub{id = DSSubId}) -> + mnesia:delete(?SESSION_SUBSCRIPTIONS_TAB, DSSubId, write). -session_read_iterators(DSSessionId) -> +session_read_subscriptions(DSSessionId) -> % NOTE: somewhat convoluted way to trick dialyzer - Pat = erlang:make_tuple(record_info(size, iterator_ref), '_', [ - {1, iterator_ref}, - {#iterator_ref.ref_id, {DSSessionId, '_'}} + Pat = erlang:make_tuple(record_info(size, ds_sub), '_', [ + {1, ds_sub}, + {#ds_sub.id, {DSSessionId, '_'}} ]), - mnesia:match_object(?ITERATOR_REF_TAB, Pat, read). + mnesia:match_object(?SESSION_SUBSCRIPTIONS_TAB, Pat, read). 
--spec new_iterator_id(id()) -> {iterator_id(), emqx_ds:time()}. -new_iterator_id(DSSessionId) -> +-spec new_subscription_id(id(), topic_filter()) -> {subscription_id(), emqx_ds:time()}. +new_subscription_id(DSSessionId, TopicFilter) -> NowMS = erlang:system_time(microsecond), - IteratorId = <>, - {IteratorId, NowMS}. + DSSubId = {DSSessionId, TopicFilter}, + {DSSubId, NowMS}. %%-------------------------------------------------------------------------------- @@ -593,19 +576,20 @@ transaction(Fun) -> %%-------------------------------------------------------------------------------- -export_iterators(IteratorRefs) -> +export_subscriptions(DSSubs) -> lists:foldl( - fun(IteratorRef = #iterator_ref{ref_id = {_DSSessionId, TopicFilter}}, Acc) -> - Acc#{TopicFilter => export_record(IteratorRef)} + fun(DSSub = #ds_sub{id = {_DSSessionId, TopicFilter}}, Acc) -> + Acc#{TopicFilter => export_subscription(DSSub)} end, #{}, - IteratorRefs + DSSubs ). -export_record(#session{} = Record) -> - export_record(Record, #session.id, [id, created_at, expires_at, props], #{}); -export_record(#iterator_ref{} = Record) -> - export_record(Record, #iterator_ref.it_id, [id, start_time, props], #{}). +export_session(#session{} = Record) -> + export_record(Record, #session.id, [id, created_at, expires_at, props], #{}). + +export_subscription(#ds_sub{} = Record) -> + export_record(Record, #ds_sub.start_time, [start_time, props, extra], #{}). export_record(Record, I, [Field | Rest], Acc) -> export_record(Record, I + 1, Rest, Acc#{Field => element(I, Record)}); diff --git a/apps/emqx/test/emqx_persistent_messages_SUITE.erl b/apps/emqx/test/emqx_persistent_messages_SUITE.erl index 2d8768e65..32e59a114 100644 --- a/apps/emqx/test/emqx_persistent_messages_SUITE.erl +++ b/apps/emqx/test/emqx_persistent_messages_SUITE.erl @@ -29,6 +29,7 @@ -define(DEFAULT_KEYSPACE, default). -define(DS_SHARD_ID, <<"local">>). -define(DS_SHARD, {?DEFAULT_KEYSPACE, ?DS_SHARD_ID}). 
+-define(PERSISTENT_MESSAGE_DB, emqx_persistent_message). all() -> emqx_common_test_helpers:all(?MODULE). @@ -62,6 +63,7 @@ end_per_testcase(t_session_subscription_iterators, Config) -> end_per_testcase(_TestCase, Config) -> Apps = ?config(apps, Config), emqx_common_test_helpers:call_janitor(60_000), + clear_db(), emqx_cth_suite:stop(Apps), ok. @@ -96,7 +98,7 @@ t_messages_persisted(_Config) -> ct:pal("Results = ~p", [Results]), - Persisted = consume(?DS_SHARD, {['#'], 0}), + Persisted = consume(['#'], 0), ct:pal("Persisted = ~p", [Persisted]), @@ -139,7 +141,7 @@ t_messages_persisted_2(_Config) -> {ok, #{reason_code := ?RC_NO_MATCHING_SUBSCRIBERS}} = emqtt:publish(CP, T(<<"client/2/topic">>), <<"8">>, 1), - Persisted = consume(?DS_SHARD, {['#'], 0}), + Persisted = consume(['#'], 0), ct:pal("Persisted = ~p", [Persisted]), @@ -155,7 +157,7 @@ t_messages_persisted_2(_Config) -> %% TODO: test quic and ws too t_session_subscription_iterators(Config) -> - [Node1, Node2] = ?config(nodes, Config), + [Node1, _Node2] = ?config(nodes, Config), Port = get_mqtt_port(Node1, tcp), Topic = <<"t/topic">>, SubTopicFilter = <<"t/+">>, @@ -202,11 +204,8 @@ t_session_subscription_iterators(Config) -> messages => [Message1, Message2, Message3, Message4] } end, - fun(Results, Trace) -> + fun(Trace) -> ct:pal("trace:\n ~p", [Trace]), - #{ - messages := [_Message1, Message2, Message3 | _] - } = Results, case ?of_kind(ds_session_subscription_added, Trace) of [] -> %% Since `emqx_durable_storage' is a dependency of `emqx', it gets @@ -228,17 +227,6 @@ t_session_subscription_iterators(Config) -> ), ok end, - ?assertMatch({ok, [_]}, get_all_iterator_ids(Node1)), - {ok, [IteratorId]} = get_all_iterator_ids(Node1), - ?assertMatch({ok, [IteratorId]}, get_all_iterator_ids(Node2)), - ReplayMessages1 = erpc:call(Node1, fun() -> consume(?DS_SHARD, IteratorId) end), - ExpectedMessages = [Message2, Message3], - %% Note: it is expected that this will break after replayers are in place. 
- %% They might have consumed all the messages by this time. - ?assertEqual(ExpectedMessages, ReplayMessages1), - %% Different DS shard - ReplayMessages2 = erpc:call(Node2, fun() -> consume(?DS_SHARD, IteratorId) end), - ?assertEqual([], ReplayMessages2), ok end ), @@ -263,33 +251,21 @@ connect(Opts0 = #{}) -> {ok, _} = emqtt:connect(Client), Client. -consume(Shard, Replay = {_TopicFiler, _StartMS}) -> - {ok, It} = emqx_ds_storage_layer:make_iterator(Shard, Replay), - consume(It); -consume(Shard, IteratorId) when is_binary(IteratorId) -> - {ok, It} = emqx_ds_storage_layer:restore_iterator(Shard, IteratorId), +consume(TopicFiler, StartMS) -> + [{_, Stream}] = emqx_ds:get_streams(?PERSISTENT_MESSAGE_DB, TopicFiler, StartMS), + {ok, It} = emqx_ds:make_iterator(Stream, StartMS), consume(It). consume(It) -> - case emqx_ds_storage_layer:next(It) of - {ok, NIt, [Msg]} -> - [emqx_persistent_message:deserialize(Msg) | consume(NIt)]; - end_of_stream -> + case emqx_ds:next(It, 100) of + {ok, _NIt, _Msgs = []} -> + []; + {ok, NIt, Msgs} -> + Msgs ++ consume(NIt); + {ok, end_of_stream} -> [] end. -delete_all_messages() -> - Persisted = consume(?DS_SHARD, {['#'], 0}), - lists:foreach( - fun(Msg) -> - GUID = emqx_message:id(Msg), - Topic = emqx_topic:words(emqx_message:topic(Msg)), - Timestamp = emqx_guid:timestamp(GUID), - ok = emqx_ds_storage_layer:delete(?DS_SHARD, GUID, Timestamp, Topic) - end, - Persisted - ). - receive_messages(Count) -> receive_messages(Count, []). @@ -306,13 +282,6 @@ receive_messages(Count, Msgs) -> publish(Node, Message) -> erpc:call(Node, emqx, publish, [Message]). -get_iterator_ids(Node, ClientId) -> - Channel = erpc:call(Node, fun() -> - [ConnPid] = emqx_cm:lookup_channels(ClientId), - sys:get_state(ConnPid) - end), - emqx_connection:info({channel, {session, iterators}}, Channel). 
- app_specs() -> [ emqx_durable_storage, @@ -330,5 +299,6 @@ get_mqtt_port(Node, Type) -> {_IP, Port} = erpc:call(Node, emqx_config, get, [[listeners, Type, default, bind]]), Port. -get_all_iterator_ids(Node) -> - erpc:call(Node, emqx_ds_storage_layer, list_iterator_prefix, [?DS_SHARD, <<>>]). +clear_db() -> + ok = emqx_ds:drop_db(?PERSISTENT_MESSAGE_DB), + ok. diff --git a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl index b43604469..a28c9de52 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl @@ -48,19 +48,19 @@ %% level. %% %% TODO: currently the stream is hardwired to only support the -%% internal rocksdb storage. In t he future we want to add another +%% internal rocksdb storage. In the future we want to add another %% implementations for emqx_ds, so this type has to take this into %% account. -record(stream, { shard :: emqx_ds_replication_layer:shard_id(), - enc :: emqx_ds_replication_layer:stream() + enc :: emqx_ds_storage_layer:stream() }). --opaque stream() :: stream(). +-opaque stream() :: #stream{}. -record(iterator, { shard :: emqx_ds_replication_layer:shard_id(), - enc :: enqx_ds_replication_layer:iterator() + enc :: enqx_ds_storage_layer:iterator() }). -opaque iterator() :: #iterator{}. @@ -154,7 +154,7 @@ next(Iter0, BatchSize) -> %% messages on the receiving node, hence saving some network. %% %% This kind of trickery should be probably done here in the - %% replication layer. Or, perhaps, in the logic lary. + %% replication layer. Or, perhaps, in the logic layer. 
case emqx_ds_proto_v1:next(Node, Shard, StorageIter0, BatchSize) of {ok, StorageIter, Batch} -> Iter = #iterator{shard = Shard, enc = StorageIter}, From 51a6f623fd1c0775dc87afe772b1821baacc695a Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Tue, 10 Oct 2023 22:15:14 +0200 Subject: [PATCH 080/155] refactor(ds): Split out bitfield keymapper to a different module --- .../src/emqx_ds_bitmask_keymapper.erl | 693 ++++++++++++++++++ 1 file changed, 693 insertions(+) create mode 100644 apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl diff --git a/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl b/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl new file mode 100644 index 000000000..44f171b55 --- /dev/null +++ b/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl @@ -0,0 +1,693 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- +-module(emqx_ds_bitmask_keymapper). + +%%================================================================================ +%% @doc This module is used to map N-dimensional coordinates to a +%% 1-dimensional space. +%% +%% Example: +%% +%% Let us assume that `T' is a topic and `t' is time. These are the two +%% dimensions used to index messages. They can be viewed as +%% "coordinates" of an MQTT message in a 2D space. +%% +%% Oftentimes, when wildcard subscription is used, keys must be +%% scanned in both dimensions simultaneously. +%% +%% Rocksdb allows to iterate over sorted keys very fast. This means we +%% need to map our two-dimentional keys to a single index that is +%% sorted in a way that helps to iterate over both time and topic +%% without having to do a lot of random seeks. 
+%% +%% == Mapping of 2D keys to rocksdb keys == +%% +%% We use "zigzag" pattern to store messages, where rocksdb key is +%% composed like like this: +%% +%% |ttttt|TTTTTTTTT|tttt| +%% ^ ^ ^ +%% | | | +%% +-------+ | +---------+ +%% | | | +%% most significant topic hash least significant +%% bits of timestamp bits of timestamp +%% (a.k.a epoch) (a.k.a time offset) +%% +%% Topic hash is level-aware: each topic level is hashed separately +%% and the resulting hashes are bitwise-concatentated. This allows us +%% to map topics to fixed-length bitstrings while keeping some degree +%% of information about the hierarchy. +%% +%% Next important concept is what we call "epoch". Duration of the +%% epoch is determined by maximum time offset. Epoch is calculated by +%% shifting bits of the timestamp right. +%% +%% The resulting index is a space-filling curve that looks like +%% this in the topic-time 2D space: +%% +%% T ^ ---->------ |---->------ |---->------ +%% | --/ / --/ / --/ +%% | -<-/ | -<-/ | -<-/ +%% | -/ | -/ | -/ +%% | ---->------ | ---->------ | ---->------ +%% | --/ / --/ / --/ +%% | ---/ | ---/ | ---/ +%% | -/ ^ -/ ^ -/ +%% | ---->------ | ---->------ | ---->------ +%% | --/ / --/ / --/ +%% | -<-/ | -<-/ | -<-/ +%% | -/ | -/ | -/ +%% | ---->------| ---->------| ----------> +%% | +%% -+------------+-----------------------------> t +%% epoch +%% +%% This structure allows to quickly seek to a the first message that +%% was recorded in a certain epoch in a certain topic or a +%% group of topics matching filter like `foo/bar/#`. +%% +%% Due to its structure, for each pair of rocksdb keys K1 and K2, such +%% that K1 > K2 and topic(K1) = topic(K2), timestamp(K1) > +%% timestamp(K2). +%% That is, replay doesn't reorder messages published in each +%% individual topic. +%% +%% This property doesn't hold between different topics, but it's not deemed +%% a problem right now. 
+%% +%%================================================================================ + +%% API: +-export([make_keymapper/1, vector_to_key/2, key_to_vector/2, next_range/3]). + +%% behavior callbacks: +-export([]). + +%% internal exports: +-export([]). + +-export_type([vector/0, key/0, dimension/0, offset/0, bitsize/0, bitsource/0, keymapper/0]). + +-compile( + {inline, [ + ones/1, + extract/2 + ]} +). + +-ifdef(TEST). +-include_lib("proper/include/proper.hrl"). +-include_lib("eunit/include/eunit.hrl"). +-endif. + +%%================================================================================ +%% Type declarations +%%================================================================================ + +-type scalar() :: integer(). + +-type vector() :: [scalar()]. + +%% N-th coordinate of a vector: +-type dimension() :: pos_integer(). + +-type offset() :: non_neg_integer(). + +-type bitsize() :: pos_integer(). + +%% The resulting 1D key: +-type key() :: binary(). + +-type bitsource() :: + %% Consume `_Size` bits from timestamp starting at `_Offset`th + %% bit from N-th element of the input vector: + {dimension(), offset(), bitsize()}. + +-record(scan_action, { + src_bitmask :: integer(), + src_offset :: offset(), + dst_offset :: offset() +}). + +-type scanner() :: [[#scan_action{}]]. + +-record(keymapper, { + schema :: [bitsource()], + scanner :: scanner(), + size :: non_neg_integer(), + dim_sizeof :: [non_neg_integer()] +}). + +-opaque keymapper() :: #keymapper{}. + +-type scalar_range() :: any | {'=', scalar()} | {'>=', scalar()}. + +%%================================================================================ +%% API functions +%%================================================================================ + +%% @doc +%% +%% Note: Dimension is 1-based. +-spec make_keymapper([bitsource()]) -> keymapper(). 
+make_keymapper(Bitsources) -> + Arr0 = array:new([{fixed, false}, {default, {0, []}}]), + {Size, Arr} = fold_bitsources( + fun(DestOffset, {Dim0, Offset, Size}, Acc) -> + Dim = Dim0 - 1, + Action = #scan_action{ + src_bitmask = ones(Size), src_offset = Offset, dst_offset = DestOffset + }, + {DimSizeof, Actions} = array:get(Dim, Acc), + array:set(Dim, {DimSizeof + Size, [Action | Actions]}, Acc) + end, + Arr0, + Bitsources + ), + {DimSizeof, Scanner} = lists:unzip(array:to_list(Arr)), + #keymapper{ + schema = Bitsources, + scanner = Scanner, + size = Size, + dim_sizeof = DimSizeof + }. + +%% @doc Map N-dimensional vector to a scalar key. +%% +%% Note: this function is not injective. +-spec vector_to_key(keymapper(), vector()) -> key(). +vector_to_key(#keymapper{scanner = []}, []) -> + 0; +vector_to_key(#keymapper{scanner = [Actions | Scanner]}, [Coord | Vector]) -> + do_vector_to_key(Actions, Scanner, Coord, Vector, 0). + +%% @doc Map key to a vector. +%% +%% Note: `vector_to_key(key_to_vector(K)) = K' but +%% `key_to_vector(vector_to_key(V)) = V' is not guaranteed. +-spec key_to_vector(keymapper(), key()) -> vector(). +key_to_vector(#keymapper{scanner = Scanner}, Key) -> + lists:map( + fun(Actions) -> + lists:foldl( + fun(Action, Acc) -> + Acc bor extract_inv(Key, Action) + end, + 0, + Actions + ) + end, + Scanner + ). + +%% @doc Given a keymapper, a filter, and a key, return a triple containing: +%% +%% 1. `NextKey', a key that is greater than the given one, and is +%% within the given range. +%% +%% 2. `Bitmask' +%% +%% 3. `Bitfilter' +%% +%% Bitmask and bitfilter can be used to verify that key any K is in +%% the range using the following inequality: +%% +%% K >= NextKey && (K band Bitmask) =:= Bitfilter. +%% +%% ...or `undefined' if the next key is outside the range. +-spec next_range(keymapper(), [scalar_range()], key()) -> {key(), integer(), integer()} | undefined. 
+next_range(Keymapper, Filter0, PrevKey) -> + %% Key -> Vector -> +1 on vector -> Key + Filter = desugar_filter(Keymapper, Filter0), + PrevVec = key_to_vector(Keymapper, PrevKey), + case inc_vector(Filter, PrevVec) of + overflow -> + undefined; + NextVec -> + NewKey = vector_to_key(Keymapper, NextVec), + Bitmask = make_bitmask(Keymapper, Filter), + Bitfilter = NewKey band Bitmask, + {NewKey, Bitmask, Bitfilter} + end. + +%%================================================================================ +%% Internal functions +%%================================================================================ + +-spec make_bitmask(keymapper(), [{non_neg_integer(), non_neg_integer()}]) -> non_neg_integer(). +make_bitmask(Keymapper = #keymapper{dim_sizeof = DimSizeof}, Ranges) -> + BitmaskVector = lists:map( + fun + ({{N, N}, Bits}) -> + %% For strict equality we can employ bitmask: + ones(Bits); + (_) -> + 0 + end, + lists:zip(Ranges, DimSizeof) + ), + vector_to_key(Keymapper, BitmaskVector). + +-spec inc_vector([{non_neg_integer(), non_neg_integer()}], vector()) -> vector() | overflow. +inc_vector(Filter, Vec0) -> + case normalize_vector(Filter, Vec0) of + {true, Vec} -> + Vec; + {false, Vec} -> + do_inc_vector(Filter, Vec, []) + end. + +do_inc_vector([], [], _Acc) -> + overflow; +do_inc_vector([{Min, Max} | Intervals], [Elem | Vec], Acc) -> + case Elem of + Max -> + do_inc_vector(Intervals, Vec, [Min | Acc]); + _ when Elem < Max -> + lists:reverse(Acc) ++ [Elem + 1 | Vec] + end. + +normalize_vector(Intervals, Vec0) -> + Vec = lists:map( + fun + ({{Min, _Max}, Elem}) when Min > Elem -> + Min; + ({{_Min, Max}, Elem}) when Max < Elem -> + Max; + ({_, Elem}) -> + Elem + end, + lists:zip(Intervals, Vec0) + ), + {Vec > Vec0, Vec}. + +%% Transform inequalities into a list of closed intervals that the +%% vector elements should lie in. 
+desugar_filter(#keymapper{dim_sizeof = DimSizeof}, Filter) -> + lists:map( + fun + ({any, Bitsize}) -> + {0, ones(Bitsize)}; + ({{'=', Val}, _Bitsize}) -> + {Val, Val}; + ({{'>=', Val}, Bitsize}) -> + {Val, ones(Bitsize)} + end, + lists:zip(Filter, DimSizeof) + ). + +-spec fold_bitsources(fun((_DstOffset :: non_neg_integer(), bitsource(), Acc) -> Acc), Acc, [ + bitsource() +]) -> {bitsize(), Acc}. +fold_bitsources(Fun, InitAcc, Bitsources) -> + lists:foldl( + fun(Bitsource = {_Dim, _Offset, Size}, {DstOffset, Acc0}) -> + Acc = Fun(DstOffset, Bitsource, Acc0), + {DstOffset + Size, Acc} + end, + {0, InitAcc}, + Bitsources + ). + +%% Specialized version of fold: +do_vector_to_key([], [], _Coord, [], Acc) -> + Acc; +do_vector_to_key([], [NewActions | Scanner], _Coord, [NewCoord | Vector], Acc) -> + do_vector_to_key(NewActions, Scanner, NewCoord, Vector, Acc); +do_vector_to_key([Action | Actions], Scanner, Coord, Vector, Acc0) -> + Acc = Acc0 bor extract(Coord, Action), + do_vector_to_key(Actions, Scanner, Coord, Vector, Acc). + +-spec extract(_Source :: scalar(), #scan_action{}) -> integer(). +extract(Src, #scan_action{src_bitmask = SrcBitmask, src_offset = SrcOffset, dst_offset = DstOffset}) -> + ((Src bsr SrcOffset) band SrcBitmask) bsl DstOffset. + +%% extract^-1 +-spec extract_inv(_Dest :: scalar(), #scan_action{}) -> integer(). +extract_inv(Dest, #scan_action{ + src_bitmask = SrcBitmask, src_offset = SrcOffset, dst_offset = DestOffset +}) -> + ((Dest bsr DestOffset) band SrcBitmask) bsl SrcOffset. + +ones(Bits) -> + 1 bsl Bits - 1. + +%% Create a bitmask that is sufficient to cover a given number. E.g.: +%% +%% 2#1000 -> 2#1111; 2#0 -> 2#0; 2#10101 -> 2#11111 +bitmask_of(N) -> + %% FIXME: avoid floats + NBits = ceil(math:log2(N + 1)), + ones(NBits). + +%%================================================================================ +%% Unit tests +%%================================================================================ + +-ifdef(TEST). 
+ +bitmask_of_test() -> + ?assertEqual(2#0, bitmask_of(0)), + ?assertEqual(2#1, bitmask_of(1)), + ?assertEqual(2#11, bitmask_of(2#10)), + ?assertEqual(2#11, bitmask_of(2#11)), + ?assertEqual(2#1111, bitmask_of(2#1000)), + ?assertEqual(2#1111, bitmask_of(2#1111)), + ?assertEqual(ones(128), bitmask_of(ones(128))), + ?assertEqual(ones(256), bitmask_of(ones(256))). + +make_keymapper0_test() -> + Schema = [], + ?assertEqual( + #keymapper{ + schema = Schema, + scanner = [], + size = 0, + dim_sizeof = [] + }, + make_keymapper(Schema) + ). + +make_keymapper1_test() -> + Schema = [{1, 0, 3}, {2, 0, 5}], + ?assertEqual( + #keymapper{ + schema = Schema, + scanner = [ + [#scan_action{src_bitmask = 2#111, src_offset = 0, dst_offset = 0}], + [#scan_action{src_bitmask = 2#11111, src_offset = 0, dst_offset = 3}] + ], + size = 8, + dim_sizeof = [3, 5] + }, + make_keymapper(Schema) + ). + +make_keymapper2_test() -> + Schema = [{1, 0, 3}, {2, 0, 5}, {1, 3, 5}], + ?assertEqual( + #keymapper{ + schema = Schema, + scanner = [ + [ + #scan_action{src_bitmask = 2#11111, src_offset = 3, dst_offset = 8}, + #scan_action{src_bitmask = 2#111, src_offset = 0, dst_offset = 0} + ], + [#scan_action{src_bitmask = 2#11111, src_offset = 0, dst_offset = 3}] + ], + size = 13, + dim_sizeof = [8, 5] + }, + make_keymapper(Schema) + ). + +vector_to_key0_test() -> + Schema = [], + Vector = [], + ?assertEqual(0, vec2key(Schema, Vector)). + +vector_to_key1_test() -> + Schema = [{1, 0, 8}], + ?assertEqual(16#ff, vec2key(Schema, [16#ff])), + ?assertEqual(16#1a, vec2key(Schema, [16#1a])), + ?assertEqual(16#ff, vec2key(Schema, [16#aaff])). + +%% Test handling of source offset: +vector_to_key2_test() -> + Schema = [{1, 8, 8}], + ?assertEqual(0, vec2key(Schema, [16#ff])), + ?assertEqual(16#1a, vec2key(Schema, [16#1aff])), + ?assertEqual(16#aa, vec2key(Schema, [16#11aaff])). 
+ +%% Basic test of 2D vector: +vector_to_key3_test() -> + Schema = [{1, 0, 8}, {2, 0, 8}], + ?assertEqual(16#aaff, vec2key(Schema, [16#ff, 16#aa])), + ?assertEqual(16#2211, vec2key(Schema, [16#aa11, 16#bb22])). + +%% Advanced test with 2D vector: +vector_to_key4_test() -> + Schema = [{1, 0, 8}, {2, 0, 8}, {1, 8, 8}, {2, 16, 8}], + ?assertEqual(16#bb112211, vec2key(Schema, [16#aa1111, 16#bb2222])). + +key_to_vector0_test() -> + Schema = [], + key2vec(Schema, []). + +key_to_vector1_test() -> + Schema = [{1, 0, 8}, {2, 0, 8}], + key2vec(Schema, [1, 1]), + key2vec(Schema, [255, 255]), + key2vec(Schema, [255, 1]), + key2vec(Schema, [0, 1]), + key2vec(Schema, [255, 0]). + +key_to_vector2_test() -> + Schema = [{1, 0, 3}, {2, 0, 8}, {1, 3, 5}], + key2vec(Schema, [1, 1]), + key2vec(Schema, [255, 255]), + key2vec(Schema, [255, 1]), + key2vec(Schema, [0, 1]), + key2vec(Schema, [255, 0]). + +inc_vector0_test() -> + Keymapper = make_keymapper([]), + ?assertMatch(overflow, incvec(Keymapper, [], [])). + +inc_vector1_test() -> + Keymapper = make_keymapper([{1, 0, 8}]), + ?assertMatch([3], incvec(Keymapper, [{'=', 3}], [1])), + ?assertMatch([3], incvec(Keymapper, [{'=', 3}], [2])), + ?assertMatch(overflow, incvec(Keymapper, [{'=', 3}], [3])), + ?assertMatch(overflow, incvec(Keymapper, [{'=', 3}], [4])), + ?assertMatch(overflow, incvec(Keymapper, [{'=', 3}], [255])), + %% Now with >=: + ?assertMatch([1], incvec(Keymapper, [{'>=', 0}], [0])), + ?assertMatch([255], incvec(Keymapper, [{'>=', 0}], [254])), + ?assertMatch(overflow, incvec(Keymapper, [{'>=', 0}], [255])), + + ?assertMatch([100], incvec(Keymapper, [{'>=', 100}], [0])), + ?assertMatch([100], incvec(Keymapper, [{'>=', 100}], [99])), + ?assertMatch([255], incvec(Keymapper, [{'>=', 100}], [254])), + ?assertMatch(overflow, incvec(Keymapper, [{'>=', 100}], [255])). 
+ +inc_vector2_test() -> + Keymapper = make_keymapper([{1, 0, 8}, {2, 0, 8}, {3, 0, 8}]), + Filter = [{'>=', 0}, {'=', 100}, {'>=', 30}], + ?assertMatch([0, 100, 30], incvec(Keymapper, Filter, [0, 0, 0])), + ?assertMatch([1, 100, 30], incvec(Keymapper, Filter, [0, 100, 30])), + ?assertMatch([255, 100, 30], incvec(Keymapper, Filter, [254, 100, 30])), + ?assertMatch([0, 100, 31], incvec(Keymapper, Filter, [255, 100, 30])), + ?assertMatch([0, 100, 30], incvec(Keymapper, Filter, [0, 100, 29])), + ?assertMatch(overflow, incvec(Keymapper, Filter, [255, 100, 255])), + ?assertMatch([255, 100, 255], incvec(Keymapper, Filter, [254, 100, 255])), + ?assertMatch([0, 100, 255], incvec(Keymapper, Filter, [255, 100, 254])), + %% Nasty cases (shouldn't happen, hopefully): + ?assertMatch([1, 100, 30], incvec(Keymapper, Filter, [0, 101, 0])), + ?assertMatch([1, 100, 33], incvec(Keymapper, Filter, [0, 101, 33])), + ?assertMatch([0, 100, 255], incvec(Keymapper, Filter, [255, 101, 254])), + ?assertMatch(overflow, incvec(Keymapper, Filter, [255, 101, 255])). + +make_bitmask0_test() -> + Keymapper = make_keymapper([]), + ?assertMatch(0, mkbmask(Keymapper, [])). + +make_bitmask1_test() -> + Keymapper = make_keymapper([{1, 0, 8}]), + ?assertEqual(0, mkbmask(Keymapper, [any])), + ?assertEqual(16#ff, mkbmask(Keymapper, [{'=', 1}])), + ?assertEqual(16#ff, mkbmask(Keymapper, [{'=', 255}])), + ?assertEqual(0, mkbmask(Keymapper, [{'>=', 0}])), + ?assertEqual(0, mkbmask(Keymapper, [{'>=', 1}])), + ?assertEqual(0, mkbmask(Keymapper, [{'>=', 16#f}])). + +make_bitmask2_test() -> + Keymapper = make_keymapper([{1, 0, 3}, {2, 0, 4}, {3, 0, 2}]), + ?assertEqual(2#00_0000_000, mkbmask(Keymapper, [any, any, any])), + ?assertEqual(2#11_0000_000, mkbmask(Keymapper, [any, any, {'=', 0}])), + ?assertEqual(2#00_1111_000, mkbmask(Keymapper, [any, {'=', 0}, any])), + ?assertEqual(2#00_0000_111, mkbmask(Keymapper, [{'=', 0}, any, any])). 
+ +make_bitmask3_test() -> + %% Key format of type |TimeOffset|Topic|Epoch|: + Keymapper = make_keymapper([{1, 8, 8}, {2, 0, 8}, {1, 0, 8}]), + ?assertEqual(2#00000000_00000000_00000000, mkbmask(Keymapper, [any, any])), + ?assertEqual(2#11111111_11111111_11111111, mkbmask(Keymapper, [{'=', 33}, {'=', 22}])), + ?assertEqual(2#11111111_11111111_11111111, mkbmask(Keymapper, [{'=', 33}, {'=', 22}])), + ?assertEqual(2#00000000_11111111_00000000, mkbmask(Keymapper, [{'>=', 255}, {'=', 22}])). + +next_range0_test() -> + Keymapper = make_keymapper([]), + Filter = [], + PrevKey = 0, + ?assertMatch(undefined, next_range(Keymapper, Filter, PrevKey)). + +next_range1_test() -> + Keymapper = make_keymapper([{1, 0, 8}, {2, 0, 8}]), + ?assertMatch(undefined, next_range(Keymapper, [{'=', 0}, {'=', 0}], 0)), + ?assertMatch({1, 16#ffff, 1}, next_range(Keymapper, [{'=', 1}, {'=', 0}], 0)), + ?assertMatch({16#100, 16#ffff, 16#100}, next_range(Keymapper, [{'=', 0}, {'=', 1}], 0)), + %% Now with any: + ?assertMatch({1, 0, 0}, next_range(Keymapper, [any, any], 0)), + ?assertMatch({2, 0, 0}, next_range(Keymapper, [any, any], 1)), + ?assertMatch({16#fffb, 0, 0}, next_range(Keymapper, [any, any], 16#fffa)), + %% Now with >=: + ?assertMatch( + {16#42_30, 16#ff00, 16#42_00}, next_range(Keymapper, [{'>=', 16#30}, {'=', 16#42}], 0) + ), + ?assertMatch( + {16#42_31, 16#ff00, 16#42_00}, + next_range(Keymapper, [{'>=', 16#30}, {'=', 16#42}], 16#42_30) + ), + + ?assertMatch( + {16#30_42, 16#00ff, 16#00_42}, next_range(Keymapper, [{'=', 16#42}, {'>=', 16#30}], 0) + ), + ?assertMatch( + {16#31_42, 16#00ff, 16#00_42}, + next_range(Keymapper, [{'=', 16#42}, {'>=', 16#30}], 16#00_43) + ). + +%% Bunch of tests that verifying that next_range doesn't skip over keys: + +-define(assertIterComplete(A, B), + ?assertEqual(A -- [0], B) +). + +-define(assertSameSet(A, B), + ?assertIterComplete(lists:sort(A), lists:sort(B)) +). 
+ +iterate1_test() -> + SizeX = 3, + SizeY = 3, + Keymapper = make_keymapper([{1, 0, SizeX}, {2, 0, SizeY}]), + Keys = test_iteration(Keymapper, [any, any]), + Expected = [ + X bor (Y bsl SizeX) + || Y <- lists:seq(0, ones(SizeY)), X <- lists:seq(0, ones(SizeX)) + ], + ?assertIterComplete(Expected, Keys). + +iterate2_test() -> + SizeX = 64, + SizeY = 3, + Keymapper = make_keymapper([{1, 0, SizeX}, {2, 0, SizeY}]), + X = 123456789, + Keys = test_iteration(Keymapper, [{'=', X}, any]), + Expected = [ + X bor (Y bsl SizeX) + || Y <- lists:seq(0, ones(SizeY)) + ], + ?assertIterComplete(Expected, Keys). + +iterate3_test() -> + SizeX = 3, + SizeY = 64, + Y = 42, + Keymapper = make_keymapper([{1, 0, SizeX}, {2, 0, SizeY}]), + Keys = test_iteration(Keymapper, [any, {'=', Y}]), + Expected = [ + X bor (Y bsl SizeX) + || X <- lists:seq(0, ones(SizeX)) + ], + ?assertIterComplete(Expected, Keys). + +iterate4_test() -> + SizeX = 8, + SizeY = 4, + MinX = 16#fa, + MinY = 16#a, + Keymapper = make_keymapper([{1, 0, SizeX}, {2, 0, SizeY}]), + Keys = test_iteration(Keymapper, [{'>=', MinX}, {'>=', MinY}]), + Expected = [ + X bor (Y bsl SizeX) + || Y <- lists:seq(MinY, ones(SizeY)), X <- lists:seq(MinX, ones(SizeX)) + ], + ?assertIterComplete(Expected, Keys). + +iterate1_prop() -> + Size = 4, + ?FORALL( + {SizeX, SizeY}, + {integer(1, Size), integer(1, Size)}, + ?FORALL( + {SplitX, MinX, MinY}, + {integer(0, SizeX), integer(0, SizeX), integer(0, SizeY)}, + begin + Keymapper = make_keymapper([ + {1, 0, SplitX}, {2, 0, SizeY}, {1, SplitX, SizeX - SplitX} + ]), + Keys = test_iteration(Keymapper, [{'>=', MinX}, {'>=', MinY}]), + Expected = [ + vector_to_key(Keymapper, [X, Y]) + || X <- lists:seq(MinX, ones(SizeX)), + Y <- lists:seq(MinY, ones(SizeY)) + ], + ?assertSameSet(Expected, Keys), + true + end + ) + ). + +iterate5_test() -> + ?assert(proper:quickcheck(iterate1_prop(), 100)). 
+ +iterate2_prop() -> + Size = 4, + ?FORALL( + {SizeX, SizeY}, + {integer(1, Size), integer(1, Size)}, + ?FORALL( + {SplitX, MinX, MinY}, + {integer(0, SizeX), integer(0, SizeX), integer(0, SizeY)}, + begin + Keymapper = make_keymapper([ + {1, SplitX, SizeX - SplitX}, {2, 0, SizeY}, {1, 0, SplitX} + ]), + Keys = test_iteration(Keymapper, [{'>=', MinX}, {'>=', MinY}]), + Expected = [ + vector_to_key(Keymapper, [X, Y]) + || X <- lists:seq(MinX, ones(SizeX)), + Y <- lists:seq(MinY, ones(SizeY)) + ], + ?assertSameSet(Expected, Keys), + true + end + ) + ). + +iterate6_test() -> + ?assert(proper:quickcheck(iterate2_prop(), 1000)). + +test_iteration(Keymapper, Filter) -> + test_iteration(Keymapper, Filter, 0). + +test_iteration(Keymapper, Filter, PrevKey) -> + case next_range(Keymapper, Filter, PrevKey) of + undefined -> + []; + {Key, Bitmask, Bitfilter} -> + ?assert((Key band Bitmask) =:= Bitfilter), + [Key | test_iteration(Keymapper, Filter, Key)] + end. + +mkbmask(Keymapper, Filter0) -> + Filter = desugar_filter(Keymapper, Filter0), + make_bitmask(Keymapper, Filter). + +incvec(Keymapper, Filter0, Vector) -> + Filter = desugar_filter(Keymapper, Filter0), + inc_vector(Filter, Vector). + +key2vec(Schema, Vector) -> + Keymapper = make_keymapper(Schema), + Key = vector_to_key(Keymapper, Vector), + ?assertEqual(Vector, key_to_vector(Keymapper, Key)). + +vec2key(Schema, Vector) -> + vector_to_key(make_keymapper(Schema), Vector). + +-endif. 
From c149e0e2df8d373cf09b472cadc8dc159426411a Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Wed, 11 Oct 2023 15:51:52 +0200 Subject: [PATCH 081/155] fix(ds): Pass topic filter to emqx_ds:make_iterator call --- apps/emqx_durable_storage/src/emqx_ds.erl | 8 ++--- .../src/emqx_ds_bitmask_keymapper.erl | 35 ++++++++++--------- .../src/emqx_ds_replication_layer.erl | 16 ++++----- .../src/emqx_ds_storage_layer.erl | 10 +++--- .../src/emqx_ds_storage_reference.erl | 10 +++--- .../src/proto/emqx_ds_proto_v1.erl | 8 ++--- .../test/emqx_ds_SUITE.erl | 15 ++++---- 7 files changed, 53 insertions(+), 49 deletions(-) diff --git a/apps/emqx_durable_storage/src/emqx_ds.erl b/apps/emqx_durable_storage/src/emqx_ds.erl index cf4b5a031..dd6af9a03 100644 --- a/apps/emqx_durable_storage/src/emqx_ds.erl +++ b/apps/emqx_durable_storage/src/emqx_ds.erl @@ -28,7 +28,7 @@ -export([store_batch/2, store_batch/3]). %% Message replay API: --export([get_streams/3, make_iterator/2, next/2]). +-export([get_streams/3, make_iterator/3, next/2]). %% Misc. API: -export([]). @@ -159,9 +159,9 @@ store_batch(DB, Msgs) -> get_streams(DB, TopicFilter, StartTime) -> emqx_ds_replication_layer:get_streams(DB, TopicFilter, StartTime). --spec make_iterator(stream(), time()) -> make_iterator_result(). -make_iterator(Stream, StartTime) -> - emqx_ds_replication_layer:make_iterator(Stream, StartTime). +-spec make_iterator(stream(), topic_filter(), time()) -> make_iterator_result(). +make_iterator(Stream, TopicFilter, StartTime) -> + emqx_ds_replication_layer:make_iterator(Stream, TopicFilter, StartTime). -spec next(iterator(), pos_integer()) -> next_result(). 
next(Iter, BatchSize) -> diff --git a/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl b/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl index 44f171b55..fd2d41946 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl @@ -334,29 +334,30 @@ extract_inv(Dest, #scan_action{ ones(Bits) -> 1 bsl Bits - 1. -%% Create a bitmask that is sufficient to cover a given number. E.g.: -%% -%% 2#1000 -> 2#1111; 2#0 -> 2#0; 2#10101 -> 2#11111 -bitmask_of(N) -> - %% FIXME: avoid floats - NBits = ceil(math:log2(N + 1)), - ones(NBits). - %%================================================================================ %% Unit tests %%================================================================================ -ifdef(TEST). -bitmask_of_test() -> - ?assertEqual(2#0, bitmask_of(0)), - ?assertEqual(2#1, bitmask_of(1)), - ?assertEqual(2#11, bitmask_of(2#10)), - ?assertEqual(2#11, bitmask_of(2#11)), - ?assertEqual(2#1111, bitmask_of(2#1000)), - ?assertEqual(2#1111, bitmask_of(2#1111)), - ?assertEqual(ones(128), bitmask_of(ones(128))), - ?assertEqual(ones(256), bitmask_of(ones(256))). +%% %% Create a bitmask that is sufficient to cover a given number. E.g.: +%% %% +%% %% 2#1000 -> 2#1111; 2#0 -> 2#0; 2#10101 -> 2#11111 +%% bitmask_of(N) -> +%% %% FIXME: avoid floats +%% NBits = ceil(math:log2(N + 1)), +%% ones(NBits). + + +%% bitmask_of_test() -> +%% ?assertEqual(2#0, bitmask_of(0)), +%% ?assertEqual(2#1, bitmask_of(1)), +%% ?assertEqual(2#11, bitmask_of(2#10)), +%% ?assertEqual(2#11, bitmask_of(2#11)), +%% ?assertEqual(2#1111, bitmask_of(2#1000)), +%% ?assertEqual(2#1111, bitmask_of(2#1111)), +%% ?assertEqual(ones(128), bitmask_of(ones(128))), +%% ?assertEqual(ones(256), bitmask_of(ones(256))). 
make_keymapper0_test() -> Schema = [], diff --git a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl index a28c9de52..aeb2ce646 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl @@ -21,7 +21,7 @@ drop_db/1, store_batch/3, get_streams/3, - make_iterator/2, + make_iterator/3, next/2 ]). @@ -30,7 +30,7 @@ do_open_shard_v1/2, do_drop_shard_v1/1, do_get_streams_v1/3, - do_make_iterator_v1/3, + do_make_iterator_v1/4, do_next_v1/3 ]). @@ -132,11 +132,11 @@ get_streams(DB, TopicFilter, StartTime) -> Shards ). --spec make_iterator(stream(), emqx_ds:time()) -> emqx_ds:make_iterator_result(iterator()). -make_iterator(Stream, StartTime) -> +-spec make_iterator(stream(), emqx_ds:topic_filter(), emqx_ds:time()) -> emqx_ds:make_iterator_result(iterator()). +make_iterator(Stream, TopicFilter, StartTime) -> #stream{shard = Shard, enc = StorageStream} = Stream, Node = node_of_shard(Shard), - case emqx_ds_proto_v1:make_iterator(Node, Shard, StorageStream, StartTime) of + case emqx_ds_proto_v1:make_iterator(Node, Shard, StorageStream, TopicFilter, StartTime) of {ok, Iter} -> {ok, #iterator{shard = Shard, enc = Iter}}; Err = {error, _} -> @@ -184,9 +184,9 @@ do_drop_shard_v1(Shard) -> do_get_streams_v1(Shard, TopicFilter, StartTime) -> emqx_ds_storage_layer:get_streams(Shard, TopicFilter, StartTime). --spec do_make_iterator_v1(shard_id(), _Stream, emqx_ds:time()) -> {ok, iterator()} | {error, _}. -do_make_iterator_v1(Shard, Stream, StartTime) -> - emqx_ds_storage_layer:make_iterator(Shard, Stream, StartTime). +-spec do_make_iterator_v1(shard_id(), _Stream, emqx_ds:topic_filter(), emqx_ds:time()) -> {ok, iterator()} | {error, _}. +do_make_iterator_v1(Shard, Stream, TopicFilter, StartTime) -> + emqx_ds_storage_layer:make_iterator(Shard, Stream, TopicFilter, StartTime). 
-spec do_next_v1(shard_id(), Iter, pos_integer()) -> emqx_ds:next_result(Iter). do_next_v1(Shard, Iter, BatchSize) -> diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl index e9d4edc06..744ac869f 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl @@ -18,7 +18,7 @@ -behaviour(gen_server). %% Replication layer API: --export([open_shard/2, drop_shard/1, store_batch/3, get_streams/3, make_iterator/3, next/3]). +-export([open_shard/2, drop_shard/1, store_batch/3, get_streams/3, make_iterator/4, next/3]). %% gen_server -export([start_link/2, init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2]). @@ -112,7 +112,7 @@ -callback get_streams(shard_id(), _Data, emqx_ds:topic_filter(), emqx_ds:time()) -> [_Stream]. --callback make_iterator(shard_id(), _Data, _Stream, emqx_ds:time()) -> +-callback make_iterator(shard_id(), _Data, _Stream, emqx_ds:topic_filter(), emqx_ds:time()) -> emqx_ds:make_iterator_result(_Iterator). -callback next(shard_id(), _Data, Iter, pos_integer()) -> @@ -158,11 +158,11 @@ get_streams(Shard, TopicFilter, StartTime) -> Gens ). --spec make_iterator(shard_id(), stream(), emqx_ds:time()) -> +-spec make_iterator(shard_id(), stream(), emqx_ds:topic_filter(), emqx_ds:time()) -> emqx_ds:make_iterator_result(iterator()). 
-make_iterator(Shard, #stream{generation = GenId, enc = Stream}, StartTime) -> +make_iterator(Shard, #stream{generation = GenId, enc = Stream}, TopicFilter, StartTime) -> #{module := Mod, data := GenData} = generation_get(Shard, GenId), - case Mod:make_iterator(Shard, GenData, Stream, StartTime) of + case Mod:make_iterator(Shard, GenData, Stream, TopicFilter, StartTime) of {ok, Iter} -> {ok, #it{ generation = GenId, diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_reference.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_reference.erl index fd480eeab..5a91f9ecd 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_reference.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_reference.erl @@ -27,7 +27,7 @@ -export([]). %% behavior callbacks: --export([create/4, open/5, store_batch/4, get_streams/4, make_iterator/4, next/4]). +-export([create/4, open/5, store_batch/4, get_streams/4, make_iterator/5, next/4]). %% internal exports: -export([]). @@ -49,7 +49,7 @@ cf :: rocksdb:cf_handle() }). --record(stream, {topic_filter :: emqx_ds:topic_filter()}). +-record(stream, {}). -record(it, { topic_filter :: emqx_ds:topic_filter(), @@ -86,10 +86,10 @@ store_batch(_ShardId, #s{db = DB, cf = CF}, Messages, _Options) -> Messages ). -get_streams(_Shard, _Data, TopicFilter, _StartTime) -> - [#stream{topic_filter = TopicFilter}]. +get_streams(_Shard, _Data, _TopicFilter, _StartTime) -> + [#stream{}]. -make_iterator(_Shard, _Data, #stream{topic_filter = TopicFilter}, StartTime) -> +make_iterator(_Shard, _Data, #stream{}, TopicFilter, StartTime) -> {ok, #it{ topic_filter = TopicFilter, start_time = StartTime diff --git a/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl b/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl index 60671cef7..d4d7b3631 100644 --- a/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl +++ b/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl @@ -19,7 +19,7 @@ -include_lib("emqx/include/bpapi.hrl"). 
%% API: --export([open_shard/3, drop_shard/2, get_streams/4, make_iterator/4, next/4]). +-export([open_shard/3, drop_shard/2, get_streams/4, make_iterator/5, next/4]). %% behavior callbacks: -export([introduced_in/0]). @@ -45,10 +45,10 @@ drop_shard(Node, Shard) -> get_streams(Node, Shard, TopicFilter, Time) -> erpc:call(Node, emqx_ds_replication_layer, do_get_streams_v1, [Shard, TopicFilter, Time]). --spec make_iterator(node(), emqx_ds_replication_layer:shard(), _Stream, emqx_ds:time()) -> +-spec make_iterator(node(), emqx_ds_replication_layer:shard(), _Stream, emqx_ds:topic_filter(), emqx_ds:time()) -> {ok, emqx_ds_replication_layer:iterator()} | {error, _}. -make_iterator(Node, Shard, Stream, StartTime) -> - erpc:call(Node, emqx_ds_replication_layer, do_make_iterator_v1, [Shard, Stream, StartTime]). +make_iterator(Node, Shard, Stream, TopicFilter, StartTime) -> + erpc:call(Node, emqx_ds_replication_layer, do_make_iterator_v1, [Shard, Stream, TopicFilter, StartTime]). -spec next( node(), emqx_ds_replication_layer:shard(), emqx_ds_replication_layer:iterator(), pos_integer() diff --git a/apps/emqx_durable_storage/test/emqx_ds_SUITE.erl b/apps/emqx_durable_storage/test/emqx_ds_SUITE.erl index 1935e41cf..2dc77c563 100644 --- a/apps/emqx_durable_storage/test/emqx_ds_SUITE.erl +++ b/apps/emqx_durable_storage/test/emqx_ds_SUITE.erl @@ -45,9 +45,10 @@ t_02_smoke_get_streams_start_iter(_Config) -> DB = ?FUNCTION_NAME, ?assertMatch(ok, emqx_ds:open_db(DB, #{})), StartTime = 0, - [{Rank, Stream}] = emqx_ds:get_streams(DB, ['#'], StartTime), + TopicFilter = ['#'], + [{Rank, Stream}] = emqx_ds:get_streams(DB, TopicFilter, StartTime), ?assertMatch({_, _}, Rank), - ?assertMatch({ok, _Iter}, emqx_ds:make_iterator(Stream, StartTime)). + ?assertMatch({ok, _Iter}, emqx_ds:make_iterator(Stream, TopicFilter, StartTime)). %% A simple smoke test that verifies that it's possible to iterate %% over messages. 
@@ -55,14 +56,15 @@ t_03_smoke_iterate(_Config) -> DB = ?FUNCTION_NAME, ?assertMatch(ok, emqx_ds:open_db(DB, #{})), StartTime = 0, + TopicFilter = ['#'], Msgs = [ message(<<"foo/bar">>, <<"1">>, 0), message(<<"foo">>, <<"2">>, 1), message(<<"bar/bar">>, <<"3">>, 2) ], ?assertMatch(ok, emqx_ds:store_batch(DB, Msgs)), - [{_, Stream}] = emqx_ds:get_streams(DB, ['#'], StartTime), - {ok, Iter0} = emqx_ds:make_iterator(Stream, StartTime), + [{_, Stream}] = emqx_ds:get_streams(DB, TopicFilter, StartTime), + {ok, Iter0} = emqx_ds:make_iterator(Stream, TopicFilter, StartTime), {ok, Iter, Batch} = iterate(Iter0, 1), ?assertEqual(Msgs, Batch, {Iter0, Iter}). @@ -74,6 +76,7 @@ t_03_smoke_iterate(_Config) -> t_04_restart(_Config) -> DB = ?FUNCTION_NAME, ?assertMatch(ok, emqx_ds:open_db(DB, #{})), + TopicFilter = ['#'], StartTime = 0, Msgs = [ message(<<"foo/bar">>, <<"1">>, 0), @@ -81,8 +84,8 @@ t_04_restart(_Config) -> message(<<"bar/bar">>, <<"3">>, 2) ], ?assertMatch(ok, emqx_ds:store_batch(DB, Msgs)), - [{_, Stream}] = emqx_ds:get_streams(DB, ['#'], StartTime), - {ok, Iter0} = emqx_ds:make_iterator(Stream, StartTime), + [{_, Stream}] = emqx_ds:get_streams(DB, TopicFilter, StartTime), + {ok, Iter0} = emqx_ds:make_iterator(Stream, TopicFilter, StartTime), %% Restart the application: ?tp(warning, emqx_ds_SUITE_restart_app, #{}), ok = application:stop(emqx_durable_storage), From f1ab7c8a7c83d1109d287ba5b8ab95ce64376e9b Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Wed, 11 Oct 2023 16:38:16 +0200 Subject: [PATCH 082/155] feat(ds): Add persist callback to LTS trie --- apps/emqx_durable_storage/src/emqx_ds_lts.erl | 38 ++++++++++++++----- 1 file changed, 29 insertions(+), 9 deletions(-) diff --git a/apps/emqx_durable_storage/src/emqx_ds_lts.erl b/apps/emqx_durable_storage/src/emqx_ds_lts.erl index fcc9f2b36..5422979b7 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_lts.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_lts.erl @@ -17,10 
+17,10 @@ -module(emqx_ds_lts). %% API: --export([trie_create/0, topic_key/3, match_topics/2, lookup_topic_key/2, dump_to_dot/2]). +-export([trie_create/1, trie_create/0, topic_key/3, match_topics/2, lookup_topic_key/2]). %% Debug: --export([trie_next/3, trie_insert/3]). +-export([trie_next/3, trie_insert/3, dump_to_dot/2]). -export_type([static_key/0, trie/0]). @@ -46,11 +46,16 @@ -define(PREFIX, prefix). -type state() :: static_key() | ?PREFIX. --type msg_storage_key() :: {static_key(), _Varying :: [binary()]}. +-type varying() :: [binary()]. + +-type msg_storage_key() :: {static_key(), varying()}. -type threshold_fun() :: fun((non_neg_integer()) -> non_neg_integer()). +-type persist_callback() :: fun((_Key, _Val) -> ok). + -record(trie, { + persist :: persist_callback(), trie :: ets:tid(), stats :: ets:tid() }). @@ -67,16 +72,23 @@ %%================================================================================ %% @doc Create an empty trie --spec trie_create() -> trie(). -trie_create() -> +-spec trie_create(persist_callback()) -> trie(). +trie_create(Persist) -> Trie = ets:new(trie, [{keypos, #trans.key}, set]), Stats = ets:new(stats, [{keypos, 1}, set]), #trie{ + persist = Persist, trie = Trie, stats = Stats }. -%% @doc Create a topic key, +-spec trie_create() -> trie(). +trie_create() -> + trie_create(fun(_, _) -> + ok + end). + +%% @doc Lookup the topic key. Create a new one, if not found. -spec topic_key(trie(), threshold_fun(), [binary()]) -> msg_storage_key(). topic_key(Trie, ThresholdFun, Tokens) -> do_topic_key(Trie, ThresholdFun, 0, ?PREFIX, Tokens, []). @@ -161,8 +173,9 @@ trie_next(#trie{trie = Trie}, State, Token) -> end. -spec trie_insert(trie(), state(), edge()) -> {Updated, state()} when - Updated :: false | non_neg_integer(). -trie_insert(#trie{trie = Trie, stats = Stats}, State, Token) -> + NChildren :: non_neg_integer(), + Updated :: false | NChildren. 
+trie_insert(#trie{trie = Trie, stats = Stats, persist = Persist}, State, Token) -> Key = {State, Token}, NewState = get_id_for_key(State, Token), Rec = #trans{ @@ -171,6 +184,7 @@ trie_insert(#trie{trie = Trie, stats = Stats}, State, Token) -> }, case ets:insert_new(Trie, Rec) of true -> + ok = Persist(Key, NewState), Inc = case Token of ?EOT -> 0; @@ -206,7 +220,7 @@ get_id_for_key(_State, _Token) -> crypto:strong_rand_bytes(8). %% erlfmt-ignore --spec do_match_topics(trie(), state(), non_neg_integer(), [binary() | '+' | '#']) -> +-spec do_match_topics(trie(), state(), [binary() | '+'], [binary() | '+' | '#']) -> list(). do_match_topics(Trie, State, Varying, []) -> case trie_next(Trie, State, ?EOT) of @@ -260,6 +274,8 @@ do_lookup_topic_key(Trie, State, [Tok | Rest], Varying) -> end. do_topic_key(Trie, _, _, State, [], Varying) -> + %% We reached the end of topic. Assert: Trie node that corresponds + %% to EOT cannot be a wildcard. {_, false, Static} = trie_next_(Trie, State, ?EOT), {Static, lists:reverse(Varying)}; do_topic_key(Trie, ThresholdFun, Depth, State, [Tok | Rest], Varying0) -> @@ -268,11 +284,15 @@ do_topic_key(Trie, ThresholdFun, Depth, State, [Tok | Rest], Varying0) -> Varying = case trie_next_(Trie, State, Tok) of {NChildren, _, _DiscardState} when is_integer(NChildren), NChildren > Threshold -> + %% Number of children for the trie node reached the + %% threshold, we need to insert wildcard here: {_, NextState} = trie_insert(Trie, State, ?PLUS), [Tok | Varying0]; {_, false, NextState} -> Varying0; {_, true, NextState} -> + %% This topic level is marked as wildcard in the trie, + %% we need to add it to the varying part of the key: [Tok | Varying0] end, do_topic_key(Trie, ThresholdFun, Depth + 1, NextState, Rest, Varying). 
From ac91dbc58fc6bc29cbdca54a11d1efe4880a3e17 Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Wed, 11 Oct 2023 16:49:25 +0200 Subject: [PATCH 083/155] feat(ds): Restore LTS trie from a dump --- .../src/emqx_ds_bitmask_keymapper.erl | 1 - apps/emqx_durable_storage/src/emqx_ds_lts.erl | 38 ++++++++++++++----- .../src/emqx_ds_replication_layer.erl | 6 ++- .../src/proto/emqx_ds_proto_v1.erl | 8 +++- 4 files changed, 38 insertions(+), 15 deletions(-) diff --git a/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl b/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl index fd2d41946..2f28de293 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl @@ -348,7 +348,6 @@ ones(Bits) -> %% NBits = ceil(math:log2(N + 1)), %% ones(NBits). - %% bitmask_of_test() -> %% ?assertEqual(2#0, bitmask_of(0)), %% ?assertEqual(2#1, bitmask_of(1)), diff --git a/apps/emqx_durable_storage/src/emqx_ds_lts.erl b/apps/emqx_durable_storage/src/emqx_ds_lts.erl index 5422979b7..e9d3124f9 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_lts.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_lts.erl @@ -17,7 +17,9 @@ -module(emqx_ds_lts). %% API: --export([trie_create/1, trie_create/0, topic_key/3, match_topics/2, lookup_topic_key/2]). +-export([ + trie_create/1, trie_create/0, trie_restore/2, topic_key/3, match_topics/2, lookup_topic_key/2 +]). %% Debug: -export([trie_next/3, trie_insert/3, dump_to_dot/2]). @@ -85,8 +87,19 @@ trie_create(Persist) -> -spec trie_create() -> trie(). trie_create() -> trie_create(fun(_, _) -> - ok - end). + ok + end). + +%% @doc Restore trie from a dump +-spec trie_restore(persist_callback(), [{_Key, _Val}]) -> trie(). +trie_restore(Persist, Dump) -> + Trie = trie_create(Persist), + lists:foreach( + fun({{StateFrom, Token}, StateTo}) -> + trie_insert(Trie, StateFrom, Token, StateTo) + end, + Dump + ). %% @doc Lookup the topic key. 
Create a new one, if not found. -spec topic_key(trie(), threshold_fun(), [binary()]) -> msg_storage_key(). @@ -173,11 +186,20 @@ trie_next(#trie{trie = Trie}, State, Token) -> end. -spec trie_insert(trie(), state(), edge()) -> {Updated, state()} when - NChildren :: non_neg_integer(), + NChildren :: non_neg_integer(), Updated :: false | NChildren. -trie_insert(#trie{trie = Trie, stats = Stats, persist = Persist}, State, Token) -> +trie_insert(Trie, State, Token) -> + trie_insert(Trie, State, Token, get_id_for_key(State, Token)). + +%%================================================================================ +%% Internal functions +%%================================================================================ + +-spec trie_insert(trie(), state(), edge(), state()) -> {Updated, state()} when + NChildren :: non_neg_integer(), + Updated :: false | NChildren. +trie_insert(#trie{trie = Trie, stats = Stats, persist = Persist}, State, Token, NewState) -> Key = {State, Token}, - NewState = get_id_for_key(State, Token), Rec = #trans{ key = Key, next = NewState @@ -198,10 +220,6 @@ trie_insert(#trie{trie = Trie, stats = Stats, persist = Persist}, State, Token) {false, NextState} end. -%%================================================================================ -%% Internal functions -%%================================================================================ - -spec get_id_for_key(state(), edge()) -> static_key(). get_id_for_key(_State, _Token) -> %% Requirements for the return value: diff --git a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl index aeb2ce646..5b4ad8666 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl @@ -132,7 +132,8 @@ get_streams(DB, TopicFilter, StartTime) -> Shards ). 
--spec make_iterator(stream(), emqx_ds:topic_filter(), emqx_ds:time()) -> emqx_ds:make_iterator_result(iterator()). +-spec make_iterator(stream(), emqx_ds:topic_filter(), emqx_ds:time()) -> + emqx_ds:make_iterator_result(iterator()). make_iterator(Stream, TopicFilter, StartTime) -> #stream{shard = Shard, enc = StorageStream} = Stream, Node = node_of_shard(Shard), @@ -184,7 +185,8 @@ do_drop_shard_v1(Shard) -> do_get_streams_v1(Shard, TopicFilter, StartTime) -> emqx_ds_storage_layer:get_streams(Shard, TopicFilter, StartTime). --spec do_make_iterator_v1(shard_id(), _Stream, emqx_ds:topic_filter(), emqx_ds:time()) -> {ok, iterator()} | {error, _}. +-spec do_make_iterator_v1(shard_id(), _Stream, emqx_ds:topic_filter(), emqx_ds:time()) -> + {ok, iterator()} | {error, _}. do_make_iterator_v1(Shard, Stream, TopicFilter, StartTime) -> emqx_ds_storage_layer:make_iterator(Shard, Stream, TopicFilter, StartTime). diff --git a/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl b/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl index d4d7b3631..df9115a78 100644 --- a/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl +++ b/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl @@ -45,10 +45,14 @@ drop_shard(Node, Shard) -> get_streams(Node, Shard, TopicFilter, Time) -> erpc:call(Node, emqx_ds_replication_layer, do_get_streams_v1, [Shard, TopicFilter, Time]). --spec make_iterator(node(), emqx_ds_replication_layer:shard(), _Stream, emqx_ds:topic_filter(), emqx_ds:time()) -> +-spec make_iterator( + node(), emqx_ds_replication_layer:shard(), _Stream, emqx_ds:topic_filter(), emqx_ds:time() +) -> {ok, emqx_ds_replication_layer:iterator()} | {error, _}. make_iterator(Node, Shard, Stream, TopicFilter, StartTime) -> - erpc:call(Node, emqx_ds_replication_layer, do_make_iterator_v1, [Shard, Stream, TopicFilter, StartTime]). + erpc:call(Node, emqx_ds_replication_layer, do_make_iterator_v1, [ + Shard, Stream, TopicFilter, StartTime + ]). 
-spec next( node(), emqx_ds_replication_layer:shard(), emqx_ds_replication_layer:iterator(), pos_integer() From 7428e7037b51da049d780c6c38959e01bcbc712a Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Wed, 11 Oct 2023 20:53:34 +0200 Subject: [PATCH 084/155] feat(ds): Bitfield + Learned Topic Structure --- apps/emqx_durable_storage/src/emqx_ds.erl | 18 +- .../src/emqx_ds_bitmask_keymapper.erl | 60 ++- apps/emqx_durable_storage/src/emqx_ds_lts.erl | 58 +-- .../src/emqx_ds_replication_layer.erl | 2 +- .../src/emqx_ds_storage_bitfield_lts.erl | 346 ++++++++++++++++++ .../src/emqx_ds_storage_layer.erl | 23 +- .../src/emqx_ds_storage_layer_sup.erl | 4 +- .../src/emqx_ds_storage_reference.erl | 6 +- .../test/emqx_ds_SUITE.erl | 20 +- ...emqx_ds_message_storage_bitmask_SUITE.erl_ | 188 ---------- .../emqx_ds_storage_bitfield_lts_SUITE.erl | 343 +++++++++++++++++ .../test/emqx_ds_storage_layer_SUITE.erl_ | 292 --------------- 12 files changed, 821 insertions(+), 539 deletions(-) create mode 100644 apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl delete mode 100644 apps/emqx_durable_storage/test/emqx_ds_message_storage_bitmask_SUITE.erl_ create mode 100644 apps/emqx_durable_storage/test/emqx_ds_storage_bitfield_lts_SUITE.erl delete mode 100644 apps/emqx_durable_storage/test/emqx_ds_storage_layer_SUITE.erl_ diff --git a/apps/emqx_durable_storage/src/emqx_ds.erl b/apps/emqx_durable_storage/src/emqx_ds.erl index dd6af9a03..b1a003e93 100644 --- a/apps/emqx_durable_storage/src/emqx_ds.erl +++ b/apps/emqx_durable_storage/src/emqx_ds.erl @@ -34,6 +34,8 @@ -export([]). -export_type([ + create_db_opts/0, + builtin_db_opts/0, db/0, time/0, topic_filter/0, @@ -58,7 +60,7 @@ %% Parsed topic filter. -type topic_filter() :: list(binary() | '+' | '#' | ''). --type stream_rank() :: {integer(), integer()}. +-type stream_rank() :: {term(), integer()}. -opaque stream() :: emqx_ds_replication_layer:stream(). 
@@ -83,9 +85,14 @@ -type message_store_opts() :: #{}. +-type builtin_db_opts() :: + #{ + backend := builtin, + storage := emqx_ds_storage_layer:prototype() + }. + -type create_db_opts() :: - %% TODO: keyspace - #{}. + builtin_db_opts(). -type message_id() :: emqx_ds_replication_layer:message_id(). @@ -96,7 +103,7 @@ %% @doc Different DBs are completely independent from each other. They %% could represent something like different tenants. -spec open_db(db(), create_db_opts()) -> ok. -open_db(DB, Opts) -> +open_db(DB, Opts = #{backend := builtin}) -> emqx_ds_replication_layer:open_db(DB, Opts). %% @doc TODO: currently if one or a few shards are down, they won't be @@ -109,8 +116,7 @@ drop_db(DB) -> store_batch(DB, Msgs, Opts) -> emqx_ds_replication_layer:store_batch(DB, Msgs, Opts). -%% TODO: Do we really need to return message IDs? It's extra work... --spec store_batch(db(), [emqx_types:message()]) -> {ok, [message_id()]} | {error, _}. +-spec store_batch(db(), [emqx_types:message()]) -> store_batch_result(). store_batch(DB, Msgs) -> store_batch(DB, Msgs, #{}). diff --git a/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl b/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl index 2f28de293..4b6fcbcdf 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl @@ -80,20 +80,24 @@ %%================================================================================ %% API: --export([make_keymapper/1, vector_to_key/2, key_to_vector/2, next_range/3]). - -%% behavior callbacks: --export([]). - -%% internal exports: --export([]). +-export([ + make_keymapper/1, + vector_to_key/2, + bin_vector_to_key/2, + key_to_vector/2, + bin_key_to_vector/2, + next_range/3, + key_to_bitstring/2, + bitstring_to_key/2 +]). -export_type([vector/0, key/0, dimension/0, offset/0, bitsize/0, bitsource/0, keymapper/0]). -compile( {inline, [ ones/1, - extract/2 + extract/2, + extract_inv/2 ]} ). 
@@ -118,7 +122,7 @@ -type bitsize() :: pos_integer(). %% The resulting 1D key: --type key() :: binary(). +-type key() :: non_neg_integer(). -type bitsource() :: %% Consume `_Size` bits from timestamp starting at `_Offset`th @@ -148,7 +152,8 @@ %% API functions %%================================================================================ -%% @doc +%% @doc Create a keymapper object that stores the "schema" of the +%% transformation from a list of bitsources. %% %% Note: Dimension is 1-based. -spec make_keymapper([bitsource()]) -> keymapper(). @@ -183,6 +188,19 @@ vector_to_key(#keymapper{scanner = []}, []) -> vector_to_key(#keymapper{scanner = [Actions | Scanner]}, [Coord | Vector]) -> do_vector_to_key(Actions, Scanner, Coord, Vector, 0). +%% @doc Same as `vector_to_key', but it works with binaries, and outputs a binary. +-spec bin_vector_to_key(keymapper(), [binary()]) -> binary(). +bin_vector_to_key(Keymapper = #keymapper{dim_sizeof = DimSizeof, size = Size}, Binaries) -> + Vec = lists:map( + fun({Bin, SizeOf}) -> + <> = Bin, + Int + end, + lists:zip(Binaries, DimSizeof) + ), + Key = vector_to_key(Keymapper, Vec), + <>. + %% @doc Map key to a vector. %% %% Note: `vector_to_key(key_to_vector(K)) = K' but @@ -202,6 +220,18 @@ key_to_vector(#keymapper{scanner = Scanner}, Key) -> Scanner ). +%% @doc Same as `key_to_vector', but it works with binaries. +-spec bin_key_to_vector(keymapper(), binary()) -> [binary()]. +bin_key_to_vector(Keymapper = #keymapper{dim_sizeof = DimSizeof, size = Size}, BinKey) -> + <> = BinKey, + Vector = key_to_vector(Keymapper, Key), + lists:map( + fun({Elem, SizeOf}) -> + <> + end, + lists:zip(Vector, DimSizeof) + ). + %% @doc Given a keymapper, a filter, and a key, return a triple containing: %% %% 1. `NextKey', a key that is greater than the given one, and is @@ -232,6 +262,15 @@ next_range(Keymapper, Filter0, PrevKey) -> {NewKey, Bitmask, Bitfilter} end. +-spec bitstring_to_key(keymapper(), bitstring()) -> key(). 
+bitstring_to_key(#keymapper{size = Size}, Bin) -> + <> = Bin, + Key. + +-spec key_to_bitstring(keymapper(), key()) -> bitstring(). +key_to_bitstring(#keymapper{size = Size}, Key) -> + <>. + %%================================================================================ %% Internal functions %%================================================================================ @@ -311,7 +350,6 @@ fold_bitsources(Fun, InitAcc, Bitsources) -> Bitsources ). -%% Specialized version of fold: do_vector_to_key([], [], _Coord, [], Acc) -> Acc; do_vector_to_key([], [NewActions | Scanner], _Coord, [NewCoord | Vector], Acc) -> diff --git a/apps/emqx_durable_storage/src/emqx_ds_lts.erl b/apps/emqx_durable_storage/src/emqx_ds_lts.erl index e9d3124f9..a6e67c069 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_lts.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_lts.erl @@ -24,7 +24,7 @@ %% Debug: -export([trie_next/3, trie_insert/3, dump_to_dot/2]). --export_type([static_key/0, trie/0]). +-export_type([options/0, static_key/0, trie/0]). -include_lib("stdlib/include/ms_transform.hrl"). @@ -43,12 +43,12 @@ -type edge() :: binary() | ?EOT | ?PLUS. %% Fixed size binary --type static_key() :: binary(). +-type static_key() :: non_neg_integer(). -define(PREFIX, prefix). -type state() :: static_key() | ?PREFIX. --type varying() :: [binary()]. +-type varying() :: [binary() | ?PLUS]. -type msg_storage_key() :: {static_key(), varying()}. @@ -56,8 +56,15 @@ -type persist_callback() :: fun((_Key, _Val) -> ok). +-type options() :: + #{ + persist_callback => persist_callback(), + static_key_size => pos_integer() + }. + -record(trie, { persist :: persist_callback(), + static_key_size :: pos_integer(), trie :: ets:tid(), stats :: ets:tid() }). @@ -74,32 +81,40 @@ %%================================================================================ %% @doc Create an empty trie --spec trie_create(persist_callback()) -> trie(). 
-trie_create(Persist) -> - Trie = ets:new(trie, [{keypos, #trans.key}, set]), - Stats = ets:new(stats, [{keypos, 1}, set]), +-spec trie_create(options()) -> trie(). +trie_create(UserOpts) -> + Defaults = #{ + persist_callback => fun(_, _) -> ok end, + static_key_size => 8 + }, + #{ + persist_callback := Persist, + static_key_size := StaticKeySize + } = maps:merge(Defaults, UserOpts), + Trie = ets:new(trie, [{keypos, #trans.key}, set, public]), + Stats = ets:new(stats, [{keypos, 1}, set, public]), #trie{ persist = Persist, + static_key_size = StaticKeySize, trie = Trie, stats = Stats }. -spec trie_create() -> trie(). trie_create() -> - trie_create(fun(_, _) -> - ok - end). + trie_create(#{}). %% @doc Restore trie from a dump --spec trie_restore(persist_callback(), [{_Key, _Val}]) -> trie(). -trie_restore(Persist, Dump) -> - Trie = trie_create(Persist), +-spec trie_restore(options(), [{_Key, _Val}]) -> trie(). +trie_restore(Options, Dump) -> + Trie = trie_create(Options), lists:foreach( fun({{StateFrom, Token}, StateTo}) -> trie_insert(Trie, StateFrom, Token, StateTo) end, Dump - ). + ), + Trie. %% @doc Lookup the topic key. Create a new one, if not found. -spec topic_key(trie(), threshold_fun(), [binary()]) -> msg_storage_key(). @@ -113,7 +128,7 @@ lookup_topic_key(Trie, Tokens) -> %% @doc Return list of keys of topics that match a given topic filter -spec match_topics(trie(), [binary() | '+' | '#']) -> - [{static_key(), _Varying :: binary() | ?PLUS}]. + [msg_storage_key()]. match_topics(Trie, TopicFilter) -> do_match_topics(Trie, ?PREFIX, [], TopicFilter). @@ -189,7 +204,7 @@ trie_next(#trie{trie = Trie}, State, Token) -> NChildren :: non_neg_integer(), Updated :: false | NChildren. trie_insert(Trie, State, Token) -> - trie_insert(Trie, State, Token, get_id_for_key(State, Token)). + trie_insert(Trie, State, Token, get_id_for_key(Trie, State, Token)). 
%%================================================================================ %% Internal functions @@ -220,8 +235,8 @@ trie_insert(#trie{trie = Trie, stats = Stats, persist = Persist}, State, Token, {false, NextState} end. --spec get_id_for_key(state(), edge()) -> static_key(). -get_id_for_key(_State, _Token) -> +-spec get_id_for_key(trie(), state(), edge()) -> static_key(). +get_id_for_key(#trie{static_key_size = Size}, _State, _Token) -> %% Requirements for the return value: %% %% It should be globally unique for the `{State, Token}` pair. Other @@ -235,7 +250,8 @@ get_id_for_key(_State, _Token) -> %% If we want to impress computer science crowd, sorry, I mean to %% minimize storage requirements, we can even employ Huffman coding %% based on the frequency of messages. - crypto:strong_rand_bytes(8). + <> = crypto:strong_rand_bytes(Size), + Int. %% erlfmt-ignore -spec do_match_topics(trie(), state(), [binary() | '+'], [binary() | '+' | '#']) -> @@ -492,7 +508,7 @@ topic_key_test() -> end, lists:seq(1, 100)) after - dump_to_dot(T, atom_to_list(?FUNCTION_NAME) ++ ".dot") + dump_to_dot(T, filename:join("_build", atom_to_list(?FUNCTION_NAME) ++ ".dot")) end. %% erlfmt-ignore @@ -539,7 +555,7 @@ topic_match_test() -> {S2_1_, ['+', '+']}]), ok after - dump_to_dot(T, atom_to_list(?FUNCTION_NAME) ++ ".dot") + dump_to_dot(T, filename:join("_build", atom_to_list(?FUNCTION_NAME) ++ ".dot")) end. -define(keys_history, topic_key_history). 
diff --git a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl
index 5b4ad8666..06cead725 100644
--- a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl
+++ b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl
@@ -119,7 +119,7 @@ get_streams(DB, TopicFilter, StartTime) ->
     Streams = emqx_ds_proto_v1:get_streams(Node, Shard, TopicFilter, StartTime),
     lists:map(
         fun({RankY, Stream}) ->
-            RankX = erlang:phash2(Shard, 255),
+            RankX = Shard,
             Rank = {RankX, RankY},
             {Rank, #stream{
                 shard = Shard,
diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl
new file mode 100644
index 000000000..e8bfdaa2e
--- /dev/null
+++ b/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl
@@ -0,0 +1,346 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%--------------------------------------------------------------------
+
+%% @doc Bitfield/LTS storage layer for the builtin durable storage
+%% backend. Message keys are constructed with
+%% `emqx_ds_bitmask_keymapper' from the topic index, timestamp and
+%% hashed wildcard levels; the topic structure is learned at runtime
+%% via the `emqx_ds_lts' trie.
+-module(emqx_ds_storage_bitfield_lts).
+
+-behavior(emqx_ds_storage_layer).
+
+%% API:
+-export([]). 
+ +%% behavior callbacks: +-export([create/4, open/5, store_batch/4, get_streams/4, make_iterator/5, next/4]). + +%% internal exports: +-export([]). + +-export_type([options/0]). + +-include_lib("emqx/include/emqx.hrl"). + +%%================================================================================ +%% Type declarations +%%================================================================================ + +-type options() :: + #{ + bits_per_wildcard_level => pos_integer(), + topic_index_bytes => pos_integer(), + epoch_bits => non_neg_integer() + }. + +%% Permanent state: +-type schema() :: + #{ + bits_per_wildcard_level := pos_integer(), + topic_index_bytes := pos_integer(), + epoch_bits := non_neg_integer(), + ts_offset_bits := non_neg_integer() + }. + +%% Runtime state: +-record(s, { + db :: rocksdb:db_handle(), + data :: rocksdb:cf_handle(), + trie :: emqx_ds_lts:trie(), + keymappers :: array:array(emqx_ds_bitmask_keymapper:keymapper()) +}). + +-record(stream, { + storage_key :: emqx_ds_lts:msg_storage_key() +}). + +-record(it, { + topic_filter :: emqx_ds:topic_filter(), + start_time :: emqx_ds:time(), + storage_key :: emqx_ds_lts:msg_storage_key(), + last_seen_key = 0 :: emqx_ds_bitmask_keymapper:key(), + key_filter :: [emqx_ds_bitmask_keymapper:scalar_range()] +}). + +-define(QUICKCHECK_KEY(KEY, BITMASK, BITFILTER), + ((KEY band BITMASK) =:= BITFILTER) +). 
+
+%%================================================================================
+%% API functions
+%%================================================================================
+
+%%================================================================================
+%% behavior callbacks
+%%================================================================================
+
+create(_ShardId, DBHandle, GenId, Options) ->
+    %% Get options:
+    BitsPerTopicLevel = maps:get(bits_per_wildcard_level, Options, 64),
+    TopicIndexBytes = maps:get(topic_index_bytes, Options, 4),
+    TSOffsetBits = maps:get(epoch_bits, Options, 5),
+    %% Create column families:
+    DataCFName = data_cf(GenId),
+    TrieCFName = trie_cf(GenId),
+    {ok, DataCFHandle} = rocksdb:create_column_family(DBHandle, DataCFName, []),
+    {ok, TrieCFHandle} = rocksdb:create_column_family(DBHandle, TrieCFName, []),
+    %% Create schema:
+
+    % Fixed size_of MQTT message timestamp
+    SizeOfTS = 64,
+    Schema = #{
+        bits_per_wildcard_level => BitsPerTopicLevel,
+        topic_index_bytes => TopicIndexBytes,
+        epoch_bits => SizeOfTS - TSOffsetBits,
+        ts_offset_bits => TSOffsetBits
+    },
+    {Schema, [{DataCFName, DataCFHandle}, {TrieCFName, TrieCFHandle}]}. 
+ +open(_Shard, DBHandle, GenId, CFRefs, Schema) -> + #{ + bits_per_wildcard_level := BitsPerTopicLevel, + topic_index_bytes := TopicIndexBytes, + epoch_bits := EpochBits, + ts_offset_bits := TSOffsetBits + } = Schema, + {_, DataCF} = lists:keyfind(data_cf(GenId), 1, CFRefs), + {_, TrieCF} = lists:keyfind(trie_cf(GenId), 1, CFRefs), + Trie = restore_trie(TopicIndexBytes, DBHandle, TrieCF), + %% If user's topics have more than learned 10 wildcard levels then + %% total carnage is going on; learned topic structure doesn't + %% really apply: + MaxWildcardLevels = 10, + Keymappers = array:from_list( + [ + make_keymapper(TopicIndexBytes, EpochBits, BitsPerTopicLevel, TSOffsetBits, N) + || N <- lists:seq(0, MaxWildcardLevels) + ] + ), + #s{db = DBHandle, data = DataCF, trie = Trie, keymappers = Keymappers}. + +store_batch(_ShardId, S = #s{db = DB, data = Data}, Messages, _Options) -> + lists:foreach( + fun(Msg) -> + {Key, _} = make_key(S, Msg), + Val = serialize(Msg), + rocksdb:put(DB, Data, Key, Val, []) + end, + Messages + ). + +get_streams(_Shard, #s{trie = Trie}, TopicFilter, _StartTime) -> + Indexes = emqx_ds_lts:match_topics(Trie, TopicFilter), + [ + #stream{ + storage_key = I + } + || I <- Indexes + ]. + +make_iterator(_Shard, _Data, #stream{storage_key = StorageKey}, TopicFilter, StartTime) -> + {TopicIndex, Varying} = StorageKey, + Filter = [ + {'=', TopicIndex}, + {'>=', StartTime} + | lists:map( + fun + ('+') -> + any; + (TopicLevel) when is_binary(TopicLevel) -> + {'=', hash_topic_level(TopicLevel)} + end, + Varying + ) + ], + {ok, #it{ + topic_filter = TopicFilter, + start_time = StartTime, + storage_key = StorageKey, + key_filter = Filter + }}. 
+ +next(_Shard, #s{db = DB, data = CF, keymappers = Keymappers}, It0, BatchSize) -> + #it{ + key_filter = KeyFilter + } = It0, + % TODO: ugh, so ugly + NVarying = length(KeyFilter) - 2, + Keymapper = array:get(NVarying, Keymappers), + {ok, ITHandle} = rocksdb:iterator(DB, CF, []), + try + next_loop(ITHandle, Keymapper, It0, [], BatchSize) + after + rocksdb:iterator_close(ITHandle) + end. + +%%================================================================================ +%% Internal functions +%%================================================================================ + +next_loop(_, _, It, Acc, 0) -> + {ok, It, lists:reverse(Acc)}; +next_loop(ITHandle, KeyMapper, It0, Acc0, N0) -> + {Key1, Bitmask, Bitfilter} = next_range(KeyMapper, It0), + case iterator_move(KeyMapper, ITHandle, {seek, Key1}) of + {ok, Key, Val} when ?QUICKCHECK_KEY(Key, Bitmask, Bitfilter) -> + Msg = deserialize(Val), + It1 = It0#it{last_seen_key = Key}, + case check_message(It1, Msg) of + true -> + N1 = N0 - 1, + Acc1 = [Msg | Acc0]; + false -> + N1 = N0, + Acc1 = Acc0 + end, + {N, It, Acc} = traverse_interval( + ITHandle, KeyMapper, Bitmask, Bitfilter, It1, Acc1, N1 + ), + next_loop(ITHandle, KeyMapper, It, Acc, N); + {ok, Key, _Val} -> + It = It0#it{last_seen_key = Key}, + next_loop(ITHandle, KeyMapper, It, Acc0, N0); + {error, invalid_iterator} -> + {ok, It0, lists:reverse(Acc0)} + end. 
+ +traverse_interval(_, _, _, _, It, Acc, 0) -> + {0, It, Acc}; +traverse_interval(ITHandle, KeyMapper, Bitmask, Bitfilter, It0, Acc, N) -> + %% TODO: supply the upper limit to rocksdb to the last extra seek: + case iterator_move(KeyMapper, ITHandle, next) of + {ok, Key, Val} when ?QUICKCHECK_KEY(Key, Bitmask, Bitfilter) -> + Msg = deserialize(Val), + It = It0#it{last_seen_key = Key}, + case check_message(It, Msg) of + true -> + traverse_interval( + ITHandle, KeyMapper, Bitmask, Bitfilter, It, [Msg | Acc], N - 1 + ); + false -> + traverse_interval(ITHandle, KeyMapper, Bitmask, Bitfilter, It, Acc, N) + end; + {ok, Key, _Val} -> + It = It0#it{last_seen_key = Key}, + {N, It, Acc}; + {error, invalid_iterator} -> + {0, It0, Acc} + end. + +next_range(KeyMapper, #it{key_filter = KeyFilter, last_seen_key = PrevKey}) -> + emqx_ds_bitmask_keymapper:next_range(KeyMapper, KeyFilter, PrevKey). + +check_message(_Iterator, _Msg) -> + %% TODO. + true. + +iterator_move(KeyMapper, ITHandle, Action0) -> + Action = + case Action0 of + next -> + next; + {seek, Int} -> + {seek, emqx_ds_bitmask_keymapper:key_to_bitstring(KeyMapper, Int)} + end, + case rocksdb:iterator_move(ITHandle, Action) of + {ok, KeyBin, Val} -> + {ok, emqx_ds_bitmask_keymapper:bitstring_to_key(KeyMapper, KeyBin), Val}; + {ok, KeyBin} -> + {ok, emqx_ds_bitmask_keymapper:bitstring_to_key(KeyMapper, KeyBin)}; + Other -> + Other + end. + +-spec make_key(#s{}, #message{}) -> {binary(), [binary()]}. +make_key(#s{keymappers = KeyMappers, trie = Trie}, #message{timestamp = Timestamp, topic = TopicBin}) -> + Tokens = emqx_topic:tokens(TopicBin), + {TopicIndex, Varying} = emqx_ds_lts:topic_key(Trie, fun threshold_fun/1, Tokens), + VaryingHashes = [hash_topic_level(I) || I <- Varying], + KeyMapper = array:get(length(Varying), KeyMappers), + KeyBin = make_key(KeyMapper, TopicIndex, Timestamp, VaryingHashes), + {KeyBin, Varying}. 
+ +-spec make_key(emqx_ds_bitmask_keymapper:keymapper(), emqx_ds_lts:static_key(), emqx_ds:time(), [ + non_neg_integer() +]) -> + binary(). +make_key(KeyMapper, TopicIndex, Timestamp, Varying) -> + emqx_ds_bitmask_keymapper:key_to_bitstring( + KeyMapper, + emqx_ds_bitmask_keymapper:vector_to_key(KeyMapper, [TopicIndex, Timestamp | Varying]) + ). + +%% TODO: don't hardcode the thresholds +threshold_fun(0) -> + 100; +threshold_fun(_) -> + 20. + +hash_topic_level(TopicLevel) -> + <> = erlang:md5(TopicLevel), + Int. + +serialize(Msg) -> + term_to_binary(Msg). + +deserialize(Blob) -> + binary_to_term(Blob). + +-define(BYTE_SIZE, 8). + +%% erlfmt-ignore +make_keymapper(TopicIndexBytes, EpochBits, BitsPerTopicLevel, TSOffsetBits, N) -> + Bitsources = + %% Dimension Offset Bitsize + [{1, 0, TopicIndexBytes * ?BYTE_SIZE}, %% Topic index + {2, TSOffsetBits, EpochBits }] ++ %% Timestamp epoch + [{2 + I, 0, BitsPerTopicLevel } %% Varying topic levels + || I <- lists:seq(1, N)] ++ + [{2, 0, TSOffsetBits }], %% Timestamp offset + emqx_ds_bitmask_keymapper:make_keymapper(Bitsources). + +-spec restore_trie(pos_integer(), rocksdb:db_handle(), rocksdb:cf_handle()) -> emqx_ds_lts:trie(). +restore_trie(TopicIndexBytes, DB, CF) -> + PersistCallback = fun(Key, Val) -> + rocksdb:put(DB, CF, term_to_binary(Key), term_to_binary(Val), []) + end, + {ok, IT} = rocksdb:iterator(DB, CF, []), + try + Dump = read_persisted_trie(IT, rocksdb:iterator_move(IT, first)), + TrieOpts = #{persist_callback => PersistCallback, static_key_size => TopicIndexBytes}, + emqx_ds_lts:trie_restore(TrieOpts, Dump) + after + rocksdb:iterator_close(IT) + end. + +read_persisted_trie(IT, {ok, KeyB, ValB}) -> + [ + {binary_to_term(KeyB), binary_to_term(ValB)} + | read_persisted_trie(IT, rocksdb:iterator_move(IT, next)) + ]; +read_persisted_trie(IT, {error, invalid_iterator}) -> + []. + +%% @doc Generate a column family ID for the MQTT messages +-spec data_cf(emqx_ds_storage_layer:gen_id()) -> [char()]. 
+data_cf(GenId) -> + "emqx_ds_storage_bitfield_lts_data" ++ integer_to_list(GenId). + +%% @doc Generate a column family ID for the trie +-spec trie_cf(emqx_ds_storage_layer:gen_id()) -> [char()]. +trie_cf(GenId) -> + "emqx_ds_storage_bitfield_lts_trie" ++ integer_to_list(GenId). diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl index 744ac869f..bce976559 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl @@ -32,6 +32,10 @@ %% Type declarations %%================================================================================ +-type prototype() :: + {emqx_ds_storage_reference, emqx_ds_storage_reference:options()} + | {emqx_ds_storage_bitfield_lts, emqx_ds_storage_bitfield_lts:options()}. + -type shard_id() :: emqx_ds_replication_layer:shard_id(). -type cf_refs() :: [{string(), rocksdb:cf_handle()}]. @@ -107,7 +111,7 @@ _Data. -callback store_batch(shard_id(), _Data, [emqx_types:message()], emqx_ds:message_store_opts()) -> - ok. + emqx_ds:store_batch_result(). -callback get_streams(shard_id(), _Data, emqx_ds:topic_filter(), emqx_ds:time()) -> [_Stream]. @@ -122,7 +126,7 @@ %% API for the replication layer %%================================================================================ --spec open_shard(shard_id(), emqx_ds:create_db_opts()) -> ok. +-spec open_shard(shard_id(), emqx_ds:builtin_db_opts()) -> ok. open_shard(Shard, Options) -> emqx_ds_storage_layer_sup:ensure_shard(Shard, Options). @@ -195,7 +199,7 @@ next(Shard, Iter = #it{generation = GenId, enc = GenIter0}, BatchSize) -> -define(REF(ShardId), {via, gproc, {n, l, {?MODULE, ShardId}}}). --spec start_link(shard_id(), emqx_ds:create_db_opts()) -> +-spec start_link(shard_id(), emqx_ds:builtin_db_opts()) -> {ok, pid()}. start_link(Shard, Options) -> gen_server:start_link(?REF(Shard), ?MODULE, {Shard, Options}, []). 
@@ -224,7 +228,8 @@ init({ShardId, Options}) -> {Schema, CFRefs} = case get_schema_persistent(DB) of not_found -> - create_new_shard_schema(ShardId, DB, CFRefs0, Options); + Prototype = maps:get(storage, Options), + create_new_shard_schema(ShardId, DB, CFRefs0, Prototype); Scm -> {Scm, CFRefs0} end, @@ -300,14 +305,14 @@ open_generation(ShardId, DB, CFRefs, GenId, GenSchema) -> RuntimeData = Mod:open(ShardId, DB, GenId, CFRefs, Schema), GenSchema#{data => RuntimeData}. --spec create_new_shard_schema(shard_id(), rocksdb:db_handle(), cf_refs(), _Options) -> +-spec create_new_shard_schema(shard_id(), rocksdb:db_handle(), cf_refs(), prototype()) -> {shard_schema(), cf_refs()}. -create_new_shard_schema(ShardId, DB, CFRefs, Options) -> - ?tp(notice, ds_create_new_shard_schema, #{shard => ShardId, options => Options}), +create_new_shard_schema(ShardId, DB, CFRefs, Prototype) -> + ?tp(notice, ds_create_new_shard_schema, #{shard => ShardId, prototype => Prototype}), %% TODO: read prototype from options/config Schema0 = #{ current_generation => 0, - prototype => {emqx_ds_storage_reference, #{}} + prototype => Prototype }, {_NewGenId, Schema, NewCFRefs} = new_generation(ShardId, DB, Schema0, _Since = 0), {Schema, NewCFRefs ++ CFRefs}. @@ -331,7 +336,7 @@ commit_metadata(#s{shard_id = ShardId, schema = Schema, shard = Runtime, db = DB ok = put_schema_persistent(DB, Schema), put_schema_runtime(ShardId, Runtime). --spec rocksdb_open(shard_id(), emqx_ds:create_db_opts()) -> +-spec rocksdb_open(shard_id(), emqx_ds:builtin_db_opts()) -> {ok, rocksdb:db_handle(), cf_refs()} | {error, _TODO}. 
rocksdb_open(Shard, Options) -> DBOptions = [ diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_layer_sup.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_layer_sup.erl index bf73e3ac8..fac7204bf 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_layer_sup.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_layer_sup.erl @@ -25,7 +25,7 @@ start_link() -> supervisor:start_link({local, ?SUP}, ?MODULE, []). --spec start_shard(emqx_ds:shard(), emqx_ds_storage_layer:options()) -> +-spec start_shard(emqx_ds_replication_layer:shard_id(), emqx_ds:create_db_opts()) -> supervisor:startchild_ret(). start_shard(Shard, Options) -> supervisor:start_child(?SUP, shard_child_spec(Shard, Options)). @@ -63,7 +63,7 @@ init([]) -> %% Internal functions %%================================================================================ --spec shard_child_spec(emqx_ds:shard(), emqx_ds_storage_layer:options()) -> +-spec shard_child_spec(emqx_ds_replication_layer:shard_id(), emqx_ds:create_db_opts()) -> supervisor:child_spec(). shard_child_spec(Shard, Options) -> #{ diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_reference.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_reference.erl index 5a91f9ecd..9c7fc3158 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_reference.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_reference.erl @@ -32,7 +32,7 @@ %% internal exports: -export([]). --export_type([]). +-export_type([options/0]). -include_lib("emqx/include/emqx.hrl"). @@ -40,6 +40,8 @@ %% Type declarations %%================================================================================ +-type options() :: #{}. + %% Permanent state: -record(schema, {}). @@ -134,4 +136,4 @@ do_next(TopicFilter, StartTime, IT, Action, NLeft, Key0, Acc) -> %% @doc Generate a column family ID for the MQTT messages -spec data_cf(emqx_ds_storage_layer:gen_id()) -> [char()]. data_cf(GenId) -> - ?MODULE_STRING ++ integer_to_list(GenId). 
+ "emqx_ds_storage_reference" ++ integer_to_list(GenId). diff --git a/apps/emqx_durable_storage/test/emqx_ds_SUITE.erl b/apps/emqx_durable_storage/test/emqx_ds_SUITE.erl index 2dc77c563..9637431d3 100644 --- a/apps/emqx_durable_storage/test/emqx_ds_SUITE.erl +++ b/apps/emqx_durable_storage/test/emqx_ds_SUITE.erl @@ -23,19 +23,25 @@ -include_lib("stdlib/include/assert.hrl"). -include_lib("snabbkaffe/include/snabbkaffe.hrl"). +opts() -> + #{ + backend => builtin, + storage => {emqx_ds_storage_reference, #{}} + }. + %% A simple smoke test that verifies that opening/closing the DB %% doesn't crash, and not much else t_00_smoke_open_drop(_Config) -> DB = 'DB', - ?assertMatch(ok, emqx_ds:open_db(DB, #{})), - ?assertMatch(ok, emqx_ds:open_db(DB, #{})), + ?assertMatch(ok, emqx_ds:open_db(DB, opts())), + ?assertMatch(ok, emqx_ds:open_db(DB, opts())), ?assertMatch(ok, emqx_ds:drop_db(DB)). %% A simple smoke test that verifies that storing the messages doesn't %% crash t_01_smoke_store(_Config) -> DB = default, - ?assertMatch(ok, emqx_ds:open_db(DB, #{})), + ?assertMatch(ok, emqx_ds:open_db(DB, opts())), Msg = message(<<"foo/bar">>, <<"foo">>, 0), ?assertMatch(ok, emqx_ds:store_batch(DB, [Msg])). @@ -43,7 +49,7 @@ t_01_smoke_store(_Config) -> %% doesn't crash and that iterators can be opened. t_02_smoke_get_streams_start_iter(_Config) -> DB = ?FUNCTION_NAME, - ?assertMatch(ok, emqx_ds:open_db(DB, #{})), + ?assertMatch(ok, emqx_ds:open_db(DB, opts())), StartTime = 0, TopicFilter = ['#'], [{Rank, Stream}] = emqx_ds:get_streams(DB, TopicFilter, StartTime), @@ -54,7 +60,7 @@ t_02_smoke_get_streams_start_iter(_Config) -> %% over messages. t_03_smoke_iterate(_Config) -> DB = ?FUNCTION_NAME, - ?assertMatch(ok, emqx_ds:open_db(DB, #{})), + ?assertMatch(ok, emqx_ds:open_db(DB, opts())), StartTime = 0, TopicFilter = ['#'], Msgs = [ @@ -75,7 +81,7 @@ t_03_smoke_iterate(_Config) -> %% they are left off. 
t_04_restart(_Config) -> DB = ?FUNCTION_NAME, - ?assertMatch(ok, emqx_ds:open_db(DB, #{})), + ?assertMatch(ok, emqx_ds:open_db(DB, opts())), TopicFilter = ['#'], StartTime = 0, Msgs = [ @@ -90,7 +96,7 @@ t_04_restart(_Config) -> ?tp(warning, emqx_ds_SUITE_restart_app, #{}), ok = application:stop(emqx_durable_storage), {ok, _} = application:ensure_all_started(emqx_durable_storage), - ok = emqx_ds:open_db(DB, #{}), + ok = emqx_ds:open_db(DB, opts()), %% The old iterator should be still operational: {ok, Iter, Batch} = iterate(Iter0, 1), ?assertEqual(Msgs, Batch, {Iter0, Iter}). diff --git a/apps/emqx_durable_storage/test/emqx_ds_message_storage_bitmask_SUITE.erl_ b/apps/emqx_durable_storage/test/emqx_ds_message_storage_bitmask_SUITE.erl_ deleted file mode 100644 index 599bd6c7b..000000000 --- a/apps/emqx_durable_storage/test/emqx_ds_message_storage_bitmask_SUITE.erl_ +++ /dev/null @@ -1,188 +0,0 @@ -%%-------------------------------------------------------------------- -%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved. -%%-------------------------------------------------------------------- --module(emqx_ds_message_storage_bitmask_SUITE). - --compile(export_all). --compile(nowarn_export_all). - --include_lib("stdlib/include/assert.hrl"). - --import(emqx_ds_message_storage_bitmask, [ - make_keymapper/1, - keymapper_info/1, - compute_topic_bitmask/2, - compute_time_bitmask/1, - compute_topic_seek/4 -]). - -all() -> emqx_common_test_helpers:all(?MODULE). - -t_make_keymapper(_) -> - ?assertMatch( - #{ - source := [ - {timestamp, 9, 23}, - {hash, level, 2}, - {hash, level, 4}, - {hash, levels, 8}, - {timestamp, 0, 9} - ], - bitsize := 46, - epoch := 512 - }, - keymapper_info( - make_keymapper(#{ - timestamp_bits => 32, - topic_bits_per_level => [2, 4, 8], - epoch => 1000 - }) - ) - ). 
- -t_make_keymapper_single_hash_level(_) -> - ?assertMatch( - #{ - source := [ - {timestamp, 0, 32}, - {hash, levels, 16} - ], - bitsize := 48, - epoch := 1 - }, - keymapper_info( - make_keymapper(#{ - timestamp_bits => 32, - topic_bits_per_level => [16], - epoch => 1 - }) - ) - ). - -t_make_keymapper_no_timestamp(_) -> - ?assertMatch( - #{ - source := [ - {hash, level, 4}, - {hash, level, 8}, - {hash, levels, 16} - ], - bitsize := 28, - epoch := 1 - }, - keymapper_info( - make_keymapper(#{ - timestamp_bits => 0, - topic_bits_per_level => [4, 8, 16], - epoch => 42 - }) - ) - ). - -t_compute_topic_bitmask(_) -> - KM = make_keymapper(#{topic_bits_per_level => [3, 4, 5, 2], timestamp_bits => 0, epoch => 1}), - ?assertEqual( - 2#111_1111_11111_11, - compute_topic_bitmask([<<"foo">>, <<"bar">>], KM) - ), - ?assertEqual( - 2#111_0000_11111_11, - compute_topic_bitmask([<<"foo">>, '+'], KM) - ), - ?assertEqual( - 2#111_0000_00000_11, - compute_topic_bitmask([<<"foo">>, '+', '+'], KM) - ), - ?assertEqual( - 2#111_0000_11111_00, - compute_topic_bitmask([<<"foo">>, '+', <<"bar">>, '+'], KM) - ). - -t_compute_topic_bitmask_wildcard(_) -> - KM = make_keymapper(#{topic_bits_per_level => [3, 4, 5, 2], timestamp_bits => 0, epoch => 1}), - ?assertEqual( - 2#000_0000_00000_00, - compute_topic_bitmask(['#'], KM) - ), - ?assertEqual( - 2#111_0000_00000_00, - compute_topic_bitmask([<<"foo">>, '#'], KM) - ), - ?assertEqual( - 2#111_1111_11111_00, - compute_topic_bitmask([<<"foo">>, <<"bar">>, <<"baz">>, '#'], KM) - ). - -t_compute_topic_bitmask_wildcard_long_tail(_) -> - KM = make_keymapper(#{topic_bits_per_level => [3, 4, 5, 2], timestamp_bits => 0, epoch => 1}), - ?assertEqual( - 2#111_1111_11111_11, - compute_topic_bitmask([<<"foo">>, <<"bar">>, <<"baz">>, <<>>, <<"xyzzy">>], KM) - ), - ?assertEqual( - 2#111_1111_11111_00, - compute_topic_bitmask([<<"foo">>, <<"bar">>, <<"baz">>, <<>>, '#'], KM) - ). 
- -t_compute_time_bitmask(_) -> - KM = make_keymapper(#{topic_bits_per_level => [1, 2, 3], timestamp_bits => 10, epoch => 200}), - ?assertEqual(2#111_000000_1111111, compute_time_bitmask(KM)). - -t_compute_time_bitmask_epoch_only(_) -> - KM = make_keymapper(#{topic_bits_per_level => [1, 2, 3], timestamp_bits => 10, epoch => 1}), - ?assertEqual(2#1111111111_000000, compute_time_bitmask(KM)). - -%% Filter = |123|***|678|***| -%% Mask = |123|***|678|***| -%% Key1 = |123|011|108|121| → Seek = 0 |123|011|678|000| -%% Key2 = |123|011|679|919| → Seek = 0 |123|012|678|000| -%% Key3 = |123|999|679|001| → Seek = 1 |123|000|678|000| → eos -%% Key4 = |125|011|179|017| → Seek = 1 |123|000|678|000| → eos - -t_compute_next_topic_seek(_) -> - KM = make_keymapper(#{topic_bits_per_level => [8, 8, 16, 12], timestamp_bits => 0, epoch => 1}), - ?assertMatch( - none, - compute_topic_seek( - 16#FD_42_4242_043, - 16#FD_42_4242_042, - 16#FF_FF_FFFF_FFF, - KM - ) - ), - ?assertMatch( - 16#FD_11_0678_000, - compute_topic_seek( - 16#FD_11_0108_121, - 16#FD_00_0678_000, - 16#FF_00_FFFF_000, - KM - ) - ), - ?assertMatch( - 16#FD_12_0678_000, - compute_topic_seek( - 16#FD_11_0679_919, - 16#FD_00_0678_000, - 16#FF_00_FFFF_000, - KM - ) - ), - ?assertMatch( - none, - compute_topic_seek( - 16#FD_FF_0679_001, - 16#FD_00_0678_000, - 16#FF_00_FFFF_000, - KM - ) - ), - ?assertMatch( - none, - compute_topic_seek( - 16#FE_11_0179_017, - 16#FD_00_0678_000, - 16#FF_00_FFFF_000, - KM - ) - ). diff --git a/apps/emqx_durable_storage/test/emqx_ds_storage_bitfield_lts_SUITE.erl b/apps/emqx_durable_storage/test/emqx_ds_storage_bitfield_lts_SUITE.erl new file mode 100644 index 000000000..f9a7b02c4 --- /dev/null +++ b/apps/emqx_durable_storage/test/emqx_ds_storage_bitfield_lts_SUITE.erl @@ -0,0 +1,343 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved. 
+%%-------------------------------------------------------------------- +-module(emqx_ds_storage_bitfield_lts_SUITE). + +-compile(export_all). +-compile(nowarn_export_all). + +-include_lib("emqx/include/emqx.hrl"). +-include_lib("common_test/include/ct.hrl"). +-include_lib("stdlib/include/assert.hrl"). + +-define(SHARD, shard(?FUNCTION_NAME)). + +-define(DEFAULT_CONFIG, #{ + backend => builtin, + storage => {emqx_ds_storage_bitfield_lts, #{}} +}). + +-define(COMPACT_CONFIG, #{ + backend => builtin, + storage => + {emqx_ds_storage_bitfield_lts, #{ + bits_per_wildcard_level => 8 + }} +}). + +%% Smoke test for opening and reopening the database +t_open(_Config) -> + ok = emqx_ds_storage_layer_sup:stop_shard(?SHARD), + {ok, _} = emqx_ds_storage_layer_sup:start_shard(?SHARD, #{}). + +%% Smoke test of store function +t_store(_Config) -> + MessageID = emqx_guid:gen(), + PublishedAt = 1000, + Topic = <<"foo/bar">>, + Payload = <<"message">>, + Msg = #message{ + id = MessageID, + topic = Topic, + payload = Payload, + timestamp = PublishedAt + }, + ?assertMatch(ok, emqx_ds_storage_layer:store_batch(?SHARD, [Msg], #{})). 
+ +%% Smoke test for iteration through a concrete topic +t_iterate(_Config) -> + %% Prepare data: + Topics = [<<"foo/bar">>, <<"foo/bar/baz">>, <<"a">>], + Timestamps = lists:seq(1, 10), + Batch = [ + make_message(PublishedAt, Topic, integer_to_binary(PublishedAt)) + || Topic <- Topics, PublishedAt <- Timestamps + ], + ok = emqx_ds_storage_layer:store_batch(?SHARD, Batch, []), + %% Iterate through individual topics: + [ + begin + [{_Rank, Stream}] = emqx_ds_storage_layer:get_streams(?SHARD, parse_topic(Topic), 0), + {ok, It} = emqx_ds_storage_layer:make_iterator(?SHARD, Stream, parse_topic(Topic), 0), + {ok, NextIt, Messages} = emqx_ds_storage_layer:next(?SHARD, It, 100), + ?assertEqual( + lists:map(fun integer_to_binary/1, Timestamps), + payloads(Messages) + ), + {ok, _, []} = emqx_ds_storage_layer:next(?SHARD, NextIt, 100) + end + || Topic <- Topics + ], + ok. + +-define(assertSameSet(A, B), ?assertEqual(lists:sort(A), lists:sort(B))). + +%% Smoke test that verifies that concrete topics become individual +%% streams, unless there's too many of them +t_get_streams(_Config) -> + %% Prepare data: + Topics = [<<"foo/bar">>, <<"foo/bar/baz">>, <<"a">>], + Timestamps = lists:seq(1, 10), + Batch = [ + make_message(PublishedAt, Topic, integer_to_binary(PublishedAt)) + || Topic <- Topics, PublishedAt <- Timestamps + ], + ok = emqx_ds_storage_layer:store_batch(?SHARD, Batch, []), + GetStream = fun(Topic) -> + StartTime = 0, + emqx_ds_storage_layer:get_streams(?SHARD, parse_topic(Topic), StartTime) + end, + %% Get streams for individual topics to use as a reference for later: + [FooBar = {_, _}] = GetStream(<<"foo/bar">>), + [FooBarBaz] = GetStream(<<"foo/bar/baz">>), + [A] = GetStream(<<"a">>), + %% Restart shard to make sure trie is persisted: + ok = emqx_ds_storage_layer_sup:stop_shard(?SHARD), + {ok, _} = emqx_ds_storage_layer_sup:start_shard(?SHARD, #{}), + %% Test various wildcards: + [] = GetStream(<<"bar/foo">>), + ?assertEqual([FooBar], GetStream("+/+")), + 
?assertSameSet([FooBar, FooBarBaz], GetStream(<<"foo/#">>)), + ?assertSameSet([FooBar, FooBarBaz, A], GetStream(<<"#">>)), + %% Now insert a bunch of messages with different topics to create wildcards: + NewBatch = [ + begin + B = integer_to_binary(I), + make_message(100, <<"foo/bar/", B/binary>>, <<"filler", B/binary>>) + end + || I <- lists:seq(1, 200) + ], + ok = emqx_ds_storage_layer:store_batch(?SHARD, NewBatch, []), + %% Check that "foo/bar/baz" topic now appears in two streams: + %% "foo/bar/baz" and "foo/bar/+": + NewStreams = lists:sort(GetStream(<<"foo/bar/baz">>)), + ?assertMatch([_, _], NewStreams), + ?assertMatch([_], NewStreams -- [FooBarBaz]), + ok. + +%% Smoke test for iteration with wildcard topic filter +%% t_iterate_wildcard(_Config) -> +%% %% Prepare data: +%% Topics = ["foo/bar", "foo/bar/baz", "a", "a/bar"], +%% Timestamps = lists:seq(1, 10), +%% _ = [ +%% store(?SHARD, PublishedAt, Topic, term_to_binary({Topic, PublishedAt})) +%% || Topic <- Topics, PublishedAt <- Timestamps +%% ], +%% ?assertEqual( +%% lists:sort([{Topic, PublishedAt} || Topic <- Topics, PublishedAt <- Timestamps]), +%% lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "#", 0)]) +%% ), +%% ?assertEqual( +%% [], +%% lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "#", 10 + 1)]) +%% ), +%% ?assertEqual( +%% lists:sort([{Topic, PublishedAt} || Topic <- Topics, PublishedAt <- lists:seq(5, 10)]), +%% lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "#", 5)]) +%% ), +%% ?assertEqual( +%% lists:sort([ +%% {Topic, PublishedAt} +%% || Topic <- ["foo/bar", "foo/bar/baz"], PublishedAt <- Timestamps +%% ]), +%% lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "foo/#", 0)]) +%% ), +%% ?assertEqual( +%% lists:sort([{"foo/bar", PublishedAt} || PublishedAt <- Timestamps]), +%% lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "foo/+", 0)]) +%% ), +%% ?assertEqual( +%% [], +%% 
lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "foo/+/bar", 0)]) +%% ), +%% ?assertEqual( +%% lists:sort([ +%% {Topic, PublishedAt} +%% || Topic <- ["foo/bar", "foo/bar/baz", "a/bar"], PublishedAt <- Timestamps +%% ]), +%% lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "+/bar/#", 0)]) +%% ), +%% ?assertEqual( +%% lists:sort([{Topic, PublishedAt} || Topic <- ["a", "a/bar"], PublishedAt <- Timestamps]), +%% lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "a/#", 0)]) +%% ), +%% ?assertEqual( +%% [], +%% lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "a/+/+", 0)]) +%% ), +%% ok. + + +%% t_create_gen(_Config) -> +%% {ok, 1} = emqx_ds_storage_layer:create_generation(?SHARD, 5, ?DEFAULT_CONFIG), +%% ?assertEqual( +%% {error, nonmonotonic}, +%% emqx_ds_storage_layer:create_generation(?SHARD, 1, ?DEFAULT_CONFIG) +%% ), +%% ?assertEqual( +%% {error, nonmonotonic}, +%% emqx_ds_storage_layer:create_generation(?SHARD, 5, ?DEFAULT_CONFIG) +%% ), +%% {ok, 2} = emqx_ds_storage_layer:create_generation(?SHARD, 10, ?COMPACT_CONFIG), +%% Topics = ["foo/bar", "foo/bar/baz"], +%% Timestamps = lists:seq(1, 100), +%% [ +%% ?assertMatch({ok, [_]}, store(?SHARD, PublishedAt, Topic, <<>>)) +%% || Topic <- Topics, PublishedAt <- Timestamps +%% ]. 
+ +%% t_iterate_multigen(_Config) -> +%% {ok, 1} = emqx_ds_storage_layer:create_generation(?SHARD, 10, ?COMPACT_CONFIG), +%% {ok, 2} = emqx_ds_storage_layer:create_generation(?SHARD, 50, ?DEFAULT_CONFIG), +%% {ok, 3} = emqx_ds_storage_layer:create_generation(?SHARD, 1000, ?DEFAULT_CONFIG), +%% Topics = ["foo/bar", "foo/bar/baz", "a", "a/bar"], +%% Timestamps = lists:seq(1, 100), +%% _ = [ +%% store(?SHARD, PublishedAt, Topic, term_to_binary({Topic, PublishedAt})) +%% || Topic <- Topics, PublishedAt <- Timestamps +%% ], +%% ?assertEqual( +%% lists:sort([ +%% {Topic, PublishedAt} +%% || Topic <- ["foo/bar", "foo/bar/baz"], PublishedAt <- Timestamps +%% ]), +%% lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "foo/#", 0)]) +%% ), +%% ?assertEqual( +%% lists:sort([ +%% {Topic, PublishedAt} +%% || Topic <- ["a", "a/bar"], PublishedAt <- lists:seq(60, 100) +%% ]), +%% lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "a/#", 60)]) +%% ). + +%% t_iterate_multigen_preserve_restore(_Config) -> +%% ReplayID = atom_to_binary(?FUNCTION_NAME), +%% {ok, 1} = emqx_ds_storage_layer:create_generation(?SHARD, 10, ?COMPACT_CONFIG), +%% {ok, 2} = emqx_ds_storage_layer:create_generation(?SHARD, 50, ?DEFAULT_CONFIG), +%% {ok, 3} = emqx_ds_storage_layer:create_generation(?SHARD, 100, ?DEFAULT_CONFIG), +%% Topics = ["foo/bar", "foo/bar/baz", "a/bar"], +%% Timestamps = lists:seq(1, 100), +%% TopicFilter = "foo/#", +%% TopicsMatching = ["foo/bar", "foo/bar/baz"], +%% _ = [ +%% store(?SHARD, TS, Topic, term_to_binary({Topic, TS})) +%% || Topic <- Topics, TS <- Timestamps +%% ], +%% It0 = iterator(?SHARD, TopicFilter, 0), +%% {It1, Res10} = iterate(It0, 10), +%% % preserve mid-generation +%% ok = emqx_ds_storage_layer:preserve_iterator(It1, ReplayID), +%% {ok, It2} = emqx_ds_storage_layer:restore_iterator(?SHARD, ReplayID), +%% {It3, Res100} = iterate(It2, 88), +%% % preserve on the generation boundary +%% ok = emqx_ds_storage_layer:preserve_iterator(It3, 
ReplayID), +%% {ok, It4} = emqx_ds_storage_layer:restore_iterator(?SHARD, ReplayID), +%% {It5, Res200} = iterate(It4, 1000), +%% ?assertEqual({end_of_stream, []}, iterate(It5, 1)), +%% ?assertEqual( +%% lists:sort([{Topic, TS} || Topic <- TopicsMatching, TS <- Timestamps]), +%% lists:sort([binary_to_term(Payload) || Payload <- Res10 ++ Res100 ++ Res200]) +%% ), +%% ?assertEqual( +%% ok, +%% emqx_ds_storage_layer:discard_iterator(?SHARD, ReplayID) +%% ), +%% ?assertEqual( +%% {error, not_found}, +%% emqx_ds_storage_layer:restore_iterator(?SHARD, ReplayID) +%% ). + +make_message(PublishedAt, Topic, Payload) when is_list(Topic) -> + make_message(PublishedAt, list_to_binary(Topic), Payload); +make_message(PublishedAt, Topic, Payload) when is_binary(Topic) -> + ID = emqx_guid:gen(), + #message{ + id = ID, + topic = Topic, + timestamp = PublishedAt, + payload = Payload + }. + +store(Shard, PublishedAt, TopicL, Payload) when is_list(TopicL) -> + store(Shard, PublishedAt, list_to_binary(TopicL), Payload); +store(Shard, PublishedAt, Topic, Payload) -> + ID = emqx_guid:gen(), + Msg = #message{ + id = ID, + topic = Topic, + timestamp = PublishedAt, + payload = Payload + }, + emqx_ds_storage_layer:message_store(Shard, [Msg], #{}). + +%% iterate(Shard, TopicFilter, StartTime) -> +%% Streams = emqx_ds_storage_layer:get_streams(Shard, TopicFilter, StartTime), +%% lists:flatmap( +%% fun(Stream) -> +%% iterate(Shard, iterator(Shard, Stream, TopicFilter, StartTime)) +%% end, +%% Streams). + +%% iterate(Shard, It) -> +%% case emqx_ds_storage_layer:next(Shard, It) of +%% {ok, ItNext, [#message{payload = Payload}]} -> +%% [Payload | iterate(Shard, ItNext)]; +%% end_of_stream -> +%% [] +%% end. 
+ +%% iterate(_Shard, end_of_stream, _N) -> +%% {end_of_stream, []}; +%% iterate(Shard, It, N) -> +%% case emqx_ds_storage_layer:next(Shard, It, N) of +%% {ok, ItFinal, Messages} -> +%% {ItFinal, [Payload || #message{payload = Payload} <- Messages]}; +%% end_of_stream -> +%% {end_of_stream, []} +%% end. + +%% iterator(Shard, Stream, TopicFilter, StartTime) -> +%% {ok, It} = emqx_ds_storage_layer:make_iterator(Shard, Stream, parse_topic(TopicFilter), StartTime), +%% It. + +payloads(Messages) -> + lists:map( + fun(#message{payload = P}) -> + P + end, + Messages + ). + +parse_topic(Topic = [L | _]) when is_binary(L); is_atom(L) -> + Topic; +parse_topic(Topic) -> + emqx_topic:words(iolist_to_binary(Topic)). + +%% CT callbacks + +all() -> emqx_common_test_helpers:all(?MODULE). + +init_per_suite(Config) -> + {ok, _} = application:ensure_all_started(emqx_durable_storage), + Config. + +end_per_suite(_Config) -> + ok = application:stop(emqx_durable_storage). + +init_per_testcase(TC, Config) -> + {ok, _} = emqx_ds_storage_layer_sup:start_shard(shard(TC), ?DEFAULT_CONFIG), + Config. + +end_per_testcase(TC, _Config) -> + ok = emqx_ds_storage_layer_sup:stop_shard(shard(TC)). + +shard(TC) -> + {?MODULE, TC}. + +keyspace(TC) -> + TC. + +set_keyspace_config(Keyspace, Config) -> + ok = application:set_env(emqx_ds, keyspace_config, #{Keyspace => Config}). diff --git a/apps/emqx_durable_storage/test/emqx_ds_storage_layer_SUITE.erl_ b/apps/emqx_durable_storage/test/emqx_ds_storage_layer_SUITE.erl_ deleted file mode 100644 index 25198cfd7..000000000 --- a/apps/emqx_durable_storage/test/emqx_ds_storage_layer_SUITE.erl_ +++ /dev/null @@ -1,292 +0,0 @@ -%%-------------------------------------------------------------------- -%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved. -%%-------------------------------------------------------------------- --module(emqx_ds_storage_layer_SUITE). - --compile(export_all). --compile(nowarn_export_all). 
- --include_lib("emqx/include/emqx.hrl"). --include_lib("common_test/include/ct.hrl"). --include_lib("stdlib/include/assert.hrl"). - --define(SHARD, shard(?FUNCTION_NAME)). - --define(DEFAULT_CONFIG, - {emqx_ds_message_storage_bitmask, #{ - timestamp_bits => 64, - topic_bits_per_level => [8, 8, 32, 16], - epoch => 5, - iteration => #{ - iterator_refresh => {every, 5} - } - }} -). - --define(COMPACT_CONFIG, - {emqx_ds_message_storage_bitmask, #{ - timestamp_bits => 16, - topic_bits_per_level => [16, 16], - epoch => 10 - }} -). - -%% Smoke test for opening and reopening the database -t_open(_Config) -> - ok = emqx_ds_storage_layer_sup:stop_shard(?SHARD), - {ok, _} = emqx_ds_storage_layer_sup:start_shard(?SHARD, #{}). - -%% Smoke test of store function -t_store(_Config) -> - MessageID = emqx_guid:gen(), - PublishedAt = 1000, - Topic = <<"foo/bar">>, - Payload = <<"message">>, - Msg = #message{ - id = MessageID, - topic = Topic, - payload = Payload, - timestamp = PublishedAt - }, - ?assertMatch({ok, [_]}, emqx_ds_storage_layer:message_store(?SHARD, [Msg], #{})). - -%% Smoke test for iteration through a concrete topic -t_iterate(_Config) -> - %% Prepare data: - Topics = [<<"foo/bar">>, <<"foo/bar/baz">>, <<"a">>], - Timestamps = lists:seq(1, 10), - [ - store( - ?SHARD, - PublishedAt, - Topic, - integer_to_binary(PublishedAt) - ) - || Topic <- Topics, PublishedAt <- Timestamps - ], - %% Iterate through individual topics: - [ - begin - {ok, It} = emqx_ds_storage_layer:make_iterator(?SHARD, {parse_topic(Topic), 0}), - Values = iterate(It), - ?assertEqual(lists:map(fun integer_to_binary/1, Timestamps), Values) - end - || Topic <- Topics - ], - ok. 
- -%% Smoke test for iteration with wildcard topic filter -t_iterate_wildcard(_Config) -> - %% Prepare data: - Topics = ["foo/bar", "foo/bar/baz", "a", "a/bar"], - Timestamps = lists:seq(1, 10), - _ = [ - store(?SHARD, PublishedAt, Topic, term_to_binary({Topic, PublishedAt})) - || Topic <- Topics, PublishedAt <- Timestamps - ], - ?assertEqual( - lists:sort([{Topic, PublishedAt} || Topic <- Topics, PublishedAt <- Timestamps]), - lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "#", 0)]) - ), - ?assertEqual( - [], - lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "#", 10 + 1)]) - ), - ?assertEqual( - lists:sort([{Topic, PublishedAt} || Topic <- Topics, PublishedAt <- lists:seq(5, 10)]), - lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "#", 5)]) - ), - ?assertEqual( - lists:sort([ - {Topic, PublishedAt} - || Topic <- ["foo/bar", "foo/bar/baz"], PublishedAt <- Timestamps - ]), - lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "foo/#", 0)]) - ), - ?assertEqual( - lists:sort([{"foo/bar", PublishedAt} || PublishedAt <- Timestamps]), - lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "foo/+", 0)]) - ), - ?assertEqual( - [], - lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "foo/+/bar", 0)]) - ), - ?assertEqual( - lists:sort([ - {Topic, PublishedAt} - || Topic <- ["foo/bar", "foo/bar/baz", "a/bar"], PublishedAt <- Timestamps - ]), - lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "+/bar/#", 0)]) - ), - ?assertEqual( - lists:sort([{Topic, PublishedAt} || Topic <- ["a", "a/bar"], PublishedAt <- Timestamps]), - lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "a/#", 0)]) - ), - ?assertEqual( - [], - lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "a/+/+", 0)]) - ), - ok. 
- -t_iterate_long_tail_wildcard(_Config) -> - Topic = "b/c/d/e/f/g", - TopicFilter = "b/c/d/e/+/+", - Timestamps = lists:seq(1, 100), - _ = [ - store(?SHARD, PublishedAt, Topic, term_to_binary({Topic, PublishedAt})) - || PublishedAt <- Timestamps - ], - ?assertEqual( - lists:sort([{"b/c/d/e/f/g", PublishedAt} || PublishedAt <- lists:seq(50, 100)]), - lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, TopicFilter, 50)]) - ). - -t_create_gen(_Config) -> - {ok, 1} = emqx_ds_storage_layer:create_generation(?SHARD, 5, ?DEFAULT_CONFIG), - ?assertEqual( - {error, nonmonotonic}, - emqx_ds_storage_layer:create_generation(?SHARD, 1, ?DEFAULT_CONFIG) - ), - ?assertEqual( - {error, nonmonotonic}, - emqx_ds_storage_layer:create_generation(?SHARD, 5, ?DEFAULT_CONFIG) - ), - {ok, 2} = emqx_ds_storage_layer:create_generation(?SHARD, 10, ?COMPACT_CONFIG), - Topics = ["foo/bar", "foo/bar/baz"], - Timestamps = lists:seq(1, 100), - [ - ?assertMatch({ok, [_]}, store(?SHARD, PublishedAt, Topic, <<>>)) - || Topic <- Topics, PublishedAt <- Timestamps - ]. - -t_iterate_multigen(_Config) -> - {ok, 1} = emqx_ds_storage_layer:create_generation(?SHARD, 10, ?COMPACT_CONFIG), - {ok, 2} = emqx_ds_storage_layer:create_generation(?SHARD, 50, ?DEFAULT_CONFIG), - {ok, 3} = emqx_ds_storage_layer:create_generation(?SHARD, 1000, ?DEFAULT_CONFIG), - Topics = ["foo/bar", "foo/bar/baz", "a", "a/bar"], - Timestamps = lists:seq(1, 100), - _ = [ - store(?SHARD, PublishedAt, Topic, term_to_binary({Topic, PublishedAt})) - || Topic <- Topics, PublishedAt <- Timestamps - ], - ?assertEqual( - lists:sort([ - {Topic, PublishedAt} - || Topic <- ["foo/bar", "foo/bar/baz"], PublishedAt <- Timestamps - ]), - lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "foo/#", 0)]) - ), - ?assertEqual( - lists:sort([ - {Topic, PublishedAt} - || Topic <- ["a", "a/bar"], PublishedAt <- lists:seq(60, 100) - ]), - lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "a/#", 60)]) - ). 
- -t_iterate_multigen_preserve_restore(_Config) -> - ReplayID = atom_to_binary(?FUNCTION_NAME), - {ok, 1} = emqx_ds_storage_layer:create_generation(?SHARD, 10, ?COMPACT_CONFIG), - {ok, 2} = emqx_ds_storage_layer:create_generation(?SHARD, 50, ?DEFAULT_CONFIG), - {ok, 3} = emqx_ds_storage_layer:create_generation(?SHARD, 100, ?DEFAULT_CONFIG), - Topics = ["foo/bar", "foo/bar/baz", "a/bar"], - Timestamps = lists:seq(1, 100), - TopicFilter = "foo/#", - TopicsMatching = ["foo/bar", "foo/bar/baz"], - _ = [ - store(?SHARD, TS, Topic, term_to_binary({Topic, TS})) - || Topic <- Topics, TS <- Timestamps - ], - It0 = iterator(?SHARD, TopicFilter, 0), - {It1, Res10} = iterate(It0, 10), - % preserve mid-generation - ok = emqx_ds_storage_layer:preserve_iterator(It1, ReplayID), - {ok, It2} = emqx_ds_storage_layer:restore_iterator(?SHARD, ReplayID), - {It3, Res100} = iterate(It2, 88), - % preserve on the generation boundary - ok = emqx_ds_storage_layer:preserve_iterator(It3, ReplayID), - {ok, It4} = emqx_ds_storage_layer:restore_iterator(?SHARD, ReplayID), - {It5, Res200} = iterate(It4, 1000), - ?assertEqual({end_of_stream, []}, iterate(It5, 1)), - ?assertEqual( - lists:sort([{Topic, TS} || Topic <- TopicsMatching, TS <- Timestamps]), - lists:sort([binary_to_term(Payload) || Payload <- Res10 ++ Res100 ++ Res200]) - ), - ?assertEqual( - ok, - emqx_ds_storage_layer:discard_iterator(?SHARD, ReplayID) - ), - ?assertEqual( - {error, not_found}, - emqx_ds_storage_layer:restore_iterator(?SHARD, ReplayID) - ). - -store(Shard, PublishedAt, TopicL, Payload) when is_list(TopicL) -> - store(Shard, PublishedAt, list_to_binary(TopicL), Payload); -store(Shard, PublishedAt, Topic, Payload) -> - ID = emqx_guid:gen(), - Msg = #message{ - id = ID, - topic = Topic, - timestamp = PublishedAt, - payload = Payload - }, - emqx_ds_storage_layer:message_store(Shard, [Msg], #{}). - -iterate(DB, TopicFilter, StartTime) -> - iterate(iterator(DB, TopicFilter, StartTime)). 
- -iterate(It) -> - case emqx_ds_storage_layer:next(It) of - {ok, ItNext, [#message{payload = Payload}]} -> - [Payload | iterate(ItNext)]; - end_of_stream -> - [] - end. - -iterate(end_of_stream, _N) -> - {end_of_stream, []}; -iterate(It, N) -> - case emqx_ds_storage_layer:next(It, N) of - {ok, ItFinal, Messages} -> - {ItFinal, [Payload || #message{payload = Payload} <- Messages]}; - end_of_stream -> - {end_of_stream, []} - end. - -iterator(DB, TopicFilter, StartTime) -> - {ok, It} = emqx_ds_storage_layer:make_iterator(DB, {parse_topic(TopicFilter), StartTime}), - It. - -parse_topic(Topic = [L | _]) when is_binary(L); is_atom(L) -> - Topic; -parse_topic(Topic) -> - emqx_topic:words(iolist_to_binary(Topic)). - -%% CT callbacks - -all() -> emqx_common_test_helpers:all(?MODULE). - -init_per_suite(Config) -> - {ok, _} = application:ensure_all_started(emqx_durable_storage), - Config. - -end_per_suite(_Config) -> - ok = application:stop(emqx_durable_storage). - -init_per_testcase(TC, Config) -> - ok = set_keyspace_config(keyspace(TC), ?DEFAULT_CONFIG), - {ok, _} = emqx_ds_storage_layer_sup:start_shard(shard(TC), #{}), - Config. - -end_per_testcase(TC, _Config) -> - ok = emqx_ds_storage_layer_sup:stop_shard(shard(TC)). - -shard(TC) -> - iolist_to_binary([?MODULE_STRING, "_", atom_to_list(TC)]). - -keyspace(TC) -> - TC. - -set_keyspace_config(Keyspace, Config) -> - ok = application:set_env(emqx_ds, keyspace_config, #{Keyspace => Config}). 
From 56b6b176c2c12a986cb007b91017bf2edba81e4e Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Fri, 13 Oct 2023 19:50:18 +0200 Subject: [PATCH 085/155] fix(ds): LTS shall keeps the concrete topic indexes --- apps/emqx_durable_storage/src/emqx_ds_lts.erl | 62 +++++++++++-------- .../emqx_ds_storage_bitfield_lts_SUITE.erl | 1 - 2 files changed, 37 insertions(+), 26 deletions(-) diff --git a/apps/emqx_durable_storage/src/emqx_ds_lts.erl b/apps/emqx_durable_storage/src/emqx_ds_lts.erl index a6e67c069..c9a73e3e0 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_lts.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_lts.erl @@ -159,7 +159,7 @@ dump_to_dot(#trie{trie = Trie, stats = Stats}, Filename) -> {ok, FD} = file:open(Filename, [write]), Print = fun (?PREFIX) -> "prefix"; - (NodeId) -> binary:encode_hex(NodeId) + (NodeId) -> integer_to_binary(NodeId, 16) end, io:format(FD, "digraph {~n", []), lists:foreach( @@ -190,12 +190,12 @@ trie_next(#trie{trie = Trie}, State, ?EOT) -> [] -> undefined end; trie_next(#trie{trie = Trie}, State, Token) -> - case ets:lookup(Trie, {State, ?PLUS}) of + case ets:lookup(Trie, {State, Token}) of [#trans{next = Next}] -> - {true, Next}; + {false, Next}; [] -> - case ets:lookup(Trie, {State, Token}) of - [#trans{next = Next}] -> {false, Next}; + case ets:lookup(Trie, {State, ?PLUS}) of + [#trans{next = Next}] -> {true, Next}; [] -> undefined end end. @@ -317,11 +317,11 @@ do_topic_key(Trie, ThresholdFun, Depth, State, [Tok | Rest], Varying0) -> Threshold = ThresholdFun(Depth), Varying = case trie_next_(Trie, State, Tok) of - {NChildren, _, _DiscardState} when is_integer(NChildren), NChildren > Threshold -> + {NChildren, _, NextState} when is_integer(NChildren), NChildren >= Threshold -> %% Number of children for the trie node reached the - %% threshold, we need to insert wildcard here: - {_, NextState} = trie_insert(Trie, State, ?PLUS), - [Tok | Varying0]; + %% threshold, we need to insert wildcard here. 
+ {_, _WildcardState} = trie_insert(Trie, State, ?PLUS), + Varying0; {_, false, NextState} -> Varying0; {_, true, NextState} -> @@ -331,6 +331,7 @@ do_topic_key(Trie, ThresholdFun, Depth, State, [Tok | Rest], Varying0) -> end, do_topic_key(Trie, ThresholdFun, Depth + 1, NextState, Rest, Varying). +%% @doc Has side effects! Inserts missing elements -spec trie_next_(trie(), state(), binary() | ?EOT) -> {New, Wildcard, state()} when New :: false | non_neg_integer(), Wildcard :: boolean(). @@ -471,29 +472,36 @@ wildcard_lookup_test() -> topic_key_test() -> T = trie_create(), try - Threshold = 3, + Threshold = 4, ThresholdFun = fun(0) -> 1000; (_) -> Threshold end, %% Test that bottom layer threshold is high: lists:foreach( fun(I) -> - {_, []} = test_key(T, ThresholdFun, [I, 99, 99, 99]) + {_, []} = test_key(T, ThresholdFun, [I, 99999, 999999, 99999]) end, lists:seq(1, 10)), %% Test adding children on the 2nd level: lists:foreach( fun(I) -> case test_key(T, ThresholdFun, [1, I, 1]) of - {_, []} when I < Threshold -> + {_, []} -> + ?assert(I < Threshold, {I, '<', Threshold}), ok; {_, [Var]} -> + ?assert(I >= Threshold, {I, '>=', Threshold}), ?assertEqual(Var, integer_to_binary(I)) end end, lists:seq(1, 100)), %% This doesn't affect 2nd level with a different prefix: - {_, []} = test_key(T, ThresholdFun, [2, 1, 1]), + ?assertMatch({_, []}, test_key(T, ThresholdFun, [2, 1, 1])), + ?assertMatch({_, []}, test_key(T, ThresholdFun, [2, 10, 1])), + %% This didn't retroactively change the indexes that were + %% created prior to reaching the threshold: + ?assertMatch({_, []}, test_key(T, ThresholdFun, [1, 1, 1])), + ?assertMatch({_, []}, test_key(T, ThresholdFun, [1, 2, 1])), %% Now create another level of +: lists:foreach( fun(I) -> @@ -531,28 +539,29 @@ topic_match_test() -> assert_match_topics(T, [1, '+'], [{S11, []}, {S12, []}]), assert_match_topics(T, [1, '+', 1], [{S111, []}]), %% Match topics with #: - assert_match_topics(T, [1, '#'], [{S1, []}, {S11, []}, {S12, []}, 
{S111, []}]), - assert_match_topics(T, [1, 1, '#'], [{S11, []}, {S111, []}]), + assert_match_topics(T, [1, '#'], + [{S1, []}, + {S11, []}, {S12, []}, + {S111, []}]), + assert_match_topics(T, [1, 1, '#'], + [{S11, []}, + {S111, []}]), %% Now add learned wildcards: {S21, []} = test_key(T, ThresholdFun, [2, 1]), {S22, []} = test_key(T, ThresholdFun, [2, 2]), {S2_, [<<"3">>]} = test_key(T, ThresholdFun, [2, 3]), - {S2_11, [_]} = test_key(T, ThresholdFun, [2, 1, 1, 1]), - {S2_12, [_]} = test_key(T, ThresholdFun, [2, 1, 1, 2]), - {S2_1_, [_, _]} = test_key(T, ThresholdFun, [2, 1, 1, 3]), - %% Check matching: + {S2_11, [<<"3">>]} = test_key(T, ThresholdFun, [2, 3, 1, 1]), + {S2_12, [<<"4">>]} = test_key(T, ThresholdFun, [2, 4, 1, 2]), + {S2_1_, [<<"3">>, <<"3">>]} = test_key(T, ThresholdFun, [2, 3, 1, 3]), + %% %% Check matching: assert_match_topics(T, [2, 2], [{S22, []}, {S2_, [<<"2">>]}]), assert_match_topics(T, [2, '+'], [{S22, []}, {S21, []}, {S2_, ['+']}]), - assert_match_topics(T, [2, 1, 1, 2], - [{S2_12, [<<"1">>]}, - {S2_1_, [<<"1">>, <<"2">>]}]), assert_match_topics(T, [2, '#'], [{S21, []}, {S22, []}, {S2_, ['+']}, - {S2_11, ['+']}, {S2_12, ['+']}, - {S2_1_, ['+', '+']}]), + {S2_11, ['+']}, {S2_12, ['+']}, {S2_1_, ['+', '+']}]), ok after dump_to_dot(T, filename:join("_build", atom_to_list(?FUNCTION_NAME) ++ ".dot")) @@ -578,7 +587,10 @@ assert_match_topics(Trie, Filter0, Expected) -> test_key(Trie, Threshold, Topic0) -> Topic = [integer_to_binary(I) || I <- Topic0], Ret = topic_key(Trie, Threshold, Topic), - Ret = topic_key(Trie, Threshold, Topic), %% Test idempotency + %% Test idempotency: + Ret1 = topic_key(Trie, Threshold, Topic), + ?assertEqual(Ret, Ret1, Topic), + %% Add new key to the history: case get(?keys_history) of undefined -> OldHistory = #{}; OldHistory -> ok diff --git a/apps/emqx_durable_storage/test/emqx_ds_storage_bitfield_lts_SUITE.erl b/apps/emqx_durable_storage/test/emqx_ds_storage_bitfield_lts_SUITE.erl index f9a7b02c4..22a608a7f 100644 --- 
a/apps/emqx_durable_storage/test/emqx_ds_storage_bitfield_lts_SUITE.erl +++ b/apps/emqx_durable_storage/test/emqx_ds_storage_bitfield_lts_SUITE.erl @@ -168,7 +168,6 @@ t_get_streams(_Config) -> %% ), %% ok. - %% t_create_gen(_Config) -> %% {ok, 1} = emqx_ds_storage_layer:create_generation(?SHARD, 5, ?DEFAULT_CONFIG), %% ?assertEqual( From 164ae9e94a814f788c3c1145e01f45067b77acf9 Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Sat, 14 Oct 2023 00:32:36 +0200 Subject: [PATCH 086/155] feat(ds): LTS bitfield storage passes all tests --- .../src/emqx_ds_bitmask_keymapper.erl | 28 +++- .../src/emqx_ds_storage_bitfield_lts.erl | 136 +++++++++++----- .../emqx_ds_storage_bitfield_lts_SUITE.erl | 146 +++++++++++++++++- 3 files changed, 260 insertions(+), 50 deletions(-) diff --git a/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl b/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl index 4b6fcbcdf..5c3ae42d8 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl @@ -88,7 +88,8 @@ bin_key_to_vector/2, next_range/3, key_to_bitstring/2, - bitstring_to_key/2 + bitstring_to_key/2, + bitsize/1 ]). -export_type([vector/0, key/0, dimension/0, offset/0, bitsize/0, bitsource/0, keymapper/0]). @@ -146,7 +147,7 @@ -opaque keymapper() :: #keymapper{}. --type scalar_range() :: any | {'=', scalar()} | {'>=', scalar()}. +-type scalar_range() :: any | {'=', scalar() | infinity} | {'>=', scalar()}. %%================================================================================ %% API functions @@ -179,6 +180,10 @@ make_keymapper(Bitsources) -> dim_sizeof = DimSizeof }. +-spec bitsize(keymapper()) -> pos_integer(). +bitsize(#keymapper{size = Size}) -> + Size. + %% @doc Map N-dimensional vector to a scalar key. %% %% Note: this function is not injective. 
@@ -264,8 +269,12 @@ next_range(Keymapper, Filter0, PrevKey) -> -spec bitstring_to_key(keymapper(), bitstring()) -> key(). bitstring_to_key(#keymapper{size = Size}, Bin) -> - <> = Bin, - Key. + case Bin of + <> -> + Key; + _ -> + error({invalid_key, Bin, Size}) + end. -spec key_to_bitstring(keymapper(), key()) -> bitstring(). key_to_bitstring(#keymapper{size = Size}, Key) -> @@ -329,6 +338,9 @@ desugar_filter(#keymapper{dim_sizeof = DimSizeof}, Filter) -> fun ({any, Bitsize}) -> {0, ones(Bitsize)}; + ({{'=', infinity}, Bitsize}) -> + Val = ones(Bitsize), + {Val, Val}; ({{'=', Val}, _Bitsize}) -> {Val, Val}; ({{'>=', Val}, Bitsize}) -> @@ -470,6 +482,14 @@ vector_to_key4_test() -> Schema = [{1, 0, 8}, {2, 0, 8}, {1, 8, 8}, {2, 16, 8}], ?assertEqual(16#bb112211, vec2key(Schema, [16#aa1111, 16#bb2222])). +%% Test with binaries: +vector_to_key_bin_test() -> + Schema = [{1, 0, 8 * 4}, {2, 0, 8 * 5}, {3, 0, 8 * 5}], + Keymapper = make_keymapper(lists:reverse(Schema)), + ?assertMatch( + <<"wellhelloworld">>, bin_vector_to_key(Keymapper, [<<"well">>, <<"hello">>, <<"world">>]) + ). + key_to_vector0_test() -> Schema = [], key2vec(Schema, []). diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl index e8bfdaa2e..7b8fbab0d 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl @@ -35,6 +35,7 @@ -export_type([options/0]). -include_lib("emqx/include/emqx.hrl"). +-include_lib("snabbkaffe/include/trace.hrl"). %%================================================================================ %% Type declarations @@ -52,7 +53,7 @@ #{ bits_per_wildcard_level := pos_integer(), topic_index_bytes := pos_integer(), - epoch_bits := non_neg_integer(), + ts_bits := non_neg_integer(), ts_offset_bits := non_neg_integer() }. @@ -80,6 +81,8 @@ ((KEY band BITMASK) =:= BITFILTER) ). 
+-define(COUNTER, emqx_ds_storage_bitfield_lts_counter). + %%================================================================================ %% API funcions %%================================================================================ @@ -92,20 +95,17 @@ create(_ShardId, DBHandle, GenId, Options) -> %% Get options: BitsPerTopicLevel = maps:get(bits_per_wildcard_level, Options, 64), TopicIndexBytes = maps:get(topic_index_bytes, Options, 4), - TSOffsetBits = maps:get(epoch_bits, Options, 5), + TSOffsetBits = maps:get(epoch_bits, Options, 8), %% TODO: change to 10 to make it around ~1 sec %% Create column families: DataCFName = data_cf(GenId), TrieCFName = trie_cf(GenId), {ok, DataCFHandle} = rocksdb:create_column_family(DBHandle, DataCFName, []), {ok, TrieCFHandle} = rocksdb:create_column_family(DBHandle, TrieCFName, []), %% Create schema: - - % Fixed size_of MQTT message timestamp - SizeOfTS = 64, Schema = #{ bits_per_wildcard_level => BitsPerTopicLevel, topic_index_bytes => TopicIndexBytes, - epoch_bits => SizeOfTS - TSOffsetBits, + ts_bits => 64, ts_offset_bits => TSOffsetBits }, {Schema, [{DataCFName, DataCFHandle}, {TrieCFName, TrieCFHandle}]}. 
@@ -114,19 +114,19 @@ open(_Shard, DBHandle, GenId, CFRefs, Schema) -> #{ bits_per_wildcard_level := BitsPerTopicLevel, topic_index_bytes := TopicIndexBytes, - epoch_bits := EpochBits, + ts_bits := TSBits, ts_offset_bits := TSOffsetBits } = Schema, {_, DataCF} = lists:keyfind(data_cf(GenId), 1, CFRefs), {_, TrieCF} = lists:keyfind(trie_cf(GenId), 1, CFRefs), Trie = restore_trie(TopicIndexBytes, DBHandle, TrieCF), - %% If user's topics have more than learned 10 wildcard levels then - %% total carnage is going on; learned topic structure doesn't - %% really apply: + %% If user's topics have more than learned 10 wildcard levels, + %% then it's total carnage; learned topic structure won't help + %% much: MaxWildcardLevels = 10, Keymappers = array:from_list( [ - make_keymapper(TopicIndexBytes, EpochBits, BitsPerTopicLevel, TSOffsetBits, N) + make_keymapper(TopicIndexBytes, BitsPerTopicLevel, TSBits, TSOffsetBits, N) || N <- lists:seq(0, MaxWildcardLevels) ] ), @@ -180,11 +180,18 @@ next(_Shard, #s{db = DB, data = CF, keymappers = Keymappers}, It0, BatchSize) -> % TODO: ugh, so ugly NVarying = length(KeyFilter) - 2, Keymapper = array:get(NVarying, Keymappers), - {ok, ITHandle} = rocksdb:iterator(DB, CF, []), + %% Calculate lower and upper bounds for iteration: + LowerBound = lower_bound(Keymapper, KeyFilter), + UpperBound = upper_bound(Keymapper, KeyFilter), + {ok, ITHandle} = rocksdb:iterator(DB, CF, [ + {iterate_lower_bound, LowerBound}, {iterate_upper_bound, UpperBound} + ]), try + put(?COUNTER, 0), next_loop(ITHandle, Keymapper, It0, [], BatchSize) after - rocksdb:iterator_close(ITHandle) + rocksdb:iterator_close(ITHandle), + erase(?COUNTER) end. 
%%================================================================================ @@ -193,35 +200,42 @@ next(_Shard, #s{db = DB, data = CF, keymappers = Keymappers}, It0, BatchSize) -> next_loop(_, _, It, Acc, 0) -> {ok, It, lists:reverse(Acc)}; -next_loop(ITHandle, KeyMapper, It0, Acc0, N0) -> - {Key1, Bitmask, Bitfilter} = next_range(KeyMapper, It0), - case iterator_move(KeyMapper, ITHandle, {seek, Key1}) of - {ok, Key, Val} when ?QUICKCHECK_KEY(Key, Bitmask, Bitfilter) -> - Msg = deserialize(Val), - It1 = It0#it{last_seen_key = Key}, - case check_message(It1, Msg) of - true -> - N1 = N0 - 1, - Acc1 = [Msg | Acc0]; - false -> - N1 = N0, - Acc1 = Acc0 - end, - {N, It, Acc} = traverse_interval( - ITHandle, KeyMapper, Bitmask, Bitfilter, It1, Acc1, N1 - ), - next_loop(ITHandle, KeyMapper, It, Acc, N); - {ok, Key, _Val} -> - It = It0#it{last_seen_key = Key}, - next_loop(ITHandle, KeyMapper, It, Acc0, N0); - {error, invalid_iterator} -> +next_loop(ITHandle, KeyMapper, It0 = #it{last_seen_key = Key0, key_filter = KeyFilter}, Acc0, N0) -> + inc_counter(), + case next_range(KeyMapper, It0) of + {Key1, Bitmask, Bitfilter} when Key1 > Key0 -> + case iterator_move(KeyMapper, ITHandle, {seek, Key1}) of + {ok, Key, Val} when ?QUICKCHECK_KEY(Key, Bitmask, Bitfilter) -> + assert_progress(bitmask_match, KeyMapper, KeyFilter, Key0, Key1), + Msg = deserialize(Val), + It1 = It0#it{last_seen_key = Key}, + case check_message(It1, Msg) of + true -> + N1 = N0 - 1, + Acc1 = [Msg | Acc0]; + false -> + N1 = N0, + Acc1 = Acc0 + end, + {N, It, Acc} = traverse_interval( + ITHandle, KeyMapper, Bitmask, Bitfilter, It1, Acc1, N1 + ), + next_loop(ITHandle, KeyMapper, It, Acc, N); + {ok, Key, _Val} -> + assert_progress(bitmask_miss, KeyMapper, KeyFilter, Key0, Key1), + It = It0#it{last_seen_key = Key}, + next_loop(ITHandle, KeyMapper, It, Acc0, N0); + {error, invalid_iterator} -> + {ok, It0, lists:reverse(Acc0)} + end; + _ -> {ok, It0, lists:reverse(Acc0)} end. 
traverse_interval(_, _, _, _, It, Acc, 0) -> {0, It, Acc}; traverse_interval(ITHandle, KeyMapper, Bitmask, Bitfilter, It0, Acc, N) -> - %% TODO: supply the upper limit to rocksdb to the last extra seek: + inc_counter(), case iterator_move(KeyMapper, ITHandle, next) of {ok, Key, Val} when ?QUICKCHECK_KEY(Key, Bitmask, Bitfilter) -> Msg = deserialize(Val), @@ -265,6 +279,28 @@ iterator_move(KeyMapper, ITHandle, Action0) -> Other end. +assert_progress(_Msg, _KeyMapper, _KeyFilter, Key0, Key1) when Key1 > Key0 -> + ?tp_ignore_side_effects_in_prod( + emqx_ds_storage_bitfield_lts_iter_move, + #{ location => _Msg + , key0 => format_key(_KeyMapper, Key0) + , key1 => format_key(_KeyMapper, Key1) + }), + ok; +assert_progress(Msg, KeyMapper, KeyFilter, Key0, Key1) -> + Str0 = format_key(KeyMapper, Key0), + Str1 = format_key(KeyMapper, Key1), + error(#{'$msg' => Msg, key0 => Str0, key1 => Str1, step => get(?COUNTER), keyfilter => lists:map(fun format_keyfilter/1, KeyFilter)}). + +format_key(KeyMapper, Key) -> + Vec = [integer_to_list(I, 16) || I <- emqx_ds_bitmask_keymapper:key_to_vector(KeyMapper, Key)], + lists:flatten(io_lib:format("~.16B (~s)", [Key, string:join(Vec, ",")])). + +format_keyfilter(any) -> + any; +format_keyfilter({Op, Val}) -> + {Op, integer_to_list(Val, 16)}. + -spec make_key(#s{}, #message{}) -> {binary(), [binary()]}. make_key(#s{keymappers = KeyMappers, trie = Trie}, #message{timestamp = Timestamp, topic = TopicBin}) -> Tokens = emqx_topic:tokens(TopicBin), @@ -303,15 +339,33 @@ deserialize(Blob) -> -define(BYTE_SIZE, 8). 
%% erlfmt-ignore -make_keymapper(TopicIndexBytes, EpochBits, BitsPerTopicLevel, TSOffsetBits, N) -> +make_keymapper(TopicIndexBytes, BitsPerTopicLevel, TSBits, TSOffsetBits, N) -> Bitsources = %% Dimension Offset Bitsize [{1, 0, TopicIndexBytes * ?BYTE_SIZE}, %% Topic index - {2, TSOffsetBits, EpochBits }] ++ %% Timestamp epoch + {2, TSOffsetBits, TSBits - TSOffsetBits }] ++ %% Timestamp epoch [{2 + I, 0, BitsPerTopicLevel } %% Varying topic levels || I <- lists:seq(1, N)] ++ [{2, 0, TSOffsetBits }], %% Timestamp offset - emqx_ds_bitmask_keymapper:make_keymapper(Bitsources). + Keymapper = emqx_ds_bitmask_keymapper:make_keymapper(lists:reverse(Bitsources)), + %% Assert: + case emqx_ds_bitmask_keymapper:bitsize(Keymapper) rem 8 of + 0 -> + ok; + _ -> + error(#{'$msg' => "Non-even key size", bitsources => Bitsources}) + end, + Keymapper. + +upper_bound(Keymapper, [TopicIndex | Rest]) -> + filter_to_key(Keymapper, [TopicIndex | [{'=', infinity} || _ <- Rest]]). + +lower_bound(Keymapper, [TopicIndex | Rest]) -> + filter_to_key(Keymapper, [TopicIndex | [{'=', 0} || _ <- Rest]]). + +filter_to_key(KeyMapper, KeyFilter) -> + {Key, _, _} = emqx_ds_bitmask_keymapper:next_range(KeyMapper, KeyFilter, 0), + emqx_ds_bitmask_keymapper:key_to_bitstring(KeyMapper, Key). -spec restore_trie(pos_integer(), rocksdb:db_handle(), rocksdb:cf_handle()) -> emqx_ds_lts:trie(). restore_trie(TopicIndexBytes, DB, CF) -> @@ -335,6 +389,10 @@ read_persisted_trie(IT, {ok, KeyB, ValB}) -> read_persisted_trie(IT, {error, invalid_iterator}) -> []. +inc_counter() -> + N = get(?COUNTER), + put(?COUNTER, N + 1). + %% @doc Generate a column family ID for the MQTT messages -spec data_cf(emqx_ds_storage_layer:gen_id()) -> [char()]. 
data_cf(GenId) -> diff --git a/apps/emqx_durable_storage/test/emqx_ds_storage_bitfield_lts_SUITE.erl b/apps/emqx_durable_storage/test/emqx_ds_storage_bitfield_lts_SUITE.erl index 22a608a7f..957383f30 100644 --- a/apps/emqx_durable_storage/test/emqx_ds_storage_bitfield_lts_SUITE.erl +++ b/apps/emqx_durable_storage/test/emqx_ds_storage_bitfield_lts_SUITE.erl @@ -8,6 +8,7 @@ -include_lib("emqx/include/emqx.hrl"). -include_lib("common_test/include/ct.hrl"). +-include_lib("snabbkaffe/include/snabbkaffe.hrl"). -include_lib("stdlib/include/assert.hrl"). -define(SHARD, shard(?FUNCTION_NAME)). @@ -72,10 +73,10 @@ t_iterate(_Config) -> -define(assertSameSet(A, B), ?assertEqual(lists:sort(A), lists:sort(B))). -%% Smoke test that verifies that concrete topics become individual -%% streams, unless there's too many of them +%% Smoke test that verifies that concrete topics are mapped to +%% individual streams, unless there's too many of them. t_get_streams(_Config) -> - %% Prepare data: + %% Prepare data (without wildcards): Topics = [<<"foo/bar">>, <<"foo/bar/baz">>, <<"a">>], Timestamps = lists:seq(1, 10), Batch = [ @@ -91,11 +92,13 @@ t_get_streams(_Config) -> [FooBar = {_, _}] = GetStream(<<"foo/bar">>), [FooBarBaz] = GetStream(<<"foo/bar/baz">>), [A] = GetStream(<<"a">>), - %% Restart shard to make sure trie is persisted: + %% Restart shard to make sure trie is persisted and restored: ok = emqx_ds_storage_layer_sup:stop_shard(?SHARD), {ok, _} = emqx_ds_storage_layer_sup:start_shard(?SHARD, #{}), - %% Test various wildcards: + %% Verify that there are no "ghost streams" for topics that don't + %% have any messages: [] = GetStream(<<"bar/foo">>), + %% Test some wildcard patterns: ?assertEqual([FooBar], GetStream("+/+")), ?assertSameSet([FooBar, FooBarBaz], GetStream(<<"foo/#">>)), ?assertSameSet([FooBar, FooBarBaz, A], GetStream(<<"#">>)), @@ -110,11 +113,139 @@ t_get_streams(_Config) -> ok = emqx_ds_storage_layer:store_batch(?SHARD, NewBatch, []), %% Check that "foo/bar/baz" 
topic now appears in two streams: %% "foo/bar/baz" and "foo/bar/+": - NewStreams = lists:sort(GetStream(<<"foo/bar/baz">>)), + NewStreams = lists:sort(GetStream("foo/bar/baz")), ?assertMatch([_, _], NewStreams), - ?assertMatch([_], NewStreams -- [FooBarBaz]), + ?assert(lists:member(FooBarBaz, NewStreams)), + %% Verify that size of the trie is still relatively small, even + %% after processing 200+ topics: + AllStreams = GetStream("#"), + NTotal = length(AllStreams), + ?assert(NTotal < 30, {NTotal, '<', 30}), + ?assert(lists:member(FooBar, AllStreams)), + ?assert(lists:member(FooBarBaz, AllStreams)), + ?assert(lists:member(A, AllStreams)), ok. +t_replay(_Config) -> + %% Create concrete topics: + Topics = [<<"foo/bar">>, <<"foo/bar/baz">>], + Timestamps = lists:seq(1, 10), + Batch1 = [ + make_message(PublishedAt, Topic, integer_to_binary(PublishedAt)) + || Topic <- Topics, PublishedAt <- Timestamps + ], + ok = emqx_ds_storage_layer:store_batch(?SHARD, Batch1, []), + %% Create wildcard topics `wildcard/+/suffix/foo' and `wildcard/+/suffix/bar': + Batch2 = [ + begin + B = integer_to_binary(I), + make_message( + TS, <<"wildcard/", B/binary, "/suffix/", Suffix/binary>>, integer_to_binary(TS) + ) + end + || I <- lists:seq(1, 200), TS <- lists:seq(1, 10), Suffix <- [<<"foo">>, <<"bar">>] + ], + ok = emqx_ds_storage_layer:store_batch(?SHARD, Batch2, []), + %% Check various topic filters: + Messages = Batch1 ++ Batch2, + %% Missing topics (no ghost messages): + ?assertNot(check(?SHARD, <<"missing/foo/bar">>, 0, Messages)), + %% Regular topics: + ?assert(check(?SHARD, <<"foo/bar">>, 0, Messages)), + ?assert(check(?SHARD, <<"foo/bar/baz">>, 0, Messages)), + ?assert(check(?SHARD, <<"foo/#">>, 0, Messages)), + ?assert(check(?SHARD, <<"foo/+">>, 0, Messages)), + ?assert(check(?SHARD, <<"foo/+/+">>, 0, Messages)), + ?assert(check(?SHARD, <<"+/+/+">>, 0, Messages)), + ?assert(check(?SHARD, <<"+/+/baz">>, 0, Messages)), + %% Learned wildcard topics: + ?assertNot(check(?SHARD, 
<<"wildcard/1000/suffix/foo">>, 0, [])), + ?assert(check(?SHARD, <<"wildcard/1/suffix/foo">>, 0, Messages)), + ?assert(check(?SHARD, <<"wildcard/100/suffix/foo">>, 0, Messages)), + ?assert(check(?SHARD, <<"wildcard/+/suffix/foo">>, 0, Messages)), + ?assert(check(?SHARD, <<"wildcard/1/suffix/+">>, 0, Messages)), + ?assert(check(?SHARD, <<"wildcard/100/suffix/+">>, 0, Messages)), + ?assert(check(?SHARD, <<"wildcard/#">>, 0, Messages)), + ?assert(check(?SHARD, <<"wildcard/1/#">>, 0, Messages)), + ?assert(check(?SHARD, <<"wildcard/100/#">>, 0, Messages)), + ?assert(check(?SHARD, <<"#">>, 0, Messages)), + ok. + +check(Shard, TopicFilter, StartTime, ExpectedMessages) -> + ExpectedFiltered = lists:filter( + fun(#message{topic = Topic, timestamp = TS}) -> + emqx_topic:match(Topic, TopicFilter) andalso TS >= StartTime + end, + ExpectedMessages + ), + ?check_trace( + #{timetrap => 10_000}, + begin + Dump = dump_messages(Shard, TopicFilter, StartTime), + verify_dump(TopicFilter, StartTime, Dump), + Missing = ExpectedFiltered -- Dump, + Extras = Dump -- ExpectedFiltered, + ?assertMatch( + #{missing := [], unexpected := []}, + #{ + missing => Missing, + unexpected => Extras, + topic_filter => TopicFilter, + start_time => StartTime + } + ) + end, + []), + length(ExpectedFiltered) > 0. 
+ +verify_dump(TopicFilter, StartTime, Dump) -> + lists:foldl( + fun(#message{topic = Topic, timestamp = TS}, Acc) -> + %% Verify that the topic of the message returned by the + %% iterator matches the expected topic filter: + ?assert(emqx_topic:match(Topic, TopicFilter), {unexpected_topic, Topic, TopicFilter}), + %% Verify that timestamp of the message is greater than + %% the StartTime of the iterator: + ?assert(TS >= StartTime, {start_time, TopicFilter, TS, StartTime}), + %% Verify that iterator didn't reorder messages + %% (timestamps for each topic are growing): + LastTopicTs = maps:get(Topic, Acc, -1), + ?assert(TS >= LastTopicTs, {topic_ts_reordering, Topic, TS, LastTopicTs}), + Acc#{Topic => TS} + end, + #{}, + Dump + ). + +dump_messages(Shard, TopicFilter, StartTime) -> + Streams = emqx_ds_storage_layer:get_streams(Shard, parse_topic(TopicFilter), StartTime), + lists:flatmap( + fun({_Rank, Stream}) -> + dump_stream(Shard, Stream, TopicFilter, StartTime) + end, + Streams + ). + +dump_stream(Shard, Stream, TopicFilter, StartTime) -> + BatchSize = 3, + {ok, Iterator} = emqx_ds_storage_layer:make_iterator( + Shard, Stream, parse_topic(TopicFilter), StartTime + ), + Loop = fun F(It, 0) -> + error({too_many_iterations, It}); + F(It, N) -> + case emqx_ds_storage_layer:next(Shard, It, BatchSize) of + end_of_stream -> + []; + {ok, _NextIt, []} -> + []; + {ok, NextIt, Batch} -> + Batch ++ F(NextIt, N - 1) + end + end, + MaxIterations = 1000, + Loop(Iterator, MaxIterations). + %% Smoke test for iteration with wildcard topic filter %% t_iterate_wildcard(_Config) -> %% %% Prepare data: @@ -317,6 +448,7 @@ parse_topic(Topic) -> %% CT callbacks all() -> emqx_common_test_helpers:all(?MODULE). +suite() -> [{timetrap, {seconds, 20}}]. 
init_per_suite(Config) -> {ok, _} = application:ensure_all_started(emqx_durable_storage), From ef46c09cafe087c27fa515e4bcdc8515b428d9df Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Sat, 14 Oct 2023 01:01:10 +0200 Subject: [PATCH 087/155] feat(ds): Implement ratchet function for bitmask keymapper --- apps/emqx/src/emqx_persistent_message.erl | 5 +- .../src/emqx_ds_bitmask.hrl | 36 + .../src/emqx_ds_bitmask_keymapper.erl | 581 +++++++------- .../src/emqx_ds_storage_bitfield_lts.erl | 187 ++--- .../src/emqx_ds_storage_layer.erl_ | 714 ----------------- .../src/emqx_ds_storage_layer_bitmask.erl_ | 748 ------------------ .../emqx_ds_storage_bitfield_lts_SUITE.erl | 124 +-- 7 files changed, 436 insertions(+), 1959 deletions(-) create mode 100644 apps/emqx_durable_storage/src/emqx_ds_bitmask.hrl delete mode 100644 apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl_ delete mode 100644 apps/emqx_durable_storage/src/emqx_ds_storage_layer_bitmask.erl_ diff --git a/apps/emqx/src/emqx_persistent_message.erl b/apps/emqx/src/emqx_persistent_message.erl index f3ec9def5..632ff2a27 100644 --- a/apps/emqx/src/emqx_persistent_message.erl +++ b/apps/emqx/src/emqx_persistent_message.erl @@ -40,7 +40,10 @@ init() -> ?WHEN_ENABLED(begin - ok = emqx_ds:open_db(?PERSISTENT_MESSAGE_DB, #{}), + ok = emqx_ds:open_db(?PERSISTENT_MESSAGE_DB, #{ + backend => builtin, + storage => {emqx_ds_storage_bitfield_lts, #{}} + }), ok = emqx_persistent_session_ds_router:init_tables(), ok = emqx_persistent_session_ds:create_tables(), ok diff --git a/apps/emqx_durable_storage/src/emqx_ds_bitmask.hrl b/apps/emqx_durable_storage/src/emqx_ds_bitmask.hrl new file mode 100644 index 000000000..31af0e034 --- /dev/null +++ b/apps/emqx_durable_storage/src/emqx_ds_bitmask.hrl @@ -0,0 +1,36 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved. 
+%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- +-ifndef(EMQX_DS_BITMASK_HRL). +-define(EMQX_DS_BITMASK_HRL, true). + +-record(filter_scan_action, { + offset :: emqx_ds_bitmask_keymapper:offset(), + size :: emqx_ds_bitmask_keymapper:bitsize(), + min :: non_neg_integer(), + max :: non_neg_integer() +}). + +-record(filter, { + size :: non_neg_integer(), + bitfilter :: non_neg_integer(), + bitmask :: non_neg_integer(), + %% Ranges (in _bitsource_ basis): + bitsource_ranges :: array:array(#filter_scan_action{}), + range_min :: non_neg_integer(), + range_max :: non_neg_integer() +}). + +-endif. diff --git a/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl b/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl index 5c3ae42d8..a512a141c 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl @@ -86,9 +86,12 @@ bin_vector_to_key/2, key_to_vector/2, bin_key_to_vector/2, - next_range/3, key_to_bitstring/2, bitstring_to_key/2, + make_filter/2, + ratchet/2, + bin_increment/2, + bin_checkmask/2, bitsize/1 ]). @@ -149,6 +152,10 @@ -type scalar_range() :: any | {'=', scalar() | infinity} | {'>=', scalar()}. +-include("emqx_ds_bitmask.hrl"). + +-type filter() :: #filter{}. 
+ %%================================================================================ %% API functions %%================================================================================ @@ -237,36 +244,6 @@ bin_key_to_vector(Keymapper = #keymapper{dim_sizeof = DimSizeof, size = Size}, B lists:zip(Vector, DimSizeof) ). -%% @doc Given a keymapper, a filter, and a key, return a triple containing: -%% -%% 1. `NextKey', a key that is greater than the given one, and is -%% within the given range. -%% -%% 2. `Bitmask' -%% -%% 3. `Bitfilter' -%% -%% Bitmask and bitfilter can be used to verify that key any K is in -%% the range using the following inequality: -%% -%% K >= NextKey && (K band Bitmask) =:= Bitfilter. -%% -%% ...or `undefined' if the next key is outside the range. --spec next_range(keymapper(), [scalar_range()], key()) -> {key(), integer(), integer()} | undefined. -next_range(Keymapper, Filter0, PrevKey) -> - %% Key -> Vector -> +1 on vector -> Key - Filter = desugar_filter(Keymapper, Filter0), - PrevVec = key_to_vector(Keymapper, PrevKey), - case inc_vector(Filter, PrevVec) of - overflow -> - undefined; - NextVec -> - NewKey = vector_to_key(Keymapper, NextVec), - Bitmask = make_bitmask(Keymapper, Filter), - Bitfilter = NewKey band Bitmask, - {NewKey, Bitmask, Bitfilter} - end. - -spec bitstring_to_key(keymapper(), bitstring()) -> key(). bitstring_to_key(#keymapper{size = Size}, Bin) -> case Bin of @@ -280,60 +257,208 @@ bitstring_to_key(#keymapper{size = Size}, Bin) -> key_to_bitstring(#keymapper{size = Size}, Key) -> <>. +%% @doc Create a filter object that facilitates range scans. +-spec make_filter(keymapper(), [scalar_range()]) -> filter(). 
+make_filter(KeyMapper = #keymapper{schema = Schema, dim_sizeof = DimSizeof, size = Size}, Filter0) -> + NDim = length(DimSizeof), + %% Transform "symbolic" inequations to ranges: + Filter1 = inequations_to_ranges(KeyMapper, Filter0), + {Bitmask, Bitfilter} = make_bitfilter(KeyMapper, Filter1), + %% Calculate maximum source offset as per bitsource specification: + MaxOffset = lists:foldl( + fun({Dim, Offset, _Size}, Acc) -> + maps:update_with(Dim, fun(OldVal) -> max(OldVal, Offset) end, 0, Acc) + end, + #{}, + Schema + ), + %% Adjust minimum and maximum values for each interval like this: + %% + %% Min: 110100|101011 -> 110100|00000 + %% Max: 110101|001011 -> 110101|11111 + %% ^ + %% | + %% max offset + %% + %% This is needed so when we increment the vector, we always scan + %% the full range of least significant bits. + Filter2 = lists:map( + fun + ({{Val, Val}, _Dim}) -> + {Val, Val}; + ({{Min0, Max0}, Dim}) -> + Offset = maps:get(Dim, MaxOffset, 0), + %% Set least significant bits of Min to 0: + Min = (Min0 bsr Offset) bsl Offset, + %% Set least significant bits of Max to 1: + Max = Max0 bor ones(Offset), + {Min, Max} + end, + lists:zip(Filter1, lists:seq(1, NDim)) + ), + %% Project the vector into "bitsource coordinate system": + {_, Filter} = fold_bitsources( + fun(DstOffset, {Dim, SrcOffset, Size}, Acc) -> + {Min0, Max0} = lists:nth(Dim, Filter2), + Min = (Min0 bsr SrcOffset) band ones(Size), + Max = (Max0 bsr SrcOffset) band ones(Size), + Action = #filter_scan_action{ + offset = DstOffset, + size = Size, + min = Min, + max = Max + }, + [Action | Acc] + end, + [], + Schema + ), + Ranges = array:from_list(lists:reverse(Filter)), + %% Compute estimated upper and lower bounds of a _continous_ + %% interval where all keys lie: + case Filter of + [] -> + RangeMin = 0, + RangeMax = 0; + [#filter_scan_action{offset = MSBOffset, min = MSBMin, max = MSBMax} | _] -> + RangeMin = MSBMin bsl MSBOffset, + RangeMax = MSBMax bsl MSBOffset bor ones(MSBOffset) + end, + %% 
Final value + #filter{ + size = Size, + bitmask = Bitmask, + bitfilter = Bitfilter, + bitsource_ranges = Ranges, + range_min = RangeMin, + range_max = RangeMax + }. + +-spec ratchet(filter(), key()) -> key() | overflow. +ratchet(#filter{bitsource_ranges = Ranges, range_max = Max}, Key) when Key =< Max -> + NDim = array:size(Ranges), + case ratchet_scan(Ranges, NDim, Key, 0, _Pivot = {-1, 0}, _Carry = 0) of + overflow -> + overflow; + {Pivot, Increment} -> + ratchet_do(Ranges, Key, NDim - 1, Pivot, Increment) + end; +ratchet(_, _) -> + overflow. + +-spec bin_increment(filter(), binary()) -> binary() | overflow. +bin_increment(Filter = #filter{size = Size}, <<>>) -> + Key = ratchet(Filter, 0), + <>; +bin_increment(Filter = #filter{size = Size, bitmask = Bitmask, bitfilter = Bitfilter}, KeyBin) -> + <> = KeyBin, + Key1 = Key0 + 1, + if + Key1 band Bitmask =:= Bitfilter -> + %% TODO: check overflow + <>; + true -> + case ratchet(Filter, Key1) of + overflow -> + overflow; + Key -> + <> + end + end. + +-spec bin_checkmask(filter(), binary()) -> boolean(). +bin_checkmask(#filter{size = Size, bitmask = Bitmask, bitfilter = Bitfilter}, Key) -> + case Key of + <> -> + Int band Bitmask =:= Bitfilter; + _ -> + false + end. + %%================================================================================ %% Internal functions %%================================================================================ --spec make_bitmask(keymapper(), [{non_neg_integer(), non_neg_integer()}]) -> non_neg_integer(). -make_bitmask(Keymapper = #keymapper{dim_sizeof = DimSizeof}, Ranges) -> - BitmaskVector = lists:map( +%% Note: this function operates in bitsource basis, scanning it from 0 +%% to NDim (i.e. 
from the least significant bits to the most +%% significant bits) +ratchet_scan(_Ranges, NDim, _Key, NDim, Pivot, 0) -> + %% We've reached the end: + Pivot; +ratchet_scan(_Ranges, NDim, _Key, NDim, _Pivot, 1) -> + %% We've reached the end, but key is still not large enough: + overflow; +ratchet_scan(Ranges, NDim, Key, I, Pivot0, Carry) -> + #filter_scan_action{offset = Offset, size = Size, min = Min, max = Max} = array:get(I, Ranges), + %% Extract I-th element of the vector from the original key: + Elem = ((Key bsr Offset) band ones(Size)) + Carry, + if + Elem < Min -> + %% I-th coordinate is less than the specified minimum. + %% + %% We reset this coordinate to the minimum value. It means + %% we incremented this bitposition, the less significant + %% bits have to be reset to their respective minimum + %% values: + Pivot = {I + 1, 0}, + ratchet_scan(Ranges, NDim, Key, I + 1, Pivot, 0); + Elem > Max -> + %% I-th coordinate is larger than the specified + %% minimum. We can only fix this problem by incrementing + %% the next coordinate (i.e. more significant bits). + %% + %% We reset this coordinate to the minimum value, and + %% increment the next coordinate (by setting `Carry' to + %% 1). + Pivot = {I + 1, 1}, + ratchet_scan(Ranges, NDim, Key, I + 1, Pivot, 1); + true -> + %% Coordinate is within range: + ratchet_scan(Ranges, NDim, Key, I + 1, Pivot0, 0) + end. + +%% Note: this function operates in bitsource basis, scanning it from +%% NDim to 0. It applies the transformation specified by +%% `ratchet_scan'. 
+ratchet_do(Ranges, Key, I, _Pivot, _Increment) when I < 0 -> + 0; +ratchet_do(Ranges, Key, I, Pivot, Increment) -> + #filter_scan_action{offset = Offset, size = Size, min = Min} = array:get(I, Ranges), + Mask = ones(Offset + Size) bxor ones(Offset), + Elem = + if + I > Pivot -> + Mask band Key; + I =:= Pivot -> + (Mask band Key) + (Increment bsl Offset); + true -> + Min bsl Offset + end, + %% erlang:display( + %% {ratchet_do, I, integer_to_list(Key, 16), integer_to_list(Mask, 2), + %% integer_to_list(Elem, 16)} + %% ), + Elem bor ratchet_do(Ranges, Key, I - 1, Pivot, Increment). + +-spec make_bitfilter(keymapper(), [{non_neg_integer(), non_neg_integer()}]) -> + {non_neg_integer(), non_neg_integer()}. +make_bitfilter(Keymapper = #keymapper{dim_sizeof = DimSizeof}, Ranges) -> + L = lists:map( fun ({{N, N}, Bits}) -> %% For strict equality we can employ bitmask: - ones(Bits); + {ones(Bits), N}; (_) -> - 0 + {0, 0} end, lists:zip(Ranges, DimSizeof) ), - vector_to_key(Keymapper, BitmaskVector). - --spec inc_vector([{non_neg_integer(), non_neg_integer()}], vector()) -> vector() | overflow. -inc_vector(Filter, Vec0) -> - case normalize_vector(Filter, Vec0) of - {true, Vec} -> - Vec; - {false, Vec} -> - do_inc_vector(Filter, Vec, []) - end. - -do_inc_vector([], [], _Acc) -> - overflow; -do_inc_vector([{Min, Max} | Intervals], [Elem | Vec], Acc) -> - case Elem of - Max -> - do_inc_vector(Intervals, Vec, [Min | Acc]); - _ when Elem < Max -> - lists:reverse(Acc) ++ [Elem + 1 | Vec] - end. - -normalize_vector(Intervals, Vec0) -> - Vec = lists:map( - fun - ({{Min, _Max}, Elem}) when Min > Elem -> - Min; - ({{_Min, Max}, Elem}) when Max < Elem -> - Max; - ({_, Elem}) -> - Elem - end, - lists:zip(Intervals, Vec0) - ), - {Vec > Vec0, Vec}. + {Bitmask, Bitfilter} = lists:unzip(L), + {vector_to_key(Keymapper, Bitmask), vector_to_key(Keymapper, Bitfilter)}. %% Transform inequalities into a list of closed intervals that the %% vector elements should lie in. 
-desugar_filter(#keymapper{dim_sizeof = DimSizeof}, Filter) -> +inequations_to_ranges(#keymapper{dim_sizeof = DimSizeof}, Filter) -> lists:map( fun ({any, Bitsize}) -> @@ -390,24 +515,6 @@ ones(Bits) -> -ifdef(TEST). -%% %% Create a bitmask that is sufficient to cover a given number. E.g.: -%% %% -%% %% 2#1000 -> 2#1111; 2#0 -> 2#0; 2#10101 -> 2#11111 -%% bitmask_of(N) -> -%% %% FIXME: avoid floats -%% NBits = ceil(math:log2(N + 1)), -%% ones(NBits). - -%% bitmask_of_test() -> -%% ?assertEqual(2#0, bitmask_of(0)), -%% ?assertEqual(2#1, bitmask_of(1)), -%% ?assertEqual(2#11, bitmask_of(2#10)), -%% ?assertEqual(2#11, bitmask_of(2#11)), -%% ?assertEqual(2#1111, bitmask_of(2#1000)), -%% ?assertEqual(2#1111, bitmask_of(2#1111)), -%% ?assertEqual(ones(128), bitmask_of(ones(128))), -%% ?assertEqual(ones(256), bitmask_of(ones(256))). - make_keymapper0_test() -> Schema = [], ?assertEqual( @@ -510,235 +617,117 @@ key_to_vector2_test() -> key2vec(Schema, [0, 1]), key2vec(Schema, [255, 0]). -inc_vector0_test() -> - Keymapper = make_keymapper([]), - ?assertMatch(overflow, incvec(Keymapper, [], [])). - -inc_vector1_test() -> - Keymapper = make_keymapper([{1, 0, 8}]), - ?assertMatch([3], incvec(Keymapper, [{'=', 3}], [1])), - ?assertMatch([3], incvec(Keymapper, [{'=', 3}], [2])), - ?assertMatch(overflow, incvec(Keymapper, [{'=', 3}], [3])), - ?assertMatch(overflow, incvec(Keymapper, [{'=', 3}], [4])), - ?assertMatch(overflow, incvec(Keymapper, [{'=', 3}], [255])), - %% Now with >=: - ?assertMatch([1], incvec(Keymapper, [{'>=', 0}], [0])), - ?assertMatch([255], incvec(Keymapper, [{'>=', 0}], [254])), - ?assertMatch(overflow, incvec(Keymapper, [{'>=', 0}], [255])), - - ?assertMatch([100], incvec(Keymapper, [{'>=', 100}], [0])), - ?assertMatch([100], incvec(Keymapper, [{'>=', 100}], [99])), - ?assertMatch([255], incvec(Keymapper, [{'>=', 100}], [254])), - ?assertMatch(overflow, incvec(Keymapper, [{'>=', 100}], [255])). 
- -inc_vector2_test() -> - Keymapper = make_keymapper([{1, 0, 8}, {2, 0, 8}, {3, 0, 8}]), - Filter = [{'>=', 0}, {'=', 100}, {'>=', 30}], - ?assertMatch([0, 100, 30], incvec(Keymapper, Filter, [0, 0, 0])), - ?assertMatch([1, 100, 30], incvec(Keymapper, Filter, [0, 100, 30])), - ?assertMatch([255, 100, 30], incvec(Keymapper, Filter, [254, 100, 30])), - ?assertMatch([0, 100, 31], incvec(Keymapper, Filter, [255, 100, 30])), - ?assertMatch([0, 100, 30], incvec(Keymapper, Filter, [0, 100, 29])), - ?assertMatch(overflow, incvec(Keymapper, Filter, [255, 100, 255])), - ?assertMatch([255, 100, 255], incvec(Keymapper, Filter, [254, 100, 255])), - ?assertMatch([0, 100, 255], incvec(Keymapper, Filter, [255, 100, 254])), - %% Nasty cases (shouldn't happen, hopefully): - ?assertMatch([1, 100, 30], incvec(Keymapper, Filter, [0, 101, 0])), - ?assertMatch([1, 100, 33], incvec(Keymapper, Filter, [0, 101, 33])), - ?assertMatch([0, 100, 255], incvec(Keymapper, Filter, [255, 101, 254])), - ?assertMatch(overflow, incvec(Keymapper, Filter, [255, 101, 255])). - make_bitmask0_test() -> Keymapper = make_keymapper([]), - ?assertMatch(0, mkbmask(Keymapper, [])). + ?assertMatch({0, 0}, mkbmask(Keymapper, [])). make_bitmask1_test() -> Keymapper = make_keymapper([{1, 0, 8}]), - ?assertEqual(0, mkbmask(Keymapper, [any])), - ?assertEqual(16#ff, mkbmask(Keymapper, [{'=', 1}])), - ?assertEqual(16#ff, mkbmask(Keymapper, [{'=', 255}])), - ?assertEqual(0, mkbmask(Keymapper, [{'>=', 0}])), - ?assertEqual(0, mkbmask(Keymapper, [{'>=', 1}])), - ?assertEqual(0, mkbmask(Keymapper, [{'>=', 16#f}])). + ?assertEqual({0, 0}, mkbmask(Keymapper, [any])), + ?assertEqual({16#ff, 1}, mkbmask(Keymapper, [{'=', 1}])), + ?assertEqual({16#ff, 255}, mkbmask(Keymapper, [{'=', 255}])), + ?assertEqual({0, 0}, mkbmask(Keymapper, [{'>=', 0}])), + ?assertEqual({0, 0}, mkbmask(Keymapper, [{'>=', 1}])), + ?assertEqual({0, 0}, mkbmask(Keymapper, [{'>=', 16#f}])). 
make_bitmask2_test() -> Keymapper = make_keymapper([{1, 0, 3}, {2, 0, 4}, {3, 0, 2}]), - ?assertEqual(2#00_0000_000, mkbmask(Keymapper, [any, any, any])), - ?assertEqual(2#11_0000_000, mkbmask(Keymapper, [any, any, {'=', 0}])), - ?assertEqual(2#00_1111_000, mkbmask(Keymapper, [any, {'=', 0}, any])), - ?assertEqual(2#00_0000_111, mkbmask(Keymapper, [{'=', 0}, any, any])). + ?assertEqual({2#00_0000_000, 2#00_0000_000}, mkbmask(Keymapper, [any, any, any])), + ?assertEqual({2#11_0000_000, 2#00_0000_000}, mkbmask(Keymapper, [any, any, {'=', 0}])), + ?assertEqual({2#00_1111_000, 2#00_0000_000}, mkbmask(Keymapper, [any, {'=', 0}, any])), + ?assertEqual({2#00_0000_111, 2#00_0000_000}, mkbmask(Keymapper, [{'=', 0}, any, any])). make_bitmask3_test() -> %% Key format of type |TimeOffset|Topic|Epoch|: - Keymapper = make_keymapper([{1, 8, 8}, {2, 0, 8}, {1, 0, 8}]), - ?assertEqual(2#00000000_00000000_00000000, mkbmask(Keymapper, [any, any])), - ?assertEqual(2#11111111_11111111_11111111, mkbmask(Keymapper, [{'=', 33}, {'=', 22}])), - ?assertEqual(2#11111111_11111111_11111111, mkbmask(Keymapper, [{'=', 33}, {'=', 22}])), - ?assertEqual(2#00000000_11111111_00000000, mkbmask(Keymapper, [{'>=', 255}, {'=', 22}])). + Keymapper = make_keymapper([{1, 0, 8}, {2, 0, 8}, {1, 8, 8}]), + ?assertEqual({2#00000000_00000000_00000000, 16#00_00_00}, mkbmask(Keymapper, [any, any])), + ?assertEqual( + {2#11111111_11111111_11111111, 16#aa_cc_bb}, + mkbmask(Keymapper, [{'=', 16#aabb}, {'=', 16#cc}]) + ), + ?assertEqual( + {2#00000000_11111111_00000000, 16#00_bb_00}, mkbmask(Keymapper, [{'>=', 255}, {'=', 16#bb}]) + ). -next_range0_test() -> - Keymapper = make_keymapper([]), +make_filter_test() -> + KeyMapper = make_keymapper([]), Filter = [], - PrevKey = 0, - ?assertMatch(undefined, next_range(Keymapper, Filter, PrevKey)). + ?assertMatch(#filter{size = 0, bitmask = 0, bitfilter = 0}, make_filter(KeyMapper, Filter)). 
-next_range1_test() -> - Keymapper = make_keymapper([{1, 0, 8}, {2, 0, 8}]), - ?assertMatch(undefined, next_range(Keymapper, [{'=', 0}, {'=', 0}], 0)), - ?assertMatch({1, 16#ffff, 1}, next_range(Keymapper, [{'=', 1}, {'=', 0}], 0)), - ?assertMatch({16#100, 16#ffff, 16#100}, next_range(Keymapper, [{'=', 0}, {'=', 1}], 0)), - %% Now with any: - ?assertMatch({1, 0, 0}, next_range(Keymapper, [any, any], 0)), - ?assertMatch({2, 0, 0}, next_range(Keymapper, [any, any], 1)), - ?assertMatch({16#fffb, 0, 0}, next_range(Keymapper, [any, any], 16#fffa)), - %% Now with >=: +ratchet1_test() -> + Bitsources = [{1, 0, 8}], + M = make_keymapper(Bitsources), + F = make_filter(M, [any]), + #filter{bitsource_ranges = Rarr} = F, ?assertMatch( - {16#42_30, 16#ff00, 16#42_00}, next_range(Keymapper, [{'>=', 16#30}, {'=', 16#42}], 0) - ), - ?assertMatch( - {16#42_31, 16#ff00, 16#42_00}, - next_range(Keymapper, [{'>=', 16#30}, {'=', 16#42}], 16#42_30) + [ + #filter_scan_action{ + offset = 0, + size = 8, + min = 0, + max = 16#ff + } + ], + array:to_list(Rarr) ), + ?assertEqual(0, ratchet(F, 0)), + ?assertEqual(16#fa, ratchet(F, 16#fa)), + ?assertEqual(16#ff, ratchet(F, 16#ff)), + ?assertEqual(overflow, ratchet(F, 16#100), "TBD: filter must store the upper bound"). - ?assertMatch( - {16#30_42, 16#00ff, 16#00_42}, next_range(Keymapper, [{'=', 16#42}, {'>=', 16#30}], 0) - ), - ?assertMatch( - {16#31_42, 16#00ff, 16#00_42}, - next_range(Keymapper, [{'=', 16#42}, {'>=', 16#30}], 16#00_43) - ). 
+%% erlfmt-ignore +ratchet2_test() -> + Bitsources = [{1, 0, 8}, %% Static topic index + {2, 8, 8}, %% Epoch + {3, 0, 8}, %% Varying topic hash + {2, 0, 8}], %% Timestamp offset + M = make_keymapper(lists:reverse(Bitsources)), + F1 = make_filter(M, [{'=', 16#aa}, any, {'=', 16#cc}]), + ?assertEqual(16#aa00cc00, ratchet(F1, 0)), + ?assertEqual(16#aa01cc00, ratchet(F1, 16#aa00cd00)), + ?assertEqual(16#aa01cc11, ratchet(F1, 16#aa01cc11)), + ?assertEqual(16#aa11cc00, ratchet(F1, 16#aa10cd00)), + ?assertEqual(16#aa11cc00, ratchet(F1, 16#aa10dc11)), + ?assertEqual(overflow, ratchet(F1, 16#ab000000)), + F2 = make_filter(M, [{'=', 16#aa}, {'>=', 16#dddd}, {'=', 16#cc}]), + ?assertEqual(16#aaddcc00, ratchet(F2, 0)), + ?assertEqual(16#aa_de_cc_00, ratchet(F2, 16#aa_dd_cd_11)). -%% Bunch of tests that verifying that next_range doesn't skip over keys: +ratchet3_test() -> + ?assert(proper:quickcheck(ratchet1_prop(), 100)). --define(assertIterComplete(A, B), - ?assertEqual(A -- [0], B) -). +%% erlfmt-ignore +ratchet1_prop() -> + EpochBits = 4, + Bitsources = [{1, 0, 2}, %% Static topic index + {2, EpochBits, 4}, %% Epoch + {3, 0, 2}, %% Varying topic hash + {2, 0, EpochBits}], %% Timestamp offset + M = make_keymapper(lists:reverse(Bitsources)), + F1 = make_filter(M, [{'=', 2#10}, any, {'=', 2#01}]), + ?FORALL(N, integer(0, ones(12)), + ratchet_prop(F1, N)). --define(assertSameSet(A, B), - ?assertIterComplete(lists:sort(A), lists:sort(B)) -). - -iterate1_test() -> - SizeX = 3, - SizeY = 3, - Keymapper = make_keymapper([{1, 0, SizeX}, {2, 0, SizeY}]), - Keys = test_iteration(Keymapper, [any, any]), - Expected = [ - X bor (Y bsl SizeX) - || Y <- lists:seq(0, ones(SizeY)), X <- lists:seq(0, ones(SizeX)) - ], - ?assertIterComplete(Expected, Keys). 
- -iterate2_test() -> - SizeX = 64, - SizeY = 3, - Keymapper = make_keymapper([{1, 0, SizeX}, {2, 0, SizeY}]), - X = 123456789, - Keys = test_iteration(Keymapper, [{'=', X}, any]), - Expected = [ - X bor (Y bsl SizeX) - || Y <- lists:seq(0, ones(SizeY)) - ], - ?assertIterComplete(Expected, Keys). - -iterate3_test() -> - SizeX = 3, - SizeY = 64, - Y = 42, - Keymapper = make_keymapper([{1, 0, SizeX}, {2, 0, SizeY}]), - Keys = test_iteration(Keymapper, [any, {'=', Y}]), - Expected = [ - X bor (Y bsl SizeX) - || X <- lists:seq(0, ones(SizeX)) - ], - ?assertIterComplete(Expected, Keys). - -iterate4_test() -> - SizeX = 8, - SizeY = 4, - MinX = 16#fa, - MinY = 16#a, - Keymapper = make_keymapper([{1, 0, SizeX}, {2, 0, SizeY}]), - Keys = test_iteration(Keymapper, [{'>=', MinX}, {'>=', MinY}]), - Expected = [ - X bor (Y bsl SizeX) - || Y <- lists:seq(MinY, ones(SizeY)), X <- lists:seq(MinX, ones(SizeX)) - ], - ?assertIterComplete(Expected, Keys). - -iterate1_prop() -> - Size = 4, - ?FORALL( - {SizeX, SizeY}, - {integer(1, Size), integer(1, Size)}, - ?FORALL( - {SplitX, MinX, MinY}, - {integer(0, SizeX), integer(0, SizeX), integer(0, SizeY)}, - begin - Keymapper = make_keymapper([ - {1, 0, SplitX}, {2, 0, SizeY}, {1, SplitX, SizeX - SplitX} - ]), - Keys = test_iteration(Keymapper, [{'>=', MinX}, {'>=', MinY}]), - Expected = [ - vector_to_key(Keymapper, [X, Y]) - || X <- lists:seq(MinX, ones(SizeX)), - Y <- lists:seq(MinY, ones(SizeY)) - ], - ?assertSameSet(Expected, Keys), - true - end - ) - ). - -iterate5_test() -> - ?assert(proper:quickcheck(iterate1_prop(), 100)). 
- -iterate2_prop() -> - Size = 4, - ?FORALL( - {SizeX, SizeY}, - {integer(1, Size), integer(1, Size)}, - ?FORALL( - {SplitX, MinX, MinY}, - {integer(0, SizeX), integer(0, SizeX), integer(0, SizeY)}, - begin - Keymapper = make_keymapper([ - {1, SplitX, SizeX - SplitX}, {2, 0, SizeY}, {1, 0, SplitX} - ]), - Keys = test_iteration(Keymapper, [{'>=', MinX}, {'>=', MinY}]), - Expected = [ - vector_to_key(Keymapper, [X, Y]) - || X <- lists:seq(MinX, ones(SizeX)), - Y <- lists:seq(MinY, ones(SizeY)) - ], - ?assertSameSet(Expected, Keys), - true - end - ) - ). - -iterate6_test() -> - ?assert(proper:quickcheck(iterate2_prop(), 1000)). - -test_iteration(Keymapper, Filter) -> - test_iteration(Keymapper, Filter, 0). - -test_iteration(Keymapper, Filter, PrevKey) -> - case next_range(Keymapper, Filter, PrevKey) of - undefined -> - []; - {Key, Bitmask, Bitfilter} -> - ?assert((Key band Bitmask) =:= Bitfilter), - [Key | test_iteration(Keymapper, Filter, Key)] - end. +ratchet_prop(Filter = #filter{bitfilter = Bitfilter, bitmask = Bitmask, size = Size}, Key0) -> + Key = ratchet(Filter, Key0), + ?assert(Key =:= overflow orelse (Key band Bitmask =:= Bitfilter)), + ?assert(Key >= Key0, {Key, '>=', Key}), + IMax = ones(Size), + CheckGaps = fun + F(I) when I >= Key; I > IMax -> + true; + F(I) -> + ?assertNot( + I band Bitmask =:= Bitfilter, + {found_gap, Key0, I, Key} + ), + F(I + 1) + end, + CheckGaps(Key0). mkbmask(Keymapper, Filter0) -> - Filter = desugar_filter(Keymapper, Filter0), - make_bitmask(Keymapper, Filter). - -incvec(Keymapper, Filter0, Vector) -> - Filter = desugar_filter(Keymapper, Filter0), - inc_vector(Filter, Vector). + Filter = inequations_to_ranges(Keymapper, Filter0), + make_bitfilter(Keymapper, Filter). 
key2vec(Schema, Vector) -> Keymapper = make_keymapper(Schema), diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl index 7b8fbab0d..8d406c93e 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl @@ -30,7 +30,7 @@ -export([create/4, open/5, store_batch/4, get_streams/4, make_iterator/5, next/4]). %% internal exports: --export([]). +-export([format_key/2, format_keyfilter/1]). -export_type([options/0]). @@ -73,8 +73,7 @@ topic_filter :: emqx_ds:topic_filter(), start_time :: emqx_ds:time(), storage_key :: emqx_ds_lts:msg_storage_key(), - last_seen_key = 0 :: emqx_ds_bitmask_keymapper:key(), - key_filter :: [emqx_ds_bitmask_keymapper:scalar_range()] + last_seen_key = <<>> :: binary() }). -define(QUICKCHECK_KEY(KEY, BITMASK, BITFILTER), @@ -83,6 +82,8 @@ -define(COUNTER, emqx_ds_storage_bitfield_lts_counter). +-include("emqx_ds_bitmask.hrl"). 
+ %%================================================================================ %% API funcions %%================================================================================ @@ -95,7 +96,8 @@ create(_ShardId, DBHandle, GenId, Options) -> %% Get options: BitsPerTopicLevel = maps:get(bits_per_wildcard_level, Options, 64), TopicIndexBytes = maps:get(topic_index_bytes, Options, 4), - TSOffsetBits = maps:get(epoch_bits, Options, 8), %% TODO: change to 10 to make it around ~1 sec + %% 10 bits -> 1024 ms -> ~1 sec + TSOffsetBits = maps:get(epoch_bits, Options, 10), %% Create column families: DataCFName = data_cf(GenId), TrieCFName = trie_cf(GenId), @@ -120,17 +122,17 @@ open(_Shard, DBHandle, GenId, CFRefs, Schema) -> {_, DataCF} = lists:keyfind(data_cf(GenId), 1, CFRefs), {_, TrieCF} = lists:keyfind(trie_cf(GenId), 1, CFRefs), Trie = restore_trie(TopicIndexBytes, DBHandle, TrieCF), - %% If user's topics have more than learned 10 wildcard levels, - %% then it's total carnage; learned topic structure won't help - %% much: + %% If user's topics have more than learned 10 wildcard levels + %% (more than 2, really), then it's total carnage; learned topic + %% structure won't help. MaxWildcardLevels = 10, - Keymappers = array:from_list( + KeymapperCache = array:from_list( [ make_keymapper(TopicIndexBytes, BitsPerTopicLevel, TSBits, TSOffsetBits, N) || N <- lists:seq(0, MaxWildcardLevels) ] ), - #s{db = DBHandle, data = DataCF, trie = Trie, keymappers = Keymappers}. + #s{db = DBHandle, data = DataCF, trie = Trie, keymappers = KeymapperCache}. store_batch(_ShardId, S = #s{db = DB, data = Data}, Messages, _Options) -> lists:foreach( @@ -144,16 +146,26 @@ store_batch(_ShardId, S = #s{db = DB, data = Data}, Messages, _Options) -> get_streams(_Shard, #s{trie = Trie}, TopicFilter, _StartTime) -> Indexes = emqx_ds_lts:match_topics(Trie, TopicFilter), - [ - #stream{ - storage_key = I - } - || I <- Indexes - ]. + [#stream{storage_key = I} || I <- Indexes]. 
make_iterator(_Shard, _Data, #stream{storage_key = StorageKey}, TopicFilter, StartTime) -> + %% Note: it's a good idea to keep the iterator structure lean, + %% since it can be stored on a remote node that could update its + %% code independently from us. + {ok, #it{ + topic_filter = TopicFilter, + start_time = StartTime, + storage_key = StorageKey + }}. + +next(_Shard, #s{db = DB, data = CF, keymappers = Keymappers}, It0, BatchSize) -> + #it{ + start_time = StartTime, + storage_key = StorageKey + } = It0, + %% Make filter: {TopicIndex, Varying} = StorageKey, - Filter = [ + Inequations = [ {'=', TopicIndex}, {'>=', StartTime} | lists:map( @@ -166,29 +178,22 @@ make_iterator(_Shard, _Data, #stream{storage_key = StorageKey}, TopicFilter, Sta Varying ) ], - {ok, #it{ - topic_filter = TopicFilter, - start_time = StartTime, - storage_key = StorageKey, - key_filter = Filter - }}. - -next(_Shard, #s{db = DB, data = CF, keymappers = Keymappers}, It0, BatchSize) -> - #it{ - key_filter = KeyFilter - } = It0, - % TODO: ugh, so ugly - NVarying = length(KeyFilter) - 2, + %% Obtain a keymapper for the current number of varying + %% levels. Magic constant 2: we have two extra dimensions of topic + %% index and time; the rest of dimensions are varying levels. 
+ NVarying = length(Inequations) - 2, Keymapper = array:get(NVarying, Keymappers), - %% Calculate lower and upper bounds for iteration: - LowerBound = lower_bound(Keymapper, KeyFilter), - UpperBound = upper_bound(Keymapper, KeyFilter), + Filter = + #filter{range_min = LowerBound, range_max = UpperBound} = emqx_ds_bitmask_keymapper:make_filter( + Keymapper, Inequations + ), {ok, ITHandle} = rocksdb:iterator(DB, CF, [ - {iterate_lower_bound, LowerBound}, {iterate_upper_bound, UpperBound} + {iterate_lower_bound, emqx_ds_bitmask_keymapper:key_to_bitstring(Keymapper, LowerBound)}, + {iterate_upper_bound, emqx_ds_bitmask_keymapper:key_to_bitstring(Keymapper, UpperBound)} ]), try put(?COUNTER, 0), - next_loop(ITHandle, Keymapper, It0, [], BatchSize) + next_loop(ITHandle, Keymapper, Filter, It0, [], BatchSize) after rocksdb:iterator_close(ITHandle), erase(?COUNTER) @@ -198,100 +203,64 @@ next(_Shard, #s{db = DB, data = CF, keymappers = Keymappers}, It0, BatchSize) -> %% Internal functions %%================================================================================ -next_loop(_, _, It, Acc, 0) -> +next_loop(ITHandle, KeyMapper, Filter, It, Acc, 0) -> {ok, It, lists:reverse(Acc)}; -next_loop(ITHandle, KeyMapper, It0 = #it{last_seen_key = Key0, key_filter = KeyFilter}, Acc0, N0) -> +next_loop(ITHandle, KeyMapper, Filter, It0, Acc0, N0) -> inc_counter(), - case next_range(KeyMapper, It0) of - {Key1, Bitmask, Bitfilter} when Key1 > Key0 -> - case iterator_move(KeyMapper, ITHandle, {seek, Key1}) of - {ok, Key, Val} when ?QUICKCHECK_KEY(Key, Bitmask, Bitfilter) -> - assert_progress(bitmask_match, KeyMapper, KeyFilter, Key0, Key1), - Msg = deserialize(Val), + #it{last_seen_key = Key0} = It0, + case emqx_ds_bitmask_keymapper:bin_increment(Filter, Key0) of + overflow -> + {ok, It0, lists:reverse(Acc0)}; + Key1 -> + %% assert + true = Key1 > Key0, + case rocksdb:iterator_move(ITHandle, {seek, Key1}) of + {ok, Key, Val} -> It1 = It0#it{last_seen_key = Key}, - case 
check_message(It1, Msg) of - true -> + case check_message(Filter, It1, Val) of + {true, Msg} -> N1 = N0 - 1, Acc1 = [Msg | Acc0]; false -> N1 = N0, Acc1 = Acc0 end, - {N, It, Acc} = traverse_interval( - ITHandle, KeyMapper, Bitmask, Bitfilter, It1, Acc1, N1 - ), - next_loop(ITHandle, KeyMapper, It, Acc, N); - {ok, Key, _Val} -> - assert_progress(bitmask_miss, KeyMapper, KeyFilter, Key0, Key1), - It = It0#it{last_seen_key = Key}, - next_loop(ITHandle, KeyMapper, It, Acc0, N0); + {N, It, Acc} = traverse_interval(ITHandle, KeyMapper, Filter, It1, Acc1, N1), + next_loop(ITHandle, KeyMapper, Filter, It, Acc, N); {error, invalid_iterator} -> {ok, It0, lists:reverse(Acc0)} - end; - _ -> - {ok, It0, lists:reverse(Acc0)} + end end. -traverse_interval(_, _, _, _, It, Acc, 0) -> +traverse_interval(_ITHandle, _KeyMapper, _Filter, It, Acc, 0) -> {0, It, Acc}; -traverse_interval(ITHandle, KeyMapper, Bitmask, Bitfilter, It0, Acc, N) -> +traverse_interval(ITHandle, KeyMapper, Filter, It0, Acc, N) -> inc_counter(), - case iterator_move(KeyMapper, ITHandle, next) of - {ok, Key, Val} when ?QUICKCHECK_KEY(Key, Bitmask, Bitfilter) -> - Msg = deserialize(Val), + case rocksdb:iterator_move(ITHandle, next) of + {ok, Key, Val} -> It = It0#it{last_seen_key = Key}, - case check_message(It, Msg) of - true -> - traverse_interval( - ITHandle, KeyMapper, Bitmask, Bitfilter, It, [Msg | Acc], N - 1 - ); + case check_message(Filter, It, Val) of + {true, Msg} -> + traverse_interval(ITHandle, KeyMapper, Filter, It, [Msg | Acc], N - 1); false -> - traverse_interval(ITHandle, KeyMapper, Bitmask, Bitfilter, It, Acc, N) + traverse_interval(ITHandle, KeyMapper, Filter, It, Acc, N) end; - {ok, Key, _Val} -> - It = It0#it{last_seen_key = Key}, - {N, It, Acc}; {error, invalid_iterator} -> {0, It0, Acc} end. -next_range(KeyMapper, #it{key_filter = KeyFilter, last_seen_key = PrevKey}) -> - emqx_ds_bitmask_keymapper:next_range(KeyMapper, KeyFilter, PrevKey). - -check_message(_Iterator, _Msg) -> - %% TODO. 
- true. - -iterator_move(KeyMapper, ITHandle, Action0) -> - Action = - case Action0 of - next -> - next; - {seek, Int} -> - {seek, emqx_ds_bitmask_keymapper:key_to_bitstring(KeyMapper, Int)} - end, - case rocksdb:iterator_move(ITHandle, Action) of - {ok, KeyBin, Val} -> - {ok, emqx_ds_bitmask_keymapper:bitstring_to_key(KeyMapper, KeyBin), Val}; - {ok, KeyBin} -> - {ok, emqx_ds_bitmask_keymapper:bitstring_to_key(KeyMapper, KeyBin)}; - Other -> - Other +-spec check_message(emqx_ds_bitmask_keymapper:filter(), #it{}, binary()) -> + {true, #message{}} | false. +check_message(Filter, #it{last_seen_key = Key}, Val) -> + case emqx_ds_bitmask_keymapper:bin_checkmask(Filter, Key) of + true -> + Msg = deserialize(Val), + %% TODO: check strict time and hash collisions + {true, Msg}; + false -> + false end. -assert_progress(_Msg, _KeyMapper, _KeyFilter, Key0, Key1) when Key1 > Key0 -> - ?tp_ignore_side_effects_in_prod( - emqx_ds_storage_bitfield_lts_iter_move, - #{ location => _Msg - , key0 => format_key(_KeyMapper, Key0) - , key1 => format_key(_KeyMapper, Key1) - }), - ok; -assert_progress(Msg, KeyMapper, KeyFilter, Key0, Key1) -> - Str0 = format_key(KeyMapper, Key0), - Str1 = format_key(KeyMapper, Key1), - error(#{'$msg' => Msg, key0 => Str0, key1 => Str1, step => get(?COUNTER), keyfilter => lists:map(fun format_keyfilter/1, KeyFilter)}). - format_key(KeyMapper, Key) -> Vec = [integer_to_list(I, 16) || I <- emqx_ds_bitmask_keymapper:key_to_vector(KeyMapper, Key)], lists:flatten(io_lib:format("~.16B (~s)", [Key, string:join(Vec, ",")])). @@ -357,16 +326,6 @@ make_keymapper(TopicIndexBytes, BitsPerTopicLevel, TSBits, TSOffsetBits, N) -> end, Keymapper. -upper_bound(Keymapper, [TopicIndex | Rest]) -> - filter_to_key(Keymapper, [TopicIndex | [{'=', infinity} || _ <- Rest]]). - -lower_bound(Keymapper, [TopicIndex | Rest]) -> - filter_to_key(Keymapper, [TopicIndex | [{'=', 0} || _ <- Rest]]). 
- -filter_to_key(KeyMapper, KeyFilter) -> - {Key, _, _} = emqx_ds_bitmask_keymapper:next_range(KeyMapper, KeyFilter, 0), - emqx_ds_bitmask_keymapper:key_to_bitstring(KeyMapper, Key). - -spec restore_trie(pos_integer(), rocksdb:db_handle(), rocksdb:cf_handle()) -> emqx_ds_lts:trie(). restore_trie(TopicIndexBytes, DB, CF) -> PersistCallback = fun(Key, Val) -> diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl_ b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl_ deleted file mode 100644 index 32f18d18b..000000000 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl_ +++ /dev/null @@ -1,714 +0,0 @@ -%%-------------------------------------------------------------------- -%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved. -%%-------------------------------------------------------------------- --module(emqx_ds_storage_layer). - --behaviour(gen_server). - -%% API: --export([start_link/2]). --export([create_generation/3]). - --export([open_shard/2, get_streams/3]). --export([message_store/3]). --export([delete/4]). - --export([make_iterator/3, next/1, next/2]). - --export([ - preserve_iterator/2, - restore_iterator/2, - discard_iterator/2, - ensure_iterator/3, - discard_iterator_prefix/2, - list_iterator_prefix/2, - foldl_iterator_prefix/4 -]). - -%% gen_server callbacks: --export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2]). - --export_type([stream/0, cf_refs/0, gen_id/0, options/0, state/0, iterator/0]). --export_type([db_options/0, db_write_options/0, db_read_options/0]). - --compile({inline, [meta_lookup/2]}). - --include_lib("emqx/include/emqx.hrl"). - -%%================================================================================ -%% Type declarations -%%================================================================================ - --type options() :: #{ - dir => file:filename() -}. - -%% see rocksdb:db_options() --type db_options() :: proplists:proplist(). 
-%% see rocksdb:write_options() --type db_write_options() :: proplists:proplist(). -%% see rocksdb:read_options() --type db_read_options() :: proplists:proplist(). - --type cf_refs() :: [{string(), rocksdb:cf_handle()}]. - -%% Message storage generation -%% Keep in mind that instances of this type are persisted in long-term storage. --type generation() :: #{ - %% Module that handles data for the generation - module := module(), - %% Module-specific data defined at generation creation time - data := term(), - %% When should this generation become active? - %% This generation should only contain messages timestamped no earlier than that. - %% The very first generation will have `since` equal 0. - since := emqx_ds:time() -}. - --record(s, { - shard :: emqx_ds:shard(), - keyspace :: emqx_ds_conf:keyspace(), - db :: rocksdb:db_handle(), - cf_iterator :: rocksdb:cf_handle(), - cf_generations :: cf_refs() -}). - --record(stream, - { generation :: gen_id() - , topic_filter :: emqx_ds:topic_filter() - , since :: emqx_ds:time() - , enc :: _EncapsultatedData - }). - --opaque stream() :: #stream{}. - --record(it, { - shard :: emqx_ds:shard(), - gen :: gen_id(), - replay :: emqx_ds:replay(), - module :: module(), - data :: term() -}). - --type gen_id() :: 0..16#ffff. - --opaque state() :: #s{}. --opaque iterator() :: #it{}. - -%% Contents of the default column family: -%% -%% [{<<"genNN">>, #generation{}}, ..., -%% {<<"current">>, GenID}] - --define(DEFAULT_CF, "default"). --define(DEFAULT_CF_OPTS, []). - --define(ITERATOR_CF, "$iterators"). - -%% TODO -%% 1. CuckooTable might be of use here / `OptimizeForPointLookup(...)`. -%% 2. Supposedly might be compressed _very_ effectively. -%% 3. `inplace_update_support`? --define(ITERATOR_CF_OPTS, []). - --define(REF(ShardId), {via, gproc, {n, l, {?MODULE, ShardId}}}). 
- -%%================================================================================ -%% Callbacks -%%================================================================================ - --callback create_new(rocksdb:db_handle(), gen_id(), _Options :: term()) -> - {_Schema, cf_refs()}. - --callback open( - emqx_ds:shard(), - rocksdb:db_handle(), - gen_id(), - cf_refs(), - _Schema -) -> - _DB. - --callback store( - _DB, - _MessageID :: binary(), - emqx_ds:time(), - emqx_ds:topic(), - _Payload :: binary() -) -> - ok | {error, _}. - --callback delete(_DB, _MessageID :: binary(), emqx_ds:time(), emqx_ds:topic()) -> - ok | {error, _}. - --callback get_streams(_DB, emqx_ds:topic_filter(), emqx_ds:time()) -> - [{_TopicRankX, _Stream}]. - --callback make_iterator(_DB, emqx_ds:replay()) -> - {ok, _It} | {error, _}. - --callback restore_iterator(_DB, _Serialized :: binary()) -> {ok, _It} | {error, _}. - --callback preserve_iterator(_It) -> term(). - --callback next(It) -> {value, binary(), It} | none | {error, closed}. - -%%================================================================================ -%% Replication layer API -%%================================================================================ - --spec open_shard(emqx_ds_replication_layer:shard(), emqx_ds_storage_layer:options()) -> ok. -open_shard(Shard, Options) -> - emqx_ds_storage_layer_sup:ensure_shard(Shard, Options). - --spec get_streams(emqx_ds:shard_id(), emqx_ds:topic_filter(), emqx_ds:time()) -> [{emqx_ds:stream_rank(), _Stream}]. -get_streams(Shard, TopicFilter, StartTime) -> - %% TODO: lookup ALL generations - {GenId, #{module := Mod, data := ModState}} = meta_lookup_gen(Shard, StartTime), - lists:map( - fun({RankX, ModStream}) -> - Stream = #stream{ generation = GenId - , topic_filter = TopicFilter - , since = StartTime - , enc = ModStream - }, - Rank = {RankX, GenId}, - {Rank, Stream} - end, - Mod:get_streams(ModState, TopicFilter, StartTime)). 
- --spec message_store(emqx_ds:shard(), [emqx_types:message()], emqx_ds:message_store_opts()) -> - {ok, _MessageId} | {error, _}. -message_store(Shard, Msgs, _Opts) -> - {ok, lists:map( - fun(Msg) -> - GUID = emqx_message:id(Msg), - Timestamp = Msg#message.timestamp, - {_GenId, #{module := Mod, data := ModState}} = meta_lookup_gen(Shard, Timestamp), - Topic = emqx_topic:words(emqx_message:topic(Msg)), - Payload = serialize(Msg), - Mod:store(ModState, GUID, Timestamp, Topic, Payload), - GUID - end, - Msgs)}. - --spec next(iterator()) -> {ok, iterator(), [binary()]} | end_of_stream. -next(It = #it{}) -> - next(It, _BatchSize = 1). - --spec next(iterator(), pos_integer()) -> {ok, iterator(), [binary()]} | end_of_stream. -next(#it{data = {?MODULE, end_of_stream}}, _BatchSize) -> - end_of_stream; -next( - It = #it{shard = Shard, module = Mod, gen = Gen, data = {?MODULE, retry, Serialized}}, BatchSize -) -> - #{data := DBData} = meta_get_gen(Shard, Gen), - {ok, ItData} = Mod:restore_iterator(DBData, Serialized), - next(It#it{data = ItData}, BatchSize); -next(It = #it{}, BatchSize) -> - do_next(It, BatchSize, _Acc = []). - -%%================================================================================ -%% API functions -%%================================================================================ - --spec create_generation( - emqx_ds:shard(), emqx_ds:time(), emqx_ds_conf:backend_config() -) -> - {ok, gen_id()} | {error, nonmonotonic}. -create_generation(ShardId, Since, Config = {_Module, _Options}) -> - gen_server:call(?REF(ShardId), {create_generation, Since, Config}). - --spec delete(emqx_ds:shard(), emqx_guid:guid(), emqx_ds:time(), emqx_ds:topic()) -> - ok | {error, _}. -delete(Shard, GUID, Time, Topic) -> - {_GenId, #{module := Mod, data := Data}} = meta_lookup_gen(Shard, Time), - Mod:delete(Data, GUID, Time, Topic). - --spec make_iterator(emqx_ds:shard(), stream(), emqx_ds:time()) -> - {ok, iterator()} | {error, _TODO}. 
-make_iterator(Shard, Stream, StartTime) -> - #stream{ topic_filter = TopicFilter - , since = Since - , enc = Enc - } = Stream, - {GenId, Gen} = meta_lookup_gen(Shard, StartTime), - Replay = {TopicFilter, Since}, - case Mod:make_iterator(Data, Replay, Options) of - #it{ gen = GenId, - replay = {TopicFilter, Since} - }. - --spec do_next(iterator(), non_neg_integer(), [binary()]) -> - {ok, iterator(), [binary()]} | end_of_stream. -do_next(It, N, Acc) when N =< 0 -> - {ok, It, lists:reverse(Acc)}; -do_next(It = #it{module = Mod, data = ItData}, N, Acc) -> - case Mod:next(ItData) of - {value, Bin, ItDataNext} -> - Val = deserialize(Bin), - do_next(It#it{data = ItDataNext}, N - 1, [Val | Acc]); - {error, _} = _Error -> - %% todo: log? - %% iterator might be invalid now; will need to re-open it. - Serialized = Mod:preserve_iterator(ItData), - {ok, It#it{data = {?MODULE, retry, Serialized}}, lists:reverse(Acc)}; - none -> - case open_next_iterator(It) of - {ok, ItNext} -> - do_next(ItNext, N, Acc); - {error, _} = _Error -> - %% todo: log? - %% fixme: only bad options may lead to this? - %% return an "empty" iterator to be re-opened when retrying? - Serialized = Mod:preserve_iterator(ItData), - {ok, It#it{data = {?MODULE, retry, Serialized}}, lists:reverse(Acc)}; - none -> - case Acc of - [] -> - end_of_stream; - _ -> - {ok, It#it{data = {?MODULE, end_of_stream}}, lists:reverse(Acc)} - end - end - end. - --spec preserve_iterator(iterator(), emqx_ds:iterator_id()) -> - ok | {error, _TODO}. -preserve_iterator(It = #it{}, IteratorID) -> - iterator_put_state(IteratorID, It). - --spec restore_iterator(emqx_ds:shard(), emqx_ds:replay_id()) -> - {ok, iterator()} | {error, _TODO}. -restore_iterator(Shard, ReplayID) -> - case iterator_get_state(Shard, ReplayID) of - {ok, Serial} -> - restore_iterator_state(Shard, Serial); - not_found -> - {error, not_found}; - {error, _Reason} = Error -> - Error - end. 
- --spec ensure_iterator(emqx_ds:shard(), emqx_ds:iterator_id(), emqx_ds:replay()) -> - {ok, iterator()} | {error, _TODO}. -ensure_iterator(Shard, IteratorID, Replay = {_TopicFilter, _StartMS}) -> - case restore_iterator(Shard, IteratorID) of - {ok, It} -> - {ok, It}; - {error, not_found} -> - {ok, It} = make_iterator(Shard, Replay), - ok = emqx_ds_storage_layer:preserve_iterator(It, IteratorID), - {ok, It}; - Error -> - Error - end. - --spec discard_iterator(emqx_ds:shard(), emqx_ds:replay_id()) -> - ok | {error, _TODO}. -discard_iterator(Shard, ReplayID) -> - iterator_delete(Shard, ReplayID). - --spec discard_iterator_prefix(emqx_ds:shard(), binary()) -> - ok | {error, _TODO}. -discard_iterator_prefix(Shard, KeyPrefix) -> - case do_discard_iterator_prefix(Shard, KeyPrefix) of - {ok, _} -> ok; - Error -> Error - end. - --spec list_iterator_prefix( - emqx_ds:shard(), - binary() -) -> {ok, [emqx_ds:iterator_id()]} | {error, _TODO}. -list_iterator_prefix(Shard, KeyPrefix) -> - do_list_iterator_prefix(Shard, KeyPrefix). - --spec foldl_iterator_prefix( - emqx_ds:shard(), - binary(), - fun((_Key :: binary(), _Value :: binary(), Acc) -> Acc), - Acc -) -> {ok, Acc} | {error, _TODO} when - Acc :: term(). -foldl_iterator_prefix(Shard, KeyPrefix, Fn, Acc) -> - do_foldl_iterator_prefix(Shard, KeyPrefix, Fn, Acc). - -%%================================================================================ -%% gen_server -%%================================================================================ - --spec start_link(emqx_ds:shard(), emqx_ds_storage_layer:options()) -> - {ok, pid()}. -start_link(Shard, Options) -> - gen_server:start_link(?REF(Shard), ?MODULE, {Shard, Options}, []). - -init({Shard, Options}) -> - process_flag(trap_exit, true), - {ok, S0} = do_open_db(Shard, Options), - S = ensure_current_generation(S0), - ok = populate_metadata(S), - {ok, S}. 
- -handle_call({create_generation, Since, Config}, _From, S) -> - case create_new_gen(Since, Config, S) of - {ok, GenId, NS} -> - {reply, {ok, GenId}, NS}; - {error, _} = Error -> - {reply, Error, S} - end; -handle_call(_Call, _From, S) -> - {reply, {error, unknown_call}, S}. - -handle_cast(_Cast, S) -> - {noreply, S}. - -handle_info(_Info, S) -> - {noreply, S}. - -terminate(_Reason, #s{db = DB, shard = Shard}) -> - meta_erase(Shard), - ok = rocksdb:close(DB). - -%%================================================================================ -%% Internal functions -%%================================================================================ - --record(db, {handle :: rocksdb:db_handle(), cf_iterator :: rocksdb:cf_handle()}). - --spec populate_metadata(state()) -> ok. -populate_metadata(S = #s{shard = Shard, db = DBHandle, cf_iterator = CFIterator}) -> - ok = meta_put(Shard, db, #db{handle = DBHandle, cf_iterator = CFIterator}), - Current = schema_get_current(DBHandle), - lists:foreach(fun(GenId) -> populate_metadata(GenId, S) end, lists:seq(0, Current)). - --spec populate_metadata(gen_id(), state()) -> ok. -populate_metadata(GenId, S = #s{shard = Shard, db = DBHandle}) -> - Gen = open_gen(GenId, schema_get_gen(DBHandle, GenId), S), - meta_register_gen(Shard, GenId, Gen). - --spec ensure_current_generation(state()) -> state(). -ensure_current_generation(S = #s{shard = _Shard, keyspace = Keyspace, db = DBHandle}) -> - case schema_get_current(DBHandle) of - undefined -> - Config = emqx_ds_conf:keyspace_config(Keyspace), - {ok, _, NS} = create_new_gen(0, Config, S), - NS; - _GenId -> - S - end. - --spec create_new_gen(emqx_ds:time(), emqx_ds_conf:backend_config(), state()) -> - {ok, gen_id(), state()} | {error, nonmonotonic}. 
-create_new_gen(Since, Config, S = #s{shard = Shard, db = DBHandle}) -> - GenId = get_next_id(meta_get_current(Shard)), - GenId = get_next_id(schema_get_current(DBHandle)), - case is_gen_valid(Shard, GenId, Since) of - ok -> - {ok, Gen, NS} = create_gen(GenId, Since, Config, S), - %% TODO: Transaction? Column family creation can't be transactional, anyway. - ok = schema_put_gen(DBHandle, GenId, Gen), - ok = schema_put_current(DBHandle, GenId), - ok = meta_register_gen(Shard, GenId, open_gen(GenId, Gen, NS)), - {ok, GenId, NS}; - {error, _} = Error -> - Error - end. - --spec create_gen(gen_id(), emqx_ds:time(), emqx_ds_conf:backend_config(), state()) -> - {ok, generation(), state()}. -create_gen(GenId, Since, {Module, Options}, S = #s{db = DBHandle, cf_generations = CFs}) -> - % TODO: Backend implementation should ensure idempotency. - {Schema, NewCFs} = Module:create_new(DBHandle, GenId, Options), - Gen = #{ - module => Module, - data => Schema, - since => Since - }, - {ok, Gen, S#s{cf_generations = NewCFs ++ CFs}}. - --spec do_open_db(emqx_ds:shard(), options()) -> {ok, state()} | {error, _TODO}. -do_open_db(Shard, Options) -> - DefaultDir = binary_to_list(Shard), - DBDir = unicode:characters_to_list(maps:get(dir, Options, DefaultDir)), - %% TODO: properly forward keyspace - Keyspace = maps:get(keyspace, Options, default_keyspace), - DBOptions = [ - {create_if_missing, true}, - {create_missing_column_families, true} - | emqx_ds_conf:db_options(Keyspace) - ], - _ = filelib:ensure_dir(DBDir), - ExistingCFs = - case rocksdb:list_column_families(DBDir, DBOptions) of - {ok, CFs} -> - [{Name, []} || Name <- CFs, Name /= ?DEFAULT_CF, Name /= ?ITERATOR_CF]; - % DB is not present. 
First start - {error, {db_open, _}} -> - [] - end, - ColumnFamilies = [ - {?DEFAULT_CF, ?DEFAULT_CF_OPTS}, - {?ITERATOR_CF, ?ITERATOR_CF_OPTS} - | ExistingCFs - ], - case rocksdb:open(DBDir, DBOptions, ColumnFamilies) of - {ok, DBHandle, [_CFDefault, CFIterator | CFRefs]} -> - {CFNames, _} = lists:unzip(ExistingCFs), - {ok, #s{ - shard = Shard, - keyspace = Keyspace, - db = DBHandle, - cf_iterator = CFIterator, - cf_generations = lists:zip(CFNames, CFRefs) - }}; - Error -> - Error - end. - --spec open_gen(gen_id(), generation(), state()) -> generation(). -open_gen( - GenId, - Gen = #{module := Mod, data := Data}, - #s{shard = Shard, db = DBHandle, cf_generations = CFs} -) -> - DB = Mod:open(Shard, DBHandle, GenId, CFs, Data), - Gen#{data := DB}. - --spec open_next_iterator(iterator()) -> {ok, iterator()} | {error, _Reason} | none. -open_next_iterator(It = #it{shard = Shard, gen = GenId}) -> - open_next_iterator(meta_get_gen(Shard, GenId + 1), It#it{gen = GenId + 1}). - -open_next_iterator(undefined, _It) -> - none; -open_next_iterator(Gen = #{}, It) -> - open_iterator(Gen, It). - --spec open_restore_iterator(generation(), iterator(), binary()) -> - {ok, iterator()} | {error, _Reason}. -open_restore_iterator(#{module := Mod, data := Data}, It = #it{}, Serial) -> - case Mod:restore_iterator(Data, Serial) of - {ok, ItData} -> - {ok, It#it{module = Mod, data = ItData}}; - Err -> - Err - end. - -%% - --define(KEY_REPLAY_STATE(IteratorId), <<(IteratorId)/binary, "rs">>). --define(KEY_REPLAY_STATE_PAT(KeyReplayState), begin - <> = (KeyReplayState), - IteratorId -end). - --define(ITERATION_WRITE_OPTS, []). --define(ITERATION_READ_OPTS, []). - -iterator_get_state(Shard, ReplayID) -> - #db{handle = Handle, cf_iterator = CF} = meta_lookup(Shard, db), - rocksdb:get(Handle, CF, ?KEY_REPLAY_STATE(ReplayID), ?ITERATION_READ_OPTS). 
- -iterator_put_state(ID, It = #it{shard = Shard}) -> - #db{handle = Handle, cf_iterator = CF} = meta_lookup(Shard, db), - Serial = preserve_iterator_state(It), - rocksdb:put(Handle, CF, ?KEY_REPLAY_STATE(ID), Serial, ?ITERATION_WRITE_OPTS). - -iterator_delete(Shard, ID) -> - #db{handle = Handle, cf_iterator = CF} = meta_lookup(Shard, db), - rocksdb:delete(Handle, CF, ?KEY_REPLAY_STATE(ID), ?ITERATION_WRITE_OPTS). - -preserve_iterator_state(#it{ - gen = Gen, - replay = {TopicFilter, StartTime}, - module = Mod, - data = ItData -}) -> - term_to_binary(#{ - v => 1, - gen => Gen, - filter => TopicFilter, - start => StartTime, - st => Mod:preserve_iterator(ItData) - }). - -restore_iterator_state(Shard, Serial) when is_binary(Serial) -> - restore_iterator_state(Shard, binary_to_term(Serial)); -restore_iterator_state( - Shard, - #{ - v := 1, - gen := Gen, - filter := TopicFilter, - start := StartTime, - st := State - } -) -> - It = #it{shard = Shard, gen = Gen, replay = {TopicFilter, StartTime}}, - open_restore_iterator(meta_get_gen(Shard, Gen), It, State). - -do_list_iterator_prefix(Shard, KeyPrefix) -> - Fn = fun(K0, _V, Acc) -> - K = ?KEY_REPLAY_STATE_PAT(K0), - [K | Acc] - end, - do_foldl_iterator_prefix(Shard, KeyPrefix, Fn, []). - -do_discard_iterator_prefix(Shard, KeyPrefix) -> - #db{handle = DBHandle, cf_iterator = CF} = meta_lookup(Shard, db), - Fn = fun(K, _V, _Acc) -> ok = rocksdb:delete(DBHandle, CF, K, ?ITERATION_WRITE_OPTS) end, - do_foldl_iterator_prefix(Shard, KeyPrefix, Fn, ok). - -do_foldl_iterator_prefix(Shard, KeyPrefix, Fn, Acc) -> - #db{handle = Handle, cf_iterator = CF} = meta_lookup(Shard, db), - case rocksdb:iterator(Handle, CF, ?ITERATION_READ_OPTS) of - {ok, It} -> - NextAction = {seek, KeyPrefix}, - do_foldl_iterator_prefix(Handle, CF, It, KeyPrefix, NextAction, Fn, Acc); - Error -> - Error - end. 
- -do_foldl_iterator_prefix(DBHandle, CF, It, KeyPrefix, NextAction, Fn, Acc) -> - case rocksdb:iterator_move(It, NextAction) of - {ok, K = <>, V} -> - NewAcc = Fn(K, V, Acc), - do_foldl_iterator_prefix(DBHandle, CF, It, KeyPrefix, next, Fn, NewAcc); - {ok, _K, _V} -> - ok = rocksdb:iterator_close(It), - {ok, Acc}; - {error, invalid_iterator} -> - ok = rocksdb:iterator_close(It), - {ok, Acc}; - Error -> - ok = rocksdb:iterator_close(It), - Error - end. - -%% Functions for dealing with the metadata stored persistently in rocksdb - --define(CURRENT_GEN, <<"current">>). --define(SCHEMA_WRITE_OPTS, []). --define(SCHEMA_READ_OPTS, []). - --spec schema_get_gen(rocksdb:db_handle(), gen_id()) -> generation(). -schema_get_gen(DBHandle, GenId) -> - {ok, Bin} = rocksdb:get(DBHandle, schema_gen_key(GenId), ?SCHEMA_READ_OPTS), - binary_to_term(Bin). - --spec schema_put_gen(rocksdb:db_handle(), gen_id(), generation()) -> ok | {error, _}. -schema_put_gen(DBHandle, GenId, Gen) -> - rocksdb:put(DBHandle, schema_gen_key(GenId), term_to_binary(Gen), ?SCHEMA_WRITE_OPTS). - --spec schema_get_current(rocksdb:db_handle()) -> gen_id() | undefined. -schema_get_current(DBHandle) -> - case rocksdb:get(DBHandle, ?CURRENT_GEN, ?SCHEMA_READ_OPTS) of - {ok, Bin} -> - binary_to_integer(Bin); - not_found -> - undefined - end. - --spec schema_put_current(rocksdb:db_handle(), gen_id()) -> ok | {error, _}. -schema_put_current(DBHandle, GenId) -> - rocksdb:put(DBHandle, ?CURRENT_GEN, integer_to_binary(GenId), ?SCHEMA_WRITE_OPTS). - --spec schema_gen_key(integer()) -> binary(). -schema_gen_key(N) -> - <<"gen", N:32>>. - --undef(CURRENT_GEN). --undef(SCHEMA_WRITE_OPTS). --undef(SCHEMA_READ_OPTS). - -%% Functions for dealing with the runtime shard metadata: - --define(PERSISTENT_TERM(SHARD, GEN), {emqx_ds_storage_layer, SHARD, GEN}). - --spec meta_register_gen(emqx_ds:shard(), gen_id(), generation()) -> ok. 
-meta_register_gen(Shard, GenId, Gen) -> - Gs = - case GenId > 0 of - true -> meta_lookup(Shard, GenId - 1); - false -> [] - end, - ok = meta_put(Shard, GenId, [Gen | Gs]), - ok = meta_put(Shard, current, GenId). - --spec meta_lookup_gen(emqx_ds:shard(), emqx_ds:time()) -> {gen_id(), generation()}. -meta_lookup_gen(Shard, Time) -> - %% TODO - %% Is cheaper persistent term GC on update here worth extra lookup? I'm leaning - %% towards a "no". - Current = meta_lookup(Shard, current), - Gens = meta_lookup(Shard, Current), - find_gen(Time, Current, Gens). - -find_gen(Time, GenId, [Gen = #{since := Since} | _]) when Time >= Since -> - {GenId, Gen}; -find_gen(Time, GenId, [_Gen | Rest]) -> - find_gen(Time, GenId - 1, Rest). - --spec meta_get_gen(emqx_ds:shard(), gen_id()) -> generation() | undefined. -meta_get_gen(Shard, GenId) -> - case meta_lookup(Shard, GenId, []) of - [Gen | _Older] -> Gen; - [] -> undefined - end. - --spec meta_get_current(emqx_ds:shard()) -> gen_id() | undefined. -meta_get_current(Shard) -> - meta_lookup(Shard, current, undefined). - --spec meta_lookup(emqx_ds:shard(), _K) -> _V. -meta_lookup(Shard, Key) -> - persistent_term:get(?PERSISTENT_TERM(Shard, Key)). - --spec meta_lookup(emqx_ds:shard(), _K, Default) -> _V | Default. -meta_lookup(Shard, K, Default) -> - persistent_term:get(?PERSISTENT_TERM(Shard, K), Default). - --spec meta_put(emqx_ds:shard(), _K, _V) -> ok. -meta_put(Shard, K, V) -> - persistent_term:put(?PERSISTENT_TERM(Shard, K), V). - --spec meta_erase(emqx_ds:shard()) -> ok. -meta_erase(Shard) -> - [ - persistent_term:erase(K) - || {K = ?PERSISTENT_TERM(Z, _), _} <- persistent_term:get(), Z =:= Shard - ], - ok. - --undef(PERSISTENT_TERM). - -get_next_id(undefined) -> 0; -get_next_id(GenId) -> GenId + 1. 
- -is_gen_valid(Shard, GenId, Since) when GenId > 0 -> - [GenPrev | _] = meta_lookup(Shard, GenId - 1), - case GenPrev of - #{since := SincePrev} when Since > SincePrev -> - ok; - #{} -> - {error, nonmonotonic} - end; -is_gen_valid(_Shard, 0, 0) -> - ok. - -serialize(Msg) -> - %% TODO: remove topic, GUID, etc. from the stored - %% message. Reconstruct it from the metadata. - term_to_binary(emqx_message:to_map(Msg)). - -deserialize(Bin) -> - emqx_message:from_map(binary_to_term(Bin)). - - -%% -spec store_cfs(rocksdb:db_handle(), [{string(), rocksdb:cf_handle()}]) -> ok. -%% store_cfs(DBHandle, CFRefs) -> -%% lists:foreach( -%% fun({CFName, CFRef}) -> -%% persistent_term:put({self(), CFName}, {DBHandle, CFRef}) -%% end, -%% CFRefs). diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_layer_bitmask.erl_ b/apps/emqx_durable_storage/src/emqx_ds_storage_layer_bitmask.erl_ deleted file mode 100644 index bdf5a1453..000000000 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_layer_bitmask.erl_ +++ /dev/null @@ -1,748 +0,0 @@ -%%-------------------------------------------------------------------- -%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved. -%%-------------------------------------------------------------------- - --module(emqx_ds_message_storage_bitmask). - -%%================================================================================ -%% @doc Description of the schema -%% -%% Let us assume that `T' is a topic and `t' is time. These are the two -%% dimensions used to index messages. They can be viewed as -%% "coordinates" of an MQTT message in a 2D space. -%% -%% Oftentimes, when wildcard subscription is used, keys must be -%% scanned in both dimensions simultaneously. -%% -%% Rocksdb allows to iterate over sorted keys very fast. This means we -%% need to map our two-dimentional keys to a single index that is -%% sorted in a way that helps to iterate over both time and topic -%% without having to do a lot of random seeks. 
-%% -%% == Mapping of 2D keys to rocksdb keys == -%% -%% We use "zigzag" pattern to store messages, where rocksdb key is -%% composed like like this: -%% -%% |ttttt|TTTTTTTTT|tttt| -%% ^ ^ ^ -%% | | | -%% +-------+ | +---------+ -%% | | | -%% most significant topic hash least significant -%% bits of timestamp bits of timestamp -%% (a.k.a epoch) (a.k.a time offset) -%% -%% Topic hash is level-aware: each topic level is hashed separately -%% and the resulting hashes are bitwise-concatentated. This allows us -%% to map topics to fixed-length bitstrings while keeping some degree -%% of information about the hierarchy. -%% -%% Next important concept is what we call "epoch". Duration of the -%% epoch is determined by maximum time offset. Epoch is calculated by -%% shifting bits of the timestamp right. -%% -%% The resulting index is a space-filling curve that looks like -%% this in the topic-time 2D space: -%% -%% T ^ ---->------ |---->------ |---->------ -%% | --/ / --/ / --/ -%% | -<-/ | -<-/ | -<-/ -%% | -/ | -/ | -/ -%% | ---->------ | ---->------ | ---->------ -%% | --/ / --/ / --/ -%% | ---/ | ---/ | ---/ -%% | -/ ^ -/ ^ -/ -%% | ---->------ | ---->------ | ---->------ -%% | --/ / --/ / --/ -%% | -<-/ | -<-/ | -<-/ -%% | -/ | -/ | -/ -%% | ---->------| ---->------| ----------> -%% | -%% -+------------+-----------------------------> t -%% epoch -%% -%% This structure allows to quickly seek to a the first message that -%% was recorded in a certain epoch in a certain topic or a -%% group of topics matching filter like `foo/bar/#`. -%% -%% Due to its structure, for each pair of rocksdb keys K1 and K2, such -%% that K1 > K2 and topic(K1) = topic(K2), timestamp(K1) > -%% timestamp(K2). -%% That is, replay doesn't reorder messages published in each -%% individual topic. -%% -%% This property doesn't hold between different topics, but it's not deemed -%% a problem right now. 
-%% -%%================================================================================ - --behaviour(emqx_ds_storage_layer). - -%% API: --export([create_new/3, open/5]). --export([make_keymapper/1]). - --export([store/5, delete/4]). - --export([get_streams/3, make_iterator/3, next/1]). - --export([preserve_iterator/1, restore_iterator/2, refresh_iterator/1]). - -%% Debug/troubleshooting: -%% Keymappers --export([ - keymapper_info/1, - compute_bitstring/3, - compute_topic_bitmask/2, - compute_time_bitmask/1, - hash/2 -]). - -%% Keyspace filters --export([ - make_keyspace_filter/2, - compute_initial_seek/1, - compute_next_seek/2, - compute_time_seek/3, - compute_topic_seek/4 -]). - --export_type([db/0, stream/0, iterator/0, schema/0]). - --export_type([options/0]). --export_type([iteration_options/0]). - --compile( - {inline, [ - bitwise_concat/3, - ones/1, - successor/1, - topic_hash_matches/3, - time_matches/3 - ]} -). - -%%================================================================================ -%% Type declarations -%%================================================================================ - --opaque stream() :: emqx_ds:topic_filter(). - --type topic() :: emqx_ds:topic(). --type topic_filter() :: emqx_ds:topic_filter(). --type time() :: emqx_ds:time(). - -%% Number of bits --type bits() :: non_neg_integer(). - -%% Key of a RocksDB record. --type key() :: binary(). - -%% Distribution of entropy among topic levels. -%% Example: [4, 8, 16] means that level 1 gets 4 bits, level 2 gets 8 bits, -%% and _rest of levels_ (if any) get 16 bits. --type bits_per_level() :: [bits(), ...]. - --type options() :: #{ - %% Number of bits in a message timestamp. - timestamp_bits := bits(), - %% Number of bits in a key allocated to each level in a message topic. - topic_bits_per_level := bits_per_level(), - %% Maximum granularity of iteration over time. 
- epoch := time(), - - iteration => iteration_options(), - - cf_options => emqx_ds_storage_layer:db_cf_options() -}. - --type iteration_options() :: #{ - %% Request periodic iterator refresh. - %% This might be helpful during replays taking a lot of time (e.g. tens of seconds). - %% Note that `{every, 1000}` means 1000 _operations_ with the iterator which is not - %% the same as 1000 replayed messages. - iterator_refresh => {every, _NumOperations :: pos_integer()} -}. - -%% Persistent configuration of the generation, it is used to create db -%% record when the database is reopened --record(schema, {keymapper :: keymapper()}). - --opaque schema() :: #schema{}. - --record(db, { - shard :: emqx_ds:shard(), - handle :: rocksdb:db_handle(), - cf :: rocksdb:cf_handle(), - keymapper :: keymapper(), - write_options = [{sync, true}] :: emqx_ds_storage_layer:db_write_options(), - read_options = [] :: emqx_ds_storage_layer:db_read_options() -}). - --record(it, { - handle :: rocksdb:itr_handle(), - filter :: keyspace_filter(), - cursor :: binary() | undefined, - next_action :: {seek, binary()} | next, - refresh_counter :: {non_neg_integer(), pos_integer()} | undefined -}). - --record(filter, { - keymapper :: keymapper(), - topic_filter :: topic_filter(), - start_time :: integer(), - hash_bitfilter :: integer(), - hash_bitmask :: integer(), - time_bitfilter :: integer(), - time_bitmask :: integer() -}). - -% NOTE -% Keymapper decides how to map messages into RocksDB column family keyspace. --record(keymapper, { - source :: [bitsource(), ...], - bitsize :: bits(), - epoch :: non_neg_integer() -}). - --type bitsource() :: - %% Consume `_Size` bits from timestamp starting at `_Offset`th bit. - %% TODO consistency - {timestamp, _Offset :: bits(), _Size :: bits()} - %% Consume next topic level (either one or all of them) and compute `_Size` bits-wide hash. - | {hash, level | levels, _Size :: bits()}. - --opaque db() :: #db{}. --opaque iterator() :: #it{}. 
--type serialized_iterator() :: binary(). --type keymapper() :: #keymapper{}. --type keyspace_filter() :: #filter{}. - -%%================================================================================ -%% API funcions -%%================================================================================ - -%% Create a new column family for the generation and a serializable representation of the schema --spec create_new(rocksdb:db_handle(), emqx_ds_storage_layer:gen_id(), options()) -> - {schema(), emqx_ds_storage_layer:cf_refs()}. -create_new(DBHandle, GenId, Options) -> - CFName = data_cf(GenId), - CFOptions = maps:get(cf_options, Options, []), - {ok, CFHandle} = rocksdb:create_column_family(DBHandle, CFName, CFOptions), - Schema = #schema{keymapper = make_keymapper(Options)}, - {Schema, [{CFName, CFHandle}]}. - -%% Reopen the database --spec open( - emqx_ds:shard(), - rocksdb:db_handle(), - emqx_ds_storage_layer:gen_id(), - emqx_ds_storage_layer:cf_refs(), - schema() -) -> - db(). -open(Shard, DBHandle, GenId, CFs, #schema{keymapper = Keymapper}) -> - {value, {_, CFHandle}} = lists:keysearch(data_cf(GenId), 1, CFs), - #db{ - shard = Shard, - handle = DBHandle, - cf = CFHandle, - keymapper = Keymapper - }. - --spec make_keymapper(options()) -> keymapper(). -make_keymapper(#{ - timestamp_bits := TimestampBits, - topic_bits_per_level := BitsPerLevel, - epoch := MaxEpoch -}) -> - TimestampLSBs = min(TimestampBits, floor(math:log2(MaxEpoch))), - TimestampMSBs = TimestampBits - TimestampLSBs, - NLevels = length(BitsPerLevel), - {LevelBits, [TailLevelsBits]} = lists:split(NLevels - 1, BitsPerLevel), - Source = lists:flatten([ - [{timestamp, TimestampLSBs, TimestampMSBs} || TimestampMSBs > 0], - [{hash, level, Bits} || Bits <- LevelBits], - {hash, levels, TailLevelsBits}, - [{timestamp, 0, TimestampLSBs} || TimestampLSBs > 0] - ]), - #keymapper{ - source = Source, - bitsize = lists:sum([S || {_, _, S} <- Source]), - epoch = 1 bsl TimestampLSBs - }. 
- --spec store(db(), emqx_guid:guid(), emqx_ds:time(), topic(), binary()) -> - ok | {error, _TODO}. -store(DB = #db{handle = DBHandle, cf = CFHandle}, MessageID, PublishedAt, Topic, MessagePayload) -> - Key = make_message_key(Topic, PublishedAt, MessageID, DB#db.keymapper), - Value = make_message_value(Topic, MessagePayload), - rocksdb:put(DBHandle, CFHandle, Key, Value, DB#db.write_options). - --spec delete(db(), emqx_guid:guid(), emqx_ds:time(), topic()) -> - ok | {error, _TODO}. -delete(DB = #db{handle = DBHandle, cf = CFHandle}, MessageID, PublishedAt, Topic) -> - Key = make_message_key(Topic, PublishedAt, MessageID, DB#db.keymapper), - rocksdb:delete(DBHandle, CFHandle, Key, DB#db.write_options). - --spec get_streams(db(), emqx_ds:topic_filter(), emqx_ds:time()) -> - [stream()]. -get_streams(_, TopicFilter, _) -> - [{0, TopicFilter}]. - --spec make_iterator(db(), emqx_ds:replay(), iteration_options()) -> - % {error, invalid_start_time}? might just start from the beginning of time - % and call it a day: client violated the contract anyway. - {ok, iterator()} | {error, _TODO}. -make_iterator(DB = #db{handle = DBHandle, cf = CFHandle}, Replay, Options) -> - case rocksdb:iterator(DBHandle, CFHandle, DB#db.read_options) of - {ok, ITHandle} -> - Filter = make_keyspace_filter(Replay, DB#db.keymapper), - InitialSeek = combine(compute_initial_seek(Filter), <<>>, DB#db.keymapper), - RefreshCounter = make_refresh_counter(maps:get(iterator_refresh, Options, undefined)), - {ok, #it{ - handle = ITHandle, - filter = Filter, - next_action = {seek, InitialSeek}, - refresh_counter = RefreshCounter - }}; - Err -> - Err - end. - --spec next(iterator()) -> {value, binary(), iterator()} | none | {error, closed}. 
-next(It0 = #it{filter = #filter{keymapper = Keymapper}}) -> - It = maybe_refresh_iterator(It0), - case rocksdb:iterator_move(It#it.handle, It#it.next_action) of - % spec says `{ok, Key}` is also possible but the implementation says it's not - {ok, Key, Value} -> - % Preserve last seen key in the iterator so it could be restored / refreshed later. - ItNext = It#it{cursor = Key}, - Bitstring = extract(Key, Keymapper), - case match_next(Bitstring, Value, It#it.filter) of - {_Topic, Payload} -> - {value, Payload, ItNext#it{next_action = next}}; - next -> - next(ItNext#it{next_action = next}); - NextBitstring when is_integer(NextBitstring) -> - NextSeek = combine(NextBitstring, <<>>, Keymapper), - next(ItNext#it{next_action = {seek, NextSeek}}); - none -> - stop_iteration(ItNext) - end; - {error, invalid_iterator} -> - stop_iteration(It); - {error, iterator_closed} -> - {error, closed} - end. - --spec preserve_iterator(iterator()) -> serialized_iterator(). -preserve_iterator(#it{ - cursor = Cursor, - filter = #filter{ - topic_filter = TopicFilter, - start_time = StartTime - } -}) -> - State = #{ - v => 1, - cursor => Cursor, - replay => {TopicFilter, StartTime} - }, - term_to_binary(State). - --spec restore_iterator(db(), serialized_iterator()) -> - {ok, iterator()} | {error, _TODO}. -restore_iterator(DB, Serial) when is_binary(Serial) -> - State = binary_to_term(Serial), - restore_iterator(DB, State); -restore_iterator(DB, #{ - v := 1, - cursor := Cursor, - replay := Replay = {_TopicFilter, _StartTime} -}) -> - Options = #{}, % TODO: passthrough options - case make_iterator(DB, Replay, Options) of - {ok, It} when Cursor == undefined -> - % Iterator was preserved right after it has been made. - {ok, It}; - {ok, It} -> - % Iterator was preserved mid-replay, seek right past the last seen key. - {ok, It#it{cursor = Cursor, next_action = {seek, successor(Cursor)}}}; - Err -> - Err - end. - --spec refresh_iterator(iterator()) -> iterator(). 
-refresh_iterator(It = #it{handle = Handle, cursor = Cursor, next_action = Action}) -> - case rocksdb:iterator_refresh(Handle) of - ok when Action =:= next -> - % Now the underlying iterator is invalid, need to seek instead. - It#it{next_action = {seek, successor(Cursor)}}; - ok -> - % Now the underlying iterator is invalid, but will seek soon anyway. - It; - {error, _} -> - % Implementation could in theory return an {error, ...} tuple. - % Supposedly our best bet is to ignore it. - % TODO logging? - It - end. - -%%================================================================================ -%% Internal exports -%%================================================================================ - --spec keymapper_info(keymapper()) -> - #{source := [bitsource()], bitsize := bits(), epoch := time()}. -keymapper_info(#keymapper{source = Source, bitsize = Bitsize, epoch = Epoch}) -> - #{source => Source, bitsize => Bitsize, epoch => Epoch}. - -make_message_key(Topic, PublishedAt, MessageID, Keymapper) -> - combine(compute_bitstring(Topic, PublishedAt, Keymapper), MessageID, Keymapper). - -make_message_value(Topic, MessagePayload) -> - term_to_binary({Topic, MessagePayload}). - -unwrap_message_value(Binary) -> - binary_to_term(Binary). - --spec combine(_Bitstring :: integer(), emqx_guid:guid() | <<>>, keymapper()) -> - key(). -combine(Bitstring, MessageID, #keymapper{bitsize = Size}) -> - <>. - --spec extract(key(), keymapper()) -> - _Bitstring :: integer(). -extract(Key, #keymapper{bitsize = Size}) -> - <> = Key, - Bitstring. - --spec compute_bitstring(topic_filter(), time(), keymapper()) -> integer(). -compute_bitstring(TopicFilter, Timestamp, #keymapper{source = Source}) -> - compute_bitstring(TopicFilter, Timestamp, Source, 0). - --spec compute_topic_bitmask(topic_filter(), keymapper()) -> integer(). -compute_topic_bitmask(TopicFilter, #keymapper{source = Source}) -> - compute_topic_bitmask(TopicFilter, Source, 0). 
- --spec compute_time_bitmask(keymapper()) -> integer(). -compute_time_bitmask(#keymapper{source = Source}) -> - compute_time_bitmask(Source, 0). - --spec hash(term(), bits()) -> integer(). -hash(Input, Bits) -> - % at most 32 bits - erlang:phash2(Input, 1 bsl Bits). - --spec make_keyspace_filter(emqx_ds:replay(), keymapper()) -> keyspace_filter(). -make_keyspace_filter({TopicFilter, StartTime}, Keymapper) -> - Bitstring = compute_bitstring(TopicFilter, StartTime, Keymapper), - HashBitmask = compute_topic_bitmask(TopicFilter, Keymapper), - TimeBitmask = compute_time_bitmask(Keymapper), - HashBitfilter = Bitstring band HashBitmask, - TimeBitfilter = Bitstring band TimeBitmask, - #filter{ - keymapper = Keymapper, - topic_filter = TopicFilter, - start_time = StartTime, - hash_bitfilter = HashBitfilter, - hash_bitmask = HashBitmask, - time_bitfilter = TimeBitfilter, - time_bitmask = TimeBitmask - }. - --spec compute_initial_seek(keyspace_filter()) -> integer(). -compute_initial_seek(#filter{hash_bitfilter = HashBitfilter, time_bitfilter = TimeBitfilter}) -> - % Should be the same as `compute_initial_seek(0, Filter)`. - HashBitfilter bor TimeBitfilter. - --spec compute_next_seek(integer(), keyspace_filter()) -> integer(). -compute_next_seek( - Bitstring, - Filter = #filter{ - hash_bitfilter = HashBitfilter, - hash_bitmask = HashBitmask, - time_bitfilter = TimeBitfilter, - time_bitmask = TimeBitmask - } -) -> - HashMatches = topic_hash_matches(Bitstring, HashBitfilter, HashBitmask), - TimeMatches = time_matches(Bitstring, TimeBitfilter, TimeBitmask), - compute_next_seek(HashMatches, TimeMatches, Bitstring, Filter). 
- -%%================================================================================ -%% Internal functions -%%================================================================================ - -compute_bitstring(Topic, Timestamp, [{timestamp, Offset, Size} | Rest], Acc) -> - I = (Timestamp bsr Offset) band ones(Size), - compute_bitstring(Topic, Timestamp, Rest, bitwise_concat(Acc, I, Size)); -compute_bitstring([], Timestamp, [{hash, level, Size} | Rest], Acc) -> - I = hash(<<"/">>, Size), - compute_bitstring([], Timestamp, Rest, bitwise_concat(Acc, I, Size)); -compute_bitstring([Level | Tail], Timestamp, [{hash, level, Size} | Rest], Acc) -> - I = hash(Level, Size), - compute_bitstring(Tail, Timestamp, Rest, bitwise_concat(Acc, I, Size)); -compute_bitstring(Tail, Timestamp, [{hash, levels, Size} | Rest], Acc) -> - I = hash(Tail, Size), - compute_bitstring(Tail, Timestamp, Rest, bitwise_concat(Acc, I, Size)); -compute_bitstring(_, _, [], Acc) -> - Acc. - -compute_topic_bitmask(Filter, [{timestamp, _, Size} | Rest], Acc) -> - compute_topic_bitmask(Filter, Rest, bitwise_concat(Acc, 0, Size)); -compute_topic_bitmask(['#'], [{hash, _, Size} | Rest], Acc) -> - compute_topic_bitmask(['#'], Rest, bitwise_concat(Acc, 0, Size)); -compute_topic_bitmask(['+' | Tail], [{hash, _, Size} | Rest], Acc) -> - compute_topic_bitmask(Tail, Rest, bitwise_concat(Acc, 0, Size)); -compute_topic_bitmask([], [{hash, level, Size} | Rest], Acc) -> - compute_topic_bitmask([], Rest, bitwise_concat(Acc, ones(Size), Size)); -compute_topic_bitmask([_ | Tail], [{hash, level, Size} | Rest], Acc) -> - compute_topic_bitmask(Tail, Rest, bitwise_concat(Acc, ones(Size), Size)); -compute_topic_bitmask(Tail, [{hash, levels, Size} | Rest], Acc) -> - Mask = - case lists:member('+', Tail) orelse lists:member('#', Tail) of - true -> 0; - false -> ones(Size) - end, - compute_topic_bitmask([], Rest, bitwise_concat(Acc, Mask, Size)); -compute_topic_bitmask(_, [], Acc) -> - Acc. 
- -compute_time_bitmask([{timestamp, _, Size} | Rest], Acc) -> - compute_time_bitmask(Rest, bitwise_concat(Acc, ones(Size), Size)); -compute_time_bitmask([{hash, _, Size} | Rest], Acc) -> - compute_time_bitmask(Rest, bitwise_concat(Acc, 0, Size)); -compute_time_bitmask([], Acc) -> - Acc. - -bitwise_concat(Acc, Item, ItemSize) -> - (Acc bsl ItemSize) bor Item. - -ones(Bits) -> - 1 bsl Bits - 1. - --spec successor(key()) -> key(). -successor(Key) -> - <>. - -%% |123|345|678| -%% foo bar baz - -%% |123|000|678| - |123|fff|678| - -%% foo + baz - -%% |fff|000|fff| - -%% |123|000|678| - -%% |123|056|678| & |fff|000|fff| = |123|000|678|. - -match_next( - Bitstring, - Value, - Filter = #filter{ - topic_filter = TopicFilter, - hash_bitfilter = HashBitfilter, - hash_bitmask = HashBitmask, - time_bitfilter = TimeBitfilter, - time_bitmask = TimeBitmask - } -) -> - HashMatches = topic_hash_matches(Bitstring, HashBitfilter, HashBitmask), - TimeMatches = time_matches(Bitstring, TimeBitfilter, TimeBitmask), - case HashMatches and TimeMatches of - true -> - Message = {Topic, _Payload} = unwrap_message_value(Value), - case emqx_topic:match(Topic, TopicFilter) of - true -> - Message; - false -> - next - end; - false -> - compute_next_seek(HashMatches, TimeMatches, Bitstring, Filter) - end. - -%% `Bitstring` is out of the hash space defined by `HashBitfilter`. -compute_next_seek( - _HashMatches = false, - _TimeMatches, - Bitstring, - Filter = #filter{ - keymapper = Keymapper, - hash_bitfilter = HashBitfilter, - hash_bitmask = HashBitmask, - time_bitfilter = TimeBitfilter, - time_bitmask = TimeBitmask - } -) -> - NextBitstring = compute_topic_seek(Bitstring, HashBitfilter, HashBitmask, Keymapper), - case NextBitstring of - none -> - none; - _ -> - TimeMatches = time_matches(NextBitstring, TimeBitfilter, TimeBitmask), - compute_next_seek(true, TimeMatches, NextBitstring, Filter) - end; -%% `Bitstring` is out of the time range defined by `TimeBitfilter`. 
-compute_next_seek( - _HashMatches = true, - _TimeMatches = false, - Bitstring, - #filter{ - time_bitfilter = TimeBitfilter, - time_bitmask = TimeBitmask - } -) -> - compute_time_seek(Bitstring, TimeBitfilter, TimeBitmask); -compute_next_seek(true, true, Bitstring, _It) -> - Bitstring. - -topic_hash_matches(Bitstring, HashBitfilter, HashBitmask) -> - (Bitstring band HashBitmask) == HashBitfilter. - -time_matches(Bitstring, TimeBitfilter, TimeBitmask) -> - (Bitstring band TimeBitmask) >= TimeBitfilter. - -compute_time_seek(Bitstring, TimeBitfilter, TimeBitmask) -> - % Replace the bits of the timestamp in `Bistring` with bits from `Timebitfilter`. - (Bitstring band (bnot TimeBitmask)) bor TimeBitfilter. - -%% Find the closest bitstring which is: -%% * greater than `Bitstring`, -%% * and falls into the hash space defined by `HashBitfilter`. -%% Note that the result can end up "back" in time and out of the time range. -compute_topic_seek(Bitstring, HashBitfilter, HashBitmask, Keymapper) -> - Sources = Keymapper#keymapper.source, - Size = Keymapper#keymapper.bitsize, - compute_topic_seek(Bitstring, HashBitfilter, HashBitmask, Sources, Size). - -compute_topic_seek(Bitstring, HashBitfilter, HashBitmask, Sources, Size) -> - % NOTE - % We're iterating through `Substring` here, in lockstep with `HashBitfilter` - % and `HashBitmask`, starting from least signigicant bits. Each bitsource in - % `Sources` has a bitsize `S` and, accordingly, gives us a sub-bitstring `S` - % bits long which we interpret as a "digit". There are 2 flavors of those - % "digits": - % * regular digit with 2^S possible values, - % * degenerate digit with exactly 1 possible value U (represented with 0). - % Our goal here is to find a successor of `Bistring` and perform a kind of - % digit-by-digit addition operation with carry propagation. 
- NextSeek = zipfoldr3( - fun(Source, Substring, Filter, LBitmask, Offset, Acc) -> - case Source of - {hash, _, S} when LBitmask =:= 0 -> - % Regular case - bitwise_add_digit(Substring, Acc, S, Offset); - {hash, _, _} when LBitmask =/= 0, Substring < Filter -> - % Degenerate case, I_digit < U, no overflow. - % Successor is `U bsl Offset` which is equivalent to 0. - 0; - {hash, _, S} when LBitmask =/= 0, Substring > Filter -> - % Degenerate case, I_digit > U, overflow. - % Successor is `(1 bsl Size + U) bsl Offset`. - overflow_digit(S, Offset); - {hash, _, S} when LBitmask =/= 0 -> - % Degenerate case, I_digit = U - % Perform digit addition with I_digit = 0, assuming "digit" has - % 0 bits of information (but is `S` bits long at the same time). - % This will overflow only if the result of previous iteration - % was an overflow. - bitwise_add_digit(0, Acc, 0, S, Offset); - {timestamp, _, S} -> - % Regular case - bitwise_add_digit(Substring, Acc, S, Offset) - end - end, - 0, - Bitstring, - HashBitfilter, - HashBitmask, - Size, - Sources - ), - case NextSeek bsr Size of - _Carry = 0 -> - % Found the successor. - % We need to recover values of those degenerate digits which we - % represented with 0 during digit-by-digit iteration. - NextSeek bor (HashBitfilter band HashBitmask); - _Carry = 1 -> - % We got "carried away" past the range, time to stop iteration. - none - end. - -bitwise_add_digit(Digit, Number, Width, Offset) -> - bitwise_add_digit(Digit, Number, Width, Width, Offset). - -%% Add "digit" (represented with integer `Digit`) to the `Number` assuming -%% this digit starts at `Offset` bits in `Number` and is `Width` bits long. -%% Perform an overflow if the result of addition would not fit into `Bits` -%% bits. -bitwise_add_digit(Digit, Number, Bits, Width, Offset) -> - Sum = (Digit bsl Offset) + Number, - case (Sum bsr Offset) < (1 bsl Bits) of - true -> Sum; - false -> overflow_digit(Width, Offset) - end. 
- -%% Constuct a number which denotes an overflow of digit that starts at -%% `Offset` bits and is `Width` bits long. -overflow_digit(Width, Offset) -> - (1 bsl Width) bsl Offset. - -%% Iterate through sub-bitstrings of 3 integers in lockstep, starting from least -%% significant bits first. -%% -%% Each integer is assumed to be `Size` bits long. Lengths of sub-bitstring are -%% specified in `Sources` list, in order from most significant bits to least -%% significant. Each iteration calls `FoldFun` with: -%% * bitsource that was used to extract sub-bitstrings, -%% * 3 sub-bitstrings in integer representation, -%% * bit offset into integers, -%% * current accumulator. --spec zipfoldr3(FoldFun, Acc, integer(), integer(), integer(), _Size :: bits(), [bitsource()]) -> - Acc -when - FoldFun :: fun((bitsource(), integer(), integer(), integer(), _Offset :: bits(), Acc) -> Acc). -zipfoldr3(_FoldFun, Acc, _, _, _, 0, []) -> - Acc; -zipfoldr3(FoldFun, Acc, I1, I2, I3, Offset, [Source = {_, _, S} | Rest]) -> - OffsetNext = Offset - S, - AccNext = zipfoldr3(FoldFun, Acc, I1, I2, I3, OffsetNext, Rest), - FoldFun( - Source, - substring(I1, OffsetNext, S), - substring(I2, OffsetNext, S), - substring(I3, OffsetNext, S), - OffsetNext, - AccNext - ). - -substring(I, Offset, Size) -> - (I bsr Offset) band ones(Size). - -%% @doc Generate a column family ID for the MQTT messages --spec data_cf(emqx_ds_storage_layer:gen_id()) -> [char()]. -data_cf(GenId) -> - ?MODULE_STRING ++ integer_to_list(GenId). - -make_refresh_counter({every, N}) when is_integer(N), N > 0 -> - {0, N}; -make_refresh_counter(undefined) -> - undefined. - -maybe_refresh_iterator(It = #it{refresh_counter = {N, N}}) -> - refresh_iterator(It#it{refresh_counter = {0, N}}); -maybe_refresh_iterator(It = #it{refresh_counter = {M, N}}) -> - It#it{refresh_counter = {M + 1, N}}; -maybe_refresh_iterator(It = #it{refresh_counter = undefined}) -> - It. - -stop_iteration(It) -> - ok = rocksdb:iterator_close(It#it.handle), - none. 
diff --git a/apps/emqx_durable_storage/test/emqx_ds_storage_bitfield_lts_SUITE.erl b/apps/emqx_durable_storage/test/emqx_ds_storage_bitfield_lts_SUITE.erl index 957383f30..ac037e861 100644 --- a/apps/emqx_durable_storage/test/emqx_ds_storage_bitfield_lts_SUITE.erl +++ b/apps/emqx_durable_storage/test/emqx_ds_storage_bitfield_lts_SUITE.erl @@ -129,7 +129,7 @@ t_get_streams(_Config) -> t_replay(_Config) -> %% Create concrete topics: Topics = [<<"foo/bar">>, <<"foo/bar/baz">>], - Timestamps = lists:seq(1, 10), + Timestamps = lists:seq(1, 10_000, 100), Batch1 = [ make_message(PublishedAt, Topic, integer_to_binary(PublishedAt)) || Topic <- Topics, PublishedAt <- Timestamps @@ -140,10 +140,10 @@ t_replay(_Config) -> begin B = integer_to_binary(I), make_message( - TS, <<"wildcard/", B/binary, "/suffix/", Suffix/binary>>, integer_to_binary(TS) + TS, <<"wildcard/", B/binary, "/suffix/", Suffix/binary>>, integer_to_binary(TS) ) end - || I <- lists:seq(1, 200), TS <- lists:seq(1, 10), Suffix <- [<<"foo">>, <<"bar">>] + || I <- lists:seq(1, 200), TS <- Timestamps, Suffix <- [<<"foo">>, <<"bar">>] ], ok = emqx_ds_storage_layer:store_batch(?SHARD, Batch2, []), %% Check various topic filters: @@ -158,6 +158,9 @@ t_replay(_Config) -> ?assert(check(?SHARD, <<"foo/+/+">>, 0, Messages)), ?assert(check(?SHARD, <<"+/+/+">>, 0, Messages)), ?assert(check(?SHARD, <<"+/+/baz">>, 0, Messages)), + %% Restart shard to make sure trie is persisted and restored: + ok = emqx_ds_storage_layer_sup:stop_shard(?SHARD), + {ok, _} = emqx_ds_storage_layer_sup:start_shard(?SHARD, #{}), %% Learned wildcard topics: ?assertNot(check(?SHARD, <<"wildcard/1000/suffix/foo">>, 0, [])), ?assert(check(?SHARD, <<"wildcard/1/suffix/foo">>, 0, Messages)), @@ -179,23 +182,24 @@ check(Shard, TopicFilter, StartTime, ExpectedMessages) -> ExpectedMessages ), ?check_trace( - #{timetrap => 10_000}, - begin - Dump = dump_messages(Shard, TopicFilter, StartTime), - verify_dump(TopicFilter, StartTime, Dump), - Missing = 
ExpectedFiltered -- Dump, - Extras = Dump -- ExpectedFiltered, - ?assertMatch( - #{missing := [], unexpected := []}, - #{ - missing => Missing, - unexpected => Extras, - topic_filter => TopicFilter, - start_time => StartTime - } - ) - end, - []), + #{timetrap => 10_000}, + begin + Dump = dump_messages(Shard, TopicFilter, StartTime), + verify_dump(TopicFilter, StartTime, Dump), + Missing = ExpectedFiltered -- Dump, + Extras = Dump -- ExpectedFiltered, + ?assertMatch( + #{missing := [], unexpected := []}, + #{ + missing => Missing, + unexpected => Extras, + topic_filter => TopicFilter, + start_time => StartTime + } + ) + end, + [] + ), length(ExpectedFiltered) > 0. verify_dump(TopicFilter, StartTime, Dump) -> @@ -227,78 +231,26 @@ dump_messages(Shard, TopicFilter, StartTime) -> ). dump_stream(Shard, Stream, TopicFilter, StartTime) -> - BatchSize = 3, + BatchSize = 100, {ok, Iterator} = emqx_ds_storage_layer:make_iterator( Shard, Stream, parse_topic(TopicFilter), StartTime ), - Loop = fun F(It, 0) -> - error({too_many_iterations, It}); - F(It, N) -> - case emqx_ds_storage_layer:next(Shard, It, BatchSize) of - end_of_stream -> - []; - {ok, _NextIt, []} -> - []; - {ok, NextIt, Batch} -> - Batch ++ F(NextIt, N - 1) - end + Loop = fun + F(It, 0) -> + error({too_many_iterations, It}); + F(It, N) -> + case emqx_ds_storage_layer:next(Shard, It, BatchSize) of + end_of_stream -> + []; + {ok, _NextIt, []} -> + []; + {ok, NextIt, Batch} -> + Batch ++ F(NextIt, N - 1) + end end, - MaxIterations = 1000, + MaxIterations = 1000000, Loop(Iterator, MaxIterations). 
-%% Smoke test for iteration with wildcard topic filter -%% t_iterate_wildcard(_Config) -> -%% %% Prepare data: -%% Topics = ["foo/bar", "foo/bar/baz", "a", "a/bar"], -%% Timestamps = lists:seq(1, 10), -%% _ = [ -%% store(?SHARD, PublishedAt, Topic, term_to_binary({Topic, PublishedAt})) -%% || Topic <- Topics, PublishedAt <- Timestamps -%% ], -%% ?assertEqual( -%% lists:sort([{Topic, PublishedAt} || Topic <- Topics, PublishedAt <- Timestamps]), -%% lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "#", 0)]) -%% ), -%% ?assertEqual( -%% [], -%% lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "#", 10 + 1)]) -%% ), -%% ?assertEqual( -%% lists:sort([{Topic, PublishedAt} || Topic <- Topics, PublishedAt <- lists:seq(5, 10)]), -%% lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "#", 5)]) -%% ), -%% ?assertEqual( -%% lists:sort([ -%% {Topic, PublishedAt} -%% || Topic <- ["foo/bar", "foo/bar/baz"], PublishedAt <- Timestamps -%% ]), -%% lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "foo/#", 0)]) -%% ), -%% ?assertEqual( -%% lists:sort([{"foo/bar", PublishedAt} || PublishedAt <- Timestamps]), -%% lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "foo/+", 0)]) -%% ), -%% ?assertEqual( -%% [], -%% lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "foo/+/bar", 0)]) -%% ), -%% ?assertEqual( -%% lists:sort([ -%% {Topic, PublishedAt} -%% || Topic <- ["foo/bar", "foo/bar/baz", "a/bar"], PublishedAt <- Timestamps -%% ]), -%% lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "+/bar/#", 0)]) -%% ), -%% ?assertEqual( -%% lists:sort([{Topic, PublishedAt} || Topic <- ["a", "a/bar"], PublishedAt <- Timestamps]), -%% lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "a/#", 0)]) -%% ), -%% ?assertEqual( -%% [], -%% lists:sort([binary_to_term(Payload) || Payload <- iterate(?SHARD, "a/+/+", 0)]) -%% ), -%% ok. 
- %% t_create_gen(_Config) -> %% {ok, 1} = emqx_ds_storage_layer:create_generation(?SHARD, 5, ?DEFAULT_CONFIG), %% ?assertEqual( From 465e8a90ddb8279dd6992edb0ef9204cb96e0744 Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Mon, 16 Oct 2023 10:17:44 +0200 Subject: [PATCH 088/155] revert(ds): Remove change from the old protocol file --- .../emqx_persistent_session_ds_proto_v1.erl | 19 +------------------ scripts/check-elixir-applications.exs | 2 +- scripts/check-elixir-deps-discrepancies.exs | 2 +- ...elixir-emqx-machine-boot-discrepancies.exs | 2 +- scripts/check_missing_reboot_apps.exs | 2 +- 5 files changed, 5 insertions(+), 22 deletions(-) diff --git a/apps/emqx/src/proto/emqx_persistent_session_ds_proto_v1.erl b/apps/emqx/src/proto/emqx_persistent_session_ds_proto_v1.erl index d9b882f3d..d35ccd963 100644 --- a/apps/emqx/src/proto/emqx_persistent_session_ds_proto_v1.erl +++ b/apps/emqx/src/proto/emqx_persistent_session_ds_proto_v1.erl @@ -23,8 +23,7 @@ open_iterator/4, close_iterator/2, - close_all_iterators/2, - get_streams/5 + close_all_iterators/2 ]). -include_lib("emqx/include/bpapi.hrl"). @@ -51,22 +50,6 @@ open_iterator(Nodes, TopicFilter, StartMS, IteratorID) -> ?TIMEOUT ). --spec get_streams( - node(), - emqx_ds:keyspace(), - emqx_ds:shard_id(), - emqx_ds:topic_filter(), - emqx_ds:time() -) -> - [emqx_ds_storage_layer:stream()]. -get_streams(Node, Keyspace, ShardId, TopicFilter, StartTime) -> - erpc:call( - Node, - emqx_ds_storage_layer, - get_streams, - [Keyspace, ShardId, TopicFilter, StartTime] - ). - -spec close_iterator( [node()], emqx_ds:iterator_id() diff --git a/scripts/check-elixir-applications.exs b/scripts/check-elixir-applications.exs index 1e604c69f..42c838199 100755 --- a/scripts/check-elixir-applications.exs +++ b/scripts/check-elixir-applications.exs @@ -1,4 +1,4 @@ -#! 
/usr/bin/env elixir +#!/usr/bin/env elixir defmodule CheckElixirApplications do alias EMQXUmbrella.MixProject diff --git a/scripts/check-elixir-deps-discrepancies.exs b/scripts/check-elixir-deps-discrepancies.exs index 1363219ed..408079d7d 100755 --- a/scripts/check-elixir-deps-discrepancies.exs +++ b/scripts/check-elixir-deps-discrepancies.exs @@ -1,4 +1,4 @@ -#! /usr/bin/env elixir +#!/usr/bin/env elixir # ensure we have a fresh rebar.lock diff --git a/scripts/check-elixir-emqx-machine-boot-discrepancies.exs b/scripts/check-elixir-emqx-machine-boot-discrepancies.exs index 9ffdc47bf..d07e6978f 100755 --- a/scripts/check-elixir-emqx-machine-boot-discrepancies.exs +++ b/scripts/check-elixir-emqx-machine-boot-discrepancies.exs @@ -1,4 +1,4 @@ -#! /usr/bin/env elixir +#!/usr/bin/env elixir defmodule CheckElixirEMQXMachineBootDiscrepancies do alias EMQXUmbrella.MixProject diff --git a/scripts/check_missing_reboot_apps.exs b/scripts/check_missing_reboot_apps.exs index 7f2178ec1..91d4b39ea 100755 --- a/scripts/check_missing_reboot_apps.exs +++ b/scripts/check_missing_reboot_apps.exs @@ -1,4 +1,4 @@ -#! 
/usr/bin/env elixir +#!/usr/bin/env elixir alias EMQXUmbrella.MixProject From 87689890ff8dbb68c96fe755b4e1f6ce6f801092 Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Mon, 16 Oct 2023 10:47:51 +0200 Subject: [PATCH 089/155] chore(ds): Fix linter and compilation warnings --- apps/emqx_durable_storage/src/emqx_ds.erl | 1 + .../src/emqx_ds_bitmask_keymapper.erl | 18 ++++++--- apps/emqx_durable_storage/src/emqx_ds_lts.erl | 8 +++- .../src/emqx_ds_storage_bitfield_lts.erl | 37 ++++++++++++++----- .../src/emqx_ds_storage_reference.erl | 2 +- .../emqx_ds_storage_bitfield_lts_SUITE.erl | 30 --------------- 6 files changed, 49 insertions(+), 47 deletions(-) diff --git a/apps/emqx_durable_storage/src/emqx_ds.erl b/apps/emqx_durable_storage/src/emqx_ds.erl index b1a003e93..941573bf8 100644 --- a/apps/emqx_durable_storage/src/emqx_ds.erl +++ b/apps/emqx_durable_storage/src/emqx_ds.erl @@ -43,6 +43,7 @@ stream/0, stream_rank/0, iterator/0, + message_id/0, next_result/1, next_result/0, store_batch_result/0, make_iterator_result/1, make_iterator_result/0 diff --git a/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl b/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl index a512a141c..e18c8498d 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl @@ -105,6 +105,8 @@ ]} ). +-elvis([{elvis_style, no_if_expression, disable}]). + -ifdef(TEST). -include_lib("proper/include/proper.hrl"). -include_lib("eunit/include/eunit.hrl"). @@ -139,7 +141,9 @@ dst_offset :: offset() }). --type scanner() :: [[#scan_action{}]]. +-type scan_action() :: #scan_action{}. + +-type scanner() :: [[scan_action()]]. -record(keymapper, { schema :: [bitsource()], @@ -259,7 +263,9 @@ key_to_bitstring(#keymapper{size = Size}, Key) -> %% @doc Create a filter object that facilitates range scans. -spec make_filter(keymapper(), [scalar_range()]) -> filter(). 
-make_filter(KeyMapper = #keymapper{schema = Schema, dim_sizeof = DimSizeof, size = Size}, Filter0) -> +make_filter( + KeyMapper = #keymapper{schema = Schema, dim_sizeof = DimSizeof, size = TotalSize}, Filter0 +) -> NDim = length(DimSizeof), %% Transform "symbolic" inequations to ranges: Filter1 = inequations_to_ranges(KeyMapper, Filter0), @@ -326,7 +332,7 @@ make_filter(KeyMapper = #keymapper{schema = Schema, dim_sizeof = DimSizeof, size end, %% Final value #filter{ - size = Size, + size = TotalSize, bitmask = Bitmask, bitfilter = Bitfilter, bitsource_ranges = Ranges, @@ -420,7 +426,7 @@ ratchet_scan(Ranges, NDim, Key, I, Pivot0, Carry) -> %% Note: this function operates in bitsource basis, scanning it from %% NDim to 0. It applies the transformation specified by %% `ratchet_scan'. -ratchet_do(Ranges, Key, I, _Pivot, _Increment) when I < 0 -> +ratchet_do(_Ranges, _Key, I, _Pivot, _Increment) when I < 0 -> 0; ratchet_do(Ranges, Key, I, Pivot, Increment) -> #filter_scan_action{offset = Offset, size = Size, min = Min} = array:get(I, Ranges), @@ -495,12 +501,12 @@ do_vector_to_key([Action | Actions], Scanner, Coord, Vector, Acc0) -> Acc = Acc0 bor extract(Coord, Action), do_vector_to_key(Actions, Scanner, Coord, Vector, Acc). --spec extract(_Source :: scalar(), #scan_action{}) -> integer(). +-spec extract(_Source :: scalar(), scan_action()) -> integer(). extract(Src, #scan_action{src_bitmask = SrcBitmask, src_offset = SrcOffset, dst_offset = DstOffset}) -> ((Src bsr SrcOffset) band SrcBitmask) bsl DstOffset. %% extract^-1 --spec extract_inv(_Dest :: scalar(), #scan_action{}) -> integer(). +-spec extract_inv(_Dest :: scalar(), scan_action()) -> integer(). 
extract_inv(Dest, #scan_action{ src_bitmask = SrcBitmask, src_offset = SrcOffset, dst_offset = DestOffset }) -> diff --git a/apps/emqx_durable_storage/src/emqx_ds_lts.erl b/apps/emqx_durable_storage/src/emqx_ds_lts.erl index c9a73e3e0..d06854fd0 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_lts.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_lts.erl @@ -32,6 +32,8 @@ -include_lib("eunit/include/eunit.hrl"). -endif. +-elvis([{elvis_style, variable_naming_convention, disable}]). + %%================================================================================ %% Type declarations %%================================================================================ @@ -601,7 +603,11 @@ test_key(Trie, Threshold, Topic0) -> fun(Old) -> case Old =:= Topic of true -> Old; - false -> error(#{'$msg' => "Duplicate key!", key => Ret, old_topic => Old, new_topic => Topic}) + false -> error(#{ '$msg' => "Duplicate key!" + , key => Ret + , old_topic => Old + , new_topic => Topic + }) end end, Topic, diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl index 8d406c93e..b85fb48b0 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl @@ -21,7 +21,7 @@ %% used for testing. -module(emqx_ds_storage_bitfield_lts). --behavior(emqx_ds_storage_layer). +-behaviour(emqx_ds_storage_layer). %% API: -export([]). @@ -65,6 +65,8 @@ keymappers :: array:array(emqx_ds_bitmask_keymapper:keymapper()) }). +-type s() :: #s{}. + -record(stream, { storage_key :: emqx_ds_lts:msg_storage_key() }). @@ -76,9 +78,7 @@ last_seen_key = <<>> :: binary() }). --define(QUICKCHECK_KEY(KEY, BITMASK, BITFILTER), - ((KEY band BITMASK) =:= BITFILTER) -). +-type iterator() :: #it{}. -define(COUNTER, emqx_ds_storage_bitfield_lts_counter). 
@@ -92,6 +92,13 @@ %% behavior callbacks %%================================================================================ +-spec create( + emqx_ds_replication_layer:shard_id(), + rocksdb:db_handle(), + emqx_ds_storage_layer:gen_id(), + options() +) -> + {schema(), emqx_ds_storage_layer:cf_refs()}. create(_ShardId, DBHandle, GenId, Options) -> %% Get options: BitsPerTopicLevel = maps:get(bits_per_wildcard_level, Options, 64), @@ -112,6 +119,14 @@ create(_ShardId, DBHandle, GenId, Options) -> }, {Schema, [{DataCFName, DataCFHandle}, {TrieCFName, TrieCFHandle}]}. +-spec open( + emqx_ds_replication_layer:shard_id(), + rocksdb:db_handle(), + emqx_ds_storage_layer:gen_id(), + emqx_ds_storage_layer:cf_refs(), + schema() +) -> + s(). open(_Shard, DBHandle, GenId, CFRefs, Schema) -> #{ bits_per_wildcard_level := BitsPerTopicLevel, @@ -134,6 +149,10 @@ open(_Shard, DBHandle, GenId, CFRefs, Schema) -> ), #s{db = DBHandle, data = DataCF, trie = Trie, keymappers = KeymapperCache}. +-spec store_batch( + emqx_ds_replication_layer:shard_id(), s(), [emqx_types:message()], emqx_ds:message_store_opts() +) -> + emqx_ds:store_batch_result(). store_batch(_ShardId, S = #s{db = DB, data = Data}, Messages, _Options) -> lists:foreach( fun(Msg) -> @@ -203,7 +222,7 @@ next(_Shard, #s{db = DB, data = CF, keymappers = Keymappers}, It0, BatchSize) -> %% Internal functions %%================================================================================ -next_loop(ITHandle, KeyMapper, Filter, It, Acc, 0) -> +next_loop(_ITHandle, _KeyMapper, _Filter, It, Acc, 0) -> {ok, It, lists:reverse(Acc)}; next_loop(ITHandle, KeyMapper, Filter, It0, Acc0, N0) -> inc_counter(), @@ -249,8 +268,8 @@ traverse_interval(ITHandle, KeyMapper, Filter, It0, Acc, N) -> {0, It0, Acc} end. --spec check_message(emqx_ds_bitmask_keymapper:filter(), #it{}, binary()) -> - {true, #message{}} | false. +-spec check_message(emqx_ds_bitmask_keymapper:filter(), iterator(), binary()) -> + {true, emqx_types:message()} | false. 
check_message(Filter, #it{last_seen_key = Key}, Val) -> case emqx_ds_bitmask_keymapper:bin_checkmask(Filter, Key) of true -> @@ -270,7 +289,7 @@ format_keyfilter(any) -> format_keyfilter({Op, Val}) -> {Op, integer_to_list(Val, 16)}. --spec make_key(#s{}, #message{}) -> {binary(), [binary()]}. +-spec make_key(s(), emqx_types:message()) -> {binary(), [binary()]}. make_key(#s{keymappers = KeyMappers, trie = Trie}, #message{timestamp = Timestamp, topic = TopicBin}) -> Tokens = emqx_topic:tokens(TopicBin), {TopicIndex, Varying} = emqx_ds_lts:topic_key(Trie, fun threshold_fun/1, Tokens), @@ -345,7 +364,7 @@ read_persisted_trie(IT, {ok, KeyB, ValB}) -> {binary_to_term(KeyB), binary_to_term(ValB)} | read_persisted_trie(IT, rocksdb:iterator_move(IT, next)) ]; -read_persisted_trie(IT, {error, invalid_iterator}) -> +read_persisted_trie(_IT, {error, invalid_iterator}) -> []. inc_counter() -> diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_reference.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_reference.erl index 9c7fc3158..ec00f1310 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_reference.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_reference.erl @@ -21,7 +21,7 @@ %% used for testing. -module(emqx_ds_storage_reference). --behavior(emqx_ds_storage_layer). +-behaviour(emqx_ds_storage_layer). %% API: -export([]). diff --git a/apps/emqx_durable_storage/test/emqx_ds_storage_bitfield_lts_SUITE.erl b/apps/emqx_durable_storage/test/emqx_ds_storage_bitfield_lts_SUITE.erl index ac037e861..6dc24a269 100644 --- a/apps/emqx_durable_storage/test/emqx_ds_storage_bitfield_lts_SUITE.erl +++ b/apps/emqx_durable_storage/test/emqx_ds_storage_bitfield_lts_SUITE.erl @@ -354,36 +354,6 @@ store(Shard, PublishedAt, Topic, Payload) -> }, emqx_ds_storage_layer:message_store(Shard, [Msg], #{}). 
-%% iterate(Shard, TopicFilter, StartTime) -> -%% Streams = emqx_ds_storage_layer:get_streams(Shard, TopicFilter, StartTime), -%% lists:flatmap( -%% fun(Stream) -> -%% iterate(Shard, iterator(Shard, Stream, TopicFilter, StartTime)) -%% end, -%% Streams). - -%% iterate(Shard, It) -> -%% case emqx_ds_storage_layer:next(Shard, It) of -%% {ok, ItNext, [#message{payload = Payload}]} -> -%% [Payload | iterate(Shard, ItNext)]; -%% end_of_stream -> -%% [] -%% end. - -%% iterate(_Shard, end_of_stream, _N) -> -%% {end_of_stream, []}; -%% iterate(Shard, It, N) -> -%% case emqx_ds_storage_layer:next(Shard, It, N) of -%% {ok, ItFinal, Messages} -> -%% {ItFinal, [Payload || #message{payload = Payload} <- Messages]}; -%% end_of_stream -> -%% {end_of_stream, []} -%% end. - -%% iterator(Shard, Stream, TopicFilter, StartTime) -> -%% {ok, It} = emqx_ds_storage_layer:make_iterator(Shard, Stream, parse_topic(TopicFilter), StartTime), -%% It. - payloads(Messages) -> lists:map( fun(#message{payload = P}) -> From e745e42093656e4f123ae6780d3257649d1d5001 Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Wed, 18 Oct 2023 01:07:21 +0200 Subject: [PATCH 090/155] test(ds): Explore full range of keys when testing ratchet function --- apps/emqx_durable_storage/src/emqx_ds.erl_ | 189 ------- .../src/emqx_ds_bitmask_keymapper.erl | 42 +- .../emqx_ds_message_storage_bitmask_shim.erl | 17 +- .../props/prop_replay_message_storage.erl | 463 ------------------ 4 files changed, 40 insertions(+), 671 deletions(-) delete mode 100644 apps/emqx_durable_storage/src/emqx_ds.erl_ delete mode 100644 apps/emqx_durable_storage/test/props/prop_replay_message_storage.erl diff --git a/apps/emqx_durable_storage/src/emqx_ds.erl_ b/apps/emqx_durable_storage/src/emqx_ds.erl_ deleted file mode 100644 index 1acbcc7c7..000000000 --- a/apps/emqx_durable_storage/src/emqx_ds.erl_ +++ /dev/null @@ -1,189 +0,0 @@ -%%-------------------------------------------------------------------- -%% 
Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. -%% -%% Licensed under the Apache License, Version 2.0 (the "License"); -%% you may not use this file except in compliance with the License. -%% You may obtain a copy of the License at -%% -%% http://www.apache.org/licenses/LICENSE-2.0 -%% -%% Unless required by applicable law or agreed to in writing, software -%% distributed under the License is distributed on an "AS IS" BASIS, -%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -%% See the License for the specific language governing permissions and -%% limitations under the License. -%%-------------------------------------------------------------------- --module(emqx_ds). - --include_lib("stdlib/include/ms_transform.hrl"). --include_lib("snabbkaffe/include/snabbkaffe.hrl"). - -%% API: --export([ensure_shard/2]). -%% Messages: --export([message_store/2, message_store/1, message_stats/0]). -%% Iterator: --export([get_streams/3, open_iterator/1, next/2]). - -%% internal exports: --export([]). - --export_type([ - stream/0, - keyspace/0, - message_id/0, - message_stats/0, - message_store_opts/0, - replay/0, - replay_id/0, - %iterator_id/0, - iterator/0, - topic/0, - topic_filter/0, - time/0 -]). - -%%================================================================================ -%% Type declarations -%%================================================================================ - -%% This record enapsulates the stream entity from the storage level. -%% -%% TODO: currently the stream is hardwired to only support the -%% internal rocksdb storage. In t he future we want to add another -%% implementations for emqx_ds, so this type has to take this into -%% account. --record(stream, - { shard :: emqx_ds:shard() - , :: emqx_ds_storage_layer:stream() - }). - --opaque stream() :: #stream{}. - --type iterator() :: term(). - -%-type iterator_id() :: binary(). - --type message_store_opts() :: #{}. - --type message_stats() :: #{}. 
- --type message_id() :: binary(). - -%% Parsed topic. --type topic() :: list(binary()). - -%% Parsed topic filter. --type topic_filter() :: list(binary() | '+' | '#' | ''). - --type keyspace() :: atom(). --type shard_id() :: binary(). --type shard() :: {keyspace(), shard_id()}. - -%% Timestamp -%% Earliest possible timestamp is 0. -%% TODO granularity? Currently, we should always use micro second, as that's the unit we -%% use in emqx_guid. Otherwise, the iterators won't match the message timestamps. --type time() :: non_neg_integer(). - --type replay_id() :: binary(). - --type replay() :: { - _TopicFilter :: topic_filter(), - _StartTime :: time() -}. - -%%================================================================================ -%% API funcions -%%================================================================================ - -%% @doc Get a list of streams needed for replaying a topic filter. -%% -%% Motivation: under the hood, EMQX may store different topics at -%% different locations or even in different databases. A wildcard -%% topic filter may require pulling data from any number of locations. -%% -%% Stream is an abstraction exposed by `emqx_ds' that reflects the -%% notion that different topics can be stored differently, but hides -%% the implementation details. -%% -%% Rules: -%% -%% 1. New streams matching the topic filter can appear without notice, -%% so the replayer must periodically call this function to get the -%% updated list of streams. -%% -%% 2. Streams may depend on one another. Therefore, care should be -%% taken while replaying them in parallel to avoid out-of-order -%% replay. This function returns stream together with its -%% "coordinates": `{X, T, Stream}'. If X coordinate of two streams is -%% different, then they can be replayed in parallel. If it's the -%% same, then the stream with smaller T coordinate should be replayed -%% first. --spec get_streams(keyspace(), topic_filter(), time()) -> [{integer(), integer(), stream()}]. 
-get_streams(Keyspace, TopicFilter, StartTime) -> - ShardIds = emqx_ds_replication_layer:get_all_shards(Keyspace), - lists:flatmap( - fun(Shard) -> - Node = emqx_ds_replication_layer:shard_to_node(Shard), - try - Streams = emqx_persistent_session_ds_proto_v1:get_streams(Node, Keyspace, Shard, TopicFilter, StartTime), - [#stream{ shard = {Keyspace, ShardId} - , stream = Stream - } || Stream <- Streams] - catch - error:{erpc, _} -> - %% The caller has to periodically refresh the - %% list of streams anyway, so it's ok to ignore - %% transient errors. - [] - end - end, - ShardIds). - --spec ensure_shard(shard(), emqx_ds_storage_layer:options()) -> - ok | {error, _Reason}. -ensure_shard(Sharzd, Options) -> - case emqx_ds_storage_layer_sup:start_shard(Shard, Options) of - {ok, _Pid} -> - ok; - {error, {already_started, _Pid}} -> - ok; - {error, Reason} -> - {error, Reason} - end. - -%%-------------------------------------------------------------------------------- -%% Message -%%-------------------------------------------------------------------------------- - --spec message_store([emqx_types:message()], message_store_opts()) -> - {ok, [message_id()]} | {error, _}. -message_store(Msg, Opts) -> - message_store(Msg, Opts). - --spec message_store([emqx_types:message()]) -> {ok, [message_id()]} | {error, _}. -message_store(Msg) -> - message_store(Msg, #{}). - --spec message_stats() -> message_stats(). -message_stats() -> - #{}. - -%%-------------------------------------------------------------------------------- -%% Iterator (pull API) -%%-------------------------------------------------------------------------------- - --spec open_iterator(stream()) -> {ok, iterator()}. -open_iterator(#stream{shard = {_Keyspace, _ShardId}, stream = _StorageSpecificStream}) -> - error(todo). - --spec next(iterator(), non_neg_integer()) -> - {ok, iterator(), [emqx_types:message()]} - | end_of_stream. -next(_Iterator, _BatchSize) -> - error(todo). 
- -%%================================================================================ -%% Internal functions -%%================================================================================ diff --git a/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl b/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl index e18c8498d..90c381104 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl @@ -699,26 +699,44 @@ ratchet2_test() -> ?assertEqual(16#aaddcc00, ratchet(F2, 0)), ?assertEqual(16#aa_de_cc_00, ratchet(F2, 16#aa_dd_cd_11)). -ratchet3_test() -> - ?assert(proper:quickcheck(ratchet1_prop(), 100)). - %% erlfmt-ignore -ratchet1_prop() -> +ratchet3_test_() -> EpochBits = 4, Bitsources = [{1, 0, 2}, %% Static topic index {2, EpochBits, 4}, %% Epoch {3, 0, 2}, %% Varying topic hash {2, 0, EpochBits}], %% Timestamp offset - M = make_keymapper(lists:reverse(Bitsources)), - F1 = make_filter(M, [{'=', 2#10}, any, {'=', 2#01}]), - ?FORALL(N, integer(0, ones(12)), - ratchet_prop(F1, N)). + Keymapper = make_keymapper(lists:reverse(Bitsources)), + Filter1 = make_filter(Keymapper, [{'=', 2#10}, any, {'=', 2#01}]), + Filter2 = make_filter(Keymapper, [{'=', 2#01}, any, any]), + Filter3 = make_filter(Keymapper, [{'=', 2#01}, {'>=', 16#aa}, any]), + {timeout, 15, + [?_assert(test_iterate(Filter1, 0)), + ?_assert(test_iterate(Filter2, 0)), + %% Not starting from 0 here for simplicity, since the beginning + %% of a >= interval can't be properly checked with a bitmask: + ?_assert(test_iterate(Filter3, ratchet(Filter3, 1))) + ]}. -ratchet_prop(Filter = #filter{bitfilter = Bitfilter, bitmask = Bitmask, size = Size}, Key0) -> - Key = ratchet(Filter, Key0), +%% Note: this function iterates through the full range of keys, so its +%% complexity grows _exponentially_ with the total size of the +%% keymapper. 
+test_iterate(Filter, overflow) -> + true; +test_iterate(Filter, Key0) -> + Key = ratchet(Filter, Key0 + 1), + ?assert(ratchet_prop(Filter, Key0, Key)), + test_iterate(Filter, Key). + +ratchet_prop(Filter = #filter{bitfilter = Bitfilter, bitmask = Bitmask, size = Size}, Key0, Key) -> + %% Validate basic properties of the generated key. It must be + %% greater than the old key, and match the bitmask: ?assert(Key =:= overflow orelse (Key band Bitmask =:= Bitfilter)), - ?assert(Key >= Key0, {Key, '>=', Key}), + ?assert(Key > Key0, {Key, '>=', Key}), IMax = ones(Size), + %% Iterate through all keys between `Key0 + 1' and `Key' and + %% validate that none of them match the bitmask. Ultimately, it + %% means that `ratchet' function doesn't skip over any valid keys: CheckGaps = fun F(I) when I >= Key; I > IMax -> true; @@ -729,7 +747,7 @@ ratchet_prop(Filter = #filter{bitfilter = Bitfilter, bitmask = Bitmask, size = S ), F(I + 1) end, - CheckGaps(Key0). + CheckGaps(Key0 + 1). mkbmask(Keymapper, Filter0) -> Filter = inequations_to_ranges(Keymapper, Filter0), diff --git a/apps/emqx_durable_storage/test/props/emqx_ds_message_storage_bitmask_shim.erl b/apps/emqx_durable_storage/test/props/emqx_ds_message_storage_bitmask_shim.erl index e9daf2581..9b5af9428 100644 --- a/apps/emqx_durable_storage/test/props/emqx_ds_message_storage_bitmask_shim.erl +++ b/apps/emqx_durable_storage/test/props/emqx_ds_message_storage_bitmask_shim.erl @@ -4,9 +4,11 @@ -module(emqx_ds_message_storage_bitmask_shim). +-include_lib("emqx/include/emqx.hrl"). + -export([open/0]). -export([close/1]). --export([store/5]). +-export([store/2]). -export([iterate/2]). -type topic() :: list(binary()). @@ -25,20 +27,21 @@ close(Tab) -> true = ets:delete(Tab), ok. --spec store(t(), emqx_guid:guid(), time(), topic(), binary()) -> +-spec store(t(), emqx_types:message()) -> ok | {error, _TODO}. 
-store(Tab, MessageID, PublishedAt, Topic, Payload) -> - true = ets:insert(Tab, {{PublishedAt, MessageID}, Topic, Payload}), +store(Tab, Msg = #message{id = MessageID, timestamp = PublishedAt}) -> + true = ets:insert(Tab, {{PublishedAt, MessageID}, Msg}), ok. -spec iterate(t(), emqx_ds:replay()) -> [binary()]. -iterate(Tab, {TopicFilter, StartTime}) -> +iterate(Tab, {TopicFilter0, StartTime}) -> + TopicFilter = iolist_to_binary(lists:join("/", TopicFilter0)), ets:foldr( - fun({{PublishedAt, _}, Topic, Payload}, Acc) -> + fun({{PublishedAt, _}, Msg = #message{topic = Topic}}, Acc) -> case emqx_topic:match(Topic, TopicFilter) of true when PublishedAt >= StartTime -> - [Payload | Acc]; + [Msg | Acc]; _ -> Acc end diff --git a/apps/emqx_durable_storage/test/props/prop_replay_message_storage.erl b/apps/emqx_durable_storage/test/props/prop_replay_message_storage.erl deleted file mode 100644 index d96996534..000000000 --- a/apps/emqx_durable_storage/test/props/prop_replay_message_storage.erl +++ /dev/null @@ -1,463 +0,0 @@ -%%-------------------------------------------------------------------- -%% Copyright (c) 2020-2023 EMQ Technologies Co., Ltd. All Rights Reserved. -%%-------------------------------------------------------------------- - --module(prop_replay_message_storage). - --include_lib("proper/include/proper.hrl"). --include_lib("eunit/include/eunit.hrl"). - --define(WORK_DIR, ["_build", "test"]). --define(RUN_ID, {?MODULE, testrun_id}). - --define(KEYSPACE, ?MODULE). --define(SHARD_ID, <<"shard">>). --define(SHARD, {?KEYSPACE, ?SHARD_ID}). --define(GEN_ID, 42). 
- -%%-------------------------------------------------------------------- -%% Properties -%%-------------------------------------------------------------------- - -prop_bitstring_computes() -> - ?FORALL( - Keymapper, - keymapper(), - ?FORALL({Topic, Timestamp}, {topic(), integer()}, begin - BS = emqx_ds_message_storage_bitmask:compute_bitstring(Topic, Timestamp, Keymapper), - is_integer(BS) andalso (BS < (1 bsl get_keymapper_bitsize(Keymapper))) - end) - ). - -prop_topic_bitmask_computes() -> - Keymapper = make_keymapper(16, [8, 12, 16], 100), - ?FORALL(TopicFilter, topic_filter(), begin - Mask = emqx_ds_message_storage_bitmask:compute_topic_bitmask(TopicFilter, Keymapper), - % topic bits + timestamp LSBs - is_integer(Mask) andalso (Mask < (1 bsl (36 + 6))) - end). - -prop_next_seek_monotonic() -> - ?FORALL( - {TopicFilter, StartTime, Keymapper}, - {topic_filter(), pos_integer(), keymapper()}, - begin - Filter = emqx_ds_message_storage_bitmask:make_keyspace_filter( - {TopicFilter, StartTime}, - Keymapper - ), - ?FORALL( - Bitstring, - bitstr(get_keymapper_bitsize(Keymapper)), - emqx_ds_message_storage_bitmask:compute_next_seek(Bitstring, Filter) >= Bitstring - ) - end - ). - -prop_next_seek_eq_initial_seek() -> - ?FORALL( - Filter, - keyspace_filter(), - emqx_ds_message_storage_bitmask:compute_initial_seek(Filter) =:= - emqx_ds_message_storage_bitmask:compute_next_seek(0, Filter) - ). - -prop_iterate_messages() -> - TBPL = [4, 8, 12], - Options = #{ - timestamp_bits => 32, - topic_bits_per_level => TBPL, - epoch => 200 - }, - % TODO - % Shrinking is too unpredictable and leaves a LOT of garbage in the scratch dit. 
- ?FORALL(Stream, noshrink(non_empty(messages(topic(TBPL)))), begin - Filepath = make_filepath(?FUNCTION_NAME, erlang:system_time(microsecond)), - {DB, Handle} = open_db(Filepath, Options), - Shim = emqx_ds_message_storage_bitmask_shim:open(), - ok = store_db(DB, Stream), - ok = store_shim(Shim, Stream), - ?FORALL( - { - {Topic, _}, - Pattern, - StartTime - }, - { - nth(Stream), - topic_filter_pattern(), - start_time() - }, - begin - TopicFilter = make_topic_filter(Pattern, Topic), - Iteration = {TopicFilter, StartTime}, - Messages = iterate_db(DB, Iteration), - Reference = iterate_shim(Shim, Iteration), - ok = close_db(Handle), - ok = emqx_ds_message_storage_bitmask_shim:close(Shim), - ?WHENFAIL( - begin - io:format(user, " *** Filepath = ~s~n", [Filepath]), - io:format(user, " *** TopicFilter = ~p~n", [TopicFilter]), - io:format(user, " *** StartTime = ~p~n", [StartTime]) - end, - is_list(Messages) andalso equals(Messages -- Reference, Reference -- Messages) - ) - end - ) - end). - -prop_iterate_eq_iterate_with_preserve_restore() -> - TBPL = [4, 8, 16, 12], - Options = #{ - timestamp_bits => 32, - topic_bits_per_level => TBPL, - epoch => 500 - }, - {DB, _Handle} = open_db(make_filepath(?FUNCTION_NAME), Options), - ?FORALL(Stream, non_empty(messages(topic(TBPL))), begin - % TODO - % This proptest is impure because messages from testruns assumed to be - % independent of each other are accumulated in the same storage. This - % would probably confuse shrinker in the event a testrun fails. 
- ok = store_db(DB, Stream), - ?FORALL( - { - {Topic, _}, - Pat, - StartTime, - Commands - }, - { - nth(Stream), - topic_filter_pattern(), - start_time(), - shuffled(flat([non_empty(list({preserve, restore})), list(iterate)])) - }, - begin - Replay = {make_topic_filter(Pat, Topic), StartTime}, - Iterator = make_iterator(DB, Replay), - Ctx = #{db => DB, replay => Replay}, - Messages = run_iterator_commands(Commands, Iterator, Ctx), - equals(Messages, iterate_db(DB, Replay)) - end - ) - end). - -prop_iterate_eq_iterate_with_refresh() -> - TBPL = [4, 8, 16, 12], - Options = #{ - timestamp_bits => 32, - topic_bits_per_level => TBPL, - epoch => 500 - }, - {DB, _Handle} = open_db(make_filepath(?FUNCTION_NAME), Options), - ?FORALL(Stream, non_empty(messages(topic(TBPL))), begin - % TODO - % This proptest is also impure, see above. - ok = store_db(DB, Stream), - ?FORALL( - { - {Topic, _}, - Pat, - StartTime, - RefreshEvery - }, - { - nth(Stream), - topic_filter_pattern(), - start_time(), - pos_integer() - }, - ?TIMEOUT(5000, begin - Replay = {make_topic_filter(Pat, Topic), StartTime}, - IterationOptions = #{iterator_refresh => {every, RefreshEvery}}, - Iterator = make_iterator(DB, Replay, IterationOptions), - Messages = iterate_db(Iterator), - equals(Messages, iterate_db(DB, Replay)) - end) - ) - end). - -% store_message_stream(DB, [{Topic, {Payload, ChunkNum, _ChunkCount}} | Rest]) -> -% MessageID = emqx_guid:gen(), -% PublishedAt = ChunkNum, -% MessageID, PublishedAt, Topic -% ]), -% ok = emqx_ds_message_storage_bitmask:store(DB, MessageID, PublishedAt, Topic, Payload), -% store_message_stream(DB, payload_gen:next(Rest)); -% store_message_stream(_Zone, []) -> -% ok. - -store_db(DB, Messages) -> - lists:foreach( - fun({Topic, Payload = {MessageID, Timestamp, _}}) -> - Bin = term_to_binary(Payload), - emqx_ds_message_storage_bitmask:store(DB, MessageID, Timestamp, Topic, Bin) - end, - Messages - ). - -iterate_db(DB, Iteration) -> - iterate_db(make_iterator(DB, Iteration)). 
- -iterate_db(It) -> - case emqx_ds_message_storage_bitmask:next(It) of - {value, Payload, ItNext} -> - [binary_to_term(Payload) | iterate_db(ItNext)]; - none -> - [] - end. - -make_iterator(DB, Replay) -> - {ok, It} = emqx_ds_message_storage_bitmask:make_iterator(DB, Replay), - It. - -make_iterator(DB, Replay, Options) -> - {ok, It} = emqx_ds_message_storage_bitmask:make_iterator(DB, Replay, Options), - It. - -run_iterator_commands([iterate | Rest], It, Ctx) -> - case emqx_ds_message_storage_bitmask:next(It) of - {value, Payload, ItNext} -> - [binary_to_term(Payload) | run_iterator_commands(Rest, ItNext, Ctx)]; - none -> - [] - end; -run_iterator_commands([{preserve, restore} | Rest], It, Ctx) -> - #{db := DB} = Ctx, - Serial = emqx_ds_message_storage_bitmask:preserve_iterator(It), - {ok, ItNext} = emqx_ds_message_storage_bitmask:restore_iterator(DB, Serial), - run_iterator_commands(Rest, ItNext, Ctx); -run_iterator_commands([], It, _Ctx) -> - iterate_db(It). - -store_shim(Shim, Messages) -> - lists:foreach( - fun({Topic, Payload = {MessageID, Timestamp, _}}) -> - Bin = term_to_binary(Payload), - emqx_ds_message_storage_bitmask_shim:store(Shim, MessageID, Timestamp, Topic, Bin) - end, - Messages - ). - -iterate_shim(Shim, Iteration) -> - lists:map( - fun binary_to_term/1, - emqx_ds_message_storage_bitmask_shim:iterate(Shim, Iteration) - ). - -%%-------------------------------------------------------------------- -%% Setup / teardown -%%-------------------------------------------------------------------- - -open_db(Filepath, Options) -> - {ok, Handle} = rocksdb:open(Filepath, [{create_if_missing, true}]), - {Schema, CFRefs} = emqx_ds_message_storage_bitmask:create_new(Handle, ?GEN_ID, Options), - DB = emqx_ds_message_storage_bitmask:open(?SHARD, Handle, ?GEN_ID, CFRefs, Schema), - {DB, Handle}. - -close_db(Handle) -> - rocksdb:close(Handle). - -make_filepath(TC) -> - make_filepath(TC, 0). 
- -make_filepath(TC, InstID) -> - Name = io_lib:format("~0p.~0p", [TC, InstID]), - Path = filename:join(?WORK_DIR ++ ["proper", "runs", get_run_id(), ?MODULE_STRING, Name]), - ok = filelib:ensure_dir(Path), - Path. - -get_run_id() -> - case persistent_term:get(?RUN_ID, undefined) of - RunID when RunID /= undefined -> - RunID; - undefined -> - RunID = make_run_id(), - ok = persistent_term:put(?RUN_ID, RunID), - RunID - end. - -make_run_id() -> - calendar:system_time_to_rfc3339(erlang:system_time(second), [{offset, "Z"}]). - -%%-------------------------------------------------------------------- -%% Type generators -%%-------------------------------------------------------------------- - -topic() -> - non_empty(list(topic_level())). - -topic(EntropyWeights) -> - ?LET(L, scaled(1 / 4, list(1)), begin - EWs = lists:sublist(EntropyWeights ++ L, length(L)), - ?SIZED(S, [oneof([topic_level(S * EW), topic_level_fixed()]) || EW <- EWs]) - end). - -topic_filter() -> - ?SUCHTHAT( - L, - non_empty( - list( - frequency([ - {5, topic_level()}, - {2, '+'}, - {1, '#'} - ]) - ) - ), - not lists:member('#', L) orelse lists:last(L) == '#' - ). - -topic_level_pattern() -> - frequency([ - {5, level}, - {2, '+'}, - {1, '#'} - ]). - -topic_filter_pattern() -> - list(topic_level_pattern()). - -topic_filter(Topic) -> - ?LET({T, Pat}, {Topic, topic_filter_pattern()}, make_topic_filter(Pat, T)). - -make_topic_filter([], _) -> - []; -make_topic_filter(_, []) -> - []; -make_topic_filter(['#' | _], _) -> - ['#']; -make_topic_filter(['+' | Rest], [_ | Levels]) -> - ['+' | make_topic_filter(Rest, Levels)]; -make_topic_filter([level | Rest], [L | Levels]) -> - [L | make_topic_filter(Rest, Levels)]. - -% topic() -> -% ?LAZY(?SIZED(S, frequency([ -% {S, [topic_level() | topic()]}, -% {1, []} -% ]))). - -% topic_filter() -> -% ?LAZY(?SIZED(S, frequency([ -% {round(S / 3 * 2), [topic_level() | topic_filter()]}, -% {round(S / 3 * 1), ['+' | topic_filter()]}, -% {1, []}, -% {1, ['#']} -% ]))). 
- -topic_level() -> - ?LET(L, list(oneof([range($a, $z), range($0, $9)])), iolist_to_binary(L)). - -topic_level(Entropy) -> - S = floor(1 + math:log2(Entropy) / 4), - ?LET(I, range(1, Entropy), iolist_to_binary(io_lib:format("~*.16.0B", [S, I]))). - -topic_level_fixed() -> - oneof([ - <<"foo">>, - <<"bar">>, - <<"baz">>, - <<"xyzzy">> - ]). - -keymapper() -> - ?LET( - {TimestampBits, TopicBits, Epoch}, - { - range(0, 128), - non_empty(list(range(1, 32))), - pos_integer() - }, - make_keymapper(TimestampBits, TopicBits, Epoch * 100) - ). - -keyspace_filter() -> - ?LET( - {TopicFilter, StartTime, Keymapper}, - {topic_filter(), pos_integer(), keymapper()}, - emqx_ds_message_storage_bitmask:make_keyspace_filter({TopicFilter, StartTime}, Keymapper) - ). - -messages(Topic) -> - ?LET( - Ts, - list(Topic), - interleaved( - ?LET(Messages, vector(length(Ts), scaled(4, list(message()))), lists:zip(Ts, Messages)) - ) - ). - -message() -> - ?LET({Timestamp, Payload}, {timestamp(), binary()}, {emqx_guid:gen(), Timestamp, Payload}). - -message_streams(Topic) -> - ?LET(Topics, list(Topic), [{T, payload_gen:binary_stream_gen(64)} || T <- Topics]). - -timestamp() -> - scaled(20, pos_integer()). - -start_time() -> - scaled(10, pos_integer()). - -bitstr(Size) -> - ?LET(B, binary(1 + (Size div 8)), binary:decode_unsigned(B) band (1 bsl Size - 1)). - -nth(L) -> - ?LET(I, range(1, length(L)), lists:nth(I, L)). - -scaled(Factor, T) -> - ?SIZED(S, resize(ceil(S * Factor), T)). - -interleaved(T) -> - ?LET({L, Seed}, {T, integer()}, interleave(L, rand:seed_s(exsss, Seed))). - -shuffled(T) -> - ?LET({L, Seed}, {T, integer()}, shuffle(L, rand:seed_s(exsss, Seed))). - -flat(T) -> - ?LET(L, T, lists:flatten(L)). 
- -%%-------------------------------------------------------------------- -%% Internal functions -%%-------------------------------------------------------------------- - -make_keymapper(TimestampBits, TopicBits, MaxEpoch) -> - emqx_ds_message_storage_bitmask:make_keymapper(#{ - timestamp_bits => TimestampBits, - topic_bits_per_level => TopicBits, - epoch => MaxEpoch - }). - -get_keymapper_bitsize(Keymapper) -> - maps:get(bitsize, emqx_ds_message_storage_bitmask:keymapper_info(Keymapper)). - --spec interleave(list({Tag, list(E)}), rand:state()) -> list({Tag, E}). -interleave(Seqs, Rng) -> - interleave(Seqs, length(Seqs), Rng). - -interleave(Seqs, L, Rng) when L > 0 -> - {N, RngNext} = rand:uniform_s(L, Rng), - {SeqHead, SeqTail} = lists:split(N - 1, Seqs), - case SeqTail of - [{Tag, [M | Rest]} | SeqRest] -> - [{Tag, M} | interleave(SeqHead ++ [{Tag, Rest} | SeqRest], L, RngNext)]; - [{_, []} | SeqRest] -> - interleave(SeqHead ++ SeqRest, L - 1, RngNext) - end; -interleave([], 0, _) -> - []. - --spec shuffle(list(E), rand:state()) -> list(E). -shuffle(L, Rng) -> - {Rands, _} = randoms(length(L), Rng), - [E || {_, E} <- lists:sort(lists:zip(Rands, L))]. - -randoms(N, Rng) when N > 0 -> - {Rand, RngNext} = rand:uniform_s(Rng), - {Tail, RngFinal} = randoms(N - 1, RngNext), - {[Rand | Tail], RngFinal}; -randoms(_, Rng) -> - {[], Rng}. 
From 2de79dd9ac42d3a9e825fd34ccce865db785ad64 Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Wed, 25 Oct 2023 08:48:20 +0200 Subject: [PATCH 091/155] feat(ds): Replay QoS1 messages --- Makefile | 2 +- .../emqx_persistent_message_ds_replayer.erl | 207 ++++++++++++++++++ apps/emqx/src/emqx_persistent_session_ds.erl | 165 ++++++++++---- apps/emqx/src/emqx_persistent_session_ds.hrl | 56 +++++ .../test/emqx_persistent_messages_SUITE.erl | 20 +- .../test/emqx_persistent_session_SUITE.erl | 42 ++++ apps/emqx_durable_storage/src/emqx_ds.erl | 18 +- .../src/emqx_ds_helper.erl | 73 ++++++ .../src/emqx_ds_replication_layer.erl | 24 +- .../src/emqx_ds_storage_layer.erl | 2 +- tdd | 13 ++ topic_match_test.png | Bin 0 -> 176221 bytes 12 files changed, 563 insertions(+), 59 deletions(-) create mode 100644 apps/emqx/src/emqx_persistent_message_ds_replayer.erl create mode 100644 apps/emqx/src/emqx_persistent_session_ds.hrl create mode 100644 apps/emqx_durable_storage/src/emqx_ds_helper.erl create mode 100755 tdd create mode 100644 topic_match_test.png diff --git a/Makefile b/Makefile index 254a4b0f9..ed10a09fd 100644 --- a/Makefile +++ b/Makefile @@ -85,7 +85,7 @@ $(REL_PROFILES:%=%-compile): $(REBAR) merge-config .PHONY: ct ct: $(REBAR) merge-config - @$(REBAR) ct --name $(CT_NODE_NAME) -c -v --cover_export_name $(CT_COVER_EXPORT_PREFIX)-ct + ENABLE_COVER_COMPILE=1 $(REBAR) ct --name $(CT_NODE_NAME) -c -v --cover_export_name $(CT_COVER_EXPORT_PREFIX)-ct ## only check bpapi for enterprise profile because it's a super-set. .PHONY: static_checks diff --git a/apps/emqx/src/emqx_persistent_message_ds_replayer.erl b/apps/emqx/src/emqx_persistent_message_ds_replayer.erl new file mode 100644 index 000000000..ce57eaa80 --- /dev/null +++ b/apps/emqx/src/emqx_persistent_message_ds_replayer.erl @@ -0,0 +1,207 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. 
+%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +%% @doc This module implements the routines for replaying streams of +%% messages. +-module(emqx_persistent_message_ds_replayer). + +%% API: +-export([new/0, next_packet_id/1, replay/2, commit_offset/3, poll/3]). + +%% internal exports: +-export([]). + +-export_type([inflight/0]). + +-include("emqx_persistent_session_ds.hrl"). + +%%================================================================================ +%% Type declarations +%%================================================================================ + +%% Note: sequence numbers are monotonic; they don't wrap around: +-type seqno() :: non_neg_integer(). + +-record(range, { + stream :: emqx_ds:stream(), + first :: seqno(), + last :: seqno(), + iterator_next :: emqx_ds:iterator() | undefined +}). + +-type range() :: #range{}. + +-record(inflight, { + next_seqno = 0 :: seqno(), + acked_seqno = 0 :: seqno(), + offset_ranges = [] :: [range()] +}). + +-opaque inflight() :: #inflight{}. + +%%================================================================================ +%% API funcions +%%================================================================================ + +-spec new() -> inflight(). +new() -> + #inflight{}. + +-spec next_packet_id(inflight()) -> {emqx_types:packet_id(), inflight()}. 
+next_packet_id(Inflight0 = #inflight{next_seqno = LastSeqno}) -> + Inflight = Inflight0#inflight{next_seqno = LastSeqno + 1}, + {seqno_to_packet_id(LastSeqno), Inflight}. + +-spec replay(emqx_persistent_session_ds:id(), inflight()) -> + emqx_session:replies(). +replay(_SessionId, _Inflight = #inflight{offset_ranges = _Ranges}) -> + []. + +-spec commit_offset(emqx_persistent_session_ds:id(), emqx_types:packet_id(), inflight()) -> + {_IsValidOffset :: boolean(), inflight()}. +commit_offset(SessionId, PacketId, Inflight0 = #inflight{acked_seqno = AckedSeqno0, next_seqno = NextSeqNo, offset_ranges = Ranges0}) -> + AckedSeqno = packet_id_to_seqno(NextSeqNo, PacketId), + true = AckedSeqno0 < AckedSeqno, + Ranges = lists:filter( + fun(#range{stream = Stream, last = LastSeqno, iterator_next = ItNext}) -> + case LastSeqno =< AckedSeqno of + true -> + %% This range has been fully + %% acked. Remove it and replace saved + %% iterator with the trailing iterator. + update_iterator(SessionId, Stream, ItNext), + false; + false -> + %% This range still has unacked + %% messages: + true + end + end, + Ranges0 + ), + Inflight = Inflight0#inflight{acked_seqno = AckedSeqno, offset_ranges = Ranges}, + {true, Inflight}. + +-spec poll(emqx_persistent_session_ds:id(), inflight(), pos_integer()) -> + {emqx_session:replies(), inflight()}. +poll(SessionId, Inflight0, WindowSize) when WindowSize > 0, WindowSize < 16#7fff -> + #inflight{next_seqno = NextSeqNo0, acked_seqno = AckedSeqno} = + Inflight0, + FetchThreshold = max(1, WindowSize div 2), + FreeSpace = AckedSeqno + WindowSize - NextSeqNo0, + case FreeSpace >= FetchThreshold of + false -> + %% TODO: this branch is meant to avoid fetching data from + %% the DB in chunks that are too small. However, this + %% logic is not exactly good for the latency. Can the + %% client get stuck even? + {[], Inflight0}; + true -> + Streams = shuffle(get_streams(SessionId)), + fetch(SessionId, Inflight0, Streams, FreeSpace, []) + end. 
+ +%%================================================================================ +%% Internal exports +%%================================================================================ + +%%================================================================================ +%% Internal functions +%%================================================================================ + +fetch(_SessionId, Inflight, _Streams = [], _N, Acc) -> + {lists:reverse(Acc), Inflight}; +fetch(_SessionId, Inflight, _Streams, 0, Acc) -> + {lists:reverse(Acc), Inflight}; +fetch(SessionId, Inflight0, [#ds_stream{stream = Stream} | Streams], N, Publishes0) -> + #inflight{next_seqno = FirstSeqNo, offset_ranges = Ranges0} = Inflight0, + ItBegin = get_last_iterator(SessionId, Stream, Ranges0), + {ok, ItEnd, Messages} = emqx_ds:next(ItBegin, N), + {Publishes, Inflight1} = + lists:foldl( + fun(Msg, {PubAcc0, InflightAcc0}) -> + {PacketId, InflightAcc} = next_packet_id(InflightAcc0), + PubAcc = [{PacketId, Msg} | PubAcc0], + {PubAcc, InflightAcc} + end, + {Publishes0, Inflight0}, + Messages + ), + #inflight{next_seqno = LastSeqNo} = Inflight1, + NMessages = LastSeqNo - FirstSeqNo, + case NMessages > 0 of + true -> + Range = #range{ + first = FirstSeqNo, + last = LastSeqNo - 1, + stream = Stream, + iterator_next = ItEnd + }, + Inflight = Inflight1#inflight{offset_ranges = Ranges0 ++ [Range]}, + fetch(SessionId, Inflight, Streams, N - NMessages, Publishes); + false -> + fetch(SessionId, Inflight1, Streams, N, Publishes) + end. + +update_iterator(SessionId, Stream, Iterator) -> + mria:dirty_write(?SESSION_ITER_TAB, #ds_iter{id = {SessionId, Stream}, iter = Iterator}). + +get_last_iterator(SessionId, Stream, Ranges) -> + case lists:keyfind(Stream, #range.stream, lists:reverse(Ranges)) of + false -> + get_iterator(SessionId, Stream); + #range{iterator_next = Next} -> + Next + end. 
+ +get_iterator(SessionId, Stream) -> + Id = {SessionId, Stream}, + [#ds_iter{iter = It}] = mnesia:dirty_read(?SESSION_ITER_TAB, Id), + It. + +get_streams(SessionId) -> + mnesia:dirty_read(?SESSION_STREAM_TAB, SessionId). + +%% Packet ID as defined by MQTT protocol is a 16-bit integer in range +%% 1..FFFF. This function translates internal session sequence number +%% to MQTT packet ID by chopping off most significant bits and adding +%% 1. This assumes that there's never more FFFF in-flight packets at +%% any time: +-spec seqno_to_packet_id(non_neg_integer()) -> emqx_types:packet_id(). +seqno_to_packet_id(Counter) -> + Counter rem 16#ffff + 1. + +%% Reconstruct session counter by adding most significant bits from +%% the current counter to the packet id. +-spec packet_id_to_seqno(non_neg_integer(), emqx_types:packet_id()) -> non_neg_integer(). +packet_id_to_seqno(NextSeqNo, PacketId) -> + N = ((NextSeqNo bsr 16) bsl 16) + PacketId, + case N > NextSeqNo of + true -> N - 16#10000; + false -> N + end. + +-spec shuffle([A]) -> [A]. +shuffle(L0) -> + L1 = lists:map( + fun(A) -> + {rand:uniform(), A} + end, + L0 + ), + L2 = lists:sort(L1), + {_, L} = lists:unzip(L2), + L. diff --git a/apps/emqx/src/emqx_persistent_session_ds.erl b/apps/emqx/src/emqx_persistent_session_ds.erl index 9bc9e0b91..b8afc771f 100644 --- a/apps/emqx/src/emqx_persistent_session_ds.erl +++ b/apps/emqx/src/emqx_persistent_session_ds.erl @@ -18,9 +18,12 @@ -include("emqx.hrl"). -include_lib("snabbkaffe/include/snabbkaffe.hrl"). +-include_lib("stdlib/include/ms_transform.hrl"). -include("emqx_mqtt.hrl"). +-include("emqx_persistent_session_ds.hrl"). + %% Session API -export([ create/3, @@ -50,7 +53,7 @@ -export([ deliver/3, replay/3, - % handle_timeout/3, + handle_timeout/3, disconnect/1, terminate/2 ]). @@ -81,10 +84,14 @@ expires_at := timestamp() | never, %% Client’s Subscriptions. 
iterators := #{topic() => subscription()}, + %% Inflight messages + inflight := emqx_persistent_message_ds_replayer:inflight(), %% props := map() }. +%% -type session() :: #session{}. + -type timestamp() :: emqx_utils_calendar:epoch_millisecond(). -type topic() :: emqx_types:topic(). -type clientinfo() :: emqx_types:clientinfo(). @@ -113,6 +120,8 @@ open(#{clientid := ClientID}, _ConnInfo) -> %% somehow isolate those idling not-yet-expired sessions into a separate process %% space, and move this call back into `emqx_cm` where it belongs. ok = emqx_cm:discard_session(ClientID), + ensure_timer(pull), + ensure_timer(get_streams), case open_session(ClientID) of Session = #{} -> {true, Session, []}; @@ -259,8 +268,8 @@ get_subscription(TopicFilter, #{iterators := Iters}) -> {ok, emqx_types:publish_result(), replies(), session()} | {error, emqx_types:reason_code()}. publish(_PacketId, Msg, Session) -> - % TODO: stub - {ok, emqx_broker:publish(Msg), [], Session}. + ok = emqx_ds:store_batch(?PERSISTENT_MESSAGE_DB, [Msg]), + {ok, persisted, [], Session}. %%-------------------------------------------------------------------- %% Client -> Broker: PUBACK @@ -269,9 +278,14 @@ publish(_PacketId, Msg, Session) -> -spec puback(clientinfo(), emqx_types:packet_id(), session()) -> {ok, emqx_types:message(), replies(), session()} | {error, emqx_types:reason_code()}. -puback(_ClientInfo, _PacketId, _Session = #{}) -> - % TODO: stub - {error, ?RC_PACKET_IDENTIFIER_NOT_FOUND}. +puback(_ClientInfo, PacketId, Session = #{id := Id, inflight := Inflight0}) -> + case emqx_persistent_message_ds_replayer:commit_offset(Id, PacketId, Inflight0) of + {true, Inflight} -> + Msg = #message{}, %% TODO + {ok, Msg, [], Session#{inflight => Inflight}}; + {false, _} -> + {error, ?RC_PACKET_IDENTIFIER_NOT_FOUND} + end. 
%%-------------------------------------------------------------------- %% Client -> Broker: PUBREC @@ -308,10 +322,23 @@ pubcomp(_ClientInfo, _PacketId, _Session = #{}) -> %%-------------------------------------------------------------------- -spec deliver(clientinfo(), [emqx_types:deliver()], session()) -> - no_return(). -deliver(_ClientInfo, _Delivers, _Session = #{}) -> - % TODO: ensure it's unreachable somehow - error(unexpected). + {ok, emqx_types:message(), replies(), session()}. +deliver(_ClientInfo, _Delivers, Session) -> + %% This may be triggered for the system messages. FIXME. + {ok, [], Session}. + +-spec handle_timeout(clientinfo(), emqx_session:common_timer_name(), session()) -> + {ok, replies(), session()} | {ok, replies(), timeout(), session()}. +handle_timeout(_ClientInfo, pull, Session = #{id := Id, inflight := Inflight0}) -> + WindowSize = 100, + {Publishes, Inflight} = emqx_persistent_message_ds_replayer:poll(Id, Inflight0, WindowSize), + %%logger:warning("Inflight: ~p", [Inflight]), + ensure_timer(pull), + {ok, Publishes, Session#{inflight => Inflight}}; +handle_timeout(_ClientInfo, get_streams, Session = #{id := Id}) -> + renew_streams(Id), + ensure_timer(get_streams), + {ok, [], Session}. -spec replay(clientinfo(), [], session()) -> {ok, replies(), session()}. @@ -390,29 +417,11 @@ del_subscription(TopicFilterBin, DSSessionId) -> %% Session tables operations %%-------------------------------------------------------------------- --define(SESSION_TAB, emqx_ds_session). --define(SESSION_SUBSCRIPTIONS_TAB, emqx_ds_session_subscriptions). --define(DS_MRIA_SHARD, emqx_ds_session_shard). - --record(session, { - %% same as clientid - id :: id(), - %% creation time - created_at :: _Millisecond :: non_neg_integer(), - expires_at = never :: _Millisecond :: non_neg_integer() | never, - %% for future usage - props = #{} :: map() -}). 
- --record(ds_sub, { - id :: subscription_id(), - start_time :: emqx_ds:time(), - props = #{} :: map(), - extra = #{} :: map() -}). --type ds_sub() :: #ds_sub{}. - create_tables() -> + ok = emqx_ds:open_db(?PERSISTENT_MESSAGE_DB, #{ + backend => builtin, + storage => {emqx_ds_storage_bitfield_lts, #{}} + }), ok = mria:create_table( ?SESSION_TAB, [ @@ -433,7 +442,29 @@ create_tables() -> {attributes, record_info(fields, ds_sub)} ] ), - ok = mria:wait_for_tables([?SESSION_TAB, ?SESSION_SUBSCRIPTIONS_TAB]), + ok = mria:create_table( + ?SESSION_STREAM_TAB, + [ + {rlog_shard, ?DS_MRIA_SHARD}, + {type, bag}, + {storage, storage()}, + {record_name, ds_stream}, + {attributes, record_info(fields, ds_stream)} + ] + ), + ok = mria:create_table( + ?SESSION_ITER_TAB, + [ + {rlog_shard, ?DS_MRIA_SHARD}, + {type, set}, + {storage, storage()}, + {record_name, ds_iter}, + {attributes, record_info(fields, ds_iter)} + ] + ), + ok = mria:wait_for_tables([ + ?SESSION_TAB, ?SESSION_SUBSCRIPTIONS_TAB, ?SESSION_STREAM_TAB, ?SESSION_ITER_TAB + ]), ok. -dialyzer({nowarn_function, storage/0}). @@ -482,7 +513,8 @@ session_create(SessionId, Props) -> id = SessionId, created_at = erlang:system_time(millisecond), expires_at = never, - props = Props + props = Props, + inflight = emqx_persistent_message_ds_replayer:new() }, ok = mnesia:write(?SESSION_TAB, Session, write), Session. @@ -555,12 +587,12 @@ session_del_subscription(#ds_sub{id = DSSubId}) -> mnesia:delete(?SESSION_SUBSCRIPTIONS_TAB, DSSubId, write). session_read_subscriptions(DSSessionId) -> - % NOTE: somewhat convoluted way to trick dialyzer - Pat = erlang:make_tuple(record_info(size, ds_sub), '_', [ - {1, ds_sub}, - {#ds_sub.id, {DSSessionId, '_'}} - ]), - mnesia:match_object(?SESSION_SUBSCRIPTIONS_TAB, Pat, read). + MS = ets:fun2ms( + fun(Sub = #ds_sub{id = {Sess, _}}) when Sess =:= DSSessionId -> + Sub + end + ), + mnesia:select(?SESSION_SUBSCRIPTIONS_TAB, MS, read). 
-spec new_subscription_id(id(), topic_filter()) -> {subscription_id(), emqx_ds:time()}. new_subscription_id(DSSessionId, TopicFilter) -> @@ -568,12 +600,58 @@ new_subscription_id(DSSessionId, TopicFilter) -> DSSubId = {DSSessionId, TopicFilter}, {DSSubId, NowMS}. +%%-------------------------------------------------------------------- +%% Reading batches +%%-------------------------------------------------------------------- + +renew_streams(Id) -> + Subscriptions = ro_transaction(fun() -> session_read_subscriptions(Id) end), + ExistingStreams = ro_transaction(fun() -> mnesia:read(?SESSION_STREAM_TAB, Id) end), + lists:foreach( + fun(#ds_sub{id = {_, TopicFilter}, start_time = StartTime}) -> + renew_streams(Id, ExistingStreams, TopicFilter, StartTime) + end, + Subscriptions + ). + +renew_streams(Id, ExistingStreams, TopicFilter, StartTime) -> + AllStreams = emqx_ds:get_streams(?PERSISTENT_MESSAGE_DB, TopicFilter, StartTime), + transaction( + fun() -> + lists:foreach( + fun({Rank, Stream}) -> + Rec = #ds_stream{ + session = Id, + topic_filter = TopicFilter, + stream = Stream, + rank = Rank + }, + case lists:member(Rec, ExistingStreams) of + true -> + ok; + false -> + mnesia:write(?SESSION_STREAM_TAB, Rec, write), + % StartTime), + {ok, Iterator} = emqx_ds:make_iterator(Stream, TopicFilter, 0), + IterRec = #ds_iter{id = {Id, Stream}, iter = Iterator}, + mnesia:write(?SESSION_ITER_TAB, IterRec, write) + end + end, + AllStreams + ) + end + ). + %%-------------------------------------------------------------------------------- transaction(Fun) -> {atomic, Res} = mria:transaction(?DS_MRIA_SHARD, Fun), Res. +ro_transaction(Fun) -> + {atomic, Res} = mria:ro_transaction(?DS_MRIA_SHARD, Fun), + Res. + %%-------------------------------------------------------------------------------- export_subscriptions(DSSubs) -> @@ -586,7 +664,7 @@ export_subscriptions(DSSubs) -> ). 
export_session(#session{} = Record) -> - export_record(Record, #session.id, [id, created_at, expires_at, props], #{}). + export_record(Record, #session.id, [id, created_at, expires_at, inflight, props], #{}). export_subscription(#ds_sub{} = Record) -> export_record(Record, #ds_sub.start_time, [start_time, props, extra], #{}). @@ -595,3 +673,8 @@ export_record(Record, I, [Field | Rest], Acc) -> export_record(Record, I + 1, Rest, Acc#{Field => element(I, Record)}); export_record(_, _, [], Acc) -> Acc. + +-spec ensure_timer(pull | get_streams) -> ok. +ensure_timer(Type) -> + emqx_utils:start_timer(100, {emqx_session, Type}), + ok. diff --git a/apps/emqx/src/emqx_persistent_session_ds.hrl b/apps/emqx/src/emqx_persistent_session_ds.hrl new file mode 100644 index 000000000..54b077795 --- /dev/null +++ b/apps/emqx/src/emqx_persistent_session_ds.hrl @@ -0,0 +1,56 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- +-ifndef(EMQX_PERSISTENT_SESSION_DS_HRL_HRL). +-define(EMQX_PERSISTENT_SESSION_DS_HRL_HRL, true). + +-define(SESSION_TAB, emqx_ds_session). +-define(SESSION_SUBSCRIPTIONS_TAB, emqx_ds_session_subscriptions). +-define(SESSION_STREAM_TAB, emqx_ds_stream_tab). +-define(SESSION_ITER_TAB, emqx_ds_iter_tab). 
+-define(DS_MRIA_SHARD, emqx_ds_session_shard). + +-record(ds_sub, { + id :: emqx_persistent_session_ds:subscription_id(), + start_time :: emqx_ds:time(), + props = #{} :: map(), + extra = #{} :: map() +}). +-type ds_sub() :: #ds_sub{}. + +-record(ds_stream, { + session :: emqx_persistent_session_ds:id(), + topic_filter :: emqx_ds:topic_filter(), + stream :: emqx_ds:stream(), + rank :: emqx_ds:stream_rank() +}). + +-record(ds_iter, { + id :: {emqx_persistent_session_ds:id(), emqx_ds:stream()}, + iter :: emqx_ds:iterator() +}). + +-record(session, { + %% same as clientid + id :: emqx_persistent_session_ds:id(), + %% creation time + created_at :: _Millisecond :: non_neg_integer(), + expires_at = never :: _Millisecond :: non_neg_integer() | never, + inflight :: emqx_persistent_message_ds_replayer:inflight(), + %% for future usage + props = #{} :: map() +}). + +-endif. diff --git a/apps/emqx/test/emqx_persistent_messages_SUITE.erl b/apps/emqx/test/emqx_persistent_messages_SUITE.erl index 32e59a114..db025a457 100644 --- a/apps/emqx/test/emqx_persistent_messages_SUITE.erl +++ b/apps/emqx/test/emqx_persistent_messages_SUITE.erl @@ -103,8 +103,8 @@ t_messages_persisted(_Config) -> ct:pal("Persisted = ~p", [Persisted]), ?assertEqual( - [M1, M2, M5, M7, M9, M10], - [{emqx_message:topic(M), emqx_message:payload(M)} || M <- Persisted] + lists:sort([M1, M2, M5, M7, M9, M10]), + lists:sort([{emqx_message:topic(M), emqx_message:payload(M)} || M <- Persisted]) ), ok. @@ -146,11 +146,11 @@ t_messages_persisted_2(_Config) -> ct:pal("Persisted = ~p", [Persisted]), ?assertEqual( - [ + lists:sort([ {T(<<"client/1/topic">>), <<"4">>}, {T(<<"client/2/topic">>), <<"5">>} - ], - [{emqx_message:topic(M), emqx_message:payload(M)} || M <- Persisted] + ]), + lists:sort([{emqx_message:topic(M), emqx_message:payload(M)} || M <- Persisted]) ), ok. @@ -252,9 +252,13 @@ connect(Opts0 = #{}) -> Client. 
consume(TopicFiler, StartMS) -> - [{_, Stream}] = emqx_ds:get_streams(?PERSISTENT_MESSAGE_DB, TopicFiler, StartMS), - {ok, It} = emqx_ds:make_iterator(Stream, StartMS), - consume(It). + lists:flatmap( + fun({_Rank, Stream}) -> + {ok, It} = emqx_ds:make_iterator(Stream, StartMS, 0), + consume(It) + end, + emqx_ds:get_streams(?PERSISTENT_MESSAGE_DB, TopicFiler, StartMS) + ). consume(It) -> case emqx_ds:next(It, 100) of diff --git a/apps/emqx/test/emqx_persistent_session_SUITE.erl b/apps/emqx/test/emqx_persistent_session_SUITE.erl index be3bf6e6a..008305671 100644 --- a/apps/emqx/test/emqx_persistent_session_SUITE.erl +++ b/apps/emqx/test/emqx_persistent_session_SUITE.erl @@ -510,6 +510,48 @@ t_process_dies_session_expires(Config) -> emqtt:disconnect(Client2). +t_publish_while_client_is_gone_qos1(Config) -> + %% A persistent session should receive messages in its + %% subscription even if the process owning the session dies. + ConnFun = ?config(conn_fun, Config), + Topic = ?config(topic, Config), + STopic = ?config(stopic, Config), + Payload1 = <<"hello1">>, + Payload2 = <<"hello2">>, + ClientId = ?config(client_id, Config), + {ok, Client1} = emqtt:start_link([ + {proto_ver, v5}, + {clientid, ClientId}, + {properties, #{'Session-Expiry-Interval' => 30}}, + {clean_start, true} + | Config + ]), + {ok, _} = emqtt:ConnFun(Client1), + {ok, _, [1]} = emqtt:subscribe(Client1, STopic, qos1), + + ok = emqtt:disconnect(Client1), + maybe_kill_connection_process(ClientId, Config), + + ok = publish(Topic, [Payload1, Payload2]), + + {ok, Client2} = emqtt:start_link([ + {proto_ver, v5}, + {clientid, ClientId}, + {properties, #{'Session-Expiry-Interval' => 30}}, + {clean_start, false} + | Config + ]), + {ok, _} = emqtt:ConnFun(Client2), + Msgs = receive_messages(2), + ?assertMatch([_, _], Msgs), + [Msg2, Msg1] = Msgs, + ?assertEqual({ok, iolist_to_binary(Payload1)}, maps:find(payload, Msg1)), + ?assertEqual({ok, 1}, maps:find(qos, Msg1)), + ?assertEqual({ok, 
iolist_to_binary(Payload2)}, maps:find(payload, Msg2)), + ?assertEqual({ok, 1}, maps:find(qos, Msg2)), + + ok = emqtt:disconnect(Client2). + t_publish_while_client_is_gone(init, Config) -> skip_ds_tc(Config); t_publish_while_client_is_gone('end', _Config) -> ok. t_publish_while_client_is_gone(Config) -> diff --git a/apps/emqx_durable_storage/src/emqx_ds.erl b/apps/emqx_durable_storage/src/emqx_ds.erl index 941573bf8..c8199239f 100644 --- a/apps/emqx_durable_storage/src/emqx_ds.erl +++ b/apps/emqx_durable_storage/src/emqx_ds.erl @@ -30,6 +30,9 @@ %% Message replay API: -export([get_streams/3, make_iterator/3, next/2]). +%% Iterator storage API: +-export([save_iterator/3, get_iterator/2]). + %% Misc. API: -export([]). @@ -46,7 +49,8 @@ message_id/0, next_result/1, next_result/0, store_batch_result/0, - make_iterator_result/1, make_iterator_result/0 + make_iterator_result/1, make_iterator_result/0, + get_iterator_result/1 ]). %%================================================================================ @@ -97,6 +101,10 @@ -type message_id() :: emqx_ds_replication_layer:message_id(). +-type iterator_id() :: term(). + +-type get_iterator_result(Iterator) :: {ok, Iterator} | undefined. + %%================================================================================ %% API funcions %%================================================================================ @@ -174,6 +182,14 @@ make_iterator(Stream, TopicFilter, StartTime) -> next(Iter, BatchSize) -> emqx_ds_replication_layer:next(Iter, BatchSize). +-spec save_iterator(db(), iterator_id(), iterator()) -> ok. +save_iterator(DB, ITRef, Iterator) -> + emqx_ds_replication_layer:save_iterator(DB, ITRef, Iterator). + +-spec get_iterator(db(), iterator_id()) -> get_iterator_result(iterator()). +get_iterator(DB, ITRef) -> + emqx_ds_replication_layer:get_iterator(DB, ITRef). 
+ %%================================================================================ %% Internal exports %%================================================================================ diff --git a/apps/emqx_durable_storage/src/emqx_ds_helper.erl b/apps/emqx_durable_storage/src/emqx_ds_helper.erl new file mode 100644 index 000000000..5b55831d1 --- /dev/null +++ b/apps/emqx_durable_storage/src/emqx_ds_helper.erl @@ -0,0 +1,73 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- +-module(emqx_ds_helper). + +%% API: +-export([create_rr/1]). + +%% internal exports: +-export([]). + +-export_type([rr/0]). + +%%================================================================================ +%% Type declarations +%%================================================================================ + +-type item() :: {emqx_ds:stream_rank(), emqx_ds:stream()}. + +-type rr() :: #{ + queue := #{term() => [{integer(), emqx_ds:stream()}]}, + active_ring := {[item()], [item()]} +}. + +%%================================================================================ +%% API funcions +%%================================================================================ + +-spec create_rr([item()]) -> rr(). 
+create_rr(Streams) -> + RR0 = #{latest_rank => #{}, active_ring => {[], []}}, + add_streams(RR0, Streams). + +-spec add_streams(rr(), [item()]) -> rr(). +add_streams(#{queue := Q0, active_ring := R0}, Streams) -> + Q1 = lists:foldl( + fun({{RankX, RankY}, Stream}, Acc) -> + maps:update_with(RankX, fun(L) -> [{RankY, Stream} | L] end, Acc) + end, + Q0, + Streams + ), + Q2 = maps:map( + fun(_RankX, Streams1) -> + lists:usort(Streams1) + end, + Q1 + ), + #{queue => Q2, active_ring => R0}. + +%%================================================================================ +%% behavior callbacks +%%================================================================================ + +%%================================================================================ +%% Internal exports +%%================================================================================ + +%%================================================================================ +%% Internal functions +%%================================================================================ diff --git a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl index 06cead725..9b1ff5c7c 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl @@ -22,7 +22,9 @@ store_batch/3, get_streams/3, make_iterator/3, - next/2 + next/2, + save_iterator/3, + get_iterator/2 ]). %% internal exports: @@ -42,7 +44,7 @@ -type db() :: emqx_ds:db(). --type shard_id() :: {emqx_ds:db(), atom()}. +-type shard_id() :: {db(), atom()}. %% This record enapsulates the stream entity from the replication %% level. @@ -71,7 +73,7 @@ %% API functions %%================================================================================ --spec list_shards(emqx_ds:db()) -> [shard_id()]. +-spec list_shards(db()) -> [shard_id()]. 
list_shards(DB) -> %% TODO: milestone 5 lists:map( @@ -81,7 +83,7 @@ list_shards(DB) -> list_nodes() ). --spec open_db(emqx_ds:db(), emqx_ds:create_db_opts()) -> ok | {error, _}. +-spec open_db(db(), emqx_ds:create_db_opts()) -> ok | {error, _}. open_db(DB, Opts) -> %% TODO: improve error reporting, don't just crash lists:foreach( @@ -92,7 +94,7 @@ open_db(DB, Opts) -> list_nodes() ). --spec drop_db(emqx_ds:db()) -> ok | {error, _}. +-spec drop_db(db()) -> ok | {error, _}. drop_db(DB) -> lists:foreach( fun(Node) -> @@ -102,7 +104,7 @@ drop_db(DB) -> list_nodes() ). --spec store_batch(emqx_ds:db(), [emqx_types:message()], emqx_ds:message_store_opts()) -> +-spec store_batch(db(), [emqx_types:message()], emqx_ds:message_store_opts()) -> emqx_ds:store_batch_result(). store_batch(DB, Msg, Opts) -> %% TODO: Currently we store messages locally. @@ -112,7 +114,7 @@ store_batch(DB, Msg, Opts) -> -spec get_streams(db(), emqx_ds:topic_filter(), emqx_ds:time()) -> [{emqx_ds:stream_rank(), stream()}]. get_streams(DB, TopicFilter, StartTime) -> - Shards = emqx_ds_replication_layer:list_shards(DB), + Shards = list_shards(DB), lists:flatmap( fun(Shard) -> Node = node_of_shard(Shard), @@ -164,6 +166,14 @@ next(Iter0, BatchSize) -> Other end. +-spec save_iterator(db(), emqx_ds:iterator_id(), iterator()) -> ok. +save_iterator(_DB, _ITRef, _Iterator) -> + error(todo). + +-spec get_iterator(db(), emqx_ds:iterator_id()) -> emqx_ds:get_iterator_result(iterator()). +get_iterator(_DB, _ITRef) -> + error(todo). 
+ %%================================================================================ %% behavior callbacks %%================================================================================ diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl index bce976559..8b2e3cc61 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl @@ -368,7 +368,7 @@ rocksdb_open(Shard, Options) -> -spec db_dir(shard_id()) -> file:filename(). db_dir({DB, ShardId}) -> - lists:flatten([atom_to_list(DB), $:, atom_to_list(ShardId)]). + filename:join("data", lists:flatten([atom_to_list(DB), $:, atom_to_list(ShardId)])). %%-------------------------------------------------------------------------------- %% Schema access diff --git a/tdd b/tdd new file mode 100755 index 000000000..197891df6 --- /dev/null +++ b/tdd @@ -0,0 +1,13 @@ +#!/bin/bash + +make fmt > /dev/null &>1 & + +./rebar3 ct --name ct@127.0.0.1 --readable=true --suite ./_build/test/lib/emqx/test/emqx_persistent_session_SUITE.beam --case t_publish_while_client_is_gone_qos1 --group tcp + +suites=$(cat <;NBzUP%>Hf`Lwk&23HlkB-ODpXW7 z4OCSBI8pzDzsYFt$ie@tGm@7%L$yr&UrbSA5Ea#KD%mrq)tn-RI-KoPuY6w~3m51J zAEDv)d2KJ>nop%i6~C7|%IDzDgk+58zJS-Ie^#f#e7+W1vfRaISG9hbrfzt_|2s3AUv{Gn}aZF9z4p$fhI{r&jXk&%(X!NF1T z2fxAR*Doe!X69YQXQ;co=HPqEqtPDZPi^G1Uq|`UhpEZQ$w^5`uU`FbNLD>?;6S`Y z=63VPa#v;q@0XO6goK17CMNp%`K6|&7LY&q4W6Q?s3<%kH{bV=KJ^W zb8{}I=^x9*x4iSce?Nl%ikyPNm#0TB@291ud|Bz=mpi#HsKRdc_qUxP2oZEFkg8&22Hun)~yoe1S_`JSzSM z`MRaQrTFVgsjaQBuuzRteW+TEGgyHmNcd`(xs}z| zs;a8bC5k(HJ0 z?CexYWH}=&EF8clmhZW|^ybZ*`}b*2)8kg%JM5deFTOUedoHVzNO`NdHjk~z*Qesa zwkEyQD-g73dF4FO+tk!FJL}{xeav%dp`}GDId0P`j-YGRQDVZ8N~w(+YHCSpDcg@| zywJ+n%g1+qz?5>45dXh{bLCT2mHSMGqmV^QO-suO$B}l4g>Q87Av{y#dv_k;&t;+4pqmSKN<5U+ip_E`}tGtxio>Fd0N$rue9y?9DMrp zX_Y&aOZ9E)VIg1B%WrUCU~asU*TCtUPhDNzJl>=4!KkMvZaiytrcP{Oj)e&vj0?O!g7Wh3$+Dxfma!zf;I6qQO^sS7z%%r|anA 
z5BBB0p&{KOm#O!35?^jgk-y)N{*gQ@`FB)?*RNSy8|2wsk(9idljCS*HS&?&Q(;1S ztCEtEviKk48#l(J=)@+=wkhvFcraSrU3=g>_@M}e!(l7$K%n%hYz1V?fd!jad>zohs-TG z3yUFy6uV1bEY@$LC&b{1@MiKCmU>AC32sCJjg(~bx8+@>G7V*<=KxCR&8&t-4(Q+ zbXi0OI;a>nStrk1guudEjPB^@(0r-e+Vw+xszDWn!M69ypZTukVzb7SPoK_NTaR+< zWKY&#CpJ`5noe_?&J6zXR6RZAOG3_G&C-tlO8LX?`276*+}vE>Z3jnMGTwjvsx)vy z^U%qYfAFmmOS5rEyt7}J4BRg9hmPd^it=D}Y|p(xq{Ua4Zp~gcH#bL`OIA$`V3%NH zW9wADt{YG9zU#HI|Zj(m2{g$wlFM1~3%QckRo6t@42d^`5Nd}(Rv z`$rQ^=-5O}1$L@)HdYCBaYdmpyfUedN=Qh^xcm{nw*BLm zZh7t6;Uh;1t{+QS&+b;!)wPVhkS%6An3l6{{rdEanbHe41{#uuI-P!OFDXz>RGK=( zy>{KYjW^G0YC4uT7c!)mEX_UP*3owz&cYAH8LVH4LDnOb^6$09a=0yvGgvN7W2TE9 zwcu)|b7P7+yT|M$UEOw*X!omp%=&I))spKtLXw)>=aMLhDi_Q7xL%io_&-VIAj*Wd}4`dPgnq40h85t=V`=G+uCQrAv z>g}!dnrYhV7cU+Ts$key-dhz;{PFzx-654%_(OM7rx#7P{h9dX`O8Iuskrd@^E33` zeSLjhwyj-1dwZu7B^LYR0>uaLSIz%Yc9+Y+Ozkuk6UrYRD43e|VRsks($=$c9ywAz zWmx>DH~g$0qi^+>h?DhLcG`6k5)xwqD75h^&L3GF3&%dN@P`gzt27Kwu?Sh|W|=i2 z3?4su@+2&bGs+n|>{?b^dwcucyVNA}U)9m+_5BvNUg{Uk{rMGLT~QG#?w-G!8b$Qz z(W93?2B7NM*xTzNryzOEO%1yG>Zz*QA~UR8ySCp>IB!9vq-;AAljg;Xe|`i?2%kKu z*O}hWzk~Mn-Me?C?rg3KJD&QVx3%`~+j^LaEed>7pP&&3npv# z?cIyras1ZX-?KjgN4rWqJM!&Ei|4IIc@G~tgfi5TWgb#%T3B`g(H#;Qxri-1?MPhZ z)hj+GrjU2<-Yv0Fifjzi#$-z6p*oikA75^km7kZl>C5#C7cO*oGRRAWhK6=T-@kwV zO{0b=`GW}XmyT25&?$1s8h$hJtD5HKb%2%1jlc1FWfSY&f4^NHE@;`7dm}C}(PZ>j zb)?7)^7pReW{(RB=HqoGczH*aJ?H9mtc$V?-`=d;=x_ySEj4v8p6M248_cauX1um$J$C3hpoSF=ZRF{m`4Km z5XjCOR8dxxBw;_$@Xm*Z-E}Af6=LDFie5%`b~f-BDM63&m5*XpAfGf( zr~)Gki)(l3zbN&zrPmb|6|oMQ>0{&L1B>c_8aH>(&d;ao=8eyo%nqccOG-)6Y~0A^ z)cx-!7QuxjntS*B{C-u1I}&?yVZ74azN+hSXejc|4kjic!+n%U);-7b7q=hoxc+_< zHm*RZ!fHMZ4-bzSb|6j;YDqUYx6$-_0RbAE!7fF`K)v%5bP@}{3r34wFRnt;N6jp9 z9PM-+$us%!!9Ut#uK!BWLUY7P+pC?0&N&?pB#EUly0K7f{e#hNua$W^{Q@PHoaQF` zfn57W|Ayb`6>NCl6j?K;+~{KF)hep{_TS5V0pol>1_uYDz85DZ{)%+$Y@JVyh=_>7 zHvt-=z?Mz_oayo`aTsp%_V$)%FTySl^j!E+vb;Fz<>i&r<>89U00Wp_yLOG27a%-J z)Oj55p&&osV>klH7?`UAcqYAgE@ikPEG&$GgmsyE|K7ZLvs!$92(Y5B`2ytyd2Ayo zkr9N~ma69F83Zp1f6w=FKUD6{^vfL{^TXy%FD@21q=keu1Ki)2#-gw+E-tP;{pr)E 
zS-?#$#!8eSefKH#V7V;Q+Px&2<+r_@}Pst;?CngQI@8b=0{~@WN2D5|4!+_#3>3DoxVStKy?l2n1W$B`hv%z>py&K>&eYTtI;5Ms8MbdfDqZUHC&4 za@uX=?d&FpbGkY`<0ucVyYaIS`FDoTT~RrK^&H-B5;DGeb=|sk(_Nx`^ z2L=Wj6gu5Fb;@g223h-R=AU%W#nj!@S2Od_G0oEq4-KL2M#`|e+s%x0`1|{B*sy`O zc=lOzbo6W@D=TYs*~z$iHPVcp5@y-zKv!@faobHDFM4q8kv|hEB}3< zy{+l)*_1fovx$)K^Z+Pk^gF=KZHevT?y-o?Ra#mqgL2fhyyO^d;C80JuMhFHp2%fU zQ67!znzF3x$!l`z!Cx$R%0*mU95rs@_wT32&6`lC;sb8^9oy+I)PDUv-f5Qe#MQ8s zg?#Wg*mqOkq^GBUg(#Yxon1@ASUB-jgsDTW|FhfDpI@k)*<|RUSL9-kEMTDLeCDLh z&rdHdX1>nMoEh!%T%2l_U%8R)gMX2>$M@de>?@T|5)qNfiBWP&O5*kdYSFq)Mj0ng zo-CUE?uV8aZ^z;vk32NdQHbBNvNAmI`#W?WKYWmjq@?BU5@WY^N?xv!Qd0Vg@N))c zNQ`p9#)`6WbPN%$rM{A-onz5D-foK=>o_t60EtyXGHr-m37UWZf>jqJ5^>!8=inpD zQ|X?H5hupdayt3Lt5ojnyn*!(dhppqC>hop!PXa@XxP&sO7U|@V=LMPuYIXs-ymSv9K)C!P`*segjDtB0K zBjY!{)FXPwC&9sbSbt@S#mV+O+xX3E$q)RSWu;;CdYGD#F*ZB)4P8MI}~ zmcLC^?*5fmT}oY7Q*-@$*#;d_LQG7lSr#I)EkzK8lgO|=8#PkxmzNfa#GIfQ4k7_* zZh&M}pqQpHKR-{`QoziHFC#i(V6gGQ^b#vGvoIm+PD|u&v*?Sq=AVrNcUWpP=*IWOt;HCJ&!gh_Uit1`!9v;Ru zv96@6$gl1Na{d*@ZAGp##CKYE5wVM0k52X^a$2NFbM}=+YE5-@d(=+?k6;0uot@Eu zR8&+DO{aac-VCzaQrGfg7h-h##&UH{Jt7-tX|rzKy4T*A{2smJ&#vq`TFVZSAmD%6 zix&(Yt=YbTfin{mEKm9J^78O3^E0CoPQMLpZ8HM{tI=8i`VzVgRn~pZj*c$yU*A1> z_~M0yg`uG#WH6$Ry?Ryi^()ag;bvtO6^qcOMn**BM;mBr=D5ySrD>!#nz8KM$)WHx zMk6(ZTj$Bsr=R#G$@gUVAZ`UB+edxgYt^l0tJk1}l!82x(C5@AUf zmlo%b@$;vore5K1E%V)W*@JP%jvc#paan#6Z)j*R=3}<8u|W>OCy-sjN-!MvV08KN zudk6JXv3dB-|y%+(^VKyZdCE*OGCe%PPWBSW?G;g8YZ3wQvsw83pvVuUw80tZFn`< znSIi27HGvkGE&dUDOW4Y7pdCcpG-Ymwf-6Qs_*u8b`4-{u&50U0Evwa4ZuGMFUX%aZ`9gbn|ia_^^%z@ff+b^b-$&tJbxu3TvZ z{xBt#efS_OA|fC8h8?{oT7r-#PwuS^dGu)OST1s3R#u}!7>LXC^mJ5Lkd+`V@d2Sp zVKZ}&1gRvy;THS>Ud!5=mrY1OphbMG85o3t0W)*+W1$M0HfdbxK{gkd*3aOL4w`1(5a84R{HO1?KFpp-YQT$=w3mp?exF; z?VXR6mDTE96ei)+-+WH{2A%w^w}v_-j)Mmi0sK2V#RUW!G_rjC{A_J(9troyfaWdR zZi?F}UwaL|0>Eg6C8ww7WSMyT1zVK;ppicbBGkxHhHET0FVES<1)T$4(b_d@?rvwt zvg0G$Ib~fpI7KGVt{NMo0)pgCZX7^?T)TGd-voD`YbB+gDjBTJu0V)dTH0T~{#4-F zqckGZ(E^Pn2O0HY>&wc^*Z12Y_Q4z0*4CP3X~)%~e!e4E3W;@Ubo=;f0d&CqTSf~CZAK7alk8v#(DW{`qV 
z7VoT3MSZJ>hlf$6-l$uGWo&C|N>JlWREjji`tkFt@Px=mGVGI_QB01Aj0Dwwd}r0O zvwqV4I}g|kkRySw{cjG}d}k`aCgPw#-cDdi3yX@t*0_IueE5wepA#Bauq;xkjR}c~ zj&^n}g7FCAVBwwQi!gKi&Cc?5?d=ia;pQkkX!1c3*ViAz!!~7KNLB-m9C%)O79{9U zXOWKX=EP^ujKHxq>$|3Drzy$GUYb8fzS?iszqiEH+|ckCxH-=9M^8_@O0p@*un)EN zb43LfSU8gB$dSs*N^_8{2=CfwXMu|Ua;Z{uE3Mk^pxd`^Z#$^e(k6%(tC4yf4CzoS z)*B3PQqrO1%m29dPWIQM`e27PztjWZ6BpNytL4&ua|r9VLZ%viT7hPzpVSQu(whf8 zktSGJL@32hmUUa7<6lO0B;2|sd z>JGIcbO)#A78cx2)92?{Ui})egPwl3y}~Je+{m$Gs)_Yr3GiRp*`t5{G|tj4avC$D zqoYf%r}P|e@2>2E_n~Q)LBYZHD4w4`%Y6&63%xs287LtAb|_0Z+G#)+ zb*2Jn1vb#o1pR%1XI5Sy-wQiy(yo&>&r?$5{;P%zJS{!1^0yQf0BAwX36_hmHEl4n zH!^yr%M&6@VX_`DuPpoRY}6tkN4&t6)>a4^fRNJUMMp9tXxGdCy#LoyH3f7Z#0vw1 zE*}~uGl4K<%XpQ;_QK?~+P!Hd(a|17u|+2YmPGRq!=+1?va>9o#l<1<#0PAkwar#c zo*pJ8o<9Uy9hEYKM{g%;+TZ;h^Y_V;E^<8qzknQ!^(@kle)tfCJ+>C!4XN3nZ>QX(nO)&vR-ZjhXNIacndX7umHa_t-(!1<8c1W}-n6{n}CQLVC56m@kY z5M~Xss}^pyT8Llo_R>i0D0H^Hcrmo3#8Xc0o^;&B-0QJOyek{xl;?`5<5lMJ#uCLB z)W7Q+OB9xG$P@M|R3UQMZg#?H8sVsk;B+59eolq}j0_C>SDvxvh#f!R*?aeZF;Tzlu3vv3eAufi6;c#8w_;*_e0+S?n>S}U$7B>El9fw0ILa8!UI0w63(r?Ao=~BG()!J7!tiw{QRP zF_7TJ8_n$T3UA%I_4nmjuPpp^S*UshX2R_-FlePUp>JwwX+eU?u-&$O`vH3eDk&XJ zO-&`G0CFUvw}3_njta%zkCA(Pa`N#?1m#lyy)9ibq%JWfNi%?pXJ$Txi@a~bbJW+@ zXK4>Z9^zt@cXKPmjzV*N=8ShkEhWWxUZ9}z?t|!=@J%RvM!z^DBn;@i5tJa5*R5G| zj&)n<-OK!;fJSlk{n-|+DtdZim)@<((oU;2eOWwb<|DahWz$OwQ|`#?p}YmDMNLhO zy2OH1_VFW74-y}hhf#(Ws5g7z4z$5woInpf3JPj8n*s*{MfW)cz?V|5AZNat|Mvt# z^_v8O3SW=lEAcF8G>bfL{sfQq+E`Xg>*>m)F<)F+@7tOG&-79n3|vFPJ468H+nZm$ z948l#yob7R^ymemERux}uWx~r3~H$Le*uE5tiuBXZS!a4FJ~3l8*0GG&4(4Nq#Kc(Q}xSGau~~YPyz& z1|yN1c9Gwdixj1s;Pd|9QWF`8+USq99R2GBxEK=2cS+LLcH&pGXH-OlMTQm#sZ63- z+q35qB*Be7&!RhNs3!N264cOGsUBj;G^xg(gWAWsaF)@pe7qM03b+D4n3A%h;~V~7 z=^>cqfE_ojyJcw^4&)uGz~SmT4_&>*3h2SJ`Vgoq*WOSQLX9FFqZ9pIO2{0LDhN5T zWL}Y~v^4pD&7k>yv(d|cqS2kD=@m@J-eGIYvpqS9HDF_4Fm~+?QWnqq5g@|mK3VIzIB4L0f}V<&IkVNRyL45WMge%c*RT0K zJ(nLpX7HD`MYFlsd(~}qGAivtm$jK0=gRjzDAjHy-hkKJ__(+@L}@BZX6sOIFWBKX 
zQ2hY{ktiSx*S~!myUZ^NsY)ygk{;8VwAt@y&e9fDIy+l1Ugabh7?cn+@z6@P-Y%T!bd>IaNQq1W$+RMec!F2F8LRRg$ zz?&4*IXPVH5fZQxDM>3>sLv5lAjrO4y`?M(o0A|j;L){`esUL=Dp(}tuAt9RV`pI_mM=SQQSovnzn zuc-;!g^8M4L$@_)k`Nd?u{F@a5^}J~Dz8=QDk`sSe<`6OcxrU2Wr}%;G z3miwA(E8?Bq4STx-?zNW8%Rk7qdI@>{7RmTy-E49ZaDz~frt|~zCW{MxuC17YiJnL zaMUw|N#E%k#HO|!>n`-*Yfqb6S&6$&TNoQZZ^*-U0M%eow9_;a>(MRY*{h-@4AL}s zwcmWIug|pp_>9u+Gjpz_LLOQCjD`jn`eOZbCb~7J`#Q5jLAHZ!Z8$pr1)9?v5J&Fw z(~4mNvTnU#=iR4UO#<1(I4$|8nD;A1i$_L9y=*jd9B#Xkq4oX853~qK-4Jr-e$K6+ z0I9Vrfm7~l$r66=-p2n9FsJ7F8DSL}6h&MMMgI|Nf0+5G=Qyg(dv)e4k=pNuc}A&16@_O5H(Qx_#qWR4!!FzCKfw zejr||+kv>Bjsi#XthO|r96>=rFkI+fHBte+dTjDmP(|Nr%CZ}_yw>7^0*LzUQY3!l z$dP^f;$FUdDMLc^e<&|MJMf&!>s$4I>P~dD0VoLpccK%>)mf!rj0^a20z~QBg?z|}*{;0>5J><)I!b&xYhWLL6 z(Oz#?ve~&8d3h&=gqpKo`M))MrNxC!3QZ@$ay6CLZtAqOwDojs4Q5%8+PHXm*^zc( zHbP$|C$~|Ao80T~U#X7kv|)AA*N;lGRMpaIX>9E2@87X~`xSFik{V}mJsnl457f8Y zn^^3G^&A|s{roBmohPtS|BrtEKR9eo@ABmzV42Zt{Ihy>+_Pt}S+!(?n|mb8K}976 z6kh?_FH76f#l;1F2DHfKipd}Y<0%{dFA_V|Wo~MU6z7ebf)g!Ed-dwomNcZY@^Tml zD+;hndeKDw7Wo6}+EpeNhvL&t`_*_|;nU88mkBkxu zL<#d=!_ie#%(4GJd2c1AnXOiG!~q~f2rTE+)q{UmW-G$f1NMx#N-6JCg#wH))NI?=uRQzrpCNDd|MbFCUh@ANjNT;F-aRtH zLjmKZ^egm%M;-E3UrlUVo3@YS7DmRah$Ug+w#G*Ffu~ed-827TKtXha?*~I(33U3} zxB}ee-&g2Dc|PV?^bxYMzNj7~K~z($Fyb1v10Eh8Vyjav#=m@Ng*OO-1Q{1~^Di%1 zR(2*&_wSw6oFWHz>ABk0)@xu)%}KjN9KIOyw~|PAZr}dk@PpiAQ&I8NScI!sb{6I( zC4B;ifBd+5TN@*8S+tz zq~R&cm89TjE!-O@&@Y-RP* z@)8S})eJ58B{-C$6iBFD=aTaCUBF=gka2UT#m65I76$e(7YKX$^eJ1o3Y;GHW#2lfOXKr zP`~gix7MpufIH8hpEz*>!Y3=M`%uf#)AS4*eHzW+cnimsU@PhPEVw=fH zG%$w`93ZmmcZFn;Jl)R5#;Lxzz?X1xwMUW#O};ba`d><4-`Q!!9}08`9^b-(AsS#8 zl>XL0ylW{bDPB#!o&HeTiz2#V2=42fHTO(8e+XE3SO!st#b&E^b#eJ@{94{cLP`To zA+h*Fnbq1{Pew|VVN!+;0KTkI@$6Y2G5fT!@H_|9)i;Y4t z`}xyO)|$Kn*HM;kk|wU!6tT*o3c{q;)B^0HsVV&+2viu%WfuetEb87s3VHM9F8gFo zc6M0RRf;p4S`v3W%l!@cMc+!BRy#X-D;M=h(! 
zw*1VklwJNzVWuMcL~^!bw&ivQ(lM#KC)63&oQ^n4in?KEXLsYqmc%ZjRQ(7^$LN3k zBH2J^jVX(XiGivG7XpE&f$StNeL&G#N@Af~fVC_x?woExm*3uQaiqzPx3wj}bJ(m% zO#!;PuAW}Ga)o7=NtV50S#{0Y)5vwUj*d*}4&RtAoh=elvtX}Z4(lvQBmMa5owmJU z{*kEIz?tt8Vj~ZAwd4m5lxUT0kv|Du-bhA96ow)E4zbz6K0h%3_UzfSfnw7wKpEx! zTX{rJ^Mnu{Vmk3zLp;jNfty$(>6rB}Fx{dP&owo;`274KSO{_hy3uu87fm%idBTWQ z!Usr`KlDCfsmCYiL@V^_f#SZ)blFP2Ct4Xzz7{ED^Xe*n5L9PO->t8~8=ZyAjn)8&8-{jrn`!cF{zt)u>=fzeO5fO3yQp&sYlv}f0yh?63-NAicHP%v*C{+Uc7krUxQHFtM+)J{uFOZ2RYidEnS3YyLHC8l%LYZm=t@!Z+= z7hUX5m#rpcz>##W#W%UycZRMk1yccL@E3Mo-)jA#C7_(W+)NE2892%#a`L2oiBwHd z$d6x83CN=h^z_|>gY)J>bP62xkmi>@A4#XaSyNf82hfhC>y}vlQ(e5MjL?@mGU*z)!CrcgGmOM zWb6V|Pjyw*eQb`r%1%yBvvYI$vzxy9i8~k=uDcbg)$}JzDR?*o+HrQa#zNhzPvzz1 z&CQyHVt6{@Gdk*t!2-f=+FZQ1{r#8FV|47|?bf51NH`^jz(yMd7l2?rV`A%f1s&0m z;#@z_#pr-$LZ{QW9|wVQ$b@zS7N8@sUhHaf_N}?OIrz9n6j{+X==A7u_1xciTuyW| zH8nNbe`@NOGlXzWdzPH>Z$F3FOmMSc142R!CTkrMaywL^U_@leKTi?O- z9rd10iKO&&rsWN-BaC+??j70hYtS}=eO%cy4#s&z895EP%a%l3S`SIQM6e!3HcQc? ztYZ3XVr%w~($Z6mUb`xIgzHBPBjFy%zMVZ)^W!ev3LM82nWSy{!a}@edxco5~0p)88yNv(NJ%a@@!?h6>v}515_8|R!Z{7uphJEW+ zewRtpu^%2!4t(sc#qLs3c?5kRB*cV|xxJ$U4fJea9`=#wM-oY=P|C3268+Ad!Z*G{ z*O0mP9$nVADzmI8G1u&*q&~nTqiShUkbEeFTi3kBpo+#7%*$-pxN&R25FYK#ojt@% zN5)-S6l5fGJozz+ZLTNzL%9weN^)g2fno_N3XgxO@c$J>~kzQDOzpEr8Bj9;}g9KXxK-tjJJ{CTX8|I@WBZ58yj-Y<Wf(+UJv84vZmnF#t(guduw>YdHi{7`K3X81(PtSQQ{?!j}rii7*D!oDZJhD_VH zxAu~dG+|$zo4f7fbN0*`LJcRJx^J#w*;IZJp?O=BoMhd>^F*Drn3vv1>P_izlhtk!&y zmZk`z!AEdRNe3``H#OcEJTB44%{7dSzQbUGa3orkQzck+cq@BBEy3EjzDVC%FdRE> z-@bj8LBnvz!P@{yhuH-&U$!B8Rtsjw+FB@x%-gpgNBayPES!Y!SQAY&Zjsy=;O>Gw@uFyV$`azcn=@1c$ zFh}fUi>ztrK>w4Eq69k(2$r(1$T9wLahHKIfdvzkqY1NNuv|}fH^gnjT#)I!@Pk)Y z62ora-rvc5{Tfq4>FL%`kI-LhIb&8Wm>?pk_?XGtHFrNKz)uhN;pfjOD_4L146bRL z#LfGGflLxNpIfdz2UnP`#XDAEyK{w7-9LUDbDbW7REp1rZ5-!VF+Z}9jhuXIJ>9*D zVIV8~9(U?DGymhqkIP0zEt%Vy4Y8izo0ZBq+Tlfl0Sr+@Obx)ngEk$5X358hrMpF@ zLcKbngCS4|ZxpNpsIEu|Xc-}{ZT7QXeM=QT4|62m)-)VFaB#)dGBYwhfhHLi<}%f& zS&*AcxhYAQ3|B_}U_TQ)ew<)w3zMUtK0VU_IeF+vo8-NhKMCMz;zac`0K(H 
z5@1}$=tGCgU>f#n2$>#S(dw7gFI*r-G|~-9mXKU9Sf>xp0dm+y?$yleI5|0y?()Dz zQyhWS(Gs5J_qL-Y-p4`&;{*HvCbQz;vF1XO*uVcLM$mL~uYZGi3Pb?$7;=E}#0V0M zHpmddw!I2x&Xln){k{YHxyYeI=g5AIQZI7<4;Sq#>#kxLRMGb#55h7@AWXa_BBFJf zYTMfj9Y^(Vy{&0%JbvSQ8O#SlLOJ?h3Hvm9JHk)Smbln!kFFlBOqXwK;nP2g?t&DE z>T`#VU67d;`0C$(|3wHO>o66kym-MSCRX(NbsdO}0|$DV1-BgH;k!q3)> zo*7nG0SGk%x(I+$svY-dFq%xYt`1hqt=qO?s6bOskC^0$LJJLR^bB6tI+KZB_<_e~ zW;|ggZzGYau1-u;OGqHxVfk2S`$IUi2{F&CkrSF#UNZ~}sXPfUUw(D?iu??7q!lJ? z;5PuCNz-82vBMb+ClZcBNdxErKD@2TNjr3f*pNzL0{t+H4s2;O%Mu3@0Ez$eWoO? z(xoJ1Hm7FZdiI{)_r6d!K4yK9O>$~TTdDxymO%Gqd-hxxtufH1hM#6zbhm+wR#$)U;PDBjzp{vJ)?xjoLdX_$5 zJeGGC`sBmGa-=s9n4)2UjDSu~><3MKXPBxwJ31CI);E0g33-~sAN?qXb#kmad`43- zKZFU5IQ);;O#1{BY95{JFPIF)O%2{4Y{a;{H8w;}r^{fQc{;0@i=Bo>Q}ANEuF-g< zeSgXFX-STt1N-+EE>4pGWZ-SS=I*|Tfd;}q4rY(S@vnXd(8;H=oslu&`E$&d`b!5r ze7MIxLVoMktu`3QYW1*y#0o+$Cx@4dtMKB*F z{N{}}MKrdUs-HOx1SMzf3C zih0_7{n;nyS|0sgI$H8oM3L&phgZ5?5)1p^8~1))(`Lfa+#VQzU=K`T3)gbc$6kg3 z#aRr5fF&5*P?lvdT^sg*i2B8s2U`v0T!8oxHw;R8Kfr)bjXdzu8EWthm#ob2deFgO z9VQ_Yan$fV3P|FyPd*vH zM&|-KEFiB9VU?%*vMweAw)LtifBL-)8-oR12O17TYm)r*=30GQ$0$H2^&UWLxk@H3 ztyh3PiQLMOkx>|zMfW+(CPG5G6=Upge^v+scLvjZICRU`<%HdY!1c zW#p^ZueFQaoDUuWiT*+-X9Ve+805ip-N433PwA94_mTm(jn!`6(dQzI98Gp@HhFt< ztx?7-%)S7;?=eEZJX5lqp~rwe&-yx?2wV0^W7rZ!LTUfjsmEi}zHhJT)C*RKLh_xsy>DCjLO$wM>U2P9|F#Zk>YBL>eg zD5(QAIvP{1kiuW?Q$)lrvK+k6B@z#MQAY>lc}>@LGyy4HIgtRn&`e%uW zGQ7kLeSKZsII48Y%NObCB{n^BXpg7N2R~sP4ctVdZ~qIuf~J-hC*&J=p}+Lt9kX6= zFHv4#_%y-#?Q3%8%!o2(PEk_9C!t${ju%s$397)$WE%5a=6L%E6nRC(Nd7AuNj;)I z0NWTuMU!@v30ZX0PJz2FIOT)bYTvFueY>TtuQm>21=q~XKI_6n&dS7;14xP<5Z zv1|&#NT96+7*}1?Y~UI_>G&Dep#8yeq3@OIIxpT5K7f&Exog*FQB?lS&faG}dQ<;3 zc0K16B~@1S&8)(cAP)Q7-tngx`beJUjb+360Cn<8An`9>mdZN(tPqAioF;-YZhlaQ zgVpUnq1$DiUIeH|MM%Z6#g)GC zCIWWD9wb>~o)<|;S{V0*6hymeQ?!YQ_XD(L@E{{A&7)^P>l7JsFr*UWU6!V%xfZSb zy*8Y@@1COB=1qC@-S$g?) 
z*G4;wP(c{{%D-$^QBjeTmj?=XyjAcG=1V#->X-Ku!9lb*80ur>(mF-`2T-Ol+559I zUx>i*<6;?wi5=IVDZ|ABO2%7q&-NXc%w}GkmXf-)xk^-BR>1enI5JI6xc)OJ?Cd9P zj4`i|HapTwfFZ)ehJEmaC`h|3vndLX3fTL>=Jc*Y=VR`}S&4p_<MG%=SRU&&|(Q zsYF2O>FR=7M4PIpppfJP?q7}WDYO?e*{fF*{{8nY@)cB&x4$O9b~eAb_+wze1@9F~ zO1yg{M!#cXV!&R)%qA#^CY{h-?bgSA|8C@a#>m1#Vf5ubvnFW~bU$a$u3c0+o>lLQ zp`~q-b@lcA2L?Q)BTEbt0>syjKbCnq-Tn3!6cXMTGP~N;Km_dxH{(-EM^hG8RXT1O{Kp8Yg4?j2iIWfqg@UmUHw1?4e+t#g5-#*4vH;NJd`)&Q#zXH{GW|A7_-D*q7s^6dJl`ox`u8xvcZI=QwN(;2(qsK|+z;jAa`xZP?yTr5OumHWC5qE+_1I1TyT%+2NvnWhYx6Nk=S3n5IAo)8vTd|a8Y%i1+bfrgnnEH{ z-=t`l`%GkL=nj&)sK~&hYgSe~MbBZsYH!yyFem}8PfHuV!|vhq-SGaksewl9+2_$$ zuUy$8iI7N2t4&&U>BfztMaGbu}d@|YgSnPb-MG1ZlM!qQyej?YQY53v1S6TJ@SCW4eI8|8z*)_lAlXP zhX+wtjT311fH&<9esB6`y&HUVd6wZyN8#%Gw+jd3%*~7%=;?89^%eFUhG7w{;8$Hq z|2|>fQD+BXyT0?1pRgEep8FX#66O&IiUBM_BRk}b<{(C|{^uX4D0udsR;j z4#`iL>VYed&#teAm_?w$KeRG7K4XmmpWF1~eYN}In>92wZS&f{CQ7L+BIs;jTSUgZ zP@<&VP&+nygP)%tuB_xP0V210>96}noZr-^1J*|nXMe$ajYHD1&7BU-G1vV#fxb349m&{@e4!M9Ahi9q7D7 zJdf}Z_d{V}1DiFVGHlob`r4Zw{UlqU40WE%lv%-%Xhb7~v4Q5MEog-XOhq4_*_-Dy zw)f8aSQReisU2B_B*7v3J}?px$uVCYR^ju3KEnlWH8MtH6Ty7X`#Mcm?Qt8RRmiY(F!iw{2Tc zcekPMnRgD{%G?41j~CokFj;#Qa+jguDdlHpszkHw2Q?Z)Jjt|*xA&TX2?|w$L5atgvCfD4WS$0v zrl@YUL=uAxJK4Hs7&rw|G??QrK57~tnwqi|&&t#*U3eIV+;TVOUq>RBk|ttBEqjy)eLt2YI-z2trl2Mut(7})lV2V0S_(>uhNOW@*n*mX z1_y8zGHes21eOkjJZNWt5)GGc=*>`Y#`&{he!i5fCMN-xk_?Vf#90|9c5N=Yj!8&F zmIp9tGH8GBco?L;Adi2-4~PhEdf*yPs2gZZg;_^pe(2boIx)o#`VL>m!ore#0IC-p zgg7+@3SMMM2T2I$}^b5}OR zCm`=UCJ_*k=+Fk4UMn`v$T!R~L=6*T{F`?08xsV;`=kM1*U>H5`~;g4$ktKfxs0%a zES7x)iL);mc31}#AT*A6hW31WKdi-9KpbV5-EbISy*&OMnlrcp`(HZYV1NtBVa=K~^pa*~X2di+j%s>C1M@7VzrkH$Bli1F8Jm~@r0FaY2p3d) z9DAiPKj$Asy5J3a@bh4D9iSIozs^_0w{PFA*7~!N`=rs=>Ao@DGGTEu;rt=Qc!lmb zSlO+To$vrT1jw(4^#>aO8 z&fUAaLAOBUnVOy^BpVb>E-tPX9y#tH4n@vGhi36Af*`HN8lf!`)y_y-vzQ-KzR^c0x0kczla9dq^k5IphDluK zO-zODv?GoZEA?7s_5^60iCcRTRMZmaHB>e<+ctTk0L9$g!%)>Q()UasehVCfQP<_r_&vAW_W7`vIN+01<*pGMfXGFT75xmREZYAHIeJ1rWl-qnis(2QAh|(fODF 
zH+%cz!D+D{D!{Qp!FZ+#<9HeJ7#11hb?(U6Xna`F^5X>%m7ypAr*Gj95`I)1>fAn&!^a_q7IWH9CA+C{MWTd45O3_F( zb#_t+X(}pxMt{LzSvR82&D{l%g7M@AXdZO!#7R#`Yv@l}cm_5ITD7-W(QQRWYfNo{ zG<117ZIAbtaG^jm>1VwTXpedI^3`Ar0$qRU(iV8LzJGr<2{af+oIi+>eO%gec?r_+ z1-XAv_I%5~qcO;Q_Ds^iU^!S*e{5{57JOHStEy@}f(WoNPj>_d*L)uxRbEbd{`@C; zcez2^6T6Nb%S6})>^PRtu?ZXpu8gRck5_?SkC-bcD3DOMOqs{+gL=DH7oetzQ#Fqs zKMrQ14+pubS^tT*pWGofJn#NJ@l^j$YR8{I`Ji{h1TD1>4kqQzjPzsT2d%6aPQieQ z3b2C}$Q-@lQY@;H(K8coP~H1`Y07P0Z=xmrXS_ z55Q#y2gk?8!hSFdv8Fck{?n(-`GeKS15g6`G+9NR9^BJVB~mkp6Qz4Q1TC0BVQi~Z z^-KhVnEC2eQc6lb2ucv4I6@^ebK>z~)xHP9d!D2JC&AY~G{sJnpx_|sgvvwmM+f~;w+>_qj?uwTlN7KUaN1ZWt=IH+kj>NT;ks3vz(lUW0UgTZoititFjrlIeF*ulXFAm#ORST_B$ z$|V!S)kt2fC$8_tK~o4D)Ynj9drs0>Tx3!SJ9^T|6{QXs1!B8(&f(oulXfTZ3(Z(u0xR@X*i=Yi?qz;h-Pz->>y{ zQWf$6u>q-xdo0Yt{F19XLY#l+H1^$L)Em997n9#kYlINT{{6Jfe1@1K051Z!*sick zRFnSzPy_VzVqJU8nFsgdTYo4->U?|3?cslIziq-froEr zCj)~3d@V4iXUnhlGI{*);T9$)1bZINAw^Ncxkbe|Rue%#)SS*viYHct;FEE&wI)d5N#3+|o1x z5}OspO0UF2k0%7C4b{^TNMc4RoL(d8FJJQP+4Bt?DY4JExKN;Qu#^hI{oFa~*Ti5z z$6LtHknUlb#>n$=0RdE5`L#EI_kE^=s5&$Nk+3IVxWN2&$m7Rwdrd zP{sd8*L%n1{I~!AXHt<$5mJewB1uR|MN1{22pK6E8D%6%ibyIVAuSq|hD}1^9Yr!* zMrK(JqLjUU_g7ul=X+h>@8@^^(ak*1*ZCaBcpQ)8aj?dqL$0l&n^8*8EkkBu5T_5w*hG8zu=! 
zm$Q7BvI=S6Dc3>f_Z*2KKMt)l_L@4idsrd;xha1Vh?+v?mj)o%9xpPKlk8Hk9Z+lkH_DfnbUWdMb|{Qmajuu!$#?AW&81=}~b-E@(OqVzbR-x%YR5a$(M-PVf-%7S$;hAp!fu>i^9J$lQon zkCd$P0z}gNYWNbRDh5VIo}QjqG#&+=F?=s~V9MA&`tBmL>xciOt10#FeQLI0@aO^E z#^YYDxMixZ*->lKI7^@vEWUQOacwllgsBf>PQK9 zAE#y*oNbIPx|bZk@L6}zBT`%R0uw}7w?&@B7@l(MKdb%x2vyuan_)CLpNraDwa9m< zv#o0O>@g!JPGjE@OIEj@J^Lqp!}RuL=v|l+5e5r7-ofTlF6lit?MFZX)fr+7UeF(i zr|Aem4d=d*lv$Lu1}F@@PFDy(VBVPri8;=iW&eA&las}rB%4lU6%~gzHnTOrHg&sX z69hp5b0Y`?=Gs0K!s}A3phr*^J-d@{N{TSDGS#w)z@}0^QQ3!x^vqAa6i-yIPGn~K ze~YD3q;q#t*yH-{W+cA>Y2E^T= zKv|g+mhiw=x^>v#i|s|ug2QY&XR;wX(?u5tbLhklLcrlCwt=bvM`nHLoUuGSZS$zx zPc*WMVdtmKdh3WUp`>9EWTo%UT8K*`(dHOMJcetAT)~TdyVyEVq>?se3BSWpPn;;* z=VNgb!(lR{J5tJ1Z)bv_3w@QHx3UE1xev+%7tG9mRFd2r2coi80G z7M=3tfU*ym6ZlhNLc)|+-Z@9!e>WX?jJ9@|eGKR-=QdrS`Jz9G0uP;JIhdz7^Ox_SVSXUCi>P_wiy0C|W8hD{Dwz8ND>_H((nDn zPUDRef#U#*oM%1uCywy;s%Md?0)WUi^aJ z;%kWc*g4Xi;qgavbN8*gmK>lPZ3p=~vzR~3o^5Dg@E*x&?an$4n*=Jxg$p305C2)q z_X)n7a`tSDve&u9`|e|Slb~PUPBrw53t5UmXc*Qf)JR`nfB5j1J{K_PmAjtM&R}aB zrtAgrkX!h<>WUw9X3NyfBS&b~fr_!_{g7z8Oofqan&5Lgj6J*$ovX~{`$kvPn_ArO zZu^)s&zG?G_JKwC#>e>kdX2ER@83T)+Srb=?rpzfP5BFk&xHS1E^)KX?QxjNvsX7{ z61mKDaXIPwX{ykc%H@F^3R*>^4z{2QBC7bu-EVHH-aHuDaT|1Lopl(my`JR*Rw-jC z+ls~?h?&0+uCh92zR}Zs;o45>+-GN#o&iD1VTBa3-!FD^mdt$C*C{J@=iE%+<^y?q zCz~BVas@_v+EhCKv8o^TdGGhQZM%I>FI;+Z z;*R6>S4ON>^j^Bu-Yn(Q=qKh*zL%_vEt01KD$F{~T(M~UR{*xv3 zW=WD8*AzXpDN(8cc? zi|!sa$;nwqPoOEf1_QeNrb+_RCN3!h$*WtZj!@d_2j(7>dn>U@=h&2~Q>|P>GmfW? 
z_&8p3OKImGZ({-)-lSc+)ogFlkQwQ>!ad#8d#UXmJNe%Ww{pZAjQY~=iS@hvGsYxB zD(euH0$i45Y7>IhgNF|;@QN@X)*DOu`ku5xE%H-53)y_(x%3cp2Jecx#c*~2{ z>X{>TC;d@dx~{2u6Z(v?x2D;D@$KeaZ|X5=&6_1(Z7ci5Ur1|Nb#|#;c-6u^KL*`j z2=i@k@8%4tU!YGY%1$QmK|5UNSpc!qR&{fCpEPIOfj%@GJFe9#?LK$}Qd{nFLMw@= z?pnK*sFM#0>wDmg!uD%~e%9&fJJ97OQm&I6Vm1r-jueSl%yA&4yU(qyuUo#Gs-X&f z;#_WbhQ9c7T&Vp#(tEv-|BrMcPG3{=8mCxae^>DD#yOnB=h} zwiIx4C#P~lx})RDKfl(cPc}9%@D*xxu|n}!^z0T>RLF!khlG&imEXD>^f(6j zO(@K-Zz$-+nt*##4x97J%X3@Zp4+W8QXTz&|5yVC7aT0oSq&U@#czpg z#>Xuy7Pn`FmZ|#S8?3?5RwJR0LG+m#G6?b!NWfg(#;JSu5$;lj@+URIb^$g(se-pl-F+c^k+J~=CWhl}pr+qaR) z*dov6;>yg(&2iF`9HE-T9`aw3C>(k{hn1XqgfyOz2dz zauk(Udov#*Z|r5&@pgsEwhgayh515$6K9-A8VjQM>FYj2=evhbxTc|EVTUU5&Yd=A zx@!g*2NqT?S4l%eh6omDf~N}BgP-ynIBUt`#V|+uw&lsH`;ZI#i3rx%fY!|1+%Mn^ z(~r%1Q&V%r&nZY@M6?y~6zVnB3t#Hcw4!dc6|~``YunXn9ny{tb)Al#sP!7Xbcxj) zuOFZGHv^`EH*}W{rEq{jL)|8D%tlHWA(FBl7jr&zs8P`*S|%OCGEl6*`29ElP2h@A zTX=pc(OE|r9YeN8dQE?4m0rfh%wxxX^T_`|j>ga-iwuS2lYjo@b$(UHND`>b=$tJ< zq-=M|0&C4^tNJbnmbcxLe?8*8Qkdxn4&1q2_4MM}$r{n}miQye6Y*1RWsx*K7C{RMvC5m5FR#98?$}||i4#MQ z&lHxmbvW(;LJxi|-{+0=l!w`*Mi7o52SCautnR5dht8QrkOci)n~eC!0`=F-2S`Hj zy#ESjakOkV!DW?R8_hPrv<)3dG4aUFNmX-{P3N!rsH(E>m!Fo~FivVh8~-0!eP;2T zf==t`DBosSvth_TxxdD((+kt+IeL@r?GH{FMJbD@9!9~dmhb$0H+6NJIcgQk$wI{l zcGb|i`E`wrcl?Z>&7wz(BLOKaEelT3>=no6TqwVFYkT9t>dt$$j(6LB4QeN`i~QA^ zmF!MT!0XntGqZ)&uKwPhSm_F5u1ZSgz4fX~EUz;x^Syh$qGIag$x)5FFCM@+q3fCI zsMNjt)EP5s>+4HuTh^H-B`5dw7~f$2?QPcUH*c_6Fm}OrFch`5#jl!~)v?gq`%XI^ z*A&SKJ-gz?m{iA6T)y08^}QS3y~8{Iq5p`a2PAh8j?=)vS-_O?ss|0!^ch%rVC?@n z!znSk!Y%b}$Mx!_z%##{b;x=4<5FW4=_jB4mi@mcxWlG?Ncal)dwGD;mK8~NNVo;% z=WY?g&lr0=Psyw1Z-HZ$#|S30ZYH3SK&AkqD9?ze5 z$+DGuc9uI%`H~B`YVBIQ_@av|h1IPa-DVyNwp*ObN%b)V&Qt@fV1eMXAA&HlgDgfHJ z+FLAZppQU)gedVna7K6Zepmx%QeU0V9 zCk4w}(tHGReR*-&LCRkzT_>CX-QS1N?%n%4Hkp)AZ*IOn4Q5)P+$f#{M27*~#SX7LvJ>~z@!XE3w^JSWuXG`})zM*_3l z3*&tE0D&^b`9F`g_LdcMvq?wbg5r*$b4FOdtgjy=-|1W6AEr_4QWuVj`+tQ82Zb4k z#+Rl-Gor*Nr#gy+P@fV2^(_CQa0Rbk67SK6(;G5mn)e<8HS5O{UNZ^_x$2q~-FdSV 
zDT_8{`Jb^rzUYqj+yj3P-oWE!OJ3E~kTG5JE4j4IMVGvyXM{C)G2kfKn5o*(0RxI2 zJ&NNt8gE&VX9Es)|HzC`%Ni(5(`U`vR#hP^@FQ$~0i_0TU6V(1xGIPapKd*7A~+OvRQtTiCyq7zV6m6clpR<*GGKrfbRyN5x} zZSnW85uR3)9VWTHz|2txLV>%8NA3Qy(=)NFMy_3*MhFj(yLaC{h6aMx(b+>zd(w4M zUmuFtG|u7T0LKEMoWn*{KfEB+FN+}6?Gz*zzm+Q;?O)3%8)SmfwBvcD58Jl4RNJxF z^0*e8VTmduRtuNE&g-d!npwbJJd~m|B@kCPx2OpJN1;u!+nu#%V(v*h*^~tr1*%f; zT&OV&WDW$qQ7HZW!KjR=L9&f>d*TBl0WmMr|68!%4>ye&xfat#UBtFE#Vv}WR zzdjCg1n1``prN9mPHk&(X~)tDiEw1m8IZR;KZo3FaLk$1gNEQm#m}5v2hjqF%;SJ= zuJDZLLZJ@OVN*4=b1<;^7jwqQ)+ByNfewUR^+3hzMMW4mL`pt=4^)dnE+WiY2z)@P zW9$(JrxFqfzXoU{_f~xcNXg{=qo{N2q=rm`7^5aHQMeMy0}mz49Ef=+n@rKLUD9Ed z=J^_F-ltl#?VP1}PF=bLE+>^rvRd^bE>Qd#iwP>Z?e?WhsO9aG72f#$V>jBWl$--v zh%()_OBN#TuAQq>7ny0`v_GuzZU!>tZ7pBwly`ATr;-&2+!3t zF;LNKkMFL7j8c|t-2F3$3qW_1bm&zYtB&R5RF;%1LEoGUF>qjbVa-X`KEGtI8a>;G z&_9StY7=-CivqDZa17n3MT9}^wkM!lsO2!~R8*#*>XkitlGx~DJ`*DitDS}f~nQqWeE&Txo_?^K!!HbYQPrJ{we^&h#?i4*tyTuT$rCf;B~7tv~3NW>A&%cHi) zImM(c0|^ew9^y?#%Ng&i4y}>)4~!%{9vK;#M|!>Yz9G;kDhjtg$Yvo^`u)Gnb#;mS z<$4P+Kf-yt0Z>8^2Z`Cfy-QSa_f8#wiQ=XUKwFAyI~Bi$gW0iz*psut2t7AArZ;i-7*(@Gj46FLs|n7Vk7R-E&d~O*?`a zeg6RA6;adh_D8*{ze2S1oZuX<$W)m*L2)H0vl{>|yT@(-7+;4?!S`x0HD_FvO^0q- z5wHUQf4|xK=>xhQ;HORubC-4(zpD92W0>g2VTL5IXvmu`M!neUVTYjw)cpi{SR4KS z43pSsnz~U{72kMJ^jMe&z|4wDt*|_Mo6&LSrF-4vO;R$f1TR=UGcz}*B6 zJexzgs$6hC3;b=IZDon=>dQ4b5H!H5MSX>dPt-*LJYT=li`Fs!j_(Gxa{#Vg3$?FF zvFgElw#;$hcmD zIfj6KSu?#jSJaf~GW0inF3|oWBOe`?n@f%pFzlBxF@<^Gy=|`gFV*G!rt7+uyqY^c z)e&;WhsS3JtE*qRbH^JpAWei&@^IC`o>$%vn8-AX<0npN|I1eo8hPVNek0$v)U3@{ zHW?A30-)aBKauen&N`YK&*r4AHe`26zFE136~CZG3(*%9WFk5WwUmFj62HL2#@@H7 zezU~OE8nkC`LkvaDjjV{_G8*K>WTeD-;+`@X&6`Gr8a9Zj?|O+J>FP?AnD;N`aqolh49Mi#|L#_YusI^oW3D zI?a(88QR-FR|iVZg|nTI|HW}ED9E&gCqck8=l?l4xF$gf`>P{li76jRdkOj~v({f6 z;vJXK3?RZb1>QA70?%dkY!neZXhEWM0MPjvn2EMiVXk1W=B?Ge&1ffN9Wtu-(d>tn*pn$VO&BWk4r z?TDPnXZ30*Q{mgMU7Gg4->N9(kgE5f!V0!&9TR_vIfz%nl%wNXzCn2?Z}Ag`27bCtlIZ=c=dlQ$cdDX zaqM;L(Q=GFHh%8)X*w`xA!FmiW9AO`C<=$C1!X`^NxwSuqul6+%e%^KGIys7;%!eT 
zYizqPvLbINY=8)4nUD-)IP8SMs&h?82Qql|C%uTPSt%_4d6%cX;8ifmY}!D@KVP1# zAU^?v14ylD_NSHeep%VPi+S3KBRu)jyEKKA_^AtrbO^1O{TRCe zonH9L{@cHNBSt3afon4{dCYHmROF2UOB9<FkfE@ zY8~U5s6ze8GtI2*?@85CsBGtlsC9l=#D_Siwg4}+`a4fhd0U_3qKm|u#>Ql^gn-gx z%NZ)WgkBlgI5+RyVbalpi~FA+J#}grap{0=*NTc@zLK&ycAqdYPTykoPYKUxS_5&K z(8^mnbo5>ATdUHr1PTWw+LalLk{Q}6psFNLG+Pn zpb0@|rnDIN^4En@Te|_f320FdN=r{CR}KveySLxCB{aFik*nVoeLa2Wduc6|3piM= z@&tVo%z&%ky}3h!g^e#QZvS=zIPSDBxly1Ma3!!(!gt>ZiIlj;S`FQE9Ohko=xj}I#6Ogc4>4;Qu0;RI7<=k;$ub|rX$TVQi2 zwhDjV>;7?`L z?zH_k7vPAvdl+aj1CYx%>Ht1K?}+>QnY8H`)1vfZ_x#h%*j~8z(=$7^?XD0t1%H_E zZ|pYh-ukR3j?itg1U%eK^C|?$pXf^f6-FrydkJ^#b93{&iwB=;{a45_hjIbP_q~LX zB-TF`egu`b#2nG%HFUi#{q9s+B|2;}?}yF^EC+yoxR5POKwS8LgTe6}xYCEhS0*BW zT$XIQZupb4=I0L4_Hk!iOk$!i1Qiu+xbK3nEdf2S^{6NJ#eBN}=QyWry;oZyXmZuvN+A>PD%TwJ(A zZ$5ljzIgEszmj{x7{e7b+(Kpm)uBZ=7jiG`pwq6-Q8e=sSNB(x-pC|l>ZFWk0KMV} zPA1&CwP7~AKc;*U%0j5fTNEWK9f^z$GeCLKC0^ul%5Fe6=}=(M|IRi}chN!l04i^q zK+l7LZ%mDP?m>JewM2;+mdV_xneJzA=u0F#LR+k`PH!6;pl?6GfeMUXQarU`9$U_x z`^t>P7Xj~vt56k;QFe9$qZha8E8|g93 zv|1Fc{Mrt2r4l2mMi3TrLxCY1XdJYi@uAn;aPF9tloTyc5djJN_nX2lgKWxZnO)DW zrkIfT5nKX^;~Y6Xpf1kRwdr%0wtoHk54TZ!FnlM$6TcAxHqo=2cm|0MSEvL1^X+Aq z++~~8-cm@T(`+=M2egE@WD@kj6UAKKHRM&G^pAcz(T4^gSpTEL&V=*ZsUB6aIPXNO z0C*4=bR-7KcM1#&a{Cq-PRYY#(K1>EX3}=ZaO|=P=jZ61nJNrRYW1MrO?Jna)n-m{ z)p3b{Y{+Q{HL)>R#$CI12~jkPs>bIcS_X;isb-(pKzK}X9C*WRh&NB$v4e%%;w6XR_BFD;!jvDq%8H8qTTJ2F zq#q23qmB(ajRMHty+v<)82+Avyu+Ae=xKL6rm^V+5&Ml+s0GESj~G#L^Jccuv%-#| z1EmiilFv}*poszop|i9ng`Ub^YXispmAJdn2pNaB6+~ZWv$8s`7WjFs7D7I zd)j0cBm(r2?1u!%5c9Y6xzLl^1fclki(Hp3iDNfjhRTV3X!UBd4_Xu!=h{6xe8X#3 zcN||*k{$*0SXv|6BLajhJw68cO17Q-xHjA9SqY11&z{2DRqfzI8$+#4QuUV5;Cf)- zm+1N(Ce%@KA;vSQ*lClBy@!W}`T|Io@9*D#R`SND;nsWh$8*5~N4G^}i2oD}J(FzP z-jst**O`U4dIk`qcM(o3cg16#sPr=-3$#gIXyn+3N$}6CLM5_-2nAkLa%zU=<_oAgxkG2cjr1`jR_aFuts5F6&0@q0cwQGDh<{Jj(H&fEy``tFM|c* 
znKeQw0IZ&lr6+I9PrPhw?9-zMF|k`CED(;8pq^uAXXoOgbY_K=C|?HjhtS-O0{ayOi)-qLo?f2Gx;C)@?LuF{hQej(+yjvK;wC8XXCtQ~p%yP|oocBFY>Zb`|lTM(Xw8t$s*eIF9$)YRN-fyp;Ik#ED>7cUvD)m{I#czaN%xdcm^MEl@GZ^z!W8+hTEs(F zh1MztUfzPcg$aG4X|+PD4fe6tCuo8u$I*|-s4-3#ZKiR2jnEtK`i-R-;qld zFSpfC+7V1{Ej5x=WHV@-1kIqA5ikZgj2=QzQ#?){gB1d&b#40l=;Ft3Q07+sGP+1V z&sX<~ed;-ndM+NMm>s-1T>9JetvKo z!SK$v->WV^fALvs;))41ha2C0x9z(mAmQ?Et=h|XjZduqP`7!;Yx&f5^CMT*=mdUw z#C#J9%O5g16~RuTZ6xaMx{jt1)P+uDWJpbYRDPWTPGO<=7e$H5Zjc>GiQv{TzSdf! zbEi&($dTK&eS+|8VjxQSCYc${#V7S@{{9Qhmf}D3g)MCj-djRLY0t_#B$Q@EW*~b$ zTtKMSp2!!gOby zYn^%+Tl04-+k}72J6SLe(g122m;kglVkVMxS-yJV;{mz7~r^u??!SLb>4j|b9^BPavXa)9t6_%@;E_j(6~5kPl^@8sb&_45YG~0M_W))a~pdYD$wIIww@?_yl`< zE}_26tCgd8zce!+aEj_5LQ}?daTZP_TC*v6d~obzilcN@%vXZ9f*?!y8bj_aL4a?V zOu;|CYuCD6@X<6h+;^`Q1`W3m?0|@sx@f_=73Oy=nq`%SY_XijB$niRQQNmuxk6ZT z8%HZVJUFQ*pQW^icJuF7y9^D)@`B-vd3H*qf1afkN_nGw!%G$%cRA%j1|%es92bB4 z@E*qJ;nU|xkI!G(V{~d|RaNioR^BOc7)wTA)6aX!9+)i7&I6+DPHtSWBxE@!RX^S# z&q~tVc#}DCTtRIMn2Kh%@#f~{6rLzHb5ZN%WJTGW^T^TNB7_zKL&Goht-!dTqK}t8 zSq$7I(%*+x*g~QVLDrOlRsBwhV0B15CbSGPaodf6P*pXbac-2tA7%FrZWh?ZMEp4u zcbfMr$hdMq5k0}H7X*%+%^+csgS98=_JqCKeP|z;Ni>&+&r^yj2JF##P=!`HgKkUv z3>Sr<$g(3UiU5y_3G74}IXQHvxt_y~j54^MTV=MbD9DuLU-F1w7A#nRSS<>`ea3E3 zy?b5kP_%>AMWQMmaAWh!acmOvVjC&L(Q|pwM33ntO|sPyv}>@d9EWCqAn!D9AHa`C zUWU&$3D-I#_$3kpO7kpJce!{9tLx&zjHh8TXL51|Jg72PAFH$mV@eT*HWPZIsKCrS zB%!~^ob6ZHyL?>F9z9_6Sgc*lTZxo5!=ORsbrO^2S>clpI@w&Lg%}45l})MeB+MFe z7v^q8YJ7y+Bdt|kI_Y2S2vZ9?Z|1pzl@YWi zKth5ukA{{xDN>PUb+eoW-v9b9M?*ueA-~FB*kI=KUUd*f4yQ{P5QS}qN@T5ZMN!8` zp6qc{FW|d^J(6wa%UT%nJic>txlo`5npWD{8<}(ylkV@l@P(kJb#lFf6*vtlefv^O zA$^B%$@y39-u(8ZVS9)w81B`(^< zizZ%I_!{mV+t$||6uxM*O5g+)!$faSPurtsEb4al3XTrTwF%#`!))z`IaJb!q#)YE z$pUGLDn4W~T4eK_n39mEd4)llRh$3ncj?Umso@35=RZz9G_83VF1&rAINKE59C|)oHv7z#?Jt@e2R2U; z2C!1?!q`=J1`vU)7M&h|+qQ_wwRYswya)V)Qd)nf6fSdID&+5Tjgi z7qp7LgJ;A!yk*1_6cTx8vWR$<_vs_b83bGf1c6xX{V^z7*Rw_2O7z1D1%5FSi2Y|B>=5k7VzIqr#{9v( zdl{~WU{o-a|p5rm^uieWrrcC%Pp<4DlO~S`eq^ 
z6w-r%Av-~g2SY?ZN=lS6;oq^2YM@b&T9hZvCXcm>pFKNpXHi8Tc21;>g%`QEq832K z(#6y2arssk&sXo>LBN}|_J*j`Mk5+5nMt6Srrf+dVQ?2w1~?wP@>_3&<@J3`{EQA< zY1M=Q@DhnuCXXE;szem;7S#t1BGO*uBj6^G*RUJlAsu72gkF}d$7nUi`McilP^gb; z?w%4sQjp_Ts-h%n&>AtKg+lXRgz&(OkcSf_aVc4pau7wo#%}9%9H{-+kt2ork#`n7 zsj7ii+ape1vfXD+ybafI-*$Q1m&>X340bK_~VipE6 zdD3h4^RWd@g8VM>0*vn{{y1-VJh2x3fTM39DH$99pW!FCfKWFjWJA3Vj1qSfB^*U_ z<7?aY16TnT8vIPdX>z)tp|rai4+0xeicQlH@21fz|63F-s+{mYv8KKKZu|8AE7lAi zoj}x2SvhUWpbcFU?+AUMn5)H+(xNr{@BqBPf#Tz#_Kt#wiq~WcBtE9y5ZY|w(0A#A zIG|klZ&kVC=g$R&0DV7k^DKgYunI_^dNik@mJa^pc6MnDC_wr3zh5{0J-6X^z&8Ui zQKjIGgqs#6+_aqDXFSI3B#5Kebx%o}i9qK6$YFaE6PDid7R!F78BgB@t*KxN?*s&{>9)bj`dcLctG6$Z49Q7!MG zjWO7o9OmH6+`+1h|iZHLPC){o!sd7NULd(h{G zX#s~ixce^g_Pzv3=;Tn}8?`N-5L(^cn-nc^lo-p{Tg-Li&c)mg8A^cdiP0l(S0B+Ht!$onh55Qh~jAvL7dW4Ou+X-*%djGZ^?U|DH!qY5WJsK?#{J&~L7Rpf3>lSTk4#{)Ir zt{jC$J1H(DiW1~k#*9h2SNr0^Y7W~75}h`n=O$R|BP3LBV5*SYsk@+`~tYNP!aXEMiBR z5M=SS?N|_}A>Rq=>$kuSXlps^p!sV)Z4@S1X=~8$&ky?swJMDMH7uS&fhRr=(#P(0 z_#=7-q=$xvZTE_NMybX-s>i)+cv;C1Lyn?x;>0x=xaE zgf*XM*TWdw*cJeTb71MFjN3_rEWAM>IVw;R6Qboej?WBE-4%whUb@t$P#@Dr$;RWr z4j|avT*GJT;`%zIW539{Xn(8AHd5NUYSex_97Q%k>+0~~>(8H$fi?a=zENd}Xny(j zp_c^`Y?ia2A|*(G6pFcf@kR1b)}WALV2Z8go+H_&J zBhM4BETgu?OI6NRBU*Rn>DQEADXpfG{9WHk1bC=>yy7sWNba)WGsA*UYpwVj~=E_eyi^H)B)N*QPq!9X$iqS$INeoA%tyyVu+5k_tB2 zB2(s4#*aZ#}Pq=xqs%Ua0VKbcxnORv?<>eh}e}GHS0S>Ojfxy)e zo9)_gbHQ)~`USr2x-hih`X|{`u!Gycatg8w%CuhZf{)^ntUT85tQ6gzZc_iwgcB&(?L#%Slb0mohUIwubL`IYQ(I zFJ7!zzFe)1f4K@c#$6Sm_&OwMHfw#hbJ5+Gocw~cMxrHvuX24WSZp(V)Hq~(SpDv* zst#*Mi6SkiB+xMb?eJCmQ&sQbGecYJGAR$^x+!IX#x5Nn#scA0PKxRH5Z4m5cHdz? 
zpc~nlY1vo|T>s4z*NUpg*VomVhwK1)@^xeePF;dUxQnih_EVX(OP5@AVzL*fz|9Rv z@mRb#{KcZ-@x+03xk*WC+pk^KcOWdWvYOwdJz{6cn@^BPf|{;KQFzvi?bWlV+}Ur7 zQigax^2%i&aKX&paBz^hoK@XDK*N21!OtDgn`lsVNIi1pt|a3j zz0*kRdDPdj2D@*)5t0NFx{!H~%%ACo6<%0b(*fI|FQ0#r@+8qz-CeFtU=KIFSa78t zQKeN?<8_1R+slSz!O7nc1-u_stWx)|%FA%oS69Rv`>NO}d>aOI$fCUtf5f5lu!8EYE*MnE>N3fKfrxdcYR%SK-rkL{ zt{SPUm+aPrS818*`1w&!#zo=L&RA!Wd@4A%%wM<{{oThM&a_O`w)VLf^2_S|i#;_rv@5*4;ZU z_U;^WBSl7L$KSF*&F4es{Y@HGkTKF$A+TI5xyQJ#DaXY>n+}{xzpAxy--hksmv(_4 z^I~t{>VwfHn)#0hHhu*hdcyVMVUMIDCb+TU5(A`#p(^GH1#D;>(2W{mKX z;UU9&dko!T6Yy=63d}sU4A7=*#y# zW*xKmdPvUt)L)B3`uV+HHv5zDa38_r7H=8%l|i(T69-OU)@Z}~_mEizst-84Idia0 zRlfv1D*8o%-TEK(>zbE;Y-@NInM|wcx5dj+VjjhJ-!mRB-^Vi{G_1*37^GdI52iZ!!0namaUr+RwFPt#q%cjZ?gJzK7?t zh?d5pyJ2yY4@M7aSY3U)yXg9m^j^ndAz6(jh$_1GL+}#e+Gm_RmoI-_Sy|++510WB zaS}u=g&?)19n-IHuW_RoZjReFsX^s6uOH$wSWuekI#jmTB7F+g6F70<*_t2q4 zJR&h*11u|~zRBi%+Oj;wICRt8z-v3L>H`NR z_3ggBs`;R0;53_YS5g|Jt;?5;-sYSrdg%%U&)Yp_ zymfZ=I9NMLxpT^s%p>Ez()h0|pIJCSVD?h=UH|I3Y|rhX$w#g%7*2h_4|m);WbWd~ z67$dVQrxc>C~YHn|NE zB_*-BQ)Yw&sN|pA>arl@g<4&i^WM0DUo7KJUmjNjG64N@q9545$hfSP$&>S@E!N7HtQ+6{gPAB#j zbiWx#U631yh)Q!D8*($e`H6jB+fzF?rkw4VU*Wj_Me6ATNe$&2104l zm^QlPO5&ggN?CCyrz(pAUKX96?-3UAq1bksfAWa@@zX>F14AcoA0#))Y2t#~u}u-f z0~5lxJaQU*Y^3@=-TC6J<=YN9mq+AgpIfc9W5IYyv+p}{;&WbpF!@&3<73JA2|tU* zpWKuk?W}4-*`-5?4NAP4iB>VS7{;KX4s)Hm`g}w`3pN z16`?r&z?;a1+})g!5TWbi4vm7xu9h5FpIo}$Jyx;59U@__kmRAsqDj&R4?6MgwNiWc7^z&&zQ3Dgl=M zbbG`-WMg=xeqT$A3wS9#DP$?Wg%tR2eo-+$z}>BEga=1aE;lEqx*|3$%VCO`>htd@ zQ@VOog>8E>M{mX&3mL~PTK!9N+ts5uUWnsRo7RCGm}hR{b4gJ{#KbLv5wvcH-l*6)2TKef-4D}xj2Y|r7a z_jfgtSuD5LV6B7hdz%-#x8*lqx)a_N*$fNaV)f(EhmKz-7{#O&6?L5MRuB#%V|!sD z??|db>jK&Eodv_oEaVIIa~>XIa`OICnDywB<(@p*&u`GSLptT}(G;0x=Gk$t<%sFv zN8d7+ri|0|u&pdBQ&*H8FL4tXsj{XzXeXqEq#7_%1Xn6Pqo|l2VSSJW5%PMT8enxc1nXI6;NxMu^U@_Vo!Pf<-GV}-HAXq_Mm5CmvEN^JU$*RB#&>H; zGxr)JJ1JfEEnjBVHD}4ZiF0O74+>CM>ZCKIPhFGs9DA)v0hmuBkn%BiybKe4b2gXA zrd%C0f#T(-x;_vW(1Br?v^ z$IS|`E_V9Wt4u}w-Joha%Py4%yo3WLD_jhHsk~} 
z?VhGTiwKI?h}XfZV=DiqESGZYpnX}4$O@c0=I%y?sOCCJMbAj-#d(_yJSV>QE*0j- zQj>alwBya!j>B<^J5TIABv*X%ru68J<^){0LFmFSo<5Dd{8IDUH~HvCPng1y7#BCN zah~EomdBbFZ`$V~nVqHKX96Hlv?-m4_tX*fH6~QEX~%3b6j}5Ac?~RddhgBQiMTla zl#4EBpVzhcF6qzY6dQN+toy`p$MU`HZ=zt7c;~w96ux+o6Ge}B+7r;5K3XiU|B`rv z0E3I>XgkqVyyMXG>ODW{$W59i<2-MX!*4Wgb^7en+37$HCp=~}K#;$>+}uk$mQx8t zKMGdNkw;5ZBCUw6?-aiJEz_{H$#_`#d#P0`vt9OB1EXK8 z_5PHS(*Ugjr)U>`>uvq5dArM$L!%N$m(IIYS}OnLhw?bS%wDy$aHCT19(z=Zyn6RJ za~rhx*s&UiZ^KsT_IlAN^eSHgH+k(uhfSi7eVx{r-o;YmzkQ!PeX6=hRwQ@nY}4xe z(x2P>6VLCbvzl^i>H6`_g9DaPHUmxhOPli8t@CvBvO-{IKYH?HY_oP}HSxzr6PDD5 zyj5;$TjJShYJYv42EaV@>A7M+KS#MLM6R4`eow7)~cUaf6KPk8X6P^l>g>t4U!vT@nG zd1C3t^Scgn8W1E*2w1VAcs3)n^c^cKI8Wd9XS`F-nRN+(12oSGP;@&wb zpRXBlkUMNj0j1_e&G+=_DgESAZd&K>z^l9&+w@g$%=3^wU9Rj2p7iBf)YDEI6pL?t zDSNYly()cdy#1PT-R7O=X;8qEY5(acl3?6u=957~huRtez}fxoI(i!I-#rhUmQEJ&5iQw zRRYav3FLX34f>pdMo7HO?AMVaL&q)GpTcG~$-z{UOin70+LzfoU^113l;e`04eJc& zXwAF>_ji!rD8A|C%W6YUFqX^mS(oO)kGqy|`L)ho=|p0>_^+YqJV2A3y zgC`b;Xa?$#UIAC6UVdCwrf{ztq-Xa>2jB92uyA%44x0Bvd#UiN4e7;V7yFw%IGANx zn)-i5HZ*I}jbM2}V@aP&jGXgVWvkH+wdo_5WNcPnP{#Tdy%>W z3|$`hVqRW?f6mp1JDZnjuD&n_mv7ovD@a)SO#_rHhR4esQ+`#UoOUA3*vNN3Y)eE1 zt)+DBVr-+GrUt_HhAF`DaOxHDHEwasFquLa`OP^cq_i$tl0RiTtO@a(JCeAwQg*))y`u9Gq4(KglvSs_Mp}|l$b|_nrfNmo zkr(mZAXr5N$z8r6gy+v|$aGUb&#J67UnlF>-RDAnYO2@?iKAIrS8Aux?^=4B(Oz60 z&FQZpiR4|p*fqd%fT&@-PRw*?XM(EK5#~)#FP}Q4FcO(9jr&!vVyg6wCF?>DU)_b> z-eey9&!#f}Fp61^9#s!I2#XFq%+^Oa{kZyP=F+cIsw;SqxmVmhJ;N3il$z!fcRZ^R zZ~9BiA!qAbRewI*KQH{_!7Hztn|u|9tR6jTlzFXq@GMwK6DLhltIJ>r^)-*w$p#26 ztQWA%O`5Y~? 
zmsg(p20sH*3=^o0prS>JelzvDG(vQzKWJ~EMD>+N!G4#>%b$~t_B9LGqE!~&>pZL| zI+~gp2r^cJdVzj~;bsP3o%&mH=gtsaU2BtWHNU%F_C5_9MGN?xLkf{oEK^Uue^F}C zakRK-W8wOB>r%%Q!*$Eut}+PWg}glFHN*YG{wITj;_~xDuXYZ|8rC6 zVlFG&^e?pk`o%s}WVi=Zd7^Ihenk#Nk_VerH3QNh&<0TCvp7+-)0|((|l=` zi{@0q&#O`y)KE^wzwETxS|M_F+LB?F60&>smk2pPtzaCT zV<^dJ2ICFcTx3W0kD0_Im7#+NucrwWpN9fNz_hYon=$lloCcMKjYL+|5dOs@(VnCe zLvt-offq`h{A5b3V?7OQmcvU(S$37d&9`aVaPtHWSsA4OXRdo0NKnUae_X~M>S2N~2atZVERd<|p z4467+&QpM>d2v9X4J*U@>$)74cr_6r&mVJAk}^)7JjuUnZ1O^z26&V3>(#4}DSbu- z^)=<+tikHizkIu-#G-0c-gQtq!=!-+&t*~$MmXy3#Cd0`+L4*e8Mr#~)-)5Bv#8xhgPz62eC9YI$qmj9{sHk$Xfbqi ztNfl)CdA%|2flWNvjDj$FC^U!h;B?i zVcpdV73TezUb5&d=anrJJf-fX^0?QBWzMfn%OjKxYx@uUP z)az`a+9*t@e*bp`CdSW<)FpCTKsflFLva_Y#M(WW~SyGafnOUHJyP!aHtk)s0 zs^VfPorlT$wdV%*kG5l|4T=NsWbw;gx^&^>gK zcQRC5E8J~KCUr?Qe-H70arJ#G7?~VC5aUyDR@i*S;+qZ&!h9ViHpau^b zWbv+J<~;_#771s^|LT^`@CP!_^&VThos*ZwDLi}_g%Vmnn4vx*-htY9zHZ5j7k$61 z@96%Bt3casS%TQC4C}#riG!Jo8ArL((Xo^X^2H?*o$!Wy1*d=(vNX7#?~|Pj?($9b z6gw5XLe02%*<&^XW@(Yfu-IFqe6(=;&~#&N9?Z_YpC^~O=hhp9XW-ll(&$Wm;I(n% zSR`}4e6i`>Upd(lO=zpoFTV;WR5iu-

%$KK99LqKYcxeyZE0K z{g~;#NB&o>$igf%xc33mu&FpsPM(&&ZPy~f-1KKJzSgm5O)aA~CSG3QhA^gTbIC?K zUx%=&*MgFnPM>D!dqryY>R&gEPan2ZVd{~VO_@Txkk68997>X9-)l&EgJaC(ek947 zT`il2MHg+z8S2nEdi$pfeUdA792<8x>+`=Vp26C2qjsmHd~Vk1_9vT}y_bDV3YZ!+ zAaT)gg=4ENer~b6`DqEq-q_tIsUVIXjkYi)JpKdZ7ZnB6YT_%R?Q-9ZmwYS9E*nvo zk~z(q#Y|Z?LXh!%{93F}r1c-h7YR(44DGwQKsADFc;X`N9!FcB>MM= zG}lSqe2%Tz)pBi43QLluT36+)lRQ)}@!stv(-NxC4|$FK@pHkL>T`eV0FDn{V8QfJ z;MotRR#8y_)Wk3Z5GBr-v`pG>6DQ0KZ=ZL2gh!4ej?&kziEJcy_S3bQ0$0Kz&$3zy zL3M;h<={HLRV!0_wr^LPspQqLVx%Y>x%UU^zUvy=;FHH%ZS&<1+;18l7^yfb$+%R* z^a7j|QVzZZ9592R3-W|8x9=mIvS9GMeQg&%OXieVZJ+7+WLvw_Tk}MP6Dr)2%`$xz zJ19a@7;Wb`929Z*aE@m7_rsNii>1Eq%hnn4s%3|?%J{x7Tu~yXIY)nIaAuHH;ths|Z1?$YCL{jYG7qT1WGAnKoL_w{@9z^RDX%uXlqYO2xgp*ClGqiVj?m*eh?TlD?9EV*4;9lV!&v9~K^X zDyg@0()!68hdi{M_)%+Yg3R5tA-gVa_}KZYe#z~W<^zLLEzN&!IU~DJv-^SLMK3=I znNEsmaOBLx-E1Utn{pmK4^dW$pZoJ_tC(g%dDz}zm;Nzwa4KyZHeGU(M4g#TqL!YL zv?T@e+j&ckM`w2TRj^Lb2sZB>zFoC&<073#7ej^33jglUy>=TRI(Oicp~0_fi_esK zS~xmpSWkZ_cQCWB$Q`vYfXE_k97)kh0U#y0OOU5cA8(`*cPvzVjCNSGMd^~aO-f2` z2eMN857O!@At!sVd$Cx-Seai=YKC2QZE<(&XKn62acE+=$)q@Ttxyk@NU9ZXT-iV1 zb&1#&Tkjah{=NG4YEkSFm(r%7sOazc&nhOB1browBHIJ8rX6pF%2nWxug!V0!o&*3 zTX-=BE{ghJH=#hM&wSV25yL%#B^&qpEnFV)`Bt%^YCw-TeX zrQXT=pWWRMwXIrrb4lSbbC+hRAM*?!i=zEvMk^N0Z#FXkPuPy4j)UH*-@xLRK2OU&%!QN^J#wh3-`i``ORu8~kp zJg>3r-ol#&`%CnO;|8tLb|_0}TaQ0Sr|4H`#<(-1hh34~AK~}CIPt)Ypn_^~%cX_Z zpP%;2J)dE@^mbUx_sDgspKrZO$`g|(i2XSb)wbuAt3s3PpW4IoYV15ejA1-TT<@n7y=HM$A0( z12Qv`j>SYKAyYx?L-#&i!8wP+@;`Kp^3=Hn`A3{A@mb9Nq396W_yJ<8 zMbsWn!fK`{`|Q}ca}M?)8na$xxGKE}v6dcTGbQ%TtI2QG9J=i4?sy1@uO6Xl0`2DM_3!nSs z;o*A|0F04Ln3%1h;P5!#Xp%s3rC;gh=GNTJQ={LFXV0EhRjmX-%8nfOpD*Ay;O||X zIqmOzrgr+`#{lwv73*TsanCu8^A%dN>8X0Jkf2h(mU}12qosAZ z5)u>=(t+^++g6Vt>C$n0Gg!-9jWf z*8T6l|NJ;Z$9`^0SW~B=>{Rws{N~M0Ic_EUOx8a1%02CrF5kOPAI3C%MKQTQq@|1} zK&p%G?G?ulKvdb|-pb0gbj09s`r(80%3gmzO61q)f4S{u=a}T7_PrA+23aF18*mt8 z6lhKMN@<*u79(bWFZ=q(h=;u3IG^wHgtlOoI~fbw>o)wh>vGb}ZC~uQD_UWws7<&4qDW$=QP7P9dk=$I=Puyp zgUl(&7+atp4Y&68IKJcTWTYW;@?SNz 
z7u<$yuPHZzSj*gd@F1aY(6y)=#j3~o3>yT8P;m|H#PwLN6Y=kWvK|x|aF}Fc|Q$a_`0cqN-jGWacSe{y<1S zksi}dw!QY?2Mg98s}Wg;K8jS7#_J%RcoyH%nD|>?2l2ol7r`#(Z(Y2WA4*%>K~M@&cfy4A2)~3mMfdJGaM`M;5QL~e z!HLZ#cHvO*SxtTYEi&>y{oPGD&Pwb79;%1Ie>Z=AxBq4RS8H(xVP*qY49Fbj7#I{Z z(LSb-B~t@qO;i!=0_#iV{>1eRBSz;51~Dj2k;x1k=hJ0pzqekEzHMd>|9RM9D*wyn zwS`jHv$hh`KK=JxnfDkTi|TCH#njqVPDm!h{o~^D6U~!g=Gyf5%X)1_RnP#$Vnog9 z!!`$8X89d8`x9d_PMx~Xq#z$3wLC-4-v?CD0s0$$$6q#z-+b)tZ_)ZCuFt+HcdD7| z@vz{;lUaB4*8Nj?qkW$DQli-|r#y$nCTgC?mw9-`Ykp`?8M3=+H*y1sqFYi%m80~p zn%=sz&bxDAOsVqo)?bU}7ws*&ykK#ZZgk8zd;Q3=EbSRjStAKj}ngVLnv)@~q3bBm-1_x$6CW5I>#4|02PP z3q68t;6&Q}-vY2X+X*&1Gk(}Zm7mjIUn^CCljDCcB4eg)5`x=PoA(DVJVUT#E+&- z4|pFFrITtqXXZ?^xpSa@V9nST2r&Et4v@DbS9loN3tu4YSGZ76{mhY#_oS62ShOYPYe2ZVS;Fbb_k*hBDs(9jT z58;hRjaG%k7v1gk2V+1@BMWEjzG^(yJVSPejPEcrhj2_xTt}uerM1lddm?@V8q%<$!Xd=|Wy`IN8jwpyr%ELH`{5kXH zeSf<4V?viq1Z0@!Wh=}1ZB5-whP(}zfuu43ETkfDJga#&???SvMk@V;3pF$O^9A@W z-O{<=^tkF(h9JqNtlo4G-n+DTYy@M{9=n<|(o4Rtj&mT%9#nD`COJ+{yWhtg5XX3* z=7x!od~mk>kJ!TncNYLKEMBa&H_>tC%t6T&6z()#W@=#f#dEEZ{u2|PC;UIxz7XTW z%3HUN+(=|VX&*^pV#Z$2Ud-hnBnG5ZE3;DB`)F6TsF(y}T>;ePW!dEzqT5=+1r~4& z)PO4-sH9XGFsWH$``a0bn~P4Ef}F9G|MQ^-mi*Z^U;c70tfN`(eRiOKAk)*a++0*x z2rc_iy4UQ3Gqn+moy9?bz60Vdy+hp7&r|H|{zVzeS&)x!C$^rY*RF+e6AUOx21kH+Ns~C^ z6(76)$?0bH{_|j6{`+7b(z5m@Zv}iZ1vNk){RfQ`7tn=>p9r_rhMPlZ7u?I4%f{J~ zJ?Gy8c5~-`qhllV%e}cszvE(KSNr+78>uzwGumO~$oNyKR9Kl;U%!|%S(tX5txTmP z5=q(p-}kn0u6f$FeG61|b#rk4M9Y2i=2n$E%DQe{n0;(y+t``Y4{;`)jm!#NK**CZ^00jxTWkLyE)Y&)lAlK;p!S`cHv0^bwU7ELe3)|%<-M}$`Vd-Q1r;dORAaJWr^`vojzZ}- zLn{fz>ZO+QVHBeHw7KanY#ft0JD*NbO!rae2qA0q=(ANaNP`gezNWvh7MlUssRav6 zhYo#-W5fJsFP*m5NoM`79gYS}4ozXh^x)XZrCj?8h@XIrR7xB17FSY1~?= zPyS&_69YX+!;;F@v!2dB4Hi1S5pXoLu8tkw3_^JVE{vuwjDxqt#0dV9gZGD!uRn~A zW?EV!sTV&|!c6kyNx|_Hgc;nFu=As>?Q`7*GeK*~Yu3@}nx=ZbtC?$M@8neRx)o>W zXs=+8_3Qu4m!e&o8YW=)-)j+7hgP5Yn~B7u<6~6RW|h#JRaJAfduu$7+LB;7X%ghr zmpVDTe9v<^fHu@l6P}DPKEjP>szW2wA&$>E8H`=^#XXf%bdm)XKO%2oY&*?OueCHa z1z(@!1?z*}pL@`5;@&ubgbLXy4gJ+z5esnhSft zfFO%|Q*_8|q+inJGlJWe*Wc= 
zM55->bo*ssQi}7ZAv-NEPb_nXG(s(^@mNbkBO>p%tW+>n3z}dddLu<;Y}cEnrjG1H z9&IXyl)g6TQ8siIKega$T>b7&TZ`)Gsat~m2O{TG_9^Yew9vh*um9Y(b{}Hd?L%gz zIMI5py@{+Zuw$>qGyC%YkF7U>>v`SU|G(Jg5E7wM+6fV5Oo|4PP$@zsNs$IahSFps zL(+tV=0X{hqLSJYr8F5UGNl2bfsFNkf8w0ydH%28_jS%b4sGAhaNp}*>sr@!t=!R! zBn$|^T!u;+%y~~y?ta)-2M>a2vxVN@etMOP7Atew?B#v``ty3e5BhWRMcJ!{or{VZ z5gz@YGgS-z0+MTd+TLY}E$Fr96=&a{kid-a+R6x^>aJywi`FR zm^Ly~<`6%hxdO(S@H8RHS_E!CQx1`nh2TEw{K9QkwATR z)`Dz-h|=@5|5VgD-=}C>gOlAxr^S%sc>EJ_&f(?5*Q6u^pT##pZgBTMFaCk-pBL{tKFOTceJHpZ+v4caCC0|wC%mS= zI{yp6k!O9nBYc@i=uxTjDb>P8aIpkR=ySp>3Nk@mXQTFg=xGiea&~kS_9Ws2_wX)q zp#_3m6%j^ z85*YNmS&ONF^?vi2^h_lWx&wr(xDcQ5znRGrzs z(;&|#{1M5_`QpwyN2sb|C64p{@ASAGZYNHq?a$kPJ?}UOI-=#Sdq_=%$c>~pwF~*$ z*I|VM^?)?>*u3~MC~&YuYkdCBPsO~2@diD5_fEca$$Ca%L?)*ePM=ZU*S9ymB94&7 z#GOCCKzKQ}Bd(^&I3;Rd>?N#Jzg-IzNVoi0bQ zFveaOo4K{2!fKJ(8-z)cDv@+EbZj`2E40-M+q> zb`B$czYS6L$k`p0xF|acI)gC5eNn%_MFxHXthnVmA`Ie#535Oba@Hn8bu|1eOi-8{8Pf+&CRX2SA2>w?BrOALJge$-7FB*&G231?{I^Y@CqEu!TV<_9x2^2CYxwXZOF+5aao)NNh)AJo%Fe@_SF zNaV8<@=Mv~N(BXleY)}>E0l^mXHDQ>%*&grqM|ZBv|#J8371Ao9H5VkN0%^*5e;ml zn*GAa_ggXh;`l<$y%fr|p^NhP@gG9Mb7sAK^=cEF2DT*;2@*3#zs<6_e+zS9N05vkA+cW-T6 zkyz|7XZ|N`kGl8;aqSw_Wwi~QYiiHMX8+#PlvgM#4$u>CX?bs<$n;V0UQ|J2R8&Ie zD)5FV2th0a5`4SQswn30QBOn2chiX2ySKZiCUgN{l7`Q1h=-aUE?qfVqSt<1kY$QF zww$^0ha4^8@~!t(l82(5qC(2vDr^$(j>48-Y`JAIc_d@7*{BkF^iWB%iSU|jbtAHK zXs_YX@`lt7oY1eMaldAU{E(mQLCEQX_KD(5@G8?8b42{f!|7MeMi15>A&za>pCtWf zzuzh5pXy-NI69o@u}XD(IB#fEqlU>;g0v z%Z_rcU=mjV@;O(5agG}{Z;su(5$-m(_q+77UfU6W&W!qV7Pk!SKpO&!VA9$FRM*z( z_j}E{C$;{a3Ak?C_MClZYpXydZ=`Q$&FGCPgvu*iGMO^Vefz{HB?A6xYDS#= zmytHG@aimvUse(jla4Bf2jDZi@Bb+VKm8SSq9*_86x*nUix#=yCIwIVvuKmzhbSm|KP!=7})dEmHj*Y^f%FV;uQREvb3;~Zxb#0 zUH|>3u#Zxi74>s%*9men&*Fh~Z zA>@=~LSSchGvs|PV2J?AGiHIaJ&K}_9~Vs(5$h)U3x$04YaJo5|M$UaNRmY`(CAS# zvjqNIw6$2T;Qe+xIQx9wn$T?4UYn;Z6FucW}zNjM#N}@8;)6>pu|2U>F|;K9HKv zLhIG9-{brDDL8#DaCaD2Oc*(GHK9QmT+PA4gKRNWk`P7_qqGz3KMV~I0-f{w8l5{< zl=hM(M-3I>@KUkIZ{A4K2SUIFSX*}>yZ^oMmRVztFwrtFO+auuR`v97Cpadgkx)5V 
zg;CwfGdQwV0akMqIXWtLb6=H>?($E*gzX3U(lv|m!ZktQSd1%dc<%g)LI01$L8!QW zj(6Pqe=k6ZStd%#%5nO+Ah~}J=;^uF-P9BVi`WTcx^zfruUI$19T@6;9QNf<=XT9Apvfn2J9(HFb(`qf+q? z$8`WcUDXDUQdSlldy%)BZRGpEVLl%@jG4PB2~ou?HoOhKXSP<6AF;f`Camf#yxIP|2^wmyc$?lkA)z;M z^G59cU{C&jvr+9GjjI1(f!1fN?J7B{GhtB|*Jfm7+|OCckSd-X5w2vaU_rx~C(L<9 zwm@A(cL89kq?M_3q%6XG9!faIkaVBloEG=_Mrn$#y6p3&B0BF}>h^^d8pF<`keGf23pW!UpW0~5yQfCaFWCajYZ zlPl0moH~u-HddU67+Xee38^+kG5*N>_+-6ycTa{|1FYa8QIyhrf8FJ zewFI_Q)%Rdtb!E5vE#` zWP?EhpaJw=mhLa=t-h$~^kKhUS5~urhwOteOp_pE^d(wYSQNLc(BQW%|LkZnYmw3{Q-yxle{l%Xas53I+ue`7`G71!yUI$v%QuzG2mWs;9RjZFMG*WZ$U2o%LhQU(nov9xNLe$!}9aXU>rW|dVTdgP{k z(yeP={qI~Wz3ZR>17a9!%Ucxg_bV!G!8C^NDpGOVu|wD#?K)r!Mjfhd{&yQlnxSI| zh$qtAva*z*k#xY>!t{;5pUaF=IshUL zwjO?G61{%LdBoE7PZ}_YKl=lk&uWh=y27U)vjJ5slqRRbj?sFm*VR^G35aq)heaGDZgjjHhV zX{15arP$I5jxq%WW1+DsD<5na(&>T3H~aG6;3+-YUH|+;@W&F1l!hT2P~L>IdSd4@ z((z`RYL(6sj z!|BYLNUT}CTBq1)VE_KnXU~$QiyU4u=l45UZGOcU=yk#}0@s2_4eFQ$d|KGKYe_8X zB;=m>OK|#qqG=BwnnnWnLsgZgni@R8V-2EC500CTA*jZjaH1SRWCHR?6TaiD_3>Ja zQO%k)dgJ|jq6MP~&0iFldxpzg`@1gkX%ooJ*c11wMC3qxP_LW=aJF|ZU3g$upNelZ z>4oWsE3W}agc*Jb@cy})6pP3B{&u$sKE!EiRlv>b=g<4M?InZT&f_N)2~e0KdPsOC zV1nG*s)hm61qAeacA>dr2XX~Nc0+++IM3he?Y?(jBTC`sx5l!2M*IQa7ws&}>gg9mdz5ePv2|>9i>Y`u;NUt7$a0$J}?bqo|;FNx- zA&Nh2>u3w$HZavzbs0&rx(-bgnAKTXT1OQ`b$h^Y{D)7!938#WRk@61xoek>mlt)z zlod&x#B_19U|{Jv?BXkmUT#2que$z`@{*X&Lu~PyLHv{v_I|>oNfK926S!x6q*UPH zUrk9dmdJgx31~uawn*K@gZZlRdu@rW0}(!I5HHj+&n`kybuZnCx&-|JH6aK3rr6|J zBR(}XF|pQjm|C_GEeJ*+Y{DKY!L;I=S;u5mU)Z6o-qxZn4+#E|J3ge1UkMeS=#QF^ z<@0O!pzNr}RHq>!%?gq-N9HOB1uo#mB{ZO-?7$}0!E3xOOcEF+o}8YJQZ0}?^nad| zj_I;xyb^buy!eMi%pcko{98G}Jy3`5yk=1WTU?$zZ{O z7|?rgUNl*W@5gXZDJkVqT3~GlbXD89ebKb7T&YoSEDug1Jq;}gN5zY$TX|ogwHMyM zm*0eH`hP$!E>AAaRt%Y^FaSL~(Cgkor|!&UQB%GEv&}X>?B~~e=2bek46&=f^Qlv} zEUww5;!6FhWu1AN1NQ4i8(nX_JxsvSwC;49ATEq$CUQ8hTZd?0LT^jhqf^n*6B)S5 z!D!t>HtPAl+*c^XfA_Y){@}R0{g^iGH~wYW_JLqUONum&Qv>leP!So>_o*J-k@Zf~ zF_S4Y*RB;nA;8c7dqY<+MO;HBM_~pGXWKin*u}wt<40bL-Hdo_Te3P@!4aZQUm<0+ 
z4z}99(=C3^>E55VYhc0$ejU=TOCs~-=3hgsfA0d`B@`+er)0wP{YWZw<)aIj?~V}> zuYFhUv`)UNHDZ43VTkic!EuRn-h=-Hl$YR}pbI}R=*@d|kWtZ!Asr_Q$Q=^av);nF zrLoZ^^YbI7k02pk(7J}?@=TYG^*<{M$b7_fSsGx3JElS_1he$hMVXoN4ff2yxf$mq z?2+xHxn6;5w65GjTm*mKkxq-wrc`LFKLXG(KN$P&&X^@ew^I8FW=A zD`cu2D!sp_cjEg?OB)kTN02P>X>M&D^`n!|9ri^S64hhJhJe)u1(jA+U5JjJz3>pO zG52Ap{JgX>ZhcSoDP(=)!`ji}ez~rl!VDjASNEb(B4jKxJ`|(GDd6PPus8GWn@!N1 z>tw^m$*NC2dSSDu+ZDijg#W?^JK6YnM*M07lKk*nfCZ?`FV?>ee250t)3f~6KbLDrz`*VW9TE> zg6x>nr}f<%>I+;h@3A-`U@PT1(xi|04CgDay=1w&E z78tE)m!dQA?a>cE>K};Dl-KKblDZAzA0&YUQ}{o^bWbKR05v-~-8;8puD|Z>8s1Jz z`#;~#h}LdOyAJG)T4$0SoUM89_Lz<=oFZ1=C~$B(x@rUW3qByKVTBZCix-Ey-}`>? zI_n)#QD=v0M!Hb5{(6`wS2=Hic5vq~|KVje(I?cA@0&6YS^u=_w2da?LCXh`YH;)5JR~V(5ki z(#t~=rdO*bHjk9*)$nsI&vmHw8Duz0ZH>LUHo@aZBI%BDVppBKVf47s0p23Fq;wlGmuQItIzeDk_ouw!)k-L=vByY|Vr zm_>hFP;b(@dAXA1myn3gZ{^&yUF~<8wC*%H-oJjh{Z^flj0TK;{9L7m4H*&`H=H%X zR|KD0P5%^`0w3;6CO_nkVR`-aqQe6)D?ybZcnlR8TPqFRqWxG6S|Jx1mniqp&sfmtB>h?rezBDH|FkM4Ee4oxz)i@iu>FT^b*Yh?_q0UvkL|aO# zsuo_g>cL3ksmqV`8Par8Y>37=zsPc5xd6+WaD-&+c0{!YO5fH!^;y+-)cq~}OFh%a z^^84{b^OA_^f=R=CFlFr3?I@mVrKqC8_6B!-3BBF>3C(Hm~d>t;HHz4)(KjumKOeb zxI(R3zh=c>pJ=QFb9~5r8MAE9+qBvUkVz1u_n(cS9OR1e1geckU-dPe9k)m4gVpvY zeKU$xqLU5JWGP;@v07BRKsvo|eUn+ton`jvvYp>=?4}ZbU|&SfgEkMWT^FA&DDPY6 zs8I7dXH&3@WVeLPXg=YS%+=GiuVuDu@H^$#3n$3-6N$OUT|H2KKygvs*`}ff````t z-&Dy={3*8sM=xl7@;d+Qk6{bbBXgJ3KX|kz*mJ6y#TWT532JO-_5S(^ zzYL33R$tMZ8j-*G+}W<8370y`KWEn6nfY+4|5t*$uHVpEc^aHi&{XY!U_4hB-ws|8gU{~8K9e!-{AXeERhA+P!Q zb)#nF^IyFN1I_Wri>m#7I-P~bO#_32kd&SezbH^RQPjLV=%SLPztWmxbf1HJ+c-KJ zUM`t?eD2hO={pCg+Adrqt$X|Dv1Sj~qVs5I#P{nOjBN!JE7?D-FmAZ&X9%HtIs$t9 zo;`X~{a%PP5|QE}hD!fiXQ@hIQl}=}(f*{+Fgi*m=jW(j%-_8^&b{>h{n*<1*6%(_ zm%s~^vbI)~)Nsr`MF5JJn$-N&AJw4R=)L8sNTlH;#Z;k>`&kICvD- zx;(wM5rh7gF{VVX)YPLFJP<4sT8>Tbmw!p_aJOQl@#|iq=&0v~I_P5=1;3JHfx^TQ z?~6$si^aV2JW-s<0U3<}s=5Z>3;b~T*V6@R2a7_y%vT&Q_fkx#J!YMgn8g8^ zFBd2z6BFe_%M0@@kso5wm0nm_*F4=c$~$nOx80^)Wi5Ku^TkJ;W{zj$Kb zUD>fyRv%83h?nJAr!QMDz_+i-OV{!^IsUyczpjBQ0Vd%(f*>i#S<}4qb1{+(6^UM> 
z4^VcY<62iSSDT!&`rz_q%S!E)?DTA1Tox7Y>oSY9pzyAK!gvZ>m9b-2D<*8aaHvi# z9O$rYPA8u^iJ{|Hlu{&GSFO<>bJY35eUSUWZ`~zcAJ07D>xv`Sv(IpWo~`*IRjVxW zxzPbk`HW(|+)JQ(Sa(@FTF1L>f{(;b0c%V@NTgxqk{9t$hFoLQ@deD?3+iTAt54hh+>&mdX9;o&+k=MAsZUBs*e~(A z`5U1nXUN>j+xhuDlN=(Q*2`t_e&K1_|4*RU({ZAGpVNebz>*UZDdg*FsmZxhBU z+-*|QzjggJjA=>2=isSujWIF*Xaq|d^LU$eaYC?LzkdE>y_SbZd0L7wq9KQ={OZnb zS`M7GX3gK8T@C^O;tDd|dvr<+`+8%qT{j+9RawEaTJs}~Ra~yWplDfKd7uxrM3et#b}EUp z=!+nncZB7C-7T4xIo@itE;yfUm&t-9QyyQw+ijV14w`~{_xg-Y%@$!Ae4u@yk_7a` zi!i!uJYKS?Fy4bTh0d+i@4+ZGj1Y7Td`$&&JsiqN(|HGl2EIe4I1Z@^_uZ*2AH-%T zI~!d(xsXje_hSS-vIMibx4Eq6)y4q<*P9FN#K0ne{>V{GatTVi^0lqS*REebu-Bf# z+hPfB>W`9T(5=W%6;wvV)gtG*#vg5karzg~%JKR2S=t^v)x{4N|4h7FPB z|2lFw)-`C)5^>SH-P&@=zn-RzGOCxD!cY|D>%F^#n~phu;ewsjC9QKy12ye&mt6f; zQ8eUL2Yyl_y+_*^M-kqBNQgk6z+D;vebBIfc}dU>`OxtJ;DmuAI9j>)7V8Rq4Hydx zKR$473v-Fu@}UK379Z%a@pB+K7#G5WrImN-{oDXh2w;)lF$BzN_UwPbYx3P^5-s8) z@E!_mADID zpI&s*GuDs^cxtt1=!CdFyVAS*Mp2ZNUC&~5yP24p>N=Z0M#u*b7B^nv1Z}X^*4CC5 z{0gjRRPUTU<_KbI>`<6^E+HZDR1khNR8|kY3(*}e?bC4!WQ1Ej+DW8m%zC`qrmgrW z6CogFke)I9_v)3rs2ucfj~@@7JRmz{9;~#^0%(ZLB4D;<)t@_tdYhYc^ z-O+OtL!M=MN0xitpld@go2Nc>nb-q_F_WSZ*iDd96N!!PD7wE2zFV7i3O-AGLPH-v zc|w6pYQvA^J)A7nm2GT4!(23(na^`BT+M%t`!#n*FeA^KY5w7(@PU9_y!Fz1Mm9vf z3xk6TR>iXcPvCsQMzCMMekR4&GBO~FBuguc3W!QPo_VKb>bqxow`=Md^JGv5Ky@0C zHf_irc$UbZ?ta5k=>tm}C}rYAX16!Dw8SMl2?60vj7VkWNrPE} zO>}?+q1CUxe*eyal_ag4JiDf)|sO^RV+aO&7wGsfd*@$x-zEq^>-^rS_*XjvPL0B(bK0k}-Jr@ZZL%HSgaS zZjBSYWu}hr$y|7=z}@+A+t{-T(|smI2bK5x^p$8oYiO}T+Ij9fHWp+ZN6t~xrX6z? zLyj)A$wqb+)u{<9*e7~!hgYaDsR&Zy>C=F~%+G;`*S&l9kf1THrkyl;+9VxQcolf? 
zD($~L!rZdimpAZ4pgnZI-=IP*NATxdrXx)rNA%z*5N){t02i}Z9{9FAu&0?jeV*u6XOeKw3wRTb&b!lo*)2P`O{jG~eIW8X`F?9}Qq zcg#^bpeS^wLjWBgN*{2~-n}7J`82nm9_gJu9)#ritjLomEmp3)c=qhD;lnkYpI*Zm znc*Uv(kxO}zxQYT3KM1^G9YXj-9h4H&Dy%s&86#Oy~#*4Szs%%LxY30$HZf#H9qv+ z%a@HbbcNX^VfGRKUQ-n7zbQ@Qf1IP|&11n#R8_S()Ht%Tn~$aKfwg5`3k?j?8MYf* zKqp%;tw!Q2bcGotmLxqA?&R-BYsX*2*(K!6j5+c8rM@=@J6uS#j?hVfy-22bz}epU zwwE~&sb{%J*))#&kHh%}+8_)VZUd}E`Stv{H)TGL0yk!= zC`bM7B1_VC)+=;pco3NzUTR~-2el^uCNzwHu)(VA^ktS}2>zv3wDIv*N=o}xdPV~{#R zApEoTu!LpCF@+zXW&4B0i%3&wd-f@DHTQ}3IeCfJ3)6;O{NRVRU`SB7&$HFy7 zjS;}b>=6nl#h%{8hSxs@>W|#%2<;yFAM(RWEcYF6}9GXppOADC(13O~K;O<6?Q2O%N;a!0s#%M>Q){A69 zCo`fPot(=1He`{@F_y=J<_8UN7#bv@?u9agdT0B9Hxq;BN(vfZU3pwjZ^0IAZw&Cp z%{W>5*>ik#aR0`b{1wtSo1`aIRXm?Y@}^-kww^;MSXtU>)^iH2Upz*g%kRh-iA4xO zn$Z?9ot)uwfem#c8Qk*PXw@*l0=`ZHBq+kLDPU$6b=%BkzI0Iwm7XsFARLFoey1KUz8nQoO+TGb|Au5?)eIbR8=AGPJ z;(@=ET1{m8&suE9*~sPxq5F9K`uv#9x7ye^2(W0rN~ZAm7@_9cNhBs=lk}DKf+Kkw z6o0y((Bp-HN_dX1L+3Nd1eqU0=C^U{^oU4zpbiLDy(7e5e6jHPH=1c{(1v*hg0vR+ z1##R;<_NQ|F3$)(s4Fjc;UgNMX-@lJtyrV^>E>d8S*3c@(^+14^M`+La!AtuL z1Q4m|N$+$6lp9zT4ej2|2o@TuGJo&|?AMEQ#l~K|9@94XClq%{zgz;t`%~>nKU7J$ zNGi#bJkT-8el};L3!)K|iF8989EQ^U95Q6C%lpd;A$Z6N++$g|uI+TViFLnuLNyoG z(mvx^0(njPJ$w=W&05vO_j_ZMsP&fgBiK;_i^0RAx2YE<^W4zPW=Xsl<3_h4Y*}jM zmYb6k`*`oW@KIN5Z-5L5%Hp=~h!}%X)+*>s#;CM}V)X#lkcJM&V8R|InPmGNS-eE#k zP2yd*bVq-)k79?_xz98U8Ch9xf(4>+I#P?mRM#*@M3BAKPWZr-?STWEVQ@kW_J=~k zRkQGywx_64rZY>8NXBeYt(4Cxi>rgZ}f51f0X^i0#4CW~^zi9zC|#gxRP6ykzP< z)wEp#rq*YMN@Sv#^2~_XbHL@$aggRLD96ML6^m=uHa1NRrBWew!YOmS)3sz5#4@O1 zwFD|kCR&qC1iCr?L*t9qy;BW)kB)_K$N^GRG+tTxT(edMf-LwYeD?rx@d?Z<9=~V< z{3SCpPV#SfX^{TFTcH>K>{;gKB<>FxSaOHMs~ZOpUlCq#X1neIf&UL1rj^}~n}5kL z*yjUc4%WzJqQTxdRw zMxQ`jD#{!$61_Kho{D_QXACP@pfe919J;N_&7R|qJd#Jio;0m3JVA2Jv7hD}}$8CF`aULG z0o=(s37CJGSr9B-QhMC;g?WcT+VGyEqOv&^%gf!M90^Jci1PXehZr+V;Ay{FIaoO@ zbG?TKg@@mJxdHaOk<$B`*B?$D8zYxsQnS&u-N|$;fE~l|)JaRIzn*)xK4ZfGAKze9 zBoD1VQY26-Syoz~A1nqiV>*@x{^!i(z0ejmRnSq;K*``xiT9RzSitGahOoyL1U|<5 
z+@xz37gAHVnwXR^N)tfq3a8AKE3}@4N56B{Cnj45J0una8}j=+=(uh39ok&7ei6~( z@RCkxpr9l;7H%h`4ip!^wB_9r&B(~($KTe}d_fB|Huv0JFNt2c83Ds%fzG!2_1rFV#5|l z72s5hzL$V)@cOoF+O(V{Pg({hL<;cu+_Kn+u&EMy(nN#%I;=s}`4PnZoE(&4RZ2=q zq|Gd)u*vI>CM49e-9mzc&*&#ep$&w2*A&&#^Ig!F+i?bczD;Y_4KO|bd5D=xRrgcC zP)KR|C3e4PW7^)1dIBt-NfT51vvSKBJWhpowlZ{@T%B85KiS z#KQtGA1~?0^by#CYnWcfH-Vm|$C)cfsBhsN-B`YOF>Z_-dGBmw6xU}8o(WuSF4}fV z6R3CdH=d@Dr1P4Y;!U_TSJrfE61=+6Gjr^^TOOM?=E(TvSJsjR5vO*9c3l{$ITHaI zxfM@LkoBP|MwZua#)UGirKAolxgUw<5T+4Rm<25O9B>XlNOCu#1r%GzX-X<8-u3aJ zqzyA_-8IWHqVJv_5t6S7^@71C!K!NP@yva;G4^axHxmQD=_)N&;85rHCL z$i!EKf?j)Ugsncu|Ls#Po$n$`3k!aF$;y@OFaF)d&SF;Bgdd`Wtn><=(N-{e-X*&^ zaHVm@i81;LJU=V9H%FK+7p}hLK$<0C05f}JKD^)357n?ASyaZEL+VAOiVC3*q2WMT zGm!Qf^TFVV5GZM-Xh%<&GG!|xF5r?GsZVzGeu~oXpBrB9ElkFTQf~^&fglwA!#bmV zhuP@nX{POc&ve0PnkQUaQ-gGuIsvKCO-#dyRGy!mMWU8IT}i}!KBKLgK4S(z&_#|) zC5g+@hYY;hzS$p$LEEP5#r!!8MI#ycY%-AsJeXpQr0VV$Z{Oa1swC+LBE^KPCFbUN zPraZBata=ZAzhSR7fHC0jw~+D%d>WlR&u}$W9FQ>h7lUyRq;_r(e zK$7Sqkhn+{g{d=DRc#7~0A4wH9Whs8Y#wn1o#3dAhcy|lbp`Re!MfKZl@zsMD^S9Z z9UnU1y}h0ei9hkNvz{?z`lzXi=4!~)c+4_6i!&qSOcUgOR7cJ|;;`Vz8TVhB;=;MX z&XJk?RVMa3^OA_2VZ*357Z{szfhsX?!#i2lijxZpejK3%(lgy%mn?Y{GQ$WB3`Psr zEItrm8FD!)!Ss9ep@+5D0>sA0Z)vKYS92RaX3)B;uU@^%lMFM&E_yQBWYD`*uoFN6 zXC^H)YeF-mKMTbYU_w1ho$-qYV(QKg0Jb(Wo9y}B^+3#{i!0lw?b*+P z)~#cF(aE;IWNV*oe{$Mr*)pjC1DXH}vOYgrVfkob<|&&qZ_mZXzUH>rrxxQ|IUI&4 z%^WvDx4NcA6Mul=eeLnyrr37e8=HuYGD@0UjS`pyBNoN&-tvGEt64U zsv+<6=I#;O=t8QYZC+-%&R1@{oom{-FY|*B6MPy$tC?Pc!12HxX)LcuMAB8tGc?T~ zJ`b@PQj+-(J+jM}H$j;~TS7mGX+m9|brBO~kMAh6eI8U)d}k9ZmY9s=5}dPS>7tQz z&1kvUTlEW+>lOcUXVG8*U#nJ>srhu9?AOp3eX~t%7f%eJW-2~+t*WKco61Z;JE=4a zXUu9SVJ3;bUFuy#?Ww4#X_adU7X>02oe8@nZq!&}3zLf<>Zp5soxl=Xdcjt5q>J*i z{>(mZAlNM%U16EA70^=SZR9Cz;7kw!|1G84iZ;N){D@8B?l|`+nNhE<8i%9A<-H7J zV(Rwo9C;f1bxx{pVHCM5i7%z8*MJ}1F^Y>KhD%E?xGg6$<;qjfu-2*_u(b9oB)CR; z-2dg3K76YsW3pUqmk=viNzjeLuBhG@I;l&l*VW>Gfxq;`H+YEGtk?-U}}ptKk>6MLdo8 z;jb%Sp1I@d&YiQZt*z-hJpTZj%!?(@GNDeFRa!Uo76s?_nnJ0Xqcp}?8f1nit3X_1 
znY;*=P?RMQkyfoL^=$HwIeYx&#^8nN=j?l!Ipk@68)Xu|eDU9jcMNaLqd5akAKUHu zYE$$#V>c_zi#9^)yjUXsfxGd<#44%3w)72(h+y1oer+=nitIwmcVmH*07wyQGB~z9 z2GcQ0Da>g97MZLmnYSt;{9IztgdM>vNL>u zjugCs8vL%Zde|TxY4P924vx0At~gkwuI>a=(|P&nFD zNdoQCD9`p|+9_>PgMNt2>Kzt0?$h10=`zGlJpN=RfAcN(KV^`l`98$-SHJh0mJ)FQ zoFQ2vpj-Ur8hf^prI_(QJMXWKoks#peXAkkLY-aM4&yqtgVEGYFpq~~D9~d9wnSl# z6tDCat?C+QP+N=ITS4Ph%JPC}ltKmH7>i}Ms zXq;qN6CH4%BrD!seKqhnR=^}IUA(6$W*K3%4X+~NhEZWzn zbdRWBasHFk`x`pW@|6?l?*G|XbP$7>WJYO5`uE)-QaieYmRwQM2OiA(v!0jh7;=d2 zeO0P>USM|4^0g~c9R*25m{y{Rn#RJid7+;VYQL$txTtNvX~HAZ{fUo1>a1Dw2C1I= zV2=RfEes{qI}pR9<0`*1?)D`QreidSi45fE;@aXcwbL%mtJ@;+A6x&{yuG_-&z-x) zJLf6R7!&i4P0A9}#PiF$uDED*{!#kl^ebC4pPftj@~Fz;-PQA$W@Ua!JpU*&^^x>O zo%2dDXY%Av9QD+{8GK1WE=#NC&Cj9*+S8N=bdGG5bqhZ!7BX*cL}8d)(Mf?#JwjZ0 z`IUVqj=PMWG!*@6D0Ng-V-Gc_ib+%MPk|Gixy$bqKd}~YQ9b;EUj9_|@IzJ;v|UOs zN)CEd@5%}uuwP;7)NK?!y8}6!wJ-Y6)Im49Edbru<;z2-I3BC7y;oFJb!nAO^2I&& zLa9G={MB;D%H=I8_t#Ex54h1%Hh*^@-g!;lw@+0pn*4cXNxf@Bghx(t!cpx6)3V&1 z2WyNwxgO)_VTh!$!T>gU(W@qXAgsR!|sr_CljMYcWG6!y<(`YAQv^=Bx2*s1;}Th<@j&*7AW2enI;<=@y`x z>+m|B>DZ^v36OER;sG{HvdfUK${2NAgR1l{r!03u2||yMUSYi?;7TL8{rmBeiI@)v z@E^^_MZt=@&pdp~Cdvd3b%n^dpse2YX$E8u@!-RUADHff@K3w4et^L^_jS7i**)Mj zffrWH`Z)BLPDJkV2MTTXrko!adAsN~z5=N9LI1FS5)5Q_m<<~==ri@07mh7NIkjJA zZ+DZ*VJvhW;a;BY<;Ok<-pYHE?3w&x_u_{B}HRWPpNS?kl2S^33ji zhnK9K)biH)0sCaP_A2@U*FOLC*I#QV4OPE=D>(v5gMq>T;5|0KP>c%G2N_AqrcQg< zG&-ky=LsGB`H>g4$XMNzK8Ix_jgOvG{Y+%UR%^!| zI`eerXiIRmM)@U}r}OBGk5BgdHGF+a1&rN++I2&__%69<^$YCk#F-LZTfqX5Q;6K+ zDxl8KpB|d`)+bJhFn$;9wZG-&(Do%MuRQq~L9bl3Fv8zu-R{Q@DdeKzhKf7h99g=& zH1Scr>cqX`7-KIvXTA|6n)|=FJS8zR@fQLrey@=b*hb#w^s{uT-`B5S@phq+aq5tM zUm1-dl9F?J;3kebY3RMU>E3NWDcv{+kDom0_;nQb%CUq);o`9s&C~!eFA#T~qK}#W z37mnqPcAO>TeR~i^sWU^jzXk+x8*^+Ny6T}`t%%@oSocwkfxAerJLO6^)g%>1b)ys zr(x~hQgXYkcKD5Dmd*%qji$VjKO;-8Nd79Qh-p~R;238zy5T8U&tC9}$=fzZ{rFz1 z)t<(Y3h;@((7?s#OfwY#d>7EtNP~DRTuOvz$mvQW)mcjOwc5`+qHkg;jk5ihC)@e^ z`OQzZoH5fkXRXJ9l-I{>EB9VMwJ_YE2ZLGn)!PWE!;Z)W>z@sATv0XfKAUZXoGprG 
zHX9Osx>*FB8t1+u*Y)z7g+U%sYj}1fFJYLypv5_sF>ajTDA=REG`9P;(@g5xvF~h5 zj1Af7?m$WxSR!0y7EvV9^NZBjK$>_xqUR@mdf*uD%N$S`{Hw1V1g&7*KY51I;`{RD zrVn`$3|a%&6PJ`0PXN;C0}}_EN{?iYYgD@^5C^NT|ECf^Jh~50J9)qtS~>7 z8Q~{nx|TAhU>VU0;;iGQr>eVee|4+p2ZgIv4weiGb#-m&F6%J?L}paPc6^pU(m0Uy zY$?^3^}KZ#@(BcwZs208E7wB{n@j%5PvVP0OAfukG`v{bKZp0YQ$d zZ%H&%WXNhB&Qn!5M02>@{G^k2n%#TWGfVeW&d}l|joz}?7M%`Ndk}NGIp7l!ErV;N z|AT`(vD*GhY?b3#u`O4R=?HC8nnwa`jeNc??4A?EvMRUQa7@(-T;Hy+zukVOmQ6R* z{}dV@+$;Y5cNHsilD**N(9VBzZ6=PRWX>ds1RYC#SSqwIAYo7f_7oFyw0sE^gC=gq z?m(J^$fRK;tjzNA1WRc#j+Ne`y)QX=Gw+PnsZ{}Kp8k{&MhQJMoxj{zSy6G$M`KN= z^|?F8Fha!(q*9Q`z79WnW7wROx|NR(^PI+;U$RCF?b@lQPD9y)g=_J3VBb?%ADiNI zyk5u4XRjQ+X`c00mgI?{PwT2-`t^_l@&VNqnucIch{J^0=M<`?vYl_bc%0IUPAQL` z5;0Pnjffdc`o`DYEHCH&6)|#mgKA*j>BQ82Zl7_WmAr87oUN9hZ}EwUh^hzsyEn|} zRvDuj_rt%}0B!aV-NotF**=tMz84mWiL50%*hRgY!46Q4YJGXKy2g3#`jQ7bGN1N# zUm9;R`7=Xpo{%8Q2jQ=(fB?!ZUm@=8s^-J!fKxnMCJqM0egn%F%Le zk?-#tvceQMLbCAVcgrUHD*J+^5Fr;MJaO&Lojb$fp&DhZsli3}k}?~O6SuO&wy>gE zoAhK(^-W&Uy6%0UqoX4MKgl+K0OvS4~A#eMut zW_=VDIb&E3WtJIQa+iEmls{pTc8CXz9P zS6R~06*lPmC;v8e`?@rqT{Z)93+n*U1GN1uDz`3AQ&+zMkjGY-Um4;;sJ(`pnq<7~ z7CSg0GOD&cuJ$fH8nJ(W-UvxQVT3pSC0~Mma6o*+U;~JCo~!xm z`-zg7{hpqVZ67?I1Jro;;9h$`2oUZ7O>kZ`JkQEn)uQ%nOlRHIg-s_@)!sL2#4*~if_*zT9_(^(>w|gsW#3m<+a{PAT!sF6!eQRn16Up~ zDR`6YoBOG$KZjqZ*R?6_Cyq!irD{{o>a|}!MJrbmry!#10?+7*_=NELbhX-v>4KjK z9kv?h-mWt^mci^!EuEFDJE`eCv}usomR{%8!#@xuya|ico-@|mmH3|xrdiCJrqQM% zG|XH!O2lqIB%WF@OFlBOLbG}ebVNx%VA0RPE-ewE#VYeN4*uJuqO&E(c((RFSa}ct z=znbkXYrBhAe|~3B01)izxV9Gl9=xKbT^)cw~Qb5-1g+jlcYZPJ|vnZ!dif`#Vq8q zRmT{3#5As_HzBD&>&}2NQ z>F_G_Rcvh82HXJlXk2?|b{`fkm`Ps+EG=lv`&;stB6y$Qv~$CSh@ij9YBh4Kwb zd-JDHYge1K=*k|Sv1S>(hN|ta{e@F`gW_eqH5KpDu8~)$(;nNpjhdOQF@N5?=*N3r z(qWl?HLmJ&UpDp(EU+k41OC>5mn#g}fm0aMdjY{B$^2YvqhhznPj>I3&H8Iwkv2`a z3+|?heU8wozICf)@zbw|*GZC}dRXobByGNHtGadeULmG`{i^Wy-??}0XknQ!F43SE zv{Fc;;eQMb@9HxDUN{9pWYyhI=V1XB*Q&3V1cuwn{&NrQb88q z(i^|NR8k*%Fp1gyUHGvJ{@xE9T2+GfU%H)aHqM<>EtDbSgZF)%bR)ih#FpteO}135 
zzsS)jWo35P_3Xj75kgif(F>lRo7J+yE2OsMFQ=+z`DJURUOIdc(e0&hfJY(9VFLP- z#Ytvzk|>qEv&)=|dbukx+>+Z}J7%Tt9(s_Pn`;aoIhhkZ*hD(^X~U3k3B z8sSfAW#tKHp-sK;!%zpkq>uDE$l0Ijf4g#ii&ppDft-FA%eM-@SL8AOd7oH{yY+X; zX!_CX0Kl4L7G-(7Vmkd%sI}ZhScY7QG%zT9mtj^hMJ6TB710YF%}^BYAzbckR#k7pn*erj#CZeZY%ej9+EH`f}dnaEX zZnSppm?Jl^v*6KH!ZT)n_msoaKw-Bn$x`}E-z}TYbHH$)kmJy>y;)pI1bP18z~lvH zTObYqAd>Kf_w6pOy<7Vwf7A|6LcRI{kFXcdZEBh|=1Y&a8@Fzy_5Z+6ff7lJ4i5g6;nQmt`}Wgx;-BL5PWl4gN_zK@ z<8WXZuwp(q{hYmU@Q?|ymq-?+ZsjeHiYz5cW3IkRqx}x z*5QK`cAvy?UqRv4)AP(vzt?nXTqjY~388+*uKGsX_w}2n#!dF^D{astzp${HCOxCi z%ic%cA1GcP3CBi{vxH?fr#G1#hJPwZ5cRgG3XG7<9osOJ-d0^PDP8{TORs@urJYu0 z_TEr3*8Kiio2zH$fds%dwV=CBjX>dDCwfPdtIzy7dU}l2e(OK-M7HNTC>GwrQ+E@= zN+hRm`7Yr&s@P_ngUS38PRoaB`G*_PEM{foHlI6Z=jQHSbZp{lxB)mCH<+y2>qh-$ z1^AvCDj*~Nhq?{*9WfKe^3$NZxZqH;XTC}JtbK=s20ecJ_jfAJVbFvz9k!GG_CROv zw8=yJTs>&@WmKf9$ES5_k*egQfY<^1_Psb7y$uuKoFd29i_P9?Tt9wKUQyAG1A|Q3 zuJSC9%vGw_M&5Z!=9uJ4nq2&+&P-znJv0KrK<)9C>9#Ew`59<^<&JMWGo*fC?oWSPsy<6PiS1qWk~8!`?kiww=ZWs~_K^nM&p`Q37ukK4QM}0@ zNy)xLC(U(|ch(kU%oYkFNhcSF zS}>{%$MVZW)PuvO7Cm*^`C%=H0e>%i4E0FcLksijN@He?cp>w$Nha%PDt<&GN^?Sl+`0{3BZD zlp4MhGV0Q>f8xJ^CJ^C)5elCLKI+k<2d-16M&6XLbQP>jpW48)C0Q!{Q3piF`nYsYyP_j^{!OsK@60gGpAc+vQ6#*YH_<2Yx7*qD0u$fEnwnL zwW3Fv!;4JUTcE!6y}6s)^wvcrMSU{2`9M#*Dswo(r0ThJH70ntX71$2}Q|URX{hn z-S{_MdszILjDLVOJ`o95BNrZ^kDXE;Y~gw<5ye|Z$OU6prb@Yoj{hqYtJo#8gdWZ- zoO2}~>Zmkb#(dRT%Dr*g7wOu?z7remnuNtql}S(jpH;sg!;RQITw{Bnx7qpTPYxfp zQ%b{UDAxROZpm4v&|c5E!z6PL%-pvo+_Mm@0DGi){z!ePL%rSKFPCb{w_ki*T#vv0 zflRWm;iZ%mP3g}>ky9WJVK2vg`nQ)V9WBdaZ2 zFLAVniC;!Zk+WBzwr}vhR)2RQjEZwUb7D8r_e@Scd-3AbkUU9N$p}Xe9#@kVnUrm{^k+UDsUj*rKNlvq8zDuzU0W9u3OZf(X|1LXKu{B zXX2u|4;`v6$#pmsYe#ROH!x@MSamh`XXTWpr5P+a_@3}_4_Zyk*>wd7PhZAucSnMd zihGbdEFrL>8X2E@*K|%JJnP{UYn?x{8IxVa0&sku2D5TGL?>IGh_*XBX7uQ!t5>&e z-1xHSFJ<#fKH^PW;gvJ<*nlq&HB_ZDsOLZ&}~9p zs>?m;g!_umOO6~xibfBmKG+@bVqTALkfWYp8TA1W6Il3Csd>tBMu6}s{6ZmkoAJss z4%|8&0rv#!m)X0zC0)Nja}}_O3O>5wITko>JUwj?PHg(IrHPz@G;;G zlO~y}^p3Wq3KJYyXhwiWHmumZciHvrNxJgWx4s{;trq%X-GJGSi#$B8gNNaE_(aBi 
zPl7Or&P=Vn`UL#s<8cM|Mmie5BuSqiZxTCif@^dvG5&ZJhEInL;@|ICn^FPE26_}? z^W$Dtu@&a;p|*yTl4KMlJ88790cc0?0_xWN>J!@HUyEjZywW1IqD3Wk`C*TN($WoY z-lW*w8#-{!_9+idPHK7{_dSF$_vWiz(}jL?{+lLh-oHnUI>FBQ0@}anj+LwWGcpdQ zp-i>h^}lz|+hf%ywhj+*{48=Lkz~Hb%SFfb*GltzruKijHh)RLzfB&CT3jQIXNNCb zihc?z9jhD9q^pnDsfk4A?Gg$sJBwybz3T8V5v&Tjo6>{{8vwcm)=Rq{%?n7?tbf?y z@QMXGF1t1-+P%#4x zldNKKo=_lU<&Z!N+6>Mk0Gs4B}&!?g0z>h(>_bj|oz4mLXJb(Ej>Y4=IhU=nc(Pc;$8J|KsaSzZ4k~z($B*{=B4KhU`QZkeZnJOw%M0WLl zR`x#UeXi?$pLLx>Tebdct@~d0{TseR%-O`FM+IYwV{g20ccCC?|i3_7tpF z-52Xzc*+FhNK+sp&_DqSl1(<(i$;xe-S1h^+ogi{@*cryaV|PK`H!+5k;VirjJMux z(sZ+xF1t)dhnqgxy?9Wo(*y5hH*@#O-`9)uqU1JOT3l3BQ(Hx84QiecBmj(JfS6(W zkR-{^FCL#8g5$eNFdee3?2G{jCMnYTC_bcvf+B|;wR z^Ehdg)#Ti_?fYg@Gm{BS_J!;R>0xcxAntD&mg)J@`U5Xz zeHI(-riRSw86^Ub>d28mI-KPA8A)0bU&a=G;EMs{GAU56}IwezX7F;@0atn^r)9Ec1-&Kr+o+-#pj; zzDOscz{~2fm2SU2kK5DhMVD5DTPF$3I{yEla*#+gsOU?^RQx)Ps__tFm5-*#@d8{6 zRwrEI1tc2$;@8czv@`_!W}2Wdaoy)RmIk-uWI=>Syy)z=)%ci=>UStRycjVG4)AC7 zNR{2)&q3R#@GGcW=RDSI(-Vji4HXmOEj5=F_V+)r%&wdoAN_*Hd*70FB9o&Jgb)zt z{Ifqkf5xprt*K+)#E(g*PXo)ec6B{zoz(dKd)W5vZYwr?X0>Q+Y66s_yWviwSvfLl z)F&iW2JLgB@^EICNxXfw@96AoknCBd8MT6c5cl$6+z!jMFjMa1Pg#{ei&xz}IVmj8 zo}M5RViTnc$Z4zKRxtUxf3Fo#r&JG;Po1iP^tRzsPE+I8d3wU~Z=7Jgw&{M1?ieVk zfl=&$3$Ja8`+e$#cy^gT1n`>saso;uE$!Q0k}s&mj})ZLb}NkCsoy_o_}H=EVRRAT z_1v+9*PP+izJC=)tKr($Q|Hf*hPFZs5cYl+N_bd5Q63mQ+CoiEXc7st&dK1xp{ie` z3SM)T3XN<4epxNB{??XzklV58rDjuXX{5fgep1|Fg78cl48uY`|Gc($Rt@sX zxLLj?nk(Pf8bcr~fN#+jrcU~j7ZVV|eA5BL5Y*xH!bVcL7-wiR9P7vpOMhVCEnMuE}`KsZkm99-4Expp>($3p&ZLf59pqW5$ z@V=qym%-S|)1WGd4%x#1M8OM?8ZhZ^y0ELwk{iA2Bd183{C{!h9a-(mf& zc{e7BGNtF765i%MF#nBbtY80&Rh;{P0Mqv4^0m85w0b>E_S^d;&)J{gre)-*Y0V8@4mb zeMPk&s5YS^%QiQWHsG1oF;Kp%9E%aMC!qmK#?zeIza)kmSeS4WZe zL;>v@bi1IiuocvofHqFGyW)7`FQOXziuJLg>vKROGlXrwU$)Tz#ze*U$?3V42H$P+ zC;C9>A(?CY)YdviUq1jI0{bdupp=8NB}E!2nR-(CA;O29pbY1W0P3-3uSe`~zxgP_ z5O~hO951t8G}MNRincD-iDtWHj~c3_^$6|=b8k_W_wxMvMMx+LXP_CJL8KSdL2MDq zMoeTR&;72?5i?4J5Ojf-BEvxnjb+jYgw9qJ)M|KcJwmT6X4IMFhw9qqWX78sv$L`K( 
zfzc_*&(E)@?-=BJ)H;bylAx=NO$pf!j8g$yAK<)kTd|wYHW{Vol(|^Isb;WRkENGG z)ZnwhOmwuK%@7qDYGz(_rJBdA#`8U4kPfGm?xRyASj=)uaL-Am_)XM)?d6R+)$C_S zhXwSIRkV#)i@Ga`AFT|5T4NEL+VYvU?HVQ<%Q-m<%Uv9o1l32aMpao<KIpotk zXJEmuPGXL0VZp_t zEID{ERBdF{1iCcO6%rJ>H$UFHE8M+vM*wY8F~#A47Y;syl>G17-k+RtDvw`Y($Xy z%=lsfDnro<22Pg$cih;0pcE6`3ZnNe{ZghSJA&5s8C65dvtQ;VtJc4Ci4QP(0J9j347z3c^gh z=-%)+DR5#6hZY&2|F5j%hpBM^q|98~eeSsFvYA&g_Rm1VR$m_C< z=qDqJW5Fdat$A z^H(-J?O?P3{4m@x#-fCwOg?jlFd|OF=_V`SaEs{2{R&r$+_KR?1=fXRHao%0v$lY5 zBYlryl6n8a&Hkc`vsSE7waCDE@Grt24M!thcp)dD)OaF9%&2->`x9+urv?fQP5y3H z9VhEQcEw{xnrXc!IZ`ZgApi$5FY;K>7XnKF#0CYSlBLNIMCov^Zs_tA&u8=M@6asf z&YoTM>61V{-a~g83eNcP@+%jMc~4BR?ni$JjvPbr#X(d3C%Zo1XWvh?}$=yhgq809*8Dk?6RGY8V2(Xv)@5lj^C zR~lVK*S+?%JjOHRFcd%m8g-SHR$`)Zp3#Fgbu>bTixJ06SfOuUKSr3I2rW#;cK`c| z#*fb)R!DbRXLrzaDUFG^D-e&6u}9}ocqY88Z91&hYfpAoepHjtQj*vaZ-&Z$>ZSlKpNn^yW=P#A_~#0+vMtO#p!3xn5)TkSJFE; zOJ!I836F%Oe08Uq-CPw^Ba2I*<}EBOpVfLpRN+|&;4KX&W3XcPEAz&&s3Z2y180s1 z5y|M$$OFqrN(!O}1ajoc_>QF385#{gR#M9}J2#0n<*02k^f!DWIC!h>B1B_%Z-=y~ z{+&?WCzduhzt1SssgzLRL(gzHL0?mgcR&!BPMZiI2%Lnn9teL* zkfS)IO1q6SAPZvoJ9|}lPRGqGeeN#)hHj)@Ng&gnB7K<28&qln#_-cyx=8azid>(G zltXwM=)6#Cc+_f6m7nxIC#Qwyw>(1dT0&v0;=u+xIO=A@zvGa}QewY&@u!*^dB6=k z8h?FSg6d`QVl}bEVMwhY3M}+Ei*4bOBTGOeUt>L&u{#vAUaF`8%ai4Uep7>Ch3Dr1 ze{dyJr*{;WR-idMkp8uoZm*-=tdpNmgFa?x2f|2*P)3Y^LryZjB-y&^L%pbIY&%UM zNY)t5YXlUi!H`+=$<(DKRT!C7$}4%+1R2oAD@J!t5z&cae2;G{;cQIKjP_!dmxsqu zu?17u6E&TuT3K27Wj8Fqz?ZATU2|5i&ZHy~Dv(qw9AX*6stWy}lWcD##d^^za0@wM zqYAd_!E!}J@gS?Zu+3|(e8K~RlF58)juB_Ee>Q9Iw)Gr1}w}9p3C)SMVK3R zTvw{2U_oO>THiYz+nZ3?*HDVD+3VD?F#F_sd7;b0b6(FGnhPPt%$p0OgwmCp9G5-= z)j$d&Yr`f6`4*$b0l6|ZRv@keRy}cIh{C?Q`g*qA+C8m+47jzw#wZ!35vU05eudfI zuh=mDl6{SvRhoyJ-Yn9AeZ?|GUH{;7E_M^!cvy35afQ(6k^-PH`pFc~;$bj--x6gt23|;_=Wmc7yzGxm1Y>qfx z60U@D5|tte6?nCDS#VC6q(J$)cW)iwehr6KyUYju&s?M%7&;EQAS4=|J&WiZ^`d)Z zcB%-27Av0XP3bXI16TpsU`YsdQYLaD4C(4^%f_ALE#$fstld@-@a>+J*emq5P_s7) zC)#p;H(I3rJmFYzh{<7)UeBM;Z!R$xlL?#1e93saUq#_!<`;6v-G6`5?5)4QJ&_OS 
zpf{#@<)%G@fku*7CO=Uy8Pb1Ahl}0fuQ}I!j@9p3^Xb!`F-juTS9ntjT5jOFb7Df- z#a?$drSS;XE*d5BUf&K9abr1sz^+U2jz29e5oBV8L(^<{FdIF(MC4}>fH;19cvA52 z;X^Ewy3I3+up!4qObsw2AqA}pc+7_nFS4~YLC1MWT_~~;GxAS(bTtbF;SW@VF{I?* z`~tn1;``S~6QLvi0CCx+KmCQkz&7;_bGy~Jv&?eObe^cy*sY>g0a9vsa7Jb(;LK}G z;l>5B{rguW&l}^VTehFr2)!RpxD+9Tb{ZYH+sjx{$iDvY!E}o+Q;9wW)06|;;e*O0 zLEFsE+E!Or6Wg|dR&ES{&~?%4BSGV(HCyE{msEFZEk+Oe{PX9#VSPamM~2?CSqcqQ0&%@{@6Hxj%f{U z&AcN$_KEb2Sm#YmySDU+SE{9}zSVfLMCzqW2`$TZhIMid>tFMgb_c5lY$Ug5yOlE| z4jt-gOm)S-sllRq`Ev0wRD?hqR#wCaZ4aJNt@c=Ush0;vTW$RV=AyVWC6YZ zigo|V75@C`7%8P@%(nMKj0xH}ju(8EDZV4Xkj4$3rVA;2ZNszNy*M#?^*BXR!K;N{ zp7D0`VeykgWPbcIe1*qWgRPjnpqn@BO4@k3OHWpeRhg}zySxiSWNE8k|7#BiGlJvVwQJk9i8n_%7PFGn_OLA!TNP9I zu9q>AA@m5`)Q&5%zu%c5R$~LirDT9(nJ0CeU|Di$Vr;$UeE;Ic@k*gNo0lG|&PzG0 z{c#3UqQp{Psn1*2WIAQetJjG{?fa*weuvDd)GfWTzR+^uZ+_j-(2zfQL-_NdI8|kP#7T8^cJEpE2?kGW4B*gP z`Xp_crT@qJdW$Jx=`9Q#<)B$)9}DV`yKm z*~w1`0V-sqZnv&vK?1gx*X#KmN=t( ze7pal@~Z0UZKKZn^fXP_-?@MEZlko52o2W09stsWfHWn#nFp=x5>VS zCyt(4_q*Tyi^m&_dnEy~Zdv9&Mvup1Xn3U670<|`C2qd+N4~4C51Tz%6m+tEJ3Ts> z)i5RZ6Zd|o#tH-0^GTB>V1YBey`y7iMNx(o4*L5X85yq}`n|GDesYK(+7aS$qZsh0IK+q5(gTrfz+~$@=u|nWtYYOAV}`XK3wt$*|EnS2*#VC0gIE%BEN? 
z?a%r$ZQ8VonSY_J939Jz-d(~tu-ji8^T6W5jh#($JLA?_yy;$W;rLx?9s5{W`X5u; zJ^?V}dGKT49ha!wA*sB5yYrlwA`>ktW=Vn8C9_mk>Zd%&v8ZCKN zi}5!*)}U(Jxd1)YyH4U3j3>Fm#a%Oo95-Barf(%PQ?JXN$fmhLTdh^Yy@%`#lm0NY ztSJ1fWa5BnYfvREOY*bxmlYlD)7}JkboDOXeK^K!O>OZyt_hyE;Ksfox-Y<)94sUTj`&U=V1{1#uAn$=wH!e4v#&lsZi zWY2@4DT~6mzWTc}60Eha_OF=%eesx8QP7lLTQk?^uJVsrG+-xk$w#ZVr`2|782xp6 zb`0f12t#_CZOHx(qzdnn^9<(UnIZL3C2IZUHe>|!A8>tnMWq!A^RG9f8FI)ew_}g; zT%~D#$VCY+TE}7=m*~|~h8G4j);Q(O>m-UhVx9E%WSUn%vA>}cFq^vkP#MV!*$noX zZeiWN`wpNqOZ4>PxO70_nK=&(WYZ`ztX-@5Jn-Z)r>HOQL?Um4_D#O#F@^ceetjx3 zEq%L)H3jXIbp}`=bFE)aHohCgr;LALq0E%~uJdNkrVS8ox|A^BcAN77>xyh9Qr#NR z1Q}-C9Rdw6@_QW*Z8DD$X| zDo`roDn5Kvv@aVV=9cL@MQoI}JO^itwD%6)Z6bf-S&$@a(2qq@+qT%}hv;V;NsvWV z6%MMYd?Zc$%_6C?;79o5%emwelv=aRt*C5^+QLtRvb$$K&Y0EC* zKfo)%C2!oAW1WOy!l`rTp1SEU#~RaFp_ONh>b(<)Mf8%~^zSACBjZQTG5O>y>0BeP zRv2m0YkbBpr{2;dK^4FMJN0kFtjU*`4*_0eorD^K?Vx={9VR12uypAY>EV|%dYFt! zce^xytMQGKS@w>{qBb#o1BlO2%<8&nz!!fJSlj5%r1DX zw>NDi?gS}H^@x(X{^PQbKHJu#>sIcs^2+cj%H}qdYaf+b&z(zb#FSVrLAPXH60>b* zp*W5CMUxR=j!2%wfK!a2b#p>Pk5#XlonuJ>?}$w^o^(BDj#3Fzj(m}e<%vFhgj`p5cX zw(iH<-1BrscVt@p%8!VNQpPijO%7=9@;#BcGQt{LD4ibLq(zTf(+F`%UUG5_>gSa# z9^wC9xk)0<_iT36T0O3c0I_zmKaq;T3`zp*3KzFnbT3`rW}jqZOF%exZto(EPwL`dUg6y-J?y~G8;^=d#Xp&7lpk)l&Ivi5 z<4hoTAYaj~t^37`x0s0JtV_FuN_o$ZYn2Qg()jz%*$21!hRq7;xABg~E)DNT+q1V7 zD)*S>=6q~Rp;w8snryMely5(x!!#Id?}`J3pJ|~8(T%5R-~L+ny^eo8&cH>V>vvQ; zKDSKAKjoI~)8$o%$DP)_f411I=6t54&S8(=+HWT}g~%G0c)QJ%TTt5dFEu&My6$0G zsf|aAr^@A}7`m25IcaY5o!{8Hw@|t$+rQwU{PE>8g*_l)hx+(ivm#x~4pjU-JUuPx zg+kCfXO-KY3MY(%G+$0iAww)>k8$4Dh-pPR7$pO z3fc4MI$X+3t+#>MdS`y$Qcl=5zwv2`_|okrHakzwd$d|15GHAiw(Eq5Rm%rOuU?+D zNfe~M=iKR=yKmXQ7xxqGRd>46{LnOk%J8A@8WtX&p0D3uI?&c4ef+Lou(t7?o-d^X zPn)+*jJgxKY|A>6eyv^84C5%l69}ba72A@H!yMNKAe0yhwC5PwBq%vOUH9oE3aCKHiX>waRIVt6ET`gx1N# z_2%Z|qYhg<)34vt$8}1uiCDL8Q@ySBOFMWDmeNg^AJ(&j$nxYfzbcuew6(tJP8Y>; zT@Tu;IhXFuQE`_JF&~qgd*Ls6xn8CHC+b>6eGe_2I;rw#;O&L;&DU5A?3Gz4Dq3-I zTW-$M2_x$2);COO4ECCMyFQ2f&12M%yykId(v}>ooi05ktaf@?Rd?OLQqS$Kiud+C 
z^x~1*lr_sc_!oTL>yauY5~XRUUhP)zSn_hrQ>XVbA5&kd{Rqw5v?U|z^}*UB!%L!! zh8?QXG@rfn+R}<`Qh}MJlXI$yj?|9bY@QQCAuOSNIToosp}={8Z0z8{-d?($L|KRj zUW6k5GRsFO(Lj9S)inA&Wet{i97h)p>>zk2tI=$~wSKSYvsLlZA<;Rr1f#Zp4)0~{ zd#uPT!A8}>p{+B$gPP8@?Ge$xKQgR6o)xL}vAxazu)FDeEZ{ZYe#4xT+6!BAw4Cz zng9MouYWIN)Ju_(`1PfU|NYkWKh~f@?RVxa`X3j!{LjUOJ|=h&Q5VN|!ifHJ{O7~jL9Ip{oH69RyM)Ml?f>%%2xj=-W+TUj5Rp;e_NK-E<1|Ch zP2e(&)a0HCGwuI>88e3TJhK6ZvVV_%XX<|IB(9~MOtdkh4l~++kH|muwv-vdIOuh0 zcNY5Rd;gBJAT-aV2g&@IRsVeVKlhlz)Ag46-v&_l<=ra#W(Ys|pX1*-%J7nmL~~C? z-*?=8O=U1{)-s;Oe-5MJ|4xMVOANusEEwVO$p7)a&HV425@xoyD0+in8T-GSr;Eka zB#_R2Q~vWoqN4|TMrF6Z16NQ$!*Y+@*;Y-Ca20-sz?kQ4Y?m}%H0;1=1VV*w!h=qt zP#UR%Oa!D?t0OZdd(i4QVF>MqJ2Zva~x+ul2d{&VsP#TmY0${P_fNU_6 zu)nZGQvWh$3(9)lcVI8BviP$ar5OF0Fa@%5a(wu;wN1!=Q?3QQdDeDRL;=k7gBlYIAY%621tU1rQX=``nr-WF?dXl`51)HK%EG z3KZq=#70oY7<-qMm-}r<7Jl>jGy-B|8E6z#P2q#VV=Vkjcyb);Zr*FZ7fn=IoibW8 ztw!{p;5mdFII2X^;rI9ju~y?^A^?q%o7JJyImC5fTzHrYM2#eZtLKn#)9%hm3!iXq?$0waM>U#H2tH_37Hy z*5d$oZs7lC=#F-i?YfnQ_`COIb_*838B}Z^aL#vj%b$H_$eferbhGN*@rwX5bH9`k zxCtgBF=B%@^hxb>VRJmHL^B-j)5uHn4S<6E`9}m0k5^ro!?zu0+NjQj%>k4_QpNb# zXHW#8VQ}B#MMn$#!N~Hh)3(WyDjhcu96mfBUDk>)VKTW5K-ZiqhIclACkhf3_g(l` zj#u5M|2ZxAbYL_jq&1a-6!Vq(4ngNuBVMfb%@yiO`)CE7TSB{{@$^+51+B*EJPP43 zO0EhLCWhMmmu;9QP;{7iYW8gx?GM+7 zhgUbe7fvD)*=hWl2wS5juJ?3ti8mT(chQY~19=hA(NP%BfP0XW=)|mF+A2&PZ)veE zQV{e81vh?pvXXX~pa3EqL7m%W@gBttiD&9p++UQTe2YH?LZHY>2y*{?OfEE4;G3q~oQt2zv35|*? znToBYe%;>(~}r%AXkT%gBA)O*`Ckmc4w>cIrR8pCm zWP@;otg%}^TgkI2r%!`r?rF?u<0>04X#UJO4^TBi^mwX1@rIq6`-j<-LNgr-8?c-v zq0geX&*(|05V`l<5iIKrFZ{sWnt0$q!LrA!YF@oE8pSY>8DHE*?STx;P7Tv%66B{( z56$f;T0J>lTsOUF`{Z0Rl6QC}H%_3zhV_-=pqlt#S-FF5j|b%qnn}BFhii(PYj0g! 
z`DtPjZ(h9`*t@Hw3JEHcB3M?z;7(iK2rg%{QISolrquY8@dJ1m-a_f};)0t7g|C&J z*8VoUW!IEfb4;$jZ|bt*?Bwa6Jj}ZrJX-!?_Vd|Jewo|m4bz)=(JWWB`o}UirzmsR z)vGPK7rE`3ab6cCP>!Oz?c2=JP?Y^q0;RY{fIAIck zPfNq=fKsqK3?kvs(|cq-Fi0J0Q_3Au+bLpZY30 zd=XDW<1|?=6>XtdVNzUZPD*a@_;gazv26>z1Id>8Zws|?ak5jC`#Y^ISuL5E?|d;J zSIfrk+IHta_uiL}Oj+Ec$t^VDxBP9-pb-sKee~5`#>)GytLe~J_tNuQipIS*y8X2q zT`Geh2t19BJAbUe)&50?2(BL6tDXnj*YTrU+Q;PjO~@3?8%k0<`8CqI8Gh2+{K`rb z$o1oPznfL|=u8hRaEGU%(B|g4tIAZ$b^Win%~g^~-j5?^<&})|I%NH)z{NmXC!G*$ z^dA18uGj_ly2M(?4gh#`Ww${phtTS$< zv8CU*oTs7oi>{W9?c-ajXT-a=rqyyx!hUzfr|3G|>KM8xw(7;Blr5bm1Nb=&!G)HF z{W6pDYvTPPVw}y2U55Iv8+zdGE_XO1(F1bj{HVFqjZfAe;8ffy^}w8YSCfgR9{j*y>|C43L;SRl#$b|zS172#C>Ii#`9=+7B z;NHEAKnPq?y~axQ8ebDM#$d8+T4G!$zdpyA$?@xLDx`B}ZzBAm;Opn-7a`7$4Vw&n zL(SRj?10NkH@3;qwa~dqWuBvG{2crMa38!mi%)5?oH}S@>ZTz)GRX@<*O z+)3ug@)gGW0+Rin>USHXb2DW%rDTq*2L@|c36E;y4FS^P-4CytV!6w0@t1!bW+jeJ zxRFyJXHN}%Mx{nJdwhDKY<;!7uTMrx|C|#`qRjIQ(vNRc^j$Tr$twcJyl`j)3!7h% z3~L=8QS%%eR;^or4)4>C!-Cl+OEw$kbn2$}@^0RSrCL$_7Rz{_!1D6Y?kR~~Yt;T) zbh?vYe|t4KS`zElum4_GR`U5No^!usE2pw%(t-I;AEsO#h^?C1+eI$3hV-nRrmXkT zXxTv7^;wGk6EyVfZ$HRCUamIkU`+fQ)L5#joaDXe_6R*DT}hFs`xL6ORMuzD?vb-p z34N-YbRgWD>?q&u4lCB_mMFg0?D@OmmD@n#(iGGU-#-^^^PKC4SVQSx;2VTmU43a$ zIgy`#K){=e?Ky5x+p`yO`P1gMw22w!io~?fT*wvW>W$dGvs>uvbB>38+;z}Ot62Tp z#oGr!i7>n3Prg30SLYR4>3u4}dg65{ z9L!ui_D($e{A+^%W6XBj-%0xyP3X69b1N2IH?mp0_~DS(0(9bp`g-&1q!?PnS;Nqw^b`H$u=JKs+cx4Wwm27yL}{3W5Z zAQE{T`P1*JhVmcU2IONlMf^0kOgyPHY9oRjE9^7j^$M}`*?kD4=l{{)5(n)$>XvId#EuoEha=b?0_k@v5(t&5Y zHfXcSFI@QG#9x|bJ>Zf6NYmBPDRQO}uG3@svbPjf$GZRSAbK?k6p*vAlJ9jdtO@5| zCn)DG5H(s>+aua~RYhLz7F>U$U4v3198RA)g~XiGU*CYsYfRofb4kI8$5U`nKM z=MPBc081Eq0*8ZKR+q8pRxa;)|DX!ZD)iKy>O#c4x0u937!Uw+gutBd!aZO$OSSr` z;1DEeF$^3ykQ8CyKqJ-gE}c4&!m?qxT;AwGd0fEF4YmZ#AV9pmya--u*RX<@=E^Lb z)T!^Yo7Zm+IgqM-mnJ)R<+hpYEGiC>(Qh1n=xzs*e1En^Y0nXHR#ej$(6gyFUjIiY9sZD9cS?CIWeA#&5o{t5~oV0hIH>pqcA zAg=;yCQEv{v)oInY?$=2yj=gYde&b#xEgoen&+-{J!cTx>>x*)yV@fcV6-N{7#QY$ z9w)4qdw8S(Wc59OJV7^}EW!uZ5JvTyJ7wm~9k$5{$7BwysQviSEW&7tTt_KWx&p&q 
zBbrtetF7>s7y<*V+S zied(Whvbk^90%C3^x3m{gZ2S%l+Vr)wQO}zf5^$3 zoOHc$hs;*veN6l4Xw&J3fk$NNuy-2-lz#dC9X+TKUss7FR0yGvd)PgWx7_>lNLAHG zZgjiH(eL-Z*bjmkU`?XSU6qSOLE_QYN%w&mtepj<1QeCqi*eFErNuJFN}^$RoCd`L z2cl(n)^_E}m5M!k-rR7SX_nh26aYm=ZSA|kx>vwg$b;FfC=){P;~;%g-ZOmP3^02z zs+CDh8MjZL9#($~tF#N+oCuB%%t$+IM4LbL^$%r6#thbIW1Eeiz7&|+^X2{ykBDtwVy24^s zcc`OCO7I#LA!sV?O3EnOsu)yPKR!Mt| zIc0Mi1U=YXm|`VI=%QdQMKlSzQLy71WULzgb=^dl%NIWt+guY?;J0tdNNE7lUIHrt z>q1LXmnQF)3!g-J$DgdgVmhk#bsjY$Kh!M3O@Vz1j}jw$W8=MxyNQa1(^*{}3)~Gw ztR!l(&YBN4o{-dK_~ku2Gy}yhEGRGo#$;&;Vii2o7-EBOzknD5g@=QH7%!9~U0?0? zSWvnBwGEmKP(#oUY|MUzWH5spZdz*|2&M?!=iUZT%q5Da*-)s09kc81!2xaf&ofT})4BvmeJJ<}eh>9ntplx%7^s zV#OMDO%q@xg%ySZwge|p{`@(J_p!r-!(`At`RvUOF!(512}&}q?9y!*gjt84A5Srj-1E@!f@c8M|_io=Fs;atEJzdkk(TyQ? z(UpBQd3Zzt%l7j0wZMH_3lW3+64v`t3$dU@`wHFxGWG703{+NJd>bsLRmdR%y&uOV zyO--IJm5Y9kw_QF3)p;s#MY^D735kqrbvh~ptNumI=`6r;IVe^p1^z>+Z-t-@@V6= z`x{CJK|LEw@6g-)8uv&Rp69M0^0veU6SBhcK?LK=9UWhxScdHawW+X59pMgwZggp% zQlHV{K|$;jPCU#BqBW$+7Nj^%1|@znU(_Xms1$)?0bC5RkSC6Eqh0!{D0J9b*36tc z_X*PxOOJ(CRvIw(sAwQ#APTJzNog_b`Mq7Cq1mt{rVg@Cb-oRFJ4}nJOU1ITTVG#( zxv3oRBzKRO)%%A>uA$|JU8tI;n8tXJru`wo0=7AO>Qp4t6;h0HK%FLK@;cx4S6!-S zc^aiAt)=zxF11M9hz8~d+ zXYHFWm^<(;cj~wqk2C7wpIxLFv1iZ2w%>tmW8eYta>3WDys)&J#SKg}pKe?^1v5SL z08zQ1=EwCPh>P0@K@HYwQ~7 z&BDiQ{reYF$tL+p^XR&-@-0x_oAoEF$$gyX0|tz59+F-9q>X;OzGp{3wM&TJ`2l8WaSwTNhnv(W^yx z_?2h*Bp*5Q5=!k-XpYNTY)lM|J5 zhnJ`?-se3$JZWq6-mGvaTf9FF@B<__x{_@hbwpMn7=7k2_1yoa)h>Emjd(eu% zu(W`!9c>V}BM>kW&7+JZh(^hA^~Qo(DLIh<8LdY5@WX0h4b3Mqf$fFm<|LIRydV6h z(vQoQssc}3LIv3wkd8=_5bQ#lKo1%z0heb#(17V-%-iwg`Sa7oZVnM z988@)cuSxn0jTV(Z9q%24>(YC2mx}n5H@zJy;Xj#_4|35#a|Fba(8zpiKBqMn$VJ> zpqbeM=p{n>@|HEz{4OSE5P3*pjqgAPKD9WUB>@uSsc+{A!trX%Q+LfcO!j7chqp!W z>Uy$**Bln`dgD8aYpxudqduIp6tJPpNrHKpMDN&mru-3+rS*}qIW0Z@YUy%c{|_vI~s{kAap!`~Bw(h{0kdOj;y zj#*@v-4*Py^c2yGH#*;AN&4Ts{=s``6--OS8zpw)dMy<;fvq{%HV9$)(sfz)!<{yu z%ND#B$&|>rH;*p&LAauclXTts*GOtq_o~0)zg`wtLq_l7Qu5+O;VSz}EBdIsCGNR@ 
zh}RF!12VnZ3kY8{c_9h4-NTuV{rAk?rmwT_Jt%!Gk^$ozOURT(bcwx5&{sWqp`k0Fq@rn;VzsKB(y#(fY$nHT(n5 z7x3)>ttEFL4~13s7X@9NO|-0}^q~5iU`_#i%|$n{T%aEkeSG=#_lqGfz1M5K`?=G> zSw5ud)lQc;6Ar)2_K_P`G+F(7%Lze5I}V#o0kzGhsMRP zA!|0@)fIH%kssq$v%*K^Ui4yr(kuUc`X;>MTFZoz@1r9hj{*ny<_jBbp8B^xI%4(D zryUtLc5GBkjAgXWj)ks}EQ9xQR?jiH30oX?&t9fKIlh?sRLhiyIZOO}lApCW(3C}y zblt=kY`yximutd^X5#YSoV8Co$+)dqdG#4)(rlt-NgC{5-r*x%3kXgmRO*(G^Y^+u z)a+2rW2DhQ^W)?^YuWVDU#lO(0Wy)83HNOc&S0TdBbMx6Lu_u5KSJ!TE_t0^LAUyf z%#yskkeo@j5#MQ&3nHz0iV>q1m`D`c2b@WZk_QCnZZamc*cac={be`b$*-Mx)gE;; zgby}%j`GhOMH2M9Y~Ket^%f;rGTFlc4HK02Kq1p}P?8aeRbp#QHGG@2qA$6M%jJaO zY+9($3x1}RZ)0%m@jV_GF?`PM^LK`Z9LthAH-6mVH};2r+m{l?JT951f5R@-p}SP{ zz~K~m;Xd5BQ5D$M6c|=1Cya2==6k{X)#w7JHrg<-wU_0@eS7y7RV}O*?~rxoatGLF z1mkt1p~peDKCav87{b@{DWCT0HMn=z?<*rA#j;VTjUF8}nk2Ujt_zBfHO4P1zha=< z!x*A<;fvC3eFV*ibLzRcD>1{^#cIi1<|Nr*hDZq}QVa6qz+fpzgOe;wO{uq&7c6nj)6L|y$jduf;McKff5_TTK5fH$?m%fqkY*NGq7FaC`5}q; zp50ww-%%sr#hi5J*CKY2Y~l@wo1Hs$NGpbg!7!lQOhulZYC6Im1bF4%Yc zV&&~+jI?b*>uRJj*#jt?=J8&<6a*DXaS$snNu(^Ow0rg{;uASJxw_H3zINoCGln>) zd)x1ci>m}ca5ioBF)LYPNL$Vs9Y{*RGLW?-_A?k|!mK23Swf=6-XHu;^R*xaN?M>> z6!M}DiGPb_d;Hh8*5|e=KoNREbS=2c1ripox$-3>mF{Yh7ik8TQNV>8$%&WuAD=?z zg}X*G@}HjG-fvsyp#$?BCYGwK1mRs|*uul+6Rr-5kPp%7@33~w#E7(sO=&4rGM;A= z&-)q&hQ8C&j{GYY`XspkQU~vdF=km;rCW@%b5F!6EYNPs=zTwwbV!-;+)Li-# zr4MS4(F@0B>j6DzJT31X97*l`)5bXHru|r{kXMNs%?dFj7-fqqxnD_+NyNO1dkfeN zGMh4N2wr{sSiWW6o}D`f1q=4&Nj5LyyZW%5q7Hc{Uazt{f4eMf z_KPHJK|vydCTQ#gz?)BgvLcCrnZs5~>=%P4BGh*~wMC?pCMF?OKfE(XK}>`K3H^Nm zQVz%@pzI=l>g*p)8n*AaKCfQ9-0Vz-EHL~q7_qkApNgMNQ7?rsSMmv7#fcuTf;QRS z3QB?2%5SSVWTZrV_b`m|ZR#U^OCw}U@}fr*3cL)vDK!IS%j6O4LV}0;*+n^7 zy24(_P_TaTBoCHhia2w;L+2SeC~=W{3nF9>6<$&F04HyqL~Xob_EtTdQ&u=Rr6NuJ zBM-j(VS*L|gG!?0d-qmwS3(|z;o-yH{L-M{8uQ^riFC><3L1}YQBodkLN$?XoWp8_ zRJU%UMvW?mPe(N`V9x&4BtT^3KUr<`M_o^SCRQ+b&&01p7w%;j5-eAc#jM1AY+>1^ z>$RVM_RN`5S5>vPp<)7ARqx(QFnm^47C86-96qRhD|!0#2cBS$M(t;PfgQO-tBh?u zs$-?X27gCnCb(QOikvgmBtPs}f<)N1q*xH=AI?$s@U@l9cUJQB@fqsp*Eio~s>Db& 
zwGFR#3@6;3ZD*IkUVDUiM)*+0NOj9HLgcX#-ax-+7tR+)h$&5sG(ST|SHg|K-{ z<jSTWHb%S+Js-PnTo>4(1$y}5gF!TA|Ja=G4wuxqYNYDpuE^@vca zD=WKV>1(`fp-cAk)Cw{Aw~aO~J|wp2#JP_i^5Uf+ z%Fn98_zVuFO}x4^#R*+%!90W_On^f*f09+qJ3Bw2ZIrsbDJzGdj)EeH%ewEqb1%*x z{mpaZqGP)$sM6`!gzt)8lY!vQiARYBdLX1WtPe1hHy|Z02o5=@fBL#LLnowoi;Ti} zlVDji9eG9Be-o_j7cFYNH|o6F;;VmSJy5)6Leu_Lq%VN*^)BD|&@;lIqA`&^V)G=hrvK~U;9yF)_m*6sTpMN12XerUnoGAtT|l4#RCK);T)6LVRC@Hn zU&i;oOP0q)HEt@45t1)55yGVgHNVjmit5#UWfni&>2v4$h{QP|J=pqUVvq-OV5?=@ z5+o4s51VipgSbso=ikr7?r2y+YuN73&J+ju-HiSVDW5xY6B zj3`a~gptEyy=(70wdxy2_1vN8m|Ez06gw8939zLv!q7ui;ycSPw2E6G^A=|!T zW6>1IS#@FF2_GTCE4D63u+BvNhWgh6)$kv6Pb5{S+OVjwD;5{0J{psVPw}XgnjV z92+MpKdh{5VPjBLSARjWj}qPw^zwdO7^3nP=^SLGnxw9%M;K_!B4_+4X4&_{+!J%< zh>cI1&$#-h<+KBzb%U65>h)_IvYRP2aZ+pMULB4lcb z5M4pRx2C4yvEoM#mt9_S1Q45$(5g~V31AZg29#*8(vMA}$47vT=fM%fg1P zU2@hc@5vXY7WFw)I-c0y1$^vZ=C^G855Ma)ls+VDiqrO_lD87(?TQr&+qZnCVMPp^ zH}qPt^O38d#z3c*{&zZK^umSAkGUT)Oak&cGO*s=BBx|Wr9>)Ms=C!H-a5K$(*Ku;8lG2_IIo-Ckwmr_ua$cXe8HjFr>!D2PH53-!r=I-y}QKl_6%ig|>Poc#6a`+`0N}otX zs3kWXc;1l4{!I)J_r^>DVG<7?p9H^fNW45Wg|RDo2bf+bG%YCjNX+D&9wbZm-k(M7 z*`k23^=;IX@I59jtuLQmFm?L$n8?VAcrzECKq|U0ps?O3uz@g(=_ep`qIX3)b`sXL zy?!>Ike|DFu|VkGY^32{G5%(~yw8kksn<|uB~nRrGs`;Utjd9Qk&`N({a1yOm%2j+ z(o5r8cU)C&x@?GM7sc|$%`5JA5M?|>4-sqMCJLVYvQ-S$lEN|&b}4lEpyxOLWIG%8 z`7*7dK-@h_7i7em2YlWccU!~ZuTc$Q%jCxwcunZ$RqNQDIN?IC=9y~Gq9*ryi_)C1 zFVpeJ1Gdd^{dzl&vgR7ZUBT6Z+QU2cz&v*|OHkn$3C~PVVNgXPQDuY!1F^NE|R8vdy0bvJ0VqNi}gW}Ttr)|=>sfvr5S zF@Qz?@AvOr-~aUeIbiB#{P5ntuB4d3ggM`Gt$0-?Ul+e?1aihiS`4NJg^HJvtXZfy zsDC_DlMU1n{u{||z~{?0G=iBlSc4H7lIgXZ8L`P5oJ+~4eo0)JY;;V#^S4VKKnhHD!NSyv~P&xc9A#+ z9ZtD)sRg^@Z=9+AMNWC?>RJK}RUnEovwm*agw@2xin6f2Wxz3_ey{SJ@K31NJ_*nw zeU}-#HPbsKd)c>1Ws)t!4}HAcSA!Q3<|}xRiLG6*|#qj*(!Q1X5wH*gz!36Ct`kS9qRQqp-+x zpX%#17E%N(VcS6GS182%xSC$53S#0g#R{4?k=dq{QE~FaiL*KKoHB{=^>aI0X-lRy zCLx5vk04M|Hg6En66tM~%9=oKDz8s4?+eu4P7}RwYnQ6iU?6~p8$~UI? 
zVW6;!Jm31SCkhSx{dYaArY&|0dk+d${VOgtqu77y z^lAUP)O`P#rfj2t^sx6&QaOhJ7b8s zs!El|E(sMjvN9s}+viN%q>K+3(wV%miYtNn+7FS+oFzvZ-RBiKjibYG-Ebbo2^K&) z&C-u^6RwHf7P@rkD)rnUI0JdC36&J3pkOcP?jm}JTLpo;j;~ikGBP9AaJ#ubM}2sS zyQXZeht#a zPc*z;5MPkJ9M^dDCPiq<)ac&&wZ$YRR`Yg7Ig>6A4QA1sJ?sEzVWg4a$)h$1&0cFM zeQG!Zn9~6J9;@&G(K__O3C&8+zP1i=5x>-bHx+ss-Be7;Rq1b6;FCl0x@@SA`H@L+ zvS0HQ*T|13sa(`#iOz~3hW=|qE1LqR`+cRAdEPn6+^h52lHj)Ba<3s8T z@2$$D=ZFwG1+|)m$@3R1pxMt(ilA2<5>1Q69wh(f(dd6PW*svcI35L>s>8mvmaJMd zty$9qf2AK96f%W;;ey#C#k>9F*9AD3AG5*)2UEz8jem#Zm85?6oxE_jSy_iCc)^yS zCx^muCB$FV@E2D%##ENj&;+@FWED1_AT=mXaWQyh{_vbfd1QdT@BM;;P~JMeFgm~7 zdNyhJW-W*2^0s14Je}SSX!|)GetCO=MVtzQKp_q{6}&o_N7X+!T%{lM@ z1>~qisw$b+jz4vjDmg2Jw3Jr^JmeZuwp4tqZ4t_=iEnO5dKMlr9H5{;M*4+jK)>wK z?QyM`yg>eObi6}Kpy|?OPreYa>Bruy-*(XS9VrymYE)=G0aQThO<5Vm!f~{4)WMh! zg-Ft|!@^NpCy}uq*tAuU-=cL`)%U?eR^?yS6x-=rLWV>F?lN+NBo!_DSq6JabfP04 zEIa845&^?z#m)%)erNC=&SfmICqI5E+-eklSn_}zm6n+`&E3k%=(KERZrxi<G9} zv=1ny0rVq3;JmfwYyLcDY8{LUY1p)>Q@zJ;y`QkFN=8dc=%N}W0|aCTM!eG#z)^Qo z-x?mlr3)A6*Y2%y?jKNThy(K#Mwl>n;$Pa{}oX);d{C`d|n5qgN|6!((TU**1L z%`@LkE8;1+mfh8Tes0!Dhei8}?WJ-JGW_c=J)=-uV1M}aI=KcKEy85|$s78~OYPY) zag{sNyd%f1lv7!PhQ=vMY(AO|!bsnYn>D0JK0(z4N{q-scws+eUV?o=WEVvu78kINl(50l@R=duT_aK)Plr ztc+kEWGEYA2LX?yf5+}3`GbIJE>W~#p`2=B!oK3mhusg`By#*rTe9s)+pi<+YCv#i zMQT*h24ZPAnX1^nBWS#8$EtTZ%t=*74yd_}H)I}Nd00gnu z3btX`t_5%1O7!4L+nma7Q<^k-RP-!JC9ffu$ySTZ_5}p1!C(C@>mtf9WESDfN^`K4 zWH2Qqg<2l^m4=#U`-nt`rV_KYF^)MpN((hIODYl(zNs^k2#pi;h|1}|7F znwb?aciVnF41Ar{j^7EoD$p=R@}~f1HsS7p+F}cKL+D+yYr7#|2(E!@x&Sb#5mN{o z#)O|Vsa2>}uo|PfjH5H!eW03LD?)f|tD!2<*5@^+UrS)(10Zu`*c=TGmWRxYk`67Ftut@Mb`GQNKgKbZITjg1oGb^JWpZ* z1adh6*g05FglqlSO@$6WhohCi3KdDMqi#fvPjG8x>85XmSSbZ04wK#ecVv15bx1sZ z{5AC@!XY485NYxGV=g{rvldz<=jAd$-ji)jKz)yZWUjq^ZC%|&v{d1>5ohS>>nmpa zh_26t6{F+@c*1_+!ur>*jq_tM%#944cEl7MxZKRClWl00@q?l~$Z+4uv;}$>Fu4)E zXGQCb1Q5xgp;072iq7E>B`8HBl}S-*&B~SCWMmGL_7DNmGW-v`=MQ3R9W7pZsj3uY zCQZ5uu(xn4R=}D-(8iC?SZq-)CR%`Q8a+>fM|_du_U!|-+e1mofQ%lbC+j|!zp!Y< 
zB{CPD==ME(sL$CX*$7gJlnxPY27LzhcUPs6sA%rIc@cbl@StGz$&yFM4-|`bR5o>p zQdA>r7NgHs+q;z@7t1MV2`GST*8<)DL9j>>LdV+*N-fS0;mWK~7U%C`G zZnc6%%(uoyoqw@c6WXy?15Z3z0SX&!K9YX4VIDnyz5*RQf-9s=Y3y&`E&(0L<#LZ# zn`!h2?xde$)b@OEGt;*o%qnhs zVPyEg4tMPmhSlAov&Ruy{3xhJ&*gbJ*xN66b;Yy(4i6?DJnFP*RbqcK&yDSwr~KQR z4GW;(Xumx9WCfy=JaT4dzjmnV&V-SohD$`?>`i?jkh@nFhGBeA_aS8L$&IAK0AuSD6{ikaGSIV=KY*3+NxG8!HCaPrfD_16m zg-JiSh0Z&Nyw;NKsWC`D2i;wA0m0H?FQHhhc6aB+HklGf$EK6GI2=_M-Cm9XC{fvx)Qe)O1anp&W=dCw3JUZH+ z8^@uY%OkzK_nLnOWh-SvK-^D_2*5M5;{JUsFx(J8_Euj(*)~IbsOY+ZHMZS-<+BOt zL1DlU`9D=%dpwkB8=gL`nF@_^h?r7qlv+7OO*QN&htjYqhft9krsFb*w=c3;{ecoTBhM!M#!3ii0d<3hnwbs3_<-`>$L9kOg{h0Q z8_ID-Ev6u!WT}|K1v=kjX^^#kIlh!oEHP<7sSDW;(7X*|z%MClq6p9l2=`h2ze2+k zUyFP%XhU?YE4N$U&dbrG)cJPuBXPLXioiUvSV(^|JnX7JF-=25>|i=$3NY0vn|}gc z|Dv{*W;nEGAy-2)K^{{%U-wwVf7I08NXhbv>@itB>7S5QZw&bi8!o*G{N`ZzauZZ{ zlUf>ooDK;$kS~Ba-@mn_>p3_stfBiMzrak9WB@dMa8Q>{2Q)qGzw41doLimQdCLo& z3m&jI6~q0+QyQWs&`8HlxR{>~PBJ=>2d5@an7XqyhHWTTT%ni9`&!QejBcwdxz0FUj2!%4?>;Nq+ zkzn~vd}jGg!*Ihw5Xqr1g$vIyF(7Y4s4cxh5Yy8{!Aj-U;kWCUTy^#i9u8asSJp8CJnsVy88O$RsFK2 zzP>)mCtdZcn5*L{yyGiTYXO2yNJj#*+_zO@86Y2sV1Y@xvv(=#9i9+oiH~Q0aPmft zExx!uBMM3~xXNd1$@bVbx2tyVnA>c?j*}n)D)%QBKZ=56{U`3laXV-j3W^Au8l$1V zFS}^vUfA0@}A+khDb=@C&18V$Dw(9}2=L@2zM{|&kV zI81yLJDb|ei2b43h(IJ=@sUa}c|Z$A!QMD-_yHKx=d>2kX*I71uoGU9)FGIh$`bq$;%73xJm2 zj4tKpbAIxk$pI2zq~vE5f&n65tk+Rv1Ux3XYkQS3Lx7znxS`6*O#lySh7TX~WoBw~ zfH#7+q-iuxQ@{z^5VeerjWw)(LC#^BJ&8Y&@j|S@N7A-A-R!;qyPx*!1q2~lrcj7P z8TB6p5fM%tHRV^Jg2Qr$Zh&MLdP(zLB?Lbg61w9sn!&-sOg;!I~wc5U-Lz{yg4@7c4i z0D^pre0pS7&Ie2Tb*9lCZtCnHs0UQvy!qt3_jK#=p1XC_MWX)fLsY(KF7=fuanp%Z zJ@TH@OTq90L6_^l0Sf@))5yhRUW9%fXerqDv^~(Wf2)d{AEkK{3*-&>T9we_?=Cxw-gNu-~(n$njAN$w6!PzZi5#D-}B2E&~!p};wWfd zp03iRdTFasGDM0-S&pfK%X5Lw)Fbmw&_)bbLA)F(dDso|KA5N|3Ut8gMW#&Coi|1@8aN)%-*qg&<(h*j%BH; z+{XCDGt#=}T*r_MZ%1kYbEV;hLf+SB0cw_#<77PUJCuByOG8s3h*wwZokR}~3+wUQ zsk^=+i0wZGQP1!CguzkNKa1pO*a_I1c>LUW7dPp5;>YVMq>@gLo*>!Cegwd5B6!~w z9Ly=Y^8(cmcXzZ&6<3QUAliU=nh0YYX+g~PFeud~Ig)gDWoh@45Jc$2j}~PavPQ#s 
zFMKX{jjh|PiVmu>6z&S%43Gs#Lw?mGm*;ONIhT{u z#lJqFXpYnNn3*GxYlmJu%sr|3@~xVhYwEp1%NT~Ow)pz>O=b(lmAwMzS%5$Qc7TyQ z;kvqMDl5A2DGfAh*81BRq6cAn1YmOmIpcYiL8+gDyu31T{-4Le;90KW`K*BI;>uoh9wP*za|t6l zOnssH%GRbuJ69aE(zP?JDs$kC#NE!HQ3P<(-ZzOy;E^>)g<51=9W68~Ef{^>6vC z+d${g0?!`F)&~)_=y9rC705dAD=@H9*RD`o!jym6BzonXO}&<0Ufs2)HTq%eAMIm) z39M3&d$J|>M*?cO-=D<5we7Adul_>j(XQ*4)#+Hcp=W?r-n&)f;xhY<)nE(P3+&j@X(!?fWy@#XWTI#+r>~;hGo~xjQUv5 ztv9{$I>l|s*fFb<$LtR(@*EAB+g|MKpytLiFcG(tZTXw%Mv)9D^$*AFdGXIY>`yEU|O4nihaM{nE!zr9LWo)0GCN@<-!x zDHrF(0(IFvp!v>fEzgf#pUp~n^xCGzB7>_@uk&)h;%o9J6RA|vxsosbeTb!L tf1W7GV?+LV_4nUc8Y$KE|L?9+PE5L-5tv=Q*qQu?+ZyibtDJ3z{|7Q5t9Sqa literal 0 HcmV?d00001 From 99329e1243d373c426d17620e1b6aef932509ff8 Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Mon, 30 Oct 2023 21:43:16 +0100 Subject: [PATCH 092/155] refactor(ds): Address review remarks --- .../emqx_persistent_message_ds_replayer.erl | 8 +- apps/emqx/src/emqx_persistent_session_ds.erl | 3 +- .../src/emqx_ds_bitmask_keymapper.erl | 120 +++++++++++++----- .../src/emqx_ds_helper.erl | 73 ----------- .../src/emqx_ds_replication_layer.erl | 3 + .../src/emqx_ds_storage_bitfield_lts.erl | 17 ++- .../src/emqx_ds_storage_layer.erl | 8 +- .../src/proto/emqx_ds_proto_v1.erl | 3 +- tdd | 13 -- 9 files changed, 114 insertions(+), 134 deletions(-) delete mode 100644 apps/emqx_durable_storage/src/emqx_ds_helper.erl delete mode 100755 tdd diff --git a/apps/emqx/src/emqx_persistent_message_ds_replayer.erl b/apps/emqx/src/emqx_persistent_message_ds_replayer.erl index ce57eaa80..d137891a2 100644 --- a/apps/emqx/src/emqx_persistent_message_ds_replayer.erl +++ b/apps/emqx/src/emqx_persistent_message_ds_replayer.erl @@ -72,7 +72,13 @@ replay(_SessionId, _Inflight = #inflight{offset_ranges = _Ranges}) -> -spec commit_offset(emqx_persistent_session_ds:id(), emqx_types:packet_id(), inflight()) -> {_IsValidOffset :: boolean(), inflight()}. 
-commit_offset(SessionId, PacketId, Inflight0 = #inflight{acked_seqno = AckedSeqno0, next_seqno = NextSeqNo, offset_ranges = Ranges0}) -> +commit_offset( + SessionId, + PacketId, + Inflight0 = #inflight{ + acked_seqno = AckedSeqno0, next_seqno = NextSeqNo, offset_ranges = Ranges0 + } +) -> AckedSeqno = packet_id_to_seqno(NextSeqNo, PacketId), true = AckedSeqno0 < AckedSeqno, Ranges = lists:filter( diff --git a/apps/emqx/src/emqx_persistent_session_ds.erl b/apps/emqx/src/emqx_persistent_session_ds.erl index b8afc771f..c99b8c947 100644 --- a/apps/emqx/src/emqx_persistent_session_ds.erl +++ b/apps/emqx/src/emqx_persistent_session_ds.erl @@ -281,7 +281,8 @@ publish(_PacketId, Msg, Session) -> puback(_ClientInfo, PacketId, Session = #{id := Id, inflight := Inflight0}) -> case emqx_persistent_message_ds_replayer:commit_offset(Id, PacketId, Inflight0) of {true, Inflight} -> - Msg = #message{}, %% TODO + %% TODO + Msg = #message{}, {ok, Msg, [], Session#{inflight => Inflight}}; {false, _} -> {error, ?RC_PACKET_IDENTIFIER_NOT_FOUND} diff --git a/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl b/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl index 90c381104..a67dbc0eb 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl @@ -168,6 +168,10 @@ %% transformation from a list of bitsources. %% %% Note: Dimension is 1-based. +%% +%% Note: order of bitsources is important. First element of the list +%% is mapped to the _least_ significant bits of the key, and the last +%% element becomes most significant bits. -spec make_keymapper([bitsource()]) -> keymapper(). make_keymapper(Bitsources) -> Arr0 = array:new([{fixed, false}, {default, {0, []}}]), @@ -207,12 +211,13 @@ vector_to_key(#keymapper{scanner = [Actions | Scanner]}, [Coord | Vector]) -> %% @doc Same as `vector_to_key', but it works with binaries, and outputs a binary. 
-spec bin_vector_to_key(keymapper(), [binary()]) -> binary(). bin_vector_to_key(Keymapper = #keymapper{dim_sizeof = DimSizeof, size = Size}, Binaries) -> - Vec = lists:map( - fun({Bin, SizeOf}) -> + Vec = lists:zipwith( + fun(Bin, SizeOf) -> <> = Bin, Int end, - lists:zip(Binaries, DimSizeof) + Binaries, + DimSizeof ), Key = vector_to_key(Keymapper, Vec), <>. @@ -241,13 +246,15 @@ key_to_vector(#keymapper{scanner = Scanner}, Key) -> bin_key_to_vector(Keymapper = #keymapper{dim_sizeof = DimSizeof, size = Size}, BinKey) -> <> = BinKey, Vector = key_to_vector(Keymapper, Key), - lists:map( - fun({Elem, SizeOf}) -> + lists:zipwith( + fun(Elem, SizeOf) -> <> end, - lists:zip(Vector, DimSizeof) + Vector, + DimSizeof ). +%% @doc Transform a bitstring to a key -spec bitstring_to_key(keymapper(), bitstring()) -> key(). bitstring_to_key(#keymapper{size = Size}, Bin) -> case Bin of @@ -257,6 +264,7 @@ bitstring_to_key(#keymapper{size = Size}, Bin) -> error({invalid_key, Bin, Size}) end. +%% @doc Transform key to a fixed-size bistring -spec key_to_bitstring(keymapper(), key()) -> bitstring(). key_to_bitstring(#keymapper{size = Size}, Key) -> <>. 
@@ -267,13 +275,15 @@ make_filter( KeyMapper = #keymapper{schema = Schema, dim_sizeof = DimSizeof, size = TotalSize}, Filter0 ) -> NDim = length(DimSizeof), - %% Transform "symbolic" inequations to ranges: - Filter1 = inequations_to_ranges(KeyMapper, Filter0), + %% Transform "symbolic" constraints to ranges: + Filter1 = constraints_to_ranges(KeyMapper, Filter0), {Bitmask, Bitfilter} = make_bitfilter(KeyMapper, Filter1), %% Calculate maximum source offset as per bitsource specification: MaxOffset = lists:foldl( fun({Dim, Offset, _Size}, Acc) -> - maps:update_with(Dim, fun(OldVal) -> max(OldVal, Offset) end, 0, Acc) + maps:update_with( + Dim, fun(OldVal) -> max(OldVal, Offset) end, maps:merge(#{Dim => 0}, Acc) + ) end, #{}, Schema @@ -288,11 +298,11 @@ make_filter( %% %% This is needed so when we increment the vector, we always scan %% the full range of least significant bits. - Filter2 = lists:map( + Filter2 = lists:zipwith( fun - ({{Val, Val}, _Dim}) -> + ({Val, Val}, _Dim) -> {Val, Val}; - ({{Min0, Max0}, Dim}) -> + ({Min0, Max0}, Dim) -> Offset = maps:get(Dim, MaxOffset, 0), %% Set least significant bits of Min to 0: Min = (Min0 bsr Offset) bsl Offset, @@ -300,7 +310,8 @@ make_filter( Max = Max0 bor ones(Offset), {Min, Max} end, - lists:zip(Filter1, lists:seq(1, NDim)) + Filter1, + lists:seq(1, NDim) ), %% Project the vector into "bitsource coordinate system": {_, Filter} = fold_bitsources( @@ -340,10 +351,37 @@ make_filter( range_max = RangeMax }. +%% @doc Given a filter `F' and key `K0', return the smallest key `K' +%% that satisfies the following conditions: +%% +%% 1. `K >= K0' +%% +%% 2. `K' satisfies filter `F'. +%% +%% If these conditions cannot be satisfied, return `overflow'. +%% +%% Corollary: `K' may be equal to `K0'. -spec ratchet(filter(), key()) -> key() | overflow. 
ratchet(#filter{bitsource_ranges = Ranges, range_max = Max}, Key) when Key =< Max -> + %% This function works in two steps: first, it finds the position + %% of bitsource ("pivot point") corresponding to the part of the + %% key that should be incremented (or set to the _minimum_ value + %% of the range, in case the respective part of the original key + %% is less than the minimum). It also returns "increment": value + %% that should be added to the part of the key at the pivot point. + %% Increment can be 0 or 1. + %% + %% Then it transforms the key using the following operation: + %% + %% 1. Parts of the key that are less than the pivot point are + %% reset to their minimum values. + %% + %% 2. `Increment' is added to the part of the key at the pivot + %% point. + %% + %% 3. The rest of key stays the same NDim = array:size(Ranges), - case ratchet_scan(Ranges, NDim, Key, 0, _Pivot = {-1, 0}, _Carry = 0) of + case ratchet_scan(Ranges, NDim, Key, 0, {_Pivot0 = -1, _Increment0 = 0}, _Carry = 0) of overflow -> overflow; {Pivot, Increment} -> @@ -352,16 +390,21 @@ ratchet(#filter{bitsource_ranges = Ranges, range_max = Max}, Key) when Key =< Ma ratchet(_, _) -> overflow. +%% @doc Given a binary representing a key and a filter, return the +%% next key matching the filter, or `overflow' if such key doesn't +%% exist. -spec bin_increment(filter(), binary()) -> binary() | overflow. 
bin_increment(Filter = #filter{size = Size}, <<>>) -> Key = ratchet(Filter, 0), <>; -bin_increment(Filter = #filter{size = Size, bitmask = Bitmask, bitfilter = Bitfilter}, KeyBin) -> +bin_increment( + Filter = #filter{size = Size, bitmask = Bitmask, bitfilter = Bitfilter, range_max = RangeMax}, + KeyBin +) -> <> = KeyBin, Key1 = Key0 + 1, if - Key1 band Bitmask =:= Bitfilter -> - %% TODO: check overflow + Key1 band Bitmask =:= Bitfilter, Key1 =< RangeMax -> <>; true -> case ratchet(Filter, Key1) of @@ -372,6 +415,10 @@ bin_increment(Filter = #filter{size = Size, bitmask = Bitmask, bitfilter = Bitfi end end. +%% @doc Given a filter and a binary representation of a key, return +%% `false' if the key _doesn't_ match the fitler. This function +%% returning `true' is necessary, but not sufficient condition that +%% the key satisfies the filter. -spec bin_checkmask(filter(), binary()) -> boolean(). bin_checkmask(#filter{size = Size, bitmask = Bitmask, bitfilter = Bitfilter}, Key) -> case Key of @@ -449,35 +496,37 @@ ratchet_do(Ranges, Key, I, Pivot, Increment) -> -spec make_bitfilter(keymapper(), [{non_neg_integer(), non_neg_integer()}]) -> {non_neg_integer(), non_neg_integer()}. make_bitfilter(Keymapper = #keymapper{dim_sizeof = DimSizeof}, Ranges) -> - L = lists:map( + L = lists:zipwith( fun - ({{N, N}, Bits}) -> + ({N, N}, Bits) -> %% For strict equality we can employ bitmask: {ones(Bits), N}; - (_) -> + (_, _) -> {0, 0} end, - lists:zip(Ranges, DimSizeof) + Ranges, + DimSizeof ), {Bitmask, Bitfilter} = lists:unzip(L), {vector_to_key(Keymapper, Bitmask), vector_to_key(Keymapper, Bitfilter)}. %% Transform inequalities into a list of closed intervals that the %% vector elements should lie in. 
-inequations_to_ranges(#keymapper{dim_sizeof = DimSizeof}, Filter) -> - lists:map( +constraints_to_ranges(#keymapper{dim_sizeof = DimSizeof}, Filter) -> + lists:zipwith( fun - ({any, Bitsize}) -> + (any, Bitsize) -> {0, ones(Bitsize)}; - ({{'=', infinity}, Bitsize}) -> + ({'=', infinity}, Bitsize) -> Val = ones(Bitsize), {Val, Val}; - ({{'=', Val}, _Bitsize}) -> + ({'=', Val}, _Bitsize) -> {Val, Val}; - ({{'>=', Val}, Bitsize}) -> + ({'>=', Val}, Bitsize) -> {Val, ones(Bitsize)} end, - lists:zip(Filter, DimSizeof) + Filter, + DimSizeof ). -spec fold_bitsources(fun((_DstOffset :: non_neg_integer(), bitsource(), Acc) -> Acc), Acc, [ @@ -679,7 +728,7 @@ ratchet1_test() -> ?assertEqual(0, ratchet(F, 0)), ?assertEqual(16#fa, ratchet(F, 16#fa)), ?assertEqual(16#ff, ratchet(F, 16#ff)), - ?assertEqual(overflow, ratchet(F, 16#100), "TBD: filter must store the upper bound"). + ?assertEqual(overflow, ratchet(F, 16#100)). %% erlfmt-ignore ratchet2_test() -> @@ -696,6 +745,11 @@ ratchet2_test() -> ?assertEqual(16#aa11cc00, ratchet(F1, 16#aa10dc11)), ?assertEqual(overflow, ratchet(F1, 16#ab000000)), F2 = make_filter(M, [{'=', 16#aa}, {'>=', 16#dddd}, {'=', 16#cc}]), + %% TODO: note that it's `16#aaddcc00` instead of + %% `16#aaddccdd'. That is because currently ratchet function + %% doesn't take LSBs of an '>=' interval if it has a hole in the + %% middle (see `make_filter/2'). This only adds extra keys to the + %% very first interval, so it's not deemed a huge problem. ?assertEqual(16#aaddcc00, ratchet(F2, 0)), ?assertEqual(16#aa_de_cc_00, ratchet(F2, 16#aa_dd_cd_11)). @@ -721,18 +775,18 @@ ratchet3_test_() -> %% Note: this function iterates through the full range of keys, so its %% complexity grows _exponentially_ with the total size of the %% keymapper. -test_iterate(Filter, overflow) -> +test_iterate(_Filter, overflow) -> true; test_iterate(Filter, Key0) -> Key = ratchet(Filter, Key0 + 1), ?assert(ratchet_prop(Filter, Key0, Key)), test_iterate(Filter, Key). 
-ratchet_prop(Filter = #filter{bitfilter = Bitfilter, bitmask = Bitmask, size = Size}, Key0, Key) -> +ratchet_prop(#filter{bitfilter = Bitfilter, bitmask = Bitmask, size = Size}, Key0, Key) -> %% Validate basic properties of the generated key. It must be %% greater than the old key, and match the bitmask: ?assert(Key =:= overflow orelse (Key band Bitmask =:= Bitfilter)), - ?assert(Key > Key0, {Key, '>=', Key}), + ?assert(Key > Key0, {Key, '>=', Key0}), IMax = ones(Size), %% Iterate through all keys between `Key0 + 1' and `Key' and %% validate that none of them match the bitmask. Ultimately, it @@ -750,7 +804,7 @@ ratchet_prop(Filter = #filter{bitfilter = Bitfilter, bitmask = Bitmask, size = S CheckGaps(Key0 + 1). mkbmask(Keymapper, Filter0) -> - Filter = inequations_to_ranges(Keymapper, Filter0), + Filter = constraints_to_ranges(Keymapper, Filter0), make_bitfilter(Keymapper, Filter). key2vec(Schema, Vector) -> diff --git a/apps/emqx_durable_storage/src/emqx_ds_helper.erl b/apps/emqx_durable_storage/src/emqx_ds_helper.erl deleted file mode 100644 index 5b55831d1..000000000 --- a/apps/emqx_durable_storage/src/emqx_ds_helper.erl +++ /dev/null @@ -1,73 +0,0 @@ -%%-------------------------------------------------------------------- -%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. -%% -%% Licensed under the Apache License, Version 2.0 (the "License"); -%% you may not use this file except in compliance with the License. -%% You may obtain a copy of the License at -%% -%% http://www.apache.org/licenses/LICENSE-2.0 -%% -%% Unless required by applicable law or agreed to in writing, software -%% distributed under the License is distributed on an "AS IS" BASIS, -%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -%% See the License for the specific language governing permissions and -%% limitations under the License. -%%-------------------------------------------------------------------- --module(emqx_ds_helper). 
- -%% API: --export([create_rr/1]). - -%% internal exports: --export([]). - --export_type([rr/0]). - -%%================================================================================ -%% Type declarations -%%================================================================================ - --type item() :: {emqx_ds:stream_rank(), emqx_ds:stream()}. - --type rr() :: #{ - queue := #{term() => [{integer(), emqx_ds:stream()}]}, - active_ring := {[item()], [item()]} -}. - -%%================================================================================ -%% API funcions -%%================================================================================ - --spec create_rr([item()]) -> rr(). -create_rr(Streams) -> - RR0 = #{latest_rank => #{}, active_ring => {[], []}}, - add_streams(RR0, Streams). - --spec add_streams(rr(), [item()]) -> rr(). -add_streams(#{queue := Q0, active_ring := R0}, Streams) -> - Q1 = lists:foldl( - fun({{RankX, RankY}, Stream}, Acc) -> - maps:update_with(RankX, fun(L) -> [{RankY, Stream} | L] end, Acc) - end, - Q0, - Streams - ), - Q2 = maps:map( - fun(_RankX, Streams1) -> - lists:usort(Streams1) - end, - Q1 - ), - #{queue => Q2, active_ring => R0}. 
- -%%================================================================================ -%% behavior callbacks -%%================================================================================ - -%%================================================================================ -%% Internal exports -%%================================================================================ - -%%================================================================================ -%% Internal functions -%%================================================================================ diff --git a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl index 9b1ff5c7c..34bb66031 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl @@ -13,6 +13,9 @@ %% See the License for the specific language governing permissions and %% limitations under the License. %%-------------------------------------------------------------------- + +%% @doc Replication layer for DS backends that don't support +%% replication on their own. -module(emqx_ds_replication_layer). -export([ diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl index b85fb48b0..85f4f5aa7 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl @@ -14,11 +14,8 @@ %% limitations under the License. %%-------------------------------------------------------------------- -%% @doc Reference implementation of the storage. -%% -%% Trivial, extremely slow and inefficient. It also doesn't handle -%% restart of the Erlang node properly, so obviously it's only to be -%% used for testing. +%% @doc A storage layout based on learned topic structure and using +%% bitfield mapping for the varying topic layers. 
-module(emqx_ds_storage_bitfield_lts). -behaviour(emqx_ds_storage_layer). @@ -82,6 +79,9 @@ -define(COUNTER, emqx_ds_storage_bitfield_lts_counter). +%% Limit on the number of wildcard levels in the learned topic trie: +-define(WILDCARD_LIMIT, 10). + -include("emqx_ds_bitmask.hrl"). %%================================================================================ @@ -140,7 +140,7 @@ open(_Shard, DBHandle, GenId, CFRefs, Schema) -> %% If user's topics have more than learned 10 wildcard levels %% (more than 2, really), then it's total carnage; learned topic %% structure won't help. - MaxWildcardLevels = 10, + MaxWildcardLevels = ?WILDCARD_LIMIT, KeymapperCache = array:from_list( [ make_keymapper(TopicIndexBytes, BitsPerTopicLevel, TSBits, TSOffsetBits, N) @@ -201,6 +201,9 @@ next(_Shard, #s{db = DB, data = CF, keymappers = Keymappers}, It0, BatchSize) -> %% levels. Magic constant 2: we have two extra dimensions of topic %% index and time; the rest of dimensions are varying levels. NVarying = length(Inequations) - 2, + %% Assert: + NVarying =< ?WILDCARD_LIMIT orelse + error({too_many_varying_topic_levels, NVarying}), Keymapper = array:get(NVarying, Keymappers), Filter = #filter{range_min = LowerBound, range_max = UpperBound} = emqx_ds_bitmask_keymapper:make_filter( @@ -208,7 +211,7 @@ next(_Shard, #s{db = DB, data = CF, keymappers = Keymappers}, It0, BatchSize) -> ), {ok, ITHandle} = rocksdb:iterator(DB, CF, [ {iterate_lower_bound, emqx_ds_bitmask_keymapper:key_to_bitstring(Keymapper, LowerBound)}, - {iterate_upper_bound, emqx_ds_bitmask_keymapper:key_to_bitstring(Keymapper, UpperBound)} + {iterate_upper_bound, emqx_ds_bitmask_keymapper:key_to_bitstring(Keymapper, UpperBound + 1)} ]), try put(?COUNTER, 0), diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl index 8b2e3cc61..32ca85935 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl +++ 
b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl @@ -45,7 +45,7 @@ %% Note: this record might be stored permanently on a remote node. -record(stream, { generation :: gen_id(), - enc :: _EncapsultatedData, + enc :: _EncapsulatedData, misc = #{} :: map() }). @@ -54,7 +54,7 @@ %% Note: this record might be stored permanently on a remote node. -record(it, { generation :: gen_id(), - enc :: _EncapsultatedData, + enc :: _EncapsulatedData, misc = #{} :: map() }). @@ -83,10 +83,10 @@ %%%% Shard: -type shard(GenData) :: #{ - %% ID of the current generation (where the new data is written:) + %% ID of the current generation (where the new data is written): current_generation := gen_id(), %% This data is used to create new generation: - prototype := {module(), term()}, + prototype := prototype(), %% Generations: {generation, gen_id()} => GenData }. diff --git a/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl b/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl index df9115a78..c79f94377 100644 --- a/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl +++ b/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl @@ -68,5 +68,4 @@ next(Node, Shard, Iter, BatchSize) -> %%================================================================================ introduced_in() -> - %% FIXME - "5.3.0". + "5.4.0". 
diff --git a/tdd b/tdd deleted file mode 100755 index 197891df6..000000000 --- a/tdd +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash - -make fmt > /dev/null &>1 & - -./rebar3 ct --name ct@127.0.0.1 --readable=true --suite ./_build/test/lib/emqx/test/emqx_persistent_session_SUITE.beam --case t_publish_while_client_is_gone_qos1 --group tcp - -suites=$(cat < Date: Tue, 31 Oct 2023 16:15:54 +0100 Subject: [PATCH 093/155] fix(ds): Fix static checks --- Makefile | 2 +- apps/emqx/priv/bpapi.versions | 1 + apps/emqx/src/emqx_persistent_session_ds.erl | 44 +++++++++++++++--- .../emqx_persistent_session_ds_proto_v1.erl | 5 +- apps/emqx_durable_storage/src/emqx_ds.erl | 13 ------ .../src/emqx_ds_replication_layer.erl | 19 +++----- .../src/proto/emqx_ds_proto_v1.erl | 16 ++++--- topic_match_test.png | Bin 176221 -> 0 bytes 8 files changed, 59 insertions(+), 41 deletions(-) delete mode 100644 topic_match_test.png diff --git a/Makefile b/Makefile index ed10a09fd..8e8f4b493 100644 --- a/Makefile +++ b/Makefile @@ -85,7 +85,7 @@ $(REL_PROFILES:%=%-compile): $(REBAR) merge-config .PHONY: ct ct: $(REBAR) merge-config - ENABLE_COVER_COMPILE=1 $(REBAR) ct --name $(CT_NODE_NAME) -c -v --cover_export_name $(CT_COVER_EXPORT_PREFIX)-ct + @ENABLE_COVER_COMPILE=1 $(REBAR) ct --name $(CT_NODE_NAME) -c -v --cover_export_name $(CT_COVER_EXPORT_PREFIX)-ct ## only check bpapi for enterprise profile because it's a super-set. .PHONY: static_checks diff --git a/apps/emqx/priv/bpapi.versions b/apps/emqx/priv/bpapi.versions index 47967cb1e..f647c660f 100644 --- a/apps/emqx/priv/bpapi.versions +++ b/apps/emqx/priv/bpapi.versions @@ -18,6 +18,7 @@ {emqx_dashboard,1}. {emqx_delayed,1}. {emqx_delayed,2}. +{emqx_ds,1}. {emqx_eviction_agent,1}. {emqx_eviction_agent,2}. {emqx_exhook,1}. 
diff --git a/apps/emqx/src/emqx_persistent_session_ds.erl b/apps/emqx/src/emqx_persistent_session_ds.erl index c99b8c947..abecb72a2 100644 --- a/apps/emqx/src/emqx_persistent_session_ds.erl +++ b/apps/emqx/src/emqx_persistent_session_ds.erl @@ -61,6 +61,13 @@ %% session table operations -export([create_tables/0]). +%% Remove me later (satisfy checks for an unused BPAPI) +-export([ + do_open_iterator/3, + do_ensure_iterator_closed/1, + do_ensure_all_iterators_closed/1 +]). + -ifdef(TEST). -export([session_open/1]). -endif. @@ -268,13 +275,17 @@ get_subscription(TopicFilter, #{iterators := Iters}) -> {ok, emqx_types:publish_result(), replies(), session()} | {error, emqx_types:reason_code()}. publish(_PacketId, Msg, Session) -> - ok = emqx_ds:store_batch(?PERSISTENT_MESSAGE_DB, [Msg]), - {ok, persisted, [], Session}. + %% TODO: + Result = emqx_broker:publish(Msg), + {ok, Result, [], Session}. %%-------------------------------------------------------------------- %% Client -> Broker: PUBACK %%-------------------------------------------------------------------- +%% FIXME: parts of the commit offset function are mocked +-dialyzer({nowarn_function, puback/3}). + -spec puback(clientinfo(), emqx_types:packet_id(), session()) -> {ok, emqx_types:message(), replies(), session()} | {error, emqx_types:reason_code()}. @@ -323,17 +334,16 @@ pubcomp(_ClientInfo, _PacketId, _Session = #{}) -> %%-------------------------------------------------------------------- -spec deliver(clientinfo(), [emqx_types:deliver()], session()) -> - {ok, emqx_types:message(), replies(), session()}. + {ok, replies(), session()}. deliver(_ClientInfo, _Delivers, Session) -> - %% This may be triggered for the system messages. FIXME. + %% TODO: QoS0 and system messages end up here. {ok, [], Session}. --spec handle_timeout(clientinfo(), emqx_session:common_timer_name(), session()) -> +-spec handle_timeout(clientinfo(), _Timeout, session()) -> {ok, replies(), session()} | {ok, replies(), timeout(), session()}. 
handle_timeout(_ClientInfo, pull, Session = #{id := Id, inflight := Inflight0}) -> WindowSize = 100, {Publishes, Inflight} = emqx_persistent_message_ds_replayer:poll(Id, Inflight0, WindowSize), - %%logger:warning("Inflight: ~p", [Inflight]), ensure_timer(pull), {ok, Publishes, Session#{inflight => Inflight}}; handle_timeout(_ClientInfo, get_streams, Session = #{id := Id}) -> @@ -601,6 +611,26 @@ new_subscription_id(DSSessionId, TopicFilter) -> DSSubId = {DSSessionId, TopicFilter}, {DSSubId, NowMS}. +%%-------------------------------------------------------------------- +%% RPC targets (v1) +%%-------------------------------------------------------------------- + +%% RPC target. +-spec do_open_iterator(emqx_types:words(), emqx_ds:time(), emqx_ds:iterator_id()) -> + {ok, emqx_ds_storage_layer:iterator()} | {error, _Reason}. +do_open_iterator(_TopicFilter, _StartMS, _IteratorID) -> + {error, not_implemented}. + +%% RPC target. +-spec do_ensure_iterator_closed(emqx_ds:iterator_id()) -> ok. +do_ensure_iterator_closed(_IteratorID) -> + ok. + +%% RPC target. +-spec do_ensure_all_iterators_closed(id()) -> ok. +do_ensure_all_iterators_closed(_DSSessionID) -> + ok. + %%-------------------------------------------------------------------- %% Reading batches %%-------------------------------------------------------------------- @@ -677,5 +707,5 @@ export_record(_, _, [], Acc) -> -spec ensure_timer(pull | get_streams) -> ok. ensure_timer(Type) -> - emqx_utils:start_timer(100, {emqx_session, Type}), + _ = emqx_utils:start_timer(100, {emqx_session, Type}), ok. 
diff --git a/apps/emqx/src/proto/emqx_persistent_session_ds_proto_v1.erl b/apps/emqx/src/proto/emqx_persistent_session_ds_proto_v1.erl index d35ccd963..e879b495c 100644 --- a/apps/emqx/src/proto/emqx_persistent_session_ds_proto_v1.erl +++ b/apps/emqx/src/proto/emqx_persistent_session_ds_proto_v1.erl @@ -20,6 +20,7 @@ -export([ introduced_in/0, + deprecated_since/0, open_iterator/4, close_iterator/2, @@ -31,9 +32,11 @@ -define(TIMEOUT, 30_000). introduced_in() -> - %% FIXME "5.3.0". +deprecated_since() -> + "5.4.0". + -spec open_iterator( [node()], emqx_types:words(), diff --git a/apps/emqx_durable_storage/src/emqx_ds.erl b/apps/emqx_durable_storage/src/emqx_ds.erl index c8199239f..1e7f88367 100644 --- a/apps/emqx_durable_storage/src/emqx_ds.erl +++ b/apps/emqx_durable_storage/src/emqx_ds.erl @@ -30,9 +30,6 @@ %% Message replay API: -export([get_streams/3, make_iterator/3, next/2]). -%% Iterator storage API: --export([save_iterator/3, get_iterator/2]). - %% Misc. API: -export([]). @@ -101,8 +98,6 @@ -type message_id() :: emqx_ds_replication_layer:message_id(). --type iterator_id() :: term(). - -type get_iterator_result(Iterator) :: {ok, Iterator} | undefined. %%================================================================================ @@ -182,14 +177,6 @@ make_iterator(Stream, TopicFilter, StartTime) -> next(Iter, BatchSize) -> emqx_ds_replication_layer:next(Iter, BatchSize). --spec save_iterator(db(), iterator_id(), iterator()) -> ok. -save_iterator(DB, ITRef, Iterator) -> - emqx_ds_replication_layer:save_iterator(DB, ITRef, Iterator). - --spec get_iterator(db(), iterator_id()) -> get_iterator_result(iterator()). -get_iterator(DB, ITRef) -> - emqx_ds_replication_layer:get_iterator(DB, ITRef). 
- %%================================================================================ %% Internal exports %%================================================================================ diff --git a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl index 34bb66031..d61dfa906 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl @@ -25,9 +25,7 @@ store_batch/3, get_streams/3, make_iterator/3, - next/2, - save_iterator/3, - get_iterator/2 + next/2 ]). %% internal exports: @@ -169,14 +167,6 @@ next(Iter0, BatchSize) -> Other end. --spec save_iterator(db(), emqx_ds:iterator_id(), iterator()) -> ok. -save_iterator(_DB, _ITRef, _Iterator) -> - error(todo). - --spec get_iterator(db(), emqx_ds:iterator_id()) -> emqx_ds:get_iterator_result(iterator()). -get_iterator(_DB, _ITRef) -> - error(todo). - %%================================================================================ %% behavior callbacks %%================================================================================ @@ -198,12 +188,15 @@ do_drop_shard_v1(Shard) -> do_get_streams_v1(Shard, TopicFilter, StartTime) -> emqx_ds_storage_layer:get_streams(Shard, TopicFilter, StartTime). --spec do_make_iterator_v1(shard_id(), _Stream, emqx_ds:topic_filter(), emqx_ds:time()) -> +-spec do_make_iterator_v1( + shard_id(), emqx_ds_storage_layer:stream(), emqx_ds:topic_filter(), emqx_ds:time() +) -> {ok, iterator()} | {error, _}. do_make_iterator_v1(Shard, Stream, TopicFilter, StartTime) -> emqx_ds_storage_layer:make_iterator(Shard, Stream, TopicFilter, StartTime). --spec do_next_v1(shard_id(), Iter, pos_integer()) -> emqx_ds:next_result(Iter). +-spec do_next_v1(shard_id(), emqx_ds_storage_layer:iterator(), pos_integer()) -> + emqx_ds:next_result(emqx_ds_storage_layer:iterator()). 
do_next_v1(Shard, Iter, BatchSize) -> emqx_ds_storage_layer:next(Shard, Iter, BatchSize). diff --git a/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl b/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl index c79f94377..c974b253f 100644 --- a/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl +++ b/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl @@ -28,25 +28,29 @@ %% API funcions %%================================================================================ --spec open_shard(node(), emqx_ds_replication_layer:shard(), emqx_ds:create_db_opts()) -> +-spec open_shard(node(), emqx_ds_replication_layer:shard_id(), emqx_ds:create_db_opts()) -> ok. open_shard(Node, Shard, Opts) -> erpc:call(Node, emqx_ds_replication_layer, do_open_shard_v1, [Shard, Opts]). --spec drop_shard(node(), emqx_ds_replication_layer:shard()) -> +-spec drop_shard(node(), emqx_ds_replication_layer:shard_id()) -> ok. drop_shard(Node, Shard) -> erpc:call(Node, emqx_ds_replication_layer, do_drop_shard_v1, [Shard]). -spec get_streams( - node(), emqx_ds_replication_layer:shard(), emqx_ds:topic_filter(), emqx_ds:time() + node(), emqx_ds_replication_layer:shard_id(), emqx_ds:topic_filter(), emqx_ds:time() ) -> [{integer(), emqx_ds_replication_layer:stream()}]. get_streams(Node, Shard, TopicFilter, Time) -> erpc:call(Node, emqx_ds_replication_layer, do_get_streams_v1, [Shard, TopicFilter, Time]). -spec make_iterator( - node(), emqx_ds_replication_layer:shard(), _Stream, emqx_ds:topic_filter(), emqx_ds:time() + node(), + emqx_ds_replication_layer:shard_id(), + emqx_ds_storage_layer:stream(), + emqx_ds:topic_filter(), + emqx_ds:time() ) -> {ok, emqx_ds_replication_layer:iterator()} | {error, _}. make_iterator(Node, Shard, Stream, TopicFilter, StartTime) -> @@ -55,9 +59,9 @@ make_iterator(Node, Shard, Stream, TopicFilter, StartTime) -> ]). 
-spec next( - node(), emqx_ds_replication_layer:shard(), emqx_ds_replication_layer:iterator(), pos_integer() + node(), emqx_ds_replication_layer:shard_id(), emqx_ds_storage_layer:iterator(), pos_integer() ) -> - {ok, emqx_ds_replication_layer:iterator(), [emqx_types:messages()]} + {ok, emqx_ds_storage_layer:iterator(), [emqx_types:messages()]} | {ok, end_of_stream} | {error, _}. next(Node, Shard, Iter, BatchSize) -> diff --git a/topic_match_test.png b/topic_match_test.png deleted file mode 100644 index 6ff1a8911a6eb5a204c7e72271e3342f0a70a473..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 176221 zcmagG30zKXyEdN36h$Ewsf-~_lA@8KG}EkEqBPPx*Fzzxkfgb!S&}r5C?shf?;NBzUP%>Hf`Lwk&23HlkB-ODpXW7 z4OCSBI8pzDzsYFt$ie@tGm@7%L$yr&UrbSA5Ea#KD%mrq)tn-RI-KoPuY6w~3m51J zAEDv)d2KJ>nop%i6~C7|%IDzDgk+58zJS-Ie^#f#e7+W1vfRaISG9hbrfzt_|2s3AUv{Gn}aZF9z4p$fhI{r&jXk&%(X!NF1T z2fxAR*Doe!X69YQXQ;co=HPqEqtPDZPi^G1Uq|`UhpEZQ$w^5`uU`FbNLD>?;6S`Y z=63VPa#v;q@0XO6goK17CMNp%`K6|&7LY&q4W6Q?s3<%kH{bV=KJ^W zb8{}I=^x9*x4iSce?Nl%ikyPNm#0TB@291ud|Bz=mpi#HsKRdc_qUxP2oZEFkg8&22Hun)~yoe1S_`JSzSM z`MRaQrTFVgsjaQBuuzRteW+TEGgyHmNcd`(xs}z| zs;a8bC5k(HJ0 z?CexYWH}=&EF8clmhZW|^ybZ*`}b*2)8kg%JM5deFTOUedoHVzNO`NdHjk~z*Qesa zwkEyQD-g73dF4FO+tk!FJL}{xeav%dp`}GDId0P`j-YGRQDVZ8N~w(+YHCSpDcg@| zywJ+n%g1+qz?5>45dXh{bLCT2mHSMGqmV^QO-suO$B}l4g>Q87Av{y#dv_k;&t;+4pqmSKN<5U+ip_E`}tGtxio>Fd0N$rue9y?9DMrp zX_Y&aOZ9E)VIg1B%WrUCU~asU*TCtUPhDNzJl>=4!KkMvZaiytrcP{Oj)e&vj0?O!g7Wh3$+Dxfma!zf;I6qQO^sS7z%%r|anA z5BBB0p&{KOm#O!35?^jgk-y)N{*gQ@`FB)?*RNSy8|2wsk(9idljCS*HS&?&Q(;1S ztCEtEviKk48#l(J=)@+=wkhvFcraSrU3=g>_@M}e!(l7$K%n%hYz1V?fd!jad>zohs-TG z3yUFy6uV1bEY@$LC&b{1@MiKCmU>AC32sCJjg(~bx8+@>G7V*<=KxCR&8&t-4(Q+ zbXi0OI;a>nStrk1guudEjPB^@(0r-e+Vw+xszDWn!M69ypZTukVzb7SPoK_NTaR+< zWKY&#CpJ`5noe_?&J6zXR6RZAOG3_G&C-tlO8LX?`276*+}vE>Z3jnMGTwjvsx)vy z^U%qYfAFmmOS5rEyt7}J4BRg9hmPd^it=D}Y|p(xq{Ua4Zp~gcH#bL`OIA$`V3%NH zW9wADt{YG9zU#HI|Zj(m2{g$wlFM1~3%QckRo6t@42d^`5Nd}(Rv 
z`$rQ^=-5O}1$L@)HdYCBaYdmpyfUedN=Qh^xcm{nw*BLm zZh7t6;Uh;1t{+QS&+b;!)wPVhkS%6An3l6{{rdEanbHe41{#uuI-P!OFDXz>RGK=( zy>{KYjW^G0YC4uT7c!)mEX_UP*3owz&cYAH8LVH4LDnOb^6$09a=0yvGgvN7W2TE9 zwcu)|b7P7+yT|M$UEOw*X!omp%=&I))spKtLXw)>=aMLhDi_Q7xL%io_&-VIAj*Wd}4`dPgnq40h85t=V`=G+uCQrAv z>g}!dnrYhV7cU+Ts$key-dhz;{PFzx-654%_(OM7rx#7P{h9dX`O8Iuskrd@^E33` zeSLjhwyj-1dwZu7B^LYR0>uaLSIz%Yc9+Y+Ozkuk6UrYRD43e|VRsks($=$c9ywAz zWmx>DH~g$0qi^+>h?DhLcG`6k5)xwqD75h^&L3GF3&%dN@P`gzt27Kwu?Sh|W|=i2 z3?4su@+2&bGs+n|>{?b^dwcucyVNA}U)9m+_5BvNUg{Uk{rMGLT~QG#?w-G!8b$Qz z(W93?2B7NM*xTzNryzOEO%1yG>Zz*QA~UR8ySCp>IB!9vq-;AAljg;Xe|`i?2%kKu z*O}hWzk~Mn-Me?C?rg3KJD&QVx3%`~+j^LaEed>7pP&&3npv# z?cIyras1ZX-?KjgN4rWqJM!&Ei|4IIc@G~tgfi5TWgb#%T3B`g(H#;Qxri-1?MPhZ z)hj+GrjU2<-Yv0Fifjzi#$-z6p*oikA75^km7kZl>C5#C7cO*oGRRAWhK6=T-@kwV zO{0b=`GW}XmyT25&?$1s8h$hJtD5HKb%2%1jlc1FWfSY&f4^NHE@;`7dm}C}(PZ>j zb)?7)^7pReW{(RB=HqoGczH*aJ?H9mtc$V?-`=d;=x_ySEj4v8p6M248_cauX1um$J$C3hpoSF=ZRF{m`4Km z5XjCOR8dxxBw;_$@Xm*Z-E}Af6=LDFie5%`b~f-BDM63&m5*XpAfGf( zr~)Gki)(l3zbN&zrPmb|6|oMQ>0{&L1B>c_8aH>(&d;ao=8eyo%nqccOG-)6Y~0A^ z)cx-!7QuxjntS*B{C-u1I}&?yVZ74azN+hSXejc|4kjic!+n%U);-7b7q=hoxc+_< zHm*RZ!fHMZ4-bzSb|6j;YDqUYx6$-_0RbAE!7fF`K)v%5bP@}{3r34wFRnt;N6jp9 z9PM-+$us%!!9Ut#uK!BWLUY7P+pC?0&N&?pB#EUly0K7f{e#hNua$W^{Q@PHoaQF` zfn57W|Ayb`6>NCl6j?K;+~{KF)hep{_TS5V0pol>1_uYDz85DZ{)%+$Y@JVyh=_>7 zHvt-=z?Mz_oayo`aTsp%_V$)%FTySl^j!E+vb;Fz<>i&r<>89U00Wp_yLOG27a%-J z)Oj55p&&osV>klH7?`UAcqYAgE@ikPEG&$GgmsyE|K7ZLvs!$92(Y5B`2ytyd2Ayo zkr9N~ma69F83Zp1f6w=FKUD6{^vfL{^TXy%FD@21q=keu1Ki)2#-gw+E-tP;{pr)E zS-?#$#!8eSefKH#V7V;Q+Px&2<+r_@}Pst;?CngQI@8b=0{~@WN2D5|4!+_#3>3DoxVStKy?l2n1W$B`hv%z>py&K>&eYTtI;5Ms8MbdfDqZUHC&4 za@uX=?d&FpbGkY`<0ucVyYaIS`FDoTT~RrK^&H-B5;DGeb=|sk(_Nx`^ z2L=Wj6gu5Fb;@g223h-R=AU%W#nj!@S2Od_G0oEq4-KL2M#`|e+s%x0`1|{B*sy`O zc=lOzbo6W@D=TYs*~z$iHPVcp5@y-zKv!@faobHDFM4q8kv|hEB}3< zy{+l)*_1fovx$)K^Z+Pk^gF=KZHevT?y-o?Ra#mqgL2fhyyO^d;C80JuMhFHp2%fU zQ67!znzF3x$!l`z!Cx$R%0*mU95rs@_wT32&6`lC;sb8^9oy+I)PDUv-f5Qe#MQ8s 
zg?#Wg*mqOkq^GBUg(#Yxon1@ASUB-jgsDTW|FhfDpI@k)*<|RUSL9-kEMTDLeCDLh z&rdHdX1>nMoEh!%T%2l_U%8R)gMX2>$M@de>?@T|5)qNfiBWP&O5*kdYSFq)Mj0ng zo-CUE?uV8aZ^z;vk32NdQHbBNvNAmI`#W?WKYWmjq@?BU5@WY^N?xv!Qd0Vg@N))c zNQ`p9#)`6WbPN%$rM{A-onz5D-foK=>o_t60EtyXGHr-m37UWZf>jqJ5^>!8=inpD zQ|X?H5hupdayt3Lt5ojnyn*!(dhppqC>hop!PXa@XxP&sO7U|@V=LMPuYIXs-ymSv9K)C!P`*segjDtB0K zBjY!{)FXPwC&9sbSbt@S#mV+O+xX3E$q)RSWu;;CdYGD#F*ZB)4P8MI}~ zmcLC^?*5fmT}oY7Q*-@$*#;d_LQG7lSr#I)EkzK8lgO|=8#PkxmzNfa#GIfQ4k7_* zZh&M}pqQpHKR-{`QoziHFC#i(V6gGQ^b#vGvoIm+PD|u&v*?Sq=AVrNcUWpP=*IWOt;HCJ&!gh_Uit1`!9v;Ru zv96@6$gl1Na{d*@ZAGp##CKYE5wVM0k52X^a$2NFbM}=+YE5-@d(=+?k6;0uot@Eu zR8&+DO{aac-VCzaQrGfg7h-h##&UH{Jt7-tX|rzKy4T*A{2smJ&#vq`TFVZSAmD%6 zix&(Yt=YbTfin{mEKm9J^78O3^E0CoPQMLpZ8HM{tI=8i`VzVgRn~pZj*c$yU*A1> z_~M0yg`uG#WH6$Ry?Ryi^()ag;bvtO6^qcOMn**BM;mBr=D5ySrD>!#nz8KM$)WHx zMk6(ZTj$Bsr=R#G$@gUVAZ`UB+edxgYt^l0tJk1}l!82x(C5@AUf zmlo%b@$;vore5K1E%V)W*@JP%jvc#paan#6Z)j*R=3}<8u|W>OCy-sjN-!MvV08KN zudk6JXv3dB-|y%+(^VKyZdCE*OGCe%PPWBSW?G;g8YZ3wQvsw83pvVuUw80tZFn`< znSIi27HGvkGE&dUDOW4Y7pdCcpG-Ymwf-6Qs_*u8b`4-{u&50U0Evwa4ZuGMFUX%aZ`9gbn|ia_^^%z@ff+b^b-$&tJbxu3TvZ z{xBt#efS_OA|fC8h8?{oT7r-#PwuS^dGu)OST1s3R#u}!7>LXC^mJ5Lkd+`V@d2Sp zVKZ}&1gRvy;THS>Ud!5=mrY1OphbMG85o3t0W)*+W1$M0HfdbxK{gkd*3aOL4w`1(5a84R{HO1?KFpp-YQT$=w3mp?exF; z?VXR6mDTE96ei)+-+WH{2A%w^w}v_-j)Mmi0sK2V#RUW!G_rjC{A_J(9troyfaWdR zZi?F}UwaL|0>Eg6C8ww7WSMyT1zVK;ppicbBGkxHhHET0FVES<1)T$4(b_d@?rvwt zvg0G$Ib~fpI7KGVt{NMo0)pgCZX7^?T)TGd-voD`YbB+gDjBTJu0V)dTH0T~{#4-F zqckGZ(E^Pn2O0HY>&wc^*Z12Y_Q4z0*4CP3X~)%~e!e4E3W;@Ubo=;f0d&CqTSf~CZAK7alk8v#(DW{`qV z7VoT3MSZJ>hlf$6-l$uGWo&C|N>JlWREjji`tkFt@Px=mGVGI_QB01Aj0Dwwd}r0O zvwqV4I}g|kkRySw{cjG}d}k`aCgPw#-cDdi3yX@t*0_IueE5wepA#Bauq;xkjR}c~ zj&^n}g7FCAVBwwQi!gKi&Cc?5?d=ia;pQkkX!1c3*ViAz!!~7KNLB-m9C%)O79{9U zXOWKX=EP^ujKHxq>$|3Drzy$GUYb8fzS?iszqiEH+|ckCxH-=9M^8_@O0p@*un)EN zb43LfSU8gB$dSs*N^_8{2=CfwXMu|Ua;Z{uE3Mk^pxd`^Z#$^e(k6%(tC4yf4CzoS z)*B3PQqrO1%m29dPWIQM`e27PztjWZ6BpNytL4&ua|r9VLZ%viT7hPzpVSQu(whf8 zktSGJL@32hmUUa7<6lO0B;2|sd 
z>JGIcbO)#A78cx2)92?{Ui})egPwl3y}~Je+{m$Gs)_Yr3GiRp*`t5{G|tj4avC$D zqoYf%r}P|e@2>2E_n~Q)LBYZHD4w4`%Y6&63%xs287LtAb|_0Z+G#)+ zb*2Jn1vb#o1pR%1XI5Sy-wQiy(yo&>&r?$5{;P%zJS{!1^0yQf0BAwX36_hmHEl4n zH!^yr%M&6@VX_`DuPpoRY}6tkN4&t6)>a4^fRNJUMMp9tXxGdCy#LoyH3f7Z#0vw1 zE*}~uGl4K<%XpQ;_QK?~+P!Hd(a|17u|+2YmPGRq!=+1?va>9o#l<1<#0PAkwar#c zo*pJ8o<9Uy9hEYKM{g%;+TZ;h^Y_V;E^<8qzknQ!^(@kle)tfCJ+>C!4XN3nZ>QX(nO)&vR-ZjhXNIacndX7umHa_t-(!1<8c1W}-n6{n}CQLVC56m@kY z5M~Xss}^pyT8Llo_R>i0D0H^Hcrmo3#8Xc0o^;&B-0QJOyek{xl;?`5<5lMJ#uCLB z)W7Q+OB9xG$P@M|R3UQMZg#?H8sVsk;B+59eolq}j0_C>SDvxvh#f!R*?aeZF;Tzlu3vv3eAufi6;c#8w_;*_e0+S?n>S}U$7B>El9fw0ILa8!UI0w63(r?Ao=~BG()!J7!tiw{QRP zF_7TJ8_n$T3UA%I_4nmjuPpp^S*UshX2R_-FlePUp>JwwX+eU?u-&$O`vH3eDk&XJ zO-&`G0CFUvw}3_njta%zkCA(Pa`N#?1m#lyy)9ibq%JWfNi%?pXJ$Txi@a~bbJW+@ zXK4>Z9^zt@cXKPmjzV*N=8ShkEhWWxUZ9}z?t|!=@J%RvM!z^DBn;@i5tJa5*R5G| zj&)n<-OK!;fJSlk{n-|+DtdZim)@<((oU;2eOWwb<|DahWz$OwQ|`#?p}YmDMNLhO zy2OH1_VFW74-y}hhf#(Ws5g7z4z$5woInpf3JPj8n*s*{MfW)cz?V|5AZNat|Mvt# z^_v8O3SW=lEAcF8G>bfL{sfQq+E`Xg>*>m)F<)F+@7tOG&-79n3|vFPJ468H+nZm$ z948l#yob7R^ymemERux}uWx~r3~H$Le*uE5tiuBXZS!a4FJ~3l8*0GG&4(4Nq#Kc(Q}xSGau~~YPyz& z1|yN1c9Gwdixj1s;Pd|9QWF`8+USq99R2GBxEK=2cS+LLcH&pGXH-OlMTQm#sZ63- z+q35qB*Be7&!RhNs3!N264cOGsUBj;G^xg(gWAWsaF)@pe7qM03b+D4n3A%h;~V~7 z=^>cqfE_ojyJcw^4&)uGz~SmT4_&>*3h2SJ`Vgoq*WOSQLX9FFqZ9pIO2{0LDhN5T zWL}Y~v^4pD&7k>yv(d|cqS2kD=@m@J-eGIYvpqS9HDF_4Fm~+?QWnqq5g@|mK3VIzIB4L0f}V<&IkVNRyL45WMge%c*RT0K zJ(nLpX7HD`MYFlsd(~}qGAivtm$jK0=gRjzDAjHy-hkKJ__(+@L}@BZX6sOIFWBKX zQ2hY{ktiSx*S~!myUZ^NsY)ygk{;8VwAt@y&e9fDIy+l1Ugabh7?cn+@z6@P-Y%T!bd>IaNQq1W$+RMec!F2F8LRRg$ zz?&4*IXPVH5fZQxDM>3>sLv5lAjrO4y`?M(o0A|j;L){`esUL=Dp(}tuAt9RV`pI_mM=SQQSovnzn zuc-;!g^8M4L$@_)k`Nd?u{F@a5^}J~Dz8=QDk`sSe<`6OcxrU2Wr}%;G z3miwA(E8?Bq4STx-?zNW8%Rk7qdI@>{7RmTy-E49ZaDz~frt|~zCW{MxuC17YiJnL zaMUw|N#E%k#HO|!>n`-*Yfqb6S&6$&TNoQZZ^*-U0M%eow9_;a>(MRY*{h-@4AL}s zwcmWIug|pp_>9u+Gjpz_LLOQCjD`jn`eOZbCb~7J`#Q5jLAHZ!Z8$pr1)9?v5J&Fw z(~4mNvTnU#=iR4UO#<1(I4$|8nD;A1i$_L9y=*jd9B#Xkq4oX853~qK-4Jr-e$K6+ 
z0I9Vrfm7~l$r66=-p2n9FsJ7F8DSL}6h&MMMgI|Nf0+5G=Qyg(dv)e4k=pNuc}A&16@_O5H(Qx_#qWR4!!FzCKfw zejr||+kv>Bjsi#XthO|r96>=rFkI+fHBte+dTjDmP(|Nr%CZ}_yw>7^0*LzUQY3!l z$dP^f;$FUdDMLc^e<&|MJMf&!>s$4I>P~dD0VoLpccK%>)mf!rj0^a20z~QBg?z|}*{;0>5J><)I!b&xYhWLL6 z(Oz#?ve~&8d3h&=gqpKo`M))MrNxC!3QZ@$ay6CLZtAqOwDojs4Q5%8+PHXm*^zc( zHbP$|C$~|Ao80T~U#X7kv|)AA*N;lGRMpaIX>9E2@87X~`xSFik{V}mJsnl457f8Y zn^^3G^&A|s{roBmohPtS|BrtEKR9eo@ABmzV42Zt{Ihy>+_Pt}S+!(?n|mb8K}976 z6kh?_FH76f#l;1F2DHfKipd}Y<0%{dFA_V|Wo~MU6z7ebf)g!Ed-dwomNcZY@^Tml zD+;hndeKDw7Wo6}+EpeNhvL&t`_*_|;nU88mkBkxu zL<#d=!_ie#%(4GJd2c1AnXOiG!~q~f2rTE+)q{UmW-G$f1NMx#N-6JCg#wH))NI?=uRQzrpCNDd|MbFCUh@ANjNT;F-aRtH zLjmKZ^egm%M;-E3UrlUVo3@YS7DmRah$Ug+w#G*Ffu~ed-827TKtXha?*~I(33U3} zxB}ee-&g2Dc|PV?^bxYMzNj7~K~z($Fyb1v10Eh8Vyjav#=m@Ng*OO-1Q{1~^Di%1 zR(2*&_wSw6oFWHz>ABk0)@xu)%}KjN9KIOyw~|PAZr}dk@PpiAQ&I8NScI!sb{6I( zC4B;ifBd+5TN@*8S+tz zq~R&cm89TjE!-O@&@Y-RP* z@)8S})eJ58B{-C$6iBFD=aTaCUBF=gka2UT#m65I76$e(7YKX$^eJ1o3Y;GHW#2lfOXKr zP`~gix7MpufIH8hpEz*>!Y3=M`%uf#)AS4*eHzW+cnimsU@PhPEVw=fH zG%$w`93ZmmcZFn;Jl)R5#;Lxzz?X1xwMUW#O};ba`d><4-`Q!!9}08`9^b-(AsS#8 zl>XL0ylW{bDPB#!o&HeTiz2#V2=42fHTO(8e+XE3SO!st#b&E^b#eJ@{94{cLP`To zA+h*Fnbq1{Pew|VVN!+;0KTkI@$6Y2G5fT!@H_|9)i;Y4t z`}xyO)|$Kn*HM;kk|wU!6tT*o3c{q;)B^0HsVV&+2viu%WfuetEb87s3VHM9F8gFo zc6M0RRf;p4S`v3W%l!@cMc+!BRy#X-D;M=h(! zw*1VklwJNzVWuMcL~^!bw&ivQ(lM#KC)63&oQ^n4in?KEXLsYqmc%ZjRQ(7^$LN3k zBH2J^jVX(XiGivG7XpE&f$StNeL&G#N@Af~fVC_x?woExm*3uQaiqzPx3wj}bJ(m% zO#!;PuAW}Ga)o7=NtV50S#{0Y)5vwUj*d*}4&RtAoh=elvtX}Z4(lvQBmMa5owmJU z{*kEIz?tt8Vj~ZAwd4m5lxUT0kv|Du-bhA96ow)E4zbz6K0h%3_UzfSfnw7wKpEx! 
zTX{rJ^Mnu{Vmk3zLp;jNfty$(>6rB}Fx{dP&owo;`274KSO{_hy3uu87fm%idBTWQ z!Usr`KlDCfsmCYiL@V^_f#SZ)blFP2Ct4Xzz7{ED^Xe*n5L9PO->t8~8=ZyAjn)8&8-{jrn`!cF{zt)u>=fzeO5fO3yQp&sYlv}f0yh?63-NAicHP%v*C{+Uc7krUxQHFtM+)J{uFOZ2RYidEnS3YyLHC8l%LYZm=t@!Z+= z7hUX5m#rpcz>##W#W%UycZRMk1yccL@E3Mo-)jA#C7_(W+)NE2892%#a`L2oiBwHd z$d6x83CN=h^z_|>gY)J>bP62xkmi>@A4#XaSyNf82hfhC>y}vlQ(e5MjL?@mGU*z)!CrcgGmOM zWb6V|Pjyw*eQb`r%1%yBvvYI$vzxy9i8~k=uDcbg)$}JzDR?*o+HrQa#zNhzPvzz1 z&CQyHVt6{@Gdk*t!2-f=+FZQ1{r#8FV|47|?bf51NH`^jz(yMd7l2?rV`A%f1s&0m z;#@z_#pr-$LZ{QW9|wVQ$b@zS7N8@sUhHaf_N}?OIrz9n6j{+X==A7u_1xciTuyW| zH8nNbe`@NOGlXzWdzPH>Z$F3FOmMSc142R!CTkrMaywL^U_@leKTi?O- z9rd10iKO&&rsWN-BaC+??j70hYtS}=eO%cy4#s&z895EP%a%l3S`SIQM6e!3HcQc? ztYZ3XVr%w~($Z6mUb`xIgzHBPBjFy%zMVZ)^W!ev3LM82nWSy{!a}@edxco5~0p)88yNv(NJ%a@@!?h6>v}515_8|R!Z{7uphJEW+ zewRtpu^%2!4t(sc#qLs3c?5kRB*cV|xxJ$U4fJea9`=#wM-oY=P|C3268+Ad!Z*G{ z*O0mP9$nVADzmI8G1u&*q&~nTqiShUkbEeFTi3kBpo+#7%*$-pxN&R25FYK#ojt@% zN5)-S6l5fGJozz+ZLTNzL%9weN^)g2fno_N3XgxO@c$J>~kzQDOzpEr8Bj9;}g9KXxK-tjJJ{CTX8|I@WBZ58yj-Y<Wf(+UJv84vZmnF#t(guduw>YdHi{7`K3X81(PtSQQ{?!j}rii7*D!oDZJhD_VH zxAu~dG+|$zo4f7fbN0*`LJcRJx^J#w*;IZJp?O=BoMhd>^F*Drn3vv1>P_izlhtk!&y zmZk`z!AEdRNe3``H#OcEJTB44%{7dSzQbUGa3orkQzck+cq@BBEy3EjzDVC%FdRE> z-@bj8LBnvz!P@{yhuH-&U$!B8Rtsjw+FB@x%-gpgNBayPES!Y!SQAY&Zjsy=;O>Gw@uFyV$`azcn=@1c$ zFh}fUi>ztrK>w4Eq69k(2$r(1$T9wLahHKIfdvzkqY1NNuv|}fH^gnjT#)I!@Pk)Y z62ora-rvc5{Tfq4>FL%`kI-LhIb&8Wm>?pk_?XGtHFrNKz)uhN;pfjOD_4L146bRL z#LfGGflLxNpIfdz2UnP`#XDAEyK{w7-9LUDbDbW7REp1rZ5-!VF+Z}9jhuXIJ>9*D zVIV8~9(U?DGymhqkIP0zEt%Vy4Y8izo0ZBq+Tlfl0Sr+@Obx)ngEk$5X358hrMpF@ zLcKbngCS4|ZxpNpsIEu|Xc-}{ZT7QXeM=QT4|62m)-)VFaB#)dGBYwhfhHLi<}%f& zS&*AcxhYAQ3|B_}U_TQ)ew<)w3zMUtK0VU_IeF+vo8-NhKMCMz;zac`0K(H z5@1}$=tGCgU>f#n2$>#S(dw7gFI*r-G|~-9mXKU9Sf>xp0dm+y?$yleI5|0y?()Dz zQyhWS(Gs5J_qL-Y-p4`&;{*HvCbQz;vF1XO*uVcLM$mL~uYZGi3Pb?$7;=E}#0V0M zHpmddw!I2x&Xln){k{YHxyYeI=g5AIQZI7<4;Sq#>#kxLRMGb#55h7@AWXa_BBFJf zYTMfj9Y^(Vy{&0%JbvSQ8O#SlLOJ?h3Hvm9JHk)Smbln!kFFlBOqXwK;nP2g?t&DE 
z>T`#VU67d;`0C$(|3wHO>o66kym-MSCRX(NbsdO}0|$DV1-BgH;k!q3)> zo*7nG0SGk%x(I+$svY-dFq%xYt`1hqt=qO?s6bOskC^0$LJJLR^bB6tI+KZB_<_e~ zW;|ggZzGYau1-u;OGqHxVfk2S`$IUi2{F&CkrSF#UNZ~}sXPfUUw(D?iu??7q!lJ? z;5PuCNz-82vBMb+ClZcBNdxErKD@2TNjr3f*pNzL0{t+H4s2;O%Mu3@0Ez$eWoO? z(xoJ1Hm7FZdiI{)_r6d!K4yK9O>$~TTdDxymO%Gqd-hxxtufH1hM#6zbhm+wR#$)U;PDBjzp{vJ)?xjoLdX_$5 zJeGGC`sBmGa-=s9n4)2UjDSu~><3MKXPBxwJ31CI);E0g33-~sAN?qXb#kmad`43- zKZFU5IQ);;O#1{BY95{JFPIF)O%2{4Y{a;{H8w;}r^{fQc{;0@i=Bo>Q}ANEuF-g< zeSgXFX-STt1N-+EE>4pGWZ-SS=I*|Tfd;}q4rY(S@vnXd(8;H=oslu&`E$&d`b!5r ze7MIxLVoMktu`3QYW1*y#0o+$Cx@4dtMKB*F z{N{}}MKrdUs-HOx1SMzf3C zih0_7{n;nyS|0sgI$H8oM3L&phgZ5?5)1p^8~1))(`Lfa+#VQzU=K`T3)gbc$6kg3 z#aRr5fF&5*P?lvdT^sg*i2B8s2U`v0T!8oxHw;R8Kfr)bjXdzu8EWthm#ob2deFgO z9VQ_Yan$fV3P|FyPd*vH zM&|-KEFiB9VU?%*vMweAw)LtifBL-)8-oR12O17TYm)r*=30GQ$0$H2^&UWLxk@H3 ztyh3PiQLMOkx>|zMfW+(CPG5G6=Upge^v+scLvjZICRU`<%HdY!1c zW#p^ZueFQaoDUuWiT*+-X9Ve+805ip-N433PwA94_mTm(jn!`6(dQzI98Gp@HhFt< ztx?7-%)S7;?=eEZJX5lqp~rwe&-yx?2wV0^W7rZ!LTUfjsmEi}zHhJT)C*RKLh_xsy>DCjLO$wM>U2P9|F#Zk>YBL>eg zD5(QAIvP{1kiuW?Q$)lrvK+k6B@z#MQAY>lc}>@LGyy4HIgtRn&`e%uW zGQ7kLeSKZsII48Y%NObCB{n^BXpg7N2R~sP4ctVdZ~qIuf~J-hC*&J=p}+Lt9kX6= zFHv4#_%y-#?Q3%8%!o2(PEk_9C!t${ju%s$397)$WE%5a=6L%E6nRC(Nd7AuNj;)I z0NWTuMU!@v30ZX0PJz2FIOT)bYTvFueY>TtuQm>21=q~XKI_6n&dS7;14xP<5Z zv1|&#NT96+7*}1?Y~UI_>G&Dep#8yeq3@OIIxpT5K7f&Exog*FQB?lS&faG}dQ<;3 zc0K16B~@1S&8)(cAP)Q7-tngx`beJUjb+360Cn<8An`9>mdZN(tPqAioF;-YZhlaQ zgVpUnq1$DiUIeH|MM%Z6#g)GC zCIWWD9wb>~o)<|;S{V0*6hymeQ?!YQ_XD(L@E{{A&7)^P>l7JsFr*UWU6!V%xfZSb zy*8Y@@1COB=1qC@-S$g?) 
z*G4;wP(c{{%D-$^QBjeTmj?=XyjAcG=1V#->X-Ku!9lb*80ur>(mF-`2T-Ol+559I zUx>i*<6;?wi5=IVDZ|ABO2%7q&-NXc%w}GkmXf-)xk^-BR>1enI5JI6xc)OJ?Cd9P zj4`i|HapTwfFZ)ehJEmaC`h|3vndLX3fTL>=Jc*Y=VR`}S&4p_<MG%=SRU&&|(Q zsYF2O>FR=7M4PIpppfJP?q7}WDYO?e*{fF*{{8nY@)cB&x4$O9b~eAb_+wze1@9F~ zO1yg{M!#cXV!&R)%qA#^CY{h-?bgSA|8C@a#>m1#Vf5ubvnFW~bU$a$u3c0+o>lLQ zp`~q-b@lcA2L?Q)BTEbt0>syjKbCnq-Tn3!6cXMTGP~N;Km_dxH{(-EM^hG8RXT1O{Kp8Yg4?j2iIWfqg@UmUHw1?4e+t#g5-#*4vH;NJd`)&Q#zXH{GW|A7_-D*q7s^6dJl`ox`u8xvcZI=QwN(;2(qsK|+z;jAa`xZP?yTr5OumHWC5qE+_1I1TyT%+2NvnWhYx6Nk=S3n5IAo)8vTd|a8Y%i1+bfrgnnEH{ z-=t`l`%GkL=nj&)sK~&hYgSe~MbBZsYH!yyFem}8PfHuV!|vhq-SGaksewl9+2_$$ zuUy$8iI7N2t4&&U>BfztMaGbu}d@|YgSnPb-MG1ZlM!qQyej?YQY53v1S6TJ@SCW4eI8|8z*)_lAlXP zhX+wtjT311fH&<9esB6`y&HUVd6wZyN8#%Gw+jd3%*~7%=;?89^%eFUhG7w{;8$Hq z|2|>fQD+BXyT0?1pRgEep8FX#66O&IiUBM_BRk}b<{(C|{^uX4D0udsR;j z4#`iL>VYed&#teAm_?w$KeRG7K4XmmpWF1~eYN}In>92wZS&f{CQ7L+BIs;jTSUgZ zP@<&VP&+nygP)%tuB_xP0V210>96}noZr-^1J*|nXMe$ajYHD1&7BU-G1vV#fxb349m&{@e4!M9Ahi9q7D7 zJdf}Z_d{V}1DiFVGHlob`r4Zw{UlqU40WE%lv%-%Xhb7~v4Q5MEog-XOhq4_*_-Dy zw)f8aSQReisU2B_B*7v3J}?px$uVCYR^ju3KEnlWH8MtH6Ty7X`#Mcm?Qt8RRmiY(F!iw{2Tc zcekPMnRgD{%G?41j~CokFj;#Qa+jguDdlHpszkHw2Q?Z)Jjt|*xA&TX2?|w$L5atgvCfD4WS$0v zrl@YUL=uAxJK4Hs7&rw|G??QrK57~tnwqi|&&t#*U3eIV+;TVOUq>RBk|ttBEqjy)eLt2YI-z2trl2Mut(7})lV2V0S_(>uhNOW@*n*mX z1_y8zGHes21eOkjJZNWt5)GGc=*>`Y#`&{he!i5fCMN-xk_?Vf#90|9c5N=Yj!8&F zmIp9tGH8GBco?L;Adi2-4~PhEdf*yPs2gZZg;_^pe(2boIx)o#`VL>m!ore#0IC-p zgg7+@3SMMM2T2I$}^b5}OR zCm`=UCJ_*k=+Fk4UMn`v$T!R~L=6*T{F`?08xsV;`=kM1*U>H5`~;g4$ktKfxs0%a zES7x)iL);mc31}#AT*A6hW31WKdi-9KpbV5-EbISy*&OMnlrcp`(HZYV1NtBVa=K~^pa*~X2di+j%s>C1M@7VzrkH$Bli1F8Jm~@r0FaY2p3d) z9DAiPKj$Asy5J3a@bh4D9iSIozs^_0w{PFA*7~!N`=rs=>Ao@DGGTEu;rt=Qc!lmb zSlO+To$vrT1jw(4^#>aO8 z&fUAaLAOBUnVOy^BpVb>E-tPX9y#tH4n@vGhi36Af*`HN8lf!`)y_y-vzQ-KzR^c0x0kczla9dq^k5IphDluK zO-zODv?GoZEA?7s_5^60iCcRTRMZmaHB>e<+ctTk0L9$g!%)>Q()UasehVCfQP<_r_&vAW_W7`vIN+01<*pGMfXGFT75xmREZYAHIeJ1rWl-qnis(2QAh|(fODF 
zH+%cz!D+D{D!{Qp!FZ+#<9HeJ7#11hb?(U6Xna`F^5X>%m7ypAr*Gj95`I)1>fAn&!^a_q7IWH9CA+C{MWTd45O3_F( zb#_t+X(}pxMt{LzSvR82&D{l%g7M@AXdZO!#7R#`Yv@l}cm_5ITD7-W(QQRWYfNo{ zG<117ZIAbtaG^jm>1VwTXpedI^3`Ar0$qRU(iV8LzJGr<2{af+oIi+>eO%gec?r_+ z1-XAv_I%5~qcO;Q_Ds^iU^!S*e{5{57JOHStEy@}f(WoNPj>_d*L)uxRbEbd{`@C; zcez2^6T6Nb%S6})>^PRtu?ZXpu8gRck5_?SkC-bcD3DOMOqs{+gL=DH7oetzQ#Fqs zKMrQ14+pubS^tT*pWGofJn#NJ@l^j$YR8{I`Ji{h1TD1>4kqQzjPzsT2d%6aPQieQ z3b2C}$Q-@lQY@;H(K8coP~H1`Y07P0Z=xmrXS_ z55Q#y2gk?8!hSFdv8Fck{?n(-`GeKS15g6`G+9NR9^BJVB~mkp6Qz4Q1TC0BVQi~Z z^-KhVnEC2eQc6lb2ucv4I6@^ebK>z~)xHP9d!D2JC&AY~G{sJnpx_|sgvvwmM+f~;w+>_qj?uwTlN7KUaN1ZWt=IH+kj>NT;ks3vz(lUW0UgTZoititFjrlIeF*ulXFAm#ORST_B$ z$|V!S)kt2fC$8_tK~o4D)Ynj9drs0>Tx3!SJ9^T|6{QXs1!B8(&f(oulXfTZ3(Z(u0xR@X*i=Yi?qz;h-Pz->>y{ zQWf$6u>q-xdo0Yt{F19XLY#l+H1^$L)Em997n9#kYlINT{{6Jfe1@1K051Z!*sick zRFnSzPy_VzVqJU8nFsgdTYo4->U?|3?cslIziq-froEr zCj)~3d@V4iXUnhlGI{*);T9$)1bZINAw^Ncxkbe|Rue%#)SS*viYHct;FEE&wI)d5N#3+|o1x z5}OspO0UF2k0%7C4b{^TNMc4RoL(d8FJJQP+4Bt?DY4JExKN;Qu#^hI{oFa~*Ti5z z$6LtHknUlb#>n$=0RdE5`L#EI_kE^=s5&$Nk+3IVxWN2&$m7Rwdrd zP{sd8*L%n1{I~!AXHt<$5mJewB1uR|MN1{22pK6E8D%6%ibyIVAuSq|hD}1^9Yr!* zMrK(JqLjUU_g7ul=X+h>@8@^^(ak*1*ZCaBcpQ)8aj?dqL$0l&n^8*8EkkBu5T_5w*hG8zu=! 
zm$Q7BvI=S6Dc3>f_Z*2KKMt)l_L@4idsrd;xha1Vh?+v?mj)o%9xpPKlk8Hk9Z+lkH_DfnbUWdMb|{Qmajuu!$#?AW&81=}~b-E@(OqVzbR-x%YR5a$(M-PVf-%7S$;hAp!fu>i^9J$lQon zkCd$P0z}gNYWNbRDh5VIo}QjqG#&+=F?=s~V9MA&`tBmL>xciOt10#FeQLI0@aO^E z#^YYDxMixZ*->lKI7^@vEWUQOacwllgsBf>PQK9 zAE#y*oNbIPx|bZk@L6}zBT`%R0uw}7w?&@B7@l(MKdb%x2vyuan_)CLpNraDwa9m< zv#o0O>@g!JPGjE@OIEj@J^Lqp!}RuL=v|l+5e5r7-ofTlF6lit?MFZX)fr+7UeF(i zr|Aem4d=d*lv$Lu1}F@@PFDy(VBVPri8;=iW&eA&las}rB%4lU6%~gzHnTOrHg&sX z69hp5b0Y`?=Gs0K!s}A3phr*^J-d@{N{TSDGS#w)z@}0^QQ3!x^vqAa6i-yIPGn~K ze~YD3q;q#t*yH-{W+cA>Y2E^T= zKv|g+mhiw=x^>v#i|s|ug2QY&XR;wX(?u5tbLhklLcrlCwt=bvM`nHLoUuGSZS$zx zPc*WMVdtmKdh3WUp`>9EWTo%UT8K*`(dHOMJcetAT)~TdyVyEVq>?se3BSWpPn;;* z=VNgb!(lR{J5tJ1Z)bv_3w@QHx3UE1xev+%7tG9mRFd2r2coi80G z7M=3tfU*ym6ZlhNLc)|+-Z@9!e>WX?jJ9@|eGKR-=QdrS`Jz9G0uP;JIhdz7^Ox_SVSXUCi>P_wiy0C|W8hD{Dwz8ND>_H((nDn zPUDRef#U#*oM%1uCywy;s%Md?0)WUi^aJ z;%kWc*g4Xi;qgavbN8*gmK>lPZ3p=~vzR~3o^5Dg@E*x&?an$4n*=Jxg$p305C2)q z_X)n7a`tSDve&u9`|e|Slb~PUPBrw53t5UmXc*Qf)JR`nfB5j1J{K_PmAjtM&R}aB zrtAgrkX!h<>WUw9X3NyfBS&b~fr_!_{g7z8Oofqan&5Lgj6J*$ovX~{`$kvPn_ArO zZu^)s&zG?G_JKwC#>e>kdX2ER@83T)+Srb=?rpzfP5BFk&xHS1E^)KX?QxjNvsX7{ z61mKDaXIPwX{ykc%H@F^3R*>^4z{2QBC7bu-EVHH-aHuDaT|1Lopl(my`JR*Rw-jC z+ls~?h?&0+uCh92zR}Zs;o45>+-GN#o&iD1VTBa3-!FD^mdt$C*C{J@=iE%+<^y?q zCz~BVas@_v+EhCKv8o^TdGGhQZM%I>FI;+Z z;*R6>S4ON>^j^Bu-Yn(Q=qKh*zL%_vEt01KD$F{~T(M~UR{*xv3 zW=WD8*AzXpDN(8cc? zi|!sa$;nwqPoOEf1_QeNrb+_RCN3!h$*WtZj!@d_2j(7>dn>U@=h&2~Q>|P>GmfW? 
z_&8p3OKImGZ({-)-lSc+)ogFlkQwQ>!ad#8d#UXmJNe%Ww{pZAjQY~=iS@hvGsYxB zD(euH0$i45Y7>IhgNF|;@QN@X)*DOu`ku5xE%H-53)y_(x%3cp2Jecx#c*~2{ z>X{>TC;d@dx~{2u6Z(v?x2D;D@$KeaZ|X5=&6_1(Z7ci5Ur1|Nb#|#;c-6u^KL*`j z2=i@k@8%4tU!YGY%1$QmK|5UNSpc!qR&{fCpEPIOfj%@GJFe9#?LK$}Qd{nFLMw@= z?pnK*sFM#0>wDmg!uD%~e%9&fJJ97OQm&I6Vm1r-jueSl%yA&4yU(qyuUo#Gs-X&f z;#_WbhQ9c7T&Vp#(tEv-|BrMcPG3{=8mCxae^>DD#yOnB=h} zwiIx4C#P~lx})RDKfl(cPc}9%@D*xxu|n}!^z0T>RLF!khlG&imEXD>^f(6j zO(@K-Zz$-+nt*##4x97J%X3@Zp4+W8QXTz&|5yVC7aT0oSq&U@#czpg z#>Xuy7Pn`FmZ|#S8?3?5RwJR0LG+m#G6?b!NWfg(#;JSu5$;lj@+URIb^$g(se-pl-F+c^k+J~=CWhl}pr+qaR) z*dov6;>yg(&2iF`9HE-T9`aw3C>(k{hn1XqgfyOz2dz zauk(Udov#*Z|r5&@pgsEwhgayh515$6K9-A8VjQM>FYj2=evhbxTc|EVTUU5&Yd=A zx@!g*2NqT?S4l%eh6omDf~N}BgP-ynIBUt`#V|+uw&lsH`;ZI#i3rx%fY!|1+%Mn^ z(~r%1Q&V%r&nZY@M6?y~6zVnB3t#Hcw4!dc6|~``YunXn9ny{tb)Al#sP!7Xbcxj) zuOFZGHv^`EH*}W{rEq{jL)|8D%tlHWA(FBl7jr&zs8P`*S|%OCGEl6*`29ElP2h@A zTX=pc(OE|r9YeN8dQE?4m0rfh%wxxX^T_`|j>ga-iwuS2lYjo@b$(UHND`>b=$tJ< zq-=M|0&C4^tNJbnmbcxLe?8*8Qkdxn4&1q2_4MM}$r{n}miQye6Y*1RWsx*K7C{RMvC5m5FR#98?$}||i4#MQ z&lHxmbvW(;LJxi|-{+0=l!w`*Mi7o52SCautnR5dht8QrkOci)n~eC!0`=F-2S`Hj zy#ESjakOkV!DW?R8_hPrv<)3dG4aUFNmX-{P3N!rsH(E>m!Fo~FivVh8~-0!eP;2T zf==t`DBosSvth_TxxdD((+kt+IeL@r?GH{FMJbD@9!9~dmhb$0H+6NJIcgQk$wI{l zcGb|i`E`wrcl?Z>&7wz(BLOKaEelT3>=no6TqwVFYkT9t>dt$$j(6LB4QeN`i~QA^ zmF!MT!0XntGqZ)&uKwPhSm_F5u1ZSgz4fX~EUz;x^Syh$qGIag$x)5FFCM@+q3fCI zsMNjt)EP5s>+4HuTh^H-B`5dw7~f$2?QPcUH*c_6Fm}OrFch`5#jl!~)v?gq`%XI^ z*A&SKJ-gz?m{iA6T)y08^}QS3y~8{Iq5p`a2PAh8j?=)vS-_O?ss|0!^ch%rVC?@n z!znSk!Y%b}$Mx!_z%##{b;x=4<5FW4=_jB4mi@mcxWlG?Ncal)dwGD;mK8~NNVo;% z=WY?g&lr0=Psyw1Z-HZ$#|S30ZYH3SK&AkqD9?ze5 z$+DGuc9uI%`H~B`YVBIQ_@av|h1IPa-DVyNwp*ObN%b)V&Qt@fV1eMXAA&HlgDgfHJ z+FLAZppQU)gedVna7K6Zepmx%QeU0V9 zCk4w}(tHGReR*-&LCRkzT_>CX-QS1N?%n%4Hkp)AZ*IOn4Q5)P+$f#{M27*~#SX7LvJ>~z@!XE3w^JSWuXG`})zM*_3l z3*&tE0D&^b`9F`g_LdcMvq?wbg5r*$b4FOdtgjy=-|1W6AEr_4QWuVj`+tQ82Zb4k z#+Rl-Gor*Nr#gy+P@fV2^(_CQa0Rbk67SK6(;G5mn)e<8HS5O{UNZ^_x$2q~-FdSV 
zDT_8{`Jb^rzUYqj+yj3P-oWE!OJ3E~kTG5JE4j4IMVGvyXM{C)G2kfKn5o*(0RxI2 zJ&NNt8gE&VX9Es)|HzC`%Ni(5(`U`vR#hP^@FQ$~0i_0TU6V(1xGIPapKd*7A~+OvRQtTiCyq7zV6m6clpR<*GGKrfbRyN5x} zZSnW85uR3)9VWTHz|2txLV>%8NA3Qy(=)NFMy_3*MhFj(yLaC{h6aMx(b+>zd(w4M zUmuFtG|u7T0LKEMoWn*{KfEB+FN+}6?Gz*zzm+Q;?O)3%8)SmfwBvcD58Jl4RNJxF z^0*e8VTmduRtuNE&g-d!npwbJJd~m|B@kCPx2OpJN1;u!+nu#%V(v*h*^~tr1*%f; zT&OV&WDW$qQ7HZW!KjR=L9&f>d*TBl0WmMr|68!%4>ye&xfat#UBtFE#Vv}WR zzdjCg1n1``prN9mPHk&(X~)tDiEw1m8IZR;KZo3FaLk$1gNEQm#m}5v2hjqF%;SJ= zuJDZLLZJ@OVN*4=b1<;^7jwqQ)+ByNfewUR^+3hzMMW4mL`pt=4^)dnE+WiY2z)@P zW9$(JrxFqfzXoU{_f~xcNXg{=qo{N2q=rm`7^5aHQMeMy0}mz49Ef=+n@rKLUD9Ed z=J^_F-ltl#?VP1}PF=bLE+>^rvRd^bE>Qd#iwP>Z?e?WhsO9aG72f#$V>jBWl$--v zh%()_OBN#TuAQq>7ny0`v_GuzZU!>tZ7pBwly`ATr;-&2+!3t zF;LNKkMFL7j8c|t-2F3$3qW_1bm&zYtB&R5RF;%1LEoGUF>qjbVa-X`KEGtI8a>;G z&_9StY7=-CivqDZa17n3MT9}^wkM!lsO2!~R8*#*>XkitlGx~DJ`*DitDS}f~nQqWeE&Txo_?^K!!HbYQPrJ{we^&h#?i4*tyTuT$rCf;B~7tv~3NW>A&%cHi) zImM(c0|^ew9^y?#%Ng&i4y}>)4~!%{9vK;#M|!>Yz9G;kDhjtg$Yvo^`u)Gnb#;mS z<$4P+Kf-yt0Z>8^2Z`Cfy-QSa_f8#wiQ=XUKwFAyI~Bi$gW0iz*psut2t7AArZ;i-7*(@Gj46FLs|n7Vk7R-E&d~O*?`a zeg6RA6;adh_D8*{ze2S1oZuX<$W)m*L2)H0vl{>|yT@(-7+;4?!S`x0HD_FvO^0q- z5wHUQf4|xK=>xhQ;HORubC-4(zpD92W0>g2VTL5IXvmu`M!neUVTYjw)cpi{SR4KS z43pSsnz~U{72kMJ^jMe&z|4wDt*|_Mo6&LSrF-4vO;R$f1TR=UGcz}*B6 zJexzgs$6hC3;b=IZDon=>dQ4b5H!H5MSX>dPt-*LJYT=li`Fs!j_(Gxa{#Vg3$?FF zvFgElw#;$hcmD zIfj6KSu?#jSJaf~GW0inF3|oWBOe`?n@f%pFzlBxF@<^Gy=|`gFV*G!rt7+uyqY^c z)e&;WhsS3JtE*qRbH^JpAWei&@^IC`o>$%vn8-AX<0npN|I1eo8hPVNek0$v)U3@{ zHW?A30-)aBKauen&N`YK&*r4AHe`26zFE136~CZG3(*%9WFk5WwUmFj62HL2#@@H7 zezU~OE8nkC`LkvaDjjV{_G8*K>WTeD-;+`@X&6`Gr8a9Zj?|O+J>FP?AnD;N`aqolh49Mi#|L#_YusI^oW3D zI?a(88QR-FR|iVZg|nTI|HW}ED9E&gCqck8=l?l4xF$gf`>P{li76jRdkOj~v({f6 z;vJXK3?RZb1>QA70?%dkY!neZXhEWM0MPjvn2EMiVXk1W=B?Ge&1ffN9Wtu-(d>tn*pn$VO&BWk4r z?TDPnXZ30*Q{mgMU7Gg4->N9(kgE5f!V0!&9TR_vIfz%nl%wNXzCn2?Z}Ag`27bCtlIZ=c=dlQ$cdDX zaqM;L(Q=GFHh%8)X*w`xA!FmiW9AO`C<=$C1!X`^NxwSuqul6+%e%^KGIys7;%!eT 
zYizqPvLbINY=8)4nUD-)IP8SMs&h?82Qql|C%uTPSt%_4d6%cX;8ifmY}!D@KVP1# zAU^?v14ylD_NSHeep%VPi+S3KBRu)jyEKKA_^AtrbO^1O{TRCe zonH9L{@cHNBSt3afon4{dCYHmROF2UOB9<FkfE@ zY8~U5s6ze8GtI2*?@85CsBGtlsC9l=#D_Siwg4}+`a4fhd0U_3qKm|u#>Ql^gn-gx z%NZ)WgkBlgI5+RyVbalpi~FA+J#}grap{0=*NTc@zLK&ycAqdYPTykoPYKUxS_5&K z(8^mnbo5>ATdUHr1PTWw+LalLk{Q}6psFNLG+Pn zpb0@|rnDIN^4En@Te|_f320FdN=r{CR}KveySLxCB{aFik*nVoeLa2Wduc6|3piM= z@&tVo%z&%ky}3h!g^e#QZvS=zIPSDBxly1Ma3!!(!gt>ZiIlj;S`FQE9Ohko=xj}I#6Ogc4>4;Qu0;RI7<=k;$ub|rX$TVQi2 zwhDjV>;7?`L z?zH_k7vPAvdl+aj1CYx%>Ht1K?}+>QnY8H`)1vfZ_x#h%*j~8z(=$7^?XD0t1%H_E zZ|pYh-ukR3j?itg1U%eK^C|?$pXf^f6-FrydkJ^#b93{&iwB=;{a45_hjIbP_q~LX zB-TF`egu`b#2nG%HFUi#{q9s+B|2;}?}yF^EC+yoxR5POKwS8LgTe6}xYCEhS0*BW zT$XIQZupb4=I0L4_Hk!iOk$!i1Qiu+xbK3nEdf2S^{6NJ#eBN}=QyWry;oZyXmZuvN+A>PD%TwJ(A zZ$5ljzIgEszmj{x7{e7b+(Kpm)uBZ=7jiG`pwq6-Q8e=sSNB(x-pC|l>ZFWk0KMV} zPA1&CwP7~AKc;*U%0j5fTNEWK9f^z$GeCLKC0^ul%5Fe6=}=(M|IRi}chN!l04i^q zK+l7LZ%mDP?m>JewM2;+mdV_xneJzA=u0F#LR+k`PH!6;pl?6GfeMUXQarU`9$U_x z`^t>P7Xj~vt56k;QFe9$qZha8E8|g93 zv|1Fc{Mrt2r4l2mMi3TrLxCY1XdJYi@uAn;aPF9tloTyc5djJN_nX2lgKWxZnO)DW zrkIfT5nKX^;~Y6Xpf1kRwdr%0wtoHk54TZ!FnlM$6TcAxHqo=2cm|0MSEvL1^X+Aq z++~~8-cm@T(`+=M2egE@WD@kj6UAKKHRM&G^pAcz(T4^gSpTEL&V=*ZsUB6aIPXNO z0C*4=bR-7KcM1#&a{Cq-PRYY#(K1>EX3}=ZaO|=P=jZ61nJNrRYW1MrO?Jna)n-m{ z)p3b{Y{+Q{HL)>R#$CI12~jkPs>bIcS_X;isb-(pKzK}X9C*WRh&NB$v4e%%;w6XR_BFD;!jvDq%8H8qTTJ2F zq#q23qmB(ajRMHty+v<)82+Avyu+Ae=xKL6rm^V+5&Ml+s0GESj~G#L^Jccuv%-#| z1EmiilFv}*poszop|i9ng`Ub^YXispmAJdn2pNaB6+~ZWv$8s`7WjFs7D7I zd)j0cBm(r2?1u!%5c9Y6xzLl^1fclki(Hp3iDNfjhRTV3X!UBd4_Xu!=h{6xe8X#3 zcN||*k{$*0SXv|6BLajhJw68cO17Q-xHjA9SqY11&z{2DRqfzI8$+#4QuUV5;Cf)- zm+1N(Ce%@KA;vSQ*lClBy@!W}`T|Io@9*D#R`SND;nsWh$8*5~N4G^}i2oD}J(FzP z-jst**O`U4dIk`qcM(o3cg16#sPr=-3$#gIXyn+3N$}6CLM5_-2nAkLa%zU=<_oAgxkG2cjr1`jR_aFuts5F6&0@q0cwQGDh<{Jj(H&fEy``tFM|c* 
znKeQw0IZ&lr6+I9PrPhw?9-zMF|k`CED(;8pq^uAXXoOgbY_K=C|?HjhtS-O0{ayOi)-qLo?f2Gx;C)@?LuF{hQej(+yjvK;wC8XXCtQ~p%yP|oocBFY>Zb`|lTM(Xw8t$s*eIF9$)YRN-fyp;Ik#ED>7cUvD)m{I#czaN%xdcm^MEl@GZ^z!W8+hTEs(F zh1MztUfzPcg$aG4X|+PD4fe6tCuo8u$I*|-s4-3#ZKiR2jnEtK`i-R-;qld zFSpfC+7V1{Ej5x=WHV@-1kIqA5ikZgj2=QzQ#?){gB1d&b#40l=;Ft3Q07+sGP+1V z&sX<~ed;-ndM+NMm>s-1T>9JetvKo z!SK$v->WV^fALvs;))41ha2C0x9z(mAmQ?Et=h|XjZduqP`7!;Yx&f5^CMT*=mdUw z#C#J9%O5g16~RuTZ6xaMx{jt1)P+uDWJpbYRDPWTPGO<=7e$H5Zjc>GiQv{TzSdf! zbEi&($dTK&eS+|8VjxQSCYc${#V7S@{{9Qhmf}D3g)MCj-djRLY0t_#B$Q@EW*~b$ zTtKMSp2!!gOby zYn^%+Tl04-+k}72J6SLe(g122m;kglVkVMxS-yJV;{mz7~r^u??!SLb>4j|b9^BPavXa)9t6_%@;E_j(6~5kPl^@8sb&_45YG~0M_W))a~pdYD$wIIww@?_yl`< zE}_26tCgd8zce!+aEj_5LQ}?daTZP_TC*v6d~obzilcN@%vXZ9f*?!y8bj_aL4a?V zOu;|CYuCD6@X<6h+;^`Q1`W3m?0|@sx@f_=73Oy=nq`%SY_XijB$niRQQNmuxk6ZT z8%HZVJUFQ*pQW^icJuF7y9^D)@`B-vd3H*qf1afkN_nGw!%G$%cRA%j1|%es92bB4 z@E*qJ;nU|xkI!G(V{~d|RaNioR^BOc7)wTA)6aX!9+)i7&I6+DPHtSWBxE@!RX^S# z&q~tVc#}DCTtRIMn2Kh%@#f~{6rLzHb5ZN%WJTGW^T^TNB7_zKL&Goht-!dTqK}t8 zSq$7I(%*+x*g~QVLDrOlRsBwhV0B15CbSGPaodf6P*pXbac-2tA7%FrZWh?ZMEp4u zcbfMr$hdMq5k0}H7X*%+%^+csgS98=_JqCKeP|z;Ni>&+&r^yj2JF##P=!`HgKkUv z3>Sr<$g(3UiU5y_3G74}IXQHvxt_y~j54^MTV=MbD9DuLU-F1w7A#nRSS<>`ea3E3 zy?b5kP_%>AMWQMmaAWh!acmOvVjC&L(Q|pwM33ntO|sPyv}>@d9EWCqAn!D9AHa`C zUWU&$3D-I#_$3kpO7kpJce!{9tLx&zjHh8TXL51|Jg72PAFH$mV@eT*HWPZIsKCrS zB%!~^ob6ZHyL?>F9z9_6Sgc*lTZxo5!=ORsbrO^2S>clpI@w&Lg%}45l})MeB+MFe z7v^q8YJ7y+Bdt|kI_Y2S2vZ9?Z|1pzl@YWi zKth5ukA{{xDN>PUb+eoW-v9b9M?*ueA-~FB*kI=KUUd*f4yQ{P5QS}qN@T5ZMN!8` zp6qc{FW|d^J(6wa%UT%nJic>txlo`5npWD{8<}(ylkV@l@P(kJb#lFf6*vtlefv^O zA$^B%$@y39-u(8ZVS9)w81B`(^< zizZ%I_!{mV+t$||6uxM*O5g+)!$faSPurtsEb4al3XTrTwF%#`!))z`IaJb!q#)YE z$pUGLDn4W~T4eK_n39mEd4)llRh$3ncj?Umso@35=RZz9G_83VF1&rAINKE59C|)oHv7z#?Jt@e2R2U; z2C!1?!q`=J1`vU)7M&h|+qQ_wwRYswya)V)Qd)nf6fSdID&+5Tjgi z7qp7LgJ;A!yk*1_6cTx8vWR$<_vs_b83bGf1c6xX{V^z7*Rw_2O7z1D1%5FSi2Y|B>=5k7VzIqr#{9v( zdl{~WU{o-a|p5rm^uieWrrcC%Pp<4DlO~S`eq^ 
z6w-r%Av-~g2SY?ZN=lS6;oq^2YM@b&T9hZvCXcm>pFKNpXHi8Tc21;>g%`QEq832K z(#6y2arssk&sXo>LBN}|_J*j`Mk5+5nMt6Srrf+dVQ?2w1~?wP@>_3&<@J3`{EQA< zY1M=Q@DhnuCXXE;szem;7S#t1BGO*uBj6^G*RUJlAsu72gkF}d$7nUi`McilP^gb; z?w%4sQjp_Ts-h%n&>AtKg+lXRgz&(OkcSf_aVc4pau7wo#%}9%9H{-+kt2ork#`n7 zsj7ii+ape1vfXD+ybafI-*$Q1m&>X340bK_~VipE6 zdD3h4^RWd@g8VM>0*vn{{y1-VJh2x3fTM39DH$99pW!FCfKWFjWJA3Vj1qSfB^*U_ z<7?aY16TnT8vIPdX>z)tp|rai4+0xeicQlH@21fz|63F-s+{mYv8KKKZu|8AE7lAi zoj}x2SvhUWpbcFU?+AUMn5)H+(xNr{@BqBPf#Tz#_Kt#wiq~WcBtE9y5ZY|w(0A#A zIG|klZ&kVC=g$R&0DV7k^DKgYunI_^dNik@mJa^pc6MnDC_wr3zh5{0J-6X^z&8Ui zQKjIGgqs#6+_aqDXFSI3B#5Kebx%o}i9qK6$YFaE6PDid7R!F78BgB@t*KxN?*s&{>9)bj`dcLctG6$Z49Q7!MG zjWO7o9OmH6+`+1h|iZHLPC){o!sd7NULd(h{G zX#s~ixce^g_Pzv3=;Tn}8?`N-5L(^cn-nc^lo-p{Tg-Li&c)mg8A^cdiP0l(S0B+Ht!$onh55Qh~jAvL7dW4Ou+X-*%djGZ^?U|DH!qY5WJsK?#{J&~L7Rpf3>lSTk4#{)Ir zt{jC$J1H(DiW1~k#*9h2SNr0^Y7W~75}h`n=O$R|BP3LBV5*SYsk@+`~tYNP!aXEMiBR z5M=SS?N|_}A>Rq=>$kuSXlps^p!sV)Z4@S1X=~8$&ky?swJMDMH7uS&fhRr=(#P(0 z_#=7-q=$xvZTE_NMybX-s>i)+cv;C1Lyn?x;>0x=xaE zgf*XM*TWdw*cJeTb71MFjN3_rEWAM>IVw;R6Qboej?WBE-4%whUb@t$P#@Dr$;RWr z4j|avT*GJT;`%zIW539{Xn(8AHd5NUYSex_97Q%k>+0~~>(8H$fi?a=zENd}Xny(j zp_c^`Y?ia2A|*(G6pFcf@kR1b)}WALV2Z8go+H_&J zBhM4BETgu?OI6NRBU*Rn>DQEADXpfG{9WHk1bC=>yy7sWNba)WGsA*UYpwVj~=E_eyi^H)B)N*QPq!9X$iqS$INeoA%tyyVu+5k_tB2 zB2(s4#*aZ#}Pq=xqs%Ua0VKbcxnORv?<>eh}e}GHS0S>Ojfxy)e zo9)_gbHQ)~`USr2x-hih`X|{`u!Gycatg8w%CuhZf{)^ntUT85tQ6gzZc_iwgcB&(?L#%Slb0mohUIwubL`IYQ(I zFJ7!zzFe)1f4K@c#$6Sm_&OwMHfw#hbJ5+Gocw~cMxrHvuX24WSZp(V)Hq~(SpDv* zst#*Mi6SkiB+xMb?eJCmQ&sQbGecYJGAR$^x+!IX#x5Nn#scA0PKxRH5Z4m5cHdz? 
zpc~nlY1vo|T>s4z*NUpg*VomVhwK1)@^xeePF;dUxQnih_EVX(OP5@AVzL*fz|9Rv z@mRb#{KcZ-@x+03xk*WC+pk^KcOWdWvYOwdJz{6cn@^BPf|{;KQFzvi?bWlV+}Ur7 zQigax^2%i&aKX&paBz^hoK@XDK*N21!OtDgn`lsVNIi1pt|a3j zz0*kRdDPdj2D@*)5t0NFx{!H~%%ACo6<%0b(*fI|FQ0#r@+8qz-CeFtU=KIFSa78t zQKeN?<8_1R+slSz!O7nc1-u_stWx)|%FA%oS69Rv`>NO}d>aOI$fCUtf5f5lu!8EYE*MnE>N3fKfrxdcYR%SK-rkL{ zt{SPUm+aPrS818*`1w&!#zo=L&RA!Wd@4A%%wM<{{oThM&a_O`w)VLf^2_S|i#;_rv@5*4;ZU z_U;^WBSl7L$KSF*&F4es{Y@HGkTKF$A+TI5xyQJ#DaXY>n+}{xzpAxy--hksmv(_4 z^I~t{>VwfHn)#0hHhu*hdcyVMVUMIDCb+TU5(A`#p(^GH1#D;>(2W{mKX z;UU9&dko!T6Yy=63d}sU4A7=*#y# zW*xKmdPvUt)L)B3`uV+HHv5zDa38_r7H=8%l|i(T69-OU)@Z}~_mEizst-84Idia0 zRlfv1D*8o%-TEK(>zbE;Y-@NInM|wcx5dj+VjjhJ-!mRB-^Vi{G_1*37^GdI52iZ!!0namaUr+RwFPt#q%cjZ?gJzK7?t zh?d5pyJ2yY4@M7aSY3U)yXg9m^j^ndAz6(jh$_1GL+}#e+Gm_RmoI-_Sy|++510WB zaS}u=g&?)19n-IHuW_RoZjReFsX^s6uOH$wSWuekI#jmTB7F+g6F70<*_t2q4 zJR&h*11u|~zRBi%+Oj;wICRt8z-v3L>H`NR z_3ggBs`;R0;53_YS5g|Jt;?5;-sYSrdg%%U&)Yp_ zymfZ=I9NMLxpT^s%p>Ez()h0|pIJCSVD?h=UH|I3Y|rhX$w#g%7*2h_4|m);WbWd~ z67$dVQrxc>C~YHn|NE zB_*-BQ)Yw&sN|pA>arl@g<4&i^WM0DUo7KJUmjNjG64N@q9545$hfSP$&>S@E!N7HtQ+6{gPAB#j zbiWx#U631yh)Q!D8*($e`H6jB+fzF?rkw4VU*Wj_Me6ATNe$&2104l zm^QlPO5&ggN?CCyrz(pAUKX96?-3UAq1bksfAWa@@zX>F14AcoA0#))Y2t#~u}u-f z0~5lxJaQU*Y^3@=-TC6J<=YN9mq+AgpIfc9W5IYyv+p}{;&WbpF!@&3<73JA2|tU* zpWKuk?W}4-*`-5?4NAP4iB>VS7{;KX4s)Hm`g}w`3pN z16`?r&z?;a1+})g!5TWbi4vm7xu9h5FpIo}$Jyx;59U@__kmRAsqDj&R4?6MgwNiWc7^z&&zQ3Dgl=M zbbG`-WMg=xeqT$A3wS9#DP$?Wg%tR2eo-+$z}>BEga=1aE;lEqx*|3$%VCO`>htd@ zQ@VOog>8E>M{mX&3mL~PTK!9N+ts5uUWnsRo7RCGm}hR{b4gJ{#KbLv5wvcH-l*6)2TKef-4D}xj2Y|r7a z_jfgtSuD5LV6B7hdz%-#x8*lqx)a_N*$fNaV)f(EhmKz-7{#O&6?L5MRuB#%V|!sD z??|db>jK&Eodv_oEaVIIa~>XIa`OICnDywB<(@p*&u`GSLptT}(G;0x=Gk$t<%sFv zN8d7+ri|0|u&pdBQ&*H8FL4tXsj{XzXeXqEq#7_%1Xn6Pqo|l2VSSJW5%PMT8enxc1nXI6;NxMu^U@_Vo!Pf<-GV}-HAXq_Mm5CmvEN^JU$*RB#&>H; zGxr)JJ1JfEEnjBVHD}4ZiF0O74+>CM>ZCKIPhFGs9DA)v0hmuBkn%BiybKe4b2gXA zrd%C0f#T(-x;_vW(1Br?v^ z$IS|`E_V9Wt4u}w-Joha%Py4%yo3WLD_jhHsk~} 
z?VhGTiwKI?h}XfZV=DiqESGZYpnX}4$O@c0=I%y?sOCCJMbAj-#d(_yJSV>QE*0j- zQj>alwBya!j>B<^J5TIABv*X%ru68J<^){0LFmFSo<5Dd{8IDUH~HvCPng1y7#BCN zah~EomdBbFZ`$V~nVqHKX96Hlv?-m4_tX*fH6~QEX~%3b6j}5Ac?~RddhgBQiMTla zl#4EBpVzhcF6qzY6dQN+toy`p$MU`HZ=zt7c;~w96ux+o6Ge}B+7r;5K3XiU|B`rv z0E3I>XgkqVyyMXG>ODW{$W59i<2-MX!*4Wgb^7en+37$HCp=~}K#;$>+}uk$mQx8t zKMGdNkw;5ZBCUw6?-aiJEz_{H$#_`#d#P0`vt9OB1EXK8 z_5PHS(*Ugjr)U>`>uvq5dArM$L!%N$m(IIYS}OnLhw?bS%wDy$aHCT19(z=Zyn6RJ za~rhx*s&UiZ^KsT_IlAN^eSHgH+k(uhfSi7eVx{r-o;YmzkQ!PeX6=hRwQ@nY}4xe z(x2P>6VLCbvzl^i>H6`_g9DaPHUmxhOPli8t@CvBvO-{IKYH?HY_oP}HSxzr6PDD5 zyj5;$TjJShYJYv42EaV@>A7M+KS#MLM6R4`eow7)~cUaf6KPk8X6P^l>g>t4U!vT@nG zd1C3t^Scgn8W1E*2w1VAcs3)n^c^cKI8Wd9XS`F-nRN+(12oSGP;@&wb zpRXBlkUMNj0j1_e&G+=_DgESAZd&K>z^l9&+w@g$%=3^wU9Rj2p7iBf)YDEI6pL?t zDSNYly()cdy#1PT-R7O=X;8qEY5(acl3?6u=957~huRtez}fxoI(i!I-#rhUmQEJ&5iQw zRRYav3FLX34f>pdMo7HO?AMVaL&q)GpTcG~$-z{UOin70+LzfoU^113l;e`04eJc& zXwAF>_ji!rD8A|C%W6YUFqX^mS(oO)kGqy|`L)ho=|p0>_^+YqJV2A3y zgC`b;Xa?$#UIAC6UVdCwrf{ztq-Xa>2jB92uyA%44x0Bvd#UiN4e7;V7yFw%IGANx zn)-i5HZ*I}jbM2}V@aP&jGXgVWvkH+wdo_5WNcPnP{#Tdy%>W z3|$`hVqRW?f6mp1JDZnjuD&n_mv7ovD@a)SO#_rHhR4esQ+`#UoOUA3*vNN3Y)eE1 zt)+DBVr-+GrUt_HhAF`DaOxHDHEwasFquLa`OP^cq_i$tl0RiTtO@a(JCeAwQg*))y`u9Gq4(KglvSs_Mp}|l$b|_nrfNmo zkr(mZAXr5N$z8r6gy+v|$aGUb&#J67UnlF>-RDAnYO2@?iKAIrS8Aux?^=4B(Oz60 z&FQZpiR4|p*fqd%fT&@-PRw*?XM(EK5#~)#FP}Q4FcO(9jr&!vVyg6wCF?>DU)_b> z-eey9&!#f}Fp61^9#s!I2#XFq%+^Oa{kZyP=F+cIsw;SqxmVmhJ;N3il$z!fcRZ^R zZ~9BiA!qAbRewI*KQH{_!7Hztn|u|9tR6jTlzFXq@GMwK6DLhltIJ>r^)-*w$p#26 ztQWA%O`5Y~? 
zmsg(p20sH*3=^o0prS>JelzvDG(vQzKWJ~EMD>+N!G4#>%b$~t_B9LGqE!~&>pZL| zI+~gp2r^cJdVzj~;bsP3o%&mH=gtsaU2BtWHNU%F_C5_9MGN?xLkf{oEK^Uue^F}C zakRK-W8wOB>r%%Q!*$Eut}+PWg}glFHN*YG{wITj;_~xDuXYZ|8rC6 zVlFG&^e?pk`o%s}WVi=Zd7^Ihenk#Nk_VerH3QNh&<0TCvp7+-)0|((|l=` zi{@0q&#O`y)KE^wzwETxS|M_F+LB?F60&>smk2pPtzaCT zV<^dJ2ICFcTx3W0kD0_Im7#+NucrwWpN9fNz_hYon=$lloCcMKjYL+|5dOs@(VnCe zLvt-offq`h{A5b3V?7OQmcvU(S$37d&9`aVaPtHWSsA4OXRdo0NKnUae_X~M>S2N~2atZVERd<|p z4467+&QpM>d2v9X4J*U@>$)74cr_6r&mVJAk}^)7JjuUnZ1O^z26&V3>(#4}DSbu- z^)=<+tikHizkIu-#G-0c-gQtq!=!-+&t*~$MmXy3#Cd0`+L4*e8Mr#~)-)5Bv#8xhgPz62eC9YI$qmj9{sHk$Xfbqi ztNfl)CdA%|2flWNvjDj$FC^U!h;B?i zVcpdV73TezUb5&d=anrJJf-fX^0?QBWzMfn%OjKxYx@uUP z)az`a+9*t@e*bp`CdSW<)FpCTKsflFLva_Y#M(WW~SyGafnOUHJyP!aHtk)s0 zs^VfPorlT$wdV%*kG5l|4T=NsWbw;gx^&^>gK zcQRC5E8J~KCUr?Qe-H70arJ#G7?~VC5aUyDR@i*S;+qZ&!h9ViHpau^b zWbv+J<~;_#771s^|LT^`@CP!_^&VThos*ZwDLi}_g%Vmnn4vx*-htY9zHZ5j7k$61 z@96%Bt3casS%TQC4C}#riG!Jo8ArL((Xo^X^2H?*o$!Wy1*d=(vNX7#?~|Pj?($9b z6gw5XLe02%*<&^XW@(Yfu-IFqe6(=;&~#&N9?Z_YpC^~O=hhp9XW-ll(&$Wm;I(n% zSR`}4e6i`>Upd(lO=zpoFTV;WR5iu-

$*LYGw?t9b)0J&0U zIGE*+O@DN?Ct+B5`R02CV;Kj`>~jj1*E}wqKkq>Okt@hDP@CFP?k(C&8QrRj5e*OU z5U`7dljm4R44Y-KwNXvX-=S#rmr_~ZyGe%*y@E+ zm%tPKEXp7{H$QP{s=U60|8*4w9|S7JdSk=U$vwIu$gm7*C+jnPQ6~&)=K>pi8T#ixS_$T2+2uQ#u@ef>{*|QYZ2>#`?4eb z-L+~fYv*tu87qG2m8aKZUIwl$16J(sNS)8ZC0#kS9TXG%%OISU} z5n_s17Y_vzxvT6WORL2nl#)XI3Y&Lm+D3z}h8vbfiBcfOA`3ZgLL&3nSS^MH-!CmKh&4od z5Lx_sx$dIT^k{Q-56*u;Z#jh`BJWuTul8Dgz1%@gw6v4!F|{612M$x6nGkSa`M5e5Du4+P3JK0 zL6aDvpo|Pe?~&*3siyWppxOo(B3najoCx2UqY)op_3)u5*oc;6>2VcDZ*h?;0t`-? zs&fMvPHDXqdCxDbKXN2MzxW38VbAvg$|P_uFk7m?(*NY-U%e6R!A5bt>== zOa4fw*_WmoD@It*JIho-gAD0j7HV6IbuJ^JNro#3@Y5Iltw}ws5c;n)q&^v${f6EN zNlDK;NlJ_SyLId!HcJ_OyPNm!VdDbd#(S2E3ec(hP$!A5JsmLY951AYd&}71)tR)( z8XSgKF-gx2HlgYPT+cT@xR?BOJk$T*$V zsQe<9lEm_I7$G-sF8rWi(sD{D?fl)y^RDX1e;zDAClLm~jzx!wRasz2fz*{%<>mFM zcR9@(AX^Y8L<2U{DAjXm-Op4puBN7@1|lu4Xyu2uqu~3-@7lGJp-H}Vkm7`X8-ybf zT_&1;RwhP4vk=u?^tRwoK-G?BE&~mJ%a*C|aQ^+W5FVlO3@HdV8W=w&C zswCH`Q`46(Y7oFy6DEaUxRS3*JnBT1O_03PSRe*Gbx{g8ADa|Vnn z9CR7@nx@xL>%Y85p^N*Efhn8vn`OVh@wbdfpTWX9&J)(Mjsz2Mlp#2QFx)<1GUJ$J zlgBBrl<%$e6~c%e55-J1wQFjbsECVl?~eS%ONT|U5|^6rO?QPq0Pf> z&OaI^S}OPAK`Xd8mTmqi7buA?6m5hDb4|-Sd9sPG1Zxpbv zqN${-v}BagqEJcP&-eBH{qFnmxclRJTniuX_v>|z<2;VzBr4NyGtq{(!$K>;iJP~t zy8~Azo*sKsSYYYrT6^Q>G{vUf?J5#6!cv;>s4k=_cwvy`L;iJTHuun>mzp7gs)50wssKq)f$y z>Yb%kDD^-Ni19+FP*_OVY>4eG(p^gvD}3qLCD`NAFrWn=d%L9!tyzOn4LRy7Whrb6 zOAZmcD1dkoF=ut&TqzRuWHIDHf*r4c0eVzXHBT1NBtzK4cc-OiEr~Cj_J6qmS>GTc zxrbsw$yeukRhhngrWlkxeat~)-zHvZGx}4A$tVj1cUAP#JZCuWF+m_$yrwRkU3jX* z68`fsvZHh%8RhQLvH-W6Ehsm6U5#DU&!ai*M|EH?W>`2sQ36!&~y_#S0f) zP@DPsV*0E5xkW0^g?{JzuJDaHnmPlYfVd3G%Ki_OZCdE&;*WmZR z4Ly+9727|BgblI#AF@>XEwQ(l*kLq=S_dD8C#9tu*rf_t|LDL|>uC+Z)%TXEQ_bJP zCO6dp=!JFl3+BuLh5?>BWt(`SrF~Lh*I!|20`Gw8=ThmkGy3=T(ABXK^v#>K@JVr( zq99Y8J`EN&&lemZG9tpE!-E41i;V-An;U3lRbpbGHe}c^G^>BG;hcM8*d{_ImG;V& zLykOxOd}&LJ;ysPE6O*+@Nsgf5vPM?SDVuTl{lLXc8Xa)xB&$HrVV5?OnRgf#07MC?Ifu(a3Dc@sH_BVGP}hmVxN`vh^&h6br@_z7>*;>q2%^~=tnkX z(u9Qn^tDk1d^pe9f6%WM1QE%%AUeJ0#`Y8< 
zy3J4bv!_qn*x6CEPo6Ys$Si58fvk&pMzjqtcN8`Qqt=A5-`q4jMjxQ^7=OowBStDL)z?3UXxPpW&-Fw>jtbU^-zn~JZYwM z_zx|Ez82n7d%Qh0UQsVq+bwy=s*F;(lLw}aSa-5Tkuc@8apPfh@KM`zX54wYloe6OOthJ0Wo2@*9Ii0X*^@y75R@fKU*2j+Qlv)@;1PKKnh$t7 z*TyC)s%L(FzChkxdvBR820rY^J!N;S@xzB1_kU*^kD86&50?xQ45b@Y)4s1O<}F&( zQLG*afA3CdseOaiKbJ2z{qBG!409tQLZUjfZufDqNQdDaC>ij!fy_6nDCy&e4=df= zpvKVBKg3))*<+$&KBXcZ(w@!f6fdzfRFf)kM#Rt()lBsnqg3N?hu=S?L!tsRZa_ARh>kX3S0N9z%6-!A>F@f{$e)>e z4h(_Fj62T?L9MW(gVSd@SrMQtju@9cfgpb=G<&q89iZ~w`nbR-9$`$By zE6Zpp7ZLW;*8oLa=EzJsdn;e}Y)(!3+dj`o0)|?_`tMO zP2Vf;e=JzQ^Iz_%cePn?cE6-V>2d|y#j&yy`*PgPOg{vk9!gnzeEU~ntr-28u;7fq zMR8AfrdRi$+6_y|al?nZ(;jj3_;s0ARZ!HkKfe6x_nxi7zk>1z1Vrf@-G8)Whx3vf zoA14T{Res)J&jYlKBybjaMz#8E*Rf1`luS`sP6CNpZTVn(4rU0SZ`4ox|h8g35O5g z;FGXHt*lOPmO*NFLhU!?#uckpwGpJFi>iUFVM@HDQg3!AL2ZQ+Ra@&O6TS&YQGqA? zF?nh#a45OA@0B69%*cD;Uvb)usJy6BLdpDk$Kuz`k-p z(mnj|5nWePpxt+()`xKH$n5R-rosGP**`i>Kkv=OmC@DZm3(sj@)y0C*_jSF{DusS zRtzu!UR!79pAC(Ham!zZO!JRg7xqQiZuIZJ0T@cJ}q>_foSXdOy)PAkUUAR`sIfn7xB@(bU4sx)lVje0ghAw48)UuZ_j z>I`GcP@DFN4k|_SOor|yn8T_o0Bcn;EPCOYkrmSTKK!SBYNtX^&)@nM^KN9Mr~gDw zO=bZ39KG-=-}UbE=UXVt2*)VCg5&{sD2(O3zZ@#9!z?A*(`8$TtJ0B2Y;*d>1txHf z=pSwUc4$&*aX($hnQHOFfBvk@UeU|rYBwK5#~gS{-e%uhNN%p*yt#VSDj9|Sjp-&r6T`Y# z*&lPvNA=G_X;E3(g)7cly+731;Mc@wjm(NCKfvSJP`x8x_@z63d1t#AI?QZ!Uc7i6gz5877tWh! z+1|jB2y=ajl}T9TGiIh3^*}2n>H+I&7%gK6#F;ep?D~518|!}FSGN893BJLfpDiuP z+sSbG%d-J!!w0;igu~dzJf@ZaRH(Za*flDjl8pf|qaEr%7!O4~k!S={mabbjI-nF3 z@c6cm%FnYwJJ}|*TuE5DTV>RzQO?n0>-)TJQiXVwSVw7(86%U6-YNjO->-jY?f%2M z)02~zS*;5TuYPTE45KOGQq*5+Vt2hwTG_Cl|DZDdR$mXj@Dh2a`iRpyBj2Vx#I>7O zuUT`31{`W@WTlpTBp4frJMDVpXdFAJF?6M%kDB~Uo_dv>o3deu{5>icUqiL2x#nXI zs(JE|Z)B|_p;j~N%rytKWImGhM%gW>nFJL{iH=u<&N#6J>tRNa<`b6!k1bLxf z6D+PD9%7=V))UvmPHbKx0v9e_%CKp}%9YP6D~(hCmuLJbUT_X@X!m^I)Bl)U|7eW9 zY;WOekJAMdT`lA?Er+6F6DX{4dGdL(3Ijr%kemm#C1-TlKW7v^ZO5+*Om57>KjCh2TS`7`Wn4m2vdd3-NL4L0>^Yiqmtj!sDVc!o${wn}S{ zk<2~!QZGOrf-(Ff)``MGeNDCx?R4N!Q>sv$6aLUhS-h24gLKEest!Sib4P>K2a$8! 
z1Lw~E`&EPd<_=9i`d_$C_bPYl5j8q9b(6kg*U&c-IU~c|tnAID0cM2w>)s~~h#IoP z4K1+{VFPeX%pKje_->LgPZ=Cdnf-VE+|Nc1zlEg%$)n|>R;VtY5M`!ozQx!qZ9YpC zz$`N}J=h?4BOMd{*}H#yw{UlFboU z6%{QfHdG5F!pJyV3Y`}CgYZ{}%shboqp|R$Eq18c+EvwWox- z`%4SeG8O)g>xwK-Dli!x?s_>rng~FEI1(Q(=o86oYu1c>E7z^53!2AmRF&uspFP7Y z^5jYPRa)BG<{lfG%4UugG|;TQ($mA}9`gY{imz{9_9>94lm`xs%3UrW>4q#ZX~%cT zcc+kgutyfKbQ|^w`|KoHBuouT5Z`fXp3Z)ZM^N+Z`FJs`MPWA$1{B1hd$AO-YkxXm z>5gDm;gW|}hFrZH85=vuv5CTIJDwN&JGVT~*P$FRv|cUBZ#)a?i|(dJ0M-wAAlDI5X8W>JeJJL$<4!%mvspPz*0b_dY%#p0)RA zbs}ROy;} zJYEW6LpA&C`@FeVnF7ZRONvXKUD z_f4{I-(p?c?5(r57=}!fDXN=R?>1!r3+35UGiS5S8bjxkC&5(4xv#jsD1e3T=pdwD zeMXNw%;>Lc@^|Aon>esweNXB4eSAvq-}jRbtcKtQ=EnT%7ecJ0J2l1{eBwOa&sdcG zGUET|5R>{}uBA$2)|yc}!=w4p{}*H0Oy--Z-1o zhaJq4U)o#ok=lgqs(Z2%s+c{~qO*G%1cG@&?5`T~GP?<(cz&(8RM$MR@9`Xzrk;Cjj%|@JwSAvMRAL;Gl zFncu@O88ATZJKFim27Z8L#5@?*_YcfkU9BQ-TL0W6>;88tIETdZS4J5YX7R?vny_F z8QE+%QNQ}f@A}sX1D9z!FFC``Crb8&Q=J>$Tnj#!>+pM%bHS2arC%#?L*)P+0 zSMUOv!k1P9(3(+}!W0CXvUu@OQRxgq$WNa?$E?z`c>Ai^WKw_JM3HBz-Sr&H1AaEN^~9p{6S(ux5;aUwP%|^wk`j;^*&RKCorStqa^v3deqdH`cts zbSMDCsAU~B?ZF3MU`Ah^f7x9w%(cE=H`UnkL*-;cg{~A&nMaw7p_x(37l+jx*i(Ik z0x7m=`DpB_S{`>|Hu*3=Kjq}f6&caW942l8Q}C8?DlOftJAc*mB?njcVfVbFR3X4i z#3RhJ@q@{hU7Has2hc`}qi7!4=}z4NtlFz_5U{jKdqXev9jIJAKDZ>5y?he1jr0_9 z)%ouzXG^Oz%Cx-SeeR8!G_r6r0E8%C(F&$00v(bIEi;Ai@~yN+Sd7}e$FPo$)byEyW! 
z`;V7Y&t&oV_l47D0hxPk+O!v;-|2Ti&>@}obvKF{i$6^7`ZsxssP(Lrvg1$_QyYm) zp&Sspx6{w2L}k9Rx;;{@IAh7I=?rpQ5wLyy*!C;h_PzBl>@Q$KV~4SCef61-89NT_ zAG6~`ACH7-rwi3;b|03`Jd^&iG-EyGh@MoTjGCd^H53hSW{f% z!ou#d4aeD7W`FbNWLKwc?ar^HN?isfLiXYPQI;o+R)G%*!lQ|~1&9XoM)$(wz05LN zG!%Xe-xIuNw`=W$)Vc_VhK(K#2du*qP8fF@C$u{$Wqhbn>fW%N!7co_Bai0ZsFWAk zKLT#NzGyx8I5XL|*;3)lV`pH$UKan{HLBKjgH_um@8S6g=rD7 zrl91cY(db@Znp{Lm2{Fal9C*mTp;}#!}kIN1Fq9=X&wgX|D#SUp8#fn*+|b5g%UAV zI{O9FT}w;B!=!J6Wkp2=T_%`xTx2B7Q-+vl@u=4m@(CM5p$Gu&@w$ZlE5cZM=w9i8 zbC?+N*r;gZ;?yo@N39;`9$J0`$IUXMwR$Rr7g%B2a;|#P91NE$W6D>QeV}(oKkYt1U3`uL(8ElR?HCNe04Kmm z6>`RyQt_RiUKLjgYyJ6?QRCf`5{Z@FGo+^u5+AnThP)2@%zDr-noMjN6#n$8MSr0b z2FXJ9n|UQufn~!>4~=5fYh)yVl*F;}nw0PQQj7ON57u!Uo4`nOX0;Pii&Fxe2e!!C4oNMR6xJRSn+7@7D#U*bStVmb zuXWNj+$~tN5N;xaX*q`iy@-M@SFxyn*`0S7xo;rIim3+@?L}Kh*22 zIQf)zV5wijkfD38dp_4`IoH@$_@G;u&my(@4VjO-WvpI#TKcf$@S&mQ&y;3x!HcbR zhM)4(YCW%1hY7^7#6(bLhGj*|;d}-K{$@mip+hGh(EsOKANDPO^$J^?Q8-3?v?AVN zQth+T^zUj+QgNJTpPiIa7PGH4!7+y{EEC$I$(x0~#rT7&CCNB2K-dG%SZ(p*V`UfS z#IMc_KUdx0v|)nt3J*6xB;<3kv~U1YvazEq?g+kzz^2SRJt$!^J}z_KestS~;Nfi= zg(WAQwKdEhtyW0VGEIAsc6*`PlBmvK0>mEd_rSY|OaSn9Wby{wOk3#{8)SOLI1%Us zy9m_PXlaK4mfk4jUnwh-r&HerF!=@^VBp64HuHN42nV)$;=}=7Elx~bd=B{CgFN#f zYNw^`+BY9=&in-nD7c;O&D=k27h_c(!jZS&Gql}Ha zD9bn$tT|%%;v6soGW(!My_n2@2UQ1dNs)dho3^)5s^38H;thr{zHSZc6=|+qhmXVJ8gt?_J<|dl6F4|(jpZoBfOrh3;rF! 
zu&WqsZ^o9*U4brst9vxeii#LHe0jGbCAkaQo-x@Lk$2ltJ))LJ_B;_e?rE{OlbYNt zO}+AAvqNm0vO{`X?H_Y{R8gAG%S!bhn{s|u++FN)E${eG^;=sHR%|bQdwcsw0VI?g zmgBKxMtuJ=525K=vBC*9*TRKo($mA2B42`kI$eU{a>N9to-+~iT=QME^c5>{wr$^j zS_4B{%)S`qc2&=iN z%D+2)Zwi%NV*5->P>a(cFjvMexNy}as{WU2w^~_Awj9Llo*))@I-3rW`H?>d@S{T@J#%Tm%-eEX+>-I?53Pp4czmhJ9j=4i$N)P z!lOovV2b)u;91|^o;FqH?~=^ov{9TWW#ym|)2=h>QkYj+BMw`jo!lS$TY2QD~| zZg?>Bp875~zjJDtB`F27ot+KYRmq$R6m09Kf6wKpHxW-@j7qc0_h&bm002-z4&DiJ zros*nK1#oV19dj)-1s&yC?NOENxR)=yQMm=l$T8xmnhijpyWNir?r-%-!Prflo8L} zw#<`wcJi@ufReKE40CfY{i8yU%y@XkE7kX1&+rDJAO_!*D7_#fS>N}juc7b*LS%8S z72IIr8dj{erCP%u0ZGB>?=V z=<72JqylZ{3GxdWc)ib~bAKBB%v$%z;}1W-YMFlj`RB#|wMvtWyBgAy*j&4PP_f$DmT@v+DGi zG`Q_olPel!5RC?PJ(%>ciRieXoKPCmcNK0s$|Og~^IQy%hTnP7Y*UIuMM=jkbPv@$hKC z5-n=)cJbtZRAQ`VJdvo4j zbc3uPxb`qzcl>YDUW#BFsP$n3gSA*QCv@-`aQkCmbuLe|w4_AVr=OH0E&RdVyG<02 zmF;|?zIVLZZT;Q!3CxG5Nv|U4nd>~2opPYX(IXRhuOMShIqvOsO zpPpP7Pnb8o*!1wDX>>`9+X5L9)$RuqRn8iu;G;qCvbMGD07xf5-Knrmmo^;^ADQ1v zN9S~Q_H4KAiEny_?3UBAfWq@3x9e|~ZQNGo{emG5n|+5`t~fwoAm8l3bEAAf^CSD! 
zijz#SNnvw<#eWm4;L|RU$bWBiNPBcPkK!_JR_Cq?A%3*V$K*ur5firt%4+i8!s?0;BWn(r_2Qy zw}iFS&gJ}o@q9hRi2zh&hL~#eAh~kn)6ba=hs-NK!*FP{4|#RU=Wk}oyG1&L8OhRv z5hsfeINqBnsXimV#m=br;H2J`e0!lHkBnrYjlZGumMy~R!EbXyYT-%Yd^R;f9W;@k z@D;YjFn$J((p335ySvr%u`{1Y8hMH`ofF0HjOZ3)HGO!HzH&&)*#Ck|tPo{3&WlL3 zJT_qz>ljEIUx+b9mo@Ubnz>attuMO0qeSD&g+6d*Jl)*jF=OKt+i$FR{aYo=Ys$0c z**`07dHI=hf|r7{q$Ca>zZBKt!+o3-(|5Ly&rAKGv21`)UUIc@s`<={C2Dn{+^22I zNGq>TvfEj)c}(sAbvX+eOJRu}MbS-``2I}}7RLb_ZsPQ?1!l)`it}|RE|#{q7-wn} zDVy0MCBG?_C~|4Na_g)UKWFIu^p(8r9$7nQ3_vy1Y_J^3S{t!Ek%Ob-cS;myS2AxW z%?M?!MztR8QkwtGYYsx)=7OG&qPo>$R=s&mgEwZ-50JS99( zhp~F1b1!X_iI*xqGm{H`!H}GJiTIK`9v{n%&Up+RD*t&rW0>jw<1i$=vB?ujyqXb=jDjHby%x?lGv3hu){JU*BC^*(bV)L9MWBEtlae zJPepZgF|VhHP+fb}*!B5YC?ACVlFV@FP85G1K!8{A`gNbR*Ke#YRfGL` zQT=Mkqa)3FN5y_lIldmcox(J^zmN9NJ6Xi5;p6pyh#4TZf#B20YgdxBArZC zdW-Sus@RNsa$bQ8=FJb*mu`mn2Cy4tP;`&}WQich7b zwKnU+RFz1HiIGZlDN1{nESnr39KEpQ$Z2Fepc-&?-$_LbFGT zNZw7#KXdT84B26I^)n|WzERX{m|gee`tFnxDnMh;^5Lh>i8_VRgBg9p(3&^F;hh`i zzuXQLXNrNaFQrX?@QEpR<=ibQpFeAvu=((ip+mu=ks)!!{aNb^r%W)7GIeL(QII5= z+qb*zVRf-Fjms!NZs#}Y*%DF@v~4yzJge?|+TRG>?^DJ@q_mKD`Z-Qt^!$?BI?rA2^*1B{*A^i?qy%Hk z0_8gR(y|^yR6iBEzQYu3=KEHXyjCdVq{|1reteu|WOSQx#`mSPOSf|66&Lm1H}D}_ zNrs!&6AcTB2dutj<+VR~@;l#<_7fMHxfYWrOxWvn6ut>8x8ZEQj7|N{{0?y~UuE@c zsnKz}S6T56Y#k7mx|KV7OVE^hC`Z?<|c*0b35AyR}sq~H1#EUC<@~>}p$9M;jUh+i=*6A=}fi+V5*A~y5gBZVI z(V*wL2|HSzTEg;RnAb$Qvf6SM*balN_tmjVj8hqB^UsLDs^_(|wxZJP5tSONJV97; zL!#ZYX1}mC73BRIBTBscSZ_Y|{LZ*t6OUB+%_XW-5K*aVSh3yI{wb^Gt!Iz(G8+31 z9u!uvQ<7Q@-_G90{oGOO^-q4jUgoutt(h?E6BT4Bx8A^I##S%fPX=Y3kC#^yqg?R= z*W*A3u6duth&1Q#lAImZM#;8cA5v@xl|R)MY=bGA>t$zc_cN3^wq`)D z^XaUa2~84meaKrDN+j|!Ls#*eCtuS23OjYPjaH1B(SqcG*F}GS9vg23gP0;MGSa%T zZ&8&Oy$ospPtI!zt@q;|g3pu)cJs`~uH%fnapU;8b(@nbMm}xW*>I?Qz$G-O%!pI3 zsA{E5K4>$=LD^vIs7FQP3P%6QnAc-k?|VYN*GlS*UczG5EH={R#1|{WeTs^@4-=;8 zqq$iE&joaXoZrvb0YSGc?%eM;Ue`2OU%pWK$<2WI7wra1xQp6P+m4st7og+yWS>mZ-fsZYxf>JV3%~q`;fkrH3Al&uqSKme*xg#0G#}e546g6 z)WBcKCw=cSy<-m!Hfm{>)RnYQz|yT2sED`h^sc#kY?}Y!;u;*@a@1qzx2yH)7cM@} 
ztm}o2W2n`c)#gjI1|57|Q8C5p&BkxqewDfA?FUURrM?#WxRlEOZkf=*eqen&cWm-q?O~ro@#S>#nL3sS8drJ z|1Q&B!(hN;fl^qdyG~+7M`_DdT*~RYSb8%TozcGO{`n6cu;itE-qJYOm$Lo(ounB3 zr_Yz7Zh{*r7jx0o==;DJ+@&5-qgVpLLYt?vXNrYI!j{N6b0%892k;z#An=pp>-Xu* zTea3q%nrg*ELHDhQhQM05E`1>*LolBU$!=k`me3E^)D|J%W2rhc;h2^!wEiP9pe%d zB4;r!6C!{0*|cXC3)+2Er0mx7kF;ALP>Y6I{%I=gOXtu1(%gG;oE0dUw|Dl1%?q!2 zX<(`M$trY&j!tiB>Ct1x5V_Saq9(r5%}zn~-5oc251NwL$jFqfT@o5-&tW^?%O4;5 zgkLV8M)V66_gE>l$3myY)cMIl&+*qsigdu7a1d51+rC`J7@9O2U7Fd^@qYPx zvxpq2o4TN-5f(?oKPp)!|E~sr;$z7X$4^{MZU;4GD!fO_MVP@a_#L&dU$*_TW%O%_H;nw_ z-`wjGprwbj*{O+c>$sYzl*vY-s!EqbUOel+Eq)- zUah0qreH2HNZ(6joYplZ`pw!C*RCCUw2EcjxJ$IXbW!qtltV9*b8z?mMuS1Mbc;-1 zxBL3ie>`YB1Vfek2E>1`YFsZV)hKK@$_IE?6oA4?PCRhoi^8!bk*fyS7Hc0~HS?vp zwa$az6a6+FzCZn_H8f%XizVL{I5rgNWo|y)YG>3I=X%Num=KjB9F5@{2c#?k9yLIS z3`6Mi=e3EdUV|Gxf1WsQT#8I@UF=~cG?;vLIL{pzWS47Ow`lW3QPS1_N@ZPnagC(o zp(n>GrYb5T>pvM7=+o6CWs?dKYaIayuFl}svS z<(7p62`8)gz{I{*yW#|p&0P;P934Wxm&e>d%EbbXkB#m&^ikiGOP4V0dBXDPwXFqp zKfE8QoVeYPbiP<-*W(Y*IwJh?y#^*C7E3&I2nY^}@tnEEx4dh@NdCE;4cn&zrghh_I<2EF$)juH?Mn8Ju@{>QgdP3t}2ZIkhAW_t8ZAn zdfx2W`Ikor9fh)0Smnh6=}d!fQlsyDxuY;}Ag8oSZ6Sqx^^FnQs?^NXS?fX_R;?P^ z8bpQ3`)c{;7z_mcUi)5kM-rnInnPWGYPF8(qXe86y_jGP7RICM7=BQzYYeHPB9E?A%!(xT|+u z#x#f@nwOV+JZ(f_-LhhF}rQOcA$6*Da zQ@rh!wv|%;NCq(7S>lSziwdOA1T{T*Kki5xX{ld*)5p8O!D63&3;1;FV-Ej*ZuIUVKo$1 zoBcLTy+3`Yb-^F$XB(_%e7rZ=e|CI9$^HT;Wz)7a?xSD8VX)rVb3UU%1~j)lsbDxVXkFNz0`=RwH%V z_w0BU=+h%C?r}n!Y0uG2rKtzAdFTMA;RiPDRzGgIjg6hW33u-YDq-S|kG^ByXM0Gu zn+AI}N8B4dqin^5sppTMN=m7SDXoKaqQ>gciNtjlY7c1oFSBYn0nAYJy!Sogzn;&^BMUvdXmWCwNy1n)DiIzYwe+btz;r%`)28XiE3Hy2#HZ|kw4 z4fV)+SEnJhS?m|0vE5w*62m6K*K}ITt^_$BG?}6QJFLr7?jt>lc+AqPYZIOCswgX? 
z1`9L%QeV$f>B+LD09Jcq2P7lB)2L;GjlVa;1qlsfW8?2Opo+Xml2_(`$wLsjoPO4v z*3m9Oc}%Z>fq?w0Cmp8Y;uO}HWUzuHH8s>@p^}C9BTa4`EPfcYTj-8~l9>DrjM`;_ zOc3c@%uh{W*9SQ8fw4=cd6s98FE}1&wXndQ>*!d9sgi?(tfJy`6h9D#`82_|J%*_7 zp6HI|4p3RRZ3%5u^^0%&ZK3ohF0qMg1`a3?6q#>imzqjguX#y4K;LE$!5h{l?qz~} zmBtC~HVR0_D(UGgajfD_f?ZQ|)Q*x!0bT5`pfEU;;ZR>yeO-PQ(mT-Cb<7O-Iyi`M ztOX4LLo#M@W4*6deWEO7?ZPWc3^H}-x|2dQ+CjTPQ|Z1$0*S@O>y()bZ~I;0%nt$+$SXZ|g%erqiUjyVnC2Ky^g< z=X_*&nHzmJ+}K5ElR-doe}6o}aa=l7rCCY|R^>^H*aH8nx^&^lsDHuHU_BZmXDGlf zKpxE1piS}Bx8T@bt6-pr3htKY^V481x%3hoa)5I21_r`F9qJN8NQT^(FJ0;*DVd^%^=bpi3!O#m44{I^ zL;@3+f5X zMG#8fZ&Ubthu>ncvon__3405qO}E35!vWII7(H%~vhr_+0E;@NFsflo6v!j!xt6r{ zPa8)MNqU)=7A`Yvk4+U8To)vXjtL#jyOSGbM+8fhC+Yk>;9UP{PJ;Tbwa-uL*IHH? zl8)hK1`_2VaCOinJ*lt^nMM`B%ViPnLWTppxp>tnPVwnhmUo{V?ClsCrS?H} z`kNQ!_C7rnr%ny;+$FVu2hZRbZ0W1;&gx1zFh@P2MNgRvO&K@N8;(WthW!Xu#TAj+Novo_{fl_ z9lxJk|FV5wbW7_)xMlT=v@AB(o-5WH@AKed?`J8~c0{&}oxW&hLP_Kza|uqCKy)P# z@=N!XlKSlZLQzyd4~!4eBsjrUB*?rHL8NuSaHXCv0z|>yWUKc}6^E|vXegk_N3B8J zz8g49QIU$KmHW#W^4fs#84|OO>Nj%?LF8CSF0i|pp9*qb3MtB%`1bSY0I2{4%E-KI zfe{2BC!r@UW)LA3UGbbleu7+yCXSUG@N3mKZp|DM?67!o>?fz{h+b_EfoX~_*1`W0 z<~>kX1f~&%3wRNFAXdEh8T&YOLkH7cK%sdK4nZ$|{pA1(^W)OMP88sayzdg~aFm^! zw}6=!B;)`-By9xKduIoSh`6{MRA;OyZyGow)WOc~G25a5sCQ-kWMH$qadS)Vv~B`3 z43Lm<3YdcTIzJ(g5Ilq_6Q>N|5$T5|6VOwmmn-$i`E{;b4GumXF#9x16DJfSGp(&H zPX2(}g2lF{axPNvp$Ua@{du|swF~(OSP1-^zb{kUZHUi15C_xW^iV6|cje)bbP&*? 
z2a5brKkl|J7KQYHS!ms_Ufp^pLJl#v#^S}8Xz+Am)v;;Y=IguL=Plw#SX;FXBogm} z?TFss^MS*jJ}vCp$=o1!RazsJbyffE`Ge-GaB;+>zh{y?gguki_r5io4z)ji6eBNn z^`*0ClQ(gCcVkErEG-bzU%Y)gy2F(>yPj3)0V)ZIU(|`FM(b6FSLrF$!f|1a)lON* zhY=P#Q(XR;^w|aGrL1fdgGf9}+gZK7@Sl=tb87xC7hrwHHDSU;LROpnfExB1${Fw< zP74A=^m!M4y02RY{QvmjLnNhnMX{L+=T~yH7@KJJ%^~sy>)zQr%$~{7bpIj-L4$pC zu&Q8UeEs@1$iSO zmJvO#hDHikfA(zeH`lxwM)Z%fwCX&{(kZjIK2DHFctIdsOucx(J)tan9Mt+<5Xn%diOiKy`21BOz ze-tIy-}iVVU86jV9bB~e4BaT4SyRI26Z>sV1r0kRJqON;03-(2M10!smomg}qV+}< zSH*GbUQHbTcW_Lt5UwBB{TMQ`SAu**6i<%)HfGLDotDszShsb|dg9Ly!T6)`Q3|8yv z!rjulY5#nnG_~%;$!@xP<|I1X1Pz`pTb3}cC2gnkiJhm8npNkX3b-*cavbUOQ3;sXsgnrMU+Ga>AZ!f001^C*@#hk&I%|03)09CNG1$Q0W$24 z{rT3aWa%~`MqC&nAJz93TPiWg#~A?vi45|>{{;6mFTFdyx=qU4*Jze!Dd@%_*@2RB zJ3^htXeY&;sxVqI?U7!A)SrX~VwNzpAb*3_2r?JVXX{qZAfZxQwS9eyt?ez>!Z2{T zH)UlPyX=N7q>w)O`$tOGn*D=?snf))1iRXGgWqn9PYStwSF7Rw6w+24zsmuKSNySIgk5Q7pHFME6kB3rKtv*b4&|Hvr2*#L!p93 zP<)k2gZcpn;3P0v2FQ9xg!o=%UdGHBjQMbT6f2S#!1OrG1c*3n%;O~w_US(=x&PqZ zjq(!I=z_i{FPaKd!*$`bq%H=N%me5@`Cbq+p+B`2`~nGtcrrjyQJ|v^oOB;{*P~T4 zb(i=akH)>OivYqKWEKV=f@0 zRVV?V`=&;KPd*qcZLX!Jx-lx}tNds%+?K5AB?_8e1h`~oYD#GiEpr-E7;a>%zU$zT z2*3s00uc?yqJ$W97z0kd>t>>6HFKsxK!?8C9JJh9`DmI0o_7m5z(W^)%GAvgBkxj; zk&yy3YT3MFqHZ`IYL_3q077b@u9^2*?cA)`tHBWi_ja5+SC8Tc(&8;DW!>>TM88u| z3zB1f_f3VNLWlGi{OFC_6p`*s_U;nlt=(3@2-fwH5^WT_@{b>FGyU-ta$o&92_x-F zSq6dj+zx(KoV1}h36xmW-bQ2QcNZhN8Ioa0^`@X;`TNn`L?$13A4@6eNH8>K92p80q+R{AR?ak-I+X$HKvmTp z9A&5l6t}`x78c@tDt&7l8;1MQjLgeg!pVl#0=a7O^^G5xCGC3gNlL^BoYsKjf|Cyj z=@M)LC;`IfE}qYxHO^lkixa@XP=Aw1c~rWSPK&vT&5a=*(`8UfEMS2)G;z*Y?-UD> zQX~D_qTB$%uSjt8u(5#|^af^Ih}JWD3)rFYO5>KZZU(IF2#OT9Ki%@Y(hx{;kpvVy zE^e-q(?UKUOj9N0G_jz|yGtzoqsNs(E$cseLF$<^4=yhjiAs)hjovKeE>ykqq{T2> z`R96Y==$%{r>xpilI}QKV}~T_DtlFb;U5)T}%a0N{(u zi+W=lbvGmuOqQzyde}wW0}~N?B>n}jn1t-oQ(R;gN4zU3DQRkI;@^^eAJci@;4$;S zD3;NzdNpw$4~--DA9(1jrF9ab6z)tfUcXl9?C!2M!TQjb{9rZF7sT%mW2eP)n2vtK2AP;r{!> zf-*7^R;V&Xn7Zq(V=0{fuF|@6QZfJ<%k_Mlf=cVKQ5=Ok3dfj=#CBmXQCYB-im%NQ 
zz;dXQmU=j-E&J77WOj`C`J07ra|3g4-el8=%FnrG?%U6Hv$#>KVhYI(=wC}y69oap z1&cV<$mh9~>lt7k8<(9^S5(|}B>?Fz_a#2QlN7qC_R%fJwCEA;|)$V(vMp!fu@D#pI49LC+!REb@rJ= z>$$FQ}%yHN>XR{eOiy5`$|r+~dEx;b#zEq!Zfz+v*?dUQ$ zWvyGm+U33Yqf+1Q`j5Yl=_JH%3MJS5?rYcb<(egrALlSJi+lyqqh^#U2NinDsdL>$ zCcT-huvNud_4vcHk2oi&VA(w2@kx}N)#Q7N$vk5zGAuI@8rIz5U_a}WDTxm&*a1UixxvVhfhD&4gADrXREzT7ry`Gq zF2&p&0G37S4?Yk|%N{)Fv-&B@yz%P>FJ_X?3jizuQXq{}Kh}c)QYYawzyLU?kC^B# z%rn3hn);~NSUOn`YCFK`=FmI%e}vsPclP%;N-P*Vd^mLIV|wcb_BVw7Fi2{RW;aou z!hiuQ!ut&v0J9RmaU*cVJ$jQRoT7HI5Q$$ ztx@@p;3oo#S^Z|Y>T(L+Rj*8zndEo7&xHmHA9HXG_A~zNn+^5pGbjaDzA_Q*hL_~& zCH|SlnTZT<$9v<(0xc(A1UdhdbZU?D^I|Pe!A8K%Pv{V|p5PeFn?E`S^#iIwOi=#^6(Qz`5(?VOYoB%o!-Xz}1uCC0FsSWtOURJT-F;9NANXZKP zKjZfuILl-N{ZlnnRpZJB%tJbar^F99T$}~&DSS+$ zfoK>}n2>&JY`n zSKE$t+T_Ru71vblvrrvUdhZ@HO=mVHd+C>NR?Uc&6x}{`=FBGmP3`B*4nOcWI9%|- zQAtB@0N4Irdyl|smbUM_RQ{zJkdJGYBe07XcAV_v#)W#y}~;8WMG>2o4sS-W&94j7;nbt~Wd3@^{LQ(a^= zbIu&a&iSsHdvW7>_3AYBfo1_)GHEt?ubnE{635%8gA&etT`!!YaAn5}H%?`8+$aU& z=D_obpG1Qfav`jb&mtPw*~xTHuSys(HxMs#pHijLjGUAuYGH`(p0KJQioPX^1Oah&$z zSe0IB-lF6I+{gD~(uWWfFii-3k}>;alfBf>xfgcZf0T0g9xLME@fzqLFusNL>q{L6 zh%9Rg_)ezKQy8q(OJrV9(+tHX;rQ{mz-fSUJU)s_3kd-dzHwut{!yM2C!0HT|8<8? zJi93SkC7o2?3g7+qhvh?EqXd<{?EvPKX2_%S>ow$8J? zZP@o!M9-!(a><05Nt5Qdyt`1b_H<$Xv|jRY-9*m-=rAXLztt;Dz4MFgu?c8mUVf0S z@9VWAX1IXS^G!5EQBEr#*rTCS>m>Hw77iAd@%dsr!2W&l;^e%1)z6nTWQNM0&5k^? 
zt?je2q=w@h!3hBKn%dFxBt_cKuzzuMG~FL9Jf70h;^-%|8_q54~#xSUOHmUKuAInKZ|DH8L9XCkZ|kJWXn)Q2bg4Zk_h4pV}nPedHND;LSCI zT&<}NsGj+hgdEZ$PC8fJ~%Q8Me-L`y}S%3lwK|(|C$gIn;_?zJ}NlZ+(_{W)qCiohpr6tJm z!}=e5FhhblJ_|Ht4>B+tZdkB0Ms=BQVNG*y6su8D7ar&5Mgx9^Vmg6C<#YGm7kW!9 z+f927v;M5oxL+2ykqN&OTgh`~QYZ0YSZI>s)kC*~-76>KY_ntd{$^QjQol6-ep!~w zQH%H38?MBq;sug1wPli!7@Vnrc(E)cLQ6z5?(5atRBm@K*qrm7o!K9b)ob{N<^1Z_ z){rpfIXg2FFYxv?v|%ZSP`MwG5)~#vCAigppZH|+P94kGfxG^UDhci$W<)M>(ZNgU zeUb$an1@X^HZ3VA*jejLX~7lx7y13wduq#i&io#$%ZH+BcasVlv<8>uobB8E?;0b>pL288CR|- zYH(n8}CO)a^m*PjsF z2v2PFy6VKQ%~!N_V2`Z*uk{6j2)kyox-03>%E^VZcO&%nEPgLi@a$GMTdOxRXY=ZZ z@jo|ZMCoIYzV?=zs)idr*x}C3;VV|I?D64Pa0*Rt4`o}qOq2&!>$i}IsyBH}ynpv@ zPd7u|Lmq3_YG0~xi-_9b!59&--z}TadW#Oxs{6tDeQ_w3v5GAI*z9`^Qmmx%UYL^a zGbg!7Kes1Y%BNTj)wlk}cd#{WzEjGcEYpwubZcp%wMSW0Hz5$^Cv$bLNhd_REvaO6 zQLUy(N}i_6+IYr|(YJOeXryLk*%u5<{RbF*rwL!1asK?!eCfuPabr}2WXoQ>7&bD( zDx$lNsdUet8@|U{St-iyI5Yd1LQ{b^-iFxaEZzV3 z5i4KqFo||XDV=z~V6c9|`Q2iZbU6AmR;?>ps#+Pe)F@8^7&d&4^*+e0XOojf z-vW0ov0mvpuSfKUo>R<5{k9PEbyPL@mw5Ai9-*>on|J7LS_2bDILMLtBWd*|yGbO! zAll8%Wu4YVC`fAab0Rs%oC9*5lHZERGzUi5+TQjNF-NPeaC!4T)?b@Nk zl_uXe%9>Jmi8bi^y4i$kZvNNgo4co{rkt;;dOxd+8vO*39{=5~If#2NS#TsVak`6? 
zD9q@Z+Jwv+6VptN51Gxaz>c+`zLJJF?z~cpc0-3@HLQnUH<|Ujt&cV~dV0}m$J@fjZ`df7X<#a?{JxlH=RD`( z^6ZS6gEBUa1}tMUVnaiIlZ{&R(d~*d_bqjNDY2klrtrjGs6$^j&XJ zJ-M0}lVNv6%J~c*_xbP`Mk(H}Tr_b`FMRL(j<>9#F(w(RNCl%5kyc6!!dsO^_<>2CQxhc~Xg zpoD)Q?(eZyO$e;|sBBj<<9n6gc(9;cILNX+i%ZB^p(K9dm5AG%}kS?z^V zYCJQL!VCL0ke9~DcPR$2dd|w4pGhT7h}JvyVe+sgI8PRvnBc>+gVmj8_K}Dg zCTGg`zw=yH{5T3}p8drNf$D8_e-$qU6@>8T<;#*kOD~*}^RT9QP_ z$f`7rhLVs`ltd|-B<-QlQbJ2n87(3tqMbG&r7{{sQ$tA^ZQczM-}}pXo!9mG{qcL> zuG@L*I?uel#`AeRkK=JX9*<){-IP-+`Fr$h=H6LrNF7=V*a;;XiN|WW)rP&BT|7Ke z&YW?488)I=>e6Bfk6eJfWI4OzjJE0}-rhLA^3&`$1}iXC6+S5`w;xGM zN_Mz)OE3TJdNa*o30*thU(>l}?5BNiKFWv^6Xa{orAc%#jvaVDFG~JNuEqh*j2?vo zcspCeQhJi=yuhJ01l(KRq~)0}?ou z)yG!FnB`mY2u;~H{RR+#{|T%-WcC5Ch^#p7(oyb)4~H2p{KfB{fvQl!5rA;|raI?8 z@!WxkD8tMXir;H{}1Fo&As_|5HRck-;7qh}1* ztQB#--ec=Q_Dq8da9W^Z)p^CG9}RcTRfvl6X;7mxvu{y9bd zrRbJKp~+v#m~FS`%{G(0vtHY6?d0nD-?b!K>(j#{Hm+3Pm3Swzo7L}rRvk9X*(yKm z5GHm<)6#ax-^_{jANtlcEh4Yemf6u)O*~Wk)GPVyc02yAHA-Lfb7u$bx#RjZ&bzUt zcW`bO&H0gzofnR99yQ49iKIqXS+#Q$ZhVpM8gzSfpD@R7Unf+FJ1U(xq3gfLEH}8L z_vab9wr%O$_0#R?`k&V9n0MpC)0+MB#&mdV*xRZ@NY?@>^;^mQ zeOGz>3cGD|>5=>oqfPz4LBXG3ZY+}RxKgPJ@2KFau5~IhAum?S?%Dd<|5Mqj`d+Kg zNM{#)y3u88K|3Xu6u8w^?viT!K1S(n~ND`pm^a#DX~>)_pK|PH~1{+e|c;7xhofL{X2Zs zgDRsnA8v*dVPX1Jt%;XWSpaA?PecEd41!kMJ(E{G>2UZ>*VS|OE*crRB;u;_+u;6V ze5@v&y_9sWvUlEOgd#pbwr zGD^K@k;06n<8CFlOPg}XYizW>S7iE>j%6lMVbjJ(zllh!+IDSrROVj77^b<3fzX*O5J+z-KknLa!)(@?LbLeQ@5t;CN=f0w6D>-q*WB%OLt_Z z_Q*u7G0!?))9CotHPYTna>Cnfu5O<0dfGiDMWTszW0Z9kwChptId06I13CenyIJVP z>zno1-#=t~=*8)yhECbMqkH!>qp_<;Jd3?isU(WfmR!1K+q+?IpCbz7)>|I7RC26S za_oG-%(i@Kf^OEF%~eL7yPi+e2^pHF+vn49o2FiM3a0kGQdB65VN22~Swp)IIWtoe zu%{XX$%|QLaVpH%2%0f#%vGI!!$ypiiB&i-XUDyO8xg~GBIS+t#x3t>I9uEJ&$xA0 z$Hy5??K0VS*N!3i@21twl@Qeodo&~HW;esP(Pj!MGRKpQEG!b*JR;uiKdFE{dT%*{ zxH8==hl6!SJ8Tr)X7wFh=XiPwp|A15sIN4aQ6SYrcoZnlr;MaTA{CWekJonFb6%m_ zfLT9+#Is!H4vbjSFK)b}txunbD!XGaW{EYF-tnbMsyXuUMcTYgrPxZz6OY!bXC@33 z74)>rkzi5C^n-~(M}e`4eag0rM27dK!zHGtj$k?OqP9pR>uWk{6bGEZ$@3~yIJUf& 
zKdtM=6N1nTW9RtElQbs=V;2QA*ymcIGG8hZ)#pL*JveCQPnc@Z6m~40*%Zl8K=^tf z{-`jRoAL-oBGep6IsARuHH2E)Ik+uS1|N~{X}BwR2BT1fPtdHg^78&inoRiPDa51z z$RwN&YNg?nq&WBdfTaKkf=pc3Y7u>Lj!rC;Y3y=NIjD7L`;HO=tj76@CfFH3T}$wf2InLrk=#~13)(#LJdX5TNek3O2RGcyR?0u zNb4zG|F-`pB@#d>RW-FC%L^6%{(=@>Y4*0Edrg3uc|9ul&@MmjHhKMwo;F|_jcDyMdgLPc zLP)>M7cPV&{dG+CP>-uR?lWlIwSC$JRjIM8WEdYn%5(bk{fpZ77_UhKZJS9#W?*Xz z0#f?mG^FOzlm7Totw)b1uU|j7$k{A>N(EaFe60i-liU#RG{$zfmyw|^{9JL}qJRG) z8ZIwhKzNfaJtbT|H``yZi-AFoTSD|@X;G7OnYWn><&LVJ* zz5UL`zR{R;Ekq+g(;6znm<~*5D7bM$+tBbF>B{0VelF;SWJn$s>wG3TIxZ;hA&M9s zv5~zidu9TB;w20sG-UpB*=CWsmCvzZC$J59L^n*4zJT@C9AovvH zJTNvhXZ*vz3%Mm6stgy2V2SxI6=Zo1u|w=`%=bpi0hyDzT}t%2vE6_a)BR@>@jSwp z))$nK>5NOko@>bR&Y}X9f8r4rRMi%mJUoJmO#GO%^Xtct>~-I{p7#)hMyNjkFn}b2 zPz)YY@MCTQ_m+CC9W$$hmvI*G7padsvv6Vn=XKj$$9r?ctP zpJUaNos|U_W)#dR=c8kp7BF()K(etdi`CjLDPk18KKjZi8KEc1o*YqQ{WFPveKcGN zL=4~NxPV$gG8B6^)#5AUmYF@a|UHBI@-q5+$o5c$( z_wBOFN2ohmT3XVLpEKvRt9!r?0vnDKcvuZ~JNJeLEFtJWu{if!N?KY0h5fT>L{AMBcG^ybOUEOe`e)p+!>FKx>e`FRPae_|bc6kW)VbD2_N9J7&ZhJnm1L(^k zS6Gd;1{%KMvG*^I%xecU1gQ*b3ParJ^WJWRfeg$K`w3IFx#dIoNsl0ge0^t-2Nn}f z2p5H=BlW+fkAS{QC_I2af9PLV_8dvy^WVo4 z5pwrEkp^@Ido*9@PCOpjRamxl>`2Amg#4YUG>exmy+)kk{Qc*h8OjUhdGwax8+w+@ ziy|cc7itVfG~(8Sj<&X6aV5 z7HKR9Oa$S%m(ani42pAVbm*?bNX&;EG4v z^GWQ5G0rI3yCed*`U?T4sndT0PJL$=9u6ZHeFTdPp)^-BZbqv8gAPONq}%!Vf0~*g zU|~)LG3!mwkz}s_y&ObZONjU6`<#W_6}C%lA>=S*0KpVnlAg|!v+mxD7puz4L-oJk zwstH7-+uo5_iW(bk6XH$2hC0nJ|g#jE=BmGp&?Huck6voo@Y7~DIB_>n-Ti>MGdTF zGJZsYl1~drcjlFkZCg1q1{~Pvb_A2yJ$ta1nqRJ-!5(|Z-zOh|goNfG0k#T=54^6J z%boeL8fCd&=F1i>y2@ZaU;rVh*zkW=4WYT4a5*Kd;Cat{L&tpkFa>oB{wu%Xz<0C4 z>e$fG5Z(?mOgWtA!PF5xGRlRNzyZJ#99EDH% zoQH<@;OqT{#7jc)Z+`QCFC|e(b)=*Y-rZJYvPA-i7q~Ip7PWR9EYUl67Ql5PQRcPZ z9Ofg?jfBq!V5=EKNRCAyUYgC(!`oZJB!WK+awGWB3>yX)GiCq&sW>0931Wr|2~~tD zzzFdM)x%xz0?x{s-v9p#*z$esnAcm7W)RZN_@5DNnJvl0w)ejmBywfT#WwNNm^<-2-+ZRWs%(w zf*fAZf|vMT2$cRO%kp?}d`uWP+Ggp_j11+Z!(U3U1Lg^<4p~5t`#+~acJ|@_oCYdd 
ztn}9#8}7rjtjJu=F#5>FzM_ZE+GLm{jJ#KZ&1O-zXif2e5;jXY7UU1<#?+F*2-%z1}+scj!hgi-jrX>vcfNY*Or%bJF*kyw9UesfzAjCogebu|jIhILtxBGHk- z<0i?Y&1JY;i|-bRjNv^7K$(RLvnw>zXVt2?{;duFE3m|6bfiL>(KrTb?be`WFuawM+5Bz3#Q+s`1y{{YZ{An0jQijm0U+B#TCIL)bpN%%g%1Sb!xZ=XXa3l>F zw0I%&!$2B?Or#{oq)6<7R+Kz>BCxYhx}HVcgV4rEL(Z%#s!iT9AO~bhGjsDz&(6)I zw1GXQuyboI6;k3-#5+_#G{2!BM(>eMTJ0%TqEhYC# zy+OezX?BdN5Lr&5G5VAbR5nxj8b_)l2Gt|c);1Q?^;vS~* zqWMRD3#p$-W^mk^P9lGagkt#Csb?8(N>auFf7UI1^NqAbHJ~Ouu4&1`>{|Cv$-$|^ zaC+?xRkQ>Q8TU*<5oM<*sMx52NDoxi)$jR@DA_`v4T}=k2+C?|@KV)PR7Nm;52Xk+ zR%11lCQ9$n>27Yg9(oha5#i4@?IB7@{P(&Pc|ZpG1_qdj=^jKm@}|1_7iOg#bw+AR z;&zznu_zGua86QjOi{dqYd<-tf4F+viWY??>F=~TPZ>h4Ka$QgnjD7^sqEmqw-LV1 zDaVeHkqtL9d->qO3WUL&ExT@Q*(4zfG8U$5LntTTN9MJ!)=gv@^6z32)^>JcR#x2W z#y(nF9$*g0DLY@#9y5st(vepbCTLHdQ?`>S3ih9DjlH9NsNwJX1@WAu%1LenuW=+T z^T}Eq@+GtHlib9G)sY5U_*CtH88T9NszFGwa4AunP~Tw^#ibZ)bg-=ccWcp4*-P81 z%voe#U45pJ((4yS>b=WGNVWL18K}fT6%lWkB8Y5msSMjor@zhF3e} z6R-`>2?l7Sl)Luqi8t}qEK3*76T0LXX*?D|eaqA}SWd#rGBNq(Nai3C=@&9P6EPDK zLrm7@nf3^B{P$&@p{V)9!x=rX>EaYJfO|4h4iD*mb?BhgH(~t5iL);*ddNg2!Es~7 z3df%XiD?_%SKl-l!-XIY3)j;>&#opP<7XziE+} zEM}jwipo`95=PTKl$4A#ItS_eyRSN@9+nK6lglG1bCiID#T`J%<|KkIE}TRL2HsR} z;5T03^r1|T%_H*FBnU@tvH71*=z3={C8I1oXvuf6m|ziyPVJj9s*84IdKg4z1>jOGYUns&-< zF1<|^6%{3=sq{D6md4+FTx+h)&D-AETx~%RUSQh8$(alXZx(0xI-*c0@uX>&ID11(}K3nhKH_lW6_lXK~v^R8sbPqq(i~vql+3EPbXUN`;clL4d-N zg8+T4OjpfM@88@0ESPI*$=k^JjEE}`)x3c7u%3?1lbD)%F`t?=_ z!2&1y=1qpXA^iNvcR{W?_D+&mZ~j+21!^un13PSVxb_Gso&g`jmBUO-=2feSYMj{z zinao8UZaE9h?EF?iXBV(A#5Lp1QWbDXRw92;{T(LFdWyy&s(a(V4s6X24(u?^$iP- z%?mbXkgd6~@%hBWH~2!KDtE@wNViG3vvkoy~lDrUw9WSv30sJv*DUpgBG;AJE8J=uaXi9mS zTAlxWIKoNb-xEW)k*SH-kfjsv(My4u1yFd>=_3In29voxXBhkQ1s%DC9h|d>josbR zB=J!0_zf%xB&yC|xKOxSW8=~%Pu3tuhoSF~FD)_*Vr^^!ZiRS7MZz!tX7PVwS@+9F zMM0WV=giqtsaCUwvm7B4B`M?)Sn|j8mtbE(NFmqbpa}f^O}k726vuGj@ZoEqi=x0~ zGOTi0x}^o9bre-qIZ*`Hb$1S5vX!b?E;^ArgjA);0wZ{PRMrr$Cz0ho{$bEF?g27i9&Wun z??TOy&$bEh<1rLx_Yy2D$awJipKscurWiD6+UXhlc_0xH9++2nzcxd9i6)cQx?j^3 
zA-raFfOsw_pPr?^1LPtA1hwXIusY=Pz_s#`w{F6G0E*f%DT$y>Cv$Semx=FBC z9_aRs+~&e_w@tFyeF^vox`ACSV(-s6=W&AtP6}P*=OhySWgJf#G$JxW?ua!Vlyt7) zpS|u@bt;!YBl1^t93`EN85r;aN&>8}Xi76YZ3J7*&=7+kGmu>jEVSeeH2{e$C*g)2 zhkpeEHs|$H*~K^-k^vj=9-ndZ+{^FhtW~{AM?ba=F|Nb99K7^Frev_Cmb^!`nnxVopd{cVxCzCA@w@TVDj9*fxmhc85n5`tZ}*hCW%Wx zi!vzuU?(qooz;jBIzxzgf25;crh_@P-?2czoe&Q8E(;d@n+5b2QnpFDw@p|FE)mRS zfF8_Y1GeJ|1)B2mZ{EL0pxckO{INWVh&(n0y%YdA zVTd_I9f(n|R4<{rF&@kh8Qjdx<#dqxZziHr^Y7ie`1%gn#$jAzPpAiJ$55=+Qe+B2 z5J<+-(Ok&(fgZs41$gh?yLOkiXyp z1KNPpfBYyn_?HWSBP3AIYoaL)Dr)!Wn3((j#CwM0wLPGge9i<++-it!cqYedh_xX& z5ZCa0dkq|oW0^215DN$45Uk0g4kEiCBk)=Fu9K|Qe@1N*$XUY%Vj2Ln&Cas2RK4ij<_Rf*s1B|+hc!krM5Z`c2xV_YeuS~-BsGWu z_h^ou(dNJM_5FL{^_-u`t@-VQLryIK?wp18ljAb@zYhj7%c_i7Q0o1(L?zzL$>W3@ zq9L8zfD&SMTY@qRbeoj|bL`5p=bo%nz(Kb0-o1H3qGV`EL}5M?X+``<-?7l6YgdZm z6EZXXS!`&xmMl3zpJAqVw(SGksjx(a0x@70Mi5*tLPnYpcl`z$TK-%8q>)E|a*z{^ z4lS-GL#XK`)1{|SfFjDKhei=a3W>*XA;%yhDGCK>mCv3Nnax;L!b z7`gU|8dEUpbf}2;^5)#R zL53nx+71pFxbaPOEO1uQM*OFmdXPD?7>9K0iNadY;H_`y6MG*TsYkk7AYdmj`VZ1q zMjvQ~=f7RaixjlWe$k?mkU`VW<8ULe*f`voK>9jsz0h^T^4Phqy_5|@b4ax~D>*Tt zS|P1bR8TNx-hzM@5Nv~Tl6lN|FR`M<>$Gf8jbX zYEC9?>+QF0-v(hbRMEElns(xZDb00$8>)XNeew72$j=Vrsz9|BBge7b6a;4NPA7q+ zfKH|E6`l}{Vu;@iQ9>{`1zH1|8sXc+C~kH9ak}t*ufUaR#y;-hfr0J<@p5O_Dqk* zK5pK;vb)+6mVxz;UEP;IKc7OE^6y!ib;zw5=TP>?S;8C-WfEqgeo_0|GYc5YB>cUs5~ zeY$|2xV#qShQ*Fr3)Rm5&x?`j50dToI<1!y87x9c<1@f}2OR+SY*pEy{$On!?}EVj z#EDD_56(uG4krMIGdf;44*J>S6#05vXO1`+_KLO?flPpG07>T@hFc)?ecyKxh28Gd zp##1~ciX(0xOxbKf*%`d6`=@8J3xLw2DqNG7t!456DM@?hN;W-)z%KKElI3sI~~#@ zSje-#WOxu0z`ZtDXo_kA>5xC9wrpX+trXCRH?AgyWxAd^28>ND|Bv_@8(S%{!FYw$tXk#%ibHAQ-fYQ36yVNM8un_s(o~N_(M%i z0dczjEo>0~sj2V+K!II%Zf+ipMw+TCb13{P3E{NNX$~svWE%Khwdn(&04hXxk!>;( zKrE3<7)2BZ>MI2uw}I2<%!xmISi^dZgk>wqFyX*u=F9=Uqr&qA(5B+~g zk9Bqd0U@VG^WApE=$;1ySNtFvYHf|ATgV zKE)U5Z5=*v;K?878_fIqYKg@_dKeQlh{2 zNM#)!=sI=}yG?^?L4Qq9NlG$mSK6Sw77NTBDW~uRfExoa!MP75K}x{$f?h`eK`_KE z=9r?bGaH)fHXg5;KNB4!EDwc+bw2LV!bnQy$i4aUW$>?AE|S4-PM*zScZ^^n@suNy 
zB5|C+7y$-)`uOowh^nnK={@%9Rq=mrg6Be4p>(3oPMdlhjEq!-Acer3&U@^q@X0;&{m)9Z@_bI z$8Aran6@?u?KxvL?yfKQz%Bb-54-Itcv3!(Af@0aX!!LjwOHf9f(vkhwoch8<+mS` z<9~7zUX_rO&^X}?CMO|*!%mg>bm{WtqQ7}uG=M9n;%~wE>)uC8h~C(8N-$F{>F;F| zcK>b9=`&}3WqvR#Zr3fD5@ihT2`3o!kYQpNIv0GIgMm~VK?&&JX}5uW`g$?H_91cV zzTY_>TQO+_hIjOn7~z!JZk$e}Uhm!=+eymI+tSi0FOZ$&vw$&6-s81F%GbT?vfv$L zrG=bM;>`7k%1cU|gnzS1kZp?3?ZFKNYqaRy?Gq=V90q5=jRKZ9VmrZB8A<_CJOm?T z*|e%>ySp0%Y(%<80&1xh&0$K5A0R0@R9z&>tRiq7m>%m`w1P-3r(udW$#pCpe}+Ak zNSs2TD8I0964?e*4%b%R>PUouN#*ZiqZ26rBxINOWIBpUx^oJHio$9ex~N18T};Jm z;1tj|8L2HTp(*!PWOFiPQ<$yi2}9tX;_7<#)G1Vt3r%$>3~?u00-sJ{=Rffr5=_3I zji}@-1emfHF9^Pa4`LO?YP({r(akcj0q+9XYwzgrG;{R@Yo>4=?9O-|0ckT9Z3XKN z9y(Nx;TcuRTEv>itmz!s3cqIga#BHuGe`w?+EmumU}`1=vw&^k`Ae5AV=AtGn?uF|QysvOq@+q<4bDzHMtMRF zKY#YA_$rp57Yryz;Yj(n(9n@=VHppC*G;o$-z!w-L@Md|CZbngNHk&2`z(~<7^nQD zR>X_s`JZ5t_@M-cS8FRyyI+q_(m&!zTeNOq#|)MEk_sUl`GE?9PIRNj!XrR z$illoVJBIc^nL)@&%9eu@JY^(Aa9TylpLizcRl6yoniryOLNp32R(&CbuGnCP^e&4 zom=!UiA_ItGf4Gqq#gJ)jXL5~;F%8Ig!2*YJ&hhcwil@dtpK&H0Cq0?yWdVv$og<( zyf<>Rgs7=~k1!=o&G>!$Fgmeyb|Lvf{8%<-OyCv)vv#tbHCRZd!C^r_+JK}ofQ^2k zcrJILwzq5@3N2%J?B6ffz5;FJT4GE=(-ighvxgZ9g5w~M3U&KW9}hZl$JzQBXU?nu z&Lu@7)x%Eb#!hdMzZE?^aCDGkQ&UTQMb!BCg&cu8VIl>XK}&BJO4kHdosAB4j855G z%Rt@hIXuocBZuM&aPgv(;^Wd%s~-CS>S|LIKoKceHU6$gWngVB74W#IXbGbc$qA2n z6Re*;d{|pkjID1@j(GsFehR(hBq28;9)~93w1z2X!{}imCuu)9S`kVe+PPfQvtFp3 z!KOxCnc}@_<;s$>GCFSBt`MS4XSDQgmj|KoLE{Y@PS<)lcS2ik+chRK-=0 zgL>)BIQG!0VzBbzeaRiSS;0voo>2?Hs3A-5-ys*D$((_&l8+48RoPBjWW$&lSU%<6 zo2)x*yGhjKa1mNzw92#9V|O7B@;P$!;?b*9b$f&%s!+1nT6th6f(~f40cdN(;ygBg z8SUH}dq|X6;dG<<=&@sK*fuopXk`&mVTD)q3exo9cxC<2_X|xT%)Feo*GWGsur((k zp#mVk@PoX8qv`6B!v-k+>^K7kebC zXZHi~!R4f75!%+I`Dk$8w`ADpu41?ua_?Hy{LMLg+P+wI}eA-t=q30qg( z5^MXN+_B<2y%O3v^?NZRkW=gTOMAC=sSC;};ZNMg^$${cxe(OJJeUMFhQqZe07 zmaF*}%`&ZysEE2(X5Jd1V@3K=T5mLTsGw*O#Qjkn40E+xOCkbAnNKMOG|kq1BQyf; zade#m$~}KxFzV&0d|v8K2axL1bHReKsam%SLo|zzgsiGMGubJibZK9^L0b04RcoRq z+f9zr9D1NO5;77VE$pbM_2+aHEeA*0^US3s$8Z5ypNh4*#)M8bd}Q7AQ^`b)`TZBT 
zbkF`Bx}v$@;=+QoJ>pMO|0L@j_C2>Fte+6`Yx2lLxJ`V2_-Acr)+<-!mAu_yL)k(8 zrJ`a{NyYoBzA>@d6&0ngI+jKzCLicra;3ytTVwg~vf9@}LI-S9a&0U)b-?LdmvFz% zyM~r0r@k*&maucx=(kB?b?aOY56i(3-!o(E7w_WQdlJ6T$-5wL-5qizi^6_nd< zTbGy)eQUS5yojj%{L?N}%UU5>`ADMPwy2^(;w7RcSMhmK%&DYx89|j5rQI8fnh!ka zTe|m`SYfjNt?`*N(vKSJ)`m*>Hy(Uhkg`|SeqcN1dVF8E{j*2H8mH-WB34|nYZc~g z59%@5OE(~cZE*B{L;2H!@ssijhI*&9PLviW)%^_9nE&%ct)FV|(c^UvR_a*A*v9$X z-BQzRS)nxg;;Wn^R&vLNgp1`fUW#h;tdAW9iuW4KjDg7EyTp?oNpJ2n1Ak7SUOk15 zVYhX9Eva8_cPEFs!ba!F!u`Bs&i6ZSS0o?FSA3G=HC4i7Oc(v>DA|1&DhR|BNfj&eO;s;cSbhCPEQ=YZk$nsTqN_u z4zIDBjk?b~WL|dDFMzd`zw#+)*!{lz&R8xzR(>*iZS%~L$>eMUc z<7uV16`QmCOMR#MJ37JBLivLc6lK zs=YriX?Yc3oPY4%hp2#A(f;QHI@eAqT;@F1XNTWS?Va1wfKE=I?tSf`eZ~Hlq3Tg{ z7EyvcyV^N3%t`+gAV5Y6B!;0L<*v^ndhag{w38WSmynPw7bCs-bAAWA31h!E>W>+0 zJltmNwW>WEBkv|>Mddhb8Zbov7)cP9+^lzBg^qUVE@!jta^E(xcawb`k!g@y1K72b zlnA>`Ea<+|rtLCLf6n)X`pYV{vnW9G|JSBN_87^L+h6OB|2|owri1FhKHXjXQg^;C z-QqY72DoG}DIuRd4<>v*DP!Mx+=4lAv$Qnyqc$@XTS<1GR91(K#?%kJN)NtJ4vgrg z7PZFiY^TQ5aoVkx7S>;5Y5%LURgOo92`w&HPKxZ)U1oQJHDd36 zx3s5k8|b3TH_R#>eRfaI2(J&fYZGYgVPN3*!P%)t$8Rr@1Q!mR`q(15IyG6%!nk`c z(nqhjasJgKZ{I#k^ya){l)^rn{*r~7R;8y8?eURYEEjoC*Z0usL4!{J968SaVu)TZ zItj8xg+far=rsocBP?dknd8_eE2TQkuHrZ=$?*X;$&}f23RVPczc=3@@q~HpM4NKg zDGv|YyC3=_H?gKpn2^gS>i9sPn2W1HRrruNrpt6*gbtK-g80yHd6z=NXT6j(hUxk>2b+QBw=NMivfdQ4ssMWtE!tp-E)) zGp0?Waa8-H^XRuW;mxJ(K|HzsOo_6GlQN}>-!w?RkDNXrCU|&Ef(`R2#aCm=MhXX& z<_+~b3@%1{Q}vE`%ptddgAEM&jtp@$)@x*dfKus@?xiEgEwFO=vO_!PX_Nf<*ljPr z2gKav3<6D!ieFmweVvz6bVR%|edY@nFA4*t{sf9Rvx2raOJ(oUH0rZBM6-9FD-m}~ zudU+H0g?NumU8hrozxwh&zUD5Es}{j`FTj&>3T16PKPlO=^E@MnFQsuM2=q=LP**( zZ&)u>^UodUKm8atO>H3d*z>Z=dXuNdf?ifjB1aVsFO@R?JZ?ksi;-R)G4Xllxwj!h z)PmeC+fJxWJZP`YCoW!e`LK|p@P|cp?<4x-!CZl=F-RFYz1*EGeB0$#5cae-$66LpS*@79w`Ubn!i_D^3?ASVm)WoC^Z05khgJqY7@p4ORrPs@(1?HqG zdPOtxsn%Xvy7o4nTQ~YjzCw#-zv5#`8|l){KYRseY{qG2y&j3H%LT`ny(^UVn(Tk6 z$Gh5cWy*EEE4{5Z!5t~}oj}P@+WV%@hDp5S=$q{}V+w3MiPIHpnCzpYQVbhqHO*88 z6#N-b)vKmsfknx2mIS#$q3oLy*ZE_2jT%3y>O7)AQQU*AKNS~ho;RH2uV$AK6C(Tj 
zjoRt!QHy`tEsTfynYk$}`S2c@^#$`Cj6{!bEDFCj)jVO+(}WV=#Lq(}1^yi@FhX)0 zxQ+EXy)h4h{*R5qoJNF3xPY*jaknTq7)qqXj-qif{wu>KecQ~DJYoEJMhdK2Wz=Xt z10Vn1EOqhgkz`N2Rx(MNaaF0=+1bE|Jl@w^`#b6>;GeZu)_PNvOCBqE%5;u(iUf%6 zQQwt)eFfX_y<36$OATVrm2@Ul)$YcWE>djeGXX6QmC@)mpf^0oc}tg0IN#~;9MJRV zU7DuU(==l+4Y8Zn4?;8_u6pp`V^4>Gwqq1EbNckigyi1VO?P;L0B@c@`&nK`Qe05o zU_b3hh@`z5Hv3!87-F}|%7l%heP5;ar|j^f(*n3_z2cINA>!!d&fDO)+8g+l9|^=J z#pAY~-!%jo=g;nRcq^Syd@#Oe-&gBPcPGspRVDdz*~jh?ftJ$Rt+MSr)32U7wQHAB zSz#&-7jPo_GV0dlS3VEv`dlyf)~$BKdUMARihyz`!L zh_=LqNfM@0@p~}YP*a7!%4DKcFYC)!55z*Ib|}f%zJlfny*c}0w?eOYj6a&}&g*K~ z4}-7aSB`+f)38NZU_k{v00)9BI07zEd~2UUD*mn_>_bWoagxKpttWQeuiS5%&i0Uh z9=!A7GMtn66kfZ6i1^WRT#(B4x^0E9&R-^1E=%uf!?_3)0U0;>_%g1X+!lU*>G)9RYnV=)tDk&=3#EH86$vtXb3N z;}7SJqQ9|rdi}9B0>OZ~UN|x2%c_650EOBKJCc&Xo|%=n0BRe&MSNWG%EbB>*!it` z)p+PYKv6lu@R$w1gxc-G1z{cbu%Z!}*__AQ#3ZcjZHWP!CUPI`%)N&XPaGVvdL(7$ z#&+A;L_RpU=#KMzvEPyY-m%s#S?4Ir6>V8|=L;j~Be5{;r0$fLAF5p^;crj7ho$b@ zsyzMdam@Y78(`u!fJ`$|9pggN*EwUfSV zX*}={ffL>@F77+B`mF`vac_l=A$c6Xs%*-|b#P;3)rQNKy&Z4WB2lAQKdI|_>}i?0 zNN2Ke0T3|XSXpU{>Y;HyGXnvLPaa-(dwB7FmYbC#U&o7)R zUxbOvk=W5FSJr_-!-uxJ6im}@{Z#u0yofOu+ROATeWN{E()2Udwo?>si)VHLsg?Z= z^=-(8^Jc;;%`Jrl#%AD^9yl;|$qI_n=>rdN(1e79G+3~Vj2hZo0!)=QxQ!e+lI;Un z%yuK5CifavDU6G7Xn(RlhdSgAyGvaPhl8KM?!J9{KAbea!@KEM9MH|R1o0cITuDW) zHG1@6mk12N@b>jVbp=%1)5w!JSSE5G(Yr^O@RG00K?eni)XI4uh& zK3mBGeT<10CPRSWHx|f@gdw5Xs>1a3Yu85Aed-sg5O$kQ>2GP0YVqz6KySo=JxB%2c<2Y>?N-NoK$K-;WrAlT;1ZJKf z(v2GRN7$#Qe)CU`W_Xp+(Y#JOgS)rZuMwGAJ-9h`QYh=9l_mkd4XVbRR?B)k(dL=D z?Z+e4X8iI3r>-M^ZN`&}M8fs=kDU#lL}GKF9>b&7K=S0Y;6oNpLbY91)#}4(yT9qX zcTe<|*)9-#s&9F&;`HLAYNLHHKw^)RKCWt-!e%<(d>v*^xnPJ$ATej z9mKuV@y_D;^M~Qxha)g~2$-8}n)FmU^h7myU@%0k~&8-m#L7VUc)iC&$|$^u9F zOH8Awhwm~>1GwMB85Ic6^dOj+N-T`tll}*3sW1+UadrECF(MVfh9<+4{QQtO)BJ}I zyDcpc&9Q+1_S~k9x5WEI^QFAg2J#y6E_%D1)*M-1O%l$-Nd#+!fk)#gB{>=Ev{e}| z35;kTBc6n@X*&HD`Aa@u8vLj`a;X9QB}#UL#E|ue&O>MliV`h%VH5*RFzM{mx9{G_ z3esT4?6I`CG2;(m0YM`Faio!S;$!w@GC2#@ASd8N2M2zf8N%rD@^`706|X6>X_0X1 
zCS};_;_ABh-d>guSIR*SPmYGW@vKjD!kBxva>-c<6=xg>PpstOkf4!{7K)5x76eu{x!e8%$~fGFA-7~D3B~ooR_(%$-Y~JD{`UueLijPMimY|SQn>Hf9BzG zHH9JRFD6yC?W?|I*9e3WIA?6#+AhkH>)=}X{B3S#O2GCdYC&_R;2B!y%=vT3YP@TJj!A32j-C|Fw0$u#{SAoun1a z>}On!$kG|cU<0(S9y1MxCJd59uUA5du;QTErP%azt*^)(m~yHBK6@b8X0|u>;Tz_3=k@G zz>mkSl(W#}D?U%?k?Cr0U&zltNU%tVh=3D1&)a*_3B?gPHO!98v&z{hK5LA2p9}VVH^x1rq|a4 z-AVl8_w*#bmpxo9*VsPsYh76*&0lL_Gs!t$#;E<9%+un74Ofxv6b zIcWpzKRt&PhgO`+6^QnFdNwICi#YN3kRZ_gK?y+?;%{LHcSRtsr4Um3>gnMd(ZY}s z5IkivYElEj zi9xow6s9^eGcJwUB4ecSs=NjE@F7w0S%Ygve7-;hrLMO~O)@Hz#yBV7xy*=}PXk z5VExA?N9=Wk%lr&P_{g=f(E@Xni_#!W^mNWlCVnz% z<3ccsGtChz-_YbadFqt~0C|WIlDq7h(6%3_f&Y=*XAz}d8Z*i8eyn;=h(DP^S3In>p$e0)b$sz#bmZGoQ zgbJ}nt%XJuhyq#|?1=u^=oK$s%pCUX;{=W;M@J}Zm@A4K8q`QwU**}^lR$R@DB;HFNISudG_&N+S9Bv#ZOxSCuhhGm5Zpn4MG@!?k zBQp{cEupkAs)@)uYq0B>xY%WDv^VM0VDM!ZN(WZHc_WWJYl2qyER$pH4lr|S`=l4?Hf6EjG2!`H{fn8+{v4Z*Cr1o z0+B7lot@_D3bG50l6;u5Q+ee1qp~HnsiaJQuuVujyQayzPTaXt-~c^%aNv)dch>jq z)VIcSuXlKV{ie&9xpUw0dN#o@BnlEVkd!cIDEZ*YkCb899%K;^5L3W==KwYwI@INZ zMam(^!>0QW91tdiNLQb$5o2?HLeSskDUI+ALCo2) zd!Jp*!djt1(E+E1pC=ReIJsHTmLXf9^Dd34F&ug=&FeElIMUk_kMHt$J~T8Mw|8Lr zXF$F4hmMmK_jB*j(Qa+y3mG&}_4)IlU%N`jzkcCcld<8;8VWo#1L$p;N_2;O9P!a5 z474)bjYg+-ec!8hMd?r(-w6jbvq#s@?R$`9kVWl^Oo)jZ@RrGEG6IvdV!IJKF$@T7 z{w+)@B)=zh-yETMU5g~!mopKHB@Y+W0H&)~o!(Zfw;aNJQ(yTGPy?{BAlrKXdjIBI zb|1yQr$@wB|LJSqzA9m`cH=_!Bheo?VI~I*C!))@p*gebR_IsAeVv>?>&BaoDSjcg zs|=1#u+}m3KE5|B7{RTyfMb{-@5NY+r+aeUare~IA04mz?*Cfu zcRVBI&CE%Se`+Upx7~ivWL@gnEym8Py;CP2+I;88MaR}xMSd0gzE+M-$SHjTlYsg{ z7$8C^!+J#*^%^lY5pQ+NK?RZMagScTCiOT)PS%S#+i)FfcbNKd)8skK&B0hjPUWN}3F zssqH$q5v^V+gb7UTOo(7(u68W;?E4_i*Nvuw(jy9C-4adOS?Nc87{b|LL{dygzYF) zPZ+_VV=xW}`_0CGtlrkTlV|yq4=T3}6tNAy1_2spCwRw!2!_2Gus6XV+eRn^U%_A3 zQ|$CUC%F_r$6juFF0wC3Au4>k`=-$WW&FLG!+}pgoE>NrP(mpfpAl&=!Tf| z%ANqdc`^O`#fuvSA0-Gk%r}GtF+qd6gQImkjf3=fV zwfvs?=kt{cP5wiDYoGZ!S^`7-p~!0d_|T}i7ACz3!A^{6C21j*p{Jvxs|y#zz-$q9 zMaib-T;G(fta7r5T1bA)c{uFwc0~U+S_)hs0m2jz16= zhmD&tnK*x!f$d5BE$+CuE$kZS|Jt;ow(jU!@!aCOcM&~7Yx8H!_1e$nKVUjyP)iAP 
z>Felz4;G+MfO_Z^{DifGDgbL-2!UF!(!P{-M)QI71Whwx=fTn@aTMk*FrEuE+x!g> zFSqMuGaq3W>|S;Q%lET;m)kp?lU`P5maU8bn((Z5fyM07k`j4U)x{vV$B$=lKE$lK zbfR|hTI<3!>kgWJ=p-{qZ~R&})6etuEyPE(PQ-t_wc4~f<(}_8eG><9W%%Qk=K&Df zDb6nAATxY;OZ{Dg7i*mpCb_G>jkE(<>5a-}+wR?64&KE8z?nes)$9w&JUD8<(4H2X zvg*Pcv2rH^A)YQ~Y{$2-c8&p?RT%5r0L0f049F$rPA!Qk0#wU&+ zXXfy@rXHb5H*eld?uu84imkI?-H<`9$@m2`rbxwXfc$GbuRU`5Bz=T{_Fs;+espiu znR*AH$h-9g*)gw^dDc+cX##^!Q54VaWUj9d_CRtUb<*m{N}xWW<2t`LX9M8w(kG|B zH#8hI)j#F@L1q4&G!9Dua;nr1RaJHsn;5xres!qdcc(uG_eC=lZ}!HcRe>RH*EfxR zzSQy9Z{A5EB-C5Cwrb8JF~MOD!Clk}dnV1{HWHCjRNb)NES|PWSn;6MwbRoMftFIv zxI-&`_|OR5g|a39&vNGJ`nQeKA^$zv-}QGlcH3?m72tJkMHx z0rb}|AoBuvQRGIQhGY1zWsOwikBT6INg)y@inkDw$7vF2lA~X{qd&K z0qiP8+r#DonG^C$$S?`h_V07<{f;#Lb@X0C-I;ZO8-#`O)}6Z?-L2>{4SHz zH2Yn7+k`NM`k#+j-rB~dT}U1j-%U3*e37Q(!pfz^-qX~thCsaGOE6$BMGT&U<*u%2 z1LvD}ql=kz!U6$*!1p z;W>vADm?LSavV$^lJo@&j=(*-`wK-H0h!*fGHsPB^c&9WH%JqRcKtlspNJETKubKR zsPCD<&h-Nv=6`Q&<`}JGfbg?t7hy(?rI0fBpk2f9$l;0S_!a@Fnfs z*qC{6d)b@z7BxKi&w(4LaDRF~q!y>sjUgM@?pptIHadmH#RpaU3j92wg{fRbuI>2u zYbTS8UtiyubceE*YKlWX>LkQ!?As3b)d0ly9t|Ht{>)3|Lwf{h)> z#;p2DdmWSu)qQ_{nl2Zj`znBP?G}5Z>P}fo-*ta}E+;1d9^#}eF$uLvBEfK0e9xVw zY}jBvdTnv`(X6cIqv7WCwCFI8;AAh}?fL6r^U(yE#0rRYaF+pd%ke_G9!`%3nBMH z3x$+xYDE|Gu01kxH494g3bBouvpveX7g419;j4}z*Y~a8GhVLy>bvs_1J)3o5n~SQ zF#sdEloZ=~&n9t)a2p-iz@s<*J{KFScJOWhhLZD(k~3ZDGFVz7wK`-pZ>;_)_C)4Up~8(Dk7x*>d>z&V9JN+>awS5W+XMygr}fgo&>3OkA94E#Ny0L zXBCB!GaFPce?0&)ftN*8VOLx)ex?g9pb$qEWVBWUwoY^n(eIw+q+hji1n_rhNC2)g zinjL3#eyFjRx7|;UhT~?^4yBlkKXHy;myX-q^buiFdwiktc<1kR`z)I_?EQ@IuWmi z=l{WFs9d>%?$p+#G?M%Y6?R|7WghZES7vPO1FV34>UQU!`vZIQKY8(@;Ks|j$UmWT zjs9f`2Z##0(kFw>jA30(M*F(4Xv_C~shgk|#T2H{Bn(9>Qs%L@S%57K39WX$2%mla z217m}DUA|KFpD=yzrT(SYaj7HD<|TuREBjfrS-DmGc)_na&^r|A{E&-Q*p#rDHxdx6o2mrWVh9@FJdZDloldJ2tNAM2eq zm{|6oGGg$r*9Gq~uU;*=!u&>>R55dB5y0Te)=FO@_f@JLW#<0YV*+K+Bq}nRU#u3zt)&b|BtOV0n4%N+Ws#YO44L1DjH>q3Pq(9DrG1l zcZE`hBs8f+(QGIcDszM~RfZ^I6HU}@4k3~dQide_e<#oLf4}eB-fMfe`+lG9x~}Uy 
z&*NCfTKm4QeKnxHXHBwfcV5Z4Q=C!x`k&Om-o^!SzhYfBb{tA4*iZr{BpjSDJHSG_ zrgr8L<)??^@-}wr^2%s<`DeZS*&(e;Mg2$Kj-9o*%_jAAM&&5GH70W`tK!ygqzcNd zFZmoUr$Tg>j}<%>0o&k`#4iW2o}HB9D09-Wc@U2MvfnAPEf9W6MVv z&Uw@8-%i)U&XG1VEPRCXg=VUcbfvTzLa2Om_72%>US1I00cL{+od-oVMm=yic|Ppx zXV>((p#WOb2b!xT%&97SWhPR)jC8>YwGHADu}XF!pP8vND1+z$Nlnm>I?nDBb( zS^zx3;uSaUDPy18{kqCoej4vk0X_Ap8onEL-EKP~lh0f0yEwqke;$D{Kra+)lNs{^ zy6UcZ+x6L^p(Jf;U@z29GH7^f8?XiO z2l-LiM0ORO`fPcU%P0>Cl2(!#L*J{)uiboHHhIeUHhy%qyv5MCT&xwCflT2MaA1Rk zsvm9-5wZ@K>2LvI;uOE&>@7Yl)D-X&1o0JB1Hc4{@DZ~?GZ(%BHx_JM=**^=qxmT7 z+AgC3S}VurniPA9609bT5vANMsvqKJA7#Dv(ZLbRH(8gFd(glofN6=5#6$iRa;Sbp ztirG`4I(k?$*@;{-HRh^$H%Z41h<5{dcYZTX(`>;cJI^TJ4NY{$7Y~Xkx?>~?ccwh zoi8(?&FsXpra>I^0Is+g1k(jcs}w2=F}^w+&c{dR-tpB*l9h*q%Z>-UrxA&T6D6Au zE`j*2Xl$2m-LNRpQB|#De)(rj?&$-1YVoGurVdLA>hCs1kW@aD%e0cLpyux?wjsrL zG^HwU+T6U##~ql`-bE%A3>q)N^&Ie@=SS6hOy562IsSvJq@*PH!5LonVbC~Q*MUwR%D}&?L$}gV%RrFe39H%;wy%tBLU&Y3e76rB*uAZu}P@t2cl<&RIC)2Wxyp1eiV2bwMXknLCe zJu!Wq>h?Ip)wYET^s*9MQ>;{9A6x=Hq$K?sM?`M7ZTb1h1~JWpA^?(cc}Dst01iNd zp3g781bC+_MCYHgOX;Sh8y+%7^2DVxuHG*CO}>ZKzUS-eo=^E4Z2R_2OsrZ>xPG3D z@~8Omjk9kLh^_<>JN#I|7-|v<ZFLrY);wAi1Oy zV2QnFPxhuz`pj3HekI{ux&XQCN=d<*KkZ?&2vZf z35zt)GEON76g8!FWa^cuq9yErmfTslrFMi2N!gD3*mnp&IJ{F?SR!^Bd`?^y z7lr^pEKm`bOMy(m+jc~wr&N_@&|9QmxgWQ=s%-cY1zQUEUvf0(u@x6`@vV9su6;5gcYm%yz$Eb6x5$cm>)E|p3n^l}H zmGsr>uz-nO{e68*sgwO6SwJ;$)w^@51=!Ir@a83V@tSkXlN~jzo<1&2?=?~4C*UH8 zmd4n+9}%uS9Fnlp1X1GZ<0rIRorqpxK(iiP0_=S}CYEnrx=5$Zyg+9o7Jkx-dgevgBC}pRygK|_^j_UX-NxOnxo@NrpFGdDMNf0w`i={Ll`#3maXdAEoFBNd(K(wq zx_gTApkE`;zN(qFYjpRIv6b1mU;o|vL_4Ayl#=c?x(x`URn2wVtqrON>OKli-*I?^ zvhl^rj1+WNNX*L=@)-F;KVz*{N)29|8D_E$#aPm2&&MTOT_Uj8(W zP(T!;$B+NV?%oG_tU-2(%MT2mg;Y^6ZnOuV+x?-e1_+3c=sD1}O%;ncgiPeo7z(37yZMFTAu*afeRdeoVT0483#- zdrgmuokQHs%-_(!4TD8vceD~#;pP(X`kVie8fh9HIvMdyFxa<58mJT!D?#_S2#hr0h956^8W>Ss zR}{_SBCO#G37r}8?*0g(17Gy?#<+tEeGZ`^kTt&>`%;gzvj&agcg$uct{7MJdpb6H z{4uFSiIkq$t$K*p!zz`frDhVGx10LhDlIjniZSvMzh zRLlIya|=pJUg0By2uiv~k40{!daqesKzMu{_`2kYuj_osq-gH|A(U1a^_;?;ddz2s 
zHG?b(9Z#(P+5W0tuw31C5j;HN zb>s8ojn;~S6Cg z+<1Ic-E*iqk~XyQPcbh(8AX|<*uVcKkDY#YLEu(KP-KldozwBJ;6ONlu{+5=N0l0t# zuxE2lpMHc3<=u=f>&h4efWVnE_RPzL^X=zntTafJ&@c0MO7T-mG2rwKYH{bilvkSe z%3NmI!+~KZyLtPyfq{EW*3R*IbKvVIFk%!BsJTd`{4R|>yozj;K_`YE zkcQKWs0O&_#13evRE&DrS3;FPil`<%Dvl#|+POp_<;oFzczHbh5a){ts9@jC{>{%P z>LfGEL^G%BHCc1WzK2H{E`r=*8PGPDnmIqE@Z_4DudiRd(jB4|lo4tju7rc1>i5^rkD>bG)Hjr^9Wi%py~A{=#u+z; z8N+I^@Y!Xb#&>b)*AUlz$}#0z)w)Mz29>_b9`#u-C1OHK^qAhS{)DE~2c@HI)3^DS zrJsz8s=lLUETKyA%br;5?ta4M;#e%7UiQI$4;p(dkJ2#mY{kEEt9yw}+du5iIXR1C zdH3Sx&+s(yN!5PeSm5zUW%P9oLeZL`9{r*gkS zBOIO+DLa162WKy@CKAwk7pFktH7+FJxoVaOV;I=JNqc11Bt0ZRF|n~}`8w~7`X#-y zpS>115gJBrzd+3Q!Dm@O@5euSOGyBnP_7X#Nh|k#^5_c7DPakilkv?`k<~ z)|rzJrh1y#?o(@+vFp-%y0ZzMD7DCL1MSR$Z1fC_AGsbJ7117msLN*l$W|dQDd-?A zA=6TyvZXQbhsV@uKmN&hfsh`o*uw5&s9-IK6Wg1AcCWGv;!bl$`eH#?_Q3g+UJBXF zw(Uo7OPw@h#>mm5w>v)9E0}dk^T9owHqx&wO({iq2(SEN}NHeX)+R3&IfuMNMntwMi&{QYYsB&*Bl zK2Gutak1$H2A4!dvuzJt6W(}?>b|bD&;4tDPhu^$6bTiHb&8eU>w`yy$k18+Z)Bi0 zW`jAa9%^G{Fer+pr<^r)$XO%pg6ygl75{fgX!-iw_Cx2bx@GlSD)&xJy`JX$+dK8c zfFeD&&yL|s;NdO@f036NxfxTG*|E^2Shb>A_Ar>#X}I%+>asFa+gpNVKQ0w20(%)` zCh8Fc*9>v!L|Q^JEFo+JuQ+Uu`}CP?^5M;!ARzLVFMx7p64O%kB9#h`Kfr7Upv*@$ zs~$&l!-lOZSFFH!45!|q+S=D>crAdl9|?<*qveW1P|lnLz<$QW1yN;N3&-BTug83r zX`eEub}Tvsp2TObA7|*(r%%N;h(alIhkS4=xz%UDfaL&fD^^@arAFzB0_er?H)JU1 z?_Hv`38g=I4ecSjj$7H-JiZzlN-LaG*A&x5E-|Rz1m1Sj$uqdfAOvvm+FSktF2q7Y zU?EQFJv@t`P2-%xzN$~3X%l8_g*=AC4-ZY&)YFq!{S5JK^$QPNJPW6wsz3<_flx~z zctjNe(+#n8)hAbZKebsz76vqP3+R z&b)!LdCa3BaDb}~mD;sj5!z^e=_|)F!-~6D`o))gZ2cb{k zr~f4thXW@KUQi84q_OT3)j@aqIwDHN`V8&o3%AT`Kl@scI# z5h<5uR@&I7+5MaQ4-uLoZsL{lA1e#1*QfzQ(~|*TuzbN8&4eLiT5_{(vcdl-9fqb4 zr5}$fYe@RBZwR(Jm~oR4uf0C|(}(=ZiiN2Cki8;zDU-X@@es5vvm`rD=Zjp=9z851 zf&v4p`>|WsU{Cu+VtyP0vGVSQ3&f&%Bd)oJvWWCsOWdG`kY>M7f2dmc#xH_1RXu_T zs#@bk@}zq->em)hYa_~(O1H%1A=ibB;YXAHF0lx5Gxdm==+b(zPot-`cN>*8Z75AG zhOn2PUiNl8v&pxkZFSI(L%EH&+tsa|;?K&TI~=|wIi#k9fQ{`0CON7j#Vp43kd}rc z8OW$0P-28YESz&-q!HEl06bc4!IpRUUilquYhp*AH1R2clho{x$KR8DFF`^_FWJXHlk$yu 
z{?j6lRCzQ$ic!7y#M3hC!Ooid-n}BPh%a$lTYYwF&UDRQFSFPE_THHCljR-);WYT{H=l;3uQ0YZNb9aqTGgO#e}g)?5I_}%leJ!aS4 za^_^sA0aD|G2|=28)X=!_{C=%lCS5j`KVI1zQpD{=dj(}Yeg5mtfoy`ar_f@*HfoY z-v{zJZ}=qk3Xz;#qeZ7~P%kY5Pb_I0Yd`FMzla|PPV65wc*OAGPsnm<<00#8t|jrG zB7v#!i>&&6q_03;wDX)MQAQxZeWuT(vw2LuXMfGTGi?ala-<+Lq5p33X{kBga+aJP;Q_Dro(k~6 ziFxY2OGamAWM(QOK_&XKnciV=NcMxzJ|K(5im*yOXmQ+5G_xMFqCZWwyNWG7{~xGJ zd55#@DI2@@34)0xPy;-$^P5c+9I6rM50)*in}wpr)#*o#E2=N3s~gCq{nUq-b#-zI3V|eXbQv`K|8;^}2mz9+hk9XKp3Zhh zYjP{xVqf_{)inOz7E~t9s?5|6d%VV}(I|m%kL_1i?@X;(o*J&VZ)WPLB{U*vvD>cs z5_{t_Vh1<*B32~fW~4>ZhbDHEL>e2AI5uEL_+tBRzHsnJ9;p}Y<*SS*U=>&NRb%jA zjtCc*H=AEIG)VU!9TPlBM}4Xk4}sIY;q~j6tNY#ke*Xuz5#Q`VXHx1t?90l_h3SHL zI;HH9Gks5y(02T9eoruGfamtbMHfH3p5lr+)~a>?j?FqJP2H-EZ%@6uQ+l6ELj^54 z)hL>9gA>Z(HVKO{7{PeYpZ}DMhzyfHkygwJ(a_rba&?NS6-=<9wAUfTcYmrMT{JJx zhXelu->&#qF2Id2B|~4|DD>B*OS`|SE&h1U`Mq5D{Bhk6m?|$}jZ(wLn;)|u(x{yD zzi!)%&eN6Lj`-W2O0d3Q&CwlgZFz^BK7~A#v-QP_yzwzSQ66W%T1oM9%b%nxuRE2# z+~V*EUz?}%N7vsYPx|0jpAME7vVwle%wNGtD++shhio z{24uUukhwg!9@8P7Nh>EPjqfjU?NEV-S{s>)7qQ`5cj=R>D{n4iMbja7+89uhTfQ_ zDw-FNmM+DLDeigCL1>-92|vj_`LRjyj?nn9=1}@FW0g4?Mt9v-%^y*;Vp>X3{gibm z*jSW?I~)_~vtsSMdj0y&p2;NQmX^`qtx-FyX?j>N>KcxZV`yi=y|EJ6`K?>hD~oN; zB7_sg&e&(18thIPjtv1?7>;n;N4^MPBV_k&w(Y$8W?%hnDFf{6WB=TvpYD~>ITb)&8BNA!3#uvd8d0^Oxj5=|0ou8?Jq)^w_@*_Z# z>8j`am;ICrZ%CMPFxRiS$##A~PD*FrF(AwPKqZKcFaQkvEY`R(QTC#0rMe9S%)oV@ z6Og!(VPvEYXc!fgWhDRR;N0mYxo0ZU9DfRcED3PL+|h3qR~u@rsHxMd)zs?LB~NK* zz*@!GNe9$jd)ofkV_0naE@grGx(n7huM?-bxYSbfQgqkvy*+4(!Db+LD1}@CJQQVQ zmx3eM^>Oxk-W#fABz0_1<;mr?7k@aH)eSXwYS?+{vdZPbwXIZ(DMqmg?C$`P9MJgQ%VKs6nQ1RL4uv^@7B%Vu69V1`aVxyd1?;&!WYAyZ`xw^=O_F&mUBOJUuq0 zR{K)x_J=2nD))?|S1KEG6QzV;o5!o|hpt#*Mms=I(V&__C?Zj9&aYfmTca+*$EZLcA?3q01>fH!GJzDXu1Z zS)6BBlr}&_E_j_6rQm=app zYuBzx-|yH5tbrM^ex8GGx5UQcy0r(12wL8QZ$tK%hN!PHo?JQhN8_P*S##*A z%6G0Uy>*8aWt*(HEN9sNN6RalARAC_9K#1|Yg=hdL`%shBuF%9c7OjcV5*eX`ABQ= ziPtV}M?P?S&+Mlwj=HPO?cqRh?3iU?v53AR$rHX5K&XV+hI1G0EkAg2#9{ZHsHYlM z8Itq5MPi}8QC6_>8+ZHubZvmQi4r{(^&;M?{p+>6u)L{X4O& 
zg$K7|bZ2lFS#xgNjRmRB3!$Qki1@-JSM)i461C&?>rrjHTb}AP_|e$$g7b}Y1i`8O z!o9T?p-WYkz4ElVQ8nG;-Z6(nqeOw2@$}HOJ45mgOuFfwJ_|BOTefw89+LftZ4YNf z^b;NV$C9tad_e{M#kQCMPEeDT(3-Jtc=9L^cg0-G`LDM3s2-^0_2?{^>${Hct&%5N z*A+g|EvSB2_kLC7;cl`iM#F6{N#&Uj9DP#We`Bv38XYC|{Zk`id;9C`I{x^=>O+o4 zt0xF3I~M|m97?A8*3nFy@;`RpzNe5Q7Z3KSSB{flE?>OIX1L>iRgJO5r$L%CQA6ME>2_z6RXAG5(<^I#ZI%S&Cwwkunr}f~Y z(Kyn`>S-d8hE~pM?4j^716%LeL^e3QA79=L9;+biy|OJmwaN^nej7#PCa$E4X2g`u z73Ri)LD@a_{qojn$Y^D(%^qlG#`_$9x6Vttfm z7HTQhWpIt?Nr6ZR_=tUZLL0!#*-9uO%gXYs}v)21Ptr43Tu1IZAH zZ%M$2%eWfSp5R`z^!fr0=f_89416J?JB>YVXlMxaS>`ZBDl?(K)|XKNOunR{G8GDG zus41Tg~Oy))|{SvI_ehNvHf!85^eFtXXa{P?W z(4oY4dCC!2 zed16b&1a($`se+VQ4S$6eEucJY17`D{{9~Cw!E)I7r3Xn%;sPs#k8!unoHNT+$3}& z+fhM!H)T3i;im_5Q-J&{3>c8S zbLZ^`5Zz^NIUH{p=SXR3ElgG_Fk0Tx_A=gnrPWcbXE82X4OH#J`=Yg;a=L?bl>>(! z;j#nIX)@)ZKkFyN5*1Cp$0iJD$^+EgrbFIN@D@97XAUaED8mjCbk1 zKR&N3?ZKcLm*IE0TbVwOR_90mB6TjmC#23>9FEUP-MxF(&q!LAq@yu#oM~@wE?qub z|I(gjbcCh*g>;1wqwz%JW6y#!C5%HWEutQO$TkKMqdo~qM`fg~y_8wZvkfG(bccK? zI3zbE^$!v6zd<3h+`8*$OH~#U6_|R&&XEgr#$Cv0AGO9{)aZafBc;|{F$pQ=|sC@Knnr0&;8;hzUmKb#v*^^CgyM853KyMOQAk?Ymf;(fuZ(F}l^nJOl z?&|K?AJ?l(lF}^|jVW0n;CWI6_k@m@k>UG=546|H{(bp=QJ_FC;4 z2B>%8FLcDhgiDuH6j+Gqt(P*Sp$WLmxEuwgbhbKbp+7k)ogT0Jo8 zK^Mymv5x(}JH%gKBv)%fqQR<%p?fCC?R__xg|+JQi<~=pJYkx!VCCtLUqYA@dS!fk zf@viKHI2s{*tM`r!&YXpUWTARAtGQ6=n$o&vDE(xiw5#;-%lEo^lL-%1MA9(M@)2P z;0U9iolfcLt?X@X#RMceQ&1Ha?G`j^Al`zkaQpT>hO6CBAFimq%~RtMoO^lIA_gIP zj~+!%%ZO9-C_TV37PzMEJQV@x12a%gQq?S8+UZVnziM-Lxz^I%14TAkUbnp7_?CGG;qlQP(@5MuxJ3$19kn5 z_we2|I$d9&^rh^k8Mf}#X#DNzac_M8ep;>SZfixxR8h7IivGusegXq{V_=*gZ|6335=pB?y6({w`P7 zyG)xl!q|9E{(8M6y$EDO#M2l3goe^P4;~;FS`0U%c)zGVzxO>A)J9BarXQ61i`gmX z{x86Nw$ydk-VNVoHsnDD(k-47zJ@QmaiiGV1!YgZ@hr#i-RN?=a^)#rcb-EL1@ z$WjsL=g*CHWxaHl<<3sA>e<@&*l@H9Y=ht5t3pA7S#Ta7jMYZ+yW~S(zI|g7*%#7X zeBdD!l-|yIq}fJ_ML*1^Z~;Fea19)|p9$_|!}WmyAOpn3#n~#q&>P*Y?aaYV{Cg^7 zKbtnO4pBgn#ZVDrFm$YTd99)3Y_UrVmW;jiB~z7^~Q|(^6AsO 
z?1dX}yCBR!Yax#*neV98ukpi&U$EO~XN(LDg-Mi%w$S>sG63NiqcZd!I4yo~%R{IH|VwcUjnf{OV|(ky&MC*{IU4{w3km@#C|o0PCAp zTy7%&$@?=t><0-Nl>rbgm;rM@!o=gPbyGc~)+nYxEz__>TLN&JilQPc(2BPoL~akP zLM?#tg+ZNEmso-zWCa}f!tc_=3*CPG$H+kuAgPhQZjI-q6NYSwDOh_m-Q@ku>0*uX z2QfhapTBzk{I}B{6+-*8$=a`#dySIIu56lWIe$&(veUp>mCH8viODG{EUs9n(0*X0 z!{Bj(fJ?w!1OZp#rF@i1d-m+1uV%vjt2`QNxPRANF;OiHr>|umI3!#VhtV%MINh3y zb9Cs}kR33<2+0DyuOJs*&#oX7n9ZIs=1j3|cg}MjAhlI|L;agCOW!Vwj#Zn}`JDKY zrE5#Qb{yO@HE)a1^{dyhMt}7)h5SIb&r0D(@la=-nxY^j^}`cy@18vx6&e3JMN@2+ zze0CF^#il$h%vv=&G7#Hj#{nt*_?n1`vXm7*1a&&YQ6d48K^n+&fZ6zBm z9y@{TVM&f7;g!WZR#&$`i2cmMh?Ip&KucMS2ox ztOxct0yIu&9J6ejl%Jr%V!}qpF!~`mv=Xy;I|%M0eP!=xGGK+k!nguPV7cXB+tNUT za6nS(c2zvavvP70g56>89^P4GFY|}gm1T;{G=M=G`ltK*pQFy<7L^4&?~*FSda~T+ zhOiaPbGzVOJARi~=&$dTE_szMMQ{epcBAzSvD8s;q<;U*T8+k)L_)Y~LKgT68X<=n zGqRFyY0AfEzi(4sv(kmK|6c>V(NbYt0N}% z&m`y{L_Yv%y{=2a(bz55JzRaaqhN7gO-$`WY4ePNSrZ zVmN95i~dX%+IuUtN54K=_C?u|L#=p3#3jM$0j8aO69S=qclfZ#S@Y*#fsuurhYJyE z5d;T&2YXAtCM>g>?y4gj0Bg~!;L9j|?6y(X$Iq|$apXcFZ*Kpt08mZ`03MieoXmf% zo-kD@oB4)3EHhFQ@pjL9VptOi9xR(Sy=eUonT@}}-Q>xq$5LzqNt1XZrMmA2yA#=wtLxg9Z&Uvn?(@mGCQSA>stYoXo7G1Z{Z!d=zJ~XW8@T&#^ek z47KXnP}+OS#>#KIu9&n8J*sfPbkpHu$2>_kcvjK)7O|#jhdl72{f_h+izCW$3Puf| z3nZm19!xosT%hrJ;ttR`pJovzdM`C1e`dpfwlQ ztX#S@111thZ|@10H*JCju#11; zEAS}a-U?S~vwO?Dy|0IVf-$DnB&OsLW>_;6kR#`vc8BWuICv*zq90+C?ZI$5Jd!Bj z{Llj}yeu*^&>A=HJE^1~cOy_!!7=k?(vjY4#78%GmQ$xbfC29uK2L9H^?`4` zO#Yg^0Zp5b8`HF$le3y-#YW#g{`YBp(a%m{Dsl2(1{325&%}zs_xBT@@-JSzx_?$@ z(Sb=w4oDrzarPx9wjgQos?8sxt=;%@Pd=h*QfP!Evly|;$ga3nzt}J58PY5fGQm_s zqn_RV~%o=^KqLZ8Vx7$VjL=OCSI2h`)Xof+C1h zffI``;;<=q;k#GdGYLLSSNA*GS;ld~CLDdSe0e9)wLMg?US-Vu0Jxz#52X0H``-BJ z0GL;L%EzsKF)28h+wlXAeLVI{-^Pl%iLU*44Wq<2q)PB{b2|uWC_M6o1!C0$Dd;*6 zJ#0$mF@e9u(Sd4&0XO5Qu5#x%F$M4)X?`dyGFKQK74^2UG30B_60Q(xH2C+3g4O$| zxJ(0^4{-B-w?DWl{H;&DRjSQ8W_FyDm5ijZ3A|MpMK*bI36USPKo9ks*2_uTnCflT zPo3UVkTWD~wl+32E$?NK;$(!=)zhk{qq)6GX)_h@38VHP20@G{Dk#_@Goz3qkUJ<_ zMpnFfk??~H067uNittHd@ywoT$TZ<74J|DKJAZqT??L!y$^KxwfZb;Ktq`a+EQo{y 
zE-8s*lf*21<3`TQk-nWL@_b|`P>g3ICnczNc{{#jFEvT7hK{-UWdTm(@Svwfzwbd%G zZVl-wIuOnvPL%8e2N=^P4IC%bnAE_p{&}pB2#wu%3>1(})yhp_j05K-4t9oe;&T+c ziteQoA%_od;y?xwCFmND2Md=tMg1vV--~ni8>jn?_*vKb9dJ|%f~{vy9+$_N z8q6_CNVry&CS=ohhp{xoLcU!EE1jOQ^+J713*VX!9v!p0}3@omCJ#Ud&pb})CVeN+1X8; z^0}hC9G06&uGBK?LP8K@UMub-(!@9O3(K&yw3OHkti)*?8u}ZbYpy6iFK@_2{}$Y! zW5NyDf;hjvsl9mq>eZ`Qs$^wlF>J2#h*;P+>&cV5_T9M5r(lRmGdvh^ubp2)O{L+*i z$iSMqwm&a==+*`Y7jEh;IVEj$e$zX3lBmfhiz|INh7xX7q+!Mt%}2@@A(b-IamsL@P_sA$*>&LE!%6&M512ws!`m!)x2q& zGVD+6`8H9z_x0jpL>!_UH--%xhE@ggmM64YCyt!x^kWH8?HS<3whs38crY{g z`3?&b3|k$l~@x*F{LJAODuaa`XP^FA!J|C7HKPyHSgT{mB+|4 z!1v}TZOix~T)5Eo{rkG+0 h<87A^bg`VHIbQgy2nK@}FTQl9gda%R?qMJ+3YH~x z*Jy&>sUS>u{_&Ob!)g!HhorP{=!g*@*n(#sBf zj4m@nkX=V+YphaLSt%8Nf6D-aTX*i9IljdhJW^aVJN3g!Q*Z}CJ{X+?y%V}pOm=1v@uq2d1JlZK+1YV2x+LZC{#979Q9OS5|& z8Swg0*e_9-{F}0Nb@^D#IXS{(EsL?}>>{=SpWMC!Pnc6y5yQ*9hlu(JDUta?p^eqn7{yVm*=9L(wbDYIwKwy+3in{mmpO;WVb-8bz9X$#uf{$;t+`+{}= zgc`40;>Pp~in?x-IPb>4y!&Un#EYt2x5maYIy{Da7V9)gj>RZW4u%lkH@RPrKfA>s zStikrty2a`z$Z#V70w5D^XJY<`z}rIOy4{1NH74@b?77m0&-D>jvafX;RQiVn=`^fN7u#Fg{F zFp4>zZ0+kWKOy~QW?gpM^B;FQkFXFm4q{~p^_=cUXs?KEU`^}K5YyR@!Oue`lUuMY zNy>zE=E1yKyyuY*4b)xRO-AKO)&f(<{^Fu~bM4+xC@+`F?A+VbmB_g3t(nCV5*La@al4jVpR9p%~=bB-}=TyXo~_RiVOyYp*YAM0OD zukAyDxwF}fLm3$o$0ZZ`?Ky!qzrLX%DzXH<0H&&1CcT)nFn_*E0$;3m$Q^DJe4pjH z`G{i?cV;ww6$?u&$g;g}l>LaD=lS!^wC?4)!{*!9V#8auW$v>duh09K#=J;UOThm=z?t!B)-y{-3dM;^5qg)~5LTgU`+3_m5Gxvshpw$B2+6)Y()f?~T7rZpgNa^~d658oGljKn=Z zJiPPg(iUc%#@tl#YJI-nFj{`M$)u-1j5#%(m)DDG71Cxp;th+p{g^;Z(es>j%wujY zPBJL0sZoq~$`SI{{z4WldUPN^f0xD!+2yD6@)T9mc_d%}D_6#=8^N)Z48)#Jb%^-P z)dceF_(|&poh2*#(n1~?dnR#7xGBQ#LXpF3e3xVVyB&SW8U~?ax?jXLGMKN2+P}Wv zd}sA2*pYJYF`^j4Ka5b$zL{ke6-9Omdnz||*43zsDyq}ISXbLqB|bVyCi}}q6XwO3 zcQ{E$hldwB+BQAy^e&^bOC%|H`9;g@-U)&+wO6>|C{<|J4HYnCP#< zQK|Ux+6Q|kSL|HJb4GE=lcIxP5bi#D^rDJ-fb^!^+fY<$$<3y6@b=cZ)mr2|o75eP zgrMiZD4R=eyitY#dOpQp<$$S~OsCJ5_t=K+Zru|Gy;w#4ws2*|hfrMugSp;kI+^KE zEvzn97$%d(8t6C3yI)-Ea~avXH^y*%))CS5#&wxmfy>O~->RNwsoy#Db%l8Tcw?0; 
z#xHaUifO{xW>!xXz2lMB-^~(z*oj_bB!9{po(V_Dry1xz z9ZD~B2^QmB_z#%1vswSljk&;{A&QNhuwk$;8jsV{e%6iV89Xr_uE&CjGhhsMEVW^D zK-uC!I;Rp6jB-MB8tgacoKIyt=%Q~gO9s=R=XdK-I!R=2>x4P}^dSEzKw!9cDQ@oxI zsRWf-_~h@ZoLDTr^B=peW}#uHdNj_M3)(tQ&j@!+#-vyck{uL5bbjB|WF@+RGAnRC z=`TUQ{EFCt8>+Gryd$Ov+2<#Jxde6r*s>~zZuTTCPnKUp6L>Yx*{Xv1z>&@ovS=#`BrWuONXBCC(!^ir zJR@u$c^cXtZhi1E6ejw5+Y z>6paLBTr(THP1M+hVyq}=WaUULAqO;?9SwF2Dzs8S(8@t7XpaCX8%aLtT$r;~r^`DA5xid#$#on7^dF546f)H_(WX`tHna_%cRBO%i9X_G2JFGT zF-#*FG2*b+lx@R(jPtE4og-$Fx@9{>{v7tg?F+~_WCr3ZUZZYTUq=3WY`u8E*5)>Y z>8Dk@8))c+%f47;NEv_koMguFw6M^p7IdN=(y`chMzgt!IIOw(wT`g&qj zz;xl=?_T?=R4c#1%&a~#LsId#&WI5uk`|W+MQo!SJA7DkdaZiU*oW$R1_t6mZ=UiP zUlfSSx>_|KGM|bOO2Wh;UT2@n7`k`vW*dKy%<0IHs1)6YeyT5RdCYD4=jKufxGk2N zxAoff>)l?gGZFU^#%v|ltZWn~vQuGd$^i+vVvlpthYEO-|;^YzbJbH!xWdVwuu zo(?xn%5n^W3UQsYL}tS=%a}?l7poKYvnS_VHu|D zCCUkpu((uJcc9v&;-(EJRe~kl{#8kIwYdEwyKUy7^5K0ekjFM|Non!>kscs`5u%w| z7qZkA$rn$$9@qERRR8n9)LS1%6$kS*Y7p<>Br?Y

%$KK99LqKYcxeyZE0K z{g~;#NB&o>$igf%xc33mu&FpsPM(&&ZPy~f-1KKJzSgm5O)aA~CSG3QhA^gTbIC?K zUx%=&*MgFnPM>D!dqryY>R&gEPan2ZVd{~VO_@Txkk68997>X9-)l&EgJaC(ek947 zT`il2MHg+z8S2nEdi$pfeUdA792<8x>+`=Vp26C2qjsmHd~Vk1_9vT}y_bDV3YZ!+ zAaT)gg=4ENer~b6`DqEq-q_tIsUVIXjkYi)JpKdZ7ZnB6YT_%R?Q-9ZmwYS9E*nvo zk~z(q#Y|Z?LXh!%{93F}r1c-h7YR(44DGwQKsADFc;X`N9!FcB>MM= zG}lSqe2%Tz)pBi43QLluT36+)lRQ)}@!stv(-NxC4|$FK@pHkL>T`eV0FDn{V8QfJ z;MotRR#8y_)Wk3Z5GBr-v`pG>6DQ0KZ=ZL2gh!4ej?&kziEJcy_S3bQ0$0Kz&$3zy zL3M;h<={HLRV!0_wr^LPspQqLVx%Y>x%UU^zUvy=;FHH%ZS&<1+;18l7^yfb$+%R* z^a7j|QVzZZ9592R3-W|8x9=mIvS9GMeQg&%OXieVZJ+7+WLvw_Tk}MP6Dr)2%`$xz zJ19a@7;Wb`929Z*aE@m7_rsNii>1Eq%hnn4s%3|?%J{x7Tu~yXIY)nIaAuHH;ths|Z1?$YCL{jYG7qT1WGAnKoL_w{@9z^RDX%uXlqYO2xgp*ClGqiVj?m*eh?TlD?9EV*4;9lV!&v9~K^X zDyg@0()!68hdi{M_)%+Yg3R5tA-gVa_}KZYe#z~W<^zLLEzN&!IU~DJv-^SLMK3=I znNEsmaOBLx-E1Utn{pmK4^dW$pZoJ_tC(g%dDz}zm;Nzwa4KyZHeGU(M4g#TqL!YL zv?T@e+j&ckM`w2TRj^Lb2sZB>zFoC&<073#7ej^33jglUy>=TRI(Oicp~0_fi_esK zS~xmpSWkZ_cQCWB$Q`vYfXE_k97)kh0U#y0OOU5cA8(`*cPvzVjCNSGMd^~aO-f2` z2eMN857O!@At!sVd$Cx-Seai=YKC2QZE<(&XKn62acE+=$)q@Ttxyk@NU9ZXT-iV1 zb&1#&Tkjah{=NG4YEkSFm(r%7sOazc&nhOB1browBHIJ8rX6pF%2nWxug!V0!o&*3 zTX-=BE{ghJH=#hM&wSV25yL%#B^&qpEnFV)`Bt%^YCw-TeX zrQXT=pWWRMwXIrrb4lSbbC+hRAM*?!i=zEvMk^N0Z#FXkPuPy4j)UH*-@xLRK2OU&%!QN^J#wh3-`i``ORu8~kp zJg>3r-ol#&`%CnO;|8tLb|_0}TaQ0Sr|4H`#<(-1hh34~AK~}CIPt)Ypn_^~%cX_Z zpP%;2J)dE@^mbUx_sDgspKrZO$`g|(i2XSb)wbuAt3s3PpW4IoYV15ejA1-TT<@n7y=HM$A0( z12Qv`j>SYKAyYx?L-#&i!8wP+@;`Kp^3=Hn`A3{A@mb9Nq396W_yJ<8 zMbsWn!fK`{`|Q}ca}M?)8na$xxGKE}v6dcTGbQ%TtI2QG9J=i4?sy1@uO6Xl0`2DM_3!nSs z;o*A|0F04Ln3%1h;P5!#Xp%s3rC;gh=GNTJQ={LFXV0EhRjmX-%8nfOpD*Ay;O||X zIqmOzrgr+`#{lwv73*TsanCu8^A%dN>8X0Jkf2h(mU}12qosAZ z5)u>=(t+^++g6Vt>C$n0Gg!-9jWf z*8T6l|NJ;Z$9`^0SW~B=>{Rws{N~M0Ic_EUOx8a1%02CrF5kOPAI3C%MKQTQq@|1} zK&p%G?G?ulKvdb|-pb0gbj09s`r(80%3gmzO61q)f4S{u=a}T7_PrA+23aF18*mt8 z6lhKMN@<*u79(bWFZ=q(h=;u3IG^wHgtlOoI~fbw>o)wh>vGb}ZC~uQD_UWws7<&4qDW$=QP7P9dk=$I=Puyp zgUl(&7+atp4Y&68IKJcTWTYW;@?SNz 
z7u<$yuPHZzSj*gd@F1aY(6y)=#j3~o3>yT8P;m|H#PwLN6Y=kWvK|x|aF}Fc|Q$a_`0cqN-jGWacSe{y<1S zksi}dw!QY?2Mg98s}Wg;K8jS7#_J%RcoyH%nD|>?2l2ol7r`#(Z(Y2WA4*%>K~M@&cfy4A2)~3mMfdJGaM`M;5QL~e z!HLZ#cHvO*SxtTYEi&>y{oPGD&Pwb79;%1Ie>Z=AxBq4RS8H(xVP*qY49Fbj7#I{Z z(LSb-B~t@qO;i!=0_#iV{>1eRBSz;51~Dj2k;x1k=hJ0pzqekEzHMd>|9RM9D*wyn zwS`jHv$hh`KK=JxnfDkTi|TCH#njqVPDm!h{o~^D6U~!g=Gyf5%X)1_RnP#$Vnog9 z!!`$8X89d8`x9d_PMx~Xq#z$3wLC-4-v?CD0s0$$$6q#z-+b)tZ_)ZCuFt+HcdD7| z@vz{;lUaB4*8Nj?qkW$DQli-|r#y$nCTgC?mw9-`Ykp`?8M3=+H*y1sqFYi%m80~p zn%=sz&bxDAOsVqo)?bU}7ws*&ykK#ZZgk8zd;Q3=EbSRjStAKj}ngVLnv)@~q3bBm-1_x$6CW5I>#4|02PP z3q68t;6&Q}-vY2X+X*&1Gk(}Zm7mjIUn^CCljDCcB4eg)5`x=PoA(DVJVUT#E+&- z4|pFFrITtqXXZ?^xpSa@V9nST2r&Et4v@DbS9loN3tu4YSGZ76{mhY#_oS62ShOYPYe2ZVS;Fbb_k*hBDs(9jT z58;hRjaG%k7v1gk2V+1@BMWEjzG^(yJVSPejPEcrhj2_xTt}uerM1lddm?@V8q%<$!Xd=|Wy`IN8jwpyr%ELH`{5kXH zeSf<4V?viq1Z0@!Wh=}1ZB5-whP(}zfuu43ETkfDJga#&???SvMk@V;3pF$O^9A@W z-O{<=^tkF(h9JqNtlo4G-n+DTYy@M{9=n<|(o4Rtj&mT%9#nD`COJ+{yWhtg5XX3* z=7x!od~mk>kJ!TncNYLKEMBa&H_>tC%t6T&6z()#W@=#f#dEEZ{u2|PC;UIxz7XTW z%3HUN+(=|VX&*^pV#Z$2Ud-hnBnG5ZE3;DB`)F6TsF(y}T>;ePW!dEzqT5=+1r~4& z)PO4-sH9XGFsWH$``a0bn~P4Ef}F9G|MQ^-mi*Z^U;c70tfN`(eRiOKAk)*a++0*x z2rc_iy4UQ3Gqn+moy9?bz60Vdy+hp7&r|H|{zVzeS&)x!C$^rY*RF+e6AUOx21kH+Ns~C^ z6(76)$?0bH{_|j6{`+7b(z5m@Zv}iZ1vNk){RfQ`7tn=>p9r_rhMPlZ7u?I4%f{J~ zJ?Gy8c5~-`qhllV%e}cszvE(KSNr+78>uzwGumO~$oNyKR9Kl;U%!|%S(tX5txTmP z5=q(p-}kn0u6f$FeG61|b#rk4M9Y2i=2n$E%DQe{n0;(y+t``Y4{;`)jm!#NK**CZ^00jxTWkLyE)Y&)lAlK;p!S`cHv0^bwU7ELe3)|%<-M}$`Vd-Q1r;dORAaJWr^`vojzZ}- zLn{fz>ZO+QVHBeHw7KanY#ft0JD*NbO!rae2qA0q=(ANaNP`gezNWvh7MlUssRav6 zhYo#-W5fJsFP*m5NoM`79gYS}4ozXh^x)XZrCj?8h@XIrR7xB17FSY1~?= zPyS&_69YX+!;;F@v!2dB4Hi1S5pXoLu8tkw3_^JVE{vuwjDxqt#0dV9gZGD!uRn~A zW?EV!sTV&|!c6kyNx|_Hgc;nFu=As>?Q`7*GeK*~Yu3@}nx=ZbtC?$M@8neRx)o>W zXs=+8_3Qu4m!e&o8YW=)-)j+7hgP5Yn~B7u<6~6RW|h#JRaJAfduu$7+LB;7X%ghr zmpVDTe9v<^fHu@l6P}DPKEjP>szW2wA&$>E8H`=^#XXf%bdm)XKO%2oY&*?OueCHa z1z(@!1?z*}pL@`5;@&ubgbLXy4gJ+z5esnhSft zfFO%|Q*_8|q+inJGlJWe*Wc= 
zM55->bo*ssQi}7ZAv-NEPb_nXG(s(^@mNbkBO>p%tW+>n3z}dddLu<;Y}cEnrjG1H z9&IXyl)g6TQ8siIKega$T>b7&TZ`)Gsat~m2O{TG_9^Yew9vh*um9Y(b{}Hd?L%gz zIMI5py@{+Zuw$>qGyC%YkF7U>>v`SU|G(Jg5E7wM+6fV5Oo|4PP$@zsNs$IahSFps zL(+tV=0X{hqLSJYr8F5UGNl2bfsFNkf8w0ydH%28_jS%b4sGAhaNp}*>sr@!t=!R! zBn$|^T!u;+%y~~y?ta)-2M>a2vxVN@etMOP7Atew?B#v``ty3e5BhWRMcJ!{or{VZ z5gz@YGgS-z0+MTd+TLY}E$Fr96=&a{kid-a+R6x^>aJywi`FR zm^Ly~<`6%hxdO(S@H8RHS_E!CQx1`nh2TEw{K9QkwATR z)`Dz-h|=@5|5VgD-=}C>gOlAxr^S%sc>EJ_&f(?5*Q6u^pT##pZgBTMFaCk-pBL{tKFOTceJHpZ+v4caCC0|wC%mS= zI{yp6k!O9nBYc@i=uxTjDb>P8aIpkR=ySp>3Nk@mXQTFg=xGiea&~kS_9Ws2_wX)q zp#_3m6%j^ z85*YNmS&ONF^?vi2^h_lWx&wr(xDcQ5znRGrzs z(;&|#{1M5_`QpwyN2sb|C64p{@ASAGZYNHq?a$kPJ?}UOI-=#Sdq_=%$c>~pwF~*$ z*I|VM^?)?>*u3~MC~&YuYkdCBPsO~2@diD5_fEca$$Ca%L?)*ePM=ZU*S9ymB94&7 z#GOCCKzKQ}Bd(^&I3;Rd>?N#Jzg-IzNVoi0bQ zFveaOo4K{2!fKJ(8-z)cDv@+EbZj`2E40-M+q> zb`B$czYS6L$k`p0xF|acI)gC5eNn%_MFxHXthnVmA`Ie#535Oba@Hn8bu|1eOi-8{8Pf+&CRX2SA2>w?BrOALJge$-7FB*&G231?{I^Y@CqEu!TV<_9x2^2CYxwXZOF+5aao)NNh)AJo%Fe@_SF zNaV8<@=Mv~N(BXleY)}>E0l^mXHDQ>%*&grqM|ZBv|#J8371Ao9H5VkN0%^*5e;ml zn*GAa_ggXh;`l<$y%fr|p^NhP@gG9Mb7sAK^=cEF2DT*;2@*3#zs<6_e+zS9N05vkA+cW-T6 zkyz|7XZ|N`kGl8;aqSw_Wwi~QYiiHMX8+#PlvgM#4$u>CX?bs<$n;V0UQ|J2R8&Ie zD)5FV2th0a5`4SQswn30QBOn2chiX2ySKZiCUgN{l7`Q1h=-aUE?qfVqSt<1kY$QF zww$^0ha4^8@~!t(l82(5qC(2vDr^$(j>48-Y`JAIc_d@7*{BkF^iWB%iSU|jbtAHK zXs_YX@`lt7oY1eMaldAU{E(mQLCEQX_KD(5@G8?8b42{f!|7MeMi15>A&za>pCtWf zzuzh5pXy-NI69o@u}XD(IB#fEqlU>;g0v z%Z_rcU=mjV@;O(5agG}{Z;su(5$-m(_q+77UfU6W&W!qV7Pk!SKpO&!VA9$FRM*z( z_j}E{C$;{a3Ak?C_MClZYpXydZ=`Q$&FGCPgvu*iGMO^Vefz{HB?A6xYDS#= zmytHG@aimvUse(jla4Bf2jDZi@Bb+VKm8SSq9*_86x*nUix#=yCIwIVvuKmzhbSm|KP!=7})dEmHj*Y^f%FV;uQREvb3;~Zxb#0 zUH|>3u#Zxi74>s%*9men&*Fh~Z zA>@=~LSSchGvs|PV2J?AGiHIaJ&K}_9~Vs(5$h)U3x$04YaJo5|M$UaNRmY`(CAS# zvjqNIw6$2T;Qe+xIQx9wn$T?4UYn;Z6FucW}zNjM#N}@8;)6>pu|2U>F|;K9HKv zLhIG9-{brDDL8#DaCaD2Oc*(GHK9QmT+PA4gKRNWk`P7_qqGz3KMV~I0-f{w8l5{< zl=hM(M-3I>@KUkIZ{A4K2SUIFSX*}>yZ^oMmRVztFwrtFO+auuR`v97Cpadgkx)5V 
zg;CwfGdQwV0akMqIXWtLb6=H>?($E*gzX3U(lv|m!ZktQSd1%dc<%g)LI01$L8!QW zj(6Pqe=k6ZStd%#%5nO+Ah~}J=;^uF-P9BVi`WTcx^zfruUI$19T@6;9QNf<=XT9Apvfn2J9(HFb(`qf+q? z$8`WcUDXDUQdSlldy%)BZRGpEVLl%@jG4PB2~ou?HoOhKXSP<6AF;f`Camf#yxIP|2^wmyc$?lkA)z;M z^G59cU{C&jvr+9GjjI1(f!1fN?J7B{GhtB|*Jfm7+|OCckSd-X5w2vaU_rx~C(L<9 zwm@A(cL89kq?M_3q%6XG9!faIkaVBloEG=_Mrn$#y6p3&B0BF}>h^^d8pF<`keGf23pW!UpW0~5yQfCaFWCajYZ zlPl0moH~u-HddU67+Xee38^+kG5*N>_+-6ycTa{|1FYa8QIyhrf8FJ zewFI_Q)%Rdtb!E5vE#` zWP?EhpaJw=mhLa=t-h$~^kKhUS5~urhwOteOp_pE^d(wYSQNLc(BQW%|LkZnYmw3{Q-yxle{l%Xas53I+ue`7`G71!yUI$v%QuzG2mWs;9RjZFMG*WZ$U2o%LhQU(nov9xNLe$!}9aXU>rW|dVTdgP{k z(yeP={qI~Wz3ZR>17a9!%Ucxg_bV!G!8C^NDpGOVu|wD#?K)r!Mjfhd{&yQlnxSI| zh$qtAva*z*k#xY>!t{;5pUaF=IshUL zwjO?G61{%LdBoE7PZ}_YKl=lk&uWh=y27U)vjJ5slqRRbj?sFm*VR^G35aq)heaGDZgjjHhV zX{15arP$I5jxq%WW1+DsD<5na(&>T3H~aG6;3+-YUH|+;@W&F1l!hT2P~L>IdSd4@ z((z`RYL(6sj z!|BYLNUT}CTBq1)VE_KnXU~$QiyU4u=l45UZGOcU=yk#}0@s2_4eFQ$d|KGKYe_8X zB;=m>OK|#qqG=BwnnnWnLsgZgni@R8V-2EC500CTA*jZjaH1SRWCHR?6TaiD_3>Ja zQO%k)dgJ|jq6MP~&0iFldxpzg`@1gkX%ooJ*c11wMC3qxP_LW=aJF|ZU3g$upNelZ z>4oWsE3W}agc*Jb@cy})6pP3B{&u$sKE!EiRlv>b=g<4M?InZT&f_N)2~e0KdPsOC zV1nG*s)hm61qAeacA>dr2XX~Nc0+++IM3he?Y?(jBTC`sx5l!2M*IQa7ws&}>gg9mdz5ePv2|>9i>Y`u;NUt7$a0$J}?bqo|;FNx- zA&Nh2>u3w$HZavzbs0&rx(-bgnAKTXT1OQ`b$h^Y{D)7!938#WRk@61xoek>mlt)z zlod&x#B_19U|{Jv?BXkmUT#2que$z`@{*X&Lu~PyLHv{v_I|>oNfK926S!x6q*UPH zUrk9dmdJgx31~uawn*K@gZZlRdu@rW0}(!I5HHj+&n`kybuZnCx&-|JH6aK3rr6|J zBR(}XF|pQjm|C_GEeJ*+Y{DKY!L;I=S;u5mU)Z6o-qxZn4+#E|J3ge1UkMeS=#QF^ z<@0O!pzNr}RHq>!%?gq-N9HOB1uo#mB{ZO-?7$}0!E3xOOcEF+o}8YJQZ0}?^nad| zj_I;xyb^buy!eMi%pcko{98G}Jy3`5yk=1WTU?$zZ{O z7|?rgUNl*W@5gXZDJkVqT3~GlbXD89ebKb7T&YoSEDug1Jq;}gN5zY$TX|ogwHMyM zm*0eH`hP$!E>AAaRt%Y^FaSL~(Cgkor|!&UQB%GEv&}X>?B~~e=2bek46&=f^Qlv} zEUww5;!6FhWu1AN1NQ4i8(nX_JxsvSwC;49ATEq$CUQ8hTZd?0LT^jhqf^n*6B)S5 z!D!t>HtPAl+*c^XfA_Y){@}R0{g^iGH~wYW_JLqUONum&Qv>leP!So>_o*J-k@Zf~ zF_S4Y*RB;nA;8c7dqY<+MO;HBM_~pGXWKin*u}wt<40bL-Hdo_Te3P@!4aZQUm<0+ 
z4z}99(=C3^>E55VYhc0$ejU=TOCs~-=3hgsfA0d`B@`+er)0wP{YWZw<)aIj?~V}> zuYFhUv`)UNHDZ43VTkic!EuRn-h=-Hl$YR}pbI}R=*@d|kWtZ!Asr_Q$Q=^av);nF zrLoZ^^YbI7k02pk(7J}?@=TYG^*<{M$b7_fSsGx3JElS_1he$hMVXoN4ff2yxf$mq z?2+xHxn6;5w65GjTm*mKkxq-wrc`LFKLXG(KN$P&&X^@ew^I8FW=A zD`cu2D!sp_cjEg?OB)kTN02P>X>M&D^`n!|9ri^S64hhJhJe)u1(jA+U5JjJz3>pO zG52Ap{JgX>ZhcSoDP(=)!`ji}ez~rl!VDjASNEb(B4jKxJ`|(GDd6PPus8GWn@!N1 z>tw^m$*NC2dSSDu+ZDijg#W?^JK6YnM*M07lKk*nfCZ?`FV?>ee250t)3f~6KbLDrz`*VW9TE> zg6x>nr}f<%>I+;h@3A-`U@PT1(xi|04CgDay=1w&E z78tE)m!dQA?a>cE>K};Dl-KKblDZAzA0&YUQ}{o^bWbKR05v-~-8;8puD|Z>8s1Jz z`#;~#h}LdOyAJG)T4$0SoUM89_Lz<=oFZ1=C~$B(x@rUW3qByKVTBZCix-Ey-}`>? zI_n)#QD=v0M!Hb5{(6`wS2=Hic5vq~|KVje(I?cA@0&6YS^u=_w2da?LCXh`YH;)5JR~V(5ki z(#t~=rdO*bHjk9*)$nsI&vmHw8Duz0ZH>LUHo@aZBI%BDVppBKVf47s0p23Fq;wlGmuQItIzeDk_ouw!)k-L=vByY|Vr zm_>hFP;b(@dAXA1myn3gZ{^&yUF~<8wC*%H-oJjh{Z^flj0TK;{9L7m4H*&`H=H%X zR|KD0P5%^`0w3;6CO_nkVR`-aqQe6)D?ybZcnlR8TPqFRqWxG6S|Jx1mniqp&sfmtB>h?rezBDH|FkM4Ee4oxz)i@iu>FT^b*Yh?_q0UvkL|aO# zsuo_g>cL3ksmqV`8Par8Y>37=zsPc5xd6+WaD-&+c0{!YO5fH!^;y+-)cq~}OFh%a z^^84{b^OA_^f=R=CFlFr3?I@mVrKqC8_6B!-3BBF>3C(Hm~d>t;HHz4)(KjumKOeb zxI(R3zh=c>pJ=QFb9~5r8MAE9+qBvUkVz1u_n(cS9OR1e1geckU-dPe9k)m4gVpvY zeKU$xqLU5JWGP;@v07BRKsvo|eUn+ton`jvvYp>=?4}ZbU|&SfgEkMWT^FA&DDPY6 zs8I7dXH&3@WVeLPXg=YS%+=GiuVuDu@H^$#3n$3-6N$OUT|H2KKygvs*`}ff````t z-&Dy={3*8sM=xl7@;d+Qk6{bbBXgJ3KX|kz*mJ6y#TWT532JO-_5S(^ zzYL33R$tMZ8j-*G+}W<8370y`KWEn6nfY+4|5t*$uHVpEc^aHi&{XY!U_4hB-ws|8gU{~8K9e!-{AXeERhA+P!Q zb)#nF^IyFN1I_Wri>m#7I-P~bO#_32kd&SezbH^RQPjLV=%SLPztWmxbf1HJ+c-KJ zUM`t?eD2hO={pCg+Adrqt$X|Dv1Sj~qVs5I#P{nOjBN!JE7?D-FmAZ&X9%HtIs$t9 zo;`X~{a%PP5|QE}hD!fiXQ@hIQl}=}(f*{+Fgi*m=jW(j%-_8^&b{>h{n*<1*6%(_ zm%s~^vbI)~)Nsr`MF5JJn$-N&AJw4R=)L8sNTlH;#Z;k>`&kICvD- zx;(wM5rh7gF{VVX)YPLFJP<4sT8>Tbmw!p_aJOQl@#|iq=&0v~I_P5=1;3JHfx^TQ z?~6$si^aV2JW-s<0U3<}s=5Z>3;b~T*V6@R2a7_y%vT&Q_fkx#J!YMgn8g8^ zFBd2z6BFe_%M0@@kso5wm0nm_*F4=c$~$nOx80^)Wi5Ku^TkJ;W{zj$Kb zUD>fyRv%83h?nJAr!QMDz_+i-OV{!^IsUyczpjBQ0Vd%(f*>i#S<}4qb1{+(6^UM> 
z4^VcY<62iSSDT!&`rz_q%S!E)?DTA1Tox7Y>oSY9pzyAK!gvZ>m9b-2D<*8aaHvi# z9O$rYPA8u^iJ{|Hlu{&GSFO<>bJY35eUSUWZ`~zcAJ07D>xv`Sv(IpWo~`*IRjVxW zxzPbk`HW(|+)JQ(Sa(@FTF1L>f{(;b0c%V@NTgxqk{9t$hFoLQ@deD?3+iTAt54hh+>&mdX9;o&+k=MAsZUBs*e~(A z`5U1nXUN>j+xhuDlN=(Q*2`t_e&K1_|4*RU({ZAGpVNebz>*UZDdg*FsmZxhBU z+-*|QzjggJjA=>2=isSujWIF*Xaq|d^LU$eaYC?LzkdE>y_SbZd0L7wq9KQ={OZnb zS`M7GX3gK8T@C^O;tDd|dvr<+`+8%qT{j+9RawEaTJs}~Ra~yWplDfKd7uxrM3et#b}EUp z=!+nncZB7C-7T4xIo@itE;yfUm&t-9QyyQw+ijV14w`~{_xg-Y%@$!Ae4u@yk_7a` zi!i!uJYKS?Fy4bTh0d+i@4+ZGj1Y7Td`$&&JsiqN(|HGl2EIe4I1Z@^_uZ*2AH-%T zI~!d(xsXje_hSS-vIMibx4Eq6)y4q<*P9FN#K0ne{>V{GatTVi^0lqS*REebu-Bf# z+hPfB>W`9T(5=W%6;wvV)gtG*#vg5karzg~%JKR2S=t^v)x{4N|4h7FPB z|2lFw)-`C)5^>SH-P&@=zn-RzGOCxD!cY|D>%F^#n~phu;ewsjC9QKy12ye&mt6f; zQ8eUL2Yyl_y+_*^M-kqBNQgk6z+D;vebBIfc}dU>`OxtJ;DmuAI9j>)7V8Rq4Hydx zKR$473v-Fu@}UK379Z%a@pB+K7#G5WrImN-{oDXh2w;)lF$BzN_UwPbYx3P^5-s8) z@E!_mADID zpI&s*GuDs^cxtt1=!CdFyVAS*Mp2ZNUC&~5yP24p>N=Z0M#u*b7B^nv1Z}X^*4CC5 z{0gjRRPUTU<_KbI>`<6^E+HZDR1khNR8|kY3(*}e?bC4!WQ1Ej+DW8m%zC`qrmgrW z6CogFke)I9_v)3rs2ucfj~@@7JRmz{9;~#^0%(ZLB4D;<)t@_tdYhYc^ z-O+OtL!M=MN0xitpld@go2Nc>nb-q_F_WSZ*iDd96N!!PD7wE2zFV7i3O-AGLPH-v zc|w6pYQvA^J)A7nm2GT4!(23(na^`BT+M%t`!#n*FeA^KY5w7(@PU9_y!Fz1Mm9vf z3xk6TR>iXcPvCsQMzCMMekR4&GBO~FBuguc3W!QPo_VKb>bqxow`=Md^JGv5Ky@0C zHf_irc$UbZ?ta5k=>tm}C}rYAX16!Dw8SMl2?60vj7VkWNrPE} zO>}?+q1CUxe*eyal_ag4JiDf)|sO^RV+aO&7wGsfd*@$x-zEq^>-^rS_*XjvPL0B(bK0k}-Jr@ZZL%HSgaS zZjBSYWu}hr$y|7=z}@+A+t{-T(|smI2bK5x^p$8oYiO}T+Ij9fHWp+ZN6t~xrX6z? zLyj)A$wqb+)u{<9*e7~!hgYaDsR&Zy>C=F~%+G;`*S&l9kf1THrkyl;+9VxQcolf? 
zD($~L!rZdimpAZ4pgnZI-=IP*NATxdrXx)rNA%z*5N){t02i}Z9{9FAu&0?jeV*u6XOeKw3wRTb&b!lo*)2P`O{jG~eIW8X`F?9}Qq zcg#^bpeS^wLjWBgN*{2~-n}7J`82nm9_gJu9)#ritjLomEmp3)c=qhD;lnkYpI*Zm znc*Uv(kxO}zxQYT3KM1^G9YXj-9h4H&Dy%s&86#Oy~#*4Szs%%LxY30$HZf#H9qv+ z%a@HbbcNX^VfGRKUQ-n7zbQ@Qf1IP|&11n#R8_S()Ht%Tn~$aKfwg5`3k?j?8MYf* zKqp%;tw!Q2bcGotmLxqA?&R-BYsX*2*(K!6j5+c8rM@=@J6uS#j?hVfy-22bz}epU zwwE~&sb{%J*))#&kHh%}+8_)VZUd}E`Stv{H)TGL0yk!= zC`bM7B1_VC)+=;pco3NzUTR~-2el^uCNzwHu)(VA^ktS}2>zv3wDIv*N=o}xdPV~{#R zApEoTu!LpCF@+zXW&4B0i%3&wd-f@DHTQ}3IeCfJ3)6;O{NRVRU`SB7&$HFy7 zjS;}b>=6nl#h%{8hSxs@>W|#%2<;yFAM(RWEcYF6}9GXppOADC(13O~K;O<6?Q2O%N;a!0s#%M>Q){A69 zCo`fPot(=1He`{@F_y=J<_8UN7#bv@?u9agdT0B9Hxq;BN(vfZU3pwjZ^0IAZw&Cp z%{W>5*>ik#aR0`b{1wtSo1`aIRXm?Y@}^-kww^;MSXtU>)^iH2Upz*g%kRh-iA4xO zn$Z?9ot)uwfem#c8Qk*PXw@*l0=`ZHBq+kLDPU$6b=%BkzI0Iwm7XsFARLFoey1KUz8nQoO+TGb|Au5?)eIbR8=AGPJ z;(@=ET1{m8&suE9*~sPxq5F9K`uv#9x7ye^2(W0rN~ZAm7@_9cNhBs=lk}DKf+Kkw z6o0y((Bp-HN_dX1L+3Nd1eqU0=C^U{^oU4zpbiLDy(7e5e6jHPH=1c{(1v*hg0vR+ z1##R;<_NQ|F3$)(s4Fjc;UgNMX-@lJtyrV^>E>d8S*3c@(^+14^M`+La!AtuL z1Q4m|N$+$6lp9zT4ej2|2o@TuGJo&|?AMEQ#l~K|9@94XClq%{zgz;t`%~>nKU7J$ zNGi#bJkT-8el};L3!)K|iF8989EQ^U95Q6C%lpd;A$Z6N++$g|uI+TViFLnuLNyoG z(mvx^0(njPJ$w=W&05vO_j_ZMsP&fgBiK;_i^0RAx2YE<^W4zPW=Xsl<3_h4Y*}jM zmYb6k`*`oW@KIN5Z-5L5%Hp=~h!}%X)+*>s#;CM}V)X#lkcJM&V8R|InPmGNS-eE#k zP2yd*bVq-)k79?_xz98U8Ch9xf(4>+I#P?mRM#*@M3BAKPWZr-?STWEVQ@kW_J=~k zRkQGywx_64rZY>8NXBeYt(4Cxi>rgZ}f51f0X^i0#4CW~^zi9zC|#gxRP6ykzP< z)wEp#rq*YMN@Sv#^2~_XbHL@$aggRLD96ML6^m=uHa1NRrBWew!YOmS)3sz5#4@O1 zwFD|kCR&qC1iCr?L*t9qy;BW)kB)_K$N^GRG+tTxT(edMf-LwYeD?rx@d?Z<9=~V< z{3SCpPV#SfX^{TFTcH>K>{;gKB<>FxSaOHMs~ZOpUlCq#X1neIf&UL1rj^}~n}5kL z*yjUc4%WzJqQTxdRw zMxQ`jD#{!$61_Kho{D_QXACP@pfe919J;N_&7R|qJd#Jio;0m3JVA2Jv7hD}}$8CF`aULG z0o=(s37CJGSr9B-QhMC;g?WcT+VGyEqOv&^%gf!M90^Jci1PXehZr+V;Ay{FIaoO@ zbG?TKg@@mJxdHaOk<$B`*B?$D8zYxsQnS&u-N|$;fE~l|)JaRIzn*)xK4ZfGAKze9 zBoD1VQY26-Syoz~A1nqiV>*@x{^!i(z0ejmRnSq;K*``xiT9RzSitGahOoyL1U|<5 
z+@xz37gAHVnwXR^N)tfq3a8AKE3}@4N56B{Cnj45J0una8}j=+=(uh39ok&7ei6~( z@RCkxpr9l;7H%h`4ip!^wB_9r&B(~($KTe}d_fB|Huv0JFNt2c83Ds%fzG!2_1rFV#5|l z72s5hzL$V)@cOoF+O(V{Pg({hL<;cu+_Kn+u&EMy(nN#%I;=s}`4PnZoE(&4RZ2=q zq|Gd)u*vI>CM49e-9mzc&*&#ep$&w2*A&&#^Ig!F+i?bczD;Y_4KO|bd5D=xRrgcC zP)KR|C3e4PW7^)1dIBt-NfT51vvSKBJWhpowlZ{@T%B85KiS z#KQtGA1~?0^by#CYnWcfH-Vm|$C)cfsBhsN-B`YOF>Z_-dGBmw6xU}8o(WuSF4}fV z6R3CdH=d@Dr1P4Y;!U_TSJrfE61=+6Gjr^^TOOM?=E(TvSJsjR5vO*9c3l{$ITHaI zxfM@LkoBP|MwZua#)UGirKAolxgUw<5T+4Rm<25O9B>XlNOCu#1r%GzX-X<8-u3aJ zqzyA_-8IWHqVJv_5t6S7^@71C!K!NP@yva;G4^axHxmQD=_)N&;85rHCL z$i!EKf?j)Ugsncu|Ls#Po$n$`3k!aF$;y@OFaF)d&SF;Bgdd`Wtn><=(N-{e-X*&^ zaHVm@i81;LJU=V9H%FK+7p}hLK$<0C05f}JKD^)357n?ASyaZEL+VAOiVC3*q2WMT zGm!Qf^TFVV5GZM-Xh%<&GG!|xF5r?GsZVzGeu~oXpBrB9ElkFTQf~^&fglwA!#bmV zhuP@nX{POc&ve0PnkQUaQ-gGuIsvKCO-#dyRGy!mMWU8IT}i}!KBKLgK4S(z&_#|) zC5g+@hYY;hzS$p$LEEP5#r!!8MI#ycY%-AsJeXpQr0VV$Z{Oa1swC+LBE^KPCFbUN zPraZBata=ZAzhSR7fHC0jw~+D%d>WlR&u}$W9FQ>h7lUyRq;_r(e zK$7Sqkhn+{g{d=DRc#7~0A4wH9Whs8Y#wn1o#3dAhcy|lbp`Re!MfKZl@zsMD^S9Z z9UnU1y}h0ei9hkNvz{?z`lzXi=4!~)c+4_6i!&qSOcUgOR7cJ|;;`Vz8TVhB;=;MX z&XJk?RVMa3^OA_2VZ*357Z{szfhsX?!#i2lijxZpejK3%(lgy%mn?Y{GQ$WB3`Psr zEItrm8FD!)!Ss9ep@+5D0>sA0Z)vKYS92RaX3)B;uU@^%lMFM&E_yQBWYD`*uoFN6 zXC^H)YeF-mKMTbYU_w1ho$-qYV(QKg0Jb(Wo9y}B^+3#{i!0lw?b*+P z)~#cF(aE;IWNV*oe{$Mr*)pjC1DXH}vOYgrVfkob<|&&qZ_mZXzUH>rrxxQ|IUI&4 z%^WvDx4NcA6Mul=eeLnyrr37e8=HuYGD@0UjS`pyBNoN&-tvGEt64U zsv+<6=I#;O=t8QYZC+-%&R1@{oom{-FY|*B6MPy$tC?Pc!12HxX)LcuMAB8tGc?T~ zJ`b@PQj+-(J+jM}H$j;~TS7mGX+m9|brBO~kMAh6eI8U)d}k9ZmY9s=5}dPS>7tQz z&1kvUTlEW+>lOcUXVG8*U#nJ>srhu9?AOp3eX~t%7f%eJW-2~+t*WKco61Z;JE=4a zXUu9SVJ3;bUFuy#?Ww4#X_adU7X>02oe8@nZq!&}3zLf<>Zp5soxl=Xdcjt5q>J*i z{>(mZAlNM%U16EA70^=SZR9Cz;7kw!|1G84iZ;N){D@8B?l|`+nNhE<8i%9A<-H7J zV(Rwo9C;f1bxx{pVHCM5i7%z8*MJ}1F^Y>KhD%E?xGg6$<;qjfu-2*_u(b9oB)CR; z-2dg3K76YsW3pUqmk=viNzjeLuBhG@I;l&l*VW>Gfxq;`H+YEGtk?-U}}ptKk>6MLdo8 z;jb%Sp1I@d&YiQZt*z-hJpTZj%!?(@GNDeFRa!Uo76s?_nnJ0Xqcp}?8f1nit3X_1 
znY;*=P?RMQkyfoL^=$HwIeYx&#^8nN=j?l!Ipk@68)Xu|eDU9jcMNaLqd5akAKUHu zYE$$#V>c_zi#9^)yjUXsfxGd<#44%3w)72(h+y1oer+=nitIwmcVmH*07wyQGB~z9 z2GcQ0Da>g97MZLmnYSt;{9IztgdM>vNL>u zjugCs8vL%Zde|TxY4P924vx0At~gkwuI>a=(|P&nFD zNdoQCD9`p|+9_>PgMNt2>Kzt0?$h10=`zGlJpN=RfAcN(KV^`l`98$-SHJh0mJ)FQ zoFQ2vpj-Ur8hf^prI_(QJMXWKoks#peXAkkLY-aM4&yqtgVEGYFpq~~D9~d9wnSl# z6tDCat?C+QP+N=ITS4Ph%JPC}ltKmH7>i}Ms zXq;qN6CH4%BrD!seKqhnR=^}IUA(6$W*K3%4X+~NhEZWzn zbdRWBasHFk`x`pW@|6?l?*G|XbP$7>WJYO5`uE)-QaieYmRwQM2OiA(v!0jh7;=d2 zeO0P>USM|4^0g~c9R*25m{y{Rn#RJid7+;VYQL$txTtNvX~HAZ{fUo1>a1Dw2C1I= zV2=RfEes{qI}pR9<0`*1?)D`QreidSi45fE;@aXcwbL%mtJ@;+A6x&{yuG_-&z-x) zJLf6R7!&i4P0A9}#PiF$uDED*{!#kl^ebC4pPftj@~Fz;-PQA$W@Ua!JpU*&^^x>O zo%2dDXY%Av9QD+{8GK1WE=#NC&Cj9*+S8N=bdGG5bqhZ!7BX*cL}8d)(Mf?#JwjZ0 z`IUVqj=PMWG!*@6D0Ng-V-Gc_ib+%MPk|Gixy$bqKd}~YQ9b;EUj9_|@IzJ;v|UOs zN)CEd@5%}uuwP;7)NK?!y8}6!wJ-Y6)Im49Edbru<;z2-I3BC7y;oFJb!nAO^2I&& zLa9G={MB;D%H=I8_t#Ex54h1%Hh*^@-g!;lw@+0pn*4cXNxf@Bghx(t!cpx6)3V&1 z2WyNwxgO)_VTh!$!T>gU(W@qXAgsR!|sr_CljMYcWG6!y<(`YAQv^=Bx2*s1;}Th<@j&*7AW2enI;<=@y`x z>+m|B>DZ^v36OER;sG{HvdfUK${2NAgR1l{r!03u2||yMUSYi?;7TL8{rmBeiI@)v z@E^^_MZt=@&pdp~Cdvd3b%n^dpse2YX$E8u@!-RUADHff@K3w4et^L^_jS7i**)Mj zffrWH`Z)BLPDJkV2MTTXrko!adAsN~z5=N9LI1FS5)5Q_m<<~==ri@07mh7NIkjJA zZ+DZ*VJvhW;a;BY<;Ok<-pYHE?3w&x_u_{B}HRWPpNS?kl2S^33ji zhnK9K)biH)0sCaP_A2@U*FOLC*I#QV4OPE=D>(v5gMq>T;5|0KP>c%G2N_AqrcQg< zG&-ky=LsGB`H>g4$XMNzK8Ix_jgOvG{Y+%UR%^!| zI`eerXiIRmM)@U}r}OBGk5BgdHGF+a1&rN++I2&__%69<^$YCk#F-LZTfqX5Q;6K+ zDxl8KpB|d`)+bJhFn$;9wZG-&(Do%MuRQq~L9bl3Fv8zu-R{Q@DdeKzhKf7h99g=& zH1Scr>cqX`7-KIvXTA|6n)|=FJS8zR@fQLrey@=b*hb#w^s{uT-`B5S@phq+aq5tM zUm1-dl9F?J;3kebY3RMU>E3NWDcv{+kDom0_;nQb%CUq);o`9s&C~!eFA#T~qK}#W z37mnqPcAO>TeR~i^sWU^jzXk+x8*^+Ny6T}`t%%@oSocwkfxAerJLO6^)g%>1b)ys zr(x~hQgXYkcKD5Dmd*%qji$VjKO;-8Nd79Qh-p~R;238zy5T8U&tC9}$=fzZ{rFz1 z)t<(Y3h;@((7?s#OfwY#d>7EtNP~DRTuOvz$mvQW)mcjOwc5`+qHkg;jk5ihC)@e^ z`OQzZoH5fkXRXJ9l-I{>EB9VMwJ_YE2ZLGn)!PWE!;Z)W>z@sATv0XfKAUZXoGprG 
zHX9Osx>*FB8t1+u*Y)z7g+U%sYj}1fFJYLypv5_sF>ajTDA=REG`9P;(@g5xvF~h5 zj1Af7?m$WxSR!0y7EvV9^NZBjK$>_xqUR@mdf*uD%N$S`{Hw1V1g&7*KY51I;`{RD zrVn`$3|a%&6PJ`0PXN;C0}}_EN{?iYYgD@^5C^NT|ECf^Jh~50J9)qtS~>7 z8Q~{nx|TAhU>VU0;;iGQr>eVee|4+p2ZgIv4weiGb#-m&F6%J?L}paPc6^pU(m0Uy zY$?^3^}KZ#@(BcwZs208E7wB{n@j%5PvVP0OAfukG`v{bKZp0YQ$d zZ%H&%WXNhB&Qn!5M02>@{G^k2n%#TWGfVeW&d}l|joz}?7M%`Ndk}NGIp7l!ErV;N z|AT`(vD*GhY?b3#u`O4R=?HC8nnwa`jeNc??4A?EvMRUQa7@(-T;Hy+zukVOmQ6R* z{}dV@+$;Y5cNHsilD**N(9VBzZ6=PRWX>ds1RYC#SSqwIAYo7f_7oFyw0sE^gC=gq z?m(J^$fRK;tjzNA1WRc#j+Ne`y)QX=Gw+PnsZ{}Kp8k{&MhQJMoxj{zSy6G$M`KN= z^|?F8Fha!(q*9Q`z79WnW7wROx|NR(^PI+;U$RCF?b@lQPD9y)g=_J3VBb?%ADiNI zyk5u4XRjQ+X`c00mgI?{PwT2-`t^_l@&VNqnucIch{J^0=M<`?vYl_bc%0IUPAQL` z5;0Pnjffdc`o`DYEHCH&6)|#mgKA*j>BQ82Zl7_WmAr87oUN9hZ}EwUh^hzsyEn|} zRvDuj_rt%}0B!aV-NotF**=tMz84mWiL50%*hRgY!46Q4YJGXKy2g3#`jQ7bGN1N# zUm9;R`7=Xpo{%8Q2jQ=(fB?!ZUm@=8s^-J!fKxnMCJqM0egn%F%Le zk?-#tvceQMLbCAVcgrUHD*J+^5Fr;MJaO&Lojb$fp&DhZsli3}k}?~O6SuO&wy>gE zoAhK(^-W&Uy6%0UqoX4MKgl+K0OvS4~A#eMut zW_=VDIb&E3WtJIQa+iEmls{pTc8CXz9P zS6R~06*lPmC;v8e`?@rqT{Z)93+n*U1GN1uDz`3AQ&+zMkjGY-Um4;;sJ(`pnq<7~ z7CSg0GOD&cuJ$fH8nJ(W-UvxQVT3pSC0~Mma6o*+U;~JCo~!xm z`-zg7{hpqVZ67?I1Jro;;9h$`2oUZ7O>kZ`JkQEn)uQ%nOlRHIg-s_@)!sL2#4*~if_*zT9_(^(>w|gsW#3m<+a{PAT!sF6!eQRn16Up~ zDR`6YoBOG$KZjqZ*R?6_Cyq!irD{{o>a|}!MJrbmry!#10?+7*_=NELbhX-v>4KjK z9kv?h-mWt^mci^!EuEFDJE`eCv}usomR{%8!#@xuya|ico-@|mmH3|xrdiCJrqQM% zG|XH!O2lqIB%WF@OFlBOLbG}ebVNx%VA0RPE-ewE#VYeN4*uJuqO&E(c((RFSa}ct z=znbkXYrBhAe|~3B01)izxV9Gl9=xKbT^)cw~Qb5-1g+jlcYZPJ|vnZ!dif`#Vq8q zRmT{3#5As_HzBD&>&}2NQ z>F_G_Rcvh82HXJlXk2?|b{`fkm`Ps+EG=lv`&;stB6y$Qv~$CSh@ij9YBh4Kwb zd-JDHYge1K=*k|Sv1S>(hN|ta{e@F`gW_eqH5KpDu8~)$(;nNpjhdOQF@N5?=*N3r z(qWl?HLmJ&UpDp(EU+k41OC>5mn#g}fm0aMdjY{B$^2YvqhhznPj>I3&H8Iwkv2`a z3+|?heU8wozICf)@zbw|*GZC}dRXobByGNHtGadeULmG`{i^Wy-??}0XknQ!F43SE zv{Fc;;eQMb@9HxDUN{9pWYyhI=V1XB*Q&3V1cuwn{&NrQb88q z(i^|NR8k*%Fp1gyUHGvJ{@xE9T2+GfU%H)aHqM<>EtDbSgZF)%bR)ih#FpteO}135 
zzsS)jWo35P_3Xj75kgif(F>lRo7J+yE2OsMFQ=+z`DJURUOIdc(e0&hfJY(9VFLP- z#Ytvzk|>qEv&)=|dbukx+>+Z}J7%Tt9(s_Pn`;aoIhhkZ*hD(^X~U3k3B z8sSfAW#tKHp-sK;!%zpkq>uDE$l0Ijf4g#ii&ppDft-FA%eM-@SL8AOd7oH{yY+X; zX!_CX0Kl4L7G-(7Vmkd%sI}ZhScY7QG%zT9mtj^hMJ6TB710YF%}^BYAzbckR#k7pn*erj#CZeZY%ej9+EH`f}dnaEX zZnSppm?Jl^v*6KH!ZT)n_msoaKw-Bn$x`}E-z}TYbHH$)kmJy>y;)pI1bP18z~lvH zTObYqAd>Kf_w6pOy<7Vwf7A|6LcRI{kFXcdZEBh|=1Y&a8@Fzy_5Z+6ff7lJ4i5g6;nQmt`}Wgx;-BL5PWl4gN_zK@ z<8WXZuwp(q{hYmU@Q?|ymq-?+ZsjeHiYz5cW3IkRqx}x z*5QK`cAvy?UqRv4)AP(vzt?nXTqjY~388+*uKGsX_w}2n#!dF^D{astzp${HCOxCi z%ic%cA1GcP3CBi{vxH?fr#G1#hJPwZ5cRgG3XG7<9osOJ-d0^PDP8{TORs@urJYu0 z_TEr3*8Kiio2zH$fds%dwV=CBjX>dDCwfPdtIzy7dU}l2e(OK-M7HNTC>GwrQ+E@= zN+hRm`7Yr&s@P_ngUS38PRoaB`G*_PEM{foHlI6Z=jQHSbZp{lxB)mCH<+y2>qh-$ z1^AvCDj*~Nhq?{*9WfKe^3$NZxZqH;XTC}JtbK=s20ecJ_jfAJVbFvz9k!GG_CROv zw8=yJTs>&@WmKf9$ES5_k*egQfY<^1_Psb7y$uuKoFd29i_P9?Tt9wKUQyAG1A|Q3 zuJSC9%vGw_M&5Z!=9uJ4nq2&+&P-znJv0KrK<)9C>9#Ew`59<^<&JMWGo*fC?oWSPsy<6PiS1qWk~8!`?kiww=ZWs~_K^nM&p`Q37ukK4QM}0@ zNy)xLC(U(|ch(kU%oYkFNhcSF zS}>{%$MVZW)PuvO7Cm*^`C%=H0e>%i4E0FcLksijN@He?cp>w$Nha%PDt<&GN^?Sl+`0{3BZD zlp4MhGV0Q>f8xJ^CJ^C)5elCLKI+k<2d-16M&6XLbQP>jpW48)C0Q!{Q3piF`nYsYyP_j^{!OsK@60gGpAc+vQ6#*YH_<2Yx7*qD0u$fEnwnL zwW3Fv!;4JUTcE!6y}6s)^wvcrMSU{2`9M#*Dswo(r0ThJH70ntX71$2}Q|URX{hn z-S{_MdszILjDLVOJ`o95BNrZ^kDXE;Y~gw<5ye|Z$OU6prb@Yoj{hqYtJo#8gdWZ- zoO2}~>Zmkb#(dRT%Dr*g7wOu?z7remnuNtql}S(jpH;sg!;RQITw{Bnx7qpTPYxfp zQ%b{UDAxROZpm4v&|c5E!z6PL%-pvo+_Mm@0DGi){z!ePL%rSKFPCb{w_ki*T#vv0 zflRWm;iZ%mP3g}>ky9WJVK2vg`nQ)V9WBdaZ2 zFLAVniC;!Zk+WBzwr}vhR)2RQjEZwUb7D8r_e@Scd-3AbkUU9N$p}Xe9#@kVnUrm{^k+UDsUj*rKNlvq8zDuzU0W9u3OZf(X|1LXKu{B zXX2u|4;`v6$#pmsYe#ROH!x@MSamh`XXTWpr5P+a_@3}_4_Zyk*>wd7PhZAucSnMd zihGbdEFrL>8X2E@*K|%JJnP{UYn?x{8IxVa0&sku2D5TGL?>IGh_*XBX7uQ!t5>&e z-1xHSFJ<#fKH^PW;gvJ<*nlq&HB_ZDsOLZ&}~9p zs>?m;g!_umOO6~xibfBmKG+@bVqTALkfWYp8TA1W6Il3Csd>tBMu6}s{6ZmkoAJss z4%|8&0rv#!m)X0zC0)Nja}}_O3O>5wITko>JUwj?PHg(IrHPz@G;;G zlO~y}^p3Wq3KJYyXhwiWHmumZciHvrNxJgWx4s{;trq%X-GJGSi#$B8gNNaE_(aBi 
zPl7Or&P=Vn`UL#s<8cM|Mmie5BuSqiZxTCif@^dvG5&ZJhEInL;@|ICn^FPE26_}? z^W$Dtu@&a;p|*yTl4KMlJ88790cc0?0_xWN>J!@HUyEjZywW1IqD3Wk`C*TN($WoY z-lW*w8#-{!_9+idPHK7{_dSF$_vWiz(}jL?{+lLh-oHnUI>FBQ0@}anj+LwWGcpdQ zp-i>h^}lz|+hf%ywhj+*{48=Lkz~Hb%SFfb*GltzruKijHh)RLzfB&CT3jQIXNNCb zihc?z9jhD9q^pnDsfk4A?Gg$sJBwybz3T8V5v&Tjo6>{{8vwcm)=Rq{%?n7?tbf?y z@QMXGF1t1-+P%#4x zldNKKo=_lU<&Z!N+6>Mk0Gs4B}&!?g0z>h(>_bj|oz4mLXJb(Ej>Y4=IhU=nc(Pc;$8J|KsaSzZ4k~z($B*{=B4KhU`QZkeZnJOw%M0WLl zR`x#UeXi?$pLLx>Tebdct@~d0{TseR%-O`FM+IYwV{g20ccCC?|i3_7tpF z-52Xzc*+FhNK+sp&_DqSl1(<(i$;xe-S1h^+ogi{@*cryaV|PK`H!+5k;VirjJMux z(sZ+xF1t)dhnqgxy?9Wo(*y5hH*@#O-`9)uqU1JOT3l3BQ(Hx84QiecBmj(JfS6(W zkR-{^FCL#8g5$eNFdee3?2G{jCMnYTC_bcvf+B|;wR z^Ehdg)#Ti_?fYg@Gm{BS_J!;R>0xcxAntD&mg)J@`U5Xz zeHI(-riRSw86^Ub>d28mI-KPA8A)0bU&a=G;EMs{GAU56}IwezX7F;@0atn^r)9Ec1-&Kr+o+-#pj; zzDOscz{~2fm2SU2kK5DhMVD5DTPF$3I{yEla*#+gsOU?^RQx)Ps__tFm5-*#@d8{6 zRwrEI1tc2$;@8czv@`_!W}2Wdaoy)RmIk-uWI=>Syy)z=)%ci=>UStRycjVG4)AC7 zNR{2)&q3R#@GGcW=RDSI(-Vji4HXmOEj5=F_V+)r%&wdoAN_*Hd*70FB9o&Jgb)zt z{Ifqkf5xprt*K+)#E(g*PXo)ec6B{zoz(dKd)W5vZYwr?X0>Q+Y66s_yWviwSvfLl z)F&iW2JLgB@^EICNxXfw@96AoknCBd8MT6c5cl$6+z!jMFjMa1Pg#{ei&xz}IVmj8 zo}M5RViTnc$Z4zKRxtUxf3Fo#r&JG;Po1iP^tRzsPE+I8d3wU~Z=7Jgw&{M1?ieVk zfl=&$3$Ja8`+e$#cy^gT1n`>saso;uE$!Q0k}s&mj})ZLb}NkCsoy_o_}H=EVRRAT z_1v+9*PP+izJC=)tKr($Q|Hf*hPFZs5cYl+N_bd5Q63mQ+CoiEXc7st&dK1xp{ie` z3SM)T3XN<4epxNB{??XzklV58rDjuXX{5fgep1|Fg78cl48uY`|Gc($Rt@sX zxLLj?nk(Pf8bcr~fN#+jrcU~j7ZVV|eA5BL5Y*xH!bVcL7-wiR9P7vpOMhVCEnMuE}`KsZkm99-4Expp>($3p&ZLf59pqW5$ z@V=qym%-S|)1WGd4%x#1M8OM?8ZhZ^y0ELwk{iA2Bd183{C{!h9a-(mf& zc{e7BGNtF765i%MF#nBbtY80&Rh;{P0Mqv4^0m85w0b>E_S^d;&)J{gre)-*Y0V8@4mb zeMPk&s5YS^%QiQWHsG1oF;Kp%9E%aMC!qmK#?zeIza)kmSeS4WZe zL;>v@bi1IiuocvofHqFGyW)7`FQOXziuJLg>vKROGlXrwU$)Tz#ze*U$?3V42H$P+ zC;C9>A(?CY)YdviUq1jI0{bdupp=8NB}E!2nR-(CA;O29pbY1W0P3-3uSe`~zxgP_ z5O~hO951t8G}MNRincD-iDtWHj~c3_^$6|=b8k_W_wxMvMMx+LXP_CJL8KSdL2MDq zMoeTR&;72?5i?4J5Ojf-BEvxnjb+jYgw9qJ)M|KcJwmT6X4IMFhw9qqWX78sv$L`K( 
zfzc_*&(E)@?-=BJ)H;bylAx=NO$pf!j8g$yAK<)kTd|wYHW{Vol(|^Isb;WRkENGG z)ZnwhOmwuK%@7qDYGz(_rJBdA#`8U4kPfGm?xRyASj=)uaL-Am_)XM)?d6R+)$C_S zhXwSIRkV#)i@Ga`AFT|5T4NEL+VYvU?HVQ<%Q-m<%Uv9o1l32aMpao<KIpotk zXJEmuPGXL0VZp_t zEID{ERBdF{1iCcO6%rJ>H$UFHE8M+vM*wY8F~#A47Y;syl>G17-k+RtDvw`Y($Xy z%=lsfDnro<22Pg$cih;0pcE6`3ZnNe{ZghSJA&5s8C65dvtQ;VtJc4Ci4QP(0J9j347z3c^gh z=-%)+DR5#6hZY&2|F5j%hpBM^q|98~eeSsFvYA&g_Rm1VR$m_C< z=qDqJW5Fdat$A z^H(-J?O?P3{4m@x#-fCwOg?jlFd|OF=_V`SaEs{2{R&r$+_KR?1=fXRHao%0v$lY5 zBYlryl6n8a&Hkc`vsSE7waCDE@Grt24M!thcp)dD)OaF9%&2->`x9+urv?fQP5y3H z9VhEQcEw{xnrXc!IZ`ZgApi$5FY;K>7XnKF#0CYSlBLNIMCov^Zs_tA&u8=M@6asf z&YoTM>61V{-a~g83eNcP@+%jMc~4BR?ni$JjvPbr#X(d3C%Zo1XWvh?}$=yhgq809*8Dk?6RGY8V2(Xv)@5lj^C zR~lVK*S+?%JjOHRFcd%m8g-SHR$`)Zp3#Fgbu>bTixJ06SfOuUKSr3I2rW#;cK`c| z#*fb)R!DbRXLrzaDUFG^D-e&6u}9}ocqY88Z91&hYfpAoepHjtQj*vaZ-&Z$>ZSlKpNn^yW=P#A_~#0+vMtO#p!3xn5)TkSJFE; zOJ!I836F%Oe08Uq-CPw^Ba2I*<}EBOpVfLpRN+|&;4KX&W3XcPEAz&&s3Z2y180s1 z5y|M$$OFqrN(!O}1ajoc_>QF385#{gR#M9}J2#0n<*02k^f!DWIC!h>B1B_%Z-=y~ z{+&?WCzduhzt1SssgzLRL(gzHL0?mgcR&!BPMZiI2%Lnn9teL* zkfS)IO1q6SAPZvoJ9|}lPRGqGeeN#)hHj)@Ng&gnB7K<28&qln#_-cyx=8azid>(G zltXwM=)6#Cc+_f6m7nxIC#Qwyw>(1dT0&v0;=u+xIO=A@zvGa}QewY&@u!*^dB6=k z8h?FSg6d`QVl}bEVMwhY3M}+Ei*4bOBTGOeUt>L&u{#vAUaF`8%ai4Uep7>Ch3Dr1 ze{dyJr*{;WR-idMkp8uoZm*-=tdpNmgFa?x2f|2*P)3Y^LryZjB-y&^L%pbIY&%UM zNY)t5YXlUi!H`+=$<(DKRT!C7$}4%+1R2oAD@J!t5z&cae2;G{;cQIKjP_!dmxsqu zu?17u6E&TuT3K27Wj8Fqz?ZATU2|5i&ZHy~Dv(qw9AX*6stWy}lWcD##d^^za0@wM zqYAd_!E!}J@gS?Zu+3|(e8K~RlF58)juB_Ee>Q9Iw)Gr1}w}9p3C)SMVK3R zTvw{2U_oO>THiYz+nZ3?*HDVD+3VD?F#F_sd7;b0b6(FGnhPPt%$p0OgwmCp9G5-= z)j$d&Yr`f6`4*$b0l6|ZRv@keRy}cIh{C?Q`g*qA+C8m+47jzw#wZ!35vU05eudfI zuh=mDl6{SvRhoyJ-Yn9AeZ?|GUH{;7E_M^!cvy35afQ(6k^-PH`pFc~;$bj--x6gt23|;_=Wmc7yzGxm1Y>qfx z60U@D5|tte6?nCDS#VC6q(J$)cW)iwehr6KyUYju&s?M%7&;EQAS4=|J&WiZ^`d)Z zcB%-27Av0XP3bXI16TpsU`YsdQYLaD4C(4^%f_ALE#$fstld@-@a>+J*emq5P_s7) zC)#p;H(I3rJmFYzh{<7)UeBM;Z!R$xlL?#1e93saUq#_!<`;6v-G6`5?5)4QJ&_OS 
zpf{#@<)%G@fku*7CO=Uy8Pb1Ahl}0fuQ}I!j@9p3^Xb!`F-juTS9ntjT5jOFb7Df- z#a?$drSS;XE*d5BUf&K9abr1sz^+U2jz29e5oBV8L(^<{FdIF(MC4}>fH;19cvA52 z;X^Ewy3I3+up!4qObsw2AqA}pc+7_nFS4~YLC1MWT_~~;GxAS(bTtbF;SW@VF{I?* z`~tn1;``S~6QLvi0CCx+KmCQkz&7;_bGy~Jv&?eObe^cy*sY>g0a9vsa7Jb(;LK}G z;l>5B{rguW&l}^VTehFr2)!RpxD+9Tb{ZYH+sjx{$iDvY!E}o+Q;9wW)06|;;e*O0 zLEFsE+E!Or6Wg|dR&ES{&~?%4BSGV(HCyE{msEFZEk+Oe{PX9#VSPamM~2?CSqcqQ0&%@{@6Hxj%f{U z&AcN$_KEb2Sm#YmySDU+SE{9}zSVfLMCzqW2`$TZhIMid>tFMgb_c5lY$Ug5yOlE| z4jt-gOm)S-sllRq`Ev0wRD?hqR#wCaZ4aJNt@c=Ush0;vTW$RV=AyVWC6YZ zigo|V75@C`7%8P@%(nMKj0xH}ju(8EDZV4Xkj4$3rVA;2ZNszNy*M#?^*BXR!K;N{ zp7D0`VeykgWPbcIe1*qWgRPjnpqn@BO4@k3OHWpeRhg}zySxiSWNE8k|7#BiGlJvVwQJk9i8n_%7PFGn_OLA!TNP9I zu9q>AA@m5`)Q&5%zu%c5R$~LirDT9(nJ0CeU|Di$Vr;$UeE;Ic@k*gNo0lG|&PzG0 z{c#3UqQp{Psn1*2WIAQetJjG{?fa*weuvDd)GfWTzR+^uZ+_j-(2zfQL-_NdI8|kP#7T8^cJEpE2?kGW4B*gP z`Xp_crT@qJdW$Jx=`9Q#<)B$)9}DV`yKm z*~w1`0V-sqZnv&vK?1gx*X#KmN=t( ze7pal@~Z0UZKKZn^fXP_-?@MEZlko52o2W09stsWfHWn#nFp=x5>VS zCyt(4_q*Tyi^m&_dnEy~Zdv9&Mvup1Xn3U670<|`C2qd+N4~4C51Tz%6m+tEJ3Ts> z)i5RZ6Zd|o#tH-0^GTB>V1YBey`y7iMNx(o4*L5X85yq}`n|GDesYK(+7aS$qZsh0IK+q5(gTrfz+~$@=u|nWtYYOAV}`XK3wt$*|EnS2*#VC0gIE%BEN? 
z?a%r$ZQ8VonSY_J939Jz-d(~tu-ji8^T6W5jh#($JLA?_yy;$W;rLx?9s5{W`X5u; zJ^?V}dGKT49ha!wA*sB5yYrlwA`>ktW=Vn8C9_mk>Zd%&v8ZCKN zi}5!*)}U(Jxd1)YyH4U3j3>Fm#a%Oo95-Barf(%PQ?JXN$fmhLTdh^Yy@%`#lm0NY ztSJ1fWa5BnYfvREOY*bxmlYlD)7}JkboDOXeK^K!O>OZyt_hyE;Ksfox-Y<)94sUTj`&U=V1{1#uAn$=wH!e4v#&lsZi zWY2@4DT~6mzWTc}60Eha_OF=%eesx8QP7lLTQk?^uJVsrG+-xk$w#ZVr`2|782xp6 zb`0f12t#_CZOHx(qzdnn^9<(UnIZL3C2IZUHe>|!A8>tnMWq!A^RG9f8FI)ew_}g; zT%~D#$VCY+TE}7=m*~|~h8G4j);Q(O>m-UhVx9E%WSUn%vA>}cFq^vkP#MV!*$noX zZeiWN`wpNqOZ4>PxO70_nK=&(WYZ`ztX-@5Jn-Z)r>HOQL?Um4_D#O#F@^ceetjx3 zEq%L)H3jXIbp}`=bFE)aHohCgr;LALq0E%~uJdNkrVS8ox|A^BcAN77>xyh9Qr#NR z1Q}-C9Rdw6@_QW*Z8DD$X| zDo`roDn5Kvv@aVV=9cL@MQoI}JO^itwD%6)Z6bf-S&$@a(2qq@+qT%}hv;V;NsvWV z6%MMYd?Zc$%_6C?;79o5%emwelv=aRt*C5^+QLtRvb$$K&Y0EC* zKfo)%C2!oAW1WOy!l`rTp1SEU#~RaFp_ONh>b(<)Mf8%~^zSACBjZQTG5O>y>0BeP zRv2m0YkbBpr{2;dK^4FMJN0kFtjU*`4*_0eorD^K?Vx={9VR12uypAY>EV|%dYFt! zce^xytMQGKS@w>{qBb#o1BlO2%<8&nz!!fJSlj5%r1DX zw>NDi?gS}H^@x(X{^PQbKHJu#>sIcs^2+cj%H}qdYaf+b&z(zb#FSVrLAPXH60>b* zp*W5CMUxR=j!2%wfK!a2b#p>Pk5#XlonuJ>?}$w^o^(BDj#3Fzj(m}e<%vFhgj`p5cX zw(iH<-1BrscVt@p%8!VNQpPijO%7=9@;#BcGQt{LD4ibLq(zTf(+F`%UUG5_>gSa# z9^wC9xk)0<_iT36T0O3c0I_zmKaq;T3`zp*3KzFnbT3`rW}jqZOF%exZto(EPwL`dUg6y-J?y~G8;^=d#Xp&7lpk)l&Ivi5 z<4hoTAYaj~t^37`x0s0JtV_FuN_o$ZYn2Qg()jz%*$21!hRq7;xABg~E)DNT+q1V7 zD)*S>=6q~Rp;w8snryMely5(x!!#Id?}`J3pJ|~8(T%5R-~L+ny^eo8&cH>V>vvQ; zKDSKAKjoI~)8$o%$DP)_f411I=6t54&S8(=+HWT}g~%G0c)QJ%TTt5dFEu&My6$0G zsf|aAr^@A}7`m25IcaY5o!{8Hw@|t$+rQwU{PE>8g*_l)hx+(ivm#x~4pjU-JUuPx zg+kCfXO-KY3MY(%G+$0iAww)>k8$4Dh-pPR7$pO z3fc4MI$X+3t+#>MdS`y$Qcl=5zwv2`_|okrHakzwd$d|15GHAiw(Eq5Rm%rOuU?+D zNfe~M=iKR=yKmXQ7xxqGRd>46{LnOk%J8A@8WtX&p0D3uI?&c4ef+Lou(t7?o-d^X zPn)+*jJgxKY|A>6eyv^84C5%l69}ba72A@H!yMNKAe0yhwC5PwBq%vOUH9oE3aCKHiX>waRIVt6ET`gx1N# z_2%Z|qYhg<)34vt$8}1uiCDL8Q@ySBOFMWDmeNg^AJ(&j$nxYfzbcuew6(tJP8Y>; zT@Tu;IhXFuQE`_JF&~qgd*Ls6xn8CHC+b>6eGe_2I;rw#;O&L;&DU5A?3Gz4Dq3-I zTW-$M2_x$2);COO4ECCMyFQ2f&12M%yykId(v}>ooi05ktaf@?Rd?OLQqS$Kiud+C 
z^x~1*lr_sc_!oTL>yauY5~XRUUhP)zSn_hrQ>XVbA5&kd{Rqw5v?U|z^}*UB!%L!! zh8?QXG@rfn+R}<`Qh}MJlXI$yj?|9bY@QQCAuOSNIToosp}={8Z0z8{-d?($L|KRj zUW6k5GRsFO(Lj9S)inA&Wet{i97h)p>>zk2tI=$~wSKSYvsLlZA<;Rr1f#Zp4)0~{ zd#uPT!A8}>p{+B$gPP8@?Ge$xKQgR6o)xL}vAxazu)FDeEZ{ZYe#4xT+6!BAw4Cz zng9MouYWIN)Ju_(`1PfU|NYkWKh~f@?RVxa`X3j!{LjUOJ|=h&Q5VN|!ifHJ{O7~jL9Ip{oH69RyM)Ml?f>%%2xj=-W+TUj5Rp;e_NK-E<1|Ch zP2e(&)a0HCGwuI>88e3TJhK6ZvVV_%XX<|IB(9~MOtdkh4l~++kH|muwv-vdIOuh0 zcNY5Rd;gBJAT-aV2g&@IRsVeVKlhlz)Ag46-v&_l<=ra#W(Ys|pX1*-%J7nmL~~C? z-*?=8O=U1{)-s;Oe-5MJ|4xMVOANusEEwVO$p7)a&HV425@xoyD0+in8T-GSr;Eka zB#_R2Q~vWoqN4|TMrF6Z16NQ$!*Y+@*;Y-Ca20-sz?kQ4Y?m}%H0;1=1VV*w!h=qt zP#UR%Oa!D?t0OZdd(i4QVF>MqJ2Zva~x+ul2d{&VsP#TmY0${P_fNU_6 zu)nZGQvWh$3(9)lcVI8BviP$ar5OF0Fa@%5a(wu;wN1!=Q?3QQdDeDRL;=k7gBlYIAY%621tU1rQX=``nr-WF?dXl`51)HK%EG z3KZq=#70oY7<-qMm-}r<7Jl>jGy-B|8E6z#P2q#VV=Vkjcyb);Zr*FZ7fn=IoibW8 ztw!{p;5mdFII2X^;rI9ju~y?^A^?q%o7JJyImC5fTzHrYM2#eZtLKn#)9%hm3!iXq?$0waM>U#H2tH_37Hy z*5d$oZs7lC=#F-i?YfnQ_`COIb_*838B}Z^aL#vj%b$H_$eferbhGN*@rwX5bH9`k zxCtgBF=B%@^hxb>VRJmHL^B-j)5uHn4S<6E`9}m0k5^ro!?zu0+NjQj%>k4_QpNb# zXHW#8VQ}B#MMn$#!N~Hh)3(WyDjhcu96mfBUDk>)VKTW5K-ZiqhIclACkhf3_g(l` zj#u5M|2ZxAbYL_jq&1a-6!Vq(4ngNuBVMfb%@yiO`)CE7TSB{{@$^+51+B*EJPP43 zO0EhLCWhMmmu;9QP;{7iYW8gx?GM+7 zhgUbe7fvD)*=hWl2wS5juJ?3ti8mT(chQY~19=hA(NP%BfP0XW=)|mF+A2&PZ)veE zQV{e81vh?pvXXX~pa3EqL7m%W@gBttiD&9p++UQTe2YH?LZHY>2y*{?OfEE4;G3q~oQt2zv35|*? znToBYe%;>(~}r%AXkT%gBA)O*`Ckmc4w>cIrR8pCm zWP@;otg%}^TgkI2r%!`r?rF?u<0>04X#UJO4^TBi^mwX1@rIq6`-j<-LNgr-8?c-v zq0geX&*(|05V`l<5iIKrFZ{sWnt0$q!LrA!YF@oE8pSY>8DHE*?STx;P7Tv%66B{( z56$f;T0J>lTsOUF`{Z0Rl6QC}H%_3zhV_-=pqlt#S-FF5j|b%qnn}BFhii(PYj0g! 
z`DtPjZ(h9`*t@Hw3JEHcB3M?z;7(iK2rg%{QISolrquY8@dJ1m-a_f};)0t7g|C&J z*8VoUW!IEfb4;$jZ|bt*?Bwa6Jj}ZrJX-!?_Vd|Jewo|m4bz)=(JWWB`o}UirzmsR z)vGPK7rE`3ab6cCP>!Oz?c2=JP?Y^q0;RY{fIAIck zPfNq=fKsqK3?kvs(|cq-Fi0J0Q_3Au+bLpZY30 zd=XDW<1|?=6>XtdVNzUZPD*a@_;gazv26>z1Id>8Zws|?ak5jC`#Y^ISuL5E?|d;J zSIfrk+IHta_uiL}Oj+Ec$t^VDxBP9-pb-sKee~5`#>)GytLe~J_tNuQipIS*y8X2q zT`Geh2t19BJAbUe)&50?2(BL6tDXnj*YTrU+Q;PjO~@3?8%k0<`8CqI8Gh2+{K`rb z$o1oPznfL|=u8hRaEGU%(B|g4tIAZ$b^Win%~g^~-j5?^<&})|I%NH)z{NmXC!G*$ z^dA18uGj_ly2M(?4gh#`Ww${phtTS$< zv8CU*oTs7oi>{W9?c-ajXT-a=rqyyx!hUzfr|3G|>KM8xw(7;Blr5bm1Nb=&!G)HF z{W6pDYvTPPVw}y2U55Iv8+zdGE_XO1(F1bj{HVFqjZfAe;8ffy^}w8YSCfgR9{j*y>|C43L;SRl#$b|zS172#C>Ii#`9=+7B z;NHEAKnPq?y~axQ8ebDM#$d8+T4G!$zdpyA$?@xLDx`B}ZzBAm;Opn-7a`7$4Vw&n zL(SRj?10NkH@3;qwa~dqWuBvG{2crMa38!mi%)5?oH}S@>ZTz)GRX@<*O z+)3ug@)gGW0+Rin>USHXb2DW%rDTq*2L@|c36E;y4FS^P-4CytV!6w0@t1!bW+jeJ zxRFyJXHN}%Mx{nJdwhDKY<;!7uTMrx|C|#`qRjIQ(vNRc^j$Tr$twcJyl`j)3!7h% z3~L=8QS%%eR;^or4)4>C!-Cl+OEw$kbn2$}@^0RSrCL$_7Rz{_!1D6Y?kR~~Yt;T) zbh?vYe|t4KS`zElum4_GR`U5No^!usE2pw%(t-I;AEsO#h^?C1+eI$3hV-nRrmXkT zXxTv7^;wGk6EyVfZ$HRCUamIkU`+fQ)L5#joaDXe_6R*DT}hFs`xL6ORMuzD?vb-p z34N-YbRgWD>?q&u4lCB_mMFg0?D@OmmD@n#(iGGU-#-^^^PKC4SVQSx;2VTmU43a$ zIgy`#K){=e?Ky5x+p`yO`P1gMw22w!io~?fT*wvW>W$dGvs>uvbB>38+;z}Ot62Tp z#oGr!i7>n3Prg30SLYR4>3u4}dg65{ z9L!ui_D($e{A+^%W6XBj-%0xyP3X69b1N2IH?mp0_~DS(0(9bp`g-&1q!?PnS;Nqw^b`H$u=JKs+cx4Wwm27yL}{3W5Z zAQE{T`P1*JhVmcU2IONlMf^0kOgyPHY9oRjE9^7j^$M}`*?kD4=l{{)5(n)$>XvId#EuoEha=b?0_k@v5(t&5Y zHfXcSFI@QG#9x|bJ>Zf6NYmBPDRQO}uG3@svbPjf$GZRSAbK?k6p*vAlJ9jdtO@5| zCn)DG5H(s>+aua~RYhLz7F>U$U4v3198RA)g~XiGU*CYsYfRofb4kI8$5U`nKM z=MPBc081Eq0*8ZKR+q8pRxa;)|DX!ZD)iKy>O#c4x0u937!Uw+gutBd!aZO$OSSr` z;1DEeF$^3ykQ8CyKqJ-gE}c4&!m?qxT;AwGd0fEF4YmZ#AV9pmya--u*RX<@=E^Lb z)T!^Yo7Zm+IgqM-mnJ)R<+hpYEGiC>(Qh1n=xzs*e1En^Y0nXHR#ej$(6gyFUjIiY9sZD9cS?CIWeA#&5o{t5~oV0hIH>pqcA zAg=;yCQEv{v)oInY?$=2yj=gYde&b#xEgoen&+-{J!cTx>>x*)yV@fcV6-N{7#QY$ z9w)4qdw8S(Wc59OJV7^}EW!uZ5JvTyJ7wm~9k$5{$7BwysQviSEW&7tTt_KWx&p&q 
zBbrtetF7>s7y<*V+S zied(Whvbk^90%C3^x3m{gZ2S%l+Vr)wQO}zf5^$3 zoOHc$hs;*veN6l4Xw&J3fk$NNuy-2-lz#dC9X+TKUss7FR0yGvd)PgWx7_>lNLAHG zZgjiH(eL-Z*bjmkU`?XSU6qSOLE_QYN%w&mtepj<1QeCqi*eFErNuJFN}^$RoCd`L z2cl(n)^_E}m5M!k-rR7SX_nh26aYm=ZSA|kx>vwg$b;FfC=){P;~;%g-ZOmP3^02z zs+CDh8MjZL9#($~tF#N+oCuB%%t$+IM4LbL^$%r6#thbIW1Eeiz7&|+^X2{ykBDtwVy24^s zcc`OCO7I#LA!sV?O3EnOsu)yPKR!Mt| zIc0Mi1U=YXm|`VI=%QdQMKlSzQLy71WULzgb=^dl%NIWt+guY?;J0tdNNE7lUIHrt z>q1LXmnQF)3!g-J$DgdgVmhk#bsjY$Kh!M3O@Vz1j}jw$W8=MxyNQa1(^*{}3)~Gw ztR!l(&YBN4o{-dK_~ku2Gy}yhEGRGo#$;&;Vii2o7-EBOzknD5g@=QH7%!9~U0?0? zSWvnBwGEmKP(#oUY|MUzWH5spZdz*|2&M?!=iUZT%q5Da*-)s09kc81!2xaf&ofT})4BvmeJJ<}eh>9ntplx%7^s zV#OMDO%q@xg%ySZwge|p{`@(J_p!r-!(`At`RvUOF!(512}&}q?9y!*gjt84A5Srj-1E@!f@c8M|_io=Fs;atEJzdkk(TyQ? z(UpBQd3Zzt%l7j0wZMH_3lW3+64v`t3$dU@`wHFxGWG703{+NJd>bsLRmdR%y&uOV zyO--IJm5Y9kw_QF3)p;s#MY^D735kqrbvh~ptNumI=`6r;IVe^p1^z>+Z-t-@@V6= z`x{CJK|LEw@6g-)8uv&Rp69M0^0veU6SBhcK?LK=9UWhxScdHawW+X59pMgwZggp% zQlHV{K|$;jPCU#BqBW$+7Nj^%1|@znU(_Xms1$)?0bC5RkSC6Eqh0!{D0J9b*36tc z_X*PxOOJ(CRvIw(sAwQ#APTJzNog_b`Mq7Cq1mt{rVg@Cb-oRFJ4}nJOU1ITTVG#( zxv3oRBzKRO)%%A>uA$|JU8tI;n8tXJru`wo0=7AO>Qp4t6;h0HK%FLK@;cx4S6!-S zc^aiAt)=zxF11M9hz8~d+ zXYHFWm^<(;cj~wqk2C7wpIxLFv1iZ2w%>tmW8eYta>3WDys)&J#SKg}pKe?^1v5SL z08zQ1=EwCPh>P0@K@HYwQ~7 z&BDiQ{reYF$tL+p^XR&-@-0x_oAoEF$$gyX0|tz59+F-9q>X;OzGp{3wM&TJ`2l8WaSwTNhnv(W^yx z_?2h*Bp*5Q5=!k-XpYNTY)lM|J5 zhnJ`?-se3$JZWq6-mGvaTf9FF@B<__x{_@hbwpMn7=7k2_1yoa)h>Emjd(eu% zu(W`!9c>V}BM>kW&7+JZh(^hA^~Qo(DLIh<8LdY5@WX0h4b3Mqf$fFm<|LIRydV6h z(vQoQssc}3LIv3wkd8=_5bQ#lKo1%z0heb#(17V-%-iwg`Sa7oZVnM z988@)cuSxn0jTV(Z9q%24>(YC2mx}n5H@zJy;Xj#_4|35#a|Fba(8zpiKBqMn$VJ> zpqbeM=p{n>@|HEz{4OSE5P3*pjqgAPKD9WUB>@uSsc+{A!trX%Q+LfcO!j7chqp!W z>Uy$**Bln`dgD8aYpxudqduIp6tJPpNrHKpMDN&mru-3+rS*}qIW0Z@YUy%c{|_vI~s{kAap!`~Bw(h{0kdOj;y zj#*@v-4*Py^c2yGH#*;AN&4Ts{=s``6--OS8zpw)dMy<;fvq{%HV9$)(sfz)!<{yu z%ND#B$&|>rH;*p&LAauclXTts*GOtq_o~0)zg`wtLq_l7Qu5+O;VSz}EBdIsCGNR@ 
zh}RF!12VnZ3kY8{c_9h4-NTuV{rAk?rmwT_Jt%!Gk^$ozOURT(bcwx5&{sWqp`k0Fq@rn;VzsKB(y#(fY$nHT(n5 z7x3)>ttEFL4~13s7X@9NO|-0}^q~5iU`_#i%|$n{T%aEkeSG=#_lqGfz1M5K`?=G> zSw5ud)lQc;6Ar)2_K_P`G+F(7%Lze5I}V#o0kzGhsMRP zA!|0@)fIH%kssq$v%*K^Ui4yr(kuUc`X;>MTFZoz@1r9hj{*ny<_jBbp8B^xI%4(D zryUtLc5GBkjAgXWj)ks}EQ9xQR?jiH30oX?&t9fKIlh?sRLhiyIZOO}lApCW(3C}y zblt=kY`yximutd^X5#YSoV8Co$+)dqdG#4)(rlt-NgC{5-r*x%3kXgmRO*(G^Y^+u z)a+2rW2DhQ^W)?^YuWVDU#lO(0Wy)83HNOc&S0TdBbMx6Lu_u5KSJ!TE_t0^LAUyf z%#yskkeo@j5#MQ&3nHz0iV>q1m`D`c2b@WZk_QCnZZamc*cac={be`b$*-Mx)gE;; zgby}%j`GhOMH2M9Y~Ket^%f;rGTFlc4HK02Kq1p}P?8aeRbp#QHGG@2qA$6M%jJaO zY+9($3x1}RZ)0%m@jV_GF?`PM^LK`Z9LthAH-6mVH};2r+m{l?JT951f5R@-p}SP{ zz~K~m;Xd5BQ5D$M6c|=1Cya2==6k{X)#w7JHrg<-wU_0@eS7y7RV}O*?~rxoatGLF z1mkt1p~peDKCav87{b@{DWCT0HMn=z?<*rA#j;VTjUF8}nk2Ujt_zBfHO4P1zha=< z!x*A<;fvC3eFV*ibLzRcD>1{^#cIi1<|Nr*hDZq}QVa6qz+fpzgOe;wO{uq&7c6nj)6L|y$jduf;McKff5_TTK5fH$?m%fqkY*NGq7FaC`5}q; zp50ww-%%sr#hi5J*CKY2Y~l@wo1Hs$NGpbg!7!lQOhulZYC6Im1bF4%Yc zV&&~+jI?b*>uRJj*#jt?=J8&<6a*DXaS$snNu(^Ow0rg{;uASJxw_H3zINoCGln>) zd)x1ci>m}ca5ioBF)LYPNL$Vs9Y{*RGLW?-_A?k|!mK23Swf=6-XHu;^R*xaN?M>> z6!M}DiGPb_d;Hh8*5|e=KoNREbS=2c1ripox$-3>mF{Yh7ik8TQNV>8$%&WuAD=?z zg}X*G@}HjG-fvsyp#$?BCYGwK1mRs|*uul+6Rr-5kPp%7@33~w#E7(sO=&4rGM;A= z&-)q&hQ8C&j{GYY`XspkQU~vdF=km;rCW@%b5F!6EYNPs=zTwwbV!-;+)Li-# zr4MS4(F@0B>j6DzJT31X97*l`)5bXHru|r{kXMNs%?dFj7-fqqxnD_+NyNO1dkfeN zGMh4N2wr{sSiWW6o}D`f1q=4&Nj5LyyZW%5q7Hc{Uazt{f4eMf z_KPHJK|vydCTQ#gz?)BgvLcCrnZs5~>=%P4BGh*~wMC?pCMF?OKfE(XK}>`K3H^Nm zQVz%@pzI=l>g*p)8n*AaKCfQ9-0Vz-EHL~q7_qkApNgMNQ7?rsSMmv7#fcuTf;QRS z3QB?2%5SSVWTZrV_b`m|ZR#U^OCw}U@}fr*3cL)vDK!IS%j6O4LV}0;*+n^7 zy24(_P_TaTBoCHhia2w;L+2SeC~=W{3nF9>6<$&F04HyqL~Xob_EtTdQ&u=Rr6NuJ zBM-j(VS*L|gG!?0d-qmwS3(|z;o-yH{L-M{8uQ^riFC><3L1}YQBodkLN$?XoWp8_ zRJU%UMvW?mPe(N`V9x&4BtT^3KUr<`M_o^SCRQ+b&&01p7w%;j5-eAc#jM1AY+>1^ z>$RVM_RN`5S5>vPp<)7ARqx(QFnm^47C86-96qRhD|!0#2cBS$M(t;PfgQO-tBh?u zs$-?X27gCnCb(QOikvgmBtPs}f<)N1q*xH=AI?$s@U@l9cUJQB@fqsp*Eio~s>Db& 
zwGFR#3@6;3ZD*IkUVDUiM)*+0NOj9HLgcX#-ax-+7tR+)h$&5sG(ST|SHg|K-{ z<jSTWHb%S+Js-PnTo>4(1$y}5gF!TA|Ja=G4wuxqYNYDpuE^@vca zD=WKV>1(`fp-cAk)Cw{Aw~aO~J|wp2#JP_i^5Uf+ z%Fn98_zVuFO}x4^#R*+%!90W_On^f*f09+qJ3Bw2ZIrsbDJzGdj)EeH%ewEqb1%*x z{mpaZqGP)$sM6`!gzt)8lY!vQiARYBdLX1WtPe1hHy|Z02o5=@fBL#LLnowoi;Ti} zlVDji9eG9Be-o_j7cFYNH|o6F;;VmSJy5)6Leu_Lq%VN*^)BD|&@;lIqA`&^V)G=hrvK~U;9yF)_m*6sTpMN12XerUnoGAtT|l4#RCK);T)6LVRC@Hn zU&i;oOP0q)HEt@45t1)55yGVgHNVjmit5#UWfni&>2v4$h{QP|J=pqUVvq-OV5?=@ z5+o4s51VipgSbso=ikr7?r2y+YuN73&J+ju-HiSVDW5xY6B zj3`a~gptEyy=(70wdxy2_1vN8m|Ez06gw8939zLv!q7ui;ycSPw2E6G^A=|!T zW6>1IS#@FF2_GTCE4D63u+BvNhWgh6)$kv6Pb5{S+OVjwD;5{0J{psVPw}XgnjV z92+MpKdh{5VPjBLSARjWj}qPw^zwdO7^3nP=^SLGnxw9%M;K_!B4_+4X4&_{+!J%< zh>cI1&$#-h<+KBzb%U65>h)_IvYRP2aZ+pMULB4lcb z5M4pRx2C4yvEoM#mt9_S1Q45$(5g~V31AZg29#*8(vMA}$47vT=fM%fg1P zU2@hc@5vXY7WFw)I-c0y1$^vZ=C^G855Ma)ls+VDiqrO_lD87(?TQr&+qZnCVMPp^ zH}qPt^O38d#z3c*{&zZK^umSAkGUT)Oak&cGO*s=BBx|Wr9>)Ms=C!H-a5K$(*Ku;8lG2_IIo-Ckwmr_ua$cXe8HjFr>!D2PH53-!r=I-y}QKl_6%ig|>Poc#6a`+`0N}otX zs3kWXc;1l4{!I)J_r^>DVG<7?p9H^fNW45Wg|RDo2bf+bG%YCjNX+D&9wbZm-k(M7 z*`k23^=;IX@I59jtuLQmFm?L$n8?VAcrzECKq|U0ps?O3uz@g(=_ep`qIX3)b`sXL zy?!>Ike|DFu|VkGY^32{G5%(~yw8kksn<|uB~nRrGs`;Utjd9Qk&`N({a1yOm%2j+ z(o5r8cU)C&x@?GM7sc|$%`5JA5M?|>4-sqMCJLVYvQ-S$lEN|&b}4lEpyxOLWIG%8 z`7*7dK-@h_7i7em2YlWccU!~ZuTc$Q%jCxwcunZ$RqNQDIN?IC=9y~Gq9*ryi_)C1 zFVpeJ1Gdd^{dzl&vgR7ZUBT6Z+QU2cz&v*|OHkn$3C~PVVNgXPQDuY!1F^NE|R8vdy0bvJ0VqNi}gW}Ttr)|=>sfvr5S zF@Qz?@AvOr-~aUeIbiB#{P5ntuB4d3ggM`Gt$0-?Ul+e?1aihiS`4NJg^HJvtXZfy zsDC_DlMU1n{u{||z~{?0G=iBlSc4H7lIgXZ8L`P5oJ+~4eo0)JY;;V#^S4VKKnhHD!NSyv~P&xc9A#+ z9ZtD)sRg^@Z=9+AMNWC?>RJK}RUnEovwm*agw@2xin6f2Wxz3_ey{SJ@K31NJ_*nw zeU}-#HPbsKd)c>1Ws)t!4}HAcSA!Q3<|}xRiLG6*|#qj*(!Q1X5wH*gz!36Ct`kS9qRQqp-+x zpX%#17E%N(VcS6GS182%xSC$53S#0g#R{4?k=dq{QE~FaiL*KKoHB{=^>aI0X-lRy zCLx5vk04M|Hg6En66tM~%9=oKDz8s4?+eu4P7}RwYnQ6iU?6~p8$~UI? 
zVW6;!Jm31SCkhSx{dYaArY&|0dk+d${VOgtqu77y z^lAUP)O`P#rfj2t^sx6&QaOhJ7b8s zs!El|E(sMjvN9s}+viN%q>K+3(wV%miYtNn+7FS+oFzvZ-RBiKjibYG-Ebbo2^K&) z&C-u^6RwHf7P@rkD)rnUI0JdC36&J3pkOcP?jm}JTLpo;j;~ikGBP9AaJ#ubM}2sS zyQXZeht#a zPc*z;5MPkJ9M^dDCPiq<)ac&&wZ$YRR`Yg7Ig>6A4QA1sJ?sEzVWg4a$)h$1&0cFM zeQG!Zn9~6J9;@&G(K__O3C&8+zP1i=5x>-bHx+ss-Be7;Rq1b6;FCl0x@@SA`H@L+ zvS0HQ*T|13sa(`#iOz~3hW=|qE1LqR`+cRAdEPn6+^h52lHj)Ba<3s8T z@2$$D=ZFwG1+|)m$@3R1pxMt(ilA2<5>1Q69wh(f(dd6PW*svcI35L>s>8mvmaJMd zty$9qf2AK96f%W;;ey#C#k>9F*9AD3AG5*)2UEz8jem#Zm85?6oxE_jSy_iCc)^yS zCx^muCB$FV@E2D%##ENj&;+@FWED1_AT=mXaWQyh{_vbfd1QdT@BM;;P~JMeFgm~7 zdNyhJW-W*2^0s14Je}SSX!|)GetCO=MVtzQKp_q{6}&o_N7X+!T%{lM@ z1>~qisw$b+jz4vjDmg2Jw3Jr^JmeZuwp4tqZ4t_=iEnO5dKMlr9H5{;M*4+jK)>wK z?QyM`yg>eObi6}Kpy|?OPreYa>Bruy-*(XS9VrymYE)=G0aQThO<5Vm!f~{4)WMh! zg-Ft|!@^NpCy}uq*tAuU-=cL`)%U?eR^?yS6x-=rLWV>F?lN+NBo!_DSq6JabfP04 zEIa845&^?z#m)%)erNC=&SfmICqI5E+-eklSn_}zm6n+`&E3k%=(KERZrxi<G9} zv=1ny0rVq3;JmfwYyLcDY8{LUY1p)>Q@zJ;y`QkFN=8dc=%N}W0|aCTM!eG#z)^Qo z-x?mlr3)A6*Y2%y?jKNThy(K#Mwl>n;$Pa{}oX);d{C`d|n5qgN|6!((TU**1L z%`@LkE8;1+mfh8Tes0!Dhei8}?WJ-JGW_c=J)=-uV1M}aI=KcKEy85|$s78~OYPY) zag{sNyd%f1lv7!PhQ=vMY(AO|!bsnYn>D0JK0(z4N{q-scws+eUV?o=WEVvu78kINl(50l@R=duT_aK)Plr ztc+kEWGEYA2LX?yf5+}3`GbIJE>W~#p`2=B!oK3mhusg`By#*rTe9s)+pi<+YCv#i zMQT*h24ZPAnX1^nBWS#8$EtTZ%t=*74yd_}H)I}Nd00gnu z3btX`t_5%1O7!4L+nma7Q<^k-RP-!JC9ffu$ySTZ_5}p1!C(C@>mtf9WESDfN^`K4 zWH2Qqg<2l^m4=#U`-nt`rV_KYF^)MpN((hIODYl(zNs^k2#pi;h|1}|7F znwb?aciVnF41Ar{j^7EoD$p=R@}~f1HsS7p+F}cKL+D+yYr7#|2(E!@x&Sb#5mN{o z#)O|Vsa2>}uo|PfjH5H!eW03LD?)f|tD!2<*5@^+UrS)(10Zu`*c=TGmWRxYk`67Ftut@Mb`GQNKgKbZITjg1oGb^JWpZ* z1adh6*g05FglqlSO@$6WhohCi3KdDMqi#fvPjG8x>85XmSSbZ04wK#ecVv15bx1sZ z{5AC@!XY485NYxGV=g{rvldz<=jAd$-ji)jKz)yZWUjq^ZC%|&v{d1>5ohS>>nmpa zh_26t6{F+@c*1_+!ur>*jq_tM%#944cEl7MxZKRClWl00@q?l~$Z+4uv;}$>Fu4)E zXGQCb1Q5xgp;072iq7E>B`8HBl}S-*&B~SCWMmGL_7DNmGW-v`=MQ3R9W7pZsj3uY zCQZ5uu(xn4R=}D-(8iC?SZq-)CR%`Q8a+>fM|_du_U!|-+e1mofQ%lbC+j|!zp!Y< 
zB{CPD==ME(sL$CX*$7gJlnxPY27LzhcUPs6sA%rIc@cbl@StGz$&yFM4-|`bR5o>p zQdA>r7NgHs+q;z@7t1MV2`GST*8<)DL9j>>LdV+*N-fS0;mWK~7U%C`G zZnc6%%(uoyoqw@c6WXy?15Z3z0SX&!K9YX4VIDnyz5*RQf-9s=Y3y&`E&(0L<#LZ# zn`!h2?xde$)b@OEGt;*o%qnhs zVPyEg4tMPmhSlAov&Ruy{3xhJ&*gbJ*xN66b;Yy(4i6?DJnFP*RbqcK&yDSwr~KQR z4GW;(Xumx9WCfy=JaT4dzjmnV&V-SohD$`?>`i?jkh@nFhGBeA_aS8L$&IAK0AuSD6{ikaGSIV=KY*3+NxG8!HCaPrfD_16m zg-JiSh0Z&Nyw;NKsWC`D2i;wA0m0H?FQHhhc6aB+HklGf$EK6GI2=_M-Cm9XC{fvx)Qe)O1anp&W=dCw3JUZH+ z8^@uY%OkzK_nLnOWh-SvK-^D_2*5M5;{JUsFx(J8_Euj(*)~IbsOY+ZHMZS-<+BOt zL1DlU`9D=%dpwkB8=gL`nF@_^h?r7qlv+7OO*QN&htjYqhft9krsFb*w=c3;{ecoTBhM!M#!3ii0d<3hnwbs3_<-`>$L9kOg{h0Q z8_ID-Ev6u!WT}|K1v=kjX^^#kIlh!oEHP<7sSDW;(7X*|z%MClq6p9l2=`h2ze2+k zUyFP%XhU?YE4N$U&dbrG)cJPuBXPLXioiUvSV(^|JnX7JF-=25>|i=$3NY0vn|}gc z|Dv{*W;nEGAy-2)K^{{%U-wwVf7I08NXhbv>@itB>7S5QZw&bi8!o*G{N`ZzauZZ{ zlUf>ooDK;$kS~Ba-@mn_>p3_stfBiMzrak9WB@dMa8Q>{2Q)qGzw41doLimQdCLo& z3m&jI6~q0+QyQWs&`8HlxR{>~PBJ=>2d5@an7XqyhHWTTT%ni9`&!QejBcwdxz0FUj2!%4?>;Nq+ zkzn~vd}jGg!*Ihw5Xqr1g$vIyF(7Y4s4cxh5Yy8{!Aj-U;kWCUTy^#i9u8asSJp8CJnsVy88O$RsFK2 zzP>)mCtdZcn5*L{yyGiTYXO2yNJj#*+_zO@86Y2sV1Y@xvv(=#9i9+oiH~Q0aPmft zExx!uBMM3~xXNd1$@bVbx2tyVnA>c?j*}n)D)%QBKZ=56{U`3laXV-j3W^Au8l$1V zFS}^vUfA0@}A+khDb=@C&18V$Dw(9}2=L@2zM{|&kV zI81yLJDb|ei2b43h(IJ=@sUa}c|Z$A!QMD-_yHKx=d>2kX*I71uoGU9)FGIh$`bq$;%73xJm2 zj4tKpbAIxk$pI2zq~vE5f&n65tk+Rv1Ux3XYkQS3Lx7znxS`6*O#lySh7TX~WoBw~ zfH#7+q-iuxQ@{z^5VeerjWw)(LC#^BJ&8Y&@j|S@N7A-A-R!;qyPx*!1q2~lrcj7P z8TB6p5fM%tHRV^Jg2Qr$Zh&MLdP(zLB?Lbg61w9sn!&-sOg;!I~wc5U-Lz{yg4@7c4i z0D^pre0pS7&Ie2Tb*9lCZtCnHs0UQvy!qt3_jK#=p1XC_MWX)fLsY(KF7=fuanp%Z zJ@TH@OTq90L6_^l0Sf@))5yhRUW9%fXerqDv^~(Wf2)d{AEkK{3*-&>T9we_?=Cxw-gNu-~(n$njAN$w6!PzZi5#D-}B2E&~!p};wWfd zp03iRdTFasGDM0-S&pfK%X5Lw)Fbmw&_)bbLA)F(dDso|KA5N|3Ut8gMW#&Coi|1@8aN)%-*qg&<(h*j%BH; z+{XCDGt#=}T*r_MZ%1kYbEV;hLf+SB0cw_#<77PUJCuByOG8s3h*wwZokR}~3+wUQ zsk^=+i0wZGQP1!CguzkNKa1pO*a_I1c>LUW7dPp5;>YVMq>@gLo*>!Cegwd5B6!~w z9Ly=Y^8(cmcXzZ&6<3QUAliU=nh0YYX+g~PFeud~Ig)gDWoh@45Jc$2j}~PavPQ#s 
zFMKX{jjh|PiVmu>6z&S%43Gs#Lw?mGm*;ONIhT{u z#lJqFXpYnNn3*GxYlmJu%sr|3@~xVhYwEp1%NT~Ow)pz>O=b(lmAwMzS%5$Qc7TyQ z;kvqMDl5A2DGfAh*81BRq6cAn1YmOmIpcYiL8+gDyu31T{-4Le;90KW`K*BI;>uoh9wP*za|t6l zOnssH%GRbuJ69aE(zP?JDs$kC#NE!HQ3P<(-ZzOy;E^>)g<51=9W68~Ef{^>6vC z+d${g0?!`F)&~)_=y9rC705dAD=@H9*RD`o!jym6BzonXO}&<0Ufs2)HTq%eAMIm) z39M3&d$J|>M*?cO-=D<5we7Adul_>j(XQ*4)#+Hcp=W?r-n&)f;xhY<)nE(P3+&j@X(!?fWy@#XWTI#+r>~;hGo~xjQUv5 ztv9{$I>l|s*fFb<$LtR(@*EAB+g|MKpytLiFcG(tZTXw%Mv)9D^$*AFdGXIY>`yEU|O4nihaM{nE!zr9LWo)0GCN@<-!x zDHrF(0(IFvp!v>fEzgf#pUp~n^xCGzB7>_@uk&)h;%o9J6RA|vxsosbeTb!L tf1W7GV?+LV_4nUc8Y$KE|L?9+PE5L-5tv=Q*qQu?+ZyibtDJ3z{|7Q5t9Sqa From 38b7f516bc52df2d49baff23b15c74359fd35615 Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Tue, 31 Oct 2023 16:32:39 +0100 Subject: [PATCH 094/155] fix(ds): Change the directory where message database is stored --- apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl index 32ca85935..4140c0ed7 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl @@ -368,7 +368,7 @@ rocksdb_open(Shard, Options) -> -spec db_dir(shard_id()) -> file:filename(). db_dir({DB, ShardId}) -> - filename:join("data", lists:flatten([atom_to_list(DB), $:, atom_to_list(ShardId)])). + filename:join(["data", atom_to_list(DB), atom_to_list(ShardId)]). %%-------------------------------------------------------------------------------- %% Schema access From 46d8301bc0b1d75eb3cdaf293555fd52016ae5c4 Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Mon, 30 Oct 2023 19:55:14 +0700 Subject: [PATCH 095/155] feat(emqx): expose timestamp function in `emqx_message` So that the code that relies on it would not need to guess clock source and precision. 
--- apps/emqx/src/emqx_message.erl | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/apps/emqx/src/emqx_message.erl b/apps/emqx/src/emqx_message.erl index 509d4c90d..4ff36504d 100644 --- a/apps/emqx/src/emqx_message.erl +++ b/apps/emqx/src/emqx_message.erl @@ -66,7 +66,8 @@ -export([ is_expired/1, - update_expiry/1 + update_expiry/1, + timestamp_now/0 ]). -export([ @@ -113,14 +114,13 @@ make(From, Topic, Payload) -> emqx_types:payload() ) -> emqx_types:message(). make(From, QoS, Topic, Payload) when ?QOS_0 =< QoS, QoS =< ?QOS_2 -> - Now = erlang:system_time(millisecond), #message{ id = emqx_guid:gen(), qos = QoS, from = From, topic = Topic, payload = Payload, - timestamp = Now + timestamp = timestamp_now() }. -spec make( @@ -137,7 +137,6 @@ make(From, QoS, Topic, Payload, Flags, Headers) when is_map(Flags), is_map(Headers) -> - Now = erlang:system_time(millisecond), #message{ id = emqx_guid:gen(), qos = QoS, @@ -146,7 +145,7 @@ make(From, QoS, Topic, Payload, Flags, Headers) when headers = Headers, topic = Topic, payload = Payload, - timestamp = Now + timestamp = timestamp_now() }. -spec make( @@ -164,7 +163,6 @@ make(MsgId, From, QoS, Topic, Payload, Flags, Headers) when is_map(Flags), is_map(Headers) -> - Now = erlang:system_time(millisecond), #message{ id = MsgId, qos = QoS, @@ -173,7 +171,7 @@ make(MsgId, From, QoS, Topic, Payload, Flags, Headers) when headers = Headers, topic = Topic, payload = Payload, - timestamp = Now + timestamp = timestamp_now() }. %% optimistic esitmation of a message size after serialization @@ -403,6 +401,11 @@ from_map(#{ extra = Extra }. +%% @doc Get current timestamp in milliseconds. +-spec timestamp_now() -> integer(). +timestamp_now() -> + erlang:system_time(millisecond). + %% MilliSeconds elapsed(Since) -> - max(0, erlang:system_time(millisecond) - Since). + max(0, timestamp_now() - Since). 
From 7a94db25c30029867be5fb3ab584a2313b6b2ae8 Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Mon, 30 Oct 2023 19:58:59 +0700 Subject: [PATCH 096/155] fix(ds): don't iterate over incomplete epoch in bitmask lts storage --- .../src/emqx_ds_storage_bitfield_lts.erl | 125 +++++++++++------- 1 file changed, 78 insertions(+), 47 deletions(-) diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl index 85f4f5aa7..4dddaff67 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl @@ -59,7 +59,8 @@ db :: rocksdb:db_handle(), data :: rocksdb:cf_handle(), trie :: emqx_ds_lts:trie(), - keymappers :: array:array(emqx_ds_bitmask_keymapper:keymapper()) + keymappers :: array:array(emqx_ds_bitmask_keymapper:keymapper()), + ts_offset :: non_neg_integer() }). -type s() :: #s{}. @@ -147,7 +148,13 @@ open(_Shard, DBHandle, GenId, CFRefs, Schema) -> || N <- lists:seq(0, MaxWildcardLevels) ] ), - #s{db = DBHandle, data = DataCF, trie = Trie, keymappers = KeymapperCache}. + #s{ + db = DBHandle, + data = DataCF, + trie = Trie, + keymappers = KeymapperCache, + ts_offset = TSOffsetBits + }. -spec store_batch( emqx_ds_replication_layer:shard_id(), s(), [emqx_types:message()], emqx_ds:message_store_opts() @@ -177,13 +184,26 @@ make_iterator(_Shard, _Data, #stream{storage_key = StorageKey}, TopicFilter, Sta storage_key = StorageKey }}. -next(_Shard, #s{db = DB, data = CF, keymappers = Keymappers}, It0, BatchSize) -> +next(_Shard, Schema = #s{ts_offset = TSOffset}, It, BatchSize) -> + %% Compute safe cutoff time. + %% It's the point in time where the last complete epoch ends, so we need to know + %% the current time to compute it. + Now = emqx_message:timestamp_now(), + SafeCutoffTime = (Now bsr TSOffset) bsl TSOffset, + next_until(Schema, It, SafeCutoffTime, BatchSize). 
+ +next_until(_Schema, It, SafeCutoffTime, _BatchSize) when It#it.start_time >= SafeCutoffTime -> + %% We're in the middle of the current epoch, so we can't yet iterate over it. + %% It would be unsafe otherwise: messages can be stored in the current epoch + %% concurrently with iterating over it. They can end up earlier (in the iteration + %% order) due to the nature of keymapping, potentially causing us to miss them. + {ok, It, []}; +next_until(#s{db = DB, data = CF, keymappers = Keymappers}, It, SafeCutoffTime, BatchSize) -> #it{ start_time = StartTime, - storage_key = StorageKey - } = It0, + storage_key = {TopicIndex, Varying} + } = It, %% Make filter: - {TopicIndex, Varying} = StorageKey, Inequations = [ {'=', TopicIndex}, {'>=', StartTime} @@ -197,10 +217,8 @@ next(_Shard, #s{db = DB, data = CF, keymappers = Keymappers}, It0, BatchSize) -> Varying ) ], - %% Obtain a keymapper for the current number of varying - %% levels. Magic constant 2: we have two extra dimensions of topic - %% index and time; the rest of dimensions are varying levels. - NVarying = length(Inequations) - 2, + %% Obtain a keymapper for the current number of varying levels. 
+ NVarying = length(Varying), %% Assert: NVarying =< ?WILDCARD_LIMIT orelse error({too_many_varying_topic_levels, NVarying}), @@ -215,7 +233,7 @@ next(_Shard, #s{db = DB, data = CF, keymappers = Keymappers}, It0, BatchSize) -> ]), try put(?COUNTER, 0), - next_loop(ITHandle, Keymapper, Filter, It0, [], BatchSize) + next_loop(ITHandle, Keymapper, Filter, SafeCutoffTime, It, [], BatchSize) after rocksdb:iterator_close(ITHandle), erase(?COUNTER) @@ -225,9 +243,9 @@ next(_Shard, #s{db = DB, data = CF, keymappers = Keymappers}, It0, BatchSize) -> %% Internal functions %%================================================================================ -next_loop(_ITHandle, _KeyMapper, _Filter, It, Acc, 0) -> +next_loop(_ITHandle, _KeyMapper, _Filter, _Cutoff, It, Acc, 0) -> {ok, It, lists:reverse(Acc)}; -next_loop(ITHandle, KeyMapper, Filter, It0, Acc0, N0) -> +next_loop(ITHandle, KeyMapper, Filter, Cutoff, It0, Acc0, N0) -> inc_counter(), #it{last_seen_key = Key0} = It0, case emqx_ds_bitmask_keymapper:bin_increment(Filter, Key0) of @@ -238,51 +256,64 @@ next_loop(ITHandle, KeyMapper, Filter, It0, Acc0, N0) -> true = Key1 > Key0, case rocksdb:iterator_move(ITHandle, {seek, Key1}) of {ok, Key, Val} -> - It1 = It0#it{last_seen_key = Key}, - case check_message(Filter, It1, Val) of - {true, Msg} -> - N1 = N0 - 1, - Acc1 = [Msg | Acc0]; - false -> - N1 = N0, - Acc1 = Acc0 - end, - {N, It, Acc} = traverse_interval(ITHandle, KeyMapper, Filter, It1, Acc1, N1), - next_loop(ITHandle, KeyMapper, Filter, It, Acc, N); + {N, It, Acc} = + traverse_interval(ITHandle, Filter, Cutoff, Key, Val, It0, Acc0, N0), + next_loop(ITHandle, KeyMapper, Filter, Cutoff, It, Acc, N); {error, invalid_iterator} -> {ok, It0, lists:reverse(Acc0)} end end. 
-traverse_interval(_ITHandle, _KeyMapper, _Filter, It, Acc, 0) -> - {0, It, Acc}; -traverse_interval(ITHandle, KeyMapper, Filter, It0, Acc, N) -> - inc_counter(), - case rocksdb:iterator_move(ITHandle, next) of - {ok, Key, Val} -> - It = It0#it{last_seen_key = Key}, - case check_message(Filter, It, Val) of - {true, Msg} -> - traverse_interval(ITHandle, KeyMapper, Filter, It, [Msg | Acc], N - 1); - false -> - traverse_interval(ITHandle, KeyMapper, Filter, It, Acc, N) - end; - {error, invalid_iterator} -> - {0, It0, Acc} - end. - --spec check_message(emqx_ds_bitmask_keymapper:filter(), iterator(), binary()) -> - {true, emqx_types:message()} | false. -check_message(Filter, #it{last_seen_key = Key}, Val) -> +traverse_interval(ITHandle, Filter, Cutoff, Key, Val, It0, Acc0, N) -> + It = It0#it{last_seen_key = Key}, case emqx_ds_bitmask_keymapper:bin_checkmask(Filter, Key) of true -> Msg = deserialize(Val), - %% TODO: check strict time and hash collisions - {true, Msg}; + case check_message(Cutoff, It, Msg) of + true -> + Acc = [Msg | Acc0], + traverse_interval(ITHandle, Filter, Cutoff, It, Acc, N - 1); + false -> + traverse_interval(ITHandle, Filter, Cutoff, It, Acc0, N); + overflow -> + {0, It0, Acc0} + end; false -> - false + {N, It, Acc0} end. +traverse_interval(_ITHandle, _Filter, _Cutoff, It, Acc, 0) -> + {0, It, Acc}; +traverse_interval(ITHandle, Filter, Cutoff, It, Acc, N) -> + inc_counter(), + case rocksdb:iterator_move(ITHandle, next) of + {ok, Key, Val} -> + traverse_interval(ITHandle, Filter, Cutoff, Key, Val, It, Acc, N); + {error, invalid_iterator} -> + {0, It, Acc} + end. + +-spec check_message(emqx_ds:time(), iterator(), emqx_types:message()) -> + true | false | overflow. +check_message( + Cutoff, + _It, + #message{timestamp = Timestamp} +) when Timestamp >= Cutoff -> + %% We hit the current epoch, we can't continue iterating over it yet. 
+ %% It would be unsafe otherwise: messages can be stored in the current epoch + %% concurrently with iterating over it. They can end up earlier (in the iteration + %% order) due to the nature of keymapping, potentially causing us to miss them. + overflow; +check_message( + _Cutoff, + #it{start_time = StartTime, topic_filter = TopicFilter}, + #message{timestamp = Timestamp, topic = Topic} +) when Timestamp >= StartTime -> + emqx_topic:match(emqx_topic:words(Topic), TopicFilter); +check_message(_Cutoff, _It, _Msg) -> + false. + format_key(KeyMapper, Key) -> Vec = [integer_to_list(I, 16) || I <- emqx_ds_bitmask_keymapper:key_to_vector(KeyMapper, Key)], lists:flatten(io_lib:format("~.16B (~s)", [Key, string:join(Vec, ",")])). From 54951c273f2844d666dd605b85f3638d496a7be2 Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Mon, 30 Oct 2023 20:59:53 +0700 Subject: [PATCH 097/155] feat(ds): mix safe cutoff into keymapper filter during iteration --- .../emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl | 7 +++++-- .../src/emqx_ds_storage_bitfield_lts.erl | 2 +- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl b/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl index a67dbc0eb..ee2173000 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl @@ -154,7 +154,8 @@ -opaque keymapper() :: #keymapper{}. --type scalar_range() :: any | {'=', scalar() | infinity} | {'>=', scalar()}. +-type scalar_range() :: + any | {'=', scalar() | infinity} | {'>=', scalar()} | {scalar(), '..', scalar()}. -include("emqx_ds_bitmask.hrl"). 
@@ -523,7 +524,9 @@ constraints_to_ranges(#keymapper{dim_sizeof = DimSizeof}, Filter) -> ({'=', Val}, _Bitsize) -> {Val, Val}; ({'>=', Val}, Bitsize) -> - {Val, ones(Bitsize)} + {Val, ones(Bitsize)}; + ({Min, '..', Max}, _Bitsize) -> + {Min, Max} end, Filter, DimSizeof diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl index 4dddaff67..129c2500e 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl @@ -206,7 +206,7 @@ next_until(#s{db = DB, data = CF, keymappers = Keymappers}, It, SafeCutoffTime, %% Make filter: Inequations = [ {'=', TopicIndex}, - {'>=', StartTime} + {StartTime, '..', SafeCutoffTime - 1} | lists:map( fun ('+') -> From 74cb43f8b19ecf79bc20136ff594c61b00669594 Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Thu, 2 Nov 2023 11:47:28 +0100 Subject: [PATCH 098/155] fix(ds): Add unique ID to the key --- apps/emqx/src/emqx_persistent_session_ds.erl | 10 ++++++++-- .../test/emqx_persistent_messages_SUITE.erl | 18 ++++++++++-------- .../test/emqx_persistent_session_SUITE.erl | 13 ++++++++----- apps/emqx_durable_storage/include/emqx_ds.hrl | 19 +++++++++++++++++++ apps/emqx_durable_storage/src/emqx_ds.erl | 1 + .../src/emqx_ds_bitmask_keymapper.erl | 5 +++-- .../src/emqx_ds_storage_bitfield_lts.erl | 15 +++++++++++---- .../src/emqx_ds_storage_layer.erl | 6 ++++-- 8 files changed, 64 insertions(+), 23 deletions(-) create mode 100644 apps/emqx_durable_storage/include/emqx_ds.hrl diff --git a/apps/emqx/src/emqx_persistent_session_ds.erl b/apps/emqx/src/emqx_persistent_session_ds.erl index abecb72a2..9a9e05a7a 100644 --- a/apps/emqx/src/emqx_persistent_session_ds.erl +++ b/apps/emqx/src/emqx_persistent_session_ds.erl @@ -115,6 +115,7 @@ session(). 
create(#{clientid := ClientID}, _ConnInfo, Conf) -> % TODO: expiration + ensure_timers(), ensure_session(ClientID, Conf). -spec open(clientinfo(), conninfo()) -> @@ -127,10 +128,9 @@ open(#{clientid := ClientID}, _ConnInfo) -> %% somehow isolate those idling not-yet-expired sessions into a separate process %% space, and move this call back into `emqx_cm` where it belongs. ok = emqx_cm:discard_session(ClientID), - ensure_timer(pull), - ensure_timer(get_streams), case open_session(ClientID) of Session = #{} -> + ensure_timers(), {true, Session, []}; false -> false @@ -705,6 +705,12 @@ export_record(Record, I, [Field | Rest], Acc) -> export_record(_, _, [], Acc) -> Acc. +%% TODO: find a more reliable way to perform actions that have side +%% effects. Add `CBM:init' callback to the session behavior? +ensure_timers() -> + ensure_timer(pull), + ensure_timer(get_streams). + -spec ensure_timer(pull | get_streams) -> ok. ensure_timer(Type) -> _ = emqx_utils:start_timer(100, {emqx_session, Type}), diff --git a/apps/emqx/test/emqx_persistent_messages_SUITE.erl b/apps/emqx/test/emqx_persistent_messages_SUITE.erl index db025a457..52ba090b5 100644 --- a/apps/emqx/test/emqx_persistent_messages_SUITE.erl +++ b/apps/emqx/test/emqx_persistent_messages_SUITE.erl @@ -26,9 +26,6 @@ -import(emqx_common_test_helpers, [on_exit/1]). --define(DEFAULT_KEYSPACE, default). --define(DS_SHARD_ID, <<"local">>). --define(DS_SHARD, {?DEFAULT_KEYSPACE, ?DS_SHARD_ID}). -define(PERSISTENT_MESSAGE_DB, emqx_persistent_message). 
all() -> @@ -49,6 +46,7 @@ init_per_testcase(t_session_subscription_iterators = TestCase, Config) -> Nodes = emqx_cth_cluster:start(Cluster, #{work_dir => emqx_cth_suite:work_dir(TestCase, Config)}), [{nodes, Nodes} | Config]; init_per_testcase(TestCase, Config) -> + ok = emqx_ds:drop_db(?PERSISTENT_MESSAGE_DB), Apps = emqx_cth_suite:start( app_specs(), #{work_dir => emqx_cth_suite:work_dir(TestCase, Config)} @@ -59,9 +57,9 @@ end_per_testcase(t_session_subscription_iterators, Config) -> Nodes = ?config(nodes, Config), emqx_common_test_helpers:call_janitor(60_000), ok = emqx_cth_cluster:stop(Nodes), - ok; + end_per_testcase(common, Config); end_per_testcase(_TestCase, Config) -> - Apps = ?config(apps, Config), + Apps = proplists:get_value(apps, Config, []), emqx_common_test_helpers:call_janitor(60_000), clear_db(), emqx_cth_suite:stop(Apps), @@ -97,6 +95,7 @@ t_messages_persisted(_Config) -> Results = [emqtt:publish(CP, Topic, Payload, 1) || {Topic, Payload} <- Messages], ct:pal("Results = ~p", [Results]), + timer:sleep(2000), Persisted = consume(['#'], 0), @@ -141,6 +140,8 @@ t_messages_persisted_2(_Config) -> {ok, #{reason_code := ?RC_NO_MATCHING_SUBSCRIBERS}} = emqtt:publish(CP, T(<<"client/2/topic">>), <<"8">>, 1), + timer:sleep(2000), + Persisted = consume(['#'], 0), ct:pal("Persisted = ~p", [Persisted]), @@ -251,13 +252,14 @@ connect(Opts0 = #{}) -> {ok, _} = emqtt:connect(Client), Client. -consume(TopicFiler, StartMS) -> +consume(TopicFilter, StartMS) -> + Streams = emqx_ds:get_streams(?PERSISTENT_MESSAGE_DB, TopicFilter, StartMS), lists:flatmap( fun({_Rank, Stream}) -> - {ok, It} = emqx_ds:make_iterator(Stream, StartMS, 0), + {ok, It} = emqx_ds:make_iterator(Stream, TopicFilter, StartMS), consume(It) end, - emqx_ds:get_streams(?PERSISTENT_MESSAGE_DB, TopicFiler, StartMS) + Streams ). 
consume(It) -> diff --git a/apps/emqx/test/emqx_persistent_session_SUITE.erl b/apps/emqx/test/emqx_persistent_session_SUITE.erl index 008305671..5a14e0bc9 100644 --- a/apps/emqx/test/emqx_persistent_session_SUITE.erl +++ b/apps/emqx/test/emqx_persistent_session_SUITE.erl @@ -24,6 +24,8 @@ -compile(export_all). -compile(nowarn_export_all). +-define(PERSISTENT_MESSAGE_DB, emqx_persistent_message). + %%-------------------------------------------------------------------- %% SUITE boilerplate %%-------------------------------------------------------------------- @@ -131,6 +133,7 @@ get_listener_port(Type, Name) -> end_per_group(Group, Config) when Group == tcp; Group == ws; Group == quic -> ok = emqx_cth_suite:stop(?config(group_apps, Config)); end_per_group(_, _Config) -> + ok = emqx_ds:drop_db(?PERSISTENT_MESSAGE_DB), ok. init_per_testcase(TestCase, Config) -> @@ -188,7 +191,7 @@ receive_messages(Count, Msgs) -> receive_messages(Count - 1, [Msg | Msgs]); _Other -> receive_messages(Count, Msgs) - after 5000 -> + after 15000 -> Msgs end. @@ -227,11 +230,11 @@ wait_for_cm_unregister(ClientId, N) -> end. publish(Topic, Payloads) -> - publish(Topic, Payloads, false). + publish(Topic, Payloads, false, 2). -publish(Topic, Payloads, WaitForUnregister) -> +publish(Topic, Payloads, WaitForUnregister, QoS) -> Fun = fun(Client, Payload) -> - {ok, _} = emqtt:publish(Client, Topic, Payload, 2) + {ok, _} = emqtt:publish(Client, Topic, Payload, QoS) end, do_publish(Payloads, Fun, WaitForUnregister). 
@@ -532,7 +535,7 @@ t_publish_while_client_is_gone_qos1(Config) -> ok = emqtt:disconnect(Client1), maybe_kill_connection_process(ClientId, Config), - ok = publish(Topic, [Payload1, Payload2]), + ok = publish(Topic, [Payload1, Payload2], false, 1), {ok, Client2} = emqtt:start_link([ {proto_ver, v5}, diff --git a/apps/emqx_durable_storage/include/emqx_ds.hrl b/apps/emqx_durable_storage/include/emqx_ds.hrl new file mode 100644 index 000000000..c9ee4b7f7 --- /dev/null +++ b/apps/emqx_durable_storage/include/emqx_ds.hrl @@ -0,0 +1,19 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- +-ifndef(EMQX_DS_HRL_HRL). +-define(EMQX_DS_HRL_HRL, true). + +-endif. diff --git a/apps/emqx_durable_storage/src/emqx_ds.erl b/apps/emqx_durable_storage/src/emqx_ds.erl index 1e7f88367..27a0745bc 100644 --- a/apps/emqx_durable_storage/src/emqx_ds.erl +++ b/apps/emqx_durable_storage/src/emqx_ds.erl @@ -111,6 +111,7 @@ open_db(DB, Opts = #{backend := builtin}) -> emqx_ds_replication_layer:open_db(DB, Opts). %% @doc TODO: currently if one or a few shards are down, they won't be + %% deleted. -spec drop_db(db()) -> ok. 
drop_db(DB) -> diff --git a/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl b/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl index ee2173000..5666b45ae 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl @@ -214,7 +214,7 @@ vector_to_key(#keymapper{scanner = [Actions | Scanner]}, [Coord | Vector]) -> bin_vector_to_key(Keymapper = #keymapper{dim_sizeof = DimSizeof, size = Size}, Binaries) -> Vec = lists:zipwith( fun(Bin, SizeOf) -> - <> = Bin, + <> = Bin, Int end, Binaries, @@ -402,7 +402,8 @@ bin_increment( Filter = #filter{size = Size, bitmask = Bitmask, bitfilter = Bitfilter, range_max = RangeMax}, KeyBin ) -> - <> = KeyBin, + %% The key may contain random suffix, skip it: + <> = KeyBin, Key1 = Key0 + 1, if Key1 band Bitmask =:= Bitfilter, Key1 =< RangeMax -> diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl index 129c2500e..fe198c207 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl @@ -206,7 +206,10 @@ next_until(#s{db = DB, data = CF, keymappers = Keymappers}, It, SafeCutoffTime, %% Make filter: Inequations = [ {'=', TopicIndex}, - {StartTime, '..', SafeCutoffTime - 1} + {StartTime, '..', SafeCutoffTime - 1}, + %% Unique integer: + any + %% Varying topic levels: | lists:map( fun ('+') -> @@ -337,9 +340,12 @@ make_key(#s{keymappers = KeyMappers, trie = Trie}, #message{timestamp = Timestam ]) -> binary(). 
make_key(KeyMapper, TopicIndex, Timestamp, Varying) -> + UniqueInteger = erlang:unique_integer([monotonic, positive]), emqx_ds_bitmask_keymapper:key_to_bitstring( KeyMapper, - emqx_ds_bitmask_keymapper:vector_to_key(KeyMapper, [TopicIndex, Timestamp | Varying]) + emqx_ds_bitmask_keymapper:vector_to_key(KeyMapper, [ + TopicIndex, Timestamp, UniqueInteger | Varying + ]) ). %% TODO: don't hardcode the thresholds @@ -366,9 +372,10 @@ make_keymapper(TopicIndexBytes, BitsPerTopicLevel, TSBits, TSOffsetBits, N) -> %% Dimension Offset Bitsize [{1, 0, TopicIndexBytes * ?BYTE_SIZE}, %% Topic index {2, TSOffsetBits, TSBits - TSOffsetBits }] ++ %% Timestamp epoch - [{2 + I, 0, BitsPerTopicLevel } %% Varying topic levels + [{3 + I, 0, BitsPerTopicLevel } %% Varying topic levels || I <- lists:seq(1, N)] ++ - [{2, 0, TSOffsetBits }], %% Timestamp offset + [{2, 0, TSOffsetBits }, %% Timestamp offset + {3, 0, 64 }], %% Unique integer Keymapper = emqx_ds_bitmask_keymapper:make_keymapper(lists:reverse(Bitsources)), %% Assert: case emqx_ds_bitmask_keymapper:bitsize(Keymapper) rem 8 of diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl index 4140c0ed7..57af33d61 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl @@ -24,6 +24,8 @@ -export([start_link/2, init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2]). %% internal exports: +-export([db_dir/1]). + -export_type([gen_id/0, generation/0, cf_refs/0, stream/0, iterator/0]). -include_lib("snabbkaffe/include/snabbkaffe.hrl"). @@ -132,7 +134,7 @@ open_shard(Shard, Options) -> -spec drop_shard(shard_id()) -> ok. drop_shard(Shard) -> - emqx_ds_storage_layer_sup:stop_shard(Shard), + catch emqx_ds_storage_layer_sup:stop_shard(Shard), ok = rocksdb:destroy(db_dir(Shard), []). 
-spec store_batch(shard_id(), [emqx_types:message()], emqx_ds:message_store_opts()) -> @@ -368,7 +370,7 @@ rocksdb_open(Shard, Options) -> -spec db_dir(shard_id()) -> file:filename(). db_dir({DB, ShardId}) -> - filename:join(["data", atom_to_list(DB), atom_to_list(ShardId)]). + filename:join([emqx:data_dir(), atom_to_list(DB), atom_to_list(ShardId)]). %%-------------------------------------------------------------------------------- %% Schema access From 7cb032285687d88c66aa6f7bd9049ee31be39aad Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Mon, 6 Nov 2023 13:32:41 +0100 Subject: [PATCH 099/155] fix(emqx): Move bpapi and emqx message record to emqx_utils app --- apps/emqx/include/bpapi.hrl | 7 +-- apps/emqx/include/emqx.hrl | 24 +---------- .../src/emqx_ds_storage_bitfield_lts.erl | 2 +- .../src/emqx_ds_storage_reference.erl | 2 +- .../src/emqx_durable_storage.app.src | 2 +- .../src/proto/emqx_ds_proto_v1.erl | 2 +- apps/emqx_utils/include/bpapi.hrl | 22 ++++++++++ apps/emqx_utils/include/emqx_message.hrl | 43 +++++++++++++++++++ .../src/bpapi/emqx_bpapi_trans.erl | 0 9 files changed, 71 insertions(+), 33 deletions(-) create mode 100644 apps/emqx_utils/include/bpapi.hrl create mode 100644 apps/emqx_utils/include/emqx_message.hrl rename apps/{emqx => emqx_utils}/src/bpapi/emqx_bpapi_trans.erl (100%) diff --git a/apps/emqx/include/bpapi.hrl b/apps/emqx/include/bpapi.hrl index 1373e0381..ed7693e78 100644 --- a/apps/emqx/include/bpapi.hrl +++ b/apps/emqx/include/bpapi.hrl @@ -14,9 +14,4 @@ %% limitations under the License. %%-------------------------------------------------------------------- --ifndef(EMQX_BPAPI_HRL). --define(EMQX_BPAPI_HRL, true). - --compile({parse_transform, emqx_bpapi_trans}). - --endif. +-include_lib("emqx_utils/include/bpapi.hrl"). 
diff --git a/apps/emqx/include/emqx.hrl b/apps/emqx/include/emqx.hrl index 664ec5803..86a64d8bb 100644 --- a/apps/emqx/include/emqx.hrl +++ b/apps/emqx/include/emqx.hrl @@ -55,29 +55,7 @@ -record(subscription, {topic, subid, subopts}). -%% See 'Application Message' in MQTT Version 5.0 --record(message, { - %% Global unique message ID - id :: binary(), - %% Message QoS - qos = 0, - %% Message from - from :: atom() | binary(), - %% Message flags - flags = #{} :: emqx_types:flags(), - %% Message headers. May contain any metadata. e.g. the - %% protocol version number, username, peerhost or - %% the PUBLISH properties (MQTT 5.0). - headers = #{} :: emqx_types:headers(), - %% Topic that the message is published to - topic :: emqx_types:topic(), - %% Message Payload - payload :: emqx_types:payload(), - %% Timestamp (Unit: millisecond) - timestamp :: integer(), - %% not used so far, for future extension - extra = [] :: term() -}). +-include_lib("emqx_utils/include/emqx_message.hrl"). -record(delivery, { %% Sender of the delivery diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl index fe198c207..d8352df18 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl @@ -31,7 +31,7 @@ -export_type([options/0]). --include_lib("emqx/include/emqx.hrl"). +-include_lib("emqx_utils/include/emqx_message.hrl"). -include_lib("snabbkaffe/include/trace.hrl"). %%================================================================================ diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_reference.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_reference.erl index ec00f1310..6676faf88 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_reference.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_reference.erl @@ -34,7 +34,7 @@ -export_type([options/0]). 
--include_lib("emqx/include/emqx.hrl"). +-include_lib("emqx_utils/include/emqx_message.hrl"). %%================================================================================ %% Type declarations diff --git a/apps/emqx_durable_storage/src/emqx_durable_storage.app.src b/apps/emqx_durable_storage/src/emqx_durable_storage.app.src index 6edbfda9b..f106494c8 100644 --- a/apps/emqx_durable_storage/src/emqx_durable_storage.app.src +++ b/apps/emqx_durable_storage/src/emqx_durable_storage.app.src @@ -5,7 +5,7 @@ {vsn, "0.1.6"}, {modules, []}, {registered, []}, - {applications, [kernel, stdlib, rocksdb, gproc, mria]}, + {applications, [kernel, stdlib, rocksdb, gproc, mria, emqx_utils]}, {mod, {emqx_ds_app, []}}, {env, []} ]}. diff --git a/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl b/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl index c974b253f..17e873ecd 100644 --- a/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl +++ b/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl @@ -17,7 +17,7 @@ -behavior(emqx_bpapi). --include_lib("emqx/include/bpapi.hrl"). +-include_lib("emqx_utils/include/bpapi.hrl"). %% API: -export([open_shard/3, drop_shard/2, get_streams/4, make_iterator/5, next/4]). diff --git a/apps/emqx_utils/include/bpapi.hrl b/apps/emqx_utils/include/bpapi.hrl new file mode 100644 index 000000000..1373e0381 --- /dev/null +++ b/apps/emqx_utils/include/bpapi.hrl @@ -0,0 +1,22 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2017-2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. 
+%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- + +-ifndef(EMQX_BPAPI_HRL). +-define(EMQX_BPAPI_HRL, true). + +-compile({parse_transform, emqx_bpapi_trans}). + +-endif. diff --git a/apps/emqx_utils/include/emqx_message.hrl b/apps/emqx_utils/include/emqx_message.hrl new file mode 100644 index 000000000..a0d196fa9 --- /dev/null +++ b/apps/emqx_utils/include/emqx_message.hrl @@ -0,0 +1,43 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2022-2023 EMQ Technologies Co., Ltd. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%%-------------------------------------------------------------------- +-ifndef(EMQX_MESSAGE_HRL). +-define(EMQX_MESSAGE_HRL, true). + +%% See 'Application Message' in MQTT Version 5.0 +-record(message, { + %% Global unique message ID + id :: binary(), + %% Message QoS + qos = 0, + %% Message from + from :: atom() | binary(), + %% Message flags + flags = #{} :: emqx_types:flags(), + %% Message headers. May contain any metadata. e.g. 
the + %% protocol version number, username, peerhost or + %% the PUBLISH properties (MQTT 5.0). + headers = #{} :: emqx_types:headers(), + %% Topic that the message is published to + topic :: emqx_types:topic(), + %% Message Payload + payload :: emqx_types:payload(), + %% Timestamp (Unit: millisecond) + timestamp :: integer(), + %% not used so far, for future extension + extra = [] :: term() +}). + +-endif. diff --git a/apps/emqx/src/bpapi/emqx_bpapi_trans.erl b/apps/emqx_utils/src/bpapi/emqx_bpapi_trans.erl similarity index 100% rename from apps/emqx/src/bpapi/emqx_bpapi_trans.erl rename to apps/emqx_utils/src/bpapi/emqx_bpapi_trans.erl From c030188eb74198e10884e6e60a5322f0ddb4054e Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Mon, 6 Nov 2023 19:04:14 +0100 Subject: [PATCH 100/155] chore(ds): Add rebar.config file to app/emqx_durable_storage --- apps/emqx_durable_storage/rebar.config | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 apps/emqx_durable_storage/rebar.config diff --git a/apps/emqx_durable_storage/rebar.config b/apps/emqx_durable_storage/rebar.config new file mode 100644 index 000000000..f04819025 --- /dev/null +++ b/apps/emqx_durable_storage/rebar.config @@ -0,0 +1,3 @@ +%% -*- mode:erlang -*- +{deps, + [{emqx_utils, {path, "../emqx_utils"}}]}. 
From ef0bebd7b924b7556a3d76e75122b3275f06744b Mon Sep 17 00:00:00 2001 From: JimMoen Date: Wed, 8 Nov 2023 11:08:11 +0800 Subject: [PATCH 101/155] chore: rm `lib-ce` and `lib-ee` in scripts --- scripts/gen-erlang-ls-config.sh | 4 +--- scripts/update_appup.escript | 4 ++-- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/scripts/gen-erlang-ls-config.sh b/scripts/gen-erlang-ls-config.sh index 0f5bcdecd..d1e6e44b7 100755 --- a/scripts/gen-erlang-ls-config.sh +++ b/scripts/gen-erlang-ls-config.sh @@ -73,7 +73,7 @@ deps_dirs() for dir in _build/default/lib/*; do app=$(basename "${dir}") ## Only add applications that are not part of EMQX umbrella project: - [ -d "apps/${app}" ] || [ -d "lib-ce/${app}" ] || [ -d "lib-ee/${app}" ] || + [ -d "apps/${app}" ] || echo " - \"${dir}\"" done } @@ -91,8 +91,6 @@ EOF cat < , beams_dir => "_build/emqx/rel/emqx/lib/" , check => false , prev_tag => undefined - , src_dirs => "{src,apps,lib-*}/**/" + , src_dirs => "{src,apps}/**/" , prev_beams_dir => undefined }. From a1cdbaa76d1dc4f7a9e1287f8ba09300512e6001 Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Wed, 8 Nov 2023 04:22:36 +0100 Subject: [PATCH 102/155] fix(ds): Address code review remarks --- Makefile | 2 +- apps/emqx/src/emqx_persistent_session_ds.erl | 9 +++--- .../src/emqx_ds_bitmask_keymapper.erl | 28 ++++++++++--------- .../src/emqx_ds_storage_bitfield_lts.erl | 7 +---- 4 files changed, 22 insertions(+), 24 deletions(-) diff --git a/Makefile b/Makefile index 8e8f4b493..254a4b0f9 100644 --- a/Makefile +++ b/Makefile @@ -85,7 +85,7 @@ $(REL_PROFILES:%=%-compile): $(REBAR) merge-config .PHONY: ct ct: $(REBAR) merge-config - @ENABLE_COVER_COMPILE=1 $(REBAR) ct --name $(CT_NODE_NAME) -c -v --cover_export_name $(CT_COVER_EXPORT_PREFIX)-ct + @$(REBAR) ct --name $(CT_NODE_NAME) -c -v --cover_export_name $(CT_COVER_EXPORT_PREFIX)-ct ## only check bpapi for enterprise profile because it's a super-set. 
.PHONY: static_checks diff --git a/apps/emqx/src/emqx_persistent_session_ds.erl b/apps/emqx/src/emqx_persistent_session_ds.erl index 9a9e05a7a..f3027f500 100644 --- a/apps/emqx/src/emqx_persistent_session_ds.erl +++ b/apps/emqx/src/emqx_persistent_session_ds.erl @@ -605,9 +605,11 @@ session_read_subscriptions(DSSessionId) -> ), mnesia:select(?SESSION_SUBSCRIPTIONS_TAB, MS, read). --spec new_subscription_id(id(), topic_filter()) -> {subscription_id(), emqx_ds:time()}. +-spec new_subscription_id(id(), topic_filter()) -> {subscription_id(), integer()}. new_subscription_id(DSSessionId, TopicFilter) -> - NowMS = erlang:system_time(microsecond), + %% Note: here we use _milliseconds_ to match with the timestamp + %% field of `#message' record. + NowMS = erlang:system_time(millisecond), DSSubId = {DSSessionId, TopicFilter}, {DSSubId, NowMS}. @@ -662,8 +664,7 @@ renew_streams(Id, ExistingStreams, TopicFilter, StartTime) -> ok; false -> mnesia:write(?SESSION_STREAM_TAB, Rec, write), - % StartTime), - {ok, Iterator} = emqx_ds:make_iterator(Stream, TopicFilter, 0), + {ok, Iterator} = emqx_ds:make_iterator(Stream, TopicFilter, StartTime), IterRec = #ds_iter{id = {Id, Stream}, iter = Iterator}, mnesia:write(?SESSION_ITER_TAB, IterRec, write) end diff --git a/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl b/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl index 5666b45ae..a3b65c7e6 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_bitmask_keymapper.erl @@ -512,22 +512,24 @@ make_bitfilter(Keymapper = #keymapper{dim_sizeof = DimSizeof}, Ranges) -> {Bitmask, Bitfilter} = lists:unzip(L), {vector_to_key(Keymapper, Bitmask), vector_to_key(Keymapper, Bitfilter)}. -%% Transform inequalities into a list of closed intervals that the +%% Transform constraints into a list of closed intervals that the %% vector elements should lie in. 
constraints_to_ranges(#keymapper{dim_sizeof = DimSizeof}, Filter) -> lists:zipwith( - fun - (any, Bitsize) -> - {0, ones(Bitsize)}; - ({'=', infinity}, Bitsize) -> - Val = ones(Bitsize), - {Val, Val}; - ({'=', Val}, _Bitsize) -> - {Val, Val}; - ({'>=', Val}, Bitsize) -> - {Val, ones(Bitsize)}; - ({Min, '..', Max}, _Bitsize) -> - {Min, Max} + fun(Constraint, Bitsize) -> + Max = ones(Bitsize), + case Constraint of + any -> + {0, Max}; + {'=', infinity} -> + {Max, Max}; + {'=', Val} when Val =< Max -> + {Val, Val}; + {'>=', Val} when Val =< Max -> + {Val, Max}; + {A, '..', B} when A =< Max, B =< Max -> + {A, B} + end end, Filter, DimSizeof diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl index d8352df18..d57d8013c 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl @@ -27,7 +27,7 @@ -export([create/4, open/5, store_batch/4, get_streams/4, make_iterator/5, next/4]). %% internal exports: --export([format_key/2, format_keyfilter/1]). +-export([format_key/2]). -export_type([options/0]). @@ -321,11 +321,6 @@ format_key(KeyMapper, Key) -> Vec = [integer_to_list(I, 16) || I <- emqx_ds_bitmask_keymapper:key_to_vector(KeyMapper, Key)], lists:flatten(io_lib:format("~.16B (~s)", [Key, string:join(Vec, ",")])). -format_keyfilter(any) -> - any; -format_keyfilter({Op, Val}) -> - {Op, integer_to_list(Val, 16)}. - -spec make_key(s(), emqx_types:message()) -> {binary(), [binary()]}. 
make_key(#s{keymappers = KeyMappers, trie = Trie}, #message{timestamp = Timestamp, topic = TopicBin}) -> Tokens = emqx_topic:tokens(TopicBin), From 3fd5ab2782d9e9ec5548b92e435f5e02dea6d86e Mon Sep 17 00:00:00 2001 From: JimMoen Date: Wed, 8 Nov 2023 11:32:49 +0800 Subject: [PATCH 103/155] build(Makefile): erlfmt all escript files --- Makefile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Makefile b/Makefile index 254a4b0f9..f117d5e9d 100644 --- a/Makefile +++ b/Makefile @@ -317,6 +317,8 @@ $(foreach tt,$(ALL_ELIXIR_TGZS),$(eval $(call gen-elixir-tgz-target,$(tt)))) fmt: $(REBAR) @$(SCRIPTS)/erlfmt -w 'apps/*/{src,include,priv,test,integration_test}/**/*.{erl,hrl,app.src,eterm}' @$(SCRIPTS)/erlfmt -w 'rebar.config.erl' + @$(SCRIPTS)/erlfmt -w '$(SCRIPTS)/**/*.escript' + @$(SCRIPTS)/erlfmt -w 'bin/**/*.escript' @mix format .PHONY: clean-test-cluster-config From 72eb34658d31fb38130421949cff262efab51139 Mon Sep 17 00:00:00 2001 From: JimMoen Date: Wed, 8 Nov 2023 11:51:10 +0800 Subject: [PATCH 104/155] style: erlfmt all remaining escripts --- bin/install_upgrade.escript | 246 ++++++++++------ scripts/check-deps-integrity.escript | 17 +- scripts/check-i18n-style.escript | 14 +- scripts/merge-config.escript | 20 +- scripts/relup-build/inject-relup.escript | 89 +++--- scripts/update_appup.escript | 344 +++++++++++++---------- 6 files changed, 444 insertions(+), 286 deletions(-) diff --git a/bin/install_upgrade.escript b/bin/install_upgrade.escript index 3e39c787b..421e63b21 100755 --- a/bin/install_upgrade.escript +++ b/bin/install_upgrade.escript @@ -4,9 +4,11 @@ %% ex: ft=erlang ts=4 sw=4 et -define(TIMEOUT, 300000). --define(INFO(Fmt,Args), io:format(standard_io, Fmt++"~n",Args)). --define(ERROR(Fmt,Args), io:format(standard_error, "ERROR: "++Fmt++"~n",Args)). --define(SEMVER_RE, <<"^(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(-[a-zA-Z\\d][-a-zA-Z.\\d]*)?(\\+[a-zA-Z\\d][-a-zA-Z.\\d]*)?$">>). +-define(INFO(Fmt, Args), io:format(standard_io, Fmt ++ "~n", Args)). 
+-define(ERROR(Fmt, Args), io:format(standard_error, "ERROR: " ++ Fmt ++ "~n", Args)). +-define(SEMVER_RE, + <<"^(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(-[a-zA-Z\\d][-a-zA-Z.\\d]*)?(\\+[a-zA-Z\\d][-a-zA-Z.\\d]*)?$">> +). -mode(compile). @@ -17,14 +19,15 @@ main([Command0, DistInfoStr | CommandArgs]) -> %% convert arguments into a proplist Opts = parse_arguments(CommandArgs), %% invoke the command passed as argument - F = case Command0 of - "install" -> fun(A, B) -> install(A, B) end; - "unpack" -> fun(A, B) -> unpack(A, B) end; - "upgrade" -> fun(A, B) -> upgrade(A, B) end; - "downgrade" -> fun(A, B) -> downgrade(A, B) end; - "uninstall" -> fun(A, B) -> uninstall(A, B) end; - "versions" -> fun(A, B) -> versions(A, B) end - end, + F = + case Command0 of + "install" -> fun(A, B) -> install(A, B) end; + "unpack" -> fun(A, B) -> unpack(A, B) end; + "upgrade" -> fun(A, B) -> upgrade(A, B) end; + "downgrade" -> fun(A, B) -> downgrade(A, B) end; + "uninstall" -> fun(A, B) -> uninstall(A, B) end; + "versions" -> fun(A, B) -> versions(A, B) end + end, F(DistInfo, Opts); main(Args) -> ?INFO("unknown args: ~p", [Args]), @@ -38,15 +41,15 @@ unpack({RelName, NameTypeArg, NodeName, Cookie}, Opts) -> ?INFO("Unpacked successfully: ~p", [Vsn]); old -> %% no need to unpack, has been installed previously - ?INFO("Release ~s is marked old.",[Version]); + ?INFO("Release ~s is marked old.", [Version]); unpacked -> - ?INFO("Release ~s is already unpacked.",[Version]); + ?INFO("Release ~s is already unpacked.", [Version]); current -> - ?INFO("Release ~s is already installed and current.",[Version]); + ?INFO("Release ~s is already installed and current.", [Version]); permanent -> - ?INFO("Release ~s is already installed and set permanent.",[Version]); + ?INFO("Release ~s is already installed and set permanent.", [Version]); {error, Reason} -> - ?INFO("Unpack failed: ~p.",[Reason]), + ?INFO("Unpack failed: ~p.", [Reason]), print_existing_versions(TargetNode), erlang:halt(2) end; @@ 
-64,38 +67,46 @@ install({RelName, NameTypeArg, NodeName, Cookie}, Opts) -> maybe_permafy(TargetNode, RelName, Vsn, Opts); old -> %% no need to unpack, has been installed previously - ?INFO("Release ~s is marked old, switching to it.",[Version]), + ?INFO("Release ~s is marked old, switching to it.", [Version]), check_and_install(TargetNode, Version), maybe_permafy(TargetNode, RelName, Version, Opts); unpacked -> - ?INFO("Release ~s is already unpacked, now installing.",[Version]), + ?INFO("Release ~s is already unpacked, now installing.", [Version]), check_and_install(TargetNode, Version), maybe_permafy(TargetNode, RelName, Version, Opts); current -> case proplists:get_value(permanent, Opts, true) of true -> - ?INFO("Release ~s is already installed and current, making permanent.", - [Version]), + ?INFO( + "Release ~s is already installed and current, making permanent.", + [Version] + ), permafy(TargetNode, RelName, Version); false -> - ?INFO("Release ~s is already installed and current.", - [Version]) + ?INFO( + "Release ~s is already installed and current.", + [Version] + ) end; permanent -> %% this release is marked permanent, however it might not the %% one currently running case current_release_version(TargetNode) of Version -> - ?INFO("Release ~s is already installed, running and set permanent.", - [Version]); + ?INFO( + "Release ~s is already installed, running and set permanent.", + [Version] + ); CurrentVersion -> - ?INFO("Release ~s is the currently running version.", - [CurrentVersion]), + ?INFO( + "Release ~s is the currently running version.", + [CurrentVersion] + ), check_and_install(TargetNode, Version), maybe_permafy(TargetNode, RelName, Version, Opts) end; {error, Reason} -> - ?INFO("Unpack failed: ~p",[Reason]), + ?INFO("Unpack failed: ~p", [Reason]), print_existing_versions(TargetNode), erlang:halt(2) end; @@ -119,8 +130,10 @@ uninstall({_RelName, NameTypeArg, NodeName, Cookie}, Opts) -> ?INFO("Release ~s is marked old, uninstalling it.", 
[Version]), remove_release(TargetNode, Version); unpacked -> - ?INFO("Release ~s is marked unpacked, uninstalling it", - [Version]), + ?INFO( + "Release ~s is marked unpacked, uninstalling it", + [Version] + ), remove_release(TargetNode, Version); current -> ?INFO("Uninstall failed: Release ~s is marked current.", [Version]), @@ -140,10 +153,11 @@ parse_arguments(Args) -> IsEnterprise = os:getenv("IS_ENTERPRISE") == "yes", parse_arguments(Args, [{is_enterprise, IsEnterprise}]). -parse_arguments([], Acc) -> Acc; -parse_arguments(["--no-permanent"|Rest], Acc) -> +parse_arguments([], Acc) -> + Acc; +parse_arguments(["--no-permanent" | Rest], Acc) -> parse_arguments(Rest, [{permanent, false}] ++ Acc); -parse_arguments([VersionStr|Rest], Acc) -> +parse_arguments([VersionStr | Rest], Acc) -> Version = parse_version(VersionStr), parse_arguments(Rest, [{version, Version}] ++ Acc). @@ -162,18 +176,29 @@ unpack_release(RelName, TargetNode, Version, Opts) -> {_, undefined} -> {error, release_package_not_found}; {ReleasePackage, ReleasePackageLink} -> - ?INFO("Release ~s not found, attempting to unpack ~s", - [Version, ReleasePackage]), - case rpc:call(TargetNode, release_handler, unpack_release, - [ReleasePackageLink], ?TIMEOUT) of - {ok, Vsn} -> {ok, Vsn}; + ?INFO( + "Release ~s not found, attempting to unpack ~s", + [Version, ReleasePackage] + ), + case + rpc:call( + TargetNode, + release_handler, + unpack_release, + [ReleasePackageLink], + ?TIMEOUT + ) + of + {ok, Vsn} -> + {ok, Vsn}; {error, {existing_release, Vsn}} -> %% sometimes the user may have removed the release/ dir %% for an `unpacked` release, then we need to re-unpack it from %% the .tar ball untar_for_unpacked_release(str(RelName), Vsn), {ok, Vsn}; - {error, _} = Error -> Error + {error, _} = Error -> + Error end end; Other -> @@ -198,8 +223,8 @@ untar_for_unpacked_release(RelName, Vsn) -> extract_tar(Cwd, Tar) -> case erl_tar:extract(Tar, [keep_old_files, {cwd, Cwd}, compressed]) of ok -> ok; - {error, {Name, 
Reason}} -> % New erl_tar (R3A). - throw({error, {cannot_extract_file, Name, Reason}}) + % New erl_tar (R3A). + {error, {Name, Reason}} -> throw({error, {cannot_extract_file, Name, Reason}}) end. %% 1. look for a release package tarball with the provided version: @@ -217,8 +242,11 @@ find_and_link_release_package(Version, RelName, IsEnterprise) -> ReleaseHandlerPackageLink = filename:join(Version, RelNameStr), %% this is the symlink name we'll create once %% we've found where the actual release package is located - ReleaseLink = filename:join(["releases", Version, - RelNameStr ++ ".tar.gz"]), + ReleaseLink = filename:join([ + "releases", + Version, + RelNameStr ++ ".tar.gz" + ]), ReleaseNamePattern = case IsEnterprise of false -> RelNameStr; @@ -240,14 +268,18 @@ find_and_link_release_package(Version, RelName, IsEnterprise) -> make_symlink_or_copy(filename:absname(Filename), ReleaseLink), {Filename, ReleaseHandlerPackageLink}; Files -> - ?ERROR("Found more than one package for version: '~s', " - "files: ~p", [Version, Files]), + ?ERROR( + "Found more than one package for version: '~s', " + "files: ~p", + [Version, Files] + ), erlang:halt(47) end. make_symlink_or_copy(Filename, ReleaseLink) -> case file:make_symlink(Filename, ReleaseLink) of - ok -> ok; + ok -> + ok; {error, eexist} -> ?INFO("Symlink ~p already exists, recreate it", [ReleaseLink]), ok = file:delete(ReleaseLink), @@ -260,36 +292,55 @@ make_symlink_or_copy(Filename, ReleaseLink) -> end. parse_version(V) when is_list(V) -> - hd(string:tokens(V,"/")). + hd(string:tokens(V, "/")). 
check_and_install(TargetNode, Vsn) -> %% Backup the sys.config, this will be used when we check and install release %% NOTE: We cannot backup the old sys.config directly, because the %% configs for plugins are only in app-envs, not in the old sys.config Configs0 = - [{AppName, rpc:call(TargetNode, application, get_all_env, [AppName], ?TIMEOUT)} - || {AppName, _, _} <- rpc:call(TargetNode, application, which_applications, [], ?TIMEOUT)], + [ + {AppName, rpc:call(TargetNode, application, get_all_env, [AppName], ?TIMEOUT)} + || {AppName, _, _} <- rpc:call(TargetNode, application, which_applications, [], ?TIMEOUT) + ], Configs1 = [{AppName, Conf} || {AppName, Conf} <- Configs0, Conf =/= []], - ok = file:write_file(filename:join(["releases", Vsn, "sys.config"]), io_lib:format("~p.", [Configs1])), + ok = file:write_file( + filename:join(["releases", Vsn, "sys.config"]), io_lib:format("~p.", [Configs1]) + ), %% check and install release - case rpc:call(TargetNode, release_handler, - check_install_release, [Vsn], ?TIMEOUT) of + case + rpc:call( + TargetNode, + release_handler, + check_install_release, + [Vsn], + ?TIMEOUT + ) + of {ok, _OtherVsn, _Desc} -> ok; {error, Reason} -> ?ERROR("Call release_handler:check_install_release failed: ~p.", [Reason]), erlang:halt(3) end, - case rpc:call(TargetNode, release_handler, install_release, - [Vsn, [{update_paths, true}]], ?TIMEOUT) of + case + rpc:call( + TargetNode, + release_handler, + install_release, + [Vsn, [{update_paths, true}]], + ?TIMEOUT + ) + of {ok, _, _} -> ?INFO("Installed Release: ~s.", [Vsn]), ok; {error, {no_such_release, Vsn}} -> VerList = iolist_to_binary( - [io_lib:format("* ~s\t~s~n",[V,S]) || {V,S} <- which_releases(TargetNode)]), + [io_lib:format("* ~s\t~s~n", [V, S]) || {V, S} <- which_releases(TargetNode)] + ), ?INFO("Installed versions:~n~s", [VerList]), ?ERROR("Unable to revert to '~s' - not installed.", [Vsn]), erlang:halt(2); @@ -298,11 +349,13 @@ check_and_install(TargetNode, Vsn) -> %% If the value 
is soft_purge, release_handler:install_release/1 %% returns {error,{old_processes,Mod}} {error, {old_processes, Mod}} -> - ?ERROR("Unable to install '~s' - old processes still running code from module ~p", - [Vsn, Mod]), + ?ERROR( + "Unable to install '~s' - old processes still running code from module ~p", + [Vsn, Mod] + ), erlang:halt(3); {error, Reason1} -> - ?ERROR("Call release_handler:install_release failed: ~p",[Reason1]), + ?ERROR("Call release_handler:install_release failed: ~p", [Reason1]), erlang:halt(4) end. @@ -310,22 +363,34 @@ maybe_permafy(TargetNode, RelName, Vsn, Opts) -> case proplists:get_value(permanent, Opts, true) of true -> permafy(TargetNode, RelName, Vsn); - false -> ok + false -> + ok end. permafy(TargetNode, RelName, Vsn) -> RelNameStr = atom_to_list(RelName), - ok = rpc:call(TargetNode, release_handler, - make_permanent, [Vsn], ?TIMEOUT), + ok = rpc:call( + TargetNode, + release_handler, + make_permanent, + [Vsn], + ?TIMEOUT + ), ?INFO("Made release permanent: ~p", [Vsn]), %% upgrade/downgrade the scripts by replacing them - Scripts = [RelNameStr, RelNameStr++"_ctl", "nodetool", "install_upgrade.escript"], - [{ok, _} = file:copy(filename:join(["bin", File++"-"++Vsn]), - filename:join(["bin", File])) - || File <- Scripts], + Scripts = [RelNameStr, RelNameStr ++ "_ctl", "nodetool", "install_upgrade.escript"], + [ + {ok, _} = file:copy( + filename:join(["bin", File ++ "-" ++ Vsn]), + filename:join(["bin", File]) + ) + || File <- Scripts + ], %% update the vars UpdatedVars = io_lib:format("REL_VSN=\"~s\"~nERTS_VSN=\"~s\"~n", [Vsn, erts_vsn()]), - file:write_file(filename:absname(filename:join(["releases", "emqx_vars"])), UpdatedVars, [append]). + file:write_file(filename:absname(filename:join(["releases", "emqx_vars"])), UpdatedVars, [ + append + ]). 
remove_release(TargetNode, Vsn) -> case rpc:call(TargetNode, release_handler, remove_release, [Vsn], ?TIMEOUT) of @@ -339,22 +404,31 @@ remove_release(TargetNode, Vsn) -> which_releases(TargetNode) -> R = rpc:call(TargetNode, release_handler, which_releases, [], ?TIMEOUT), - [ {V, S} || {_,V,_, S} <- R ]. + [{V, S} || {_, V, _, S} <- R]. %% the running release version is either the only one marked `current´ %% or, if none exists, the one marked `permanent` current_release_version(TargetNode) -> - R = rpc:call(TargetNode, release_handler, which_releases, - [], ?TIMEOUT), - Versions = [ {S, V} || {_,V,_, S} <- R ], + R = rpc:call( + TargetNode, + release_handler, + which_releases, + [], + ?TIMEOUT + ), + Versions = [{S, V} || {_, V, _, S} <- R], %% current version takes priority over the permanent - proplists:get_value(current, Versions, - proplists:get_value(permanent, Versions)). + proplists:get_value( + current, + Versions, + proplists:get_value(permanent, Versions) + ). print_existing_versions(TargetNode) -> VerList = iolist_to_binary([ - io_lib:format("* ~s\t~s~n",[V,S]) - || {V,S} <- which_releases(TargetNode) ]), + io_lib:format("* ~s\t~s~n", [V, S]) + || {V, S} <- which_releases(TargetNode) + ]), ?INFO("Installed versions:~n~s", [VerList]). start_distribution(TargetNode, NameTypeArg, Cookie) -> @@ -378,12 +452,12 @@ make_script_node(Node) -> %% get name type from arg get_name_type(NameTypeArg) -> - case NameTypeArg of - "-sname" -> - shortnames; - _ -> - longnames - end. + case NameTypeArg of + "-sname" -> + shortnames; + _ -> + longnames + end. 
erts_vsn() -> {ok, Str} = file:read_file(filename:join(["releases", "start_erl.data"])), @@ -393,11 +467,14 @@ erts_vsn() -> validate_target_version(TargetVersion, TargetNode) -> CurrentVersion = current_release_version(TargetNode), case {get_major_minor_vsn(CurrentVersion), get_major_minor_vsn(TargetVersion)} of - {{Major, Minor}, {Major, Minor}} -> ok; + {{Major, Minor}, {Major, Minor}} -> + ok; _ -> - ?ERROR("Cannot upgrade/downgrade from '~s' to '~s'~n" - "Hot upgrade is only supported between patch releases.", - [CurrentVersion, TargetVersion]), + ?ERROR( + "Cannot upgrade/downgrade from '~s' to '~s'~n" + "Hot upgrade is only supported between patch releases.", + [CurrentVersion, TargetVersion] + ), erlang:halt(48) end. @@ -409,7 +486,8 @@ get_major_minor_vsn(Version) -> parse_semver(Version) -> case re:run(Version, ?SEMVER_RE, [{capture, all_but_first, binary}]) of - {match, Parts} -> Parts; + {match, Parts} -> + Parts; nomatch -> ?ERROR("Invalid semantic version: '~s'~n", [Version]), erlang:halt(22) diff --git a/scripts/check-deps-integrity.escript b/scripts/check-deps-integrity.escript index 03cd509de..304c771fd 100755 --- a/scripts/check-deps-integrity.escript +++ b/scripts/check-deps-integrity.escript @@ -20,7 +20,8 @@ apps_rebar_config(Dir) -> %% collect a kv-list of {DepName, [{DepReference, RebarConfigFile}]} %% the value part should have unique DepReference -collect_deps([], Acc) -> maps:to_list(Acc); +collect_deps([], Acc) -> + maps:to_list(Acc); collect_deps([File | Files], Acc) -> Deps = try @@ -28,12 +29,13 @@ collect_deps([File | Files], Acc) -> {deps, Deps0} = lists:keyfind(deps, 1, Config), Deps0 catch - C : E : St -> + C:E:St -> erlang:raise(C, {E, {failed_to_find_deps_in_rebar_config, File}}, St) end, collect_deps(Files, do_collect_deps(Deps, File, Acc)). 
-do_collect_deps([], _File, Acc) -> Acc; +do_collect_deps([], _File, Acc) -> + Acc; %% ignore relative app dependencies do_collect_deps([{_Name, {path, _Path}} | Deps], File, Acc) -> do_collect_deps(Deps, File, Acc); @@ -41,7 +43,8 @@ do_collect_deps([{Name, Ref} | Deps], File, Acc) -> Refs = maps:get(Name, Acc, []), do_collect_deps(Deps, File, Acc#{Name => [{Ref, File} | Refs]}). -count_bad_deps([]) -> 0; +count_bad_deps([]) -> + 0; count_bad_deps([{Name, Refs0} | Rest]) -> Refs = lists:keysort(1, Refs0), case is_unique_ref(Refs) andalso not_branch_ref(Refs) of @@ -53,10 +56,8 @@ count_bad_deps([{Name, Refs0} | Rest]) -> end. is_unique_ref([_]) -> true; -is_unique_ref([{Ref, _File1}, {Ref, File2} | Rest]) -> - is_unique_ref([{Ref, File2} | Rest]); -is_unique_ref(_) -> - false. +is_unique_ref([{Ref, _File1}, {Ref, File2} | Rest]) -> is_unique_ref([{Ref, File2} | Rest]); +is_unique_ref(_) -> false. not_branch_ref([]) -> true; not_branch_ref([{{git, _Repo, {branch, _Branch}}, _File} | _Rest]) -> false; diff --git a/scripts/check-i18n-style.escript b/scripts/check-i18n-style.escript index e7e0ea42e..4aea1b6d5 100755 --- a/scripts/check-i18n-style.escript +++ b/scripts/check-i18n-style.escript @@ -46,7 +46,6 @@ logerr(Fmt, Args) -> _ = put(errors, N + 1), ok. - check(File) -> io:format(user, ".", []), {ok, C} = hocon:load(File), @@ -54,9 +53,12 @@ check(File) -> ok. check_one_field(Name, Field) -> - maps:foreach(fun(SubName, DescAndLabel) -> - check_desc_and_label([Name, ".", SubName], DescAndLabel) - end, Field). + maps:foreach( + fun(SubName, DescAndLabel) -> + check_desc_and_label([Name, ".", SubName], DescAndLabel) + end, + Field + ). 
check_desc_and_label(Name, D) -> case maps:keys(D) -- [<<"desc">>, <<"label">>] of @@ -84,8 +86,8 @@ check_desc_string(Name, <<>>) -> check_desc_string(Name, BinStr) -> Str = unicode:characters_to_list(BinStr, utf8), Err = fun(Reason) -> - logerr("~s: ~s~n", [Name, Reason]) - end, + logerr("~s: ~s~n", [Name, Reason]) + end, case Str of [$\s | _] -> Err("remove leading whitespace"); diff --git a/scripts/merge-config.escript b/scripts/merge-config.escript index 25593a323..aad33f7ac 100755 --- a/scripts/merge-config.escript +++ b/scripts/merge-config.escript @@ -90,14 +90,18 @@ merge_desc_files() -> do_merge_desc_files(BaseConf, Cfgs) -> lists:foldl( - fun(CfgFile, Acc) -> - case filelib:is_regular(CfgFile) of - true -> - {ok, Bin1} = file:read_file(CfgFile), - [Acc, io_lib:nl(), Bin1]; - false -> Acc - end - end, BaseConf, Cfgs). + fun(CfgFile, Acc) -> + case filelib:is_regular(CfgFile) of + true -> + {ok, Bin1} = file:read_file(CfgFile), + [Acc, io_lib:nl(), Bin1]; + false -> + Acc + end + end, + BaseConf, + Cfgs + ). get_all_desc_files() -> Dir = filename:join(["rel", "i18n"]), diff --git a/scripts/relup-build/inject-relup.escript b/scripts/relup-build/inject-relup.escript index b7d905979..7e252f741 100755 --- a/scripts/relup-build/inject-relup.escript +++ b/scripts/relup-build/inject-relup.escript @@ -20,9 +20,9 @@ inject_relup_file(File) -> case file:script(File) of {ok, {CurrRelVsn, UpVsnRUs, DnVsnRUs}} -> ?INFO("Injecting instructions to: ~p", [File]), - UpdatedContent = {CurrRelVsn, - inject_relup_instrs(up, UpVsnRUs), - inject_relup_instrs(down, DnVsnRUs)}, + UpdatedContent = + {CurrRelVsn, inject_relup_instrs(up, UpVsnRUs), + inject_relup_instrs(down, DnVsnRUs)}, file:write_file(File, term_to_text(UpdatedContent)); {ok, _BadFormat} -> ?ERROR("Bad formatted relup file: ~p", [File]), @@ -36,38 +36,49 @@ inject_relup_file(File) -> end. 
inject_relup_instrs(Type, RUs) -> - lists:map(fun({Vsn, Desc, Instrs}) -> - {Vsn, Desc, append_emqx_relup_instrs(Type, Vsn, Instrs)} - end, RUs). + lists:map( + fun({Vsn, Desc, Instrs}) -> + {Vsn, Desc, append_emqx_relup_instrs(Type, Vsn, Instrs)} + end, + RUs + ). append_emqx_relup_instrs(up, FromRelVsn, Instrs0) -> - {{UpExtra, _}, Instrs1} = filter_and_check_instrs(up, Instrs0), - Instrs1 ++ - [ {load, {emqx_release, brutal_purge, soft_purge}} - , {load, {emqx_relup, brutal_purge, soft_purge}} - , {apply, {emqx_relup, post_release_upgrade, [FromRelVsn, UpExtra]}} + {{UpExtra, _}, Instrs1} = filter_and_check_instrs(up, Instrs0), + Instrs1 ++ + [ + {load, {emqx_release, brutal_purge, soft_purge}}, + {load, {emqx_relup, brutal_purge, soft_purge}}, + {apply, {emqx_relup, post_release_upgrade, [FromRelVsn, UpExtra]}} ]; - append_emqx_relup_instrs(down, ToRelVsn, Instrs0) -> {{_, DnExtra}, Instrs1} = filter_and_check_instrs(down, Instrs0), %% NOTE: When downgrading, we apply emqx_relup:post_release_downgrade/2 before reloading %% or removing the emqx_relup module. - Instrs2 = Instrs1 ++ - [ {load, {emqx_release, brutal_purge, soft_purge}} - , {apply, {emqx_relup, post_release_downgrade, [ToRelVsn, DnExtra]}} - , {load, {emqx_relup, brutal_purge, soft_purge}} - ], + Instrs2 = + Instrs1 ++ + [ + {load, {emqx_release, brutal_purge, soft_purge}}, + {apply, {emqx_relup, post_release_downgrade, [ToRelVsn, DnExtra]}}, + {load, {emqx_relup, brutal_purge, soft_purge}} + ], Instrs2. 
filter_and_check_instrs(Type, Instrs) -> case filter_fetch_emqx_mods_and_extra(Instrs) of {_, DnExtra, _, _} when Type =:= up, DnExtra =/= undefined -> - ?ERROR("Got '{apply,{emqx_relup,post_release_downgrade,[_,Extra]}}'" - " from the upgrade instruction list, should be 'post_release_upgrade'", []), + ?ERROR( + "Got '{apply,{emqx_relup,post_release_downgrade,[_,Extra]}}'" + " from the upgrade instruction list, should be 'post_release_upgrade'", + [] + ), error({instruction_not_found, load_object_code}); {UpExtra, _, _, _} when Type =:= down, UpExtra =/= undefined -> - ?ERROR("Got '{apply,{emqx_relup,post_release_upgrade,[_,Extra]}}'" - " from the downgrade instruction list, should be 'post_release_downgrade'", []), + ?ERROR( + "Got '{apply,{emqx_relup,post_release_upgrade,[_,Extra]}}'" + " from the downgrade instruction list, should be 'post_release_downgrade'", + [] + ), error({instruction_not_found, load_object_code}); {_, _, [], _} -> ?ERROR("Cannot find any 'load_object_code' instructions for app emqx", []), @@ -81,12 +92,15 @@ filter_fetch_emqx_mods_and_extra(Instrs) -> lists:foldl(fun do_filter_and_get/2, {undefined, undefined, [], []}, Instrs). 
%% collect modules for emqx app -do_filter_and_get({load_object_code, {emqx, _AppVsn, Mods}} = Instr, - {UpExtra, DnExtra, EmqxMods, RemainInstrs}) -> +do_filter_and_get( + {load_object_code, {emqx, _AppVsn, Mods}} = Instr, + {UpExtra, DnExtra, EmqxMods, RemainInstrs} +) -> {UpExtra, DnExtra, EmqxMods ++ Mods, RemainInstrs ++ [Instr]}; %% remove 'load' instrs for emqx_relup and emqx_release -do_filter_and_get({load, {Mod, _, _}}, {UpExtra, DnExtra, EmqxMods, RemainInstrs}) - when Mod =:= emqx_relup; Mod =:= emqx_release -> +do_filter_and_get({load, {Mod, _, _}}, {UpExtra, DnExtra, EmqxMods, RemainInstrs}) when + Mod =:= emqx_relup; Mod =:= emqx_release +-> {UpExtra, DnExtra, EmqxMods, RemainInstrs}; %% remove 'remove' and 'purge' instrs for emqx_relup do_filter_and_get({remove, {emqx_relup, _, _}}, {UpExtra, DnExtra, EmqxMods, RemainInstrs}) -> @@ -94,22 +108,31 @@ do_filter_and_get({remove, {emqx_relup, _, _}}, {UpExtra, DnExtra, EmqxMods, Rem do_filter_and_get({purge, [emqx_relup]}, {UpExtra, DnExtra, EmqxMods, RemainInstrs}) -> {UpExtra, DnExtra, EmqxMods, RemainInstrs}; %% remove 'apply' instrs for upgrade, and collect the 'Extra' parameter -do_filter_and_get({apply, {emqx_relup, post_release_upgrade, [_, UpExtra0]}}, - {_, DnExtra, EmqxMods, RemainInstrs}) -> +do_filter_and_get( + {apply, {emqx_relup, post_release_upgrade, [_, UpExtra0]}}, + {_, DnExtra, EmqxMods, RemainInstrs} +) -> {UpExtra0, DnExtra, EmqxMods, RemainInstrs}; %% remove 'apply' instrs for downgrade, and collect the 'Extra' parameter -do_filter_and_get({apply, {emqx_relup, post_release_downgrade, [_, DnExtra0]}}, - {UpExtra, _, EmqxMods, RemainInstrs}) -> +do_filter_and_get( + {apply, {emqx_relup, post_release_downgrade, [_, DnExtra0]}}, + {UpExtra, _, EmqxMods, RemainInstrs} +) -> {UpExtra, DnExtra0, EmqxMods, RemainInstrs}; %% keep all other instrs unchanged do_filter_and_get(Instr, {UpExtra, DnExtra, EmqxMods, RemainInstrs}) -> {UpExtra, DnExtra, EmqxMods, RemainInstrs ++ [Instr]}. 
assert_mandatory_modules(_, Mods) -> - MandInstrs = [{load_module,emqx_release,brutal_purge,soft_purge,[]}, - {load_module,emqx_relup}], - assert(lists:member(emqx_relup, Mods) andalso lists:member(emqx_release, Mods), - "The following instructions are mandatory in every clause of the emqx.appup.src: ~p", [MandInstrs]). + MandInstrs = [ + {load_module, emqx_release, brutal_purge, soft_purge, []}, + {load_module, emqx_relup} + ], + assert( + lists:member(emqx_relup, Mods) andalso lists:member(emqx_release, Mods), + "The following instructions are mandatory in every clause of the emqx.appup.src: ~p", + [MandInstrs] + ). assert(true, _, _) -> ok; diff --git a/scripts/update_appup.escript b/scripts/update_appup.escript index 5ae0918bb..945a948b0 100755 --- a/scripts/update_appup.escript +++ b/scripts/update_appup.escript @@ -1,6 +1,7 @@ #!/usr/bin/env -S escript -c %% -*- erlang-indent-level:4 -*- +%% erlfmt-ignore usage() -> "A script that fills in boilerplate for appup files. @@ -35,51 +36,52 @@ Options: --src-dirs Directories where source code is found. Defaults to '{src,apps}/**/' ". --record(app, - { modules :: #{module() => binary()} - , version :: string() - }). +-record(app, { + modules :: #{module() => binary()}, + version :: string() +}). default_options() -> - #{ clone_url => find_upstream_repo("origin") - , make_command => "make emqx-rel" - , beams_dir => "_build/emqx/rel/emqx/lib/" - , check => false - , prev_tag => undefined - , src_dirs => "{src,apps}/**/" - , prev_beams_dir => undefined - }. + #{ + clone_url => find_upstream_repo("origin"), + make_command => "make emqx-rel", + beams_dir => "_build/emqx/rel/emqx/lib/", + check => false, + prev_tag => undefined, + src_dirs => "{src,apps}/**/", + prev_beams_dir => undefined + }. %% App-specific actions that should be added unconditionally to any update/downgrade: app_specific_actions(_) -> []. ignored_apps() -> - [gpb %% only a build tool - ] ++ otp_standard_apps(). 
+ %% only a build tool + [gpb] ++ otp_standard_apps(). main(Args) -> #{prev_tag := Baseline} = Options = parse_args(Args, default_options()), init_globals(Options), main(Options, Baseline). -parse_args([PrevTag = [A|_]], State) when A =/= $- -> +parse_args([PrevTag = [A | _]], State) when A =/= $- -> State#{prev_tag => PrevTag}; -parse_args(["--check"|Rest], State) -> +parse_args(["--check" | Rest], State) -> parse_args(Rest, State#{check => true}); -parse_args(["--skip-build"|Rest], State) -> +parse_args(["--skip-build" | Rest], State) -> parse_args(Rest, State#{make_command => undefined}); -parse_args(["--repo", Repo|Rest], State) -> +parse_args(["--repo", Repo | Rest], State) -> parse_args(Rest, State#{clone_url => Repo}); -parse_args(["--remote", Remote|Rest], State) -> +parse_args(["--remote", Remote | Rest], State) -> parse_args(Rest, State#{clone_url => find_upstream_repo(Remote)}); -parse_args(["--make-command", Command|Rest], State) -> +parse_args(["--make-command", Command | Rest], State) -> parse_args(Rest, State#{make_command => Command}); -parse_args(["--release-dir", Dir|Rest], State) -> +parse_args(["--release-dir", Dir | Rest], State) -> parse_args(Rest, State#{beams_dir => Dir}); -parse_args(["--prev-release-dir", Dir|Rest], State) -> +parse_args(["--prev-release-dir", Dir | Rest], State) -> parse_args(Rest, State#{prev_beams_dir => Dir}); -parse_args(["--src-dirs", Pattern|Rest], State) -> +parse_args(["--src-dirs", Pattern | Rest], State) -> parse_args(Rest, State#{src_dirs => Pattern}); parse_args(_, _) -> fail(usage()). @@ -87,9 +89,11 @@ parse_args(_, _) -> main(Options, Baseline) -> {CurrRelDir, PrevRelDir} = prepare(Baseline, Options), putopt(prev_beams_dir, PrevRelDir), - log("~n===================================~n" + log( + "~n===================================~n" "Processing changes..." 
- "~n===================================~n"), + "~n===================================~n" + ), CurrAppsIdx = index_apps(CurrRelDir), PrevAppsIdx = index_apps(PrevRelDir), %% log("Curr: ~p~nPrev: ~p~n", [CurrAppsIdx, PrevAppsIdx]), @@ -98,6 +102,7 @@ main(Options, Baseline) -> ok = check_appup_files(), ok = warn_and_exit(is_valid()). +%% erlfmt-ignore warn_and_exit(true) -> log(" NOTE: Please review the changes manually. This script does not know about NIF @@ -109,9 +114,12 @@ warn_and_exit(false) -> halt(1). prepare(Baseline, Options = #{make_command := MakeCommand, beams_dir := BeamDir}) -> - log("~n===================================~n" + log( + "~n===================================~n" "Baseline: ~s" - "~n===================================~n", [Baseline]), + "~n===================================~n", + [Baseline] + ), log("Building the current version...~n"), ok = bash(MakeCommand), PrevRelDir = @@ -126,6 +134,7 @@ prepare(Baseline, Options = #{make_command := MakeCommand, beams_dir := BeamDir} end, {BeamDir, PrevRelDir}. +%% erlfmt-ignore build_prev_release(Baseline, #{clone_url := Repo, make_command := MakeCommand}) -> BaseDir = "/tmp/emqx-appup-base/", Dir = filename:basename(Repo, ".git") ++ [$-|Baseline], @@ -146,24 +155,27 @@ find_upstream_repo(Remote) -> find_appup_actions(CurrApps, PrevApps) -> maps:fold( - fun(App, CurrAppIdx, Acc) -> - case PrevApps of - #{App := PrevAppIdx} -> - find_appup_actions(App, CurrAppIdx, PrevAppIdx) ++ Acc; - _ -> - %% New app, nothing to upgrade here. - Acc - end - end, - [], - CurrApps). + fun(App, CurrAppIdx, Acc) -> + case PrevApps of + #{App := PrevAppIdx} -> + find_appup_actions(App, CurrAppIdx, PrevAppIdx) ++ Acc; + _ -> + %% New app, nothing to upgrade here. + Acc + end + end, + [], + CurrApps + ). 
find_appup_actions(_App, AppIdx, AppIdx) -> %% No changes to the app, ignore: []; -find_appup_actions(App, - CurrAppIdx = #app{version = CurrVersion}, - PrevAppIdx = #app{version = PrevVersion}) -> +find_appup_actions( + App, + CurrAppIdx = #app{version = CurrVersion}, + PrevAppIdx = #app{version = PrevVersion} +) -> {OldUpgrade0, OldDowngrade0} = find_base_appup_actions(App, PrevVersion), OldUpgrade = ensure_all_patch_versions(App, CurrVersion, OldUpgrade0), OldDowngrade = ensure_all_patch_versions(App, CurrVersion, OldDowngrade0), @@ -195,7 +207,10 @@ do_ensure_all_patch_versions(App, CurrVsn, OldActions) -> {ok, ExpectedVsns} -> CoveredVsns = [V || {V, _} <- OldActions, V =/= <<".*">>], ExpectedVsnStrs = [vsn_number_to_string(V) || V <- ExpectedVsns], - MissingActions = [{V, []} || V <- ExpectedVsnStrs, not contains_version(V, CoveredVsns)], + MissingActions = [ + {V, []} + || V <- ExpectedVsnStrs, not contains_version(V, CoveredVsns) + ], MissingActions ++ OldActions; {error, bad_version} -> log("WARN: Could not infer expected versions to upgrade from for ~p~n", [App]), @@ -206,23 +221,24 @@ do_ensure_all_patch_versions(App, CurrVsn, OldActions) -> %% in their current appup. diff_appup_instructions(ComputedChanges, PresentChanges) -> lists:foldr( - fun({VsnOrRegex, ComputedActions}, Acc) -> - case find_matching_version(VsnOrRegex, PresentChanges) of - undefined -> - [{VsnOrRegex, ComputedActions} | Acc]; - PresentActions -> - DiffActions = ComputedActions -- PresentActions, - case DiffActions of - [] -> - %% no diff - Acc; - _ -> - [{VsnOrRegex, DiffActions} | Acc] - end - end - end, - [], - ComputedChanges). 
+ fun({VsnOrRegex, ComputedActions}, Acc) -> + case find_matching_version(VsnOrRegex, PresentChanges) of + undefined -> + [{VsnOrRegex, ComputedActions} | Acc]; + PresentActions -> + DiffActions = ComputedActions -- PresentActions, + case DiffActions of + [] -> + %% no diff + Acc; + _ -> + [{VsnOrRegex, DiffActions} | Acc] + end + end + end, + [], + ComputedChanges + ). %% checks if any missing diffs are present %% and groups them by `up' and `down' types. @@ -234,9 +250,10 @@ parse_appup_diffs(Upgrade, OldUpgrade, Downgrade, OldDowngrade) -> %% no diff for external dependency; ignore ok; _ -> - Diffs = #{ up => DiffUp - , down => DiffDown - }, + Diffs = #{ + up => DiffUp, + down => DiffDown + }, {diffs, Diffs} end. @@ -260,18 +277,21 @@ find_base_appup_actions(App, PrevVersion) -> {ensure_version(PrevVersion, Upgrade), ensure_version(PrevVersion, Downgrade)}. merge_update_actions(App, Changes, Vsns, PrevVersion) -> - lists:map(fun(Ret = {<<".*">>, _}) -> - Ret; - ({Vsn, Actions}) -> - case is_skipped_version(App, Vsn, PrevVersion) of - true -> - log("WARN: ~p has version ~s skipped over?~n", [App, Vsn]), - {Vsn, Actions}; - false -> - {Vsn, do_merge_update_actions(App, Changes, Actions)} - end - end, - Vsns). + lists:map( + fun + (Ret = {<<".*">>, _}) -> + Ret; + ({Vsn, Actions}) -> + case is_skipped_version(App, Vsn, PrevVersion) of + true -> + log("WARN: ~p has version ~s skipped over?~n", [App, Vsn]), + {Vsn, Actions}; + false -> + {Vsn, do_merge_update_actions(App, Changes, Actions)} + end + end, + Vsns + ). 
%% say current version is 1.1.3, and the compare base is version 1.1.1, %% but there is a 1.1.2 in appup we may skip merging instructions for @@ -306,7 +326,7 @@ do_merge_update_actions(App, {New0, Changed0, Deleted0}, OldActions) -> []; false -> [{load_module, M, brutal_purge, soft_purge, []} || M <- Changed] ++ - [{add_module, M} || M <- New] + [{add_module, M} || M <- New] end, {OldActionsWithStop, OldActionsAfterStop} = find_application_stop_instruction(App, OldActions), @@ -331,11 +351,14 @@ contains_restart_application(Application, Actions) -> find_application_stop_instruction(Application, Actions) -> {Before, After0} = lists:splitwith( - fun({apply, {application, stop, [App]}}) when App =:= Application -> - false; - (_) -> - true - end, Actions), + fun + ({apply, {application, stop, [App]}}) when App =:= Application -> + false; + (_) -> + true + end, + Actions + ), case After0 of [StopInst | After] -> {Before ++ [StopInst], After}; @@ -353,8 +376,10 @@ process_old_action({delete_module, Module}) -> [Module]; process_old_action({update, Module, _Change}) -> [Module]; -process_old_action(LoadModule) when is_tuple(LoadModule) andalso - element(1, LoadModule) =:= load_module -> +process_old_action(LoadModule) when + is_tuple(LoadModule) andalso + element(1, LoadModule) =:= load_module +-> element(2, LoadModule); process_old_action(_) -> []. @@ -370,17 +395,19 @@ ensure_version(Version, OldInstructions) -> contains_version(Needle, Haystack) when is_list(Needle) -> lists:any( - fun(Regex) when is_binary(Regex) -> - case re:run(Needle, Regex) of - {match, _} -> - true; - nomatch -> - false - end; - (Vsn) -> - Vsn =:= Needle - end, - Haystack). + fun + (Regex) when is_binary(Regex) -> + case re:run(Needle, Regex) of + {match, _} -> + true; + nomatch -> + false + end; + (Vsn) -> + Vsn =:= Needle + end, + Haystack + ). 
%% As a best effort approach, we assume that we only bump patch %% version numbers between release upgrades for our dependencies and @@ -413,9 +440,9 @@ vsn_number_to_string({Major, Minor, Patch}) -> read_appup(File) -> %% NOTE: appup file is a script, it may contain variables or functions. - case do_read_appup(File) of - {ok, {U, D}} -> {U, D}; - {error, Reason} -> fail("Failed to parse appup file ~p~n~p", [File, Reason]) + case do_read_appup(File) of + {ok, {U, D}} -> {U, D}; + {error, Reason} -> fail("Failed to parse appup file ~p~n~p", [File, Reason]) end. do_read_appup(File) -> @@ -434,10 +461,11 @@ check_appup_files() -> update_appups(Changes) -> lists:foreach( - fun({App, {Upgrade, Downgrade, OldUpgrade, OldDowngrade}}) -> - do_update_appup(App, Upgrade, Downgrade, OldUpgrade, OldDowngrade) - end, - Changes). + fun({App, {Upgrade, Downgrade, OldUpgrade, OldDowngrade}}) -> + do_update_appup(App, Upgrade, Downgrade, OldUpgrade, OldDowngrade) + end, + Changes + ). do_update_appup(App, Upgrade, Downgrade, OldUpgrade, OldDowngrade) -> case locate_current_src(App, ".appup.src") of @@ -469,8 +497,11 @@ check_appup(App, Upgrade, Downgrade, OldUpgrade, OldDowngrade) -> ok; {diffs, Diffs} -> set_invalid(), - log("ERROR: Appup file for '~p' is not complete.~n" - "Missing:~100p~n", [App, Diffs]), + log( + "ERROR: Appup file for '~p' is not complete.~n" + "Missing:~100p~n", + [App, Diffs] + ), notok end. @@ -496,9 +527,12 @@ render_appup(App, File, Up, Down) -> end. do_render_appup(File, Up, Down) -> - IOList = io_lib:format("%% -*- mode: erlang -*-~n" - "%% Unless you know what you are doing, DO NOT edit manually!!~n" - "{VSN,~n ~p,~n ~p}.~n", [Up, Down]), + IOList = io_lib:format( + "%% -*- mode: erlang -*-~n" + "%% Unless you know what you are doing, DO NOT edit manually!!~n" + "{VSN,~n ~p,~n ~p}.~n", + [Up, Down] + ), ok = file:write_file(File, IOList). 
create_stub(App) -> @@ -544,30 +578,37 @@ index_app(AppFile) -> %% Note: assuming that beams are always located in the same directory where app file is: EbinDir = filename:dirname(AppFile), Modules = hashsums(EbinDir), - {App, #app{ version = Vsn - , modules = Modules - }}. + {App, #app{ + version = Vsn, + modules = Modules + }}. -diff_app(UpOrDown, App, - #app{version = NewVersion, modules = NewModules}, - #app{version = OldVersion, modules = OldModules}) -> +diff_app( + UpOrDown, + App, + #app{version = NewVersion, modules = NewModules}, + #app{version = OldVersion, modules = OldModules} +) -> {New, Changed} = - maps:fold( fun(Mod, MD5, {New, Changed}) -> - case OldModules of - #{Mod := OldMD5} when MD5 =:= OldMD5 -> - {New, Changed}; - #{Mod := _} -> - {New, [Mod | Changed]}; - _ -> - {[Mod | New], Changed} - end - end - , {[], []} - , NewModules - ), + maps:fold( + fun(Mod, MD5, {New, Changed}) -> + case OldModules of + #{Mod := OldMD5} when MD5 =:= OldMD5 -> + {New, Changed}; + #{Mod := _} -> + {New, [Mod | Changed]}; + _ -> + {[Mod | New], Changed} + end + end, + {[], []}, + NewModules + ), Deleted = maps:keys(maps:without(maps:keys(NewModules), OldModules)), - Changes = lists:filter(fun({_T, L}) -> length(L) > 0 end, - [{added, New}, {changed, Changed}, {deleted, Deleted}]), + Changes = lists:filter( + fun({_T, L}) -> length(L) > 0 end, + [{added, New}, {changed, Changed}, {deleted, Deleted}] + ), case NewVersion =:= OldVersion of true when Changes =:= [] -> %% no change @@ -577,13 +618,17 @@ diff_app(UpOrDown, App, case UpOrDown =:= up of true -> %% only log for the upgrade case because it would be the same result - log("ERROR: Application '~p' contains changes, but its version is not updated. ~s", - [App, format_changes(Changes)]); + log( + "ERROR: Application '~p' contains changes, but its version is not updated. 
~s", + [App, format_changes(Changes)] + ); false -> ok end; false -> - log("INFO: Application '~p' has been updated: ~p --[~p]--> ~p~n", [App, OldVersion, UpOrDown, NewVersion]), + log("INFO: Application '~p' has been updated: ~p --[~p]--> ~p~n", [ + App, OldVersion, UpOrDown, NewVersion + ]), log("INFO: changes [~p]: ~p~n", [UpOrDown, Changes]), ok end, @@ -594,14 +639,16 @@ format_changes(Changes) -> -spec hashsums(file:filename()) -> #{module() => binary()}. hashsums(EbinDir) -> - maps:from_list(lists:map( - fun(Beam) -> - File = filename:join(EbinDir, Beam), - {ok, Ret = {_Module, _MD5}} = beam_lib:md5(File), - Ret - end, - filelib:wildcard("*.beam", EbinDir) - )). + maps:from_list( + lists:map( + fun(Beam) -> + File = filename:join(EbinDir, Beam), + {ok, Ret = {_Module, _MD5}} = beam_lib:md5(File), + Ret + end, + filelib:wildcard("*.beam", EbinDir) + ) + ). is_app_external(App) -> Ext = ".app.src", @@ -674,12 +721,13 @@ do_locate(Dir, App, Suffix) -> end. find_app(Pattern) -> - lists:filter(fun(D) -> re:run(D, "apps/.*/_build") =:= nomatch end, - filelib:wildcard(Pattern)). + lists:filter( + fun(D) -> re:run(D, "apps/.*/_build") =:= nomatch end, + filelib:wildcard(Pattern) + ). bash(undefined) -> ok; -bash(Script) -> - bash(Script, []). +bash(Script) -> bash(Script, []). 
bash(Script, Env) -> log("+ ~s~n+ Env: ~p~n", [Script, Env]), @@ -695,12 +743,14 @@ cmd(Exec, Params) -> fail("Executable not found in $PATH: ~s", [Exec]); Path -> Params1 = maps:to_list(maps:with([env, args, cd], Params)), - Port = erlang:open_port( {spawn_executable, Path} - , [ exit_status - , nouse_stdio - | Params1 - ] - ), + Port = erlang:open_port( + {spawn_executable, Path}, + [ + exit_status, + nouse_stdio + | Params1 + ] + ), receive {Port, {exit_status, Status}} -> Status From 4ba1b2089400b9f7d67a43dd47299eadf43a819c Mon Sep 17 00:00:00 2001 From: JimMoen Date: Wed, 8 Nov 2023 11:51:47 +0800 Subject: [PATCH 105/155] style: add the last reformat commit to git-blam-ignore --- git-blame-ignore-revs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/git-blame-ignore-revs b/git-blame-ignore-revs index 41c6e5e49..c1bb2f5ea 100644 --- a/git-blame-ignore-revs +++ b/git-blame-ignore-revs @@ -33,5 +33,7 @@ b4451823350ec46126c49ca915b4b169dd4cf49e a4feb3e6e95c18cb531416112e57520c5ba00d40 # reformat apps/emqx_dashboard 07444e3da53c408695630bc0f57340f557106942 -# reformat all remaning apps +# reformat all remaining apps 02c3f87b316e8370287d5cd46de4f103ffe48433 +# erlfmt all remaining escripts +72eb34658d31fb38130421949cff262efab51139 From e0f4a2014d0738322752e6be16aaa31ff301a1a1 Mon Sep 17 00:00:00 2001 From: Shawn <506895667@qq.com> Date: Thu, 9 Nov 2023 14:58:35 +0800 Subject: [PATCH 106/155] fix: emqx failed to start if plugin dir is removed by the user --- apps/emqx_plugins/src/emqx_plugins.app.src | 2 +- apps/emqx_plugins/src/emqx_plugins.erl | 14 +++++++++++--- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/apps/emqx_plugins/src/emqx_plugins.app.src b/apps/emqx_plugins/src/emqx_plugins.app.src index d9c2d50df..963d1ec39 100644 --- a/apps/emqx_plugins/src/emqx_plugins.app.src +++ b/apps/emqx_plugins/src/emqx_plugins.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_plugins, [ {description, "EMQX Plugin 
Management"}, - {vsn, "0.1.6"}, + {vsn, "0.1.7"}, {modules, []}, {mod, {emqx_plugins_app, []}}, {applications, [kernel, stdlib, emqx]}, diff --git a/apps/emqx_plugins/src/emqx_plugins.erl b/apps/emqx_plugins/src/emqx_plugins.erl index 0e11062fc..41538daf6 100644 --- a/apps/emqx_plugins/src/emqx_plugins.erl +++ b/apps/emqx_plugins/src/emqx_plugins.erl @@ -433,9 +433,16 @@ do_ensure_started(NameVsn) -> tryit( "start_plugins", fun() -> - ok = ensure_exists_and_installed(NameVsn), - Plugin = do_read_plugin(NameVsn), - ok = load_code_start_apps(NameVsn, Plugin) + case ensure_exists_and_installed(NameVsn) of + ok -> + Plugin = do_read_plugin(NameVsn), + ok = load_code_start_apps(NameVsn, Plugin); + {error, plugin_not_found} -> + ?SLOG(error, #{ + msg => "plugin_not_found", + name_vsn => NameVsn + }) + end end ). @@ -665,6 +672,7 @@ do_load_plugin_app(AppName, Ebin) -> lists:foreach( fun(BeamFile) -> Module = list_to_atom(filename:basename(BeamFile, ".beam")), + _ = code:purge(Module), case code:load_file(Module) of {module, _} -> ok; From 2b97800c8b8c527cfeb8de8051de0d38954f9f6d Mon Sep 17 00:00:00 2001 From: firest Date: Thu, 9 Nov 2023 15:38:34 +0800 Subject: [PATCH 107/155] fix(limiter): simplify the descriptions for fields of the limiter --- .../emqx_limiter/src/emqx_limiter_schema.erl | 52 ++++---- apps/emqx/src/emqx_schema.erl | 2 +- rel/i18n/emqx_limiter_schema.hocon | 113 +++--------------- rel/i18n/emqx_schema.hocon | 21 ---- 4 files changed, 45 insertions(+), 143 deletions(-) diff --git a/apps/emqx/src/emqx_limiter/src/emqx_limiter_schema.erl b/apps/emqx/src/emqx_limiter/src/emqx_limiter_schema.erl index 802b29837..2dd4aa241 100644 --- a/apps/emqx/src/emqx_limiter/src/emqx_limiter_schema.erl +++ b/apps/emqx/src/emqx_limiter/src/emqx_limiter_schema.erl @@ -33,7 +33,7 @@ desc/1, types/0, short_paths/0, - short_paths_fields/1 + short_paths_fields/0 ]). -define(KILOBYTE, 1024). @@ -103,11 +103,11 @@ roots() -> ]. 
fields(limiter) -> - short_paths_fields(?MODULE, ?IMPORTANCE_HIDDEN) ++ + short_paths_fields(?IMPORTANCE_HIDDEN) ++ [ {Type, ?HOCON(?R_REF(node_opts), #{ - desc => ?DESC(Type), + desc => deprecated_desc(Type), importance => ?IMPORTANCE_HIDDEN, required => {false, recursively}, aliases => alias_of_type(Type) @@ -120,7 +120,7 @@ fields(limiter) -> ?HOCON( ?R_REF(client_fields), #{ - desc => ?DESC(client), + desc => deprecated_desc(client), importance => ?IMPORTANCE_HIDDEN, required => {false, recursively}, deprecated => {since, "5.0.25"} @@ -129,10 +129,10 @@ fields(limiter) -> ]; fields(node_opts) -> [ - {rate, ?HOCON(rate(), #{desc => ?DESC(rate), default => <<"infinity">>})}, + {rate, ?HOCON(rate(), #{desc => deprecated_desc(rate), default => <<"infinity">>})}, {burst, ?HOCON(burst_rate(), #{ - desc => ?DESC(burst), + desc => deprecated_desc(burst), default => <<"0">> })} ]; @@ -142,11 +142,12 @@ fields(bucket_opts) -> fields_of_bucket(<<"infinity">>); fields(client_opts) -> [ - {rate, ?HOCON(rate(), #{default => <<"infinity">>, desc => ?DESC(rate)})}, + {rate, ?HOCON(rate(), #{default => <<"infinity">>, desc => deprecated_desc(rate)})}, {initial, ?HOCON(initial(), #{ default => <<"0">>, - desc => ?DESC(initial), + + desc => deprecated_desc(initial), importance => ?IMPORTANCE_HIDDEN })}, %% low_watermark add for emqx_channel and emqx_session @@ -157,14 +158,14 @@ fields(client_opts) -> ?HOCON( initial(), #{ - desc => ?DESC(low_watermark), + desc => deprecated_desc(low_watermark), default => <<"0">>, importance => ?IMPORTANCE_HIDDEN } )}, {burst, ?HOCON(burst(), #{ - desc => ?DESC(burst), + desc => deprecated_desc(burst), default => <<"0">>, importance => ?IMPORTANCE_HIDDEN, aliases => [capacity] @@ -173,7 +174,7 @@ fields(client_opts) -> ?HOCON( boolean(), #{ - desc => ?DESC(divisible), + desc => deprecated_desc(divisible), default => true, importance => ?IMPORTANCE_HIDDEN } @@ -182,7 +183,7 @@ fields(client_opts) -> ?HOCON( emqx_schema:timeout_duration(), #{ - 
desc => ?DESC(max_retry_time), + desc => deprecated_desc(max_retry_time), default => <<"1h">>, importance => ?IMPORTANCE_HIDDEN } @@ -191,7 +192,7 @@ fields(client_opts) -> ?HOCON( failure_strategy(), #{ - desc => ?DESC(failure_strategy), + desc => deprecated_desc(failure_strategy), default => force, importance => ?IMPORTANCE_HIDDEN } @@ -204,14 +205,14 @@ fields(listener_client_fields) -> fields(Type) -> simple_bucket_field(Type). -short_paths_fields(DesModule) -> - short_paths_fields(DesModule, ?DEFAULT_IMPORTANCE). +short_paths_fields() -> + short_paths_fields(?DEFAULT_IMPORTANCE). -short_paths_fields(DesModule, Importance) -> +short_paths_fields(Importance) -> [ {Name, ?HOCON(rate(), #{ - desc => ?DESC(DesModule, Name), + desc => ?DESC(Name), required => false, importance => Importance, example => Example @@ -381,7 +382,7 @@ simple_bucket_field(Type) when is_atom(Type) -> ?HOCON( ?R_REF(?MODULE, client_opts), #{ - desc => ?DESC(client), + desc => deprecated_desc(client), required => {false, recursively}, importance => importance_of_type(Type), aliases => alias_of_type(Type) @@ -394,7 +395,7 @@ composite_bucket_fields(Types, ClientRef) -> [ {Type, ?HOCON(?R_REF(?MODULE, bucket_opts), #{ - desc => ?DESC(?MODULE, Type), + desc => deprecated_desc(Type), required => {false, recursively}, importance => importance_of_type(Type), aliases => alias_of_type(Type) @@ -406,7 +407,7 @@ composite_bucket_fields(Types, ClientRef) -> ?HOCON( ?R_REF(?MODULE, ClientRef), #{ - desc => ?DESC(client), + desc => deprecated_desc(client), required => {false, recursively} } )} @@ -414,10 +415,10 @@ composite_bucket_fields(Types, ClientRef) -> fields_of_bucket(Default) -> [ - {rate, ?HOCON(rate(), #{desc => ?DESC(rate), default => Default})}, + {rate, ?HOCON(rate(), #{desc => deprecated_desc(rate), default => Default})}, {burst, ?HOCON(burst(), #{ - desc => ?DESC(burst), + desc => deprecated_desc(burst), default => <<"0">>, importance => ?IMPORTANCE_HIDDEN, aliases => [capacity] @@ -425,7 
+426,7 @@ fields_of_bucket(Default) -> {initial, ?HOCON(initial(), #{ default => <<"0">>, - desc => ?DESC(initial), + desc => deprecated_desc(initial), importance => ?IMPORTANCE_HIDDEN })} ]. @@ -434,7 +435,7 @@ client_fields(Types) -> [ {Type, ?HOCON(?R_REF(client_opts), #{ - desc => ?DESC(Type), + desc => deprecated_desc(Type), required => false, importance => importance_of_type(Type), aliases => alias_of_type(Type) @@ -457,3 +458,6 @@ alias_of_type(bytes) -> [bytes_in]; alias_of_type(_) -> []. + +deprecated_desc(_Field) -> + <<"Deprecated since v5.0.25">>. diff --git a/apps/emqx/src/emqx_schema.erl b/apps/emqx/src/emqx_schema.erl index 804a3a04c..3848e77b4 100644 --- a/apps/emqx/src/emqx_schema.erl +++ b/apps/emqx/src/emqx_schema.erl @@ -1849,7 +1849,7 @@ base_listener(Bind) -> default => true } )} - ] ++ emqx_limiter_schema:short_paths_fields(?MODULE). + ] ++ emqx_limiter_schema:short_paths_fields(). desc("persistent_session_store") -> "Settings for message persistence."; diff --git a/rel/i18n/emqx_limiter_schema.hocon b/rel/i18n/emqx_limiter_schema.hocon index b2958ce90..1a0ed5273 100644 --- a/rel/i18n/emqx_limiter_schema.hocon +++ b/rel/i18n/emqx_limiter_schema.hocon @@ -2,114 +2,33 @@ emqx_limiter_schema { max_conn_rate.desc: """Maximum connection rate.
-This is used to limit the connection rate for this node, -once the limit is reached, new connections will be deferred or refused""" +This is used to limit the connection rate for this node. +Once the limit is reached, new connections will be deferred or refused.
+For example:
+- 1000/s :: Only accepts 1000 connections per second
+- 1000/10s :: Only accepts 1000 connections every 10 seconds""" max_conn_rate.label: """Maximum Connection Rate""" messages_rate.desc: """Messages publish rate.
-This is used to limit the inbound message numbers for this node, -once the limit is reached, the restricted client will slow down and even be hung for a while.""" +This is used to limit the inbound message numbers for this node. +Once the limit is reached, the restricted client will slow down and even be hung for a while.
+For example:
+- 500/s :: Only the first 500 messages are sent per second and other messages are buffered.
+- 500/10s :: Only the first 500 messages are sent even 10 second and other messages are buffered.""" messages_rate.label: """Messages Publish Rate""" bytes_rate.desc: """Data publish rate.
-This is used to limit the inbound bytes rate for this node, -once the limit is reached, the restricted client will slow down and even be hung for a while.""" +This is used to limit the inbound bytes rate for this node. +Once the limit is reached, the restricted client will slow down and even be hung for a while.
+The unit of the bytes could be:KB MB GB.
+For example:
+- 500KB/s :: Only the first 500 kilobytes are sent per second and other messages are buffered.
+- 500MB/10s :: Only the first 500 megabytes are sent even 10 second and other messages are buffered.""" bytes_rate.label: """Data Publish Rate""" -bucket_cfg.desc: -"""Bucket Configs""" - -bucket_cfg.label: -"""Buckets""" - -burst.desc: -"""The burst, This value is based on rate.
- This value + rate = the maximum limit that can be achieved when limiter burst.""" - -burst.label: -"""Burst""" - -bytes.desc: -"""The `bytes` limiter. -This is used to limit the inbound bytes rate for this EMQX node. -Once the limit is reached, the restricted client will be slow down even be hung for a while.""" - -bytes.label: -"""Bytes""" - -client.desc: -"""The rate limit for each user of the bucket""" - -client.label: -"""Per Client""" - -connection.desc: -"""The connection limiter. -This is used to limit the connection rate for this EMQX node. -Once the limit is reached, new connections will be refused""" - -connection.label: -"""Connection""" - -divisible.desc: -"""Is it possible to split the number of requested tokens?""" - -divisible.label: -"""Divisible""" - -failure_strategy.desc: -"""The strategy when all the retries failed.""" - -failure_strategy.label: -"""Failure Strategy""" - -initial.desc: -"""The initial number of tokens for this bucket.""" - -initial.label: -"""Initial""" - -internal.desc: -"""Limiter for EMQX internal app.""" - -low_watermark.desc: -"""If the remaining tokens are lower than this value, -the check/consume will succeed, but it will be forced to wait for a short period of time.""" - -low_watermark.label: -"""Low Watermark""" - -max_retry_time.desc: -"""The maximum retry time when acquire failed.""" - -max_retry_time.label: -"""Max Retry Time""" - -message_routing.desc: -"""The message routing limiter. -This is used to limit the forwarding rate for this EMQX node. -Once the limit is reached, new publish will be refused""" - -message_routing.label: -"""Message Routing""" - -messages.desc: -"""The `messages` limiter. 
-This is used to limit the inbound message numbers for this EMQX node -Once the limit is reached, the restricted client will be slow down even be hung for a while.""" - -messages.label: -"""Messages""" - -rate.desc: -"""Rate for this bucket.""" - -rate.label: -"""Rate""" - } diff --git a/rel/i18n/emqx_schema.hocon b/rel/i18n/emqx_schema.hocon index 9ed579994..e1d086197 100644 --- a/rel/i18n/emqx_schema.hocon +++ b/rel/i18n/emqx_schema.hocon @@ -1039,27 +1039,6 @@ base_listener_limiter.desc: base_listener_limiter.label: """Type of the rate limit.""" -max_conn_rate.desc: -"""Maximum connection rate.
-This is used to limit the connection rate for this listener, -once the limit is reached, new connections will be deferred or refused""" -max_conn_rate.label: -"""Maximum Connection Rate""" - -messages_rate.desc: -"""Messages publish rate.
-This is used to limit the inbound message numbers for each client connected to this listener, -once the limit is reached, the restricted client will slow down and even be hung for a while.""" -messages_rate.label: -"""Messages Publish Rate""" - -bytes_rate.desc: -"""Data publish rate.
-This is used to limit the inbound bytes rate for each client connected to this listener, -once the limit is reached, the restricted client will slow down and even be hung for a while.""" -bytes_rate.label: -"""Data Publish Rate""" - persistent_session_store_backend.desc: """Database management system used to store information about persistent sessions and messages. - `builtin`: Use the embedded database (mria)""" From 3350bda446c9f29849a2ac8add8f0388722abcd4 Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Thu, 9 Nov 2023 14:05:13 +0100 Subject: [PATCH 108/155] chore(hooks): Fix comment --- apps/emqx/src/emqx_hooks.erl | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/apps/emqx/src/emqx_hooks.erl b/apps/emqx/src/emqx_hooks.erl index c3e9c2230..efe2c0de8 100644 --- a/apps/emqx/src/emqx_hooks.erl +++ b/apps/emqx/src/emqx_hooks.erl @@ -66,8 +66,9 @@ %% - Callbacks with greater priority values will be run before %% the ones with lower priority values. e.g. A Callback with %% priority = 2 precedes the callback with priority = 1. -%% - The execution order is the adding order of callbacks if they have -%% equal priority values. +%% - If the priorities of the hooks are equal then their execution +%% order is determined by the lexicographic of hook function +%% names. -type hookpoint() :: atom() | binary(). -type action() :: {module(), atom(), [term()] | undefined}. 
From cf4a46a78b64df9c05ddc7f235ffea2938e865f9 Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Thu, 9 Nov 2023 14:22:32 +0100 Subject: [PATCH 109/155] fix(ds): Speed up polling for the new messages Poll immediately if the previous poll returned non-empty result --- .../src/emqx_persistent_message_ds_replayer.erl | 4 ++++ apps/emqx/src/emqx_persistent_session_ds.erl | 16 ++++++++++++++-- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/apps/emqx/src/emqx_persistent_message_ds_replayer.erl b/apps/emqx/src/emqx_persistent_message_ds_replayer.erl index d137891a2..3964ee4e3 100644 --- a/apps/emqx/src/emqx_persistent_message_ds_replayer.erl +++ b/apps/emqx/src/emqx_persistent_message_ds_replayer.erl @@ -28,6 +28,10 @@ -include("emqx_persistent_session_ds.hrl"). +-ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). +-endif. + %%================================================================================ %% Type declarations %%================================================================================ diff --git a/apps/emqx/src/emqx_persistent_session_ds.erl b/apps/emqx/src/emqx_persistent_session_ds.erl index f3027f500..bf16567fd 100644 --- a/apps/emqx/src/emqx_persistent_session_ds.erl +++ b/apps/emqx/src/emqx_persistent_session_ds.erl @@ -344,7 +344,15 @@ deliver(_ClientInfo, _Delivers, Session) -> handle_timeout(_ClientInfo, pull, Session = #{id := Id, inflight := Inflight0}) -> WindowSize = 100, {Publishes, Inflight} = emqx_persistent_message_ds_replayer:poll(Id, Inflight0, WindowSize), - ensure_timer(pull), + %% TODO: make these values configurable: + Timeout = + case Publishes of + [] -> + 100; + [_ | _] -> + 0 + end, + ensure_timer(pull, Timeout), {ok, Publishes, Session#{inflight => Inflight}}; handle_timeout(_ClientInfo, get_streams, Session = #{id := Id}) -> renew_streams(Id), @@ -714,5 +722,9 @@ ensure_timers() -> -spec ensure_timer(pull | get_streams) -> ok. 
ensure_timer(Type) -> - _ = emqx_utils:start_timer(100, {emqx_session, Type}), + ensure_timer(Type, 100). + +-spec ensure_timer(pull | get_streams, non_neg_integer()) -> ok. +ensure_timer(Type, Timeout) -> + _ = emqx_utils:start_timer(Timeout, {emqx_session, Type}), ok. From 42536bb5f7142b7e528cf8b0c7738a751dcde77e Mon Sep 17 00:00:00 2001 From: Thales Macedo Garitezi Date: Wed, 8 Nov 2023 15:58:46 -0300 Subject: [PATCH 110/155] chore(ds): avoid using records in persistence / rpc Fixes https://emqx.atlassian.net/browse/EMQX-11279 --- apps/emqx/priv/bpapi.versions | 2 +- .../emqx_persistent_message_ds_replayer.erl | 12 ++- apps/emqx/src/emqx_persistent_session_ds.erl | 2 - apps/emqx/src/emqx_persistent_session_ds.hrl | 1 + .../src/emqx_ds_replication_layer.erl | 58 ++++++++----- .../src/emqx_ds_storage_bitfield_lts.erl | 84 +++++++++++++------ .../src/emqx_ds_storage_layer.erl | 64 ++++++++------ .../src/proto/emqx_ds_proto_v1.erl | 4 +- 8 files changed, 148 insertions(+), 79 deletions(-) diff --git a/apps/emqx/priv/bpapi.versions b/apps/emqx/priv/bpapi.versions index f647c660f..7042f5186 100644 --- a/apps/emqx/priv/bpapi.versions +++ b/apps/emqx/priv/bpapi.versions @@ -14,7 +14,7 @@ {emqx_conf,1}. {emqx_conf,2}. {emqx_conf,3}. -{emqx_connector, 1}. +{emqx_connector,1}. {emqx_dashboard,1}. {emqx_delayed,1}. {emqx_delayed,2}. 
diff --git a/apps/emqx/src/emqx_persistent_message_ds_replayer.erl b/apps/emqx/src/emqx_persistent_message_ds_replayer.erl index d137891a2..86635ee97 100644 --- a/apps/emqx/src/emqx_persistent_message_ds_replayer.erl +++ b/apps/emqx/src/emqx_persistent_message_ds_replayer.erl @@ -132,7 +132,7 @@ fetch(_SessionId, Inflight, _Streams = [], _N, Acc) -> {lists:reverse(Acc), Inflight}; fetch(_SessionId, Inflight, _Streams, 0, Acc) -> {lists:reverse(Acc), Inflight}; -fetch(SessionId, Inflight0, [#ds_stream{stream = Stream} | Streams], N, Publishes0) -> +fetch(SessionId, Inflight0, [Stream | Streams], N, Publishes0) -> #inflight{next_seqno = FirstSeqNo, offset_ranges = Ranges0} = Inflight0, ItBegin = get_last_iterator(SessionId, Stream, Ranges0), {ok, ItEnd, Messages} = emqx_ds:next(ItBegin, N), @@ -162,6 +162,7 @@ fetch(SessionId, Inflight0, [#ds_stream{stream = Stream} | Streams], N, Publishe fetch(SessionId, Inflight1, Streams, N, Publishes) end. +-spec update_iterator(emqx_persistent_session_ds:id(), emqx_ds:stream(), emqx_ds:iterator()) -> ok. update_iterator(SessionId, Stream, Iterator) -> mria:dirty_write(?SESSION_ITER_TAB, #ds_iter{id = {SessionId, Stream}, iter = Iterator}). @@ -173,13 +174,20 @@ get_last_iterator(SessionId, Stream, Ranges) -> Next end. +-spec get_iterator(emqx_persistent_session_ds:id(), emqx_ds:stream()) -> emqx_ds:iterator(). get_iterator(SessionId, Stream) -> Id = {SessionId, Stream}, [#ds_iter{iter = It}] = mnesia:dirty_read(?SESSION_ITER_TAB, Id), It. +-spec get_streams(emqx_persistent_session_ds:id()) -> [emqx_ds:stream()]. get_streams(SessionId) -> - mnesia:dirty_read(?SESSION_STREAM_TAB, SessionId). + lists:map( + fun(#ds_stream{stream = Stream}) -> + Stream + end, + mnesia:dirty_read(?SESSION_STREAM_TAB, SessionId) + ). %% Packet ID as defined by MQTT protocol is a 16-bit integer in range %% 1..FFFF. 
This function translates internal session sequence number diff --git a/apps/emqx/src/emqx_persistent_session_ds.erl b/apps/emqx/src/emqx_persistent_session_ds.erl index f3027f500..dbd88cf30 100644 --- a/apps/emqx/src/emqx_persistent_session_ds.erl +++ b/apps/emqx/src/emqx_persistent_session_ds.erl @@ -97,8 +97,6 @@ props := map() }. -%% -type session() :: #session{}. - -type timestamp() :: emqx_utils_calendar:epoch_millisecond(). -type topic() :: emqx_types:topic(). -type clientinfo() :: emqx_types:clientinfo(). diff --git a/apps/emqx/src/emqx_persistent_session_ds.hrl b/apps/emqx/src/emqx_persistent_session_ds.hrl index 54b077795..81b997df5 100644 --- a/apps/emqx/src/emqx_persistent_session_ds.hrl +++ b/apps/emqx/src/emqx_persistent_session_ds.hrl @@ -36,6 +36,7 @@ stream :: emqx_ds:stream(), rank :: emqx_ds:stream_rank() }). +-type ds_stream() :: #ds_stream{}. -record(ds_iter, { id :: {emqx_persistent_session_ds:id(), emqx_ds:stream()}, diff --git a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl index d61dfa906..b81f43c4f 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl @@ -43,30 +43,41 @@ %% Type declarations %%================================================================================ +%% # "Record" integer keys. We use maps with integer keys to avoid persisting and sending +%% records over the wire. + +%% tags: +-define(stream, stream). +-define(it, it). + +%% keys: +-define(tag, 1). +-define(shard, 2). +-define(enc, 3). + -type db() :: emqx_ds:db(). -type shard_id() :: {db(), atom()}. -%% This record enapsulates the stream entity from the replication -%% level. +%% This enapsulates the stream entity from the replication level. %% %% TODO: currently the stream is hardwired to only support the %% internal rocksdb storage. 
In the future we want to add another %% implementations for emqx_ds, so this type has to take this into %% account. --record(stream, { - shard :: emqx_ds_replication_layer:shard_id(), - enc :: emqx_ds_storage_layer:stream() -}). +-opaque stream() :: + #{ + ?tag := ?stream, + ?shard := emqx_ds_replication_layer:shard_id(), + ?enc := emqx_ds_storage_layer:stream() + }. --opaque stream() :: #stream{}. - --record(iterator, { - shard :: emqx_ds_replication_layer:shard_id(), - enc :: enqx_ds_storage_layer:iterator() -}). - --opaque iterator() :: #iterator{}. +-opaque iterator() :: + #{ + ?tag := ?it, + ?shard := emqx_ds_replication_layer:shard_id(), + ?enc := emqx_ds_storage_layer:iterator() + }. -type message_id() :: emqx_ds_storage_layer:message_id(). @@ -124,9 +135,10 @@ get_streams(DB, TopicFilter, StartTime) -> fun({RankY, Stream}) -> RankX = Shard, Rank = {RankX, RankY}, - {Rank, #stream{ - shard = Shard, - enc = Stream + {Rank, #{ + ?tag => ?stream, + ?shard => Shard, + ?enc => Stream }} end, Streams @@ -138,18 +150,18 @@ get_streams(DB, TopicFilter, StartTime) -> -spec make_iterator(stream(), emqx_ds:topic_filter(), emqx_ds:time()) -> emqx_ds:make_iterator_result(iterator()). make_iterator(Stream, TopicFilter, StartTime) -> - #stream{shard = Shard, enc = StorageStream} = Stream, + #{?tag := ?stream, ?shard := Shard, ?enc := StorageStream} = Stream, Node = node_of_shard(Shard), case emqx_ds_proto_v1:make_iterator(Node, Shard, StorageStream, TopicFilter, StartTime) of {ok, Iter} -> - {ok, #iterator{shard = Shard, enc = Iter}}; + {ok, #{?tag => ?it, ?shard => Shard, ?enc => Iter}}; Err = {error, _} -> Err end. -spec next(iterator(), pos_integer()) -> emqx_ds:next_result(iterator()). next(Iter0, BatchSize) -> - #iterator{shard = Shard, enc = StorageIter0} = Iter0, + #{?tag := ?it, ?shard := Shard, ?enc := StorageIter0} = Iter0, Node = node_of_shard(Shard), %% TODO: iterator can contain information that is useful for %% reconstructing messages sent over the network. 
For example, @@ -161,7 +173,7 @@ next(Iter0, BatchSize) -> %% replication layer. Or, perhaps, in the logic layer. case emqx_ds_proto_v1:next(Node, Shard, StorageIter0, BatchSize) of {ok, StorageIter, Batch} -> - Iter = #iterator{shard = Shard, enc = StorageIter}, + Iter = Iter0#{?enc := StorageIter}, {ok, Iter, Batch}; Other -> Other @@ -184,14 +196,14 @@ do_drop_shard_v1(Shard) -> emqx_ds_storage_layer:drop_shard(Shard). -spec do_get_streams_v1(shard_id(), emqx_ds:topic_filter(), emqx_ds:time()) -> - [{integer(), _Stream}]. + [{integer(), emqx_ds_storage_layer:stream()}]. do_get_streams_v1(Shard, TopicFilter, StartTime) -> emqx_ds_storage_layer:get_streams(Shard, TopicFilter, StartTime). -spec do_make_iterator_v1( shard_id(), emqx_ds_storage_layer:stream(), emqx_ds:topic_filter(), emqx_ds:time() ) -> - {ok, iterator()} | {error, _}. + {ok, emqx_ds_storage_layer:iterator()} | {error, _}. do_make_iterator_v1(Shard, Stream, TopicFilter, StartTime) -> emqx_ds_storage_layer:make_iterator(Shard, Stream, TopicFilter, StartTime). diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl index d57d8013c..d2c997ae1 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl @@ -38,6 +38,20 @@ %% Type declarations %%================================================================================ +%% # "Record" integer keys. We use maps with integer keys to avoid persisting and sending +%% records over the wire. + +%% tags: +-define(stream, stream). +-define(it, it). + +%% keys: +-define(tag, 1). +-define(topic_filter, 2). +-define(start_time, 3). +-define(storage_key, 4). +-define(last_seen_key, 5). + -type options() :: #{ bits_per_wildcard_level => pos_integer(), @@ -65,18 +79,20 @@ -type s() :: #s{}. --record(stream, { - storage_key :: emqx_ds_lts:msg_storage_key() -}). 
+-type stream() :: + #{ + ?tag := ?stream, + ?storage_key := emqx_ds_lts:msg_storage_key() + }. --record(it, { - topic_filter :: emqx_ds:topic_filter(), - start_time :: emqx_ds:time(), - storage_key :: emqx_ds_lts:msg_storage_key(), - last_seen_key = <<>> :: binary() -}). - --type iterator() :: #it{}. +-type iterator() :: + #{ + ?tag := ?it, + ?topic_filter := emqx_ds:topic_filter(), + ?start_time := emqx_ds:time(), + ?storage_key := emqx_ds_lts:msg_storage_key(), + ?last_seen_key := binary() + }. -define(COUNTER, emqx_ds_storage_bitfield_lts_counter). @@ -170,18 +186,35 @@ store_batch(_ShardId, S = #s{db = DB, data = Data}, Messages, _Options) -> Messages ). +-spec get_streams( + emqx_ds_replication_layer:shard_id(), + s(), + emqx_ds:topic_filter(), + emqx_ds:time() +) -> [stream()]. get_streams(_Shard, #s{trie = Trie}, TopicFilter, _StartTime) -> Indexes = emqx_ds_lts:match_topics(Trie, TopicFilter), - [#stream{storage_key = I} || I <- Indexes]. + [#{?tag => ?stream, ?storage_key => I} || I <- Indexes]. -make_iterator(_Shard, _Data, #stream{storage_key = StorageKey}, TopicFilter, StartTime) -> +-spec make_iterator( + emqx_ds_replication_layer:shard_id(), + s(), + stream(), + emqx_ds:topic_filter(), + emqx_ds:time() +) -> {ok, iterator()}. +make_iterator( + _Shard, _Data, #{?tag := ?stream, ?storage_key := StorageKey}, TopicFilter, StartTime +) -> %% Note: it's a good idea to keep the iterator structure lean, %% since it can be stored on a remote node that could update its %% code independently from us. - {ok, #it{ - topic_filter = TopicFilter, - start_time = StartTime, - storage_key = StorageKey + {ok, #{ + ?tag => ?it, + ?topic_filter => TopicFilter, + ?start_time => StartTime, + ?storage_key => StorageKey, + ?last_seen_key => <<>> }}. 
next(_Shard, Schema = #s{ts_offset = TSOffset}, It, BatchSize) -> @@ -192,16 +225,19 @@ next(_Shard, Schema = #s{ts_offset = TSOffset}, It, BatchSize) -> SafeCutoffTime = (Now bsr TSOffset) bsl TSOffset, next_until(Schema, It, SafeCutoffTime, BatchSize). -next_until(_Schema, It, SafeCutoffTime, _BatchSize) when It#it.start_time >= SafeCutoffTime -> +next_until(_Schema, It = #{?tag := ?it, ?start_time := StartTime}, SafeCutoffTime, _BatchSize) when + StartTime >= SafeCutoffTime +-> %% We're in the middle of the current epoch, so we can't yet iterate over it. %% It would be unsafe otherwise: messages can be stored in the current epoch %% concurrently with iterating over it. They can end up earlier (in the iteration %% order) due to the nature of keymapping, potentially causing us to miss them. {ok, It, []}; next_until(#s{db = DB, data = CF, keymappers = Keymappers}, It, SafeCutoffTime, BatchSize) -> - #it{ - start_time = StartTime, - storage_key = {TopicIndex, Varying} + #{ + ?tag := ?it, + ?start_time := StartTime, + ?storage_key := {TopicIndex, Varying} } = It, %% Make filter: Inequations = [ @@ -250,7 +286,7 @@ next_loop(_ITHandle, _KeyMapper, _Filter, _Cutoff, It, Acc, 0) -> {ok, It, lists:reverse(Acc)}; next_loop(ITHandle, KeyMapper, Filter, Cutoff, It0, Acc0, N0) -> inc_counter(), - #it{last_seen_key = Key0} = It0, + #{?tag := ?it, ?last_seen_key := Key0} = It0, case emqx_ds_bitmask_keymapper:bin_increment(Filter, Key0) of overflow -> {ok, It0, lists:reverse(Acc0)}; @@ -268,7 +304,7 @@ next_loop(ITHandle, KeyMapper, Filter, Cutoff, It0, Acc0, N0) -> end. 
traverse_interval(ITHandle, Filter, Cutoff, Key, Val, It0, Acc0, N) -> - It = It0#it{last_seen_key = Key}, + It = It0#{?last_seen_key := Key}, case emqx_ds_bitmask_keymapper:bin_checkmask(Filter, Key) of true -> Msg = deserialize(Val), @@ -310,7 +346,7 @@ check_message( overflow; check_message( _Cutoff, - #it{start_time = StartTime, topic_filter = TopicFilter}, + #{?tag := ?it, ?start_time := StartTime, ?topic_filter := TopicFilter}, #message{timestamp = Timestamp, topic = Topic} ) when Timestamp >= StartTime -> emqx_topic:match(emqx_topic:words(Topic), TopicFilter); diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl index 57af33d61..c91ac49d5 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl @@ -34,6 +34,18 @@ %% Type declarations %%================================================================================ +%% # "Record" integer keys. We use maps with integer keys to avoid persisting and sending +%% records over the wire. + +%% tags: +-define(stream, stream). +-define(it, it). + +%% keys: +-define(tag, 1). +-define(generation, 2). +-define(enc, 3). + -type prototype() :: {emqx_ds_storage_reference, emqx_ds_storage_reference:options()} | {emqx_ds_storage_bitfield_lts, emqx_ds_storage_bitfield_lts:options()}. @@ -44,23 +56,21 @@ -type gen_id() :: 0..16#ffff. -%% Note: this record might be stored permanently on a remote node. --record(stream, { - generation :: gen_id(), - enc :: _EncapsulatedData, - misc = #{} :: map() -}). +%% Note: this might be stored permanently on a remote node. +-opaque stream() :: + #{ + ?tag := ?stream, + ?generation := gen_id(), + ?enc := term() + }. --opaque stream() :: #stream{}. - -%% Note: this record might be stored permanently on a remote node. --record(it, { - generation :: gen_id(), - enc :: _EncapsulatedData, - misc = #{} :: map() -}). - --opaque iterator() :: #it{}. 
+%% Note: this might be stored permanently on a remote node. +-opaque iterator() :: + #{ + ?tag := ?it, + ?generation := gen_id(), + ?enc := term() + }. %%%% Generation: @@ -154,9 +164,10 @@ get_streams(Shard, TopicFilter, StartTime) -> #{module := Mod, data := GenData} = generation_get(Shard, GenId), Streams = Mod:get_streams(Shard, GenData, TopicFilter, StartTime), [ - {GenId, #stream{ - generation = GenId, - enc = Stream + {GenId, #{ + ?tag => ?stream, + ?generation => GenId, + ?enc => Stream }} || Stream <- Streams ] @@ -166,13 +177,16 @@ get_streams(Shard, TopicFilter, StartTime) -> -spec make_iterator(shard_id(), stream(), emqx_ds:topic_filter(), emqx_ds:time()) -> emqx_ds:make_iterator_result(iterator()). -make_iterator(Shard, #stream{generation = GenId, enc = Stream}, TopicFilter, StartTime) -> +make_iterator( + Shard, #{?tag := ?stream, ?generation := GenId, ?enc := Stream}, TopicFilter, StartTime +) -> #{module := Mod, data := GenData} = generation_get(Shard, GenId), case Mod:make_iterator(Shard, GenData, Stream, TopicFilter, StartTime) of {ok, Iter} -> - {ok, #it{ - generation = GenId, - enc = Iter + {ok, #{ + ?tag => ?it, + ?generation => GenId, + ?enc => Iter }}; {error, _} = Err -> Err @@ -180,7 +194,7 @@ make_iterator(Shard, #stream{generation = GenId, enc = Stream}, TopicFilter, Sta -spec next(shard_id(), iterator(), pos_integer()) -> emqx_ds:next_result(iterator()). -next(Shard, Iter = #it{generation = GenId, enc = GenIter0}, BatchSize) -> +next(Shard, Iter = #{?tag := ?it, ?generation := GenId, ?enc := GenIter0}, BatchSize) -> #{module := Mod, data := GenData} = generation_get(Shard, GenId), Current = generation_current(Shard), case Mod:next(Shard, GenData, GenIter0, BatchSize) of @@ -190,7 +204,7 @@ next(Shard, Iter = #it{generation = GenId, enc = GenIter0}, BatchSize) -> %% the stream has been fully replayed. 
{ok, end_of_stream}; {ok, GenIter, Batch} -> - {ok, Iter#it{enc = GenIter}, Batch}; + {ok, Iter#{?enc := GenIter}, Batch}; Error = {error, _} -> Error end. diff --git a/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl b/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl index 17e873ecd..6a79a4a61 100644 --- a/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl +++ b/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl @@ -41,7 +41,7 @@ drop_shard(Node, Shard) -> -spec get_streams( node(), emqx_ds_replication_layer:shard_id(), emqx_ds:topic_filter(), emqx_ds:time() ) -> - [{integer(), emqx_ds_replication_layer:stream()}]. + [{integer(), emqx_ds_storage_layer:stream()}]. get_streams(Node, Shard, TopicFilter, Time) -> erpc:call(Node, emqx_ds_replication_layer, do_get_streams_v1, [Shard, TopicFilter, Time]). @@ -52,7 +52,7 @@ get_streams(Node, Shard, TopicFilter, Time) -> emqx_ds:topic_filter(), emqx_ds:time() ) -> - {ok, emqx_ds_replication_layer:iterator()} | {error, _}. + {ok, emqx_ds_storage_layer:iterator()} | {error, _}. make_iterator(Node, Shard, Stream, TopicFilter, StartTime) -> erpc:call(Node, emqx_ds_replication_layer, do_make_iterator_v1, [ Shard, Stream, TopicFilter, StartTime From e15553f9bdfb77604d52c5483845c3aa7998bd45 Mon Sep 17 00:00:00 2001 From: Ivan Dyachkov Date: Thu, 9 Nov 2023 15:20:03 +0100 Subject: [PATCH 111/155] ci: fix pattern for on push branch --- .github/workflows/_push-entrypoint.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/_push-entrypoint.yaml b/.github/workflows/_push-entrypoint.yaml index b1ba0cdeb..e2eff6dc7 100644 --- a/.github/workflows/_push-entrypoint.yaml +++ b/.github/workflows/_push-entrypoint.yaml @@ -11,7 +11,7 @@ on: - 'e*' branches: - 'master' - - 'release-5?' 
+ - 'release-5[0-9]' - 'ci/**' env: From aa4a2d93310b3df451b42a7d8ed1f6f82fd372ad Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Thu, 9 Nov 2023 16:25:55 +0100 Subject: [PATCH 112/155] build: start versioning i18n files --- scripts/pre-compile.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/scripts/pre-compile.sh b/scripts/pre-compile.sh index 632aabfe4..dfad7c869 100755 --- a/scripts/pre-compile.sh +++ b/scripts/pre-compile.sh @@ -25,10 +25,12 @@ cd -P -- "$(dirname -- "${BASH_SOURCE[0]}")/.." # generate merged config files and English translation of the desc (desc.en.hocon) ./scripts/merge-config.escript +I18N_REPO_BRANCH="v$(./pkg-vsn.sh "${PROFILE_STR}" | tr -d '.' | cut -c 1-2)" + # download desc (i18n) translations curl -L --silent --show-error \ --output "apps/emqx_dashboard/priv/desc.zh.hocon" \ - 'https://raw.githubusercontent.com/emqx/emqx-i18n/main/desc.zh.hocon' + "https://raw.githubusercontent.com/emqx/emqx-i18n/${I18N_REPO_BRANCH}/desc.zh.hocon" # TODO # make sbom a build artifcat From 5990f9835fac9f2aa182721ecedb4c8c2c6b4595 Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Thu, 9 Nov 2023 16:44:14 +0100 Subject: [PATCH 113/155] build: generate desc.en.hocon file for i18n --- build | 3 +++ scripts/merge-i18n.escript | 41 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 44 insertions(+) create mode 100755 scripts/merge-i18n.escript diff --git a/build b/build index 8b485f3b6..3ca5dfcec 100755 --- a/build +++ b/build @@ -133,6 +133,9 @@ make_docs() { erl -noshell -eval \ "ok = emqx_conf:dump_schema('$docdir', $SCHEMA_MODULE), \ halt(0)." + local desc="$docdir/desc.en.hocon" + log "Generating $desc" + scripts/merge-i18n.escript | jq --sort-keys . 
> "$desc" } ## arg1 is the profile for which the following args (as app names) should be excluded diff --git a/scripts/merge-i18n.escript b/scripts/merge-i18n.escript new file mode 100755 index 000000000..dfd76f01f --- /dev/null +++ b/scripts/merge-i18n.escript @@ -0,0 +1,41 @@ +#!/usr/bin/env escript + +%% This script is only used at build time to generate the merged desc.en.hocon in JSON format +%% but NOT the file generated to _build/$PROFILE/lib/emqx_dashboard/priv (which is HOCON format). +%% +%% The generated JSON file is used as the source of truth when translating to other languages. + +-mode(compile). + +-define(RED, "\e[31m"). +-define(RESET, "\e[39m"). + +main(_) -> + try + _ = hocon:module_info() + catch + _:_ -> + fail("hocon module not found, please make sure the project is compiled") + end, + %% wildcard all .hocon files in rel/i18n + Files = filelib:wildcard("rel/i18n/*.hocon"), + case Files of + [_ | _] -> + ok; + [] -> + fail("No .hocon files found in rel/i18n") + end, + case hocon:files(Files) of + {ok, Map} -> + JSON = jiffy:encode(Map), + io:format("~s~n", [JSON]); + {error, Reason} -> + fail("~p~n", [Reason]) + end. + +fail(Str) -> + fail(Str, []). + +fail(Str, Args) -> + io:format(standard_error, ?RED ++ "ERROR: " ++ Str ++ ?RESET ++ "~n", Args), + halt(1). 
From fd205f599c8e60739a8a693632df612bdb2f82c8 Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Thu, 9 Nov 2023 18:14:56 +0100 Subject: [PATCH 114/155] build: do not generate desc.en.hocon if jq command is not found --- build | 23 +++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/build b/build index 3ca5dfcec..c70f91b60 100755 --- a/build +++ b/build @@ -12,6 +12,12 @@ if [ "${DEBUG:-0}" -eq 1 ]; then export DIAGNOSTIC=1 fi +log_red() { + local RED='\033[0;31m' # Red + local NC='\033[0m' # No Color + echo -e "${RED}${1}${NC}" +} + PROFILE_ARG="$1" ARTIFACT="$2" @@ -34,7 +40,7 @@ case "$(is_enterprise "$PROFILE_ARG"),$(is_enterprise "$PROFILE_ENV")" in true ;; *) - echo "PROFILE env var is set to '$PROFILE_ENV', but '$0' arg1 is '$PROFILE_ARG'" + log_red "PROFILE env var is set to '$PROFILE_ENV', but '$0' arg1 is '$PROFILE_ARG'" exit 1 ;; esac @@ -134,8 +140,13 @@ make_docs() { "ok = emqx_conf:dump_schema('$docdir', $SCHEMA_MODULE), \ halt(0)." local desc="$docdir/desc.en.hocon" - log "Generating $desc" - scripts/merge-i18n.escript | jq --sort-keys . > "$desc" + if command -v jq &> /dev/null; then + log "Generating $desc" + scripts/merge-i18n.escript | jq --sort-keys . > "$desc" + else + # it is not a big deal if we cannot generate the desc + log_red "NOT Generated: $desc" + fi } ## arg1 is the profile for which the following args (as app names) should be excluded @@ -152,8 +163,8 @@ assert_no_excluded_deps() { for app in "${excluded_apps[@]}"; do found="$($FIND "$rel_dir" -maxdepth 1 -type d -name "$app-*")" if [ -n "${found}" ]; then - echo "ERROR: ${app} should not be included in ${PROFILE}" - echo "ERROR: found ${app} in ${rel_dir}" + log_red "ERROR: ${app} should not be included in ${PROFILE}" + log_red "ERROR: found ${app} in ${rel_dir}" exit 1 fi done @@ -294,7 +305,7 @@ make_tgz() { mkdir -p "${tard}/emqx" mkdir -p "${pkgpath}" if [ ! 
-f "$src_tarball" ]; then - log "ERROR: $src_tarball is not found" + log_red "ERROR: $src_tarball is not found" fi $TAR zxf "${src_tarball}" -C "${tard}/emqx" if [ -f "${tard}/emqx/releases/${PKG_VSN}/relup" ]; then From 2d539463cdf9234b0bedb59f81d934e854cd291b Mon Sep 17 00:00:00 2001 From: Thales Macedo Garitezi Date: Thu, 9 Nov 2023 15:32:52 -0300 Subject: [PATCH 115/155] test: fix flaky test ``` Testing lib.emqx.emqx_cm_SUITE: *** FAILED test case 24 of 31 *** %%% emqx_cm_SUITE ==> t_stepdown_session_takeover_begin_timeout: FAILED %%% emqx_cm_SUITE ==> Failure/Error: ?assertEqual([<0.6635.8>,<0.6520.8>], lists : sort ( emqx_cm : lookup_channels ( ClientId ) )) expected: [<0.6635.8>,<0.6520.8>] got: [<0.6520.8>,<0.6635.8>] line: 309 ``` --- apps/emqx/test/emqx_cm_SUITE.erl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/emqx/test/emqx_cm_SUITE.erl b/apps/emqx/test/emqx_cm_SUITE.erl index 6afdfa478..9bfb4d5e7 100644 --- a/apps/emqx/test/emqx_cm_SUITE.erl +++ b/apps/emqx/test/emqx_cm_SUITE.erl @@ -306,7 +306,7 @@ test_stepdown_session(Action, Reason) -> ok = emqx_cm:register_channel(ClientId, Pid1, ConnInfo), ok = emqx_cm:register_channel(ClientId, Pid1, ConnInfo), ok = emqx_cm:register_channel(ClientId, Pid2, ConnInfo), - ?assertEqual([Pid1, Pid2], lists:sort(emqx_cm:lookup_channels(ClientId))), + ?assertEqual(lists:sort([Pid1, Pid2]), lists:sort(emqx_cm:lookup_channels(ClientId))), case Reason of noproc -> exit(Pid1, kill), From b812db1e3c659c1e7cc650466ec55a30d1b66d6c Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Thu, 9 Nov 2023 16:50:45 +0100 Subject: [PATCH 116/155] fix(ds): Fix packet id -> sequence number translation --- .../emqx_persistent_message_ds_replayer.erl | 116 ++++++++++++++---- apps/emqx/src/emqx_persistent_session_ds.erl | 2 +- 2 files changed, 94 insertions(+), 24 deletions(-) diff --git a/apps/emqx/src/emqx_persistent_message_ds_replayer.erl 
b/apps/emqx/src/emqx_persistent_message_ds_replayer.erl index 3964ee4e3..bd64db8b8 100644 --- a/apps/emqx/src/emqx_persistent_message_ds_replayer.erl +++ b/apps/emqx/src/emqx_persistent_message_ds_replayer.erl @@ -26,9 +26,11 @@ -export_type([inflight/0]). +-include_lib("emqx/include/logger.hrl"). -include("emqx_persistent_session_ds.hrl"). -ifdef(TEST). +-include_lib("proper/include/proper.hrl"). -include_lib("eunit/include/eunit.hrl"). -endif. @@ -65,9 +67,17 @@ new() -> #inflight{}. -spec next_packet_id(inflight()) -> {emqx_types:packet_id(), inflight()}. -next_packet_id(Inflight0 = #inflight{next_seqno = LastSeqno}) -> - Inflight = Inflight0#inflight{next_seqno = LastSeqno + 1}, - {seqno_to_packet_id(LastSeqno), Inflight}. +next_packet_id(Inflight0 = #inflight{next_seqno = LastSeqNo}) -> + Inflight = Inflight0#inflight{next_seqno = LastSeqNo + 1}, + case LastSeqNo rem 16#10000 of + 0 -> + %% We skip sequence numbers that lead to PacketId = 0 to + %% simplify math. Note: it leads to occasional gaps in the + %% sequence numbers. + next_packet_id(Inflight); + PacketId -> + {PacketId, Inflight} + end. -spec replay(emqx_persistent_session_ds:id(), inflight()) -> emqx_session:replies(). 
@@ -83,8 +93,20 @@ commit_offset( acked_seqno = AckedSeqno0, next_seqno = NextSeqNo, offset_ranges = Ranges0 } ) -> - AckedSeqno = packet_id_to_seqno(NextSeqNo, PacketId), - true = AckedSeqno0 < AckedSeqno, + AckedSeqno = + case packet_id_to_seqno(NextSeqNo, PacketId) of + N when N > AckedSeqno0; AckedSeqno0 =:= 0 -> + N; + OutOfRange -> + ?SLOG(warning, #{ + msg => "out-of-order_ack", + prev_seqno => AckedSeqno0, + acked_seqno => OutOfRange, + next_seqno => NextSeqNo, + packet_id => PacketId + }), + AckedSeqno0 + end, Ranges = lists:filter( fun(#range{stream = Stream, last = LastSeqno, iterator_next = ItNext}) -> case LastSeqno =< AckedSeqno of @@ -140,18 +162,17 @@ fetch(SessionId, Inflight0, [#ds_stream{stream = Stream} | Streams], N, Publishe #inflight{next_seqno = FirstSeqNo, offset_ranges = Ranges0} = Inflight0, ItBegin = get_last_iterator(SessionId, Stream, Ranges0), {ok, ItEnd, Messages} = emqx_ds:next(ItBegin, N), - {Publishes, Inflight1} = + {NMessages, Publishes, Inflight1} = lists:foldl( - fun(Msg, {PubAcc0, InflightAcc0}) -> + fun(Msg, {N0, PubAcc0, InflightAcc0}) -> {PacketId, InflightAcc} = next_packet_id(InflightAcc0), PubAcc = [{PacketId, Msg} | PubAcc0], - {PubAcc, InflightAcc} + {N0 + 1, PubAcc, InflightAcc} end, - {Publishes0, Inflight0}, + {0, Publishes0, Inflight0}, Messages ), #inflight{next_seqno = LastSeqNo} = Inflight1, - NMessages = LastSeqNo - FirstSeqNo, case NMessages > 0 of true -> Range = #range{ @@ -185,25 +206,22 @@ get_iterator(SessionId, Stream) -> get_streams(SessionId) -> mnesia:dirty_read(?SESSION_STREAM_TAB, SessionId). -%% Packet ID as defined by MQTT protocol is a 16-bit integer in range -%% 1..FFFF. This function translates internal session sequence number -%% to MQTT packet ID by chopping off most significant bits and adding -%% 1. This assumes that there's never more FFFF in-flight packets at -%% any time: --spec seqno_to_packet_id(non_neg_integer()) -> emqx_types:packet_id(). 
-seqno_to_packet_id(Counter) -> - Counter rem 16#ffff + 1. - %% Reconstruct session counter by adding most significant bits from %% the current counter to the packet id. -spec packet_id_to_seqno(non_neg_integer(), emqx_types:packet_id()) -> non_neg_integer(). packet_id_to_seqno(NextSeqNo, PacketId) -> - N = ((NextSeqNo bsr 16) bsl 16) + PacketId, - case N > NextSeqNo of - true -> N - 16#10000; - false -> N + Epoch = NextSeqNo bsr 16, + case packet_id_to_seqno_(Epoch, PacketId) of + N when N =< NextSeqNo -> + N; + _ -> + packet_id_to_seqno_(Epoch - 1, PacketId) end. +-spec packet_id_to_seqno_(non_neg_integer(), emqx_types:packet_id()) -> non_neg_integer(). +packet_id_to_seqno_(Epoch, PacketId) -> + (Epoch bsl 16) + PacketId. + -spec shuffle([A]) -> [A]. shuffle(L0) -> L1 = lists:map( @@ -215,3 +233,55 @@ shuffle(L0) -> L2 = lists:sort(L1), {_, L} = lists:unzip(L2), L. + +-ifdef(TEST). + +%% This test only tests boundary conditions (to make sure property-based test didn't skip them): +packet_id_to_seqno_test() -> + %% Packet ID = 1; first epoch: + ?assertEqual(1, packet_id_to_seqno(1, 1)), + ?assertEqual(1, packet_id_to_seqno(10, 1)), + ?assertEqual(1, packet_id_to_seqno(1 bsl 16 - 1, 1)), + %% Packet ID = 1; second and 3rd epochs: + ?assertEqual(1 bsl 16 + 1, packet_id_to_seqno(1 bsl 16 + 1, 1)), + ?assertEqual(2 bsl 16 + 1, packet_id_to_seqno(2 bsl 16 + 1, 1)), + %% Packet ID = 16#ffff: + PID = 1 bsl 16 - 1, + ?assertEqual(PID, packet_id_to_seqno(PID, PID)), + ?assertEqual(PID, packet_id_to_seqno(1 bsl 16, PID)), + ?assertEqual(1 bsl 16 + PID, packet_id_to_seqno(2 bsl 16, PID)), + ok. + +packet_id_to_seqno_test_() -> + Opts = [{numtests, 1000}, {to_file, user}], + {timeout, 30, fun() -> ?assert(proper:quickcheck(packet_id_to_seqno_prop(), Opts)) end}. 
+ +packet_id_to_seqno_prop() -> + ?FORALL( + NextSeqNo, + next_seqno_gen(), + ?FORALL( + SeqNo, + seqno_gen(NextSeqNo), + begin + PacketId = SeqNo rem 16#10000, + ?assertEqual(SeqNo, packet_id_to_seqno(NextSeqNo, PacketId)), + true + end + ) + ). + +next_seqno_gen() -> + ?LET( + {Epoch, Offset}, + {non_neg_integer(), non_neg_integer()}, + Epoch bsl 16 + Offset + ). + +seqno_gen(NextSeqNo) -> + WindowSize = 1 bsl 16 - 2, + Min = max(0, NextSeqNo - WindowSize), + Max = max(0, NextSeqNo - 1), + range(Min, Max). + +-endif. diff --git a/apps/emqx/src/emqx_persistent_session_ds.erl b/apps/emqx/src/emqx_persistent_session_ds.erl index bf16567fd..992b032a4 100644 --- a/apps/emqx/src/emqx_persistent_session_ds.erl +++ b/apps/emqx/src/emqx_persistent_session_ds.erl @@ -342,7 +342,7 @@ deliver(_ClientInfo, _Delivers, Session) -> -spec handle_timeout(clientinfo(), _Timeout, session()) -> {ok, replies(), session()} | {ok, replies(), timeout(), session()}. handle_timeout(_ClientInfo, pull, Session = #{id := Id, inflight := Inflight0}) -> - WindowSize = 100, + WindowSize = 1000, {Publishes, Inflight} = emqx_persistent_message_ds_replayer:poll(Id, Inflight0, WindowSize), %% TODO: make these values configurable: Timeout = From 5aa9d026df82b8515ee5a3807c3c24979369701a Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Thu, 9 Nov 2023 21:16:20 +0100 Subject: [PATCH 117/155] fix(ds): Apply review remarks Co-authored-by: Thales Macedo Garitezi --- apps/emqx/src/emqx_persistent_message_ds_replayer.erl | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/apps/emqx/src/emqx_persistent_message_ds_replayer.erl b/apps/emqx/src/emqx_persistent_message_ds_replayer.erl index bd64db8b8..3e8aa71cf 100644 --- a/apps/emqx/src/emqx_persistent_message_ds_replayer.erl +++ b/apps/emqx/src/emqx_persistent_message_ds_replayer.erl @@ -242,8 +242,10 @@ packet_id_to_seqno_test() -> ?assertEqual(1, packet_id_to_seqno(1, 1)), ?assertEqual(1, 
packet_id_to_seqno(10, 1)), ?assertEqual(1, packet_id_to_seqno(1 bsl 16 - 1, 1)), + ?assertEqual(1, packet_id_to_seqno(1 bsl 16, 1)), %% Packet ID = 1; second and 3rd epochs: ?assertEqual(1 bsl 16 + 1, packet_id_to_seqno(1 bsl 16 + 1, 1)), + ?assertEqual(1 bsl 16 + 1, packet_id_to_seqno(2 bsl 16, 1)), ?assertEqual(2 bsl 16 + 1, packet_id_to_seqno(2 bsl 16 + 1, 1)), %% Packet ID = 16#ffff: PID = 1 bsl 16 - 1, @@ -279,7 +281,7 @@ next_seqno_gen() -> ). seqno_gen(NextSeqNo) -> - WindowSize = 1 bsl 16 - 2, + WindowSize = 1 bsl 16 - 1, Min = max(0, NextSeqNo - WindowSize), Max = max(0, NextSeqNo - 1), range(Min, Max). From 3d20e566d884beffdec8c27337dc84d460dd3ff6 Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Wed, 8 Nov 2023 10:47:16 +0100 Subject: [PATCH 118/155] docs: add namespace to authn and authz schema --- apps/emqx_auth_http/src/emqx_authn_http_schema.erl | 5 ++++- apps/emqx_auth_http/src/emqx_authz_http_schema.erl | 5 ++++- apps/emqx_auth_ldap/src/emqx_authn_ldap_bind_schema.erl | 5 ++++- apps/emqx_auth_ldap/src/emqx_authz_ldap_schema.erl | 5 ++++- apps/emqx_auth_mnesia/src/emqx_authn_mnesia_schema.erl | 5 ++++- apps/emqx_auth_mnesia/src/emqx_authz_mnesia_schema.erl | 5 ++++- apps/emqx_auth_mongodb/src/emqx_authn_mongodb_schema.erl | 5 ++++- apps/emqx_auth_mongodb/src/emqx_authz_mongodb_schema.erl | 5 ++++- apps/emqx_auth_mysql/src/emqx_authn_mysql_schema.erl | 3 +++ apps/emqx_auth_mysql/src/emqx_authz_mysql_schema.erl | 3 +++ .../src/emqx_authn_postgresql_schema.erl | 3 +++ .../src/emqx_authz_postgresql_schema.erl | 3 +++ apps/emqx_auth_redis/src/emqx_authn_redis_schema.erl | 3 +++ apps/emqx_auth_redis/src/emqx_authz_redis_schema.erl | 3 +++ 14 files changed, 50 insertions(+), 8 deletions(-) diff --git a/apps/emqx_auth_http/src/emqx_authn_http_schema.erl b/apps/emqx_auth_http/src/emqx_authn_http_schema.erl index 1eaac6378..9464b0aaf 100644 --- a/apps/emqx_auth_http/src/emqx_authn_http_schema.erl +++ 
b/apps/emqx_auth_http/src/emqx_authn_http_schema.erl @@ -27,7 +27,8 @@ validations/0, desc/1, refs/0, - select_union_member/1 + select_union_member/1, + namespace/0 ]). -define(NOT_EMPTY(MSG), emqx_resource_validator:not_empty(MSG)). @@ -38,6 +39,8 @@ }) ). +namespace() -> "authn". + refs() -> [?R_REF(http_get), ?R_REF(http_post)]. diff --git a/apps/emqx_auth_http/src/emqx_authz_http_schema.erl b/apps/emqx_auth_http/src/emqx_authz_http_schema.erl index 18ec23757..5f9af846b 100644 --- a/apps/emqx_auth_http/src/emqx_authz_http_schema.erl +++ b/apps/emqx_auth_http/src/emqx_authz_http_schema.erl @@ -26,7 +26,8 @@ fields/1, desc/1, source_refs/0, - select_union_member/1 + select_union_member/1, + namespace/0 ]). -export([ @@ -38,6 +39,8 @@ -import(emqx_schema, [mk_duration/2]). +namespace() -> "authz". + type() -> ?AUTHZ_TYPE. source_refs() -> diff --git a/apps/emqx_auth_ldap/src/emqx_authn_ldap_bind_schema.erl b/apps/emqx_auth_ldap/src/emqx_authn_ldap_bind_schema.erl index 9a21766e3..580673d85 100644 --- a/apps/emqx_auth_ldap/src/emqx_authn_ldap_bind_schema.erl +++ b/apps/emqx_auth_ldap/src/emqx_authn_ldap_bind_schema.erl @@ -25,9 +25,12 @@ fields/1, desc/1, refs/0, - select_union_member/1 + select_union_member/1, + namespace/0 ]). +namespace() -> "authn". + refs() -> [?R_REF(ldap_bind)]. diff --git a/apps/emqx_auth_ldap/src/emqx_authz_ldap_schema.erl b/apps/emqx_auth_ldap/src/emqx_authz_ldap_schema.erl index 491b0debf..e6a060f42 100644 --- a/apps/emqx_auth_ldap/src/emqx_authz_ldap_schema.erl +++ b/apps/emqx_auth_ldap/src/emqx_authz_ldap_schema.erl @@ -26,9 +26,12 @@ fields/1, desc/1, source_refs/0, - select_union_member/1 + select_union_member/1, + namespace/0 ]). +namespace() -> "authz". + type() -> ?AUTHZ_TYPE. 
fields(ldap) -> diff --git a/apps/emqx_auth_mnesia/src/emqx_authn_mnesia_schema.erl b/apps/emqx_auth_mnesia/src/emqx_authn_mnesia_schema.erl index bb5ccfe1a..373d95fc8 100644 --- a/apps/emqx_auth_mnesia/src/emqx_authn_mnesia_schema.erl +++ b/apps/emqx_auth_mnesia/src/emqx_authn_mnesia_schema.erl @@ -25,9 +25,12 @@ fields/1, desc/1, refs/1, - select_union_member/2 + select_union_member/2, + namespace/0 ]). +namespace() -> "authn". + refs(api_write) -> [?R_REF(builtin_db_api)]; refs(_) -> diff --git a/apps/emqx_auth_mnesia/src/emqx_authz_mnesia_schema.erl b/apps/emqx_auth_mnesia/src/emqx_authz_mnesia_schema.erl index cab544bf7..4d467397e 100644 --- a/apps/emqx_auth_mnesia/src/emqx_authz_mnesia_schema.erl +++ b/apps/emqx_auth_mnesia/src/emqx_authz_mnesia_schema.erl @@ -26,9 +26,12 @@ fields/1, desc/1, source_refs/0, - select_union_member/1 + select_union_member/1, + namespace/0 ]). +namespace() -> "authz". + type() -> ?AUTHZ_TYPE. fields(builtin_db) -> diff --git a/apps/emqx_auth_mongodb/src/emqx_authn_mongodb_schema.erl b/apps/emqx_auth_mongodb/src/emqx_authn_mongodb_schema.erl index 8f76bedc2..53c6a6a10 100644 --- a/apps/emqx_auth_mongodb/src/emqx_authn_mongodb_schema.erl +++ b/apps/emqx_auth_mongodb/src/emqx_authn_mongodb_schema.erl @@ -25,9 +25,12 @@ fields/1, desc/1, refs/0, - select_union_member/1 + select_union_member/1, + namespace/0 ]). +namespace() -> "authn". + refs() -> [ ?R_REF(mongo_single), diff --git a/apps/emqx_auth_mongodb/src/emqx_authz_mongodb_schema.erl b/apps/emqx_auth_mongodb/src/emqx_authz_mongodb_schema.erl index aff399e68..ee20b962e 100644 --- a/apps/emqx_auth_mongodb/src/emqx_authz_mongodb_schema.erl +++ b/apps/emqx_auth_mongodb/src/emqx_authz_mongodb_schema.erl @@ -24,9 +24,12 @@ fields/1, desc/1, source_refs/0, - select_union_member/1 + select_union_member/1, + namespace/0 ]). +namespace() -> "authz". + type() -> ?AUTHZ_TYPE. 
source_refs() -> diff --git a/apps/emqx_auth_mysql/src/emqx_authn_mysql_schema.erl b/apps/emqx_auth_mysql/src/emqx_authn_mysql_schema.erl index 0189ecc61..17609acb8 100644 --- a/apps/emqx_auth_mysql/src/emqx_authn_mysql_schema.erl +++ b/apps/emqx_auth_mysql/src/emqx_authn_mysql_schema.erl @@ -22,12 +22,15 @@ -behaviour(emqx_authn_schema). -export([ + namespace/0, fields/1, desc/1, refs/0, select_union_member/1 ]). +namespace() -> "authn". + refs() -> [?R_REF(mysql)]. diff --git a/apps/emqx_auth_mysql/src/emqx_authz_mysql_schema.erl b/apps/emqx_auth_mysql/src/emqx_authz_mysql_schema.erl index a9ce422e6..43f6ca6fa 100644 --- a/apps/emqx_auth_mysql/src/emqx_authz_mysql_schema.erl +++ b/apps/emqx_auth_mysql/src/emqx_authz_mysql_schema.erl @@ -22,6 +22,7 @@ -behaviour(emqx_authz_schema). -export([ + namespace/0, type/0, fields/1, desc/1, @@ -29,6 +30,8 @@ select_union_member/1 ]). +namespace() -> "authz". + type() -> ?AUTHZ_TYPE. fields(mysql) -> diff --git a/apps/emqx_auth_postgresql/src/emqx_authn_postgresql_schema.erl b/apps/emqx_auth_postgresql/src/emqx_authn_postgresql_schema.erl index 6b3b600ee..ef7d00df3 100644 --- a/apps/emqx_auth_postgresql/src/emqx_authn_postgresql_schema.erl +++ b/apps/emqx_auth_postgresql/src/emqx_authn_postgresql_schema.erl @@ -22,12 +22,15 @@ -behaviour(emqx_authn_schema). -export([ + namespace/0, fields/1, desc/1, refs/0, select_union_member/1 ]). +namespace() -> "authn". + select_union_member( #{ <<"mechanism">> := ?AUTHN_MECHANISM_BIN, <<"backend">> := ?AUTHN_BACKEND_BIN diff --git a/apps/emqx_auth_postgresql/src/emqx_authz_postgresql_schema.erl b/apps/emqx_auth_postgresql/src/emqx_authz_postgresql_schema.erl index 2be7e9387..296b00126 100644 --- a/apps/emqx_auth_postgresql/src/emqx_authz_postgresql_schema.erl +++ b/apps/emqx_auth_postgresql/src/emqx_authz_postgresql_schema.erl @@ -22,6 +22,7 @@ -behaviour(emqx_authz_schema). -export([ + namespace/0, type/0, fields/1, desc/1, @@ -29,6 +30,8 @@ select_union_member/1 ]). 
+namespace() -> "authz". + type() -> ?AUTHZ_TYPE. fields(postgresql) -> diff --git a/apps/emqx_auth_redis/src/emqx_authn_redis_schema.erl b/apps/emqx_auth_redis/src/emqx_authn_redis_schema.erl index 7b5794c48..f3e124ca1 100644 --- a/apps/emqx_auth_redis/src/emqx_authn_redis_schema.erl +++ b/apps/emqx_auth_redis/src/emqx_authn_redis_schema.erl @@ -22,12 +22,15 @@ -behaviour(emqx_authn_schema). -export([ + namespace/0, fields/1, desc/1, refs/0, select_union_member/1 ]). +namespace() -> "authn". + refs() -> [ ?R_REF(redis_single), diff --git a/apps/emqx_auth_redis/src/emqx_authz_redis_schema.erl b/apps/emqx_auth_redis/src/emqx_authz_redis_schema.erl index 755192bfc..5cd084795 100644 --- a/apps/emqx_auth_redis/src/emqx_authz_redis_schema.erl +++ b/apps/emqx_auth_redis/src/emqx_authz_redis_schema.erl @@ -22,6 +22,7 @@ -behaviour(emqx_authz_schema). -export([ + namespace/0, type/0, fields/1, desc/1, @@ -29,6 +30,8 @@ select_union_member/1 ]). +namespace() -> "authz". + type() -> ?AUTHZ_TYPE. fields(redis_single) -> From af3dfc91b120641a60c8b84f2c3450b7c29c99ed Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Wed, 8 Nov 2023 16:31:57 +0100 Subject: [PATCH 119/155] refactor(schema): change log_lvel to enum type --- apps/emqx_conf/src/emqx_conf_schema.erl | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/apps/emqx_conf/src/emqx_conf_schema.erl b/apps/emqx_conf/src/emqx_conf_schema.erl index ba9560157..5c16bc56b 100644 --- a/apps/emqx_conf/src/emqx_conf_schema.erl +++ b/apps/emqx_conf/src/emqx_conf_schema.erl @@ -28,14 +28,12 @@ -include("emqx_conf.hrl"). --type log_level() :: debug | info | notice | warning | error | critical | alert | emergency | all. -type file() :: string(). -type cipher() :: map(). -behaviour(hocon_schema). -reflect_type([ - log_level/0, file/0, cipher/0 ]). 
@@ -1289,7 +1287,7 @@ log_handler_common_confs(Handler, Default) -> [ {"level", sc( - log_level(), + hoconsc:enum([debug, info, notice, warning, error, critical, alert, emergency, all]), #{ default => maps:get(level, Default, warning), desc => ?DESC(LevelDesc), From 7a0a0bee98a7af4c42fdbf0de46d7282da0c7b5d Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Wed, 8 Nov 2023 16:33:07 +0100 Subject: [PATCH 120/155] refactor(schema): change file() type to string() --- apps/emqx_conf/src/emqx_conf_schema.erl | 12 +++++------- apps/emqx_enterprise/src/emqx_enterprise_schema.erl | 2 +- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/apps/emqx_conf/src/emqx_conf_schema.erl b/apps/emqx_conf/src/emqx_conf_schema.erl index 5c16bc56b..2fad19061 100644 --- a/apps/emqx_conf/src/emqx_conf_schema.erl +++ b/apps/emqx_conf/src/emqx_conf_schema.erl @@ -28,13 +28,11 @@ -include("emqx_conf.hrl"). --type file() :: string(). -type cipher() :: map(). -behaviour(hocon_schema). -reflect_type([ - file/0, cipher/0 ]). 
@@ -546,7 +544,7 @@ fields("node") -> )}, {"crash_dump_file", sc( - file(), + string(), #{ mapping => "vm_args.-env ERL_CRASH_DUMP", desc => ?DESC(node_crash_dump_file), @@ -837,7 +835,7 @@ fields("rpc") -> )}, {"certfile", sc( - file(), + string(), #{ mapping => "gen_rpc.certfile", converter => fun ensure_unicode_path/2, @@ -846,7 +844,7 @@ fields("rpc") -> )}, {"keyfile", sc( - file(), + string(), #{ mapping => "gen_rpc.keyfile", converter => fun ensure_unicode_path/2, @@ -855,7 +853,7 @@ fields("rpc") -> )}, {"cacertfile", sc( - file(), + string(), #{ mapping => "gen_rpc.cacertfile", converter => fun ensure_unicode_path/2, @@ -1002,7 +1000,7 @@ fields("log_file_handler") -> [ {"path", sc( - file(), + string(), #{ desc => ?DESC("log_file_handler_file"), default => <<"${EMQX_LOG_DIR}/emqx.log">>, diff --git a/apps/emqx_enterprise/src/emqx_enterprise_schema.erl b/apps/emqx_enterprise/src/emqx_enterprise_schema.erl index 16474b424..658666fc7 100644 --- a/apps/emqx_enterprise/src/emqx_enterprise_schema.erl +++ b/apps/emqx_enterprise/src/emqx_enterprise_schema.erl @@ -53,7 +53,7 @@ fields("log_audit_handler") -> {"path", hoconsc:mk( - emqx_conf_schema:file(), + string(), #{ desc => ?DESC(emqx_conf_schema, "audit_file_handler_path"), default => <<"${EMQX_LOG_DIR}/audit.log">>, From 40c2ca134d355690e7a1ce11f9c83945599bd991 Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Wed, 8 Nov 2023 16:33:47 +0100 Subject: [PATCH 121/155] refactor(schema): delete unused type cipher() --- apps/emqx_conf/src/emqx_conf_schema.erl | 6 ------ 1 file changed, 6 deletions(-) diff --git a/apps/emqx_conf/src/emqx_conf_schema.erl b/apps/emqx_conf/src/emqx_conf_schema.erl index 2fad19061..6f1a13d4e 100644 --- a/apps/emqx_conf/src/emqx_conf_schema.erl +++ b/apps/emqx_conf/src/emqx_conf_schema.erl @@ -28,14 +28,8 @@ -include("emqx_conf.hrl"). --type cipher() :: map(). - -behaviour(hocon_schema). --reflect_type([ - cipher/0 -]). 
- -export([ namespace/0, roots/0, fields/1, translations/0, translation/1, validations/0, desc/1, tags/0 ]). From df96798c53116da20b476f1e293aa0731707775d Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Wed, 8 Nov 2023 18:21:15 +0100 Subject: [PATCH 122/155] refactor(http_bridge): use more readable type for pool_type --- .../src/emqx_bridge_http_connector.erl | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/apps/emqx_bridge_http/src/emqx_bridge_http_connector.erl b/apps/emqx_bridge_http/src/emqx_bridge_http_connector.erl index b2f876d21..743ab97fe 100644 --- a/apps/emqx_bridge_http/src/emqx_bridge_http_connector.erl +++ b/apps/emqx_bridge_http/src/emqx_bridge_http_connector.erl @@ -46,14 +46,6 @@ -export([validate_method/1, join_paths/2]). --type connect_timeout() :: emqx_schema:duration() | infinity. --type pool_type() :: random | hash. - --reflect_type([ - connect_timeout/0, - pool_type/0 -]). - -define(DEFAULT_PIPELINE_SIZE, 100). -define(DEFAULT_REQUEST_TIMEOUT_MS, 30_000). @@ -89,7 +81,7 @@ fields(config) -> )}, {pool_type, sc( - pool_type(), + hoconsc:enum([random, hash]), #{ default => random, desc => ?DESC("pool_type") From b3df8604fca0feafe0e6ffd7c8fadc70731ce92d Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Wed, 8 Nov 2023 18:45:41 +0100 Subject: [PATCH 123/155] refactor(emqx_conf_schema): use hocon enum type for log levels --- apps/emqx_conf/src/emqx_conf_schema.erl | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/apps/emqx_conf/src/emqx_conf_schema.erl b/apps/emqx_conf/src/emqx_conf_schema.erl index 6f1a13d4e..79877184d 100644 --- a/apps/emqx_conf/src/emqx_conf_schema.erl +++ b/apps/emqx_conf/src/emqx_conf_schema.erl @@ -33,6 +33,9 @@ -export([ namespace/0, roots/0, fields/1, translations/0, translation/1, validations/0, desc/1, tags/0 ]). + +-export([log_level/0]). + -export([conf_get/2, conf_get/3, keys/2, filter/1]). -export([upgrade_raw_conf/1]). 
@@ -1279,7 +1282,7 @@ log_handler_common_confs(Handler, Default) -> [ {"level", sc( - hoconsc:enum([debug, info, notice, warning, error, critical, alert, emergency, all]), + log_level(), #{ default => maps:get(level, Default, warning), desc => ?DESC(LevelDesc), @@ -1528,3 +1531,6 @@ ensure_unicode_path(Path, _) when is_list(Path) -> Path; ensure_unicode_path(Path, _) -> throw({"not_string", Path}). + +log_level() -> + hoconsc:enum([debug, info, notice, warning, error, critical, alert, emergency, all]). From a2700771c96c1bc562ff706211b372d1055c5212 Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Wed, 8 Nov 2023 18:46:16 +0100 Subject: [PATCH 124/155] refactor(emqx_s3_schema): make type readable --- apps/emqx_s3/src/emqx_s3_schema.erl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/emqx_s3/src/emqx_s3_schema.erl b/apps/emqx_s3/src/emqx_s3_schema.erl index db37c6e2d..5478f6416 100644 --- a/apps/emqx_s3/src/emqx_s3_schema.erl +++ b/apps/emqx_s3/src/emqx_s3_schema.erl @@ -37,7 +37,7 @@ fields(s3) -> )}, {secret_access_key, mk( - secret_access_key(), + typerefl:alias("string", secret_access_key()), #{ desc => ?DESC("secret_access_key"), required => false, From ae24b45d1b7322294808b9fed9583c5a1f81f5f3 Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Wed, 8 Nov 2023 18:47:39 +0100 Subject: [PATCH 125/155] fix(schema): add type namespce to emqx_dashboard_sso_ldap --- apps/emqx_dashboard_sso/src/emqx_dashboard_sso_ldap.erl | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_ldap.erl b/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_ldap.erl index 499e24c5b..583f1d683 100644 --- a/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_ldap.erl +++ b/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_ldap.erl @@ -12,6 +12,7 @@ -behaviour(emqx_dashboard_sso). -export([ + namespace/0, fields/1, desc/1 ]). 
@@ -30,6 +31,9 @@ %% Hocon Schema %%------------------------------------------------------------------------------ +namespace() -> + "sso". + hocon_ref() -> hoconsc:ref(?MODULE, ldap). From 46fffa401e90b992e7a85ffffce045c705356ee3 Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Wed, 8 Nov 2023 19:42:44 +0100 Subject: [PATCH 126/155] refactor(emqx_prometheus_schema): use typerefl alias --- apps/emqx_prometheus/src/emqx_prometheus_schema.erl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/emqx_prometheus/src/emqx_prometheus_schema.erl b/apps/emqx_prometheus/src/emqx_prometheus_schema.erl index f34675c0b..91d176142 100644 --- a/apps/emqx_prometheus/src/emqx_prometheus_schema.erl +++ b/apps/emqx_prometheus/src/emqx_prometheus_schema.erl @@ -57,7 +57,7 @@ fields("prometheus") -> )}, {headers, ?HOCON( - list({string(), string()}), + typerefl:alias("map", list({string(), string()})), #{ default => #{}, required => false, From 3e2ce57cb1ae53f7b93560466faeafdeea274f87 Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Wed, 8 Nov 2023 19:44:58 +0100 Subject: [PATCH 127/155] refactor(emqx_bridge_greptimedb): use typerefl alias --- .../emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.erl | 2 +- apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb.erl | 8 ++++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.erl b/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.erl index d63103e2e..f5ae714d7 100644 --- a/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.erl +++ b/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.erl @@ -126,7 +126,7 @@ desc(_) -> undefined. 
write_syntax(type) -> - emqx_bridge_influxdb:write_syntax(); + emqx_bridge_influxdb:write_syntax_type(); write_syntax(required) -> true; write_syntax(validator) -> diff --git a/apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb.erl b/apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb.erl index 47eeecb4e..acb295752 100644 --- a/apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb.erl +++ b/apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb.erl @@ -11,7 +11,8 @@ -import(hoconsc, [mk/2, enum/1, ref/2]). -export([ - conn_bridge_examples/1 + conn_bridge_examples/1, + write_syntax_type/0 ]). -export([ @@ -29,6 +30,9 @@ %% ------------------------------------------------------------------------------------------------- %% api +write_syntax_type() -> + typerefl:alias("string", write_syntax()). + conn_bridge_examples(Method) -> [ #{ @@ -154,7 +158,7 @@ desc(_) -> undefined. write_syntax(type) -> - ?MODULE:write_syntax(); + write_syntax_type(); write_syntax(required) -> true; write_syntax(validator) -> From 8dc8237331356a17f2f4e6fb8e941b4e91357192 Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Thu, 9 Nov 2023 14:01:53 +0100 Subject: [PATCH 128/155] feat(ds): Introduce emqx_ds behavior --- .../emqx_persistent_message_ds_replayer.erl | 2 +- apps/emqx/src/emqx_persistent_session_ds.erl | 8 +- apps/emqx/src/emqx_persistent_session_ds.hrl | 2 + .../test/emqx_persistent_messages_SUITE.erl | 4 +- .../test/emqx_persistent_session_SUITE.erl | 2 +- apps/emqx_durable_storage/src/emqx_ds.erl | 54 ++++++-- .../src/emqx_ds_replication_layer.erl | 116 +++++++++--------- .../src/emqx_ds_storage_bitfield_lts.erl | 10 +- .../src/emqx_ds_storage_layer.erl | 8 +- .../src/emqx_ds_storage_layer_sup.erl | 9 +- .../src/proto/emqx_ds_proto_v1.erl | 44 ++++--- .../test/emqx_ds_SUITE.erl | 20 +-- 12 files changed, 164 insertions(+), 115 deletions(-) diff --git a/apps/emqx/src/emqx_persistent_message_ds_replayer.erl 
b/apps/emqx/src/emqx_persistent_message_ds_replayer.erl index 1ef4ea293..156aa943e 100644 --- a/apps/emqx/src/emqx_persistent_message_ds_replayer.erl +++ b/apps/emqx/src/emqx_persistent_message_ds_replayer.erl @@ -161,7 +161,7 @@ fetch(_SessionId, Inflight, _Streams, 0, Acc) -> fetch(SessionId, Inflight0, [Stream | Streams], N, Publishes0) -> #inflight{next_seqno = FirstSeqNo, offset_ranges = Ranges0} = Inflight0, ItBegin = get_last_iterator(SessionId, Stream, Ranges0), - {ok, ItEnd, Messages} = emqx_ds:next(ItBegin, N), + {ok, ItEnd, Messages} = emqx_ds:next(?PERSISTENT_MESSAGE_DB, ItBegin, N), {NMessages, Publishes, Inflight1} = lists:foldl( fun(Msg, {N0, PubAcc0, InflightAcc0}) -> diff --git a/apps/emqx/src/emqx_persistent_session_ds.erl b/apps/emqx/src/emqx_persistent_session_ds.erl index b7b5d0df9..52c98c7d4 100644 --- a/apps/emqx/src/emqx_persistent_session_ds.erl +++ b/apps/emqx/src/emqx_persistent_session_ds.erl @@ -105,8 +105,6 @@ -export_type([id/0]). --define(PERSISTENT_MESSAGE_DB, emqx_persistent_message). - %% -spec create(clientinfo(), conninfo(), emqx_session:conf()) -> @@ -497,8 +495,6 @@ storage() -> %% @doc Called when a client connects. This function looks up a %% session or returns `false` if previous one couldn't be found. %% -%% This function also spawns replay agents for each iterator. -%% %% Note: session API doesn't handle session takeovers, it's the job of %% the broker. 
-spec session_open(id()) -> @@ -670,7 +666,9 @@ renew_streams(Id, ExistingStreams, TopicFilter, StartTime) -> ok; false -> mnesia:write(?SESSION_STREAM_TAB, Rec, write), - {ok, Iterator} = emqx_ds:make_iterator(Stream, TopicFilter, StartTime), + {ok, Iterator} = emqx_ds:make_iterator( + ?PERSISTENT_MESSAGE_DB, Stream, TopicFilter, StartTime + ), IterRec = #ds_iter{id = {Id, Stream}, iter = Iterator}, mnesia:write(?SESSION_ITER_TAB, IterRec, write) end diff --git a/apps/emqx/src/emqx_persistent_session_ds.hrl b/apps/emqx/src/emqx_persistent_session_ds.hrl index 81b997df5..666874608 100644 --- a/apps/emqx/src/emqx_persistent_session_ds.hrl +++ b/apps/emqx/src/emqx_persistent_session_ds.hrl @@ -16,6 +16,8 @@ -ifndef(EMQX_PERSISTENT_SESSION_DS_HRL_HRL). -define(EMQX_PERSISTENT_SESSION_DS_HRL_HRL, true). +-define(PERSISTENT_MESSAGE_DB, emqx_persistent_message). + -define(SESSION_TAB, emqx_ds_session). -define(SESSION_SUBSCRIPTIONS_TAB, emqx_ds_session_subscriptions). -define(SESSION_STREAM_TAB, emqx_ds_stream_tab). diff --git a/apps/emqx/test/emqx_persistent_messages_SUITE.erl b/apps/emqx/test/emqx_persistent_messages_SUITE.erl index 52ba090b5..45cf85a05 100644 --- a/apps/emqx/test/emqx_persistent_messages_SUITE.erl +++ b/apps/emqx/test/emqx_persistent_messages_SUITE.erl @@ -256,14 +256,14 @@ consume(TopicFilter, StartMS) -> Streams = emqx_ds:get_streams(?PERSISTENT_MESSAGE_DB, TopicFilter, StartMS), lists:flatmap( fun({_Rank, Stream}) -> - {ok, It} = emqx_ds:make_iterator(Stream, TopicFilter, StartMS), + {ok, It} = emqx_ds:make_iterator(?PERSISTENT_MESSAGE_DB, Stream, TopicFilter, StartMS), consume(It) end, Streams ). 
consume(It) -> - case emqx_ds:next(It, 100) of + case emqx_ds:next(?PERSISTENT_MESSAGE_DB, It, 100) of {ok, _NIt, _Msgs = []} -> []; {ok, NIt, Msgs} -> diff --git a/apps/emqx/test/emqx_persistent_session_SUITE.erl b/apps/emqx/test/emqx_persistent_session_SUITE.erl index 5a14e0bc9..0f8929e23 100644 --- a/apps/emqx/test/emqx_persistent_session_SUITE.erl +++ b/apps/emqx/test/emqx_persistent_session_SUITE.erl @@ -133,7 +133,7 @@ get_listener_port(Type, Name) -> end_per_group(Group, Config) when Group == tcp; Group == ws; Group == quic -> ok = emqx_cth_suite:stop(?config(group_apps, Config)); end_per_group(_, _Config) -> - ok = emqx_ds:drop_db(?PERSISTENT_MESSAGE_DB), + catch emqx_ds:drop_db(?PERSISTENT_MESSAGE_DB), ok. init_per_testcase(TestCase, Config) -> diff --git a/apps/emqx_durable_storage/src/emqx_ds.erl b/apps/emqx_durable_storage/src/emqx_ds.erl index 27a0745bc..725d62673 100644 --- a/apps/emqx_durable_storage/src/emqx_ds.erl +++ b/apps/emqx_durable_storage/src/emqx_ds.erl @@ -28,7 +28,7 @@ -export([store_batch/2, store_batch/3]). %% Message replay API: --export([get_streams/3, make_iterator/3, next/2]). +-export([get_streams/3, make_iterator/4, next/3]). %% Misc. API: -export([]). @@ -100,6 +100,26 @@ -type get_iterator_result(Iterator) :: {ok, Iterator} | undefined. +-define(persistent_term(DB), {emqx_ds_db_backend, DB}). + +-define(module(DB), (persistent_term:get(?persistent_term(DB)))). + +%%================================================================================ +%% Behavior callbacks +%%================================================================================ + +-callback open_db(db(), create_db_opts()) -> ok | {error, _}. + +-callback drop_db(db()) -> ok | {error, _}. + +-callback store_batch(db(), [emqx_types:message()], message_store_opts()) -> store_batch_result(). + +-callback get_streams(db(), topic_filter(), time()) -> [{stream_rank(), stream()}]. 
+ +-callback make_iterator(db(), _Stream, topic_filter(), time()) -> make_iterator_result(_Iterator). + +-callback next(db(), Iterator, pos_integer()) -> next_result(Iterator). + %%================================================================================ %% API funcions %%================================================================================ @@ -107,19 +127,29 @@ %% @doc Different DBs are completely independent from each other. They %% could represent something like different tenants. -spec open_db(db(), create_db_opts()) -> ok. -open_db(DB, Opts = #{backend := builtin}) -> - emqx_ds_replication_layer:open_db(DB, Opts). +open_db(DB, Opts = #{backend := Backend}) when Backend =:= builtin -> + Module = + case Backend of + builtin -> emqx_ds_replication_layer + end, + persistent_term:put(?persistent_term(DB), Module), + ?module(DB):open_db(DB, Opts). %% @doc TODO: currently if one or a few shards are down, they won't be %% deleted. -spec drop_db(db()) -> ok. drop_db(DB) -> - emqx_ds_replication_layer:drop_db(DB). + case persistent_term:get(?persistent_term(DB), undefined) of + undefined -> + ok; + Module -> + Module:drop_db(DB) + end. -spec store_batch(db(), [emqx_types:message()], message_store_opts()) -> store_batch_result(). store_batch(DB, Msgs, Opts) -> - emqx_ds_replication_layer:store_batch(DB, Msgs, Opts). + ?module(DB):store_batch(DB, Msgs, Opts). -spec store_batch(db(), [emqx_types:message()]) -> store_batch_result(). store_batch(DB, Msgs) -> @@ -168,15 +198,15 @@ store_batch(DB, Msgs) -> %% replaying streams that depend on the given one. -spec get_streams(db(), topic_filter(), time()) -> [{stream_rank(), stream()}]. get_streams(DB, TopicFilter, StartTime) -> - emqx_ds_replication_layer:get_streams(DB, TopicFilter, StartTime). + ?module(DB):get_streams(DB, TopicFilter, StartTime). --spec make_iterator(stream(), topic_filter(), time()) -> make_iterator_result(). 
-make_iterator(Stream, TopicFilter, StartTime) -> - emqx_ds_replication_layer:make_iterator(Stream, TopicFilter, StartTime). +-spec make_iterator(db(), stream(), topic_filter(), time()) -> make_iterator_result(). +make_iterator(DB, Stream, TopicFilter, StartTime) -> + ?module(DB):make_iterator(DB, Stream, TopicFilter, StartTime). --spec next(iterator(), pos_integer()) -> next_result(). -next(Iter, BatchSize) -> - emqx_ds_replication_layer:next(Iter, BatchSize). +-spec next(db(), iterator(), pos_integer()) -> next_result(). +next(DB, Iter, BatchSize) -> + ?module(DB):next(DB, Iter, BatchSize). %%================================================================================ %% Internal exports diff --git a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl index b81f43c4f..4a9240f95 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl @@ -18,23 +18,25 @@ %% replication on their own. -module(emqx_ds_replication_layer). +-behaviour(emqx_ds). + -export([ list_shards/1, open_db/2, drop_db/1, store_batch/3, get_streams/3, - make_iterator/3, - next/2 + make_iterator/4, + next/3 ]). %% internal exports: -export([ - do_open_shard_v1/2, - do_drop_shard_v1/1, - do_get_streams_v1/3, - do_make_iterator_v1/4, - do_next_v1/3 + do_open_shard_v1/3, + do_drop_shard_v1/2, + do_get_streams_v1/4, + do_make_iterator_v1/5, + do_next_v1/4 ]). -export_type([shard_id/0, stream/0, iterator/0, message_id/0]). @@ -57,7 +59,7 @@ -type db() :: emqx_ds:db(). --type shard_id() :: {db(), atom()}. +-type shard_id() :: atom(). %% This enapsulates the stream entity from the replication level. %% @@ -86,41 +88,36 @@ %%================================================================================ -spec list_shards(db()) -> [shard_id()]. 
-list_shards(DB) -> +list_shards(_DB) -> %% TODO: milestone 5 - lists:map( - fun(Node) -> - shard_id(DB, Node) - end, - list_nodes() - ). + list_nodes(). -spec open_db(db(), emqx_ds:create_db_opts()) -> ok | {error, _}. open_db(DB, Opts) -> %% TODO: improve error reporting, don't just crash lists:foreach( - fun(Node) -> - Shard = shard_id(DB, Node), - ok = emqx_ds_proto_v1:open_shard(Node, Shard, Opts) + fun(Shard) -> + Node = node_of_shard(DB, Shard), + ok = emqx_ds_proto_v1:open_shard(Node, DB, Shard, Opts) end, - list_nodes() + list_shards(DB) ). -spec drop_db(db()) -> ok | {error, _}. drop_db(DB) -> lists:foreach( - fun(Node) -> - Shard = shard_id(DB, Node), - ok = emqx_ds_proto_v1:drop_shard(Node, Shard) + fun(Shard) -> + Node = node_of_shard(DB, Shard), + ok = emqx_ds_proto_v1:drop_shard(Node, DB, Shard) end, - list_nodes() + list_shards(DB) ). -spec store_batch(db(), [emqx_types:message()], emqx_ds:message_store_opts()) -> emqx_ds:store_batch_result(). store_batch(DB, Msg, Opts) -> %% TODO: Currently we store messages locally. - Shard = shard_id(DB, node()), + Shard = {DB, node()}, emqx_ds_storage_layer:store_batch(Shard, Msg, Opts). -spec get_streams(db(), emqx_ds:topic_filter(), emqx_ds:time()) -> @@ -129,8 +126,8 @@ get_streams(DB, TopicFilter, StartTime) -> Shards = list_shards(DB), lists:flatmap( fun(Shard) -> - Node = node_of_shard(Shard), - Streams = emqx_ds_proto_v1:get_streams(Node, Shard, TopicFilter, StartTime), + Node = node_of_shard(DB, Shard), + Streams = emqx_ds_proto_v1:get_streams(Node, DB, Shard, TopicFilter, StartTime), lists:map( fun({RankY, Stream}) -> RankX = Shard, @@ -147,22 +144,22 @@ get_streams(DB, TopicFilter, StartTime) -> Shards ). --spec make_iterator(stream(), emqx_ds:topic_filter(), emqx_ds:time()) -> +-spec make_iterator(emqx_ds:db(), stream(), emqx_ds:topic_filter(), emqx_ds:time()) -> emqx_ds:make_iterator_result(iterator()). 
-make_iterator(Stream, TopicFilter, StartTime) -> +make_iterator(DB, Stream, TopicFilter, StartTime) -> #{?tag := ?stream, ?shard := Shard, ?enc := StorageStream} = Stream, - Node = node_of_shard(Shard), - case emqx_ds_proto_v1:make_iterator(Node, Shard, StorageStream, TopicFilter, StartTime) of + Node = node_of_shard(DB, Shard), + case emqx_ds_proto_v1:make_iterator(Node, DB, Shard, StorageStream, TopicFilter, StartTime) of {ok, Iter} -> {ok, #{?tag => ?it, ?shard => Shard, ?enc => Iter}}; Err = {error, _} -> Err end. --spec next(iterator(), pos_integer()) -> emqx_ds:next_result(iterator()). -next(Iter0, BatchSize) -> +-spec next(emqx_ds:db(), iterator(), pos_integer()) -> emqx_ds:next_result(iterator()). +next(DB, Iter0, BatchSize) -> #{?tag := ?it, ?shard := Shard, ?enc := StorageIter0} = Iter0, - Node = node_of_shard(Shard), + Node = node_of_shard(DB, Shard), %% TODO: iterator can contain information that is useful for %% reconstructing messages sent over the network. For example, %% when we send messages with the learned topic index, we could @@ -171,7 +168,7 @@ next(Iter0, BatchSize) -> %% %% This kind of trickery should be probably done here in the %% replication layer. Or, perhaps, in the logic layer. - case emqx_ds_proto_v1:next(Node, Shard, StorageIter0, BatchSize) of + case emqx_ds_proto_v1:next(Node, DB, Shard, StorageIter0, BatchSize) of {ok, StorageIter, Batch} -> Iter = Iter0#{?enc := StorageIter}, {ok, Iter, Batch}; @@ -187,42 +184,49 @@ next(Iter0, BatchSize) -> %% Internal exports (RPC targets) %%================================================================================ --spec do_open_shard_v1(shard_id(), emqx_ds:create_db_opts()) -> ok. -do_open_shard_v1(Shard, Opts) -> - emqx_ds_storage_layer:open_shard(Shard, Opts). +-spec do_open_shard_v1(db(), emqx_ds_storage_layer:shard_id(), emqx_ds:create_db_opts()) -> + ok | {error, _}. +do_open_shard_v1(DB, Shard, Opts) -> + emqx_ds_storage_layer:open_shard({DB, Shard}, Opts). 
--spec do_drop_shard_v1(shard_id()) -> ok. -do_drop_shard_v1(Shard) -> - emqx_ds_storage_layer:drop_shard(Shard). +-spec do_drop_shard_v1(db(), emqx_ds_storage_layer:shard_id()) -> ok | {error, _}. +do_drop_shard_v1(DB, Shard) -> + emqx_ds_storage_layer:drop_shard({DB, Shard}). --spec do_get_streams_v1(shard_id(), emqx_ds:topic_filter(), emqx_ds:time()) -> +-spec do_get_streams_v1( + emqx_ds:db(), emqx_ds_replicationi_layer:shard_id(), emqx_ds:topic_filter(), emqx_ds:time() +) -> [{integer(), emqx_ds_storage_layer:stream()}]. -do_get_streams_v1(Shard, TopicFilter, StartTime) -> - emqx_ds_storage_layer:get_streams(Shard, TopicFilter, StartTime). +do_get_streams_v1(DB, Shard, TopicFilter, StartTime) -> + emqx_ds_storage_layer:get_streams({DB, Shard}, TopicFilter, StartTime). -spec do_make_iterator_v1( - shard_id(), emqx_ds_storage_layer:stream(), emqx_ds:topic_filter(), emqx_ds:time() + emqx_ds:db(), + emqx_ds_storage_layer:shard_id(), + emqx_ds_storage_layer:stream(), + emqx_ds:topic_filter(), + emqx_ds:time() ) -> {ok, emqx_ds_storage_layer:iterator()} | {error, _}. -do_make_iterator_v1(Shard, Stream, TopicFilter, StartTime) -> - emqx_ds_storage_layer:make_iterator(Shard, Stream, TopicFilter, StartTime). +do_make_iterator_v1(DB, Shard, Stream, TopicFilter, StartTime) -> + emqx_ds_storage_layer:make_iterator({DB, Shard}, Stream, TopicFilter, StartTime). --spec do_next_v1(shard_id(), emqx_ds_storage_layer:iterator(), pos_integer()) -> +-spec do_next_v1( + emqx_ds:db(), + emqx_ds_replication_layer:shard_id(), + emqx_ds_storage_layer:iterator(), + pos_integer() +) -> emqx_ds:next_result(emqx_ds_storage_layer:iterator()). -do_next_v1(Shard, Iter, BatchSize) -> - emqx_ds_storage_layer:next(Shard, Iter, BatchSize). +do_next_v1(DB, Shard, Iter, BatchSize) -> + emqx_ds_storage_layer:next({DB, Shard}, Iter, BatchSize). 
%%================================================================================ %% Internal functions %%================================================================================ -shard_id(DB, Node) -> - %% TODO: don't bake node name into the schema, don't repeat the - %% Mnesia's 1M$ mistake. - {DB, Node}. - --spec node_of_shard(shard_id()) -> node(). -node_of_shard({_DB, Node}) -> +-spec node_of_shard(emqx_ds:db(), shard_id()) -> node(). +node_of_shard(_DB, Node) -> Node. list_nodes() -> diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl index d2c997ae1..50b6af5b6 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl @@ -110,7 +110,7 @@ %%================================================================================ -spec create( - emqx_ds_replication_layer:shard_id(), + emqx_ds_storage_layer:shard_id(), rocksdb:db_handle(), emqx_ds_storage_layer:gen_id(), options() @@ -137,7 +137,7 @@ create(_ShardId, DBHandle, GenId, Options) -> {Schema, [{DataCFName, DataCFHandle}, {TrieCFName, TrieCFHandle}]}. -spec open( - emqx_ds_replication_layer:shard_id(), + emqx_ds_storage_layer:shard_id(), rocksdb:db_handle(), emqx_ds_storage_layer:gen_id(), emqx_ds_storage_layer:cf_refs(), @@ -173,7 +173,7 @@ open(_Shard, DBHandle, GenId, CFRefs, Schema) -> }. -spec store_batch( - emqx_ds_replication_layer:shard_id(), s(), [emqx_types:message()], emqx_ds:message_store_opts() + emqx_ds_storage_layer:shard_id(), s(), [emqx_types:message()], emqx_ds:message_store_opts() ) -> emqx_ds:store_batch_result(). store_batch(_ShardId, S = #s{db = DB, data = Data}, Messages, _Options) -> @@ -187,7 +187,7 @@ store_batch(_ShardId, S = #s{db = DB, data = Data}, Messages, _Options) -> ). 
-spec get_streams( - emqx_ds_replication_layer:shard_id(), + emqx_ds_storage_layer:shard_id(), s(), emqx_ds:topic_filter(), emqx_ds:time() @@ -197,7 +197,7 @@ get_streams(_Shard, #s{trie = Trie}, TopicFilter, _StartTime) -> [#{?tag => ?stream, ?storage_key => I} || I <- Indexes]. -spec make_iterator( - emqx_ds_replication_layer:shard_id(), + emqx_ds_storage_layer:shard_id(), s(), stream(), emqx_ds:topic_filter(), diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl index c91ac49d5..8c2e55510 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl @@ -50,7 +50,7 @@ {emqx_ds_storage_reference, emqx_ds_storage_reference:options()} | {emqx_ds_storage_bitfield_lts, emqx_ds_storage_bitfield_lts:options()}. --type shard_id() :: emqx_ds_replication_layer:shard_id(). +-type shard_id() :: {emqx_ds:db(), emqx_ds_replication_layer:shard_id()}. -type cf_refs() :: [{string(), rocksdb:cf_handle()}]. @@ -217,7 +217,7 @@ next(Shard, Iter = #{?tag := ?it, ?generation := GenId, ?enc := GenIter0}, Batch -spec start_link(shard_id(), emqx_ds:builtin_db_opts()) -> {ok, pid()}. -start_link(Shard, Options) -> +start_link(Shard = {_, _}, Options) -> gen_server:start_link(?REF(Shard), ?MODULE, {Shard, Options}, []). -record(s, { @@ -417,11 +417,11 @@ generations_since(Shard, Since) -> -define(PERSISTENT_TERM(SHARD), {emqx_ds_storage_layer, SHARD}). -spec get_schema_runtime(shard_id()) -> shard(). -get_schema_runtime(Shard) -> +get_schema_runtime(Shard = {_, _}) -> persistent_term:get(?PERSISTENT_TERM(Shard)). -spec put_schema_runtime(shard_id(), shard()) -> ok. -put_schema_runtime(Shard, RuntimeSchema) -> +put_schema_runtime(Shard = {_, _}, RuntimeSchema) -> persistent_term:put(?PERSISTENT_TERM(Shard), RuntimeSchema), ok. 
diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_layer_sup.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_layer_sup.erl index fac7204bf..174312c4e 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_layer_sup.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_layer_sup.erl @@ -25,7 +25,7 @@ start_link() -> supervisor:start_link({local, ?SUP}, ?MODULE, []). --spec start_shard(emqx_ds_replication_layer:shard_id(), emqx_ds:create_db_opts()) -> +-spec start_shard(emqx_ds_storage_layer:shard_id(), emqx_ds:create_db_opts()) -> supervisor:startchild_ret(). start_shard(Shard, Options) -> supervisor:start_child(?SUP, shard_child_spec(Shard, Options)). @@ -33,9 +33,10 @@ start_shard(Shard, Options) -> -spec stop_shard(emqx_ds:shard()) -> ok | {error, _}. stop_shard(Shard) -> ok = supervisor:terminate_child(?SUP, Shard), - ok = supervisor:delete_child(?SUP, Shard). + Ok = supervisor:delete_child(?SUP, Shard). --spec ensure_shard(emqx_ds:shard(), emqx_ds_storage_layer:options()) -> ok | {error, _Reason}. +-spec ensure_shard(emqx_ds_storage_layer:shard_id(), emqx_ds_storage_layer:options()) -> + ok | {error, _Reason}. ensure_shard(Shard, Options) -> case start_shard(Shard, Options) of {ok, _Pid} -> @@ -63,7 +64,7 @@ init([]) -> %% Internal functions %%================================================================================ --spec shard_child_spec(emqx_ds_replication_layer:shard_id(), emqx_ds:create_db_opts()) -> +-spec shard_child_spec(emqx_ds_storage_layer:shard_id(), emqx_ds:create_db_opts()) -> supervisor:child_spec(). shard_child_spec(Shard, Options) -> #{ diff --git a/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl b/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl index 6a79a4a61..c5fee4757 100644 --- a/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl +++ b/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl @@ -19,7 +19,7 @@ -include_lib("emqx_utils/include/bpapi.hrl"). 
%% API: --export([open_shard/3, drop_shard/2, get_streams/4, make_iterator/5, next/4]). +-export([open_shard/4, drop_shard/3, get_streams/5, make_iterator/6, next/5]). %% behavior callbacks: -export([introduced_in/0]). @@ -28,44 +28,58 @@ %% API funcions %%================================================================================ --spec open_shard(node(), emqx_ds_replication_layer:shard_id(), emqx_ds:create_db_opts()) -> +-spec open_shard( + node(), + emqx_ds_replication_layer:db(), + emqx_ds_replication_layer:shard_id(), + emqx_ds:create_db_opts() +) -> ok. -open_shard(Node, Shard, Opts) -> - erpc:call(Node, emqx_ds_replication_layer, do_open_shard_v1, [Shard, Opts]). +open_shard(Node, DB, Shard, Opts) -> + erpc:call(Node, emqx_ds_replication_layer, do_open_shard_v1, [DB, Shard, Opts]). --spec drop_shard(node(), emqx_ds_replication_layer:shard_id()) -> +-spec drop_shard(node(), emqx_ds_replication_layer:db(), emqx_ds_replication_layer:shard_id()) -> ok. -drop_shard(Node, Shard) -> - erpc:call(Node, emqx_ds_replication_layer, do_drop_shard_v1, [Shard]). +drop_shard(Node, DB, Shard) -> + erpc:call(Node, emqx_ds_replication_layer, do_drop_shard_v1, [DB, Shard]). -spec get_streams( - node(), emqx_ds_replication_layer:shard_id(), emqx_ds:topic_filter(), emqx_ds:time() + node(), + emqx_ds:db(), + emqx_ds_replication_layer:shard_id(), + emqx_ds:topic_filter(), + emqx_ds:time() ) -> [{integer(), emqx_ds_storage_layer:stream()}]. -get_streams(Node, Shard, TopicFilter, Time) -> - erpc:call(Node, emqx_ds_replication_layer, do_get_streams_v1, [Shard, TopicFilter, Time]). +get_streams(Node, DB, Shard, TopicFilter, Time) -> + erpc:call(Node, emqx_ds_replication_layer, do_get_streams_v1, [DB, Shard, TopicFilter, Time]). -spec make_iterator( node(), + emqx_ds:db(), emqx_ds_replication_layer:shard_id(), emqx_ds_storage_layer:stream(), emqx_ds:topic_filter(), emqx_ds:time() ) -> {ok, emqx_ds_storage_layer:iterator()} | {error, _}. 
-make_iterator(Node, Shard, Stream, TopicFilter, StartTime) -> +make_iterator(Node, DB, Shard, Stream, TopicFilter, StartTime) -> erpc:call(Node, emqx_ds_replication_layer, do_make_iterator_v1, [ - Shard, Stream, TopicFilter, StartTime + DB, Shard, Stream, TopicFilter, StartTime ]). -spec next( - node(), emqx_ds_replication_layer:shard_id(), emqx_ds_storage_layer:iterator(), pos_integer() + node(), + emqx_ds:db(), + emqx_ds_replication_layer:shard_id(), + emqx_ds_storage_layer:iterator(), + pos_integer() ) -> {ok, emqx_ds_storage_layer:iterator(), [emqx_types:messages()]} | {ok, end_of_stream} | {error, _}. -next(Node, Shard, Iter, BatchSize) -> - erpc:call(Node, emqx_ds_replication_layer, do_next_v1, [Shard, Iter, BatchSize]). +next(Node, DB, Shard, Iter, BatchSize) -> + erpc:call(Node, emqx_ds_replication_layer, do_next_v1, [DB, Shard, Iter, BatchSize]). %%================================================================================ %% behavior callbacks diff --git a/apps/emqx_durable_storage/test/emqx_ds_SUITE.erl b/apps/emqx_durable_storage/test/emqx_ds_SUITE.erl index 9637431d3..9b74e3227 100644 --- a/apps/emqx_durable_storage/test/emqx_ds_SUITE.erl +++ b/apps/emqx_durable_storage/test/emqx_ds_SUITE.erl @@ -54,7 +54,7 @@ t_02_smoke_get_streams_start_iter(_Config) -> TopicFilter = ['#'], [{Rank, Stream}] = emqx_ds:get_streams(DB, TopicFilter, StartTime), ?assertMatch({_, _}, Rank), - ?assertMatch({ok, _Iter}, emqx_ds:make_iterator(Stream, TopicFilter, StartTime)). + ?assertMatch({ok, _Iter}, emqx_ds:make_iterator(DB, Stream, TopicFilter, StartTime)). %% A simple smoke test that verifies that it's possible to iterate %% over messages. 
@@ -70,8 +70,8 @@ t_03_smoke_iterate(_Config) -> ], ?assertMatch(ok, emqx_ds:store_batch(DB, Msgs)), [{_, Stream}] = emqx_ds:get_streams(DB, TopicFilter, StartTime), - {ok, Iter0} = emqx_ds:make_iterator(Stream, TopicFilter, StartTime), - {ok, Iter, Batch} = iterate(Iter0, 1), + {ok, Iter0} = emqx_ds:make_iterator(DB, Stream, TopicFilter, StartTime), + {ok, Iter, Batch} = iterate(DB, Iter0, 1), ?assertEqual(Msgs, Batch, {Iter0, Iter}). %% Verify that iterators survive restart of the application. This is @@ -91,14 +91,14 @@ t_04_restart(_Config) -> ], ?assertMatch(ok, emqx_ds:store_batch(DB, Msgs)), [{_, Stream}] = emqx_ds:get_streams(DB, TopicFilter, StartTime), - {ok, Iter0} = emqx_ds:make_iterator(Stream, TopicFilter, StartTime), + {ok, Iter0} = emqx_ds:make_iterator(DB, Stream, TopicFilter, StartTime), %% Restart the application: ?tp(warning, emqx_ds_SUITE_restart_app, #{}), ok = application:stop(emqx_durable_storage), {ok, _} = application:ensure_all_started(emqx_durable_storage), ok = emqx_ds:open_db(DB, opts()), %% The old iterator should be still operational: - {ok, Iter, Batch} = iterate(Iter0, 1), + {ok, Iter, Batch} = iterate(DB, Iter0, 1), ?assertEqual(Msgs, Batch, {Iter0, Iter}). message(Topic, Payload, PublishedAt) -> @@ -109,15 +109,15 @@ message(Topic, Payload, PublishedAt) -> id = emqx_guid:gen() }. -iterate(It, BatchSize) -> - iterate(It, BatchSize, []). +iterate(DB, It, BatchSize) -> + iterate(DB, It, BatchSize, []). -iterate(It0, BatchSize, Acc) -> - case emqx_ds:next(It0, BatchSize) of +iterate(DB, It0, BatchSize, Acc) -> + case emqx_ds:next(DB, It0, BatchSize) of {ok, It, []} -> {ok, It, Acc}; {ok, It, Msgs} -> - iterate(It, BatchSize, Acc ++ Msgs); + iterate(DB, It, BatchSize, Acc ++ Msgs); Ret -> Ret end. 
From feef23fc0836ac6f112715f31a82cb344958b9fa Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Fri, 10 Nov 2023 03:10:13 +0100 Subject: [PATCH 129/155] feat(ds): Pass store_batch through RPC --- .../src/emqx_ds_replication_layer.erl | 19 ++++++++++++++----- .../src/emqx_ds_storage_layer_sup.erl | 2 +- .../src/proto/emqx_ds_proto_v1.erl | 13 ++++++++++++- 3 files changed, 27 insertions(+), 7 deletions(-) diff --git a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl index 4a9240f95..df957a740 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl @@ -34,6 +34,7 @@ -export([ do_open_shard_v1/3, do_drop_shard_v1/2, + do_store_batch_v1/4, do_get_streams_v1/4, do_make_iterator_v1/5, do_next_v1/4 @@ -115,10 +116,11 @@ drop_db(DB) -> -spec store_batch(db(), [emqx_types:message()], emqx_ds:message_store_opts()) -> emqx_ds:store_batch_result(). -store_batch(DB, Msg, Opts) -> +store_batch(DB, Batch, Opts) -> %% TODO: Currently we store messages locally. - Shard = {DB, node()}, - emqx_ds_storage_layer:store_batch(Shard, Msg, Opts). + Shard = node(), + Node = node_of_shard(DB, Shard), + emqx_ds_proto_v1:store_batch(Node, DB, Shard, Batch, Opts). -spec get_streams(db(), emqx_ds:topic_filter(), emqx_ds:time()) -> [{emqx_ds:stream_rank(), stream()}]. @@ -184,15 +186,22 @@ next(DB, Iter0, BatchSize) -> %% Internal exports (RPC targets) %%================================================================================ --spec do_open_shard_v1(db(), emqx_ds_storage_layer:shard_id(), emqx_ds:create_db_opts()) -> +-spec do_open_shard_v1(db(), emqx_ds_replication_layer:shard_id(), emqx_ds:create_db_opts()) -> ok | {error, _}. do_open_shard_v1(DB, Shard, Opts) -> emqx_ds_storage_layer:open_shard({DB, Shard}, Opts). --spec do_drop_shard_v1(db(), emqx_ds_storage_layer:shard_id()) -> ok | {error, _}. 
+-spec do_drop_shard_v1(db(), emqx_ds_replication_layer:shard_id()) -> ok | {error, _}. do_drop_shard_v1(DB, Shard) -> emqx_ds_storage_layer:drop_shard({DB, Shard}). +-spec do_store_batch_v1( + db(), emqx_ds_replication_layer:shard_id(), [emqx_types:message()], emqx_ds:message_store_opts() +) -> + emqx_ds:store_batch_result(). +do_store_batch_v1(DB, Shard, Batch, Options) -> + emqx_ds_storage_layer:store_batch({DB, Shard}, Batch, Options). + -spec do_get_streams_v1( emqx_ds:db(), emqx_ds_replicationi_layer:shard_id(), emqx_ds:topic_filter(), emqx_ds:time() ) -> diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_layer_sup.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_layer_sup.erl index 174312c4e..c2eee8dcb 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_layer_sup.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_layer_sup.erl @@ -33,7 +33,7 @@ start_shard(Shard, Options) -> -spec stop_shard(emqx_ds:shard()) -> ok | {error, _}. stop_shard(Shard) -> ok = supervisor:terminate_child(?SUP, Shard), - Ok = supervisor:delete_child(?SUP, Shard). + ok = supervisor:delete_child(?SUP, Shard). -spec ensure_shard(emqx_ds_storage_layer:shard_id(), emqx_ds_storage_layer:options()) -> ok | {error, _Reason}. diff --git a/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl b/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl index c5fee4757..ae6932072 100644 --- a/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl +++ b/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl @@ -19,7 +19,7 @@ -include_lib("emqx_utils/include/bpapi.hrl"). %% API: --export([open_shard/4, drop_shard/3, get_streams/5, make_iterator/6, next/5]). +-export([open_shard/4, drop_shard/3, store_batch/5, get_streams/5, make_iterator/6, next/5]). %% behavior callbacks: -export([introduced_in/0]). 
@@ -81,6 +81,17 @@ make_iterator(Node, DB, Shard, Stream, TopicFilter, StartTime) -> next(Node, DB, Shard, Iter, BatchSize) -> erpc:call(Node, emqx_ds_replication_layer, do_next_v1, [DB, Shard, Iter, BatchSize]). +-spec store_batch( + node(), + emqx_ds:db(), + emqx_ds_replication_layer:shard_id(), + [emqx_types:message()], + emqx_ds:message_store_opts() +) -> + emqx_ds:store_batch_result(). +store_batch(Node, DB, Shard, Batch, Options) -> + erpc:call(Node, emqx_ds_replication_layer, do_store_batch_v1, [DB, Shard, Batch, Options]). + %%================================================================================ %% behavior callbacks %%================================================================================ From 2ade6da7a6642ef996b9e3b713706999bd672316 Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Fri, 10 Nov 2023 12:53:30 +0100 Subject: [PATCH 130/155] fix(ds): Remove redundand type --- .../src/emqx_ds_replication_layer.erl | 23 +++++++++++-------- .../src/proto/emqx_ds_proto_v1.erl | 4 ++-- 2 files changed, 15 insertions(+), 12 deletions(-) diff --git a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl index df957a740..54a946436 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl @@ -58,8 +58,6 @@ -define(shard, 2). -define(enc, 3). --type db() :: emqx_ds:db(). - -type shard_id() :: atom(). %% This enapsulates the stream entity from the replication level. @@ -88,12 +86,12 @@ %% API functions %%================================================================================ --spec list_shards(db()) -> [shard_id()]. +-spec list_shards(emqx_ds:db()) -> [shard_id()]. list_shards(_DB) -> %% TODO: milestone 5 list_nodes(). --spec open_db(db(), emqx_ds:create_db_opts()) -> ok | {error, _}. +-spec open_db(emqx_ds:db(), emqx_ds:create_db_opts()) -> ok | {error, _}. 
open_db(DB, Opts) -> %% TODO: improve error reporting, don't just crash lists:foreach( @@ -104,7 +102,7 @@ open_db(DB, Opts) -> list_shards(DB) ). --spec drop_db(db()) -> ok | {error, _}. +-spec drop_db(emqx_ds:db()) -> ok | {error, _}. drop_db(DB) -> lists:foreach( fun(Shard) -> @@ -114,7 +112,7 @@ drop_db(DB) -> list_shards(DB) ). --spec store_batch(db(), [emqx_types:message()], emqx_ds:message_store_opts()) -> +-spec store_batch(emqx_ds:db(), [emqx_types:message()], emqx_ds:message_store_opts()) -> emqx_ds:store_batch_result(). store_batch(DB, Batch, Opts) -> %% TODO: Currently we store messages locally. @@ -122,7 +120,7 @@ store_batch(DB, Batch, Opts) -> Node = node_of_shard(DB, Shard), emqx_ds_proto_v1:store_batch(Node, DB, Shard, Batch, Opts). --spec get_streams(db(), emqx_ds:topic_filter(), emqx_ds:time()) -> +-spec get_streams(emqx_ds:db(), emqx_ds:topic_filter(), emqx_ds:time()) -> [{emqx_ds:stream_rank(), stream()}]. get_streams(DB, TopicFilter, StartTime) -> Shards = list_shards(DB), @@ -186,17 +184,22 @@ next(DB, Iter0, BatchSize) -> %% Internal exports (RPC targets) %%================================================================================ --spec do_open_shard_v1(db(), emqx_ds_replication_layer:shard_id(), emqx_ds:create_db_opts()) -> +-spec do_open_shard_v1( + emqx_ds:db(), emqx_ds_replication_layer:shard_id(), emqx_ds:create_db_opts() +) -> ok | {error, _}. do_open_shard_v1(DB, Shard, Opts) -> emqx_ds_storage_layer:open_shard({DB, Shard}, Opts). --spec do_drop_shard_v1(db(), emqx_ds_replication_layer:shard_id()) -> ok | {error, _}. +-spec do_drop_shard_v1(emqx_ds:db(), emqx_ds_replication_layer:shard_id()) -> ok | {error, _}. do_drop_shard_v1(DB, Shard) -> emqx_ds_storage_layer:drop_shard({DB, Shard}). 
-spec do_store_batch_v1( - db(), emqx_ds_replication_layer:shard_id(), [emqx_types:message()], emqx_ds:message_store_opts() + emqx_ds:db(), + emqx_ds_replication_layer:shard_id(), + [emqx_types:message()], + emqx_ds:message_store_opts() ) -> emqx_ds:store_batch_result(). do_store_batch_v1(DB, Shard, Batch, Options) -> diff --git a/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl b/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl index ae6932072..10d1ed7a5 100644 --- a/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl +++ b/apps/emqx_durable_storage/src/proto/emqx_ds_proto_v1.erl @@ -30,7 +30,7 @@ -spec open_shard( node(), - emqx_ds_replication_layer:db(), + emqx_ds:db(), emqx_ds_replication_layer:shard_id(), emqx_ds:create_db_opts() ) -> @@ -38,7 +38,7 @@ open_shard(Node, DB, Shard, Opts) -> erpc:call(Node, emqx_ds_replication_layer, do_open_shard_v1, [DB, Shard, Opts]). --spec drop_shard(node(), emqx_ds_replication_layer:db(), emqx_ds_replication_layer:shard_id()) -> +-spec drop_shard(node(), emqx_ds:db(), emqx_ds_replication_layer:shard_id()) -> ok. drop_shard(Node, DB, Shard) -> erpc:call(Node, emqx_ds_replication_layer, do_drop_shard_v1, [DB, Shard]). 
From b24b66081ab7f3fd15369f2f304b18db6655d613 Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Wed, 8 Nov 2023 19:45:39 +0100 Subject: [PATCH 131/155] refactor(authn/authz_http_schema): use typerefl alias --- apps/emqx_auth/test/emqx_authn/emqx_authn_schema_SUITE.erl | 4 ++-- apps/emqx_auth_http/src/emqx_authn_http_schema.erl | 2 +- apps/emqx_auth_http/src/emqx_authz_http_schema.erl | 6 +++--- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/apps/emqx_auth/test/emqx_authn/emqx_authn_schema_SUITE.erl b/apps/emqx_auth/test/emqx_authn/emqx_authn_schema_SUITE.erl index 6d6ea420f..23532b4af 100644 --- a/apps/emqx_auth/test/emqx_authn/emqx_authn_schema_SUITE.erl +++ b/apps/emqx_auth/test/emqx_authn/emqx_authn_schema_SUITE.erl @@ -54,7 +54,7 @@ t_check_schema(_Config) -> ?assertThrow( #{ path := "authentication.1.password_hash_algorithm.name", - matched_type := "builtin_db/authn-hash:simple", + matched_type := "authn:builtin_db/authn-hash:simple", reason := unable_to_convert_to_enum_symbol }, Check(ConfigNotOk) @@ -73,7 +73,7 @@ t_check_schema(_Config) -> #{ path := "authentication.1.password_hash_algorithm", reason := "algorithm_name_missing", - matched_type := "builtin_db" + matched_type := "authn:builtin_db" }, Check(ConfigMissingAlgoName) ). 
diff --git a/apps/emqx_auth_http/src/emqx_authn_http_schema.erl b/apps/emqx_auth_http/src/emqx_authn_http_schema.erl index 9464b0aaf..19704d152 100644 --- a/apps/emqx_auth_http/src/emqx_authn_http_schema.erl +++ b/apps/emqx_auth_http/src/emqx_authn_http_schema.erl @@ -100,7 +100,7 @@ common_fields() -> {backend, emqx_authn_schema:backend(?AUTHN_BACKEND)}, {url, fun url/1}, {body, - hoconsc:mk(map([{fuzzy, term(), binary()}]), #{ + hoconsc:mk(typerefl:alias("map", map([{fuzzy, term(), binary()}])), #{ required => false, desc => ?DESC(body) })}, {request_timeout, fun request_timeout/1} diff --git a/apps/emqx_auth_http/src/emqx_authz_http_schema.erl b/apps/emqx_auth_http/src/emqx_authz_http_schema.erl index 5f9af846b..21f70de64 100644 --- a/apps/emqx_auth_http/src/emqx_authz_http_schema.erl +++ b/apps/emqx_auth_http/src/emqx_authz_http_schema.erl @@ -99,7 +99,7 @@ http_common_fields() -> mk_duration("Request timeout", #{ required => false, default => <<"30s">>, desc => ?DESC(request_timeout) })}, - {body, ?HOCON(map(), #{required => false, desc => ?DESC(body)})} + {body, ?HOCON(hoconsc:map(name, binary()), #{required => false, desc => ?DESC(body)})} ] ++ lists:keydelete( pool_type, @@ -108,7 +108,7 @@ http_common_fields() -> ). headers(type) -> - list({binary(), binary()}); + typerefl:alias("map", list({binary(), binary()})); headers(desc) -> ?DESC(?FUNCTION_NAME); headers(converter) -> @@ -121,7 +121,7 @@ headers(_) -> undefined. 
headers_no_content_type(type) -> - list({binary(), binary()}); + typerefl:alias("map", list({binary(), binary()})); headers_no_content_type(desc) -> ?DESC(?FUNCTION_NAME); headers_no_content_type(converter) -> From 6b3aaf5b242a8f6f6f183172c0dfb5ea03582d14 Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Wed, 8 Nov 2023 19:46:08 +0100 Subject: [PATCH 132/155] refactor(emqx_bridge_gcp_pubsub): use typerefl alias --- apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.erl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.erl b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.erl index 685fd3397..a42047b43 100644 --- a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.erl +++ b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.erl @@ -101,7 +101,7 @@ fields(connector_config) -> )}, {service_account_json, sc( - service_account_json(), + typerefl:alias("map", ?MODULE:service_account_json()), #{ required => true, validator => fun ?MODULE:service_account_json_validator/1, From 7b59d4685456f74c86835e3974514180392b99dd Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Wed, 8 Nov 2023 22:16:22 +0100 Subject: [PATCH 133/155] fix(schema): add namesapce to sso schema --- apps/emqx_dashboard_sso/src/emqx_dashboard_sso_schema.erl | 4 +++- apps/emqx_ldap/src/emqx_ldap.erl | 5 ++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_schema.erl b/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_schema.erl index 92f9ba519..aa032a3cc 100644 --- a/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_schema.erl +++ b/apps/emqx_dashboard_sso/src/emqx_dashboard_sso_schema.erl @@ -8,7 +8,7 @@ -include_lib("typerefl/include/types.hrl"). %% Hocon --export([fields/1, desc/1]). +-export([namespace/0, fields/1, desc/1]). 
-export([ common_backend_schema/1, @@ -21,6 +21,8 @@ %%------------------------------------------------------------------------------ %% Hocon Schema %%------------------------------------------------------------------------------ +namespace() -> "sso". + fields(sso) -> lists:map( fun({Type, Module}) -> diff --git a/apps/emqx_ldap/src/emqx_ldap.erl b/apps/emqx_ldap/src/emqx_ldap.erl index a77a8ecf0..bb16efbda 100644 --- a/apps/emqx_ldap/src/emqx_ldap.erl +++ b/apps/emqx_ldap/src/emqx_ldap.erl @@ -37,7 +37,7 @@ %% ecpool connect & reconnect -export([connect/1]). --export([roots/0, fields/1, desc/1]). +-export([namespace/0, roots/0, fields/1, desc/1]). -export([do_get_status/1]). @@ -57,6 +57,9 @@ %%===================================================================== %% Hocon schema + +namespace() -> "ldap". + roots() -> [{config, #{type => hoconsc:ref(?MODULE, config)}}]. From d603de10e6ca26b010b587f46d1b3a9c17c45bae Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Wed, 8 Nov 2023 23:13:26 +0100 Subject: [PATCH 134/155] refactor(schema): add namespace/0 to all schema modules --- .../src/emqx_bridge_rabbitmq_connector.erl | 4 +++- apps/emqx_ldap/src/emqx_ldap.erl | 2 +- apps/emqx_license/src/emqx_license_schema.erl | 4 +++- apps/emqx_rule_engine/src/emqx_rule_api_schema.erl | 4 +++- apps/emqx_telemetry/src/emqx_telemetry_schema.erl | 4 ++++ 5 files changed, 14 insertions(+), 4 deletions(-) diff --git a/apps/emqx_bridge_rabbitmq/src/emqx_bridge_rabbitmq_connector.erl b/apps/emqx_bridge_rabbitmq/src/emqx_bridge_rabbitmq_connector.erl index ff439b676..2af1c16c8 100644 --- a/apps/emqx_bridge_rabbitmq/src/emqx_bridge_rabbitmq_connector.erl +++ b/apps/emqx_bridge_rabbitmq/src/emqx_bridge_rabbitmq_connector.erl @@ -20,7 +20,7 @@ -behaviour(ecpool_worker). %% hocon_schema callbacks --export([roots/0, fields/1]). +-export([namespace/0, roots/0, fields/1]). %% HTTP API callbacks -export([values/1]). 
@@ -43,6 +43,8 @@ %% Internal callbacks -export([publish_messages/3]). +namespace() -> "rabbitmq". + roots() -> [{config, #{type => hoconsc:ref(?MODULE, config)}}]. diff --git a/apps/emqx_ldap/src/emqx_ldap.erl b/apps/emqx_ldap/src/emqx_ldap.erl index bb16efbda..1c0c7124f 100644 --- a/apps/emqx_ldap/src/emqx_ldap.erl +++ b/apps/emqx_ldap/src/emqx_ldap.erl @@ -59,7 +59,7 @@ %% Hocon schema namespace() -> "ldap". - + roots() -> [{config, #{type => hoconsc:ref(?MODULE, config)}}]. diff --git a/apps/emqx_license/src/emqx_license_schema.erl b/apps/emqx_license/src/emqx_license_schema.erl index 8f2d7f20d..f2b91811e 100644 --- a/apps/emqx_license/src/emqx_license_schema.erl +++ b/apps/emqx_license/src/emqx_license_schema.erl @@ -13,12 +13,14 @@ -behaviour(hocon_schema). --export([roots/0, fields/1, validations/0, desc/1, tags/0]). +-export([namespace/0, roots/0, fields/1, validations/0, desc/1, tags/0]). -export([ default_license/0 ]). +namespace() -> "license". + roots() -> [ {license, diff --git a/apps/emqx_rule_engine/src/emqx_rule_api_schema.erl b/apps/emqx_rule_engine/src/emqx_rule_api_schema.erl index 0424bfb60..e9adbbdf6 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_api_schema.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_api_schema.erl @@ -24,7 +24,7 @@ -export([check_params/2]). --export([roots/0, fields/1]). +-export([namespace/0, roots/0, fields/1]). -type tag() :: rule_creation | rule_test | rule_engine. @@ -46,6 +46,8 @@ check_params(Params, Tag) -> %%====================================================================================== %% Hocon Schema Definitions +namespace() -> "rule_engine". 
+ roots() -> [ {"rule_engine", sc(ref("rule_engine"), #{desc => ?DESC("root_rule_engine")})}, diff --git a/apps/emqx_telemetry/src/emqx_telemetry_schema.erl b/apps/emqx_telemetry/src/emqx_telemetry_schema.erl index 1e1f547c5..586b70f72 100644 --- a/apps/emqx_telemetry/src/emqx_telemetry_schema.erl +++ b/apps/emqx_telemetry/src/emqx_telemetry_schema.erl @@ -22,11 +22,15 @@ -behaviour(hocon_schema). -export([ + namespace/0, roots/0, fields/1, desc/1 ]). +%% 'emqxtel' to distinguish open-telemetry +namespace() -> "emqxtel". + roots() -> ["telemetry"]. fields("telemetry") -> From 8a4fba431e39a20307f5bf959fbad1506ab1b01c Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Wed, 8 Nov 2023 19:43:56 +0100 Subject: [PATCH 135/155] refactor(emqx_limiter_schema): use typerefl alias --- .../emqx_limiter/src/emqx_limiter_schema.erl | 24 +++++++++++++------ .../src/emqx_retainer_schema.erl | 2 +- 2 files changed, 18 insertions(+), 8 deletions(-) diff --git a/apps/emqx/src/emqx_limiter/src/emqx_limiter_schema.erl b/apps/emqx/src/emqx_limiter/src/emqx_limiter_schema.erl index 2dd4aa241..09ab6099c 100644 --- a/apps/emqx/src/emqx_limiter/src/emqx_limiter_schema.erl +++ b/apps/emqx/src/emqx_limiter/src/emqx_limiter_schema.erl @@ -33,7 +33,8 @@ desc/1, types/0, short_paths/0, - short_paths_fields/0 + short_paths_fields/0, + rate_type/0 ]). -define(KILOBYTE, 1024). 
@@ -129,9 +130,9 @@ fields(limiter) -> ]; fields(node_opts) -> [ - {rate, ?HOCON(rate(), #{desc => deprecated_desc(rate), default => <<"infinity">>})}, + {rate, ?HOCON(rate_type(), #{desc => deprecated_desc(rate), default => <<"infinity">>})}, {burst, - ?HOCON(burst_rate(), #{ + ?HOCON(burst_rate_type(), #{ desc => deprecated_desc(burst), default => <<"0">> })} @@ -142,7 +143,7 @@ fields(bucket_opts) -> fields_of_bucket(<<"infinity">>); fields(client_opts) -> [ - {rate, ?HOCON(rate(), #{default => <<"infinity">>, desc => deprecated_desc(rate)})}, + {rate, ?HOCON(rate_type(), #{default => <<"infinity">>, desc => deprecated_desc(rate)})}, {initial, ?HOCON(initial(), #{ default => <<"0">>, @@ -164,7 +165,7 @@ fields(client_opts) -> } )}, {burst, - ?HOCON(burst(), #{ + ?HOCON(burst_type(), #{ desc => deprecated_desc(burst), default => <<"0">>, importance => ?IMPORTANCE_HIDDEN, @@ -211,7 +212,7 @@ short_paths_fields() -> short_paths_fields(Importance) -> [ {Name, - ?HOCON(rate(), #{ + ?HOCON(rate_type(), #{ desc => ?DESC(Name), required => false, importance => Importance, @@ -415,7 +416,7 @@ composite_bucket_fields(Types, ClientRef) -> fields_of_bucket(Default) -> [ - {rate, ?HOCON(rate(), #{desc => deprecated_desc(rate), default => Default})}, + {rate, ?HOCON(rate_type(), #{desc => deprecated_desc(rate), default => Default})}, {burst, ?HOCON(burst(), #{ desc => deprecated_desc(burst), @@ -461,3 +462,12 @@ alias_of_type(_) -> deprecated_desc(_Field) -> <<"Deprecated since v5.0.25">>. + +rate_type() -> + typerefl:alias("string", rate()). + +burst_type() -> + typerefl:alias("string", burst()). + +burst_rate_type() -> + typerefl:alias("string", burst_rate()). 
diff --git a/apps/emqx_retainer/src/emqx_retainer_schema.erl b/apps/emqx_retainer/src/emqx_retainer_schema.erl index 7b1a9675e..983b27601 100644 --- a/apps/emqx_retainer/src/emqx_retainer_schema.erl +++ b/apps/emqx_retainer/src/emqx_retainer_schema.erl @@ -77,7 +77,7 @@ fields("retainer") -> )}, {delivery_rate, ?HOCON( - emqx_limiter_schema:rate(), + emqx_limiter_schema:rate_type(), #{ required => false, desc => ?DESC(delivery_rate), From 86110824eb2ade0a5b5a9a3fa3a89af6b3841941 Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Fri, 10 Nov 2023 08:52:52 +0100 Subject: [PATCH 136/155] feat: upgrade hocon to 0.40.0 which supports union type display name --- apps/emqx/rebar.config | 2 +- apps/emqx/src/emqx_schema.erl | 2 +- apps/emqx_auth/src/emqx_authz/emqx_authz_schema.erl | 4 ++-- apps/emqx_auth/test/emqx_authn/emqx_authn_chains_SUITE.erl | 1 - apps/emqx_conf/src/emqx_conf.erl | 2 +- apps/emqx_conf/src/emqx_conf_schema.erl | 2 +- apps/emqx_dashboard/src/emqx_dashboard_swagger.erl | 2 +- apps/emqx_management/src/emqx_mgmt_api_listeners.erl | 2 +- apps/emqx_rule_engine/src/emqx_rule_engine_schema.erl | 2 +- mix.exs | 2 +- rebar.config | 2 +- 11 files changed, 11 insertions(+), 12 deletions(-) diff --git a/apps/emqx/rebar.config b/apps/emqx/rebar.config index 9f67caf5d..71f581267 100644 --- a/apps/emqx/rebar.config +++ b/apps/emqx/rebar.config @@ -30,7 +30,7 @@ {esockd, {git, "https://github.com/emqx/esockd", {tag, "5.9.7"}}}, {ekka, {git, "https://github.com/emqx/ekka", {tag, "0.15.16"}}}, {gen_rpc, {git, "https://github.com/emqx/gen_rpc", {tag, "3.2.1"}}}, - {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.39.19"}}}, + {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.40.0"}}}, {emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.3"}}}, {pbkdf2, {git, "https://github.com/emqx/erlang-pbkdf2.git", {tag, "2.0.4"}}}, {recon, {git, "https://github.com/ferd/recon", {tag, "2.5.1"}}}, diff --git 
a/apps/emqx/src/emqx_schema.erl b/apps/emqx/src/emqx_schema.erl index 3848e77b4..4ed8a3107 100644 --- a/apps/emqx/src/emqx_schema.erl +++ b/apps/emqx/src/emqx_schema.erl @@ -3316,7 +3316,7 @@ get_tombstone_map_value_type(Schema) -> %% hoconsc:map_value_type(Schema) ?MAP(_Name, Union) = hocon_schema:field_schema(Schema, type), %% TODO: violation of abstraction, fix hoconsc:union_members/1 - ?UNION(Members) = Union, + ?UNION(Members, _) = Union, Tombstone = tombstone(), [Type, Tombstone] = hoconsc:union_members(Members), Type. diff --git a/apps/emqx_auth/src/emqx_authz/emqx_authz_schema.erl b/apps/emqx_auth/src/emqx_authz/emqx_authz_schema.erl index ac2c2503d..426c7a9f6 100644 --- a/apps/emqx_auth/src/emqx_authz/emqx_authz_schema.erl +++ b/apps/emqx_auth/src/emqx_authz/emqx_authz_schema.erl @@ -136,7 +136,7 @@ authz_fields() -> [ {sources, ?HOCON( - ?ARRAY(?UNION(UnionMemberSelector)), + ?ARRAY(hoconsc:union(UnionMemberSelector)), #{ default => [default_authz()], desc => ?DESC(sources), @@ -153,7 +153,7 @@ api_authz_fields() -> [{sources, ?HOCON(?ARRAY(api_source_type()), #{desc => ?DESC(sources)})}]. api_source_type() -> - ?UNION(api_authz_refs()). + hoconsc:union(api_authz_refs()). api_authz_refs() -> lists:concat([api_source_refs(Mod) || Mod <- source_schema_mods()]). diff --git a/apps/emqx_auth/test/emqx_authn/emqx_authn_chains_SUITE.erl b/apps/emqx_auth/test/emqx_authn/emqx_authn_chains_SUITE.erl index 747a1d15a..61a15b139 100644 --- a/apps/emqx_auth/test/emqx_authn/emqx_authn_chains_SUITE.erl +++ b/apps/emqx_auth/test/emqx_authn/emqx_authn_chains_SUITE.erl @@ -16,7 +16,6 @@ -module(emqx_authn_chains_SUITE). --behaviour(hocon_schema). -behaviour(emqx_authn_provider). -compile(export_all). 
diff --git a/apps/emqx_conf/src/emqx_conf.erl b/apps/emqx_conf/src/emqx_conf.erl index c4bd0efc9..1fb3bab28 100644 --- a/apps/emqx_conf/src/emqx_conf.erl +++ b/apps/emqx_conf/src/emqx_conf.erl @@ -292,7 +292,7 @@ hocon_schema_to_spec(?MAP(Name, Type), LocalModule) -> }, SubRefs }; -hocon_schema_to_spec(?UNION(Types), LocalModule) -> +hocon_schema_to_spec(?UNION(Types, _DisplayName), LocalModule) -> {OneOf, Refs} = lists:foldl( fun(Type, {Acc, RefsAcc}) -> {Schema, SubRefs} = hocon_schema_to_spec(Type, LocalModule), diff --git a/apps/emqx_conf/src/emqx_conf_schema.erl b/apps/emqx_conf/src/emqx_conf_schema.erl index 79877184d..3a2b5d972 100644 --- a/apps/emqx_conf/src/emqx_conf_schema.erl +++ b/apps/emqx_conf/src/emqx_conf_schema.erl @@ -978,7 +978,7 @@ fields("log") -> })}, {"file", sc( - ?UNION([ + hoconsc:union([ ?R_REF("log_file_handler"), ?MAP(handler_name, ?R_REF("log_file_handler")) ]), diff --git a/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl b/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl index 75e93fdd1..4a8072804 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl +++ b/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl @@ -776,7 +776,7 @@ hocon_schema_to_spec(?MAP(Name, Type), LocalModule) -> }, SubRefs }; -hocon_schema_to_spec(?UNION(Types), LocalModule) -> +hocon_schema_to_spec(?UNION(Types, _DisplayName), LocalModule) -> {OneOf, Refs} = lists:foldl( fun(Type, {Acc, RefsAcc}) -> {Schema, SubRefs} = hocon_schema_to_spec(Type, LocalModule), diff --git a/apps/emqx_management/src/emqx_mgmt_api_listeners.erl b/apps/emqx_management/src/emqx_mgmt_api_listeners.erl index 8295047b9..1718a14cf 100644 --- a/apps/emqx_management/src/emqx_mgmt_api_listeners.erl +++ b/apps/emqx_management/src/emqx_mgmt_api_listeners.erl @@ -313,7 +313,7 @@ create_listener_schema(Opts) -> ], Example = maps:remove(id, tcp_schema_example()), emqx_dashboard_swagger:schema_with_example( - ?UNION(Schemas), + hoconsc:union(Schemas), Example#{name => <<"demo">>} ). 
diff --git a/apps/emqx_rule_engine/src/emqx_rule_engine_schema.erl b/apps/emqx_rule_engine/src/emqx_rule_engine_schema.erl index d0019a1c5..c6d3c7ff8 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_engine_schema.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_engine_schema.erl @@ -262,7 +262,7 @@ actions() -> end. qos() -> - ?UNION([emqx_schema:qos(), binary()]). + hoconsc:union([emqx_schema:qos(), binary()]). rule_engine_settings() -> [ diff --git a/mix.exs b/mix.exs index 18e72a6a8..463642c93 100644 --- a/mix.exs +++ b/mix.exs @@ -72,7 +72,7 @@ defmodule EMQXUmbrella.MixProject do # in conflict by emqtt and hocon {:getopt, "1.0.2", override: true}, {:snabbkaffe, github: "kafka4beam/snabbkaffe", tag: "1.0.8", override: true}, - {:hocon, github: "emqx/hocon", tag: "0.39.19", override: true}, + {:hocon, github: "emqx/hocon", tag: "0.40.0", override: true}, {:emqx_http_lib, github: "emqx/emqx_http_lib", tag: "0.5.3", override: true}, {:esasl, github: "emqx/esasl", tag: "0.2.0"}, {:jose, github: "potatosalad/erlang-jose", tag: "1.11.2"}, diff --git a/rebar.config b/rebar.config index c7101abc3..fed48e3aa 100644 --- a/rebar.config +++ b/rebar.config @@ -75,7 +75,7 @@ , {system_monitor, {git, "https://github.com/ieQu1/system_monitor", {tag, "3.0.3"}}} , {getopt, "1.0.2"} , {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "1.0.8"}}} - , {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.39.19"}}} + , {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.40.0"}}} , {emqx_http_lib, {git, "https://github.com/emqx/emqx_http_lib.git", {tag, "0.5.3"}}} , {esasl, {git, "https://github.com/emqx/esasl", {tag, "0.2.0"}}} , {jose, {git, "https://github.com/potatosalad/erlang-jose", {tag, "1.11.2"}}} From f1de0aa1761636bffe43a50a27fc46ed1671336f Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Fri, 10 Nov 2023 09:33:15 +0100 Subject: [PATCH 137/155] fix(schema): add namespace to authn schemas --- 
.../src/emqx_authn/emqx_authn_schema.erl | 6 +++- .../emqx_authn/emqx_authn_schema_tests.erl | 30 ++++++++++++------- .../src/emqx_authn_http_schema.erl | 8 ++--- .../src/emqx_authn_jwt_schema.erl | 9 ++++-- .../src/emqx_authn_ldap_bind_schema.erl | 6 ++-- .../src/emqx_authn_ldap_schema.erl | 9 ++++-- .../src/emqx_authn_scram_mnesia_schema.erl | 3 ++ .../src/emqx_authn_mongodb_schema.erl | 10 +++---- .../src/emqx_authz_mongodb_schema.erl | 6 ++-- .../src/emqx_authn_mysql_schema.erl | 6 ++-- .../test/emqx_authn_postgresql_SUITE.erl | 2 +- .../test/emqx_authn_redis_SUITE.erl | 2 +- .../src/emqx_gcp_device_authn_schema.erl | 9 ++++-- apps/emqx_mongodb/src/emqx_mongodb.erl | 7 ++++- 14 files changed, 71 insertions(+), 42 deletions(-) diff --git a/apps/emqx_auth/src/emqx_authn/emqx_authn_schema.erl b/apps/emqx_auth/src/emqx_authn/emqx_authn_schema.erl index 9b9935a1f..371c6f2be 100644 --- a/apps/emqx_auth/src/emqx_authn/emqx_authn_schema.erl +++ b/apps/emqx_auth/src/emqx_authn/emqx_authn_schema.erl @@ -38,7 +38,8 @@ authenticator_type_without/1, authenticator_type_without/2, mechanism/1, - backend/1 + backend/1, + namespace/0 ]). -export([ @@ -60,6 +61,7 @@ api_write %% config: schema for config validation | config. +-callback namespace() -> string(). -callback refs() -> [schema_ref()]. -callback refs(shema_kind()) -> [schema_ref()]. -callback select_union_member(emqx_config:raw_config()) -> [schema_ref()] | undefined | no_return(). @@ -74,6 +76,8 @@ refs/1 ]). +namespace() -> "authn". + roots() -> []. injected_fields(AuthnSchemaMods) -> diff --git a/apps/emqx_auth/test/emqx_authn/emqx_authn_schema_tests.erl b/apps/emqx_auth/test/emqx_authn/emqx_authn_schema_tests.erl index b0451e110..b4835cdaa 100644 --- a/apps/emqx_auth/test/emqx_authn/emqx_authn_schema_tests.erl +++ b/apps/emqx_auth/test/emqx_authn/emqx_authn_schema_tests.erl @@ -22,6 +22,7 @@ -define(ERR(Reason), {error, Reason}). 
union_member_selector_mongo_test_() -> + ok = ensure_schema_load(), [ {"unknown", fun() -> ?assertMatch( @@ -31,25 +32,26 @@ union_member_selector_mongo_test_() -> end}, {"single", fun() -> ?assertMatch( - ?ERR(#{matched_type := "mongo_single"}), + ?ERR(#{matched_type := "authn:mongo_single"}), check("{mechanism = password_based, backend = mongodb, mongo_type = single}") ) end}, {"replica-set", fun() -> ?assertMatch( - ?ERR(#{matched_type := "mongo_rs"}), + ?ERR(#{matched_type := "authn:mongo_rs"}), check("{mechanism = password_based, backend = mongodb, mongo_type = rs}") ) end}, {"sharded", fun() -> ?assertMatch( - ?ERR(#{matched_type := "mongo_sharded"}), + ?ERR(#{matched_type := "authn:mongo_sharded"}), check("{mechanism = password_based, backend = mongodb, mongo_type = sharded}") ) end} ]. union_member_selector_jwt_test_() -> + ok = ensure_schema_load(), [ {"unknown", fun() -> ?assertMatch( @@ -59,25 +61,26 @@ union_member_selector_jwt_test_() -> end}, {"jwks", fun() -> ?assertMatch( - ?ERR(#{matched_type := "jwt_jwks"}), + ?ERR(#{matched_type := "authn:jwt_jwks"}), check("{mechanism = jwt, use_jwks = true}") ) end}, {"publick-key", fun() -> ?assertMatch( - ?ERR(#{matched_type := "jwt_public_key"}), + ?ERR(#{matched_type := "authn:jwt_public_key"}), check("{mechanism = jwt, use_jwks = false, public_key = 1}") ) end}, {"hmac-based", fun() -> ?assertMatch( - ?ERR(#{matched_type := "jwt_hmac"}), + ?ERR(#{matched_type := "authn:jwt_hmac"}), check("{mechanism = jwt, use_jwks = false}") ) end} ]. 
union_member_selector_redis_test_() -> + ok = ensure_schema_load(), [ {"unknown", fun() -> ?assertMatch( @@ -87,25 +90,26 @@ union_member_selector_redis_test_() -> end}, {"single", fun() -> ?assertMatch( - ?ERR(#{matched_type := "redis_single"}), + ?ERR(#{matched_type := "authn:redis_single"}), check("{mechanism = password_based, backend = redis, redis_type = single}") ) end}, {"cluster", fun() -> ?assertMatch( - ?ERR(#{matched_type := "redis_cluster"}), + ?ERR(#{matched_type := "authn:redis_cluster"}), check("{mechanism = password_based, backend = redis, redis_type = cluster}") ) end}, {"sentinel", fun() -> ?assertMatch( - ?ERR(#{matched_type := "redis_sentinel"}), + ?ERR(#{matched_type := "authn:redis_sentinel"}), check("{mechanism = password_based, backend = redis, redis_type = sentinel}") ) end} ]. union_member_selector_http_test_() -> + ok = ensure_schema_load(), [ {"unknown", fun() -> ?assertMatch( @@ -115,13 +119,13 @@ union_member_selector_http_test_() -> end}, {"get", fun() -> ?assertMatch( - ?ERR(#{matched_type := "http_get"}), + ?ERR(#{matched_type := "authn:http_get"}), check("{mechanism = password_based, backend = http, method = get}") ) end}, {"post", fun() -> ?assertMatch( - ?ERR(#{matched_type := "http_post"}), + ?ERR(#{matched_type := "authn:http_post"}), check("{mechanism = password_based, backend = http, method = post}") ) end} @@ -132,3 +136,7 @@ check(HoconConf) -> #{roots => emqx_authn_schema:global_auth_fields()}, ["authentication= ", HoconConf] ). + +ensure_schema_load() -> + _ = emqx_conf_schema:roots(), + ok. diff --git a/apps/emqx_auth_http/src/emqx_authn_http_schema.erl b/apps/emqx_auth_http/src/emqx_authn_http_schema.erl index 19704d152..7b7af727d 100644 --- a/apps/emqx_auth_http/src/emqx_authn_http_schema.erl +++ b/apps/emqx_auth_http/src/emqx_authn_http_schema.erl @@ -16,10 +16,6 @@ -module(emqx_authn_http_schema). --include("emqx_auth_http.hrl"). --include_lib("emqx_auth/include/emqx_authn.hrl"). 
--include_lib("hocon/include/hoconsc.hrl"). - -behaviour(emqx_authn_schema). -export([ @@ -31,6 +27,10 @@ namespace/0 ]). +-include("emqx_auth_http.hrl"). +-include_lib("emqx_auth/include/emqx_authn.hrl"). +-include_lib("hocon/include/hoconsc.hrl"). + -define(NOT_EMPTY(MSG), emqx_resource_validator:not_empty(MSG)). -define(THROW_VALIDATION_ERROR(ERROR, MESSAGE), throw(#{ diff --git a/apps/emqx_auth_jwt/src/emqx_authn_jwt_schema.erl b/apps/emqx_auth_jwt/src/emqx_authn_jwt_schema.erl index fc7de7cd8..9118d3d1b 100644 --- a/apps/emqx_auth_jwt/src/emqx_authn_jwt_schema.erl +++ b/apps/emqx_auth_jwt/src/emqx_authn_jwt_schema.erl @@ -16,18 +16,21 @@ -module(emqx_authn_jwt_schema). --include("emqx_auth_jwt.hrl"). --include_lib("hocon/include/hoconsc.hrl"). - -behaviour(emqx_authn_schema). -export([ + namespace/0, fields/1, desc/1, refs/0, select_union_member/1 ]). +-include("emqx_auth_jwt.hrl"). +-include_lib("hocon/include/hoconsc.hrl"). + +namespace() -> "authn". + refs() -> [ ?R_REF(jwt_hmac), diff --git a/apps/emqx_auth_ldap/src/emqx_authn_ldap_bind_schema.erl b/apps/emqx_auth_ldap/src/emqx_authn_ldap_bind_schema.erl index 580673d85..e5e83daa1 100644 --- a/apps/emqx_auth_ldap/src/emqx_authn_ldap_bind_schema.erl +++ b/apps/emqx_auth_ldap/src/emqx_authn_ldap_bind_schema.erl @@ -16,9 +16,6 @@ -module(emqx_authn_ldap_bind_schema). --include("emqx_auth_ldap.hrl"). --include_lib("hocon/include/hoconsc.hrl"). - -behaviour(emqx_authn_schema). -export([ @@ -29,6 +26,9 @@ namespace/0 ]). +-include("emqx_auth_ldap.hrl"). +-include_lib("hocon/include/hoconsc.hrl"). + namespace() -> "authn". refs() -> diff --git a/apps/emqx_auth_ldap/src/emqx_authn_ldap_schema.erl b/apps/emqx_auth_ldap/src/emqx_authn_ldap_schema.erl index c26ca94e8..fe9917fa1 100644 --- a/apps/emqx_auth_ldap/src/emqx_authn_ldap_schema.erl +++ b/apps/emqx_auth_ldap/src/emqx_authn_ldap_schema.erl @@ -16,18 +16,21 @@ -module(emqx_authn_ldap_schema). --include("emqx_auth_ldap.hrl"). 
--include_lib("hocon/include/hoconsc.hrl"). - -behaviour(emqx_authn_schema). -export([ + namespace/0, fields/1, desc/1, refs/0, select_union_member/1 ]). +-include("emqx_auth_ldap.hrl"). +-include_lib("hocon/include/hoconsc.hrl"). + +namespace() -> "authn". + refs() -> [?R_REF(ldap)]. diff --git a/apps/emqx_auth_mnesia/src/emqx_authn_scram_mnesia_schema.erl b/apps/emqx_auth_mnesia/src/emqx_authn_scram_mnesia_schema.erl index fa22693b3..ef4ec6e05 100644 --- a/apps/emqx_auth_mnesia/src/emqx_authn_scram_mnesia_schema.erl +++ b/apps/emqx_auth_mnesia/src/emqx_authn_scram_mnesia_schema.erl @@ -22,12 +22,15 @@ -behaviour(emqx_authn_schema). -export([ + namespace/0, fields/1, desc/1, refs/0, select_union_member/1 ]). +namespace() -> "authn". + refs() -> [?R_REF(scram)]. diff --git a/apps/emqx_auth_mongodb/src/emqx_authn_mongodb_schema.erl b/apps/emqx_auth_mongodb/src/emqx_authn_mongodb_schema.erl index 53c6a6a10..b72a1e83a 100644 --- a/apps/emqx_auth_mongodb/src/emqx_authn_mongodb_schema.erl +++ b/apps/emqx_auth_mongodb/src/emqx_authn_mongodb_schema.erl @@ -16,19 +16,19 @@ -module(emqx_authn_mongodb_schema). --include("emqx_auth_mongodb.hrl"). --include_lib("hocon/include/hoconsc.hrl"). - -behaviour(emqx_authn_schema). -export([ + namespace/0, fields/1, desc/1, refs/0, - select_union_member/1, - namespace/0 + select_union_member/1 ]). +-include("emqx_auth_mongodb.hrl"). +-include_lib("hocon/include/hoconsc.hrl"). + namespace() -> "authn". refs() -> diff --git a/apps/emqx_auth_mongodb/src/emqx_authz_mongodb_schema.erl b/apps/emqx_auth_mongodb/src/emqx_authz_mongodb_schema.erl index ee20b962e..bdde704f9 100644 --- a/apps/emqx_auth_mongodb/src/emqx_authz_mongodb_schema.erl +++ b/apps/emqx_auth_mongodb/src/emqx_authz_mongodb_schema.erl @@ -16,9 +16,6 @@ -module(emqx_authz_mongodb_schema). --include("emqx_auth_mongodb.hrl"). --include_lib("hocon/include/hoconsc.hrl"). - -export([ type/0, fields/1, @@ -28,6 +25,9 @@ namespace/0 ]). +-include("emqx_auth_mongodb.hrl"). 
+-include_lib("hocon/include/hoconsc.hrl"). + namespace() -> "authz". type() -> ?AUTHZ_TYPE. diff --git a/apps/emqx_auth_mysql/src/emqx_authn_mysql_schema.erl b/apps/emqx_auth_mysql/src/emqx_authn_mysql_schema.erl index 17609acb8..6472794fe 100644 --- a/apps/emqx_auth_mysql/src/emqx_authn_mysql_schema.erl +++ b/apps/emqx_auth_mysql/src/emqx_authn_mysql_schema.erl @@ -16,9 +16,6 @@ -module(emqx_authn_mysql_schema). --include("emqx_auth_mysql.hrl"). --include_lib("hocon/include/hoconsc.hrl"). - -behaviour(emqx_authn_schema). -export([ @@ -29,6 +26,9 @@ select_union_member/1 ]). +-include("emqx_auth_mysql.hrl"). +-include_lib("hocon/include/hoconsc.hrl"). + namespace() -> "authn". refs() -> diff --git a/apps/emqx_auth_postgresql/test/emqx_authn_postgresql_SUITE.erl b/apps/emqx_auth_postgresql/test/emqx_authn_postgresql_SUITE.erl index ea44c0a45..af1f1db2d 100644 --- a/apps/emqx_auth_postgresql/test/emqx_authn_postgresql_SUITE.erl +++ b/apps/emqx_auth_postgresql/test/emqx_authn_postgresql_SUITE.erl @@ -104,7 +104,7 @@ t_update_with_invalid_config(_Config) -> ?assertMatch( {error, #{ kind := validation_error, - matched_type := "postgresql", + matched_type := "authn:postgresql", path := "authentication.1.server", reason := required_field }}, diff --git a/apps/emqx_auth_redis/test/emqx_authn_redis_SUITE.erl b/apps/emqx_auth_redis/test/emqx_authn_redis_SUITE.erl index 081c4e641..e7673b790 100644 --- a/apps/emqx_auth_redis/test/emqx_authn_redis_SUITE.erl +++ b/apps/emqx_auth_redis/test/emqx_authn_redis_SUITE.erl @@ -170,7 +170,7 @@ test_create_invalid_config(InvalidAuthConfig, Path) -> ?assertMatch( {error, #{ kind := validation_error, - matched_type := "redis_single", + matched_type := "authn:redis_single", path := Path }}, emqx:update_config(?PATH, {create_authenticator, ?GLOBAL, InvalidAuthConfig}) diff --git a/apps/emqx_gcp_device/src/emqx_gcp_device_authn_schema.erl b/apps/emqx_gcp_device/src/emqx_gcp_device_authn_schema.erl index a01c6d0e4..975e17ff0 100644 --- 
a/apps/emqx_gcp_device/src/emqx_gcp_device_authn_schema.erl +++ b/apps/emqx_gcp_device/src/emqx_gcp_device_authn_schema.erl @@ -16,18 +16,21 @@ -module(emqx_gcp_device_authn_schema). --include("emqx_gcp_device.hrl"). --include_lib("hocon/include/hoconsc.hrl"). - -behaviour(emqx_authn_schema). -export([ + namespace/0, fields/1, desc/1, refs/0, select_union_member/1 ]). +-include("emqx_gcp_device.hrl"). +-include_lib("hocon/include/hoconsc.hrl"). + +namespace() -> "authn". + refs() -> [?R_REF(gcp_device)]. select_union_member(#{<<"mechanism">> := ?AUTHN_MECHANISM_BIN}) -> diff --git a/apps/emqx_mongodb/src/emqx_mongodb.erl b/apps/emqx_mongodb/src/emqx_mongodb.erl index 77161911a..a5795a554 100644 --- a/apps/emqx_mongodb/src/emqx_mongodb.erl +++ b/apps/emqx_mongodb/src/emqx_mongodb.erl @@ -22,6 +22,7 @@ -include_lib("snabbkaffe/include/snabbkaffe.hrl"). -behaviour(emqx_resource). +-behaviour(hocon_schema). %% callbacks of behaviour emqx_resource -export([ @@ -29,7 +30,8 @@ on_start/2, on_stop/2, on_query/3, - on_get_status/2 + on_get_status/2, + namespace/0 ]). %% ecpool callback @@ -50,6 +52,9 @@ }). %%===================================================================== + +namespace() -> "mongo". 
+ roots() -> [ {config, #{ From 855b3c5b294e104195e7e32c471932c9904c3d3d Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Fri, 10 Nov 2023 09:58:02 +0100 Subject: [PATCH 138/155] test: ensure atom exists --- apps/emqx_bridge_pulsar/test/emqx_bridge_pulsar_tests.erl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/emqx_bridge_pulsar/test/emqx_bridge_pulsar_tests.erl b/apps/emqx_bridge_pulsar/test/emqx_bridge_pulsar_tests.erl index 7169ea3d2..29299dcc9 100644 --- a/apps/emqx_bridge_pulsar/test/emqx_bridge_pulsar_tests.erl +++ b/apps/emqx_bridge_pulsar/test/emqx_bridge_pulsar_tests.erl @@ -11,7 +11,7 @@ %%=========================================================================== pulsar_producer_validations_test() -> - Name = my_producer, + Name = list_to_atom("my_producer"), Conf0 = pulsar_producer_hocon(), Conf1 = Conf0 ++ From 1d77d07774e34d45f89404406299ab6393b1ab3f Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Fri, 10 Nov 2023 10:30:22 +0100 Subject: [PATCH 139/155] build: log red text for error message --- scripts/apps-version-check.sh | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/scripts/apps-version-check.sh b/scripts/apps-version-check.sh index b32b39fd1..b76e8d345 100755 --- a/scripts/apps-version-check.sh +++ b/scripts/apps-version-check.sh @@ -4,6 +4,12 @@ set -euo pipefail # ensure dir cd -P -- "$(dirname -- "${BASH_SOURCE[0]}")/.." 
+log_red() { + local RED='\033[0;31m' # Red + local NC='\033[0m' # No Color + echo -e "${RED}${1}${NC}" +} + # match any official release tag 'e*' and 'v*' latest_release="$(env PREV_TAG_MATCH_PATTERN='*' ./scripts/find-prev-rel-tag.sh)" echo "Version check compare base: $latest_release" @@ -47,7 +53,7 @@ for app in ${APPS}; do -- "$app_path/priv" \ -- "$app_path/c_src" | wc -l ) " if [ "$changed_lines" -gt 0 ]; then - echo "ERROR: $src_file needs a vsn bump" + log_red "ERROR: $src_file needs a vsn bump" bad_app_count=$(( bad_app_count + 1)) fi else From e653c6b4e08ac17f04e4b2e53de9f09b5459e31c Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Wed, 8 Nov 2023 23:41:06 +0100 Subject: [PATCH 140/155] chore: bump app versions --- apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.app.src | 2 +- apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb.app.src | 2 +- apps/emqx_bridge_rabbitmq/src/emqx_bridge_rabbitmq.app.src | 2 +- apps/emqx_dashboard_sso/src/emqx_dashboard_sso.app.src | 2 +- apps/emqx_mongodb/src/emqx_mongodb.app.src | 2 +- apps/emqx_telemetry/src/emqx_telemetry.app.src | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.app.src b/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.app.src index 4c5a15b79..a8a938a0b 100644 --- a/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.app.src +++ b/apps/emqx_bridge_greptimedb/src/emqx_bridge_greptimedb.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_greptimedb, [ {description, "EMQX GreptimeDB Bridge"}, - {vsn, "0.1.3"}, + {vsn, "0.1.4"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb.app.src b/apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb.app.src index 27fe1659c..c6236d97c 100644 --- a/apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb.app.src +++ b/apps/emqx_bridge_influxdb/src/emqx_bridge_influxdb.app.src @@ -1,6 +1,6 @@ {application, 
emqx_bridge_influxdb, [ {description, "EMQX Enterprise InfluxDB Bridge"}, - {vsn, "0.1.5"}, + {vsn, "0.1.6"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_rabbitmq/src/emqx_bridge_rabbitmq.app.src b/apps/emqx_bridge_rabbitmq/src/emqx_bridge_rabbitmq.app.src index c7d931c93..7e32b5a89 100644 --- a/apps/emqx_bridge_rabbitmq/src/emqx_bridge_rabbitmq.app.src +++ b/apps/emqx_bridge_rabbitmq/src/emqx_bridge_rabbitmq.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_rabbitmq, [ {description, "EMQX Enterprise RabbitMQ Bridge"}, - {vsn, "0.1.5"}, + {vsn, "0.1.6"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_dashboard_sso/src/emqx_dashboard_sso.app.src b/apps/emqx_dashboard_sso/src/emqx_dashboard_sso.app.src index e00a3cbfa..71788947b 100644 --- a/apps/emqx_dashboard_sso/src/emqx_dashboard_sso.app.src +++ b/apps/emqx_dashboard_sso/src/emqx_dashboard_sso.app.src @@ -1,6 +1,6 @@ {application, emqx_dashboard_sso, [ {description, "EMQX Dashboard Single Sign-On"}, - {vsn, "0.1.1"}, + {vsn, "0.1.2"}, {registered, [emqx_dashboard_sso_sup]}, {applications, [ kernel, diff --git a/apps/emqx_mongodb/src/emqx_mongodb.app.src b/apps/emqx_mongodb/src/emqx_mongodb.app.src index eb846a7ab..2212ac7d4 100644 --- a/apps/emqx_mongodb/src/emqx_mongodb.app.src +++ b/apps/emqx_mongodb/src/emqx_mongodb.app.src @@ -1,6 +1,6 @@ {application, emqx_mongodb, [ {description, "EMQX MongoDB Connector"}, - {vsn, "0.1.2"}, + {vsn, "0.1.3"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_telemetry/src/emqx_telemetry.app.src b/apps/emqx_telemetry/src/emqx_telemetry.app.src index d9483298f..32c2baa91 100644 --- a/apps/emqx_telemetry/src/emqx_telemetry.app.src +++ b/apps/emqx_telemetry/src/emqx_telemetry.app.src @@ -1,6 +1,6 @@ {application, emqx_telemetry, [ {description, "Report telemetry data for EMQX Opensource edition"}, - {vsn, "0.1.2"}, + {vsn, "0.1.3"}, {registered, [emqx_telemetry_sup, emqx_telemetry]}, {mod, {emqx_telemetry_app, []}}, 
{applications, [ From 101990b8fcf61781caa384e5c24da5f73bfb98b3 Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Fri, 10 Nov 2023 11:43:04 +0100 Subject: [PATCH 141/155] test: fix schema tests after types change --- .../src/emqx_dashboard_swagger.erl | 28 ++-- .../test/emqx_swagger_response_SUITE.erl | 125 +++++++++--------- 2 files changed, 83 insertions(+), 70 deletions(-) diff --git a/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl b/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl index 4a8072804..4a316e248 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl +++ b/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl @@ -345,15 +345,7 @@ parse_spec_ref(Module, Path, Options) -> erlang:apply(Module, schema, [Path]) catch Error:Reason:Stacktrace -> - %% This error is intended to fail the build - %% hence print to standard_error - io:format( - standard_error, - "Failed to generate swagger for path ~p in module ~p~n" - "error:~p~nreason:~p~n~p~n", - [Module, Path, Error, Reason, Stacktrace] - ), - error({failed_to_generate_swagger_spec, Module, Path}) + failed_to_generate_swagger_spec(Module, Path, Error, Reason, Stacktrace) end, OperationId = maps:get('operationId', Schema), {Specs, Refs} = maps:fold( @@ -369,6 +361,24 @@ parse_spec_ref(Module, Path, Options) -> RouteOpts = generate_route_opts(Schema, Options), {OperationId, Specs, Refs, RouteOpts}. +-ifdef(TEST). +-spec failed_to_generate_swagger_spec(_, _, _, _, _) -> no_return(). +failed_to_generate_swagger_spec(Module, Path, _Error, _Reason, _Stacktrace) -> + error({failed_to_generate_swagger_spec, Module, Path}). +-else. +-spec failed_to_generate_swagger_spec(_, _, _, _, _) -> no_return(). 
+failed_to_generate_swagger_spec(Module, Path, Error, Reason, Stacktrace) -> + %% This error is intended to fail the build + %% hence print to standard_error + io:format( + standard_error, + "Failed to generate swagger for path ~p in module ~p~n" + "error:~p~nreason:~p~n~p~n", + [Module, Path, Error, Reason, Stacktrace] + ), + error({failed_to_generate_swagger_spec, Module, Path}). + +-endif. generate_route_opts(Schema, Options) -> #{filter => compose_filters(filter(Options), custom_filter(Schema))}. diff --git a/apps/emqx_dashboard/test/emqx_swagger_response_SUITE.erl b/apps/emqx_dashboard/test/emqx_swagger_response_SUITE.erl index d84f17c44..376ace5c2 100644 --- a/apps/emqx_dashboard/test/emqx_swagger_response_SUITE.erl +++ b/apps/emqx_dashboard/test/emqx_swagger_response_SUITE.erl @@ -317,68 +317,72 @@ t_sub_fields(_Config) -> validate(Path, Object, ExpectRefs), ok. -t_complicated_type(_Config) -> +t_complex_type(_Config) -> Path = "/ref/complex_type", - Object = #{ - <<"content">> => #{ - <<"application/json">> => - #{ - <<"schema">> => #{ - <<"properties">> => - [ - {<<"no_neg_integer">>, #{minimum => 0, type => integer}}, - {<<"url">>, #{example => <<"http://127.0.0.1">>, type => string}}, - {<<"server">>, #{example => <<"127.0.0.1:80">>, type => string}}, - {<<"connect_timeout">>, #{ - example => infinity, - <<"oneOf">> => [ - #{example => infinity, type => string}, - #{type => integer} - ] - }}, - {<<"pool_type">>, #{enum => [random, hash], type => string}}, - {<<"timeout">>, #{ - example => infinity, - <<"oneOf">> => [ - #{example => infinity, type => string}, #{type => integer} - ] - }}, - {<<"bytesize">>, #{example => <<"32MB">>, type => string}}, - {<<"wordsize">>, #{example => <<"1024KB">>, type => string}}, - {<<"maps">>, #{example => #{}, type => object}}, - {<<"comma_separated_list">>, #{ - example => <<"item1,item2">>, type => string - }}, - {<<"comma_separated_atoms">>, #{ - example => <<"item1,item2">>, type => string - }}, - {<<"log_level">>, #{ 
- enum => [ - debug, - info, - notice, - warning, - error, - critical, - alert, - emergency, - all - ], - type => string - }}, - {<<"fix_integer">>, #{ - default => 100, enum => [100], type => integer - }} - ], - <<"type">> => object - } - } - } - }, {OperationId, Spec, Refs, #{}} = emqx_dashboard_swagger:parse_spec_ref(?MODULE, Path, #{}), ?assertEqual(test, OperationId), Response = maps:get(responses, maps:get(post, Spec)), - ?assertEqual(Object, maps:get(<<"200">>, Response)), + ResponseBody = maps:get(<<"200">>, Response), + Content = maps:get(<<"content">>, ResponseBody), + JsonContent = maps:get(<<"application/json">>, Content), + Schema = maps:get(<<"schema">>, JsonContent), + ?assertMatch(#{<<"type">> := object}, Schema), + Properties = maps:get(<<"properties">>, Schema), + ?assertMatch( + [ + {<<"no_neg_integer">>, #{minimum := 0, type := integer}}, + {<<"url">>, #{ + example := <<"http://127.0.0.1">>, type := string + }}, + {<<"server">>, #{ + example := <<"127.0.0.1:80">>, type := string + }}, + {<<"connect_timeout">>, #{ + example := _, type := string + }}, + {<<"pool_type">>, #{ + enum := [random, hash], type := string + }}, + {<<"timeout">>, #{ + example := infinity, + <<"oneOf">> := [ + #{example := infinity, type := string}, + #{type := integer} + ] + }}, + {<<"bytesize">>, #{ + example := <<"32MB">>, type := string + }}, + {<<"wordsize">>, #{ + example := <<"1024KB">>, type := string + }}, + {<<"maps">>, #{example := #{}, type := object}}, + {<<"comma_separated_list">>, #{ + example := <<"item1,item2">>, type := string + }}, + {<<"comma_separated_atoms">>, #{ + example := <<"item1,item2">>, type := string + }}, + {<<"log_level">>, #{ + enum := [ + debug, + info, + notice, + warning, + error, + critical, + alert, + emergency, + all + ], + type := string + }}, + {<<"fix_integer">>, #{ + default := 100, enum := [100], type := integer + }} + ], + Properties + ), ?assertEqual([], Refs), ok. 
@@ -647,9 +651,8 @@ schema("/ref/complex_type") -> {no_neg_integer, hoconsc:mk(non_neg_integer(), #{})}, {url, hoconsc:mk(url(), #{})}, {server, hoconsc:mk(emqx_schema:ip_port(), #{})}, - {connect_timeout, - hoconsc:mk(emqx_bridge_http_connector:connect_timeout(), #{})}, - {pool_type, hoconsc:mk(emqx_bridge_http_connector:pool_type(), #{})}, + {connect_timeout, hoconsc:mk(emqx_schema:timeout_duration(), #{})}, + {pool_type, hoconsc:mk(hoconsc:enum([random, hash]), #{})}, {timeout, hoconsc:mk(timeout(), #{})}, {bytesize, hoconsc:mk(emqx_schema:bytesize(), #{})}, {wordsize, hoconsc:mk(emqx_schema:wordsize(), #{})}, From 088de9476cd267f2d611ed33cbe1dfb628d8c96d Mon Sep 17 00:00:00 2001 From: Thales Macedo Garitezi Date: Fri, 10 Nov 2023 10:01:41 -0300 Subject: [PATCH 142/155] fix(ds): use integer for tag values Follow up to https://github.com/emqx/emqx/pull/11906#discussion_r1389115973 --- .../src/emqx_ds_replication_layer.erl | 16 +++++++------- .../src/emqx_ds_storage_bitfield_lts.erl | 22 +++++++++---------- .../src/emqx_ds_storage_layer.erl | 16 +++++++------- 3 files changed, 27 insertions(+), 27 deletions(-) diff --git a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl index 54a946436..a06af104d 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_replication_layer.erl @@ -50,8 +50,8 @@ %% records over the wire. %% tags: --define(stream, stream). --define(it, it). +-define(STREAM, 1). +-define(IT, 2). %% keys: -define(tag, 1). @@ -68,14 +68,14 @@ %% account. -opaque stream() :: #{ - ?tag := ?stream, + ?tag := ?STREAM, ?shard := emqx_ds_replication_layer:shard_id(), ?enc := emqx_ds_storage_layer:stream() }. -opaque iterator() :: #{ - ?tag := ?it, + ?tag := ?IT, ?shard := emqx_ds_replication_layer:shard_id(), ?enc := emqx_ds_storage_layer:iterator() }. 
@@ -133,7 +133,7 @@ get_streams(DB, TopicFilter, StartTime) -> RankX = Shard, Rank = {RankX, RankY}, {Rank, #{ - ?tag => ?stream, + ?tag => ?STREAM, ?shard => Shard, ?enc => Stream }} @@ -147,18 +147,18 @@ get_streams(DB, TopicFilter, StartTime) -> -spec make_iterator(emqx_ds:db(), stream(), emqx_ds:topic_filter(), emqx_ds:time()) -> emqx_ds:make_iterator_result(iterator()). make_iterator(DB, Stream, TopicFilter, StartTime) -> - #{?tag := ?stream, ?shard := Shard, ?enc := StorageStream} = Stream, + #{?tag := ?STREAM, ?shard := Shard, ?enc := StorageStream} = Stream, Node = node_of_shard(DB, Shard), case emqx_ds_proto_v1:make_iterator(Node, DB, Shard, StorageStream, TopicFilter, StartTime) of {ok, Iter} -> - {ok, #{?tag => ?it, ?shard => Shard, ?enc => Iter}}; + {ok, #{?tag => ?IT, ?shard => Shard, ?enc => Iter}}; Err = {error, _} -> Err end. -spec next(emqx_ds:db(), iterator(), pos_integer()) -> emqx_ds:next_result(iterator()). next(DB, Iter0, BatchSize) -> - #{?tag := ?it, ?shard := Shard, ?enc := StorageIter0} = Iter0, + #{?tag := ?IT, ?shard := Shard, ?enc := StorageIter0} = Iter0, Node = node_of_shard(DB, Shard), %% TODO: iterator can contain information that is useful for %% reconstructing messages sent over the network. For example, diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl index 50b6af5b6..2d4949919 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_bitfield_lts.erl @@ -42,8 +42,8 @@ %% records over the wire. %% tags: --define(stream, stream). --define(it, it). +-define(STREAM, 1). +-define(IT, 2). %% keys: -define(tag, 1). @@ -81,13 +81,13 @@ -type stream() :: #{ - ?tag := ?stream, + ?tag := ?STREAM, ?storage_key := emqx_ds_lts:msg_storage_key() }. 
-type iterator() :: #{ - ?tag := ?it, + ?tag := ?IT, ?topic_filter := emqx_ds:topic_filter(), ?start_time := emqx_ds:time(), ?storage_key := emqx_ds_lts:msg_storage_key(), @@ -194,7 +194,7 @@ store_batch(_ShardId, S = #s{db = DB, data = Data}, Messages, _Options) -> ) -> [stream()]. get_streams(_Shard, #s{trie = Trie}, TopicFilter, _StartTime) -> Indexes = emqx_ds_lts:match_topics(Trie, TopicFilter), - [#{?tag => ?stream, ?storage_key => I} || I <- Indexes]. + [#{?tag => ?STREAM, ?storage_key => I} || I <- Indexes]. -spec make_iterator( emqx_ds_storage_layer:shard_id(), @@ -204,13 +204,13 @@ get_streams(_Shard, #s{trie = Trie}, TopicFilter, _StartTime) -> emqx_ds:time() ) -> {ok, iterator()}. make_iterator( - _Shard, _Data, #{?tag := ?stream, ?storage_key := StorageKey}, TopicFilter, StartTime + _Shard, _Data, #{?tag := ?STREAM, ?storage_key := StorageKey}, TopicFilter, StartTime ) -> %% Note: it's a good idea to keep the iterator structure lean, %% since it can be stored on a remote node that could update its %% code independently from us. {ok, #{ - ?tag => ?it, + ?tag => ?IT, ?topic_filter => TopicFilter, ?start_time => StartTime, ?storage_key => StorageKey, @@ -225,7 +225,7 @@ next(_Shard, Schema = #s{ts_offset = TSOffset}, It, BatchSize) -> SafeCutoffTime = (Now bsr TSOffset) bsl TSOffset, next_until(Schema, It, SafeCutoffTime, BatchSize). -next_until(_Schema, It = #{?tag := ?it, ?start_time := StartTime}, SafeCutoffTime, _BatchSize) when +next_until(_Schema, It = #{?tag := ?IT, ?start_time := StartTime}, SafeCutoffTime, _BatchSize) when StartTime >= SafeCutoffTime -> %% We're in the middle of the current epoch, so we can't yet iterate over it. 
@@ -235,7 +235,7 @@ next_until(_Schema, It = #{?tag := ?it, ?start_time := StartTime}, SafeCutoffTim {ok, It, []}; next_until(#s{db = DB, data = CF, keymappers = Keymappers}, It, SafeCutoffTime, BatchSize) -> #{ - ?tag := ?it, + ?tag := ?IT, ?start_time := StartTime, ?storage_key := {TopicIndex, Varying} } = It, @@ -286,7 +286,7 @@ next_loop(_ITHandle, _KeyMapper, _Filter, _Cutoff, It, Acc, 0) -> {ok, It, lists:reverse(Acc)}; next_loop(ITHandle, KeyMapper, Filter, Cutoff, It0, Acc0, N0) -> inc_counter(), - #{?tag := ?it, ?last_seen_key := Key0} = It0, + #{?tag := ?IT, ?last_seen_key := Key0} = It0, case emqx_ds_bitmask_keymapper:bin_increment(Filter, Key0) of overflow -> {ok, It0, lists:reverse(Acc0)}; @@ -346,7 +346,7 @@ check_message( overflow; check_message( _Cutoff, - #{?tag := ?it, ?start_time := StartTime, ?topic_filter := TopicFilter}, + #{?tag := ?IT, ?start_time := StartTime, ?topic_filter := TopicFilter}, #message{timestamp = Timestamp, topic = Topic} ) when Timestamp >= StartTime -> emqx_topic:match(emqx_topic:words(Topic), TopicFilter); diff --git a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl index 8c2e55510..0fe719dbc 100644 --- a/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl +++ b/apps/emqx_durable_storage/src/emqx_ds_storage_layer.erl @@ -38,8 +38,8 @@ %% records over the wire. %% tags: --define(stream, stream). --define(it, it). +-define(STREAM, 1). +-define(IT, 2). %% keys: -define(tag, 1). @@ -59,7 +59,7 @@ %% Note: this might be stored permanently on a remote node. -opaque stream() :: #{ - ?tag := ?stream, + ?tag := ?STREAM, ?generation := gen_id(), ?enc := term() }. @@ -67,7 +67,7 @@ %% Note: this might be stored permanently on a remote node. -opaque iterator() :: #{ - ?tag := ?it, + ?tag := ?IT, ?generation := gen_id(), ?enc := term() }. 
@@ -165,7 +165,7 @@ get_streams(Shard, TopicFilter, StartTime) -> Streams = Mod:get_streams(Shard, GenData, TopicFilter, StartTime), [ {GenId, #{ - ?tag => ?stream, + ?tag => ?STREAM, ?generation => GenId, ?enc => Stream }} @@ -178,13 +178,13 @@ get_streams(Shard, TopicFilter, StartTime) -> -spec make_iterator(shard_id(), stream(), emqx_ds:topic_filter(), emqx_ds:time()) -> emqx_ds:make_iterator_result(iterator()). make_iterator( - Shard, #{?tag := ?stream, ?generation := GenId, ?enc := Stream}, TopicFilter, StartTime + Shard, #{?tag := ?STREAM, ?generation := GenId, ?enc := Stream}, TopicFilter, StartTime ) -> #{module := Mod, data := GenData} = generation_get(Shard, GenId), case Mod:make_iterator(Shard, GenData, Stream, TopicFilter, StartTime) of {ok, Iter} -> {ok, #{ - ?tag => ?it, + ?tag => ?IT, ?generation => GenId, ?enc => Iter }}; @@ -194,7 +194,7 @@ make_iterator( -spec next(shard_id(), iterator(), pos_integer()) -> emqx_ds:next_result(iterator()). -next(Shard, Iter = #{?tag := ?it, ?generation := GenId, ?enc := GenIter0}, BatchSize) -> +next(Shard, Iter = #{?tag := ?IT, ?generation := GenId, ?enc := GenIter0}, BatchSize) -> #{module := Mod, data := GenData} = generation_get(Shard, GenId), Current = generation_current(Shard), case Mod:next(Shard, GenData, GenIter0, BatchSize) of From ff5916e33fc5b15ade26e71299da21745cb2cae2 Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Fri, 10 Nov 2023 22:19:37 +0100 Subject: [PATCH 143/155] docs: fix bad html tag --- apps/emqx_dashboard/src/emqx_dashboard_swagger.erl | 11 ----------- rel/i18n/emqx_bridge_api.hocon | 2 +- rel/i18n/emqx_bridge_cassandra.hocon | 2 +- rel/i18n/emqx_bridge_clickhouse.hocon | 2 +- rel/i18n/emqx_bridge_dynamo.hocon | 2 +- rel/i18n/emqx_bridge_gcp_pubsub.hocon | 2 +- rel/i18n/emqx_bridge_greptimedb.hocon | 6 +++--- rel/i18n/emqx_bridge_greptimedb_connector.hocon | 4 ++-- rel/i18n/emqx_bridge_hstreamdb.hocon | 2 +- rel/i18n/emqx_bridge_influxdb.hocon | 6 +++--- 
rel/i18n/emqx_bridge_influxdb_connector.hocon | 4 ++-- rel/i18n/emqx_bridge_kinesis.hocon | 2 +- rel/i18n/emqx_bridge_mysql.hocon | 2 +- rel/i18n/emqx_bridge_oracle.hocon | 2 +- rel/i18n/emqx_bridge_pgsql.hocon | 2 +- rel/i18n/emqx_bridge_redis.hocon | 2 +- rel/i18n/emqx_bridge_rocketmq.hocon | 2 +- rel/i18n/emqx_bridge_sqlserver.hocon | 2 +- rel/i18n/emqx_bridge_tdengine.hocon | 2 +- rel/i18n/emqx_schema.hocon | 8 ++++---- 20 files changed, 28 insertions(+), 39 deletions(-) diff --git a/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl b/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl index 4a316e248..091e85da0 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl +++ b/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl @@ -851,17 +851,8 @@ typename_to_spec("file()", _Mod) -> #{type => string, example => <<"/path/to/file">>}; typename_to_spec("ip_port()", _Mod) -> #{type => string, example => <<"127.0.0.1:80">>}; -typename_to_spec("write_syntax()", _Mod) -> - #{ - type => string, - example => - <<"${topic},clientid=${clientid}", " ", "payload=${payload},", - "${clientid}_int_value=${payload.int_key}i,", "bool=${payload.bool}">> - }; typename_to_spec("url()", _Mod) -> #{type => string, example => <<"http://127.0.0.1">>}; -typename_to_spec("connect_timeout()", Mod) -> - typename_to_spec("timeout()", Mod); typename_to_spec("timeout()", _Mod) -> #{ <<"oneOf">> => [ @@ -916,8 +907,6 @@ typename_to_spec("json_binary()", _Mod) -> #{type => string, example => <<"{\"a\": [1,true]}">>}; typename_to_spec("port_number()", _Mod) -> range("1..65535"); -typename_to_spec("secret_access_key()", _Mod) -> - #{type => string, example => <<"TW8dPwmjpjJJuLW....">>}; typename_to_spec(Name, Mod) -> try_convert_to_spec(Name, Mod, [ fun try_remote_module_type/2, diff --git a/rel/i18n/emqx_bridge_api.hocon b/rel/i18n/emqx_bridge_api.hocon index 8b7950cdc..3567f03cc 100644 --- a/rel/i18n/emqx_bridge_api.hocon +++ b/rel/i18n/emqx_bridge_api.hocon @@ -49,7 +49,7 @@ 
desc_api8.label: """Node Bridge Operate""" desc_api9.desc: -"""Test creating a new bridge by given ID
+"""Test creating a new bridge by given ID
The ID must be of format '{type}:{name}'""" desc_api9.label: diff --git a/rel/i18n/emqx_bridge_cassandra.hocon b/rel/i18n/emqx_bridge_cassandra.hocon index d598d3921..a96315340 100644 --- a/rel/i18n/emqx_bridge_cassandra.hocon +++ b/rel/i18n/emqx_bridge_cassandra.hocon @@ -32,7 +32,7 @@ desc_type.label: local_topic.desc: """The MQTT topic filter to be forwarded to Cassandra. All MQTT 'PUBLISH' messages with the topic -matching the local_topic will be forwarded.
+matching the local_topic will be forwarded.
NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and also local_topic is configured, then both the data got from the rule and the MQTT messages that match local_topic will be forwarded.""" diff --git a/rel/i18n/emqx_bridge_clickhouse.hocon b/rel/i18n/emqx_bridge_clickhouse.hocon index 726d1eb7c..7d1961f98 100644 --- a/rel/i18n/emqx_bridge_clickhouse.hocon +++ b/rel/i18n/emqx_bridge_clickhouse.hocon @@ -32,7 +32,7 @@ desc_type.label: local_topic.desc: """The MQTT topic filter to be forwarded to Clickhouse. All MQTT 'PUBLISH' messages with the topic -matching the local_topic will be forwarded.
+matching the local_topic will be forwarded.
NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and also local_topic is configured, then both the data got from the rule and the MQTT messages that match local_topic will be forwarded.""" diff --git a/rel/i18n/emqx_bridge_dynamo.hocon b/rel/i18n/emqx_bridge_dynamo.hocon index 417b43c0c..a014aae9f 100644 --- a/rel/i18n/emqx_bridge_dynamo.hocon +++ b/rel/i18n/emqx_bridge_dynamo.hocon @@ -26,7 +26,7 @@ desc_type.label: local_topic.desc: """The MQTT topic filter to be forwarded to DynamoDB. All MQTT `PUBLISH` messages with the topic -matching the `local_topic` will be forwarded.
+matching the `local_topic` will be forwarded.
NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and also `local_topic` is configured, then both the data got from the rule and the MQTT messages that match `local_topic` will be forwarded.""" diff --git a/rel/i18n/emqx_bridge_gcp_pubsub.hocon b/rel/i18n/emqx_bridge_gcp_pubsub.hocon index b5dffec1f..68a6f8578 100644 --- a/rel/i18n/emqx_bridge_gcp_pubsub.hocon +++ b/rel/i18n/emqx_bridge_gcp_pubsub.hocon @@ -26,7 +26,7 @@ desc_type.label: local_topic.desc: """The MQTT topic filter to be forwarded to GCP PubSub. All MQTT 'PUBLISH' messages with the topic -matching `local_topic` will be forwarded.
+matching `local_topic` will be forwarded.
NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and also local_topic is configured, then both the data got from the rule and the MQTT messages that match local_topic will be forwarded.""" diff --git a/rel/i18n/emqx_bridge_greptimedb.hocon b/rel/i18n/emqx_bridge_greptimedb.hocon index 93d783332..977e6e064 100644 --- a/rel/i18n/emqx_bridge_greptimedb.hocon +++ b/rel/i18n/emqx_bridge_greptimedb.hocon @@ -26,7 +26,7 @@ desc_type.label: local_topic.desc: """The MQTT topic filter to be forwarded to the GreptimeDB. All MQTT 'PUBLISH' messages with the topic -matching the local_topic will be forwarded.
+matching the local_topic will be forwarded.
NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and also local_topic is configured, then both the data got from the rule and the MQTT messages that match local_topic will be forwarded.""" @@ -37,8 +37,8 @@ local_topic.label: write_syntax.desc: """Conf of GreptimeDB gRPC protocol to write data points. Write syntax is a text-based format that provides the measurement, tag set, field set, and timestamp of a data point, and placeholder supported, which is the same as InfluxDB line protocol. See also [InfluxDB 2.3 Line Protocol](https://docs.influxdata.com/influxdb/v2.3/reference/syntax/line-protocol/) and -[GreptimeDB 1.8 Line Protocol](https://docs.influxdata.com/influxdb/v1.8/write_protocols/line_protocol_tutorial/)
-TLDR:
+[GreptimeDB 1.8 Line Protocol](https://docs.influxdata.com/influxdb/v1.8/write_protocols/line_protocol_tutorial/)
+TLDR:
``` [,=[,=]] =[,=] [] ``` diff --git a/rel/i18n/emqx_bridge_greptimedb_connector.hocon b/rel/i18n/emqx_bridge_greptimedb_connector.hocon index 9cb10951f..0a509ebfc 100644 --- a/rel/i18n/emqx_bridge_greptimedb_connector.hocon +++ b/rel/i18n/emqx_bridge_greptimedb_connector.hocon @@ -31,8 +31,8 @@ protocol.label: """Protocol""" server.desc: -"""The IPv4 or IPv6 address or the hostname to connect to.
-A host entry has the following form: `Host[:Port]`.
+"""The IPv4 or IPv6 address or the hostname to connect to.
+A host entry has the following form: `Host[:Port]`.
The GreptimeDB default port 8086 is used if `[:Port]` is not specified.""" server.label: diff --git a/rel/i18n/emqx_bridge_hstreamdb.hocon b/rel/i18n/emqx_bridge_hstreamdb.hocon index 809c60588..de9989953 100644 --- a/rel/i18n/emqx_bridge_hstreamdb.hocon +++ b/rel/i18n/emqx_bridge_hstreamdb.hocon @@ -32,7 +32,7 @@ desc_type.label: local_topic.desc: """The MQTT topic filter to be forwarded to the HStreamDB. All MQTT 'PUBLISH' messages with the topic -matching the local_topic will be forwarded.
+matching the local_topic will be forwarded.
NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and also local_topic is configured, then both the data got from the rule and the MQTT messages that match local_topic will be forwarded.""" diff --git a/rel/i18n/emqx_bridge_influxdb.hocon b/rel/i18n/emqx_bridge_influxdb.hocon index 4299f41ab..48454bbd3 100644 --- a/rel/i18n/emqx_bridge_influxdb.hocon +++ b/rel/i18n/emqx_bridge_influxdb.hocon @@ -26,7 +26,7 @@ desc_type.label: local_topic.desc: """The MQTT topic filter to be forwarded to the InfluxDB. All MQTT 'PUBLISH' messages with the topic -matching the local_topic will be forwarded.
+matching the local_topic will be forwarded.
NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and also local_topic is configured, then both the data got from the rule and the MQTT messages that match local_topic will be forwarded.""" @@ -37,8 +37,8 @@ local_topic.label: write_syntax.desc: """Conf of InfluxDB line protocol to write data points. It is a text-based format that provides the measurement, tag set, field set, and timestamp of a data point, and placeholder supported. See also [InfluxDB 2.3 Line Protocol](https://docs.influxdata.com/influxdb/v2.3/reference/syntax/line-protocol/) and -[InfluxDB 1.8 Line Protocol](https://docs.influxdata.com/influxdb/v1.8/write_protocols/line_protocol_tutorial/)
-TLDR:
+[InfluxDB 1.8 Line Protocol](https://docs.influxdata.com/influxdb/v1.8/write_protocols/line_protocol_tutorial/)
+TLDR:
``` [,=[,=]] =[,=] [] ``` diff --git a/rel/i18n/emqx_bridge_influxdb_connector.hocon b/rel/i18n/emqx_bridge_influxdb_connector.hocon index 4169ce065..ce79c2a93 100644 --- a/rel/i18n/emqx_bridge_influxdb_connector.hocon +++ b/rel/i18n/emqx_bridge_influxdb_connector.hocon @@ -49,8 +49,8 @@ protocol.label: """Protocol""" server.desc: -"""The IPv4 or IPv6 address or the hostname to connect to.
-A host entry has the following form: `Host[:Port]`.
+"""The IPv4 or IPv6 address or the hostname to connect to.
+A host entry has the following form: `Host[:Port]`.
The InfluxDB default port 8086 is used if `[:Port]` is not specified.""" server.label: diff --git a/rel/i18n/emqx_bridge_kinesis.hocon b/rel/i18n/emqx_bridge_kinesis.hocon index 42329bcd6..188ab82f3 100644 --- a/rel/i18n/emqx_bridge_kinesis.hocon +++ b/rel/i18n/emqx_bridge_kinesis.hocon @@ -32,7 +32,7 @@ pool_size.label: local_topic.desc: """The MQTT topic filter to be forwarded to Amazon Kinesis. All MQTT `PUBLISH` messages with the topic -matching the `local_topic` will be forwarded.
+matching the `local_topic` will be forwarded.
NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and also `local_topic` is configured, then both the data got from the rule and the MQTT messages that match `local_topic` will be forwarded.""" diff --git a/rel/i18n/emqx_bridge_mysql.hocon b/rel/i18n/emqx_bridge_mysql.hocon index 10a02589c..37326be81 100644 --- a/rel/i18n/emqx_bridge_mysql.hocon +++ b/rel/i18n/emqx_bridge_mysql.hocon @@ -26,7 +26,7 @@ desc_type.label: local_topic.desc: """The MQTT topic filter to be forwarded to MySQL. All MQTT 'PUBLISH' messages with the topic -matching the local_topic will be forwarded.
+matching the local_topic will be forwarded.
NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and also local_topic is configured, then both the data got from the rule and the MQTT messages that match local_topic will be forwarded.""" diff --git a/rel/i18n/emqx_bridge_oracle.hocon b/rel/i18n/emqx_bridge_oracle.hocon index c0c8142e6..bcf41ea2c 100644 --- a/rel/i18n/emqx_bridge_oracle.hocon +++ b/rel/i18n/emqx_bridge_oracle.hocon @@ -2,7 +2,7 @@ emqx_bridge_oracle { local_topic { desc = "The MQTT topic filter to be forwarded to Oracle Database. All MQTT 'PUBLISH' messages with the topic" - " matching the local_topic will be forwarded.
" + " matching the local_topic will be forwarded.
" "NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and also local_topic is" " configured, then both the data got from the rule and the MQTT messages that match local_topic" " will be forwarded." diff --git a/rel/i18n/emqx_bridge_pgsql.hocon b/rel/i18n/emqx_bridge_pgsql.hocon index 5295abb35..0a5ca2b04 100644 --- a/rel/i18n/emqx_bridge_pgsql.hocon +++ b/rel/i18n/emqx_bridge_pgsql.hocon @@ -26,7 +26,7 @@ desc_type.label: local_topic.desc: """The MQTT topic filter to be forwarded to PostgreSQL. All MQTT 'PUBLISH' messages with the topic -matching the local_topic will be forwarded.
+matching the local_topic will be forwarded.
NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and also local_topic is configured, then both the data got from the rule and the MQTT messages that match local_topic will be forwarded.""" diff --git a/rel/i18n/emqx_bridge_redis.hocon b/rel/i18n/emqx_bridge_redis.hocon index 8e8c18de0..05c8d95a6 100644 --- a/rel/i18n/emqx_bridge_redis.hocon +++ b/rel/i18n/emqx_bridge_redis.hocon @@ -34,7 +34,7 @@ desc_type.label: local_topic.desc: """The MQTT topic filter to be forwarded to Redis. All MQTT 'PUBLISH' messages with the topic -matching the local_topic will be forwarded.
+matching the local_topic will be forwarded.
NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and also local_topic is configured, then both the data got from the rule and the MQTT messages that match local_topic will be forwarded.""" diff --git a/rel/i18n/emqx_bridge_rocketmq.hocon b/rel/i18n/emqx_bridge_rocketmq.hocon index ac5deb757..a2449c1a9 100644 --- a/rel/i18n/emqx_bridge_rocketmq.hocon +++ b/rel/i18n/emqx_bridge_rocketmq.hocon @@ -26,7 +26,7 @@ desc_type.label: local_topic.desc: """The MQTT topic filter to be forwarded to RocketMQ. All MQTT `PUBLISH` messages with the topic -matching the `local_topic` will be forwarded.
+matching the `local_topic` will be forwarded.
NOTE: if the bridge is used as a rule action, `local_topic` should be left empty otherwise the messages will be duplicated.""" local_topic.label: diff --git a/rel/i18n/emqx_bridge_sqlserver.hocon b/rel/i18n/emqx_bridge_sqlserver.hocon index 0e0801f42..24e4615f3 100644 --- a/rel/i18n/emqx_bridge_sqlserver.hocon +++ b/rel/i18n/emqx_bridge_sqlserver.hocon @@ -32,7 +32,7 @@ driver.label: local_topic.desc: """The MQTT topic filter to be forwarded to Microsoft SQL Server. All MQTT 'PUBLISH' messages with the topic -matching the local_topic will be forwarded.
+matching the local_topic will be forwarded.
NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and also local_topic is configured, then both the data got from the rule and the MQTT messages that match local_topic will be forwarded.""" diff --git a/rel/i18n/emqx_bridge_tdengine.hocon b/rel/i18n/emqx_bridge_tdengine.hocon index 2d1059d28..ec6c10779 100644 --- a/rel/i18n/emqx_bridge_tdengine.hocon +++ b/rel/i18n/emqx_bridge_tdengine.hocon @@ -26,7 +26,7 @@ desc_type.label: local_topic.desc: """The MQTT topic filter to be forwarded to TDengine. All MQTT 'PUBLISH' messages with the topic -matching the local_topic will be forwarded.
+matching the local_topic will be forwarded.
NOTE: if this bridge is used as the action of a rule (EMQX rule engine), and also local_topic is configured, then both the data got from the rule and the MQTT messages that match local_topic will be forwarded.""" diff --git a/rel/i18n/emqx_schema.hocon b/rel/i18n/emqx_schema.hocon index e1d086197..3eb816f3b 100644 --- a/rel/i18n/emqx_schema.hocon +++ b/rel/i18n/emqx_schema.hocon @@ -92,7 +92,7 @@ mqtt_max_topic_alias.label: """Max Topic Alias""" common_ssl_opts_schema_user_lookup_fun.desc: -"""EMQX-internal callback that is used to lookup pre-shared key (PSK) identity.
+"""EMQX-internal callback that is used to lookup pre-shared key (PSK) identity.
Has no effect when TLS version is configured (or negotiated) to 1.3""" common_ssl_opts_schema_user_lookup_fun.label: @@ -1207,7 +1207,7 @@ The SSL application already takes measures to counter-act such attempts, but client-initiated renegotiation can be strictly disabled by setting this option to false. The default value is true. Note that disabling renegotiation can result in long-lived connections becoming unusable due to limits on -the number of messages the underlying cipher suite can encipher.
+the number of messages the underlying cipher suite can encipher.
Has no effect when TLS version is configured (or negotiated) to 1.3""" server_ssl_opts_schema_client_renegotiation.label: @@ -1294,7 +1294,7 @@ common_ssl_opts_schema_secure_renegotiate.desc: """SSL parameter renegotiation is a feature that allows a client and a server to renegotiate the parameters of the SSL connection on the fly. RFC 5746 defines a more secure way of doing this. By enabling secure renegotiation, -you drop support for the insecure renegotiation, prone to MitM attacks.
+you drop support for the insecure renegotiation, prone to MitM attacks.
Has no effect when TLS version is configured (or negotiated) to 1.3""" common_ssl_opts_schema_secure_renegotiate.label: @@ -1330,7 +1330,7 @@ mqtt_max_packet_size.label: """Max Packet Size""" common_ssl_opts_schema_reuse_sessions.desc: -"""Enable TLS session reuse.
+"""Enable TLS session reuse.
Has no effect when TLS version is configured (or negotiated) to 1.3""" common_ssl_opts_schema_reuse_sessions.label: From 8be718b22fed172bcab990126e858712e2892937 Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Sun, 12 Nov 2023 11:27:28 +0100 Subject: [PATCH 144/155] refactor(emqx_schema): delete unused type bar_separated_list --- apps/emqx/src/emqx_schema.erl | 7 ------- 1 file changed, 7 deletions(-) diff --git a/apps/emqx/src/emqx_schema.erl b/apps/emqx/src/emqx_schema.erl index 4ed8a3107..4f4c7e481 100644 --- a/apps/emqx/src/emqx_schema.erl +++ b/apps/emqx/src/emqx_schema.erl @@ -51,7 +51,6 @@ -type comma_separated_list() :: list(). -type comma_separated_binary() :: [binary()]. -type comma_separated_atoms() :: [atom()]. --type bar_separated_list() :: list(). -type ip_port() :: tuple() | integer(). -type cipher() :: map(). -type port_number() :: 1..65535. @@ -75,7 +74,6 @@ -typerefl_from_string({percent/0, emqx_schema, to_percent}). -typerefl_from_string({comma_separated_list/0, emqx_schema, to_comma_separated_list}). -typerefl_from_string({comma_separated_binary/0, emqx_schema, to_comma_separated_binary}). --typerefl_from_string({bar_separated_list/0, emqx_schema, to_bar_separated_list}). -typerefl_from_string({ip_port/0, emqx_schema, to_ip_port}). -typerefl_from_string({cipher/0, emqx_schema, to_erl_cipher_suite}). -typerefl_from_string({comma_separated_atoms/0, emqx_schema, to_comma_separated_atoms}). @@ -118,7 +116,6 @@ to_percent/1, to_comma_separated_list/1, to_comma_separated_binary/1, - to_bar_separated_list/1, to_ip_port/1, to_erl_cipher_suite/1, to_comma_separated_atoms/1, @@ -157,7 +154,6 @@ file/0, comma_separated_list/0, comma_separated_binary/0, - bar_separated_list/0, ip_port/0, cipher/0, comma_separated_atoms/0, @@ -2564,9 +2560,6 @@ to_json_binary(Str) -> Error end. -to_bar_separated_list(Str) -> - {ok, string:tokens(Str, "| ")}. 
- %% @doc support the following format: %% - 127.0.0.1:1883 %% - ::1:1883 From 06e440260f6251cef526a7f09f5630900e3ad190 Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Fri, 10 Nov 2023 21:55:20 +0100 Subject: [PATCH 145/155] refactor(schema): add args to map alias --- apps/emqx_auth_http/src/emqx_authz_http_schema.erl | 4 ++-- apps/emqx_dashboard/src/emqx_dashboard_swagger.erl | 3 ++- apps/emqx_prometheus/src/emqx_prometheus_schema.erl | 2 +- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/apps/emqx_auth_http/src/emqx_authz_http_schema.erl b/apps/emqx_auth_http/src/emqx_authz_http_schema.erl index 21f70de64..90a7439a2 100644 --- a/apps/emqx_auth_http/src/emqx_authz_http_schema.erl +++ b/apps/emqx_auth_http/src/emqx_authz_http_schema.erl @@ -108,7 +108,7 @@ http_common_fields() -> ). headers(type) -> - typerefl:alias("map", list({binary(), binary()})); + typerefl:alias("map", list({binary(), binary()}), #{}, [binary(), binary()]); headers(desc) -> ?DESC(?FUNCTION_NAME); headers(converter) -> @@ -121,7 +121,7 @@ headers(_) -> undefined. 
headers_no_content_type(type) -> - typerefl:alias("map", list({binary(), binary()})); + typerefl:alias("map", list({binary(), binary()}), #{}, [binary(), binary()]); headers_no_content_type(desc) -> ?DESC(?FUNCTION_NAME); headers_no_content_type(converter) -> diff --git a/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl b/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl index 091e85da0..36dfb92be 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl +++ b/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl @@ -865,7 +865,8 @@ typename_to_spec("bytesize()", _Mod) -> #{type => string, example => <<"32MB">>}; typename_to_spec("wordsize()", _Mod) -> #{type => string, example => <<"1024KB">>}; -typename_to_spec("map()", _Mod) -> +typename_to_spec("map(" ++ Map, _Mod) -> + [$) | _MapArgs] = lists:reverse(Map), #{type => object, example => #{}}; typename_to_spec("service_account_json()", _Mod) -> #{type => object, example => #{}}; diff --git a/apps/emqx_prometheus/src/emqx_prometheus_schema.erl b/apps/emqx_prometheus/src/emqx_prometheus_schema.erl index 91d176142..3aaf4292f 100644 --- a/apps/emqx_prometheus/src/emqx_prometheus_schema.erl +++ b/apps/emqx_prometheus/src/emqx_prometheus_schema.erl @@ -57,7 +57,7 @@ fields("prometheus") -> )}, {headers, ?HOCON( - typerefl:alias("map", list({string(), string()})), + typerefl:alias("map", list({string(), string()}), #{}, [string(), string()]), #{ default => #{}, required => false, From 021f7e6b49e31b5e77b0c55d792e0671ea31550b Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Sun, 12 Nov 2023 11:32:36 +0100 Subject: [PATCH 146/155] refactor(schema): comma_separated_list is list(string()) not list(any()) --- apps/emqx/src/emqx_schema.erl | 2 +- apps/emqx_gateway/src/emqx_gateway_schema.erl | 3 --- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/apps/emqx/src/emqx_schema.erl b/apps/emqx/src/emqx_schema.erl index 4f4c7e481..1a617c6c8 100644 --- a/apps/emqx/src/emqx_schema.erl +++ 
b/apps/emqx/src/emqx_schema.erl @@ -48,7 +48,7 @@ -type wordsize() :: bytesize(). -type percent() :: float(). -type file() :: string(). --type comma_separated_list() :: list(). +-type comma_separated_list() :: list(string()). -type comma_separated_binary() :: [binary()]. -type comma_separated_atoms() :: [atom()]. -type ip_port() :: tuple() | integer(). diff --git a/apps/emqx_gateway/src/emqx_gateway_schema.erl b/apps/emqx_gateway/src/emqx_gateway_schema.erl index e58e552e2..ed149d1f5 100644 --- a/apps/emqx_gateway/src/emqx_gateway_schema.erl +++ b/apps/emqx_gateway/src/emqx_gateway_schema.erl @@ -32,19 +32,16 @@ -type duration() :: non_neg_integer(). -type duration_s() :: non_neg_integer(). -type bytesize() :: pos_integer(). --type comma_separated_list() :: list(). -typerefl_from_string({ip_port/0, emqx_schema, to_ip_port}). -typerefl_from_string({duration/0, emqx_schema, to_duration}). -typerefl_from_string({duration_s/0, emqx_schema, to_duration_s}). -typerefl_from_string({bytesize/0, emqx_schema, to_bytesize}). --typerefl_from_string({comma_separated_list/0, emqx_schema, to_comma_separated_list}). -reflect_type([ duration/0, duration_s/0, bytesize/0, - comma_separated_list/0, ip_port/0 ]). -elvis([{elvis_style, dont_repeat_yourself, disable}]). 
From a32cd20758573f46f52044f3bfdbedc112d0058e Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Sun, 12 Nov 2023 11:35:08 +0100 Subject: [PATCH 147/155] refactor(emqx_authn_jwt_schema): add alias for verify_claims verifiy_claims is a list after converted, but input is a map --- apps/emqx_auth_jwt/src/emqx_authn_jwt_schema.erl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/apps/emqx_auth_jwt/src/emqx_authn_jwt_schema.erl b/apps/emqx_auth_jwt/src/emqx_authn_jwt_schema.erl index 9118d3d1b..63da372ff 100644 --- a/apps/emqx_auth_jwt/src/emqx_authn_jwt_schema.erl +++ b/apps/emqx_auth_jwt/src/emqx_authn_jwt_schema.erl @@ -152,7 +152,8 @@ refresh_interval(validator) -> [fun(I) -> I > 0 end]; refresh_interval(_) -> undefined. verify_claims(type) -> - list(); + %% user input is a map, converted to a list of {binary(), binary()} + typerefl:alias("map", list()); verify_claims(desc) -> ?DESC(?FUNCTION_NAME); verify_claims(default) -> From b7d2c38213a86fa7800d528a8371f55ede515c0a Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Fri, 10 Nov 2023 22:06:11 +0100 Subject: [PATCH 148/155] refactor: delete stale type converters Some of the types are refactored to use typerefl alias or hoconsc:enum, no need to keep the special function clauses to translate them to swagger spec --- apps/emqx_conf/src/emqx_conf.erl | 24 ++----------------- .../src/emqx_dashboard_swagger.erl | 22 ----------------- 2 files changed, 2 insertions(+), 44 deletions(-) diff --git a/apps/emqx_conf/src/emqx_conf.erl b/apps/emqx_conf/src/emqx_conf.erl index 1fb3bab28..733cb437f 100644 --- a/apps/emqx_conf/src/emqx_conf.erl +++ b/apps/emqx_conf/src/emqx_conf.erl @@ -355,34 +355,14 @@ typename_to_spec("comma_separated_list()", _Mod) -> #{type => comma_separated_string}; typename_to_spec("comma_separated_atoms()", _Mod) -> #{type => comma_separated_string}; -typename_to_spec("pool_type()", _Mod) -> - #{type => enum, symbols => [random, hash]}; -typename_to_spec("log_level()", 
_Mod) -> - #{ - type => enum, - symbols => [ - debug, - info, - notice, - warning, - error, - critical, - alert, - emergency, - all - ] - }; -typename_to_spec("rate()", _Mod) -> - #{type => string}; typename_to_spec("capacity()", _Mod) -> #{type => string}; -typename_to_spec("burst_rate()", _Mod) -> - #{type => string}; typename_to_spec("failure_strategy()", _Mod) -> #{type => enum, symbols => [force, drop, throw]}; typename_to_spec("initial()", _Mod) -> #{type => string}; -typename_to_spec("map()", _Mod) -> +typename_to_spec("map(" ++ Map, _Mod) -> + [$) | _MapArgs] = lists:reverse(Map), #{type => object}; typename_to_spec("#{" ++ _, Mod) -> typename_to_spec("map()", Mod); diff --git a/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl b/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl index 36dfb92be..9a8b553d1 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl +++ b/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl @@ -799,8 +799,6 @@ hocon_schema_to_spec(?UNION(Types, _DisplayName), LocalModule) -> hocon_schema_to_spec(Atom, _LocalModule) when is_atom(Atom) -> {#{type => string, enum => [Atom]}, []}. 
-typename_to_spec("term()", _Mod) -> - #{type => string, example => <<"any">>}; typename_to_spec("boolean()", _Mod) -> #{type => boolean}; typename_to_spec("binary()", _Mod) -> @@ -884,26 +882,6 @@ typename_to_spec("comma_separated_binary()", _Mod) -> #{type => string, example => <<"item1,item2">>}; typename_to_spec("comma_separated_atoms()", _Mod) -> #{type => string, example => <<"item1,item2">>}; -typename_to_spec("pool_type()", _Mod) -> - #{type => string, enum => [random, hash]}; -typename_to_spec("log_level()", _Mod) -> - #{ - type => string, - enum => [debug, info, notice, warning, error, critical, alert, emergency, all] - }; -typename_to_spec("rate()", _Mod) -> - #{type => string, example => <<"10MB">>}; -typename_to_spec("burst()", _Mod) -> - #{type => string, example => <<"100MB">>}; -typename_to_spec("burst_rate()", _Mod) -> - %% 0/0s = no burst - #{type => string, example => <<"10MB">>}; -typename_to_spec("failure_strategy()", _Mod) -> - #{type => string, example => <<"force">>}; -typename_to_spec("initial()", _Mod) -> - #{type => string, example => <<"0MB">>}; -typename_to_spec("bucket_name()", _Mod) -> - #{type => string, example => <<"retainer">>}; typename_to_spec("json_binary()", _Mod) -> #{type => string, example => <<"{\"a\": [1,true]}">>}; typename_to_spec("port_number()", _Mod) -> From 2fda91ec59fd12bbb5924a35d680d7a5a0e6e97f Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Fri, 10 Nov 2023 22:06:11 +0100 Subject: [PATCH 149/155] refactor: delete default type converter for dashboard schema json prior to this commit, dashboard scheam json type conversion has a 'fallback' type for all unknown types, which is 'string' this commit removes this fallback and add all possible types to the function clauses. 
also, some of the old types which are no longer in use are deleted from both swagger spec converter and dashboard sechema converter --- apps/emqx_conf/src/emqx_conf.erl | 24 +++++++------------ .../src/emqx_dashboard_swagger.erl | 18 -------------- 2 files changed, 9 insertions(+), 33 deletions(-) diff --git a/apps/emqx_conf/src/emqx_conf.erl b/apps/emqx_conf/src/emqx_conf.erl index 733cb437f..6f50cf831 100644 --- a/apps/emqx_conf/src/emqx_conf.erl +++ b/apps/emqx_conf/src/emqx_conf.erl @@ -305,10 +305,6 @@ hocon_schema_to_spec(?UNION(Types, _DisplayName), LocalModule) -> hocon_schema_to_spec(Atom, _LocalModule) when is_atom(Atom) -> {#{type => enum, symbols => [Atom]}, []}. -typename_to_spec("user_id_type()", _Mod) -> - #{type => enum, symbols => [clientid, username]}; -typename_to_spec("term()", _Mod) -> - #{type => string}; typename_to_spec("boolean()", _Mod) -> #{type => boolean}; typename_to_spec("binary()", _Mod) -> @@ -317,6 +313,8 @@ typename_to_spec("float()", _Mod) -> #{type => number}; typename_to_spec("integer()", _Mod) -> #{type => number}; +typename_to_spec("pos_integer()", _Mod) -> + #{type => integer}; typename_to_spec("non_neg_integer()", _Mod) -> #{type => number, minimum => 0}; typename_to_spec("number()", _Mod) -> @@ -355,26 +353,22 @@ typename_to_spec("comma_separated_list()", _Mod) -> #{type => comma_separated_string}; typename_to_spec("comma_separated_atoms()", _Mod) -> #{type => comma_separated_string}; -typename_to_spec("capacity()", _Mod) -> - #{type => string}; -typename_to_spec("failure_strategy()", _Mod) -> - #{type => enum, symbols => [force, drop, throw]}; -typename_to_spec("initial()", _Mod) -> - #{type => string}; typename_to_spec("map(" ++ Map, _Mod) -> [$) | _MapArgs] = lists:reverse(Map), #{type => object}; -typename_to_spec("#{" ++ _, Mod) -> - typename_to_spec("map()", Mod); +typename_to_spec("port_number()", _Mod) -> + #{type => integer}; typename_to_spec(Name, Mod) -> Spec = range(Name), Spec1 = remote_module_type(Spec, Name, 
Mod), Spec2 = typerefl_array(Spec1, Name, Mod), Spec3 = integer(Spec2, Name), - default_type(Spec3). + default_type(Mod, Name, Spec3). -default_type(nomatch) -> #{type => string}; -default_type(Type) -> Type. +default_type(Mod, Name, nomatch) -> + error({unknown_type, Mod, Name}); +default_type(_Mod, _Name, Type) -> + Type. range(Name) -> case string:split(Name, "..") of diff --git a/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl b/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl index 9a8b553d1..ef10d33af 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl +++ b/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl @@ -845,20 +845,10 @@ typename_to_spec("timeout_duration_ms()", _Mod) -> #{type => string, example => <<"32s">>}; typename_to_spec("percent()", _Mod) -> #{type => number, example => <<"12%">>}; -typename_to_spec("file()", _Mod) -> - #{type => string, example => <<"/path/to/file">>}; typename_to_spec("ip_port()", _Mod) -> #{type => string, example => <<"127.0.0.1:80">>}; typename_to_spec("url()", _Mod) -> #{type => string, example => <<"http://127.0.0.1">>}; -typename_to_spec("timeout()", _Mod) -> - #{ - <<"oneOf">> => [ - #{type => string, example => infinity}, - #{type => integer} - ], - example => infinity - }; typename_to_spec("bytesize()", _Mod) -> #{type => string, example => <<"32MB">>}; typename_to_spec("wordsize()", _Mod) -> @@ -866,16 +856,8 @@ typename_to_spec("wordsize()", _Mod) -> typename_to_spec("map(" ++ Map, _Mod) -> [$) | _MapArgs] = lists:reverse(Map), #{type => object, example => #{}}; -typename_to_spec("service_account_json()", _Mod) -> - #{type => object, example => #{}}; -typename_to_spec("#{" ++ _, Mod) -> - typename_to_spec("map()", Mod); typename_to_spec("qos()", _Mod) -> #{type => integer, minimum => 0, maximum => 2, example => 0}; -typename_to_spec("{binary(), binary()}", _Mod) -> - #{type => object, example => #{}}; -typename_to_spec("{string(), string()}", _Mod) -> - #{type => object, example => #{}}; 
typename_to_spec("comma_separated_list()", _Mod) -> #{type => string, example => <<"item1,item2">>}; typename_to_spec("comma_separated_binary()", _Mod) -> From 65efa2672e97516069da276e85f604aef2ff8e18 Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Mon, 13 Nov 2023 08:59:22 +0100 Subject: [PATCH 150/155] test(schema): delete stale reference to emqx_schema:file() type --- apps/emqx/src/emqx_schema.erl | 2 -- apps/emqx_conf/src/emqx_conf.erl | 2 -- .../test/emqx_swagger_requestBody_SUITE.erl | 2 +- .../test/emqx_swagger_response_SUITE.erl | 10 +++++----- 4 files changed, 6 insertions(+), 10 deletions(-) diff --git a/apps/emqx/src/emqx_schema.erl b/apps/emqx/src/emqx_schema.erl index 1a617c6c8..3ad03c4d4 100644 --- a/apps/emqx/src/emqx_schema.erl +++ b/apps/emqx/src/emqx_schema.erl @@ -47,7 +47,6 @@ -type bytesize() :: integer(). -type wordsize() :: bytesize(). -type percent() :: float(). --type file() :: string(). -type comma_separated_list() :: list(string()). -type comma_separated_binary() :: [binary()]. -type comma_separated_atoms() :: [atom()]. 
@@ -151,7 +150,6 @@ bytesize/0, wordsize/0, percent/0, - file/0, comma_separated_list/0, comma_separated_binary/0, ip_port/0, diff --git a/apps/emqx_conf/src/emqx_conf.erl b/apps/emqx_conf/src/emqx_conf.erl index 6f50cf831..8b51c2161 100644 --- a/apps/emqx_conf/src/emqx_conf.erl +++ b/apps/emqx_conf/src/emqx_conf.erl @@ -337,8 +337,6 @@ typename_to_spec("timeout_duration_ms()", _Mod) -> #{type => duration}; typename_to_spec("percent()", _Mod) -> #{type => percent}; -typename_to_spec("file()", _Mod) -> - #{type => string}; typename_to_spec("ip_port()", _Mod) -> #{type => ip_port}; typename_to_spec("url()", _Mod) -> diff --git a/apps/emqx_dashboard/test/emqx_swagger_requestBody_SUITE.erl b/apps/emqx_dashboard/test/emqx_swagger_requestBody_SUITE.erl index 2457cd56a..b5c55622b 100644 --- a/apps/emqx_dashboard/test/emqx_swagger_requestBody_SUITE.erl +++ b/apps/emqx_dashboard/test/emqx_swagger_requestBody_SUITE.erl @@ -816,7 +816,7 @@ to_schema(Body) -> fields(good_ref) -> [ {'webhook-host', mk(emqx_schema:ip_port(), #{default => <<"127.0.0.1:80">>})}, - {log_dir, mk(emqx_schema:file(), #{example => "var/log/emqx"})}, + {log_dir, mk(string(), #{example => "var/log/emqx"})}, {tag, mk(binary(), #{desc => <<"tag">>})} ]; fields(nest_ref) -> diff --git a/apps/emqx_dashboard/test/emqx_swagger_response_SUITE.erl b/apps/emqx_dashboard/test/emqx_swagger_response_SUITE.erl index 376ace5c2..745db76f0 100644 --- a/apps/emqx_dashboard/test/emqx_swagger_response_SUITE.erl +++ b/apps/emqx_dashboard/test/emqx_swagger_response_SUITE.erl @@ -344,10 +344,9 @@ t_complex_type(_Config) -> enum := [random, hash], type := string }}, {<<"timeout">>, #{ - example := infinity, <<"oneOf">> := [ - #{example := infinity, type := string}, - #{type := integer} + #{example := _, type := string}, + #{enum := [infinity], type := string} ] }}, {<<"bytesize">>, #{ @@ -653,7 +652,8 @@ schema("/ref/complex_type") -> {server, hoconsc:mk(emqx_schema:ip_port(), #{})}, {connect_timeout, 
hoconsc:mk(emqx_schema:timeout_duration(), #{})}, {pool_type, hoconsc:mk(hoconsc:enum([random, hash]), #{})}, - {timeout, hoconsc:mk(timeout(), #{})}, + {timeout, + hoconsc:mk(hoconsc:union([infinity, emqx_schema:timeout_duration()]), #{})}, {bytesize, hoconsc:mk(emqx_schema:bytesize(), #{})}, {wordsize, hoconsc:mk(emqx_schema:wordsize(), #{})}, {maps, hoconsc:mk(map(), #{})}, @@ -687,7 +687,7 @@ to_schema(Object) -> fields(good_ref) -> [ {'webhook-host', mk(emqx_schema:ip_port(), #{default => <<"127.0.0.1:80">>})}, - {log_dir, mk(emqx_schema:file(), #{example => "var/log/emqx"})}, + {log_dir, mk(string(), #{example => "var/log/emqx"})}, {tag, mk(binary(), #{desc => <<"tag">>})} ]; fields(nest_ref) -> From 45dad2ed3afd23070887d3041a5760e67e2f464f Mon Sep 17 00:00:00 2001 From: Thales Macedo Garitezi Date: Fri, 10 Nov 2023 17:06:16 -0300 Subject: [PATCH 151/155] feat(ds): implement session discard Fixes https://emqx.atlassian.net/browse/EMQX-9739 Fixes some issues to ensure the session is discarded when the client connects with `clean_start = true`, and added some cleanup to subscriptions/routes/iterators/streams. > There is an API that session garbage collector can use to perform cleaning We already have `emqx_session:destroy/1`, which could serve as an API for a periodic session GC to use. 
--- .../emqx_persistent_session_ds_SUITE.erl | 97 ++++++++++++ apps/emqx/src/emqx_cm.erl | 6 +- .../emqx_persistent_message_ds_replayer.erl | 14 +- apps/emqx/src/emqx_persistent_session_ds.erl | 138 ++++++++++++++++-- apps/emqx/src/emqx_persistent_session_ds.hrl | 3 +- apps/emqx/src/emqx_session.erl | 10 +- apps/emqx/src/emqx_session_mem.erl | 2 + .../test/emqx_persistent_session_SUITE.erl | 1 + 8 files changed, 251 insertions(+), 20 deletions(-) diff --git a/apps/emqx/integration_test/emqx_persistent_session_ds_SUITE.erl b/apps/emqx/integration_test/emqx_persistent_session_ds_SUITE.erl index ee5d203e4..f22a4f97e 100644 --- a/apps/emqx/integration_test/emqx_persistent_session_ds_SUITE.erl +++ b/apps/emqx/integration_test/emqx_persistent_session_ds_SUITE.erl @@ -11,6 +11,8 @@ -include_lib("snabbkaffe/include/snabbkaffe.hrl"). -include_lib("emqx/include/emqx_mqtt.hrl"). +-include_lib("emqx/src/emqx_persistent_session_ds.hrl"). + -define(DEFAULT_KEYSPACE, default). -define(DS_SHARD_ID, <<"local">>). -define(DS_SHARD, {?DEFAULT_KEYSPACE, ?DS_SHARD_ID}). @@ -118,6 +120,7 @@ start_client(Opts0 = #{}) -> properties => #{'Session-Expiry-Interval' => 300} }, Opts = maps:to_list(emqx_utils_maps:deep_merge(Defaults, Opts0)), + ct:pal("starting client with opts:\n ~p", [Opts]), {ok, Client} = emqtt:start_link(Opts), on_exit(fun() -> catch emqtt:stop(Client) end), Client. @@ -148,6 +151,9 @@ restart_node(Node, NodeSpec) -> ?tp(restarted_node, #{}), ok. +is_persistent_connect_opts(#{properties := #{'Session-Expiry-Interval' := EI}}) -> + EI > 0. + %%------------------------------------------------------------------------------ %% Testcases %%------------------------------------------------------------------------------ @@ -309,3 +315,94 @@ t_session_unsubscription_idempotency(Config) -> end ), ok. 
+ +t_session_discard_persistent_to_non_persistent(_Config) -> + ClientId = atom_to_binary(?FUNCTION_NAME), + Params = #{ + client_id => ClientId, + reconnect_opts => + #{ + clean_start => true, + %% we set it to zero so that a new session is not created. + properties => #{'Session-Expiry-Interval' => 0}, + proto_ver => v5 + } + }, + do_t_session_discard(Params). + +t_session_discard_persistent_to_persistent(_Config) -> + ClientId = atom_to_binary(?FUNCTION_NAME), + Params = #{ + client_id => ClientId, + reconnect_opts => + #{ + clean_start => true, + properties => #{'Session-Expiry-Interval' => 30}, + proto_ver => v5 + } + }, + do_t_session_discard(Params). + +do_t_session_discard(Params) -> + #{ + client_id := ClientId, + reconnect_opts := ReconnectOpts0 + } = Params, + ReconnectOpts = ReconnectOpts0#{clientid => ClientId}, + SubTopicFilter = <<"t/+">>, + ?check_trace( + begin + ?tp(notice, "starting", #{}), + Client0 = start_client(#{ + clientid => ClientId, + clean_start => false, + properties => #{'Session-Expiry-Interval' => 30}, + proto_ver => v5 + }), + {ok, _} = emqtt:connect(Client0), + ?tp(notice, "subscribing", #{}), + {ok, _, [?RC_GRANTED_QOS_2]} = emqtt:subscribe(Client0, SubTopicFilter, qos2), + %% Store some matching messages so that streams and iterators are created. 
+ ok = emqtt:publish(Client0, <<"t/1">>, <<"1">>), + ok = emqtt:publish(Client0, <<"t/2">>, <<"2">>), + ?retry( + _Sleep0 = 100, + _Attempts0 = 50, + true = map_size(emqx_persistent_session_ds:list_all_streams()) > 0 + ), + ?retry( + _Sleep0 = 100, + _Attempts0 = 50, + true = map_size(emqx_persistent_session_ds:list_all_iterators()) > 0 + ), + ok = emqtt:stop(Client0), + ?tp(notice, "disconnected", #{}), + + ?tp(notice, "reconnecting", #{}), + %% we still have iterators and streams + ?assert(map_size(emqx_persistent_session_ds:list_all_streams()) > 0), + ?assert(map_size(emqx_persistent_session_ds:list_all_iterators()) > 0), + Client1 = start_client(ReconnectOpts), + {ok, _} = emqtt:connect(Client1), + ?assertEqual([], emqtt:subscriptions(Client1)), + case is_persistent_connect_opts(ReconnectOpts) of + true -> + ?assertMatch(#{ClientId := _}, emqx_persistent_session_ds:list_all_sessions()); + false -> + ?assertEqual(#{}, emqx_persistent_session_ds:list_all_sessions()) + end, + ?assertEqual(#{}, emqx_persistent_session_ds:list_all_subscriptions()), + ?assertEqual([], emqx_persistent_session_ds_router:topics()), + ?assertEqual(#{}, emqx_persistent_session_ds:list_all_streams()), + ?assertEqual(#{}, emqx_persistent_session_ds:list_all_iterators()), + ok = emqtt:stop(Client1), + ?tp(notice, "disconnected", #{}), + + ok + end, + fun(Trace) -> + ct:pal("trace:\n ~p", [Trace]), + ok + end + ), + ok. diff --git a/apps/emqx/src/emqx_cm.erl b/apps/emqx/src/emqx_cm.erl index 1e4940965..537c60876 100644 --- a/apps/emqx/src/emqx_cm.erl +++ b/apps/emqx/src/emqx_cm.erl @@ -258,21 +258,21 @@ set_chan_stats(ClientId, ChanPid, Stats) -> end. %% @doc Open a session. --spec open_session(boolean(), emqx_types:clientinfo(), emqx_types:conninfo()) -> +-spec open_session(_CleanStart :: boolean(), emqx_types:clientinfo(), emqx_types:conninfo()) -> {ok, #{ session := emqx_session:t(), present := boolean(), replay => _ReplayContext }} | {error, Reason :: term()}. 
-open_session(true, ClientInfo = #{clientid := ClientId}, ConnInfo) -> +open_session(_CleanStart = true, ClientInfo = #{clientid := ClientId}, ConnInfo) -> Self = self(), emqx_cm_locker:trans(ClientId, fun(_) -> ok = discard_session(ClientId), ok = emqx_session:destroy(ClientInfo, ConnInfo), create_register_session(ClientInfo, ConnInfo, Self) end); -open_session(false, ClientInfo = #{clientid := ClientId}, ConnInfo) -> +open_session(_CleanStart = false, ClientInfo = #{clientid := ClientId}, ConnInfo) -> Self = self(), emqx_cm_locker:trans(ClientId, fun(_) -> case emqx_session:open(ClientInfo, ConnInfo) of diff --git a/apps/emqx/src/emqx_persistent_message_ds_replayer.erl b/apps/emqx/src/emqx_persistent_message_ds_replayer.erl index 156aa943e..98bb069b0 100644 --- a/apps/emqx/src/emqx_persistent_message_ds_replayer.erl +++ b/apps/emqx/src/emqx_persistent_message_ds_replayer.erl @@ -188,8 +188,12 @@ fetch(SessionId, Inflight0, [Stream | Streams], N, Publishes0) -> end. -spec update_iterator(emqx_persistent_session_ds:id(), emqx_ds:stream(), emqx_ds:iterator()) -> ok. -update_iterator(SessionId, Stream, Iterator) -> - mria:dirty_write(?SESSION_ITER_TAB, #ds_iter{id = {SessionId, Stream}, iter = Iterator}). +update_iterator(DSSessionId, Stream, Iterator) -> + %% Workaround: we convert `Stream' to a binary before attempting to store it in + %% mnesia(rocksdb) because of a bug in `mnesia_rocksdb' when trying to do + %% `mnesia:dirty_all_keys' later. + StreamBin = term_to_binary(Stream), + mria:dirty_write(?SESSION_ITER_TAB, #ds_iter{id = {DSSessionId, StreamBin}, iter = Iterator}). get_last_iterator(SessionId, Stream, Ranges) -> case lists:keyfind(Stream, #range.stream, lists:reverse(Ranges)) of @@ -200,8 +204,10 @@ get_last_iterator(SessionId, Stream, Ranges) -> end. -spec get_iterator(emqx_persistent_session_ds:id(), emqx_ds:stream()) -> emqx_ds:iterator(). 
-get_iterator(SessionId, Stream) -> - Id = {SessionId, Stream}, +get_iterator(DSSessionId, Stream) -> + %% See comment in `update_iterator'. + StreamBin = term_to_binary(Stream), + Id = {DSSessionId, StreamBin}, [#ds_iter{iter = It}] = mnesia:dirty_read(?SESSION_ITER_TAB, Id), It. diff --git a/apps/emqx/src/emqx_persistent_session_ds.erl b/apps/emqx/src/emqx_persistent_session_ds.erl index 52c98c7d4..bc60a1277 100644 --- a/apps/emqx/src/emqx_persistent_session_ds.erl +++ b/apps/emqx/src/emqx_persistent_session_ds.erl @@ -16,6 +16,8 @@ -module(emqx_persistent_session_ds). +-behaviour(emqx_session). + -include("emqx.hrl"). -include_lib("snabbkaffe/include/snabbkaffe.hrl"). -include_lib("stdlib/include/ms_transform.hrl"). @@ -69,7 +71,13 @@ ]). -ifdef(TEST). --export([session_open/1]). +-export([ + session_open/1, + list_all_sessions/0, + list_all_subscriptions/0, + list_all_streams/0, + list_all_iterators/0 +]). -endif. %% Currently, this is the clientid. We avoid `emqx_types:clientid()' because that can be @@ -537,14 +545,24 @@ session_create(SessionId, Props) -> -spec session_drop(id()) -> ok. session_drop(DSSessionId) -> transaction(fun() -> - %% TODO: ensure all iterators from this clientid are closed? ok = session_drop_subscriptions(DSSessionId), + ok = session_drop_iterators(DSSessionId), + ok = session_drop_streams(DSSessionId), ok = mnesia:delete(?SESSION_TAB, DSSessionId, write) end). +-spec session_drop_subscriptions(id()) -> ok. session_drop_subscriptions(DSSessionId) -> - IteratorRefs = session_read_subscriptions(DSSessionId), - ok = lists:foreach(fun session_del_subscription/1, IteratorRefs). 
+ Subscriptions = session_read_subscriptions(DSSessionId), + lists:foreach( + fun(#ds_sub{id = DSSubId} = DSSub) -> + TopicFilter = subscription_id_to_topic_filter(DSSubId), + TopicFilterBin = emqx_topic:join(TopicFilter), + ok = emqx_persistent_session_ds_router:do_delete_route(TopicFilterBin, DSSessionId), + ok = session_del_subscription(DSSub) + end, + Subscriptions + ). %% @doc Called when a client subscribes to a topic. Idempotent. -spec session_add_subscription(id(), topic_filter(), _Props :: map()) -> @@ -615,6 +633,10 @@ new_subscription_id(DSSessionId, TopicFilter) -> DSSubId = {DSSessionId, TopicFilter}, {DSSubId, NowMS}. +-spec subscription_id_to_topic_filter(subscription_id()) -> topic_filter(). +subscription_id_to_topic_filter({_DSSessionId, TopicFilter}) -> + TopicFilter. + %%-------------------------------------------------------------------- %% RPC targets (v1) %%-------------------------------------------------------------------- @@ -639,24 +661,26 @@ do_ensure_all_iterators_closed(_DSSessionID) -> %% Reading batches %%-------------------------------------------------------------------- -renew_streams(Id) -> - Subscriptions = ro_transaction(fun() -> session_read_subscriptions(Id) end), - ExistingStreams = ro_transaction(fun() -> mnesia:read(?SESSION_STREAM_TAB, Id) end), +-spec renew_streams(id()) -> ok. +renew_streams(DSSessionId) -> + Subscriptions = ro_transaction(fun() -> session_read_subscriptions(DSSessionId) end), + ExistingStreams = ro_transaction(fun() -> mnesia:read(?SESSION_STREAM_TAB, DSSessionId) end), lists:foreach( fun(#ds_sub{id = {_, TopicFilter}, start_time = StartTime}) -> - renew_streams(Id, ExistingStreams, TopicFilter, StartTime) + renew_streams(DSSessionId, ExistingStreams, TopicFilter, StartTime) end, Subscriptions ). -renew_streams(Id, ExistingStreams, TopicFilter, StartTime) -> +-spec renew_streams(id(), [ds_stream()], emqx_ds:topic_filter(), emqx_ds:time()) -> ok. 
+renew_streams(DSSessionId, ExistingStreams, TopicFilter, StartTime) -> AllStreams = emqx_ds:get_streams(?PERSISTENT_MESSAGE_DB, TopicFilter, StartTime), transaction( fun() -> lists:foreach( fun({Rank, Stream}) -> Rec = #ds_stream{ - session = Id, + session = DSSessionId, topic_filter = TopicFilter, stream = Stream, rank = Rank @@ -669,7 +693,12 @@ renew_streams(Id, ExistingStreams, TopicFilter, StartTime) -> {ok, Iterator} = emqx_ds:make_iterator( ?PERSISTENT_MESSAGE_DB, Stream, TopicFilter, StartTime ), - IterRec = #ds_iter{id = {Id, Stream}, iter = Iterator}, + %% Workaround: we convert `Stream' to a binary before + %% attempting to store it in mnesia(rocksdb) because of a bug + %% in `mnesia_rocksdb' when trying to do + %% `mnesia:dirty_all_keys' later. + StreamBin = term_to_binary(Stream), + IterRec = #ds_iter{id = {DSSessionId, StreamBin}, iter = Iterator}, mnesia:write(?SESSION_ITER_TAB, IterRec, write) end end, @@ -678,6 +707,33 @@ renew_streams(Id, ExistingStreams, TopicFilter, StartTime) -> end ). +%% must be called inside a transaction +-spec session_drop_streams(id()) -> ok. +session_drop_streams(DSSessionId) -> + MS = ets:fun2ms( + fun(#ds_stream{session = DSSessionId0}) when DSSessionId0 =:= DSSessionId -> + DSSessionId0 + end + ), + StreamIDs = mnesia:select(?SESSION_STREAM_TAB, MS, write), + lists:foreach(fun(Key) -> mnesia:delete(?SESSION_STREAM_TAB, Key, write) end, StreamIDs). + +%% must be called inside a transaction +-spec session_drop_iterators(id()) -> ok. +session_drop_iterators(DSSessionId) -> + MS = ets:fun2ms( + fun(#ds_iter{id = {DSSessionId0, StreamBin}}) when DSSessionId0 =:= DSSessionId -> + StreamBin + end + ), + StreamBins = mnesia:select(?SESSION_ITER_TAB, MS, write), + lists:foreach( + fun(StreamBin) -> + mnesia:delete(?SESSION_ITER_TAB, {DSSessionId, StreamBin}, write) + end, + StreamBins + ). 
+ %%-------------------------------------------------------------------------------- transaction(Fun) -> @@ -724,3 +780,63 @@ ensure_timer(Type) -> ensure_timer(Type, Timeout) -> _ = emqx_utils:start_timer(Timeout, {emqx_session, Type}), ok. + +-ifdef(TEST). +list_all_sessions() -> + DSSessionIds = mnesia:dirty_all_keys(?SESSION_TAB), + Sessions = lists:map( + fun(SessionID) -> + {ok, Session, Subscriptions} = session_open(SessionID), + {SessionID, #{session => Session, subscriptions => Subscriptions}} + end, + DSSessionIds + ), + maps:from_list(Sessions). + +list_all_subscriptions() -> + DSSubIds = mnesia:dirty_all_keys(?SESSION_SUBSCRIPTIONS_TAB), + Subscriptions = lists:map( + fun(DSSubId) -> + [DSSub] = mnesia:dirty_read(?SESSION_SUBSCRIPTIONS_TAB, DSSubId), + {DSSubId, export_subscription(DSSub)} + end, + DSSubIds + ), + maps:from_list(Subscriptions). + +list_all_streams() -> + DSStreamIds = mnesia:dirty_all_keys(?SESSION_STREAM_TAB), + DSStreams = lists:map( + fun(DSStreamId) -> + Records = mnesia:dirty_read(?SESSION_STREAM_TAB, DSStreamId), + ExtDSStreams = + lists:map( + fun(Record) -> + export_record( + Record, + #ds_stream.session, + [session, topic_filter, stream, rank], + #{} + ) + end, + Records + ), + {DSStreamId, ExtDSStreams} + end, + DSStreamIds + ), + maps:from_list(DSStreams). + +list_all_iterators() -> + DSIterIds = mnesia:dirty_all_keys(?SESSION_ITER_TAB), + DSIters = lists:map( + fun(DSIterId) -> + [Record] = mnesia:dirty_read(?SESSION_ITER_TAB, DSIterId), + {DSIterId, export_record(Record, #ds_iter.id, [id, iter], #{})} + end, + DSIterIds + ), + maps:from_list(DSIters). + +%% ifdef(TEST) +-endif. diff --git a/apps/emqx/src/emqx_persistent_session_ds.hrl b/apps/emqx/src/emqx_persistent_session_ds.hrl index 666874608..cc995ce66 100644 --- a/apps/emqx/src/emqx_persistent_session_ds.hrl +++ b/apps/emqx/src/emqx_persistent_session_ds.hrl @@ -39,9 +39,10 @@ rank :: emqx_ds:stream_rank() }). -type ds_stream() :: #ds_stream{}. 
+-type ds_stream_bin() :: binary(). -record(ds_iter, { - id :: {emqx_persistent_session_ds:id(), emqx_ds:stream()}, + id :: {emqx_persistent_session_ds:id(), ds_stream_bin()}, iter :: emqx_ds:iterator() }). diff --git a/apps/emqx/src/emqx_session.erl b/apps/emqx/src/emqx_session.erl index 8bdd47392..52342d7ee 100644 --- a/apps/emqx/src/emqx_session.erl +++ b/apps/emqx/src/emqx_session.erl @@ -176,6 +176,7 @@ t(). -callback open(clientinfo(), conninfo()) -> {_IsPresent :: true, t(), _ReplayContext} | false. +-callback destroy(t() | clientinfo()) -> ok. %%-------------------------------------------------------------------- %% Create a Session @@ -247,7 +248,14 @@ get_mqtt_conf(Zone, Key) -> -spec destroy(clientinfo(), conninfo()) -> ok. destroy(ClientInfo, ConnInfo) -> - (choose_impl_mod(ConnInfo)):destroy(ClientInfo). + %% When destroying/discarding a session, the current `ClientInfo' might suggest an + %% implementation which does not correspond to the one previously used by this client. + %% An example of this is a client that first connects with `Session-Expiry-Interval' > + %% 0, and later reconnects with `Session-Expiry-Interval' = 0 and `clean_start' = + %% true. So we may simply destroy sessions from all implementations, since the key + %% (ClientID) is the same. + Mods = choose_impl_candidates(ConnInfo), + lists:foreach(fun(Mod) -> Mod:destroy(ClientInfo) end, Mods). -spec destroy(t()) -> ok. destroy(Session) -> diff --git a/apps/emqx/src/emqx_session_mem.erl b/apps/emqx/src/emqx_session_mem.erl index e72feffd5..3ea4f9f3b 100644 --- a/apps/emqx/src/emqx_session_mem.erl +++ b/apps/emqx/src/emqx_session_mem.erl @@ -44,6 +44,8 @@ %% State is stored in-memory in the process heap. -module(emqx_session_mem). +-behaviour(emqx_session). + -include("emqx.hrl"). -include("emqx_mqtt.hrl"). -include("emqx_session_mem.hrl"). 
diff --git a/apps/emqx/test/emqx_persistent_session_SUITE.erl b/apps/emqx/test/emqx_persistent_session_SUITE.erl index 0f8929e23..bd7ca1c46 100644 --- a/apps/emqx/test/emqx_persistent_session_SUITE.erl +++ b/apps/emqx/test/emqx_persistent_session_SUITE.erl @@ -599,6 +599,7 @@ t_publish_while_client_is_gone(Config) -> ok = emqtt:disconnect(Client2). +%% TODO: don't skip after QoS2 support is added to DS. t_clean_start_drops_subscriptions(init, Config) -> skip_ds_tc(Config); t_clean_start_drops_subscriptions('end', _Config) -> ok. t_clean_start_drops_subscriptions(Config) -> From 4c5d64abc24a2eadf9dcd83a27a9575db503d29b Mon Sep 17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Mon, 13 Nov 2023 12:50:34 +0100 Subject: [PATCH 152/155] refactor(schema): keep type converters close --- .../src/emqx_bridge_gcp_pubsub.erl | 2 +- apps/emqx_conf/src/emqx_conf.erl | 117 +----- apps/emqx_conf/src/emqx_conf_schema_types.erl | 335 ++++++++++++++++++ .../src/emqx_connector_schema_lib.erl | 4 +- .../src/emqx_dashboard_swagger.erl | 136 +------ .../test/emqx_swagger_response_SUITE.erl | 8 +- 6 files changed, 343 insertions(+), 259 deletions(-) create mode 100644 apps/emqx_conf/src/emqx_conf_schema_types.erl diff --git a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.erl b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.erl index a42047b43..bb4a13875 100644 --- a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.erl +++ b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.erl @@ -101,7 +101,7 @@ fields(connector_config) -> )}, {service_account_json, sc( - typerefl:alias("map", ?MODULE:service_account_json()), + ?MODULE:service_account_json(), #{ required => true, validator => fun ?MODULE:service_account_json_validator/1, diff --git a/apps/emqx_conf/src/emqx_conf.erl b/apps/emqx_conf/src/emqx_conf.erl index 8b51c2161..31f51d92d 100644 --- a/apps/emqx_conf/src/emqx_conf.erl +++ b/apps/emqx_conf/src/emqx_conf.erl @@ -305,121 +305,8 @@ 
hocon_schema_to_spec(?UNION(Types, _DisplayName), LocalModule) -> hocon_schema_to_spec(Atom, _LocalModule) when is_atom(Atom) -> {#{type => enum, symbols => [Atom]}, []}. -typename_to_spec("boolean()", _Mod) -> - #{type => boolean}; -typename_to_spec("binary()", _Mod) -> - #{type => string}; -typename_to_spec("float()", _Mod) -> - #{type => number}; -typename_to_spec("integer()", _Mod) -> - #{type => number}; -typename_to_spec("pos_integer()", _Mod) -> - #{type => integer}; -typename_to_spec("non_neg_integer()", _Mod) -> - #{type => number, minimum => 0}; -typename_to_spec("number()", _Mod) -> - #{type => number}; -typename_to_spec("string()", _Mod) -> - #{type => string}; -typename_to_spec("atom()", _Mod) -> - #{type => string}; -typename_to_spec("duration()", _Mod) -> - #{type => duration}; -typename_to_spec("timeout_duration()", _Mod) -> - #{type => duration}; -typename_to_spec("duration_s()", _Mod) -> - #{type => duration}; -typename_to_spec("timeout_duration_s()", _Mod) -> - #{type => duration}; -typename_to_spec("duration_ms()", _Mod) -> - #{type => duration}; -typename_to_spec("timeout_duration_ms()", _Mod) -> - #{type => duration}; -typename_to_spec("percent()", _Mod) -> - #{type => percent}; -typename_to_spec("ip_port()", _Mod) -> - #{type => ip_port}; -typename_to_spec("url()", _Mod) -> - #{type => url}; -typename_to_spec("bytesize()", _Mod) -> - #{type => 'byteSize'}; -typename_to_spec("wordsize()", _Mod) -> - #{type => 'byteSize'}; -typename_to_spec("qos()", _Mod) -> - #{type => enum, symbols => [0, 1, 2]}; -typename_to_spec("comma_separated_list()", _Mod) -> - #{type => comma_separated_string}; -typename_to_spec("comma_separated_atoms()", _Mod) -> - #{type => comma_separated_string}; -typename_to_spec("map(" ++ Map, _Mod) -> - [$) | _MapArgs] = lists:reverse(Map), - #{type => object}; -typename_to_spec("port_number()", _Mod) -> - #{type => integer}; -typename_to_spec(Name, Mod) -> - Spec = range(Name), - Spec1 = remote_module_type(Spec, Name, Mod), - 
Spec2 = typerefl_array(Spec1, Name, Mod), - Spec3 = integer(Spec2, Name), - default_type(Mod, Name, Spec3). - -default_type(Mod, Name, nomatch) -> - error({unknown_type, Mod, Name}); -default_type(_Mod, _Name, Type) -> - Type. - -range(Name) -> - case string:split(Name, "..") of - %% 1..10 1..inf -inf..10 - [MinStr, MaxStr] -> - Schema = #{type => number}, - Schema1 = add_integer_prop(Schema, minimum, MinStr), - add_integer_prop(Schema1, maximum, MaxStr); - _ -> - nomatch - end. - -%% Module:Type -remote_module_type(nomatch, Name, Mod) -> - case string:split(Name, ":") of - [_Module, Type] -> typename_to_spec(Type, Mod); - _ -> nomatch - end; -remote_module_type(Spec, _Name, _Mod) -> - Spec. - -%% [string()] or [integer()] or [xxx]. -typerefl_array(nomatch, Name, Mod) -> - case string:trim(Name, leading, "[") of - Name -> - nomatch; - Name1 -> - case string:trim(Name1, trailing, "]") of - Name1 -> - notmatch; - Name2 -> - Schema = typename_to_spec(Name2, Mod), - #{type => array, items => Schema} - end - end; -typerefl_array(Spec, _Name, _Mod) -> - Spec. - -%% integer(1) -integer(nomatch, Name) -> - case string:to_integer(Name) of - {Int, []} -> #{type => enum, symbols => [Int], default => Int}; - _ -> nomatch - end; -integer(Spec, _Name) -> - Spec. - -add_integer_prop(Schema, Key, Value) -> - case string:to_integer(Value) of - {error, no_integer} -> Schema; - {Int, []} when Key =:= minimum -> Schema#{Key => Int}; - {Int, []} -> Schema#{Key => Int} - end. +typename_to_spec(TypeStr, Module) -> + emqx_conf_schema_types:readable_dashboard(Module, TypeStr). 
 to_bin(List) when is_list(List) ->
     case io_lib:printable_list(List) of
diff --git a/apps/emqx_conf/src/emqx_conf_schema_types.erl b/apps/emqx_conf/src/emqx_conf_schema_types.erl
new file mode 100644
index 000000000..3c097b1e2
--- /dev/null
+++ b/apps/emqx_conf/src/emqx_conf_schema_types.erl
@@ -0,0 +1,335 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2023 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%--------------------------------------------------------------------
+
+-module(emqx_conf_schema_types).
+
+-export([readable/2]).
+-export([readable_swagger/2, readable_dashboard/2, readable_docgen/2]).
+
+%% Takes a typerefl name or hocon schema's display name and returns
+%% a map of different flavors of more readable type specs.
+%% - swagger: for swagger spec
+%% - dashboard: to facilitate the dashboard UI rendering
+%% - docgen: for documentation generation
+readable(Module, TypeStr) when is_binary(TypeStr) ->
+    readable(Module, binary_to_list(TypeStr));
+readable(Module, TypeStr) when is_list(TypeStr) ->
+    try
+        %% Module is ignored so far as all types are distinguished by their names
+        readable(TypeStr)
+    catch
+        throw:unknown_type ->
+            fail(#{reason => unknown_type, type => TypeStr, module => Module})
+    end.
+
+readable_swagger(Module, TypeStr) ->
+    get_readable(Module, TypeStr, swagger).
+ +readable_dashboard(Module, TypeStr) -> + get_readable(Module, TypeStr, dashboard). + +readable_docgen(Module, TypeStr) -> + get_readable(Module, TypeStr, docgen). + +get_readable(Module, TypeStr, Flavor) -> + Map = readable(Module, TypeStr), + case maps:get(Flavor, Map, undefined) of + undefined -> fail(#{reason => unknown_type, module => Module, type => TypeStr}); + Value -> Value + end. + +%% Fail the build or test. Production code should never get here. +-spec fail(_) -> no_return(). +fail(Reason) -> + io:format(standard_error, "ERROR: ~p~n", [Reason]), + error(Reason). + +readable("boolean()") -> + #{ + swagger => #{type => boolean}, + dashboard => #{type => boolean}, + docgen => #{type => "Boolean"} + }; +readable("binary()") -> + #{ + swagger => #{type => string}, + dashboard => #{type => string}, + docgen => #{type => "String"} + }; +readable("float()") -> + #{ + swagger => #{type => number}, + dashboard => #{type => number}, + docgen => #{type => "Float"} + }; +readable("integer()") -> + #{ + swagger => #{type => integer}, + dashboard => #{type => integer}, + docgen => #{type => "Integer"} + }; +readable("non_neg_integer()") -> + #{ + swagger => #{type => integer, minimum => 0}, + dashboard => #{type => integer, minimum => 0}, + docgen => #{type => "Integer(0..+inf)"} + }; +readable("pos_integer()") -> + #{ + swagger => #{type => integer, minimum => 1}, + dashboard => #{type => integer, minimum => 1}, + docgen => #{type => "Integer(1..+inf)"} + }; +readable("number()") -> + #{ + swagger => #{type => number}, + dashboard => #{type => number}, + docgen => #{type => "Number"} + }; +readable("string()") -> + #{ + swagger => #{type => string}, + dashboard => #{type => string}, + docgen => #{type => "String"} + }; +readable("atom()") -> + #{ + swagger => #{type => string}, + dashboard => #{type => string}, + docgen => #{type => "String"} + }; +readable("epoch_second()") -> + %% only for swagger + #{ + swagger => #{ + <<"oneOf">> => [ + #{type => integer, 
example => 1640995200, description => <<"epoch-second">>}, + #{ + type => string, + example => <<"2022-01-01T00:00:00.000Z">>, + format => <<"date-time">> + } + ] + } + }; +readable("epoch_millisecond()") -> + %% only for swagger + #{ + swagger => #{ + <<"oneOf">> => [ + #{ + type => integer, + example => 1640995200000, + description => <<"epoch-millisecond">> + }, + #{ + type => string, + example => <<"2022-01-01T00:00:00.000Z">>, + format => <<"date-time">> + } + ] + } + }; +readable("duration()") -> + #{ + swagger => #{type => string, example => <<"12m">>}, + dashboard => #{type => duration}, + docgen => #{type => "String", example => <<"12m">>} + }; +readable("duration_s()") -> + #{ + swagger => #{type => string, example => <<"1h">>}, + dashboard => #{type => duration}, + docgen => #{type => "String", example => <<"1h">>} + }; +readable("duration_ms()") -> + #{ + swagger => #{type => string, example => <<"32s">>}, + dashboard => #{type => duration}, + docgen => #{type => "String", example => <<"32s">>} + }; +readable("timeout_duration()") -> + #{ + swagger => #{type => string, example => <<"12m">>}, + dashboard => #{type => duration}, + docgen => #{type => "String", example => <<"12m">>} + }; +readable("timeout_duration_s()") -> + #{ + swagger => #{type => string, example => <<"1h">>}, + dashboard => #{type => duration}, + docgen => #{type => "String", example => <<"1h">>} + }; +readable("timeout_duration_ms()") -> + #{ + swagger => #{type => string, example => <<"32s">>}, + dashboard => #{type => duration}, + docgen => #{type => "String", example => <<"32s">>} + }; +readable("percent()") -> + #{ + swagger => #{type => string, example => <<"12%">>}, + dashboard => #{type => percent}, + docgen => #{type => "String", example => <<"12%">>} + }; +readable("ip_port()") -> + #{ + swagger => #{type => string, example => <<"127.0.0.1:80">>}, + dashboard => #{type => ip_port}, + docgen => #{type => "String", example => <<"127.0.0.1:80">>} + }; +readable("url()") -> + #{ 
+        swagger => #{type => string, example => <<"http://127.0.0.1">>},
+        dashboard => #{type => url},
+        docgen => #{type => "String", example => <<"http://127.0.0.1">>}
+    };
+readable("bytesize()") ->
+    #{
+        swagger => #{type => string, example => <<"32MB">>},
+        dashboard => #{type => 'byteSize'},
+        docgen => #{type => "String", example => <<"32MB">>}
+    };
+readable("wordsize()") ->
+    #{
+        swagger => #{type => string, example => <<"1024KB">>},
+        dashboard => #{type => 'wordSize'},
+        docgen => #{type => "String", example => <<"1024KB">>}
+    };
+readable("map(" ++ Map) ->
+    [$) | _MapArgs] = lists:reverse(Map),
+    %% TODO: for docgen, parse map args. e.g. Map(String,String)
+    #{
+        swagger => #{type => object, example => #{}},
+        dashboard => #{type => object},
+        docgen => #{type => "Map", example => #{}}
+    };
+readable("qos()") ->
+    #{
+        swagger => #{type => integer, minimum => 0, maximum => 2, example => 0},
+        dashboard => #{type => enum, symbols => [0, 1, 2]},
+        docgen => #{type => "Integer(0..2)", example => 0}
+    };
+readable("comma_separated_list()") ->
+    #{
+        swagger => #{type => string, example => <<"item1,item2">>},
+        dashboard => #{type => comma_separated_string},
+        docgen => #{type => "String", example => <<"item1,item2">>}
+    };
+readable("comma_separated_binary()") ->
+    #{
+        swagger => #{type => string, example => <<"item1,item2">>},
+        dashboard => #{type => comma_separated_string},
+        docgen => #{type => "String", example => <<"item1,item2">>}
+    };
+readable("comma_separated_atoms()") ->
+    #{
+        swagger => #{type => string, example => <<"item1,item2">>},
+        dashboard => #{type => comma_separated_string},
+        docgen => #{type => "String", example => <<"item1,item2">>}
+    };
+readable("service_account_json()") ->
+    %% This is a bit special,
+    %% service_account_json in swagger spec is an object
+    %% the same in documentation.
+    %% However, dashboard wishes it to be a string
+    %% TODO:
+    %% - Change type definition to string().
+    %% - Convert the embedded object to an escaped JSON string.
+    %% - Delete this function clause once the above is done.
+    #{
+        swagger => #{type => object},
+        dashboard => #{type => string},
+        docgen => #{type => "Map"}
+    };
+readable("json_binary()") ->
+    #{
+        swagger => #{type => string, example => <<"{\"a\": [1,true]}">>},
+        dashboard => #{type => object},
+        docgen => #{type => "String", example => <<"{\"a\": [1,true]}">>}
+    };
+readable("port_number()") ->
+    Result = try_range("1..65535"),
+    true = is_map(Result),
+    Result;
+readable(TypeStr0) ->
+    case string:split(TypeStr0, ":") of
+        [ModuleStr, TypeStr] ->
+            Module = list_to_existing_atom(ModuleStr),
+            readable(Module, TypeStr);
+        _ ->
+            parse(TypeStr0)
+    end.
+
+parse(TypeStr) ->
+    try_parse(TypeStr, [
+        fun try_typerefl_array/1,
+        fun try_range/1
+    ]).
+
+try_parse(_TypeStr, []) ->
+    throw(unknown_type);
+try_parse(TypeStr, [ParseFun | More]) ->
+    case ParseFun(TypeStr) of
+        nomatch ->
+            try_parse(TypeStr, More);
+        Result ->
+            Result
+    end.
+
+%% [string()] or [integer()] or [xxx] or [xxx,...]
+try_typerefl_array(Name) ->
+    case string:trim(Name, leading, "[") of
+        Name ->
+            nomatch;
+        Name1 ->
+            case string:trim(Name1, trailing, ",.]") of
+                Name1 ->
+                    nomatch;
+                Name2 ->
+                    Flavors = readable(Name2),
+                    DocgenSpec = maps:get(docgen, Flavors),
+                    DocgenType = maps:get(type, DocgenSpec),
+                    #{
+                        swagger => #{type => array, items => maps:get(swagger, Flavors)},
+                        dashboard => #{type => array, items => maps:get(dashboard, Flavors)},
+                        docgen => #{type => "Array(" ++ DocgenType ++ ")"}
+                    }
+            end
+    end.
+
+try_range(Name) ->
+    case string:split(Name, "..") of
+        %% 1..10 1..inf -inf..10
+        [MinStr, MaxStr] ->
+            Schema0 = #{type => integer},
+            Schema1 = add_integer_prop(Schema0, minimum, MinStr),
+            Schema = add_integer_prop(Schema1, maximum, MaxStr),
+            #{
+                swagger => Schema,
+                dashboard => Schema,
+                docgen => #{type => "Integer(" ++ MinStr ++ ".." ++ MaxStr ++ ")"}
+            };
+        _ ->
+            nomatch
+    end.
+ +add_integer_prop(Schema, Key, Value) -> + case string:to_integer(Value) of + {error, no_integer} -> Schema; + {Int, []} when Key =:= minimum -> Schema#{Key => Int}; + {Int, []} -> Schema#{Key => Int} + end. diff --git a/apps/emqx_connector/src/emqx_connector_schema_lib.erl b/apps/emqx_connector/src/emqx_connector_schema_lib.erl index a277fe8c8..07e7fe375 100644 --- a/apps/emqx_connector/src/emqx_connector_schema_lib.erl +++ b/apps/emqx_connector/src/emqx_connector_schema_lib.erl @@ -20,13 +20,13 @@ -include_lib("hocon/include/hoconsc.hrl"). -export([ + pool_size/1, relational_db_fields/0, ssl_fields/0, prepare_statement_fields/0 ]). -export([ - pool_size/1, database/1, username/1, password/1, @@ -35,13 +35,11 @@ ]). -type database() :: binary(). --type pool_size() :: pos_integer(). -type username() :: binary(). -type password() :: binary(). -reflect_type([ database/0, - pool_size/0, username/0, password/0 ]). diff --git a/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl b/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl index ef10d33af..c1379d4d6 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl +++ b/apps/emqx_dashboard/src/emqx_dashboard_swagger.erl @@ -799,140 +799,8 @@ hocon_schema_to_spec(?UNION(Types, _DisplayName), LocalModule) -> hocon_schema_to_spec(Atom, _LocalModule) when is_atom(Atom) -> {#{type => string, enum => [Atom]}, []}. 
-typename_to_spec("boolean()", _Mod) -> - #{type => boolean}; -typename_to_spec("binary()", _Mod) -> - #{type => string}; -typename_to_spec("float()", _Mod) -> - #{type => number}; -typename_to_spec("integer()", _Mod) -> - #{type => integer}; -typename_to_spec("non_neg_integer()", _Mod) -> - #{type => integer, minimum => 0}; -typename_to_spec("pos_integer()", _Mod) -> - #{type => integer, minimum => 1}; -typename_to_spec("number()", _Mod) -> - #{type => number}; -typename_to_spec("string()", _Mod) -> - #{type => string}; -typename_to_spec("atom()", _Mod) -> - #{type => string}; -typename_to_spec("epoch_second()", _Mod) -> - #{ - <<"oneOf">> => [ - #{type => integer, example => 1640995200, description => <<"epoch-second">>}, - #{type => string, example => <<"2022-01-01T00:00:00.000Z">>, format => <<"date-time">>} - ] - }; -typename_to_spec("epoch_millisecond()", _Mod) -> - #{ - <<"oneOf">> => [ - #{type => integer, example => 1640995200000, description => <<"epoch-millisecond">>}, - #{type => string, example => <<"2022-01-01T00:00:00.000Z">>, format => <<"date-time">>} - ] - }; -typename_to_spec("duration()", _Mod) -> - #{type => string, example => <<"12m">>}; -typename_to_spec("duration_s()", _Mod) -> - #{type => string, example => <<"1h">>}; -typename_to_spec("duration_ms()", _Mod) -> - #{type => string, example => <<"32s">>}; -typename_to_spec("timeout_duration()", _Mod) -> - #{type => string, example => <<"12m">>}; -typename_to_spec("timeout_duration_s()", _Mod) -> - #{type => string, example => <<"1h">>}; -typename_to_spec("timeout_duration_ms()", _Mod) -> - #{type => string, example => <<"32s">>}; -typename_to_spec("percent()", _Mod) -> - #{type => number, example => <<"12%">>}; -typename_to_spec("ip_port()", _Mod) -> - #{type => string, example => <<"127.0.0.1:80">>}; -typename_to_spec("url()", _Mod) -> - #{type => string, example => <<"http://127.0.0.1">>}; -typename_to_spec("bytesize()", _Mod) -> - #{type => string, example => <<"32MB">>}; 
-typename_to_spec("wordsize()", _Mod) -> - #{type => string, example => <<"1024KB">>}; -typename_to_spec("map(" ++ Map, _Mod) -> - [$) | _MapArgs] = lists:reverse(Map), - #{type => object, example => #{}}; -typename_to_spec("qos()", _Mod) -> - #{type => integer, minimum => 0, maximum => 2, example => 0}; -typename_to_spec("comma_separated_list()", _Mod) -> - #{type => string, example => <<"item1,item2">>}; -typename_to_spec("comma_separated_binary()", _Mod) -> - #{type => string, example => <<"item1,item2">>}; -typename_to_spec("comma_separated_atoms()", _Mod) -> - #{type => string, example => <<"item1,item2">>}; -typename_to_spec("json_binary()", _Mod) -> - #{type => string, example => <<"{\"a\": [1,true]}">>}; -typename_to_spec("port_number()", _Mod) -> - range("1..65535"); -typename_to_spec(Name, Mod) -> - try_convert_to_spec(Name, Mod, [ - fun try_remote_module_type/2, - fun try_typerefl_array/2, - fun try_range/2, - fun try_integer/2 - ]). - -range(Name) -> - #{} = try_range(Name, undefined). - -try_convert_to_spec(Name, Mod, []) -> - throw({error, #{msg => <<"Unsupported Type">>, type => Name, module => Mod}}); -try_convert_to_spec(Name, Mod, [Converter | Rest]) -> - case Converter(Name, Mod) of - nomatch -> try_convert_to_spec(Name, Mod, Rest); - Spec -> Spec - end. - -try_range(Name, _Mod) -> - case string:split(Name, "..") of - %% 1..10 1..inf -inf..10 - [MinStr, MaxStr] -> - Schema = #{type => integer}, - Schema1 = add_integer_prop(Schema, minimum, MinStr), - add_integer_prop(Schema1, maximum, MaxStr); - _ -> - nomatch - end. - -%% Module:Type -try_remote_module_type(Name, Mod) -> - case string:split(Name, ":") of - [_Module, Type] -> typename_to_spec(Type, Mod); - _ -> nomatch - end. - -%% [string()] or [integer()] or [xxx] or [xxx,...] 
-try_typerefl_array(Name, Mod) -> - case string:trim(Name, leading, "[") of - Name -> - nomatch; - Name1 -> - case string:trim(Name1, trailing, ",.]") of - Name1 -> - notmatch; - Name2 -> - Schema = typename_to_spec(Name2, Mod), - #{type => array, items => Schema} - end - end. - -%% integer(1) -try_integer(Name, _Mod) -> - case string:to_integer(Name) of - {Int, []} -> #{type => integer, enum => [Int], default => Int}; - _ -> nomatch - end. - -add_integer_prop(Schema, Key, Value) -> - case string:to_integer(Value) of - {error, no_integer} -> Schema; - {Int, []} when Key =:= minimum -> Schema#{Key => Int}; - {Int, []} -> Schema#{Key => Int} - end. +typename_to_spec(TypeStr, Module) -> + emqx_conf_schema_types:readable_swagger(Module, TypeStr). to_bin(List) when is_list(List) -> case io_lib:printable_list(List) of diff --git a/apps/emqx_dashboard/test/emqx_swagger_response_SUITE.erl b/apps/emqx_dashboard/test/emqx_swagger_response_SUITE.erl index 745db76f0..5987ad8fa 100644 --- a/apps/emqx_dashboard/test/emqx_swagger_response_SUITE.erl +++ b/apps/emqx_dashboard/test/emqx_swagger_response_SUITE.erl @@ -375,9 +375,6 @@ t_complex_type(_Config) -> all ], type := string - }}, - {<<"fix_integer">>, #{ - default := 100, enum := [100], type := integer }} ], Properties @@ -413,7 +410,7 @@ t_ref_array_with_key(_Config) -> {<<"percent_ex">>, #{ description => <<"percent example">>, example => <<"12%">>, - type => number + type => string }}, {<<"duration_ms_ex">>, #{ description => <<"duration ms example">>, @@ -659,8 +656,7 @@ schema("/ref/complex_type") -> {maps, hoconsc:mk(map(), #{})}, {comma_separated_list, hoconsc:mk(emqx_schema:comma_separated_list(), #{})}, {comma_separated_atoms, hoconsc:mk(emqx_schema:comma_separated_atoms(), #{})}, - {log_level, hoconsc:mk(emqx_conf_schema:log_level(), #{})}, - {fix_integer, hoconsc:mk(typerefl:integer(100), #{})} + {log_level, hoconsc:mk(emqx_conf_schema:log_level(), #{})} ] } } From ab1f3b6b2f1ce446aa59dc4463ead79c6c88e382 Mon Sep 
17 00:00:00 2001 From: "Zaiming (Stone) Shi" Date: Mon, 13 Nov 2023 21:55:03 +0100 Subject: [PATCH 153/155] fix(emqx_conf_schema_types): address review comments --- apps/emqx_conf/src/emqx_conf_schema_types.erl | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/apps/emqx_conf/src/emqx_conf_schema_types.erl b/apps/emqx_conf/src/emqx_conf_schema_types.erl index 3c097b1e2..e948142fc 100644 --- a/apps/emqx_conf/src/emqx_conf_schema_types.erl +++ b/apps/emqx_conf/src/emqx_conf_schema_types.erl @@ -258,7 +258,7 @@ readable("service_account_json()") -> readable("json_binary()") -> #{ swagger => #{type => string, example => <<"{\"a\": [1,true]}">>}, - dashboard => #{type => object}, + dashboard => #{type => string}, docgen => #{type => "String", example => <<"{\"a\": [1,true]}">>} }; readable("port_number()") -> @@ -330,6 +330,5 @@ try_range(Name) -> add_integer_prop(Schema, Key, Value) -> case string:to_integer(Value) of {error, no_integer} -> Schema; - {Int, []} when Key =:= minimum -> Schema#{Key => Int}; {Int, []} -> Schema#{Key => Int} end. 
From 28a577ad0906e0adafedb901395969cf9869c82e Mon Sep 17 00:00:00 2001 From: Ivan Dyachkov Date: Tue, 14 Nov 2023 11:02:26 +0100 Subject: [PATCH 154/155] chore: bump apps versions --- apps/emqx/src/emqx.app.src | 2 +- apps/emqx_auth/src/emqx_auth.app.src | 2 +- apps/emqx_auth_http/src/emqx_auth_http.app.src | 2 +- apps/emqx_auth_jwt/src/emqx_auth_jwt.app.src | 2 +- apps/emqx_auth_ldap/src/emqx_auth_ldap.app.src | 2 +- apps/emqx_auth_mnesia/src/emqx_auth_mnesia.app.src | 2 +- apps/emqx_auth_mongodb/src/emqx_auth_mongodb.app.src | 2 +- apps/emqx_auth_mysql/src/emqx_auth_mysql.app.src | 2 +- apps/emqx_auth_postgresql/src/emqx_auth_postgresql.app.src | 2 +- apps/emqx_auth_redis/src/emqx_auth_redis.app.src | 2 +- apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.app.src | 2 +- apps/emqx_bridge_http/src/emqx_bridge_http.app.src | 2 +- apps/emqx_connector/src/emqx_connector.app.src | 2 +- apps/emqx_dashboard/src/emqx_dashboard.app.src | 2 +- apps/emqx_durable_storage/src/emqx_durable_storage.app.src | 2 +- apps/emqx_enterprise/src/emqx_enterprise.app.src | 2 +- apps/emqx_ft/src/emqx_ft.app.src | 2 +- apps/emqx_gateway/src/emqx_gateway.app.src | 2 +- apps/emqx_gateway_coap/src/emqx_gateway_coap.app.src | 2 +- apps/emqx_gateway_stomp/src/emqx_gateway_stomp.app.src | 2 +- apps/emqx_gcp_device/src/emqx_gcp_device.app.src | 2 +- apps/emqx_ldap/src/emqx_ldap.app.src | 2 +- apps/emqx_machine/src/emqx_machine.app.src | 2 +- apps/emqx_management/src/emqx_management.app.src | 2 +- apps/emqx_mysql/src/emqx_mysql.app.src | 2 +- apps/emqx_redis/src/emqx_redis.app.src | 2 +- apps/emqx_rule_engine/src/emqx_rule_engine.app.src | 2 +- apps/emqx_s3/src/emqx_s3.app.src | 2 +- apps/emqx_utils/src/emqx_utils.app.src | 2 +- 29 files changed, 29 insertions(+), 29 deletions(-) diff --git a/apps/emqx/src/emqx.app.src b/apps/emqx/src/emqx.app.src index f8a02db2e..0545f36a5 100644 --- a/apps/emqx/src/emqx.app.src +++ b/apps/emqx/src/emqx.app.src @@ -2,7 +2,7 @@ {application, emqx, [ {id, 
"emqx"}, {description, "EMQX Core"}, - {vsn, "5.1.13"}, + {vsn, "5.1.14"}, {modules, []}, {registered, []}, {applications, [ diff --git a/apps/emqx_auth/src/emqx_auth.app.src b/apps/emqx_auth/src/emqx_auth.app.src index cfd2aa447..3d9109fd1 100644 --- a/apps/emqx_auth/src/emqx_auth.app.src +++ b/apps/emqx_auth/src/emqx_auth.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_auth, [ {description, "EMQX Authentication and authorization"}, - {vsn, "0.1.27"}, + {vsn, "0.1.28"}, {modules, []}, {registered, [emqx_auth_sup]}, {applications, [ diff --git a/apps/emqx_auth_http/src/emqx_auth_http.app.src b/apps/emqx_auth_http/src/emqx_auth_http.app.src index b5de90ad9..183b9a993 100644 --- a/apps/emqx_auth_http/src/emqx_auth_http.app.src +++ b/apps/emqx_auth_http/src/emqx_auth_http.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_auth_http, [ {description, "EMQX External HTTP API Authentication and Authorization"}, - {vsn, "0.1.0"}, + {vsn, "0.1.1"}, {registered, []}, {mod, {emqx_auth_http_app, []}}, {applications, [ diff --git a/apps/emqx_auth_jwt/src/emqx_auth_jwt.app.src b/apps/emqx_auth_jwt/src/emqx_auth_jwt.app.src index 4679e43bb..b4b5ccf02 100644 --- a/apps/emqx_auth_jwt/src/emqx_auth_jwt.app.src +++ b/apps/emqx_auth_jwt/src/emqx_auth_jwt.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_auth_jwt, [ {description, "EMQX JWT Authentication and Authorization"}, - {vsn, "0.1.0"}, + {vsn, "0.1.1"}, {registered, []}, {mod, {emqx_auth_jwt_app, []}}, {applications, [ diff --git a/apps/emqx_auth_ldap/src/emqx_auth_ldap.app.src b/apps/emqx_auth_ldap/src/emqx_auth_ldap.app.src index 383c4822c..3d4d5f467 100644 --- a/apps/emqx_auth_ldap/src/emqx_auth_ldap.app.src +++ b/apps/emqx_auth_ldap/src/emqx_auth_ldap.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_auth_ldap, [ {description, "EMQX LDAP Authentication and Authorization"}, - {vsn, "0.1.0"}, + {vsn, "0.1.1"}, {registered, []}, {mod, {emqx_auth_ldap_app, 
[]}}, {applications, [ diff --git a/apps/emqx_auth_mnesia/src/emqx_auth_mnesia.app.src b/apps/emqx_auth_mnesia/src/emqx_auth_mnesia.app.src index 988d300fb..5cc2c2a31 100644 --- a/apps/emqx_auth_mnesia/src/emqx_auth_mnesia.app.src +++ b/apps/emqx_auth_mnesia/src/emqx_auth_mnesia.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_auth_mnesia, [ {description, "EMQX Buitl-in Database Authentication and Authorization"}, - {vsn, "0.1.0"}, + {vsn, "0.1.1"}, {registered, []}, {mod, {emqx_auth_mnesia_app, []}}, {applications, [ diff --git a/apps/emqx_auth_mongodb/src/emqx_auth_mongodb.app.src b/apps/emqx_auth_mongodb/src/emqx_auth_mongodb.app.src index 38cf0138f..8970329fe 100644 --- a/apps/emqx_auth_mongodb/src/emqx_auth_mongodb.app.src +++ b/apps/emqx_auth_mongodb/src/emqx_auth_mongodb.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_auth_mongodb, [ {description, "EMQX MongoDB Authentication and Authorization"}, - {vsn, "0.1.0"}, + {vsn, "0.1.1"}, {registered, []}, {mod, {emqx_auth_mongodb_app, []}}, {applications, [ diff --git a/apps/emqx_auth_mysql/src/emqx_auth_mysql.app.src b/apps/emqx_auth_mysql/src/emqx_auth_mysql.app.src index 933e8f819..38750b79a 100644 --- a/apps/emqx_auth_mysql/src/emqx_auth_mysql.app.src +++ b/apps/emqx_auth_mysql/src/emqx_auth_mysql.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_auth_mysql, [ {description, "EMQX MySQL Authentication and Authorization"}, - {vsn, "0.1.0"}, + {vsn, "0.1.1"}, {registered, []}, {mod, {emqx_auth_mysql_app, []}}, {applications, [ diff --git a/apps/emqx_auth_postgresql/src/emqx_auth_postgresql.app.src b/apps/emqx_auth_postgresql/src/emqx_auth_postgresql.app.src index 1d23ccac4..bae3da0cb 100644 --- a/apps/emqx_auth_postgresql/src/emqx_auth_postgresql.app.src +++ b/apps/emqx_auth_postgresql/src/emqx_auth_postgresql.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_auth_postgresql, [ {description, "EMQX PostgreSQL Authentication and Authorization"}, 
- {vsn, "0.1.0"}, + {vsn, "0.1.1"}, {registered, []}, {mod, {emqx_auth_postgresql_app, []}}, {applications, [ diff --git a/apps/emqx_auth_redis/src/emqx_auth_redis.app.src b/apps/emqx_auth_redis/src/emqx_auth_redis.app.src index 388fd413c..bd33606d3 100644 --- a/apps/emqx_auth_redis/src/emqx_auth_redis.app.src +++ b/apps/emqx_auth_redis/src/emqx_auth_redis.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_auth_redis, [ {description, "EMQX Redis Authentication and Authorization"}, - {vsn, "0.1.0"}, + {vsn, "0.1.1"}, {registered, []}, {mod, {emqx_auth_redis_app, []}}, {applications, [ diff --git a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.app.src b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.app.src index d0821ea83..d4c16e13c 100644 --- a/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.app.src +++ b/apps/emqx_bridge_gcp_pubsub/src/emqx_bridge_gcp_pubsub.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_gcp_pubsub, [ {description, "EMQX Enterprise GCP Pub/Sub Bridge"}, - {vsn, "0.1.9"}, + {vsn, "0.1.10"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_http/src/emqx_bridge_http.app.src b/apps/emqx_bridge_http/src/emqx_bridge_http.app.src index e5c559bd5..87d7e57a6 100644 --- a/apps/emqx_bridge_http/src/emqx_bridge_http.app.src +++ b/apps/emqx_bridge_http/src/emqx_bridge_http.app.src @@ -1,6 +1,6 @@ {application, emqx_bridge_http, [ {description, "EMQX HTTP Bridge and Connector Application"}, - {vsn, "0.1.4"}, + {vsn, "0.1.5"}, {registered, []}, {applications, [kernel, stdlib, emqx_connector, emqx_resource, ehttpc]}, {env, []}, diff --git a/apps/emqx_connector/src/emqx_connector.app.src b/apps/emqx_connector/src/emqx_connector.app.src index 6b462986b..cc78829e7 100644 --- a/apps/emqx_connector/src/emqx_connector.app.src +++ b/apps/emqx_connector/src/emqx_connector.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_connector, [ {description, "EMQX Data Integration Connectors"}, - 
{vsn, "0.1.33"}, + {vsn, "0.1.34"}, {registered, []}, {mod, {emqx_connector_app, []}}, {applications, [ diff --git a/apps/emqx_dashboard/src/emqx_dashboard.app.src b/apps/emqx_dashboard/src/emqx_dashboard.app.src index 2e3eb1d32..97691c6cd 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard.app.src +++ b/apps/emqx_dashboard/src/emqx_dashboard.app.src @@ -2,7 +2,7 @@ {application, emqx_dashboard, [ {description, "EMQX Web Dashboard"}, % strict semver, bump manually! - {vsn, "5.0.29"}, + {vsn, "5.0.30"}, {modules, []}, {registered, [emqx_dashboard_sup]}, {applications, [ diff --git a/apps/emqx_durable_storage/src/emqx_durable_storage.app.src b/apps/emqx_durable_storage/src/emqx_durable_storage.app.src index f106494c8..2bce4ff8e 100644 --- a/apps/emqx_durable_storage/src/emqx_durable_storage.app.src +++ b/apps/emqx_durable_storage/src/emqx_durable_storage.app.src @@ -2,7 +2,7 @@ {application, emqx_durable_storage, [ {description, "Message persistence and subscription replays for EMQX"}, % strict semver, bump manually! 
- {vsn, "0.1.6"}, + {vsn, "0.1.7"}, {modules, []}, {registered, []}, {applications, [kernel, stdlib, rocksdb, gproc, mria, emqx_utils]}, diff --git a/apps/emqx_enterprise/src/emqx_enterprise.app.src b/apps/emqx_enterprise/src/emqx_enterprise.app.src index 1a5359db6..06bc500f4 100644 --- a/apps/emqx_enterprise/src/emqx_enterprise.app.src +++ b/apps/emqx_enterprise/src/emqx_enterprise.app.src @@ -1,6 +1,6 @@ {application, emqx_enterprise, [ {description, "EMQX Enterprise Edition"}, - {vsn, "0.1.4"}, + {vsn, "0.1.5"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_ft/src/emqx_ft.app.src b/apps/emqx_ft/src/emqx_ft.app.src index 2dd33479c..cb86c1450 100644 --- a/apps/emqx_ft/src/emqx_ft.app.src +++ b/apps/emqx_ft/src/emqx_ft.app.src @@ -1,6 +1,6 @@ {application, emqx_ft, [ {description, "EMQX file transfer over MQTT"}, - {vsn, "0.1.8"}, + {vsn, "0.1.9"}, {registered, []}, {mod, {emqx_ft_app, []}}, {applications, [ diff --git a/apps/emqx_gateway/src/emqx_gateway.app.src b/apps/emqx_gateway/src/emqx_gateway.app.src index 8dcbe500c..df681b00f 100644 --- a/apps/emqx_gateway/src/emqx_gateway.app.src +++ b/apps/emqx_gateway/src/emqx_gateway.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_gateway, [ {description, "The Gateway management application"}, - {vsn, "0.1.26"}, + {vsn, "0.1.27"}, {registered, []}, {mod, {emqx_gateway_app, []}}, {applications, [kernel, stdlib, emqx, emqx_auth, emqx_ctl]}, diff --git a/apps/emqx_gateway_coap/src/emqx_gateway_coap.app.src b/apps/emqx_gateway_coap/src/emqx_gateway_coap.app.src index 755532723..30c176139 100644 --- a/apps/emqx_gateway_coap/src/emqx_gateway_coap.app.src +++ b/apps/emqx_gateway_coap/src/emqx_gateway_coap.app.src @@ -1,6 +1,6 @@ {application, emqx_gateway_coap, [ {description, "CoAP Gateway"}, - {vsn, "0.1.4"}, + {vsn, "0.1.5"}, {registered, []}, {applications, [kernel, stdlib, emqx, emqx_gateway]}, {env, []}, diff --git a/apps/emqx_gateway_stomp/src/emqx_gateway_stomp.app.src 
b/apps/emqx_gateway_stomp/src/emqx_gateway_stomp.app.src index 22dd4efde..dfe49972d 100644 --- a/apps/emqx_gateway_stomp/src/emqx_gateway_stomp.app.src +++ b/apps/emqx_gateway_stomp/src/emqx_gateway_stomp.app.src @@ -1,6 +1,6 @@ {application, emqx_gateway_stomp, [ {description, "Stomp Gateway"}, - {vsn, "0.1.3"}, + {vsn, "0.1.4"}, {registered, []}, {applications, [kernel, stdlib, emqx, emqx_gateway]}, {env, []}, diff --git a/apps/emqx_gcp_device/src/emqx_gcp_device.app.src b/apps/emqx_gcp_device/src/emqx_gcp_device.app.src index 01c722e98..7f1d81f14 100644 --- a/apps/emqx_gcp_device/src/emqx_gcp_device.app.src +++ b/apps/emqx_gcp_device/src/emqx_gcp_device.app.src @@ -1,6 +1,6 @@ {application, emqx_gcp_device, [ {description, "Application simplifying migration from GCP IoT Core"}, - {vsn, "0.1.2"}, + {vsn, "0.1.3"}, {registered, []}, {mod, {emqx_gcp_device_app, []}}, {applications, [ diff --git a/apps/emqx_ldap/src/emqx_ldap.app.src b/apps/emqx_ldap/src/emqx_ldap.app.src index 1db88b924..774f11bd4 100644 --- a/apps/emqx_ldap/src/emqx_ldap.app.src +++ b/apps/emqx_ldap/src/emqx_ldap.app.src @@ -1,6 +1,6 @@ {application, emqx_ldap, [ {description, "EMQX LDAP Connector"}, - {vsn, "0.1.4"}, + {vsn, "0.1.5"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_machine/src/emqx_machine.app.src b/apps/emqx_machine/src/emqx_machine.app.src index 8cf85e936..496afcd64 100644 --- a/apps/emqx_machine/src/emqx_machine.app.src +++ b/apps/emqx_machine/src/emqx_machine.app.src @@ -3,7 +3,7 @@ {id, "emqx_machine"}, {description, "The EMQX Machine"}, % strict semver, bump manually! 
- {vsn, "0.2.15"}, + {vsn, "0.2.16"}, {modules, []}, {registered, []}, {applications, [kernel, stdlib, emqx_ctl]}, diff --git a/apps/emqx_management/src/emqx_management.app.src b/apps/emqx_management/src/emqx_management.app.src index 3c13a1935..efa05ad37 100644 --- a/apps/emqx_management/src/emqx_management.app.src +++ b/apps/emqx_management/src/emqx_management.app.src @@ -2,7 +2,7 @@ {application, emqx_management, [ {description, "EMQX Management API and CLI"}, % strict semver, bump manually! - {vsn, "5.0.32"}, + {vsn, "5.0.33"}, {modules, []}, {registered, [emqx_management_sup]}, {applications, [kernel, stdlib, emqx_plugins, minirest, emqx, emqx_ctl, emqx_bridge_http]}, diff --git a/apps/emqx_mysql/src/emqx_mysql.app.src b/apps/emqx_mysql/src/emqx_mysql.app.src index da24c5071..135f6878e 100644 --- a/apps/emqx_mysql/src/emqx_mysql.app.src +++ b/apps/emqx_mysql/src/emqx_mysql.app.src @@ -1,6 +1,6 @@ {application, emqx_mysql, [ {description, "EMQX MySQL Database Connector"}, - {vsn, "0.1.3"}, + {vsn, "0.1.4"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_redis/src/emqx_redis.app.src b/apps/emqx_redis/src/emqx_redis.app.src index 36f4b0cab..c9513bcf9 100644 --- a/apps/emqx_redis/src/emqx_redis.app.src +++ b/apps/emqx_redis/src/emqx_redis.app.src @@ -1,6 +1,6 @@ {application, emqx_redis, [ {description, "EMQX Redis Database Connector"}, - {vsn, "0.1.2"}, + {vsn, "0.1.3"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_rule_engine/src/emqx_rule_engine.app.src b/apps/emqx_rule_engine/src/emqx_rule_engine.app.src index cad752886..7feacee77 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_engine.app.src +++ b/apps/emqx_rule_engine/src/emqx_rule_engine.app.src @@ -2,7 +2,7 @@ {application, emqx_rule_engine, [ {description, "EMQX Rule Engine"}, % strict semver, bump manually! 
- {vsn, "5.0.28"}, + {vsn, "5.0.29"}, {modules, []}, {registered, [emqx_rule_engine_sup, emqx_rule_engine]}, {applications, [ diff --git a/apps/emqx_s3/src/emqx_s3.app.src b/apps/emqx_s3/src/emqx_s3.app.src index ba94f66e1..bd17dc6c4 100644 --- a/apps/emqx_s3/src/emqx_s3.app.src +++ b/apps/emqx_s3/src/emqx_s3.app.src @@ -1,6 +1,6 @@ {application, emqx_s3, [ {description, "EMQX S3"}, - {vsn, "5.0.10"}, + {vsn, "5.0.11"}, {modules, []}, {registered, [emqx_s3_sup]}, {applications, [ diff --git a/apps/emqx_utils/src/emqx_utils.app.src b/apps/emqx_utils/src/emqx_utils.app.src index 05e2d0162..a86a8d841 100644 --- a/apps/emqx_utils/src/emqx_utils.app.src +++ b/apps/emqx_utils/src/emqx_utils.app.src @@ -2,7 +2,7 @@ {application, emqx_utils, [ {description, "Miscellaneous utilities for EMQX apps"}, % strict semver, bump manually! - {vsn, "5.0.10"}, + {vsn, "5.0.11"}, {modules, [ emqx_utils, emqx_utils_api, From 124c0e2dba25aee43c086fb8f9bd028a46e377de Mon Sep 17 00:00:00 2001 From: ieQu1 <99872536+ieQu1@users.noreply.github.com> Date: Tue, 14 Nov 2023 15:28:13 +0100 Subject: [PATCH 155/155] fix(ds): Respect receive_maximum from the connection info --- .../emqx_persistent_message_ds_replayer.erl | 13 ++++- apps/emqx/src/emqx_persistent_session_ds.erl | 57 +++++++++++++------ 2 files changed, 52 insertions(+), 18 deletions(-) diff --git a/apps/emqx/src/emqx_persistent_message_ds_replayer.erl b/apps/emqx/src/emqx_persistent_message_ds_replayer.erl index 98bb069b0..69b6675d8 100644 --- a/apps/emqx/src/emqx_persistent_message_ds_replayer.erl +++ b/apps/emqx/src/emqx_persistent_message_ds_replayer.erl @@ -19,7 +19,7 @@ -module(emqx_persistent_message_ds_replayer). %% API: --export([new/0, next_packet_id/1, replay/2, commit_offset/3, poll/3]). +-export([new/0, next_packet_id/1, replay/2, commit_offset/3, poll/3, n_inflight/1]). %% internal exports: -export([]). @@ -79,6 +79,17 @@ next_packet_id(Inflight0 = #inflight{next_seqno = LastSeqNo}) -> {PacketId, Inflight} end. 
+-spec n_inflight(inflight()) -> non_neg_integer(). +n_inflight(#inflight{next_seqno = NextSeqNo, acked_seqno = AckedSeqno}) -> + %% NOTE: this function assumes that gaps in the sequence ID occur + %% _only_ when the packet ID wraps: + case AckedSeqno >= ((NextSeqNo bsr 16) bsl 16) of + true -> + NextSeqNo - AckedSeqno; + false -> + NextSeqNo - AckedSeqno - 1 + end. + -spec replay(emqx_persistent_session_ds:id(), inflight()) -> emqx_session:replies(). replay(_SessionId, _Inflight = #inflight{offset_ranges = _Ranges}) -> diff --git a/apps/emqx/src/emqx_persistent_session_ds.erl b/apps/emqx/src/emqx_persistent_session_ds.erl index bc60a1277..6c0fc2dcc 100644 --- a/apps/emqx/src/emqx_persistent_session_ds.erl +++ b/apps/emqx/src/emqx_persistent_session_ds.erl @@ -101,6 +101,8 @@ iterators := #{topic() => subscription()}, %% Inflight messages inflight := emqx_persistent_message_ds_replayer:inflight(), + %% Receive maximum + receive_maximum := pos_integer(), %% props := map() }. @@ -111,20 +113,28 @@ -type conninfo() :: emqx_session:conninfo(). -type replies() :: emqx_session:replies(). +-define(STATS_KEYS, [ + subscriptions_cnt, + subscriptions_max, + inflight_cnt, + inflight_max, + next_pkt_id +]). + -export_type([id/0]). %% -spec create(clientinfo(), conninfo(), emqx_session:conf()) -> session(). -create(#{clientid := ClientID}, _ConnInfo, Conf) -> +create(#{clientid := ClientID}, ConnInfo, Conf) -> % TODO: expiration ensure_timers(), - ensure_session(ClientID, Conf). + ensure_session(ClientID, ConnInfo, Conf). -spec open(clientinfo(), conninfo()) -> {_IsPresent :: true, session(), []} | false. 
-open(#{clientid := ClientID}, _ConnInfo) -> +open(#{clientid := ClientID} = _ClientInfo, ConnInfo) -> %% NOTE %% The fact that we need to concern about discarding all live channels here %% is essentially a consequence of the in-memory session design, where we @@ -133,16 +143,19 @@ open(#{clientid := ClientID}, _ConnInfo) -> %% space, and move this call back into `emqx_cm` where it belongs. ok = emqx_cm:discard_session(ClientID), case open_session(ClientID) of - Session = #{} -> + Session0 = #{} -> ensure_timers(), + ReceiveMaximum = receive_maximum(ConnInfo), + Session = Session0#{receive_maximum => ReceiveMaximum}, {true, Session, []}; false -> false end. -ensure_session(ClientID, Conf) -> +ensure_session(ClientID, ConnInfo, Conf) -> {ok, Session, #{}} = session_ensure_new(ClientID, Conf), - Session#{iterators => #{}}. + ReceiveMaximum = receive_maximum(ConnInfo), + Session#{iterators => #{}, receive_maximum => ReceiveMaximum}. open_session(ClientID) -> case session_open(ClientID) of @@ -192,10 +205,10 @@ info(upgrade_qos, #{props := Conf}) -> maps:get(upgrade_qos, Conf); % info(inflight, #sessmem{inflight = Inflight}) -> % Inflight; -% info(inflight_cnt, #sessmem{inflight = Inflight}) -> -% emqx_inflight:size(Inflight); -% info(inflight_max, #sessmem{inflight = Inflight}) -> -% emqx_inflight:max_size(Inflight); +info(inflight_cnt, #{inflight := Inflight}) -> + emqx_persistent_message_ds_replayer:n_inflight(Inflight); +info(inflight_max, #{receive_maximum := ReceiveMaximum}) -> + ReceiveMaximum; info(retry_interval, #{props := Conf}) -> maps:get(retry_interval, Conf); % info(mqueue, #sessmem{mqueue = MQueue}) -> @@ -206,8 +219,9 @@ info(retry_interval, #{props := Conf}) -> % emqx_mqueue:max_len(MQueue); % info(mqueue_dropped, #sessmem{mqueue = MQueue}) -> % emqx_mqueue:dropped(MQueue); -info(next_pkt_id, #{}) -> - _PacketId = 'TODO'; +info(next_pkt_id, #{inflight := Inflight}) -> + {PacketId, _} = emqx_persistent_message_ds_replayer:next_packet_id(Inflight), + 
PacketId; % info(awaiting_rel, #sessmem{awaiting_rel = AwaitingRel}) -> % AwaitingRel; % info(awaiting_rel_cnt, #sessmem{awaiting_rel = AwaitingRel}) -> @@ -219,8 +233,7 @@ info(await_rel_timeout, #{props := Conf}) -> -spec stats(session()) -> emqx_types:stats(). stats(Session) -> - % TODO: stub - info([], Session). + info(?STATS_KEYS, Session). %%-------------------------------------------------------------------- %% Client -> Broker: SUBSCRIBE / UNSUBSCRIBE @@ -345,9 +358,12 @@ deliver(_ClientInfo, _Delivers, Session) -> -spec handle_timeout(clientinfo(), _Timeout, session()) -> {ok, replies(), session()} | {ok, replies(), timeout(), session()}. -handle_timeout(_ClientInfo, pull, Session = #{id := Id, inflight := Inflight0}) -> - WindowSize = 1000, - {Publishes, Inflight} = emqx_persistent_message_ds_replayer:poll(Id, Inflight0, WindowSize), +handle_timeout( + _ClientInfo, + pull, + Session = #{id := Id, inflight := Inflight0, receive_maximum := ReceiveMaximum} +) -> + {Publishes, Inflight} = emqx_persistent_message_ds_replayer:poll(Id, Inflight0, ReceiveMaximum), %% TODO: make these values configurable: Timeout = case Publishes of @@ -781,6 +797,13 @@ ensure_timer(Type, Timeout) -> _ = emqx_utils:start_timer(Timeout, {emqx_session, Type}), ok. +-spec receive_maximum(conninfo()) -> pos_integer(). +receive_maximum(ConnInfo) -> + %% Note: the default value should be always set by the channel + %% with respect to the zone configuration, but the type spec + %% indicates that it's optional. + maps:get(receive_maximum, ConnInfo, 65_535). + -ifdef(TEST). list_all_sessions() -> DSSessionIds = mnesia:dirty_all_keys(?SESSION_TAB),

$*LYGw?t9b)0J&0U zIGE*+O@DN?Ct+B5`R02CV;Kj`>~jj1*E}wqKkq>Okt@hDP@CFP?k(C&8QrRj5e*OU z5U`7dljm4R44Y-KwNXvX-=S#rmr_~ZyGe%*y@E+ zm%tPKEXp7{H$QP{s=U60|8*4w9|S7JdSk=U$vwIu$gm7*C+jnPQ6~&)=K>pi8T#ixS_$T2+2uQ#u@ef>{*|QYZ2>#`?4eb z-L+~fYv*tu87qG2m8aKZUIwl$16J(sNS)8ZC0#kS9TXG%%OISU} z5n_s17Y_vzxvT6WORL2nl#)XI3Y&Lm+D3z}h8vbfiBcfOA`3ZgLL&3nSS^MH-!CmKh&4od z5Lx_sx$dIT^k{Q-56*u;Z#jh`BJWuTul8Dgz1%@gw6v4!F|{612M$x6nGkSa`M5e5Du4+P3JK0 zL6aDvpo|Pe?~&*3siyWppxOo(B3najoCx2UqY)op_3)u5*oc;6>2VcDZ*h?;0t`-? zs&fMvPHDXqdCxDbKXN2MzxW38VbAvg$|P_uFk7m?(*NY-U%e6R!A5bt>== zOa4fw*_WmoD@It*JIho-gAD0j7HV6IbuJ^JNro#3@Y5Iltw}ws5c;n)q&^v${f6EN zNlDK;NlJ_SyLId!HcJ_OyPNm!VdDbd#(S2E3ec(hP$!A5JsmLY951AYd&}71)tR)( z8XSgKF-gx2HlgYPT+cT@xR?BOJk$T*$V zsQe<9lEm_I7$G-sF8rWi(sD{D?fl)y^RDX1e;zDAClLm~jzx!wRasz2fz*{%<>mFM zcR9@(AX^Y8L<2U{DAjXm-Op4puBN7@1|lu4Xyu2uqu~3-@7lGJp-H}Vkm7`X8-ybf zT_&1;RwhP4vk=u?^tRwoK-G?BE&~mJ%a*C|aQ^+W5FVlO3@HdV8W=w&C zswCH`Q`46(Y7oFy6DEaUxRS3*JnBT1O_03PSRe*Gbx{g8ADa|Vnn z9CR7@nx@xL>%Y85p^N*Efhn8vn`OVh@wbdfpTWX9&J)(Mjsz2Mlp#2QFx)<1GUJ$J zlgBBrl<%$e6~c%e55-J1wQFjbsECVl?~eS%ONT|U5|^6rO?QPq0Pf> z&OaI^S}OPAK`Xd8mTmqi7buA?6m5hDb4|-Sd9sPG1Zxpbv zqN${-v}BagqEJcP&-eBH{qFnmxclRJTniuX_v>|z<2;VzBr4NyGtq{(!$K>;iJP~t zy8~Azo*sKsSYYYrT6^Q>G{vUf?J5#6!cv;>s4k=_cwvy`L;iJTHuun>mzp7gs)50wssKq)f$y z>Yb%kDD^-Ni19+FP*_OVY>4eG(p^gvD}3qLCD`NAFrWn=d%L9!tyzOn4LRy7Whrb6 zOAZmcD1dkoF=ut&TqzRuWHIDHf*r4c0eVzXHBT1NBtzK4cc-OiEr~Cj_J6qmS>GTc zxrbsw$yeukRhhngrWlkxeat~)-zHvZGx}4A$tVj1cUAP#JZCuWF+m_$yrwRkU3jX* z68`fsvZHh%8RhQLvH-W6Ehsm6U5#DU&!ai*M|EH?W>`2sQ36!&~y_#S0f) zP@DPsV*0E5xkW0^g?{JzuJDaHnmPlYfVd3G%Ki_OZCdE&;*WmZR z4Ly+9727|BgblI#AF@>XEwQ(l*kLq=S_dD8C#9tu*rf_t|LDL|>uC+Z)%TXEQ_bJP zCO6dp=!JFl3+BuLh5?>BWt(`SrF~Lh*I!|20`Gw8=ThmkGy3=T(ABXK^v#>K@JVr( zq99Y8J`EN&&lemZG9tpE!-E41i;V-An;U3lRbpbGHe}c^G^>BG;hcM8*d{_ImG;V& zLykOxOd}&LJ;ysPE6O*+@Nsgf5vPM?SDVuTl{lLXc8Xa)xB&$HrVV5?OnRgf#07MC?Ifu(a3Dc@sH_BVGP}hmVxN`vh^&h6br@_z7>*;>q2%^~=tnkX z(u9Qn^tDk1d^pe9f6%WM1QE%%AUeJ0#`Y8< 
zy3J4bv!_qn*x6CEPo6Ys$Si58fvk&pMzjqtcN8`Qqt=A5-`q4jMjxQ^7=OowBStDL)z?3UXxPpW&-Fw>jtbU^-zn~JZYwM z_zx|Ez82n7d%Qh0UQsVq+bwy=s*F;(lLw}aSa-5Tkuc@8apPfh@KM`zX54wYloe6OOthJ0Wo2@*9Ii0X*^@y75R@fKU*2j+Qlv)@;1PKKnh$t7 z*TyC)s%L(FzChkxdvBR820rY^J!N;S@xzB1_kU*^kD86&50?xQ45b@Y)4s1O<}F&( zQLG*afA3CdseOaiKbJ2z{qBG!409tQLZUjfZufDqNQdDaC>ij!fy_6nDCy&e4=df= zpvKVBKg3))*<+$&KBXcZ(w@!f6fdzfRFf)kM#Rt()lBsnqg3N?hu=S?L!tsRZa_ARh>kX3S0N9z%6-!A>F@f{$e)>e z4h(_Fj62T?L9MW(gVSd@SrMQtju@9cfgpb=G<&q89iZ~w`nbR-9$`$By zE6Zpp7ZLW;*8oLa=EzJsdn;e}Y)(!3+dj`o0)|?_`tMO zP2Vf;e=JzQ^Iz_%cePn?cE6-V>2d|y#j&yy`*PgPOg{vk9!gnzeEU~ntr-28u;7fq zMR8AfrdRi$+6_y|al?nZ(;jj3_;s0ARZ!HkKfe6x_nxi7zk>1z1Vrf@-G8)Whx3vf zoA14T{Res)J&jYlKBybjaMz#8E*Rf1`luS`sP6CNpZTVn(4rU0SZ`4ox|h8g35O5g z;FGXHt*lOPmO*NFLhU!?#uckpwGpJFi>iUFVM@HDQg3!AL2ZQ+Ra@&O6TS&YQGqA? zF?nh#a45OA@0B69%*cD;Uvb)usJy6BLdpDk$Kuz`k-p z(mnj|5nWePpxt+()`xKH$n5R-rosGP**`i>Kkv=OmC@DZm3(sj@)y0C*_jSF{DusS zRtzu!UR!79pAC(Ham!zZO!JRg7xqQiZuIZJ0T@cJ}q>_foSXdOy)PAkUUAR`sIfn7xB@(bU4sx)lVje0ghAw48)UuZ_j z>I`GcP@DFN4k|_SOor|yn8T_o0Bcn;EPCOYkrmSTKK!SBYNtX^&)@nM^KN9Mr~gDw zO=bZ39KG-=-}UbE=UXVt2*)VCg5&{sD2(O3zZ@#9!z?A*(`8$TtJ0B2Y;*d>1txHf z=pSwUc4$&*aX($hnQHOFfBvk@UeU|rYBwK5#~gS{-e%uhNN%p*yt#VSDj9|Sjp-&r6T`Y# z*&lPvNA=G_X;E3(g)7cly+731;Mc@wjm(NCKfvSJP`x8x_@z63d1t#AI?QZ!Uc7i6gz5877tWh! z+1|jB2y=ajl}T9TGiIh3^*}2n>H+I&7%gK6#F;ep?D~518|!}FSGN893BJLfpDiuP z+sSbG%d-J!!w0;igu~dzJf@ZaRH(Za*flDjl8pf|qaEr%7!O4~k!S={mabbjI-nF3 z@c6cm%FnYwJJ}|*TuE5DTV>RzQO?n0>-)TJQiXVwSVw7(86%U6-YNjO->-jY?f%2M z)02~zS*;5TuYPTE45KOGQq*5+Vt2hwTG_Cl|DZDdR$mXj@Dh2a`iRpyBj2Vx#I>7O zuUT`31{`W@WTlpTBp4frJMDVpXdFAJF?6M%kDB~Uo_dv>o3deu{5>icUqiL2x#nXI zs(JE|Z)B|_p;j~N%rytKWImGhM%gW>nFJL{iH=u<&N#6J>tRNa<`b6!k1bLxf z6D+PD9%7=V))UvmPHbKx0v9e_%CKp}%9YP6D~(hCmuLJbUT_X@X!m^I)Bl)U|7eW9 zY;WOekJAMdT`lA?Er+6F6DX{4dGdL(3Ijr%kemm#C1-TlKW7v^ZO5+*Om57>KjCh2TS`7`Wn4m2vdd3-NL4L0>^Yiqmtj!sDVc!o${wn}S{ zk<2~!QZGOrf-(Ff)``MGeNDCx?R4N!Q>sv$6aLUhS-h24gLKEest!Sib4P>K2a$8! 
z1Lw~E`&EPd<_=9i`d_$C_bPYl5j8q9b(6kg*U&c-IU~c|tnAID0cM2w>)s~~h#IoP z4K1+{VFPeX%pKje_->LgPZ=Cdnf-VE+|Nc1zlEg%$)n|>R;VtY5M`!ozQx!qZ9YpC zz$`N}J=h?4BOMd{*}H#yw{UlFboU z6%{QfHdG5F!pJyV3Y`}CgYZ{}%shboqp|R$Eq18c+EvwWox- z`%4SeG8O)g>xwK-Dli!x?s_>rng~FEI1(Q(=o86oYu1c>E7z^53!2AmRF&uspFP7Y z^5jYPRa)BG<{lfG%4UugG|;TQ($mA}9`gY{imz{9_9>94lm`xs%3UrW>4q#ZX~%cT zcc+kgutyfKbQ|^w`|KoHBuouT5Z`fXp3Z)ZM^N+Z`FJs`MPWA$1{B1hd$AO-YkxXm z>5gDm;gW|}hFrZH85=vuv5CTIJDwN&JGVT~*P$FRv|cUBZ#)a?i|(dJ0M-wAAlDI5X8W>JeJJL$<4!%mvspPz*0b_dY%#p0)RA zbs}ROy;} zJYEW6LpA&C`@FeVnF7ZRONvXKUD z_f4{I-(p?c?5(r57=}!fDXN=R?>1!r3+35UGiS5S8bjxkC&5(4xv#jsD1e3T=pdwD zeMXNw%;>Lc@^|Aon>esweNXB4eSAvq-}jRbtcKtQ=EnT%7ecJ0J2l1{eBwOa&sdcG zGUET|5R>{}uBA$2)|yc}!=w4p{}*H0Oy--Z-1o zhaJq4U)o#ok=lgqs(Z2%s+c{~qO*G%1cG@&?5`T~GP?<(cz&(8RM$MR@9`Xzrk;Cjj%|@JwSAvMRAL;Gl zFncu@O88ATZJKFim27Z8L#5@?*_YcfkU9BQ-TL0W6>;88tIETdZS4J5YX7R?vny_F z8QE+%QNQ}f@A}sX1D9z!FFC``Crb8&Q=J>$Tnj#!>+pM%bHS2arC%#?L*)P+0 zSMUOv!k1P9(3(+}!W0CXvUu@OQRxgq$WNa?$E?z`c>Ai^WKw_JM3HBz-Sr&H1AaEN^~9p{6S(ux5;aUwP%|^wk`j;^*&RKCorStqa^v3deqdH`cts zbSMDCsAU~B?ZF3MU`Ah^f7x9w%(cE=H`UnkL*-;cg{~A&nMaw7p_x(37l+jx*i(Ik z0x7m=`DpB_S{`>|Hu*3=Kjq}f6&caW942l8Q}C8?DlOftJAc*mB?njcVfVbFR3X4i z#3RhJ@q@{hU7Has2hc`}qi7!4=}z4NtlFz_5U{jKdqXev9jIJAKDZ>5y?he1jr0_9 z)%ouzXG^Oz%Cx-SeeR8!G_r6r0E8%C(F&$00v(bIEi;Ai@~yN+Sd7}e$FPo$)byEyW! 
z`;V7Y&t&oV_l47D0hxPk+O!v;-|2Ti&>@}obvKF{i$6^7`ZsxssP(Lrvg1$_QyYm) zp&Sspx6{w2L}k9Rx;;{@IAh7I=?rpQ5wLyy*!C;h_PzBl>@Q$KV~4SCef61-89NT_ zAG6~`ACH7-rwi3;b|03`Jd^&iG-EyGh@MoTjGCd^H53hSW{f% z!ou#d4aeD7W`FbNWLKwc?ar^HN?isfLiXYPQI;o+R)G%*!lQ|~1&9XoM)$(wz05LN zG!%Xe-xIuNw`=W$)Vc_VhK(K#2du*qP8fF@C$u{$Wqhbn>fW%N!7co_Bai0ZsFWAk zKLT#NzGyx8I5XL|*;3)lV`pH$UKan{HLBKjgH_um@8S6g=rD7 zrl91cY(db@Znp{Lm2{Fal9C*mTp;}#!}kIN1Fq9=X&wgX|D#SUp8#fn*+|b5g%UAV zI{O9FT}w;B!=!J6Wkp2=T_%`xTx2B7Q-+vl@u=4m@(CM5p$Gu&@w$ZlE5cZM=w9i8 zbC?+N*r;gZ;?yo@N39;`9$J0`$IUXMwR$Rr7g%B2a;|#P91NE$W6D>QeV}(oKkYt1U3`uL(8ElR?HCNe04Kmm z6>`RyQt_RiUKLjgYyJ6?QRCf`5{Z@FGo+^u5+AnThP)2@%zDr-noMjN6#n$8MSr0b z2FXJ9n|UQufn~!>4~=5fYh)yVl*F;}nw0PQQj7ON57u!Uo4`nOX0;Pii&Fxe2e!!C4oNMR6xJRSn+7@7D#U*bStVmb zuXWNj+$~tN5N;xaX*q`iy@-M@SFxyn*`0S7xo;rIim3+@?L}Kh*22 zIQf)zV5wijkfD38dp_4`IoH@$_@G;u&my(@4VjO-WvpI#TKcf$@S&mQ&y;3x!HcbR zhM)4(YCW%1hY7^7#6(bLhGj*|;d}-K{$@mip+hGh(EsOKANDPO^$J^?Q8-3?v?AVN zQth+T^zUj+QgNJTpPiIa7PGH4!7+y{EEC$I$(x0~#rT7&CCNB2K-dG%SZ(p*V`UfS z#IMc_KUdx0v|)nt3J*6xB;<3kv~U1YvazEq?g+kzz^2SRJt$!^J}z_KestS~;Nfi= zg(WAQwKdEhtyW0VGEIAsc6*`PlBmvK0>mEd_rSY|OaSn9Wby{wOk3#{8)SOLI1%Us zy9m_PXlaK4mfk4jUnwh-r&HerF!=@^VBp64HuHN42nV)$;=}=7Elx~bd=B{CgFN#f zYNw^`+BY9=&in-nD7c;O&D=k27h_c(!jZS&Gql}Ha zD9bn$tT|%%;v6soGW(!My_n2@2UQ1dNs)dho3^)5s^38H;thr{zHSZc6=|+qhmXVJ8gt?_J<|dl6F4|(jpZoBfOrh3;rF! 
zu&WqsZ^o9*U4brst9vxeii#LHe0jGbCAkaQo-x@Lk$2ltJ))LJ_B;_e?rE{OlbYNt zO}+AAvqNm0vO{`X?H_Y{R8gAG%S!bhn{s|u++FN)E${eG^;=sHR%|bQdwcsw0VI?g zmgBKxMtuJ=525K=vBC*9*TRKo($mA2B42`kI$eU{a>N9to-+~iT=QME^c5>{wr$^j zS_4B{%)S`qc2&=iN z%D+2)Zwi%NV*5->P>a(cFjvMexNy}as{WU2w^~_Awj9Llo*))@I-3rW`H?>d@S{T@J#%Tm%-eEX+>-I?53Pp4czmhJ9j=4i$N)P z!lOovV2b)u;91|^o;FqH?~=^ov{9TWW#ym|)2=h>QkYj+BMw`jo!lS$TY2QD~| zZg?>Bp875~zjJDtB`F27ot+KYRmq$R6m09Kf6wKpHxW-@j7qc0_h&bm002-z4&DiJ zros*nK1#oV19dj)-1s&yC?NOENxR)=yQMm=l$T8xmnhijpyWNir?r-%-!Prflo8L} zw#<`wcJi@ufReKE40CfY{i8yU%y@XkE7kX1&+rDJAO_!*D7_#fS>N}juc7b*LS%8S z72IIr8dj{erCP%u0ZGB>?=V z=<72JqylZ{3GxdWc)ib~bAKBB%v$%z;}1W-YMFlj`RB#|wMvtWyBgAy*j&4PP_f$DmT@v+DGi zG`Q_olPel!5RC?PJ(%>ciRieXoKPCmcNK0s$|Og~^IQy%hTnP7Y*UIuMM=jkbPv@$hKC z5-n=)cJbtZRAQ`VJdvo4j zbc3uPxb`qzcl>YDUW#BFsP$n3gSA*QCv@-`aQkCmbuLe|w4_AVr=OH0E&RdVyG<02 zmF;|?zIVLZZT;Q!3CxG5Nv|U4nd>~2opPYX(IXRhuOMShIqvOsO zpPpP7Pnb8o*!1wDX>>`9+X5L9)$RuqRn8iu;G;qCvbMGD07xf5-Knrmmo^;^ADQ1v zN9S~Q_H4KAiEny_?3UBAfWq@3x9e|~ZQNGo{emG5n|+5`t~fwoAm8l3bEAAf^CSD! 
zijz#SNnvw<#eWm4;L|RU$bWBiNPBcPkK!_JR_Cq?A%3*V$K*ur5firt%4+i8!s?0;BWn(r_2Qy zw}iFS&gJ}o@q9hRi2zh&hL~#eAh~kn)6ba=hs-NK!*FP{4|#RU=Wk}oyG1&L8OhRv z5hsfeINqBnsXimV#m=br;H2J`e0!lHkBnrYjlZGumMy~R!EbXyYT-%Yd^R;f9W;@k z@D;YjFn$J((p335ySvr%u`{1Y8hMH`ofF0HjOZ3)HGO!HzH&&)*#Ck|tPo{3&WlL3 zJT_qz>ljEIUx+b9mo@Ubnz>attuMO0qeSD&g+6d*Jl)*jF=OKt+i$FR{aYo=Ys$0c z**`07dHI=hf|r7{q$Ca>zZBKt!+o3-(|5Ly&rAKGv21`)UUIc@s`<={C2Dn{+^22I zNGq>TvfEj)c}(sAbvX+eOJRu}MbS-``2I}}7RLb_ZsPQ?1!l)`it}|RE|#{q7-wn} zDVy0MCBG?_C~|4Na_g)UKWFIu^p(8r9$7nQ3_vy1Y_J^3S{t!Ek%Ob-cS;myS2AxW z%?M?!MztR8QkwtGYYsx)=7OG&qPo>$R=s&mgEwZ-50JS99( zhp~F1b1!X_iI*xqGm{H`!H}GJiTIK`9v{n%&Up+RD*t&rW0>jw<1i$=vB?ujyqXb=jDjHby%x?lGv3hu){JU*BC^*(bV)L9MWBEtlae zJPepZgF|VhHP+fb}*!B5YC?ACVlFV@FP85G1K!8{A`gNbR*Ke#YRfGL` zQT=Mkqa)3FN5y_lIldmcox(J^zmN9NJ6Xi5;p6pyh#4TZf#B20YgdxBArZC zdW-Sus@RNsa$bQ8=FJb*mu`mn2Cy4tP;`&}WQich7b zwKnU+RFz1HiIGZlDN1{nESnr39KEpQ$Z2Fepc-&?-$_LbFGT zNZw7#KXdT84B26I^)n|WzERX{m|gee`tFnxDnMh;^5Lh>i8_VRgBg9p(3&^F;hh`i zzuXQLXNrNaFQrX?@QEpR<=ibQpFeAvu=((ip+mu=ks)!!{aNb^r%W)7GIeL(QII5= z+qb*zVRf-Fjms!NZs#}Y*%DF@v~4yzJge?|+TRG>?^DJ@q_mKD`Z-Qt^!$?BI?rA2^*1B{*A^i?qy%Hk z0_8gR(y|^yR6iBEzQYu3=KEHXyjCdVq{|1reteu|WOSQx#`mSPOSf|66&Lm1H}D}_ zNrs!&6AcTB2dutj<+VR~@;l#<_7fMHxfYWrOxWvn6ut>8x8ZEQj7|N{{0?y~UuE@c zsnKz}S6T56Y#k7mx|KV7OVE^hC`Z?<|c*0b35AyR}sq~H1#EUC<@~>}p$9M;jUh+i=*6A=}fi+V5*A~y5gBZVI z(V*wL2|HSzTEg;RnAb$Qvf6SM*balN_tmjVj8hqB^UsLDs^_(|wxZJP5tSONJV97; zL!#ZYX1}mC73BRIBTBscSZ_Y|{LZ*t6OUB+%_XW-5K*aVSh3yI{wb^Gt!Iz(G8+31 z9u!uvQ<7Q@-_G90{oGOO^-q4jUgoutt(h?E6BT4Bx8A^I##S%fPX=Y3kC#^yqg?R= z*W*A3u6duth&1Q#lAImZM#;8cA5v@xl|R)MY=bGA>t$zc_cN3^wq`)D z^XaUa2~84meaKrDN+j|!Ls#*eCtuS23OjYPjaH1B(SqcG*F}GS9vg23gP0;MGSa%T zZ&8&Oy$ospPtI!zt@q;|g3pu)cJs`~uH%fnapU;8b(@nbMm}xW*>I?Qz$G-O%!pI3 zsA{E5K4>$=LD^vIs7FQP3P%6QnAc-k?|VYN*GlS*UczG5EH={R#1|{WeTs^@4-=;8 zqq$iE&joaXoZrvb0YSGc?%eM;Ue`2OU%pWK$<2WI7wra1xQp6P+m4st7og+yWS>mZ-fsZYxf>JV3%~q`;fkrH3Al&uqSKme*xg#0G#}e546g6 z)WBcKCw=cSy<-m!Hfm{>)RnYQz|yT2sED`h^sc#kY?}Y!;u;*@a@1qzx2yH)7cM@} 
ztm}o2W2n`c)#gjI1|57|Q8C5p&BkxqewDfA?FUURrM?#WxRlEOZkf=*eqen&cWm-q?O~ro@#S>#nL3sS8drJ z|1Q&B!(hN;fl^qdyG~+7M`_DdT*~RYSb8%TozcGO{`n6cu;itE-qJYOm$Lo(ounB3 zr_Yz7Zh{*r7jx0o==;DJ+@&5-qgVpLLYt?vXNrYI!j{N6b0%892k;z#An=pp>-Xu* zTea3q%nrg*ELHDhQhQM05E`1>*LolBU$!=k`me3E^)D|J%W2rhc;h2^!wEiP9pe%d zB4;r!6C!{0*|cXC3)+2Er0mx7kF;ALP>Y6I{%I=gOXtu1(%gG;oE0dUw|Dl1%?q!2 zX<(`M$trY&j!tiB>Ct1x5V_Saq9(r5%}zn~-5oc251NwL$jFqfT@o5-&tW^?%O4;5 zgkLV8M)V66_gE>l$3myY)cMIl&+*qsigdu7a1d51+rC`J7@9O2U7Fd^@qYPx zvxpq2o4TN-5f(?oKPp)!|E~sr;$z7X$4^{MZU;4GD!fO_MVP@a_#L&dU$*_TW%O%_H;nw_ z-`wjGprwbj*{O+c>$sYzl*vY-s!EqbUOel+Eq)- zUah0qreH2HNZ(6joYplZ`pw!C*RCCUw2EcjxJ$IXbW!qtltV9*b8z?mMuS1Mbc;-1 zxBL3ie>`YB1Vfek2E>1`YFsZV)hKK@$_IE?6oA4?PCRhoi^8!bk*fyS7Hc0~HS?vp zwa$az6a6+FzCZn_H8f%XizVL{I5rgNWo|y)YG>3I=X%Num=KjB9F5@{2c#?k9yLIS z3`6Mi=e3EdUV|Gxf1WsQT#8I@UF=~cG?;vLIL{pzWS47Ow`lW3QPS1_N@ZPnagC(o zp(n>GrYb5T>pvM7=+o6CWs?dKYaIayuFl}svS z<(7p62`8)gz{I{*yW#|p&0P;P934Wxm&e>d%EbbXkB#m&^ikiGOP4V0dBXDPwXFqp zKfE8QoVeYPbiP<-*W(Y*IwJh?y#^*C7E3&I2nY^}@tnEEx4dh@NdCE;4cn&zrghh_I<2EF$)juH?Mn8Ju@{>QgdP3t}2ZIkhAW_t8ZAn zdfx2W`Ikor9fh)0Smnh6=}d!fQlsyDxuY;}Ag8oSZ6Sqx^^FnQs?^NXS?fX_R;?P^ z8bpQ3`)c{;7z_mcUi)5kM-rnInnPWGYPF8(qXe86y_jGP7RICM7=BQzYYeHPB9E?A%!(xT|+u z#x#f@nwOV+JZ(f_-LhhF}rQOcA$6*Da zQ@rh!wv|%;NCq(7S>lSziwdOA1T{T*Kki5xX{ld*)5p8O!D63&3;1;FV-Ej*ZuIUVKo$1 zoBcLTy+3`Yb-^F$XB(_%e7rZ=e|CI9$^HT;Wz)7a?xSD8VX)rVb3UU%1~j)lsbDxVXkFNz0`=RwH%V z_w0BU=+h%C?r}n!Y0uG2rKtzAdFTMA;RiPDRzGgIjg6hW33u-YDq-S|kG^ByXM0Gu zn+AI}N8B4dqin^5sppTMN=m7SDXoKaqQ>gciNtjlY7c1oFSBYn0nAYJy!Sogzn;&^BMUvdXmWCwNy1n)DiIzYwe+btz;r%`)28XiE3Hy2#HZ|kw4 z4fV)+SEnJhS?m|0vE5w*62m6K*K}ITt^_$BG?}6QJFLr7?jt>lc+AqPYZIOCswgX? 
z1`9L%QeV$f>B+LD09Jcq2P7lB)2L;GjlVa;1qlsfW8?2Opo+Xml2_(`$wLsjoPO4v z*3m9Oc}%Z>fq?w0Cmp8Y;uO}HWUzuHH8s>@p^}C9BTa4`EPfcYTj-8~l9>DrjM`;_ zOc3c@%uh{W*9SQ8fw4=cd6s98FE}1&wXndQ>*!d9sgi?(tfJy`6h9D#`82_|J%*_7 zp6HI|4p3RRZ3%5u^^0%&ZK3ohF0qMg1`a3?6q#>imzqjguX#y4K;LE$!5h{l?qz~} zmBtC~HVR0_D(UGgajfD_f?ZQ|)Q*x!0bT5`pfEU;;ZR>yeO-PQ(mT-Cb<7O-Iyi`M ztOX4LLo#M@W4*6deWEO7?ZPWc3^H}-x|2dQ+CjTPQ|Z1$0*S@O>y()bZ~I;0%nt$+$SXZ|g%erqiUjyVnC2Ky^g< z=X_*&nHzmJ+}K5ElR-doe}6o}aa=l7rCCY|R^>^H*aH8nx^&^lsDHuHU_BZmXDGlf zKpxE1piS}Bx8T@bt6-pr3htKY^V481x%3hoa)5I21_r`F9qJN8NQT^(FJ0;*DVd^%^=bpi3!O#m44{I^ zL;@3+f5X zMG#8fZ&Ubthu>ncvon__3405qO}E35!vWII7(H%~vhr_+0E;@NFsflo6v!j!xt6r{ zPa8)MNqU)=7A`Yvk4+U8To)vXjtL#jyOSGbM+8fhC+Yk>;9UP{PJ;Tbwa-uL*IHH? zl8)hK1`_2VaCOinJ*lt^nMM`B%ViPnLWTppxp>tnPVwnhmUo{V?ClsCrS?H} z`kNQ!_C7rnr%ny;+$FVu2hZRbZ0W1;&gx1zFh@P2MNgRvO&K@N8;(WthW!Xu#TAj+Novo_{fl_ z9lxJk|FV5wbW7_)xMlT=v@AB(o-5WH@AKed?`J8~c0{&}oxW&hLP_Kza|uqCKy)P# z@=N!XlKSlZLQzyd4~!4eBsjrUB*?rHL8NuSaHXCv0z|>yWUKc}6^E|vXegk_N3B8J zz8g49QIU$KmHW#W^4fs#84|OO>Nj%?LF8CSF0i|pp9*qb3MtB%`1bSY0I2{4%E-KI zfe{2BC!r@UW)LA3UGbbleu7+yCXSUG@N3mKZp|DM?67!o>?fz{h+b_EfoX~_*1`W0 z<~>kX1f~&%3wRNFAXdEh8T&YOLkH7cK%sdK4nZ$|{pA1(^W)OMP88sayzdg~aFm^! zw}6=!B;)`-By9xKduIoSh`6{MRA;OyZyGow)WOc~G25a5sCQ-kWMH$qadS)Vv~B`3 z43Lm<3YdcTIzJ(g5Ilq_6Q>N|5$T5|6VOwmmn-$i`E{;b4GumXF#9x16DJfSGp(&H zPX2(}g2lF{axPNvp$Ua@{du|swF~(OSP1-^zb{kUZHUi15C_xW^iV6|cje)bbP&*? 
z2a5brKkl|J7KQYHS!ms_Ufp^pLJl#v#^S}8Xz+Am)v;;Y=IguL=Plw#SX;FXBogm} z?TFss^MS*jJ}vCp$=o1!RazsJbyffE`Ge-GaB;+>zh{y?gguki_r5io4z)ji6eBNn z^`*0ClQ(gCcVkErEG-bzU%Y)gy2F(>yPj3)0V)ZIU(|`FM(b6FSLrF$!f|1a)lON* zhY=P#Q(XR;^w|aGrL1fdgGf9}+gZK7@Sl=tb87xC7hrwHHDSU;LROpnfExB1${Fw< zP74A=^m!M4y02RY{QvmjLnNhnMX{L+=T~yH7@KJJ%^~sy>)zQr%$~{7bpIj-L4$pC zu&Q8UeEs@1$iSO zmJvO#hDHikfA(zeH`lxwM)Z%fwCX&{(kZjIK2DHFctIdsOucx(J)tan9Mt+<5Xn%diOiKy`21BOz ze-tIy-}iVVU86jV9bB~e4BaT4SyRI26Z>sV1r0kRJqON;03-(2M10!smomg}qV+}< zSH*GbUQHbTcW_Lt5UwBB{TMQ`SAu**6i<%)HfGLDotDszShsb|dg9Ly!T6)`Q3|8yv z!rjulY5#nnG_~%;$!@xP<|I1X1Pz`pTb3}cC2gnkiJhm8npNkX3b-*cavbUOQ3;sXsgnrMU+Ga>AZ!f001^C*@#hk&I%|03)09CNG1$Q0W$24 z{rT3aWa%~`MqC&nAJz93TPiWg#~A?vi45|>{{;6mFTFdyx=qU4*Jze!Dd@%_*@2RB zJ3^htXeY&;sxVqI?U7!A)SrX~VwNzpAb*3_2r?JVXX{qZAfZxQwS9eyt?ez>!Z2{T zH)UlPyX=N7q>w)O`$tOGn*D=?snf))1iRXGgWqn9PYStwSF7Rw6w+24zsmuKSNySIgk5Q7pHFME6kB3rKtv*b4&|Hvr2*#L!p93 zP<)k2gZcpn;3P0v2FQ9xg!o=%UdGHBjQMbT6f2S#!1OrG1c*3n%;O~w_US(=x&PqZ zjq(!I=z_i{FPaKd!*$`bq%H=N%me5@`Cbq+p+B`2`~nGtcrrjyQJ|v^oOB;{*P~T4 zb(i=akH)>OivYqKWEKV=f@0 zRVV?V`=&;KPd*qcZLX!Jx-lx}tNds%+?K5AB?_8e1h`~oYD#GiEpr-E7;a>%zU$zT z2*3s00uc?yqJ$W97z0kd>t>>6HFKsxK!?8C9JJh9`DmI0o_7m5z(W^)%GAvgBkxj; zk&yy3YT3MFqHZ`IYL_3q077b@u9^2*?cA)`tHBWi_ja5+SC8Tc(&8;DW!>>TM88u| z3zB1f_f3VNLWlGi{OFC_6p`*s_U;nlt=(3@2-fwH5^WT_@{b>FGyU-ta$o&92_x-F zSq6dj+zx(KoV1}h36xmW-bQ2QcNZhN8Ioa0^`@X;`TNn`L?$13A4@6eNH8>K92p80q+R{AR?ak-I+X$HKvmTp z9A&5l6t}`x78c@tDt&7l8;1MQjLgeg!pVl#0=a7O^^G5xCGC3gNlL^BoYsKjf|Cyj z=@M)LC;`IfE}qYxHO^lkixa@XP=Aw1c~rWSPK&vT&5a=*(`8UfEMS2)G;z*Y?-UD> zQX~D_qTB$%uSjt8u(5#|^af^Ih}JWD3)rFYO5>KZZU(IF2#OT9Ki%@Y(hx{;kpvVy zE^e-q(?UKUOj9N0G_jz|yGtzoqsNs(E$cseLF$<^4=yhjiAs)hjovKeE>ykqq{T2> z`R96Y==$%{r>xpilI}QKV}~T_DtlFb;U5)T}%a0N{(u zi+W=lbvGmuOqQzyde}wW0}~N?B>n}jn1t-oQ(R;gN4zU3DQRkI;@^^eAJci@;4$;S zD3;NzdNpw$4~--DA9(1jrF9ab6z)tfUcXl9?C!2M!TQjb{9rZF7sT%mW2eP)n2vtK2AP;r{!> zf-*7^R;V&Xn7Zq(V=0{fuF|@6QZfJ<%k_Mlf=cVKQ5=Ok3dfj=#CBmXQCYB-im%NQ 
zz;dXQmU=j-E&J77WOj`C`J07ra|3g4-el8=%FnrG?%U6Hv$#>KVhYI(=wC}y69oap z1&cV<$mh9~>lt7k8<(9^S5(|}B>?Fz_a#2QlN7qC_R%fJwCEA;|)$V(vMp!fu@D#pI49LC+!REb@rJ= z>$$FQ}%yHN>XR{eOiy5`$|r+~dEx;b#zEq!Zfz+v*?dUQ$ zWvyGm+U33Yqf+1Q`j5Yl=_JH%3MJS5?rYcb<(egrALlSJi+lyqqh^#U2NinDsdL>$ zCcT-huvNud_4vcHk2oi&VA(w2@kx}N)#Q7N$vk5zGAuI@8rIz5U_a}WDTxm&*a1UixxvVhfhD&4gADrXREzT7ry`Gq zF2&p&0G37S4?Yk|%N{)Fv-&B@yz%P>FJ_X?3jizuQXq{}Kh}c)QYYawzyLU?kC^B# z%rn3hn);~NSUOn`YCFK`=FmI%e}vsPclP%;N-P*Vd^mLIV|wcb_BVw7Fi2{RW;aou z!hiuQ!ut&v0J9RmaU*cVJ$jQRoT7HI5Q$$ ztx@@p;3oo#S^Z|Y>T(L+Rj*8zndEo7&xHmHA9HXG_A~zNn+^5pGbjaDzA_Q*hL_~& zCH|SlnTZT<$9v<(0xc(A1UdhdbZU?D^I|Pe!A8K%Pv{V|p5PeFn?E`S^#iIwOi=#^6(Qz`5(?VOYoB%o!-Xz}1uCC0FsSWtOURJT-F;9NANXZKP zKjZfuILl-N{ZlnnRpZJB%tJbar^F99T$}~&DSS+$ zfoK>}n2>&JY`n zSKE$t+T_Ru71vblvrrvUdhZ@HO=mVHd+C>NR?Uc&6x}{`=FBGmP3`B*4nOcWI9%|- zQAtB@0N4Irdyl|smbUM_RQ{zJkdJGYBe07XcAV_v#)W#y}~;8WMG>2o4sS-W&94j7;nbt~Wd3@^{LQ(a^= zbIu&a&iSsHdvW7>_3AYBfo1_)GHEt?ubnE{635%8gA&etT`!!YaAn5}H%?`8+$aU& z=D_obpG1Qfav`jb&mtPw*~xTHuSys(HxMs#pHijLjGUAuYGH`(p0KJQioPX^1Oah&$z zSe0IB-lF6I+{gD~(uWWfFii-3k}>;alfBf>xfgcZf0T0g9xLME@fzqLFusNL>q{L6 zh%9Rg_)ezKQy8q(OJrV9(+tHX;rQ{mz-fSUJU)s_3kd-dzHwut{!yM2C!0HT|8<8? zJi93SkC7o2?3g7+qhvh?EqXd<{?EvPKX2_%S>ow$8J? zZP@o!M9-!(a><05Nt5Qdyt`1b_H<$Xv|jRY-9*m-=rAXLztt;Dz4MFgu?c8mUVf0S z@9VWAX1IXS^G!5EQBEr#*rTCS>m>Hw77iAd@%dsr!2W&l;^e%1)z6nTWQNM0&5k^? 
zt?je2q=w@h!3hBKn%dFxBt_cKuzzuMG~FL9Jf70h;^-%|8_q54~#xSUOHmUKuAInKZ|DH8L9XCkZ|kJWXn)Q2bg4Zk_h4pV}nPedHND;LSCI zT&<}NsGj+hgdEZ$PC8fJ~%Q8Me-L`y}S%3lwK|(|C$gIn;_?zJ}NlZ+(_{W)qCiohpr6tJm z!}=e5FhhblJ_|Ht4>B+tZdkB0Ms=BQVNG*y6su8D7ar&5Mgx9^Vmg6C<#YGm7kW!9 z+f927v;M5oxL+2ykqN&OTgh`~QYZ0YSZI>s)kC*~-76>KY_ntd{$^QjQol6-ep!~w zQH%H38?MBq;sug1wPli!7@Vnrc(E)cLQ6z5?(5atRBm@K*qrm7o!K9b)ob{N<^1Z_ z){rpfIXg2FFYxv?v|%ZSP`MwG5)~#vCAigppZH|+P94kGfxG^UDhci$W<)M>(ZNgU zeUb$an1@X^HZ3VA*jejLX~7lx7y13wduq#i&io#$%ZH+BcasVlv<8>uobB8E?;0b>pL288CR|- zYH(n8}CO)a^m*PjsF z2v2PFy6VKQ%~!N_V2`Z*uk{6j2)kyox-03>%E^VZcO&%nEPgLi@a$GMTdOxRXY=ZZ z@jo|ZMCoIYzV?=zs)idr*x}C3;VV|I?D64Pa0*Rt4`o}qOq2&!>$i}IsyBH}ynpv@ zPd7u|Lmq3_YG0~xi-_9b!59&--z}TadW#Oxs{6tDeQ_w3v5GAI*z9`^Qmmx%UYL^a zGbg!7Kes1Y%BNTj)wlk}cd#{WzEjGcEYpwubZcp%wMSW0Hz5$^Cv$bLNhd_REvaO6 zQLUy(N}i_6+IYr|(YJOeXryLk*%u5<{RbF*rwL!1asK?!eCfuPabr}2WXoQ>7&bD( zDx$lNsdUet8@|U{St-iyI5Yd1LQ{b^-iFxaEZzV3 z5i4KqFo||XDV=z~V6c9|`Q2iZbU6AmR;?>ps#+Pe)F@8^7&d&4^*+e0XOojf z-vW0ov0mvpuSfKUo>R<5{k9PEbyPL@mw5Ai9-*>on|J7LS_2bDILMLtBWd*|yGbO! zAll8%Wu4YVC`fAab0Rs%oC9*5lHZERGzUi5+TQjNF-NPeaC!4T)?b@Nk zl_uXe%9>Jmi8bi^y4i$kZvNNgo4co{rkt;;dOxd+8vO*39{=5~If#2NS#TsVak`6? 
zD9q@Z+Jwv+6VptN51Gxaz>c+`zLJJF?z~cpc0-3@HLQnUH<|Ujt&cV~dV0}m$J@fjZ`df7X<#a?{JxlH=RD`( z^6ZS6gEBUa1}tMUVnaiIlZ{&R(d~*d_bqjNDY2klrtrjGs6$^j&XJ zJ-M0}lVNv6%J~c*_xbP`Mk(H}Tr_b`FMRL(j<>9#F(w(RNCl%5kyc6!!dsO^_<>2CQxhc~Xg zpoD)Q?(eZyO$e;|sBBj<<9n6gc(9;cILNX+i%ZB^p(K9dm5AG%}kS?z^V zYCJQL!VCL0ke9~DcPR$2dd|w4pGhT7h}JvyVe+sgI8PRvnBc>+gVmj8_K}Dg zCTGg`zw=yH{5T3}p8drNf$D8_e-$qU6@>8T<;#*kOD~*}^RT9QP_ z$f`7rhLVs`ltd|-B<-QlQbJ2n87(3tqMbG&r7{{sQ$tA^ZQczM-}}pXo!9mG{qcL> zuG@L*I?uel#`AeRkK=JX9*<){-IP-+`Fr$h=H6LrNF7=V*a;;XiN|WW)rP&BT|7Ke z&YW?488)I=>e6Bfk6eJfWI4OzjJE0}-rhLA^3&`$1}iXC6+S5`w;xGM zN_Mz)OE3TJdNa*o30*thU(>l}?5BNiKFWv^6Xa{orAc%#jvaVDFG~JNuEqh*j2?vo zcspCeQhJi=yuhJ01l(KRq~)0}?ou z)yG!FnB`mY2u;~H{RR+#{|T%-WcC5Ch^#p7(oyb)4~H2p{KfB{fvQl!5rA;|raI?8 z@!WxkD8tMXir;H{}1Fo&As_|5HRck-;7qh}1* ztQB#--ec=Q_Dq8da9W^Z)p^CG9}RcTRfvl6X;7mxvu{y9bd zrRbJKp~+v#m~FS`%{G(0vtHY6?d0nD-?b!K>(j#{Hm+3Pm3Swzo7L}rRvk9X*(yKm z5GHm<)6#ax-^_{jANtlcEh4Yemf6u)O*~Wk)GPVyc02yAHA-Lfb7u$bx#RjZ&bzUt zcW`bO&H0gzofnR99yQ49iKIqXS+#Q$ZhVpM8gzSfpD@R7Unf+FJ1U(xq3gfLEH}8L z_vab9wr%O$_0#R?`k&V9n0MpC)0+MB#&mdV*xRZ@NY?@>^;^mQ zeOGz>3cGD|>5=>oqfPz4LBXG3ZY+}RxKgPJ@2KFau5~IhAum?S?%Dd<|5Mqj`d+Kg zNM{#)y3u88K|3Xu6u8w^?viT!K1S(n~ND`pm^a#DX~>)_pK|PH~1{+e|c;7xhofL{X2Zs zgDRsnA8v*dVPX1Jt%;XWSpaA?PecEd41!kMJ(E{G>2UZ>*VS|OE*crRB;u;_+u;6V ze5@v&y_9sWvUlEOgd#pbwr zGD^K@k;06n<8CFlOPg}XYizW>S7iE>j%6lMVbjJ(zllh!+IDSrROVj77^b<3fzX*O5J+z-KknLa!)(@?LbLeQ@5t;CN=f0w6D>-q*WB%OLt_Z z_Q*u7G0!?))9CotHPYTna>Cnfu5O<0dfGiDMWTszW0Z9kwChptId06I13CenyIJVP z>zno1-#=t~=*8)yhECbMqkH!>qp_<;Jd3?isU(WfmR!1K+q+?IpCbz7)>|I7RC26S za_oG-%(i@Kf^OEF%~eL7yPi+e2^pHF+vn49o2FiM3a0kGQdB65VN22~Swp)IIWtoe zu%{XX$%|QLaVpH%2%0f#%vGI!!$ypiiB&i-XUDyO8xg~GBIS+t#x3t>I9uEJ&$xA0 z$Hy5??K0VS*N!3i@21twl@Qeodo&~HW;esP(Pj!MGRKpQEG!b*JR;uiKdFE{dT%*{ zxH8==hl6!SJ8Tr)X7wFh=XiPwp|A15sIN4aQ6SYrcoZnlr;MaTA{CWekJonFb6%m_ zfLT9+#Is!H4vbjSFK)b}txunbD!XGaW{EYF-tnbMsyXuUMcTYgrPxZz6OY!bXC@33 z74)>rkzi5C^n-~(M}e`4eag0rM27dK!zHGtj$k?OqP9pR>uWk{6bGEZ$@3~yIJUf& 
zKdtM=6N1nTW9RtElQbs=V;2QA*ymcIGG8hZ)#pL*JveCQPnc@Z6m~40*%Zl8K=^tf z{-`jRoAL-oBGep6IsARuHH2E)Ik+uS1|N~{X}BwR2BT1fPtdHg^78&inoRiPDa51z z$RwN&YNg?nq&WBdfTaKkf=pc3Y7u>Lj!rC;Y3y=NIjD7L`;HO=tj76@CfFH3T}$wf2InLrk=#~13)(#LJdX5TNek3O2RGcyR?0u zNb4zG|F-`pB@#d>RW-FC%L^6%{(=@>Y4*0Edrg3uc|9ul&@MmjHhKMwo;F|_jcDyMdgLPc zLP)>M7cPV&{dG+CP>-uR?lWlIwSC$JRjIM8WEdYn%5(bk{fpZ77_UhKZJS9#W?*Xz z0#f?mG^FOzlm7Totw)b1uU|j7$k{A>N(EaFe60i-liU#RG{$zfmyw|^{9JL}qJRG) z8ZIwhKzNfaJtbT|H``yZi-AFoTSD|@X;G7OnYWn><&LVJ* zz5UL`zR{R;Ekq+g(;6znm<~*5D7bM$+tBbF>B{0VelF;SWJn$s>wG3TIxZ;hA&M9s zv5~zidu9TB;w20sG-UpB*=CWsmCvzZC$J59L^n*4zJT@C9AovvH zJTNvhXZ*vz3%Mm6stgy2V2SxI6=Zo1u|w=`%=bpi0hyDzT}t%2vE6_a)BR@>@jSwp z))$nK>5NOko@>bR&Y}X9f8r4rRMi%mJUoJmO#GO%^Xtct>~-I{p7#)hMyNjkFn}b2 zPz)YY@MCTQ_m+CC9W$$hmvI*G7padsvv6Vn=XKj$$9r?ctP zpJUaNos|U_W)#dR=c8kp7BF()K(etdi`CjLDPk18KKjZi8KEc1o*YqQ{WFPveKcGN zL=4~NxPV$gG8B6^)#5AUmYF@a|UHBI@-q5+$o5c$( z_wBOFN2ohmT3XVLpEKvRt9!r?0vnDKcvuZ~JNJeLEFtJWu{if!N?KY0h5fT>L{AMBcG^ybOUEOe`e)p+!>FKx>e`FRPae_|bc6kW)VbD2_N9J7&ZhJnm1L(^k zS6Gd;1{%KMvG*^I%xecU1gQ*b3ParJ^WJWRfeg$K`w3IFx#dIoNsl0ge0^t-2Nn}f z2p5H=BlW+fkAS{QC_I2af9PLV_8dvy^WVo4 z5pwrEkp^@Ido*9@PCOpjRamxl>`2Amg#4YUG>exmy+)kk{Qc*h8OjUhdGwax8+w+@ ziy|cc7itVfG~(8Sj<&X6aV5 z7HKR9Oa$S%m(ani42pAVbm*?bNX&;EG4v z^GWQ5G0rI3yCed*`U?T4sndT0PJL$=9u6ZHeFTdPp)^-BZbqv8gAPONq}%!Vf0~*g zU|~)LG3!mwkz}s_y&ObZONjU6`<#W_6}C%lA>=S*0KpVnlAg|!v+mxD7puz4L-oJk zwstH7-+uo5_iW(bk6XH$2hC0nJ|g#jE=BmGp&?Huck6voo@Y7~DIB_>n-Ti>MGdTF zGJZsYl1~drcjlFkZCg1q1{~Pvb_A2yJ$ta1nqRJ-!5(|Z-zOh|goNfG0k#T=54^6J z%boeL8fCd&=F1i>y2@ZaU;rVh*zkW=4WYT4a5*Kd;Cat{L&tpkFa>oB{wu%Xz<0C4 z>e$fG5Z(?mOgWtA!PF5xGRlRNzyZJ#99EDH% zoQH<@;OqT{#7jc)Z+`QCFC|e(b)=*Y-rZJYvPA-i7q~Ip7PWR9EYUl67Ql5PQRcPZ z9Ofg?jfBq!V5=EKNRCAyUYgC(!`oZJB!WK+awGWB3>yX)GiCq&sW>0931Wr|2~~tD zzzFdM)x%xz0?x{s-v9p#*z$esnAcm7W)RZN_@5DNnJvl0w)ejmBywfT#WwNNm^<-2-+ZRWs%(w zf*fAZf|vMT2$cRO%kp?}d`uWP+Ggp_j11+Z!(U3U1Lg^<4p~5t`#+~acJ|@_oCYdd 
ztn}9#8}7rjtjJu=F#5>FzM_ZE+GLm{jJ#KZ&1O-zXif2e5;jXY7UU1<#?+F*2-%z1}+scj!hgi-jrX>vcfNY*Or%bJF*kyw9UesfzAjCogebu|jIhILtxBGHk- z<0i?Y&1JY;i|-bRjNv^7K$(RLvnw>zXVt2?{;duFE3m|6bfiL>(KrTb?be`WFuawM+5Bz3#Q+s`1y{{YZ{An0jQijm0U+B#TCIL)bpN%%g%1Sb!xZ=XXa3l>F zw0I%&!$2B?Or#{oq)6<7R+Kz>BCxYhx}HVcgV4rEL(Z%#s!iT9AO~bhGjsDz&(6)I zw1GXQuyboI6;k3-#5+_#G{2!BM(>eMTJ0%TqEhYC# zy+OezX?BdN5Lr&5G5VAbR5nxj8b_)l2Gt|c);1Q?^;vS~* zqWMRD3#p$-W^mk^P9lGagkt#Csb?8(N>auFf7UI1^NqAbHJ~Ouu4&1`>{|Cv$-$|^ zaC+?xRkQ>Q8TU*<5oM<*sMx52NDoxi)$jR@DA_`v4T}=k2+C?|@KV)PR7Nm;52Xk+ zR%11lCQ9$n>27Yg9(oha5#i4@?IB7@{P(&Pc|ZpG1_qdj=^jKm@}|1_7iOg#bw+AR z;&zznu_zGua86QjOi{dqYd<-tf4F+viWY??>F=~TPZ>h4Ka$QgnjD7^sqEmqw-LV1 zDaVeHkqtL9d->qO3WUL&ExT@Q*(4zfG8U$5LntTTN9MJ!)=gv@^6z32)^>JcR#x2W z#y(nF9$*g0DLY@#9y5st(vepbCTLHdQ?`>S3ih9DjlH9NsNwJX1@WAu%1LenuW=+T z^T}Eq@+GtHlib9G)sY5U_*CtH88T9NszFGwa4AunP~Tw^#ibZ)bg-=ccWcp4*-P81 z%voe#U45pJ((4yS>b=WGNVWL18K}fT6%lWkB8Y5msSMjor@zhF3e} z6R-`>2?l7Sl)Luqi8t}qEK3*76T0LXX*?D|eaqA}SWd#rGBNq(Nai3C=@&9P6EPDK zLrm7@nf3^B{P$&@p{V)9!x=rX>EaYJfO|4h4iD*mb?BhgH(~t5iL);*ddNg2!Es~7 z3df%XiD?_%SKl-l!-XIY3)j;>&#opP<7XziE+} zEM}jwipo`95=PTKl$4A#ItS_eyRSN@9+nK6lglG1bCiID#T`J%<|KkIE}TRL2HsR} z;5T03^r1|T%_H*FBnU@tvH71*=z3={C8I1oXvuf6m|ziyPVJj9s*84IdKg4z1>jOGYUns&-< zF1<|^6%{3=sq{D6md4+FTx+h)&D-AETx~%RUSQh8$(alXZx(0xI-*c0@uX>&ID11(}K3nhKH_lW6_lXK~v^R8sbPqq(i~vql+3EPbXUN`;clL4d-N zg8+T4OjpfM@88@0ESPI*$=k^JjEE}`)x3c7u%3?1lbD)%F`t?=_ z!2&1y=1qpXA^iNvcR{W?_D+&mZ~j+21!^un13PSVxb_Gso&g`jmBUO-=2feSYMj{z zinao8UZaE9h?EF?iXBV(A#5Lp1QWbDXRw92;{T(LFdWyy&s(a(V4s6X24(u?^$iP- z%?mbXkgd6~@%hBWH~2!KDtE@wNViG3vvkoy~lDrUw9WSv30sJv*DUpgBG;AJE8J=uaXi9mS zTAlxWIKoNb-xEW)k*SH-kfjsv(My4u1yFd>=_3In29voxXBhkQ1s%DC9h|d>josbR zB=J!0_zf%xB&yC|xKOxSW8=~%Pu3tuhoSF~FD)_*Vr^^!ZiRS7MZz!tX7PVwS@+9F zMM0WV=giqtsaCUwvm7B4B`M?)Sn|j8mtbE(NFmqbpa}f^O}k726vuGj@ZoEqi=x0~ zGOTi0x}^o9bre-qIZ*`Hb$1S5vX!b?E;^ArgjA);0wZ{PRMrr$Cz0ho{$bEF?g27i9&Wun z??TOy&$bEh<1rLx_Yy2D$awJipKscurWiD6+UXhlc_0xH9++2nzcxd9i6)cQx?j^3 
zA-raFfOsw_pPr?^1LPtA1hwXIusY=Pz_s#`w{F6G0E*f%DT$y>Cv$Semx=FBC z9_aRs+~&e_w@tFyeF^vox`ACSV(-s6=W&AtP6}P*=OhySWgJf#G$JxW?ua!Vlyt7) zpS|u@bt;!YBl1^t93`EN85r;aN&>8}Xi76YZ3J7*&=7+kGmu>jEVSeeH2{e$C*g)2 zhkpeEHs|$H*~K^-k^vj=9-ndZ+{^FhtW~{AM?ba=F|Nb99K7^Frev_Cmb^!`nnxVopd{cVxCzCA@w@TVDj9*fxmhc85n5`tZ}*hCW%Wx zi!vzuU?(qooz;jBIzxzgf25;crh_@P-?2czoe&Q8E(;d@n+5b2QnpFDw@p|FE)mRS zfF8_Y1GeJ|1)B2mZ{EL0pxckO{INWVh&(n0y%YdA zVTd_I9f(n|R4<{rF&@kh8Qjdx<#dqxZziHr^Y7ie`1%gn#$jAzPpAiJ$55=+Qe+B2 z5J<+-(Ok&(fgZs41$gh?yLOkiXyp z1KNPpfBYyn_?HWSBP3AIYoaL)Dr)!Wn3((j#CwM0wLPGge9i<++-it!cqYedh_xX& z5ZCa0dkq|oW0^215DN$45Uk0g4kEiCBk)=Fu9K|Qe@1N*$XUY%Vj2Ln&Cas2RK4ij<_Rf*s1B|+hc!krM5Z`c2xV_YeuS~-BsGWu z_h^ou(dNJM_5FL{^_-u`t@-VQLryIK?wp18ljAb@zYhj7%c_i7Q0o1(L?zzL$>W3@ zq9L8zfD&SMTY@qRbeoj|bL`5p=bo%nz(Kb0-o1H3qGV`EL}5M?X+``<-?7l6YgdZm z6EZXXS!`&xmMl3zpJAqVw(SGksjx(a0x@70Mi5*tLPnYpcl`z$TK-%8q>)E|a*z{^ z4lS-GL#XK`)1{|SfFjDKhei=a3W>*XA;%yhDGCK>mCv3Nnax;L!b z7`gU|8dEUpbf}2;^5)#R zL53nx+71pFxbaPOEO1uQM*OFmdXPD?7>9K0iNadY;H_`y6MG*TsYkk7AYdmj`VZ1q zMjvQ~=f7RaixjlWe$k?mkU`VW<8ULe*f`voK>9jsz0h^T^4Phqy_5|@b4ax~D>*Tt zS|P1bR8TNx-hzM@5Nv~Tl6lN|FR`M<>$Gf8jbX zYEC9?>+QF0-v(hbRMEElns(xZDb00$8>)XNeew72$j=Vrsz9|BBge7b6a;4NPA7q+ zfKH|E6`l}{Vu;@iQ9>{`1zH1|8sXc+C~kH9ak}t*ufUaR#y;-hfr0J<@p5O_Dqk* zK5pK;vb)+6mVxz;UEP;IKc7OE^6y!ib;zw5=TP>?S;8C-WfEqgeo_0|GYc5YB>cUs5~ zeY$|2xV#qShQ*Fr3)Rm5&x?`j50dToI<1!y87x9c<1@f}2OR+SY*pEy{$On!?}EVj z#EDD_56(uG4krMIGdf;44*J>S6#05vXO1`+_KLO?flPpG07>T@hFc)?ecyKxh28Gd zp##1~ciX(0xOxbKf*%`d6`=@8J3xLw2DqNG7t!456DM@?hN;W-)z%KKElI3sI~~#@ zSje-#WOxu0z`ZtDXo_kA>5xC9wrpX+trXCRH?AgyWxAd^28>ND|Bv_@8(S%{!FYw$tXk#%ibHAQ-fYQ36yVNM8un_s(o~N_(M%i z0dczjEo>0~sj2V+K!II%Zf+ipMw+TCb13{P3E{NNX$~svWE%Khwdn(&04hXxk!>;( zKrE3<7)2BZ>MI2uw}I2<%!xmISi^dZgk>wqFyX*u=F9=Uqr&qA(5B+~g zk9Bqd0U@VG^WApE=$;1ySNtFvYHf|ATgV zKE)U5Z5=*v;K?878_fIqYKg@_dKeQlh{2 zNM#)!=sI=}yG?^?L4Qq9NlG$mSK6Sw77NTBDW~uRfExoa!MP75K}x{$f?h`eK`_KE z=9r?bGaH)fHXg5;KNB4!EDwc+bw2LV!bnQy$i4aUW$>?AE|S4-PM*zScZ^^n@suNy 
zB5|C+7y$-)`uOowh^nnK={@%9Rq=mrg6Be4p>(3oPMdlhjEq!-Acer3&U@^q@X0;&{m)9Z@_bI z$8Aran6@?u?KxvL?yfKQz%Bb-54-Itcv3!(Af@0aX!!LjwOHf9f(vkhwoch8<+mS` z<9~7zUX_rO&^X}?CMO|*!%mg>bm{WtqQ7}uG=M9n;%~wE>)uC8h~C(8N-$F{>F;F| zcK>b9=`&}3WqvR#Zr3fD5@ihT2`3o!kYQpNIv0GIgMm~VK?&&JX}5uW`g$?H_91cV zzTY_>TQO+_hIjOn7~z!JZk$e}Uhm!=+eymI+tSi0FOZ$&vw$&6-s81F%GbT?vfv$L zrG=bM;>`7k%1cU|gnzS1kZp?3?ZFKNYqaRy?Gq=V90q5=jRKZ9VmrZB8A<_CJOm?T z*|e%>ySp0%Y(%<80&1xh&0$K5A0R0@R9z&>tRiq7m>%m`w1P-3r(udW$#pCpe}+Ak zNSs2TD8I0964?e*4%b%R>PUouN#*ZiqZ26rBxINOWIBpUx^oJHio$9ex~N18T};Jm z;1tj|8L2HTp(*!PWOFiPQ<$yi2}9tX;_7<#)G1Vt3r%$>3~?u00-sJ{=Rffr5=_3I zji}@-1emfHF9^Pa4`LO?YP({r(akcj0q+9XYwzgrG;{R@Yo>4=?9O-|0ckT9Z3XKN z9y(Nx;TcuRTEv>itmz!s3cqIga#BHuGe`w?+EmumU}`1=vw&^k`Ae5AV=AtGn?uF|QysvOq@+q<4bDzHMtMRF zKY#YA_$rp57Yryz;Yj(n(9n@=VHppC*G;o$-z!w-L@Md|CZbngNHk&2`z(~<7^nQD zR>X_s`JZ5t_@M-cS8FRyyI+q_(m&!zTeNOq#|)MEk_sUl`GE?9PIRNj!XrR z$illoVJBIc^nL)@&%9eu@JY^(Aa9TylpLizcRl6yoniryOLNp32R(&CbuGnCP^e&4 zom=!UiA_ItGf4Gqq#gJ)jXL5~;F%8Ig!2*YJ&hhcwil@dtpK&H0Cq0?yWdVv$og<( zyf<>Rgs7=~k1!=o&G>!$Fgmeyb|Lvf{8%<-OyCv)vv#tbHCRZd!C^r_+JK}ofQ^2k zcrJILwzq5@3N2%J?B6ffz5;FJT4GE=(-ighvxgZ9g5w~M3U&KW9}hZl$JzQBXU?nu z&Lu@7)x%Eb#!hdMzZE?^aCDGkQ&UTQMb!BCg&cu8VIl>XK}&BJO4kHdosAB4j855G z%Rt@hIXuocBZuM&aPgv(;^Wd%s~-CS>S|LIKoKceHU6$gWngVB74W#IXbGbc$qA2n z6Re*;d{|pkjID1@j(GsFehR(hBq28;9)~93w1z2X!{}imCuu)9S`kVe+PPfQvtFp3 z!KOxCnc}@_<;s$>GCFSBt`MS4XSDQgmj|KoLE{Y@PS<)lcS2ik+chRK-=0 zgL>)BIQG!0VzBbzeaRiSS;0voo>2?Hs3A-5-ys*D$((_&l8+48RoPBjWW$&lSU%<6 zo2)x*yGhjKa1mNzw92#9V|O7B@;P$!;?b*9b$f&%s!+1nT6th6f(~f40cdN(;ygBg z8SUH}dq|X6;dG<<=&@sK*fuopXk`&mVTD)q3exo9cxC<2_X|xT%)Feo*GWGsur((k zp#mVk@PoX8qv`6B!v-k+>^K7kebC zXZHi~!R4f75!%+I`Dk$8w`ADpu41?ua_?Hy{LMLg+P+wI}eA-t=q30qg( z5^MXN+_B<2y%O3v^?NZRkW=gTOMAC=sSC;};ZNMg^$${cxe(OJJeUMFhQqZe07 zmaF*}%`&ZysEE2(X5Jd1V@3K=T5mLTsGw*O#Qjkn40E+xOCkbAnNKMOG|kq1BQyf; zade#m$~}KxFzV&0d|v8K2axL1bHReKsam%SLo|zzgsiGMGubJibZK9^L0b04RcoRq z+f9zr9D1NO5;77VE$pbM_2+aHEeA*0^US3s$8Z5ypNh4*#)M8bd}Q7AQ^`b)`TZBT 
zbkF`Bx}v$@;=+QoJ>pMO|0L@j_C2>Fte+6`Yx2lLxJ`V2_-Acr)+<-!mAu_yL)k(8 zrJ`a{NyYoBzA>@d6&0ngI+jKzCLicra;3ytTVwg~vf9@}LI-S9a&0U)b-?LdmvFz% zyM~r0r@k*&maucx=(kB?b?aOY56i(3-!o(E7w_WQdlJ6T$-5wL-5qizi^6_nd< zTbGy)eQUS5yojj%{L?N}%UU5>`ADMPwy2^(;w7RcSMhmK%&DYx89|j5rQI8fnh!ka zTe|m`SYfjNt?`*N(vKSJ)`m*>Hy(Uhkg`|SeqcN1dVF8E{j*2H8mH-WB34|nYZc~g z59%@5OE(~cZE*B{L;2H!@ssijhI*&9PLviW)%^_9nE&%ct)FV|(c^UvR_a*A*v9$X z-BQzRS)nxg;;Wn^R&vLNgp1`fUW#h;tdAW9iuW4KjDg7EyTp?oNpJ2n1Ak7SUOk15 zVYhX9Eva8_cPEFs!ba!F!u`Bs&i6ZSS0o?FSA3G=HC4i7Oc(v>DA|1&DhR|BNfj&eO;s;cSbhCPEQ=YZk$nsTqN_u z4zIDBjk?b~WL|dDFMzd`zw#+)*!{lz&R8xzR(>*iZS%~L$>eMUc z<7uV16`QmCOMR#MJ37JBLivLc6lK zs=YriX?Yc3oPY4%hp2#A(f;QHI@eAqT;@F1XNTWS?Va1wfKE=I?tSf`eZ~Hlq3Tg{ z7EyvcyV^N3%t`+gAV5Y6B!;0L<*v^ndhag{w38WSmynPw7bCs-bAAWA31h!E>W>+0 zJltmNwW>WEBkv|>Mddhb8Zbov7)cP9+^lzBg^qUVE@!jta^E(xcawb`k!g@y1K72b zlnA>`Ea<+|rtLCLf6n)X`pYV{vnW9G|JSBN_87^L+h6OB|2|owri1FhKHXjXQg^;C z-QqY72DoG}DIuRd4<>v*DP!Mx+=4lAv$Qnyqc$@XTS<1GR91(K#?%kJN)NtJ4vgrg z7PZFiY^TQ5aoVkx7S>;5Y5%LURgOo92`w&HPKxZ)U1oQJHDd36 zx3s5k8|b3TH_R#>eRfaI2(J&fYZGYgVPN3*!P%)t$8Rr@1Q!mR`q(15IyG6%!nk`c z(nqhjasJgKZ{I#k^ya){l)^rn{*r~7R;8y8?eURYEEjoC*Z0usL4!{J968SaVu)TZ zItj8xg+far=rsocBP?dknd8_eE2TQkuHrZ=$?*X;$&}f23RVPczc=3@@q~HpM4NKg zDGv|YyC3=_H?gKpn2^gS>i9sPn2W1HRrruNrpt6*gbtK-g80yHd6z=NXT6j(hUxk>2b+QBw=NMivfdQ4ssMWtE!tp-E)) zGp0?Waa8-H^XRuW;mxJ(K|HzsOo_6GlQN}>-!w?RkDNXrCU|&Ef(`R2#aCm=MhXX& z<_+~b3@%1{Q}vE`%ptddgAEM&jtp@$)@x*dfKus@?xiEgEwFO=vO_!PX_Nf<*ljPr z2gKav3<6D!ieFmweVvz6bVR%|edY@nFA4*t{sf9Rvx2raOJ(oUH0rZBM6-9FD-m}~ zudU+H0g?NumU8hrozxwh&zUD5Es}{j`FTj&>3T16PKPlO=^E@MnFQsuM2=q=LP**( zZ&)u>^UodUKm8atO>H3d*z>Z=dXuNdf?ifjB1aVsFO@R?JZ?ksi;-R)G4Xllxwj!h z)PmeC+fJxWJZP`YCoW!e`LK|p@P|cp?<4x-!CZl=F-RFYz1*EGeB0$#5cae-$66LpS*@79w`Ubn!i_D^3?ASVm)WoC^Z05khgJqY7@p4ORrPs@(1?HqG zdPOtxsn%Xvy7o4nTQ~YjzCw#-zv5#`8|l){KYRseY{qG2y&j3H%LT`ny(^UVn(Tk6 z$Gh5cWy*EEE4{5Z!5t~}oj}P@+WV%@hDp5S=$q{}V+w3MiPIHpnCzpYQVbhqHO*88 z6#N-b)vKmsfknx2mIS#$q3oLy*ZE_2jT%3y>O7)AQQU*AKNS~ho;RH2uV$AK6C(Tj 
zjoRt!QHy`tEsTfynYk$}`S2c@^#$`Cj6{!bEDFCj)jVO+(}WV=#Lq(}1^yi@FhX)0 zxQ+EXy)h4h{*R5qoJNF3xPY*jaknTq7)qqXj-qif{wu>KecQ~DJYoEJMhdK2Wz=Xt z10Vn1EOqhgkz`N2Rx(MNaaF0=+1bE|Jl@w^`#b6>;GeZu)_PNvOCBqE%5;u(iUf%6 zQQwt)eFfX_y<36$OATVrm2@Ul)$YcWE>djeGXX6QmC@)mpf^0oc}tg0IN#~;9MJRV zU7DuU(==l+4Y8Zn4?;8_u6pp`V^4>Gwqq1EbNckigyi1VO?P;L0B@c@`&nK`Qe05o zU_b3hh@`z5Hv3!87-F}|%7l%heP5;ar|j^f(*n3_z2cINA>!!d&fDO)+8g+l9|^=J z#pAY~-!%jo=g;nRcq^Syd@#Oe-&gBPcPGspRVDdz*~jh?ftJ$Rt+MSr)32U7wQHAB zSz#&-7jPo_GV0dlS3VEv`dlyf)~$BKdUMARihyz`!L zh_=LqNfM@0@p~}YP*a7!%4DKcFYC)!55z*Ib|}f%zJlfny*c}0w?eOYj6a&}&g*K~ z4}-7aSB`+f)38NZU_k{v00)9BI07zEd~2UUD*mn_>_bWoagxKpttWQeuiS5%&i0Uh z9=!A7GMtn66kfZ6i1^WRT#(B4x^0E9&R-^1E=%uf!?_3)0U0;>_%g1X+!lU*>G)9RYnV=)tDk&=3#EH86$vtXb3N z;}7SJqQ9|rdi}9B0>OZ~UN|x2%c_650EOBKJCc&Xo|%=n0BRe&MSNWG%EbB>*!it` z)p+PYKv6lu@R$w1gxc-G1z{cbu%Z!}*__AQ#3ZcjZHWP!CUPI`%)N&XPaGVvdL(7$ z#&+A;L_RpU=#KMzvEPyY-m%s#S?4Ir6>V8|=L;j~Be5{;r0$fLAF5p^;crj7ho$b@ zsyzMdam@Y78(`u!fJ`$|9pggN*EwUfSV zX*}={ffL>@F77+B`mF`vac_l=A$c6Xs%*-|b#P;3)rQNKy&Z4WB2lAQKdI|_>}i?0 zNN2Ke0T3|XSXpU{>Y;HyGXnvLPaa-(dwB7FmYbC#U&o7)R zUxbOvk=W5FSJr_-!-uxJ6im}@{Z#u0yofOu+ROATeWN{E()2Udwo?>si)VHLsg?Z= z^=-(8^Jc;;%`Jrl#%AD^9yl;|$qI_n=>rdN(1e79G+3~Vj2hZo0!)=QxQ!e+lI;Un z%yuK5CifavDU6G7Xn(RlhdSgAyGvaPhl8KM?!J9{KAbea!@KEM9MH|R1o0cITuDW) zHG1@6mk12N@b>jVbp=%1)5w!JSSE5G(Yr^O@RG00K?eni)XI4uh& zK3mBGeT<10CPRSWHx|f@gdw5Xs>1a3Yu85Aed-sg5O$kQ>2GP0YVqz6KySo=JxB%2c<2Y>?N-NoK$K-;WrAlT;1ZJKf z(v2GRN7$#Qe)CU`W_Xp+(Y#JOgS)rZuMwGAJ-9h`QYh=9l_mkd4XVbRR?B)k(dL=D z?Z+e4X8iI3r>-M^ZN`&}M8fs=kDU#lL}GKF9>b&7K=S0Y;6oNpLbY91)#}4(yT9qX zcTe<|*)9-#s&9F&;`HLAYNLHHKw^)RKCWt-!e%<(d>v*^xnPJ$ATej z9mKuV@y_D;^M~Qxha)g~2$-8}n)FmU^h7myU@%0k~&8-m#L7VUc)iC&$|$^u9F zOH8Awhwm~>1GwMB85Ic6^dOj+N-T`tll}*3sW1+UadrECF(MVfh9<+4{QQtO)BJ}I zyDcpc&9Q+1_S~k9x5WEI^QFAg2J#y6E_%D1)*M-1O%l$-Nd#+!fk)#gB{>=Ev{e}| z35;kTBc6n@X*&HD`Aa@u8vLj`a;X9QB}#UL#E|ue&O>MliV`h%VH5*RFzM{mx9{G_ z3esT4?6I`CG2;(m0YM`Faio!S;$!w@GC2#@ASd8N2M2zf8N%rD@^`706|X6>X_0X1 
zCS};_;_ABh-d>guSIR*SPmYGW@vKjD!kBxva>-c<6=xg>PpstOkf4!{7K)5x76eu{x!e8%$~fGFA-7~D3B~ooR_(%$-Y~JD{`UueLijPMimY|SQn>Hf9BzG zHH9JRFD6yC?W?|I*9e3WIA?6#+AhkH>)=}X{B3S#O2GCdYC&_R;2B!y%=vT3YP@TJj!A32j-C|Fw0$u#{SAoun1a z>}On!$kG|cU<0(S9y1MxCJd59uUA5du;QTErP%azt*^)(m~yHBK6@b8X0|u>;Tz_3=k@G zz>mkSl(W#}D?U%?k?Cr0U&zltNU%tVh=3D1&)a*_3B?gPHO!98v&z{hK5LA2p9}VVH^x1rq|a4 z-AVl8_w*#bmpxo9*VsPsYh76*&0lL_Gs!t$#;E<9%+un74Ofxv6b zIcWpzKRt&PhgO`+6^QnFdNwICi#YN3kRZ_gK?y+?;%{LHcSRtsr4Um3>gnMd(ZY}s z5IkivYElEj zi9xow6s9^eGcJwUB4ecSs=NjE@F7w0S%Ygve7-;hrLMO~O)@Hz#yBV7xy*=}PXk z5VExA?N9=Wk%lr&P_{g=f(E@Xni_#!W^mNWlCVnz% z<3ccsGtChz-_YbadFqt~0C|WIlDq7h(6%3_f&Y=*XAz}d8Z*i8eyn;=h(DP^S3In>p$e0)b$sz#bmZGoQ zgbJ}nt%XJuhyq#|?1=u^=oK$s%pCUX;{=W;M@J}Zm@A4K8q`QwU**}^lR$R@DB;HFNISudG_&N+S9Bv#ZOxSCuhhGm5Zpn4MG@!?k zBQp{cEupkAs)@)uYq0B>xY%WDv^VM0VDM!ZN(WZHc_WWJYl2qyER$pH4lr|S`=l4?Hf6EjG2!`H{fn8+{v4Z*Cr1o z0+B7lot@_D3bG50l6;u5Q+ee1qp~HnsiaJQuuVujyQayzPTaXt-~c^%aNv)dch>jq z)VIcSuXlKV{ie&9xpUw0dN#o@BnlEVkd!cIDEZ*YkCb899%K;^5L3W==KwYwI@INZ zMam(^!>0QW91tdiNLQb$5o2?HLeSskDUI+ALCo2) zd!Jp*!djt1(E+E1pC=ReIJsHTmLXf9^Dd34F&ug=&FeElIMUk_kMHt$J~T8Mw|8Lr zXF$F4hmMmK_jB*j(Qa+y3mG&}_4)IlU%N`jzkcCcld<8;8VWo#1L$p;N_2;O9P!a5 z474)bjYg+-ec!8hMd?r(-w6jbvq#s@?R$`9kVWl^Oo)jZ@RrGEG6IvdV!IJKF$@T7 z{w+)@B)=zh-yETMU5g~!mopKHB@Y+W0H&)~o!(Zfw;aNJQ(yTGPy?{BAlrKXdjIBI zb|1yQr$@wB|LJSqzA9m`cH=_!Bheo?VI~I*C!))@p*gebR_IsAeVv>?>&BaoDSjcg zs|=1#u+}m3KE5|B7{RTyfMb{-@5NY+r+aeUare~IA04mz?*Cfu zcRVBI&CE%Se`+Upx7~ivWL@gnEym8Py;CP2+I;88MaR}xMSd0gzE+M-$SHjTlYsg{ z7$8C^!+J#*^%^lY5pQ+NK?RZMagScTCiOT)PS%S#+i)FfcbNKd)8skK&B0hjPUWN}3F zssqH$q5v^V+gb7UTOo(7(u68W;?E4_i*Nvuw(jy9C-4adOS?Nc87{b|LL{dygzYF) zPZ+_VV=xW}`_0CGtlrkTlV|yq4=T3}6tNAy1_2spCwRw!2!_2Gus6XV+eRn^U%_A3 zQ|$CUC%F_r$6juFF0wC3Au4>k`=-$WW&FLG!+}pgoE>NrP(mpfpAl&=!Tf| z%ANqdc`^O`#fuvSA0-Gk%r}GtF+qd6gQImkjf3=fV zwfvs?=kt{cP5wiDYoGZ!S^`7-p~!0d_|T}i7ACz3!A^{6C21j*p{Jvxs|y#zz-$q9 zMaib-T;G(fta7r5T1bA)c{uFwc0~U+S_)hs0m2jz16= zhmD&tnK*x!f$d5BE$+CuE$kZS|Jt;ow(jU!@!aCOcM&~7Yx8H!_1e$nKVUjyP)iAP 
z>Felz4;G+MfO_Z^{DifGDgbL-2!UF!(!P{-M)QI71Whwx=fTn@aTMk*FrEuE+x!g> zFSqMuGaq3W>|S;Q%lET;m)kp?lU`P5maU8bn((Z5fyM07k`j4U)x{vV$B$=lKE$lK zbfR|hTI<3!>kgWJ=p-{qZ~R&})6etuEyPE(PQ-t_wc4~f<(}_8eG><9W%%Qk=K&Df zDb6nAATxY;OZ{Dg7i*mpCb_G>jkE(<>5a-}+wR?64&KE8z?nes)$9w&JUD8<(4H2X zvg*Pcv2rH^A)YQ~Y{$2-c8&p?RT%5r0L0f049F$rPA!Qk0#wU&+ zXXfy@rXHb5H*eld?uu84imkI?-H<`9$@m2`rbxwXfc$GbuRU`5Bz=T{_Fs;+espiu znR*AH$h-9g*)gw^dDc+cX##^!Q54VaWUj9d_CRtUb<*m{N}xWW<2t`LX9M8w(kG|B zH#8hI)j#F@L1q4&G!9Dua;nr1RaJHsn;5xres!qdcc(uG_eC=lZ}!HcRe>RH*EfxR zzSQy9Z{A5EB-C5Cwrb8JF~MOD!Clk}dnV1{HWHCjRNb)NES|PWSn;6MwbRoMftFIv zxI-&`_|OR5g|a39&vNGJ`nQeKA^$zv-}QGlcH3?m72tJkMHx z0rb}|AoBuvQRGIQhGY1zWsOwikBT6INg)y@inkDw$7vF2lA~X{qd&K z0qiP8+r#DonG^C$$S?`h_V07<{f;#Lb@X0C-I;ZO8-#`O)}6Z?-L2>{4SHz zH2Yn7+k`NM`k#+j-rB~dT}U1j-%U3*e37Q(!pfz^-qX~thCsaGOE6$BMGT&U<*u%2 z1LvD}ql=kz!U6$*!1p z;W>vADm?LSavV$^lJo@&j=(*-`wK-H0h!*fGHsPB^c&9WH%JqRcKtlspNJETKubKR zsPCD<&h-Nv=6`Q&<`}JGfbg?t7hy(?rI0fBpk2f9$l;0S_!a@Fnfs z*qC{6d)b@z7BxKi&w(4LaDRF~q!y>sjUgM@?pptIHadmH#RpaU3j92wg{fRbuI>2u zYbTS8UtiyubceE*YKlWX>LkQ!?As3b)d0ly9t|Ht{>)3|Lwf{h)> z#;p2DdmWSu)qQ_{nl2Zj`znBP?G}5Z>P}fo-*ta}E+;1d9^#}eF$uLvBEfK0e9xVw zY}jBvdTnv`(X6cIqv7WCwCFI8;AAh}?fL6r^U(yE#0rRYaF+pd%ke_G9!`%3nBMH z3x$+xYDE|Gu01kxH494g3bBouvpveX7g419;j4}z*Y~a8GhVLy>bvs_1J)3o5n~SQ zF#sdEloZ=~&n9t)a2p-iz@s<*J{KFScJOWhhLZD(k~3ZDGFVz7wK`-pZ>;_)_C)4Up~8(Dk7x*>d>z&V9JN+>awS5W+XMygr}fgo&>3OkA94E#Ny0L zXBCB!GaFPce?0&)ftN*8VOLx)ex?g9pb$qEWVBWUwoY^n(eIw+q+hji1n_rhNC2)g zinjL3#eyFjRx7|;UhT~?^4yBlkKXHy;myX-q^buiFdwiktc<1kR`z)I_?EQ@IuWmi z=l{WFs9d>%?$p+#G?M%Y6?R|7WghZES7vPO1FV34>UQU!`vZIQKY8(@;Ks|j$UmWT zjs9f`2Z##0(kFw>jA30(M*F(4Xv_C~shgk|#T2H{Bn(9>Qs%L@S%57K39WX$2%mla z217m}DUA|KFpD=yzrT(SYaj7HD<|TuREBjfrS-DmGc)_na&^r|A{E&-Q*p#rDHxdx6o2mrWVh9@FJdZDloldJ2tNAM2eq zm{|6oGGg$r*9Gq~uU;*=!u&>>R55dB5y0Te)=FO@_f@JLW#<0YV*+K+Bq}nRU#u3zt)&b|BtOV0n4%N+Ws#YO44L1DjH>q3Pq(9DrG1l zcZE`hBs8f+(QGIcDszM~RfZ^I6HU}@4k3~dQide_e<#oLf4}eB-fMfe`+lG9x~}Uy 
z&*NCfTKm4QeKnxHXHBwfcV5Z4Q=C!x`k&Om-o^!SzhYfBb{tA4*iZr{BpjSDJHSG_ zrgr8L<)??^@-}wr^2%s<`DeZS*&(e;Mg2$Kj-9o*%_jAAM&&5GH70W`tK!ygqzcNd zFZmoUr$Tg>j}<%>0o&k`#4iW2o}HB9D09-Wc@U2MvfnAPEf9W6MVv z&Uw@8-%i)U&XG1VEPRCXg=VUcbfvTzLa2Om_72%>US1I00cL{+od-oVMm=yic|Ppx zXV>((p#WOb2b!xT%&97SWhPR)jC8>YwGHADu}XF!pP8vND1+z$Nlnm>I?nDBb( zS^zx3;uSaUDPy18{kqCoej4vk0X_Ap8onEL-EKP~lh0f0yEwqke;$D{Kra+)lNs{^ zy6UcZ+x6L^p(Jf;U@z29GH7^f8?XiO z2l-LiM0ORO`fPcU%P0>Cl2(!#L*J{)uiboHHhIeUHhy%qyv5MCT&xwCflT2MaA1Rk zsvm9-5wZ@K>2LvI;uOE&>@7Yl)D-X&1o0JB1Hc4{@DZ~?GZ(%BHx_JM=**^=qxmT7 z+AgC3S}VurniPA9609bT5vANMsvqKJA7#Dv(ZLbRH(8gFd(glofN6=5#6$iRa;Sbp ztirG`4I(k?$*@;{-HRh^$H%Z41h<5{dcYZTX(`>;cJI^TJ4NY{$7Y~Xkx?>~?ccwh zoi8(?&FsXpra>I^0Is+g1k(jcs}w2=F}^w+&c{dR-tpB*l9h*q%Z>-UrxA&T6D6Au zE`j*2Xl$2m-LNRpQB|#De)(rj?&$-1YVoGurVdLA>hCs1kW@aD%e0cLpyux?wjsrL zG^HwU+T6U##~ql`-bE%A3>q)N^&Ie@=SS6hOy562IsSvJq@*PH!5LonVbC~Q*MUwR%D}&?L$}gV%RrFe39H%;wy%tBLU&Y3e76rB*uAZu}P@t2cl<&RIC)2Wxyp1eiV2bwMXknLCe zJu!Wq>h?Ip)wYET^s*9MQ>;{9A6x=Hq$K?sM?`M7ZTb1h1~JWpA^?(cc}Dst01iNd zp3g781bC+_MCYHgOX;Sh8y+%7^2DVxuHG*CO}>ZKzUS-eo=^E4Z2R_2OsrZ>xPG3D z@~8Omjk9kLh^_<>JN#I|7-|v<ZFLrY);wAi1Oy zV2QnFPxhuz`pj3HekI{ux&XQCN=d<*KkZ?&2vZf z35zt)GEON76g8!FWa^cuq9yErmfTslrFMi2N!gD3*mnp&IJ{F?SR!^Bd`?^y z7lr^pEKm`bOMy(m+jc~wr&N_@&|9QmxgWQ=s%-cY1zQUEUvf0(u@x6`@vV9su6;5gcYm%yz$Eb6x5$cm>)E|p3n^l}H zmGsr>uz-nO{e68*sgwO6SwJ;$)w^@51=!Ir@a83V@tSkXlN~jzo<1&2?=?~4C*UH8 zmd4n+9}%uS9Fnlp1X1GZ<0rIRorqpxK(iiP0_=S}CYEnrx=5$Zyg+9o7Jkx-dgevgBC}pRygK|_^j_UX-NxOnxo@NrpFGdDMNf0w`i={Ll`#3maXdAEoFBNd(K(wq zx_gTApkE`;zN(qFYjpRIv6b1mU;o|vL_4Ayl#=c?x(x`URn2wVtqrON>OKli-*I?^ zvhl^rj1+WNNX*L=@)-F;KVz*{N)29|8D_E$#aPm2&&MTOT_Uj8(W zP(T!;$B+NV?%oG_tU-2(%MT2mg;Y^6ZnOuV+x?-e1_+3c=sD1}O%;ncgiPeo7z(37yZMFTAu*afeRdeoVT0483#- zdrgmuokQHs%-_(!4TD8vceD~#;pP(X`kVie8fh9HIvMdyFxa<58mJT!D?#_S2#hr0h956^8W>Ss zR}{_SBCO#G37r}8?*0g(17Gy?#<+tEeGZ`^kTt&>`%;gzvj&agcg$uct{7MJdpb6H z{4uFSiIkq$t$K*p!zz`frDhVGx10LhDlIjniZSvMzh zRLlIya|=pJUg0By2uiv~k40{!daqesKzMu{_`2kYuj_osq-gH|A(U1a^_;?;ddz2s 
zHG?b(9Z#(P+5W0tuw31C5j;HN zb>s8ojn;~S6Cg z+<1Ic-E*iqk~XyQPcbh(8AX|<*uVcKkDY#YLEu(KP-KldozwBJ;6ONlu{+5=N0l0t# zuxE2lpMHc3<=u=f>&h4efWVnE_RPzL^X=zntTafJ&@c0MO7T-mG2rwKYH{bilvkSe z%3NmI!+~KZyLtPyfq{EW*3R*IbKvVIFk%!BsJTd`{4R|>yozj;K_`YE zkcQKWs0O&_#13evRE&DrS3;FPil`<%Dvl#|+POp_<;oFzczHbh5a){ts9@jC{>{%P z>LfGEL^G%BHCc1WzK2H{E`r=*8PGPDnmIqE@Z_4DudiRd(jB4|lo4tju7rc1>i5^rkD>bG)Hjr^9Wi%py~A{=#u+z; z8N+I^@Y!Xb#&>b)*AUlz$}#0z)w)Mz29>_b9`#u-C1OHK^qAhS{)DE~2c@HI)3^DS zrJsz8s=lLUETKyA%br;5?ta4M;#e%7UiQI$4;p(dkJ2#mY{kEEt9yw}+du5iIXR1C zdH3Sx&+s(yN!5PeSm5zUW%P9oLeZL`9{r*gkS zBOIO+DLa162WKy@CKAwk7pFktH7+FJxoVaOV;I=JNqc11Bt0ZRF|n~}`8w~7`X#-y zpS>115gJBrzd+3Q!Dm@O@5euSOGyBnP_7X#Nh|k#^5_c7DPakilkv?`k<~ z)|rzJrh1y#?o(@+vFp-%y0ZzMD7DCL1MSR$Z1fC_AGsbJ7117msLN*l$W|dQDd-?A zA=6TyvZXQbhsV@uKmN&hfsh`o*uw5&s9-IK6Wg1AcCWGv;!bl$`eH#?_Q3g+UJBXF zw(Uo7OPw@h#>mm5w>v)9E0}dk^T9owHqx&wO({iq2(SEN}NHeX)+R3&IfuMNMntwMi&{QYYsB&*Bl zK2Gutak1$H2A4!dvuzJt6W(}?>b|bD&;4tDPhu^$6bTiHb&8eU>w`yy$k18+Z)Bi0 zW`jAa9%^G{Fer+pr<^r)$XO%pg6ygl75{fgX!-iw_Cx2bx@GlSD)&xJy`JX$+dK8c zfFeD&&yL|s;NdO@f036NxfxTG*|E^2Shb>A_Ar>#X}I%+>asFa+gpNVKQ0w20(%)` zCh8Fc*9>v!L|Q^JEFo+JuQ+Uu`}CP?^5M;!ARzLVFMx7p64O%kB9#h`Kfr7Upv*@$ zs~$&l!-lOZSFFH!45!|q+S=D>crAdl9|?<*qveW1P|lnLz<$QW1yN;N3&-BTug83r zX`eEub}Tvsp2TObA7|*(r%%N;h(alIhkS4=xz%UDfaL&fD^^@arAFzB0_er?H)JU1 z?_Hv`38g=I4ecSjj$7H-JiZzlN-LaG*A&x5E-|Rz1m1Sj$uqdfAOvvm+FSktF2q7Y zU?EQFJv@t`P2-%xzN$~3X%l8_g*=AC4-ZY&)YFq!{S5JK^$QPNJPW6wsz3<_flx~z zctjNe(+#n8)hAbZKebsz76vqP3+R z&b)!LdCa3BaDb}~mD;sj5!z^e=_|)F!-~6D`o))gZ2cb{k zr~f4thXW@KUQi84q_OT3)j@aqIwDHN`V8&o3%AT`Kl@scI# z5h<5uR@&I7+5MaQ4-uLoZsL{lA1e#1*QfzQ(~|*TuzbN8&4eLiT5_{(vcdl-9fqb4 zr5}$fYe@RBZwR(Jm~oR4uf0C|(}(=ZiiN2Cki8;zDU-X@@es5vvm`rD=Zjp=9z851 zf&v4p`>|WsU{Cu+VtyP0vGVSQ3&f&%Bd)oJvWWCsOWdG`kY>M7f2dmc#xH_1RXu_T zs#@bk@}zq->em)hYa_~(O1H%1A=ibB;YXAHF0lx5Gxdm==+b(zPot-`cN>*8Z75AG zhOn2PUiNl8v&pxkZFSI(L%EH&+tsa|;?K&TI~=|wIi#k9fQ{`0CON7j#Vp43kd}rc z8OW$0P-28YESz&-q!HEl06bc4!IpRUUilquYhp*AH1R2clho{x$KR8DFF`^_FWJXHlk$yu 
z{?j6lRCzQ$ic!7y#M3hC!Ooid-n}BPh%a$lTYYwF&UDRQFSFPE_THHCljR-);WYT{H=l;3uQ0YZNb9aqTGgO#e}g)?5I_}%leJ!aS4 za^_^sA0aD|G2|=28)X=!_{C=%lCS5j`KVI1zQpD{=dj(}Yeg5mtfoy`ar_f@*HfoY z-v{zJZ}=qk3Xz;#qeZ7~P%kY5Pb_I0Yd`FMzla|PPV65wc*OAGPsnm<<00#8t|jrG zB7v#!i>&&6q_03;wDX)MQAQxZeWuT(vw2LuXMfGTGi?ala-<+Lq5p33X{kBga+aJP;Q_Dro(k~6 ziFxY2OGamAWM(QOK_&XKnciV=NcMxzJ|K(5im*yOXmQ+5G_xMFqCZWwyNWG7{~xGJ zd55#@DI2@@34)0xPy;-$^P5c+9I6rM50)*in}wpr)#*o#E2=N3s~gCq{nUq-b#-zI3V|eXbQv`K|8;^}2mz9+hk9XKp3Zhh zYjP{xVqf_{)inOz7E~t9s?5|6d%VV}(I|m%kL_1i?@X;(o*J&VZ)WPLB{U*vvD>cs z5_{t_Vh1<*B32~fW~4>ZhbDHEL>e2AI5uEL_+tBRzHsnJ9;p}Y<*SS*U=>&NRb%jA zjtCc*H=AEIG)VU!9TPlBM}4Xk4}sIY;q~j6tNY#ke*Xuz5#Q`VXHx1t?90l_h3SHL zI;HH9Gks5y(02T9eoruGfamtbMHfH3p5lr+)~a>?j?FqJP2H-EZ%@6uQ+l6ELj^54 z)hL>9gA>Z(HVKO{7{PeYpZ}DMhzyfHkygwJ(a_rba&?NS6-=<9wAUfTcYmrMT{JJx zhXelu->&#qF2Id2B|~4|DD>B*OS`|SE&h1U`Mq5D{Bhk6m?|$}jZ(wLn;)|u(x{yD zzi!)%&eN6Lj`-W2O0d3Q&CwlgZFz^BK7~A#v-QP_yzwzSQ66W%T1oM9%b%nxuRE2# z+~V*EUz?}%N7vsYPx|0jpAME7vVwle%wNGtD++shhio z{24uUukhwg!9@8P7Nh>EPjqfjU?NEV-S{s>)7qQ`5cj=R>D{n4iMbja7+89uhTfQ_ zDw-FNmM+DLDeigCL1>-92|vj_`LRjyj?nn9=1}@FW0g4?Mt9v-%^y*;Vp>X3{gibm z*jSW?I~)_~vtsSMdj0y&p2;NQmX^`qtx-FyX?j>N>KcxZV`yi=y|EJ6`K?>hD~oN; zB7_sg&e&(18thIPjtv1?7>;n;N4^MPBV_k&w(Y$8W?%hnDFf{6WB=TvpYD~>ITb)&8BNA!3#uvd8d0^Oxj5=|0ou8?Jq)^w_@*_Z# z>8j`am;ICrZ%CMPFxRiS$##A~PD*FrF(AwPKqZKcFaQkvEY`R(QTC#0rMe9S%)oV@ z6Og!(VPvEYXc!fgWhDRR;N0mYxo0ZU9DfRcED3PL+|h3qR~u@rsHxMd)zs?LB~NK* zz*@!GNe9$jd)ofkV_0naE@grGx(n7huM?-bxYSbfQgqkvy*+4(!Db+LD1}@CJQQVQ zmx3eM^>Oxk-W#fABz0_1<;mr?7k@aH)eSXwYS?+{vdZPbwXIZ(DMqmg?C$`P9MJgQ%VKs6nQ1RL4uv^@7B%Vu69V1`aVxyd1?;&!WYAyZ`xw^=O_F&mUBOJUuq0 zR{K)x_J=2nD))?|S1KEG6QzV;o5!o|hpt#*Mms=I(V&__C?Zj9&aYfmTca+*$EZLcA?3q01>fH!GJzDXu1Z zS)6BBlr}&_E_j_6rQm=app zYuBzx-|yH5tbrM^ex8GGx5UQcy0r(12wL8QZ$tK%hN!PHo?JQhN8_P*S##*A z%6G0Uy>*8aWt*(HEN9sNN6RalARAC_9K#1|Yg=hdL`%shBuF%9c7OjcV5*eX`ABQ= ziPtV}M?P?S&+Mlwj=HPO?cqRh?3iU?v53AR$rHX5K&XV+hI1G0EkAg2#9{ZHsHYlM z8Itq5MPi}8QC6_>8+ZHubZvmQi4r{(^&;M?{p+>6u)L{X4O& 
zg$K7|bZ2lFS#xgNjRmRB3!$Qki1@-JSM)i461C&?>rrjHTb}AP_|e$$g7b}Y1i`8O z!o9T?p-WYkz4ElVQ8nG;-Z6(nqeOw2@$}HOJ45mgOuFfwJ_|BOTefw89+LftZ4YNf z^b;NV$C9tad_e{M#kQCMPEeDT(3-Jtc=9L^cg0-G`LDM3s2-^0_2?{^>${Hct&%5N z*A+g|EvSB2_kLC7;cl`iM#F6{N#&Uj9DP#We`Bv38XYC|{Zk`id;9C`I{x^=>O+o4 zt0xF3I~M|m97?A8*3nFy@;`RpzNe5Q7Z3KSSB{flE?>OIX1L>iRgJO5r$L%CQA6ME>2_z6RXAG5(<^I#ZI%S&Cwwkunr}f~Y z(Kyn`>S-d8hE~pM?4j^716%LeL^e3QA79=L9;+biy|OJmwaN^nej7#PCa$E4X2g`u z73Ri)LD@a_{qojn$Y^D(%^qlG#`_$9x6Vttfm z7HTQhWpIt?Nr6ZR_=tUZLL0!#*-9uO%gXYs}v)21Ptr43Tu1IZAH zZ%M$2%eWfSp5R`z^!fr0=f_89416J?JB>YVXlMxaS>`ZBDl?(K)|XKNOunR{G8GDG zus41Tg~Oy))|{SvI_ehNvHf!85^eFtXXa{P?W z(4oY4dCC!2 zed16b&1a($`se+VQ4S$6eEucJY17`D{{9~Cw!E)I7r3Xn%;sPs#k8!unoHNT+$3}& z+fhM!H)T3i;im_5Q-J&{3>c8S zbLZ^`5Zz^NIUH{p=SXR3ElgG_Fk0Tx_A=gnrPWcbXE82X4OH#J`=Yg;a=L?bl>>(! z;j#nIX)@)ZKkFyN5*1Cp$0iJD$^+EgrbFIN@D@97XAUaED8mjCbk1 zKR&N3?ZKcLm*IE0TbVwOR_90mB6TjmC#23>9FEUP-MxF(&q!LAq@yu#oM~@wE?qub z|I(gjbcCh*g>;1wqwz%JW6y#!C5%HWEutQO$TkKMqdo~qM`fg~y_8wZvkfG(bccK? zI3zbE^$!v6zd<3h+`8*$OH~#U6_|R&&XEgr#$Cv0AGO9{)aZafBc;|{F$pQ=|sC@Knnr0&;8;hzUmKb#v*^^CgyM853KyMOQAk?Ymf;(fuZ(F}l^nJOl z?&|K?AJ?l(lF}^|jVW0n;CWI6_k@m@k>UG=546|H{(bp=QJ_FC;4 z2B>%8FLcDhgiDuH6j+Gqt(P*Sp$WLmxEuwgbhbKbp+7k)ogT0Jo8 zK^Mymv5x(}JH%gKBv)%fqQR<%p?fCC?R__xg|+JQi<~=pJYkx!VCCtLUqYA@dS!fk zf@viKHI2s{*tM`r!&YXpUWTARAtGQ6=n$o&vDE(xiw5#;-%lEo^lL-%1MA9(M@)2P z;0U9iolfcLt?X@X#RMceQ&1Ha?G`j^Al`zkaQpT>hO6CBAFimq%~RtMoO^lIA_gIP zj~+!%%ZO9-C_TV37PzMEJQV@x12a%gQq?S8+UZVnziM-Lxz^I%14TAkUbnp7_?CGG;qlQP(@5MuxJ3$19kn5 z_we2|I$d9&^rh^k8Mf}#X#DNzac_M8ep;>SZfixxR8h7IivGusegXq{V_=*gZ|6335=pB?y6({w`P7 zyG)xl!q|9E{(8M6y$EDO#M2l3goe^P4;~;FS`0U%c)zGVzxO>A)J9BarXQ61i`gmX z{x86Nw$ydk-VNVoHsnDD(k-47zJ@QmaiiGV1!YgZ@hr#i-RN?=a^)#rcb-EL1@ z$WjsL=g*CHWxaHl<<3sA>e<@&*l@H9Y=ht5t3pA7S#Ta7jMYZ+yW~S(zI|g7*%#7X zeBdD!l-|yIq}fJ_ML*1^Z~;Fea19)|p9$_|!}WmyAOpn3#n~#q&>P*Y?aaYV{Cg^7 zKbtnO4pBgn#ZVDrFm$YTd99)3Y_UrVmW;jiB~z7^~Q|(^6AsO 
z?1dX}yCBR!Yax#*neV98ukpi&U$EO~XN(LDg-Mi%w$S>sG63NiqcZd!I4yo~%R{IH|VwcUjnf{OV|(ky&MC*{IU4{w3km@#C|o0PCAp zTy7%&$@?=t><0-Nl>rbgm;rM@!o=gPbyGc~)+nYxEz__>TLN&JilQPc(2BPoL~akP zLM?#tg+ZNEmso-zWCa}f!tc_=3*CPG$H+kuAgPhQZjI-q6NYSwDOh_m-Q@ku>0*uX z2QfhapTBzk{I}B{6+-*8$=a`#dySIIu56lWIe$&(veUp>mCH8viODG{EUs9n(0*X0 z!{Bj(fJ?w!1OZp#rF@i1d-m+1uV%vjt2`QNxPRANF;OiHr>|umI3!#VhtV%MINh3y zb9Cs}kR33<2+0DyuOJs*&#oX7n9ZIs=1j3|cg}MjAhlI|L;agCOW!Vwj#Zn}`JDKY zrE5#Qb{yO@HE)a1^{dyhMt}7)h5SIb&r0D(@la=-nxY^j^}`cy@18vx6&e3JMN@2+ zze0CF^#il$h%vv=&G7#Hj#{nt*_?n1`vXm7*1a&&YQ6d48K^n+&fZ6zBm z9y@{TVM&f7;g!WZR#&$`i2cmMh?Ip&KucMS2ox ztOxct0yIu&9J6ejl%Jr%V!}qpF!~`mv=Xy;I|%M0eP!=xGGK+k!nguPV7cXB+tNUT za6nS(c2zvavvP70g56>89^P4GFY|}gm1T;{G=M=G`ltK*pQFy<7L^4&?~*FSda~T+ zhOiaPbGzVOJARi~=&$dTE_szMMQ{epcBAzSvD8s;q<;U*T8+k)L_)Y~LKgT68X<=n zGqRFyY0AfEzi(4sv(kmK|6c>V(NbYt0N}% z&m`y{L_Yv%y{=2a(bz55JzRaaqhN7gO-$`WY4ePNSrZ zVmN95i~dX%+IuUtN54K=_C?u|L#=p3#3jM$0j8aO69S=qclfZ#S@Y*#fsuurhYJyE z5d;T&2YXAtCM>g>?y4gj0Bg~!;L9j|?6y(X$Iq|$apXcFZ*Kpt08mZ`03MieoXmf% zo-kD@oB4)3EHhFQ@pjL9VptOi9xR(Sy=eUonT@}}-Q>xq$5LzqNt1XZrMmA2yA#=wtLxg9Z&Uvn?(@mGCQSA>stYoXo7G1Z{Z!d=zJ~XW8@T&#^ek z47KXnP}+OS#>#KIu9&n8J*sfPbkpHu$2>_kcvjK)7O|#jhdl72{f_h+izCW$3Puf| z3nZm19!xosT%hrJ;ttR`pJovzdM`C1e`dpfwlQ ztX#S@111thZ|@10H*JCju#11; zEAS}a-U?S~vwO?Dy|0IVf-$DnB&OsLW>_;6kR#`vc8BWuICv*zq90+C?ZI$5Jd!Bj z{Llj}yeu*^&>A=HJE^1~cOy_!!7=k?(vjY4#78%GmQ$xbfC29uK2L9H^?`4` zO#Yg^0Zp5b8`HF$le3y-#YW#g{`YBp(a%m{Dsl2(1{325&%}zs_xBT@@-JSzx_?$@ z(Sb=w4oDrzarPx9wjgQos?8sxt=;%@Pd=h*QfP!Evly|;$ga3nzt}J58PY5fGQm_s zqn_RV~%o=^KqLZ8Vx7$VjL=OCSI2h`)Xof+C1h zffI``;;<=q;k#GdGYLLSSNA*GS;ld~CLDdSe0e9)wLMg?US-Vu0Jxz#52X0H``-BJ z0GL;L%EzsKF)28h+wlXAeLVI{-^Pl%iLU*44Wq<2q)PB{b2|uWC_M6o1!C0$Dd;*6 zJ#0$mF@e9u(Sd4&0XO5Qu5#x%F$M4)X?`dyGFKQK74^2UG30B_60Q(xH2C+3g4O$| zxJ(0^4{-B-w?DWl{H;&DRjSQ8W_FyDm5ijZ3A|MpMK*bI36USPKo9ks*2_uTnCflT zPo3UVkTWD~wl+32E$?NK;$(!=)zhk{qq)6GX)_h@38VHP20@G{Dk#_@Goz3qkUJ<_ zMpnFfk??~H067uNittHd@ywoT$TZ<74J|DKJAZqT??L!y$^KxwfZb;Ktq`a+EQo{y 
zE-8s*lf*21<3`TQk-nWL@_b|`P>g3ICnczNc{{#jFEvT7hK{-UWdTm(@Svwfzwbd%G zZVl-wIuOnvPL%8e2N=^P4IC%bnAE_p{&}pB2#wu%3>1(})yhp_j05K-4t9oe;&T+c ziteQoA%_od;y?xwCFmND2Md=tMg1vV--~ni8>jn?_*vKb9dJ|%f~{vy9+$_N z8q6_CNVry&CS=ohhp{xoLcU!EE1jOQ^+J713*VX!9v!p0}3@omCJ#Ud&pb})CVeN+1X8; z^0}hC9G06&uGBK?LP8K@UMub-(!@9O3(K&yw3OHkti)*?8u}ZbYpy6iFK@_2{}$Y! zW5NyDf;hjvsl9mq>eZ`Qs$^wlF>J2#h*;P+>&cV5_T9M5r(lRmGdvh^ubp2)O{L+*i z$iSMqwm&a==+*`Y7jEh;IVEj$e$zX3lBmfhiz|INh7xX7q+!Mt%}2@@A(b-IamsL@P_sA$*>&LE!%6&M512ws!`m!)x2q& zGVD+6`8H9z_x0jpL>!_UH--%xhE@ggmM64YCyt!x^kWH8?HS<3whs38crY{g z`3?&b3|k$l~@x*F{LJAODuaa`XP^FA!J|C7HKPyHSgT{mB+|4 z!1v}TZOix~T)5Eo{rkG+0 h<87A^bg`VHIbQgy2nK@}FTQl9gda%R?qMJ+3YH~x z*Jy&>sUS>u{_&Ob!)g!HhorP{=!g*@*n(#sBf zj4m@nkX=V+YphaLSt%8Nf6D-aTX*i9IljdhJW^aVJN3g!Q*Z}CJ{X+?y%V}pOm=1v@uq2d1JlZK+1YV2x+LZC{#979Q9OS5|& z8Swg0*e_9-{F}0Nb@^D#IXS{(EsL?}>>{=SpWMC!Pnc6y5yQ*9hlu(JDUta?p^eqn7{yVm*=9L(wbDYIwKwy+3in{mmpO;WVb-8bz9X$#uf{$;t+`+{}= zgc`40;>Pp~in?x-IPb>4y!&Un#EYt2x5maYIy{Da7V9)gj>RZW4u%lkH@RPrKfA>s zStikrty2a`z$Z#V70w5D^XJY<`z}rIOy4{1NH74@b?77m0&-D>jvafX;RQiVn=`^fN7u#Fg{F zFp4>zZ0+kWKOy~QW?gpM^B;FQkFXFm4q{~p^_=cUXs?KEU`^}K5YyR@!Oue`lUuMY zNy>zE=E1yKyyuY*4b)xRO-AKO)&f(<{^Fu~bM4+xC@+`F?A+VbmB_g3t(nCV5*La@al4jVpR9p%~=bB-}=TyXo~_RiVOyYp*YAM0OD zukAyDxwF}fLm3$o$0ZZ`?Ky!qzrLX%DzXH<0H&&1CcT)nFn_*E0$;3m$Q^DJe4pjH z`G{i?cV;ww6$?u&$g;g}l>LaD=lS!^wC?4)!{*!9V#8auW$v>duh09K#=J;UOThm=z?t!B)-y{-3dM;^5qg)~5LTgU`+3_m5Gxvshpw$B2+6)Y()f?~T7rZpgNa^~d658oGljKn=Z zJiPPg(iUc%#@tl#YJI-nFj{`M$)u-1j5#%(m)DDG71Cxp;th+p{g^;Z(es>j%wujY zPBJL0sZoq~$`SI{{z4WldUPN^f0xD!+2yD6@)T9mc_d%}D_6#=8^N)Z48)#Jb%^-P z)dceF_(|&poh2*#(n1~?dnR#7xGBQ#LXpF3e3xVVyB&SW8U~?ax?jXLGMKN2+P}Wv zd}sA2*pYJYF`^j4Ka5b$zL{ke6-9Omdnz||*43zsDyq}ISXbLqB|bVyCi}}q6XwO3 zcQ{E$hldwB+BQAy^e&^bOC%|H`9;g@-U)&+wO6>|C{<|J4HYnCP#< zQK|Ux+6Q|kSL|HJb4GE=lcIxP5bi#D^rDJ-fb^!^+fY<$$<3y6@b=cZ)mr2|o75eP zgrMiZD4R=eyitY#dOpQp<$$S~OsCJ5_t=K+Zru|Gy;w#4ws2*|hfrMugSp;kI+^KE zEvzn97$%d(8t6C3yI)-Ea~avXH^y*%))CS5#&wxmfy>O~->RNwsoy#Db%l8Tcw?0; 
z#xHaUifO{xW>!xXz2lMB-^~(z*oj_bB!9{po(V_Dry1xz z9ZD~B2^QmB_z#%1vswSljk&;{A&QNhuwk$;8jsV{e%6iV89Xr_uE&CjGhhsMEVW^D zK-uC!I;Rp6jB-MB8tgacoKIyt=%Q~gO9s=R=XdK-I!R=2>x4P}^dSEzKw!9cDQ@oxI zsRWf-_~h@ZoLDTr^B=peW}#uHdNj_M3)(tQ&j@!+#-vyck{uL5bbjB|WF@+RGAnRC z=`TUQ{EFCt8>+Gryd$Ov+2<#Jxde6r*s>~zZuTTCPnKUp6L>Yx*{Xv1z>&@ovS=#`BrWuONXBCC(!^ir zJR@u$c^cXtZhi1E6ejw5+Y z>6paLBTr(THP1M+hVyq}=WaUULAqO;?9SwF2Dzs8S(8@t7XpaCX8%aLtT$r;~r^`DA5xid#$#on7^dF546f)H_(WX`tHna_%cRBO%i9X_G2JFGT zF-#*FG2*b+lx@R(jPtE4og-$Fx@9{>{v7tg?F+~_WCr3ZUZZYTUq=3WY`u8E*5)>Y z>8Dk@8))c+%f47;NEv_koMguFw6M^p7IdN=(y`chMzgt!IIOw(wT`g&qj zz;xl=?_T?=R4c#1%&a~#LsId#&WI5uk`|W+MQo!SJA7DkdaZiU*oW$R1_t6mZ=UiP zUlfSSx>_|KGM|bOO2Wh;UT2@n7`k`vW*dKy%<0IHs1)6YeyT5RdCYD4=jKufxGk2N zxAoff>)l?gGZFU^#%v|ltZWn~vQuGd$^i+vVvlpthYEO-|;^YzbJbH!xWdVwuu zo(?xn%5n^W3UQsYL}tS=%a}?l7poKYvnS_VHu|D zCCUkpu((uJcc9v&;-(EJRe~kl{#8kIwYdEwyKUy7^5K0ekjFM|Non!>kscs`5u%w| z7qZkA$rn$$9@qERRR8n9)LS1%6$kS*Y7p<>Br?Y