From f2ccfff803b4d8c5e71a56dfb39c4470c4d03a55 Mon Sep 17 00:00:00 2001
From: Kjell Winblad
Date: Mon, 3 Jun 2024 16:14:26 +0200
Subject: [PATCH 01/33] fix(pgsql connector): handle prepared statement already exists

In a user's log file it was found that the pgsql driver can end up in a
situation where the prepared statement for a channel/action is not properly
removed before a channel with the same name as the prepared statement is
added to the connector. This commit handles this by attempting to remove the
old prepared statement if one already exists when adding a channel.

Related issue:
https://emqx.atlassian.net/browse/EEC-1036
---
 .../test/emqx_bridge_pgsql_SUITE.erl          | 53 +++++++++++++++++
 .../src/emqx_postgresql.app.src               |  2 +-
 apps/emqx_postgresql/src/emqx_postgresql.erl  | 59 +++++++++++++++++--
 3 files changed, 108 insertions(+), 6 deletions(-)

diff --git a/apps/emqx_bridge_pgsql/test/emqx_bridge_pgsql_SUITE.erl b/apps/emqx_bridge_pgsql/test/emqx_bridge_pgsql_SUITE.erl
index f4917f387..fb9341ddb 100644
--- a/apps/emqx_bridge_pgsql/test/emqx_bridge_pgsql_SUITE.erl
+++ b/apps/emqx_bridge_pgsql/test/emqx_bridge_pgsql_SUITE.erl
@@ -715,6 +715,59 @@ t_missing_table(Config) ->
     connect_and_create_table(Config),
     ok.
 
+t_prepared_statement_exists(Config) ->
+    Name = ?config(pgsql_name, Config),
+    BridgeType = ?config(pgsql_bridge_type, Config),
+    %% We should recover if the prepared statement name already exists in the
+    %% driver
+    ?check_trace(
+        begin
+            ?inject_crash(
+                #{?snk_kind := pgsql_fake_prepare_statement_exists},
+                snabbkaffe_nemesis:recover_after(1)
+            ),
+            ?assertMatch({ok, _}, create_bridge(Config)),
+            ?retry(
+                _Sleep = 1_000,
+                _Attempts = 20,
+                ?assertMatch(
+                    #{status := Status} when Status == connected,
+                    emqx_bridge_v2:health_check(BridgeType, Name)
+                )
+            ),
+            ok
+        end,
+        fun(Trace) ->
+            ?assertMatch([_ | _], ?of_kind(pgsql_prepared_statement_exists, Trace)),
+            ok
+        end
+    ),
+    %% We should get status disconnected if removing already existing statment don't help
+    ?check_trace(
+        begin
+            ?inject_crash(
+                #{?snk_kind := pgsql_fake_prepare_statement_exists},
+                snabbkaffe_nemesis:recover_after(30)
+            ),
+            ?assertMatch({ok, _}, create_bridge(Config)),
+            ?retry(
+                _Sleep = 1_000,
+                _Attempts = 20,
+                ?assertMatch(
+                    #{status := Status} when Status == disconnected,
+                    emqx_bridge_v2:health_check(BridgeType, Name)
+                )
+            ),
+            snabbkaffe_nemesis:cleanup(),
+            ok
+        end,
+        fun(Trace) ->
+            ?assertMatch([_ | _], ?of_kind(pgsql_prepared_statement_exists, Trace)),
+            ok
+        end
+    ),
+    ok.
+ t_table_removed(Config) -> Name = ?config(pgsql_name, Config), BridgeType = ?config(pgsql_bridge_type, Config), diff --git a/apps/emqx_postgresql/src/emqx_postgresql.app.src b/apps/emqx_postgresql/src/emqx_postgresql.app.src index 5faf0aa47..2cf3392bf 100644 --- a/apps/emqx_postgresql/src/emqx_postgresql.app.src +++ b/apps/emqx_postgresql/src/emqx_postgresql.app.src @@ -1,6 +1,6 @@ {application, emqx_postgresql, [ {description, "EMQX PostgreSQL Database Connector"}, - {vsn, "0.2.0"}, + {vsn, "0.2.1"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_postgresql/src/emqx_postgresql.erl b/apps/emqx_postgresql/src/emqx_postgresql.erl index ad674a07c..e3ce3c479 100644 --- a/apps/emqx_postgresql/src/emqx_postgresql.erl +++ b/apps/emqx_postgresql/src/emqx_postgresql.erl @@ -240,7 +240,9 @@ close_prepared_statement(ChannelId, #{pool_name := PoolName} = State) -> close_prepared_statement([WorkerPid | Rest], ChannelId, State) -> %% We ignore errors since any error probably means that the - %% prepared statement doesn't exist. + %% prepared statement doesn't exist. If it exists when we try + %% to insert one with the same name, we will try to remove it + %% again anyway. try ecpool_worker:client(WorkerPid) of {ok, Conn} -> Statement = get_prepared_statement(ChannelId, State), @@ -648,16 +650,21 @@ do_prepare_sql([], _Prepares, LastSts) -> {ok, LastSts}. prepare_sql_to_conn(Conn, Prepares) -> - prepare_sql_to_conn(Conn, Prepares, #{}). + prepare_sql_to_conn(Conn, Prepares, #{}, 0). -prepare_sql_to_conn(Conn, [], Statements) when is_pid(Conn) -> +prepare_sql_to_conn(Conn, [], Statements, _Attempts) when is_pid(Conn) -> {ok, Statements}; -prepare_sql_to_conn(Conn, [{Key, {SQL, _RowTemplate}} | Rest], Statements) when is_pid(Conn) -> +prepare_sql_to_conn(Conn, [{Key, _} | _Rest], _Statements, 3) when is_pid(Conn) -> + {error, {failed_to_remove_prev_prepared_statement, Key}}; +prepare_sql_to_conn( + Conn, [{Key, {SQL, _RowTemplate}} | Rest] = ToPrepare, Statements, Attempts +) when is_pid(Conn) -> LogMeta = #{msg => "postgresql_prepare_statement", name => Key, sql => SQL}, ?SLOG(info, LogMeta), + test_maybe_inject_prepared_statement_already_exists(Conn, Key, SQL), case epgsql:parse2(Conn, Key, SQL, []) of {ok, Statement} -> - prepare_sql_to_conn(Conn, Rest, Statements#{Key => Statement}); + prepare_sql_to_conn(Conn, Rest, Statements#{Key => Statement}, 0); {error, {error, error, _, undefined_table, _, _} = Error} -> %% Target table is not created ?tp(pgsql_undefined_table, #{}), @@ -668,6 +675,29 @@ prepare_sql_to_conn(Conn, [{Key, {SQL, _RowTemplate}} | Rest], Statements) when ), ?SLOG(error, LogMsg), {error, undefined_table}; + {error, {error, error, _, duplicate_prepared_statement, _, _}} = Error -> + ?tp(pgsql_prepared_statement_exists, #{}), + LogMsg = + maps:merge( + LogMeta#{ + msg => "postgresql_prepared_statment_with_same_name_already_exists", + explain => << + "A prepared statement with the same name already " + "exists in the driver. Will attempt to remove the " + "previous prepared statement with the name and then " + "try again." 
+ >> + }, + translate_to_log_context(Error) + ), + ?SLOG(warning, LogMsg), + case epgsql:close(Conn, statement, Key) of + ok -> + ?SLOG(info, #{msg => "pqsql_closed_statement_succefully"}); + {error, Error} -> + ?SLOG(warning, #{msg => "pqsql_close_statement_failed", cause => Error}) + end, + prepare_sql_to_conn(Conn, ToPrepare, Statements, Attempts + 1); {error, Error} -> TranslatedError = translate_to_log_context(Error), LogMsg = @@ -679,6 +709,25 @@ prepare_sql_to_conn(Conn, [{Key, {SQL, _RowTemplate}} | Rest], Statements) when {error, export_error(TranslatedError)} end. +-ifdef(TEST). +test_maybe_inject_prepared_statement_already_exists(Conn, Key, SQL) -> + try + %% In test we inject a crash in the following trace point to test the + %% scenario when the prepared statement already exists. It is unknkown + %% in which scenario this can happen but it has been observed in a + %% production log file. See: + %% https://emqx.atlassian.net/browse/EEC-1036 + ?tp(pgsql_fake_prepare_statement_exists, #{}) + catch + _:_ -> + epgsql:parse2(Conn, Key, SQL, []) + end, + ok. +-else. +test_maybe_inject_prepared_statement_already_exists(_Conn, _Key, _SQL) -> + ok. +-endif. + to_bin(Bin) when is_binary(Bin) -> Bin; to_bin(Atom) when is_atom(Atom) -> From 572ca6433eafcd27ac0de33389ce1cf0aa3384f9 Mon Sep 17 00:00:00 2001 From: Kjell Winblad Date: Tue, 4 Jun 2024 09:46:55 +0200 Subject: [PATCH 02/33] fix(pgsql connector): improvements due to suggestions from @thalesmg Co-authored-by: Thales Macedo Garitezi --- apps/emqx_postgresql/src/emqx_postgresql.erl | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/apps/emqx_postgresql/src/emqx_postgresql.erl b/apps/emqx_postgresql/src/emqx_postgresql.erl index e3ce3c479..9553b3ceb 100644 --- a/apps/emqx_postgresql/src/emqx_postgresql.erl +++ b/apps/emqx_postgresql/src/emqx_postgresql.erl @@ -654,7 +654,7 @@ prepare_sql_to_conn(Conn, Prepares) -> prepare_sql_to_conn(Conn, [], Statements, _Attempts) when is_pid(Conn) -> {ok, Statements}; -prepare_sql_to_conn(Conn, [{Key, _} | _Rest], _Statements, 3) when is_pid(Conn) -> +prepare_sql_to_conn(Conn, [{Key, _} | _Rest], _Statements, _MaxAttempts = 3) when is_pid(Conn) -> {error, {failed_to_remove_prev_prepared_statement, Key}}; prepare_sql_to_conn( Conn, [{Key, {SQL, _RowTemplate}} | Rest] = ToPrepare, Statements, Attempts @@ -665,7 +665,7 @@ prepare_sql_to_conn( case epgsql:parse2(Conn, Key, SQL, []) of {ok, Statement} -> prepare_sql_to_conn(Conn, Rest, Statements#{Key => Statement}, 0); - {error, {error, error, _, undefined_table, _, _} = Error} -> + {error, #error{severity = error, codename = undefined_table} = Error} -> %% Target table is not created ?tp(pgsql_undefined_table, #{}), LogMsg = @@ -675,7 +675,7 @@ prepare_sql_to_conn( ), ?SLOG(error, LogMsg), {error, undefined_table}; - {error, {error, error, _, duplicate_prepared_statement, _, _}} = Error -> + {error, #error{severity = error, codename = duplicate_prepared_statement}} = Error -> ?tp(pgsql_prepared_statement_exists, #{}), LogMsg = maps:merge( @@ -693,7 +693,7 @@ prepare_sql_to_conn( ?SLOG(warning, LogMsg), case epgsql:close(Conn, statement, Key) of ok -> - ?SLOG(info, #{msg => "pqsql_closed_statement_succefully"}); + ?SLOG(info, #{msg => "pqsql_closed_statement_successfully"}); {error, Error} -> ?SLOG(warning, #{msg => "pqsql_close_statement_failed", cause => Error}) end, From 336089f8a7fd5d24c83deeaad3a49051d2204467 Mon Sep 17 00:00:00 2001 From: Kjell Winblad Date: Wed, 5 Jun 2024 15:53:02 +0200 Subject: [PATCH 03/33] fix: bug 
found by dialyzer and make test case cleaner --- .../test/emqx_bridge_pgsql_SUITE.erl | 44 +++++++++++++++---- apps/emqx_postgresql/src/emqx_postgresql.erl | 24 +--------- 2 files changed, 38 insertions(+), 30 deletions(-) diff --git a/apps/emqx_bridge_pgsql/test/emqx_bridge_pgsql_SUITE.erl b/apps/emqx_bridge_pgsql/test/emqx_bridge_pgsql_SUITE.erl index fb9341ddb..c6eb99f83 100644 --- a/apps/emqx_bridge_pgsql/test/emqx_bridge_pgsql_SUITE.erl +++ b/apps/emqx_bridge_pgsql/test/emqx_bridge_pgsql_SUITE.erl @@ -715,17 +715,39 @@ t_missing_table(Config) -> connect_and_create_table(Config), ok. +%% We test that we can handle when the prepared statement with the channel +%% name already exists in the connection instance when we try to make a new +%% prepared statement. It is unknown in which scenario this can happen but it +%% has been observed in a production log file. +%% See: +%% https://emqx.atlassian.net/browse/EEC-1036 t_prepared_statement_exists(Config) -> Name = ?config(pgsql_name, Config), BridgeType = ?config(pgsql_bridge_type, Config), + emqx_common_test_helpers:on_exit(fun() -> + meck:unload() + end), + MeckOpts = [passthrough, no_link, no_history, non_strict], + meck:new(emqx_postgresql, MeckOpts), + InsertPrepStatementDupAndThenRemoveMeck = + fun(Conn, Key, SQL, List) -> + meck:passthrough([Conn, Key, SQL, List]), + meck:delete( + epgsql, + parse2, + 4 + ), + meck:passthrough([Conn, Key, SQL, List]) + end, + meck:expect( + epgsql, + parse2, + InsertPrepStatementDupAndThenRemoveMeck + ), %% We should recover if the prepared statement name already exists in the %% driver ?check_trace( begin - ?inject_crash( - #{?snk_kind := pgsql_fake_prepare_statement_exists}, - snabbkaffe_nemesis:recover_after(1) - ), ?assertMatch({ok, _}, create_bridge(Config)), ?retry( _Sleep = 1_000, @@ -742,13 +764,19 @@ t_prepared_statement_exists(Config) -> ok end ), + InsertPrepStatementDup = + fun(Conn, Key, SQL, List) -> + meck:passthrough([Conn, Key, SQL, List]), + meck:passthrough([Conn, Key, SQL, List]) + end, + meck:expect( + epgsql, + parse2, + InsertPrepStatementDup + ), %% We should get status disconnected if removing already existing statment don't help ?check_trace( begin - ?inject_crash( - #{?snk_kind := pgsql_fake_prepare_statement_exists}, - snabbkaffe_nemesis:recover_after(30) - ), ?assertMatch({ok, _}, create_bridge(Config)), ?retry( _Sleep = 1_000, diff --git a/apps/emqx_postgresql/src/emqx_postgresql.erl b/apps/emqx_postgresql/src/emqx_postgresql.erl index 9553b3ceb..ea83b951e 100644 --- a/apps/emqx_postgresql/src/emqx_postgresql.erl +++ b/apps/emqx_postgresql/src/emqx_postgresql.erl @@ -661,7 +661,6 @@ prepare_sql_to_conn( ) when is_pid(Conn) -> LogMeta = #{msg => "postgresql_prepare_statement", name => Key, sql => SQL}, ?SLOG(info, LogMeta), - test_maybe_inject_prepared_statement_already_exists(Conn, Key, SQL), case epgsql:parse2(Conn, Key, SQL, []) of {ok, Statement} -> prepare_sql_to_conn(Conn, Rest, Statements#{Key => Statement}, 0); @@ -694,8 +693,8 @@ prepare_sql_to_conn( case epgsql:close(Conn, statement, Key) of ok -> ?SLOG(info, #{msg => "pqsql_closed_statement_successfully"}); - {error, Error} -> - ?SLOG(warning, #{msg => "pqsql_close_statement_failed", cause => Error}) + {error, CloseError} -> + ?SLOG(warning, #{msg => "pqsql_close_statement_failed", cause => CloseError}) end, prepare_sql_to_conn(Conn, ToPrepare, Statements, Attempts + 1); {error, Error} -> @@ -709,25 +708,6 @@ prepare_sql_to_conn( {error, export_error(TranslatedError)} end. --ifdef(TEST). 
-test_maybe_inject_prepared_statement_already_exists(Conn, Key, SQL) ->
-    try
-        %% In test we inject a crash in the following trace point to test the
-        %% scenario when the prepared statement already exists. It is unknkown
-        %% in which scenario this can happen but it has been observed in a
-        %% production log file. See:
-        %% https://emqx.atlassian.net/browse/EEC-1036
-        ?tp(pgsql_fake_prepare_statement_exists, #{})
-    catch
-        _:_ ->
-            epgsql:parse2(Conn, Key, SQL, [])
-    end,
-    ok.
--else.
-test_maybe_inject_prepared_statement_already_exists(_Conn, _Key, _SQL) ->
-    ok.
--endif.
-
 to_bin(Bin) when is_binary(Bin) ->
     Bin;
 to_bin(Atom) when is_atom(Atom) ->

From 2956e849eb4eac34897ef0f96a63cd81131a0e62 Mon Sep 17 00:00:00 2001
From: Kjell Winblad
Date: Wed, 5 Jun 2024 16:58:14 +0200
Subject: [PATCH 04/33] fix(pgsql connector): better msg when failing to remove statement

---
 apps/emqx_postgresql/src/emqx_postgresql.erl | 20 ++++++++++++++------
 1 file changed, 14 insertions(+), 6 deletions(-)

diff --git a/apps/emqx_postgresql/src/emqx_postgresql.erl b/apps/emqx_postgresql/src/emqx_postgresql.erl
index ea83b951e..e0add780c 100644
--- a/apps/emqx_postgresql/src/emqx_postgresql.erl
+++ b/apps/emqx_postgresql/src/emqx_postgresql.erl
@@ -654,8 +654,8 @@ prepare_sql_to_conn(Conn, Prepares) ->
 
 prepare_sql_to_conn(Conn, [], Statements, _Attempts) when is_pid(Conn) ->
     {ok, Statements};
-prepare_sql_to_conn(Conn, [{Key, _} | _Rest], _Statements, _MaxAttempts = 3) when is_pid(Conn) ->
-    {error, {failed_to_remove_prev_prepared_statement, Key}};
+prepare_sql_to_conn(Conn, [{Key, _} | _Rest], _Statements, _MaxAttempts = 2) when is_pid(Conn) ->
+    failed_to_remove_prev_prepared_statement_error();
 prepare_sql_to_conn(
     Conn, [{Key, {SQL, _RowTemplate}} | Rest] = ToPrepare, Statements, Attempts
 ) when is_pid(Conn) ->
@@ -692,11 +692,12 @@ prepare_sql_to_conn(
             ?SLOG(warning, LogMsg),
             case epgsql:close(Conn, statement, Key) of
                 ok ->
-                    ?SLOG(info, #{msg => "pqsql_closed_statement_successfully"});
+                    ?SLOG(info, #{msg => "pqsql_closed_statement_successfully"}),
+                    prepare_sql_to_conn(Conn, ToPrepare, Statements, Attempts + 1);
                 {error, CloseError} ->
-                    ?SLOG(warning, #{msg => "pqsql_close_statement_failed", cause => CloseError})
-            end,
-            prepare_sql_to_conn(Conn, ToPrepare, Statements, Attempts + 1);
+                    ?SLOG(error, #{msg => "pqsql_close_statement_failed", cause => CloseError}),
+                    failed_to_remove_prev_prepared_statement_error()
+            end;
         {error, Error} ->
             TranslatedError = translate_to_log_context(Error),
             LogMsg =
@@ -708,6 +709,13 @@ prepare_sql_to_conn(
             {error, export_error(TranslatedError)}
     end.
 
+failed_to_remove_prev_prepared_statement_error() ->
+    Msg =
+        ("A previous prepared statement for the action already exists and "
+        "we are not able to close it. Please try to disable and then enable "
+        "the connector to resolve this issue."),
+    {error, unicode:characters_to_binary(Msg)}.
+ to_bin(Bin) when is_binary(Bin) -> Bin; to_bin(Atom) when is_atom(Atom) -> From 60556abc4f9d6818cfda809c97102f9cc3a8e53c Mon Sep 17 00:00:00 2001 From: Ilya Averyanov Date: Wed, 29 May 2024 16:45:23 +0300 Subject: [PATCH 05/33] fix(dashboard_monitor): correctly merge old and new sample data --- .../src/emqx_dashboard_monitor.erl | 4 +-- .../test/emqx_dashboard_monitor_SUITE.erl | 36 +++++++++++++++++++ changes/ce/fix-13156.en.md | 1 + 3 files changed, 39 insertions(+), 2 deletions(-) create mode 100644 changes/ce/fix-13156.en.md diff --git a/apps/emqx_dashboard/src/emqx_dashboard_monitor.erl b/apps/emqx_dashboard/src/emqx_dashboard_monitor.erl index a82be6a11..c4195e449 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard_monitor.erl +++ b/apps/emqx_dashboard/src/emqx_dashboard_monitor.erl @@ -209,7 +209,7 @@ do_call(Request) -> gen_server:call(?MODULE, Request, 5000). do_sample(all, Time) -> - do_sample(mria:cluster_nodes(running), Time, #{}); + do_sample(emqx:running_nodes(), Time, #{}); do_sample(Node, Time) when Node == node() -> MS = match_spec(Time), internal_format(ets:select(?TAB, MS)); @@ -259,7 +259,7 @@ merge_cluster_sampler_map(M1, M2) -> Key =:= subscriptions_durable; Key =:= disconnected_durable_sessions -> - Map#{Key => maps:get(Key, M1)}; + Map#{Key => maps:get(Key, M1, maps:get(Key, M2, 0))}; (Key, Map) -> Map#{Key => maps:get(Key, M1, 0) + maps:get(Key, M2, 0)} end, diff --git a/apps/emqx_dashboard/test/emqx_dashboard_monitor_SUITE.erl b/apps/emqx_dashboard/test/emqx_dashboard_monitor_SUITE.erl index 8f561deca..c82367faf 100644 --- a/apps/emqx_dashboard/test/emqx_dashboard_monitor_SUITE.erl +++ b/apps/emqx_dashboard/test/emqx_dashboard_monitor_SUITE.erl @@ -27,6 +27,7 @@ -include_lib("common_test/include/ct.hrl"). -include_lib("snabbkaffe/include/snabbkaffe.hrl"). -include_lib("emqx/include/emqx_mqtt.hrl"). +-include_lib("emqx/include/asserts.hrl"). -define(SERVER, "http://127.0.0.1:18083"). -define(BASE_PATH, "/api/v5"). @@ -191,6 +192,41 @@ t_monitor_sampler_format(_Config) -> [?assert(lists:member(SamplerName, SamplerKeys)) || SamplerName <- ?SAMPLER_LIST], ok. +t_handle_old_monitor_data(_Config) -> + Now = erlang:system_time(second), + FakeOldData = maps:from_list( + lists:map( + fun(N) -> + Time = (Now - N) * 1000, + {Time, #{foo => 123}} + end, + lists:seq(0, 9) + ) + ), + + Self = self(), + + ok = meck:new(emqx, [passthrough, no_history]), + ok = meck:expect(emqx, running_nodes, fun() -> [node(), 'other@node'] end), + ok = meck:new(emqx_dashboard_proto_v1, [passthrough, no_history]), + ok = meck:expect(emqx_dashboard_proto_v1, do_sample, fun('other@node', _Time) -> + Self ! sample_called, + FakeOldData + end), + + {ok, _} = + snabbkaffe:block_until( + ?match_event(#{?snk_kind := dashboard_monitor_flushed}), + infinity + ), + ?assertMatch( + #{}, + hd(emqx_dashboard_monitor:samplers()) + ), + ?assertReceive(sample_called, 1_000), + ok = meck:unload([emqx, emqx_dashboard_proto_v1]), + ok. + t_monitor_api(_) -> {ok, _} = snabbkaffe:block_until( diff --git a/changes/ce/fix-13156.en.md b/changes/ce/fix-13156.en.md new file mode 100644 index 000000000..6f4dffd07 --- /dev/null +++ b/changes/ce/fix-13156.en.md @@ -0,0 +1 @@ +Fix crashes on monitor dashboard page happening after update to v5.7.0. 
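The dashboard fix above comes down to tolerating sampler maps from nodes that apparently do not report the newer keys (for example, nodes still running an older release during an upgrade): instead of maps:get/2, which raises badkey on a missing key, the merge falls back to the other map's value and finally to 0. Below is a minimal standalone sketch of that merge-with-default idea, assuming plain counter semantics for every key; the module and function names are illustrative and not part of EMQX.

-module(sampler_merge_sketch).
-export([merge/2]).

%% Merge two per-node sample maps. A key missing on either side (e.g. a
%% counter introduced only in a newer release) defaults to 0 instead of
%% crashing the merge.
merge(M1, M2) ->
    Keys = lists:usort(maps:keys(M1) ++ maps:keys(M2)),
    maps:from_list([{K, maps:get(K, M1, 0) + maps:get(K, M2, 0)} || K <- Keys]).

For example, merge(#{sent => 1}, #{sent => 2, persisted => 5}) returns #{sent => 3, persisted => 5}. The actual EMQX code in the patch additionally special-cases gauge-like keys such as disconnected_durable_sessions, taking them from one node (with the same missing-key fallback) rather than summing them.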
From ef36350bf66bd9284d597252c3a58ced8af683b3 Mon Sep 17 00:00:00 2001 From: Thales Macedo Garitezi Date: Tue, 4 Jun 2024 18:19:20 -0300 Subject: [PATCH 06/33] feat: implement message transformation Fixes https://emqx.atlassian.net/browse/EMQX-12517 --- apps/emqx/include/emqx_hooks.hrl | 3 +- apps/emqx/src/emqx_hookpoints.erl | 1 + apps/emqx/src/emqx_metrics.erl | 6 + apps/emqx/test/emqx_cth_suite.erl | 2 + apps/emqx_conf/src/emqx_conf.app.src | 2 +- apps/emqx_conf/src/emqx_conf_cli.erl | 9 + .../emqx_dashboard/include/emqx_dashboard.hrl | 4 + .../src/emqx_dashboard_monitor.erl | 4 + .../src/emqx_dashboard_monitor_api.erl | 8 + .../src/emqx_enterprise.app.src | 2 +- .../src/emqx_enterprise_schema.erl | 4 +- apps/emqx_machine/priv/reboot_lists.eterm | 1 + apps/emqx_machine/src/emqx_machine.app.src | 2 +- apps/emqx_message_transformation/BSL.txt | 94 ++ apps/emqx_message_transformation/README.md | 29 + apps/emqx_message_transformation/rebar.config | 15 + .../src/emqx_message_transformation.app.src | 14 + .../src/emqx_message_transformation.erl | 820 ++++++++++ .../src/emqx_message_transformation_app.erl | 34 + .../src/emqx_message_transformation_bif.erl | 38 + .../emqx_message_transformation_http_api.erl | 656 ++++++++ .../emqx_message_transformation_registry.erl | 245 +++ .../emqx_message_transformation_schema.erl | 331 ++++ .../src/emqx_message_transformation_sup.erl | 47 + ..._message_transformation_http_api_SUITE.erl | 1443 +++++++++++++++++ .../emqx_message_transformation_tests.erl | 174 ++ .../include/emqx_prometheus.hrl | 5 +- .../src/emqx_prometheus.app.src | 2 +- .../src/emqx_prometheus_api.erl | 24 +- ...emqx_prometheus_message_transformation.erl | 222 +++ .../test/emqx_prometheus_data_SUITE.erl | 48 +- .../emqx_rule_engine/src/emqx_rule_events.erl | 81 +- .../src/emqx_rule_runtime.erl | 2 +- apps/emqx_schema_validation/README.md | 2 +- .../src/emqx_schema_validation.app.src | 2 +- .../src/emqx_schema_validation.erl | 2 +- apps/emqx_utils/src/emqx_variform.erl | 12 +- apps/emqx_utils/src/emqx_variform_parser.yrl | 1 + apps/emqx_utils/test/emqx_variform_tests.erl | 10 +- mix.exs | 1 + rebar.config.erl | 1 + ...emqx_message_transformation_http_api.hocon | 36 + rel/i18n/emqx_prometheus_api.hocon | 5 + 43 files changed, 4424 insertions(+), 20 deletions(-) create mode 100644 apps/emqx_message_transformation/BSL.txt create mode 100644 apps/emqx_message_transformation/README.md create mode 100644 apps/emqx_message_transformation/rebar.config create mode 100644 apps/emqx_message_transformation/src/emqx_message_transformation.app.src create mode 100644 apps/emqx_message_transformation/src/emqx_message_transformation.erl create mode 100644 apps/emqx_message_transformation/src/emqx_message_transformation_app.erl create mode 100644 apps/emqx_message_transformation/src/emqx_message_transformation_bif.erl create mode 100644 apps/emqx_message_transformation/src/emqx_message_transformation_http_api.erl create mode 100644 apps/emqx_message_transformation/src/emqx_message_transformation_registry.erl create mode 100644 apps/emqx_message_transformation/src/emqx_message_transformation_schema.erl create mode 100644 apps/emqx_message_transformation/src/emqx_message_transformation_sup.erl create mode 100644 apps/emqx_message_transformation/test/emqx_message_transformation_http_api_SUITE.erl create mode 100644 apps/emqx_message_transformation/test/emqx_message_transformation_tests.erl create mode 100644 apps/emqx_prometheus/src/emqx_prometheus_message_transformation.erl create mode 100644 
rel/i18n/emqx_message_transformation_http_api.hocon diff --git a/apps/emqx/include/emqx_hooks.hrl b/apps/emqx/include/emqx_hooks.hrl index 2e632d545..70d377b9a 100644 --- a/apps/emqx/include/emqx_hooks.hrl +++ b/apps/emqx/include/emqx_hooks.hrl @@ -25,7 +25,8 @@ -define(HP_AUTHN, 970). -define(HP_AUTHZ, 960). -define(HP_SYS_MSGS, 950). --define(HP_MSG_VALIDATION, 945). +-define(HP_SCHEMA_VALIDATION, 945). +-define(HP_MESSAGE_TRANSFORMATION, 943). -define(HP_TOPIC_METRICS, 940). -define(HP_RETAINER, 930). -define(HP_AUTO_SUB, 920). diff --git a/apps/emqx/src/emqx_hookpoints.erl b/apps/emqx/src/emqx_hookpoints.erl index e33896719..0147f1ba6 100644 --- a/apps/emqx/src/emqx_hookpoints.erl +++ b/apps/emqx/src/emqx_hookpoints.erl @@ -60,6 +60,7 @@ 'message.publish', 'message.puback', 'message.dropped', + 'message.transformation_failed', 'schema.validation_failed', 'message.delivered', 'message.acked', diff --git a/apps/emqx/src/emqx_metrics.erl b/apps/emqx/src/emqx_metrics.erl index 13ac40c68..06d0046ec 100644 --- a/apps/emqx/src/emqx_metrics.erl +++ b/apps/emqx/src/emqx_metrics.erl @@ -211,6 +211,10 @@ {counter, 'messages.validation_failed'}, %% % Messages that passed validations {counter, 'messages.validation_succeeded'}, + %% % Messages that failed transformations + {counter, 'messages.transformation_failed'}, + %% % Messages that passed transformations + {counter, 'messages.transformation_succeeded'}, % QoS2 Messages expired {counter, 'messages.dropped.await_pubrel_timeout'}, % Messages dropped @@ -721,4 +725,6 @@ reserved_idx('overload_protection.new_conn') -> 404; reserved_idx('messages.validation_succeeded') -> 405; reserved_idx('messages.validation_failed') -> 406; reserved_idx('messages.persisted') -> 407; +reserved_idx('messages.transformation_succeeded') -> 408; +reserved_idx('messages.transformation_failed') -> 409; reserved_idx(_) -> undefined. diff --git a/apps/emqx/test/emqx_cth_suite.erl b/apps/emqx/test/emqx_cth_suite.erl index cf52afce1..ef83d7448 100644 --- a/apps/emqx/test/emqx_cth_suite.erl +++ b/apps/emqx/test/emqx_cth_suite.erl @@ -385,6 +385,8 @@ default_appspec(emqx_schema_registry, _SuiteOpts) -> #{schema_mod => emqx_schema_registry_schema, config => #{}}; default_appspec(emqx_schema_validation, _SuiteOpts) -> #{schema_mod => emqx_schema_validation_schema, config => #{}}; +default_appspec(emqx_message_transformation, _SuiteOpts) -> + #{schema_mod => emqx_message_transformation_schema, config => #{}}; default_appspec(_, _) -> #{}. diff --git a/apps/emqx_conf/src/emqx_conf.app.src b/apps/emqx_conf/src/emqx_conf.app.src index d09090a74..1c2fbc77a 100644 --- a/apps/emqx_conf/src/emqx_conf.app.src +++ b/apps/emqx_conf/src/emqx_conf.app.src @@ -1,6 +1,6 @@ {application, emqx_conf, [ {description, "EMQX configuration management"}, - {vsn, "0.2.0"}, + {vsn, "0.2.1"}, {registered, []}, {mod, {emqx_conf_app, []}}, {applications, [kernel, stdlib]}, diff --git a/apps/emqx_conf/src/emqx_conf_cli.erl b/apps/emqx_conf/src/emqx_conf_cli.erl index 08a86d49a..257cc7453 100644 --- a/apps/emqx_conf/src/emqx_conf_cli.erl +++ b/apps/emqx_conf/src/emqx_conf_cli.erl @@ -37,6 +37,7 @@ -define(AUDIT_MOD, audit). -define(UPDATE_READONLY_KEYS_PROHIBITED, <<"Cannot update read-only key '~s'.">>). -define(SCHEMA_VALIDATION_CONF_ROOT_BIN, <<"schema_validation">>). +-define(MESSAGE_TRANSFORMATION_CONF_ROOT_BIN, <<"message_transformation">>). -dialyzer({no_match, [load/0]}). 
@@ -335,6 +336,14 @@ update_config_cluster(?SCHEMA_VALIDATION_CONF_ROOT_BIN = Key, NewConf, #{mode := check_res(Key, emqx_conf:update([Key], {merge, NewConf}, ?OPTIONS), NewConf, Opts); update_config_cluster(?SCHEMA_VALIDATION_CONF_ROOT_BIN = Key, NewConf, #{mode := replace} = Opts) -> check_res(Key, emqx_conf:update([Key], {replace, NewConf}, ?OPTIONS), NewConf, Opts); +update_config_cluster( + ?MESSAGE_TRANSFORMATION_CONF_ROOT_BIN = Key, NewConf, #{mode := merge} = Opts +) -> + check_res(Key, emqx_conf:update([Key], {merge, NewConf}, ?OPTIONS), NewConf, Opts); +update_config_cluster( + ?MESSAGE_TRANSFORMATION_CONF_ROOT_BIN = Key, NewConf, #{mode := replace} = Opts +) -> + check_res(Key, emqx_conf:update([Key], {replace, NewConf}, ?OPTIONS), NewConf, Opts); update_config_cluster(Key, NewConf, #{mode := merge} = Opts) -> Merged = merge_conf(Key, NewConf), check_res(Key, emqx_conf:update([Key], Merged, ?OPTIONS), NewConf, Opts); diff --git a/apps/emqx_dashboard/include/emqx_dashboard.hrl b/apps/emqx_dashboard/include/emqx_dashboard.hrl index b2e2c1bae..b8415193b 100644 --- a/apps/emqx_dashboard/include/emqx_dashboard.hrl +++ b/apps/emqx_dashboard/include/emqx_dashboard.hrl @@ -67,6 +67,8 @@ %, sent_bytes validation_succeeded, validation_failed, + transformation_succeeded, + transformation_failed, dropped, persisted ]). @@ -90,6 +92,8 @@ sent => sent_msg_rate, validation_succeeded => validation_succeeded_rate, validation_failed => validation_failed_rate, + transformation_succeeded => transformation_succeeded_rate, + transformation_failed => transformation_failed_rate, dropped => dropped_msg_rate, persisted => persisted_rate }). diff --git a/apps/emqx_dashboard/src/emqx_dashboard_monitor.erl b/apps/emqx_dashboard/src/emqx_dashboard_monitor.erl index a82be6a11..5222e4876 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard_monitor.erl +++ b/apps/emqx_dashboard/src/emqx_dashboard_monitor.erl @@ -474,6 +474,10 @@ stats(validation_succeeded) -> emqx_metrics:val('messages.validation_succeeded'); stats(validation_failed) -> emqx_metrics:val('messages.validation_failed'); +stats(transformation_succeeded) -> + emqx_metrics:val('messages.transformation_succeeded'); +stats(transformation_failed) -> + emqx_metrics:val('messages.transformation_failed'); stats(dropped) -> emqx_metrics:val('messages.dropped'); stats(persisted) -> diff --git a/apps/emqx_dashboard/src/emqx_dashboard_monitor_api.erl b/apps/emqx_dashboard/src/emqx_dashboard_monitor_api.erl index fafa821e3..7731be477 100644 --- a/apps/emqx_dashboard/src/emqx_dashboard_monitor_api.erl +++ b/apps/emqx_dashboard/src/emqx_dashboard_monitor_api.erl @@ -198,6 +198,10 @@ swagger_desc(validation_succeeded) -> swagger_desc_format("Schema validations succeeded "); swagger_desc(validation_failed) -> swagger_desc_format("Schema validations failed "); +swagger_desc(transformation_succeeded) -> + swagger_desc_format("Message transformations succeeded "); +swagger_desc(transformation_failed) -> + swagger_desc_format("Message transformations failed "); swagger_desc(persisted) -> swagger_desc_format("Messages saved to the durable storage "); swagger_desc(disconnected_durable_sessions) -> @@ -230,6 +234,10 @@ swagger_desc(validation_succeeded_rate) -> swagger_desc_format("Schema validations succeeded ", per); swagger_desc(validation_failed_rate) -> swagger_desc_format("Schema validations failed ", per); +swagger_desc(transformation_succeeded_rate) -> + swagger_desc_format("Message transformations succeeded ", per); +swagger_desc(transformation_failed_rate) -> + 
swagger_desc_format("Message transformations failed ", per); swagger_desc(persisted_rate) -> swagger_desc_format("Messages saved to the durable storage ", per); swagger_desc(retained_msg_count) -> diff --git a/apps/emqx_enterprise/src/emqx_enterprise.app.src b/apps/emqx_enterprise/src/emqx_enterprise.app.src index 03dbe37bb..d5a866caf 100644 --- a/apps/emqx_enterprise/src/emqx_enterprise.app.src +++ b/apps/emqx_enterprise/src/emqx_enterprise.app.src @@ -1,6 +1,6 @@ {application, emqx_enterprise, [ {description, "EMQX Enterprise Edition"}, - {vsn, "0.2.0"}, + {vsn, "0.2.1"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_enterprise/src/emqx_enterprise_schema.erl b/apps/emqx_enterprise/src/emqx_enterprise_schema.erl index 909fb4109..f593dc877 100644 --- a/apps/emqx_enterprise/src/emqx_enterprise_schema.erl +++ b/apps/emqx_enterprise/src/emqx_enterprise_schema.erl @@ -16,6 +16,7 @@ emqx_license_schema, emqx_schema_registry_schema, emqx_schema_validation_schema, + emqx_message_transformation_schema, emqx_ft_schema ]). @@ -196,6 +197,7 @@ audit_log_conf() -> tr_prometheus_collectors(Conf) -> [ - {'/prometheus/schema_validation', emqx_prometheus_schema_validation} + {'/prometheus/schema_validation', emqx_prometheus_schema_validation}, + {'/prometheus/message_transformation', emqx_prometheus_message_transformation} | emqx_conf_schema:tr_prometheus_collectors(Conf) ]. diff --git a/apps/emqx_machine/priv/reboot_lists.eterm b/apps/emqx_machine/priv/reboot_lists.eterm index 8d5f83698..f13e3997a 100644 --- a/apps/emqx_machine/priv/reboot_lists.eterm +++ b/apps/emqx_machine/priv/reboot_lists.eterm @@ -89,6 +89,7 @@ emqx_license, emqx_enterprise, emqx_schema_validation, + emqx_message_transformation, emqx_connector_aggregator, emqx_bridge_kafka, emqx_bridge_pulsar, diff --git a/apps/emqx_machine/src/emqx_machine.app.src b/apps/emqx_machine/src/emqx_machine.app.src index 2a74027d9..228d69463 100644 --- a/apps/emqx_machine/src/emqx_machine.app.src +++ b/apps/emqx_machine/src/emqx_machine.app.src @@ -3,7 +3,7 @@ {id, "emqx_machine"}, {description, "The EMQX Machine"}, % strict semver, bump manually! - {vsn, "0.3.0"}, + {vsn, "0.3.1"}, {modules, []}, {registered, []}, {applications, [kernel, stdlib, emqx_ctl]}, diff --git a/apps/emqx_message_transformation/BSL.txt b/apps/emqx_message_transformation/BSL.txt new file mode 100644 index 000000000..127b85777 --- /dev/null +++ b/apps/emqx_message_transformation/BSL.txt @@ -0,0 +1,94 @@ +Business Source License 1.1 + +Licensor: Hangzhou EMQ Technologies Co., Ltd. +Licensed Work: EMQX Enterprise Edition + The Licensed Work is (c) 2023 + Hangzhou EMQ Technologies Co., Ltd. +Additional Use Grant: Students and educators are granted right to copy, + modify, and create derivative work for research + or education. +Change Date: 2028-06-05 +Change License: Apache License, Version 2.0 + +For information about alternative licensing arrangements for the Software, +please contact Licensor: https://www.emqx.com/en/contact + +Notice + +The Business Source License (this document, or the “License”) is not an Open +Source license. However, the Licensed Work will eventually be made available +under an Open Source License, as stated in this License. + +License text copyright (c) 2017 MariaDB Corporation Ab, All Rights Reserved. +“Business Source License” is a trademark of MariaDB Corporation Ab. 
+ +----------------------------------------------------------------------------- + +Business Source License 1.1 + +Terms + +The Licensor hereby grants you the right to copy, modify, create derivative +works, redistribute, and make non-production use of the Licensed Work. The +Licensor may make an Additional Use Grant, above, permitting limited +production use. + +Effective on the Change Date, or the fourth anniversary of the first publicly +available distribution of a specific version of the Licensed Work under this +License, whichever comes first, the Licensor hereby grants you rights under +the terms of the Change License, and the rights granted in the paragraph +above terminate. + +If your use of the Licensed Work does not comply with the requirements +currently in effect as described in this License, you must purchase a +commercial license from the Licensor, its affiliated entities, or authorized +resellers, or you must refrain from using the Licensed Work. + +All copies of the original and modified Licensed Work, and derivative works +of the Licensed Work, are subject to this License. This License applies +separately for each version of the Licensed Work and the Change Date may vary +for each version of the Licensed Work released by Licensor. + +You must conspicuously display this License on each original or modified copy +of the Licensed Work. If you receive the Licensed Work in original or +modified form from a third party, the terms and conditions set forth in this +License apply to your use of that work. + +Any use of the Licensed Work in violation of this License will automatically +terminate your rights under this License for the current and all other +versions of the Licensed Work. + +This License does not grant you any right in any trademark or logo of +Licensor or its affiliates (provided that you may use a trademark or logo of +Licensor as expressly required by this License). + +TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE LICENSED WORK IS PROVIDED ON +AN “AS IS” BASIS. LICENSOR HEREBY DISCLAIMS ALL WARRANTIES AND CONDITIONS, +EXPRESS OR IMPLIED, INCLUDING (WITHOUT LIMITATION) WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, AND +TITLE. + +MariaDB hereby grants you permission to use this License’s text to license +your works, and to refer to it using the trademark “Business Source License”, +as long as you comply with the Covenants of Licensor below. + +Covenants of Licensor + +In consideration of the right to use this License’s text and the “Business +Source License” name and trademark, Licensor covenants to MariaDB, and to all +other recipients of the licensed work to be provided by Licensor: + +1. To specify as the Change License the GPL Version 2.0 or any later version, + or a license that is compatible with GPL Version 2.0 or a later version, + where “compatible” means that software provided under the Change License can + be included in a program with software provided under GPL Version 2.0 or a + later version. Licensor may specify additional Change Licenses without + limitation. + +2. To either: (a) specify an additional grant of rights to use that does not + impose any additional restriction on the right granted in this License, as + the Additional Use Grant; or (b) insert the text “None”. + +3. To specify a Change Date. + +4. Not to modify this License in any other way. 
diff --git a/apps/emqx_message_transformation/README.md b/apps/emqx_message_transformation/README.md new file mode 100644 index 000000000..73abe40d6 --- /dev/null +++ b/apps/emqx_message_transformation/README.md @@ -0,0 +1,29 @@ +# EMQX Message Transformation + +This application encapsulates the functionality to transform incoming or internally +triggered published payloads and take an action upon failure, which can be to just drop +the message without further processing, or to disconnect the offending client as well. + +# Documentation + +Refer to [Message +Transformation](https://docs.emqx.com/en/enterprise/latest/data-integration/message-transformation.html) +for more information about the semantics. + +# HTTP APIs + +APIs are provided for transformation management, which includes creating, +updating, looking up, deleting, listing transformations. + +Refer to [API Docs - +Bridges](https://docs.emqx.com/en/enterprise/latest/admin/api-docs.html#tag/Message-Transformation) +for more detailed information. + + +# Contributing + +Please see our [contributing.md](../../CONTRIBUTING.md). + +# License + +EMQ Business Source License 1.1, refer to [LICENSE](BSL.txt). diff --git a/apps/emqx_message_transformation/rebar.config b/apps/emqx_message_transformation/rebar.config new file mode 100644 index 000000000..108973a33 --- /dev/null +++ b/apps/emqx_message_transformation/rebar.config @@ -0,0 +1,15 @@ +%% -*- mode: erlang -*- + +{erl_opts, [ + warn_unused_vars, + warn_shadow_vars, + warn_unused_import, + warn_obsolete_guard, + warnings_as_errors, + debug_info +]}. +{deps, [ + {emqx, {path, "../emqx"}}, + {emqx_utils, {path, "../emqx_utils"}}, + {emqx_schema_registry, {path, "../emqx_schema_registry"}} +]}. diff --git a/apps/emqx_message_transformation/src/emqx_message_transformation.app.src b/apps/emqx_message_transformation/src/emqx_message_transformation.app.src new file mode 100644 index 000000000..2c54ed789 --- /dev/null +++ b/apps/emqx_message_transformation/src/emqx_message_transformation.app.src @@ -0,0 +1,14 @@ +{application, emqx_message_transformation, [ + {description, "EMQX Message Transformation"}, + {vsn, "0.1.0"}, + {registered, [emqx_message_transformation_sup, emqx_message_transformation_registry]}, + {mod, {emqx_message_transformation_app, []}}, + {applications, [ + kernel, + stdlib + ]}, + {env, []}, + {modules, []}, + + {links, []} +]}. diff --git a/apps/emqx_message_transformation/src/emqx_message_transformation.erl b/apps/emqx_message_transformation/src/emqx_message_transformation.erl new file mode 100644 index 000000000..4baa0b00d --- /dev/null +++ b/apps/emqx_message_transformation/src/emqx_message_transformation.erl @@ -0,0 +1,820 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- +-module(emqx_message_transformation). + +-include_lib("snabbkaffe/include/trace.hrl"). +-include_lib("emqx_utils/include/emqx_message.hrl"). +-include_lib("emqx/include/emqx_hooks.hrl"). +-include_lib("emqx/include/logger.hrl"). + +%% API +-export([ + add_handler/0, + remove_handler/0, + + load/0, + unload/0, + + list/0, + reorder/1, + lookup/1, + insert/1, + update/1, + delete/1 +]). + +%% `emqx_hooks' API +-export([ + register_hooks/0, + unregister_hooks/0, + + on_message_publish/1 +]). + +%% `emqx_config_handler' API +-export([pre_config_update/3, post_config_update/5]). 
+ +%% `emqx_config_backup' API +-behaviour(emqx_config_backup). +-export([import_config/1]). + +%%------------------------------------------------------------------------------ +%% Type declarations +%%------------------------------------------------------------------------------ + +-define(TRACE_TAG, "MESSAGE_TRANSFORMATION"). +-define(CONF_ROOT, message_transformation). +-define(CONF_ROOT_BIN, <<"message_transformation">>). +-define(TRANSFORMATIONS_CONF_PATH, [?CONF_ROOT, transformations]). + +-type transformation_name() :: binary(). +%% TODO +-type transformation() :: #{atom() => term()}. +%% TODO +-type variform() :: any(). +-type operation() :: #{key := [binary(), ...], value := variform()}. +-type qos() :: 0..2. +-type rendered_value() :: qos() | boolean() | binary(). + +-type eval_context() :: #{ + payload := _, + qos := _, + retain := _, + topic := _, + user_property := _, + dirty := #{ + payload => true, + qos => true, + retain => true, + topic => true, + user_property => true + } +}. + +%%------------------------------------------------------------------------------ +%% API +%%------------------------------------------------------------------------------ + +-spec add_handler() -> ok. +add_handler() -> + ok = emqx_config_handler:add_handler([?CONF_ROOT], ?MODULE), + ok = emqx_config_handler:add_handler(?TRANSFORMATIONS_CONF_PATH, ?MODULE), + ok. + +-spec remove_handler() -> ok. +remove_handler() -> + ok = emqx_config_handler:remove_handler(?TRANSFORMATIONS_CONF_PATH), + ok = emqx_config_handler:remove_handler([?CONF_ROOT]), + ok. + +load() -> + Transformations = emqx:get_config(?TRANSFORMATIONS_CONF_PATH, []), + lists:foreach( + fun({Pos, Transformation}) -> + ok = emqx_message_transformation_registry:insert(Pos, Transformation) + end, + lists:enumerate(Transformations) + ). + +unload() -> + Transformations = emqx:get_config(?TRANSFORMATIONS_CONF_PATH, []), + lists:foreach( + fun(Transformation) -> + ok = emqx_message_transformation_registry:delete(Transformation) + end, + Transformations + ). + +-spec list() -> [transformation()]. +list() -> + emqx:get_config(?TRANSFORMATIONS_CONF_PATH, []). + +-spec reorder([transformation_name()]) -> + {ok, _} | {error, _}. +reorder(Order) -> + emqx_conf:update( + ?TRANSFORMATIONS_CONF_PATH, + {reorder, Order}, + #{override_to => cluster} + ). + +-spec lookup(transformation_name()) -> {ok, transformation()} | {error, not_found}. +lookup(Name) -> + Transformations = emqx:get_config(?TRANSFORMATIONS_CONF_PATH, []), + do_lookup(Name, Transformations). + +-spec insert(transformation()) -> + {ok, _} | {error, _}. +insert(Transformation) -> + emqx_conf:update( + ?TRANSFORMATIONS_CONF_PATH, + {append, Transformation}, + #{override_to => cluster} + ). + +-spec update(transformation()) -> + {ok, _} | {error, _}. +update(Transformation) -> + emqx_conf:update( + ?TRANSFORMATIONS_CONF_PATH, + {update, Transformation}, + #{override_to => cluster} + ). + +-spec delete(transformation_name()) -> + {ok, _} | {error, _}. +delete(Name) -> + emqx_conf:update( + ?TRANSFORMATIONS_CONF_PATH, + {delete, Name}, + #{override_to => cluster} + ). + +%%------------------------------------------------------------------------------ +%% Hooks +%%------------------------------------------------------------------------------ + +-spec register_hooks() -> ok. +register_hooks() -> + emqx_hooks:put( + 'message.publish', {?MODULE, on_message_publish, []}, ?HP_MESSAGE_TRANSFORMATION + ). + +-spec unregister_hooks() -> ok. 
+unregister_hooks() -> + emqx_hooks:del('message.publish', {?MODULE, on_message_publish}). + +-spec on_message_publish(emqx_types:message()) -> + {ok, emqx_types:message()} | {stop, emqx_types:message()}. +on_message_publish(Message = #message{topic = Topic, headers = Headers}) -> + case emqx_message_transformation_registry:matching_transformations(Topic) of + [] -> + ok; + Transformations -> + case run_transformations(Transformations, Message) of + #message{} = FinalMessage -> + %% FIXME: must ensure final payload is a binary!! Check this. + emqx_metrics:inc('messages.transformation_succeeded'), + {ok, FinalMessage}; + drop -> + emqx_metrics:inc('messages.transformation_failed'), + {stop, Message#message{headers = Headers#{allow_publish => false}}}; + disconnect -> + emqx_metrics:inc('messages.transformation_failed'), + {stop, Message#message{ + headers = Headers#{ + allow_publish => false, + should_disconnect => true + } + }} + end + end. + +%%------------------------------------------------------------------------------ +%% `emqx_config_handler' API +%%------------------------------------------------------------------------------ + +pre_config_update(?TRANSFORMATIONS_CONF_PATH, {append, Transformation}, OldTransformations) -> + Transformations = OldTransformations ++ [Transformation], + {ok, Transformations}; +pre_config_update(?TRANSFORMATIONS_CONF_PATH, {update, Transformation}, OldTransformations) -> + replace(OldTransformations, Transformation); +pre_config_update(?TRANSFORMATIONS_CONF_PATH, {delete, Transformation}, OldTransformations) -> + delete(OldTransformations, Transformation); +pre_config_update(?TRANSFORMATIONS_CONF_PATH, {reorder, Order}, OldTransformations) -> + reorder(OldTransformations, Order); +pre_config_update([?CONF_ROOT], {merge, NewConfig}, OldConfig) -> + #{resulting_config := Config} = prepare_config_merge(NewConfig, OldConfig), + {ok, Config}; +pre_config_update([?CONF_ROOT], {replace, NewConfig}, _OldConfig) -> + {ok, NewConfig}. 
+ +post_config_update( + ?TRANSFORMATIONS_CONF_PATH, {append, #{<<"name">> := Name}}, New, _Old, _AppEnvs +) -> + {Pos, Transformation} = fetch_with_index(New, Name), + ok = emqx_message_transformation_registry:insert(Pos, Transformation), + ok; +post_config_update(?TRANSFORMATIONS_CONF_PATH, {update, #{<<"name">> := Name}}, New, Old, _AppEnvs) -> + {_Pos, OldTransformation} = fetch_with_index(Old, Name), + {Pos, NewTransformation} = fetch_with_index(New, Name), + ok = emqx_message_transformation_registry:update(OldTransformation, Pos, NewTransformation), + ok; +post_config_update(?TRANSFORMATIONS_CONF_PATH, {delete, Name}, _New, Old, _AppEnvs) -> + {_Pos, Transformation} = fetch_with_index(Old, Name), + ok = emqx_message_transformation_registry:delete(Transformation), + ok; +post_config_update(?TRANSFORMATIONS_CONF_PATH, {reorder, _Order}, New, _Old, _AppEnvs) -> + ok = emqx_message_transformation_registry:reindex_positions(New), + ok; +post_config_update([?CONF_ROOT], {merge, _}, ResultingConfig, Old, _AppEnvs) -> + #{transformations := ResultingTransformations} = ResultingConfig, + #{transformations := OldTransformations} = Old, + #{added := NewTransformations0} = + emqx_utils:diff_lists( + ResultingTransformations, + OldTransformations, + fun(#{name := N}) -> N end + ), + NewTransformations = + lists:map( + fun(#{name := Name}) -> + {Pos, Transformation} = fetch_with_index(ResultingTransformations, Name), + ok = emqx_message_transformation_registry:insert(Pos, Transformation), + #{name => Name, pos => Pos} + end, + NewTransformations0 + ), + {ok, #{new_transformations => NewTransformations}}; +post_config_update([?CONF_ROOT], {replace, Input}, ResultingConfig, Old, _AppEnvs) -> + #{ + new_transformations := NewTransformations, + changed_transformations := ChangedTransformations0, + deleted_transformations := DeletedTransformations + } = prepare_config_replace(Input, Old), + #{transformations := ResultingTransformations} = ResultingConfig, + #{transformations := OldTransformations} = Old, + lists:foreach( + fun(Name) -> + {_Pos, Transformation} = fetch_with_index(OldTransformations, Name), + ok = emqx_message_transformation_registry:delete(Transformation) + end, + DeletedTransformations + ), + lists:foreach( + fun(Name) -> + {Pos, Transformation} = fetch_with_index(ResultingTransformations, Name), + ok = emqx_message_transformation_registry:insert(Pos, Transformation) + end, + NewTransformations + ), + ChangedTransformations = + lists:map( + fun(Name) -> + {_Pos, OldTransformation} = fetch_with_index(OldTransformations, Name), + {Pos, NewTransformation} = fetch_with_index(ResultingTransformations, Name), + ok = emqx_message_transformation_registry:update( + OldTransformation, Pos, NewTransformation + ), + #{name => Name, pos => Pos} + end, + ChangedTransformations0 + ), + ok = emqx_message_transformation_registry:reindex_positions(ResultingTransformations), + {ok, #{changed_transformations => ChangedTransformations}}. 
+ +%%------------------------------------------------------------------------------ +%% `emqx_config_backup' API +%%------------------------------------------------------------------------------ + +import_config(#{?CONF_ROOT_BIN := RawConf0}) -> + Result = emqx_conf:update( + [?CONF_ROOT], + {merge, RawConf0}, + #{override_to => cluster, rawconf_with_defaults => true} + ), + case Result of + {error, Reason} -> + {error, #{root_key => ?CONF_ROOT, reason => Reason}}; + {ok, _} -> + Keys0 = maps:keys(RawConf0), + ChangedPaths = Keys0 -- [<<"transformations">>], + {ok, #{root_key => ?CONF_ROOT, changed => ChangedPaths}} + end; +import_config(_RawConf) -> + {ok, #{root_key => ?CONF_ROOT, changed => []}}. + +%%------------------------------------------------------------------------------ +%% Internal exports +%%------------------------------------------------------------------------------ + +%%------------------------------------------------------------------------------ +%% Internal functions +%%------------------------------------------------------------------------------ + +-spec eval_operation(operation(), transformation(), eval_context()) -> {ok, eval_context()} | error. +eval_operation(Operation, Transformation, Context) -> + #{key := K, value := V} = Operation, + case eval_variform(K, V, Context) of + {error, Reason} -> + trace_failure(Transformation, "transformation_eval_operation_failure", #{ + reason => Reason + }), + error; + {ok, Rendered} -> + NewContext = put_value(K, Rendered, Context), + {ok, NewContext} + end. + +-spec eval_variform([binary(), ...], _, eval_context()) -> + {ok, rendered_value()} | {error, term()}. +eval_variform(K, V, Context) -> + case emqx_variform:render(V, Context) of + {error, Reason} -> + {error, Reason}; + {ok, Rendered} -> + map_result(Rendered, K) + end. + +-spec put_value([binary(), ...], rendered_value(), eval_context()) -> eval_context(). +put_value([<<"payload">> | Rest], Rendered, Context0) -> + Context = maps:update_with(dirty, fun(D) -> D#{payload => true} end, Context0), + maps:update_with( + payload, + fun(P) -> + case Rest of + [] -> + Rendered; + _ -> + emqx_utils_maps:deep_put(Rest, P, Rendered) + end + end, + Context + ); +put_value([<<"user_property">>, Key], Rendered, Context0) -> + Context = maps:update_with(dirty, fun(D) -> D#{user_property => true} end, Context0), + maps:update_with( + user_property, + fun(Ps) -> lists:keystore(Key, 1, Ps, {Key, Rendered}) end, + Context + ); +put_value([<<"qos">>], Rendered, Context0) -> + Context = maps:update_with(dirty, fun(D) -> D#{qos => true} end, Context0), + Context#{qos := Rendered}; +put_value([<<"retain">>], Rendered, Context0) -> + Context = maps:update_with(dirty, fun(D) -> D#{retain => true} end, Context0), + Context#{retain := Rendered}; +put_value([<<"topic">>], Rendered, Context0) -> + Context = maps:update_with(dirty, fun(D) -> D#{topic => true} end, Context0), + Context#{topic := Rendered}. + +-spec map_result(binary(), [binary(), ...]) -> + {ok, 0..2 | boolean() | binary()} | {error, map()}. +map_result(QoSBin, [<<"qos">>]) -> + case QoSBin of + <<"0">> -> {ok, 0}; + <<"1">> -> {ok, 1}; + <<"2">> -> {ok, 2}; + _ -> {error, #{reason => bad_qos_value, input => QoSBin}} + end; +map_result(RetainBin, [<<"retain">>]) -> + case RetainBin of + <<"true">> -> {ok, true}; + <<"false">> -> {ok, false}; + _ -> {error, #{reason => bad_retain_value, input => RetainBin}} + end; +map_result(Rendered, _Key) -> + {ok, Rendered}. 
+ +replace(OldTransformations, Transformation = #{<<"name">> := Name}) -> + {Found, RevNewTransformations} = + lists:foldl( + fun + (#{<<"name">> := NameIn}, {_FoundIn, Acc}) when NameIn =:= Name -> + {true, [Transformation | Acc]}; + (Val, {FoundIn, Acc}) -> + {FoundIn, [Val | Acc]} + end, + {false, []}, + OldTransformations + ), + case Found of + true -> + {ok, lists:reverse(RevNewTransformations)}; + false -> + {error, not_found} + end. + +delete(OldTransformations, Name) -> + {Found, RevNewTransformations} = + lists:foldl( + fun + (#{<<"name">> := NameIn}, {_FoundIn, Acc}) when NameIn =:= Name -> + {true, Acc}; + (Val, {FoundIn, Acc}) -> + {FoundIn, [Val | Acc]} + end, + {false, []}, + OldTransformations + ), + case Found of + true -> + {ok, lists:reverse(RevNewTransformations)}; + false -> + {error, not_found} + end. + +reorder(Transformations, Order) -> + Context = #{ + not_found => sets:new([{version, 2}]), + duplicated => sets:new([{version, 2}]), + res => [], + seen => sets:new([{version, 2}]) + }, + reorder(Transformations, Order, Context). + +reorder(NotReordered, _Order = [], #{not_found := NotFound0, duplicated := Duplicated0, res := Res}) -> + NotFound = sets:to_list(NotFound0), + Duplicated = sets:to_list(Duplicated0), + case {NotReordered, NotFound, Duplicated} of + {[], [], []} -> + {ok, lists:reverse(Res)}; + {_, _, _} -> + Error = #{ + not_found => NotFound, + duplicated => Duplicated, + not_reordered => [N || #{<<"name">> := N} <- NotReordered] + }, + {error, Error} + end; +reorder(RemainingTransformations, [Name | Rest], Context0 = #{seen := Seen0}) -> + case sets:is_element(Name, Seen0) of + true -> + Context = maps:update_with( + duplicated, fun(S) -> sets:add_element(Name, S) end, Context0 + ), + reorder(RemainingTransformations, Rest, Context); + false -> + case safe_take(Name, RemainingTransformations) of + error -> + Context = maps:update_with( + not_found, fun(S) -> sets:add_element(Name, S) end, Context0 + ), + reorder(RemainingTransformations, Rest, Context); + {ok, {Transformation, Front, Rear}} -> + Context1 = maps:update_with( + seen, fun(S) -> sets:add_element(Name, S) end, Context0 + ), + Context = maps:update_with(res, fun(Vs) -> [Transformation | Vs] end, Context1), + reorder(Front ++ Rear, Rest, Context) + end + end. + +fetch_with_index([{Pos, #{name := Name} = Transformation} | _Rest], Name) -> + {Pos, Transformation}; +fetch_with_index([{_, _} | Rest], Name) -> + fetch_with_index(Rest, Name); +fetch_with_index(Transformations, Name) -> + fetch_with_index(lists:enumerate(Transformations), Name). + +safe_take(Name, Transformations) -> + case lists:splitwith(fun(#{<<"name">> := N}) -> N =/= Name end, Transformations) of + {_Front, []} -> + error; + {Front, [Found | Rear]} -> + {ok, {Found, Front, Rear}} + end. + +do_lookup(_Name, _Transformations = []) -> + {error, not_found}; +do_lookup(Name, [#{name := Name} = Transformation | _Rest]) -> + {ok, Transformation}; +do_lookup(Name, [_ | Rest]) -> + do_lookup(Name, Rest). 
+ +run_transformations(Transformations, Message) -> + Fun = fun(Transformation, MessageAcc) -> + #{name := Name} = Transformation, + emqx_message_transformation_registry:inc_matched(Name), + case run_transformation(Transformation, MessageAcc) of + #message{} = NewAcc -> + emqx_message_transformation_registry:inc_succeeded(Name), + {cont, NewAcc}; + ignore -> + emqx_message_transformation_registry:inc_failed(Name), + run_message_transformation_failed_hook(Message, Transformation), + {cont, MessageAcc}; + FailureAction -> + trace_failure(Transformation, "transformation_failed", #{ + transformation => Name, + action => FailureAction + }), + emqx_message_transformation_registry:inc_failed(Name), + run_message_transformation_failed_hook(Message, Transformation), + {halt, FailureAction} + end + end, + case emqx_utils:foldl_while(Fun, Message, Transformations) of + #message{} = FinalMessage -> + case is_payload_properly_encoded(FinalMessage) of + true -> + FinalMessage; + false -> + %% Take the last validation's failure action, as it's the one + %% responsible for getting the right encoding. + LastTransformation = lists:last(Transformations), + #{failure_action := FailureAction} = LastTransformation, + trace_failure(LastTransformation, "transformation_bad_encoding", #{ + action => FailureAction, + explain => <<"final payload must be encoded as a binary">> + }), + FailureAction + end; + FailureAction -> + FailureAction + end. + +run_transformation(Transformation, MessageIn) -> + #{ + operations := Operations, + failure_action := FailureAction, + payload_decoder := PayloadDecoder + } = Transformation, + Fun = fun(Operation, Acc) -> + case eval_operation(Operation, Transformation, Acc) of + {ok, NewAcc} -> {cont, NewAcc}; + error -> {halt, FailureAction} + end + end, + PayloadIn = MessageIn#message.payload, + case decode(PayloadIn, PayloadDecoder, Transformation) of + {ok, InitPayload} -> + InitAcc = message_to_context(MessageIn, InitPayload, Transformation), + case emqx_utils:foldl_while(Fun, InitAcc, Operations) of + #{} = ContextOut -> + context_to_message(MessageIn, ContextOut, Transformation); + _ -> + FailureAction + end; + error -> + %% Error already logged + FailureAction + end. + +-spec message_to_context(emqx_types:message(), _Payload, transformation()) -> eval_context(). +message_to_context(#message{} = Message, Payload, Transformation) -> + #{ + payload_decoder := PayloadDecoder, + payload_encoder := PayloadEncoder + } = Transformation, + Dirty = + case PayloadEncoder =:= PayloadDecoder of + true -> #{}; + false -> #{payload => true} + end, + #{ + dirty => Dirty, + payload => Payload, + qos => Message#message.qos, + retain => emqx_message:get_flag(retain, Message, false), + topic => Message#message.topic, + user_property => maps:get( + 'User-Property', emqx_message:get_header(properties, Message, #{}), [] + ) + }. + +-spec context_to_message(emqx_types:message(), eval_context(), transformation()) -> + {ok, emqx_types:message()} | _TODO. +context_to_message(Message, Context, Transformation) -> + #{ + failure_action := FailureAction, + payload_encoder := PayloadEncoder + } = Transformation, + #{payload := PayloadOut} = Context, + case encode(PayloadOut, PayloadEncoder, Transformation) of + {ok, Payload} -> + take_from_context(Context#{payload := Payload}, Message); + error -> + FailureAction + end. 
+ +take_from_context(Context, Message) -> + maps:fold( + fun + (payload, _, Acc) -> + Acc#message{payload = maps:get(payload, Context)}; + (qos, _, Acc) -> + Acc#message{qos = maps:get(qos, Context)}; + (topic, _, Acc) -> + Acc#message{topic = maps:get(topic, Context)}; + (retain, _, Acc) -> + emqx_message:set_flag(retain, maps:get(retain, Context), Acc); + (user_property, _, Acc) -> + Props0 = emqx_message:get_header(properties, Acc, #{}), + Props = maps:merge(Props0, #{'User-Property' => maps:get(user_property, Context)}), + emqx_message:set_header(properties, Props, Acc) + end, + Message, + maps:get(dirty, Context) + ). + +decode(Payload, #{type := none}, _Transformation) -> + {ok, Payload}; +decode(Payload, #{type := json}, Transformation) -> + case emqx_utils_json:safe_decode(Payload, [return_maps]) of + {ok, JSON} -> + {ok, JSON}; + {error, Reason} -> + trace_failure(Transformation, "payload_decode_failed", #{ + decoder => json, + reason => Reason + }), + error + end; +decode(Payload, #{type := avro, schema := SerdeName}, Transformation) -> + try + {ok, emqx_schema_registry_serde:decode(SerdeName, Payload)} + catch + error:{serde_not_found, _} -> + trace_failure(Transformation, "payload_decode_schema_not_found", #{ + decoder => avro, + schema_name => SerdeName + }), + error; + Class:Error:Stacktrace -> + trace_failure(Transformation, "payload_decode_schema_failure", #{ + decoder => avro, + schema_name => SerdeName, + kind => Class, + reason => Error, + stacktrace => Stacktrace + }), + error + end; +decode( + Payload, #{type := protobuf, schema := SerdeName, message_type := MessageType}, Transformation +) -> + try + {ok, emqx_schema_registry_serde:decode(SerdeName, Payload, [MessageType])} + catch + error:{serde_not_found, _} -> + trace_failure(Transformation, "payload_decode_schema_not_found", #{ + decoder => protobuf, + schema_name => SerdeName, + message_type => MessageType + }), + error; + Class:Error:Stacktrace -> + trace_failure(Transformation, "payload_decode_schema_failure", #{ + decoder => protobuf, + schema_name => SerdeName, + message_type => MessageType, + kind => Class, + reason => Error, + stacktrace => Stacktrace + }), + error + end. 
+ +encode(Payload, #{type := none}, _Transformation) -> + {ok, Payload}; +encode(Payload, #{type := json}, Transformation) -> + case emqx_utils_json:safe_encode(Payload) of + {ok, Bin} -> + {ok, Bin}; + {error, Reason} -> + trace_failure(Transformation, "payload_encode_failed", #{ + encoder => json, + reason => Reason + }), + error + end; +encode(Payload, #{type := avro, schema := SerdeName}, Transformation) -> + try + {ok, emqx_schema_registry_serde:encode(SerdeName, Payload)} + catch + error:{serde_not_found, _} -> + trace_failure(Transformation, "payload_encode_schema_not_found", #{ + encoder => avro, + schema_name => SerdeName + }), + error; + Class:Error:Stacktrace -> + trace_failure(Transformation, "payload_encode_schema_failure", #{ + encoder => avro, + schema_name => SerdeName, + kind => Class, + reason => Error, + stacktrace => Stacktrace + }), + error + end; +encode( + Payload, #{type := protobuf, schema := SerdeName, message_type := MessageType}, Transformation +) -> + try + {ok, emqx_schema_registry_serde:encode(SerdeName, Payload, [MessageType])} + catch + error:{serde_not_found, _} -> + trace_failure(Transformation, "payload_encode_schema_not_found", #{ + encoder => protobuf, + schema_name => SerdeName, + message_type => MessageType + }), + error; + Class:Error:Stacktrace -> + trace_failure(Transformation, "payload_encode_schema_failure", #{ + encoder => protobuf, + schema_name => SerdeName, + message_type => MessageType, + kind => Class, + reason => Error, + stacktrace => Stacktrace + }), + error + end. + +trace_failure(#{log_failure := #{level := none}} = Transformation, _Msg, _Meta) -> + #{ + name := _Name, + failure_action := _Action + } = Transformation, + ?tp(message_transformation_failed, _Meta#{log_level => none, name => _Name, message => _Msg}), + ok; +trace_failure(#{log_failure := #{level := Level}} = Transformation, Msg, Meta0) -> + #{ + name := Name, + failure_action := _Action + } = Transformation, + Meta = maps:merge(#{name => Name}, Meta0), + ?tp(message_transformation_failed, Meta#{ + log_level => Level, name => Name, action => _Action, message => Msg + }), + ?TRACE(Level, ?TRACE_TAG, Msg, Meta). + +run_message_transformation_failed_hook(Message, Transformation) -> + #{name := Name} = Transformation, + TransformationContext = #{name => Name}, + emqx_hooks:run('message.transformation_failed', [Message, TransformationContext]). + +is_payload_properly_encoded(#message{payload = Payload}) -> + try iolist_size(Payload) of + _ -> + true + catch + error:badarg -> + false + end. + +%% "Merging" in the context of the transformation array means: +%% * Existing transformations (identified by `name') are left untouched. +%% * No transformations are removed. +%% * New transformations are appended to the existing list. +%% * Existing transformations are not reordered. 
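+%%
+%% Illustrative example (hypothetical names): merging the existing list
+%% [#{<<"name">> => <<"a">>}, #{<<"name">> => <<"b">>}] with an imported list
+%% [#{<<"name">> => <<"b">>}, #{<<"name">> => <<"c">>}] leaves `a' and `b'
+%% untouched and appends only `c'.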
+prepare_config_merge(NewConfig0, OldConfig) ->
+    {ImportedRawTransformations, NewConfigNoTransformations} =
+        case maps:take(<<"transformations">>, NewConfig0) of
+            error ->
+                {[], NewConfig0};
+            {V, R} ->
+                {V, R}
+        end,
+    OldRawTransformations = maps:get(<<"transformations">>, OldConfig, []),
+    #{added := NewRawTransformations} = emqx_utils:diff_lists(
+        ImportedRawTransformations,
+        OldRawTransformations,
+        fun(#{<<"name">> := N}) -> N end
+    ),
+    Config0 = emqx_utils_maps:deep_merge(OldConfig, NewConfigNoTransformations),
+    Config = maps:update_with(
+        <<"transformations">>,
+        fun(OldVs) -> OldVs ++ NewRawTransformations end,
+        NewRawTransformations,
+        Config0
+    ),
+    #{
+        new_transformations => NewRawTransformations,
+        resulting_config => Config
+    }.
+
+prepare_config_replace(NewConfig, OldConfig) ->
+    ImportedRawTransformations = maps:get(<<"transformations">>, NewConfig, []),
+    OldTransformations = maps:get(transformations, OldConfig, []),
+    %% Since, at this point, we have an input raw config but a parsed old config, we
+    %% project both down to just their names, and consider common names as changed.
+    #{
+        added := NewTransformations,
+        removed := DeletedTransformations,
+        changed := ChangedTransformations0,
+        identical := ChangedTransformations1
+    } = emqx_utils:diff_lists(
+        lists:map(fun(#{<<"name">> := N}) -> N end, ImportedRawTransformations),
+        lists:map(fun(#{name := N}) -> N end, OldTransformations),
+        fun(N) -> N end
+    ),
+    #{
+        new_transformations => NewTransformations,
+        changed_transformations => ChangedTransformations0 ++ ChangedTransformations1,
+        deleted_transformations => DeletedTransformations
+    }.
diff --git a/apps/emqx_message_transformation/src/emqx_message_transformation_app.erl b/apps/emqx_message_transformation/src/emqx_message_transformation_app.erl
new file mode 100644
index 000000000..1d44962a6
--- /dev/null
+++ b/apps/emqx_message_transformation/src/emqx_message_transformation_app.erl
@@ -0,0 +1,34 @@
+%%--------------------------------------------------------------------
+%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved.
+%%--------------------------------------------------------------------
+-module(emqx_message_transformation_app).
+
+-behaviour(application).
+
+%% `application' API
+-export([start/2, stop/1]).
+
+%%------------------------------------------------------------------------------
+%% Type declarations
+%%------------------------------------------------------------------------------
+
+%%------------------------------------------------------------------------------
+%% `application' API
+%%------------------------------------------------------------------------------
+
+-spec start(application:start_type(), term()) -> {ok, pid()}.
+start(_Type, _Args) ->
+    {ok, Sup} = emqx_message_transformation_sup:start_link(),
+    ok = emqx_variform:inject_allowed_module(emqx_message_transformation_bif),
+    ok = emqx_message_transformation:add_handler(),
+    ok = emqx_message_transformation:register_hooks(),
+    ok = emqx_message_transformation:load(),
+    {ok, Sup}.
+
+-spec stop(term()) -> ok.
+stop(_State) ->
+    ok = emqx_message_transformation:unload(),
+    ok = emqx_message_transformation:unregister_hooks(),
+    ok = emqx_message_transformation:remove_handler(),
+    ok = emqx_variform:erase_allowed_module(emqx_message_transformation_bif),
+    ok.
diff --git a/apps/emqx_message_transformation/src/emqx_message_transformation_bif.erl b/apps/emqx_message_transformation/src/emqx_message_transformation_bif.erl new file mode 100644 index 000000000..0eaf46578 --- /dev/null +++ b/apps/emqx_message_transformation/src/emqx_message_transformation_bif.erl @@ -0,0 +1,38 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- +-module(emqx_message_transformation_bif). + +%% API +-export([ + json_decode/1, + json_encode/1 +]). + +%%------------------------------------------------------------------------------ +%% Type declarations +%%------------------------------------------------------------------------------ + +%%------------------------------------------------------------------------------ +%% API +%%------------------------------------------------------------------------------ + +json_encode(X) -> + case emqx_utils_json:safe_encode(X) of + {ok, JSON} -> + JSON; + {error, Reason} -> + throw(#{reason => json_encode_failure, detail => Reason}) + end. + +json_decode(JSON) -> + case emqx_utils_json:safe_decode(JSON, [return_maps]) of + {ok, X} -> + emqx_variform:skip_stringification(X); + {error, Reason} -> + throw(#{reason => json_decode_failure, detail => Reason}) + end. + +%%------------------------------------------------------------------------------ +%% Internal fns +%%------------------------------------------------------------------------------ diff --git a/apps/emqx_message_transformation/src/emqx_message_transformation_http_api.erl b/apps/emqx_message_transformation/src/emqx_message_transformation_http_api.erl new file mode 100644 index 000000000..3b3132d0d --- /dev/null +++ b/apps/emqx_message_transformation/src/emqx_message_transformation_http_api.erl @@ -0,0 +1,656 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- +-module(emqx_message_transformation_http_api). + +-behaviour(minirest_api). + +-include_lib("typerefl/include/types.hrl"). +-include_lib("hocon/include/hoconsc.hrl"). +-include_lib("emqx/include/logger.hrl"). +-include_lib("emqx_utils/include/emqx_utils_api.hrl"). + +%% `minirest' and `minirest_trails' API +-export([ + namespace/0, + api_spec/0, + fields/1, + paths/0, + schema/1 +]). + +%% `minirest' handlers +-export([ + '/message_transformations'/2, + '/message_transformations/reorder'/2, + '/message_transformations/transformation/:name'/2, + '/message_transformations/transformation/:name/metrics'/2, + '/message_transformations/transformation/:name/metrics/reset'/2, + '/message_transformations/transformation/:name/enable/:enable'/2 +]). + +%%------------------------------------------------------------------------------------------------- +%% Type definitions +%%------------------------------------------------------------------------------------------------- + +-define(TAGS, [<<"Message Transformation">>]). +-define(METRIC_NAME, message_transformation). + +%%------------------------------------------------------------------------------------------------- +%% `minirest' and `minirest_trails' API +%%------------------------------------------------------------------------------------------------- + +namespace() -> "message_transformation_http_api". 
+ +api_spec() -> + emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true}). + +paths() -> + [ + "/message_transformations", + "/message_transformations/reorder", + "/message_transformations/transformation/:name", + "/message_transformations/transformation/:name/metrics", + "/message_transformations/transformation/:name/metrics/reset", + "/message_transformations/transformation/:name/enable/:enable" + ]. + +schema("/message_transformations") -> + #{ + 'operationId' => '/message_transformations', + get => #{ + tags => ?TAGS, + summary => <<"List transformations">>, + description => ?DESC("list_transformations"), + responses => + #{ + 200 => + emqx_dashboard_swagger:schema_with_examples( + array( + emqx_message_transformation_schema:api_schema(list) + ), + example_return_list() + ) + } + }, + post => #{ + tags => ?TAGS, + summary => <<"Append a new transformation">>, + description => ?DESC("append_transformation"), + 'requestBody' => emqx_dashboard_swagger:schema_with_examples( + emqx_message_transformation_schema:api_schema(post), + example_input_create() + ), + responses => + #{ + 201 => + emqx_dashboard_swagger:schema_with_examples( + emqx_message_transformation_schema:api_schema(post), + example_return_create() + ), + 400 => error_schema('ALREADY_EXISTS', "Transformation already exists") + } + }, + put => #{ + tags => ?TAGS, + summary => <<"Update a transformation">>, + description => ?DESC("update_transformation"), + 'requestBody' => emqx_dashboard_swagger:schema_with_examples( + emqx_message_transformation_schema:api_schema(put), + example_input_update() + ), + responses => + #{ + 200 => + emqx_dashboard_swagger:schema_with_examples( + emqx_message_transformation_schema:api_schema(put), + example_return_update() + ), + 404 => error_schema('NOT_FOUND', "Transformation not found"), + 400 => error_schema('BAD_REQUEST', "Bad params") + } + } + }; +schema("/message_transformations/reorder") -> + #{ + 'operationId' => '/message_transformations/reorder', + post => #{ + tags => ?TAGS, + summary => <<"Reorder all transformations">>, + description => ?DESC("reorder_transformations"), + 'requestBody' => + emqx_dashboard_swagger:schema_with_examples( + ref(reorder), + example_input_reorder() + ), + responses => + #{ + 204 => <<"No Content">>, + 400 => error_schema( + 'BAD_REQUEST', + <<"Bad request">>, + [ + {not_found, + mk(array(binary()), #{desc => "Transformations not found"})}, + {not_reordered, + mk(array(binary()), #{ + desc => "Transformations not referenced in input" + })}, + {duplicated, + mk(array(binary()), #{desc => "Duplicated transformations in input"})} + ] + ) + } + } + }; +schema("/message_transformations/transformation/:name") -> + #{ + 'operationId' => '/message_transformations/transformation/:name', + get => #{ + tags => ?TAGS, + summary => <<"Lookup a transformation">>, + description => ?DESC("lookup_transformation"), + parameters => [param_path_name()], + responses => + #{ + 200 => + emqx_dashboard_swagger:schema_with_examples( + array( + emqx_message_transformation_schema:api_schema(lookup) + ), + example_return_lookup() + ), + 404 => error_schema('NOT_FOUND', "Transformation not found") + } + }, + delete => #{ + tags => ?TAGS, + summary => <<"Delete a transformation">>, + description => ?DESC("delete_transformation"), + parameters => [param_path_name()], + responses => + #{ + 204 => <<"Transformation deleted">>, + 404 => error_schema('NOT_FOUND', "Transformation not found") + } + } + }; +schema("/message_transformations/transformation/:name/metrics") -> + #{ + 
'operationId' => '/message_transformations/transformation/:name/metrics', + get => #{ + tags => ?TAGS, + summary => <<"Get transformation metrics">>, + description => ?DESC("get_transformation_metrics"), + parameters => [param_path_name()], + responses => + #{ + 200 => + emqx_dashboard_swagger:schema_with_examples( + ref(get_metrics), + example_return_metrics() + ), + 404 => error_schema('NOT_FOUND', "Transformation not found") + } + } + }; +schema("/message_transformations/transformation/:name/metrics/reset") -> + #{ + 'operationId' => '/message_transformations/transformation/:name/metrics/reset', + post => #{ + tags => ?TAGS, + summary => <<"Reset transformation metrics">>, + description => ?DESC("reset_transformation_metrics"), + parameters => [param_path_name()], + responses => + #{ + 204 => <<"No content">>, + 404 => error_schema('NOT_FOUND', "Transformation not found") + } + } + }; +schema("/message_transformations/transformation/:name/enable/:enable") -> + #{ + 'operationId' => '/message_transformations/transformation/:name/enable/:enable', + post => #{ + tags => ?TAGS, + summary => <<"Enable or disable transformation">>, + description => ?DESC("enable_disable_transformation"), + parameters => [param_path_name(), param_path_enable()], + responses => + #{ + 204 => <<"No content">>, + 404 => error_schema('NOT_FOUND', "Transformation not found"), + 400 => error_schema('BAD_REQUEST', "Bad params") + } + } + }. + +param_path_name() -> + {name, + mk( + binary(), + #{ + in => path, + required => true, + example => <<"my_transformation">>, + desc => ?DESC("param_path_name") + } + )}. + +param_path_enable() -> + {enable, + mk( + boolean(), + #{ + in => path, + required => true, + desc => ?DESC("param_path_enable") + } + )}. + +fields(front) -> + [{position, mk(front, #{default => front, required => true, in => body})}]; +fields(rear) -> + [{position, mk(rear, #{default => rear, required => true, in => body})}]; +fields('after') -> + [ + {position, mk('after', #{default => 'after', required => true, in => body})}, + {transformation, mk(binary(), #{required => true, in => body})} + ]; +fields(before) -> + [ + {position, mk(before, #{default => before, required => true, in => body})}, + {transformation, mk(binary(), #{required => true, in => body})} + ]; +fields(reorder) -> + [ + {order, mk(array(binary()), #{required => true, in => body})} + ]; +fields(get_metrics) -> + [ + {metrics, mk(ref(metrics), #{})}, + {node_metrics, mk(ref(node_metrics), #{})} + ]; +fields(metrics) -> + [ + {matched, mk(non_neg_integer(), #{})}, + {succeeded, mk(non_neg_integer(), #{})}, + {failed, mk(non_neg_integer(), #{})} + ]; +fields(node_metrics) -> + [ + {node, mk(binary(), #{})} + | fields(metrics) + ]. 
+ +%%------------------------------------------------------------------------------------------------- +%% `minirest' handlers +%%------------------------------------------------------------------------------------------------- + +'/message_transformations'(get, _Params) -> + Transformations = emqx_message_transformation:list(), + ?OK(lists:map(fun transformation_out/1, Transformations)); +'/message_transformations'(post, #{body := Params = #{<<"name">> := Name}}) -> + with_transformation( + Name, + return(?BAD_REQUEST('ALREADY_EXISTS', <<"Transformation already exists">>)), + fun() -> + case emqx_message_transformation:insert(Params) of + {ok, _} -> + {ok, Res} = emqx_message_transformation:lookup(Name), + {201, transformation_out(Res)}; + {error, Error} -> + ?BAD_REQUEST(Error) + end + end + ); +'/message_transformations'(put, #{body := Params = #{<<"name">> := Name}}) -> + with_transformation( + Name, + fun() -> + case emqx_message_transformation:update(Params) of + {ok, _} -> + {ok, Res} = emqx_message_transformation:lookup(Name), + {200, transformation_out(Res)}; + {error, Error} -> + ?BAD_REQUEST(Error) + end + end, + not_found() + ). + +'/message_transformations/transformation/:name'(get, #{bindings := #{name := Name}}) -> + with_transformation( + Name, + fun(Transformation) -> ?OK(transformation_out(Transformation)) end, + not_found() + ); +'/message_transformations/transformation/:name'(delete, #{bindings := #{name := Name}}) -> + with_transformation( + Name, + fun() -> + case emqx_message_transformation:delete(Name) of + {ok, _} -> + ?NO_CONTENT; + {error, Error} -> + ?BAD_REQUEST(Error) + end + end, + not_found() + ). + +'/message_transformations/reorder'(post, #{body := #{<<"order">> := Order}}) -> + do_reorder(Order). + +'/message_transformations/transformation/:name/enable/:enable'(post, #{ + bindings := #{name := Name, enable := Enable} +}) -> + with_transformation( + Name, + fun(Transformation) -> do_enable_disable(Transformation, Enable) end, + not_found() + ). + +'/message_transformations/transformation/:name/metrics'(get, #{bindings := #{name := Name}}) -> + with_transformation( + Name, + fun() -> + Nodes = emqx:running_nodes(), + Results = emqx_metrics_proto_v2:get_metrics(Nodes, ?METRIC_NAME, Name, 5_000), + NodeResults = lists:zip(Nodes, Results), + NodeErrors = [Result || Result = {_Node, {NOk, _}} <- NodeResults, NOk =/= ok], + NodeErrors == [] orelse + ?SLOG(warning, #{ + msg => "rpc_get_transformation_metrics_errors", + errors => NodeErrors + }), + NodeMetrics = [format_metrics(Node, Metrics) || {Node, {ok, Metrics}} <- NodeResults], + Response = #{ + metrics => aggregate_metrics(NodeMetrics), + node_metrics => NodeMetrics + }, + ?OK(Response) + end, + not_found() + ). + +'/message_transformations/transformation/:name/metrics/reset'(post, #{bindings := #{name := Name}}) -> + with_transformation( + Name, + fun() -> + Nodes = emqx:running_nodes(), + Results = emqx_metrics_proto_v2:reset_metrics(Nodes, ?METRIC_NAME, Name, 5_000), + NodeResults = lists:zip(Nodes, Results), + NodeErrors = [Result || Result = {_Node, {NOk, _}} <- NodeResults, NOk =/= ok], + NodeErrors == [] orelse + ?SLOG(warning, #{ + msg => "rpc_reset_transformation_metrics_errors", + errors => NodeErrors + }), + ?NO_CONTENT + end, + not_found() + ). 
+ +%%------------------------------------------------------------------------------------------------- +%% Internal fns +%%------------------------------------------------------------------------------------------------- + +ref(Struct) -> hoconsc:ref(?MODULE, Struct). +mk(Type, Opts) -> hoconsc:mk(Type, Opts). +array(Type) -> hoconsc:array(Type). + +%% FIXME: all examples +example_input_create() -> + #{ + <<"sql_check">> => + #{ + summary => <<"Using a SQL check">>, + value => example_transformation([example_sql_check()]) + }, + <<"avro_check">> => + #{ + summary => <<"Using an Avro schema check">>, + value => example_transformation([example_avro_check()]) + } + }. + +example_input_update() -> + #{ + <<"update">> => + #{ + summary => <<"Update">>, + value => example_transformation([example_sql_check()]) + } + }. + +example_input_reorder() -> + #{ + <<"reorder">> => + #{ + summary => <<"Update">>, + value => #{ + order => [<<"bar">>, <<"foo">>, <<"baz">>] + } + } + }. + +example_return_list() -> + OtherVal0 = example_transformation([example_avro_check()]), + OtherVal = OtherVal0#{name => <<"other_transformation">>}, + #{ + <<"list">> => + #{ + summary => <<"List">>, + value => [ + example_transformation([example_sql_check()]), + OtherVal + ] + } + }. + +example_return_create() -> + example_input_create(). + +example_return_update() -> + example_input_update(). + +example_return_lookup() -> + example_input_create(). + +example_return_metrics() -> + Metrics = #{ + matched => 2, + succeeded => 1, + failed => 1, + rate => 1.23, + rate_last5m => 0.88, + rate_max => 1.87 + }, + #{ + <<"metrics">> => + #{ + summary => <<"Metrics">>, + value => #{ + metrics => Metrics, + node_metrics => + [ + #{ + node => <<"emqx@127.0.0.1">>, + metrics => Metrics + } + ] + } + } + }. + +example_transformation(Checks) -> + #{ + name => <<"my_transformation">>, + enable => true, + description => <<"my transformation">>, + tags => [<<"transformation">>], + topics => [<<"t/+">>], + strategy => <<"all_pass">>, + failure_action => <<"drop">>, + log_failure => #{<<"level">> => <<"info">>}, + checks => Checks + }. + +example_sql_check() -> + #{ + type => <<"sql">>, + sql => <<"select payload.temp as t where t > 10">> + }. + +example_avro_check() -> + #{ + type => <<"avro">>, + schema => <<"my_avro_schema">> + }. + +error_schema(Code, Message) -> + error_schema(Code, Message, _ExtraFields = []). + +error_schema(Code, Message, ExtraFields) when is_atom(Code) -> + error_schema([Code], Message, ExtraFields); +error_schema(Codes, Message, ExtraFields) when is_list(Message) -> + error_schema(Codes, list_to_binary(Message), ExtraFields); +error_schema(Codes, Message, ExtraFields) when is_list(Codes) andalso is_binary(Message) -> + ExtraFields ++ emqx_dashboard_swagger:error_codes(Codes, Message). + +do_reorder(Order) -> + case emqx_message_transformation:reorder(Order) of + {ok, _} -> + ?NO_CONTENT; + {error, + {pre_config_update, _HandlerMod, #{ + not_found := NotFound, + duplicated := Duplicated, + not_reordered := NotReordered + }}} -> + Msg0 = ?ERROR_MSG('BAD_REQUEST', <<"Bad request">>), + Msg = Msg0#{ + not_found => NotFound, + duplicated => Duplicated, + not_reordered => NotReordered + }, + {400, Msg}; + {error, Error} -> + ?BAD_REQUEST(Error) + end. + +do_enable_disable(Transformation, Enable) -> + RawTransformation = make_serializable(Transformation), + case emqx_message_transformation:update(RawTransformation#{<<"enable">> => Enable}) of + {ok, _} -> + ?NO_CONTENT; + {error, Reason} -> + ?BAD_REQUEST(Reason) + end. 
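+
+%% with_transformation/3 (below) looks the transformation up by name and calls
+%% FoundFn (arity 0 or 1, optionally receiving the transformation) when it
+%% exists, or NotFoundFn otherwise.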
+ +with_transformation(Name, FoundFn, NotFoundFn) -> + case emqx_message_transformation:lookup(Name) of + {ok, Transformation} -> + {arity, Arity} = erlang:fun_info(FoundFn, arity), + case Arity of + 1 -> FoundFn(Transformation); + 0 -> FoundFn() + end; + {error, not_found} -> + NotFoundFn() + end. + +return(Response) -> + fun() -> Response end. + +not_found() -> + return(?NOT_FOUND(<<"Transformation not found">>)). + +make_serializable(Transformation0) -> + Schema = emqx_message_transformation_schema, + Transformation1 = transformation_out(Transformation0), + Transformation = emqx_utils_maps:binary_key_map(Transformation1), + RawConfig = #{ + <<"message_transformation">> => #{ + <<"transformations">> => + [Transformation] + } + }, + #{ + <<"message_transformation">> := #{ + <<"transformations">> := + [Serialized] + } + } = + hocon_tconf:make_serializable(Schema, RawConfig, #{}), + Serialized. + +format_metrics(Node, #{ + counters := #{ + 'matched' := Matched, + 'succeeded' := Succeeded, + 'failed' := Failed + }, + rate := #{ + 'matched' := #{ + current := MatchedRate, + last5m := Matched5mRate, + max := MatchedMaxRate + } + } +}) -> + #{ + metrics => #{ + 'matched' => Matched, + 'succeeded' => Succeeded, + 'failed' => Failed, + rate => MatchedRate, + rate_last5m => Matched5mRate, + rate_max => MatchedMaxRate + }, + node => Node + }; +format_metrics(Node, _) -> + #{ + metrics => #{ + 'matched' => 0, + 'succeeded' => 0, + 'failed' => 0, + rate => 0, + rate_last5m => 0, + rate_max => 0 + }, + node => Node + }. + +aggregate_metrics(NodeMetrics) -> + ErrorLogger = fun(_) -> ok end, + lists:foldl( + fun(#{metrics := Metrics}, Acc) -> + emqx_utils_maps:best_effort_recursive_sum(Metrics, Acc, ErrorLogger) + end, + #{}, + NodeMetrics + ). + +transformation_out(Transformation) -> + maps:update_with( + operations, + fun(Os) -> lists:map(fun operation_out/1, Os) end, + Transformation + ). + +operation_out(Operation0) -> + %% TODO: remove injected bif module + Operation = maps:update_with( + value, + fun(V) -> iolist_to_binary(emqx_variform:decompile(V)) end, + Operation0 + ), + maps:update_with( + key, + fun(Path) -> iolist_to_binary(lists:join(".", Path)) end, + Operation + ). diff --git a/apps/emqx_message_transformation/src/emqx_message_transformation_registry.erl b/apps/emqx_message_transformation/src/emqx_message_transformation_registry.erl new file mode 100644 index 000000000..dd692a55c --- /dev/null +++ b/apps/emqx_message_transformation/src/emqx_message_transformation_registry.erl @@ -0,0 +1,245 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- +-module(emqx_message_transformation_registry). + +-behaviour(gen_server). + +%% API +-export([ + lookup/1, + insert/2, + update/3, + delete/1, + reindex_positions/1, + + matching_transformations/1, + + %% metrics + get_metrics/1, + inc_matched/1, + inc_succeeded/1, + inc_failed/1, + + start_link/0, + metrics_worker_spec/0 +]). + +%% `gen_server' API +-export([ + init/1, + handle_call/3, + handle_cast/2 +]). + +%%------------------------------------------------------------------------------ +%% Type declarations +%%------------------------------------------------------------------------------ + +-define(TRANSFORMATION_TOPIC_INDEX, emqx_message_transformation_index). +-define(TRANSFORMATION_TAB, emqx_message_transformation_tab). + +-define(METRIC_NAME, message_transformation). 
+-define(METRICS, [ + 'matched', + 'succeeded', + 'failed' +]). +-define(RATE_METRICS, ['matched']). + +-type transformation_name() :: binary(). +%% TODO +-type transformation() :: #{atom() => term()}. +-type position_index() :: pos_integer(). + +-record(reindex_positions, {transformations :: [transformation()]}). +-record(insert, {pos :: position_index(), transformation :: transformation()}). +-record(update, {old :: transformation(), pos :: position_index(), new :: transformation()}). +-record(delete, {transformation :: transformation()}). + +%%------------------------------------------------------------------------------ +%% API +%%------------------------------------------------------------------------------ + +-spec start_link() -> gen_server:start_ret(). +start_link() -> + gen_server:start_link({local, ?MODULE}, ?MODULE, [], []). + +-spec lookup(transformation_name()) -> + {ok, transformation()} | {error, not_found}. +lookup(Name) -> + case emqx_utils_ets:lookup_value(?TRANSFORMATION_TAB, Name, undefined) of + undefined -> + {error, not_found}; + Transformation -> + {ok, Transformation} + end. + +-spec reindex_positions([transformation()]) -> ok. +reindex_positions(Transformations) -> + gen_server:call(?MODULE, #reindex_positions{transformations = Transformations}, infinity). + +-spec insert(position_index(), transformation()) -> ok. +insert(Pos, Transformation) -> + gen_server:call(?MODULE, #insert{pos = Pos, transformation = Transformation}, infinity). + +-spec update(transformation(), position_index(), transformation()) -> ok. +update(Old, Pos, New) -> + gen_server:call(?MODULE, #update{old = Old, pos = Pos, new = New}, infinity). + +-spec delete(transformation()) -> ok. +delete(Transformation) -> + gen_server:call(?MODULE, #delete{transformation = Transformation}, infinity). + +%% @doc Returns a list of matching transformation names, sorted by their configuration order. +-spec matching_transformations(emqx_types:topic()) -> [transformation()]. +matching_transformations(Topic) -> + Transformations0 = [ + {Pos, Transformation} + || M <- emqx_topic_index:matches(Topic, ?TRANSFORMATION_TOPIC_INDEX, [unique]), + [Pos] <- [emqx_topic_index:get_record(M, ?TRANSFORMATION_TOPIC_INDEX)], + {ok, Transformation} <- [ + lookup(emqx_topic_index:get_id(M)) + ] + ], + Transformations1 = lists:sort( + fun({Pos1, _V1}, {Pos2, _V2}) -> Pos1 =< Pos2 end, Transformations0 + ), + lists:map(fun({_Pos, V}) -> V end, Transformations1). + +-spec metrics_worker_spec() -> supervisor:child_spec(). +metrics_worker_spec() -> + emqx_metrics_worker:child_spec(message_transformation_metrics, ?METRIC_NAME). + +-spec get_metrics(transformation_name()) -> emqx_metrics_worker:metrics(). +get_metrics(Name) -> + emqx_metrics_worker:get_metrics(?METRIC_NAME, Name). + +-spec inc_matched(transformation_name()) -> ok. +inc_matched(Name) -> + emqx_metrics_worker:inc(?METRIC_NAME, Name, 'matched'). + +-spec inc_succeeded(transformation_name()) -> ok. +inc_succeeded(Name) -> + emqx_metrics_worker:inc(?METRIC_NAME, Name, 'succeeded'). + +-spec inc_failed(transformation_name()) -> ok. +inc_failed(Name) -> + emqx_metrics_worker:inc(?METRIC_NAME, Name, 'failed'). + +%%------------------------------------------------------------------------------ +%% `gen_server' API +%%------------------------------------------------------------------------------ + +init(_) -> + create_tables(), + State = #{}, + {ok, State}. 
+ +handle_call(#reindex_positions{transformations = Transformations}, _From, State) -> + do_reindex_positions(Transformations), + {reply, ok, State}; +handle_call(#insert{pos = Pos, transformation = Transformation}, _From, State) -> + do_insert(Pos, Transformation), + {reply, ok, State}; +handle_call(#update{old = OldTransformation, pos = Pos, new = NewTransformation}, _From, State) -> + ok = do_update(OldTransformation, Pos, NewTransformation), + {reply, ok, State}; +handle_call(#delete{transformation = Transformation}, _From, State) -> + do_delete(Transformation), + {reply, ok, State}; +handle_call(_Call, _From, State) -> + {reply, ignored, State}. + +handle_cast(_Cast, State) -> + {noreply, State}. + +%%------------------------------------------------------------------------------ +%% Internal fns +%%------------------------------------------------------------------------------ + +create_tables() -> + _ = emqx_utils_ets:new(?TRANSFORMATION_TOPIC_INDEX, [ + public, ordered_set, {read_concurrency, true} + ]), + _ = emqx_utils_ets:new(?TRANSFORMATION_TAB, [public, ordered_set, {read_concurrency, true}]), + ok. + +do_reindex_positions(Transformations) -> + lists:foreach( + fun({Pos, Transformation}) -> + #{ + name := Name, + topics := Topics + } = Transformation, + do_insert_into_tab(Name, Transformation, Pos), + update_topic_index(Name, Pos, Topics) + end, + lists:enumerate(Transformations) + ). + +do_insert(Pos, Transformation) -> + #{ + enable := Enabled, + name := Name, + topics := Topics + } = Transformation, + maybe_create_metrics(Name), + do_insert_into_tab(Name, Transformation, Pos), + Enabled andalso update_topic_index(Name, Pos, Topics), + ok. + +do_update(OldTransformation, Pos, NewTransformation) -> + #{topics := OldTopics} = OldTransformation, + #{ + enable := Enabled, + name := Name, + topics := NewTopics + } = NewTransformation, + maybe_create_metrics(Name), + do_insert_into_tab(Name, NewTransformation, Pos), + delete_topic_index(Name, OldTopics), + Enabled andalso update_topic_index(Name, Pos, NewTopics), + ok. + +do_delete(Transformation) -> + #{ + name := Name, + topics := Topics + } = Transformation, + ets:delete(?TRANSFORMATION_TAB, Name), + delete_topic_index(Name, Topics), + drop_metrics(Name), + ok. + +do_insert_into_tab(Name, Transformation0, Pos) -> + Transformation = Transformation0#{pos => Pos}, + ets:insert(?TRANSFORMATION_TAB, {Name, Transformation}), + ok. + +maybe_create_metrics(Name) -> + case emqx_metrics_worker:has_metrics(?METRIC_NAME, Name) of + true -> + ok = emqx_metrics_worker:reset_metrics(?METRIC_NAME, Name); + false -> + ok = emqx_metrics_worker:create_metrics(?METRIC_NAME, Name, ?METRICS, ?RATE_METRICS) + end. + +drop_metrics(Name) -> + ok = emqx_metrics_worker:clear_metrics(?METRIC_NAME, Name). + +update_topic_index(Name, Pos, Topics) -> + lists:foreach( + fun(Topic) -> + true = emqx_topic_index:insert(Topic, Name, Pos, ?TRANSFORMATION_TOPIC_INDEX) + end, + Topics + ). + +delete_topic_index(Name, Topics) -> + lists:foreach( + fun(Topic) -> + true = emqx_topic_index:delete(Topic, Name, ?TRANSFORMATION_TOPIC_INDEX) + end, + Topics + ). 
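+
+%% Note on the index layout: each enabled transformation contributes one entry
+%% per configured topic filter to ?TRANSFORMATION_TOPIC_INDEX, keyed by the
+%% filter and carrying the transformation's position, which is what allows
+%% matching_transformations/1 to return matches sorted by configuration order.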
diff --git a/apps/emqx_message_transformation/src/emqx_message_transformation_schema.erl b/apps/emqx_message_transformation/src/emqx_message_transformation_schema.erl new file mode 100644 index 000000000..9dee9d5b0 --- /dev/null +++ b/apps/emqx_message_transformation/src/emqx_message_transformation_schema.erl @@ -0,0 +1,331 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- +-module(emqx_message_transformation_schema). + +-include_lib("typerefl/include/types.hrl"). +-include_lib("hocon/include/hoconsc.hrl"). + +%% `hocon_schema' API +-export([ + namespace/0, + roots/0, + fields/1 +]). + +%% `minirest_trails' API +-export([ + api_schema/1 +]). + +%%------------------------------------------------------------------------------ +%% Type declarations +%%------------------------------------------------------------------------------ + +-define(BIF_MOD_STR, "emqx_message_transformation_bif"). + +-define(ALLOWED_ROOT_KEYS, [ + <<"payload">>, + <<"qos">>, + <<"retain">>, + <<"topic">>, + <<"user_property">> +]). + +-type key() :: list(binary()) | binary(). +-reflect_type([key/0]). + +%%------------------------------------------------------------------------------ +%% `hocon_schema' API +%%------------------------------------------------------------------------------ + +namespace() -> message_transformation. + +roots() -> + [ + {message_transformation, + mk(ref(message_transformation), #{importance => ?IMPORTANCE_HIDDEN})} + ]. + +fields(message_transformation) -> + [ + {transformations, + mk( + hoconsc:array(ref(transformation)), + #{ + default => [], + desc => ?DESC("transformations"), + validator => fun validate_unique_names/1 + } + )} + ]; +fields(transformation) -> + [ + {tags, emqx_schema:tags_schema()}, + {description, emqx_schema:description_schema()}, + {enable, mk(boolean(), #{desc => ?DESC("config_enable"), default => true})}, + {name, + mk( + binary(), + #{ + required => true, + validator => fun emqx_resource:validate_name/1, + desc => ?DESC("name") + } + )}, + {topics, + mk( + hoconsc:union([binary(), hoconsc:array(binary())]), + #{ + desc => ?DESC("topics"), + converter => fun ensure_array/2, + validator => fun validate_unique_topics/1, + required => true + } + )}, + {failure_action, + mk( + hoconsc:enum([drop, disconnect, ignore]), + #{desc => ?DESC("failure_action"), required => true} + )}, + {log_failure, + mk( + ref(log_failure), + #{desc => ?DESC("log_failure_at"), default => #{}} + )}, + {payload_decoder, + mk( + hoconsc:union(fun payload_serde_member_selector/1), + #{desc => ?DESC("payload_decoder"), default => #{<<"type">> => <<"none">>}} + )}, + {payload_encoder, + mk( + hoconsc:union(fun payload_serde_member_selector/1), + #{desc => ?DESC("payload_encoder"), default => #{<<"type">> => <<"none">>}} + )}, + {operations, + mk( + hoconsc:array(ref(operation)), + #{ + desc => ?DESC("operation"), + required => true, + validator => fun validate_operations/1 + } + )} + ]; +fields(log_failure) -> + [ + {level, + mk( + hoconsc:enum([error, warning, notice, info, debug, none]), + #{desc => ?DESC("log_failure_at"), default => info} + )} + ]; +fields(operation) -> + [ + %% TODO: more strict type check?? 
+ {key, + mk( + typerefl:alias("string", key()), #{ + desc => ?DESC("operation_key"), + required => true, + converter => fun parse_key_path/2 + } + )}, + {value, + mk(typerefl:alias("string", any()), #{ + desc => ?DESC("operation_value"), + required => true, + converter => fun compile_variform/2 + })} + ]; +fields(payload_serde_none) -> + [{type, mk(none, #{default => none, desc => ?DESC("payload_serde_none_type")})}]; +fields(payload_serde_json) -> + [{type, mk(json, #{default => json, desc => ?DESC("payload_serde_json_type")})}]; +fields(payload_serde_avro) -> + [ + {type, mk(avro, #{default => avro, desc => ?DESC("payload_serde_avro_type")})}, + {schema, mk(binary(), #{required => true, desc => ?DESC("payload_serde_avro_schema")})} + ]; +fields(payload_serde_protobuf) -> + [ + {type, mk(protobuf, #{default => protobuf, desc => ?DESC("payload_serde_protobuf_type")})}, + {schema, mk(binary(), #{required => true, desc => ?DESC("payload_serde_protobuf_schema")})}, + {message_type, + mk(binary(), #{required => true, desc => ?DESC("payload_serde_protobuf_message_type")})} + ]. + +%%------------------------------------------------------------------------------ +%% `minirest_trails' API +%%------------------------------------------------------------------------------ + +api_schema(list) -> + hoconsc:array(ref(transformation)); +api_schema(lookup) -> + ref(transformation); +api_schema(post) -> + ref(transformation); +api_schema(put) -> + ref(transformation). + +%%------------------------------------------------------------------------------ +%% Internal exports +%%------------------------------------------------------------------------------ + +%%------------------------------------------------------------------------------ +%% Internal fns +%%------------------------------------------------------------------------------ + +mk(Type, Meta) -> hoconsc:mk(Type, Meta). +ref(Name) -> hoconsc:ref(?MODULE, Name). + +payload_serde_member_selector(all_union_members) -> + payload_serde_refs(); +payload_serde_member_selector({value, V}) -> + payload_serde_refs(V). + +payload_serde_refs() -> + [ + payload_serde_none, + payload_serde_json, + payload_serde_avro, + payload_serde_protobuf + ]. +payload_serde_refs(#{<<"type">> := Type} = V) when is_atom(Type) -> + payload_serde_refs(V#{<<"type">> := atom_to_binary(Type)}); +payload_serde_refs(#{<<"type">> := <<"none">>}) -> + [ref(payload_serde_none)]; +payload_serde_refs(#{<<"type">> := <<"json">>}) -> + [ref(payload_serde_json)]; +payload_serde_refs(#{<<"type">> := <<"avro">>}) -> + [ref(payload_serde_avro)]; +payload_serde_refs(#{<<"type">> := <<"protobuf">>}) -> + [ref(payload_serde_protobuf)]; +payload_serde_refs(_Value) -> + Expected = lists:join( + " | ", + [ + Name + || T <- payload_serde_refs(), + "payload_serde_" ++ Name <- [atom_to_list(T)] + ] + ), + throw(#{ + field_name => type, + expected => iolist_to_binary(Expected) + }). + +ensure_array(undefined, _) -> undefined; +ensure_array(L, _) when is_list(L) -> L; +ensure_array(B, _) -> [B]. + +validate_unique_names(Transformations0) -> + Transformations = emqx_utils_maps:binary_key_map(Transformations0), + do_validate_unique_names(Transformations, #{}). + +do_validate_unique_names(_Transformations = [], _Acc) -> + ok; +do_validate_unique_names([#{<<"name">> := Name} | _Rest], Acc) when is_map_key(Name, Acc) -> + {error, <<"duplicated name: ", Name/binary>>}; +do_validate_unique_names([#{<<"name">> := Name} | Rest], Acc) -> + do_validate_unique_names(Rest, Acc#{Name => true}). 
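+
+%% Illustrative example (hypothetical name): a transformations list containing
+%% two entries both named <<"t1">> fails validation above with
+%% {error, <<"duplicated name: t1">>}.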
+ +validate_unique_topics(Topics) -> + Grouped = maps:groups_from_list( + fun(T) -> T end, + Topics + ), + DuplicatedMap = maps:filter( + fun(_T, Ts) -> length(Ts) > 1 end, + Grouped + ), + case maps:keys(DuplicatedMap) of + [] -> + ok; + Duplicated -> + Msg = iolist_to_binary([ + <<"duplicated topics: ">>, + lists:join(", ", Duplicated) + ]), + {error, Msg} + end. + +validate_operations([]) -> + {error, <<"at least one operation must be defined">>}; +validate_operations([_ | _]) -> + ok. + +compile_variform(Expression, #{make_serializable := true}) -> + case is_binary(Expression) of + true -> + Expression; + false -> + emqx_variform:decompile(Expression) + end; +compile_variform(Expression, _Opts) -> + case emqx_variform:compile(Expression) of + {ok, Compiled} -> + transform_bifs(Compiled); + {error, Reason} -> + throw(#{expression => Expression, reason => Reason}) + end. + +transform_bifs(#{form := Form} = Compiled) -> + Compiled#{form := traverse_transform_bifs(Form)}. + +traverse_transform_bifs({call, FnName, Args}) -> + FQFnName = fully_qualify_local_bif(FnName), + {call, FQFnName, lists:map(fun traverse_transform_bifs/1, Args)}; +traverse_transform_bifs({array, Elems}) -> + {array, lists:map(fun traverse_transform_bifs/1, Elems)}; +traverse_transform_bifs(Node) -> + Node. + +fully_qualify_local_bif("json_encode") -> + ?BIF_MOD_STR ++ ".json_encode"; +fully_qualify_local_bif("json_decode") -> + ?BIF_MOD_STR ++ ".json_decode"; +fully_qualify_local_bif(FnName) -> + FnName. + +parse_key_path(<<"">>, _Opts) -> + throw(#{reason => <<"key must be non-empty">>}); +parse_key_path(Key, #{make_serializable := true}) -> + case is_binary(Key) of + true -> + Key; + false -> + iolist_to_binary(lists:join(".", Key)) + end; +parse_key_path(Key, _Opts) when is_binary(Key) -> + Parts = binary:split(Key, <<".">>, [global]), + case lists:any(fun(P) -> P =:= <<"">> end, Parts) of + true -> + throw(#{invalid_key => Key}); + false -> + ok + end, + case Parts of + [<<"payload">> | _] -> + ok; + [<<"qos">>] -> + ok; + [<<"retain">>] -> + ok; + [<<"topic">>] -> + ok; + [<<"user_property">>, _] -> + ok; + [<<"user_property">>] -> + throw(#{ + invalid_key => Key, reason => <<"must define exactly one key inside user property">> + }); + [<<"user_property">> | _] -> + throw(#{ + invalid_key => Key, reason => <<"must define exactly one key inside user property">> + }); + _ -> + throw(#{invalid_key => Key, allowed_root_keys => ?ALLOWED_ROOT_KEYS}) + end, + Parts. diff --git a/apps/emqx_message_transformation/src/emqx_message_transformation_sup.erl b/apps/emqx_message_transformation/src/emqx_message_transformation_sup.erl new file mode 100644 index 000000000..5e627e9f7 --- /dev/null +++ b/apps/emqx_message_transformation/src/emqx_message_transformation_sup.erl @@ -0,0 +1,47 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- +-module(emqx_message_transformation_sup). + +-behaviour(supervisor). + +%% API +-export([start_link/0]). + +%% `supervisor' API +-export([init/1]). + +%%------------------------------------------------------------------------------ +%% API +%%------------------------------------------------------------------------------ + +start_link() -> + supervisor:start_link({local, ?MODULE}, ?MODULE, []). 
+ +%%------------------------------------------------------------------------------ +%% `supervisor' API +%%------------------------------------------------------------------------------ + +init([]) -> + Registry = worker_spec(emqx_message_transformation_registry), + Metrics = emqx_message_transformation_registry:metrics_worker_spec(), + SupFlags = #{ + strategy => one_for_one, + intensity => 10, + period => 10 + }, + ChildSpecs = [Metrics, Registry], + {ok, {SupFlags, ChildSpecs}}. + +%%------------------------------------------------------------------------------ +%% Internal fns +%%------------------------------------------------------------------------------ + +worker_spec(Mod) -> + #{ + id => Mod, + start => {Mod, start_link, []}, + restart => permanent, + shutdown => 5_000, + type => worker + }. diff --git a/apps/emqx_message_transformation/test/emqx_message_transformation_http_api_SUITE.erl b/apps/emqx_message_transformation/test/emqx_message_transformation_http_api_SUITE.erl new file mode 100644 index 000000000..60779911c --- /dev/null +++ b/apps/emqx_message_transformation/test/emqx_message_transformation_http_api_SUITE.erl @@ -0,0 +1,1443 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- +-module(emqx_message_transformation_http_api_SUITE). + +-compile(export_all). +-compile(nowarn_export_all). + +-include_lib("eunit/include/eunit.hrl"). +-include_lib("common_test/include/ct.hrl"). +-include_lib("snabbkaffe/include/snabbkaffe.hrl"). +-include_lib("emqx/include/asserts.hrl"). +-include_lib("emqx/include/emqx_mqtt.hrl"). + +-import(emqx_common_test_helpers, [on_exit/1]). + +-define(RECORDED_EVENTS_TAB, recorded_actions). + +%%------------------------------------------------------------------------------ +%% CT boilerplate +%%------------------------------------------------------------------------------ + +all() -> + emqx_common_test_helpers:all(?MODULE). + +init_per_suite(Config) -> + Apps = emqx_cth_suite:start( + lists:flatten( + [ + emqx, + emqx_conf, + emqx_message_transformation, + emqx_management, + emqx_mgmt_api_test_util:emqx_dashboard(), + emqx_schema_registry, + emqx_rule_engine + ] + ), + #{work_dir => emqx_cth_suite:work_dir(Config)} + ), + {ok, _} = emqx_common_test_http:create_default_app(), + [{apps, Apps} | Config]. + +end_per_suite(Config) -> + Apps = ?config(apps, Config), + ok = emqx_cth_suite:stop(Apps), + ok. + +init_per_testcase(_TestCase, Config) -> + Config. + +end_per_testcase(_TestCase, _Config) -> + clear_all_transformations(), + snabbkaffe:stop(), + reset_all_global_metrics(), + emqx_common_test_helpers:call_janitor(), + ok. + +%%------------------------------------------------------------------------------ +%% Helper fns +%%------------------------------------------------------------------------------ + +-define(assertIndexOrder(EXPECTED, TOPIC), assert_index_order(EXPECTED, TOPIC, #{line => ?LINE})). + +bin(X) -> emqx_utils_conv:bin(X). + +clear_all_transformations() -> + lists:foreach( + fun(#{name := Name}) -> + {ok, _} = emqx_message_transformation:delete(Name) + end, + emqx_message_transformation:list() + ). + +reset_all_global_metrics() -> + lists:foreach( + fun({Name, _}) -> + emqx_metrics:set(Name, 0) + end, + emqx_metrics:all() + ). + +maybe_json_decode(X) -> + case emqx_utils_json:safe_decode(X, [return_maps]) of + {ok, Decoded} -> Decoded; + {error, _} -> X + end. 
+ +request(Method, Path, Params) -> + AuthHeader = emqx_mgmt_api_test_util:auth_header_(), + Opts = #{return_all => true}, + case emqx_mgmt_api_test_util:request_api(Method, Path, "", AuthHeader, Params, Opts) of + {ok, {Status, Headers, Body0}} -> + Body = maybe_json_decode(Body0), + {ok, {Status, Headers, Body}}; + {error, {Status, Headers, Body0}} -> + Body = + case emqx_utils_json:safe_decode(Body0, [return_maps]) of + {ok, Decoded0 = #{<<"message">> := Msg0}} -> + Msg = maybe_json_decode(Msg0), + Decoded0#{<<"message">> := Msg}; + {ok, Decoded0} -> + Decoded0; + {error, _} -> + Body0 + end, + {error, {Status, Headers, Body}}; + Error -> + Error + end. + +transformation(Name, Operations) -> + transformation(Name, Operations, _Overrides = #{}). + +transformation(Name, Operations0, Overrides) -> + Operations = lists:map(fun normalize_operation/1, Operations0), + Default = #{ + <<"tags">> => [<<"some">>, <<"tags">>], + <<"description">> => <<"my transformation">>, + <<"enable">> => true, + <<"name">> => Name, + <<"topics">> => [<<"t/+">>], + <<"failure_action">> => <<"drop">>, + <<"log_failure">> => #{<<"level">> => <<"warning">>}, + <<"payload_decoder">> => #{<<"type">> => <<"json">>}, + <<"payload_encoder">> => #{<<"type">> => <<"json">>}, + <<"operations">> => Operations + }, + emqx_utils_maps:deep_merge(Default, Overrides). + +normalize_operation({K, V}) -> + #{<<"key">> => bin(K), <<"value">> => bin(V)}. + +dummy_operation() -> + topic_operation(<<"concat([topic, '/', payload.t])">>). + +topic_operation(VariformExpr) -> + operation(topic, VariformExpr). + +operation(Key, VariformExpr) -> + {Key, VariformExpr}. + +api_root() -> "message_transformations". + +simplify_result(Res) -> + case Res of + {error, {{_, Status, _}, _, Body}} -> + {Status, Body}; + {ok, {{_, Status, _}, _, Body}} -> + {Status, Body} + end. + +list() -> + Path = emqx_mgmt_api_test_util:api_path([api_root()]), + Res = request(get, Path, _Params = []), + ct:pal("list result:\n ~p", [Res]), + simplify_result(Res). + +lookup(Name) -> + Path = emqx_mgmt_api_test_util:api_path([api_root(), "transformation", Name]), + Res = request(get, Path, _Params = []), + ct:pal("lookup ~s result:\n ~p", [Name, Res]), + simplify_result(Res). + +insert(Params) -> + Path = emqx_mgmt_api_test_util:api_path([api_root()]), + Res = request(post, Path, Params), + ct:pal("insert result:\n ~p", [Res]), + simplify_result(Res). + +update(Params) -> + Path = emqx_mgmt_api_test_util:api_path([api_root()]), + Res = request(put, Path, Params), + ct:pal("update result:\n ~p", [Res]), + simplify_result(Res). + +delete(Name) -> + Path = emqx_mgmt_api_test_util:api_path([api_root(), "transformation", Name]), + Res = request(delete, Path, _Params = []), + ct:pal("delete result:\n ~p", [Res]), + simplify_result(Res). + +reorder(Order) -> + Path = emqx_mgmt_api_test_util:api_path([api_root(), "reorder"]), + Params = #{<<"order">> => Order}, + Res = request(post, Path, Params), + ct:pal("reorder result:\n ~p", [Res]), + simplify_result(Res). + +enable(Name) -> + Path = emqx_mgmt_api_test_util:api_path([api_root(), "transformation", Name, "enable", "true"]), + Res = request(post, Path, _Params = []), + ct:pal("enable result:\n ~p", [Res]), + simplify_result(Res). + +disable(Name) -> + Path = emqx_mgmt_api_test_util:api_path([api_root(), "transformation", Name, "enable", "false"]), + Res = request(post, Path, _Params = []), + ct:pal("disable result:\n ~p", [Res]), + simplify_result(Res). 
+ +get_metrics(Name) -> + Path = emqx_mgmt_api_test_util:api_path([api_root(), "transformation", Name, "metrics"]), + Res = request(get, Path, _Params = []), + ct:pal("get metrics result:\n ~p", [Res]), + simplify_result(Res). + +reset_metrics(Name) -> + Path = emqx_mgmt_api_test_util:api_path([api_root(), "transformation", Name, "metrics", "reset"]), + Res = request(post, Path, _Params = []), + ct:pal("reset metrics result:\n ~p", [Res]), + simplify_result(Res). + +all_metrics() -> + Path = emqx_mgmt_api_test_util:api_path(["metrics"]), + Res = request(get, Path, _Params = []), + ct:pal("all metrics result:\n ~p", [Res]), + simplify_result(Res). + +monitor_metrics() -> + Path = emqx_mgmt_api_test_util:api_path(["monitor"]), + Res = request(get, Path, _Params = []), + ct:pal("monitor metrics result:\n ~p", [Res]), + simplify_result(Res). + +upload_backup(BackupFilePath) -> + Path = emqx_mgmt_api_test_util:api_path(["data", "files"]), + Res = emqx_mgmt_api_test_util:upload_request( + Path, + BackupFilePath, + "filename", + <<"application/octet-stream">>, + [], + emqx_mgmt_api_test_util:auth_header_() + ), + simplify_result(Res). + +export_backup() -> + Path = emqx_mgmt_api_test_util:api_path(["data", "export"]), + Res = request(post, Path, []), + simplify_result(Res). + +import_backup(BackupName) -> + Path = emqx_mgmt_api_test_util:api_path(["data", "import"]), + Body = #{<<"filename">> => unicode:characters_to_binary(BackupName)}, + Res = request(post, Path, Body), + simplify_result(Res). + +connect(ClientId) -> + connect(ClientId, _IsPersistent = false). + +connect(ClientId, IsPersistent) -> + Properties = emqx_utils_maps:put_if(#{}, 'Session-Expiry-Interval', 30, IsPersistent), + {ok, Client} = emqtt:start_link([ + {clean_start, true}, + {clientid, ClientId}, + {properties, Properties}, + {proto_ver, v5} + ]), + {ok, _} = emqtt:connect(Client), + on_exit(fun() -> catch emqtt:stop(Client) end), + Client. + +publish(Client, Topic, Payload) -> + publish(Client, Topic, Payload, _QoS = 0). + +publish(Client, Topic, {raw, Payload}, QoS) -> + case emqtt:publish(Client, Topic, Payload, QoS) of + ok -> ok; + {ok, _} -> ok; + Err -> Err + end; +publish(Client, Topic, Payload, QoS) -> + case emqtt:publish(Client, Topic, emqx_utils_json:encode(Payload), QoS) of + ok -> ok; + {ok, _} -> ok; + Err -> Err + end. + +json_valid_payloads() -> + [ + #{i => 10, s => <<"s">>}, + #{i => 10} + ]. + +json_invalid_payloads() -> + [ + #{i => <<"wrong type">>}, + #{x => <<"unknown property">>} + ]. + +json_create_serde(SerdeName) -> + Source = #{ + type => object, + properties => #{ + i => #{type => integer}, + s => #{type => string} + }, + required => [<<"i">>], + additionalProperties => false + }, + Schema = #{type => json, source => emqx_utils_json:encode(Source)}, + ok = emqx_schema_registry:add_schema(SerdeName, Schema), + on_exit(fun() -> ok = emqx_schema_registry:delete_schema(SerdeName) end), + ok. + +avro_valid_payloads(SerdeName) -> + lists:map( + fun(Payload) -> emqx_schema_registry_serde:encode(SerdeName, Payload) end, + [ + #{i => 10, s => <<"s">>}, + #{i => 10} + ] + ). + +avro_invalid_payloads() -> + [ + emqx_utils_json:encode(#{i => 10, s => <<"s">>}), + <<"">> + ]. 
+ +avro_create_serde(SerdeName) -> + Source = #{ + type => record, + name => <<"test">>, + namespace => <<"emqx.com">>, + fields => [ + #{name => <<"i">>, type => <<"int">>}, + #{name => <<"s">>, type => [<<"null">>, <<"string">>], default => <<"null">>} + ] + }, + Schema = #{type => avro, source => emqx_utils_json:encode(Source)}, + ok = emqx_schema_registry:add_schema(SerdeName, Schema), + on_exit(fun() -> ok = emqx_schema_registry:delete_schema(SerdeName) end), + ok. + +protobuf_valid_payloads(SerdeName, MessageType) -> + lists:map( + fun(Payload) -> emqx_schema_registry_serde:encode(SerdeName, Payload, [MessageType]) end, + [ + #{<<"name">> => <<"some name">>, <<"id">> => 10, <<"email">> => <<"emqx@emqx.io">>}, + #{<<"name">> => <<"some name">>, <<"id">> => 10} + ] + ). + +protobuf_invalid_payloads() -> + [ + emqx_utils_json:encode(#{name => <<"a">>, id => 10, email => <<"email">>}), + <<"not protobuf">> + ]. + +protobuf_create_serde(SerdeName) -> + Source = + << + "message Person {\n" + " required string name = 1;\n" + " required int32 id = 2;\n" + " optional string email = 3;\n" + " }\n" + "message UnionValue {\n" + " oneof u {\n" + " int32 a = 1;\n" + " string b = 2;\n" + " }\n" + "}" + >>, + Schema = #{type => protobuf, source => Source}, + ok = emqx_schema_registry:add_schema(SerdeName, Schema), + on_exit(fun() -> ok = emqx_schema_registry:delete_schema(SerdeName) end), + ok. + +%% Checks that the internal order in the registry/index matches expectation. +assert_index_order(ExpectedOrder, Topic, Comment) -> + ?assertEqual( + ExpectedOrder, + [ + N + || #{name := N} <- emqx_message_transformation_registry:matching_transformations(Topic) + ], + Comment + ). + +create_failure_tracing_rule() -> + Params = #{ + enable => true, + sql => <<"select * from \"$events/message_transformation_failed\" ">>, + actions => [make_trace_fn_action()] + }, + Path = emqx_mgmt_api_test_util:api_path(["rules"]), + Res = request(post, Path, Params), + ct:pal("create failure tracing rule result:\n ~p", [Res]), + case Res of + {ok, {{_, 201, _}, _, #{<<"id">> := RuleId}}} -> + on_exit(fun() -> ok = emqx_rule_engine:delete_rule(RuleId) end), + simplify_result(Res); + _ -> + simplify_result(Res) + end. + +make_trace_fn_action() -> + persistent_term:put({?MODULE, test_pid}, self()), + Fn = <<(atom_to_binary(?MODULE))/binary, ":trace_rule">>, + emqx_utils_ets:new(?RECORDED_EVENTS_TAB, [named_table, public, ordered_set]), + #{function => Fn, args => #{}}. + +trace_rule(Data, Envs, _Args) -> + Now = erlang:monotonic_time(), + ets:insert(?RECORDED_EVENTS_TAB, {Now, #{data => Data, envs => Envs}}), + TestPid = persistent_term:get({?MODULE, test_pid}), + TestPid ! {action, #{data => Data, envs => Envs}}, + ok. + +get_traced_failures_from_rule_engine() -> + ets:tab2list(?RECORDED_EVENTS_TAB). + +assert_all_metrics(Line, Expected) -> + Keys = maps:keys(Expected), + ?retry( + 100, + 10, + begin + Res = all_metrics(), + ?assertMatch({200, _}, Res), + {200, [Metrics]} = Res, + ?assertEqual(Expected, maps:with(Keys, Metrics), #{line => Line}) + end + ), + ok. + +-define(assertAllMetrics(Expected), assert_all_metrics(?LINE, Expected)). + +%% check that dashboard monitor contains the success and failure metric keys +assert_monitor_metrics() -> + ok = snabbkaffe:start_trace(), + %% hack: force monitor to flush data now + {_, {ok, _}} = + ?wait_async_action( + emqx_dashboard_monitor ! 
{sample, erlang:system_time(millisecond)}, + #{?snk_kind := dashboard_monitor_flushed} + ), + Res = monitor_metrics(), + ?assertMatch({200, _}, Res), + {200, Metrics} = Res, + lists:foreach( + fun(M) -> + ?assertMatch( + #{ + <<"transformation_failed">> := _, + <<"transformation_succeeded">> := _ + }, + M + ) + end, + Metrics + ), + ok. + +%%------------------------------------------------------------------------------ +%% Testcases +%%------------------------------------------------------------------------------ + +%% Smoke test where we have a single check and `all_pass' strategy. +t_smoke_test(_Config) -> + Name1 = <<"foo">>, + Operations = [ + operation(qos, <<"payload.q">>), + operation(topic, <<"concat([topic, '/', payload.t])">>), + operation(retain, <<"payload.r">>), + operation(<<"user_property.a">>, <<"payload.u.a">>), + operation(<<"payload">>, <<"payload.p.hello">>) + ], + Transformation1 = transformation(Name1, Operations), + {201, _} = insert(Transformation1), + + lists:foreach( + fun({QoS, IsPersistent}) -> + ct:pal("qos = ~b, is persistent = ~p", [QoS, IsPersistent]), + C = connect(<<"c1">>, IsPersistent), + %% rap => retain as published + {ok, _, [_]} = emqtt:subscribe(C, <<"t/#">>, [{qos, 2}, {rap, true}]), + + {200, _} = update(Transformation1), + + ok = publish( + C, + <<"t/1">>, + #{ + p => #{<<"hello">> => <<"world">>}, + q => QoS, + r => true, + t => <<"t">>, + u => #{a => <<"b">>} + }, + _QosPub = 0 + ), + ?assertReceive( + {publish, #{ + payload := <<"\"world\"">>, + qos := QoS, + retain := true, + topic := <<"t/1/t">>, + properties := #{'User-Property' := [{<<"a">>, <<"b">>}]} + }} + ), + %% remember to clear retained message + on_exit(fun() -> emqx:publish(emqx_message:make(<<"t/1/t">>, <<"">>)) end), + + %% test `disconnect' failure action + Transformation2 = transformation( + Name1, + Operations, + #{<<"failure_action">> => <<"disconnect">>} + ), + {200, _} = update(Transformation2), + + unlink(C), + %% Missing `t' in the payload, so transformation fails + PubRes = publish(C, <<"t/1">>, #{z => <<"does not matter">>}, QoS), + case QoS =:= 0 of + true -> + ?assertMatch(ok, PubRes); + false -> + ?assertMatch( + {error, {disconnected, ?RC_IMPLEMENTATION_SPECIFIC_ERROR, _}}, + PubRes + ) + end, + ?assertNotReceive({publish, _}), + ?assertReceive({disconnected, ?RC_IMPLEMENTATION_SPECIFIC_ERROR, _}), + + ok + end, + [ + {QoS, IsPersistent} + || IsPersistent <- [false, true], + QoS <- [0, 1, 2] + ] + ), + + ok. 
+ +t_crud(_Config) -> + ?assertMatch({200, []}, list()), + + Topic = <<"t/1">>, + Name1 = <<"foo">>, + Transformation1 = transformation(Name1, [dummy_operation()]), + + ?assertMatch({201, #{<<"name">> := Name1}}, insert(Transformation1)), + ?assertMatch({200, #{<<"name">> := Name1}}, lookup(Name1)), + ?assertMatch({200, [#{<<"name">> := Name1}]}, list()), + ?assertIndexOrder([Name1], Topic), + %% Duplicated name + ?assertMatch({400, #{<<"code">> := <<"ALREADY_EXISTS">>}}, insert(Transformation1)), + + Name2 = <<"bar">>, + Transformation2 = transformation(Name2, [dummy_operation()]), + %% Not found + ?assertMatch({404, _}, update(Transformation2)), + ?assertMatch({201, _}, insert(Transformation2)), + ?assertMatch( + {200, [#{<<"name">> := Name1}, #{<<"name">> := Name2}]}, + list() + ), + ?assertIndexOrder([Name1, Name2], Topic), + ?assertMatch({200, #{<<"name">> := Name2}}, lookup(Name2)), + Transformation1b = transformation(Name1, [dummy_operation(), dummy_operation()]), + ?assertMatch({200, _}, update(Transformation1b)), + ?assertMatch({200, #{<<"operations">> := [_, _]}}, lookup(Name1)), + %% order is unchanged + ?assertMatch( + {200, [#{<<"name">> := Name1}, #{<<"name">> := Name2}]}, + list() + ), + ?assertIndexOrder([Name1, Name2], Topic), + + ?assertMatch({204, _}, delete(Name1)), + ?assertMatch({404, _}, lookup(Name1)), + ?assertMatch({200, [#{<<"name">> := Name2}]}, list()), + ?assertIndexOrder([Name2], Topic), + ?assertMatch({404, _}, update(Transformation1)), + + ok. + +%% test the "reorder" API +t_reorder(_Config) -> + %% no transformations to reorder + ?assertMatch({204, _}, reorder([])), + + %% unknown transformation + ?assertMatch( + {400, #{<<"not_found">> := [<<"nonexistent">>]}}, + reorder([<<"nonexistent">>]) + ), + + Topic = <<"t">>, + + Name1 = <<"foo">>, + Transformation1 = transformation(Name1, [dummy_operation()], #{<<"topics">> => Topic}), + {201, _} = insert(Transformation1), + + %% unknown transformation + ?assertMatch( + {400, #{ + %% Note: minirest currently encodes empty lists as a "[]" string... + <<"duplicated">> := "[]", + <<"not_found">> := [<<"nonexistent">>], + <<"not_reordered">> := [Name1] + }}, + reorder([<<"nonexistent">>]) + ), + + %% repeated transformations + ?assertMatch( + {400, #{ + <<"not_found">> := "[]", + <<"duplicated">> := [Name1], + <<"not_reordered">> := "[]" + }}, + reorder([Name1, Name1]) + ), + + %% mixed known, unknown and repeated transformations + ?assertMatch( + {400, #{ + <<"not_found">> := [<<"nonexistent">>], + <<"duplicated">> := [Name1], + %% Note: minirest currently encodes empty lists as a "[]" string... + <<"not_reordered">> := "[]" + }}, + reorder([Name1, <<"nonexistent">>, <<"nonexistent">>, Name1]) + ), + + ?assertMatch({204, _}, reorder([Name1])), + ?assertMatch({200, [#{<<"name">> := Name1}]}, list()), + ?assertIndexOrder([Name1], Topic), + + Name2 = <<"bar">>, + Transformation2 = transformation(Name2, [dummy_operation()], #{<<"topics">> => Topic}), + {201, _} = insert(Transformation2), + Name3 = <<"baz">>, + Transformation3 = transformation(Name3, [dummy_operation()], #{<<"topics">> => Topic}), + {201, _} = insert(Transformation3), + + ?assertMatch( + {200, [#{<<"name">> := Name1}, #{<<"name">> := Name2}, #{<<"name">> := Name3}]}, + list() + ), + ?assertIndexOrder([Name1, Name2, Name3], Topic), + + %% Doesn't mention all transformations + ?assertMatch( + {400, #{ + %% Note: minirest currently encodes empty lists as a "[]" string... 
+ <<"not_found">> := "[]", + <<"not_reordered">> := [_, _] + }}, + reorder([Name1]) + ), + ?assertMatch( + {200, [#{<<"name">> := Name1}, #{<<"name">> := Name2}, #{<<"name">> := Name3}]}, + list() + ), + ?assertIndexOrder([Name1, Name2, Name3], Topic), + + ?assertMatch({204, _}, reorder([Name3, Name2, Name1])), + ?assertMatch( + {200, [#{<<"name">> := Name3}, #{<<"name">> := Name2}, #{<<"name">> := Name1}]}, + list() + ), + ?assertIndexOrder([Name3, Name2, Name1], Topic), + + ?assertMatch({204, _}, reorder([Name1, Name3, Name2])), + ?assertMatch( + {200, [#{<<"name">> := Name1}, #{<<"name">> := Name3}, #{<<"name">> := Name2}]}, + list() + ), + ?assertIndexOrder([Name1, Name3, Name2], Topic), + + ok. + +t_enable_disable_via_update(_Config) -> + Topic = <<"t">>, + + Name1 = <<"foo">>, + AlwaysFailOp = topic_operation(<<"missing.var">>), + Transformation1 = transformation(Name1, [AlwaysFailOp], #{<<"topics">> => Topic}), + + {201, _} = insert(Transformation1#{<<"enable">> => false}), + ?assertIndexOrder([], Topic), + + C = connect(<<"c1">>), + {ok, _, [_]} = emqtt:subscribe(C, Topic), + + ok = publish(C, Topic, #{}), + ?assertReceive({publish, _}), + + {200, _} = update(Transformation1#{<<"enable">> => true}), + ?assertIndexOrder([Name1], Topic), + + ok = publish(C, Topic, #{}), + ?assertNotReceive({publish, _}), + + {200, _} = update(Transformation1#{<<"enable">> => false}), + ?assertIndexOrder([], Topic), + + ok = publish(C, Topic, #{}), + ?assertReceive({publish, _}), + + %% Test index after delete; ensure it's in the index before + {200, _} = update(Transformation1#{<<"enable">> => true}), + ?assertIndexOrder([Name1], Topic), + {204, _} = delete(Name1), + ?assertIndexOrder([], Topic), + + ok. + +t_log_failure_none(_Config) -> + ?check_trace( + begin + Name1 = <<"foo">>, + AlwaysFailOp = topic_operation(<<"missing.var">>), + Transformation1 = transformation( + Name1, + [AlwaysFailOp], + #{<<"log_failure">> => #{<<"level">> => <<"none">>}} + ), + + {201, _} = insert(Transformation1), + + C = connect(<<"c1">>), + {ok, _, [_]} = emqtt:subscribe(C, <<"t/#">>), + + ok = publish(C, <<"t/1">>, #{}), + ?assertNotReceive({publish, _}), + + ok + end, + fun(Trace) -> + ?assertMatch( + [#{log_level := none} | _], ?of_kind(message_transformation_failed, Trace) + ), + ok + end + ), + ok. + +t_action_ignore(_Config) -> + Name1 = <<"foo">>, + ?check_trace( + begin + AlwaysFailOp = topic_operation(<<"missing.var">>), + Transformation1 = transformation( + Name1, + [AlwaysFailOp], + #{<<"failure_action">> => <<"ignore">>} + ), + + {201, _} = insert(Transformation1), + + {201, _} = create_failure_tracing_rule(), + + C = connect(<<"c1">>), + {ok, _, [_]} = emqtt:subscribe(C, <<"t/#">>), + + ok = publish(C, <<"t/1">>, #{}), + ?assertReceive({publish, _}), + + ok + end, + fun(Trace) -> + ?assertMatch([#{action := ignore}], ?of_kind(message_transformation_failed, Trace)), + ok + end + ), + ?assertMatch( + [{_, #{data := #{transformation := Name1, event := 'message.transformation_failed'}}}], + get_traced_failures_from_rule_engine() + ), + ok. 
+ +t_enable_disable_via_api_endpoint(_Config) -> + Topic = <<"t">>, + + Name1 = <<"foo">>, + AlwaysFailOp = topic_operation(<<"missing.var">>), + Transformation1 = transformation(Name1, [AlwaysFailOp], #{<<"topics">> => Topic}), + + {201, _} = insert(Transformation1), + ?assertIndexOrder([Name1], Topic), + + C = connect(<<"c1">>), + {ok, _, [_]} = emqtt:subscribe(C, Topic), + + ok = publish(C, Topic, #{}), + ?assertNotReceive({publish, _}), + + %% already enabled + {204, _} = enable(Name1), + ?assertIndexOrder([Name1], Topic), + ?assertMatch({200, #{<<"enable">> := true}}, lookup(Name1)), + + ok = publish(C, Topic, #{}), + ?assertNotReceive({publish, _}), + + {204, _} = disable(Name1), + ?assertIndexOrder([], Topic), + ?assertMatch({200, #{<<"enable">> := false}}, lookup(Name1)), + + ok = publish(C, Topic, #{}), + ?assertReceive({publish, _}), + + %% already disabled + {204, _} = disable(Name1), + ?assertIndexOrder([], Topic), + ?assertMatch({200, #{<<"enable">> := false}}, lookup(Name1)), + + ok = publish(C, Topic, #{}), + ?assertReceive({publish, _}), + + %% Re-enable + {204, _} = enable(Name1), + ?assertIndexOrder([Name1], Topic), + ?assertMatch({200, #{<<"enable">> := true}}, lookup(Name1)), + + ok = publish(C, Topic, #{}), + ?assertNotReceive({publish, _}), + + ok. + +t_metrics(_Config) -> + %% extra transformation that always passes at the head to check global metrics + Name0 = <<"bar">>, + Operation0 = topic_operation(<<"concat([topic, '/', 't'])">>), + Transformation0 = transformation(Name0, [Operation0]), + {201, _} = insert(Transformation0), + + Name1 = <<"foo">>, + Operation1 = topic_operation(<<"concat([topic, '/', payload.t])">>), + Transformation1 = transformation(Name1, [Operation1]), + + %% Non existent + ?assertMatch({404, _}, get_metrics(Name1)), + ?assertAllMetrics(#{ + <<"messages.dropped">> => 0, + <<"messages.transformation_failed">> => 0, + <<"messages.transformation_succeeded">> => 0 + }), + + {201, _} = insert(Transformation1), + + ?assertMatch( + {200, #{ + <<"metrics">> := + #{ + <<"matched">> := 0, + <<"succeeded">> := 0, + <<"failed">> := 0, + <<"rate">> := _, + <<"rate_last5m">> := _, + <<"rate_max">> := _ + }, + <<"node_metrics">> := + [ + #{ + <<"node">> := _, + <<"metrics">> := #{ + <<"matched">> := 0, + <<"succeeded">> := 0, + <<"failed">> := 0, + <<"rate">> := _, + <<"rate_last5m">> := _, + <<"rate_max">> := _ + } + } + ] + }}, + get_metrics(Name1) + ), + ?assertAllMetrics(#{ + <<"messages.dropped">> => 0, + <<"messages.transformation_failed">> => 0, + <<"messages.transformation_succeeded">> => 0 + }), + + C = connect(<<"c1">>), + {ok, _, [_]} = emqtt:subscribe(C, <<"t/#">>), + + ok = publish(C, <<"t/1">>, #{t => <<"s">>}), + ?assertReceive({publish, #{topic := <<"t/1/t/s">>}}), + + ?retry( + 100, + 10, + ?assertMatch( + {200, #{ + <<"metrics">> := + #{ + <<"matched">> := 1, + <<"succeeded">> := 1, + <<"failed">> := 0 + }, + <<"node_metrics">> := + [ + #{ + <<"node">> := _, + <<"metrics">> := #{ + <<"matched">> := 1, + <<"succeeded">> := 1, + <<"failed">> := 0 + } + } + ] + }}, + get_metrics(Name1) + ) + ), + ?assertAllMetrics(#{ + <<"messages.dropped">> => 0, + <<"messages.transformation_failed">> => 0, + <<"messages.transformation_succeeded">> => 1 + }), + + ok = publish(C, <<"t/1">>, #{}), + ?assertNotReceive({publish, _}), + + ?retry( + 100, + 10, + ?assertMatch( + {200, #{ + <<"metrics">> := + #{ + <<"matched">> := 2, + <<"succeeded">> := 1, + <<"failed">> := 1 + }, + <<"node_metrics">> := + [ + #{ + <<"node">> := _, + <<"metrics">> := #{ + 
<<"matched">> := 2, + <<"succeeded">> := 1, + <<"failed">> := 1 + } + } + ] + }}, + get_metrics(Name1) + ) + ), + ?assertAllMetrics(#{ + <<"messages.dropped">> => 0, + <<"messages.transformation_failed">> => 1, + <<"messages.transformation_succeeded">> => 1 + }), + + ?assertMatch({204, _}, reset_metrics(Name1)), + ?retry( + 100, + 10, + ?assertMatch( + {200, #{ + <<"metrics">> := + #{ + <<"matched">> := 0, + <<"succeeded">> := 0, + <<"failed">> := 0, + <<"rate">> := _, + <<"rate_last5m">> := _, + <<"rate_max">> := _ + }, + <<"node_metrics">> := + [ + #{ + <<"node">> := _, + <<"metrics">> := #{ + <<"matched">> := 0, + <<"succeeded">> := 0, + <<"failed">> := 0, + <<"rate">> := _, + <<"rate_last5m">> := _, + <<"rate_max">> := _ + } + } + ] + }}, + get_metrics(Name1) + ) + ), + ?assertAllMetrics(#{ + <<"messages.dropped">> => 0, + <<"messages.transformation_failed">> => 1, + <<"messages.transformation_succeeded">> => 1 + }), + + %% updating a transformation resets its metrics + ok = publish(C, <<"t/1">>, #{}), + ?assertNotReceive({publish, _}), + ok = publish(C, <<"t/1">>, #{t => <<"u">>}), + ?assertReceive({publish, #{topic := <<"t/1/t/u">>}}), + ?retry( + 100, + 10, + ?assertMatch( + {200, #{ + <<"metrics">> := + #{ + <<"matched">> := 2, + <<"succeeded">> := 1, + <<"failed">> := 1 + }, + <<"node_metrics">> := + [ + #{ + <<"node">> := _, + <<"metrics">> := #{ + <<"matched">> := 2, + <<"succeeded">> := 1, + <<"failed">> := 1 + } + } + ] + }}, + get_metrics(Name1) + ) + ), + {200, _} = update(Transformation1), + ?retry( + 100, + 10, + ?assertMatch( + {200, #{ + <<"metrics">> := + #{ + <<"matched">> := 0, + <<"succeeded">> := 0, + <<"failed">> := 0 + }, + <<"node_metrics">> := + [ + #{ + <<"node">> := _, + <<"metrics">> := #{ + <<"matched">> := 0, + <<"succeeded">> := 0, + <<"failed">> := 0 + } + } + ] + }}, + get_metrics(Name1) + ) + ), + + assert_monitor_metrics(), + + ok. + +%% Checks that multiple transformations are run in order. +t_multiple_transformations(_Config) -> + {201, _} = create_failure_tracing_rule(), + + Name1 = <<"foo">>, + Operation1 = topic_operation(<<"concat([topic, '/', payload.x])">>), + Transformation1 = transformation(Name1, [Operation1], #{<<"failure_action">> => <<"drop">>}), + {201, _} = insert(Transformation1), + + Name2 = <<"bar">>, + Operation2 = topic_operation(<<"concat([topic, '/', payload.y])">>), + Transformation2 = transformation(Name2, [Operation2], #{ + <<"failure_action">> => <<"disconnect">> + }), + {201, _} = insert(Transformation2), + + C = connect(<<"c1">>), + {ok, _, [_]} = emqtt:subscribe(C, <<"t/#">>), + + ok = publish(C, <<"t/0">>, #{x => 1, y => 2}), + ?assertReceive({publish, #{topic := <<"t/0/1/2">>}}), + %% Barred by `Name1' (missing var) + ok = publish(C, <<"t/0">>, #{y => 2}), + ?assertNotReceive({publish, _}), + ?assertNotReceive({disconnected, _, _}), + %% Barred by `Name2' (missing var) + unlink(C), + ok = publish(C, <<"t/1">>, #{x => 1}), + ?assertNotReceive({publish, _}), + ?assertReceive({disconnected, ?RC_IMPLEMENTATION_SPECIFIC_ERROR, _}), + + ?assertMatch( + [ + {_, #{data := #{transformation := Name1, event := 'message.transformation_failed'}}}, + {_, #{data := #{transformation := Name2, event := 'message.transformation_failed'}}} + ], + get_traced_failures_from_rule_engine() + ), + + ok. 
+ +t_non_existent_serde(_Config) -> + SerdeName = <<"idontexist">>, + Name1 = <<"foo">>, + Operation1 = dummy_operation(), + PayloadSerde = #{<<"type">> => <<"avro">>, <<"schema">> => SerdeName}, + Transformation1 = transformation(Name1, [Operation1], #{ + <<"payload_decoder">> => PayloadSerde, + <<"payload_encoder">> => PayloadSerde + }), + {201, _} = insert(Transformation1), + + C = connect(<<"c1">>), + {ok, _, [_]} = emqtt:subscribe(C, <<"t/#">>), + + ok = publish(C, <<"t/1">>, #{i => 10, s => <<"s">>}), + ?assertNotReceive({publish, _}), + + ok. + +t_avro(_Config) -> + SerdeName = <<"myserde">>, + avro_create_serde(SerdeName), + + Name1 = <<"foo">>, + Operation1 = operation(<<"payload.s">>, <<"concat(['hello'])">>), + PayloadSerde = #{<<"type">> => <<"avro">>, <<"schema">> => SerdeName}, + Transformation1 = transformation(Name1, [Operation1], #{ + <<"payload_decoder">> => PayloadSerde, + <<"payload_encoder">> => PayloadSerde + }), + {201, _} = insert(Transformation1), + + C = connect(<<"c1">>), + {ok, _, [_]} = emqtt:subscribe(C, <<"t/#">>), + + lists:foreach( + fun(Payload) -> + ok = publish(C, <<"t/1">>, {raw, Payload}), + ?assertReceive({publish, _}) + end, + avro_valid_payloads(SerdeName) + ), + lists:foreach( + fun(Payload) -> + ok = publish(C, <<"t/1">>, {raw, Payload}), + ?assertNotReceive({publish, _}) + end, + avro_invalid_payloads() + ), + %% Transformation that produces invalid output according to schema + Operation2 = operation(<<"payload.i">>, <<"concat(['invalid'])">>), + Transformation2 = transformation(Name1, [Operation2], #{ + <<"payload_decoder">> => PayloadSerde, + <<"payload_encoder">> => PayloadSerde + }), + {200, _} = update(Transformation2), + lists:foreach( + fun(Payload) -> + ok = publish(C, <<"t/1">>, {raw, Payload}), + ?assertNotReceive({publish, _}) + end, + avro_valid_payloads(SerdeName) + ), + + ok. 
+ +t_protobuf(_Config) -> + SerdeName = <<"myserde">>, + MessageType = <<"Person">>, + protobuf_create_serde(SerdeName), + + Name1 = <<"foo">>, + PayloadSerde = #{ + <<"type">> => <<"protobuf">>, + <<"schema">> => SerdeName, + <<"message_type">> => MessageType + }, + Operation1 = operation(<<"payload.name">>, <<"concat(['hello'])">>), + Transformation1 = transformation(Name1, [Operation1], #{ + <<"payload_decoder">> => PayloadSerde, + <<"payload_encoder">> => PayloadSerde + }), + {201, _} = insert(Transformation1), + + C = connect(<<"c1">>), + {ok, _, [_]} = emqtt:subscribe(C, <<"t/#">>), + + lists:foreach( + fun(Payload) -> + ok = publish(C, <<"t/1">>, {raw, Payload}), + ?assertReceive({publish, _}) + end, + protobuf_valid_payloads(SerdeName, MessageType) + ), + lists:foreach( + fun(Payload) -> + ok = publish(C, <<"t/1">>, {raw, Payload}), + ?assertNotReceive({publish, _}) + end, + protobuf_invalid_payloads() + ), + + %% Bad config: unknown message name + BadPayloadSerde = PayloadSerde#{<<"message_type">> := <<"idontexist">>}, + Transformation2 = transformation(Name1, [Operation1], #{ + <<"payload_decoder">> => BadPayloadSerde, + <<"payload_encoder">> => BadPayloadSerde + }), + {200, _} = update(Transformation2), + + lists:foreach( + fun(Payload) -> + ok = publish(C, <<"t/1">>, {raw, Payload}), + ?assertNotReceive({publish, _}) + end, + protobuf_valid_payloads(SerdeName, MessageType) + ), + + %% Transformation that produces invalid output according to schema + Operation2 = operation(<<"payload.id">>, <<"concat(['invalid'])">>), + Transformation3 = transformation(Name1, [Operation2], #{ + <<"payload_decoder">> => PayloadSerde, + <<"payload_encoder">> => PayloadSerde + }), + {200, _} = update(Transformation3), + lists:foreach( + fun(Payload) -> + ok = publish(C, <<"t/1">>, {raw, Payload}), + ?assertNotReceive({publish, _}) + end, + protobuf_valid_payloads(SerdeName, MessageType) + ), + + ok. + +%% Tests that restoring a backup config works. +%% * Existing transformations (identified by `name') are left untouched. +%% * No transformations are removed. +%% * New transformations are appended to the existing list. +%% * Existing transformations are not reordered. +t_import_config_backup(_Config) -> + %% Setup backup file. + + %% Will clash with existing transformation; different order. + Name2 = <<"2">>, + Operation2B = topic_operation(<<"concat([topic, '/', 2, 'b'])">>), + Transformation2B = transformation(Name2, [Operation2B]), + {201, _} = insert(Transformation2B), + + %% Will clash with existing transformation. + Name1 = <<"1">>, + Operation1B = topic_operation(<<"concat([topic, '/', 1, 'b'])">>), + Transformation1B = transformation(Name1, [Operation1B]), + {201, _} = insert(Transformation1B), + + %% New transformation; should be appended + Name4 = <<"4">>, + Operation4 = topic_operation(<<"concat([topic, '/', 4])">>), + Transformation4 = transformation(Name4, [Operation4]), + {201, _} = insert(Transformation4), + + {200, #{<<"filename">> := BackupName}} = export_backup(), + + %% Clear this setup and pretend we have other data to begin with. 
+ clear_all_transformations(), + {200, []} = list(), + + Operation1A = topic_operation(<<"concat([topic, '/', 1, 'a'])">>), + Transformation1A = transformation(Name1, [Operation1A]), + {201, _} = insert(Transformation1A), + + Operation2A = topic_operation(<<"concat([topic, '/', 2, 'a'])">>), + Transformation2A = transformation(Name2, [Operation2A]), + {201, _} = insert(Transformation2A), + + Name3 = <<"3">>, + Operation3 = topic_operation(<<"concat([topic, '/', 3])">>), + Transformation3 = transformation(Name3, [Operation3]), + {201, _} = insert(Transformation3), + + {204, _} = import_backup(BackupName), + + ExpectedTransformations = [ + Transformation1A, + Transformation2A, + Transformation3, + Transformation4 + ], + ?assertMatch({200, ExpectedTransformations}, list(), #{expected => ExpectedTransformations}), + ?assertIndexOrder([Name1, Name2, Name3, Name4], <<"t/a">>), + + ok. + +%% Tests that importing configurations from the CLI interface work. +t_load_config(_Config) -> + Name1 = <<"1">>, + Operation1A = topic_operation(<<"concat([topic, '/', 1, 'a'])">>), + Transformation1A = transformation(Name1, [Operation1A]), + {201, _} = insert(Transformation1A), + + Name2 = <<"2">>, + Operation2A = topic_operation(<<"concat([topic, '/', 2, 'a'])">>), + Transformation2A = transformation(Name2, [Operation2A]), + {201, _} = insert(Transformation2A), + + Name3 = <<"3">>, + Operation3 = topic_operation(<<"concat([topic, '/', 3])">>), + Transformation3 = transformation(Name3, [Operation3]), + {201, _} = insert(Transformation3), + + %% Config to load + %% Will replace existing config + Operation2B = topic_operation(<<"concat([topic, '/', 2, 'b'])">>), + Transformation2B = transformation(Name2, [Operation2B]), + + %% Will replace existing config + Operation1B = topic_operation(<<"concat([topic, '/', 1, 'b'])">>), + Transformation1B = transformation(Name1, [Operation1B]), + + %% New transformation; should be appended + Name4 = <<"4">>, + Operation4 = topic_operation(<<"concat([topic, '/', 4])">>), + Transformation4 = transformation(Name4, [Operation4]), + + ConfRootBin = <<"message_transformation">>, + ConfigToLoad1 = #{ + ConfRootBin => #{ + <<"transformations">> => [Transformation2B, Transformation1B, Transformation4] + } + }, + ConfigToLoadBin1 = iolist_to_binary(hocon_pp:do(ConfigToLoad1, #{})), + ?assertMatch(ok, emqx_conf_cli:load_config(ConfigToLoadBin1, #{mode => merge})), + ExpectedTransformations1 = [ + Transformation1A, + Transformation2A, + Transformation3, + Transformation4 + ], + ?assertMatch( + #{ + ConfRootBin := #{ + <<"transformations">> := ExpectedTransformations1 + } + }, + emqx_conf_cli:get_config(<<"message_transformation">>) + ), + ?assertIndexOrder([Name1, Name2, Name3, Name4], <<"t/a">>), + + %% Replace + Operation4B = topic_operation(<<"concat([topic, '/', 4, 'b'])">>), + Transformation4B = transformation(Name4, [Operation4B]), + + Name5 = <<"5">>, + Operation5 = topic_operation(<<"concat([topic, '/', 5])">>), + Transformation5 = transformation(Name5, [Operation5]), + + ConfigToLoad2 = #{ + ConfRootBin => #{ + <<"transformations">> => [ + Transformation4B, + Transformation3, + Transformation5 + ] + } + }, + ConfigToLoadBin2 = iolist_to_binary(hocon_pp:do(ConfigToLoad2, #{})), + ?assertMatch(ok, emqx_conf_cli:load_config(ConfigToLoadBin2, #{mode => replace})), + ExpectedTransformations2 = [ + Transformation4B, + Transformation3, + Transformation5 + ], + ?assertMatch( + #{ + ConfRootBin := #{ + <<"transformations">> := ExpectedTransformations2 + } + }, + 
emqx_conf_cli:get_config(<<"message_transformation">>) + ), + ?assertIndexOrder([Name4, Name3, Name5], <<"t/a">>), + + ok. + +%% We need to verify that the final `payload' output by the transformations is a binary. +t_final_payload_must_be_binary(_Config) -> + ?check_trace( + begin + Name = <<"foo">>, + Operations = [operation(<<"payload.hello">>, <<"concat(['world'])">>)], + Transformation = transformation(Name, Operations, #{ + <<"payload_decoder">> => #{<<"type">> => <<"json">>}, + <<"payload_encoder">> => #{<<"type">> => <<"none">>} + }), + {201, _} = insert(Transformation), + + C = connect(<<"c1">>), + {ok, _, [_]} = emqtt:subscribe(C, <<"t/#">>), + ok = publish(C, <<"t/1">>, #{x => 1, y => true}), + ?assertNotReceive({publish, _}), + ok + end, + fun(Trace) -> + ?assertMatch( + [#{message := "transformation_bad_encoding"}], + ?of_kind(message_transformation_failed, Trace) + ), + ok + end + ), + ok. + +%% Smoke test for the `json_encode' and `json_decode' BIFs. +t_json_encode_decode_smoke_test(_Config) -> + ?check_trace( + begin + Name = <<"foo">>, + Operations = [ + operation( + <<"payload">>, + <<"json_decode('{\"hello\":\"world\"}')">> + ), + operation( + <<"payload">>, + <<"json_encode(maps.put('hello', 'planet', payload))">> + ) + ], + Transformation = transformation(Name, Operations, #{ + <<"payload_decoder">> => #{<<"type">> => <<"none">>}, + <<"payload_encoder">> => #{<<"type">> => <<"none">>} + }), + {201, _} = insert(Transformation), + + C = connect(<<"c1">>), + {ok, _, [_]} = emqtt:subscribe(C, <<"t/#">>), + ok = publish(C, <<"t/1">>, #{}), + ?assertReceive({publish, #{payload := <<"{\"hello\":\"planet\"}">>}}), + ok + end, + [] + ), + ok. diff --git a/apps/emqx_message_transformation/test/emqx_message_transformation_tests.erl b/apps/emqx_message_transformation/test/emqx_message_transformation_tests.erl new file mode 100644 index 000000000..3e86e3862 --- /dev/null +++ b/apps/emqx_message_transformation/test/emqx_message_transformation_tests.erl @@ -0,0 +1,174 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- +-module(emqx_message_transformation_tests). + +-include_lib("eunit/include/eunit.hrl"). + +-define(TRANSFORMATIONS_PATH, "message_transformation.transformations"). + +%%------------------------------------------------------------------------------ +%% Helper fns +%%------------------------------------------------------------------------------ + +bin(X) -> emqx_utils_conv:bin(X). + +parse_and_check(InnerConfigs) -> + RootBin = <<"message_transformation">>, + InnerBin = <<"transformations">>, + RawConf = #{RootBin => #{InnerBin => InnerConfigs}}, + #{RootBin := #{InnerBin := Checked}} = hocon_tconf:check_plain( + emqx_message_transformation_schema, + RawConf, + #{ + required => false, + atom_key => false, + make_serializable => false + } + ), + Checked. + +transformation(Name, Operations) -> + transformation(Name, Operations, _Overrides = #{}). 
+ +transformation(Name, Operations0, Overrides) -> + Operations = lists:map(fun normalize_operation/1, Operations0), + Default = #{ + <<"tags">> => [<<"some">>, <<"tags">>], + <<"description">> => <<"my transformation">>, + <<"enable">> => true, + <<"name">> => Name, + <<"topics">> => [<<"t/+">>], + <<"failure_action">> => <<"drop">>, + <<"log_failure">> => #{<<"level">> => <<"warning">>}, + <<"payload_decoder">> => #{<<"type">> => <<"json">>}, + <<"payload_encoder">> => #{<<"type">> => <<"json">>}, + <<"operations">> => Operations + }, + emqx_utils_maps:deep_merge(Default, Overrides). + +normalize_operation({K, V}) -> + #{<<"key">> => bin(K), <<"value">> => bin(V)}. + +dummy_operation() -> + topic_operation(<<"concat([topic, '/', payload.t])">>). + +topic_operation(VariformExpr) -> + operation(topic, VariformExpr). + +operation(Key, VariformExpr) -> + {Key, VariformExpr}. + +%%------------------------------------------------------------------------------ +%% Test cases +%%------------------------------------------------------------------------------ + +schema_test_() -> + [ + {"topics is always a list 1", + ?_assertMatch( + [#{<<"topics">> := [<<"t/1">>]}], + parse_and_check([ + transformation( + <<"foo">>, + [dummy_operation()], + #{<<"topics">> => <<"t/1">>} + ) + ]) + )}, + {"topics is always a list 2", + ?_assertMatch( + [#{<<"topics">> := [<<"t/1">>]}], + parse_and_check([ + transformation( + <<"foo">>, + [dummy_operation()], + #{<<"topics">> => [<<"t/1">>]} + ) + ]) + )}, + {"names are unique", + ?_assertThrow( + {_Schema, [ + #{ + reason := <<"duplicated name:", _/binary>>, + path := ?TRANSFORMATIONS_PATH, + kind := validation_error + } + ]}, + parse_and_check([ + transformation(<<"foo">>, [dummy_operation()]), + transformation(<<"foo">>, [dummy_operation()]) + ]) + )}, + {"operations must be non-empty", + ?_assertThrow( + {_Schema, [ + #{ + reason := <<"at least one operation must be defined">>, + kind := validation_error + } + ]}, + parse_and_check([ + transformation( + <<"foo">>, + [] + ) + ]) + )}, + {"bogus check type: decoder", + ?_assertThrow( + {_Schema, [ + #{ + expected := <<"none", _/binary>>, + kind := validation_error, + field_name := type + } + ]}, + parse_and_check([ + transformation(<<"foo">>, [dummy_operation()], #{ + <<"payload_decoder">> => #{<<"type">> => <<"foo">>} + }) + ]) + )}, + {"bogus check type: encoder", + ?_assertThrow( + {_Schema, [ + #{ + expected := <<"none", _/binary>>, + kind := validation_error, + field_name := type + } + ]}, + parse_and_check([ + transformation(<<"foo">>, [dummy_operation()], #{ + <<"payload_encoder">> => #{<<"type">> => <<"foo">>} + }) + ]) + )} + ]. + +invalid_names_test_() -> + [ + {InvalidName, + ?_assertThrow( + {_Schema, [ + #{ + kind := validation_error, + path := "message_transformation.transformations.1.name" + } + ]}, + parse_and_check([transformation(InvalidName, [dummy_operation()])]) + )} + || InvalidName <- [ + <<"">>, + <<"_name">>, + <<"name$">>, + <<"name!">>, + <<"some name">>, + <<"nãme"/utf8>>, + <<"test_哈哈"/utf8>>, + %% long name + binary:copy(<<"a">>, 256) + ] + ]. diff --git a/apps/emqx_prometheus/include/emqx_prometheus.hrl b/apps/emqx_prometheus/include/emqx_prometheus.hrl index 32ba2a8e2..659e8b554 100644 --- a/apps/emqx_prometheus/include/emqx_prometheus.hrl +++ b/apps/emqx_prometheus/include/emqx_prometheus.hrl @@ -24,10 +24,13 @@ -define(PROMETHEUS_DATA_INTEGRATION_COLLECTOR, emqx_prometheus_data_integration). -define(PROMETHEUS_SCHEMA_VALIDATION_REGISTRY, '/prometheus/schema_validation'). 
-define(PROMETHEUS_SCHEMA_VALIDATION_COLLECTOR, emqx_prometheus_schema_validation). +-define(PROMETHEUS_MESSAGE_TRANSFORMATION_REGISTRY, '/prometheus/message_transformation'). +-define(PROMETHEUS_MESSAGE_TRANSFORMATION_COLLECTOR, emqx_prometheus_message_transformation). -if(?EMQX_RELEASE_EDITION == ee). -define(PROMETHEUS_EE_REGISTRIES, [ - ?PROMETHEUS_SCHEMA_VALIDATION_REGISTRY + ?PROMETHEUS_SCHEMA_VALIDATION_REGISTRY, + ?PROMETHEUS_MESSAGE_TRANSFORMATION_REGISTRY ]). %% ELSE if(?EMQX_RELEASE_EDITION == ee). -else. diff --git a/apps/emqx_prometheus/src/emqx_prometheus.app.src b/apps/emqx_prometheus/src/emqx_prometheus.app.src index 6666e5172..f571dcce6 100644 --- a/apps/emqx_prometheus/src/emqx_prometheus.app.src +++ b/apps/emqx_prometheus/src/emqx_prometheus.app.src @@ -2,7 +2,7 @@ {application, emqx_prometheus, [ {description, "Prometheus for EMQX"}, % strict semver, bump manually! - {vsn, "5.2.0"}, + {vsn, "5.2.1"}, {modules, []}, {registered, [emqx_prometheus_sup]}, {applications, [kernel, stdlib, prometheus, emqx, emqx_auth, emqx_resource, emqx_management]}, diff --git a/apps/emqx_prometheus/src/emqx_prometheus_api.erl b/apps/emqx_prometheus/src/emqx_prometheus_api.erl index f9e499d82..3b796bceb 100644 --- a/apps/emqx_prometheus/src/emqx_prometheus_api.erl +++ b/apps/emqx_prometheus/src/emqx_prometheus_api.erl @@ -49,7 +49,8 @@ stats/2, auth/2, data_integration/2, - schema_validation/2 + schema_validation/2, + message_transformation/2 ]). -export([lookup_from_local_nodes/3]). @@ -73,7 +74,10 @@ paths() -> -if(?EMQX_RELEASE_EDITION == ee). paths_ee() -> - ["/prometheus/schema_validation"]. + [ + "/prometheus/schema_validation", + "/prometheus/message_transformation" + ]. %% ELSE if(?EMQX_RELEASE_EDITION == ee). -else. paths_ee() -> @@ -151,6 +155,19 @@ schema("/prometheus/schema_validation") -> responses => #{200 => prometheus_data_schema()} } + }; +schema("/prometheus/message_transformation") -> + #{ + 'operationId' => message_transformation, + get => + #{ + description => ?DESC(get_prom_message_transformation), + tags => ?TAGS, + parameters => [ref(mode)], + security => security(), + responses => + #{200 => prometheus_data_schema()} + } }. security() -> @@ -226,6 +243,9 @@ data_integration(get, #{headers := Headers, query_string := Qs}) -> schema_validation(get, #{headers := Headers, query_string := Qs}) -> collect(emqx_prometheus_schema_validation, collect_opts(Headers, Qs)). +message_transformation(get, #{headers := Headers, query_string := Qs}) -> + collect(emqx_prometheus_message_transformation, collect_opts(Headers, Qs)). + %%-------------------------------------------------------------------- %% Internal funcs %%-------------------------------------------------------------------- diff --git a/apps/emqx_prometheus/src/emqx_prometheus_message_transformation.erl b/apps/emqx_prometheus/src/emqx_prometheus_message_transformation.erl new file mode 100644 index 000000000..8b03a9e01 --- /dev/null +++ b/apps/emqx_prometheus/src/emqx_prometheus_message_transformation.erl @@ -0,0 +1,222 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- + +-module(emqx_prometheus_message_transformation). + +-if(?EMQX_RELEASE_EDITION == ee). +%% for bpapi +-behaviour(emqx_prometheus_cluster). + +%% Please don't remove this attribute, prometheus uses it to +%% automatically register collectors. +-behaviour(prometheus_collector). 
+ +-include("emqx_prometheus.hrl"). +-include_lib("prometheus/include/prometheus.hrl"). + +-import( + prometheus_model_helpers, + [ + create_mf/5, + gauge_metrics/1, + counter_metrics/1 + ] +). + +-export([ + deregister_cleanup/1, + collect_mf/2, + collect_metrics/2 +]). + +%% `emqx_prometheus' API +-export([collect/1]). + +%% `emqx_prometheus_cluster' API +-export([ + fetch_from_local_node/1, + fetch_cluster_consistented_data/0, + aggre_or_zip_init_acc/0, + logic_sum_metrics/0 +]). + +%%-------------------------------------------------------------------- +%% Type definitions +%%-------------------------------------------------------------------- + +-define(MG(K, MAP), maps:get(K, MAP)). +-define(MG0(K, MAP), maps:get(K, MAP, 0)). + +-define(metrics_data_key, message_transformation_metrics_data). + +-define(key_enabled, emqx_message_transformation_enable). +-define(key_matched, emqx_message_transformation_matched). +-define(key_failed, emqx_message_transformation_failed). +-define(key_succeeded, emqx_message_transformation_succeeded). + +%%-------------------------------------------------------------------- +%% `emqx_prometheus_cluster' API +%%-------------------------------------------------------------------- + +fetch_from_local_node(Mode) -> + Validations = emqx_message_transformation:list(), + {node(), #{ + ?metrics_data_key => to_validation_data(Mode, Validations) + }}. + +fetch_cluster_consistented_data() -> + #{}. + +aggre_or_zip_init_acc() -> + #{ + ?metrics_data_key => maps:from_keys(message_transformation_metric(names), []) + }. + +logic_sum_metrics() -> + [ + ?key_enabled + ]. + +%%-------------------------------------------------------------------- +%% Collector API +%%-------------------------------------------------------------------- + +%% @private +deregister_cleanup(_) -> ok. + +%% @private +-spec collect_mf(_Registry, Callback) -> ok when + _Registry :: prometheus_registry:registry(), + Callback :: prometheus_collector:collect_mf_callback(). +collect_mf(?PROMETHEUS_MESSAGE_TRANSFORMATION_REGISTRY, Callback) -> + RawData = emqx_prometheus_cluster:raw_data(?MODULE, ?GET_PROM_DATA_MODE()), + + %% Schema Validation Metrics + RuleMetricDs = ?MG(?metrics_data_key, RawData), + ok = add_collect_family(Callback, message_transformation_metrics_meta(), RuleMetricDs), + + ok; +collect_mf(_, _) -> + ok. + +%% @private +collect(<<"json">>) -> + RawData = emqx_prometheus_cluster:raw_data(?MODULE, ?GET_PROM_DATA_MODE()), + #{ + message_transformations => collect_json_data(?MG(?metrics_data_key, RawData)) + }; +collect(<<"prometheus">>) -> + prometheus_text_format:format(?PROMETHEUS_MESSAGE_TRANSFORMATION_REGISTRY). + +%%==================== +%% API Helpers + +add_collect_family(Callback, MetricWithType, Data) -> + _ = [add_collect_family(Name, Data, Callback, Type) || {Name, Type} <- MetricWithType], + ok. + +add_collect_family(Name, Data, Callback, Type) -> + %% TODO: help document from Name + Callback(create_mf(Name, _Help = <<"">>, Type, ?MODULE, Data)). + +collect_metrics(Name, Metrics) -> + collect_mv(Name, Metrics). 
+ +%%-------------------------------------------------------------------- +%% Collector +%%-------------------------------------------------------------------- + +%%======================================== +%% Schema Validation Metrics +%%======================================== +collect_mv(K = ?key_enabled, Data) -> gauge_metrics(?MG(K, Data)); +collect_mv(K = ?key_matched, Data) -> counter_metrics(?MG(K, Data)); +collect_mv(K = ?key_failed, Data) -> counter_metrics(?MG(K, Data)); +collect_mv(K = ?key_succeeded, Data) -> counter_metrics(?MG(K, Data)). + +%%-------------------------------------------------------------------- +%% Internal functions +%%-------------------------------------------------------------------- + +%%======================================== +%% Schema Validation Metrics +%%======================================== + +message_transformation_metrics_meta() -> + [ + {?key_enabled, gauge}, + {?key_matched, counter}, + {?key_failed, counter}, + {?key_succeeded, counter} + ]. + +message_transformation_metric(names) -> + emqx_prometheus_cluster:metric_names(message_transformation_metrics_meta()). + +to_validation_data(Mode, Validations) -> + lists:foldl( + fun(#{name := Name} = Validation, Acc) -> + merge_acc_with_validations(Mode, Name, get_validation_metrics(Validation), Acc) + end, + maps:from_keys(message_transformation_metric(names), []), + Validations + ). + +merge_acc_with_validations(Mode, Id, ValidationMetrics, PointsAcc) -> + maps:fold( + fun(K, V, AccIn) -> + AccIn#{K => [validation_point(Mode, Id, V) | ?MG(K, AccIn)]} + end, + PointsAcc, + ValidationMetrics + ). + +validation_point(Mode, Name, V) -> + {with_node_label(Mode, [{validation_name, Name}]), V}. + +get_validation_metrics(#{name := Name, enable := Enabled} = _Rule) -> + #{counters := Counters} = emqx_message_transformation_registry:get_metrics(Name), + #{ + ?key_enabled => emqx_prometheus_cluster:boolean_to_number(Enabled), + ?key_matched => ?MG0('matched', Counters), + ?key_failed => ?MG0('failed', Counters), + ?key_succeeded => ?MG0('succeeded', Counters) + }. + +%%-------------------------------------------------------------------- +%% Collect functions +%%-------------------------------------------------------------------- + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +%% merge / zip formatting funcs for type `application/json` + +collect_json_data(Data) -> + emqx_prometheus_cluster:collect_json_data(Data, fun zip_json_message_transformation_metrics/3). + +zip_json_message_transformation_metrics(Key, Points, [] = _AccIn) -> + lists:foldl( + fun({Labels, Metric}, AccIn2) -> + LabelsKVMap = maps:from_list(Labels), + Point = LabelsKVMap#{Key => Metric}, + [Point | AccIn2] + end, + [], + Points + ); +zip_json_message_transformation_metrics(Key, Points, AllResultsAcc) -> + ThisKeyResult = lists:foldl(emqx_prometheus_cluster:point_to_map_fun(Key), [], Points), + lists:zipwith(fun maps:merge/2, AllResultsAcc, ThisKeyResult). + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +%% Helper funcs + +with_node_label(?PROM_DATA_MODE__NODE, Labels) -> + Labels; +with_node_label(?PROM_DATA_MODE__ALL_NODES_AGGREGATED, Labels) -> + Labels; +with_node_label(?PROM_DATA_MODE__ALL_NODES_UNAGGREGATED, Labels) -> + [{node, node()} | Labels]. + +%% END if(?EMQX_RELEASE_EDITION == ee). +-endif. 
diff --git a/apps/emqx_prometheus/test/emqx_prometheus_data_SUITE.erl b/apps/emqx_prometheus/test/emqx_prometheus_data_SUITE.erl index 115279852..ce6eeb71d 100644 --- a/apps/emqx_prometheus/test/emqx_prometheus_data_SUITE.erl +++ b/apps/emqx_prometheus/test/emqx_prometheus_data_SUITE.erl @@ -82,7 +82,8 @@ all() -> {group, '/prometheus/stats'}, {group, '/prometheus/auth'}, {group, '/prometheus/data_integration'}, - [{group, '/prometheus/schema_validation'} || emqx_release:edition() == ee] + [{group, '/prometheus/schema_validation'} || emqx_release:edition() == ee], + [{group, '/prometheus/message_transformation'} || emqx_release:edition() == ee] ]). groups() -> @@ -101,6 +102,7 @@ groups() -> {'/prometheus/auth', ModeGroups}, {'/prometheus/data_integration', ModeGroups}, {'/prometheus/schema_validation', ModeGroups}, + {'/prometheus/message_transformation', ModeGroups}, {?PROM_DATA_MODE__NODE, AcceptGroups}, {?PROM_DATA_MODE__ALL_NODES_AGGREGATED, AcceptGroups}, {?PROM_DATA_MODE__ALL_NODES_UNAGGREGATED, AcceptGroups}, @@ -136,6 +138,10 @@ init_per_suite(Config) -> {emqx_schema_validation, #{config => schema_validation_config()}} || emqx_release:edition() == ee ], + [ + {emqx_message_transformation, #{config => message_transformation_config()}} + || emqx_release:edition() == ee + ], {emqx_prometheus, emqx_prometheus_SUITE:legacy_conf_default()} ]), #{ @@ -168,6 +174,8 @@ init_per_group('/prometheus/data_integration', Config) -> [{module, emqx_prometheus_data_integration} | Config]; init_per_group('/prometheus/schema_validation', Config) -> [{module, emqx_prometheus_schema_validation} | Config]; +init_per_group('/prometheus/message_transformation', Config) -> + [{module, emqx_prometheus_message_transformation} | Config]; init_per_group(?PROM_DATA_MODE__NODE, Config) -> [{mode, ?PROM_DATA_MODE__NODE} | Config]; init_per_group(?PROM_DATA_MODE__ALL_NODES_AGGREGATED, Config) -> @@ -357,6 +365,8 @@ metric_meta(<<"emqx_action_", _Tail/binary>>) -> ?meta(1, 1, 2); metric_meta(<<"emqx_connector_", _Tail/binary>>) -> ?meta(1, 1, 2); %% `/prometheus/schema_validation` metric_meta(<<"emqx_schema_validation_", _Tail/binary>>) -> ?meta(1, 1, 2); +%% `/prometheus/message_transformation` +metric_meta(<<"emqx_message_transformation_", _Tail/binary>>) -> ?meta(1, 1, 2); %% normal emqx metrics metric_meta(<<"emqx_", _Tail/binary>>) -> ?meta(0, 0, 1); metric_meta(_) -> #{}. @@ -840,6 +850,23 @@ assert_json_data__schema_validations(Ms, _) -> Ms ). +assert_json_data__message_transformations(Ms, _) -> + lists:foreach( + fun(M) -> + ?assertMatch( + #{ + validation_name := _, + emqx_message_transformation_enable := _, + emqx_message_transformation_matched := _, + emqx_message_transformation_failed := _, + emqx_message_transformation_succeeded := _ + }, + M + ) + end, + Ms + ). + schema_validation_config() -> Validation = #{ <<"enable">> => true, @@ -860,5 +887,24 @@ schema_validation_config() -> } }. +message_transformation_config() -> + Transformation = #{ + <<"enable">> => true, + <<"name">> => <<"my_transformation">>, + <<"topics">> => [<<"t/#">>], + <<"failure_action">> => <<"drop">>, + <<"operations">> => [ + #{ + <<"key">> => <<"topic">>, + <<"value">> => <<"concat([topic, '/', payload.t])">> + } + ] + }, + #{ + <<"message_transformation">> => #{ + <<"transformations">> => [Transformation] + } + }. + stop_apps(Apps) -> lists:foreach(fun application:stop/1, Apps). 
diff --git a/apps/emqx_rule_engine/src/emqx_rule_events.erl b/apps/emqx_rule_engine/src/emqx_rule_events.erl
index 4f0214a9d..482bf8c20 100644
--- a/apps/emqx_rule_engine/src/emqx_rule_events.erl
+++ b/apps/emqx_rule_engine/src/emqx_rule_events.erl
@@ -45,6 +45,7 @@
     on_session_unsubscribed/4,
     on_message_publish/2,
     on_message_dropped/4,
+    on_message_transformation_failed/3,
     on_schema_validation_failed/3,
     on_message_delivered/3,
     on_message_acked/3,
@@ -80,6 +81,7 @@ event_names() ->
         'message.delivered',
         'message.acked',
         'message.dropped',
+        'message.transformation_failed',
         'schema.validation_failed',
         'delivery.dropped'
     ].
@@ -96,6 +98,7 @@ event_topics_enum() ->
         '$events/message_delivered',
         '$events/message_acked',
         '$events/message_dropped',
+        '$events/message_transformation_failed',
         '$events/schema_validation_failed',
         '$events/delivery_dropped'
         % '$events/message_publish' % not possible to use in SELECT FROM
@@ -237,6 +240,19 @@ on_message_dropped(Message, _, Reason, Conf) ->
     end,
     {ok, Message}.
 
+on_message_transformation_failed(Message, TransformationContext, Conf) ->
+    case ignore_sys_message(Message) of
+        true ->
+            ok;
+        false ->
+            apply_event(
+                'message.transformation_failed',
+                fun() -> eventmsg_transformation_failed(Message, TransformationContext) end,
+                Conf
+            )
+    end,
+    {ok, Message}.
+
 on_schema_validation_failed(Message, ValidationContext, Conf) ->
     case ignore_sys_message(Message) of
         true ->
@@ -535,6 +551,38 @@ eventmsg_dropped(
         #{headers => Headers}
     ).
 
+eventmsg_transformation_failed(
+    Message = #message{
+        id = Id,
+        from = ClientId,
+        qos = QoS,
+        flags = Flags,
+        topic = Topic,
+        headers = Headers,
+        payload = Payload,
+        timestamp = Timestamp
+    },
+    TransformationContext
+) ->
+    #{name := TransformationName} = TransformationContext,
+    with_basic_columns(
+        'message.transformation_failed',
+        #{
+            id => emqx_guid:to_hexstr(Id),
+            transformation => TransformationName,
+            clientid => ClientId,
+            username => emqx_message:get_header(username, Message, undefined),
+            payload => Payload,
+            peerhost => ntoa(emqx_message:get_header(peerhost, Message, undefined)),
+            topic => Topic,
+            qos => QoS,
+            flags => Flags,
+            pub_props => printable_maps(emqx_message:get_header(properties, Message, #{})),
+            publish_received_at => Timestamp
+        },
+        #{headers => Headers}
+    ).
+
 eventmsg_validation_failed(
     Message = #message{
         id = Id,
@@ -737,9 +785,17 @@ event_info_schema_validation_failed() ->
         {<<"messages that do not pass configured validations">>, <<"未通过验证的消息"/utf8>>},
         <<"SELECT * FROM \"$events/schema_validation_failed\" WHERE topic =~ 't/#'">>
     ).
+event_info_message_transformation_failed() ->
+    event_info_common(
+        'message.transformation_failed',
+        {<<"message transformation failed">>, <<"message 转换失败"/utf8>>},
+        {<<"messages that do not pass configured transformations">>, <<"未通过转换的消息"/utf8>>},
+        <<"SELECT * FROM \"$events/message_transformation_failed\" WHERE topic =~ 't/#'">>
+    ).
 ee_event_info() ->
     [
-        event_info_schema_validation_failed()
+        event_info_schema_validation_failed(),
+        event_info_message_transformation_failed()
     ].
 -else.
 %% END (?EMQX_RELEASE_EDITION == ee).
@@ -933,6 +989,9 @@ test_columns(Event) ->
 -if(?EMQX_RELEASE_EDITION == ee).
 ee_test_columns('schema.validation_failed') ->
     [{<<"validation">>, <<"myvalidation">>}] ++
+        test_columns('message.publish');
+ee_test_columns('message.transformation_failed') ->
+    [{<<"transformation">>, <<"mytransformation">>}] ++
         test_columns('message.publish').
 %% ELSE (?EMQX_RELEASE_EDITION == ee).
 -else.
@@ -997,6 +1056,23 @@ columns_with_exam('schema.validation_failed') ->
         {<<"timestamp">>, erlang:system_time(millisecond)},
         {<<"node">>, node()}
     ];
+columns_with_exam('message.transformation_failed') ->
+    [
+        {<<"event">>, 'message.transformation_failed'},
+        {<<"transformation">>, <<"my_transformation">>},
+        {<<"id">>, emqx_guid:to_hexstr(emqx_guid:gen())},
+        {<<"clientid">>, <<"c_emqx">>},
+        {<<"username">>, <<"u_emqx">>},
+        {<<"payload">>, <<"{\"msg\": \"hello\"}">>},
+        {<<"peerhost">>, <<"192.168.0.10">>},
+        {<<"topic">>, <<"t/a">>},
+        {<<"qos">>, 1},
+        {<<"flags">>, #{}},
+        {<<"publish_received_at">>, erlang:system_time(millisecond)},
+        columns_example_props(pub_props),
+        {<<"timestamp">>, erlang:system_time(millisecond)},
+        {<<"node">>, node()}
+    ];
 columns_with_exam('delivery.dropped') ->
     [
         {<<"event">>, 'delivery.dropped'},
@@ -1200,6 +1276,7 @@ hook_fun('session.unsubscribed') -> fun ?MODULE:on_session_unsubscribed/4;
 hook_fun('message.delivered') -> fun ?MODULE:on_message_delivered/3;
 hook_fun('message.acked') -> fun ?MODULE:on_message_acked/3;
 hook_fun('message.dropped') -> fun ?MODULE:on_message_dropped/4;
+hook_fun('message.transformation_failed') -> fun ?MODULE:on_message_transformation_failed/3;
 hook_fun('schema.validation_failed') -> fun ?MODULE:on_schema_validation_failed/3;
 hook_fun('delivery.dropped') -> fun ?MODULE:on_delivery_dropped/4;
 hook_fun('message.publish') -> fun ?MODULE:on_message_publish/2;
@@ -1231,6 +1308,7 @@ event_name(<<"$events/session_unsubscribed">>) -> 'session.unsubscribed';
 event_name(<<"$events/message_delivered">>) -> 'message.delivered';
 event_name(<<"$events/message_acked">>) -> 'message.acked';
 event_name(<<"$events/message_dropped">>) -> 'message.dropped';
+event_name(<<"$events/message_transformation_failed">>) -> 'message.transformation_failed';
 event_name(<<"$events/schema_validation_failed">>) -> 'schema.validation_failed';
 event_name(<<"$events/delivery_dropped">>) -> 'delivery.dropped';
 event_name(_) -> 'message.publish'.
@@ -1246,6 +1324,7 @@ event_topic('session.unsubscribed') -> <<"$events/session_unsubscribed">>;
 event_topic('message.delivered') -> <<"$events/message_delivered">>;
 event_topic('message.acked') -> <<"$events/message_acked">>;
 event_topic('message.dropped') -> <<"$events/message_dropped">>;
+event_topic('message.transformation_failed') -> <<"$events/message_transformation_failed">>;
 event_topic('schema.validation_failed') -> <<"$events/schema_validation_failed">>;
 event_topic('delivery.dropped') -> <<"$events/delivery_dropped">>;
 event_topic('message.publish') -> <<"$events/message_publish">>.
diff --git a/apps/emqx_rule_engine/src/emqx_rule_runtime.erl b/apps/emqx_rule_engine/src/emqx_rule_runtime.erl
index 003a2b5a3..21a42c283 100644
--- a/apps/emqx_rule_engine/src/emqx_rule_runtime.erl
+++ b/apps/emqx_rule_engine/src/emqx_rule_runtime.erl
@@ -27,7 +27,7 @@
     inc_action_metrics/2
 ]).
 
-%% Internal exports used by schema validation
+%% Internal exports used by schema validation and message transformation.
 -export([evaluate_select/3, clear_rule_payload/0]).
-import( diff --git a/apps/emqx_schema_validation/README.md b/apps/emqx_schema_validation/README.md index 9882209ba..fb0809623 100644 --- a/apps/emqx_schema_validation/README.md +++ b/apps/emqx_schema_validation/README.md @@ -6,7 +6,7 @@ the message without further processing, or to disconnect the offending client as # Documentation -Refer to [Message +Refer to [Schema Validation](https://docs.emqx.com/en/enterprise/latest/data-integration/schema-validation.html) for more information about the semantics and checks available. diff --git a/apps/emqx_schema_validation/src/emqx_schema_validation.app.src b/apps/emqx_schema_validation/src/emqx_schema_validation.app.src index a2bdf30cc..773e0fff0 100644 --- a/apps/emqx_schema_validation/src/emqx_schema_validation.app.src +++ b/apps/emqx_schema_validation/src/emqx_schema_validation.app.src @@ -1,6 +1,6 @@ {application, emqx_schema_validation, [ {description, "EMQX Schema Validation"}, - {vsn, "0.1.0"}, + {vsn, "0.1.1"}, {registered, [emqx_schema_validation_sup, emqx_schema_validation_registry]}, {mod, {emqx_schema_validation_app, []}}, {applications, [ diff --git a/apps/emqx_schema_validation/src/emqx_schema_validation.erl b/apps/emqx_schema_validation/src/emqx_schema_validation.erl index 3ec0e019d..bc7dac5bc 100644 --- a/apps/emqx_schema_validation/src/emqx_schema_validation.erl +++ b/apps/emqx_schema_validation/src/emqx_schema_validation.erl @@ -144,7 +144,7 @@ delete(Name) -> -spec register_hooks() -> ok. register_hooks() -> - emqx_hooks:put('message.publish', {?MODULE, on_message_publish, []}, ?HP_MSG_VALIDATION). + emqx_hooks:put('message.publish', {?MODULE, on_message_publish, []}, ?HP_SCHEMA_VALIDATION). -spec unregister_hooks() -> ok. unregister_hooks() -> diff --git a/apps/emqx_utils/src/emqx_variform.erl b/apps/emqx_utils/src/emqx_variform.erl index 97096559d..e92be06dd 100644 --- a/apps/emqx_utils/src/emqx_variform.erl +++ b/apps/emqx_utils/src/emqx_variform.erl @@ -31,6 +31,7 @@ -export([render/2, render/3]). -export([compile/1, decompile/1]). +-export([skip_stringification/1]). -export_type([compiled/0]). @@ -43,6 +44,7 @@ ). -define(IS_EMPTY(X), (X =:= <<>> orelse X =:= "" orelse X =:= undefined)). +-define(SKIP_STRINGIFICATION, {?MODULE, '__skip_stringification__'}). %% @doc Render a variform expression with bindings. %% A variform expression is a template string which supports variable substitution @@ -88,11 +90,14 @@ eval_as_string(Expr, Bindings, _Opts) -> {error, #{exception => C, reason => E, stack_trace => S}} end. -%% Force the expression to return binary string. +%% Force the expression to return binary string (in most cases). return_str(Str) when is_binary(Str) -> Str; return_str(Num) when is_integer(Num) -> integer_to_binary(Num); return_str(Num) when is_float(Num) -> float_to_binary(Num, [{decimals, 10}, compact]); return_str(Atom) when is_atom(Atom) -> atom_to_binary(Atom, utf8); +%% For usage by other modules (e.g.: message transformation) +return_str({?SKIP_STRINGIFICATION, X}) -> + X; return_str(Other) -> throw(#{ reason => bad_return, @@ -100,6 +105,9 @@ return_str(Other) -> got => Other }). +skip_stringification(X) -> + {?SKIP_STRINGIFICATION, X}. + %% @doc Compile varifom expression. -spec compile(string() | binary() | compiled()) -> {ok, compiled()} | {error, any()}. compile(#{form := _} = Compiled) -> @@ -313,7 +321,7 @@ assert_module_allowed(Mod) -> ok; false -> throw(#{ - reason => unallowed_veriform_module, + reason => unallowed_variform_module, module => Mod }) end. 
diff --git a/apps/emqx_utils/src/emqx_variform_parser.yrl b/apps/emqx_utils/src/emqx_variform_parser.yrl index 45d92696b..c4d5265c6 100644 --- a/apps/emqx_utils/src/emqx_variform_parser.yrl +++ b/apps/emqx_utils/src/emqx_variform_parser.yrl @@ -22,6 +22,7 @@ Rootsymbol expr -> call_or_var : '$1'. %% Function call or variable +call_or_var -> identifier '(' ')' : {call, element(3, '$1'), []}. call_or_var -> identifier '(' args ')' : {call, element(3, '$1'), '$3'}. call_or_var -> identifier : {var, element(3, '$1')}. diff --git a/apps/emqx_utils/test/emqx_variform_tests.erl b/apps/emqx_utils/test/emqx_variform_tests.erl index 2e3c6c4d5..4437280ea 100644 --- a/apps/emqx_utils/test/emqx_variform_tests.erl +++ b/apps/emqx_utils/test/emqx_variform_tests.erl @@ -126,7 +126,7 @@ inject_allowed_module_test() -> render(atom_to_list(?MODULE) ++ ".concat('a','b')", #{}) ), ?assertMatch( - {error, #{reason := unallowed_veriform_module, module := emqx}}, + {error, #{reason := unallowed_variform_module, module := emqx}}, render("emqx.concat('a','b')", #{}) ) after @@ -231,8 +231,12 @@ syntax_error_test_() -> {"const string single quote", fun() -> ?assertMatch(?SYNTAX_ERROR, render("'a'", #{})) end}, {"const string double quote", fun() -> ?assertMatch(?SYNTAX_ERROR, render(<<"\"a\"">>, #{})) - end}, - {"no arity", fun() -> ?assertMatch(?SYNTAX_ERROR, render("concat()", #{})) end} + end} + ]. + +maps_test_() -> + [ + {"arity zero", ?_assertEqual({ok, <<"0">>}, render(<<"maps.size(maps.new())">>, #{}))} ]. render(Expression, Bindings) -> diff --git a/mix.exs b/mix.exs index 5432b64ae..a488e5fce 100644 --- a/mix.exs +++ b/mix.exs @@ -190,6 +190,7 @@ defmodule EMQXUmbrella.MixProject do :emqx_bridge_s3, :emqx_schema_registry, :emqx_schema_validation, + :emqx_message_transformation, :emqx_enterprise, :emqx_bridge_kinesis, :emqx_bridge_azure_event_hub, diff --git a/rebar.config.erl b/rebar.config.erl index 8320cc62a..fe01c35df 100644 --- a/rebar.config.erl +++ b/rebar.config.erl @@ -117,6 +117,7 @@ is_community_umbrella_app("apps/emqx_gateway_ocpp") -> false; is_community_umbrella_app("apps/emqx_gateway_jt808") -> false; is_community_umbrella_app("apps/emqx_bridge_syskeeper") -> false; is_community_umbrella_app("apps/emqx_schema_validation") -> false; +is_community_umbrella_app("apps/emqx_message_transformation") -> false; is_community_umbrella_app("apps/emqx_eviction_agent") -> false; is_community_umbrella_app("apps/emqx_node_rebalance") -> false; is_community_umbrella_app(_) -> true. 
diff --git a/rel/i18n/emqx_message_transformation_http_api.hocon b/rel/i18n/emqx_message_transformation_http_api.hocon new file mode 100644 index 000000000..038e3e8ca --- /dev/null +++ b/rel/i18n/emqx_message_transformation_http_api.hocon @@ -0,0 +1,36 @@ +emqx_message_transformation_http_api { + + list_transformations.desc: + """List transformations""" + + lookup_transformation.desc: + """Lookup a transformation""" + + update_transformation.desc: + """Update a transformation""" + + delete_transformation.desc: + """Delete a transformation""" + + append_transformation.desc: + """Append a new transformation to the list of transformations""" + + reorder_transformations.desc: + """Reorder of all transformations""" + + enable_disable_transformation.desc: + """Enable or disable a particular transformation""" + + get_transformation_metrics.desc: + """Get metrics for a particular transformation""" + + reset_transformation_metrics.desc: + """Reset metrics for a particular transformation""" + + param_path_name.desc: + """Transformation name""" + + param_path_enable.desc: + """Enable or disable transformation""" + +} diff --git a/rel/i18n/emqx_prometheus_api.hocon b/rel/i18n/emqx_prometheus_api.hocon index 819d346d8..3c30685ee 100644 --- a/rel/i18n/emqx_prometheus_api.hocon +++ b/rel/i18n/emqx_prometheus_api.hocon @@ -30,4 +30,9 @@ get_prom_schema_validation.desc: get_prom_schema_validation.label: """Prometheus Metrics for Schema Validation""" +get_prom_message_transformation.desc: +"""Get Prometheus Metrics for Message Validation""" +get_prom_message_transformation.label: +"""Prometheus Metrics for Message Validation""" + } From e63dcc84b071ced9ba477e7bf9bc3ca5418360a8 Mon Sep 17 00:00:00 2001 From: Kjell Winblad Date: Fri, 7 Jun 2024 12:20:42 +0200 Subject: [PATCH 07/33] fix: unused variable and better error message --- apps/emqx_postgresql/src/emqx_postgresql.erl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/apps/emqx_postgresql/src/emqx_postgresql.erl b/apps/emqx_postgresql/src/emqx_postgresql.erl index e0add780c..b8caf36af 100644 --- a/apps/emqx_postgresql/src/emqx_postgresql.erl +++ b/apps/emqx_postgresql/src/emqx_postgresql.erl @@ -654,7 +654,7 @@ prepare_sql_to_conn(Conn, Prepares) -> prepare_sql_to_conn(Conn, [], Statements, _Attempts) when is_pid(Conn) -> {ok, Statements}; -prepare_sql_to_conn(Conn, [{Key, _} | _Rest], _Statements, _MaxAttempts = 2) when is_pid(Conn) -> +prepare_sql_to_conn(Conn, [{_Key, _} | _Rest], _Statements, _MaxAttempts = 2) when is_pid(Conn) -> failed_to_remove_prev_prepared_statement_error(); prepare_sql_to_conn( Conn, [{Key, {SQL, _RowTemplate}} | Rest] = ToPrepare, Statements, Attempts @@ -711,8 +711,8 @@ prepare_sql_to_conn( failed_to_remove_prev_prepared_statement_error() -> Msg = - ("A previous prepared statement for the action already exists and " - "we are not able to close it. Please, try to disable and then enable " + ("A previous prepared statement for the action already exists " + "but cannot be closed. Please, try to disable and then enable " "the connector to resolve this issue."), {error, unicode:charactes_to_binary(Msg)}. From 28a3c77e888cb479615d31d9bb8c90bab21598c9 Mon Sep 17 00:00:00 2001 From: Thales Macedo Garitezi Date: Fri, 7 Jun 2024 11:07:12 -0300 Subject: [PATCH 08/33] chore: remove redundant health check call Since `emqx_resource_manager:channel_health_check` already does a base resource health check, the call was repeated. 
--- apps/emqx_resource/src/emqx_resource_manager.erl | 1 - 1 file changed, 1 deletion(-) diff --git a/apps/emqx_resource/src/emqx_resource_manager.erl b/apps/emqx_resource/src/emqx_resource_manager.erl index 4763094d0..816c38301 100644 --- a/apps/emqx_resource/src/emqx_resource_manager.erl +++ b/apps/emqx_resource/src/emqx_resource_manager.erl @@ -381,7 +381,6 @@ channel_health_check(ResId, ChannelId) -> add_channel(ResId, ChannelId, Config) -> Result = safe_call(ResId, {add_channel, ChannelId, Config}, ?T_OPERATION), %% Wait for health_check to finish - _ = health_check(ResId), _ = channel_health_check(ResId, ChannelId), Result. From c13631102eb6c63a75cdf5cf50fb8379f954d9d3 Mon Sep 17 00:00:00 2001 From: Kjell Winblad Date: Fri, 7 Jun 2024 19:40:18 +0200 Subject: [PATCH 09/33] fix(PgSQL connector): typo in function name --- apps/emqx_postgresql/src/emqx_postgresql.erl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apps/emqx_postgresql/src/emqx_postgresql.erl b/apps/emqx_postgresql/src/emqx_postgresql.erl index b8caf36af..6f5200fb0 100644 --- a/apps/emqx_postgresql/src/emqx_postgresql.erl +++ b/apps/emqx_postgresql/src/emqx_postgresql.erl @@ -714,7 +714,7 @@ failed_to_remove_prev_prepared_statement_error() -> ("A previous prepared statement for the action already exists " "but cannot be closed. Please, try to disable and then enable " "the connector to resolve this issue."), - {error, unicode:charactes_to_binary(Msg)}. + {error, unicode:characters_to_binary(Msg)}. to_bin(Bin) when is_binary(Bin) -> Bin; From 672173c7fd4ddeb05b5e4b2cb2b342c42c3ffdbb Mon Sep 17 00:00:00 2001 From: zmstone Date: Sun, 9 Jun 2024 10:06:15 +0200 Subject: [PATCH 10/33] chore: pin system_monitor 3.0.5 --- mix.exs | 2 +- rebar.config | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mix.exs b/mix.exs index 5432b64ae..ebee54a55 100644 --- a/mix.exs +++ b/mix.exs @@ -67,7 +67,7 @@ defmodule EMQXUmbrella.MixProject do github: "emqx/emqtt", tag: "1.10.1", override: true, system_env: maybe_no_quic_env()}, {:rulesql, github: "emqx/rulesql", tag: "0.2.1"}, {:observer_cli, "1.7.1"}, - {:system_monitor, github: "ieQu1/system_monitor", tag: "3.0.3"}, + {:system_monitor, github: "ieQu1/system_monitor", tag: "3.0.5"}, {:telemetry, "1.1.0"}, # in conflict by emqtt and hocon {:getopt, "1.0.2", override: true}, diff --git a/rebar.config b/rebar.config index b260b366e..ba7cc63dc 100644 --- a/rebar.config +++ b/rebar.config @@ -94,7 +94,7 @@ {rulesql, {git, "https://github.com/emqx/rulesql", {tag, "0.2.1"}}}, % NOTE: depends on recon 2.5.x {observer_cli, "1.7.1"}, - {system_monitor, {git, "https://github.com/ieQu1/system_monitor", {tag, "3.0.3"}}}, + {system_monitor, {git, "https://github.com/ieQu1/system_monitor", {tag, "3.0.5"}}}, {getopt, "1.0.2"}, {snabbkaffe, {git, "https://github.com/kafka4beam/snabbkaffe.git", {tag, "1.0.10"}}}, {hocon, {git, "https://github.com/emqx/hocon.git", {tag, "0.42.2"}}}, From 4347f3de3ec4f440c5b305e5863125886a123cf7 Mon Sep 17 00:00:00 2001 From: zmstone Date: Mon, 10 Jun 2024 10:27:45 +0200 Subject: [PATCH 11/33] fix(bridge/mqtt): respect client ID prefix --- .../src/emqx_bridge_mqtt.app.src | 2 +- .../src/emqx_bridge_mqtt_connector.erl | 18 ++++++++++--- .../src/emqx_bridge_mqtt_lib.erl | 23 +++++++++++----- .../test/emqx_bridge_mqtt_SUITE.erl | 26 ++++++++++++++++++- 4 files changed, 57 insertions(+), 12 deletions(-) diff --git a/apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt.app.src b/apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt.app.src index 1e582a81c..e66d97a07 
100644 --- a/apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt.app.src +++ b/apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt.app.src @@ -1,7 +1,7 @@ %% -*- mode: erlang -*- {application, emqx_bridge_mqtt, [ {description, "EMQX MQTT Broker Bridge"}, - {vsn, "0.2.0"}, + {vsn, "0.2.1"}, {registered, []}, {applications, [ kernel, diff --git a/apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt_connector.erl b/apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt_connector.erl index f133bf334..92491991f 100644 --- a/apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt_connector.erl +++ b/apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt_connector.erl @@ -57,6 +57,7 @@ -define(HEALTH_CHECK_TIMEOUT, 1000). -define(INGRESS, "I"). -define(EGRESS, "E"). +-define(IS_NO_PREFIX(P), (P =:= undefined orelse P =:= <<>>)). %% =================================================================== %% When use this bridge as a data source, ?MODULE:on_message_received will be called @@ -441,9 +442,9 @@ ms_to_s(Ms) -> clientid(Name, _Conf = #{clientid_prefix := Prefix}) when is_binary(Prefix) andalso Prefix =/= <<>> -> - emqx_bridge_mqtt_lib:clientid_base([Prefix, $:, Name]); + {Prefix, emqx_bridge_mqtt_lib:clientid_base(Name)}; clientid(Name, _Conf) -> - emqx_bridge_mqtt_lib:clientid_base([Name]). + {undefined, emqx_bridge_mqtt_lib:clientid_base(Name)}. %% @doc Start an ingress bridge worker. -spec connect([option() | {ecpool_worker_id, pos_integer()}]) -> @@ -481,8 +482,17 @@ mk_client_opts( msg_handler => mk_client_event_handler(Name, TopicToHandlerIndex) }. -mk_clientid(WorkerId, ClientId) -> - emqx_bridge_mqtt_lib:bytes23([ClientId], WorkerId). +mk_clientid(WorkerId, {Prefix, ClientId}) when ?IS_NO_PREFIX(Prefix) -> + %% When there is no prefix, try to keep the client ID length within 23 bytes + emqx_bridge_mqtt_lib:bytes23(ClientId, WorkerId); +mk_clientid(WorkerId, {Prefix, ClientId}) when size(Prefix) < 20 -> + %% Try to respect client ID prefix when it's less than 20 bytes + %% meaning there is at least 3 bytes to randomize + %% Must add $: for backward compatibility + emqx_bridge_mqtt_lib:bytes23_with_prefix(Prefix, ClientId, WorkerId); +mk_clientid(WorkerId, {Prefix, ClientId}) -> + %% There is no other option but to use a long client ID + iolist_to_binary([Prefix, ClientId, $:, integer_to_binary(WorkerId)]). mk_client_event_handler(Name, TopicToHandlerIndex) -> #{ diff --git a/apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt_lib.erl b/apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt_lib.erl index 740775192..12f445cb1 100644 --- a/apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt_lib.erl +++ b/apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt_lib.erl @@ -16,7 +16,7 @@ -module(emqx_bridge_mqtt_lib). --export([clientid_base/1, bytes23/2]). +-export([clientid_base/1, bytes23/2, bytes23_with_prefix/3]). %% @doc Make the base ID of client IDs. %% A base ID is used to concatenate with pool worker ID to build a @@ -28,18 +28,29 @@ clientid_base(Name) -> bin([Name, shortener(atom_to_list(node()), 8)]). %% @doc Limit the number of bytes for client ID under 23 bytes. -%% If Prefix and suffix concatenated is longer than 23 bytes +%% If ClientID base and suffix concatenated is longer than 23 bytes %% it hashes the concatenation and replace the non-random suffix. -bytes23(Prefix, SeqNo) -> +bytes23(ClientId, SeqNo) -> + bytes_n(ClientId, SeqNo, 23). 
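%% Illustration (not part of this patch) of the 23-byte budget handled by the
%% helpers in this module, assuming pool worker SeqNo = 1:
%%
%%   bytes23(Base, 1)
%%     -> <<Base/binary, ":1">> when that concatenation fits in 23 bytes,
%%        otherwise a 23-byte hex string derived from its SHA hash (shortener/2);
%%
%%   bytes23_with_prefix(<<"012-">>, Base, 1)     %% 4-byte prefix
%%     -> <<"012-", Suffix/binary>> where Suffix = bytes_n(Base, 1, 23 - 4),
%%        i.e. the hashed/shortened suffix gets the remaining 19 bytes.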
+ +bytes_n(ClientId, SeqNo, N) -> Suffix = integer_to_binary(SeqNo), - Concat = bin([Prefix, $:, Suffix]), - case size(Concat) =< 23 of + Concat = bin([ClientId, $:, Suffix]), + case size(Concat) =< N of true -> Concat; false -> - shortener(Concat, 23) + shortener(Concat, N) end. +%% @doc Limit the number of bytes for client ID under 23 bytes. +%% If Prefix, ClientID base and suffix concatenated is longer than 23 bytes +%% it hashes the ClientID and SeqNo before appended to the Prefix +bytes23_with_prefix(Prefix, ClientId, SeqNo) when Prefix =/= <<>> -> + SuffixLen = 23 - size(Prefix), + true = (SuffixLen > 0), + bin([Prefix, bytes_n(ClientId, SeqNo, SuffixLen)]). + %% @private SHA hash a string and return the prefix of %% the given length as hex string in binary format. shortener(Str, Length) when is_list(Str) -> diff --git a/apps/emqx_bridge_mqtt/test/emqx_bridge_mqtt_SUITE.erl b/apps/emqx_bridge_mqtt/test/emqx_bridge_mqtt_SUITE.erl index d784a5acb..5d4c82ca6 100644 --- a/apps/emqx_bridge_mqtt/test/emqx_bridge_mqtt_SUITE.erl +++ b/apps/emqx_bridge_mqtt/test/emqx_bridge_mqtt_SUITE.erl @@ -568,6 +568,7 @@ t_egress_short_clientid(_Config) -> Name = <<"abc01234">>, BaseId = emqx_bridge_mqtt_lib:clientid_base([Name]), ExpectedClientId = iolist_to_binary([BaseId, $:, "1"]), + ?assertMatch(<<"abc01234", _/binary>>, ExpectedClientId), test_egress_clientid(Name, ExpectedClientId). t_egress_long_clientid(_Config) -> @@ -578,11 +579,34 @@ t_egress_long_clientid(_Config) -> ExpectedClientId = emqx_bridge_mqtt_lib:bytes23(BaseId, 1), test_egress_clientid(Name, ExpectedClientId). +t_egress_with_short_prefix(_Config) -> + %% Expect the actual client ID in use is hashed from + %% head(sha1(:), 16) + Prefix = <<"012-">>, + Name = <<"345">>, + BaseId = emqx_bridge_mqtt_lib:clientid_base([Name]), + ExpectedClientId = emqx_bridge_mqtt_lib:bytes23_with_prefix(Prefix, BaseId, 1), + ?assertMatch(<<"012-", _/binary>>, ExpectedClientId), + test_egress_clientid(Name, Prefix, ExpectedClientId). + +t_egress_with_long_prefix(_Config) -> + %% Expect the actual client ID in use is hashed from + %% : + Prefix = <<"0123456789abcdef01234-">>, + Name = <<"345">>, + BaseId = emqx_bridge_mqtt_lib:clientid_base([Name]), + ExpectedClientId = iolist_to_binary([Prefix, BaseId, <<":1">>]), + test_egress_clientid(Name, Prefix, ExpectedClientId). + test_egress_clientid(Name, ExpectedClientId) -> + test_egress_clientid(Name, <<>>, ExpectedClientId). + +test_egress_clientid(Name, ClientIdPrefix, ExpectedClientId) -> BridgeIDEgress = create_bridge( ?SERVER_CONF#{ <<"name">> => Name, - <<"egress">> => (?EGRESS_CONF)#{<<"pool_size">> => 1} + <<"egress">> => (?EGRESS_CONF)#{<<"pool_size">> => 1}, + <<"clientid_prefix">> => ClientIdPrefix } ), LocalTopic = <>, From 7b2b2b527c66a8624aa701f6ab3395891cffe04b Mon Sep 17 00:00:00 2001 From: zmstone Date: Mon, 10 Jun 2024 10:48:04 +0200 Subject: [PATCH 12/33] docs: add changelog for pr #13216 --- changes/ce/fix-13216.en.md | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 changes/ce/fix-13216.en.md diff --git a/changes/ce/fix-13216.en.md b/changes/ce/fix-13216.en.md new file mode 100644 index 000000000..2fe85c6b2 --- /dev/null +++ b/changes/ce/fix-13216.en.md @@ -0,0 +1,10 @@ +Respcet `clientid_prefix` config for MQTT bridges. + +As of version 5.4.1, EMQX limits MQTT Client ID lengths to 23 bytes. +Previously, the system included the `clientid_prefix` in the hash calculation of the original, excessively long Client ID, thereby impacting the resulting shortened ID. 
+ +Change Details: +- Without Prefix: Behavior remains unchanged; EMQX will hash the entire Client ID into a 23-byte space (when longer than 23 bytes). +- With Prefix: + - Prefix no more than 19 bytes: The prefix is preserved, and the remaining suffix is hashed into a 4-byte space. + - Prefix is 20 or more bytes: EMQX no longer attempts to shorten the Client ID, respecting the configured prefix in its entirety. From a885f0b41af862bf3bebd4ec8828c60934271612 Mon Sep 17 00:00:00 2001 From: Kjell Winblad Date: Mon, 10 Jun 2024 13:45:27 +0200 Subject: [PATCH 13/33] test(emqx_bridge_pgsql_SUITE): call test janitor --- apps/emqx_bridge_pgsql/test/emqx_bridge_pgsql_SUITE.erl | 1 + 1 file changed, 1 insertion(+) diff --git a/apps/emqx_bridge_pgsql/test/emqx_bridge_pgsql_SUITE.erl b/apps/emqx_bridge_pgsql/test/emqx_bridge_pgsql_SUITE.erl index c6eb99f83..e2f5ac868 100644 --- a/apps/emqx_bridge_pgsql/test/emqx_bridge_pgsql_SUITE.erl +++ b/apps/emqx_bridge_pgsql/test/emqx_bridge_pgsql_SUITE.erl @@ -135,6 +135,7 @@ end_per_testcase(_Testcase, Config) -> connect_and_clear_table(Config), ok = snabbkaffe:stop(), delete_bridge(Config), + emqx_common_test_helpers:call_janitor(), ok. %%------------------------------------------------------------------------------ From 7a09b0470d054b8f32e0bdc11aa1f52000fee9f2 Mon Sep 17 00:00:00 2001 From: zmstone Date: Mon, 10 Jun 2024 14:22:25 +0200 Subject: [PATCH 14/33] chore: cosmetic changes --- .../src/emqx_bridge_mqtt_connector.erl | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt_connector.erl b/apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt_connector.erl index 92491991f..d507d11b8 100644 --- a/apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt_connector.erl +++ b/apps/emqx_bridge_mqtt/src/emqx_bridge_mqtt_connector.erl @@ -57,7 +57,9 @@ -define(HEALTH_CHECK_TIMEOUT, 1000). -define(INGRESS, "I"). -define(EGRESS, "E"). --define(IS_NO_PREFIX(P), (P =:= undefined orelse P =:= <<>>)). +-define(NO_PREFIX, <<>>). +-define(IS_NO_PREFIX(P), (P =:= undefined orelse P =:= ?NO_PREFIX)). +-define(MAX_PREFIX_BYTES, 19). %% =================================================================== %% When use this bridge as a data source, ?MODULE:on_message_received will be called @@ -444,7 +446,7 @@ clientid(Name, _Conf = #{clientid_prefix := Prefix}) when -> {Prefix, emqx_bridge_mqtt_lib:clientid_base(Name)}; clientid(Name, _Conf) -> - {undefined, emqx_bridge_mqtt_lib:clientid_base(Name)}. + {?NO_PREFIX, emqx_bridge_mqtt_lib:clientid_base(Name)}. %% @doc Start an ingress bridge worker. -spec connect([option() | {ecpool_worker_id, pos_integer()}]) -> @@ -485,10 +487,9 @@ mk_client_opts( mk_clientid(WorkerId, {Prefix, ClientId}) when ?IS_NO_PREFIX(Prefix) -> %% When there is no prefix, try to keep the client ID length within 23 bytes emqx_bridge_mqtt_lib:bytes23(ClientId, WorkerId); -mk_clientid(WorkerId, {Prefix, ClientId}) when size(Prefix) < 20 -> - %% Try to respect client ID prefix when it's less than 20 bytes - %% meaning there is at least 3 bytes to randomize - %% Must add $: for backward compatibility +mk_clientid(WorkerId, {Prefix, ClientId}) when size(Prefix) =< ?MAX_PREFIX_BYTES -> + %% Try to respect client ID prefix when it's no more than 19 bytes, + %% meaning there are at least 4 bytes as hash space. 
emqx_bridge_mqtt_lib:bytes23_with_prefix(Prefix, ClientId, WorkerId); mk_clientid(WorkerId, {Prefix, ClientId}) -> %% There is no other option but to use a long client ID From aa416cc0adccfcaea75a59358b33a4a8fe724f7a Mon Sep 17 00:00:00 2001 From: Thales Macedo Garitezi Date: Mon, 10 Jun 2024 09:53:02 -0300 Subject: [PATCH 15/33] docs: fix stale comments and dates --- apps/emqx_message_transformation/BSL.txt | 2 +- .../src/emqx_message_transformation.erl | 5 ++--- apps/emqx_schema_validation/BSL.txt | 2 +- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/apps/emqx_message_transformation/BSL.txt b/apps/emqx_message_transformation/BSL.txt index 127b85777..023786aa5 100644 --- a/apps/emqx_message_transformation/BSL.txt +++ b/apps/emqx_message_transformation/BSL.txt @@ -2,7 +2,7 @@ Business Source License 1.1 Licensor: Hangzhou EMQ Technologies Co., Ltd. Licensed Work: EMQX Enterprise Edition - The Licensed Work is (c) 2023 + The Licensed Work is (c) 2024 Hangzhou EMQ Technologies Co., Ltd. Additional Use Grant: Students and educators are granted right to copy, modify, and create derivative work for research diff --git a/apps/emqx_message_transformation/src/emqx_message_transformation.erl b/apps/emqx_message_transformation/src/emqx_message_transformation.erl index 4baa0b00d..d0c314553 100644 --- a/apps/emqx_message_transformation/src/emqx_message_transformation.erl +++ b/apps/emqx_message_transformation/src/emqx_message_transformation.erl @@ -49,9 +49,9 @@ -define(TRANSFORMATIONS_CONF_PATH, [?CONF_ROOT, transformations]). -type transformation_name() :: binary(). -%% TODO +%% TODO: make more specific typespec -type transformation() :: #{atom() => term()}. -%% TODO +%% TODO: make more specific typespec -type variform() :: any(). -type operation() :: #{key := [binary(), ...], value := variform()}. -type qos() :: 0..2. @@ -174,7 +174,6 @@ on_message_publish(Message = #message{topic = Topic, headers = Headers}) -> Transformations -> case run_transformations(Transformations, Message) of #message{} = FinalMessage -> - %% FIXME: must ensure final payload is a binary!! Check this. emqx_metrics:inc('messages.transformation_succeeded'), {ok, FinalMessage}; drop -> diff --git a/apps/emqx_schema_validation/BSL.txt b/apps/emqx_schema_validation/BSL.txt index f0cd31c6f..c770ed9a2 100644 --- a/apps/emqx_schema_validation/BSL.txt +++ b/apps/emqx_schema_validation/BSL.txt @@ -2,7 +2,7 @@ Business Source License 1.1 Licensor: Hangzhou EMQ Technologies Co., Ltd. Licensed Work: EMQX Enterprise Edition - The Licensed Work is (c) 2023 + The Licensed Work is (c) 2024 Hangzhou EMQ Technologies Co., Ltd. 
Additional Use Grant: Students and educators are granted right to copy, modify, and create derivative work for research From 4aa1c3842f48a4043a13304527f75cd4b89185bb Mon Sep 17 00:00:00 2001 From: Thales Macedo Garitezi Date: Mon, 10 Jun 2024 09:53:29 -0300 Subject: [PATCH 16/33] fix: declare `emqx` as a dependency in `.app.src` --- .../src/emqx_message_transformation.app.src | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/apps/emqx_message_transformation/src/emqx_message_transformation.app.src b/apps/emqx_message_transformation/src/emqx_message_transformation.app.src index 2c54ed789..b8289c1f1 100644 --- a/apps/emqx_message_transformation/src/emqx_message_transformation.app.src +++ b/apps/emqx_message_transformation/src/emqx_message_transformation.app.src @@ -5,7 +5,8 @@ {mod, {emqx_message_transformation_app, []}}, {applications, [ kernel, - stdlib + stdlib, + emqx ]}, {env, []}, {modules, []}, From d34558954d3dc4913ea1b4fa8506b818464ebef3 Mon Sep 17 00:00:00 2001 From: Thales Macedo Garitezi Date: Mon, 10 Jun 2024 10:40:53 -0300 Subject: [PATCH 17/33] refactor: improve api and module organization --- .../src/emqx_message_transformation.erl | 349 +--------------- .../src/emqx_message_transformation_app.erl | 8 +- .../emqx_message_transformation_config.erl | 393 ++++++++++++++++++ 3 files changed, 408 insertions(+), 342 deletions(-) create mode 100644 apps/emqx_message_transformation/src/emqx_message_transformation_config.erl diff --git a/apps/emqx_message_transformation/src/emqx_message_transformation.erl b/apps/emqx_message_transformation/src/emqx_message_transformation.erl index d0c314553..f39ddf22f 100644 --- a/apps/emqx_message_transformation/src/emqx_message_transformation.erl +++ b/apps/emqx_message_transformation/src/emqx_message_transformation.erl @@ -10,12 +10,6 @@ %% API -export([ - add_handler/0, - remove_handler/0, - - load/0, - unload/0, - list/0, reorder/1, lookup/1, @@ -32,13 +26,6 @@ on_message_publish/1 ]). -%% `emqx_config_handler' API --export([pre_config_update/3, post_config_update/5]). - -%% `emqx_config_backup' API --behaviour(emqx_config_backup). --export([import_config/1]). - %%------------------------------------------------------------------------------ %% Type declarations %%------------------------------------------------------------------------------ @@ -72,84 +59,42 @@ } }. +-export_type([ + transformation/0, + transformation_name/0 +]). + %%------------------------------------------------------------------------------ %% API %%------------------------------------------------------------------------------ --spec add_handler() -> ok. -add_handler() -> - ok = emqx_config_handler:add_handler([?CONF_ROOT], ?MODULE), - ok = emqx_config_handler:add_handler(?TRANSFORMATIONS_CONF_PATH, ?MODULE), - ok. - --spec remove_handler() -> ok. -remove_handler() -> - ok = emqx_config_handler:remove_handler(?TRANSFORMATIONS_CONF_PATH), - ok = emqx_config_handler:remove_handler([?CONF_ROOT]), - ok. - -load() -> - Transformations = emqx:get_config(?TRANSFORMATIONS_CONF_PATH, []), - lists:foreach( - fun({Pos, Transformation}) -> - ok = emqx_message_transformation_registry:insert(Pos, Transformation) - end, - lists:enumerate(Transformations) - ). - -unload() -> - Transformations = emqx:get_config(?TRANSFORMATIONS_CONF_PATH, []), - lists:foreach( - fun(Transformation) -> - ok = emqx_message_transformation_registry:delete(Transformation) - end, - Transformations - ). - -spec list() -> [transformation()]. 
list() -> - emqx:get_config(?TRANSFORMATIONS_CONF_PATH, []). + emqx_message_transformation_config:list(). -spec reorder([transformation_name()]) -> {ok, _} | {error, _}. reorder(Order) -> - emqx_conf:update( - ?TRANSFORMATIONS_CONF_PATH, - {reorder, Order}, - #{override_to => cluster} - ). + emqx_message_transformation_config:reorder(Order). -spec lookup(transformation_name()) -> {ok, transformation()} | {error, not_found}. lookup(Name) -> - Transformations = emqx:get_config(?TRANSFORMATIONS_CONF_PATH, []), - do_lookup(Name, Transformations). + emqx_message_transformation_config:lookup(Name). -spec insert(transformation()) -> {ok, _} | {error, _}. insert(Transformation) -> - emqx_conf:update( - ?TRANSFORMATIONS_CONF_PATH, - {append, Transformation}, - #{override_to => cluster} - ). + emqx_message_transformation_config:insert(Transformation). -spec update(transformation()) -> {ok, _} | {error, _}. update(Transformation) -> - emqx_conf:update( - ?TRANSFORMATIONS_CONF_PATH, - {update, Transformation}, - #{override_to => cluster} - ). + emqx_message_transformation_config:update(Transformation). -spec delete(transformation_name()) -> {ok, _} | {error, _}. delete(Name) -> - emqx_conf:update( - ?TRANSFORMATIONS_CONF_PATH, - {delete, Name}, - #{override_to => cluster} - ). + emqx_message_transformation_config:delete(Name). %%------------------------------------------------------------------------------ %% Hooks @@ -190,120 +135,6 @@ on_message_publish(Message = #message{topic = Topic, headers = Headers}) -> end end. -%%------------------------------------------------------------------------------ -%% `emqx_config_handler' API -%%------------------------------------------------------------------------------ - -pre_config_update(?TRANSFORMATIONS_CONF_PATH, {append, Transformation}, OldTransformations) -> - Transformations = OldTransformations ++ [Transformation], - {ok, Transformations}; -pre_config_update(?TRANSFORMATIONS_CONF_PATH, {update, Transformation}, OldTransformations) -> - replace(OldTransformations, Transformation); -pre_config_update(?TRANSFORMATIONS_CONF_PATH, {delete, Transformation}, OldTransformations) -> - delete(OldTransformations, Transformation); -pre_config_update(?TRANSFORMATIONS_CONF_PATH, {reorder, Order}, OldTransformations) -> - reorder(OldTransformations, Order); -pre_config_update([?CONF_ROOT], {merge, NewConfig}, OldConfig) -> - #{resulting_config := Config} = prepare_config_merge(NewConfig, OldConfig), - {ok, Config}; -pre_config_update([?CONF_ROOT], {replace, NewConfig}, _OldConfig) -> - {ok, NewConfig}. 
- -post_config_update( - ?TRANSFORMATIONS_CONF_PATH, {append, #{<<"name">> := Name}}, New, _Old, _AppEnvs -) -> - {Pos, Transformation} = fetch_with_index(New, Name), - ok = emqx_message_transformation_registry:insert(Pos, Transformation), - ok; -post_config_update(?TRANSFORMATIONS_CONF_PATH, {update, #{<<"name">> := Name}}, New, Old, _AppEnvs) -> - {_Pos, OldTransformation} = fetch_with_index(Old, Name), - {Pos, NewTransformation} = fetch_with_index(New, Name), - ok = emqx_message_transformation_registry:update(OldTransformation, Pos, NewTransformation), - ok; -post_config_update(?TRANSFORMATIONS_CONF_PATH, {delete, Name}, _New, Old, _AppEnvs) -> - {_Pos, Transformation} = fetch_with_index(Old, Name), - ok = emqx_message_transformation_registry:delete(Transformation), - ok; -post_config_update(?TRANSFORMATIONS_CONF_PATH, {reorder, _Order}, New, _Old, _AppEnvs) -> - ok = emqx_message_transformation_registry:reindex_positions(New), - ok; -post_config_update([?CONF_ROOT], {merge, _}, ResultingConfig, Old, _AppEnvs) -> - #{transformations := ResultingTransformations} = ResultingConfig, - #{transformations := OldTransformations} = Old, - #{added := NewTransformations0} = - emqx_utils:diff_lists( - ResultingTransformations, - OldTransformations, - fun(#{name := N}) -> N end - ), - NewTransformations = - lists:map( - fun(#{name := Name}) -> - {Pos, Transformation} = fetch_with_index(ResultingTransformations, Name), - ok = emqx_message_transformation_registry:insert(Pos, Transformation), - #{name => Name, pos => Pos} - end, - NewTransformations0 - ), - {ok, #{new_transformations => NewTransformations}}; -post_config_update([?CONF_ROOT], {replace, Input}, ResultingConfig, Old, _AppEnvs) -> - #{ - new_transformations := NewTransformations, - changed_transformations := ChangedTransformations0, - deleted_transformations := DeletedTransformations - } = prepare_config_replace(Input, Old), - #{transformations := ResultingTransformations} = ResultingConfig, - #{transformations := OldTransformations} = Old, - lists:foreach( - fun(Name) -> - {_Pos, Transformation} = fetch_with_index(OldTransformations, Name), - ok = emqx_message_transformation_registry:delete(Transformation) - end, - DeletedTransformations - ), - lists:foreach( - fun(Name) -> - {Pos, Transformation} = fetch_with_index(ResultingTransformations, Name), - ok = emqx_message_transformation_registry:insert(Pos, Transformation) - end, - NewTransformations - ), - ChangedTransformations = - lists:map( - fun(Name) -> - {_Pos, OldTransformation} = fetch_with_index(OldTransformations, Name), - {Pos, NewTransformation} = fetch_with_index(ResultingTransformations, Name), - ok = emqx_message_transformation_registry:update( - OldTransformation, Pos, NewTransformation - ), - #{name => Name, pos => Pos} - end, - ChangedTransformations0 - ), - ok = emqx_message_transformation_registry:reindex_positions(ResultingTransformations), - {ok, #{changed_transformations => ChangedTransformations}}. 
- -%%------------------------------------------------------------------------------ -%% `emqx_config_backup' API -%%------------------------------------------------------------------------------ - -import_config(#{?CONF_ROOT_BIN := RawConf0}) -> - Result = emqx_conf:update( - [?CONF_ROOT], - {merge, RawConf0}, - #{override_to => cluster, rawconf_with_defaults => true} - ), - case Result of - {error, Reason} -> - {error, #{root_key => ?CONF_ROOT, reason => Reason}}; - {ok, _} -> - Keys0 = maps:keys(RawConf0), - ChangedPaths = Keys0 -- [<<"transformations">>], - {ok, #{root_key => ?CONF_ROOT, changed => ChangedPaths}} - end; -import_config(_RawConf) -> - {ok, #{root_key => ?CONF_ROOT, changed => []}}. - %%------------------------------------------------------------------------------ %% Internal exports %%------------------------------------------------------------------------------ @@ -386,112 +217,6 @@ map_result(RetainBin, [<<"retain">>]) -> map_result(Rendered, _Key) -> {ok, Rendered}. -replace(OldTransformations, Transformation = #{<<"name">> := Name}) -> - {Found, RevNewTransformations} = - lists:foldl( - fun - (#{<<"name">> := NameIn}, {_FoundIn, Acc}) when NameIn =:= Name -> - {true, [Transformation | Acc]}; - (Val, {FoundIn, Acc}) -> - {FoundIn, [Val | Acc]} - end, - {false, []}, - OldTransformations - ), - case Found of - true -> - {ok, lists:reverse(RevNewTransformations)}; - false -> - {error, not_found} - end. - -delete(OldTransformations, Name) -> - {Found, RevNewTransformations} = - lists:foldl( - fun - (#{<<"name">> := NameIn}, {_FoundIn, Acc}) when NameIn =:= Name -> - {true, Acc}; - (Val, {FoundIn, Acc}) -> - {FoundIn, [Val | Acc]} - end, - {false, []}, - OldTransformations - ), - case Found of - true -> - {ok, lists:reverse(RevNewTransformations)}; - false -> - {error, not_found} - end. - -reorder(Transformations, Order) -> - Context = #{ - not_found => sets:new([{version, 2}]), - duplicated => sets:new([{version, 2}]), - res => [], - seen => sets:new([{version, 2}]) - }, - reorder(Transformations, Order, Context). - -reorder(NotReordered, _Order = [], #{not_found := NotFound0, duplicated := Duplicated0, res := Res}) -> - NotFound = sets:to_list(NotFound0), - Duplicated = sets:to_list(Duplicated0), - case {NotReordered, NotFound, Duplicated} of - {[], [], []} -> - {ok, lists:reverse(Res)}; - {_, _, _} -> - Error = #{ - not_found => NotFound, - duplicated => Duplicated, - not_reordered => [N || #{<<"name">> := N} <- NotReordered] - }, - {error, Error} - end; -reorder(RemainingTransformations, [Name | Rest], Context0 = #{seen := Seen0}) -> - case sets:is_element(Name, Seen0) of - true -> - Context = maps:update_with( - duplicated, fun(S) -> sets:add_element(Name, S) end, Context0 - ), - reorder(RemainingTransformations, Rest, Context); - false -> - case safe_take(Name, RemainingTransformations) of - error -> - Context = maps:update_with( - not_found, fun(S) -> sets:add_element(Name, S) end, Context0 - ), - reorder(RemainingTransformations, Rest, Context); - {ok, {Transformation, Front, Rear}} -> - Context1 = maps:update_with( - seen, fun(S) -> sets:add_element(Name, S) end, Context0 - ), - Context = maps:update_with(res, fun(Vs) -> [Transformation | Vs] end, Context1), - reorder(Front ++ Rear, Rest, Context) - end - end. 
- -fetch_with_index([{Pos, #{name := Name} = Transformation} | _Rest], Name) -> - {Pos, Transformation}; -fetch_with_index([{_, _} | Rest], Name) -> - fetch_with_index(Rest, Name); -fetch_with_index(Transformations, Name) -> - fetch_with_index(lists:enumerate(Transformations), Name). - -safe_take(Name, Transformations) -> - case lists:splitwith(fun(#{<<"name">> := N}) -> N =/= Name end, Transformations) of - {_Front, []} -> - error; - {Front, [Found | Rear]} -> - {ok, {Found, Front, Rear}} - end. - -do_lookup(_Name, _Transformations = []) -> - {error, not_found}; -do_lookup(Name, [#{name := Name} = Transformation | _Rest]) -> - {ok, Transformation}; -do_lookup(Name, [_ | Rest]) -> - do_lookup(Name, Rest). - run_transformations(Transformations, Message) -> Fun = fun(Transformation, MessageAcc) -> #{name := Name} = Transformation, @@ -765,55 +490,3 @@ is_payload_properly_encoded(#message{payload = Payload}) -> error:badarg -> false end. - -%% "Merging" in the context of the transformation array means: -%% * Existing transformations (identified by `name') are left untouched. -%% * No transformations are removed. -%% * New transformations are appended to the existing list. -%% * Existing transformations are not reordered. -prepare_config_merge(NewConfig0, OldConfig) -> - {ImportedRawTransformations, NewConfigNoTransformations} = - case maps:take(<<"transformations">>, NewConfig0) of - error -> - {[], NewConfig0}; - {V, R} -> - {V, R} - end, - OldRawTransformations = maps:get(<<"transformations">>, OldConfig, []), - #{added := NewRawTransformations} = emqx_utils:diff_lists( - ImportedRawTransformations, - OldRawTransformations, - fun(#{<<"name">> := N}) -> N end - ), - Config0 = emqx_utils_maps:deep_merge(OldConfig, NewConfigNoTransformations), - Config = maps:update_with( - <<"transformations">>, - fun(OldVs) -> OldVs ++ NewRawTransformations end, - NewRawTransformations, - Config0 - ), - #{ - new_transformations => NewRawTransformations, - resulting_config => Config - }. - -prepare_config_replace(NewConfig, OldConfig) -> - ImportedRawTransformations = maps:get(<<"transformations">>, NewConfig, []), - OldTransformations = maps:get(transformations, OldConfig, []), - %% Since, at this point, we have an input raw config but a parsed old config, we - %% project both to the to have only their names, and consider common names as changed. - #{ - added := NewTransformations, - removed := DeletedTransformations, - changed := ChangedTransformations0, - identical := ChangedTransformations1 - } = emqx_utils:diff_lists( - lists:map(fun(#{<<"name">> := N}) -> N end, ImportedRawTransformations), - lists:map(fun(#{name := N}) -> N end, OldTransformations), - fun(N) -> N end - ), - #{ - new_transformations => NewTransformations, - changed_transformations => ChangedTransformations0 ++ ChangedTransformations1, - deleted_transformations => DeletedTransformations - }. 
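These helpers move essentially verbatim into the new emqx_message_transformation_config module introduced below. As orientation, a sketch of the contract the internal reorder/2,3 helper implements, assuming three configured transformations named <<"a">>, <<"b">> and <<"c">> (the names are illustrative, not from the patch):

%% reorder(Transformations, Order) over raw configs carrying <<"name">> keys:
%%   Order = [<<"c">>, <<"a">>, <<"b">>]           -> {ok, [C, A, B]}   (full permutation)
%%   Order = [<<"c">>, <<"a">>]                    -> {error, #{not_reordered := [<<"b">>], ...}}
%%   Order = [<<"c">>, <<"a">>, <<"a">>, <<"b">>]  -> {error, #{duplicated := [<<"a">>], ...}}
%%   Order = [<<"x">>, <<"a">>, <<"b">>, <<"c">>]  -> {error, #{not_found := [<<"x">>], ...}}
%% The error map carries all three keys (empty lists for the non-offending ones),
%% and the configuration is left unchanged when an error is returned.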
diff --git a/apps/emqx_message_transformation/src/emqx_message_transformation_app.erl b/apps/emqx_message_transformation/src/emqx_message_transformation_app.erl index 1d44962a6..472d802d4 100644 --- a/apps/emqx_message_transformation/src/emqx_message_transformation_app.erl +++ b/apps/emqx_message_transformation/src/emqx_message_transformation_app.erl @@ -20,15 +20,15 @@ start(_Type, _Args) -> {ok, Sup} = emqx_message_transformation_sup:start_link(), ok = emqx_variform:inject_allowed_module(emqx_message_transformation_bif), - ok = emqx_message_transformation:add_handler(), + ok = emqx_message_transformation_config:add_handler(), ok = emqx_message_transformation:register_hooks(), - ok = emqx_message_transformation:load(), + ok = emqx_message_transformation_config:load(), {ok, Sup}. -spec stop(term()) -> ok. stop(_State) -> - ok = emqx_message_transformation:unload(), + ok = emqx_message_transformation_config:unload(), ok = emqx_message_transformation:unregister_hooks(), - ok = emqx_message_transformation:remove_handler(), + ok = emqx_message_transformation_config:remove_handler(), ok = emqx_variform:erase_allowed_module(emqx_message_transformation_bif), ok. diff --git a/apps/emqx_message_transformation/src/emqx_message_transformation_config.erl b/apps/emqx_message_transformation/src/emqx_message_transformation_config.erl new file mode 100644 index 000000000..3ddd84741 --- /dev/null +++ b/apps/emqx_message_transformation/src/emqx_message_transformation_config.erl @@ -0,0 +1,393 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- +-module(emqx_message_transformation_config). + +%% API +-export([ + add_handler/0, + remove_handler/0, + + load/0, + unload/0, + + list/0, + reorder/1, + lookup/1, + insert/1, + update/1, + delete/1 +]). + +%% `emqx_config_handler' API +-export([pre_config_update/3, post_config_update/5]). + +%% `emqx_config_backup' API +-behaviour(emqx_config_backup). +-export([import_config/1]). + +%%------------------------------------------------------------------------------ +%% Type declarations +%%------------------------------------------------------------------------------ + +-define(CONF_ROOT, message_transformation). +-define(CONF_ROOT_BIN, <<"message_transformation">>). +-define(TRANSFORMATIONS_CONF_PATH, [?CONF_ROOT, transformations]). + +-type transformation_name() :: emqx_message_transformation:transformation_name(). +-type transformation() :: emqx_message_transformation:transformation(). + +%%------------------------------------------------------------------------------ +%% API +%%------------------------------------------------------------------------------ + +-spec add_handler() -> ok. +add_handler() -> + ok = emqx_config_handler:add_handler([?CONF_ROOT], ?MODULE), + ok = emqx_config_handler:add_handler(?TRANSFORMATIONS_CONF_PATH, ?MODULE), + ok. + +-spec remove_handler() -> ok. +remove_handler() -> + ok = emqx_config_handler:remove_handler(?TRANSFORMATIONS_CONF_PATH), + ok = emqx_config_handler:remove_handler([?CONF_ROOT]), + ok. + +load() -> + Transformations = emqx:get_config(?TRANSFORMATIONS_CONF_PATH, []), + lists:foreach( + fun({Pos, Transformation}) -> + ok = emqx_message_transformation_registry:insert(Pos, Transformation) + end, + lists:enumerate(Transformations) + ). 
+ +unload() -> + Transformations = emqx:get_config(?TRANSFORMATIONS_CONF_PATH, []), + lists:foreach( + fun(Transformation) -> + ok = emqx_message_transformation_registry:delete(Transformation) + end, + Transformations + ). + +-spec list() -> [transformation()]. +list() -> + emqx:get_config(?TRANSFORMATIONS_CONF_PATH, []). + +-spec reorder([transformation_name()]) -> + {ok, _} | {error, _}. +reorder(Order) -> + emqx_conf:update( + ?TRANSFORMATIONS_CONF_PATH, + {reorder, Order}, + #{override_to => cluster} + ). + +-spec lookup(transformation_name()) -> {ok, transformation()} | {error, not_found}. +lookup(Name) -> + Transformations = emqx:get_config(?TRANSFORMATIONS_CONF_PATH, []), + do_lookup(Name, Transformations). + +-spec insert(transformation()) -> + {ok, _} | {error, _}. +insert(Transformation) -> + emqx_conf:update( + ?TRANSFORMATIONS_CONF_PATH, + {append, Transformation}, + #{override_to => cluster} + ). + +-spec update(transformation()) -> + {ok, _} | {error, _}. +update(Transformation) -> + emqx_conf:update( + ?TRANSFORMATIONS_CONF_PATH, + {update, Transformation}, + #{override_to => cluster} + ). + +-spec delete(transformation_name()) -> + {ok, _} | {error, _}. +delete(Name) -> + emqx_conf:update( + ?TRANSFORMATIONS_CONF_PATH, + {delete, Name}, + #{override_to => cluster} + ). + +%%------------------------------------------------------------------------------ +%% `emqx_config_handler' API +%%------------------------------------------------------------------------------ + +pre_config_update(?TRANSFORMATIONS_CONF_PATH, {append, Transformation}, OldTransformations) -> + Transformations = OldTransformations ++ [Transformation], + {ok, Transformations}; +pre_config_update(?TRANSFORMATIONS_CONF_PATH, {update, Transformation}, OldTransformations) -> + replace(OldTransformations, Transformation); +pre_config_update(?TRANSFORMATIONS_CONF_PATH, {delete, Transformation}, OldTransformations) -> + delete(OldTransformations, Transformation); +pre_config_update(?TRANSFORMATIONS_CONF_PATH, {reorder, Order}, OldTransformations) -> + reorder(OldTransformations, Order); +pre_config_update([?CONF_ROOT], {merge, NewConfig}, OldConfig) -> + #{resulting_config := Config} = prepare_config_merge(NewConfig, OldConfig), + {ok, Config}; +pre_config_update([?CONF_ROOT], {replace, NewConfig}, _OldConfig) -> + {ok, NewConfig}. 
+ +post_config_update( + ?TRANSFORMATIONS_CONF_PATH, {append, #{<<"name">> := Name}}, New, _Old, _AppEnvs +) -> + {Pos, Transformation} = fetch_with_index(New, Name), + ok = emqx_message_transformation_registry:insert(Pos, Transformation), + ok; +post_config_update(?TRANSFORMATIONS_CONF_PATH, {update, #{<<"name">> := Name}}, New, Old, _AppEnvs) -> + {_Pos, OldTransformation} = fetch_with_index(Old, Name), + {Pos, NewTransformation} = fetch_with_index(New, Name), + ok = emqx_message_transformation_registry:update(OldTransformation, Pos, NewTransformation), + ok; +post_config_update(?TRANSFORMATIONS_CONF_PATH, {delete, Name}, _New, Old, _AppEnvs) -> + {_Pos, Transformation} = fetch_with_index(Old, Name), + ok = emqx_message_transformation_registry:delete(Transformation), + ok; +post_config_update(?TRANSFORMATIONS_CONF_PATH, {reorder, _Order}, New, _Old, _AppEnvs) -> + ok = emqx_message_transformation_registry:reindex_positions(New), + ok; +post_config_update([?CONF_ROOT], {merge, _}, ResultingConfig, Old, _AppEnvs) -> + #{transformations := ResultingTransformations} = ResultingConfig, + #{transformations := OldTransformations} = Old, + #{added := NewTransformations0} = + emqx_utils:diff_lists( + ResultingTransformations, + OldTransformations, + fun(#{name := N}) -> N end + ), + NewTransformations = + lists:map( + fun(#{name := Name}) -> + {Pos, Transformation} = fetch_with_index(ResultingTransformations, Name), + ok = emqx_message_transformation_registry:insert(Pos, Transformation), + #{name => Name, pos => Pos} + end, + NewTransformations0 + ), + {ok, #{new_transformations => NewTransformations}}; +post_config_update([?CONF_ROOT], {replace, Input}, ResultingConfig, Old, _AppEnvs) -> + #{ + new_transformations := NewTransformations, + changed_transformations := ChangedTransformations0, + deleted_transformations := DeletedTransformations + } = prepare_config_replace(Input, Old), + #{transformations := ResultingTransformations} = ResultingConfig, + #{transformations := OldTransformations} = Old, + lists:foreach( + fun(Name) -> + {_Pos, Transformation} = fetch_with_index(OldTransformations, Name), + ok = emqx_message_transformation_registry:delete(Transformation) + end, + DeletedTransformations + ), + lists:foreach( + fun(Name) -> + {Pos, Transformation} = fetch_with_index(ResultingTransformations, Name), + ok = emqx_message_transformation_registry:insert(Pos, Transformation) + end, + NewTransformations + ), + ChangedTransformations = + lists:map( + fun(Name) -> + {_Pos, OldTransformation} = fetch_with_index(OldTransformations, Name), + {Pos, NewTransformation} = fetch_with_index(ResultingTransformations, Name), + ok = emqx_message_transformation_registry:update( + OldTransformation, Pos, NewTransformation + ), + #{name => Name, pos => Pos} + end, + ChangedTransformations0 + ), + ok = emqx_message_transformation_registry:reindex_positions(ResultingTransformations), + {ok, #{changed_transformations => ChangedTransformations}}. 
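%% To trace one concrete path through the handlers above (a sketch; the exact
%% wrapping of emqx_conf:update/3 return values is elided):
%%
%%   emqx_message_transformation_config:insert(Transformation)
%%     -> emqx_conf:update(?TRANSFORMATIONS_CONF_PATH, {append, Transformation}, ...)
%%     -> pre_config_update/3 appends it to the existing list
%%     -> post_config_update/5 locates it with fetch_with_index/2 and calls
%%        emqx_message_transformation_registry:insert(Pos, Transformation),
%%        so the registry position always matches the index in the config list.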
+ +%%------------------------------------------------------------------------------ +%% `emqx_config_backup' API +%%------------------------------------------------------------------------------ + +import_config(#{?CONF_ROOT_BIN := RawConf0}) -> + Result = emqx_conf:update( + [?CONF_ROOT], + {merge, RawConf0}, + #{override_to => cluster, rawconf_with_defaults => true} + ), + case Result of + {error, Reason} -> + {error, #{root_key => ?CONF_ROOT, reason => Reason}}; + {ok, _} -> + Keys0 = maps:keys(RawConf0), + ChangedPaths = Keys0 -- [<<"transformations">>], + {ok, #{root_key => ?CONF_ROOT, changed => ChangedPaths}} + end; +import_config(_RawConf) -> + {ok, #{root_key => ?CONF_ROOT, changed => []}}. + +%%------------------------------------------------------------------------------ +%% Internal fns +%%------------------------------------------------------------------------------ + +replace(OldTransformations, Transformation = #{<<"name">> := Name}) -> + {Found, RevNewTransformations} = + lists:foldl( + fun + (#{<<"name">> := NameIn}, {_FoundIn, Acc}) when NameIn =:= Name -> + {true, [Transformation | Acc]}; + (Val, {FoundIn, Acc}) -> + {FoundIn, [Val | Acc]} + end, + {false, []}, + OldTransformations + ), + case Found of + true -> + {ok, lists:reverse(RevNewTransformations)}; + false -> + {error, not_found} + end. + +delete(OldTransformations, Name) -> + {Found, RevNewTransformations} = + lists:foldl( + fun + (#{<<"name">> := NameIn}, {_FoundIn, Acc}) when NameIn =:= Name -> + {true, Acc}; + (Val, {FoundIn, Acc}) -> + {FoundIn, [Val | Acc]} + end, + {false, []}, + OldTransformations + ), + case Found of + true -> + {ok, lists:reverse(RevNewTransformations)}; + false -> + {error, not_found} + end. + +reorder(Transformations, Order) -> + Context = #{ + not_found => sets:new([{version, 2}]), + duplicated => sets:new([{version, 2}]), + res => [], + seen => sets:new([{version, 2}]) + }, + reorder(Transformations, Order, Context). + +reorder(NotReordered, _Order = [], #{not_found := NotFound0, duplicated := Duplicated0, res := Res}) -> + NotFound = sets:to_list(NotFound0), + Duplicated = sets:to_list(Duplicated0), + case {NotReordered, NotFound, Duplicated} of + {[], [], []} -> + {ok, lists:reverse(Res)}; + {_, _, _} -> + Error = #{ + not_found => NotFound, + duplicated => Duplicated, + not_reordered => [N || #{<<"name">> := N} <- NotReordered] + }, + {error, Error} + end; +reorder(RemainingTransformations, [Name | Rest], Context0 = #{seen := Seen0}) -> + case sets:is_element(Name, Seen0) of + true -> + Context = maps:update_with( + duplicated, fun(S) -> sets:add_element(Name, S) end, Context0 + ), + reorder(RemainingTransformations, Rest, Context); + false -> + case safe_take(Name, RemainingTransformations) of + error -> + Context = maps:update_with( + not_found, fun(S) -> sets:add_element(Name, S) end, Context0 + ), + reorder(RemainingTransformations, Rest, Context); + {ok, {Transformation, Front, Rear}} -> + Context1 = maps:update_with( + seen, fun(S) -> sets:add_element(Name, S) end, Context0 + ), + Context = maps:update_with(res, fun(Vs) -> [Transformation | Vs] end, Context1), + reorder(Front ++ Rear, Rest, Context) + end + end. + +fetch_with_index([{Pos, #{name := Name} = Transformation} | _Rest], Name) -> + {Pos, Transformation}; +fetch_with_index([{_, _} | Rest], Name) -> + fetch_with_index(Rest, Name); +fetch_with_index(Transformations, Name) -> + fetch_with_index(lists:enumerate(Transformations), Name). 
+ +safe_take(Name, Transformations) -> + case lists:splitwith(fun(#{<<"name">> := N}) -> N =/= Name end, Transformations) of + {_Front, []} -> + error; + {Front, [Found | Rear]} -> + {ok, {Found, Front, Rear}} + end. + +do_lookup(_Name, _Transformations = []) -> + {error, not_found}; +do_lookup(Name, [#{name := Name} = Transformation | _Rest]) -> + {ok, Transformation}; +do_lookup(Name, [_ | Rest]) -> + do_lookup(Name, Rest). + +%% "Merging" in the context of the transformation array means: +%% * Existing transformations (identified by `name') are left untouched. +%% * No transformations are removed. +%% * New transformations are appended to the existing list. +%% * Existing transformations are not reordered. +prepare_config_merge(NewConfig0, OldConfig) -> + {ImportedRawTransformations, NewConfigNoTransformations} = + case maps:take(<<"transformations">>, NewConfig0) of + error -> + {[], NewConfig0}; + {V, R} -> + {V, R} + end, + OldRawTransformations = maps:get(<<"transformations">>, OldConfig, []), + #{added := NewRawTransformations} = emqx_utils:diff_lists( + ImportedRawTransformations, + OldRawTransformations, + fun(#{<<"name">> := N}) -> N end + ), + Config0 = emqx_utils_maps:deep_merge(OldConfig, NewConfigNoTransformations), + Config = maps:update_with( + <<"transformations">>, + fun(OldVs) -> OldVs ++ NewRawTransformations end, + NewRawTransformations, + Config0 + ), + #{ + new_transformations => NewRawTransformations, + resulting_config => Config + }. + +prepare_config_replace(NewConfig, OldConfig) -> + ImportedRawTransformations = maps:get(<<"transformations">>, NewConfig, []), + OldTransformations = maps:get(transformations, OldConfig, []), + %% Since, at this point, we have an input raw config but a parsed old config, we + %% project both to the to have only their names, and consider common names as changed. + #{ + added := NewTransformations, + removed := DeletedTransformations, + changed := ChangedTransformations0, + identical := ChangedTransformations1 + } = emqx_utils:diff_lists( + lists:map(fun(#{<<"name">> := N}) -> N end, ImportedRawTransformations), + lists:map(fun(#{name := N}) -> N end, OldTransformations), + fun(N) -> N end + ), + #{ + new_transformations => NewTransformations, + changed_transformations => ChangedTransformations0 ++ ChangedTransformations1, + deleted_transformations => DeletedTransformations + }. From e54cf2f2180a0bf93a954942cbd8299f507093f7 Mon Sep 17 00:00:00 2001 From: Thales Macedo Garitezi Date: Mon, 10 Jun 2024 10:49:39 -0300 Subject: [PATCH 18/33] refactor: use option to skip rendering as string --- .../src/emqx_message_transformation.erl | 9 +++++++- .../src/emqx_message_transformation_bif.erl | 2 +- apps/emqx_utils/src/emqx_variform.erl | 21 +++++++++---------- 3 files changed, 19 insertions(+), 13 deletions(-) diff --git a/apps/emqx_message_transformation/src/emqx_message_transformation.erl b/apps/emqx_message_transformation/src/emqx_message_transformation.erl index f39ddf22f..84af327f6 100644 --- a/apps/emqx_message_transformation/src/emqx_message_transformation.erl +++ b/apps/emqx_message_transformation/src/emqx_message_transformation.erl @@ -160,7 +160,14 @@ eval_operation(Operation, Transformation, Context) -> -spec eval_variform([binary(), ...], _, eval_context()) -> {ok, rendered_value()} | {error, term()}. 
eval_variform(K, V, Context) -> - case emqx_variform:render(V, Context) of + Opts = + case K of + [<<"payload">> | _] -> + #{eval_as_string => false}; + _ -> + #{} + end, + case emqx_variform:render(V, Context, Opts) of {error, Reason} -> {error, Reason}; {ok, Rendered} -> diff --git a/apps/emqx_message_transformation/src/emqx_message_transformation_bif.erl b/apps/emqx_message_transformation/src/emqx_message_transformation_bif.erl index 0eaf46578..96085c341 100644 --- a/apps/emqx_message_transformation/src/emqx_message_transformation_bif.erl +++ b/apps/emqx_message_transformation/src/emqx_message_transformation_bif.erl @@ -28,7 +28,7 @@ json_encode(X) -> json_decode(JSON) -> case emqx_utils_json:safe_decode(JSON, [return_maps]) of {ok, X} -> - emqx_variform:skip_stringification(X); + X; {error, Reason} -> throw(#{reason => json_decode_failure, detail => Reason}) end. diff --git a/apps/emqx_utils/src/emqx_variform.erl b/apps/emqx_utils/src/emqx_variform.erl index e92be06dd..7a0bc8118 100644 --- a/apps/emqx_utils/src/emqx_variform.erl +++ b/apps/emqx_utils/src/emqx_variform.erl @@ -31,7 +31,6 @@ -export([render/2, render/3]). -export([compile/1, decompile/1]). --export([skip_stringification/1]). -export_type([compiled/0]). @@ -44,7 +43,6 @@ ). -define(IS_EMPTY(X), (X =:= <<>> orelse X =:= "" orelse X =:= undefined)). --define(SKIP_STRINGIFICATION, {?MODULE, '__skip_stringification__'}). %% @doc Render a variform expression with bindings. %% A variform expression is a template string which supports variable substitution @@ -71,7 +69,7 @@ render(Expression, Bindings) -> render(Expression, Bindings, #{}). render(#{form := Form}, Bindings, Opts) -> - eval_as_string(Form, Bindings, Opts); + eval_render(Form, Bindings, Opts); render(Expression, Bindings, Opts) -> case compile(Expression) of {ok, Compiled} -> @@ -80,9 +78,16 @@ render(Expression, Bindings, Opts) -> {error, Reason} end. -eval_as_string(Expr, Bindings, _Opts) -> +eval_render(Expr, Bindings, Opts) -> + EvalAsStr = maps:get(eval_as_string, Opts, true), try - {ok, return_str(eval(Expr, Bindings, #{}))} + Result = eval(Expr, Bindings, #{}), + case EvalAsStr of + true -> + {ok, return_str(Result)}; + false -> + {ok, Result} + end catch throw:Reason -> {error, Reason}; @@ -95,9 +100,6 @@ return_str(Str) when is_binary(Str) -> Str; return_str(Num) when is_integer(Num) -> integer_to_binary(Num); return_str(Num) when is_float(Num) -> float_to_binary(Num, [{decimals, 10}, compact]); return_str(Atom) when is_atom(Atom) -> atom_to_binary(Atom, utf8); -%% For usage by other modules (e.g.: message transformation) -return_str({?SKIP_STRINGIFICATION, X}) -> - X; return_str(Other) -> throw(#{ reason => bad_return, @@ -105,9 +107,6 @@ return_str(Other) -> got => Other }). -skip_stringification(X) -> - {?SKIP_STRINGIFICATION, X}. - %% @doc Compile varifom expression. -spec compile(string() | binary() | compiled()) -> {ok, compiled()} | {error, any()}. 
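%% Sketch of the new option (illustrative expressions, not captured output; the
%% `maps' builtins are the ones exercised by the parser tests earlier in this
%% series):
%%
%% Default rendering coerces scalar results to binaries and rejects the rest:
%%   {error, #{reason := bad_return}} =
%%       emqx_variform:render(<<"maps.new()">>, #{}),
%% With eval_as_string => false the evaluated term is returned untouched, which
%% is what eval_variform/3 in emqx_message_transformation relies on for
%% `payload' keys:
%%   {ok, #{}} =
%%       emqx_variform:render(<<"maps.new()">>, #{}, #{eval_as_string => false}).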
compile(#{form := _} = Compiled) -> From 8c7a7cf9db8f65b519fdcceef8d7908adcb424e7 Mon Sep 17 00:00:00 2001 From: Thales Macedo Garitezi Date: Mon, 10 Jun 2024 11:08:18 -0300 Subject: [PATCH 19/33] refactor: apply review remarks --- .../src/emqx_message_transformation.erl | 39 ++++++++++--------- .../emqx_message_transformation_registry.erl | 38 ++++++++++++------ 2 files changed, 48 insertions(+), 29 deletions(-) diff --git a/apps/emqx_message_transformation/src/emqx_message_transformation.erl b/apps/emqx_message_transformation/src/emqx_message_transformation.erl index 84af327f6..0ffb9f606 100644 --- a/apps/emqx_message_transformation/src/emqx_message_transformation.erl +++ b/apps/emqx_message_transformation/src/emqx_message_transformation.erl @@ -112,27 +112,12 @@ unregister_hooks() -> -spec on_message_publish(emqx_types:message()) -> {ok, emqx_types:message()} | {stop, emqx_types:message()}. -on_message_publish(Message = #message{topic = Topic, headers = Headers}) -> +on_message_publish(Message = #message{topic = Topic}) -> case emqx_message_transformation_registry:matching_transformations(Topic) of [] -> ok; Transformations -> - case run_transformations(Transformations, Message) of - #message{} = FinalMessage -> - emqx_metrics:inc('messages.transformation_succeeded'), - {ok, FinalMessage}; - drop -> - emqx_metrics:inc('messages.transformation_failed'), - {stop, Message#message{headers = Headers#{allow_publish => false}}}; - disconnect -> - emqx_metrics:inc('messages.transformation_failed'), - {stop, Message#message{ - headers = Headers#{ - allow_publish => false, - should_disconnect => true - } - }} - end + run_transformations(Transformations, Message) end. %%------------------------------------------------------------------------------ @@ -224,7 +209,25 @@ map_result(RetainBin, [<<"retain">>]) -> map_result(Rendered, _Key) -> {ok, Rendered}. -run_transformations(Transformations, Message) -> +run_transformations(Transformations, Message = #message{headers = Headers}) -> + case do_run_transformations(Transformations, Message) of + #message{} = FinalMessage -> + emqx_metrics:inc('messages.transformation_succeeded'), + {ok, FinalMessage}; + drop -> + emqx_metrics:inc('messages.transformation_failed'), + {stop, Message#message{headers = Headers#{allow_publish => false}}}; + disconnect -> + emqx_metrics:inc('messages.transformation_failed'), + {stop, Message#message{ + headers = Headers#{ + allow_publish => false, + should_disconnect => true + } + }} + end. + +do_run_transformations(Transformations, Message) -> Fun = fun(Transformation, MessageAcc) -> #{name := Name} = Transformation, emqx_message_transformation_registry:inc_matched(Name), diff --git a/apps/emqx_message_transformation/src/emqx_message_transformation_registry.erl b/apps/emqx_message_transformation/src/emqx_message_transformation_registry.erl index dd692a55c..0e933f0e7 100644 --- a/apps/emqx_message_transformation/src/emqx_message_transformation_registry.erl +++ b/apps/emqx_message_transformation/src/emqx_message_transformation_registry.erl @@ -94,18 +94,34 @@ delete(Transformation) -> %% @doc Returns a list of matching transformation names, sorted by their configuration order. -spec matching_transformations(emqx_types:topic()) -> [transformation()]. 
matching_transformations(Topic) -> - Transformations0 = [ - {Pos, Transformation} - || M <- emqx_topic_index:matches(Topic, ?TRANSFORMATION_TOPIC_INDEX, [unique]), - [Pos] <- [emqx_topic_index:get_record(M, ?TRANSFORMATION_TOPIC_INDEX)], - {ok, Transformation} <- [ - lookup(emqx_topic_index:get_id(M)) - ] - ], - Transformations1 = lists:sort( - fun({Pos1, _V1}, {Pos2, _V2}) -> Pos1 =< Pos2 end, Transformations0 + Transformations0 = + lists:flatmap( + fun(M) -> + case emqx_topic_index:get_record(M, ?TRANSFORMATION_TOPIC_INDEX) of + [Pos] -> + [{Pos, emqx_topic_index:get_id(M)}]; + _ -> + [] + end + end, + emqx_topic_index:matches(Topic, ?TRANSFORMATION_TOPIC_INDEX, [unique]) + ), + Transformations1 = + lists:flatmap( + fun({Pos, Id}) -> + case lookup(Id) of + {ok, Transformation} -> + [{Pos, Transformation}]; + _ -> + [] + end + end, + Transformations0 + ), + Transformations2 = lists:sort( + fun({Pos1, _V1}, {Pos2, _V2}) -> Pos1 =< Pos2 end, Transformations1 ), - lists:map(fun({_Pos, V}) -> V end, Transformations1). + lists:map(fun({_Pos, V}) -> V end, Transformations2). -spec metrics_worker_spec() -> supervisor:child_spec(). metrics_worker_spec() -> From 5629fe60c12d0d7e94f3e95a6c05ee8b6a99ad55 Mon Sep 17 00:00:00 2001 From: Thales Macedo Garitezi Date: Mon, 10 Jun 2024 12:12:29 -0300 Subject: [PATCH 20/33] refactor: index positions rather than names --- .../emqx_message_transformation_config.erl | 22 ++-- .../emqx_message_transformation_registry.erl | 107 +++++++++++------- 2 files changed, 75 insertions(+), 54 deletions(-) diff --git a/apps/emqx_message_transformation/src/emqx_message_transformation_config.erl b/apps/emqx_message_transformation/src/emqx_message_transformation_config.erl index 3ddd84741..ea7f11fdf 100644 --- a/apps/emqx_message_transformation/src/emqx_message_transformation_config.erl +++ b/apps/emqx_message_transformation/src/emqx_message_transformation_config.erl @@ -65,10 +65,10 @@ load() -> unload() -> Transformations = emqx:get_config(?TRANSFORMATIONS_CONF_PATH, []), lists:foreach( - fun(Transformation) -> - ok = emqx_message_transformation_registry:delete(Transformation) + fun({Pos, Transformation}) -> + ok = emqx_message_transformation_registry:delete(Transformation, Pos) end, - Transformations + lists:enumerate(Transformations) ). -spec list() -> [transformation()]. 
@@ -147,11 +147,11 @@ post_config_update(?TRANSFORMATIONS_CONF_PATH, {update, #{<<"name">> := Name}}, ok = emqx_message_transformation_registry:update(OldTransformation, Pos, NewTransformation), ok; post_config_update(?TRANSFORMATIONS_CONF_PATH, {delete, Name}, _New, Old, _AppEnvs) -> - {_Pos, Transformation} = fetch_with_index(Old, Name), - ok = emqx_message_transformation_registry:delete(Transformation), + {Pos, Transformation} = fetch_with_index(Old, Name), + ok = emqx_message_transformation_registry:delete(Transformation, Pos), ok; -post_config_update(?TRANSFORMATIONS_CONF_PATH, {reorder, _Order}, New, _Old, _AppEnvs) -> - ok = emqx_message_transformation_registry:reindex_positions(New), +post_config_update(?TRANSFORMATIONS_CONF_PATH, {reorder, _Order}, New, Old, _AppEnvs) -> + ok = emqx_message_transformation_registry:reindex_positions(New, Old), ok; post_config_update([?CONF_ROOT], {merge, _}, ResultingConfig, Old, _AppEnvs) -> #{transformations := ResultingTransformations} = ResultingConfig, @@ -182,8 +182,8 @@ post_config_update([?CONF_ROOT], {replace, Input}, ResultingConfig, Old, _AppEnv #{transformations := OldTransformations} = Old, lists:foreach( fun(Name) -> - {_Pos, Transformation} = fetch_with_index(OldTransformations, Name), - ok = emqx_message_transformation_registry:delete(Transformation) + {Pos, Transformation} = fetch_with_index(OldTransformations, Name), + ok = emqx_message_transformation_registry:delete(Transformation, Pos) end, DeletedTransformations ), @@ -206,7 +206,9 @@ post_config_update([?CONF_ROOT], {replace, Input}, ResultingConfig, Old, _AppEnv end, ChangedTransformations0 ), - ok = emqx_message_transformation_registry:reindex_positions(ResultingTransformations), + ok = emqx_message_transformation_registry:reindex_positions( + ResultingTransformations, OldTransformations + ), {ok, #{changed_transformations => ChangedTransformations}}. %%------------------------------------------------------------------------------ diff --git a/apps/emqx_message_transformation/src/emqx_message_transformation_registry.erl b/apps/emqx_message_transformation/src/emqx_message_transformation_registry.erl index 0e933f0e7..15e06ec7d 100644 --- a/apps/emqx_message_transformation/src/emqx_message_transformation_registry.erl +++ b/apps/emqx_message_transformation/src/emqx_message_transformation_registry.erl @@ -10,8 +10,8 @@ lookup/1, insert/2, update/3, - delete/1, - reindex_positions/1, + delete/2, + reindex_positions/2, matching_transformations/1, @@ -52,10 +52,13 @@ -type transformation() :: #{atom() => term()}. -type position_index() :: pos_integer(). --record(reindex_positions, {transformations :: [transformation()]}). +-record(reindex_positions, { + new_transformations :: [transformation()], + old_transformations :: [transformation()] +}). -record(insert, {pos :: position_index(), transformation :: transformation()}). -record(update, {old :: transformation(), pos :: position_index(), new :: transformation()}). --record(delete, {transformation :: transformation()}). +-record(delete, {transformation :: transformation(), pos :: position_index()}). %%------------------------------------------------------------------------------ %% API @@ -75,9 +78,16 @@ lookup(Name) -> {ok, Transformation} end. --spec reindex_positions([transformation()]) -> ok. -reindex_positions(Transformations) -> - gen_server:call(?MODULE, #reindex_positions{transformations = Transformations}, infinity). +-spec reindex_positions([transformation()], [transformation()]) -> ok. 
+reindex_positions(NewTransformations, OldTransformations) -> + gen_server:call( + ?MODULE, + #reindex_positions{ + new_transformations = NewTransformations, + old_transformations = OldTransformations + }, + infinity + ). -spec insert(position_index(), transformation()) -> ok. insert(Pos, Transformation) -> @@ -87,9 +97,9 @@ insert(Pos, Transformation) -> update(Old, Pos, New) -> gen_server:call(?MODULE, #update{old = Old, pos = Pos, new = New}, infinity). --spec delete(transformation()) -> ok. -delete(Transformation) -> - gen_server:call(?MODULE, #delete{transformation = Transformation}, infinity). +-spec delete(transformation(), position_index()) -> ok. +delete(Transformation, Pos) -> + gen_server:call(?MODULE, #delete{transformation = Transformation, pos = Pos}, infinity). %% @doc Returns a list of matching transformation names, sorted by their configuration order. -spec matching_transformations(emqx_types:topic()) -> [transformation()]. @@ -98,30 +108,25 @@ matching_transformations(Topic) -> lists:flatmap( fun(M) -> case emqx_topic_index:get_record(M, ?TRANSFORMATION_TOPIC_INDEX) of - [Pos] -> - [{Pos, emqx_topic_index:get_id(M)}]; + [Name] -> + [Name]; _ -> [] end end, emqx_topic_index:matches(Topic, ?TRANSFORMATION_TOPIC_INDEX, [unique]) ), - Transformations1 = - lists:flatmap( - fun({Pos, Id}) -> - case lookup(Id) of - {ok, Transformation} -> - [{Pos, Transformation}]; - _ -> - [] - end - end, - Transformations0 - ), - Transformations2 = lists:sort( - fun({Pos1, _V1}, {Pos2, _V2}) -> Pos1 =< Pos2 end, Transformations1 - ), - lists:map(fun({_Pos, V}) -> V end, Transformations2). + lists:flatmap( + fun(Name) -> + case lookup(Name) of + {ok, Transformation} -> + [Transformation]; + _ -> + [] + end + end, + Transformations0 + ). -spec metrics_worker_spec() -> supervisor:child_spec(). metrics_worker_spec() -> @@ -152,8 +157,15 @@ init(_) -> State = #{}, {ok, State}. -handle_call(#reindex_positions{transformations = Transformations}, _From, State) -> - do_reindex_positions(Transformations), +handle_call( + #reindex_positions{ + new_transformations = NewTransformations, + old_transformations = OldTransformations + }, + _From, + State +) -> + do_reindex_positions(NewTransformations, OldTransformations), {reply, ok, State}; handle_call(#insert{pos = Pos, transformation = Transformation}, _From, State) -> do_insert(Pos, Transformation), @@ -161,8 +173,8 @@ handle_call(#insert{pos = Pos, transformation = Transformation}, _From, State) - handle_call(#update{old = OldTransformation, pos = Pos, new = NewTransformation}, _From, State) -> ok = do_update(OldTransformation, Pos, NewTransformation), {reply, ok, State}; -handle_call(#delete{transformation = Transformation}, _From, State) -> - do_delete(Transformation), +handle_call(#delete{transformation = Transformation, pos = Pos}, _From, State) -> + do_delete(Transformation, Pos), {reply, ok, State}; handle_call(_Call, _From, State) -> {reply, ignored, State}. @@ -181,7 +193,14 @@ create_tables() -> _ = emqx_utils_ets:new(?TRANSFORMATION_TAB, [public, ordered_set, {read_concurrency, true}]), ok. 
-do_reindex_positions(Transformations) -> +do_reindex_positions(NewTransformations, OldTransformations) -> + lists:foreach( + fun({Pos, Transformation}) -> + #{topics := Topics} = Transformation, + delete_topic_index(Pos, Topics) + end, + lists:enumerate(OldTransformations) + ), lists:foreach( fun({Pos, Transformation}) -> #{ @@ -189,9 +208,9 @@ do_reindex_positions(Transformations) -> topics := Topics } = Transformation, do_insert_into_tab(Name, Transformation, Pos), - update_topic_index(Name, Pos, Topics) + upsert_topic_index(Name, Pos, Topics) end, - lists:enumerate(Transformations) + lists:enumerate(NewTransformations) ). do_insert(Pos, Transformation) -> @@ -202,7 +221,7 @@ do_insert(Pos, Transformation) -> } = Transformation, maybe_create_metrics(Name), do_insert_into_tab(Name, Transformation, Pos), - Enabled andalso update_topic_index(Name, Pos, Topics), + Enabled andalso upsert_topic_index(Name, Pos, Topics), ok. do_update(OldTransformation, Pos, NewTransformation) -> @@ -214,17 +233,17 @@ do_update(OldTransformation, Pos, NewTransformation) -> } = NewTransformation, maybe_create_metrics(Name), do_insert_into_tab(Name, NewTransformation, Pos), - delete_topic_index(Name, OldTopics), - Enabled andalso update_topic_index(Name, Pos, NewTopics), + delete_topic_index(Pos, OldTopics), + Enabled andalso upsert_topic_index(Name, Pos, NewTopics), ok. -do_delete(Transformation) -> +do_delete(Transformation, Pos) -> #{ name := Name, topics := Topics } = Transformation, ets:delete(?TRANSFORMATION_TAB, Name), - delete_topic_index(Name, Topics), + delete_topic_index(Pos, Topics), drop_metrics(Name), ok. @@ -244,18 +263,18 @@ maybe_create_metrics(Name) -> drop_metrics(Name) -> ok = emqx_metrics_worker:clear_metrics(?METRIC_NAME, Name). -update_topic_index(Name, Pos, Topics) -> +upsert_topic_index(Name, Pos, Topics) -> lists:foreach( fun(Topic) -> - true = emqx_topic_index:insert(Topic, Name, Pos, ?TRANSFORMATION_TOPIC_INDEX) + true = emqx_topic_index:insert(Topic, Pos, Name, ?TRANSFORMATION_TOPIC_INDEX) end, Topics ). -delete_topic_index(Name, Topics) -> +delete_topic_index(Pos, Topics) -> lists:foreach( fun(Topic) -> - true = emqx_topic_index:delete(Topic, Name, ?TRANSFORMATION_TOPIC_INDEX) + true = emqx_topic_index:delete(Topic, Pos, ?TRANSFORMATION_TOPIC_INDEX) end, Topics ). 
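
The patch above re-keys the transformation topic index: the configuration position becomes the index id and the transformation name becomes the stored record, which is why delete and reindex now need the position and why reordering first deletes the old position keys before inserting the new ones. A minimal sketch (not from the patch) of the re-keyed index usage, assuming the registry's existing ?TRANSFORMATION_TOPIC_INDEX table and made-up topics and names:

    positional_index_sketch() ->
        %% Index id = configuration position, record = transformation name.
        true = emqx_topic_index:insert(<<"t/#">>, 1, <<"first">>, ?TRANSFORMATION_TOPIC_INDEX),
        true = emqx_topic_index:insert(<<"t/1">>, 2, <<"second">>, ?TRANSFORMATION_TOPIC_INDEX),
        %% Removal (and hence reordering) is keyed by position as well:
        true = emqx_topic_index:delete(<<"t/#">>, 1, ?TRANSFORMATION_TOPIC_INDEX),
        %% Matching yields the stored names; lookup/1 then resolves each name to
        %% its transformation, and with positions as keys the matches appear to
        %% come back already in configuration order, which is what lets
        %% matching_transformations/1 drop its explicit sort.
        [
            Name
         || M <- emqx_topic_index:matches(<<"t/1">>, ?TRANSFORMATION_TOPIC_INDEX, [unique]),
            [Name] <- [emqx_topic_index:get_record(M, ?TRANSFORMATION_TOPIC_INDEX)]
        ].
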
From f6680ed0639147999afb85fb3eccbc557907cf5f Mon Sep 17 00:00:00 2001 From: Thales Macedo Garitezi Date: Mon, 10 Jun 2024 13:41:23 -0300 Subject: [PATCH 21/33] chore: bump OTP to 26.2.5-2 https://github.com/emqx/otp/pull/54 --- .../docker-compose-kafka.yaml | 2 +- .ci/docker-compose-file/docker-compose.yaml | 2 +- .github/workflows/_pr_entrypoint.yaml | 16 ++++++++-------- .github/workflows/_push-entrypoint.yaml | 16 ++++++++-------- .../workflows/build_and_push_docker_images.yaml | 4 ++-- .github/workflows/build_packages.yaml | 4 ++-- .github/workflows/build_packages_cron.yaml | 6 +++--- .github/workflows/build_slim_packages.yaml | 12 ++++++------ .github/workflows/codeql.yaml | 2 +- .github/workflows/performance_test.yaml | 2 +- .tool-versions | 2 +- Makefile | 2 +- build | 4 ++-- deploy/docker/Dockerfile | 2 +- scripts/buildx.sh | 4 ++-- scripts/pr-sanity-checks.sh | 4 ++-- scripts/relup-test/start-relup-test-cluster.sh | 2 +- 17 files changed, 43 insertions(+), 43 deletions(-) diff --git a/.ci/docker-compose-file/docker-compose-kafka.yaml b/.ci/docker-compose-file/docker-compose-kafka.yaml index 48bd85ac1..9a2ca988c 100644 --- a/.ci/docker-compose-file/docker-compose-kafka.yaml +++ b/.ci/docker-compose-file/docker-compose-kafka.yaml @@ -18,7 +18,7 @@ services: - /tmp/emqx-ci/emqx-shared-secret:/var/lib/secret kdc: hostname: kdc.emqx.net - image: ghcr.io/emqx/emqx-builder/5.3-7:1.15.7-26.2.5-1-ubuntu22.04 + image: ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-ubuntu22.04 container_name: kdc.emqx.net expose: - 88 # kdc diff --git a/.ci/docker-compose-file/docker-compose.yaml b/.ci/docker-compose-file/docker-compose.yaml index 212ff78ed..2366f383a 100644 --- a/.ci/docker-compose-file/docker-compose.yaml +++ b/.ci/docker-compose-file/docker-compose.yaml @@ -3,7 +3,7 @@ version: '3.9' services: erlang: container_name: erlang - image: ${DOCKER_CT_RUNNER_IMAGE:-ghcr.io/emqx/emqx-builder/5.3-7:1.15.7-26.2.5-1-ubuntu22.04} + image: ${DOCKER_CT_RUNNER_IMAGE:-ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-ubuntu22.04} env_file: - credentials.env - conf.env diff --git a/.github/workflows/_pr_entrypoint.yaml b/.github/workflows/_pr_entrypoint.yaml index 9f480d220..2994c1b03 100644 --- a/.github/workflows/_pr_entrypoint.yaml +++ b/.github/workflows/_pr_entrypoint.yaml @@ -17,16 +17,16 @@ env: jobs: sanity-checks: runs-on: ubuntu-22.04 - container: "ghcr.io/emqx/emqx-builder/5.3-7:1.15.7-26.2.5-1-ubuntu22.04" + container: "ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-ubuntu22.04" outputs: ct-matrix: ${{ steps.matrix.outputs.ct-matrix }} ct-host: ${{ steps.matrix.outputs.ct-host }} ct-docker: ${{ steps.matrix.outputs.ct-docker }} version-emqx: ${{ steps.matrix.outputs.version-emqx }} version-emqx-enterprise: ${{ steps.matrix.outputs.version-emqx-enterprise }} - builder: "ghcr.io/emqx/emqx-builder/5.3-7:1.15.7-26.2.5-1-ubuntu22.04" - builder_vsn: "5.3-7" - otp_vsn: "26.2.5-1" + builder: "ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-ubuntu22.04" + builder_vsn: "5.3-8" + otp_vsn: "26.2.5-2" elixir_vsn: "1.15.7" permissions: @@ -96,13 +96,13 @@ jobs: MATRIX="$(echo "${APPS}" | jq -c ' [ (.[] | select(.profile == "emqx") | . + { - builder: "5.3-7", - otp: "26.2.5-1", + builder: "5.3-8", + otp: "26.2.5-2", elixir: "1.15.7" }), (.[] | select(.profile == "emqx-enterprise") | . 
+ { - builder: "5.3-7", - otp: ["26.2.5-1"][], + builder: "5.3-8", + otp: ["26.2.5-2"][], elixir: "1.15.7" }) ] diff --git a/.github/workflows/_push-entrypoint.yaml b/.github/workflows/_push-entrypoint.yaml index 9c79eb42e..31ce9abd7 100644 --- a/.github/workflows/_push-entrypoint.yaml +++ b/.github/workflows/_push-entrypoint.yaml @@ -24,7 +24,7 @@ env: jobs: prepare: runs-on: ubuntu-22.04 - container: 'ghcr.io/emqx/emqx-builder/5.3-7:1.15.7-26.2.5-1-ubuntu22.04' + container: 'ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-ubuntu22.04' outputs: profile: ${{ steps.parse-git-ref.outputs.profile }} release: ${{ steps.parse-git-ref.outputs.release }} @@ -32,9 +32,9 @@ jobs: ct-matrix: ${{ steps.matrix.outputs.ct-matrix }} ct-host: ${{ steps.matrix.outputs.ct-host }} ct-docker: ${{ steps.matrix.outputs.ct-docker }} - builder: 'ghcr.io/emqx/emqx-builder/5.3-7:1.15.7-26.2.5-1-ubuntu22.04' - builder_vsn: '5.3-7' - otp_vsn: '26.2.5-1' + builder: 'ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-ubuntu22.04' + builder_vsn: '5.3-8' + otp_vsn: '26.2.5-2' elixir_vsn: '1.15.7' permissions: @@ -66,13 +66,13 @@ jobs: MATRIX="$(echo "${APPS}" | jq -c ' [ (.[] | select(.profile == "emqx") | . + { - builder: "5.3-7", - otp: "26.2.5-1", + builder: "5.3-8", + otp: "26.2.5-2", elixir: "1.15.7" }), (.[] | select(.profile == "emqx-enterprise") | . + { - builder: "5.3-7", - otp: ["26.2.5-1"][], + builder: "5.3-8", + otp: ["26.2.5-2"][], elixir: "1.15.7" }) ] diff --git a/.github/workflows/build_and_push_docker_images.yaml b/.github/workflows/build_and_push_docker_images.yaml index 6c49236d4..a43da1825 100644 --- a/.github/workflows/build_and_push_docker_images.yaml +++ b/.github/workflows/build_and_push_docker_images.yaml @@ -53,7 +53,7 @@ on: otp_vsn: required: false type: string - default: '26.2.5-1' + default: '26.2.5-2' elixir_vsn: required: false type: string @@ -61,7 +61,7 @@ on: builder_vsn: required: false type: string - default: '5.3-7' + default: '5.3-8' permissions: contents: read diff --git a/.github/workflows/build_packages.yaml b/.github/workflows/build_packages.yaml index d64416b9b..2fb323aa6 100644 --- a/.github/workflows/build_packages.yaml +++ b/.github/workflows/build_packages.yaml @@ -55,7 +55,7 @@ on: otp_vsn: required: false type: string - default: '26.2.5-1' + default: '26.2.5-2' elixir_vsn: required: false type: string @@ -63,7 +63,7 @@ on: builder_vsn: required: false type: string - default: '5.3-7' + default: '5.3-8' permissions: contents: read diff --git a/.github/workflows/build_packages_cron.yaml b/.github/workflows/build_packages_cron.yaml index c33ecc12a..a5617c5c0 100644 --- a/.github/workflows/build_packages_cron.yaml +++ b/.github/workflows/build_packages_cron.yaml @@ -23,8 +23,8 @@ jobs: fail-fast: false matrix: profile: - - ['emqx', 'master', '5.3-7:1.15.7-26.2.5-1'] - - ['emqx', 'release-57', '5.3-7:1.15.7-26.2.5-1'] + - ['emqx', 'master', '5.3-8:1.15.7-26.2.5-2'] + - ['emqx', 'release-57', '5.3-8:1.15.7-26.2.5-2'] os: - ubuntu22.04 - amzn2023 @@ -92,7 +92,7 @@ jobs: branch: - master otp: - - 26.2.5-1 + - 26.2.5-2 os: - macos-12-arm64 diff --git a/.github/workflows/build_slim_packages.yaml b/.github/workflows/build_slim_packages.yaml index cb7f53358..1faf49bbe 100644 --- a/.github/workflows/build_slim_packages.yaml +++ b/.github/workflows/build_slim_packages.yaml @@ -27,15 +27,15 @@ on: builder: required: false type: string - default: 'ghcr.io/emqx/emqx-builder/5.3-7:1.15.7-26.2.5-1-ubuntu22.04' + default: 'ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-ubuntu22.04' 
builder_vsn: required: false type: string - default: '5.3-7' + default: '5.3-8' otp_vsn: required: false type: string - default: '26.2.5-1' + default: '26.2.5-2' elixir_vsn: required: false type: string @@ -54,9 +54,9 @@ jobs: fail-fast: false matrix: profile: - - ["emqx", "26.2.5-1", "ubuntu22.04", "elixir", "x64"] - - ["emqx", "26.2.5-1", "ubuntu22.04", "elixir", "arm64"] - - ["emqx-enterprise", "26.2.5-1", "ubuntu22.04", "erlang", "x64"] + - ["emqx", "26.2.5-2", "ubuntu22.04", "elixir", "x64"] + - ["emqx", "26.2.5-2", "ubuntu22.04", "elixir", "arm64"] + - ["emqx-enterprise", "26.2.5-2", "ubuntu22.04", "erlang", "x64"] container: "ghcr.io/emqx/emqx-builder/${{ inputs.builder_vsn }}:${{ inputs.elixir_vsn }}-${{ matrix.profile[1] }}-${{ matrix.profile[2] }}" diff --git a/.github/workflows/codeql.yaml b/.github/workflows/codeql.yaml index 774f0e344..128416f80 100644 --- a/.github/workflows/codeql.yaml +++ b/.github/workflows/codeql.yaml @@ -18,7 +18,7 @@ jobs: actions: read security-events: write container: - image: ghcr.io/emqx/emqx-builder/5.3-7:1.15.7-26.2.5-1-ubuntu22.04 + image: ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-ubuntu22.04 strategy: fail-fast: false diff --git a/.github/workflows/performance_test.yaml b/.github/workflows/performance_test.yaml index 54645ceb7..c4e90b1ed 100644 --- a/.github/workflows/performance_test.yaml +++ b/.github/workflows/performance_test.yaml @@ -26,7 +26,7 @@ jobs: prepare: runs-on: ubuntu-latest if: github.repository_owner == 'emqx' - container: ghcr.io/emqx/emqx-builder/5.3-7:1.15.7-26.2.5-1-ubuntu20.04 + container: ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-ubuntu20.04 outputs: BENCH_ID: ${{ steps.prepare.outputs.BENCH_ID }} PACKAGE_FILE: ${{ steps.package_file.outputs.PACKAGE_FILE }} diff --git a/.tool-versions b/.tool-versions index b9c0e8deb..688b1e2da 100644 --- a/.tool-versions +++ b/.tool-versions @@ -1,2 +1,2 @@ -erlang 26.2.5-1 +erlang 26.2.5-2 elixir 1.15.7-otp-26 diff --git a/Makefile b/Makefile index 037a2b0df..b73c39a11 100644 --- a/Makefile +++ b/Makefile @@ -7,7 +7,7 @@ REBAR = $(CURDIR)/rebar3 BUILD = $(CURDIR)/build SCRIPTS = $(CURDIR)/scripts export EMQX_RELUP ?= true -export EMQX_DEFAULT_BUILDER = ghcr.io/emqx/emqx-builder/5.3-7:1.15.7-26.2.5-1-debian12 +export EMQX_DEFAULT_BUILDER = ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-debian12 export EMQX_DEFAULT_RUNNER = public.ecr.aws/debian/debian:12-slim export EMQX_REL_FORM ?= tgz export QUICER_DOWNLOAD_FROM_RELEASE = 1 diff --git a/build b/build index bcea44a30..cfecd9eeb 100755 --- a/build +++ b/build @@ -397,9 +397,9 @@ function is_ecr_and_enterprise() { ## Build the default docker image based on debian 12. 
make_docker() { - local EMQX_BUILDER_VERSION="${EMQX_BUILDER_VERSION:-5.3-7}" + local EMQX_BUILDER_VERSION="${EMQX_BUILDER_VERSION:-5.3-8}" local EMQX_BUILDER_PLATFORM="${EMQX_BUILDER_PLATFORM:-debian12}" - local OTP_VSN="${OTP_VSN:-26.2.5-1}" + local OTP_VSN="${OTP_VSN:-26.2.5-2}" local ELIXIR_VSN="${ELIXIR_VSN:-1.15.7}" local EMQX_BUILDER=${EMQX_BUILDER:-ghcr.io/emqx/emqx-builder/${EMQX_BUILDER_VERSION}:${ELIXIR_VSN}-${OTP_VSN}-${EMQX_BUILDER_PLATFORM}} local EMQX_RUNNER="${EMQX_RUNNER:-${EMQX_DEFAULT_RUNNER}}" diff --git a/deploy/docker/Dockerfile b/deploy/docker/Dockerfile index 9eb9b2518..a81d3dbc2 100644 --- a/deploy/docker/Dockerfile +++ b/deploy/docker/Dockerfile @@ -1,4 +1,4 @@ -ARG BUILD_FROM=ghcr.io/emqx/emqx-builder/5.3-7:1.15.7-26.2.5-1-debian12 +ARG BUILD_FROM=ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-debian12 ARG RUN_FROM=public.ecr.aws/debian/debian:12-slim ARG SOURCE_TYPE=src # tgz diff --git a/scripts/buildx.sh b/scripts/buildx.sh index c222127b3..1013a529a 100755 --- a/scripts/buildx.sh +++ b/scripts/buildx.sh @@ -9,7 +9,7 @@ ## example: ## ./scripts/buildx.sh --profile emqx --pkgtype tgz --arch arm64 \ -## --builder ghcr.io/emqx/emqx-builder/5.3-7:1.15.7-26.2.5-1-debian12 +## --builder ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-debian12 set -euo pipefail @@ -24,7 +24,7 @@ help() { echo "--arch amd64|arm64: Target arch to build the EMQX package for" echo "--src_dir : EMQX source code in this dir, default to PWD" echo "--builder : Builder image to pull" - echo " E.g. ghcr.io/emqx/emqx-builder/5.3-7:1.15.7-26.2.5-1-debian12" + echo " E.g. ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-debian12" } die() { diff --git a/scripts/pr-sanity-checks.sh b/scripts/pr-sanity-checks.sh index ad8fbaaa4..3d687e6f5 100755 --- a/scripts/pr-sanity-checks.sh +++ b/scripts/pr-sanity-checks.sh @@ -12,8 +12,8 @@ if ! 
type "yq" > /dev/null; then exit 1 fi -EMQX_BUILDER_VERSION=${EMQX_BUILDER_VERSION:-5.3-7} -OTP_VSN=${OTP_VSN:-26.2.5-1} +EMQX_BUILDER_VERSION=${EMQX_BUILDER_VERSION:-5.3-8} +OTP_VSN=${OTP_VSN:-26.2.5-2} ELIXIR_VSN=${ELIXIR_VSN:-1.15.7} EMQX_BUILDER_PLATFORM=${EMQX_BUILDER_PLATFORM:-ubuntu22.04} EMQX_BUILDER=${EMQX_BUILDER:-ghcr.io/emqx/emqx-builder/${EMQX_BUILDER_VERSION}:${ELIXIR_VSN}-${OTP_VSN}-${EMQX_BUILDER_PLATFORM}} diff --git a/scripts/relup-test/start-relup-test-cluster.sh b/scripts/relup-test/start-relup-test-cluster.sh index a972fa210..796ffdaa1 100755 --- a/scripts/relup-test/start-relup-test-cluster.sh +++ b/scripts/relup-test/start-relup-test-cluster.sh @@ -22,7 +22,7 @@ WEBHOOK="webhook.$NET" BENCH="bench.$NET" COOKIE='this-is-a-secret' ## Erlang image is needed to run webhook server and emqtt-bench -ERLANG_IMAGE="ghcr.io/emqx/emqx-builder/5.3-7:1.15.7-26.2.5-1-ubuntu22.04" +ERLANG_IMAGE="ghcr.io/emqx/emqx-builder/5.3-8:1.15.7-26.2.5-2-ubuntu22.04" # builder has emqtt-bench installed BENCH_IMAGE="$ERLANG_IMAGE" From 3c501e4f2a4f80c1fdf27ed86e136da4890cadeb Mon Sep 17 00:00:00 2001 From: Thales Macedo Garitezi Date: Mon, 10 Jun 2024 15:10:48 -0300 Subject: [PATCH 22/33] fix: remove another redundant health check Fixes https://emqx.atlassian.net/browse/EMQX-12521 --- apps/emqx_bridge/src/emqx_bridge_v2.erl | 8 +++-- .../src/emqx_resource_manager.erl | 34 +++++++++++++++++-- 2 files changed, 38 insertions(+), 4 deletions(-) diff --git a/apps/emqx_bridge/src/emqx_bridge_v2.erl b/apps/emqx_bridge/src/emqx_bridge_v2.erl index 79e8fc8f8..0b2e9277a 100644 --- a/apps/emqx_bridge/src/emqx_bridge_v2.erl +++ b/apps/emqx_bridge/src/emqx_bridge_v2.erl @@ -456,7 +456,7 @@ install_bridge_v2_helper( ConnectorId = emqx_connector_resource:resource_id( connector_type(BridgeV2Type), ConnectorName ), - emqx_resource_manager:add_channel( + _ = emqx_resource_manager:add_channel( ConnectorId, BridgeV2Id, augment_channel_config( @@ -786,7 +786,11 @@ create_dry_run_helper(ConfRootKey, BridgeV2Type, ConnectorRawConf, BridgeV2RawCo BridgeName, BridgeV2Conf ), - case emqx_resource_manager:add_channel(ConnectorId, ChannelTestId, AugmentedConf) of + %% We'll perform it ourselves to get the resulting status afterwards. + Opts = #{perform_health_check => false}, + case + emqx_resource_manager:add_channel(ConnectorId, ChannelTestId, AugmentedConf, Opts) + of {error, Reason} -> {error, Reason}; ok -> diff --git a/apps/emqx_resource/src/emqx_resource_manager.erl b/apps/emqx_resource/src/emqx_resource_manager.erl index 816c38301..c042054e3 100644 --- a/apps/emqx_resource/src/emqx_resource_manager.erl +++ b/apps/emqx_resource/src/emqx_resource_manager.erl @@ -14,6 +14,9 @@ %% limitations under the License. %%-------------------------------------------------------------------- -module(emqx_resource_manager). + +-feature(maybe_expr, enable). + -behaviour(gen_statem). -include("emqx_resource.hrl"). @@ -34,6 +37,7 @@ health_check/1, channel_health_check/2, add_channel/3, + add_channel/4, remove_channel/2, get_channels/1 ]). @@ -133,6 +137,12 @@ ST =:= ?status_connecting; ST =:= ?status_connected; ST =:= ?status_disconnected ). +-type add_channel_opts() :: #{ + %% Whether to immediately perform a health check after adding the channel. + %% Default: `true' + perform_health_check => boolean() +}. 
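%% A minimal usage sketch of the new option (not part of this patch), with
%% hypothetical resource and channel ids. add_channel/3 keeps the old
%% behaviour and runs the health check right away; passing
%% #{perform_health_check => false} skips it so the caller can run the check
%% itself, as the dry-run path in emqx_bridge_v2 above does:
%%
%%     ok = emqx_resource_manager:add_channel(ConnResId, ChanResId, ChanConf,
%%              #{perform_health_check => false}),
%%     _ = emqx_resource_manager:channel_health_check(ConnResId, ChanResId).
%%
%% The maybe ... end block below (enabled by -feature(maybe_expr, enable))
%% short-circuits when the `true ?=' match on the option fails, so the
%% immediate health check only happens in the default case.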
+ %%------------------------------------------------------------------------------ %% API %%------------------------------------------------------------------------------ @@ -378,10 +388,30 @@ channel_health_check(ResId, ChannelId) -> _ = health_check(ResId), safe_call(ResId, {channel_health_check, ChannelId}, ?T_OPERATION). +-spec add_channel( + connector_resource_id(), + action_resource_id() | source_resource_id(), + _Config +) -> + ok | {error, term()}. add_channel(ResId, ChannelId, Config) -> + add_channel(ResId, ChannelId, Config, _Opts = #{}). + +-spec add_channel( + connector_resource_id(), + action_resource_id() | source_resource_id(), + _Config, + add_channel_opts() +) -> + ok | {error, term()}. +add_channel(ResId, ChannelId, Config, Opts) -> Result = safe_call(ResId, {add_channel, ChannelId, Config}, ?T_OPERATION), - %% Wait for health_check to finish - _ = channel_health_check(ResId, ChannelId), + maybe + true ?= maps:get(perform_health_check, Opts, true), + %% Wait for health_check to finish + _ = channel_health_check(ResId, ChannelId), + ok + end, Result. remove_channel(ResId, ChannelId) -> From 6e0ef893f4ec2ec45b6405893db0a7aebb5cafa0 Mon Sep 17 00:00:00 2001 From: Thales Macedo Garitezi Date: Mon, 10 Jun 2024 14:38:16 -0300 Subject: [PATCH 23/33] feat: pass along client attributes down to message transformation context --- apps/emqx/src/emqx_channel.erl | 17 ++++---- .../src/emqx_message_transformation.erl | 2 + ..._message_transformation_http_api_SUITE.erl | 40 ++++++++++++++++++- 3 files changed, 51 insertions(+), 8 deletions(-) diff --git a/apps/emqx/src/emqx_channel.erl b/apps/emqx/src/emqx_channel.erl index eb54f6ba1..4b708e15a 100644 --- a/apps/emqx/src/emqx_channel.erl +++ b/apps/emqx/src/emqx_channel.erl @@ -685,20 +685,23 @@ process_publish(Packet = ?PUBLISH_PACKET(QoS, Topic, PacketId), Channel) -> packet_to_message(Packet, #channel{ conninfo = #{proto_ver := ProtoVer}, - clientinfo = #{ - protocol := Protocol, - clientid := ClientId, - username := Username, - peerhost := PeerHost, - mountpoint := MountPoint - } + clientinfo = + #{ + protocol := Protocol, + clientid := ClientId, + username := Username, + peerhost := PeerHost, + mountpoint := MountPoint + } = ClientInfo }) -> + ClientAttrs = maps:get(client_attrs, ClientInfo, #{}), emqx_mountpoint:mount( MountPoint, emqx_packet:to_message( Packet, ClientId, #{ + client_attrs => ClientAttrs, proto_ver => ProtoVer, protocol => Protocol, username => Username, diff --git a/apps/emqx_message_transformation/src/emqx_message_transformation.erl b/apps/emqx_message_transformation/src/emqx_message_transformation.erl index 0ffb9f606..612a30f78 100644 --- a/apps/emqx_message_transformation/src/emqx_message_transformation.erl +++ b/apps/emqx_message_transformation/src/emqx_message_transformation.erl @@ -45,6 +45,7 @@ -type rendered_value() :: qos() | boolean() | binary(). 
-type eval_context() :: #{ + client_attrs := map(), payload := _, qos := _, retain := _, @@ -309,6 +310,7 @@ message_to_context(#message{} = Message, Payload, Transformation) -> end, #{ dirty => Dirty, + client_attrs => emqx_message:get_header(client_attrs, Message, #{}), payload => Payload, qos => Message#message.qos, retain => emqx_message:get_flag(retain, Message, false), diff --git a/apps/emqx_message_transformation/test/emqx_message_transformation_http_api_SUITE.erl b/apps/emqx_message_transformation/test/emqx_message_transformation_http_api_SUITE.erl index 60779911c..58efa69e0 100644 --- a/apps/emqx_message_transformation/test/emqx_message_transformation_http_api_SUITE.erl +++ b/apps/emqx_message_transformation/test/emqx_message_transformation_http_api_SUITE.erl @@ -250,7 +250,11 @@ connect(ClientId) -> connect(ClientId, _IsPersistent = false). connect(ClientId, IsPersistent) -> - Properties = emqx_utils_maps:put_if(#{}, 'Session-Expiry-Interval', 30, IsPersistent), + connect(ClientId, IsPersistent, _Opts = #{}). + +connect(ClientId, IsPersistent, Opts) -> + Properties0 = maps:get(properties, Opts, #{}), + Properties = emqx_utils_maps:put_if(Properties0, 'Session-Expiry-Interval', 30, IsPersistent), {ok, Client} = emqtt:start_link([ {clean_start, true}, {clientid, ClientId}, @@ -1441,3 +1445,37 @@ t_json_encode_decode_smoke_test(_Config) -> [] ), ok. + +%% Simple smoke test for client attributes support. +t_client_attrs(_Config) -> + {ok, Compiled} = emqx_variform:compile(<<"user_property.tenant">>), + ok = emqx_config:put_zone_conf(default, [mqtt, client_attrs_init], [ + #{ + expression => Compiled, + set_as_attr => <<"tenant">> + } + ]), + on_exit(fun() -> ok = emqx_config:put_zone_conf(default, [mqtt, client_attrs_init], []) end), + ?check_trace( + begin + Name1 = <<"foo">>, + Operation1 = operation(topic, <<"concat([client_attrs.tenant, '/', topic])">>), + Transformation1 = transformation(Name1, [Operation1]), + {201, _} = insert(Transformation1), + + Tenant = <<"mytenant">>, + C = connect( + <<"c1">>, + _IsPersistent = false, + #{properties => #{'User-Property' => [{<<"tenant">>, Tenant}]}} + ), + {ok, _, [_]} = emqtt:subscribe(C, emqx_topic:join([Tenant, <<"#">>])), + + ok = publish(C, <<"t/1">>, #{x => 1, y => 2}), + ?assertReceive({publish, #{topic := <<"mytenant/t/1">>}}), + + ok + end, + [] + ), + ok. From d93c8540c85e4826488bce79fc27fcbcbb751554 Mon Sep 17 00:00:00 2001 From: Thales Macedo Garitezi Date: Mon, 10 Jun 2024 14:48:24 -0300 Subject: [PATCH 24/33] feat: pass along peername down to transformation context --- apps/emqx/src/emqx_channel.erl | 6 +++++- ...mqx_message_transformation_http_api_SUITE.erl | 16 ++++++++++++++-- apps/emqx_rule_engine/src/emqx_rule_events.erl | 4 ++-- 3 files changed, 21 insertions(+), 5 deletions(-) diff --git a/apps/emqx/src/emqx_channel.erl b/apps/emqx/src/emqx_channel.erl index 4b708e15a..1306cf738 100644 --- a/apps/emqx/src/emqx_channel.erl +++ b/apps/emqx/src/emqx_channel.erl @@ -684,7 +684,10 @@ process_publish(Packet = ?PUBLISH_PACKET(QoS, Topic, PacketId), Channel) -> end. 
packet_to_message(Packet, #channel{ - conninfo = #{proto_ver := ProtoVer}, + conninfo = #{ + peername := PeerName, + proto_ver := ProtoVer + }, clientinfo = #{ protocol := Protocol, @@ -702,6 +705,7 @@ packet_to_message(Packet, #channel{ ClientId, #{ client_attrs => ClientAttrs, + peername => PeerName, proto_ver => ProtoVer, protocol => Protocol, username => Username, diff --git a/apps/emqx_message_transformation/test/emqx_message_transformation_http_api_SUITE.erl b/apps/emqx_message_transformation/test/emqx_message_transformation_http_api_SUITE.erl index 58efa69e0..b3b88ac69 100644 --- a/apps/emqx_message_transformation/test/emqx_message_transformation_http_api_SUITE.erl +++ b/apps/emqx_message_transformation/test/emqx_message_transformation_http_api_SUITE.erl @@ -1095,8 +1095,20 @@ t_multiple_transformations(_Config) -> ?assertMatch( [ - {_, #{data := #{transformation := Name1, event := 'message.transformation_failed'}}}, - {_, #{data := #{transformation := Name2, event := 'message.transformation_failed'}}} + {_, #{ + data := #{ + transformation := Name1, + event := 'message.transformation_failed', + peername := <<_/binary>> + } + }}, + {_, #{ + data := #{ + transformation := Name2, + event := 'message.transformation_failed', + peername := <<_/binary>> + } + }} ], get_traced_failures_from_rule_engine() ), diff --git a/apps/emqx_rule_engine/src/emqx_rule_events.erl b/apps/emqx_rule_engine/src/emqx_rule_events.erl index 482bf8c20..237b4e28a 100644 --- a/apps/emqx_rule_engine/src/emqx_rule_events.erl +++ b/apps/emqx_rule_engine/src/emqx_rule_events.erl @@ -573,7 +573,7 @@ eventmsg_transformation_failed( clientid => ClientId, username => emqx_message:get_header(username, Message, undefined), payload => Payload, - peerhost => ntoa(emqx_message:get_header(peerhost, Message, undefined)), + peername => ntoa(emqx_message:get_header(peername, Message, undefined)), topic => Topic, qos => QoS, flags => Flags, @@ -1064,7 +1064,7 @@ columns_with_exam('message.transformation_failed') -> {<<"clientid">>, <<"c_emqx">>}, {<<"username">>, <<"u_emqx">>}, {<<"payload">>, <<"{\"msg\": \"hello\"}">>}, - {<<"peerhost">>, <<"192.168.0.10">>}, + {<<"peername">>, <<"192.168.0.10:56431">>}, {<<"topic">>, <<"t/a">>}, {<<"qos">>, 1}, {<<"flags">>, #{}}, From 5eff4a75447d6c480ee4a817e7b3f0fcc650e90a Mon Sep 17 00:00:00 2001 From: Thales Macedo Garitezi Date: Mon, 10 Jun 2024 15:46:28 -0300 Subject: [PATCH 25/33] refactor(schema validation): improve api and module organization To conform to comment from another PR: https://github.com/emqx/emqx/pull/13199#discussion_r1632658742 ... since schema validation is the "inspiration" for message transformation. 
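
As an aside on the two preceding patches (23/33 and 24/33): together they expose more of the publishing client to message transformations. The client_attrs map is copied from the message headers into the evaluation context, and the message.transformation_failed rule event now reports the full peername (host and port, e.g. 192.168.0.10:56431) rather than only the peer host. As the new test case above shows, a transformation expression can then reference the attributes directly, for example

    operation(topic, <<"concat([client_attrs.tenant, '/', topic])">>)

which, for a client whose tenant attribute is mytenant, rewrites a publish to t/1 into mytenant/t/1.
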
--- .../src/emqx_schema_validation.app.src | 2 +- .../src/emqx_schema_validation.erl | 348 +--------------- .../src/emqx_schema_validation_app.erl | 8 +- .../src/emqx_schema_validation_config.erl | 390 ++++++++++++++++++ 4 files changed, 406 insertions(+), 342 deletions(-) create mode 100644 apps/emqx_schema_validation/src/emqx_schema_validation_config.erl diff --git a/apps/emqx_schema_validation/src/emqx_schema_validation.app.src b/apps/emqx_schema_validation/src/emqx_schema_validation.app.src index a2bdf30cc..773e0fff0 100644 --- a/apps/emqx_schema_validation/src/emqx_schema_validation.app.src +++ b/apps/emqx_schema_validation/src/emqx_schema_validation.app.src @@ -1,6 +1,6 @@ {application, emqx_schema_validation, [ {description, "EMQX Schema Validation"}, - {vsn, "0.1.0"}, + {vsn, "0.1.1"}, {registered, [emqx_schema_validation_sup, emqx_schema_validation_registry]}, {mod, {emqx_schema_validation_app, []}}, {applications, [ diff --git a/apps/emqx_schema_validation/src/emqx_schema_validation.erl b/apps/emqx_schema_validation/src/emqx_schema_validation.erl index 3ec0e019d..70bebc5fc 100644 --- a/apps/emqx_schema_validation/src/emqx_schema_validation.erl +++ b/apps/emqx_schema_validation/src/emqx_schema_validation.erl @@ -10,12 +10,6 @@ %% API -export([ - add_handler/0, - remove_handler/0, - - load/0, - unload/0, - list/0, reorder/1, lookup/1, @@ -32,13 +26,6 @@ on_message_publish/1 ]). -%% `emqx_config_handler' API --export([pre_config_update/3, post_config_update/5]). - -%% `emqx_config_backup' API --behaviour(emqx_config_backup). --export([import_config/1]). - %% Internal exports -export([parse_sql_check/1]). @@ -52,91 +39,46 @@ %%------------------------------------------------------------------------------ -define(TRACE_TAG, "SCHEMA_VALIDATION"). --define(CONF_ROOT, schema_validation). --define(CONF_ROOT_BIN, <<"schema_validation">>). --define(VALIDATIONS_CONF_PATH, [?CONF_ROOT, validations]). -type validation_name() :: binary(). -type validation() :: _TODO. +-export_type([ + validation/0, + validation_name/0 +]). + %%------------------------------------------------------------------------------ %% API %%------------------------------------------------------------------------------ --spec add_handler() -> ok. -add_handler() -> - ok = emqx_config_handler:add_handler([?CONF_ROOT], ?MODULE), - ok = emqx_config_handler:add_handler(?VALIDATIONS_CONF_PATH, ?MODULE), - ok. - --spec remove_handler() -> ok. -remove_handler() -> - ok = emqx_config_handler:remove_handler(?VALIDATIONS_CONF_PATH), - ok = emqx_config_handler:remove_handler([?CONF_ROOT]), - ok. - -load() -> - Validations = emqx:get_config(?VALIDATIONS_CONF_PATH, []), - lists:foreach( - fun({Pos, Validation}) -> - ok = emqx_schema_validation_registry:insert(Pos, Validation) - end, - lists:enumerate(Validations) - ). - -unload() -> - Validations = emqx:get_config(?VALIDATIONS_CONF_PATH, []), - lists:foreach( - fun(Validation) -> - ok = emqx_schema_validation_registry:delete(Validation) - end, - Validations - ). - -spec list() -> [validation()]. list() -> - emqx:get_config(?VALIDATIONS_CONF_PATH, []). + emqx_schema_validation_config:list(). -spec reorder([validation_name()]) -> {ok, _} | {error, _}. reorder(Order) -> - emqx_conf:update( - ?VALIDATIONS_CONF_PATH, - {reorder, Order}, - #{override_to => cluster} - ). + emqx_schema_validation_config:reorder(Order). -spec lookup(validation_name()) -> {ok, validation()} | {error, not_found}. 
lookup(Name) -> - Validations = emqx:get_config(?VALIDATIONS_CONF_PATH, []), - do_lookup(Name, Validations). + emqx_schema_validation_config:lookup(Name). -spec insert(validation()) -> {ok, _} | {error, _}. insert(Validation) -> - emqx_conf:update( - ?VALIDATIONS_CONF_PATH, - {append, Validation}, - #{override_to => cluster} - ). + emqx_schema_validation_config:insert(Validation). -spec update(validation()) -> {ok, _} | {error, _}. update(Validation) -> - emqx_conf:update( - ?VALIDATIONS_CONF_PATH, - {update, Validation}, - #{override_to => cluster} - ). + emqx_schema_validation_config:update(Validation). -spec delete(validation_name()) -> {ok, _} | {error, _}. delete(Name) -> - emqx_conf:update( - ?VALIDATIONS_CONF_PATH, - {delete, Name}, - #{override_to => cluster} - ). + emqx_schema_validation_config:delete(Name). %%------------------------------------------------------------------------------ %% Hooks @@ -175,116 +117,6 @@ on_message_publish(Message = #message{topic = Topic, headers = Headers}) -> end end. -%%------------------------------------------------------------------------------ -%% `emqx_config_handler' API -%%------------------------------------------------------------------------------ - -pre_config_update(?VALIDATIONS_CONF_PATH, {append, Validation}, OldValidations) -> - Validations = OldValidations ++ [Validation], - {ok, Validations}; -pre_config_update(?VALIDATIONS_CONF_PATH, {update, Validation}, OldValidations) -> - replace(OldValidations, Validation); -pre_config_update(?VALIDATIONS_CONF_PATH, {delete, Validation}, OldValidations) -> - delete(OldValidations, Validation); -pre_config_update(?VALIDATIONS_CONF_PATH, {reorder, Order}, OldValidations) -> - reorder(OldValidations, Order); -pre_config_update([?CONF_ROOT], {merge, NewConfig}, OldConfig) -> - #{resulting_config := Config} = prepare_config_merge(NewConfig, OldConfig), - {ok, Config}; -pre_config_update([?CONF_ROOT], {replace, NewConfig}, _OldConfig) -> - {ok, NewConfig}. 
- -post_config_update(?VALIDATIONS_CONF_PATH, {append, #{<<"name">> := Name}}, New, _Old, _AppEnvs) -> - {Pos, Validation} = fetch_with_index(New, Name), - ok = emqx_schema_validation_registry:insert(Pos, Validation), - ok; -post_config_update(?VALIDATIONS_CONF_PATH, {update, #{<<"name">> := Name}}, New, Old, _AppEnvs) -> - {_Pos, OldValidation} = fetch_with_index(Old, Name), - {Pos, NewValidation} = fetch_with_index(New, Name), - ok = emqx_schema_validation_registry:update(OldValidation, Pos, NewValidation), - ok; -post_config_update(?VALIDATIONS_CONF_PATH, {delete, Name}, _New, Old, _AppEnvs) -> - {_Pos, Validation} = fetch_with_index(Old, Name), - ok = emqx_schema_validation_registry:delete(Validation), - ok; -post_config_update(?VALIDATIONS_CONF_PATH, {reorder, _Order}, New, _Old, _AppEnvs) -> - ok = emqx_schema_validation_registry:reindex_positions(New), - ok; -post_config_update([?CONF_ROOT], {merge, _}, ResultingConfig, Old, _AppEnvs) -> - #{validations := ResultingValidations} = ResultingConfig, - #{validations := OldValidations} = Old, - #{added := NewValidations0} = - emqx_utils:diff_lists( - ResultingValidations, - OldValidations, - fun(#{name := N}) -> N end - ), - NewValidations = - lists:map( - fun(#{name := Name}) -> - {Pos, Validation} = fetch_with_index(ResultingValidations, Name), - ok = emqx_schema_validation_registry:insert(Pos, Validation), - #{name => Name, pos => Pos} - end, - NewValidations0 - ), - {ok, #{new_validations => NewValidations}}; -post_config_update([?CONF_ROOT], {replace, Input}, ResultingConfig, Old, _AppEnvs) -> - #{ - new_validations := NewValidations, - changed_validations := ChangedValidations0, - deleted_validations := DeletedValidations - } = prepare_config_replace(Input, Old), - #{validations := ResultingValidations} = ResultingConfig, - #{validations := OldValidations} = Old, - lists:foreach( - fun(Name) -> - {_Pos, Validation} = fetch_with_index(OldValidations, Name), - ok = emqx_schema_validation_registry:delete(Validation) - end, - DeletedValidations - ), - lists:foreach( - fun(Name) -> - {Pos, Validation} = fetch_with_index(ResultingValidations, Name), - ok = emqx_schema_validation_registry:insert(Pos, Validation) - end, - NewValidations - ), - ChangedValidations = - lists:map( - fun(Name) -> - {_Pos, OldValidation} = fetch_with_index(OldValidations, Name), - {Pos, NewValidation} = fetch_with_index(ResultingValidations, Name), - ok = emqx_schema_validation_registry:update(OldValidation, Pos, NewValidation), - #{name => Name, pos => Pos} - end, - ChangedValidations0 - ), - ok = emqx_schema_validation_registry:reindex_positions(ResultingValidations), - {ok, #{changed_validations => ChangedValidations}}. - -%%------------------------------------------------------------------------------ -%% `emqx_config_backup' API -%%------------------------------------------------------------------------------ - -import_config(#{?CONF_ROOT_BIN := RawConf0}) -> - Result = emqx_conf:update( - [?CONF_ROOT], - {merge, RawConf0}, - #{override_to => cluster, rawconf_with_defaults => true} - ), - case Result of - {error, Reason} -> - {error, #{root_key => ?CONF_ROOT, reason => Reason}}; - {ok, _} -> - Keys0 = maps:keys(RawConf0), - ChangedPaths = Keys0 -- [<<"validations">>], - {ok, #{root_key => ?CONF_ROOT, changed => ChangedPaths}} - end; -import_config(_RawConf) -> - {ok, #{root_key => ?CONF_ROOT, changed => []}}. 
- %%------------------------------------------------------------------------------ %% Internal exports %%------------------------------------------------------------------------------ @@ -370,112 +202,6 @@ evaluate_schema_check(Check, Validation, #message{payload = Data}) -> false end. -replace(OldValidations, Validation = #{<<"name">> := Name}) -> - {Found, RevNewValidations} = - lists:foldl( - fun - (#{<<"name">> := NameIn}, {_FoundIn, Acc}) when NameIn =:= Name -> - {true, [Validation | Acc]}; - (Val, {FoundIn, Acc}) -> - {FoundIn, [Val | Acc]} - end, - {false, []}, - OldValidations - ), - case Found of - true -> - {ok, lists:reverse(RevNewValidations)}; - false -> - {error, not_found} - end. - -delete(OldValidations, Name) -> - {Found, RevNewValidations} = - lists:foldl( - fun - (#{<<"name">> := NameIn}, {_FoundIn, Acc}) when NameIn =:= Name -> - {true, Acc}; - (Val, {FoundIn, Acc}) -> - {FoundIn, [Val | Acc]} - end, - {false, []}, - OldValidations - ), - case Found of - true -> - {ok, lists:reverse(RevNewValidations)}; - false -> - {error, not_found} - end. - -reorder(Validations, Order) -> - Context = #{ - not_found => sets:new([{version, 2}]), - duplicated => sets:new([{version, 2}]), - res => [], - seen => sets:new([{version, 2}]) - }, - reorder(Validations, Order, Context). - -reorder(NotReordered, _Order = [], #{not_found := NotFound0, duplicated := Duplicated0, res := Res}) -> - NotFound = sets:to_list(NotFound0), - Duplicated = sets:to_list(Duplicated0), - case {NotReordered, NotFound, Duplicated} of - {[], [], []} -> - {ok, lists:reverse(Res)}; - {_, _, _} -> - Error = #{ - not_found => NotFound, - duplicated => Duplicated, - not_reordered => [N || #{<<"name">> := N} <- NotReordered] - }, - {error, Error} - end; -reorder(RemainingValidations, [Name | Rest], Context0 = #{seen := Seen0}) -> - case sets:is_element(Name, Seen0) of - true -> - Context = maps:update_with( - duplicated, fun(S) -> sets:add_element(Name, S) end, Context0 - ), - reorder(RemainingValidations, Rest, Context); - false -> - case safe_take(Name, RemainingValidations) of - error -> - Context = maps:update_with( - not_found, fun(S) -> sets:add_element(Name, S) end, Context0 - ), - reorder(RemainingValidations, Rest, Context); - {ok, {Validation, Front, Rear}} -> - Context1 = maps:update_with( - seen, fun(S) -> sets:add_element(Name, S) end, Context0 - ), - Context = maps:update_with(res, fun(Vs) -> [Validation | Vs] end, Context1), - reorder(Front ++ Rear, Rest, Context) - end - end. - -fetch_with_index([{Pos, #{name := Name} = Validation} | _Rest], Name) -> - {Pos, Validation}; -fetch_with_index([{_, _} | Rest], Name) -> - fetch_with_index(Rest, Name); -fetch_with_index(Validations, Name) -> - fetch_with_index(lists:enumerate(Validations), Name). - -safe_take(Name, Validations) -> - case lists:splitwith(fun(#{<<"name">> := N}) -> N =/= Name end, Validations) of - {_Front, []} -> - error; - {Front, [Found | Rear]} -> - {ok, {Found, Front, Rear}} - end. - -do_lookup(_Name, _Validations = []) -> - {error, not_found}; -do_lookup(Name, [#{name := Name} = Validation | _Rest]) -> - {ok, Validation}; -do_lookup(Name, [_ | Rest]) -> - do_lookup(Name, Rest). - run_validations(Validations, Message) -> try emqx_rule_runtime:clear_rule_payload(), @@ -557,55 +283,3 @@ run_schema_validation_failed_hook(Message, Validation) -> #{name := Name} = Validation, ValidationContext = #{name => Name}, emqx_hooks:run('schema.validation_failed', [Message, ValidationContext]). 
- -%% "Merging" in the context of the validation array means: -%% * Existing validations (identified by `name') are left untouched. -%% * No validations are removed. -%% * New validations are appended to the existing list. -%% * Existing validations are not reordered. -prepare_config_merge(NewConfig0, OldConfig) -> - {ImportedRawValidations, NewConfigNoValidations} = - case maps:take(<<"validations">>, NewConfig0) of - error -> - {[], NewConfig0}; - {V, R} -> - {V, R} - end, - OldRawValidations = maps:get(<<"validations">>, OldConfig, []), - #{added := NewRawValidations} = emqx_utils:diff_lists( - ImportedRawValidations, - OldRawValidations, - fun(#{<<"name">> := N}) -> N end - ), - Config0 = emqx_utils_maps:deep_merge(OldConfig, NewConfigNoValidations), - Config = maps:update_with( - <<"validations">>, - fun(OldVs) -> OldVs ++ NewRawValidations end, - NewRawValidations, - Config0 - ), - #{ - new_validations => NewRawValidations, - resulting_config => Config - }. - -prepare_config_replace(NewConfig, OldConfig) -> - ImportedRawValidations = maps:get(<<"validations">>, NewConfig, []), - OldValidations = maps:get(validations, OldConfig, []), - %% Since, at this point, we have an input raw config but a parsed old config, we - %% project both to the to have only their names, and consider common names as changed. - #{ - added := NewValidations, - removed := DeletedValidations, - changed := ChangedValidations0, - identical := ChangedValidations1 - } = emqx_utils:diff_lists( - lists:map(fun(#{<<"name">> := N}) -> N end, ImportedRawValidations), - lists:map(fun(#{name := N}) -> N end, OldValidations), - fun(N) -> N end - ), - #{ - new_validations => NewValidations, - changed_validations => ChangedValidations0 ++ ChangedValidations1, - deleted_validations => DeletedValidations - }. diff --git a/apps/emqx_schema_validation/src/emqx_schema_validation_app.erl b/apps/emqx_schema_validation/src/emqx_schema_validation_app.erl index 107ae4e8f..a06a9f303 100644 --- a/apps/emqx_schema_validation/src/emqx_schema_validation_app.erl +++ b/apps/emqx_schema_validation/src/emqx_schema_validation_app.erl @@ -19,14 +19,14 @@ -spec start(application:start_type(), term()) -> {ok, pid()}. start(_Type, _Args) -> {ok, Sup} = emqx_schema_validation_sup:start_link(), - ok = emqx_schema_validation:add_handler(), + ok = emqx_schema_validation_config:add_handler(), ok = emqx_schema_validation:register_hooks(), - ok = emqx_schema_validation:load(), + ok = emqx_schema_validation_config:load(), {ok, Sup}. -spec stop(term()) -> ok. stop(_State) -> - ok = emqx_schema_validation:unload(), + ok = emqx_schema_validation_config:unload(), ok = emqx_schema_validation:unregister_hooks(), - ok = emqx_schema_validation:remove_handler(), + ok = emqx_schema_validation_config:remove_handler(), ok. diff --git a/apps/emqx_schema_validation/src/emqx_schema_validation_config.erl b/apps/emqx_schema_validation/src/emqx_schema_validation_config.erl new file mode 100644 index 000000000..df882a078 --- /dev/null +++ b/apps/emqx_schema_validation/src/emqx_schema_validation_config.erl @@ -0,0 +1,390 @@ +%%-------------------------------------------------------------------- +%% Copyright (c) 2024 EMQ Technologies Co., Ltd. All Rights Reserved. +%%-------------------------------------------------------------------- +-module(emqx_schema_validation_config). + +%% API +-export([ + add_handler/0, + remove_handler/0, + + load/0, + unload/0, + + list/0, + reorder/1, + lookup/1, + insert/1, + update/1, + delete/1 +]). 
+ +%% `emqx_config_handler' API +-export([pre_config_update/3, post_config_update/5]). + +%% `emqx_config_backup' API +-behaviour(emqx_config_backup). +-export([import_config/1]). + +%%------------------------------------------------------------------------------ +%% Type declarations +%%------------------------------------------------------------------------------ + +-define(CONF_ROOT, schema_validation). +-define(CONF_ROOT_BIN, <<"schema_validation">>). +-define(VALIDATIONS_CONF_PATH, [?CONF_ROOT, validations]). + +-type validation_name() :: emqx_schema_validation:validation_name(). +-type validation() :: emqx_schema_validation:validation(). +-type raw_validation() :: #{binary() => _}. + +%%------------------------------------------------------------------------------ +%% API +%%------------------------------------------------------------------------------ + +-spec add_handler() -> ok. +add_handler() -> + ok = emqx_config_handler:add_handler([?CONF_ROOT], ?MODULE), + ok = emqx_config_handler:add_handler(?VALIDATIONS_CONF_PATH, ?MODULE), + ok. + +-spec remove_handler() -> ok. +remove_handler() -> + ok = emqx_config_handler:remove_handler(?VALIDATIONS_CONF_PATH), + ok = emqx_config_handler:remove_handler([?CONF_ROOT]), + ok. + +load() -> + Validations = emqx:get_config(?VALIDATIONS_CONF_PATH, []), + lists:foreach( + fun({Pos, Validation}) -> + ok = emqx_schema_validation_registry:insert(Pos, Validation) + end, + lists:enumerate(Validations) + ). + +unload() -> + Validations = emqx:get_config(?VALIDATIONS_CONF_PATH, []), + lists:foreach( + fun(Validation) -> + ok = emqx_schema_validation_registry:delete(Validation) + end, + Validations + ). + +-spec list() -> [validation()]. +list() -> + emqx:get_config(?VALIDATIONS_CONF_PATH, []). + +-spec reorder([validation_name()]) -> + {ok, _} | {error, _}. +reorder(Order) -> + emqx_conf:update( + ?VALIDATIONS_CONF_PATH, + {reorder, Order}, + #{override_to => cluster} + ). + +-spec lookup(validation_name()) -> {ok, validation()} | {error, not_found}. +lookup(Name) -> + Validations = emqx:get_config(?VALIDATIONS_CONF_PATH, []), + do_lookup(Name, Validations). + +-spec insert(raw_validation()) -> + {ok, _} | {error, _}. +insert(Validation) -> + emqx_conf:update( + ?VALIDATIONS_CONF_PATH, + {append, Validation}, + #{override_to => cluster} + ). + +-spec update(raw_validation()) -> + {ok, _} | {error, _}. +update(Validation) -> + emqx_conf:update( + ?VALIDATIONS_CONF_PATH, + {update, Validation}, + #{override_to => cluster} + ). + +-spec delete(validation_name()) -> + {ok, _} | {error, _}. +delete(Name) -> + emqx_conf:update( + ?VALIDATIONS_CONF_PATH, + {delete, Name}, + #{override_to => cluster} + ). 
+ +%%------------------------------------------------------------------------------ +%% `emqx_config_handler' API +%%------------------------------------------------------------------------------ + +pre_config_update(?VALIDATIONS_CONF_PATH, {append, Validation}, OldValidations) -> + Validations = OldValidations ++ [Validation], + {ok, Validations}; +pre_config_update(?VALIDATIONS_CONF_PATH, {update, Validation}, OldValidations) -> + replace(OldValidations, Validation); +pre_config_update(?VALIDATIONS_CONF_PATH, {delete, Validation}, OldValidations) -> + delete(OldValidations, Validation); +pre_config_update(?VALIDATIONS_CONF_PATH, {reorder, Order}, OldValidations) -> + reorder(OldValidations, Order); +pre_config_update([?CONF_ROOT], {merge, NewConfig}, OldConfig) -> + #{resulting_config := Config} = prepare_config_merge(NewConfig, OldConfig), + {ok, Config}; +pre_config_update([?CONF_ROOT], {replace, NewConfig}, _OldConfig) -> + {ok, NewConfig}. + +post_config_update(?VALIDATIONS_CONF_PATH, {append, #{<<"name">> := Name}}, New, _Old, _AppEnvs) -> + {Pos, Validation} = fetch_with_index(New, Name), + ok = emqx_schema_validation_registry:insert(Pos, Validation), + ok; +post_config_update(?VALIDATIONS_CONF_PATH, {update, #{<<"name">> := Name}}, New, Old, _AppEnvs) -> + {_Pos, OldValidation} = fetch_with_index(Old, Name), + {Pos, NewValidation} = fetch_with_index(New, Name), + ok = emqx_schema_validation_registry:update(OldValidation, Pos, NewValidation), + ok; +post_config_update(?VALIDATIONS_CONF_PATH, {delete, Name}, _New, Old, _AppEnvs) -> + {_Pos, Validation} = fetch_with_index(Old, Name), + ok = emqx_schema_validation_registry:delete(Validation), + ok; +post_config_update(?VALIDATIONS_CONF_PATH, {reorder, _Order}, New, _Old, _AppEnvs) -> + ok = emqx_schema_validation_registry:reindex_positions(New), + ok; +post_config_update([?CONF_ROOT], {merge, _}, ResultingConfig, Old, _AppEnvs) -> + #{validations := ResultingValidations} = ResultingConfig, + #{validations := OldValidations} = Old, + #{added := NewValidations0} = + emqx_utils:diff_lists( + ResultingValidations, + OldValidations, + fun(#{name := N}) -> N end + ), + NewValidations = + lists:map( + fun(#{name := Name}) -> + {Pos, Validation} = fetch_with_index(ResultingValidations, Name), + ok = emqx_schema_validation_registry:insert(Pos, Validation), + #{name => Name, pos => Pos} + end, + NewValidations0 + ), + {ok, #{new_validations => NewValidations}}; +post_config_update([?CONF_ROOT], {replace, Input}, ResultingConfig, Old, _AppEnvs) -> + #{ + new_validations := NewValidations, + changed_validations := ChangedValidations0, + deleted_validations := DeletedValidations + } = prepare_config_replace(Input, Old), + #{validations := ResultingValidations} = ResultingConfig, + #{validations := OldValidations} = Old, + lists:foreach( + fun(Name) -> + {_Pos, Validation} = fetch_with_index(OldValidations, Name), + ok = emqx_schema_validation_registry:delete(Validation) + end, + DeletedValidations + ), + lists:foreach( + fun(Name) -> + {Pos, Validation} = fetch_with_index(ResultingValidations, Name), + ok = emqx_schema_validation_registry:insert(Pos, Validation) + end, + NewValidations + ), + ChangedValidations = + lists:map( + fun(Name) -> + {_Pos, OldValidation} = fetch_with_index(OldValidations, Name), + {Pos, NewValidation} = fetch_with_index(ResultingValidations, Name), + ok = emqx_schema_validation_registry:update(OldValidation, Pos, NewValidation), + #{name => Name, pos => Pos} + end, + ChangedValidations0 + ), + ok = 
emqx_schema_validation_registry:reindex_positions(ResultingValidations), + {ok, #{changed_validations => ChangedValidations}}. + +%%------------------------------------------------------------------------------ +%% `emqx_config_backup' API +%%------------------------------------------------------------------------------ + +import_config(#{?CONF_ROOT_BIN := RawConf0}) -> + Result = emqx_conf:update( + [?CONF_ROOT], + {merge, RawConf0}, + #{override_to => cluster, rawconf_with_defaults => true} + ), + case Result of + {error, Reason} -> + {error, #{root_key => ?CONF_ROOT, reason => Reason}}; + {ok, _} -> + Keys0 = maps:keys(RawConf0), + ChangedPaths = Keys0 -- [<<"validations">>], + {ok, #{root_key => ?CONF_ROOT, changed => ChangedPaths}} + end; +import_config(_RawConf) -> + {ok, #{root_key => ?CONF_ROOT, changed => []}}. + +%%------------------------------------------------------------------------------ +%% Internal fns +%%------------------------------------------------------------------------------ + +replace(OldValidations, Validation = #{<<"name">> := Name}) -> + {Found, RevNewValidations} = + lists:foldl( + fun + (#{<<"name">> := NameIn}, {_FoundIn, Acc}) when NameIn =:= Name -> + {true, [Validation | Acc]}; + (Val, {FoundIn, Acc}) -> + {FoundIn, [Val | Acc]} + end, + {false, []}, + OldValidations + ), + case Found of + true -> + {ok, lists:reverse(RevNewValidations)}; + false -> + {error, not_found} + end. + +delete(OldValidations, Name) -> + {Found, RevNewValidations} = + lists:foldl( + fun + (#{<<"name">> := NameIn}, {_FoundIn, Acc}) when NameIn =:= Name -> + {true, Acc}; + (Val, {FoundIn, Acc}) -> + {FoundIn, [Val | Acc]} + end, + {false, []}, + OldValidations + ), + case Found of + true -> + {ok, lists:reverse(RevNewValidations)}; + false -> + {error, not_found} + end. + +reorder(Validations, Order) -> + Context = #{ + not_found => sets:new([{version, 2}]), + duplicated => sets:new([{version, 2}]), + res => [], + seen => sets:new([{version, 2}]) + }, + reorder(Validations, Order, Context). + +reorder(NotReordered, _Order = [], #{not_found := NotFound0, duplicated := Duplicated0, res := Res}) -> + NotFound = sets:to_list(NotFound0), + Duplicated = sets:to_list(Duplicated0), + case {NotReordered, NotFound, Duplicated} of + {[], [], []} -> + {ok, lists:reverse(Res)}; + {_, _, _} -> + Error = #{ + not_found => NotFound, + duplicated => Duplicated, + not_reordered => [N || #{<<"name">> := N} <- NotReordered] + }, + {error, Error} + end; +reorder(RemainingValidations, [Name | Rest], Context0 = #{seen := Seen0}) -> + case sets:is_element(Name, Seen0) of + true -> + Context = maps:update_with( + duplicated, fun(S) -> sets:add_element(Name, S) end, Context0 + ), + reorder(RemainingValidations, Rest, Context); + false -> + case safe_take(Name, RemainingValidations) of + error -> + Context = maps:update_with( + not_found, fun(S) -> sets:add_element(Name, S) end, Context0 + ), + reorder(RemainingValidations, Rest, Context); + {ok, {Validation, Front, Rear}} -> + Context1 = maps:update_with( + seen, fun(S) -> sets:add_element(Name, S) end, Context0 + ), + Context = maps:update_with(res, fun(Vs) -> [Validation | Vs] end, Context1), + reorder(Front ++ Rear, Rest, Context) + end + end. + +fetch_with_index([{Pos, #{name := Name} = Validation} | _Rest], Name) -> + {Pos, Validation}; +fetch_with_index([{_, _} | Rest], Name) -> + fetch_with_index(Rest, Name); +fetch_with_index(Validations, Name) -> + fetch_with_index(lists:enumerate(Validations), Name). 
+
+safe_take(Name, Validations) ->
+    case lists:splitwith(fun(#{<<"name">> := N}) -> N =/= Name end, Validations) of
+        {_Front, []} ->
+            error;
+        {Front, [Found | Rear]} ->
+            {ok, {Found, Front, Rear}}
+    end.
+
+do_lookup(_Name, _Validations = []) ->
+    {error, not_found};
+do_lookup(Name, [#{name := Name} = Validation | _Rest]) ->
+    {ok, Validation};
+do_lookup(Name, [_ | Rest]) ->
+    do_lookup(Name, Rest).
+
+%% "Merging" in the context of the validation array means:
+%% * Existing validations (identified by `name') are left untouched.
+%% * No validations are removed.
+%% * New validations are appended to the existing list.
+%% * Existing validations are not reordered.
+prepare_config_merge(NewConfig0, OldConfig) ->
+    {ImportedRawValidations, NewConfigNoValidations} =
+        case maps:take(<<"validations">>, NewConfig0) of
+            error ->
+                {[], NewConfig0};
+            {V, R} ->
+                {V, R}
+        end,
+    OldRawValidations = maps:get(<<"validations">>, OldConfig, []),
+    #{added := NewRawValidations} = emqx_utils:diff_lists(
+        ImportedRawValidations,
+        OldRawValidations,
+        fun(#{<<"name">> := N}) -> N end
+    ),
+    Config0 = emqx_utils_maps:deep_merge(OldConfig, NewConfigNoValidations),
+    Config = maps:update_with(
+        <<"validations">>,
+        fun(OldVs) -> OldVs ++ NewRawValidations end,
+        NewRawValidations,
+        Config0
+    ),
+    #{
+        new_validations => NewRawValidations,
+        resulting_config => Config
+    }.
+
+prepare_config_replace(NewConfig, OldConfig) ->
+    ImportedRawValidations = maps:get(<<"validations">>, NewConfig, []),
+    OldValidations = maps:get(validations, OldConfig, []),
+    %% Since, at this point, we have an input raw config but a parsed old config, we
+    %% project both down to only their names, and consider common names as changed.
+    #{
+        added := NewValidations,
+        removed := DeletedValidations,
+        changed := ChangedValidations0,
+        identical := ChangedValidations1
+    } = emqx_utils:diff_lists(
+        lists:map(fun(#{<<"name">> := N}) -> N end, ImportedRawValidations),
+        lists:map(fun(#{name := N}) -> N end, OldValidations),
+        fun(N) -> N end
+    ),
+    #{
+        new_validations => NewValidations,
+        changed_validations => ChangedValidations0 ++ ChangedValidations1,
+        deleted_validations => DeletedValidations
+    }.
From 05ebb17cd64e92c8109575bf066892d8a92efe68 Mon Sep 17 00:00:00 2001
From: Thales Macedo Garitezi
Date: Mon, 10 Jun 2024 15:54:37 -0300
Subject: [PATCH 26/33] refactor: index positions rather than names

Addresses https://github.com/emqx/emqx/pull/13199#discussion_r1633096025
---
 .../src/emqx_schema_validation_config.erl     | 20 ++--
 .../src/emqx_schema_validation_registry.erl   | 98 +++++++++++++------
 2 files changed, 76 insertions(+), 42 deletions(-)

diff --git a/apps/emqx_schema_validation/src/emqx_schema_validation_config.erl b/apps/emqx_schema_validation/src/emqx_schema_validation_config.erl
index df882a078..2bfd11c6d 100644
--- a/apps/emqx_schema_validation/src/emqx_schema_validation_config.erl
+++ b/apps/emqx_schema_validation/src/emqx_schema_validation_config.erl
@@ -66,10 +66,10 @@ load() ->
 unload() ->
     Validations = emqx:get_config(?VALIDATIONS_CONF_PATH, []),
     lists:foreach(
-        fun(Validation) ->
-            ok = emqx_schema_validation_registry:delete(Validation)
+        fun({Pos, Validation}) ->
+            ok = emqx_schema_validation_registry:delete(Validation, Pos)
         end,
-        Validations
+        lists:enumerate(Validations)
     ).
 
 -spec list() -> [validation()].
@@ -146,11 +146,11 @@ post_config_update(?VALIDATIONS_CONF_PATH, {update, #{<<"name">> := Name}}, New, ok = emqx_schema_validation_registry:update(OldValidation, Pos, NewValidation), ok; post_config_update(?VALIDATIONS_CONF_PATH, {delete, Name}, _New, Old, _AppEnvs) -> - {_Pos, Validation} = fetch_with_index(Old, Name), - ok = emqx_schema_validation_registry:delete(Validation), + {Pos, Validation} = fetch_with_index(Old, Name), + ok = emqx_schema_validation_registry:delete(Validation, Pos), ok; -post_config_update(?VALIDATIONS_CONF_PATH, {reorder, _Order}, New, _Old, _AppEnvs) -> - ok = emqx_schema_validation_registry:reindex_positions(New), +post_config_update(?VALIDATIONS_CONF_PATH, {reorder, _Order}, New, Old, _AppEnvs) -> + ok = emqx_schema_validation_registry:reindex_positions(New, Old), ok; post_config_update([?CONF_ROOT], {merge, _}, ResultingConfig, Old, _AppEnvs) -> #{validations := ResultingValidations} = ResultingConfig, @@ -181,8 +181,8 @@ post_config_update([?CONF_ROOT], {replace, Input}, ResultingConfig, Old, _AppEnv #{validations := OldValidations} = Old, lists:foreach( fun(Name) -> - {_Pos, Validation} = fetch_with_index(OldValidations, Name), - ok = emqx_schema_validation_registry:delete(Validation) + {Pos, Validation} = fetch_with_index(OldValidations, Name), + ok = emqx_schema_validation_registry:delete(Validation, Pos) end, DeletedValidations ), @@ -203,7 +203,7 @@ post_config_update([?CONF_ROOT], {replace, Input}, ResultingConfig, Old, _AppEnv end, ChangedValidations0 ), - ok = emqx_schema_validation_registry:reindex_positions(ResultingValidations), + ok = emqx_schema_validation_registry:reindex_positions(ResultingValidations, OldValidations), {ok, #{changed_validations => ChangedValidations}}. %%------------------------------------------------------------------------------ diff --git a/apps/emqx_schema_validation/src/emqx_schema_validation_registry.erl b/apps/emqx_schema_validation/src/emqx_schema_validation_registry.erl index c25007424..de90fa7cd 100644 --- a/apps/emqx_schema_validation/src/emqx_schema_validation_registry.erl +++ b/apps/emqx_schema_validation/src/emqx_schema_validation_registry.erl @@ -10,8 +10,8 @@ lookup/1, insert/2, update/3, - delete/1, - reindex_positions/1, + delete/2, + reindex_positions/2, matching_validations/1, @@ -51,10 +51,10 @@ -type validation() :: _TODO. -type position_index() :: pos_integer(). --record(reindex_positions, {validations :: [validation()]}). +-record(reindex_positions, {new_validations :: [validation()], old_validations :: [validation()]}). -record(insert, {pos :: position_index(), validation :: validation()}). -record(update, {old :: validation(), pos :: position_index(), new :: validation()}). --record(delete, {validation :: validation()}). +-record(delete, {validation :: validation(), pos :: position_index()}). %%------------------------------------------------------------------------------ %% API @@ -74,9 +74,16 @@ lookup(Name) -> {ok, Validation} end. --spec reindex_positions([validation()]) -> ok. -reindex_positions(Validations) -> - gen_server:call(?MODULE, #reindex_positions{validations = Validations}, infinity). +-spec reindex_positions([validation()], [validation()]) -> ok. +reindex_positions(NewValidations, OldValidations) -> + gen_server:call( + ?MODULE, + #reindex_positions{ + new_validations = NewValidations, + old_validations = OldValidations + }, + infinity + ). -spec insert(position_index(), validation()) -> ok. 
insert(Pos, Validation) -> @@ -86,23 +93,36 @@ insert(Pos, Validation) -> update(Old, Pos, New) -> gen_server:call(?MODULE, #update{old = Old, pos = Pos, new = New}, infinity). --spec delete(validation()) -> ok. -delete(Validation) -> - gen_server:call(?MODULE, #delete{validation = Validation}, infinity). +-spec delete(validation(), position_index()) -> ok. +delete(Validation, Pos) -> + gen_server:call(?MODULE, #delete{validation = Validation, pos = Pos}, infinity). %% @doc Returns a list of matching validation names, sorted by their configuration order. -spec matching_validations(emqx_types:topic()) -> [validation()]. matching_validations(Topic) -> - Validations0 = [ - {Pos, Validation} - || M <- emqx_topic_index:matches(Topic, ?VALIDATION_TOPIC_INDEX, [unique]), - [Pos] <- [emqx_topic_index:get_record(M, ?VALIDATION_TOPIC_INDEX)], - {ok, Validation} <- [ - lookup(emqx_topic_index:get_id(M)) - ] - ], - Validations1 = lists:sort(fun({Pos1, _V1}, {Pos2, _V2}) -> Pos1 =< Pos2 end, Validations0), - lists:map(fun({_Pos, V}) -> V end, Validations1). + Validations0 = + lists:flatmap( + fun(M) -> + case emqx_topic_index:get_record(M, ?VALIDATION_TOPIC_INDEX) of + [Name] -> + [Name]; + _ -> + [] + end + end, + emqx_topic_index:matches(Topic, ?VALIDATION_TOPIC_INDEX, [unique]) + ), + lists:flatmap( + fun(Name) -> + case lookup(Name) of + {ok, Validation} -> + [Validation]; + _ -> + [] + end + end, + Validations0 + ). -spec metrics_worker_spec() -> supervisor:child_spec(). metrics_worker_spec() -> @@ -133,8 +153,15 @@ init(_) -> State = #{}, {ok, State}. -handle_call(#reindex_positions{validations = Validations}, _From, State) -> - do_reindex_positions(Validations), +handle_call( + #reindex_positions{ + new_validations = NewValidations, + old_validations = OldValidations + }, + _From, + State +) -> + do_reindex_positions(NewValidations, OldValidations), {reply, ok, State}; handle_call(#insert{pos = Pos, validation = Validation}, _From, State) -> do_insert(Pos, Validation), @@ -142,8 +169,8 @@ handle_call(#insert{pos = Pos, validation = Validation}, _From, State) -> handle_call(#update{old = OldValidation, pos = Pos, new = NewValidation}, _From, State) -> ok = do_update(OldValidation, Pos, NewValidation), {reply, ok, State}; -handle_call(#delete{validation = Validation}, _From, State) -> - do_delete(Validation), +handle_call(#delete{validation = Validation, pos = Pos}, _From, State) -> + do_delete(Validation, Pos), {reply, ok, State}; handle_call(_Call, _From, State) -> {reply, ignored, State}. @@ -160,7 +187,14 @@ create_tables() -> _ = emqx_utils_ets:new(?VALIDATION_TAB, [public, ordered_set, {read_concurrency, true}]), ok. -do_reindex_positions(Validations) -> +do_reindex_positions(NewValidations, OldValidations) -> + lists:foreach( + fun({Pos, Validation}) -> + #{topics := Topics} = Validation, + delete_topic_index(Pos, Topics) + end, + lists:enumerate(OldValidations) + ), lists:foreach( fun({Pos, Validation}) -> #{ @@ -170,7 +204,7 @@ do_reindex_positions(Validations) -> do_insert_into_tab(Name, Validation, Pos), update_topic_index(Name, Pos, Topics) end, - lists:enumerate(Validations) + lists:enumerate(NewValidations) ). do_insert(Pos, Validation) -> @@ -193,17 +227,17 @@ do_update(OldValidation, Pos, NewValidation) -> } = NewValidation, maybe_create_metrics(Name), do_insert_into_tab(Name, NewValidation, Pos), - delete_topic_index(Name, OldTopics), + delete_topic_index(Pos, OldTopics), Enabled andalso update_topic_index(Name, Pos, NewTopics), ok. 
-do_delete(Validation) -> +do_delete(Validation, Pos) -> #{ name := Name, topics := Topics } = Validation, ets:delete(?VALIDATION_TAB, Name), - delete_topic_index(Name, Topics), + delete_topic_index(Pos, Topics), drop_metrics(Name), ok. @@ -226,15 +260,15 @@ drop_metrics(Name) -> update_topic_index(Name, Pos, Topics) -> lists:foreach( fun(Topic) -> - true = emqx_topic_index:insert(Topic, Name, Pos, ?VALIDATION_TOPIC_INDEX) + true = emqx_topic_index:insert(Topic, Pos, Name, ?VALIDATION_TOPIC_INDEX) end, Topics ). -delete_topic_index(Name, Topics) -> +delete_topic_index(Pos, Topics) -> lists:foreach( fun(Topic) -> - true = emqx_topic_index:delete(Topic, Name, ?VALIDATION_TOPIC_INDEX) + true = emqx_topic_index:delete(Topic, Pos, ?VALIDATION_TOPIC_INDEX) end, Topics ). From fb0da9848c9134ef4ae3aa74d4eb98835ba5627c Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Tue, 11 Jun 2024 11:42:43 +0200 Subject: [PATCH 27/33] feat(tpl): add separate `placeholders/1` function The purpose is to have a clearer view of placeholders used in a template, without going the usual `render(Template, #{})` route that is actually subtly misleading: it won't mention that `${}` / `${.}` placeholder has been used. Also unify handling of `${}` / `${.}` in a couple of places. --- apps/emqx_utils/src/emqx_template.erl | 35 +++++++++++++++++--- apps/emqx_utils/test/emqx_template_SUITE.erl | 18 ++++++++++ 2 files changed, 48 insertions(+), 5 deletions(-) diff --git a/apps/emqx_utils/src/emqx_template.erl b/apps/emqx_utils/src/emqx_template.erl index 1383f90e1..02e18017e 100644 --- a/apps/emqx_utils/src/emqx_template.erl +++ b/apps/emqx_utils/src/emqx_template.erl @@ -20,6 +20,7 @@ -export([parse/2]). -export([parse_deep/1]). -export([parse_deep/2]). +-export([placeholders/1]). -export([validate/2]). -export([is_const/1]). -export([unparse/1]). @@ -143,14 +144,19 @@ parse_accessor(Var) -> Name end. +-spec placeholders(t()) -> [varname()]. +placeholders(Template) when is_list(Template) -> + [Name || {var, Name, _} <- Template]; +placeholders({'$tpl', Template}) -> + placeholders_deep(Template). + %% @doc Validate a template against a set of allowed variables. %% If the given template contains any variable not in the allowed set, an error %% is returned. -spec validate([varname() | {var_namespace, varname()}], t()) -> ok | {error, [_Error :: {varname(), disallowed}]}. validate(Allowed, Template) -> - {_, Errors} = render(Template, #{}), - {Used, _} = lists:unzip(Errors), + Used = placeholders(Template), case find_disallowed(lists:usort(Used), Allowed) of [] -> ok; @@ -192,10 +198,13 @@ is_allowed(Var, [{var_namespace, VarPrefix} | Allowed]) -> false -> is_allowed(Var, Allowed) end; -is_allowed(Var, [Var | _Allowed]) -> +is_allowed(Var, [VarAllowed | Rest]) -> + is_same_varname(Var, VarAllowed) orelse is_allowed(Var, Rest). + +is_same_varname("", ".") -> true; -is_allowed(Var, [_ | Allowed]) -> - is_allowed(Var, Allowed). +is_same_varname(V1, V2) -> + V1 =:= V2. %% @doc Check if a template is constant with respect to rendering, i.e. does not %% contain any placeholders. @@ -322,6 +331,22 @@ parse_deep_term(Term, Opts) when is_binary(Term) -> parse_deep_term(Term, _Opts) -> Term. +-spec placeholders_deep(deeptpl()) -> [varname()]. 
+placeholders_deep(Template) when is_map(Template) -> + maps:fold( + fun(KT, VT, Acc) -> placeholders_deep(KT) ++ placeholders_deep(VT) ++ Acc end, + [], + Template + ); +placeholders_deep({list, Template}) when is_list(Template) -> + lists:flatmap(fun placeholders_deep/1, Template); +placeholders_deep({tuple, Template}) when is_list(Template) -> + lists:flatmap(fun placeholders_deep/1, Template); +placeholders_deep(Template) when is_list(Template) -> + placeholders(Template); +placeholders_deep(_Term) -> + []. + render_deep(Template, Context, Opts) when is_map(Template) -> maps:fold( fun(KT, VT, {Acc, Errors}) -> diff --git a/apps/emqx_utils/test/emqx_template_SUITE.erl b/apps/emqx_utils/test/emqx_template_SUITE.erl index 0a3273170..a049ebfbc 100644 --- a/apps/emqx_utils/test/emqx_template_SUITE.erl +++ b/apps/emqx_utils/test/emqx_template_SUITE.erl @@ -128,6 +128,14 @@ t_render_custom_bindings(_) -> render_string(Template, {?MODULE, []}) ). +t_placeholders(_) -> + TString = <<"a:${a},b:${b},c:$${c},d:{${d.d1}},e:${$}{e},lit:${$}{$}">>, + Template = emqx_template:parse(TString), + ?assertEqual( + ["a", "b", "c", "d.d1"], + emqx_template:placeholders(Template) + ). + t_unparse(_) -> TString = <<"a:${a},b:${b},c:$${c},d:{${d.d1}},e:${$}{e},lit:${$}{$}">>, Template = emqx_template:parse(TString), @@ -337,6 +345,16 @@ t_unparse_tmpl_deep(_) -> Template = emqx_template:parse_deep(Term), ?assertEqual(Term, emqx_template:unparse(Template)). +t_allow_this(_) -> + ?assertEqual( + {error, [{"", disallowed}]}, + emqx_template:validate(["d"], emqx_template:parse(<<"this:${}">>)) + ), + ?assertEqual( + {error, [{"", disallowed}]}, + emqx_template:validate(["d"], emqx_template:parse(<<"this:${.}">>)) + ). + t_allow_var_by_namespace(_) -> Context = #{d => #{d1 => <<"hi">>}}, Template = emqx_template:parse(<<"d.d1:${d.d1}">>), From 29fc30ea69041ad0e0f726d23cde52a1ce259dbe Mon Sep 17 00:00:00 2001 From: Andrew Mayorov Date: Tue, 11 Jun 2024 11:46:34 +0200 Subject: [PATCH 28/33] fix(bridge-s3): validate aggreg key template before adding a channel --- .../src/emqx_bridge_s3_connector.erl | 17 +++++++-- .../src/emqx_bridge_s3_upload.erl | 36 ++++++++++++++----- 2 files changed, 41 insertions(+), 12 deletions(-) diff --git a/apps/emqx_bridge_s3/src/emqx_bridge_s3_connector.erl b/apps/emqx_bridge_s3/src/emqx_bridge_s3_connector.erl index bc9f37935..00c03fd3a 100644 --- a/apps/emqx_bridge_s3/src/emqx_bridge_s3_connector.erl +++ b/apps/emqx_bridge_s3/src/emqx_bridge_s3_connector.erl @@ -162,8 +162,13 @@ on_get_status(_InstId, State = #{client_config := Config}) -> -spec on_add_channel(_InstanceId :: resource_id(), state(), channel_id(), channel_config()) -> {ok, state()} | {error, _Reason}. on_add_channel(_InstId, State = #{channels := Channels}, ChannelId, Config) -> - ChannelState = start_channel(State, Config), - {ok, State#{channels => Channels#{ChannelId => ChannelState}}}. + try + ChannelState = start_channel(State, Config), + {ok, State#{channels => Channels#{ChannelId => ChannelState}}} + catch + throw:Reason -> + {error, Reason} + end. -spec on_remove_channel(_InstanceId :: resource_id(), state(), channel_id()) -> {ok, state()}. 
@@ -221,9 +226,10 @@ start_channel(State, #{ max_records => MaxRecords, work_dir => work_dir(Type, Name) }, + Template = ensure_ok(emqx_bridge_s3_upload:mk_key_template(Parameters)), DeliveryOpts = #{ bucket => Bucket, - key => emqx_bridge_s3_upload:mk_key_template(Parameters), + key => Template, container => Container, upload_options => emqx_bridge_s3_upload:mk_upload_options(Parameters), callback_module => ?MODULE, @@ -247,6 +253,11 @@ start_channel(State, #{ on_stop => fun() -> ?AGGREG_SUP:delete_child(AggregId) end }. +ensure_ok({ok, V}) -> + V; +ensure_ok({error, Reason}) -> + throw(Reason). + upload_options(Parameters) -> #{acl => maps:get(acl, Parameters, undefined)}. diff --git a/apps/emqx_bridge_s3/src/emqx_bridge_s3_upload.erl b/apps/emqx_bridge_s3/src/emqx_bridge_s3_upload.erl index 6c5ee5d0e..2bf12f24b 100644 --- a/apps/emqx_bridge_s3/src/emqx_bridge_s3_upload.erl +++ b/apps/emqx_bridge_s3/src/emqx_bridge_s3_upload.erl @@ -248,17 +248,35 @@ convert_action(Conf = #{<<"parameters">> := Params, <<"resource_opts">> := Resou %% Interpreting options --spec mk_key_template(_Parameters :: map()) -> emqx_template:str(). +-spec mk_key_template(_Parameters :: map()) -> + {ok, emqx_template:str()} | {error, _Reason}. mk_key_template(#{key := Key}) -> Template = emqx_template:parse(Key), - {_, BindingErrors} = emqx_template:render(Template, #{}), - {UsedBindings, _} = lists:unzip(BindingErrors), - SuffixTemplate = mk_suffix_template(UsedBindings), - case emqx_template:is_const(SuffixTemplate) of - true -> - Template; - false -> - Template ++ SuffixTemplate + case validate_bindings(emqx_template:placeholders(Template)) of + UsedBindings when is_list(UsedBindings) -> + SuffixTemplate = mk_suffix_template(UsedBindings), + case emqx_template:is_const(SuffixTemplate) of + true -> + {ok, Template}; + false -> + {ok, Template ++ SuffixTemplate} + end; + Error = {error, _} -> + Error + end. + +validate_bindings(Bindings) -> + Formats = ["rfc3339", "rfc3339utc", "unix"], + AllowedBindings = lists:append([ + ["action", "node", "sequence"], + ["datetime." ++ F || F <- Formats], + ["datetime_until." ++ F || F <- Formats] + ]), + case Bindings -- AllowedBindings of + [] -> + Bindings; + Disallowed -> + {error, {invalid_key_template, {disallowed_placeholders, Disallowed}}} end. 
 mk_suffix_template(UsedBindings) ->
From d4fb812352558165e2d61bb9cc03f64b53bfbfee Mon Sep 17 00:00:00 2001
From: Andrew Mayorov
Date: Tue, 11 Jun 2024 11:47:08 +0200
Subject: [PATCH 29/33] fix(aggreg): anticipate and handle delivery startup errors

---
 .../src/emqx_connector_aggregator.app.src     |  2 +-
 .../src/emqx_connector_aggregator.erl         | 10 +++++++---
 2 files changed, 8 insertions(+), 4 deletions(-)

diff --git a/apps/emqx_connector_aggregator/src/emqx_connector_aggregator.app.src b/apps/emqx_connector_aggregator/src/emqx_connector_aggregator.app.src
index 6562958ee..b79cba2b2 100644
--- a/apps/emqx_connector_aggregator/src/emqx_connector_aggregator.app.src
+++ b/apps/emqx_connector_aggregator/src/emqx_connector_aggregator.app.src
@@ -1,6 +1,6 @@
 {application, emqx_connector_aggregator, [
     {description, "EMQX Enterprise Connector Data Aggregator"},
-    {vsn, "0.1.0"},
+    {vsn, "0.1.1"},
     {registered, []},
     {applications, [
         kernel,
diff --git a/apps/emqx_connector_aggregator/src/emqx_connector_aggregator.erl b/apps/emqx_connector_aggregator/src/emqx_connector_aggregator.erl
index f3936fd54..935fa6b52 100644
--- a/apps/emqx_connector_aggregator/src/emqx_connector_aggregator.erl
+++ b/apps/emqx_connector_aggregator/src/emqx_connector_aggregator.erl
@@ -372,9 +372,13 @@ lookup_current_buffer(Name) ->
 %%
 
 enqueue_delivery(Buffer, St = #st{name = Name, deliveries = Ds}) ->
-    {ok, Pid} = emqx_connector_aggreg_upload_sup:start_delivery(Name, Buffer),
-    MRef = erlang:monitor(process, Pid),
-    St#st{deliveries = Ds#{MRef => Buffer}}.
+    case emqx_connector_aggreg_upload_sup:start_delivery(Name, Buffer) of
+        {ok, Pid} ->
+            MRef = erlang:monitor(process, Pid),
+            St#st{deliveries = Ds#{MRef => Buffer}};
+        {error, _} = Error ->
+            handle_delivery_exit(Buffer, Error, St)
+    end.
 
 handle_delivery_exit(Buffer, Normal, St = #st{name = Name}) when
     Normal == normal; Normal == noproc
From 57ecf4de9cf4cacbc65f38a3da213aeb08ca80fc Mon Sep 17 00:00:00 2001
From: Andrew Mayorov
Date: Tue, 11 Jun 2024 11:54:02 +0200
Subject: [PATCH 30/33] chore: add changelog entry

---
 changes/ee/fix-13227.en.md | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 changes/ee/fix-13227.en.md

diff --git a/changes/ee/fix-13227.en.md b/changes/ee/fix-13227.en.md
new file mode 100644
index 000000000..f2c1c2f38
--- /dev/null
+++ b/changes/ee/fix-13227.en.md
@@ -0,0 +1 @@
+Fixed an issue with the S3 Bridge when running in aggregated mode, where an invalid key template in the configuration was not reported as an error during bridge setup, but instead caused a storm of hard-to-recover crashes later.
From 03b226248a1fbc2b495c9bf20020592451c8e9ba Mon Sep 17 00:00:00 2001
From: Thales Macedo Garitezi
Date: Fri, 7 Jun 2024 14:42:02 -0300
Subject: [PATCH 31/33] feat(schema registry): add check for inner types

Currently, only `protobuf` has any.
---
 .../include/emqx_schema_registry.hrl          |  2 +-
 .../src/emqx_schema_registry.erl              | 11 +++++
 .../src/emqx_schema_registry_serde.erl        | 45 +++++++++++++++++++
 .../test/emqx_schema_registry_serde_SUITE.erl | 43 +++++++++++++++---
 4 files changed, 94 insertions(+), 7 deletions(-)

diff --git a/apps/emqx_schema_registry/include/emqx_schema_registry.hrl b/apps/emqx_schema_registry/include/emqx_schema_registry.hrl
index b25042c20..11bbb0b72 100644
--- a/apps/emqx_schema_registry/include/emqx_schema_registry.hrl
+++ b/apps/emqx_schema_registry/include/emqx_schema_registry.hrl
@@ -26,7 +26,7 @@
 -type encoded_data() :: iodata().
 -type decoded_data() :: map().
 
--type serde_type() :: avro | protobuf | json.
+-type serde_type() :: emqx_schema_registry_serde:serde_type(). -type serde_opts() :: map(). -record(serde, { diff --git a/apps/emqx_schema_registry/src/emqx_schema_registry.erl b/apps/emqx_schema_registry/src/emqx_schema_registry.erl index 7ba4ebcf8..5005b0dc8 100644 --- a/apps/emqx_schema_registry/src/emqx_schema_registry.erl +++ b/apps/emqx_schema_registry/src/emqx_schema_registry.erl @@ -16,6 +16,8 @@ start_link/0, add_schema/2, get_schema/1, + is_existing_type/1, + is_existing_type/2, delete_schema/1, list_schemas/0 ]). @@ -52,6 +54,7 @@ %% API %%------------------------------------------------------------------------------------------------- +-spec start_link() -> gen_server:start_ret(). start_link() -> gen_server:start_link({local, ?MODULE}, ?MODULE, [], []). @@ -64,6 +67,14 @@ get_serde(SchemaName) -> {ok, Serde} end. +-spec is_existing_type(schema_name()) -> boolean(). +is_existing_type(SchemaName) -> + is_existing_type(SchemaName, []). + +-spec is_existing_type(schema_name(), [binary()]) -> boolean(). +is_existing_type(SchemaName, Path) -> + emqx_schema_registry_serde:is_existing_type(SchemaName, Path). + -spec get_schema(schema_name()) -> {ok, map()} | {error, not_found}. get_schema(SchemaName) -> case diff --git a/apps/emqx_schema_registry/src/emqx_schema_registry_serde.erl b/apps/emqx_schema_registry/src/emqx_schema_registry_serde.erl index e8da35449..1a2f90ee7 100644 --- a/apps/emqx_schema_registry/src/emqx_schema_registry_serde.erl +++ b/apps/emqx_schema_registry/src/emqx_schema_registry_serde.erl @@ -3,6 +3,8 @@ %%-------------------------------------------------------------------- -module(emqx_schema_registry_serde). +-feature(maybe_expr, enable). + -behaviour(emqx_rule_funcs). -include("emqx_schema_registry.hrl"). @@ -14,6 +16,8 @@ make_serde/3, handle_rule_function/2, schema_check/3, + is_existing_type/1, + is_existing_type/2, destroy/1 ]). @@ -27,6 +31,10 @@ eval_encode/2 ]). +%%------------------------------------------------------------------------------ +%% Type definitions +%%------------------------------------------------------------------------------ + -define(BOOL(SerdeName, EXPR), try _ = EXPR, @@ -38,10 +46,28 @@ end ). +-type eval_context() :: term(). + +-export_type([serde_type/0]). + %%------------------------------------------------------------------------------ %% API %%------------------------------------------------------------------------------ +-spec is_existing_type(schema_name()) -> boolean(). +is_existing_type(SchemaName) -> + is_existing_type(SchemaName, []). + +-spec is_existing_type(schema_name(), [binary()]) -> boolean(). +is_existing_type(SchemaName, Path) -> + maybe + {ok, #serde{type = SerdeType, eval_context = EvalContext}} ?= + emqx_schema_registry:get_serde(SchemaName), + has_inner_type(SerdeType, EvalContext, Path) + else + _ -> false + end. + -spec handle_rule_function(atom(), list()) -> any() | {error, no_match_for_function}. handle_rule_function(sparkplug_decode, [Data]) -> handle_rule_function( @@ -338,3 +364,22 @@ unload_code(SerdeMod) -> _ = code:purge(SerdeMod), _ = code:delete(SerdeMod), ok. + +-spec has_inner_type(serde_type(), eval_context(), [binary()]) -> + boolean(). +has_inner_type(protobuf, _SerdeMod, [_, _ | _]) -> + %% Protobuf only has one level of message types. 
+ false; +has_inner_type(protobuf, SerdeMod, [MessageTypeBin]) -> + try apply(SerdeMod, get_msg_names, []) of + Names -> + lists:member(MessageTypeBin, [atom_to_binary(N, utf8) || N <- Names]) + catch + _:_ -> + false + end; +has_inner_type(_SerdeType, _EvalContext, []) -> + %% This function is only called if we already found a serde, so the root does exist. + true; +has_inner_type(_SerdeType, _EvalContext, _Path) -> + false. diff --git a/apps/emqx_schema_registry/test/emqx_schema_registry_serde_SUITE.erl b/apps/emqx_schema_registry/test/emqx_schema_registry_serde_SUITE.erl index 0fad015f0..bdc083736 100644 --- a/apps/emqx_schema_registry/test/emqx_schema_registry_serde_SUITE.erl +++ b/apps/emqx_schema_registry/test/emqx_schema_registry_serde_SUITE.erl @@ -14,7 +14,6 @@ -import(emqx_common_test_helpers, [on_exit/1]). --define(APPS, [emqx_conf, emqx_rule_engine, emqx_schema_registry]). -define(INVALID_JSON, #{ reason := #{expected := "emqx_schema:json_binary()"}, kind := validation_error @@ -28,12 +27,20 @@ all() -> emqx_common_test_helpers:all(?MODULE). init_per_suite(Config) -> - emqx_config:save_schema_mod_and_names(emqx_schema_registry_schema), - emqx_mgmt_api_test_util:init_suite(?APPS), - Config. + Apps = emqx_cth_suite:start( + [ + emqx, + emqx_conf, + emqx_schema_registry, + emqx_rule_engine + ], + #{work_dir => emqx_cth_suite:work_dir(Config)} + ), + [{apps, Apps} | Config]. -end_per_suite(_Config) -> - emqx_mgmt_api_test_util:end_suite(lists:reverse(?APPS)), +end_per_suite(Config) -> + Apps = ?config(apps, Config), + emqx_cth_suite:stop(Apps), ok. init_per_testcase(_TestCase, Config) -> Config. @@ -240,3 +247,27 @@ t_json_validation(_Config) -> ?assertNot(F(schema_check, <<"{\"bar\": 2}">>)), ?assertNot(F(schema_check, <<"{\"foo\": \"notinteger\", \"bar\": 2}">>)), ok. + +t_is_existing_type(_Config) -> + JsonName = <<"myjson">>, + ?assertNot(emqx_schema_registry:is_existing_type(JsonName)), + ok = emqx_schema_registry:add_schema(JsonName, schema_params(json)), + AvroName = <<"myavro">>, + ?assertNot(emqx_schema_registry:is_existing_type(AvroName)), + ok = emqx_schema_registry:add_schema(AvroName, schema_params(avro)), + ProtobufName = <<"myprotobuf">>, + MessageType = <<"Person">>, + ?assertNot(emqx_schema_registry:is_existing_type(ProtobufName)), + ok = emqx_schema_registry:add_schema(ProtobufName, schema_params(protobuf)), + %% JSON Schema: no inner names + ?assert(emqx_schema_registry:is_existing_type(JsonName)), + ?assertNot(emqx_schema_registry:is_existing_type(JsonName, [JsonName])), + %% Avro: no inner names + ?assert(emqx_schema_registry:is_existing_type(AvroName)), + ?assertNot(emqx_schema_registry:is_existing_type(AvroName, [AvroName])), + %% Protobuf: one level of message types + ?assert(emqx_schema_registry:is_existing_type(ProtobufName)), + ?assertNot(emqx_schema_registry:is_existing_type(ProtobufName, [ProtobufName])), + ?assert(emqx_schema_registry:is_existing_type(ProtobufName, [MessageType])), + ?assertNot(emqx_schema_registry:is_existing_type(ProtobufName, [MessageType, MessageType])), + ok. 
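The new check is a plain boolean lookup, so downstream code can turn it into an ok/error guard before accepting a reference to a schema. A minimal sketch of such a caller, assuming an illustrative wrapper name and error term (they are not part of the patch; only the emqx_schema_registry:is_existing_type/2 call is):

    %% `InnerPath' is [] for JSON/Avro schemas and [MessageType] for protobuf,
    %% matching the is_existing_type/2 contract introduced above.
    ensure_known_schema(SchemaName, InnerPath) ->
        case emqx_schema_registry:is_existing_type(SchemaName, InnerPath) of
            true -> ok;
            false -> {error, {unknown_schema, SchemaName, InnerPath}}
        end.

For example, with the serde registered in the test case above, ensure_known_schema(<<"myprotobuf">>, [<<"Person">>]) returns ok, while ensure_known_schema(<<"myprotobuf">>, [<<"Person">>, <<"Person">>]) returns the error tuple, since protobuf exposes only one level of message types.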
From 0f9c3b4cea5aea11277f665fb12fc58f6b622297 Mon Sep 17 00:00:00 2001 From: Thales Macedo Garitezi Date: Fri, 7 Jun 2024 15:27:50 -0300 Subject: [PATCH 32/33] feat(schema validation): check references schema names and types before changing config Fixes https://emqx.atlassian.net/browse/EMQX-12368 --- .../include/emqx_schema_registry.hrl | 2 +- .../src/emqx_schema_validation.erl | 14 +- .../src/emqx_schema_validation_config.erl | 175 +++++++++++++----- .../emqx_schema_validation_http_api_SUITE.erl | 127 ++++++++++--- changes/ee/feat-13210.en.md | 1 + 5 files changed, 250 insertions(+), 69 deletions(-) create mode 100644 changes/ee/feat-13210.en.md diff --git a/apps/emqx_schema_registry/include/emqx_schema_registry.hrl b/apps/emqx_schema_registry/include/emqx_schema_registry.hrl index 11bbb0b72..b25042c20 100644 --- a/apps/emqx_schema_registry/include/emqx_schema_registry.hrl +++ b/apps/emqx_schema_registry/include/emqx_schema_registry.hrl @@ -26,7 +26,7 @@ -type encoded_data() :: iodata(). -type decoded_data() :: map(). --type serde_type() :: emqx_schema_registry_serde:serde_type(). +-type serde_type() :: avro | protobuf | json. -type serde_opts() :: map(). -record(serde, { diff --git a/apps/emqx_schema_validation/src/emqx_schema_validation.erl b/apps/emqx_schema_validation/src/emqx_schema_validation.erl index 6a5e76573..755773f13 100644 --- a/apps/emqx_schema_validation/src/emqx_schema_validation.erl +++ b/apps/emqx_schema_validation/src/emqx_schema_validation.erl @@ -3,6 +3,8 @@ %%-------------------------------------------------------------------- -module(emqx_schema_validation). +-feature(maybe_expr, enable). + -include_lib("snabbkaffe/include/trace.hrl"). -include_lib("emqx_utils/include/emqx_message.hrl"). -include_lib("emqx/include/emqx_hooks.hrl"). @@ -41,7 +43,13 @@ -define(TRACE_TAG, "SCHEMA_VALIDATION"). -type validation_name() :: binary(). --type validation() :: _TODO. +-type raw_validation() :: #{binary() => _}. +-type validation() :: #{ + name := validation_name(), + strategy := all_pass | any_pass, + failure_action := drop | disconnect | ignore, + log_failure := #{level := error | warning | notice | info | debug | none} +}. -export_type([ validation/0, @@ -65,12 +73,12 @@ reorder(Order) -> lookup(Name) -> emqx_schema_validation_config:lookup(Name). --spec insert(validation()) -> +-spec insert(raw_validation()) -> {ok, _} | {error, _}. insert(Validation) -> emqx_schema_validation_config:insert(Validation). --spec update(validation()) -> +-spec update(raw_validation()) -> {ok, _} | {error, _}. update(Validation) -> emqx_schema_validation_config:update(Validation). diff --git a/apps/emqx_schema_validation/src/emqx_schema_validation_config.erl b/apps/emqx_schema_validation/src/emqx_schema_validation_config.erl index 2bfd11c6d..aef662887 100644 --- a/apps/emqx_schema_validation/src/emqx_schema_validation_config.erl +++ b/apps/emqx_schema_validation/src/emqx_schema_validation_config.erl @@ -3,6 +3,8 @@ %%-------------------------------------------------------------------- -module(emqx_schema_validation_config). +-feature(maybe_expr, enable). + %% API -export([ add_handler/0, @@ -136,15 +138,25 @@ pre_config_update([?CONF_ROOT], {merge, NewConfig}, OldConfig) -> pre_config_update([?CONF_ROOT], {replace, NewConfig}, _OldConfig) -> {ok, NewConfig}. 
-post_config_update(?VALIDATIONS_CONF_PATH, {append, #{<<"name">> := Name}}, New, _Old, _AppEnvs) -> - {Pos, Validation} = fetch_with_index(New, Name), - ok = emqx_schema_validation_registry:insert(Pos, Validation), - ok; -post_config_update(?VALIDATIONS_CONF_PATH, {update, #{<<"name">> := Name}}, New, Old, _AppEnvs) -> - {_Pos, OldValidation} = fetch_with_index(Old, Name), - {Pos, NewValidation} = fetch_with_index(New, Name), - ok = emqx_schema_validation_registry:update(OldValidation, Pos, NewValidation), - ok; +post_config_update( + ?VALIDATIONS_CONF_PATH, {append, #{<<"name">> := Name} = RawValidation}, New, _Old, _AppEnvs +) -> + maybe + ok ?= assert_referenced_schemas_exist(RawValidation), + {Pos, Validation} = fetch_with_index(New, Name), + ok = emqx_schema_validation_registry:insert(Pos, Validation), + ok + end; +post_config_update( + ?VALIDATIONS_CONF_PATH, {update, #{<<"name">> := Name} = RawValidation}, New, Old, _AppEnvs +) -> + maybe + ok ?= assert_referenced_schemas_exist(RawValidation), + {_Pos, OldValidation} = fetch_with_index(Old, Name), + {Pos, NewValidation} = fetch_with_index(New, Name), + ok = emqx_schema_validation_registry:update(OldValidation, Pos, NewValidation), + ok + end; post_config_update(?VALIDATIONS_CONF_PATH, {delete, Name}, _New, Old, _AppEnvs) -> {Pos, Validation} = fetch_with_index(Old, Name), ok = emqx_schema_validation_registry:delete(Validation, Pos), @@ -161,16 +173,19 @@ post_config_update([?CONF_ROOT], {merge, _}, ResultingConfig, Old, _AppEnvs) -> OldValidations, fun(#{name := N}) -> N end ), - NewValidations = - lists:map( - fun(#{name := Name}) -> - {Pos, Validation} = fetch_with_index(ResultingValidations, Name), - ok = emqx_schema_validation_registry:insert(Pos, Validation), - #{name => Name, pos => Pos} - end, - NewValidations0 - ), - {ok, #{new_validations => NewValidations}}; + maybe + ok ?= multi_assert_referenced_schemas_exist(NewValidations0), + NewValidations = + lists:map( + fun(#{name := Name}) -> + {Pos, Validation} = fetch_with_index(ResultingValidations, Name), + ok = emqx_schema_validation_registry:insert(Pos, Validation), + #{name => Name, pos => Pos} + end, + NewValidations0 + ), + {ok, #{new_validations => NewValidations}} + end; post_config_update([?CONF_ROOT], {replace, Input}, ResultingConfig, Old, _AppEnvs) -> #{ new_validations := NewValidations, @@ -179,32 +194,46 @@ post_config_update([?CONF_ROOT], {replace, Input}, ResultingConfig, Old, _AppEnv } = prepare_config_replace(Input, Old), #{validations := ResultingValidations} = ResultingConfig, #{validations := OldValidations} = Old, - lists:foreach( - fun(Name) -> - {Pos, Validation} = fetch_with_index(OldValidations, Name), - ok = emqx_schema_validation_registry:delete(Validation, Pos) - end, - DeletedValidations - ), - lists:foreach( - fun(Name) -> - {Pos, Validation} = fetch_with_index(ResultingValidations, Name), - ok = emqx_schema_validation_registry:insert(Pos, Validation) - end, - NewValidations - ), - ChangedValidations = - lists:map( + NewOrChangedValidationNames = NewValidations ++ ChangedValidations0, + maybe + ok ?= + multi_assert_referenced_schemas_exist( + lists:filter( + fun(#{name := N}) -> + lists:member(N, NewOrChangedValidationNames) + end, + ResultingValidations + ) + ), + lists:foreach( fun(Name) -> - {_Pos, OldValidation} = fetch_with_index(OldValidations, Name), - {Pos, NewValidation} = fetch_with_index(ResultingValidations, Name), - ok = emqx_schema_validation_registry:update(OldValidation, Pos, NewValidation), - #{name => Name, pos => Pos} + {Pos, 
Validation} = fetch_with_index(OldValidations, Name), + ok = emqx_schema_validation_registry:delete(Validation, Pos) end, - ChangedValidations0 + DeletedValidations ), - ok = emqx_schema_validation_registry:reindex_positions(ResultingValidations, OldValidations), - {ok, #{changed_validations => ChangedValidations}}. + lists:foreach( + fun(Name) -> + {Pos, Validation} = fetch_with_index(ResultingValidations, Name), + ok = emqx_schema_validation_registry:insert(Pos, Validation) + end, + NewValidations + ), + ChangedValidations = + lists:map( + fun(Name) -> + {_Pos, OldValidation} = fetch_with_index(OldValidations, Name), + {Pos, NewValidation} = fetch_with_index(ResultingValidations, Name), + ok = emqx_schema_validation_registry:update(OldValidation, Pos, NewValidation), + #{name => Name, pos => Pos} + end, + ChangedValidations0 + ), + ok = emqx_schema_validation_registry:reindex_positions( + ResultingValidations, OldValidations + ), + {ok, #{changed_validations => ChangedValidations}} + end. %%------------------------------------------------------------------------------ %% `emqx_config_backup' API @@ -388,3 +417,65 @@ prepare_config_replace(NewConfig, OldConfig) -> changed_validations => ChangedValidations0 ++ ChangedValidations1, deleted_validations => DeletedValidations }. + +-spec assert_referenced_schemas_exist(raw_validation()) -> ok | {error, map()}. +assert_referenced_schemas_exist(RawValidation) -> + #{<<"checks">> := RawChecks} = RawValidation, + SchemasToCheck = + lists:filtermap( + fun + (#{<<"schema">> := SchemaName} = Check) -> + %% so far, only protobuf has inner types + InnerPath = + case maps:find(<<"message_type">>, Check) of + {ok, MessageType} -> [MessageType]; + error -> [] + end, + {true, {SchemaName, InnerPath}}; + (_Check) -> + false + end, + RawChecks + ), + do_assert_referenced_schemas_exist(SchemasToCheck). + +do_assert_referenced_schemas_exist(SchemasToCheck) -> + MissingSchemas = + lists:foldl( + fun({SchemaName, InnerPath}, Acc) -> + case emqx_schema_registry:is_existing_type(SchemaName, InnerPath) of + true -> + Acc; + false -> + [[SchemaName | InnerPath] | Acc] + end + end, + [], + SchemasToCheck + ), + case MissingSchemas of + [] -> + ok; + [_ | _] -> + {error, #{missing_schemas => MissingSchemas}} + end. + +-spec multi_assert_referenced_schemas_exist([validation()]) -> ok | {error, map()}. +multi_assert_referenced_schemas_exist(Validations) -> + SchemasToCheck = + lists:filtermap( + fun + (#{schema := SchemaName} = Check) -> + %% so far, only protobuf has inner types + InnerPath = + case maps:find(message_type, Check) of + {ok, MessageType} -> [MessageType]; + error -> [] + end, + {true, {SchemaName, InnerPath}}; + (_Check) -> + false + end, + [Check || #{checks := Checks} <- Validations, Check <- Checks] + ), + do_assert_referenced_schemas_exist(SchemasToCheck). diff --git a/apps/emqx_schema_validation/test/emqx_schema_validation_http_api_SUITE.erl b/apps/emqx_schema_validation/test/emqx_schema_validation_http_api_SUITE.erl index 41731fa1b..76a54ebcf 100644 --- a/apps/emqx_schema_validation/test/emqx_schema_validation_http_api_SUITE.erl +++ b/apps/emqx_schema_validation/test/emqx_schema_validation_http_api_SUITE.erl @@ -356,25 +356,36 @@ protobuf_invalid_payloads() -> ]. 
protobuf_create_serde(SerdeName) -> - Source = - << - "message Person {\n" - " required string name = 1;\n" - " required int32 id = 2;\n" - " optional string email = 3;\n" - " }\n" - "message UnionValue {\n" - " oneof u {\n" - " int32 a = 1;\n" - " string b = 2;\n" - " }\n" - "}" - >>, + protobuf_upsert_serde(SerdeName, <<"Person">>). + +protobuf_upsert_serde(SerdeName, MessageType) -> + Source = protobuf_source(MessageType), Schema = #{type => protobuf, source => Source}, ok = emqx_schema_registry:add_schema(SerdeName, Schema), on_exit(fun() -> ok = emqx_schema_registry:delete_schema(SerdeName) end), ok. +protobuf_source(MessageType) -> + iolist_to_binary( + [ + <<"message ">>, + MessageType, + <<" {\n">>, + << + " required string name = 1;\n" + " required int32 id = 2;\n" + " optional string email = 3;\n" + " }\n" + "message UnionValue {\n" + " oneof u {\n" + " int32 a = 1;\n" + " string b = 2;\n" + " }\n" + "}" + >> + ] + ). + %% Checks that the internal order in the registry/index matches expectation. assert_index_order(ExpectedOrder, Topic, Comment) -> ?assertEqual( @@ -1041,6 +1052,7 @@ t_duplicated_schema_checks(_Config) -> Name1 = <<"foo">>, SerdeName = <<"myserde">>, Check = schema_check(json, SerdeName), + json_create_serde(SerdeName), Validation1 = validation(Name1, [Check, sql_check(), Check]), ?assertMatch({400, _}, insert(Validation1)), @@ -1130,18 +1142,87 @@ t_multiple_validations(_Config) -> ok. +%% Test that we validate schema registry serde existency when using the HTTP API. t_schema_check_non_existent_serde(_Config) -> SerdeName = <<"idontexist">>, Name1 = <<"foo">>, + Check1 = schema_check(json, SerdeName), Validation1 = validation(Name1, [Check1]), - {201, _} = insert(Validation1), + ?assertMatch({400, _}, insert(Validation1)), - C = connect(<<"c1">>), - {ok, _, [_]} = emqtt:subscribe(C, <<"t/#">>), + Check2 = schema_check(avro, SerdeName), + Validation2 = validation(Name1, [Check2]), + ?assertMatch({400, _}, insert(Validation2)), - ok = publish(C, <<"t/1">>, #{i => 10, s => <<"s">>}), - ?assertNotReceive({publish, _}), + MessageType = <<"idontexisteither">>, + Check3 = schema_check(protobuf, SerdeName, #{<<"message_type">> => MessageType}), + Validation3 = validation(Name1, [Check3]), + ?assertMatch({400, _}, insert(Validation3)), + + protobuf_create_serde(SerdeName), + %% Still fails because reference message type doesn't exist. + ?assertMatch({400, _}, insert(Validation3)), + + ok. + +%% Test that we validate schema registry serde existency when loading configs. 
+t_schema_check_non_existent_serde_load_config(_Config) -> + Name1 = <<"1">>, + SerdeName1 = <<"serde1">>, + MessageType1 = <<"mt">>, + Check1A = schema_check(protobuf, SerdeName1, #{<<"message_type">> => MessageType1}), + Validation1A = validation(Name1, [Check1A]), + protobuf_upsert_serde(SerdeName1, MessageType1), + {201, _} = insert(Validation1A), + Name2 = <<"2">>, + SerdeName2 = <<"serde2">>, + Check2A = schema_check(json, SerdeName2), + Validation2A = validation(Name2, [Check2A]), + json_create_serde(SerdeName2), + {201, _} = insert(Validation2A), + + %% Config to load + %% Will replace existing config + MissingMessageType = <<"missing_mt">>, + Check1B = schema_check(protobuf, SerdeName1, #{<<"message_type">> => MissingMessageType}), + Validation1B = validation(Name1, [Check1B]), + + %% Will replace existing config + MissingSerdeName1 = <<"missing1">>, + Check2B = schema_check(json, MissingSerdeName1), + Validation2B = validation(Name2, [Check2B]), + + %% New validation; should be appended + Name3 = <<"3">>, + MissingSerdeName2 = <<"missing2">>, + Check3 = schema_check(avro, MissingSerdeName2), + Validation3 = validation(Name3, [Check3]), + + ConfRootBin = <<"schema_validation">>, + ConfigToLoad1 = #{ + ConfRootBin => #{ + <<"validations">> => [Validation1B, Validation2B, Validation3] + } + }, + ConfigToLoadBin1 = iolist_to_binary(hocon_pp:do(ConfigToLoad1, #{})), + %% Merge + ResMerge = emqx_conf_cli:load_config(ConfigToLoadBin1, #{mode => merge}), + ?assertMatch({error, _}, ResMerge), + {error, ErrorMessage1} = ResMerge, + ?assertEqual(match, re:run(ErrorMessage1, <<"missing_schemas">>, [{capture, none}])), + ?assertEqual(match, re:run(ErrorMessage1, MissingSerdeName1, [{capture, none}])), + ?assertEqual(match, re:run(ErrorMessage1, MissingSerdeName2, [{capture, none}])), + ?assertEqual(match, re:run(ErrorMessage1, MissingMessageType, [{capture, none}])), + + %% Replace + ResReplace = emqx_conf_cli:load_config(ConfigToLoadBin1, #{mode => replace}), + ?assertMatch({error, _}, ResReplace), + {error, ErrorMessage2} = ResReplace, + ?assertEqual(match, re:run(ErrorMessage2, <<"missing_schemas">>, [{capture, none}])), + ?assertEqual(match, re:run(ErrorMessage2, MissingSerdeName1, [{capture, none}])), + ?assertEqual(match, re:run(ErrorMessage2, MissingSerdeName2, [{capture, none}])), + ?assertEqual(match, re:run(ErrorMessage2, MissingMessageType, [{capture, none}])), ok. @@ -1232,16 +1313,16 @@ t_schema_check_protobuf(_Config) -> ), %% Bad config: unknown message name - Check2 = schema_check(protobuf, SerdeName, #{<<"message_type">> => <<"idontexist">>}), - Validation2 = validation(Name1, [Check2]), - {200, _} = update(Validation2), + %% Schema updated to use another message type after validation was created + OtherMessageType = <<"NewPersonType">>, + protobuf_upsert_serde(SerdeName, OtherMessageType), lists:foreach( fun(Payload) -> ok = publish(C, <<"t/1">>, {raw, Payload}), ?assertNotReceive({publish, _}) end, - protobuf_valid_payloads(SerdeName, MessageType) + protobuf_valid_payloads(SerdeName, OtherMessageType) ), ok. diff --git a/changes/ee/feat-13210.en.md b/changes/ee/feat-13210.en.md new file mode 100644 index 000000000..ed059a873 --- /dev/null +++ b/changes/ee/feat-13210.en.md @@ -0,0 +1 @@ +Now, when inserting or updating a Schema Validation, EMQX will check if the referenced schemas and message types exist in Schema Registry. 
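The new post_config_update/5 clauses rely on the OTP `maybe` expression enabled with `-feature(maybe_expr, enable).`: each `ok ?= Expr` step continues only when Expr evaluates to `ok`, and any other result, such as the `{error, #{missing_schemas => ...}}` tuple built by do_assert_referenced_schemas_exist/1, becomes the value of the whole `maybe` block, aborting the config change. A standalone sketch of that control flow (the function names below are illustrative, not part of the patch; requires OTP 25 or newer with the feature enabled):

    -feature(maybe_expr, enable).

    %% Run the schema-existence check first; apply the config change only if it passes.
    check_then_apply(RawValidation, ApplyFun) ->
        maybe
            ok ?= assert_referenced_schemas_exist(RawValidation),
            ok ?= ApplyFun(),
            ok
        end.

If the schema check fails, ApplyFun() never runs and the caller receives the error unchanged, which is why the tests above can assert a 400 response from the HTTP API and match `missing_schemas` in the error returned by emqx_conf_cli:load_config/2.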
From 337009c3a00be8f1bfeda0fa7deb1d8ed37a855b Mon Sep 17 00:00:00 2001 From: Thales Macedo Garitezi Date: Mon, 10 Jun 2024 13:45:29 -0300 Subject: [PATCH 33/33] fix: declare `emqx_schema_registry` as a dependency of `emqx_schema_validation` --- apps/emqx_schema_registry/src/emqx_schema_registry.app.src | 3 ++- apps/emqx_schema_validation/src/emqx_schema_validation.app.src | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/apps/emqx_schema_registry/src/emqx_schema_registry.app.src b/apps/emqx_schema_registry/src/emqx_schema_registry.app.src index efd9f1162..7577f8aeb 100644 --- a/apps/emqx_schema_registry/src/emqx_schema_registry.app.src +++ b/apps/emqx_schema_registry/src/emqx_schema_registry.app.src @@ -11,7 +11,8 @@ stdlib, erlavro, gpb, - jesse + jesse, + emqx ]}, {env, []}, {modules, []}, diff --git a/apps/emqx_schema_validation/src/emqx_schema_validation.app.src b/apps/emqx_schema_validation/src/emqx_schema_validation.app.src index 773e0fff0..2dfe710db 100644 --- a/apps/emqx_schema_validation/src/emqx_schema_validation.app.src +++ b/apps/emqx_schema_validation/src/emqx_schema_validation.app.src @@ -5,7 +5,8 @@ {mod, {emqx_schema_validation_app, []}}, {applications, [ kernel, - stdlib + stdlib, + emqx_schema_registry ]}, {env, []}, {modules, []},